Implement refreshing the Cloud token in Enso libraries (#9390)

- Closes #9300
- Now the Enso libraries are themselves capable of refreshing the access token, so there are no more problems if the token expires during a long-running workflow (a short excerpt of the refresh logic is included below).
- Adds `get_optional_field`, a sibling to `get_required_field`, for more unified parsing of JSON responses from the Cloud.
- Adds an `expected_type` argument that checks the type of extracted fields. This way, if the response is malformed we get a nice Enso Cloud error telling us what is wrong with the payload instead of a `Type_Error` further down the line (see the usage sketch below).
- Fixes `Test.expect_panic_with` to catch only panics. Previously it also handled dataflow errors, but those should be asserted with `.should_fail_with` instead; the two scenarios should be distinguished (see the example below).
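
The heart of the refresh behaviour, excerpted from the new `Standard.Base.Enso_Cloud.Internal.Authentication` module (full diff below): the token is refreshed pro-actively whenever it is less than 2 minutes from expiring, and `http_request` additionally forces a refresh and retries when the Cloud responds with 401/403.

```
## Excerpt from the new Authentication_Service type (not a standalone snippet):
get_access_token self -> Text =
    is_still_valid = self.auth_data.get.expire_at > (Date_Time.now + token_early_refresh_period)
    if is_still_valid then self.auth_data.get.access_token else
        # The token has expired or will expire soon, so we need to refresh it.
        self.force_refresh
        self.auth_data.get.access_token
```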
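
A sketch of how the new helpers are meant to be used when parsing a Cloud response. The `parse_example` function and its field names are made up for illustration; the helper signatures come from `Public_Utils`:

```
from Standard.Base import all
from Standard.Base.Enso_Cloud.Public_Utils import get_optional_field, get_required_field

## Hypothetical parser of a Cloud JSON payload.
parse_example js_object =
    name = get_required_field "name" js_object expected_type=Text
    size = get_required_field "size" js_object expected_type=Integer
    # Missing or `null` optional fields fall back to `if_missing`:
    description = get_optional_field "description" js_object if_missing="" expected_type=Text
    [name, size, description]
```

A malformed payload (e.g. `size` arriving as a string) now surfaces as an `Enso_Cloud_Error.Invalid_Response_Payload` naming the offending field, rather than a `Type_Error` somewhere else.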
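
The panic vs. dataflow-error distinction in tests, adapted from the updated Regex specs (assumes the usual `Standard.Test` imports are in scope):

```
pattern = Regex.compile "abc"
# A panic (here the Type_Error raised by the new `input : Text` check) is asserted with `expect_panic`:
Test.expect_panic Type_Error <| pattern.matches 1

# A dataflow error is still asserted with `should_fail_with`:
empty_pattern = Regex.compile ""
empty_pattern.matches "ABC" . should_fail_with Illegal_Argument
```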
Commit 6e5b4d93a3 (parent 53e2636b8c)
Radosław Waśko, 2024-03-19 20:26:34 +01:00, committed by GitHub
50 changed files with 959 additions and 405 deletions

View File

@ -14,7 +14,7 @@ decode_aws_credential json -> AWS_Credential =
"aws_auth" -> case get_required_field "subType" json of
"default" -> AWS_Credential.Default
"profile" ->
profile = get_required_field "profile" json
profile = get_required_field "profile" json expected_type=Text
AWS_Credential.Profile profile
"access_key" ->
access_key_id = get_required_field "accessKeyId" json |> parse_secure_value

View File

@ -1,6 +1,6 @@
from Standard.Base import all
from Standard.Base.Enso_Cloud.Data_Link import parse_format
from Standard.Base.Enso_Cloud.Public_Utils import get_required_field
from Standard.Base.Enso_Cloud.Public_Utils import get_optional_field, get_required_field
import project.AWS_Credential.AWS_Credential
import project.S3.S3_File.S3_File
@ -13,9 +13,9 @@ type S3_Data_Link
## PRIVATE
parse json -> S3_Data_Link =
uri = get_required_field "uri" json
uri = get_required_field "uri" json expected_type=Text
auth = decode_aws_credential (get_required_field "auth" json)
format = parse_format (json.get "format" Nothing)
format = parse_format (get_optional_field "format" json)
S3_Data_Link.Value uri format auth
## PRIVATE

View File

@ -271,7 +271,7 @@ type Filter_Action
Remove
## PRIVATE
sql_like_to_regex sql_pattern =
sql_like_to_regex (sql_pattern : Text) =
regex_pattern = Regex_Utils.sql_like_pattern_to_regex sql_pattern
Regex.compile regex_pattern

View File

@ -280,7 +280,7 @@ handle_unmatched_type expected_types actual_value =
m = Meta.meta actual_value
return_type_error =
expected_types_str = expected_types . map .to_text . join " | "
Error.throw (Type_Error.Error expected_types_str actual_value "range")
Panic.throw (Type_Error.Error expected_types_str actual_value "Expected range to be {exp}, but got {got}")
case m of
_ : Meta.Constructor ->
declaring_type = m.declaring_type

View File

@ -3,9 +3,9 @@ import project.Data.Locale.Locale
import project.Data.Numbers.Integer
import project.Data.Ordering.Ordering
import project.Data.Text.Normalization.Normalization
import project.Error.Error
import project.Errors.Common.Type_Error
import project.Meta
import project.Panic.Panic
from project.Data.Boolean import Boolean, False, True
polyglot java import org.enso.base.Text_Utils
@ -103,7 +103,7 @@ type Text
@locale Locale.default_widget
compare_to_ignore_case : Text -> Locale -> Ordering
compare_to_ignore_case self that locale=Locale.default =
if that.is_nothing then Error.throw (Type_Error.Error Text that "that") else
if that.is_nothing then Panic.throw (Type_Error.Error Text that "Expected `that` to be a Text, but got {got}") else
comparison_result = Text_Utils.compare_normalized_ignoring_case self that locale.java_locale
Ordering.from_sign comparison_result

View File

@ -1,15 +0,0 @@
import project.Any.Any
import project.Data.Text.Text
import project.Error.Error
import project.Errors.Common.Type_Error
import project.Meta
## PRIVATE
Assert that `text_maybe` is a Text, then call the action.
expect_text : Any -> Any -> Any ! Type_Error
expect_text text_maybe ~action = case text_maybe of
_ : Text -> action
_ ->
Error.throw (Type_Error.Error Text text_maybe "text_maybe")

View File

@ -6,7 +6,6 @@ import project.Data.Map.Map
import project.Data.Numbers.Integer
import project.Data.Range.Range
import project.Data.Text.Case_Sensitivity.Case_Sensitivity
import project.Data.Text.Helpers
import project.Data.Text.Prim_Text_Helper
import project.Data.Text.Regex.Internal.Match_Iterator.Match_Iterator
import project.Data.Text.Regex.Internal.Match_Iterator.Match_Iterator_Value
@ -87,10 +86,9 @@ type Regex
Arguments:
- input: The text to check for matching.
matches : Text -> Boolean | Type_Error
matches self input =
Helpers.expect_text input <|
m = self.internal_regex_object.exec input 0
m . isMatch && m.getStart 0 == 0 && m.getEnd 0 == input.length
matches self (input : Text) =
m = self.internal_regex_object.exec input 0
m . isMatch && m.getStart 0 == 0 && m.getEnd 0 == input.length
## GROUP Text
ICON find
@ -102,12 +100,11 @@ type Regex
Arguments:
- input: The text to match the pattern described by `self` against.
match : Text -> Match | Nothing ! Type_Error
match self input =
Helpers.expect_text input <|
it = Match_Iterator.new self input
case it.next of
Match_Iterator_Value.Next _ match _ -> match
Match_Iterator_Value.Last _ -> Nothing
match self (input : Text) =
it = Match_Iterator.new self input
case it.next of
Match_Iterator_Value.Next _ match _ -> match
Match_Iterator_Value.Last _ -> Nothing
## GROUP Text
ICON find
@ -119,19 +116,18 @@ type Regex
Arguments:
- input: The text to match the pattern described by `self` against.
match_all : Text -> Vector Match ! Type_Error | Illegal_Argument
match_all self input =
Helpers.expect_text input <|
pattern_is_empty = self.pattern == ''
if pattern_is_empty then Error.throw (Illegal_Argument.Error "Cannot run match_all with an empty pattern") else
builder = Vector.new_builder
it = Match_Iterator.new self input
go it = case it.next of
Match_Iterator_Value.Next _ match next_it ->
builder.append match
@Tail_Call go next_it
Match_Iterator_Value.Last _ -> Nothing
go it
builder.to_vector
match_all self (input : Text) =
pattern_is_empty = self.pattern == ''
if pattern_is_empty then Error.throw (Illegal_Argument.Error "Cannot run match_all with an empty pattern") else
builder = Vector.new_builder
it = Match_Iterator.new self input
go it = case it.next of
Match_Iterator_Value.Next _ match next_it ->
builder.append match
@Tail_Call go next_it
Match_Iterator_Value.Last _ -> Nothing
go it
builder.to_vector
## GROUP Selections
Tries to match the provided `input` against the pattern `self`.
@ -142,9 +138,8 @@ type Regex
Arguments:
- input: The text to match the pattern described by `self` against.
find : Text -> Text | Nothing | Type_Error
find self input =
Helpers.expect_text input <|
match_to_group_maybe <| self.match input
find self (input : Text) =
match_to_group_maybe <| self.match input
## GROUP Selections
Tries to match the provided `input` against the pattern `self`.
@ -155,9 +150,8 @@ type Regex
Arguments:
- input: The text to match the pattern described by `self` against.
find_all : Text -> Vector Text ! Type_Error
find_all self input =
Helpers.expect_text input <|
self.match_all input . map match_to_group_maybe
find_all self (input : Text) =
self.match_all input . map match_to_group_maybe
## GROUP Conversions
Splits the `input` text based on the pattern described by `self`.
@ -190,19 +184,18 @@ type Regex
texts = pattern.split input
texts . should_equal ["abcdefghij"]
split : Text -> Boolean -> Vector Text | Type_Error
split self input only_first=False =
Helpers.expect_text input <|
builder = Vector.new_builder
it = Match_Iterator.new self input
go next = case next of
Match_Iterator_Value.Next filler _ next_it ->
builder.append filler.text
next = if only_first then next_it.early_exit else next_it.next
@Tail_Call go next
Match_Iterator_Value.Last filler ->
builder.append filler.text
go it.next
builder.to_vector
split self (input : Text) (only_first : Boolean = False) =
builder = Vector.new_builder
it = Match_Iterator.new self input
go next = case next of
Match_Iterator_Value.Next filler _ next_it ->
builder.append filler.text
next = if only_first then next_it.early_exit else next_it.next
@Tail_Call go next
Match_Iterator_Value.Last filler ->
builder.append filler.text
go it.next
builder.to_vector
## GROUP Conversions
Takes an input string and returns all the matches as a `Vector Text`.
@ -290,23 +283,22 @@ type Regex
pattern = Regex.compile "([a-z]+)"
pattern.replace "foo bar, baz" "[$1]" == "[foo] [bar], [baz]"
replace : Text -> Text -> Boolean -> Text | Type_Error
replace self input replacement only_first=False =
Helpers.expect_text input <|
it = Match_Iterator.new self input
case it of
Match_Iterator_Value.Last filler -> filler.text
_ ->
replacer = Replacer.new replacement self
replace self (input : Text) (replacement : Text) (only_first : Boolean = False) =
it = Match_Iterator.new self input
case it of
Match_Iterator_Value.Last filler -> filler.text
_ ->
replacer = Replacer.new replacement self
replacer.if_not_error <|
go next current = case next of
Match_Iterator_Value.Next filler match next_it ->
new_value = current + filler.text + (replacer.replace match)
next = if only_first then next_it.early_exit else next_it.next
@Tail_Call go next new_value
Match_Iterator_Value.Last filler ->
current + filler.text
go it.next ""
replacer.if_not_error <|
go next current = case next of
Match_Iterator_Value.Next filler match next_it ->
new_value = current + filler.text + (replacer.replace match)
next = if only_first then next_it.early_exit else next_it.next
@Tail_Call go next new_value
Match_Iterator_Value.Last filler ->
current + filler.text
go it.next ""
## PRIVATE
@ -499,4 +491,3 @@ type Regex_Syntax_Error
compile. Must be non-empty.
regex : Text -> Regex ! Regex_Syntax_Error | Illegal_Argument
regex expression:Text = Regex.compile expression

View File

@ -103,7 +103,7 @@ type Duration
example_duration = Duration.new hours=2
new : Integer -> Integer -> Integer -> Integer -> Integer -> Duration
new hours=0 minutes=0 seconds=0 milliseconds=0 nanoseconds=0 =
new (hours : Integer = 0) (minutes : Integer = 0) (seconds : Integer = 0) (milliseconds : Integer = 0) (nanoseconds : Integer = 0) =
new_builtin hours minutes seconds milliseconds nanoseconds
## ICON date_and_time

View File

@ -7,6 +7,7 @@ import project.Error.Error
import project.Errors.Illegal_State.Illegal_State
import project.Errors.Problem_Behavior.Problem_Behavior
import project.Errors.Unimplemented.Unimplemented
import project.Meta
import project.Nothing.Nothing
import project.System.File.File
import project.System.File.Generic.Writable_File.Writable_File
@ -59,10 +60,10 @@ type Data_Link_Format
## PRIVATE
interpret_json_as_datalink json =
typ = get_required_field "type" json
typ = get_required_field "type" json expected_type=Text
case DataLinkSPI.findDataLinkType typ of
Nothing ->
library_name = get_required_field "libraryName" json
library_name = get_required_field "libraryName" json expected_type=Text
Error.throw (Illegal_State.Error "The data link for "+typ+" is provided by the library "+library_name+" which is not loaded. Please import the library, and if necessary, restart the project.")
data_link_type ->
data_link_type.parse json
@ -73,15 +74,16 @@ read_datalink json on_problems =
data_link_instance.read on_problems
## PRIVATE
parse_secure_value (json : Text | JS_Object) -> Text | Enso_Secret =
parse_secure_value json -> Text | Enso_Secret =
case json of
raw_text : Text -> raw_text
_ : JS_Object ->
case get_required_field "type" json of
case get_required_field "type" json expected_type=Text of
"secret" ->
secret_path = get_required_field "secretPath" json
secret_path = get_required_field "secretPath" json expected_type=Text
Enso_Secret.get secret_path
other -> Error.throw (Illegal_State.Error "Unexpected value inside of a data-link: "+other+".")
_ -> Error.throw (Illegal_State.Error "Parsing a secure value failed. Expected either a string or an object representing a secret, but got "+(Meta.type_of json . to_display_text)+".")
## PRIVATE
parse_format json = case json of

View File

@ -108,14 +108,14 @@ type Enso_File
size : Integer
size self -> Integer = if self.is_regular_file.not then Error.throw (Illegal_Argument.Error "`size` can only be queried for regular files.") else
metadata = get_file_description self |> get_required_field "metadata"
get_required_field "size" metadata
get_required_field "size" metadata expected_type=Integer
## GROUP Metadata
Gets the creation time of a file.
creation_time : Date_Time
creation_time self -> Date_Time = if self.is_directory then Unimplemented.throw "Enso_File.creation_time is not implemented yet for directories." else
metadata = get_file_description self |> get_required_field "metadata"
Date_Time.parse (get_required_field "created_at" metadata) Date_Time_Formatter.iso_offset_date_time
Date_Time.parse (get_required_field "created_at" metadata expected_type=Text) Date_Time_Formatter.iso_offset_date_time
. catch Time_Error error-> Error.throw (Enso_Cloud_Error.Invalid_Response_Payload error)
## GROUP Metadata
@ -123,7 +123,7 @@ type Enso_File
last_modified_time : Date_Time
last_modified_time self = if self.is_directory then Unimplemented.throw "Enso_File.last_modified_time is not implemented yet for directories." else
metadata = get_file_description self |> get_required_field "metadata"
Date_Time.parse (get_required_field "modified_at" metadata) Date_Time_Formatter.iso_offset_date_time
Date_Time.parse (get_required_field "modified_at" metadata expected_type=Text) Date_Time_Formatter.iso_offset_date_time
. catch Time_Error error-> Error.throw (Enso_Cloud_Error.Invalid_Response_Payload error)
## GROUP Metadata
@ -386,5 +386,5 @@ get_file_description file:Enso_File -> JS_Object =
stored anywhere.
get_download_url_for_file file:Enso_File -> Text =
file_description = get_file_description file
presigned_url = file_description |> get_required_field "url"
presigned_url = file_description |> get_required_field "url" expected_type=Text
presigned_url

View File

@ -10,7 +10,7 @@ import project.Network.HTTP.HTTP
import project.Network.HTTP.HTTP_Method.HTTP_Method
import project.Nothing.Nothing
from project.Data.Boolean import Boolean, False, True
from project.Enso_Cloud.Public_Utils import get_required_field
from project.Enso_Cloud.Public_Utils import get_optional_field, get_required_field
type Enso_User
## PRIVATE
@ -45,7 +45,10 @@ type Enso_User
## PRIVATE
Enso_User.from (that:JS_Object) =
root_folder_id = get_required_field "rootDirectoryId" that
root_folder_id = get_required_field "rootDirectoryId" that expected_type=Text
root_folder = Enso_File.Value "" root_folder_id "" Enso_Asset_Type.Directory
is_enabled = that.get "isEnabled" True
Enso_User.User (get_required_field "name" that) (get_required_field "email" that) (get_required_field "id" that) root_folder is_enabled
is_enabled = get_optional_field "isEnabled" that if_missing=True expected_type=Boolean
name = get_required_field "name" that expected_type=Text
email = get_required_field "email" that expected_type=Text
id = get_required_field "id" that expected_type=Text
Enso_User.User name email id root_folder is_enabled

View File

@ -5,10 +5,23 @@ import project.Network.HTTP.HTTP_Status_Code.HTTP_Status_Code
## PRIVATE
Error if the user is not logged into Enso Cloud.
type Not_Logged_In
## PRIVATE
Error
## PRIVATE
to_display_text : Text
to_display_text self = "Not logged into Enso cloud. Please log in and restart."
## PRIVATE
Indicates that the session with Enso Cloud has expired and the user needs to log in again.
type Cloud_Session_Expired
## PRIVATE
The `underlying_error` is not shown in the error message, but is saved for debugging purposes.
Error underlying_error
to_display_text : Text
to_display_text self = "Login session with Enso Cloud has expired. Please log in again and restart."
## PRIVATE
type Enso_Cloud_Error
## PRIVATE

View File

@ -0,0 +1,182 @@
private
import project.Data.Json.Invalid_JSON
import project.Data.Json.JS_Object
import project.Data.Numbers.Integer
import project.Data.Text.Text
import project.Data.Time.Date_Time.Date_Time
import project.Data.Time.Date_Time_Formatter.Date_Time_Formatter
import project.Data.Time.Duration.Duration
import project.Data.Time.Time_Zone.Time_Zone
import project.Enso_Cloud.Errors.Cloud_Session_Expired
import project.Enso_Cloud.Errors.Enso_Cloud_Error
import project.Enso_Cloud.Errors.Not_Logged_In
import project.Errors.File_Error.File_Error
import project.Errors.Illegal_State.Illegal_State
import project.Errors.Time_Error.Time_Error
import project.Meta
import project.Network.HTTP.Header.Header
import project.Network.HTTP.HTTP
import project.Network.HTTP.HTTP_Error.HTTP_Error
import project.Network.HTTP.HTTP_Method.HTTP_Method
import project.Network.HTTP.HTTP_Status_Code.HTTP_Status_Code
import project.Network.HTTP.Request.Request
import project.Network.HTTP.Request_Body.Request_Body
import project.Network.HTTP.Request_Error
import project.Network.HTTP.Response.Response
import project.Network.URI.URI
import project.Nothing.Nothing
import project.Panic.Panic
import project.Runtime.Ref.Ref
import project.System.Environment
import project.System.File.File
from project.Data.Boolean import Boolean, False, True
from project.Data.Text.Extensions import all
from project.Enso_Cloud.Public_Utils import get_required_field
polyglot java import org.enso.base.enso_cloud.AuthenticationProvider
## PRIVATE
Returns an access token for the current user, that should be valid for
at least 2 more minutes.
If the current token is expired or <2m from expiration, it will be refreshed
and a new one will be returned. Because of that, this method may make network
requests.
get_access_token : Text
get_access_token = AuthenticationProvider.getAccessToken
## PRIVATE
Forcibly refreshes the access token.
refresh_access_token : Nothing
refresh_access_token =
AuthenticationProvider.getAuthenticationServiceEnsoInstance.force_refresh
## PRIVATE
credentials_file : File
credentials_file = case Environment.get "ENSO_CLOUD_CREDENTIALS_FILE" of
Nothing -> File.home / ".enso" / "credentials"
path -> File.new path
## PRIVATE
type Authentication_Service
## PRIVATE
Instance (auth_data : Ref Authentication_Data)
## PRIVATE
get_access_token self -> Text =
is_still_valid = self.auth_data.get.expire_at > (Date_Time.now + token_early_refresh_period)
if is_still_valid then self.auth_data.get.access_token else
# The token has expired or will expire soon, so we need to refresh it.
self.force_refresh
self.auth_data.get.access_token
## PRIVATE
Refreshes the access token using the refresh token, regardless of its freshness.
force_refresh self -> Nothing =
refresh_token_data = Refresh_Token_Data.read_from_credentials
new_auth_data = refresh_token_data.fetch_new_access_token
Panic.rethrow <| self.auth_data.put new_auth_data
Nothing
## PRIVATE
new -> Authentication_Service =
initial_data = Authentication_Data.read_from_credentials
Authentication_Service.Instance (Ref.new initial_data)
## PRIVATE
The method called from Java that creates a new instance of the service, to be
stored in the global state.
instantiate_authentication_service =
# We rethrow any dataflow errors, because this is fed back to Java code that only understands exceptions (panics).
Panic.rethrow <|
Authentication_Service.new
## PRIVATE
type Authentication_Data
## PRIVATE
Value access_token:Text expire_at:Date_Time
## PRIVATE
read_from_credentials -> Authentication_Data =
content = credentials_file.read_text . catch File_Error error-> case error of
File_Error.Not_Found _ -> Panic.throw Not_Logged_In.Error
_ -> Panic.throw (Illegal_State.Error "Failed to read credentials file: "+error.to_display_text cause=error)
invalid_format_prefix = "Invalid credentials file format: "
as_json = content.parse_json.catch Invalid_JSON error->
Panic.throw (Illegal_State.Error invalid_format_prefix+error.to_display_text cause=error)
get_field = file_get_required_string_field as_json invalid_format_prefix
token = get_field "access_token"
expiration_date_string = get_field "expire_at"
expiration_date = Date_Time.parse expiration_date_string format=Date_Time_Formatter.iso_zoned_date_time . catch Time_Error error->
Panic.throw (Illegal_State.Error invalid_format_prefix+"invalid date format in `expire_at` field: "+error.to_display_text cause=error)
Authentication_Data.Value access_token=token expire_at=expiration_date
## PRIVATE
type Refresh_Token_Data
## PRIVATE
Value client_id:Text refresh_url:Text refresh_token:Text
## PRIVATE
read_from_credentials -> Refresh_Token_Data =
content = credentials_file.read_text . catch File_Error error->
Panic.throw (Illegal_State.Error "Token refresh failed - cannot read credentials file: "+error.to_display_text cause=error)
as_json = content.parse_json.catch Invalid_JSON error->
Panic.throw (Illegal_State.Error "Token refresh failed due to invalid credentials file format: "+error.to_display_text cause=error)
get_field = file_get_required_string_field as_json "Token refresh failed due to invalid credentials file format: "
client_id = get_field "client_id"
refresh_url = get_field "refresh_url"
refresh_token = get_field "refresh_token"
Refresh_Token_Data.Value client_id=client_id refresh_url=refresh_url refresh_token=refresh_token
## PRIVATE
fetch_new_access_token self -> Authentication_Data =
headers = [Header.content_type "application/x-amz-json-1.1", Header.new "X-Amz-Target" "AWSCognitoIdentityProviderService.InitiateAuth"]
auth_parameters = JS_Object.from_pairs [["REFRESH_TOKEN", self.refresh_token], ["DEVICE_KEY", Nothing]]
payload = JS_Object.from_pairs [["ClientId", self.client_id], ["AuthFlow", "REFRESH_TOKEN_AUTH"], ["AuthParameters", auth_parameters]]
response = HTTP.post self.refresh_url body=(Request_Body.Json payload) headers=headers
. catch HTTP_Error error-> case error of
HTTP_Error.Status_Error status _ _ ->
# If the status code is 400-499, then most likely the reason is that the session has expired, so we ask the user to log in again.
if (400 <= status.code) && (status.code < 500) then Panic.throw (Cloud_Session_Expired.Error error) else
# Otherwise, we fail with the generic error that gives more details.
Panic.throw (Enso_Cloud_Error.Connection_Error error)
_ -> Panic.throw (Enso_Cloud_Error.Connection_Error error)
. catch Request_Error error->
Panic.throw (Enso_Cloud_Error.Connection_Error error)
response_received_time = Date_Time.now
json = response.decode_as_json . catch Invalid_JSON error->
Panic.throw (Enso_Cloud_Error.Invalid_Response_Payload error)
auth_result = Panic.rethrow <| get_required_field "AuthenticationResult" json show_value=False expected_type=JS_Object
access_token = Panic.rethrow <| get_required_field "AccessToken" auth_result show_value=False expected_type=Text
token_type = Panic.rethrow <| get_required_field "TokenType" auth_result show_value=False expected_type=Text
expires_in = Panic.rethrow <| get_required_field "ExpiresIn" auth_result show_value=False expected_type=Integer
token_lifetime = Duration.new seconds=expires_in
if token_lifetime < token_early_refresh_period then
Panic.throw (Enso_Cloud_Error.Invalid_Response_Payload "Token lifetime is too short: "+token_lifetime.to_display_text+", smaller than our minimum lifetime of "+token_early_refresh_period.to_display_text+".")
expire_at = (response_received_time + token_lifetime) . at_zone Time_Zone.utc
if token_type != "Bearer" then
Panic.throw (Enso_Cloud_Error.Invalid_Response_Payload "Invalid `TokenType` field in response: expected `Bearer`, got `"+token_type+"`.")
Authentication_Data.Value access_token=access_token expire_at=expire_at
## PRIVATE
The amount of time before the token expiration that we pro-actively refresh
it to reduce the chance of it expiring during a request.
token_early_refresh_period = Duration.new minutes=2
## PRIVATE
A sibling to `get_required_field`.
This one raises `Illegal_State` error, because it is dealing with local files and not cloud responses.
file_get_required_string_field json prefix field_name = case json of
_ : JS_Object ->
result = json.get field_name if_missing=(Panic.throw (Illegal_State.Error prefix+"missing field `"+field_name+"`."))
case result of
_ : Text -> result
_ ->
got_type = Meta.type_of result . to_display_text
Panic.throw (Illegal_State.Error prefix+"expected `"+field_name+"` to be a string, but got "+got_type+".")
_ -> Panic.throw (Illegal_State.Error prefix+"expected an object, got "+(Meta.type_of json))

View File

@ -3,13 +3,12 @@ private
import project.Any.Any
import project.Data.Json.Invalid_JSON
import project.Data.Map.Map
import project.Data.Pair.Pair
import project.Data.Numbers.Integer
import project.Data.Text.Text
import project.Data.Vector.Vector
import project.Enso_Cloud.Enso_Secret.Derived_Secret_Value
import project.Enso_Cloud.Enso_Secret.Enso_Secret
import project.Enso_Cloud.Errors.Enso_Cloud_Error
import project.Enso_Cloud.Errors.Not_Logged_In
import project.Enso_Cloud.Internal.Authentication
import project.Error.Error
import project.Function.Function
import project.Network.HTTP.Header.Header
@ -23,46 +22,21 @@ import project.Network.HTTP.Request_Error
import project.Network.HTTP.Response.Response
import project.Network.URI.URI
import project.Nothing.Nothing
import project.Runtime.Ref.Ref
import project.System.Environment
import project.System.File.File
from project.Data.Boolean import Boolean, False, True
from project.Data.Text.Extensions import all
polyglot java import org.enso.base.enso_cloud.AuthenticationProvider
polyglot java import org.enso.base.enso_cloud.CloudAPI
## PRIVATE
cloud_root_uri = "" + AuthenticationProvider.getAPIRootURI
cloud_root_uri = "" + CloudAPI.getAPIRootURI
## PRIVATE
Construct the authorization header for the request
authorization_header : Header
authorization_header =
token = AuthenticationProvider.getToken.if_nothing <|
f = credentials_file
if f.exists.not then Error.throw Not_Logged_In else
access_token = read_access_token f
AuthenticationProvider.setToken access_token
token = Authentication.get_access_token
Header.authorization_bearer token
## PRIVATE
credentials_file : File
credentials_file = case Environment.get "ENSO_CLOUD_CREDENTIALS_FILE" of
Nothing -> File.home / ".enso" / "credentials"
path -> File.new path
## PRIVATE
Reads the access token from the provided credentials file.
It supports both the old (raw string) and new (JSON) formats.
read_access_token (file:File) -> Text =
content = file.read_text
as_json = content.parse_json
# If this is not valid JSON, we assume old format - raw token.
if as_json.is_error then content else
# Otherwise, we extract the token from JSON:
as_json.get "access_token" if_missing=(Error.throw "Invalid credentials file format: missing field `access_token`.")
## PRIVATE
Root address for listing folders
directory_api = cloud_root_uri + "directories"
@ -86,17 +60,19 @@ datalinks_api = cloud_root_uri + "connectors"
## PRIVATE
The current project directory that will be used as the working directory,
if the user is running in the Cloud.
internal_cloud_project_directory = AuthenticationProvider.getCurrentWorkingDirectory
internal_cloud_project_directory = CloudAPI.getCurrentWorkingDirectory
## PRIVATE
Flushes all cloud caches, including the authentication data
(so the next request will re-read the credentials file).
flush_caches : Nothing
flush_caches = AuthenticationProvider.flushCloudCaches
flush_caches = CloudAPI.flushCloudCaches
## PRIVATE
Performs a standard request to the Enso Cloud API,
parsing the result as JSON.
http_request_as_json (method : HTTP_Method) (url : URI) (body : Request_Body = Request_Body.Empty) (additional_headers : Vector = []) (error_handlers : Map Text (Any -> Any) = Map.empty) -> Any ! Enso_Cloud_Error =
response = http_request method url body additional_headers error_handlers
http_request_as_json (method : HTTP_Method) (url : URI) (body : Request_Body = Request_Body.Empty) (additional_headers : Vector = []) (error_handlers : Map Text (Any -> Any) = Map.empty) (retries : Integer = 3) -> Any ! Enso_Cloud_Error =
response = http_request method url body additional_headers error_handlers retries
response.decode_as_json.catch Invalid_JSON error->
Error.throw (Enso_Cloud_Error.Invalid_Response_Payload error)
@ -104,24 +80,33 @@ http_request_as_json (method : HTTP_Method) (url : URI) (body : Request_Body = R
Performs a standard request to the Enso Cloud API,
returning a raw response on success, but handling the error responses.
This method has built-in retry mechanism that may retry the request if it
failed due to a network error or access token expiration.
Custom error handlers can be provided as a mapping from error codes
(defined in the cloud project) to functions that take the full JSON payload
and return a custom error.
http_request (method : HTTP_Method) (url : URI) (body : Request_Body = Request_Body.Empty) (additional_headers : Vector = []) (error_handlers : Map Text (Any -> Any) = Map.empty) -> Response ! Enso_Cloud_Error =
http_request (method : HTTP_Method) (url : URI) (body : Request_Body = Request_Body.Empty) (additional_headers : Vector = []) (error_handlers : Map Text (Any -> Any) = Map.empty) (retries : Integer = 3) -> Response ! Enso_Cloud_Error =
all_headers = [authorization_header] + additional_headers
as_connection_error err = Error.throw (Enso_Cloud_Error.Connection_Error err)
response = HTTP.new.request (Request.new method url headers=all_headers body=body) error_on_failure_code=False
. catch HTTP_Error as_connection_error
. catch Request_Error as_connection_error
if response.code.is_success then response else
if response.code == HTTP_Status_Code.unauthorized then Error.throw Enso_Cloud_Error.Unauthorized else
payload = response.decode_as_text
## If the payload can be parsed as JSON, contains a `code` field and this `code` is found in the error
handlers, a specialized handler will be used for the error. Otherwise, a generic Unexpected_Service_Error
will be reported, containing the payload for inspection.
json_payload = payload.parse_json.catch Invalid_JSON (_->Nothing)
error_code = json_payload.get "code"
handler = error_code.if_not_nothing <| error_handlers.get error_code
case handler of
Nothing -> Error.throw (Enso_Cloud_Error.Unexpected_Service_Error response.code payload)
_ : Function -> handler json_payload
if response.is_error && (retries > 0) then http_request method url body additional_headers error_handlers (retries - 1) else
if response.code.is_success then response else
is_unauthorized = (response.code == HTTP_Status_Code.unauthorized) || (response.code == HTTP_Status_Code.forbidden)
case is_unauthorized of
True -> if retries < 0 then Error.throw Enso_Cloud_Error.Unauthorized else
Authentication.refresh_access_token
http_request method url body additional_headers error_handlers (retries - 1)
False ->
payload = response.decode_as_text
## If the payload can be parsed as JSON, contains a `code` field and this `code` is found in the error
handlers, a specialized handler will be used for the error. Otherwise, a generic Unexpected_Service_Error
will be reported, containing the payload for inspection.
json_payload = payload.parse_json.catch Invalid_JSON (_->Nothing)
error_code = json_payload.get "code"
handler = error_code.if_not_nothing <| error_handlers.get error_code
case handler of
Nothing -> Error.throw (Enso_Cloud_Error.Unexpected_Service_Error response.code payload)
_ : Function -> handler json_payload

View File

@ -1,10 +1,59 @@
import project.Any.Any
import project.Data.Json.JS_Object
import project.Data.Text.Text
import project.Enso_Cloud.Errors.Enso_Cloud_Error
import project.Error.Error
import project.Meta
import project.Nothing.Nothing
from project.Data.Boolean import Boolean, False, True
## PRIVATE
A helper that extracts a field from a response and handles unexpected
response structure.
get_required_field key js_object = case js_object of
_ : JS_Object -> js_object.get key if_missing=(Error.throw (Enso_Cloud_Error.Invalid_Response_Payload "Missing required field `"+key+"` in "+js_object.to_display_text+"."))
_ -> Error.throw (Enso_Cloud_Error.Invalid_Response_Payload "Expected a JSON object, but got "+js_object.to_display_text+".")
A helper that extracts a required field from a response and handles
unexpected response structure.
Arguments:
- key: The key of the field to extract.
- js_object: The JSON object to extract the field from.
- show_value: Whether to include the value of the field in the error message.
It is usually better to include it, but in cases like handling of access
tokens, it may be better to just include the type of the field.
- expected_type: The type that the field is expected to be.
get_required_field (key : Text) js_object (show_value : Boolean = True) (expected_type = Any) = case js_object of
_ : JS_Object ->
handle_missing =
suffix = if show_value then " in "+js_object.to_display_text else ""
Error.throw (Enso_Cloud_Error.Invalid_Response_Payload "Missing required field `"+key+"`"+suffix+".")
result = js_object.get key if_missing=handle_missing
if result.is_a expected_type then result else
representation = if show_value then result.to_display_text else Meta.type_of result . to_display_text
Error.throw (Enso_Cloud_Error.Invalid_Response_Payload "Expected field `"+key+"` to be of type "+expected_type.to_display_text+", but got "+representation+".")
_ ->
representation = if show_value then js_object.to_display_text else Meta.type_of js_object . to_display_text
Error.throw (Enso_Cloud_Error.Invalid_Response_Payload "Expected a JSON object, but got "+representation+".")
## PRIVATE
A helper that extracts an optional field from a response and handles
unexpected response structure.
This method treats the field missing or being set to `null` as the same.
Arguments:
- key: The key of the field to extract.
- js_object: The JSON object to extract the field from.
- if_missing: The value to return if the field is missing or `null`.
- show_value: Whether to include the value of the field in the error message.
It is usually better to include it, but in cases like handling of access
tokens, it may be better to just include the type of the field.
- expected_type: The type that the field is expected to be.
The type of `if_missing` is not checked against `expected_type`.
get_optional_field (key : Text) js_object (~if_missing = Nothing) (show_value : Boolean = True) (expected_type = Any) = case js_object of
_ : JS_Object ->
result = js_object.get key if_missing=Nothing
case result of
Nothing -> if_missing
_ -> if result.is_a expected_type then result else
representation = if show_value then result.to_display_text else Meta.type_of result . to_display_text
Error.throw (Enso_Cloud_Error.Invalid_Response_Payload "Expected field `"+key+"` to be of type "+expected_type.to_display_text+", but got "+representation+".")
_ ->
representation = if show_value then js_object.to_display_text else Meta.type_of js_object . to_display_text
Error.throw (Enso_Cloud_Error.Invalid_Response_Payload "Expected a JSON object, but got "+representation+".")

View File

@ -1,10 +1,11 @@
import project.Data.Text.Text
import project.Errors.Problem_Behavior.Problem_Behavior
import project.Network.HTTP.HTTP
import project.Network.HTTP.HTTP_Method.HTTP_Method
import project.Network.HTTP.Request.Request
import project.Nothing.Nothing
from project.Enso_Cloud.Data_Link import parse_format, parse_secure_value
from project.Enso_Cloud.Public_Utils import get_required_field
from project.Enso_Cloud.Public_Utils import get_optional_field, get_required_field
## PRIVATE
type HTTP_Fetch_Data_Link
@ -13,9 +14,9 @@ type HTTP_Fetch_Data_Link
## PRIVATE
parse json -> HTTP_Fetch_Data_Link =
uri = get_required_field "uri" json
method = HTTP_Method.from (get_required_field "method" json)
format = parse_format (json.get "format" Nothing)
uri = get_required_field "uri" json expected_type=Text
method = HTTP_Method.from (get_required_field "method" json expected_type=Text)
format = parse_format (get_optional_field "format" json)
# TODO headers
headers = []
request = Request.new method uri headers

View File

@ -71,7 +71,7 @@ type File
import Standard.Examples
example_new = File.new Examples.csv_path
new : (Text | File) -> Any
new : (Text | File) -> Any ! Illegal_Argument
new path = case path of
_ : Text -> if path.contains "://" . not then get_file path else
protocol = path.split "://" . first
@ -82,7 +82,7 @@ type File
_ ->
## Check to see if a valid "File" type.
if (file_types.any file_type-> path.is_a file_type) then path else
Error.throw (Type_Error.Error File path "path")
Error.throw (Illegal_Argument.Error "The provided path is neither a Text, nor any recognized File-like type.")
## ICON folder_add

View File

@ -1,6 +1,6 @@
from Standard.Base import all
from Standard.Base.Enso_Cloud.Data_Link import parse_secure_value
from Standard.Base.Enso_Cloud.Public_Utils import get_required_field
from Standard.Base.Enso_Cloud.Public_Utils import get_optional_field, get_required_field
import project.Connection.Connection_Options.Connection_Options
import project.Connection.Credentials.Credentials
@ -19,19 +19,19 @@ type Postgres_Data_Link
## PRIVATE
parse json -> Postgres_Data_Link =
host = get_required_field "host" json
port = get_required_field "port" json
db_name = get_required_field "database_name" json
schema = json.get "schema" if_missing=""
host = get_required_field "host" json expected_type=Text
port = get_required_field "port" json expected_type=Integer
db_name = get_required_field "database_name" json expected_type=Text
schema = get_optional_field "schema" json if_missing="" expected_type=Text
credentials_json = json.get "credentials"
credentials_json = get_optional_field "credentials" json
credentials = credentials_json.if_not_nothing <|
username = get_required_field "username" credentials_json |> parse_secure_value
password = get_required_field "password" credentials_json |> parse_secure_value
Credentials.Username_And_Password username password
details = Postgres_Details.Postgres host=host port=port database=db_name schema=schema credentials=credentials
case json.get "table" of
case get_optional_field "table" json expected_type=Text of
Nothing ->
Postgres_Data_Link.Connection details
table_name : Text ->

View File

@ -1,6 +1,5 @@
from Standard.Base import all
import Standard.Base.Errors.Common.No_Such_Method
import Standard.Base.Errors.Common.Type_Error
from Standard.Base.Data.Filter_Condition.Filter_Condition import all
import project.Data.Type.Value_Type.Value_Type

View File

@ -22,29 +22,46 @@ type Test
## Expect a function to fail with the provided panic.
Arguments:
- action: The action to evaluate that is expected to fail with a panic.
- matcher: The expected type of the panic thrown by `action`.
It returns the caught panic if it matches the expected panic type,
otherwise it fails the test.
> Example
Expect that a computation should panic as part of a test.
Arguments:
- action: The action to evaluate that is expected to fail with a panic.
- matcher: The expected type of the panic thrown by `action`.
import Standard.Examples
from Standard.Test import Test
> Example
Expect that a computation should panic as part of a test.
import Standard.Examples
from Standard.Test import Test
example_expect_panic_with =
Test.expect_panic_with Examples.throw_panic Examples.My_Error
expect_panic_with : Any -> Any -> Integer -> Test_Result
expect_panic_with ~action matcher frames_to_skip=0 =
loc = Meta.get_source_location 1+frames_to_skip
handle_panic caught_panic =
payload = caught_panic.payload
is_internal_test_panic = payload.is_a Spec_Result
if is_internal_test_panic then
# If we just caught the internal test panic, we rethrow it.
Panic.throw caught_panic
is_panic_matching = payload.is_a matcher
if is_panic_matching then payload else
stack_trace = caught_panic.convert_to_dataflow_error.get_stack_trace_text
Test.fail ("Expected a " + matcher.to_text + ", but " + payload.to_text + " was thrown instead (at "+loc+").") details=stack_trace
Panic.catch Any handler=handle_panic <|
res = action
# If the action did not panic above, we fail the test.
case res.is_error of
True ->
Test.fail ("Expected a Panic " + matcher.to_text + " to be thrown, but the action returned a Dataflow Error " + res.catch.to_display_text + " instead (at "+loc+").")
False ->
return_suffix = if res.is_nothing then "" else " and returned ["+res.to_text+"]"
Test.fail ("Expected a Panic " + matcher.to_text + " to be thrown, but the action succeeded" + return_suffix + " (at "+loc+").")
example_expect_panic_with =
Test.expect_panic_with Examples.throw_panic Examples.My_Error
expect_panic_with : Any -> Any -> Test_Result
expect_panic_with ~action matcher =
res = Panic.recover Any action
case res of
_ ->
loc = Meta.get_source_location 2
return_suffix = if res.is_nothing then "" else "and returned ["+res.to_text+"]"
Test.fail ("Expected a " + matcher.to_text + " to be thrown, but the action succeeded " + return_suffix + " (at "+loc+").")
err = res.catch
if err.is_a matcher then Nothing else
Test.fail ("Expected a " + matcher.to_text + ", but " + err.to_text + " was thrown instead.")
## Expect a function to fail with the provided panic.
@ -68,7 +85,7 @@ type Test
IO.println 'this is not reached'
expect_panic : Any -> Any -> Test_Result
expect_panic matcher ~action = Test.expect_panic_with action matcher
expect_panic matcher ~action = Test.expect_panic_with action matcher frames_to_skip=1
## Checks that the provided action returns without any errors or warnings.
@ -150,4 +167,3 @@ type Test
result = behavior
State.put Clue prev_clue
result

View File

@ -5049,7 +5049,7 @@ class RuntimeServerTest
Api.ExecutionFailed(
contextId,
Api.ExecutionResult.Diagnostic.error(
"Type error: expected `str` to be Text, but got Integer.",
"Type error: Expected `str` to be Text, but got Integer.",
Some(mainFile),
Some(model.Range(model.Position(3, 10), model.Position(3, 15))),
None,
@ -5379,7 +5379,7 @@ class RuntimeServerTest
Api.ExecutionFailed(
contextId,
Api.ExecutionResult.Diagnostic.error(
"Type error: expected `that` to be Integer, but got Function.",
"Type error: Expected `that` to be Integer, but got Function.",
None,
Some(model.Range(model.Position(6, 18), model.Position(6, 43))),
None,

View File

@ -108,6 +108,7 @@ class TextTest extends InterpreterTest {
"""
|import Standard.Base.Any.Any
|import Standard.Base.Data.List.List
|import Standard.Base.Data.Numbers
|from Standard.Base.Errors.Common import all
|import Standard.Base.Panic.Panic
|import Standard.Base.IO
@ -117,7 +118,7 @@ class TextTest extends InterpreterTest {
|main =
| IO.println (List.Cons Nothing Nothing).to_display_text
| IO.println (Syntax_Error.Error "foo").to_display_text
| IO.println (Type_Error.Error Nothing List.Nil "`myvar`").to_display_text
| IO.println (Type_Error.Error Nothing List.Nil "`myvar`: exp={exp} got={got}").to_display_text
| IO.println (Compile_Error.Error "error :(").to_display_text
| IO.println (Inexhaustive_Pattern_Match.Error 32).to_display_text
| IO.println (Arithmetic_Error.Error "cannot frobnicate quaternions").to_display_text
@ -129,11 +130,11 @@ class TextTest extends InterpreterTest {
consumeOut shouldEqual List(
"Cons",
"Syntax error: foo.",
"Type error: expected `myvar` to be Nothing, but got List.",
"Type error: `myvar`: exp=Nothing got=List.",
"Compile error: error :(.",
"Inexhaustive pattern match: no branch matches 32.",
"Arithmetic error: cannot frobnicate quaternions.",
"Type error: expected `that` to be Integer, but got Text.",
"Type error: Expected `that` to be Integer, but got Text.",
"Type error: expected a function, but got 7.",
"Wrong number of arguments. Expected 10, but got 20."
)

View File

@ -184,7 +184,8 @@ public final class Error {
*/
@CompilerDirectives.TruffleBoundary
public Atom makeTypeError(Object expected, Object actual, String name) {
return typeError.newInstance(expected, actual, Text.create("`" + name + "`"));
return typeError.newInstance(
expected, actual, Text.create("Expected `" + name + "` to be {exp}, but got {got}"));
}
/**

View File

@ -1,51 +1,59 @@
package org.enso.base.enso_cloud;
import org.enso.base.Environment_Utils;
import org.graalvm.polyglot.Context;
import org.graalvm.polyglot.Value;
public class AuthenticationProvider {
private static String token = null;
public static String setToken(String token) {
AuthenticationProvider.token = token;
return AuthenticationProvider.token;
public interface AuthenticationService {
String get_access_token();
void force_refresh();
}
public static String getToken() {
return AuthenticationProvider.token;
private static Value authenticationServiceAsEnso = null;
private static AuthenticationService authenticationServiceAsJava = null;
public static void reset() {
authenticationServiceAsEnso = null;
authenticationServiceAsJava = null;
}
public static String getAPIRootURI() {
var envUri = Environment_Utils.get_environment_variable("ENSO_CLOUD_API_URI");
var effectiveUri =
envUri == null ? "https://7aqkn3tnbc.execute-api.eu-west-1.amazonaws.com/" : envUri;
var uriWithSlash = effectiveUri.endsWith("/") ? effectiveUri : effectiveUri + "/";
return uriWithSlash;
private static Value createAuthenticationService() {
var context = Context.getCurrent().getBindings("enso");
var module =
context.invokeMember("get_module", "Standard.Base.Enso_Cloud.Internal.Authentication");
var moduleType = module.invokeMember("get_associated_type");
var factory =
module.invokeMember("get_method", moduleType, "instantiate_authentication_service");
// The static method takes the module as the synthetic 'self' argument.
return factory.execute(moduleType);
}
public record CloudWorkingDirectory(String name, String id, String organizationId) {}
private static void ensureServicesSetup() {
var ensoInstance = createAuthenticationService();
var javaInstance = ensoInstance.as(AuthenticationService.class);
authenticationServiceAsEnso = ensoInstance;
authenticationServiceAsJava = javaInstance;
}
public static CloudWorkingDirectory getCurrentWorkingDirectory() {
if (cachedWorkingDirectory != null) {
return cachedWorkingDirectory;
static AuthenticationService getAuthenticationService() {
if (authenticationServiceAsJava == null) {
ensureServicesSetup();
}
String directoryId = Environment_Utils.get_environment_variable("ENSO_PROJECT_PATH");
if (directoryId == null) {
// No current working directory is set
return null;
}
// TODO we should be able to fetch the name and organizationId from the cloud:
String directoryName = "???";
String organizationId = "";
cachedWorkingDirectory = new CloudWorkingDirectory(directoryName, directoryId, organizationId);
return cachedWorkingDirectory;
return authenticationServiceAsJava;
}
private static CloudWorkingDirectory cachedWorkingDirectory = null;
public static Value getAuthenticationServiceEnsoInstance() {
if (authenticationServiceAsEnso == null) {
ensureServicesSetup();
}
public static void flushCloudCaches() {
EnsoSecretReader.flushCache();
cachedWorkingDirectory = null;
return authenticationServiceAsEnso;
}
public static String getAccessToken() {
return getAuthenticationService().get_access_token();
}
}

View File

@ -0,0 +1,43 @@
package org.enso.base.enso_cloud;
import org.enso.base.Environment_Utils;
public class CloudAPI {
public static String getAPIRootURI() {
var envUri = Environment_Utils.get_environment_variable("ENSO_CLOUD_API_URI");
var effectiveUri =
envUri == null ? "https://7aqkn3tnbc.execute-api.eu-west-1.amazonaws.com/" : envUri;
var uriWithSlash = effectiveUri.endsWith("/") ? effectiveUri : effectiveUri + "/";
return uriWithSlash;
}
public record CloudWorkingDirectory(String name, String id, String organizationId) {}
public static CloudWorkingDirectory getCurrentWorkingDirectory() {
if (cachedWorkingDirectory != null) {
return cachedWorkingDirectory;
}
String directoryId = Environment_Utils.get_environment_variable("ENSO_PROJECT_PATH");
if (directoryId == null) {
// No current working directory is set
return null;
}
// TODO we should be able to fetch the name and organizationId from the cloud:
// To be done in https://github.com/enso-org/enso/issues/9289
String directoryName = "???";
String organizationId = "";
cachedWorkingDirectory = new CloudWorkingDirectory(directoryName, directoryId, organizationId);
return cachedWorkingDirectory;
}
private static CloudWorkingDirectory cachedWorkingDirectory = null;
public static void flushCloudCaches() {
AuthenticationProvider.reset();
cachedWorkingDirectory = null;
EnsoSecretReader.flushCache();
}
}

View File

@ -31,12 +31,16 @@ class EnsoSecretReader {
return secrets.get(secretId);
}
var apiUri = AuthenticationProvider.getAPIRootURI() + "s3cr3tz/" + secretId;
return fetchSecretValue(secretId, 3);
}
private static String fetchSecretValue(String secretId, int retryCount) {
var apiUri = CloudAPI.getAPIRootURI() + "s3cr3tz/" + secretId;
var client = HttpClient.newBuilder().followRedirects(HttpClient.Redirect.ALWAYS).build();
var request =
HttpRequest.newBuilder()
.uri(URI.create(apiUri))
.header("Authorization", "Bearer " + AuthenticationProvider.getToken())
.header("Authorization", "Bearer " + AuthenticationProvider.getAccessToken())
.GET()
.build();
@ -45,11 +49,29 @@ class EnsoSecretReader {
try {
response = client.send(request, HttpResponse.BodyHandlers.ofString());
} catch (IOException | InterruptedException e) {
throw new IllegalArgumentException("Unable to read secret.");
if (retryCount < 0) {
throw new IllegalArgumentException("Unable to read secret.");
} else {
return fetchSecretValue(secretId, retryCount - 1);
}
}
if (response.statusCode() != 200) {
throw new IllegalArgumentException("Unable to read secret.");
int status = response.statusCode();
if (status == 401 || status == 403 || status >= 500) {
if (retryCount < 0) {
String kind = status >= 500 ? "server" : "authentication";
throw new IllegalArgumentException(
"Unable to read secret - numerous " + kind + " failures.");
} else {
// We forcibly refresh the access token and try again.
AuthenticationProvider.getAuthenticationService().force_refresh();
return fetchSecretValue(secretId, retryCount - 1);
}
}
if (status != 200) {
throw new IllegalArgumentException(
"Unable to read secret - the service responded with status " + status + ".");
}
var secretJSON = response.body();

View File

@ -43,7 +43,7 @@ add_specs suite_builder =
group_builder.specify "should check validity at construction" <|
Test.expect_panic_with (Array_Proxy.new 0 0) Type_Error
Test.expect_panic_with (Array_Proxy.new -1 (x->x)) Illegal_Argument
(Array_Proxy.new -1 (x->x)).should_fail_with Illegal_Argument
group_builder.specify "should pretend to just be an Array" <|
proxy = Array_Proxy.new 3 (ix -> ix + 10)
@ -59,4 +59,3 @@ main filter=Nothing =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter filter

View File

@ -612,7 +612,7 @@ add_common_specs suite_builder prefix:Text (pending : (Text | Nothing)) (empty_m
m2 = m1.remove "B"
m2.get "A" . should_equal 1
m2.remove "A" . should_equal empty_map
Test.expect_panic_with (m1.remove "foo") Any
m1.remove "foo" . should_fail_with No_Such_Key
group_builder.specify "should be able to remove entries (2)" <|
m1 = empty_map.insert "A" 1
@ -632,4 +632,3 @@ main filter=Nothing =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter filter

View File

@ -76,9 +76,9 @@ add_specs suite_builder =
pattern = Regex.compile ""
pattern.matches "ABC" . should_fail_with Illegal_Argument
group_builder.specify "`matches` against a non-Text should fail with Illegal_Argument" <|
pattern = Regex.compile "abc"
pattern.matches 1 . should_fail_with Type_Error
group_builder.specify "`matches` against a non-Text should fail with Type_Error" <|
pattern = Regex.compile "abc"
Test.expect_panic Type_Error <| pattern.matches 1
suite_builder.group "Pattern.match and .match_all" group_builder->
@ -122,11 +122,11 @@ add_specs suite_builder =
group_builder.specify "`match` against a non-Text should fail with Illegal_Argument" <|
pattern = Regex.compile "abc"
pattern.match 1 . should_fail_with Type_Error
Test.expect_panic Type_Error <| pattern.match 1
group_builder.specify "`match_all` against a non-Text should fail with Illegal_Argument" <|
pattern = Regex.compile "abc"
pattern.match_all 1 . should_fail_with Type_Error
Test.expect_panic Type_Error <| pattern.match_all 1
suite_builder.group "Pattern.find and .find_all" group_builder->
@ -217,7 +217,7 @@ add_specs suite_builder =
group_builder.specify "`split` against a non-Text should fail with Illegal_Argument" <|
pattern = Regex.compile "abc"
pattern.split 1 . should_fail_with Type_Error
Test.expect_panic Type_Error <| pattern.split 1
suite_builder.group "Pattern.tokenize" group_builder->
@ -342,13 +342,13 @@ add_specs suite_builder =
pattern = Regex.compile "([a-z]+)"
pattern.replace "foo bar, baz" "[$1]" . should_equal "[foo] [bar], [baz]"
group_builder.specify "`replace` with an empty pattern should be an error" <|
pattern = Regex.compile ""
pattern.replace "ABC" . should_fail_with Illegal_Argument
group_builder.specify "`replace` with an empty pattern should be an error" <|
pattern = Regex.compile ""
pattern.replace "ABC" . should_fail_with Illegal_Argument
group_builder.specify "`replace` against a non-Text should fail with Illegal_Argument" <|
pattern = Regex.compile "abc"
pattern.replace 1 "abc" . should_fail_with Type_Error
group_builder.specify "`replace` against a non-Text should fail with Type_Error" <|
pattern = Regex.compile "abc"
Test.expect_panic Type_Error <| pattern.replace 1 "abc"
suite_builder.group "Match.text" group_builder->
@ -538,4 +538,3 @@ main filter=Nothing =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter filter

View File

@ -123,7 +123,7 @@ add_specs suite_builder =
(accent_1 != Nothing) . should_be_true
Ordering.compare accent_1 Nothing . should_fail_with Incomparable_Values
(accent_1 > Nothing) . should_fail_with Incomparable_Values
accent_1 . compare_to_ignore_case Nothing . should_fail_with Type_Error
Test.expect_panic Type_Error <| accent_1 . compare_to_ignore_case Nothing
earlier_suffix = "aooooz"
later_suffix = "bo"
@ -758,8 +758,8 @@ add_specs suite_builder =
"".drop (Sample 100) . should_equal ""
group_builder.specify "take and drop should gracefully handle missing constructor arguments" <|
"".take "FOO" . should_fail_with Type_Error
"".drop "FOO" . should_fail_with Type_Error
Test.expect_panic Type_Error <| "".take "FOO"
Test.expect_panic Type_Error <| "".drop "FOO"
r1 = "".take (Index_Sub_Range.While)
r1.should_fail_with Illegal_Argument
@ -774,7 +774,7 @@ add_specs suite_builder =
r3.catch.to_display_text . should_contain "Got a Function instead of a range, is a constructor argument missing?"
# Double-check that constructors of _unexpected_ types are still yielding a type error.
"".take (Case_Sensitivity.Insensitive ...) . should_fail_with Type_Error
Test.expect_panic Type_Error <| "".take (Case_Sensitivity.Insensitive ...)
group_builder.specify "should correctly convert character case" <|
"FooBar Baz".to_case Case.Lower . should_equal "foobar baz"
@ -1666,4 +1666,3 @@ main filter=Nothing =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter filter

View File

@ -676,8 +676,8 @@ type_spec suite_builder name alter = suite_builder.group name group_builder->
v.take (Sample 3) . should_not_equal (v.take (Sample 3))
group_builder.specify "take/drop should gracefully handle missing constructor arguments" <|
[].take "FOO" . should_fail_with Type_Error
[].drop "FOO" . should_fail_with Type_Error
Test.expect_panic Type_Error <| [].take "FOO"
Test.expect_panic Type_Error <| [].drop "FOO"
r1 = [].take (Index_Sub_Range.While)
r1.should_fail_with Illegal_Argument

View File

@ -8,7 +8,7 @@ polyglot java import java.lang.Thread
polyglot java import java.security.KeyStore
polyglot java import javax.net.ssl.SSLContext
polyglot java import javax.net.ssl.TrustManagerFactory
polyglot java import org.enso.base.enso_cloud.AuthenticationProvider
polyglot java import org.enso.base.enso_cloud.CloudAPI
type Cloud_Tests_Setup
Mock api_url:URI credentials_location:File
@ -16,20 +16,17 @@ type Cloud_Tests_Setup
None
with_prepared_environment self ~action =
if self == Cloud_Tests_Setup.None then action else
Test_Environment.unsafe_with_environment_override "ENSO_CLOUD_API_URI" self.api_url.to_text <|
Test_Environment.unsafe_with_environment_override "ENSO_CLOUD_CREDENTIALS_FILE" self.credentials_location.absolute.normalize.path <|
action
Cloud_Tests_Setup.reset
Panic.with_finalizer Cloud_Tests_Setup.reset <|
if self == Cloud_Tests_Setup.None then action else
Test_Environment.unsafe_with_environment_override "ENSO_CLOUD_API_URI" self.api_url.to_text <|
Test_Environment.unsafe_with_environment_override "ENSO_CLOUD_CREDENTIALS_FILE" self.credentials_location.absolute.normalize.path <|
action
pending self = case self of
Cloud_Tests_Setup.None -> "Cloud tests run only if ENSO_RUN_REAL_CLOUD_TEST or ENSO_HTTP_TEST_HTTPBIN_URL environment variable is defined."
_ -> Nothing
mock_only_pending self = case self of
Cloud_Tests_Setup.Mock _ _ -> Nothing
Cloud_Tests_Setup.Cloud _ _ -> "Mock cloud tests only run if ENSO_RUN_REAL_CLOUD_TEST is NOT defined, as they would interfere with other cloud tests."
_ -> "Cloud tests run only if ENSO_HTTP_TEST_HTTPBIN_URL environment variable is defined."
real_cloud_pending self = case self of
Cloud_Tests_Setup.Cloud _ _ -> Nothing
_ -> "These cloud tests only run if ENSO_RUN_REAL_CLOUD_TEST is defined, as they require a proper cloud environment for testing, not just a minimal mock."
@ -72,9 +69,8 @@ type Cloud_Tests_Setup
Cloud_Tests_Setup.Mock _ _ -> True
_ -> False
## Resets the user token, to avoid cached token from other tests interfering.
reset_token =
AuthenticationProvider.setToken Nothing
## Flushes all cloud caches in order to allow switching between real and mock cloud environments.
reset = CloudAPI.flushCloudCaches
## Detects the setup based on environment settings.
prepare : Cloud_Tests_Setup
@ -87,22 +83,81 @@ type Cloud_Tests_Setup
credentials_location = Environment.get "ENSO_CLOUD_CREDENTIALS_FILE" . if_nothing <|
Panic.throw (Illegal_State.Error "If ENSO_RUN_REAL_CLOUD_TEST is defined, ENSO_CLOUD_CREDENTIALS_FILE must be defined as well.")
Cloud_Tests_Setup.reset_token
Cloud_Tests_Setup.Cloud (URI.from api_url) (File.new credentials_location)
False -> Cloud_Tests_Setup.prepare_mock_setup
False ->
base_url = Environment.get "ENSO_HTTP_TEST_HTTPBIN_URL"
if base_url.is_nothing then Cloud_Tests_Setup.None else
with_slash = if base_url.ends_with "/" then base_url else base_url + "/"
enso_cloud_url = with_slash + "enso-cloud-mock/"
## Runs the action inside an environment set up with the Cloud Mock
running on our HTTP helper, regardless of whether the 'real' cloud environment is
available for testing.
tmp_cred_file = File.create_temporary_file "enso-test-credentials" ".txt"
Cloud_Tests_Setup.test_token.write tmp_cred_file
That's because some of our tests should always run on the mock environment
- for example authentication tests, because only these allow us enough
customization.
Cloud_Tests_Setup.reset_token
Cloud_Tests_Setup.Mock (URI.from enso_cloud_url) tmp_cred_file
If `custom_credentials` is not specified, the default test credentials are used.
run_with_mock_cloud ~action (custom_credentials : Mock_Credentials | Nothing = Nothing) =
setup = Cloud_Tests_Setup.prepare_mock_setup custom_credentials
setup.with_prepared_environment action
test_token -> Text = "TEST-ENSO-TOKEN-caffee"
prepare_mock_setup (custom_credentials : Mock_Credentials | Nothing = Nothing) -> Cloud_Tests_Setup =
base_url = Environment.get "ENSO_HTTP_TEST_HTTPBIN_URL"
if base_url.is_nothing then Cloud_Tests_Setup.None else
with_slash = if base_url.ends_with "/" then base_url else base_url + "/"
enso_cloud_url = with_slash + "enso-cloud-mock/"
credentials_payload = (custom_credentials.if_nothing (Mock_Credentials.default with_slash)).to_json
tmp_cred_file = File.create_temporary_file "enso-test-credentials" ".json"
credentials_payload.write tmp_cred_file
Cloud_Tests_Setup.Mock (URI.from enso_cloud_url) tmp_cred_file
## Returns the number of times requests failed due to an expired token.
Only valid for Mock setup.
get_expired_token_failures_count self -> Integer = case self of
Cloud_Tests_Setup.Mock _ _ ->
uri = self.httpbin_uri / "COUNT-EXPIRED-TOKEN-FAILURES"
HTTP.fetch uri . decode_as_json
_ -> Panic.throw (Illegal_State.Error "This method is only valid for Mock setup.")
type Mock_Credentials
Value access_token:Text expire_at:Date_Time refresh_token:Text refresh_url:Text client_id:Text
default (base_http_url : URI) -> Mock_Credentials =
expire_at = (Date_Time.now.start_of Time_Period.Minute) + (Period.new years=1)
refresh_url = base_http_url / "enso-cloud-auth-renew/"
Mock_Credentials.Value "TEST-ENSO-TOKEN-caffee" expire_at "TEST-ENSO-REFRESH-caffee" refresh_url.to_text "TEST-ENSO-CLIENT-ID"
to_json self -> Text =
fields = []
+ [["access_token", self.access_token]]
+ [["expire_at", self.expire_at.at_zone Time_Zone.utc . format Date_Time_Formatter.iso_offset_date_time]]
+ [["refresh_token", self.refresh_token]]
+ [["refresh_url", self.refresh_url]]
+ [["client_id", self.client_id]]
JS_Object.from_pairs fields . to_json
set_refresh_token self token:Text =
Mock_Credentials.Value self.access_token self.expire_at token self.refresh_url self.client_id
## Returns a credential whose refresh token is invalid.
This will test the case where the refresh token has expired and the user has to re-authenticate.
invalid_refresh_token self -> Mock_Credentials =
self.set_refresh_token "INVALID-ENSO-REFRESH-caffee"
## Returns a credential whose token has already expired, according to the `expire_at` field.
locally_expired self -> Mock_Credentials =
new_expire_at = (Date_Time.now.start_of Time_Period.Minute) - (Duration.new hours=1)
Mock_Credentials.Value self.access_token new_expire_at self.refresh_token self.refresh_url self.client_id
## Returns a credential which does not seem expired, but will be considered expired by the cloud.
This tests the rare edge case where a token is not considered expired locally, but the cloud nevertheless treats it as expired.
remotely_expired self -> Mock_Credentials =
Mock_Credentials.Value "TEST-EXPIRED-TOKEN-beef" self.expire_at self.refresh_token self.refresh_url self.client_id
## Returns a credential whose token will expire in less than 2 minutes (specifically, in 1 minute).
about_to_expire self -> Mock_Credentials =
new_expire_at = Date_Time.now + (Duration.new minutes=1)
Mock_Credentials.Value self.access_token new_expire_at self.refresh_token self.refresh_url self.client_id
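For reference, the credentials file that `prepare_mock_setup` writes via `Mock_Credentials.to_json` contains roughly the following payload (shown pretty-printed, assuming the HTTP test helper runs at http://localhost:8080/; the actual `expire_at` value depends on when the file is generated):
{
    "access_token": "TEST-ENSO-TOKEN-caffee",
    "expire_at": "2025-03-19T20:26:00Z",
    "refresh_token": "TEST-ENSO-REFRESH-caffee",
    "refresh_url": "http://localhost:8080/enso-cloud-auth-renew/",
    "client_id": "TEST-ENSO-CLIENT-ID"
}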
## PRIVATE
A helper method that retries the action a few times, allowing tests that might otherwise fail due to propagation delays to pass.

View File

@ -1,90 +1,168 @@
from Standard.Base import all
import Standard.Base.Enso_Cloud.Errors.Cloud_Session_Expired
import Standard.Base.Enso_Cloud.Errors.Enso_Cloud_Error
import Standard.Base.Enso_Cloud.Errors.Not_Logged_In
import Standard.Base.Errors.Common.No_Such_Conversion
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.Errors.Illegal_State.Illegal_State
from Standard.Test import all
import Standard.Test.Test_Environment
import project.Network.Enso_Cloud.Cloud_Tests_Setup.Cloud_Tests_Setup
import project.Network.Enso_Cloud.Cloud_Tests_Setup.Mock_Credentials
polyglot java import org.enso.base.enso_cloud.AuthenticationProvider
## To run this test locally:
$ sbt 'http-test-helper/run localhost 8080'
$ export ENSO_HTTP_TEST_HTTPBIN_URL=http://localhost:8080/
add_specs suite_builder setup:Cloud_Tests_Setup =
setup.with_prepared_environment <|
suite_builder.group "Enso_User - offline logic tests" group_builder->
group_builder.specify "is correctly parsed from JSON" <|
json = Json.parse """
{
"id": "organization-27xJM00p8jWoL2qByTo6tQfciWC",
"name": "Parsed user",
"email": "enso-parse-test@example.com",
"isEnabled": true,
"rootDirectoryId": "directory-27xJM00p8jWoL2qByTo6tQfciWC"
}
parsed_user = Enso_User.from json
parsed_user.id.should_equal "organization-27xJM00p8jWoL2qByTo6tQfciWC"
parsed_user.name.should_equal "Parsed user"
parsed_user.email.should_equal "enso-parse-test@example.com"
parsed_user.is_enabled.should_be_true
parsed_user.home . should_be_a Enso_File
parsed_user.home.is_directory.should_be_true
suite_builder.group "Enso_User - offline logic tests" group_builder->
group_builder.specify "is correctly parsed from JSON" <|
json = Json.parse """
{
"id": "organization-27xJM00p8jWoL2qByTo6tQfciWC",
"name": "Parsed user",
"email": "enso-parse-test@example.com",
"isEnabled": true,
"rootDirectoryId": "directory-27xJM00p8jWoL2qByTo6tQfciWC"
}
parsed_user = Enso_User.from json
parsed_user.id.should_equal "organization-27xJM00p8jWoL2qByTo6tQfciWC"
parsed_user.name.should_equal "Parsed user"
parsed_user.email.should_equal "enso-parse-test@example.com"
parsed_user.is_enabled.should_be_true
parsed_user.home . should_be_a Enso_File
parsed_user.home.is_directory.should_be_true
invalid_json = Json.parse "{}"
r = Enso_User.from invalid_json
r.should_fail_with Enso_Cloud_Error
r.catch.should_be_a Enso_Cloud_Error.Invalid_Response_Payload
Test.expect_panic No_Such_Conversion (Enso_User.from (Json.parse "[]"))
invalid_json = Json.parse "{}"
r = Enso_User.from invalid_json
r.should_fail_with Enso_Cloud_Error
r.catch.should_be_a Enso_Cloud_Error.Invalid_Response_Payload
Test.expect_panic No_Such_Conversion (Enso_User.from (Json.parse "[]"))
suite_builder.group "Enso_User - local mock integration tests" group_builder->
# These tests should be kept in sync with tools/http-test-helper/src/main/java/org/enso/shttp/cloud_mock/UsersHandler.java
group_builder.specify "current user can be fetched from mock API" pending=setup.mock_only_pending <|
current = Enso_User.current
current.id.should_equal "organization-27xJM00p8jWoL2qByTo6tQfciWC"
current.name.should_equal "My test User 1"
current.email.should_equal "enso-test-user-1@example.com"
current.is_enabled.should_be_true
# These tests should be kept in sync with tools/http-test-helper/src/main/java/org/enso/shttp/cloud_mock/UsersHandler.java
suite_builder.group "Enso_User - local mock integration tests" pending=setup.httpbin_pending group_builder->
group_builder.specify "current user can be fetched from mock API" <| Cloud_Tests_Setup.run_with_mock_cloud <|
current = Enso_User.current
current.id.should_equal "organization-27xJM00p8jWoL2qByTo6tQfciWC"
current.name.should_equal "My test User 1"
current.email.should_equal "enso-test-user-1@example.com"
current.is_enabled.should_be_true
# TODO separate Enso_File tests could test that this is a valid directory
home = current.home
home . should_be_a Enso_File
home.is_directory.should_be_true
# TODO separate Enso_File tests could test that this is a valid directory
home = current.home
home . should_be_a Enso_File
home.is_directory.should_be_true
group_builder.specify "user list can be fetched from mock API" pending=setup.mock_only_pending <|
users = Enso_User.list
group_builder.specify "user list can be fetched from mock API" <| Cloud_Tests_Setup.run_with_mock_cloud <|
users = Enso_User.list
users.length.should_equal 2
users.at 0 . name . should_equal "My test User 1"
users.at 1 . name . should_equal "My test User 2"
users.at 1 . is_enabled . should_be_false
users.length.should_equal 2
users.at 0 . name . should_equal "My test User 1"
users.at 1 . name . should_equal "My test User 2"
users.at 1 . is_enabled . should_be_false
users.should_contain Enso_User.current
users.should_contain Enso_User.current
group_builder.specify "user can be fetched from real API" pending=(setup.real_cloud_pending.if_nothing "Disabled until https://github.com/enso-org/cloud-v2/issues/912 is resolved.") <|
suite_builder.group "Enso_User" group_builder->
group_builder.specify "user can be fetched from real API" pending=(setup.real_cloud_pending.if_nothing "Disabled until https://github.com/enso-org/cloud-v2/issues/912 is resolved.") <|
setup.with_prepared_environment <|
Enso_User.list . should_contain Enso_User.current
group_builder.specify "will fail if the user is not logged in" <|
non_existent_file = (enso_project.data / "nonexistent-file") . absolute . normalize
non_existent_file.exists.should_be_false
Panic.with_finalizer Cloud_Tests_Setup.reset_token <|
r = Test_Environment.unsafe_with_environment_override "ENSO_CLOUD_CREDENTIALS_FILE" non_existent_file.path <|
Cloud_Tests_Setup.reset_token
group_builder.specify "will fail if the user is not logged in" <| setup.with_prepared_environment <|
non_existent_file = (enso_project.data / "nonexistent-file") . absolute . normalize
non_existent_file.exists.should_be_false
Panic.with_finalizer Cloud_Tests_Setup.reset <|
Test_Environment.unsafe_with_environment_override "ENSO_CLOUD_CREDENTIALS_FILE" non_existent_file.path <|
Cloud_Tests_Setup.reset
payload = Test.expect_panic Not_Logged_In <|
Enso_User.current
r.should_fail_with Not_Logged_In
payload.to_display_text . should_contain "Please log in and restart"
group_builder.specify "will fail if the token is invalid" pending=setup.pending <|
invalid_token_file = File.create_temporary_file "enso-test-credentials" "-invalid.txt"
"invalid-token".write invalid_token_file . should_succeed
Cloud_Tests_Setup.reset_token
Panic.with_finalizer Cloud_Tests_Setup.reset_token <|
r = Test_Environment.unsafe_with_environment_override "ENSO_CLOUD_CREDENTIALS_FILE" invalid_token_file.absolute.normalize.path <|
Enso_User.current
r.should_fail_with Enso_Cloud_Error
r.catch.should_be_a Enso_Cloud_Error.Unauthorized
r.catch.to_display_text . should_contain "401"
group_builder.specify "will fail if the token is malformed" pending=setup.pending <| setup.with_prepared_environment <|
invalid_token_file = File.create_temporary_file "enso-test-credentials" "-invalid.txt"
run_with_token_payload payload ~action =
payload.write invalid_token_file . should_succeed
Cloud_Tests_Setup.reset
Panic.with_finalizer Cloud_Tests_Setup.reset <|
Test_Environment.unsafe_with_environment_override "ENSO_CLOUD_CREDENTIALS_FILE" invalid_token_file.absolute.normalize.path <|
action
r1 = run_with_token_payload "invalid-token" <|
Test.expect_panic Illegal_State <| Enso_User.current
r1.to_display_text . should_contain "Invalid credentials file format: Parse error in parsing JSON"
r2 = run_with_token_payload '{"access_token": 42}' <|
Test.expect_panic Illegal_State <| Enso_User.current
r2.to_display_text . should_contain "Invalid credentials file format:"
r2.to_display_text . should_contain "expected `access_token` to be a string"
r3 = run_with_token_payload '{"access_token": "foo", "expire_at": "yesterday"}' <|
Test.expect_panic Illegal_State <| Enso_User.current
r3.to_display_text . should_contain "Invalid credentials file format:"
r3.to_display_text . should_contain "invalid date format"
r3.to_display_text . should_contain "yesterday"
suite_builder.group "Enso Cloud Authentication" pending=setup.httpbin_pending group_builder->
get_current_token =
## We cannot just use `getAccessToken` here, because it would trigger the refresh too early.
Instead, we want to see the token as it is currently set.
AuthenticationProvider.getAuthenticationServiceEnsoInstance.auth_data.get.access_token
base_credentials = Lazy_Ref.Value <|
Mock_Credentials.default Cloud_Tests_Setup.prepare_mock_setup.httpbin_uri
group_builder.specify "refreshes an expired token" <|
Cloud_Tests_Setup.run_with_mock_cloud custom_credentials=base_credentials.get.locally_expired <|
previous_token = get_current_token
## Trigger some cloud endpoint - it should succeed
Enso_User.list . should_be_a Vector
## And afterwards, the token should be refreshed
get_current_token . should_not_equal previous_token
group_builder.specify "retries if the token is considered expired by the backend" <|
# This test is the same as the above, just with different credentials settings.
mock_setup = Cloud_Tests_Setup.prepare_mock_setup custom_credentials=base_credentials.get.remotely_expired
mock_setup.with_prepared_environment <|
count_before = mock_setup.get_expired_token_failures_count
previous_token = get_current_token
Enso_User.list . should_be_a Vector
# The token should have been refreshed.
get_current_token . should_not_equal previous_token
# We also verify that one more request has failed due to an expired token, proving that a retry was made.
count_after = mock_setup.get_expired_token_failures_count
count_after . should_equal count_before+1
group_builder.specify "refreshes a token that is about to expire" <|
# The token here is not yet expired, but we still refresh it.
Cloud_Tests_Setup.run_with_mock_cloud custom_credentials=base_credentials.get.about_to_expire <|
previous_token = get_current_token
Enso_User.list . should_be_a Vector
get_current_token . should_not_equal previous_token
group_builder.specify "graciously fails if the refresh token is expired" <|
Cloud_Tests_Setup.run_with_mock_cloud custom_credentials=base_credentials.get.invalid_refresh_token.locally_expired <|
err = Test.expect_panic Cloud_Session_Expired <|
Enso_User.list
err.to_display_text . should_contain "Please log in again and restart"
group_builder.specify "nicely handles errors if the response is malformed" <|
credentials = base_credentials.get
. locally_expired
. set_refresh_token "GET-MALFORMED-RESPONSE"
Cloud_Tests_Setup.run_with_mock_cloud custom_credentials=credentials <|
err = Test.expect_panic Enso_Cloud_Error <|
Enso_User.list
err.to_display_text . should_contain "response from Enso Cloud could not be parsed"
err.to_display_text . should_contain "Expected field `TokenType` to be of type Text, but got Integer."
type Lazy_Ref
Value ~get
main filter=Nothing =
setup = Cloud_Tests_Setup.prepare

View File

@ -14,8 +14,8 @@ from Standard.Test import all
from Standard.Test.Execution_Context_Helpers import run_with_and_without_output
import project.Network.Enso_Cloud.Cloud_Tests_Setup.Cloud_Tests_Setup
import project.Network.Enso_Cloud.Cloud_Tests_Setup.Mock_Credentials
from project.Network.Enso_Cloud.Cloud_Tests_Setup import with_retries
polyglot java import org.enso.base.enso_cloud.EnsoSecretAccessDenied
@ -223,6 +223,24 @@ add_specs suite_builder setup:Cloud_Tests_Setup = setup.with_prepared_environmen
# Get should still work
with_retries <| Enso_Secret.get "my_test_secret-13" . should_equal secret1
group_builder.specify "should be able to retry fetching a secret if the token is expired" pending=setup.httpbin_pending <|
mock_setup = Cloud_Tests_Setup.prepare_mock_setup
mock_setup.with_prepared_environment <|
secret1 = Enso_Secret.create "my_test_secret-"+Random.uuid "Something123"
secret1.should_succeed
Panic.with_finalizer secret1.delete <|
credentials = Mock_Credentials.default mock_setup.httpbin_uri . remotely_expired
Cloud_Tests_Setup.run_with_mock_cloud custom_credentials=credentials <|
count_before = mock_setup.get_expired_token_failures_count
https = setup.httpbin_secure_client
response = https.request (Request.get (setup.httpbin_secure_uri / "get") headers=[Header.new "X-My-Secret" secret1])
response.decode_as_json.at "headers" . at "X-My-Secret" . should_equal "Something123"
# We also verify that one more request has failed due to an expired token, proving that a retry was made.
count_after = mock_setup.get_expired_token_failures_count
count_after . should_equal count_before+1
main filter=Nothing =
setup = Cloud_Tests_Setup.prepare

View File

@ -22,8 +22,6 @@ add_specs suite_builder = suite_builder.group "Function Ascribed Parameters" gro
surround x = "|" + x + "|"
with_type_error ~action = Panic.catch Type_Error action panic->panic.convert_to_dataflow_error
group_builder.specify "t1 with surround type check" <|
(t1 surround) . should_equal "|x|"
@ -37,10 +35,10 @@ add_specs suite_builder = suite_builder.group "Function Ascribed Parameters" gro
(t4 surround) . should_equal "|x|"
group_builder.specify "t1 with 42 type check" <|
with_type_error (t1 6) . should_fail_with Type_Error
Test.expect_panic Type_Error (t1 6)
group_builder.specify "t2 with 42 type check" <|
with_type_error (t2 6) . should_fail_with Type_Error
Test.expect_panic Type_Error (t2 6)
group_builder.specify "t3 with 42 type check" <|
(t3 6) . should_equal 42
@ -49,20 +47,19 @@ add_specs suite_builder = suite_builder.group "Function Ascribed Parameters" gro
(t4 6) . should_equal 42
group_builder.specify "t1 with text type check" <|
with_type_error (t1 "hi") . should_fail_with Type_Error
Test.expect_panic Type_Error (t1 "hi")
group_builder.specify "t2 with text type check" <|
with_type_error (t2 "hi") . should_fail_with Type_Error
Test.expect_panic Type_Error (t2 "hi")
group_builder.specify "t3 with text type check" <|
with_type_error (t3 "hi") . should_fail_with Type_Error
Test.expect_panic Type_Error (t3 "hi")
group_builder.specify "t4 with text type check" <|
with_type_error (t4 "hi") . should_fail_with Type_Error
Test.expect_panic Type_Error (t4 "hi")
main filter=Nothing =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter filter

View File

@ -252,8 +252,7 @@ add_specs suite_builder =
check 1 "Ahoj" . should_equal "Aho"
# Boolean can be converted to Foo, but is not Text
fail = Panic.recover Type_Error <| check 0 True
fail . should_fail_with Type_Error
Test.expect_panic Type_Error <| check 0 True
group_builder.specify "Requesting Foo & Not_Foo & Boolean" <|
check a (n : Foo & Not_Foo & Boolean) = case a of
@ -265,8 +264,7 @@ add_specs suite_builder =
check 1 True . should_be_false
check 2 True . should_be_true
fail = Panic.recover Type_Error <| check 0 "not a boolean"
fail . should_fail_with Type_Error
Test.expect_panic Type_Error <| check 0 "not a boolean"
group_builder.specify "Requesting Number & Integer & Float" <|
m = MultiNumber.Value 5
@ -348,7 +346,7 @@ add_specs suite_builder =
x.to_text . should_equal "{FOOL True}"
(x:Fool).to_text . should_equal "{FOOL True}"
(x:Boolean).to_text . should_equal "True"
Panic.recover Any (x:Integer).to_text . should_fail_with Type_Error
Test.expect_panic Type_Error <| (x:Integer).to_text
do_boolean True
@ -365,7 +363,7 @@ add_specs suite_builder =
x.to_text . should_equal "Hello"
(x:Fool).to_text . should_equal "{FOOL Hello}"
(x:Text).to_text . should_equal "Hello"
Panic.recover Any (x:Boolean).to_text . should_fail_with Type_Error
Test.expect_panic Type_Error <| (x:Boolean).to_text
do_text "Hello"
@ -452,11 +450,10 @@ add_specs suite_builder =
group_builder.specify "Cannot find constructor" <|
v = ..Value 10
b = Panic.recover Any <|
b = Test.expect_panic Type_Error <|
x = v:Back
x
b . should_fail_with Type_Error
msg = b.to_display_text
msg . should_contain "Cannot find constructor ..Value among Back"
@ -480,8 +477,8 @@ add_specs suite_builder =
m_bar (m:Bar|Foo) = m
m_back (m:Foo|Bar|Back) = m
Panic.recover Any (m_foo v) . should_fail_with Type_Error
Panic.recover Any (m_bar v) . should_fail_with Type_Error
Test.expect_panic Type_Error <| (m_foo v)
Test.expect_panic Type_Error <| (m_bar v)
m_back v . should_equal <| Back.Times 10
group_builder.specify "Lazy constructor with State" <|
@ -531,7 +528,7 @@ add_specs suite_builder =
(f1 d).to_display_text . should_contain "ParseCaseSensitive(false)(Value"
group_builder.specify "f1 42" <|
Panic.recover Any (f1 42).to_display_text . should_fail_with Type_Error
Test.expect_panic Type_Error <| (f1 42).to_display_text
group_builder.specify "f2 AAA" <|
(f2 "AAA").to_display_text . should_equal "TEXT: AAA"
@ -589,7 +586,7 @@ add_specs suite_builder =
(f1 d).to_display_text . should_contain "ParseCaseSensitive(false)(Value"
group_builder.specify "f1 42" <|
Panic.recover Any (f1 42).to_display_text . should_fail_with Type_Error
Test.expect_panic Type_Error <| (f1 42).to_display_text
group_builder.specify "f2 foo AAA" <|
(f2 <| Foo.Value "AAA").to_display_text . should_equal "HELLO: (Say 'AAA ')"
@ -635,4 +632,3 @@ main filter=Nothing =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter filter

View File

@ -372,56 +372,46 @@ add_specs suite_builder =
neg (my_func -5 -2) . should_equal 7
group_builder.specify "try to apply one argument" <|
r = Panic.recover Type_Error <| neg (my_func -5)
r . should_fail_with Type_Error
r = Test.expect_panic Type_Error <| neg (my_func -5)
r.to_display_text.should_contain "Try to apply y argument."
group_builder.specify "try to apply two arguments" <|
r = Panic.recover Type_Error <| neg my_func
r . should_fail_with Type_Error
r = Test.expect_panic Type_Error <| neg my_func
r.to_display_text.should_contain "Try to apply x, y arguments."
group_builder.specify "apply two arguments with one defaulted" <|
r = Panic.recover Type_Error <| neg my_defaulted_func
r . should_fail_with Type_Error
r = Test.expect_panic Type_Error <| neg my_defaulted_func
r.to_display_text.should_contain "Try to apply y argument."
group_builder.specify "printed non-defaulted argument" <|
r = Panic.recover Type_Error <| neg (my_defaulted_func 33)
r . should_fail_with Type_Error
r = Test.expect_panic Type_Error <| neg (my_defaulted_func 33)
r.to_display_text.should_contain "Try to apply y argument."
r.to_display_text.should_contain "x=33"
group_builder.specify "report unapplied constructor nicely" <|
r = Panic.recover Type_Error <| extract My_Type.Value
r . should_fail_with Type_Error
r = Test.expect_panic Type_Error <| extract My_Type.Value
r.to_display_text.should_contain "Try to apply foo argument."
group_builder.specify "report unapplied constructor with default value nicely" <|
r = Panic.recover Type_Error <| extract My_Type.Default_Value
r . should_fail_with Type_Error
r = Test.expect_panic Type_Error <| extract My_Type.Default_Value
r.to_display_text.should_contain "Try to apply bar argument."
group_builder.specify "report non-defaulted constructor argument" <|
r = Panic.recover Type_Error <| extract (My_Type.Default_Value foo=33)
r . should_fail_with Type_Error
r = Test.expect_panic Type_Error <| extract (My_Type.Default_Value foo=33)
r.to_display_text.should_contain "Try to apply bar argument."
r.to_display_text.should_contain "foo=33"
group_builder.specify "report partially applied constructor nicely" <|
r = Panic.recover Type_Error <| extract (My_Type.Multi_Value 42)
r . should_fail_with Type_Error
r = Test.expect_panic Type_Error <| extract (My_Type.Multi_Value 42)
r.to_display_text.should_contain "Try to apply bar argument."
group_builder.specify "try to apply two arguments with over-saturated" <|
r = Panic.recover Type_Error <| neg (my_func z=10)
r . should_fail_with Type_Error
r = Test.expect_panic Type_Error <| neg (my_func z=10)
r.to_display_text . should_contain "Try to apply x, y arguments"
group_builder.specify "types and unapplied arguments" <|
c = C.Baz C.to_text
r = Panic.recover Type_Error <| neg (c.to_num c=3)
r . should_fail_with Type_Error
r = Test.expect_panic Type_Error <| neg (c.to_num c=3)
r.to_display_text . should_contain "Try to apply a, b arguments"
suite_builder.group "Wrapped errors" group_builder->
@ -488,4 +478,3 @@ main filter=Nothing =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter filter

View File

@ -379,7 +379,7 @@ add_specs suite_builder =
group_builder.specify "reject wrong type" <|
pair = Meta.atom_with_hole (e -> My_Ascribed_Type.Value_With_Type 1 e 3)
fill = pair.fill
Panic.catch Any (fill "Two") err->err.convert_to_dataflow_error . should_fail_with Type_Error
Test.expect_panic Type_Error (fill "Two")
group_builder.specify "direct use of pair.fill" <|
pair = Meta.atom_with_hole (e -> My_Ascribed_Type.Value_With_Type 1 e 3)
@ -480,4 +480,3 @@ main filter=Nothing =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter filter

View File

@ -228,8 +228,8 @@ add_specs suite_builder setup =
group_builder.specify "should gracefully handle missing constructor arguments" <|
t = table_builder [["X", [1, 2, 3]]]
t.take "FOO" . should_fail_with Type_Error
t.drop "FOO" . should_fail_with Type_Error
Test.expect_panic Type_Error <| t.take "FOO"
Test.expect_panic Type_Error <| t.drop "FOO"
r1 = t.take (Index_Sub_Range.While)
r1.should_fail_with Illegal_Argument
@ -432,8 +432,8 @@ add_specs suite_builder setup =
group_builder.specify "should gracefully handle missing constructor arguments" <|
c = table_builder [["X", [1, 2, 3]]] . at "X"
c.take "FOO" . should_fail_with Type_Error
c.drop "FOO" . should_fail_with Type_Error
Test.expect_panic Type_Error <| c.take "FOO"
Test.expect_panic Type_Error <| c.drop "FOO"
r1 = c.take (Index_Sub_Range.While)
r1.should_fail_with Illegal_Argument
@ -455,4 +455,3 @@ add_specs suite_builder setup =
case setup.is_database of
True -> unordered_table.at "alpha" . take . should_fail_with Illegal_Argument
False -> unordered_table.at "alpha" . take . to_vector . should_equal [1]

View File

@ -884,7 +884,7 @@ add_specs suite_builder =
suite_builder.group "[In-Memory-specific] Table.join" group_builder->
group_builder.specify "should correctly report unsupported cross-backend joins" <|
t = Table.new [["X", [1, 2, 3]]]
Panic.recover Type_Error (t.join 42) . should_fail_with Type_Error
Test.expect_panic Type_Error (t.join 42)
db_connection = Database.connect (SQLite In_Memory)
db_table = (Table.new [["Y", [4, 5, 6]]]).select_into_database_table db_connection "test"

View File

@ -1,7 +1,9 @@
import project.Any.Any
import project.Data.Text.Text
import project.Error.Error
import project.Meta
polyglot java import java.util.regex.Pattern as Java_Pattern
@Builtin_Type
type Syntax_Error
@ -23,7 +25,14 @@ type Type_Error
tpe = Meta.type_of self.actual
if tpe.is_error then self.actual.to_display_text else tpe.to_display_text
to_display_text self = "Type error: expected "+self.comment+" to be "+self.expected.to_display_text+", but got "+self.type_of_actual+"."
to_display_text self =
got = case self.type_of_actual of
text : Text -> text
_ -> "<ERR>"
exp = self.expected.to_display_text
msg_1 = replace_regex self.comment "\{exp\}" exp
msg = replace_regex msg_1 "\{got\}" got
"Type error: "+msg+"."
@Builtin_Type
type Not_Invokable
@ -76,3 +85,8 @@ type Forbidden_Operation
Error message
to_display_text self = "Forbidden operation: "+self.message
## PRIVATE
A helper used in place of the more complicated `Text.replace` from the main distribution, for simple error message formatting.
replace_regex (text : Text) (regex : Text) (replacement : Text) -> Text =
Java_Pattern.compile regex . matcher text . replaceAll replacement
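With this change, the `comment` carried by a `Type_Error` acts as a message template: `{exp}` is substituted with the expected value's display text and `{got}` with the display text of the actual value's type. A minimal sketch of what a formatted message looks like (the exact type names come from `Meta.type_of`, so the output is approximate):
    err = Type_Error.Error Text 42 "Expected `x` to be {exp}, but got {got}"
    err.to_display_text
    # Approximately: "Type error: Expected `x` to be Text, but got Integer."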

View File

@ -11,7 +11,9 @@ import java.util.concurrent.Semaphore;
import java.util.stream.Stream;
import org.enso.shttp.auth.BasicAuthTestHandler;
import org.enso.shttp.auth.TokenAuthTestHandler;
import org.enso.shttp.cloud_mock.CloudAuthRenew;
import org.enso.shttp.cloud_mock.CloudRoot;
import org.enso.shttp.cloud_mock.ExpiredTokensCounter;
import sun.misc.Signal;
import sun.misc.SignalHandler;
@ -83,8 +85,11 @@ public class HTTPTestHelperServer {
server.addHandler("/test_token_auth", new TokenAuthTestHandler());
server.addHandler("/test_basic_auth", new BasicAuthTestHandler());
server.addHandler("/crash", new CrashingTestHandler());
CloudRoot cloudRoot = new CloudRoot();
var expiredTokensCounter = new ExpiredTokensCounter();
server.addHandler("/COUNT-EXPIRED-TOKEN-FAILURES", expiredTokensCounter);
CloudRoot cloudRoot = new CloudRoot(expiredTokensCounter);
server.addHandler(cloudRoot.prefix, cloudRoot);
server.addHandler("/enso-cloud-auth-renew", new CloudAuthRenew());
setupFileServer(server, projectRoot);
}

View File

@ -40,4 +40,8 @@ public abstract class SimpleHttpHandler implements HttpHandler {
}
exchange.close();
}
protected String decodeBodyAsText(HttpExchange exchange) throws IOException {
return new String(exchange.getRequestBody().readAllBytes(), StandardCharsets.UTF_8);
}
}

View File

@ -6,7 +6,15 @@ import java.util.List;
import org.enso.shttp.SimpleHttpHandler;
public abstract class HandlerWithTokenAuth extends SimpleHttpHandler {
protected abstract String getSecretToken();
protected abstract boolean isTokenAllowed(String token);
protected int getNoTokenStatus() {
return 401;
}
protected int getInvalidTokenStatus(String token) {
return 401;
}
protected abstract void handleAuthorized(HttpExchange exchange) throws IOException;
@ -14,7 +22,7 @@ public abstract class HandlerWithTokenAuth extends SimpleHttpHandler {
protected void doHandle(HttpExchange exchange) throws IOException {
List<String> authHeaders = exchange.getRequestHeaders().get("Authorization");
if (authHeaders == null || authHeaders.isEmpty()) {
sendResponse(401, "Not authorized.", exchange);
sendResponse(getNoTokenStatus(), "Not authorized.", exchange);
return;
} else if (authHeaders.size() > 1) {
sendResponse(400, "Ambiguous Authorization headers.", exchange);
@ -29,9 +37,9 @@ public abstract class HandlerWithTokenAuth extends SimpleHttpHandler {
}
String providedToken = authHeader.substring(prefix.length());
boolean authorized = providedToken.equals(getSecretToken());
boolean authorized = isTokenAllowed(providedToken);
if (!authorized) {
sendResponse(401, "Invalid token.", exchange);
sendResponse(getInvalidTokenStatus(providedToken), "Invalid token.", exchange);
return;
}

View File

@ -4,10 +4,9 @@ import com.sun.net.httpserver.HttpExchange;
import java.io.IOException;
public class TokenAuthTestHandler extends HandlerWithTokenAuth {
@Override
protected String getSecretToken() {
return "deadbeef-coffee-1234";
protected boolean isTokenAllowed(String token) {
return token.equals("deadbeef-coffee-1234");
}
@Override

View File

@ -0,0 +1,57 @@
package org.enso.shttp.cloud_mock;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.PropertyNamingStrategies;
import com.sun.net.httpserver.HttpExchange;
import java.io.IOException;
import java.util.Objects;
import org.enso.shttp.SimpleHttpHandler;
/** An endpoint simulating the refresh token flow. */
public class CloudAuthRenew extends SimpleHttpHandler {
private int counter = 0;
@Override
protected void doHandle(HttpExchange exchange) throws IOException {
JsonNode root = jsonMapper.readTree(decodeBodyAsText(exchange));
String flow = root.get("AuthFlow").asText();
if (!Objects.equals(flow, "REFRESH_TOKEN_AUTH")) {
sendResponse(400, "Invalid flow: " + flow, exchange);
return;
}
String refreshToken = root.get("AuthParameters").get("REFRESH_TOKEN").asText();
switch (refreshToken) {
case "TEST-ENSO-REFRESH-caffee" -> sendRenewedToken(exchange);
case "GET-MALFORMED-RESPONSE" -> sendMalformedResponse(exchange);
case null, default -> sendResponse(401, "Invalid refresh token.", exchange);
}
}
private void sendRenewedToken(HttpExchange exchange) throws IOException {
String newToken = "TEST-RENEWED-" + (counter++);
var response = new RenewResponse(new AuthenticationResult(newToken, "Bearer", 3600));
sendResponse(200, jsonMapper.writeValueAsString(response), exchange);
}
private void sendMalformedResponse(HttpExchange exchange) throws IOException {
var response = new MalformedRenewResponse(new MalformedAuthenticationResult("foobar", 123, 3600));
sendResponse(200, jsonMapper.writeValueAsString(response), exchange);
}
private final ObjectMapper jsonMapper = new ObjectMapper();
{
jsonMapper.setPropertyNamingStrategy(PropertyNamingStrategies.UPPER_CAMEL_CASE);
}
private record RenewResponse(AuthenticationResult authenticationResult) {}
private record AuthenticationResult(String accessToken, String tokenType, int expiresIn) {}
private record MalformedRenewResponse(MalformedAuthenticationResult authenticationResult) {}
private record MalformedAuthenticationResult(String accessToken, int tokenType, int expiresIn) {}
}
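For reference, the exchange this handler simulates looks roughly as follows. The request shape mirrors the fields read above, and the response keys follow the UPPER_CAMEL_CASE naming strategy configured on `jsonMapper` (the renewed token value depends on the internal counter):
Request body:
    {"AuthFlow": "REFRESH_TOKEN_AUTH", "AuthParameters": {"REFRESH_TOKEN": "TEST-ENSO-REFRESH-caffee"}}
Response body (200):
    {"AuthenticationResult": {"AccessToken": "TEST-RENEWED-0", "TokenType": "Bearer", "ExpiresIn": 3600}}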

View File

@ -3,20 +3,35 @@ package org.enso.shttp.cloud_mock;
import com.sun.net.httpserver.HttpExchange;
import java.io.IOException;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import org.enso.shttp.auth.HandlerWithTokenAuth;
public class CloudRoot extends HandlerWithTokenAuth {
public final String prefix = "/enso-cloud-mock/";
@Override
protected String getSecretToken() {
return "TEST-ENSO-TOKEN-caffee";
}
private final ExpiredTokensCounter expiredTokensCounter;
private final CloudHandler[] handlers =
new CloudHandler[] {new UsersHandler(), new SecretsHandler()};
public CloudRoot(ExpiredTokensCounter expiredTokensCounter) {
this.expiredTokensCounter = expiredTokensCounter;
}
@Override
protected boolean isTokenAllowed(String token) {
return token.equals("TEST-ENSO-TOKEN-caffee") || token.startsWith("TEST-RENEWED-");
}
@Override
protected int getInvalidTokenStatus(String token) {
boolean isValidButExpired = token.equals("TEST-EXPIRED-TOKEN-beef");
if (isValidButExpired) {
expiredTokensCounter.registerExpiredTokenFailure();
return 403;
}
return 401;
}
@Override
protected void handleAuthorized(HttpExchange exchange) throws IOException {
URI uri = exchange.getRequestURI();
@ -57,7 +72,7 @@ public class CloudRoot extends HandlerWithTokenAuth {
@Override
public String decodeBodyAsText() throws IOException {
return new String(exchange.getRequestBody().readAllBytes(), StandardCharsets.UTF_8);
return CloudRoot.this.decodeBodyAsText(exchange);
}
};
}

View File

@ -0,0 +1,23 @@
package org.enso.shttp.cloud_mock;
import com.sun.net.httpserver.HttpExchange;
import java.io.IOException;
import org.enso.shttp.SimpleHttpHandler;
/**
* A special endpoint that returns the number of requests which failed due to providing an expired
token. This is used in tests to verify that requests indeed fail and are retried, rather than just
succeeding on the first try.
*/
public class ExpiredTokensCounter extends SimpleHttpHandler {
private long counter = 0;
void registerExpiredTokenFailure() {
counter++;
}
@Override
protected void doHandle(HttpExchange exchange) throws IOException {
sendResponse(200, Long.toString(counter), exchange);
}
}
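As a quick manual check while the HTTP test helper is running locally (started with `sbt 'http-test-helper/run localhost 8080'` as noted earlier), the counter can be queried directly and returns the current count as plain text, for example:
    $ curl http://localhost:8080/COUNT-EXPIRED-TOKEN-FAILURES
    0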

View File

@ -19,6 +19,7 @@ public class SecretsHandler implements CloudHandler {
private final String DIRECTORIES = "directories";
private final ObjectMapper jsonMapper = new ObjectMapper();
// <root> -> <secret_id> -> <secret_value>
private HashMap<String, HashMap<String, Secret>> mapping = new HashMap<>();