Enable audit logs for Snowflake backend (#11306)

- Closes #11292
- Tries to fix #11300

This commit is contained in:
parent 03369b9273
commit 244effde0c
@@ -34,6 +34,7 @@ const REPO_ROOT = url.fileURLToPath(new URL('../'.repeat(DIR_DEPTH), import.meta
 const BASE_DATA_LINKS_ROOT = path.resolve(REPO_ROOT, 'test/Base_Tests/data/datalinks/')
 const S3_DATA_LINKS_ROOT = path.resolve(REPO_ROOT, 'test/AWS_Tests/data/')
 const TABLE_DATA_LINKS_ROOT = path.resolve(REPO_ROOT, 'test/Table_Tests/data/datalinks/')
+const SNOWFLAKE_DATA_LINKS_ROOT = path.resolve(REPO_ROOT, 'test/Snowflake_Tests/data/datalinks/')

 v.test('correctly validates example HTTP .datalink files with the schema', () => {
   const schemas = [

@@ -97,3 +98,11 @@ v.test('correctly validates example Database .datalink files with the schema', (
     testSchema(json, schema)
   }
 })
+
+v.test('correctly validates example Snowflake .datalink files with the schema', () => {
+  const schemas = ['snowflake-db.datalink']
+  for (const schema of schemas) {
+    const json = loadDataLinkFile(path.resolve(SNOWFLAKE_DATA_LINKS_ROOT, schema))
+    testSchema(json, schema)
+  }
+})
@@ -145,8 +145,7 @@ type Refresh_Token_Data
         HTTP_Error.Status_Error status message _ ->
             Authentication_Service.log_message level=..Warning "Refresh token request failed with status "+status.to_text+": "+(message.if_nothing "<no message>")+"."

-            # As per OAuth specification, an expired refresh token should result in a 401 status code: https://www.rfc-editor.org/rfc/rfc6750.html#section-3.1
-            if status.code == 401 then Panic.throw (Cloud_Session_Expired.Error error) else
+            if is_refresh_token_expired status message then Panic.throw (Cloud_Session_Expired.Error error) else
                 # Otherwise, we fail with the generic error that gives more details.
                 Panic.throw (Enso_Cloud_Error.Connection_Error error)
         _ -> Panic.throw (Enso_Cloud_Error.Connection_Error error)

@@ -175,6 +174,13 @@ type Refresh_Token_Data
        it to reduce the chance of it expiring during a request.
     token_early_refresh_period = Duration.new minutes=2

+## PRIVATE
+is_refresh_token_expired status message -> Boolean =
+    # As per OAuth specification, an expired refresh token should result in a 401 status code: https://www.rfc-editor.org/rfc/rfc6750.html#section-3.1
+    # But empirically, it failed with: 400 Bad Request: {"__type":"NotAuthorizedException","message":"Refresh Token has expired"}.
+    # Let's just handle both just in case
+    (status.code == 401) || (status.code == 400 && message.contains '"Refresh Token has expired"')
+
 ## PRIVATE
    A sibling to `get_required_field`.
    This one raises `Illegal_State` error, because it is dealing with local files and not cloud responses.
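For illustration, here is a minimal runnable sketch of the same check applied to plain values (the response bodies are hypothetical, apart from the Cognito message quoted in the comments above):

    from Standard.Base import all

    main =
        # Same expression as is_refresh_token_expired, with the status code passed directly.
        check code body = (code == 401) || (code == 400 && body.contains '"Refresh Token has expired"')
        IO.println (check 401 "<any body>")                                                                 # True: the RFC 6750 case
        IO.println (check 400 '{"__type":"NotAuthorizedException","message":"Refresh Token has expired"}')  # True: the empirically observed 400 response
        IO.println (check 400 '{"message":"Malformed request"}')                                            # False: falls through to Connection_Error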
@@ -9,17 +9,17 @@ from Standard.Base.Enso_Cloud.Public_Utils import get_optional_field, get_required_field
 import project.Connection.Connection_Options.Connection_Options
 import project.Connection.Credentials.Credentials
 import project.Connection.Postgres.Postgres
+import project.Internal.DB_Data_Link_Helpers

 ## PRIVATE
 type Postgres_Data_Link
     ## PRIVATE
        A data-link returning a connection to the specified database.
-    Connection details:Postgres related_asset_id:Text|Nothing
+    Connection details:Postgres source:Data_Link_Source_Metadata

     ## PRIVATE
        A data-link returning a query to a specific table within a database.
-    Table name:Text details:Postgres related_asset_id:Text|Nothing
+    Table name:Text details:Postgres source:Data_Link_Source_Metadata


     ## PRIVATE
     parse json source:Data_Link_Source_Metadata -> Postgres_Data_Link =
@@ -34,23 +34,18 @@ type Postgres_Data_Link
                 password = get_required_field "password" credentials_json |> parse_secure_value
                 Credentials.Username_And_Password username password

-        related_asset_id = case source of
-            Data_Link_Source_Metadata.Cloud_Asset id -> id
-            _ -> Nothing
         details = Postgres.Server host=host port=port database=db_name schema=schema credentials=credentials
         case get_optional_field "table" json expected_type=Text of
             Nothing ->
-                Postgres_Data_Link.Connection details related_asset_id
+                Postgres_Data_Link.Connection details source
             table_name : Text ->
-                Postgres_Data_Link.Table table_name details related_asset_id
+                Postgres_Data_Link.Table table_name details source

     ## PRIVATE
     read self (format = Auto_Detect) (on_problems : Problem_Behavior) =
         _ = on_problems
         if format != Auto_Detect then Error.throw (Illegal_Argument.Error "Only Auto_Detect can be used with a Postgres Data Link, as it points to a database.") else
-            audit_mode = if Enso_User.is_logged_in then "cloud" else "local"
-            options_vector = [["enso.internal.audit", audit_mode]] + (if self.related_asset_id.is_nothing then [] else [["enso.internal.relatedAssetId", self.related_asset_id]])
-            default_options = Connection_Options.Value options_vector
+            default_options = DB_Data_Link_Helpers.data_link_connection_parameters self.source
             connection = self.details.connect default_options allow_data_links=False
             case self of
                 Postgres_Data_Link.Connection _ _ -> connection
@@ -0,0 +1,14 @@
+from Standard.Base import all
+
+from Standard.Base.Enso_Cloud.Data_Link_Helpers import Data_Link_Source_Metadata
+
+import project.Connection.Connection_Options.Connection_Options
+
+## PRIVATE
+data_link_connection_parameters (source : Data_Link_Source_Metadata) -> Connection_Options =
+    related_asset_id = case source of
+        Data_Link_Source_Metadata.Cloud_Asset id -> id
+        _ -> Nothing
+    audit_mode = if Enso_User.is_logged_in then "cloud" else "local"
+    options_vector = [["enso.internal.audit", audit_mode]] + (if related_asset_id.is_nothing then [] else [["enso.internal.relatedAssetId", related_asset_id]])
+    Connection_Options.Value options_vector
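To make the effect of the new helper concrete, a minimal sketch of the two option sets it can produce (the "asset-123" id is a hypothetical placeholder; the option keys and the Connection_Options constructor come from the code above):

    from Standard.Base import all
    import Standard.Database.Connection.Connection_Options.Connection_Options

    main =
        # Logged in and the data link was opened from a cloud asset: audit mode plus the related asset id.
        cloud_case = Connection_Options.Value [["enso.internal.audit", "cloud"], ["enso.internal.relatedAssetId", "asset-123"]]
        # Not logged in, or the data link did not come from a cloud asset: audit mode only.
        local_case = Connection_Options.Value [["enso.internal.audit", "local"]]
        [cloud_case, local_case]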
@@ -36,8 +36,10 @@ type Snowflake_Details

        Arguments:
        - options: Overrides for the connection properties.
-    connect : Connection_Options -> Snowflake_Connection
-    connect self options =
+    connect : Connection_Options -> Boolean -> Snowflake_Connection
+    connect self options (allow_data_links : Boolean = True) =
+        # TODO use this once #11294 is done
+        _ = allow_data_links
         properties = options.merge self.jdbc_properties

     ## Cannot use default argument values as gets in an infinite loop if you do.
@@ -250,7 +250,7 @@ type Snowflake_Dialect
             Dialect_Flag.Supports_Float_Decimal_Places -> True
             Dialect_Flag.Use_Builtin_Bankers -> True
             Dialect_Flag.Primary_Key_Allows_Nulls -> False
-            Dialect_Flag.Supports_Separate_NaN -> False
+            Dialect_Flag.Supports_Separate_NaN -> True
             Dialect_Flag.Supports_Nested_With_Clause -> True
             Dialect_Flag.Supports_Case_Sensitive_Columns -> True

@@ -1,10 +1,10 @@
 from Standard.Base import all
 import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
-from Standard.Base.Enso_Cloud.Data_Link_Helpers import parse_secure_value
+from Standard.Base.Enso_Cloud.Data_Link_Helpers import Data_Link_Source_Metadata, parse_secure_value
 from Standard.Base.Enso_Cloud.Public_Utils import get_optional_field, get_required_field

-import Standard.Database.Connection.Connection_Options.Connection_Options
 import Standard.Database.Connection.Credentials.Credentials
+import Standard.Database.Internal.DB_Data_Link_Helpers

 import project.Connection.Snowflake_Details.Snowflake_Details

@@ -12,15 +12,14 @@ import project.Connection.Snowflake_Details.Snowflake_Details
 type Snowflake_Data_Link
     ## PRIVATE
        A data-link returning a connection to the specified database.
-    Connection details:Snowflake_Details
+    Connection details:Snowflake_Details source:Data_Link_Source_Metadata

     ## PRIVATE
        A data-link returning a query to a specific table within a database.
-    Table name:Text details:Snowflake_Details
+    Table name:Text details:Snowflake_Details source:Data_Link_Source_Metadata

     ## PRIVATE
     parse json source -> Snowflake_Data_Link =
-        _ = source
         account = get_required_field "account" json expected_type=Text
         db_name = get_required_field "database_name" json expected_type=Text
         schema = get_optional_field "schema" json if_missing="SNOWFLAKE" expected_type=Text

@@ -34,17 +33,17 @@ type Snowflake_Data_Link
         details = Snowflake_Details.Snowflake account=account database=db_name schema=schema warehouse=warehouse credentials=credentials
         case get_optional_field "table" json expected_type=Text of
             Nothing ->
-                Snowflake_Data_Link.Connection details
+                Snowflake_Data_Link.Connection details source
             table_name : Text ->
-                Snowflake_Data_Link.Table table_name details
+                Snowflake_Data_Link.Table table_name details source

     ## PRIVATE
     read self (format = Auto_Detect) (on_problems : Problem_Behavior) =
         _ = on_problems
         if format != Auto_Detect then Error.throw (Illegal_Argument.Error "Only Auto_Detect can be used with a Snowflake Data Link, as it points to a database.") else
-            default_options = Connection_Options.Value
-            connection = self.details.connect default_options
+            default_options = DB_Data_Link_Helpers.data_link_connection_parameters self.source
+            connection = self.details.connect default_options allow_data_links=False
             case self of
-                Snowflake_Data_Link.Connection _ -> connection
-                Snowflake_Data_Link.Table table_name _ ->
+                Snowflake_Data_Link.Connection _ _ -> connection
+                Snowflake_Data_Link.Table table_name _ _ ->
                     connection.query table_name
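As a usage sketch (the file name is hypothetical, and this assumes data links are read through the standard Data.read entry point), reading such a Snowflake data link now yields a connection whose options already include the audit settings produced by DB_Data_Link_Helpers:

    from Standard.Base import all

    main =
        # Hypothetical: read a local copy of the data link; Snowflake_Data_Link.read then connects
        # with the "enso.internal.audit" option set ("cloud" when logged in, "local" otherwise).
        connection = Data.read "snowflake-db.datalink"
        connection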
@@ -1,8 +1,10 @@
 private

 from Standard.Base import all
+import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
 import Standard.Base.Runtime.Context
 import Standard.Base.Runtime.Ref.Ref
+from Standard.Base.Logging import all
 from Standard.Base.Runtime import assert

 import project.Group.Group

@@ -12,6 +14,7 @@ import project.Suite_Config.Suite_Config
 import project.Test.Test
 import project.Test_Result.Test_Result

+polyglot java import java.lang.IllegalArgumentException
 polyglot java import java.lang.StringBuilder
 polyglot java import java.lang.System as Java_System

@@ -93,17 +96,39 @@ print_single_result (test_result : Test_Result) (config : Suite_Config) =
    only if running in the GitHub Actions environment.
 report_github_error_message (title : Text) (message : Text) =
     is_enabled = Environment.get "GITHUB_ACTIONS" == "true"
+    if is_enabled then
+        IO.println (generate_github_error_annotation title message)
+
+## PRIVATE
+   Generates a GitHub Actions annotation for a failing test.
+generate_github_error_annotation (title : Text) (message : Text) =
     sanitize_message txt = txt.replace '\n' '%0A'
     sanitize_parameter txt = txt . replace '\n' '%0A' . replace ',' '%2C'
-    if is_enabled then
-        location_match = Regex.compile "at ((?:[A-Za-z]:)?[^:]+):([0-9]+):" . match message
-        location_part = if location_match.is_nothing then "" else
+    location_match = Regex.compile "\(at ((?:[A-Za-z]:)?[^:]+):([0-9\-]+):([0-9\-]+)\)" . match message
+    split_on_dash start_field_name end_field_name text =
+        if text.contains "-" . not then start_field_name+"="+text else
+            parts = text.split "-"
+            if parts.length != 2 then Panic.throw (Illegal_Argument.Error "Invalid range format: "+text) else
+                start = parts.at 0
+                end = parts.at 1
+                start_field_name+"="+start+","+end_field_name+"="+end
+    on_location_failing_to_parse caught_panic =
+        Test_Result.log_message "Failed to parse file location ["+location_match.to_display_text+"]: "+caught_panic.payload.to_display_text
+        ""
+    location_part = if location_match.is_nothing then "" else
+        Panic.catch Any handler=on_location_failing_to_parse <|
             repo_root = enso_project.root.parent.parent.absolute.normalize
             path = File.new (location_match.get 1)
-            relative_path = repo_root.relativize path
-            line = location_match.get 2
-            ",file="+(sanitize_parameter relative_path.path)+",line="+(sanitize_parameter line)+""
-        IO.println "::error title="+(sanitize_parameter title)+location_part+"::"+(sanitize_message message)
+            ## We try to relativize the path, but if it fails, we just use the original path.
+               (It may fail eg. if the project root and the test files are on different drives -
+               very unlikely but not impossible if tests import other libraries located in weird places.)
+            relative_path = Panic.catch IllegalArgumentException (repo_root.relativize path) _->path
+            lines_part = split_on_dash "line" "endLine" (location_match.get 2)
+            columns_part = split_on_dash "col" "endColumn" (location_match.get 3)
+            ",file="+(sanitize_parameter relative_path.path)+","+lines_part+","+columns_part
+
+    "::error title="+(sanitize_parameter title)+location_part+"::"+(sanitize_message message)

 ## Prints all the results, optionally writing them to a jUnit XML output.

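For reference, the annotation format this produces is pinned down by the new Github_Annotations_Spec added below; for a hypothetical failure spanning lines 1-2, columns 3-4 of test/Base_Internal_Tests/src/test-file.enso (the absolute path in the message is an example), the emitted line looks like:

    ::error title=Test,file=test/Base_Internal_Tests/src/test-file.enso,line=1,endLine=2,col=3,endColumn=4::test failure (at /repo/test/Base_Internal_Tests/src/test-file.enso:1-2:3-4).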
@@ -614,6 +614,7 @@ public final class EnsoFile implements EnsoObject {
   }

   @Builtin.Method
+  @Builtin.WrapException(from = IllegalArgumentException.class)
   @TruffleBoundary
   public EnsoFile relativize(EnsoFile other) {
     return new EnsoFile(this.truffleFile.relativize(other.truffleFile));
test/Base_Internal_Tests/src/Github_Annotations_Spec.enso (new file, 31 lines)
@@ -0,0 +1,31 @@
+from Standard.Base import all
+
+from Standard.Test import all
+import Standard.Test.Test_Reporter
+
+add_specs suite_builder = suite_builder.group "Test Reporter running on GitHub" group_builder->
+    file = (enso_project.root / "src" / "test-file.enso") . absolute . normalize
+    file_relative_to_repo_root = (File.new "test") / "Base_Internal_Tests" / "src" / "test-file.enso"
+    group_builder.specify "should correctly parse error message" <|
+        message = "[False, False, False, True] did not equal [False, False, True, True]; first difference at index 2 (at "+file.path+":1:13-110)."
+        line = Test_Reporter.generate_github_error_annotation 'Test, and\n special characters' message
+        line.should_equal "::error title=Test%2C and%0A special characters,file="+file_relative_to_repo_root.path+",line=1,col=13,endColumn=110::[False, False, False, True] did not equal [False, False, True, True]; first difference at index 2 (at "+file.path+":1:13-110)."
+
+    group_builder.specify "should be able to parse dashes" <|
+        message = "test failure (at "+file.path+":1-2:3-4)."
+        line = Test_Reporter.generate_github_error_annotation "Test" message
+        line.should_equal "::error title=Test,file="+file_relative_to_repo_root.path+",line=1,endLine=2,col=3,endColumn=4::test failure (at "+file.path+":1-2:3-4)."
+
+        message2 = "test failure (at "+file.path+":1234-5678:91011-121314)."
+        line2 = Test_Reporter.generate_github_error_annotation "Test" message2
+        line2.should_equal "::error title=Test,file="+file_relative_to_repo_root.path+",line=1234,endLine=5678,col=91011,endColumn=121314::test failure (at "+file.path+":1234-5678:91011-121314)."
+
+    group_builder.specify "should be able to parse no dashes" <|
+        message = "test failure (at "+file.path+":1234:7)."
+        line = Test_Reporter.generate_github_error_annotation "Test" message
+        line.should_equal "::error title=Test,file="+file_relative_to_repo_root.path+",line=1234,col=7::test failure (at "+file.path+":1234:7)."
+
+main filter=Nothing =
+    suite = Test.build suite_builder->
+        add_specs suite_builder
+    suite.run_with_filter filter
@@ -6,6 +6,7 @@ import project.Input_Output_Spec
 import project.Comparator_Spec
 import project.Decimal_Constructor_Spec
 import project.Grapheme_Spec
+import project.Github_Annotations_Spec

 main filter=Nothing =
     suite = Test.build suite_builder->

@@ -13,5 +14,6 @@ main filter=Nothing =
         Decimal_Constructor_Spec.add_specs suite_builder
         Grapheme_Spec.add_specs suite_builder
         Input_Output_Spec.add_specs suite_builder
+        Github_Annotations_Spec.add_specs suite_builder

     suite.run_with_filter filter
test/Snowflake_Tests/data/datalinks/snowflake-db.datalink (new file, 12 lines)
@@ -0,0 +1,12 @@
+{
+    "type": "Snowflake_Connection",
+    "libraryName": "Standard.Snowflake",
+    "account": "ACCOUNT",
+    "database_name": "DBNAME",
+    "schema": "SCHEMA",
+    "warehouse": "WAREHOUSE",
+    "credentials": {
+        "username": "USERNAME",
+        "password": "PASSWORD"
+    }
+}
@@ -1,7 +1,9 @@
 from Standard.Base import all
+import Standard.Base.Enso_Cloud.Data_Link.Data_Link
 import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
 import Standard.Base.Errors.Illegal_State.Illegal_State
 import Standard.Base.Runtime.Ref.Ref
+from Standard.Base.Enso_Cloud.Data_Link_Helpers import secure_value_to_json

 from Standard.Table import Table, Value_Type, Aggregate_Column, Bits, expr
 from Standard.Table.Errors import Invalid_Column_Names, Inexact_Type_Coercion, Duplicate_Output_Column_Names

@@ -21,6 +23,7 @@ from Standard.Test import all
 import Standard.Test.Test_Environment

 import enso_dev.Table_Tests
+import enso_dev.Table_Tests.Database.Common.Audit_Spec
 import enso_dev.Table_Tests.Database.Common.Common_Spec
 import enso_dev.Table_Tests.Database.Common.IR_Spec
 import enso_dev.Table_Tests.Database.Transaction_Spec
@@ -569,16 +572,43 @@ supported_replace_params =
     e4 = [Replace_Params.Value DB_Column Case_Sensitivity.Default False, Replace_Params.Value DB_Column Case_Sensitivity.Sensitive False]
     Hashset.from_vector <| e0 + e1 + e2 + e3 + e4

+transform_file base_file connection_details =
+    content = Data_Link.read_raw_config base_file
+
+    if connection_details.credentials.password.is_a Enso_Secret then
+        Panic.throw (Illegal_State.Error "Currently Snowflake tests need a plaintext password in environment variable to run.")
+
+    new_content = content
+        . replace "ACCOUNT" connection_details.account
+        . replace "DBNAME" connection_details.database
+        . replace "SCHEMA" connection_details.schema
+        . replace "WAREHOUSE" connection_details.warehouse
+        . replace "USERNAME" connection_details.credentials.username
+        . replace "PASSWORD" connection_details.credentials.password
+
+    temp_file = File.create_temporary_file "snowflake-test-db" ".datalink"
+    Data_Link.write_raw_config temp_file new_content replace_existing=True . if_not_error temp_file
+
+type Temporary_Data_Link_File
+    Value ~get
+
+    make connection_details = Temporary_Data_Link_File.Value <|
+        transform_file (enso_project.data / "datalinks" / "snowflake-db.datalink") connection_details
+
 add_table_specs suite_builder =
-    case create_connection_builder of
+    case get_configured_connection_details of
         Nothing ->
             message = "Snowflake test connection is not configured. See README.md for instructions."
             suite_builder.group "[Snowflake] Database tests" pending=message (_-> Nothing)
-        connection_builder ->
-            db_name = get_configured_connection_details.database
+        connection_details ->
+            db_name = connection_details.database
+            connection_builder = _ -> Database.connect connection_details
             add_snowflake_specs suite_builder connection_builder db_name
             Transaction_Spec.add_specs suite_builder connection_builder "[Snowflake] "
+
+            data_link_file = Temporary_Data_Link_File.make connection_details
+            Audit_Spec.add_specs suite_builder "[Snowflake] " data_link_file.get database_pending=Nothing

     suite_builder.group "[Snowflake] Secrets in connection settings" group_builder->
         cloud_setup = Cloud_Tests_Setup.prepare
         base_details = get_configured_connection_details
@@ -625,14 +655,6 @@ get_configured_connection_details = Panic.rethrow <|
     credentials = Credentials.Username_And_Password user resolved_password
     Snowflake_Details.Snowflake account_name credentials database schema warehouse

-## Returns a function that takes anything and returns a new connection.
-   The function creates a _new_ connection on each invocation
-   (this is needed for some tests that need multiple distinct connections).
-create_connection_builder =
-    connection_details = get_configured_connection_details
-    connection_details.if_not_nothing <|
-        _ -> Database.connect connection_details
-
 add_specs suite_builder =
     add_table_specs suite_builder

@@ -7,6 +7,7 @@ from Standard.Database.Errors import Integrity_Error
 from Standard.Test import all

 from project.Common_Table_Operations.Util import run_default_backend
+import project.Common_Table_Operations.Util

 main filter=Nothing = run_default_backend add_specs filter

@@ -15,7 +16,7 @@ add_specs suite_builder setup =

 add_coalesce_specs suite_builder setup =
     prefix = setup.prefix
-    table_builder = setup.table_builder
+    table_builder = Util.build_sorted_table setup

     suite_builder.group prefix+"Table.coalesce" group_builder->
         group_builder.specify "2 columns" <|