Save Database connection as data link, SQL Server data link support (#11343)

- Closes #11294
Radosław Waśko 2024-10-17 11:06:57 +02:00 committed by GitHub
parent a45e233f03
commit d75e20c1d2
22 changed files with 408 additions and 164 deletions

View File

@ -35,6 +35,7 @@ const BASE_DATA_LINKS_ROOT = path.resolve(REPO_ROOT, 'test/Base_Tests/data/datal
const S3_DATA_LINKS_ROOT = path.resolve(REPO_ROOT, 'test/AWS_Tests/data/')
const TABLE_DATA_LINKS_ROOT = path.resolve(REPO_ROOT, 'test/Table_Tests/data/datalinks/')
const SNOWFLAKE_DATA_LINKS_ROOT = path.resolve(REPO_ROOT, 'test/Snowflake_Tests/data/datalinks/')
const SQLSERVER_DATA_LINKS_ROOT = path.resolve(REPO_ROOT, 'test/Microsoft_Tests/data/datalinks/')
v.test('correctly validates example HTTP .datalink files with the schema', () => {
const schemas = [
@ -106,3 +107,11 @@ v.test('correctly validates example Snowflake .datalink files with the schema',
testSchema(json, schema)
}
})
v.test('correctly validates example SQLServer .datalink files with the schema', () => {
const schemas = ['sqlserver-db.datalink']
for (const schema of schemas) {
const json = loadDataLinkFile(path.resolve(SQLSERVER_DATA_LINKS_ROOT, schema))
testSchema(json, schema)
}
})

View File

@ -8,7 +8,8 @@
{ "$ref": "#/$defs/EnsoFileDataLink" },
{ "$ref": "#/$defs/HttpFetchDataLink" },
{ "$ref": "#/$defs/PostgresDataLink" },
{ "$ref": "#/$defs/SnowflakeDataLink" }
{ "$ref": "#/$defs/SnowflakeDataLink" },
{ "$ref": "#/$defs/SQLServerDataLink" }
],
"$comment": "The fields `type` and `libraryName` are required for all data link types, but we currently don't add a top-level `required` setting to the schema, because it was confusing the code that is generating the modal."
},
@ -236,6 +237,50 @@
},
"required": ["type", "libraryName", "account", "database_name", "credentials"]
},
"SQLServerDataLink": {
"title": "SQL Server Database Connection",
"type": "object",
"properties": {
"type": {
"title": "Type",
"const": "SQLServer_Connection",
"type": "string"
},
"libraryName": { "const": "Standard.Microsoft" },
"host": {
"title": "Hostname",
"type": "string"
},
"port": {
"title": "Port",
"type": "integer",
"minimum": 1,
"maximum": 65535,
"default": 1433
},
"database_name": {
"title": "Database Name",
"type": "string"
},
"credentials": {
"title": "Credentials",
"type": "object",
"properties": {
"username": {
"title": "Username",
"$ref": "#/$defs/SecureValue"
},
"password": {
"title": "Password",
"$ref": "#/$defs/SecureValue"
}
},
"required": ["username", "password"]
},
"table": { "title": "Table to access", "type": "string" }
},
"required": ["type", "libraryName", "host", "port", "database_name"]
},
"Format": {
"title": "Format",

View File

@ -182,3 +182,27 @@ disallow_links_in_move source target ~action =
if is_source_data_link && is_target_data_link then Error.throw (Illegal_Argument.Error "The `move_to` operation cannot be used with data links. If you want to move the link, use `Data_Link.move`.") else
if is_source_data_link || is_target_data_link then Error.throw (Illegal_Argument.Error "The `move_to` operation cannot be used with data links. Please `.read` the data link and then write the data to the destination using the appropriate method.") else
action
## PRIVATE
Takes a secure value (either a Text or Enso_Secret) and returns a secret representation of it.
If given an existing secret, it will be returned as-is.
However, if given a plain text, it will create a new secret in the provided directory.
Because it may create a new secret, this should only be run within an enabled Output context.
store_as_secret base_location:Enso_File name_hint:Text secure_value:Text|Enso_Secret -> Enso_Secret = case secure_value of
existing_secret : Enso_Secret -> existing_secret
plain_text : Text ->
create_fresh_secret ix =
secret_name = name_hint + (if ix == 0 then "" else "-"+ix.to_text)
r = Enso_Secret.create secret_name plain_text base_location
r.catch Illegal_Argument error->
if error.message.contains "already exists" then create_fresh_secret ix+1 else r
create_fresh_secret 0
## PRIVATE
save_password_for_data_link data_link_location:Enso_File secure_value:Text|Enso_Secret name_hint:Text="password" -> Enso_Secret =
secret_location = data_link_location.parent.if_nothing (Error.throw (Illegal_State.Error "Trying to create a secret to store the Data Link password, but the provided data link location: "+data_link_location.to_text+" does not have a parent directory. This should not happen."))
location_name = if data_link_location.name.ends_with data_link_extension then data_link_location.name.drop (..Last data_link_extension.length) else data_link_location.name
secret_location.if_not_error <|
store_as_secret secret_location location_name+"-"+name_hint secure_value
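For illustration, a minimal usage sketch of the new helpers (not part of the diff; the function name, link location and password are placeholders):
from Standard.Base import all
from Standard.Base.Enso_Cloud.Data_Link_Helpers import save_password_for_data_link
## Hypothetical example: promote a plain-text password to a secret stored
   next to the given data link location. Must run with an enabled Output context.
example_promote_password my_link:Enso_File =
    # For "my-db.datalink" this creates a secret named "my-db-password",
    # retrying "my-db-password-1", "-2", ... if that name is already taken.
    save_password_for_data_link my_link "PLAIN_TEXT_PASSWORD"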

View File

@ -1,5 +1,6 @@
import project.Any.Any
import project.Data.Array.Array
import project.Data.Text.Text
import project.Data.Vector.Vector
import project.Error.Error
import project.Meta
@ -292,3 +293,6 @@ type Wrapped_Dataflow_Error
## PRIVATE
Throws the original error.
unwrap self = Error.throw self.payload
## PRIVATE
to_display_text self -> Text = "Wrapped_Dataflow_Error: "+self.payload.to_display_text

View File

@ -1,6 +1,7 @@
from Standard.Base import all
import Standard.Base.Data.Numbers.Number_Parse_Error
import Standard.Base.Errors.Common.Type_Error
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.Errors.Illegal_State.Illegal_State
import project.Connection.Client_Certificate.Client_Certificate
@ -8,8 +9,8 @@ import project.Connection.Connection_Options.Connection_Options
import project.Connection.Credentials.Credentials
import project.Connection.Postgres_Connection.Postgres_Connection
import project.Connection.SSL_Mode.SSL_Mode
import project.Internal.Data_Link_Setup.Data_Link_Setup
import project.Internal.Postgres.Pgpass
import project.Internal.Postgres.Postgres_Data_Link_Setup.Postgres_Data_Link_Setup
polyglot java import org.postgresql.Driver
@ -47,8 +48,8 @@ type Postgres
connect self options (allow_data_links : Boolean = True) =
if Driver.isRegistered.not then Driver.register
data_link_setup = if allow_data_links then Postgres_Data_Link_Setup.Available self else
Postgres_Data_Link_Setup.Unavailable "Saving connections established through a Data Link is not allowed. Please copy the Data Link instead."
data_link_setup = if allow_data_links then Data_Link_Setup.Available (create_data_link_structure self) else
Data_Link_Setup.already_a_data_link
properties = options.merge self.jdbc_properties
## Cannot use default argument values, as it gets into an infinite loop if you do.
@ -117,3 +118,15 @@ default_postgres_port =
## PRIVATE
default_postgres_database = Environment.get "PGDATABASE" "postgres"
## PRIVATE
private create_data_link_structure details:Postgres data_link_location:Enso_File -> JS_Object =
credentials_json = details.credentials.if_not_nothing <|
Data_Link_Setup.save_credentials_for_data_link data_link_location details.credentials
if (details.use_ssl != SSL_Mode.Prefer) || details.client_cert.is_nothing.not then Error.throw (Illegal_Argument.Error "Cannot save connection as Data Link: custom SSL settings are currently unsupported.") else
JS_Object.from_pairs <|
header = [["type", "Postgres_Connection"], ["libraryName", "Standard.Database"]]
connection_part = [["host", details.host], ["port", details.port], ["database_name", details.database]]
schema_part = if details.schema.not_empty then [["schema", details.schema]] else []
credential_part = if credentials_json.is_nothing.not then [["credentials", credentials_json]] else []
header + connection_part + schema_part + credential_part

View File

@ -13,7 +13,7 @@ import project.Dialect
import project.Internal.Connection.Entity_Naming_Properties.Entity_Naming_Properties
import project.Internal.IR.Query.Query
import project.Internal.JDBC_Connection
import project.Internal.Postgres.Postgres_Data_Link_Setup.Postgres_Data_Link_Setup
import project.Internal.Data_Link_Setup.Data_Link_Setup
import project.Internal.SQL_Type_Reference.SQL_Type_Reference
import project.SQL_Query.SQL_Query
import project.SQL_Statement.SQL_Statement
@ -32,7 +32,8 @@ type Postgres_Connection
- url: The URL to connect to.
- properties: A vector of properties for the connection.
- make_new: A function that returns a new connection.
create : Text -> Vector -> (Text -> Text -> Postgres_Connection) -> Postgres_Data_Link_Setup -> Postgres_Connection
- data_link_setup: The setup for saving the connection as a data link.
create : Text -> Vector -> (Text -> Text -> Postgres_Connection) -> Data_Link_Setup -> Postgres_Connection
create url properties make_new data_link_setup =
jdbc_connection = JDBC_Connection.create url properties
encoding = parse_postgres_encoding (get_encoding_name jdbc_connection)
@ -52,7 +53,8 @@ type Postgres_Connection
Arguments:
- connection: the underlying connection.
- make_new: a function that returns a new connection.
private Value (connection:Connection) (make_new : Text -> Text -> Postgres_Connection) (data_link_setup : Postgres_Data_Link_Setup)
- data_link_setup: the setup for saving the connection as a data link.
private Value (connection:Connection) (make_new : Text -> Text -> Postgres_Connection) (data_link_setup : Data_Link_Setup)
## ICON close
Closes the connection releasing the underlying database resources

View File

@ -958,9 +958,8 @@ type DB_Table
t2.read
limit : Integer -> DB_Table
limit self max_rows:Integer=1000 =
Feature.Sample.if_supported_else_throw self.connection.dialect "limit" <|
new_ctx = self.context.set_limit max_rows
self.updated_context new_ctx
new_ctx = self.context.set_limit max_rows
self.updated_context new_ctx
## ALIAS add column, expression, formula, new column, update column
GROUP Standard.Base.Values

View File

@ -0,0 +1,47 @@
from Standard.Base import all
import Standard.Base.Enso_Cloud.Data_Link.Data_Link
import Standard.Base.Errors.File_Error.File_Error
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.Errors.Illegal_State.Illegal_State
import Standard.Base.Runtime.Context
from Standard.Base.Enso_Cloud.Data_Link_Helpers import data_link_extension, secure_value_to_json, save_password_for_data_link
import project.Connection.Credentials.Credentials
## PRIVATE
type Data_Link_Setup
## PRIVATE
Available create_data_link_structure:Enso_File->JS_Object
## PRIVATE
Unavailable cause:Text
## PRIVATE
Returns an unavailable setup, the reason being that the connection was already a data link.
already_a_data_link -> Data_Link_Setup = Data_Link_Setup.Unavailable "Saving connections established through a Data Link is not allowed. Please copy the Data Link instead."
## PRIVATE
save_as_data_link self destination on_existing_file:Existing_File_Behavior = case self of
Data_Link_Setup.Available create_fn -> Context.Output.if_enabled disabled_message="As writing is disabled, cannot save to a Data Link. Press the Write button ▶ to perform the operation." panic=False <|
case destination of
_ : Enso_File ->
replace_existing = case on_existing_file of
Existing_File_Behavior.Overwrite -> True
Existing_File_Behavior.Error -> False
_ -> Error.throw (Illegal_Argument.Error "Invalid value for `on_existing_file` parameter, only `Overwrite` and `Error` are supported here.")
exists_checked = if replace_existing.not && destination.exists then Error.throw (File_Error.Already_Exists destination)
exists_checked.if_not_error <|
json = create_fn destination
Data_Link.write_config destination json replace_existing
_ -> Error.throw (Illegal_Argument.Error "Currently a connection can only be saved as a Data Link into the Enso Cloud. Please provide an `Enso_File` as destination.")
Data_Link_Setup.Unavailable cause ->
Error.throw (Illegal_Argument.Error "Cannot save connection as Data Link: "+cause)
## PRIVATE
save_credentials_for_data_link data_link_location:Enso_File credentials:Credentials -> JS_Object =
# A plain text is automatically promoted to a secret.
secret_password = save_password_for_data_link data_link_location credentials.password
# But we keep the username as-is - if it was in plain text, it will stay in plain text.
JS_Object.from_pairs [["username", secure_value_to_json credentials.username], ["password", secure_value_to_json secret_password]]

View File

@ -1,70 +0,0 @@
private
from Standard.Base import all
import Standard.Base.Enso_Cloud.Data_Link.Data_Link
import Standard.Base.Errors.File_Error.File_Error
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.Errors.Illegal_State.Illegal_State
import Standard.Base.Runtime.Context
from Standard.Base.Enso_Cloud.Data_Link_Helpers import data_link_extension, secure_value_to_json
import project.Connection.Credentials.Credentials
import project.Connection.Postgres.Postgres
import project.Connection.SSL_Mode.SSL_Mode
## PRIVATE
type Postgres_Data_Link_Setup
## PRIVATE
Available details:Postgres
## PRIVATE
Unavailable cause:Text
## PRIVATE
save_as_data_link self destination on_existing_file:Existing_File_Behavior = case self of
Postgres_Data_Link_Setup.Available details -> Context.Output.if_enabled disabled_message="As writing is disabled, cannot save to a Data Link. Press the Write button ▶ to perform the operation." panic=False <|
case destination of
_ : Enso_File ->
replace_existing = case on_existing_file of
Existing_File_Behavior.Overwrite -> True
Existing_File_Behavior.Error -> False
_ -> Error.throw (Illegal_Argument.Error "Invalid value for `on_existing_file` parameter, only `Overwrite` and `Error` are supported here.")
exists_checked = if replace_existing.not && destination.exists then Error.throw (File_Error.Already_Exists destination)
exists_checked.if_not_error <|
credentials = prepare_credentials destination details
if (details.use_ssl != SSL_Mode.Prefer) || details.client_cert.is_nothing.not then Error.throw (Illegal_Argument.Error "Cannot save connection as Data Link: custom SSL settings are currently unsupported.") else
json = JS_Object.from_pairs <|
header = [["type", "Postgres_Connection"], ["libraryName", "Standard.Database"]]
connection_part = [["host", details.host], ["port", details.port], ["database_name", details.database]]
schema_part = if details.schema.not_empty then [["schema", details.schema]] else []
credential_part = if credentials.is_nothing.not then [["credentials", credentials]] else []
header + connection_part + schema_part + credential_part
Data_Link.write_config destination json replace_existing
_ -> Error.throw (Illegal_Argument.Error "Currently a connection can only be saved as a Data Link into the Enso Cloud. Please provide an `Enso_File` as destination.")
Postgres_Data_Link_Setup.Unavailable cause ->
Error.throw (Illegal_Argument.Error "Cannot save connection as Data Link: "+cause)
## PRIVATE
prepare_credentials data_link_location:Enso_File details:Postgres -> JS_Object | Nothing =
case details.credentials of
Nothing -> Nothing
credentials:Credentials ->
# A plain text is automatically promoted to a secret.
secret_password = case credentials.password of
secret : Enso_Secret -> secret
plain_text_password : Text ->
secret_location = data_link_location.parent.if_nothing (Error.throw (Illegal_State.Error "Trying to create a secret to store the Data Link password, but the provided data link location: "+data_link_location.to_text+" does not have a parent directory. This should not happen."))
location_name = if data_link_location.name.ends_with data_link_extension then data_link_location.name.drop (..Last data_link_extension.length) else data_link_location.name
create_fresh_secret ix =
secret_name = location_name + "-password" + (if ix == 0 then "" else "-"+ix.to_text)
r = Enso_Secret.create secret_name plain_text_password secret_location
r.catch Illegal_Argument error->
if error.message.contains "already exists" then create_fresh_secret ix+1 else r
secret_location.if_not_error <| location_name.if_not_error <|
create_fresh_secret 0
# But we keep the username as-is - if it was in plain text, it will stay in plain text.
JS_Object.from_pairs [["username", secure_value_to_json credentials.username], ["password", secure_value_to_json secret_password]]

View File

@ -7,6 +7,7 @@ import Standard.Base.Metadata.Widget.Text_Input
import Standard.Database.Connection.Connection_Options.Connection_Options
import Standard.Database.Connection.Credentials.Credentials
import Standard.Database.Internal.Data_Link_Setup.Data_Link_Setup
import project.SQLServer_Connection.SQLServer_Connection
@ -33,13 +34,15 @@ type SQLServer_Details
Arguments:
- options: Overrides for the connection properties.
connect : Connection_Options -> SQLServer_Connection
connect self options =
connect : Connection_Options -> Boolean -> SQLServer_Connection
connect self options (allow_data_links : Boolean = True) =
data_link_setup = if allow_data_links then Data_Link_Setup.Available (create_data_link_structure self) else
Data_Link_Setup.already_a_data_link
properties = options.merge self.jdbc_properties
make_new database =
SQLServer_Details.SQLServer self.host self.credentials self.port (database.if_nothing self.database) . connect options
SQLServer_Connection.create self.jdbc_url properties make_new
SQLServer_Connection.create self.jdbc_url properties make_new data_link_setup
## PRIVATE
Provides the jdbc url for the connection.
@ -55,3 +58,12 @@ type SQLServer_Details
database = [Pair.new 'databaseName' self.database]
credentials = [Pair.new 'user' self.credentials.username, Pair.new 'password' self.credentials.password]
account + database + credentials
## PRIVATE
private create_data_link_structure details:SQLServer_Details data_link_location:Enso_File -> JS_Object =
credentials_json = Data_Link_Setup.save_credentials_for_data_link data_link_location details.credentials
JS_Object.from_pairs <|
header = [["type", "SQLServer_Connection"], ["libraryName", "Standard.Microsoft"]]
connection_part = [["host", details.host], ["port", details.port], ["database_name", details.database]]
credential_part = [["credentials", credentials_json]]
header + connection_part + credential_part

View File

@ -92,7 +92,7 @@ type SQLServer_Dialect
Generates SQL modifier for limiting the number of rows and its position in the query
get_limit_sql_modifier : Integer -> Any
get_limit_sql_modifier self limit =
[150, SQL_Builder.code (" TOP " + limit.to_text)]
[150, SQL_Builder.code (" TOP " + limit.to_text + " ")]
## PRIVATE
Wraps and possibly escapes the identifier so that it can be used in a

View File

@ -11,6 +11,7 @@ import Standard.Database.Connection.Connection.Connection
import Standard.Database.DB_Table.DB_Table
import Standard.Database.Internal.Common.Encoding_Limited_Naming_Properties.Encoding_Limited_Naming_Properties
import Standard.Database.Internal.Connection.Entity_Naming_Properties.Entity_Naming_Properties
import Standard.Database.Internal.Data_Link_Setup.Data_Link_Setup
import Standard.Database.Internal.JDBC_Connection
import Standard.Database.SQL_Query.SQL_Query
import Standard.Database.SQL_Statement.SQL_Statement
@ -29,14 +30,15 @@ type SQLServer_Connection
- url: The URL to connect to.
- properties: A vector of properties for the connection.
- make_new: A function that returns a new connection.
create : Text -> Vector -> (Text -> Text -> SQLServer_Connection) -> SQLServer_Connection
create url properties make_new =
- data_link_setup: the setup for saving the connection as a data link.
create : Text -> Vector -> (Text -> Text -> SQLServer_Connection) -> Data_Link_Setup -> SQLServer_Connection
create url properties make_new data_link_setup =
jdbc_connection = JDBC_Connection.create url properties
jdbc_entity_naming_properties = Entity_Naming_Properties.from_jdbc_connection jdbc_connection is_case_sensitive=True
## JDBC reports the table name length limit as 128, but it actually seems to be 116 for temp tables, so we override it
limited = Encoding_Limited_Naming_Properties.Instance Encoding.utf_8 limit=116 is_case_sensitive=True
modified_entity_naming_properties = Entity_Naming_Properties.Value for_table_names=limited for_column_names=jdbc_entity_naming_properties.for_column_names for_generated_column_names=jdbc_entity_naming_properties.for_generated_column_names
SQLServer_Connection.Value (Connection.new jdbc_connection SQLServer_Dialect.sqlserver modified_entity_naming_properties) make_new
SQLServer_Connection.Value (Connection.new jdbc_connection SQLServer_Dialect.sqlserver modified_entity_naming_properties) make_new data_link_setup
## PRIVATE
@ -45,7 +47,8 @@ type SQLServer_Connection
Arguments:
- connection: the underlying connection.
- make_new: a function that returns a new connection.
Value connection make_new
- data_link_setup: the setup for saving the connection as a data link.
Value connection make_new data_link_setup
## ICON close
Closes the connection releasing the underlying database resources
@ -291,3 +294,15 @@ type SQLServer_Connection
on the 'subclasses'.
base_connection : Connection
base_connection self = self.connection
## ICON data_output
Creates a Data Link that will open the same connection.
@on_existing_file (Existing_File_Behavior.widget include_backup=False include_append=False)
save_as_data_link self destination (on_existing_file:Existing_File_Behavior = Existing_File_Behavior.Error) =
self.data_link_setup.save_as_data_link destination on_existing_file
## PRIVATE
Converts this value to a JSON serializable object.
to_js_object : JS_Object
to_js_object self =
JS_Object.from_pairs [["type", "SQLServer_Connection"], ["links", self.tables.at "Name" . to_vector]]

View File

@ -0,0 +1,48 @@
from Standard.Base import all
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
from Standard.Base.Enso_Cloud.Data_Link_Helpers import Data_Link_Source_Metadata, parse_secure_value
from Standard.Base.Enso_Cloud.Public_Utils import get_optional_field, get_required_field
import Standard.Database.Connection.Credentials.Credentials
import Standard.Database.Internal.DB_Data_Link_Helpers
import project.Connection.SQLServer_Details.SQLServer_Details
## PRIVATE
type SQLServer_Data_Link
## PRIVATE
A data-link returning a connection to the specified database.
Connection details:SQLServer_Details source:Data_Link_Source_Metadata
## PRIVATE
A data-link returning a query to a specific table within a database.
Table name:Text details:SQLServer_Details source:Data_Link_Source_Metadata
## PRIVATE
parse json source -> SQLServer_Data_Link =
host = get_required_field "host" json expected_type=Text
port = get_required_field "port" json expected_type=Integer
db_name = get_required_field "database_name" json expected_type=Text
credentials_json = get_required_field "credentials" json
username = get_required_field "username" credentials_json |> parse_secure_value
password = get_required_field "password" credentials_json |> parse_secure_value
credentials = Credentials.Username_And_Password username password
details = SQLServer_Details.SQLServer host=host credentials=credentials port=port database=db_name
case get_optional_field "table" json expected_type=Text of
Nothing ->
SQLServer_Data_Link.Connection details source
table_name : Text ->
SQLServer_Data_Link.Table table_name details source
## PRIVATE
read self (format = Auto_Detect) (on_problems : Problem_Behavior) =
_ = on_problems
if format != Auto_Detect then Error.throw (Illegal_Argument.Error "Only Auto_Detect can be used with a SQLServer Data Link, as it points to a database.") else
default_options = DB_Data_Link_Helpers.data_link_connection_parameters self.source
connection = self.details.connect default_options allow_data_links=False
case self of
SQLServer_Data_Link.Connection _ _ -> connection
SQLServer_Data_Link.Table table_name _ _ ->
connection.query table_name
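For context, a hypothetical read of such a data link (the path is a placeholder and using `Enso_File.root` as the cloud home directory is an assumption):
## Hypothetical example: reading a SQL Server data link stored in Enso Cloud.
example_read_link =
    result = Data.read (Enso_File.root / "my-sqlserver.datalink")
    # `result` is an SQLServer_Connection, or a DB_Table if the data link
    # included a "table" field.
    result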

View File

@ -7,6 +7,7 @@ import Standard.Base.Metadata.Widget.Text_Input
import Standard.Database.Connection.Connection_Options.Connection_Options
import Standard.Database.Connection.Credentials.Credentials
import Standard.Database.Internal.Data_Link_Setup.Data_Link_Setup
import project.Snowflake_Connection.Snowflake_Connection
@ -38,15 +39,15 @@ type Snowflake_Details
- options: Overrides for the connection properties.
connect : Connection_Options -> Boolean -> Snowflake_Connection
connect self options (allow_data_links : Boolean = True) =
# TODO use this once #11294 is done
_ = allow_data_links
data_link_setup = if allow_data_links then Data_Link_Setup.Available (create_data_link_structure self) else
Data_Link_Setup.already_a_data_link
properties = options.merge self.jdbc_properties
## Cannot use default argument values, as it gets into an infinite loop if you do.
make_new database schema warehouse =
Snowflake_Details.Snowflake self.account self.credentials (database.if_nothing self.database) (schema.if_nothing self.schema) (warehouse.if_nothing self.warehouse) . connect options
Snowflake_Details.Snowflake self.account self.credentials (database.if_nothing self.database) (schema.if_nothing self.schema) (warehouse.if_nothing self.warehouse) . connect options allow_data_links
Snowflake_Connection.create self.jdbc_url properties make_new
Snowflake_Connection.create self.jdbc_url properties make_new data_link_setup
## PRIVATE
Provides the jdbc url for the connection.
@ -71,3 +72,14 @@ type Snowflake_Details
## Control the format of TIMESTAMP and TIME fields
formats = [Pair.new "TIME_OUTPUT_FORMAT" "HH24:MI:SS.FF9", Pair.new "TIMESTAMP_OUTPUT_FORMAT" "YYYY-MM-DD HH24:MI:SS.FF9 TZHTZM", Pair.new "TIMESTAMP_NTZ_OUTPUT_FORMAT" "YYYY-MM-DD HH24:MI:SS.FF9", Pair.new "TIMESTAMP_LTZ_OUTPUT_FORMAT" "YYYY-MM-DD HH24:MI:SS.FF9"]
account + credentials + database + schema + warehouse + formats
## PRIVATE
private create_data_link_structure details:Snowflake_Details data_link_location:Enso_File -> JS_Object =
credentials_json = Data_Link_Setup.save_credentials_for_data_link data_link_location details.credentials
JS_Object.from_pairs <|
header = [["type", "Snowflake_Connection"], ["libraryName", "Standard.Snowflake"]]
connection_part = [["account", details.account], ["database_name", details.database]]
schema_part = if details.schema.not_empty then [["schema", details.schema]] else []
warehouse_part = if details.warehouse.not_empty then [["warehouse", details.warehouse]] else []
credential_part = [["credentials", credentials_json]]
header + connection_part + schema_part + warehouse_part + credential_part

View File

@ -10,6 +10,7 @@ import Standard.Database.Column_Description.Column_Description
import Standard.Database.Connection.Connection.Connection
import Standard.Database.DB_Table.DB_Table
import Standard.Database.Internal.Connection.Entity_Naming_Properties.Entity_Naming_Properties
import Standard.Database.Internal.Data_Link_Setup.Data_Link_Setup
import Standard.Database.Internal.JDBC_Connection
import Standard.Database.SQL_Query.SQL_Query
import Standard.Database.SQL_Statement.SQL_Statement
@ -28,14 +29,15 @@ type Snowflake_Connection
- url: The URL to connect to.
- properties: A vector of properties for the connection.
- make_new: A function that returns a new connection.
create : Text -> Vector -> (Text -> Text -> Snowflake_Connection) -> Snowflake_Connection
create url properties make_new =
- data_link_setup: the setup for saving the connection as a data link.
create : Text -> Vector -> (Text -> Text -> Snowflake_Connection) -> Data_Link_Setup -> Snowflake_Connection
create url properties make_new data_link_setup =
jdbc_connection = JDBC_Connection.create url properties
## As long as names are quoted, the Snowflake SQL dialect is case sensitive.
Our generator always quotes identifiers, so we can rely on the case sensitivity.
This is the same as in Postgres.
entity_naming_properties = Entity_Naming_Properties.from_jdbc_connection jdbc_connection is_case_sensitive=True
Snowflake_Connection.Value (Connection.new jdbc_connection Snowflake_Dialect.snowflake entity_naming_properties) make_new
Snowflake_Connection.Value (Connection.new jdbc_connection Snowflake_Dialect.snowflake entity_naming_properties) make_new data_link_setup
## PRIVATE
@ -44,7 +46,8 @@ type Snowflake_Connection
Arguments:
- connection: the underlying connection.
- make_new: a function that returns a new connection.
Value connection make_new
- data_link_setup: the setup for saving the connection as a data link.
Value connection make_new data_link_setup
## ICON close
Closes the connection releasing the underlying database resources
@ -315,6 +318,12 @@ type Snowflake_Connection
base_connection : Connection
base_connection self = self.connection
## ICON data_output
Creates a Data Link that will open the same connection.
@on_existing_file (Existing_File_Behavior.widget include_backup=False include_append=False)
save_as_data_link self destination (on_existing_file:Existing_File_Behavior = Existing_File_Behavior.Error) =
self.data_link_setup.save_as_data_link destination on_existing_file
## PRIVATE
Converts this value to a JSON serializable object.
to_js_object : JS_Object

View File

@ -0,0 +1,21 @@
package org.enso.microsoft;
import org.enso.base.enso_cloud.DataLinkSPI;
@org.openide.util.lookup.ServiceProvider(service = DataLinkSPI.class)
public class SQLServerDataLinkSPI extends DataLinkSPI {
@Override
protected String getModuleName() {
return "Standard.Microsoft.SQLServer_Data_Link";
}
@Override
protected String getTypeName() {
return "SQLServer_Data_Link";
}
@Override
protected String getLinkTypeName() {
return "SQLServer_Connection";
}
}

View File

@ -0,0 +1,11 @@
{
"type": "SQLServer_Connection",
"libraryName": "Standard.Microsoft",
"host": "HOSTNAME",
"port": 12345,
"database_name": "DBNAME",
"credentials": {
"username": "USERNAME",
"password": "PASSWORD"
}
}

View File

@ -1,4 +1,5 @@
from Standard.Base import all
import Standard.Base.Enso_Cloud.Data_Link.Data_Link
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.Errors.Illegal_State.Illegal_State
import Standard.Base.Runtime.Ref.Ref
@ -21,7 +22,9 @@ from Standard.Test import all
import Standard.Test.Test_Environment
import enso_dev.Table_Tests
import enso_dev.Table_Tests.Database.Common.Audit_Spec
import enso_dev.Table_Tests.Database.Common.Common_Spec
import enso_dev.Table_Tests.Database.Common.Save_Connection_Data_Link
import enso_dev.Table_Tests.Database.Transaction_Spec
import enso_dev.Table_Tests.Database.Upload_Spec
import enso_dev.Table_Tests.Database.Helpers.Name_Generator
@ -72,22 +75,37 @@ get_configured_connection_details =
if database.is_nothing then Nothing else
SQLServer_Details.SQLServer host credentials port database
## Returns a function that takes anything and returns a new connection.
The function creates a _new_ connection on each invocation
(this is needed for some tests that need multiple distinct connections).
create_connection_builder =
connection_details = get_configured_connection_details
connection_details.if_not_nothing <|
_ -> Database.connect connection_details
transform_file base_file connection_details =
content = Data_Link.read_raw_config base_file
new_content = content
. replace "HOSTNAME" connection_details.host
. replace "12345" connection_details.port.to_text
. replace "DBNAME" connection_details.database
. replace "USERNAME" connection_details.credentials.username
. replace "PASSWORD" connection_details.credentials.password
temp_file = File.create_temporary_file "sqlserver-test-db" ".datalink"
Data_Link.write_raw_config temp_file new_content replace_existing=True . if_not_error temp_file
type Temporary_Data_Link_File
Value ~get
make connection_details = Temporary_Data_Link_File.Value <|
transform_file (enso_project.data / "datalinks" / "sqlserver-db.datalink") connection_details
add_specs suite_builder =
case create_connection_builder of
case get_configured_connection_details of
Nothing ->
message = "SQLServer test database is not configured. See README.md for instructions."
suite_builder.group "[SQLServer] Database tests" pending=message (_-> Nothing)
connection_builder ->
connection_details ->
connection_builder = _ -> Database.connect connection_details
add_sqlserver_specs suite_builder connection_builder
default_connection = Database.connect get_configured_connection_details
prefix = "[SQLServer] "
data_link_file = Temporary_Data_Link_File.make connection_details
Audit_Spec.add_specs suite_builder prefix data_link_file.get database_pending=Nothing
Save_Connection_Data_Link.add_specs suite_builder prefix connection_details pending=Nothing
default_connection = Database.connect connection_details
if default_connection.dialect.is_feature_supported Feature.Integration_Tests then
suite_builder.group "[SQLServer] Info" group_builder->
data = SQLServer_Info_Data.setup default_connection

View File

@ -26,6 +26,7 @@ import enso_dev.Table_Tests
import enso_dev.Table_Tests.Database.Common.Audit_Spec
import enso_dev.Table_Tests.Database.Common.Common_Spec
import enso_dev.Table_Tests.Database.Common.IR_Spec
import enso_dev.Table_Tests.Database.Common.Save_Connection_Data_Link
import enso_dev.Table_Tests.Database.Transaction_Spec
import enso_dev.Table_Tests.Database.Upload_Spec
import enso_dev.Table_Tests.Database.Helpers.Name_Generator
@ -604,10 +605,12 @@ add_table_specs suite_builder =
db_name = connection_details.database
connection_builder = _ -> Database.connect connection_details
add_snowflake_specs suite_builder connection_builder db_name
Transaction_Spec.add_specs suite_builder connection_builder "[Snowflake] "
prefix = "[Snowflake] "
Transaction_Spec.add_specs suite_builder connection_builder prefix
data_link_file = Temporary_Data_Link_File.make connection_details
Audit_Spec.add_specs suite_builder "[Snowflake] " data_link_file.get database_pending=Nothing
Audit_Spec.add_specs suite_builder prefix data_link_file.get database_pending=Nothing
Save_Connection_Data_Link.add_specs suite_builder prefix connection_details pending=Nothing
suite_builder.group "[Snowflake] Secrets in connection settings" group_builder->
cloud_setup = Cloud_Tests_Setup.prepare

View File

@ -35,8 +35,11 @@ add_specs suite_builder prefix ~datalink_to_connection database_pending =
table_name = "audited-table-"+Random.uuid
mem_table = Table.new [["X", [1, 2]], ["Y", ["my_payload", "foo"]]]
audited_table = mem_table.select_into_database_table audited_connection table_name temporary=True . should_succeed
audited_table.read . should_equal mem_table
audited_connection.drop_table table_name . should_succeed
materialized = audited_table.read
materialized.column_names . should_equal ["X", "Y"]
materialized.at "X" . to_vector . should_equal_ignoring_order [1, 2]
materialized.at "Y" . to_vector . should_equal_ignoring_order ["my_payload", "foo"]
audited_connection.drop_table audited_table.name . should_succeed
# Retrying is needed as there may be some delay before the background thread finishes processing the logs.
Test.with_retries <|
@ -86,7 +89,7 @@ add_specs suite_builder prefix ~datalink_to_connection database_pending =
locally_audited_connection = datalink_to_connection.read
# We just check that we can read queries through this connection:
locally_audited_connection.read (SQL_Query.Raw_SQL "SELECT 1") . at 0 . to_vector . should_equal [1]
locally_audited_connection.read (SQL_Query.Raw_SQL "SELECT 1 AS foo") . at 0 . to_vector . should_equal [1]
# This test may only run on real cloud because the mock does not support creating datalinks.
# Once the tests above can be run on real cloud too (#10919), we can merge all 3 cloud setups into a single one.

View File

@ -0,0 +1,68 @@
from Standard.Base import all
import Standard.Base.Errors.File_Error.File_Error
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
from Standard.Table import Table
from Standard.Database import all
from Standard.Test import all
import enso_dev.Base_Tests.Network.Enso_Cloud.Cloud_Tests_Setup.Cloud_Tests_Setup
import enso_dev.Base_Tests.Network.Enso_Cloud.Cloud_Tests_Setup.Temporary_Directory
add_specs suite_builder prefix connection_details pending =
cloud_setup = Cloud_Tests_Setup.prepare
suite_builder.group prefix+"Saving to Data Link" pending=(pending.if_nothing cloud_setup.real_cloud_pending) group_builder->
test_root = Temporary_Directory.make "DB-DataLinks"
group_builder.teardown test_root.cleanup
## datalink support
group_builder.specify "allows to save an established connection as a Data Link" <|
cloud_location = test_root.get / "my-db.datalink"
raw_connection = Database.connect connection_details
Panic.with_finalizer raw_connection.close <|
r1 = raw_connection.save_as_data_link cloud_location
# The written data link location is returned
r1.should_equal cloud_location
# A secret for the plain-text password is created:
secrets = Enso_Secret.list test_root.get
secrets.length . should_equal 1
secrets.first.name . should_contain "my-db"
secrets.first.name . should_contain "password"
written_data_link_connection = cloud_location.read
Panic.with_finalizer written_data_link_connection.close <|
written_data_link_connection.tables.column_names . should_contain "Name"
q = written_data_link_connection.query (..Raw_SQL 'SELECT 1 AS "A"')
q.column_names . should_equal ["A"]
q.at "A" . to_vector . should_equal [1]
# Also test existing file behaviour
r2 = raw_connection.save_as_data_link cloud_location
r2.should_fail_with File_Error
r2.catch.should_be_a File_Error.Already_Exists
r3 = raw_connection.save_as_data_link cloud_location on_existing_file=Existing_File_Behavior.Overwrite
r3.should_succeed
## datalink support
group_builder.specify "does not allow Backup or Append modes in save_as_data_link" <|
cloud_location = test_root.get / "my-db-2.datalink"
raw_connection = Database.connect connection_details
Panic.with_finalizer raw_connection.close <|
raw_connection.save_as_data_link cloud_location on_existing_file=Existing_File_Behavior.Backup . should_fail_with Illegal_Argument
raw_connection.save_as_data_link cloud_location on_existing_file=Existing_File_Behavior.Append . should_fail_with Illegal_Argument
## datalink support
group_builder.specify "but will not allow to save a connection from a Data Link as a new Data Link" <|
## (Because the Data Link connection is hiding its connection details from the user.)
raw_connection = Database.connect connection_details
data_link_file = raw_connection.save_as_data_link (test_root.get / "my-db-3.datalink")
data_link_file.should_be_a Enso_File
other_data_link_location = test_root.get / "my-db-new-4.datalink"
data_link_connection = Data.read data_link_file
Panic.with_finalizer data_link_connection.close <|
r = data_link_connection.save_as_data_link other_data_link_location
r.should_fail_with Illegal_Argument

View File

@ -1,6 +1,5 @@
from Standard.Base import all
import Standard.Base.Enso_Cloud.Data_Link.Data_Link
import Standard.Base.Errors.File_Error.File_Error
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.Errors.Illegal_State.Illegal_State
import Standard.Base.Runtime.Ref.Ref
@ -26,6 +25,7 @@ import Standard.Test.Test_Environment
import project.Database.Common.Audit_Spec
import project.Database.Common.Common_Spec
import project.Database.Common.IR_Spec
import project.Database.Common.Save_Connection_Data_Link
import project.Database.Transaction_Spec
import project.Database.Upload_Spec
import project.Database.Helpers.Name_Generator
@ -1060,58 +1060,9 @@ add_data_link_specs suite_builder =
r.should_fail_with Illegal_Argument
r.catch.to_display_text . should_contain "The Postgres Data Link cannot be saved to a file."
cloud_setup = Cloud_Tests_Setup.prepare
suite_builder.group "[PostgreSQL] Saving to DataLink" pending=(pending.if_nothing cloud_setup.real_cloud_pending) group_builder->
test_root = Temporary_Directory.make "Postgres-DataLinks"
group_builder.teardown test_root.cleanup
## datalink support
group_builder.specify "allows to save an established connection as a Data Link" <|
cloud_location = test_root.get / "my-postgres-db.datalink"
raw_connection = Database.connect connection_details
Panic.with_finalizer raw_connection.close <|
r1 = raw_connection.save_as_data_link cloud_location
# The written data link location is returned
r1.should_equal cloud_location
# A secret for the plain-text password is created:
secrets = Enso_Secret.list test_root.get
secrets.length . should_equal 1
secrets.first.name . should_contain "my-postgres-db"
secrets.first.name . should_contain "password"
written_data_link_connection = cloud_location.read
Panic.with_finalizer written_data_link_connection.close <|
written_data_link_connection.tables.column_names . should_contain "Name"
q = written_data_link_connection.query (..Raw_SQL 'SELECT 1 AS "A"')
q.column_names . should_equal ["A"]
q.at "A" . to_vector . should_equal [1]
# Also test existing file behaviour
r2 = raw_connection.save_as_data_link cloud_location
r2.should_fail_with File_Error
r2.catch.should_be_a File_Error.Already_Exists
r3 = raw_connection.save_as_data_link cloud_location on_existing_file=Existing_File_Behavior.Overwrite
r3.should_succeed
## datalink support
group_builder.specify "does not allow Backup or Append modes in save_as_data_link" <|
cloud_location = test_root.get / "my-postgres-db-2.datalink"
raw_connection = Database.connect connection_details
Panic.with_finalizer raw_connection.close <|
raw_connection.save_as_data_link cloud_location on_existing_file=Existing_File_Behavior.Backup . should_fail_with Illegal_Argument
raw_connection.save_as_data_link cloud_location on_existing_file=Existing_File_Behavior.Append . should_fail_with Illegal_Argument
## datalink support
group_builder.specify "but will not allow to save a connection from a Data Link as a new Data Link" <|
## (Because the Data Link connection is hiding its connection details from the user.)
cloud_location = test_root.get / "my-postgres-db-3.datalink"
data_link_connection = Data.read data_link_file.get
Panic.with_finalizer data_link_connection.close <|
r = data_link_connection.save_as_data_link cloud_location
r.should_fail_with Illegal_Argument
Audit_Spec.add_specs suite_builder "[PostgreSQL] " data_link_file.get database_pending=pending
prefix = "[PostgreSQL] "
Audit_Spec.add_specs suite_builder prefix data_link_file.get database_pending=pending
Save_Connection_Data_Link.add_specs suite_builder prefix connection_details pending
add_specs suite_builder =