Saving data links to a DB_Table (#11371)

- Closes #11295
This commit is contained in:
Radosław Waśko 2024-10-24 15:18:49 +02:00 committed by GitHub
parent fe45da98d7
commit ca9df70ebf
32 changed files with 592 additions and 157 deletions

View File

@ -14,10 +14,9 @@ on:
default: false
jobs:
enso-build-ci-gen-job-snowflake-tests-linux-amd64:
name: Snowflake Tests (linux, amd64)
name: Snowflake Tests (LinuxLatest)
runs-on:
- self-hosted
- Linux
- ubuntu-latest
steps:
- if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
name: Installing wasm-pack
@ -44,6 +43,11 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- run: ./run backend test std-snowflake
env:
ENSO_CLOUD_COGNITO_REGION: ${{ vars.ENSO_CLOUD_COGNITO_REGION }}
ENSO_CLOUD_COGNITO_USER_POOL_ID: ${{ vars.ENSO_CLOUD_COGNITO_USER_POOL_ID }}
ENSO_CLOUD_COGNITO_USER_POOL_WEB_CLIENT_ID: ${{ vars.ENSO_CLOUD_COGNITO_USER_POOL_WEB_CLIENT_ID }}
ENSO_CLOUD_TEST_ACCOUNT_PASSWORD: ${{ secrets.ENSO_CLOUD_TEST_ACCOUNT_PASSWORD }}
ENSO_CLOUD_TEST_ACCOUNT_USERNAME: ${{ secrets.ENSO_CLOUD_TEST_ACCOUNT_USERNAME }}
ENSO_SNOWFLAKE_ACCOUNT: ${{ secrets.ENSO_SNOWFLAKE_ACCOUNT }}
ENSO_SNOWFLAKE_DATABASE: ${{ secrets.ENSO_SNOWFLAKE_DATABASE }}
ENSO_SNOWFLAKE_PASSWORD: ${{ secrets.ENSO_SNOWFLAKE_PASSWORD }}

View File

@ -22,9 +22,11 @@
cloud.][11235]
- [The user may set description and labels of an Enso Cloud asset
programmatically.][11255]
- [DB_Table may be saved as a Data Link.][11371]
[11235]: https://github.com/enso-org/enso/pull/11235
[11255]: https://github.com/enso-org/enso/pull/11255
[11371]: https://github.com/enso-org/enso/pull/11371
#### Enso Language & Runtime

View File

@ -93,7 +93,12 @@ v.test('correctly validates example Table .datalink files with the schema', () =
})
v.test('correctly validates example Database .datalink files with the schema', () => {
const schemas = ['postgres-db.datalink', 'postgres-table.datalink']
const schemas = [
'postgres-db.datalink',
'postgres-table.datalink',
'postgres-simple-query.datalink',
'postgres-serialized-query.datalink',
]
for (const schema of schemas) {
const json = loadDataLinkFile(path.resolve(TABLE_DATA_LINKS_ROOT, schema))
testSchema(json, schema)

View File

@ -188,7 +188,7 @@
},
"required": ["username", "password"]
},
"table": { "title": "Table to access", "type": "string" }
"table": { "title": "Table to access", "$ref": "#/$defs/DatabaseTableDefinition" }
},
"required": ["type", "libraryName", "host", "port", "database_name"]
},
@ -233,7 +233,7 @@
},
"required": ["username", "password"]
},
"table": { "title": "Table to access", "type": "string" }
"table": { "title": "Table to access", "$ref": "#/$defs/DatabaseTableDefinition" }
},
"required": ["type", "libraryName", "account", "database_name", "credentials"]
},
@ -277,7 +277,7 @@
},
"required": ["username", "password"]
},
"table": { "title": "Table to access", "type": "string" }
"table": { "title": "Table to access", "$ref": "#/$defs/DatabaseTableDefinition" }
},
"required": ["type", "libraryName", "host", "port", "database_name"]
},
@ -458,6 +458,35 @@
}
},
"required": ["type", "subType"]
},
"DatabaseTableDefinition": {
"title": "Query to read",
"anyOf": [
{ "title": "Table", "type": "string" },
{
"title": "SQL Query",
"type": "object",
"properties": {
"query": {
"title": "SQL",
"type": "string"
}
},
"required": ["query"]
},
{
"title": "(Advanced) JSON serialized interpolated SQL statement",
"type": "object",
"properties": {
"sql_statement": {
"title": "SQL_Statement (JSON)",
"type": "string"
}
},
"required": ["sql_statement"]
}
]
}
}
}
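For reference, the `table` field matching each variant of `DatabaseTableDefinition` could look as follows (a sketch with hypothetical names and values, derived from the schema above):

"table": "my_table"
"table": { "query": "SELECT a, b FROM my_table" }
"table": { "sql_statement": "{\"query\": [{\"sql_code\": \"SELECT 1+\"}, {\"sql_interpolation\": 2}]}" }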

View File

@ -1,3 +1,5 @@
use core::panic;
use crate::prelude::*;
use crate::ci_gen::not_default_branch;
@ -307,6 +309,10 @@ fn build_job_ensuring_cloud_tests_run_on_github(
cloud_tests_enabled: bool,
) -> Job {
if cloud_tests_enabled {
if target.0 != OS::Linux {
panic!("If the Cloud tests are enabled, they require a GitHub-hosted runner for Cloud auth, so they only run on Linux.");
}
run_steps_builder.build_job(job_name, RunnerLabel::LinuxLatest)
} else {
run_steps_builder.build_job(job_name, target)
@ -320,6 +326,9 @@ const GRAAL_EDITION_FOR_EXTRA_TESTS: graalvm::Edition = graalvm::Edition::Commun
impl JobArchetype for SnowflakeTests {
fn job(&self, target: Target) -> Job {
if target.0 != OS::Linux {
panic!("Snowflake tests currently require a GitHub-hosted runner for Cloud auth, so they only run on Linux.");
}
let job_name = "Snowflake Tests";
let mut job = RunStepsBuilder::new("backend test std-snowflake")
.customize(move |step| {
@ -349,17 +358,16 @@ impl JobArchetype for SnowflakeTests {
crate::libraries_tests::snowflake::env::ENSO_SNOWFLAKE_WAREHOUSE,
);
// Temporarily disabled until we can get the Cloud auth fixed.
// Snowflake does not rely on cloud anyway, so it can be disabled.
// But it will rely once we add datalink tests, so this should be fixed soon.
// let updated_main_step = enable_cloud_tests(main_step);
// Snowflake tests are run only in the 'Extra' job, so it is okay to run them with
// Enso Cloud enabled as well. They need it to test the data link integration.
let updated_main_step = enable_cloud_tests(main_step);
vec![
main_step,
updated_main_step,
step::extra_stdlib_test_reporter(target, GRAAL_EDITION_FOR_EXTRA_TESTS),
]
})
.build_job(job_name, target)
.build_job(job_name, RunnerLabel::LinuxLatest)
.with_permission(Permission::Checks, Access::Write);
job.env(env::GRAAL_EDITION, GRAAL_EDITION_FOR_EXTRA_TESTS);
job

View File

@ -267,7 +267,7 @@ Text.characters self =
## This matches `aBc` @ character 11
"aabbbbccccaaBcaaaa".find "a[ab]c" Case_Sensitivity.Insensitive
Text.find : (Regex | Text) -> Case_Sensitivity -> Match | Nothing ! Regex_Syntax_Error | Illegal_Argument
Text.find self pattern:(Regex | Text)=".*" case_sensitivity=Case_Sensitivity.Sensitive =
Text.find self pattern:(Regex | Text)=".*" case_sensitivity:Case_Sensitivity=..Sensitive =
case_insensitive = case_sensitivity.is_case_insensitive_in_memory
compiled_pattern = Regex.compile pattern case_insensitive=case_insensitive
compiled_pattern.match self
@ -299,8 +299,8 @@ Text.find self pattern:(Regex | Text)=".*" case_sensitivity=Case_Sensitivity.Sen
example_find_all_insensitive =
## This matches `aABbbbc` @ character 0 and `aBC` @ character 11
"aABbbbccccaaBCaaaa".find_all "a[ab]+c" Case_Sensitivity.Insensitive
Text.find_all : Text -> Case_Sensitivity -> Vector Match ! Regex_Syntax_Error | Illegal_Argument
Text.find_all self pattern=".*" case_sensitivity=Case_Sensitivity.Sensitive =
Text.find_all : Text|Regex -> Case_Sensitivity -> Vector Match ! Regex_Syntax_Error | Illegal_Argument
Text.find_all self pattern:Text|Regex=".*" case_sensitivity:Case_Sensitivity=..Sensitive =
case_insensitive = case_sensitivity.is_case_insensitive_in_memory
compiled_pattern = Regex.compile pattern case_insensitive=case_insensitive
compiled_pattern.match_all self
@ -334,8 +334,8 @@ Text.find_all self pattern=".*" case_sensitivity=Case_Sensitivity.Sensitive =
regex = ".+ct@.+"
# Evaluates to true
"CONTACT@enso.org".match regex Case_Sensitivity.Insensitive
Text.match : Text -> Case_Sensitivity -> Boolean ! Regex_Syntax_Error | Illegal_Argument
Text.match self pattern=".*" case_sensitivity=Case_Sensitivity.Sensitive =
Text.match : Text|Regex -> Case_Sensitivity -> Boolean ! Regex_Syntax_Error | Illegal_Argument
Text.match self pattern:Text|Regex=".*" case_sensitivity:Case_Sensitivity=..Sensitive =
case_insensitive = case_sensitivity.is_case_insensitive_in_memory
compiled_pattern = Regex.compile pattern case_insensitive=case_insensitive
compiled_pattern.matches self
@ -394,7 +394,7 @@ Text.to_regex self case_insensitive=False = Regex.compile self case_insensitive
'azbzczdzezfzg'.split ['b', 'zez'] == ['az', 'zczd', 'fzg']
@delimiter make_delimiter_selector
Text.split : Text | Vector Text -> Case_Sensitivity -> Boolean -> Vector Text ! Illegal_Argument
Text.split self delimiter="," case_sensitivity=Case_Sensitivity.Sensitive use_regex=False =
Text.split self delimiter="," case_sensitivity:Case_Sensitivity=..Sensitive use_regex=False =
delimiter_is_empty = case delimiter of
_ : Text -> delimiter.is_empty
_ : Vector -> delimiter.is_empty || delimiter.any (.is_empty)
@ -452,8 +452,8 @@ Text.split self delimiter="," case_sensitivity=Case_Sensitivity.Sensitive use_re
'Hello Big\r\nWide\tWorld\nGoodbye!' . tokenize "(\S+)(?:\s+|$)"
== ["Hello","Big","Wide","World","Goodbye!"]
Text.tokenize : Text -> Case_Sensitivity -> Vector Text
Text.tokenize self pattern:Text=(Missing_Argument.throw "pattern") case_sensitivity:Case_Sensitivity=..Sensitive =
Text.tokenize : Text|Regex -> Case_Sensitivity -> Vector Text
Text.tokenize self pattern:Text|Regex=(Missing_Argument.throw "pattern") case_sensitivity:Case_Sensitivity=..Sensitive =
case_insensitive = case_sensitivity.is_case_insensitive_in_memory
compiled_pattern = Regex.compile pattern case_insensitive=case_insensitive
compiled_pattern.tokenize self
@ -614,7 +614,7 @@ Cleansable_Text.from (that:Text) = Cleansable_Text.Value (pattern->replace_with-
"แมวมีสี่ขา".words == ['แมว', 'มี', 'สี่', 'ขา']
Text.words : Boolean -> Vector Text
Text.words self keep_whitespace=False =
Text.words self keep_whitespace:Boolean=False =
iterator = BreakIterator.getWordInstance
iterator.setText self
Vector.build builder->
@ -657,7 +657,7 @@ Text.words self keep_whitespace=False =
'\na\nb\n'.lines keep_endings=True == ['\n', 'a\n', 'b\n']
Text.lines : Boolean -> Vector Text
Text.lines self keep_endings=False =
Text.lines self keep_endings:Boolean=False =
Vector.from_polyglot_array (Text_Utils.split_on_lines self keep_endings)
## GROUP Text
@ -684,7 +684,7 @@ Text.lines self keep_endings=False =
"Hello World!".insert 5 " Cruel" == "Hello Cruel World!"
"Hello World!".insert -1 " Cruel" == "Hello World! Cruel"
Text.insert : Integer -> Text -> Text ! Index_Out_Of_Bounds
Text.insert self index that =
Text.insert self index:Integer that:Text =
len = self.length
idx = if index < 0 then len + index + 1 else index
if (idx < 0) || (idx > len) then Error.throw (Index_Out_Of_Bounds.Error index len) else
@ -718,7 +718,7 @@ Text.insert self index that =
"A0".is_digit 1 == True
"건반(Korean)".is_digit 1 == False
Text.is_digit : Integer -> Boolean ! Index_Out_Of_Bounds
Text.is_digit self (index=0) =
Text.is_digit self index:Integer=0 =
grapheme = self.at index
char = (Text_Utils.get_chars grapheme).at 0
char>=48 && char<=57
@ -903,7 +903,7 @@ Text.from_codepoints codepoints = Text_Utils.from_codepoints codepoints
"Hello!".starts_with "hello" == False
"Hello!".starts_with "hello" Case_Sensitivity.Insensitive == True
Text.starts_with : Text -> Case_Sensitivity -> Boolean
Text.starts_with self prefix case_sensitivity=Case_Sensitivity.Sensitive = case case_sensitivity of
Text.starts_with self prefix:Text case_sensitivity:Case_Sensitivity=..Sensitive = case case_sensitivity of
Case_Sensitivity.Default -> self.starts_with prefix Case_Sensitivity.Sensitive
Case_Sensitivity.Sensitive -> Text_Utils.starts_with self prefix
Case_Sensitivity.Insensitive locale ->
@ -933,7 +933,7 @@ Text.starts_with self prefix case_sensitivity=Case_Sensitivity.Sensitive = case
"Hello World".ends_with "world" == False
"Hello World".ends_with "world" Case_Sensitivity.Insensitive == True
Text.ends_with : Text -> Case_Sensitivity -> Boolean
Text.ends_with self suffix case_sensitivity=Case_Sensitivity.Sensitive = case case_sensitivity of
Text.ends_with self suffix:Text case_sensitivity:Case_Sensitivity=..Sensitive = case case_sensitivity of
Case_Sensitivity.Default -> self.ends_with suffix Case_Sensitivity.Sensitive
Case_Sensitivity.Sensitive -> Text_Utils.ends_with self suffix
Case_Sensitivity.Insensitive locale ->
@ -979,7 +979,7 @@ Text.ends_with self suffix case_sensitivity=Case_Sensitivity.Sensitive = case ca
"Hello!".contains "LO" Case_Sensitivity.Insensitive
Text.contains : Text -> Case_Sensitivity -> Boolean
Text.contains self term="" case_sensitivity=Case_Sensitivity.Sensitive = case case_sensitivity of
Text.contains self term:Text="" case_sensitivity:Case_Sensitivity=..Sensitive = case case_sensitivity of
Case_Sensitivity.Default -> self.contains term Case_Sensitivity.Sensitive
Case_Sensitivity.Sensitive -> Text_Utils.contains self term
Case_Sensitivity.Insensitive locale ->
@ -1004,7 +1004,7 @@ Text.contains self term="" case_sensitivity=Case_Sensitivity.Sensitive = case ca
"Hello " * 2 == "Hello Hello "
Text.* : Integer -> Text
Text.* self count = self.repeat count
Text.* self count:Integer = self.repeat count
## GROUP Calculations
ICON text
@ -1025,7 +1025,7 @@ Text.* self count = self.repeat count
"Hello ".repeat 2 == "Hello Hello "
Text.repeat : Integer -> Text
Text.repeat self count=1 =
Text.repeat self count:Integer=1 =
0.up_to count . fold "" acc-> _-> acc + self
## ALIAS first, head, keep, last, left, limit, mid, right, slice, substring, tail, top
@ -1343,7 +1343,7 @@ Text.trim self where:Location=..Both what=_.is_whitespace =
match_1 == match_2
Text.locate : Text -> Matching_Mode -> Case_Sensitivity -> Span | Nothing
Text.locate self term="" mode=Matching_Mode.First case_sensitivity=Case_Sensitivity.Sensitive = case case_sensitivity of
Text.locate self term:Text="" mode=Matching_Mode.First case_sensitivity:Case_Sensitivity=..Sensitive = case case_sensitivity of
Case_Sensitivity.Default -> self.locate term mode Case_Sensitivity.Sensitive
Case_Sensitivity.Sensitive ->
codepoint_span = case mode of
@ -1434,7 +1434,7 @@ Text.locate self term="" mode=Matching_Mode.First case_sensitivity=Case_Sensitiv
match_2 = ligatures . locate_all "ffiff" case_sensitivity=Case_Sensitivity.Insensitive
match_2 . map .length == [2, 5]
Text.locate_all : Text -> Case_Sensitivity -> Vector Span
Text.locate_all self term="" case_sensitivity=Case_Sensitivity.Sensitive = if term.is_empty then Vector.new (self.length + 1) (ix -> Span.Value (ix.up_to ix) self) else case case_sensitivity of
Text.locate_all self term:Text="" case_sensitivity:Case_Sensitivity=..Sensitive = if term.is_empty then Vector.new (self.length + 1) (ix -> Span.Value (ix.up_to ix) self) else case case_sensitivity of
Case_Sensitivity.Default -> self.locate_all term Case_Sensitivity.Sensitive
Case_Sensitivity.Sensitive ->
codepoint_spans = Vector.from_polyglot_array <| Text_Utils.span_of_all self term
@ -1479,7 +1479,7 @@ Text.locate_all self term="" case_sensitivity=Case_Sensitivity.Sensitive = if te
"Hello World!".index_of "J" == Nothing
"Hello World!".index_of "o" == 4
Text.index_of : Text -> Integer -> Case_Sensitivity -> Integer | Nothing
Text.index_of self term="" (start : Integer = 0) case_sensitivity=Case_Sensitivity.Sensitive =
Text.index_of self term:Text="" (start : Integer = 0) case_sensitivity:Case_Sensitivity=..Sensitive =
used_start = if start < 0 then start+self.length else start
if used_start < 0 || used_start > self.length then Error.throw (Index_Out_Of_Bounds.Error start self.length+1) else
used = if used_start == 0 then self else self.drop used_start
@ -1514,7 +1514,7 @@ Text.index_of self term="" (start : Integer = 0) case_sensitivity=Case_Sensitivi
"Hello World!".last_index_of "J" == Nothing
"Hello World!".last_index_of "o" == 7
Text.last_index_of : Text -> Integer -> Case_Sensitivity -> Integer | Nothing
Text.last_index_of self term="" start=-1 case_sensitivity=Case_Sensitivity.Sensitive =
Text.last_index_of self term:Text="" start=-1 case_sensitivity:Case_Sensitivity=..Sensitive =
used_start = if start < 0 then start+self.length else start
if used_start < 0 || used_start >= self.length then Error.throw (Index_Out_Of_Bounds.Error start self.length) else
used = if used_start == self.length-1 then self else self.take used_start+1

View File

@ -19,6 +19,7 @@ import project.DB_Table as DB_Table_Module
import project.DB_Table.DB_Table
import project.Dialect.Dialect
import project.Internal.Connection.Entity_Naming_Properties.Entity_Naming_Properties
import project.Internal.Data_Link_Setup.Data_Link_Setup
import project.Internal.DDL_Transaction
import project.Internal.Hidden_Table_Registry
import project.Internal.In_Transaction.In_Transaction
@ -59,7 +60,9 @@ type Connection
`False`.
- hidden_table_registry: a registry of hidden tables that are not
shown to the user, but are used internally by the dry-run system.
Value jdbc_connection dialect (entity_naming_properties : Entity_Naming_Properties) (supports_large_update : Ref Boolean) (hidden_table_registry : Hidden_Table_Registry.Hidden_Table_Registry)
- data_link_setup: an optional setup that allows saving the connection
as a data link.
Value jdbc_connection dialect (entity_naming_properties : Entity_Naming_Properties) (supports_large_update : Ref Boolean) (hidden_table_registry : Hidden_Table_Registry.Hidden_Table_Registry) (data_link_setup : Data_Link_Setup | Nothing = Nothing)
## PRIVATE
Constructs a new Connection.
@ -68,12 +71,15 @@ type Connection
- jdbc_connection: the resource managing the underlying JDBC
connection.
- dialect: the dialect associated with the database we are connected to.
- entity_naming_properties: a helper for managing the entity naming
rules of the given backend.
- data_link_setup: an optional setup that allows saving the connection
as a data link.
- try_large_update: whether the connection should try to use
`executeLargeUpdate`.
new : JDBC_Connection -> Dialect -> Entity_Naming_Properties -> (Data_Link_Setup | Nothing) -> Boolean -> Connection
new jdbc_connection dialect entity_naming_properties try_large_update=True =
new jdbc_connection:JDBC_Connection dialect entity_naming_properties:Entity_Naming_Properties (data_link_setup : Data_Link_Setup | Nothing = Nothing) (try_large_update : Boolean = True) -> Connection =
registry = Hidden_Table_Registry.new
Connection.Value jdbc_connection dialect entity_naming_properties (Ref.new try_large_update) registry
Connection.Value jdbc_connection dialect entity_naming_properties (Ref.new try_large_update) registry data_link_setup
## PRIVATE
Closes the connection releasing the underlying database resources
@ -218,12 +224,10 @@ type Connection
SQL_Query.Table_Name name ->
table_naming_helper = self.base_connection.table_naming_helper
table_naming_helper.verify_table_name name <|
make_table_for_name self name alias
_make_table_for_name self name alias
SQL_Query.Raw_SQL raw_sql -> handle_sql_errors <| alias.if_not_error <|
self.dialect.ensure_query_has_no_holes self.jdbc_connection raw_sql . if_not_error <|
columns = self.fetch_columns raw_sql Statement_Setter.null
name = if alias == "" then (UUID.randomUUID.to_text) else alias
ctx = Context.for_query raw_sql name
r = make_table_from_query self raw_sql alias
## Any problems are treated as errors - e.g. if the query
contains clashing column names, it may very likely lead to
data corruption. Our renaming mechanism is used to fix issues
@ -233,7 +237,6 @@ type Connection
will actually result in both columns `A` and `A 1` containing
the value 1; and value 2 being lost. That is why such queries
must fail.
r = DB_Table_Module.make_table self name columns ctx on_problems=Problem_Behavior.Report_Error
r.catch Any error->
Error.throw (Illegal_Argument.Error "The provided custom SQL query is invalid and may lead to data corruption when processed; in particular, if it contains clashing column names, it may not be interpreted correctly. Please ensure the names are unique. The original error was: "+error.to_display_text cause=error)
@ -363,7 +366,7 @@ type Connection
Execution_Context.Output.if_enabled disabled_message="As writing is disabled, cannot execute an update query. Press the Write button ▶ to perform the operation." panic=False <|
statement_setter = self.dialect.get_statement_setter
self.jdbc_connection.with_prepared_statement query statement_setter stmt->
check_statement_is_allowed self stmt
_check_statement_is_allowed self stmt
result = case self.supports_large_update.get of
True -> Panic.catch UnsupportedOperationException stmt.executeLargeUpdate _->
self.supports_large_update.put False
@ -387,7 +390,7 @@ type Connection
Execution_Context.Output.if_enabled disabled_message="As writing is disabled, cannot execute an update query. Press the Write button ▶ to perform the operation." panic=False <|
result = self.jdbc_connection.execute query
stmt = result.second
check_statement_is_allowed self stmt
_check_statement_is_allowed self stmt
stmt.getUpdateCount
## PRIVATE
@ -459,7 +462,7 @@ type Connection
internal_allocate_dry_run_table : Text -> DB_Table
internal_allocate_dry_run_table self table_name =
ref = self.hidden_table_registry.make_reference table_name
make_table_for_name self table_name table_name ref
_make_table_for_name self table_name table_name ref
## PRIVATE
ADVANCED
@ -476,6 +479,14 @@ type Connection
create_literal_table self (source : Table) (alias : Text) -> DB_Table =
DB_Table_Module.make_literal_table self (source.columns.map .to_vector) source.column_names alias
## ICON data_output
Creates a Data Link that will open the same connection.
@on_existing_file (Existing_File_Behavior.widget include_backup=False include_append=False)
save_as_data_link self destination (on_existing_file:Existing_File_Behavior = ..Error) =
case self.data_link_setup of
Nothing -> Error.throw (Illegal_State.Error "The connection does not support saving as a data link.")
data_link_setup -> data_link_setup.save_as_data_link destination on_existing_file
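A minimal usage sketch (the host, database, and destination path are hypothetical; assumes the standard `Database.connect` entry point and that an `Enso_File` destination can be built with the `/` operator):

example_save_connection =
    connection = Database.connect (Postgres.Server host="localhost" port=5432 database="my_db")
    # Overwrite any previously saved link at the hypothetical destination.
    connection.save_as_data_link (Enso_File.root / "my_connection.datalink") on_existing_file=..Overwrite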
## PRIVATE
make_table_types_selector : Connection -> Widget
@ -511,12 +522,24 @@ make_structure_creator =
Vector_Editor item_editor=item_editor item_default=item_editor.values.first.value display=Display.Always
## PRIVATE
make_table_for_name connection name alias internal_temporary_keep_alive_reference=Nothing =
private make_table_from_query connection query:Text|SQL_Statement alias:Text -> DB_Table =
expect_interpolations = case query of
# If the statement is given as a Text, then it should not contain `?` holes.
_ : Text -> False
_ : SQL_Statement -> True
statement_setter = if expect_interpolations then connection.dialect.get_statement_setter else Statement_Setter.null
columns = connection.base_connection.fetch_columns query statement_setter
name = if alias == "" then (UUID.randomUUID.to_text) else alias
ctx = Context.for_query query name
DB_Table_Module.make_table connection.base_connection name columns ctx on_problems=Problem_Behavior.Report_Error
## PRIVATE
private _make_table_for_name connection name alias internal_temporary_keep_alive_reference=Nothing =
result = handle_sql_errors <|
ctx = Context.for_table name (if alias == "" then name else alias) internal_temporary_keep_alive_reference
statement = connection.dialect.generate_sql (Query.Select Nothing ctx)
statement_setter = connection.dialect.get_statement_setter
columns = connection.fetch_columns statement statement_setter
columns = connection.base_connection.fetch_columns statement statement_setter
## In case of accessing an existing table, we assume that column names
are distinguishable by the backend, so any issues that are caught
only affect Enso columns, and just renaming Enso columns is enough to
@ -528,7 +551,7 @@ make_table_for_name connection name alias internal_temporary_keep_alive_referenc
Error.throw (Table_Not_Found.Error name)
## PRIVATE
private check_statement_is_allowed connection stmt =
private _check_statement_is_allowed connection stmt =
trimmed = stmt.to_text.trim.to_case ..Lower
is_ddl_regex = "\s*(?:create|alter|drop).*".to_regex case_insensitive=True
is_ddl = is_ddl_regex.matches trimmed

View File

@ -9,17 +9,13 @@ from Standard.Base.Enso_Cloud.Public_Utils import get_optional_field, get_requir
import project.Connection.Connection_Options.Connection_Options
import project.Connection.Credentials.Credentials
import project.Connection.Postgres.Postgres
import project.Internal.Data_Link_Setup.DB_Data_Link_Type
import project.Internal.DB_Data_Link_Helpers
## PRIVATE
type Postgres_Data_Link
## PRIVATE
A data-link returning a connection to the specified database.
Connection details:Postgres source:Data_Link_Source_Metadata
## PRIVATE
A data-link returning a query to a specific table within a database.
Table name:Text details:Postgres source:Data_Link_Source_Metadata
Value details:Postgres source:Data_Link_Source_Metadata link_type:DB_Data_Link_Type
## PRIVATE
parse json source:Data_Link_Source_Metadata -> Postgres_Data_Link =
@ -35,11 +31,8 @@ type Postgres_Data_Link
Credentials.Username_And_Password username password
details = Postgres.Server host=host port=port database=db_name schema=schema credentials=credentials
case get_optional_field "table" json expected_type=Text of
Nothing ->
Postgres_Data_Link.Connection details source
table_name : Text ->
Postgres_Data_Link.Table table_name details source
link_type = DB_Data_Link_Type.from_js (get_optional_field "table" json)
Postgres_Data_Link.Value details source link_type
## PRIVATE
read self (format = Auto_Detect) (on_problems : Problem_Behavior) =
@ -47,7 +40,4 @@ type Postgres_Data_Link
if format != Auto_Detect then Error.throw (Illegal_Argument.Error "Only Auto_Detect can be used with a Postgres Data Link, as it points to a database.") else
default_options = DB_Data_Link_Helpers.data_link_connection_parameters self.source
connection = self.details.connect default_options allow_data_links=False
case self of
Postgres_Data_Link.Connection _ _ -> connection
Postgres_Data_Link.Table table_name _ _ ->
connection.query table_name
self.link_type.interpret connection

View File

@ -44,7 +44,7 @@ type Postgres_Connection
Our generator is supposed to always quote identifiers
entity_naming_properties = Entity_Naming_Properties.from_jdbc_connection jdbc_connection encoding is_case_sensitive=True
Postgres_Connection.Value (Connection.new jdbc_connection Dialect.postgres entity_naming_properties) make_new data_link_setup
Postgres_Connection.Value (Connection.new jdbc_connection Dialect.postgres entity_naming_properties data_link_setup) make_new
## PRIVATE
@ -53,8 +53,7 @@ type Postgres_Connection
Arguments:
- connection: the underlying connection.
- make_new: a function that returns a new connection.
- data_link_setup: the setup for saving the connection as a data link.
private Value (connection:Connection) (make_new : Text -> Text -> Postgres_Connection) (data_link_setup : Data_Link_Setup)
private Value (connection:Connection) (make_new : Text -> Text -> Postgres_Connection)
## ICON close
Closes the connection releasing the underlying database resources
@ -300,8 +299,8 @@ type Postgres_Connection
## ICON data_output
Creates a Data Link that will open the same connection.
@on_existing_file (Existing_File_Behavior.widget include_backup=False include_append=False)
save_as_data_link self destination (on_existing_file:Existing_File_Behavior = Existing_File_Behavior.Error) =
self.data_link_setup.save_as_data_link destination on_existing_file
save_as_data_link self destination (on_existing_file:Existing_File_Behavior = ..Error) =
self.base_connection.save_as_data_link destination on_existing_file
## PRIVATE
Converts this value to a JSON serializable object.

View File

@ -53,6 +53,7 @@ import project.Internal.Base_Generator
import project.Internal.Common.Database_Join_Helper
import project.Internal.Common.Lookup_Query_Helper
import project.Internal.Common.Row_Number_Helpers
import project.Internal.DB_Data_Link_Helpers
import project.Internal.Helpers
import project.Internal.IR.Context.Context
import project.Internal.IR.From_Spec.From_Spec
@ -2745,15 +2746,25 @@ type DB_Table
Some operations, like writing to tables, require their target to be a
trivial query.
is_trivial_query : Boolean -> Boolean ! Table_Not_Found
is_trivial_query self =
Arguments:
- fail_if_not_found: If `True`, a `Table_Not_Found` error is raised if the
table does not exist in the database. Otherwise, `False` is returned.
is_trivial_query self (fail_if_not_found : Boolean = True) -> Boolean ! Table_Not_Found =
case self.context.from_spec of
From_Spec.Table internal_table_name _ _ ->
if self.name != internal_table_name then False else
trivial_counterpart = self.connection.query (SQL_Query.Table_Name self.name)
trivial_counterpart.if_not_error <|
# If the table spec seems trivial, but the underlying table does not exist, we propagate the Table_Not_Found error.
check_context_and_columns =
if self.context != trivial_counterpart.context then False else
column_descriptor internal_column = [internal_column.name, internal_column.expression]
my_columns = self.internal_columns.map column_descriptor
trivial_columns = trivial_counterpart.internal_columns.map column_descriptor
my_columns == trivial_columns
if fail_if_not_found then trivial_counterpart.if_not_error check_context_and_columns else
if trivial_counterpart.is_error then False else check_context_and_columns
_ -> False
## PRIVATE
Provides a simplified text representation for display in the REPL and errors.
@ -2825,6 +2836,15 @@ type DB_Table
# TODO This should ideally be done in a streaming manner, or at least respect the row limits.
self.read.write path format on_existing_file match_columns on_problems
## GROUP Standard.Base.Output
ICON data_output
Creates a Data Link that will act as a view into the query represented by
this table.
@on_existing_file (Existing_File_Behavior.widget include_backup=False include_append=False)
save_as_data_link self destination (on_existing_file:Existing_File_Behavior = ..Error) =
DB_Data_Link_Helpers.save_table_as_data_link self destination on_existing_file
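For example, a derived query can be persisted so that opening the data link re-runs it against the database (a sketch; the table and column names are hypothetical):

example_save_query connection =
    orders = connection.query (..Table_Name "orders")
    # The filtered view is stored as a query, not as materialized data.
    recent = orders.filter "year" (..Equal 2024)
    recent.save_as_data_link (Enso_File.root / "recent_orders.datalink")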
## ALIAS fill missing, if_nothing
GROUP Standard.Base.Values
ICON table_clean

View File

@ -13,6 +13,7 @@ import project.Internal.IR.SQL_Expression.SQL_Expression
import project.Internal.IR.SQL_Join_Kind.SQL_Join_Kind
import project.Internal.IR.Traversal_Helpers
import project.SQL.SQL_Builder
import project.SQL_Statement.SQL_Statement
from project.Dialect import Temp_Table_Style
from project.Errors import Unsupported_Database_Operation
from project.Internal.IR.Operation_Metadata import Row_Number_Metadata
@ -56,7 +57,7 @@ type Dialect_Operations
type SQL_Generator
## We can use a Hashset for the let-binding names, since they are guaranteed
to be distinct and not result in any shadowing.
Value (let_bindings_ref : Ref Hashset)
private Value (let_bindings_ref : Ref Hashset)
## PRIVATE
private new -> SQL_Generator = SQL_Generator.Value (Ref.new Hashset.empty)
@ -117,7 +118,12 @@ type SQL_Generator
From_Spec.Table name as_name _ ->
dialect.wrap_identifier name ++ alias dialect as_name
From_Spec.Query raw_sql as_name ->
SQL_Builder.code raw_sql . paren ++ alias dialect as_name
subquery = case raw_sql of
prepared_statement : SQL_Statement ->
SQL_Builder.from_fragments prepared_statement.fragments
raw_code : Text ->
SQL_Builder.code raw_code
subquery.paren ++ alias dialect as_name
From_Spec.Literal_Values vecs column_names as_name ->
Runtime.assert (vecs.length == column_names.length) "Vectors and column names must have the same length"
dialect.make_table_literal vecs column_names as_name

View File

@ -1,8 +1,14 @@
from Standard.Base import all
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.Errors.Illegal_State.Illegal_State
from Standard.Base.Enso_Cloud.Data_Link_Helpers import Data_Link_Source_Metadata
from Standard.Table import Table
import project.Connection.Connection_Options.Connection_Options
import project.Dialect.Temp_Table_Style
import project.Internal.Data_Link_Setup.DB_Data_Link_Type
import project.Internal.IR.From_Spec.From_Spec
## PRIVATE
data_link_connection_parameters (source : Data_Link_Source_Metadata) -> Connection_Options =
@ -12,3 +18,56 @@ data_link_connection_parameters (source : Data_Link_Source_Metadata) -> Connecti
audit_mode = if Enso_User.is_logged_in then "cloud" else "local"
options_vector = [["enso.internal.audit", audit_mode]] + (if related_asset_id.is_nothing then [] else [["enso.internal.relatedAssetId", related_asset_id]])
Connection_Options.Value options_vector
## PRIVATE
save_table_as_data_link table destination on_existing_file:Existing_File_Behavior =
data_link_setup = table.connection.data_link_setup.if_nothing <|
Error.throw (Illegal_Argument.Error "Saving a Table as a data link is currently not supported in this backend.")
# For a trivial query we return the table name.
link_type = if table.is_trivial_query fail_if_not_found=False then DB_Data_Link_Type.Table table.name else
sql_statement = table.to_sql
prepared = sql_statement.prepare
# If there are no interpolations, we can do a raw query represented by Text (more friendly in the GUI).
if prepared.second.is_empty then DB_Data_Link_Type.Query prepared.first else
# Lastly, if there are interpolations, we need to fully serialize the statement.
DB_Data_Link_Type.SQL_Statement sql_statement
result = data_link_setup.save_as_data_link destination on_existing_file link_type
referred_temporary_tables = _find_referred_temporary_tables table.connection table.context
if referred_temporary_tables.is_nothing then result else
warning = Illegal_State.Error "The saved query seems to refer to tables "+referred_temporary_tables.to_text+" which are temporary. Such tables may cease to exist once the session is closed, so the saved data link will no longer be valid and will fail to open."
Warning.attach warning result
## PRIVATE
Checks if the query refers to any tables that are temporary.
Note that this is a heuristic and it may lead to false positives if
tables with the same name exist across schemas. Supporting tables with
clashing names across schemas is something that may need to be revisited
overall in the Database library.
private _find_referred_temporary_tables connection context -> Nothing | Vector Text =
encountered_table_names = Vector.build builder->
context.from_spec.traverse x-> case x of
From_Spec.Table name _ _ ->
builder.append name
x
_ -> x
return_nothing_if_empty vec = if vec.is_empty then Nothing else vec
case connection.dialect.temp_table_style of
# For prefix-style, we just inspect names.
Temp_Table_Style.Hash_Prefix ->
encountered_temporary_tables = encountered_table_names.filter name->
name.starts_with "#"
return_nothing_if_empty encountered_temporary_tables
# For CREATE TEMPORARY we query `getTables` and try to check table Type
Temp_Table_Style.Temporary_Table ->
table_info = connection.get_tables_advanced include_hidden=True
# If types are unknown, we cannot tell anything.
if table_info.column_names.contains "Type" . not then Nothing else
encountered_table_info = table_info.join (Table.new [["Name", encountered_table_names]]) join_kind=..Inner on="Name"
temporary_indicator = encountered_table_info.at "Type" . map type_name->
type_name.contains "temp" case_sensitivity=..Insensitive
encountered_temporary_tables = encountered_table_info.filter temporary_indicator . at "Name" . to_vector
return_nothing_if_empty encountered_temporary_tables

View File

@ -7,6 +7,8 @@ import Standard.Base.Runtime.Context
from Standard.Base.Enso_Cloud.Data_Link_Helpers import data_link_extension, secure_value_to_json, save_password_for_data_link
import project.Connection.Credentials.Credentials
import project.SQL_Statement.SQL_Statement
from project.Connection.Connection import make_table_from_query
## PRIVATE
type Data_Link_Setup
@ -21,7 +23,7 @@ type Data_Link_Setup
already_a_data_link -> Data_Link_Setup = Data_Link_Setup.Unavailable "Saving connections established through a Data Link is not allowed. Please copy the Data Link instead."
## PRIVATE
save_as_data_link self destination on_existing_file:Existing_File_Behavior = case self of
save_as_data_link self destination on_existing_file:Existing_File_Behavior link_type:DB_Data_Link_Type=..Database = case self of
Data_Link_Setup.Available create_fn -> Context.Output.if_enabled disabled_message="As writing is disabled, cannot save to a Data Link. Press the Write button ▶ to perform the operation." panic=False <|
case destination of
_ : Enso_File ->
@ -31,12 +33,18 @@ type Data_Link_Setup
_ -> Error.throw (Illegal_Argument.Error "Invalid value for `on_existing_file` parameter, only `Overwrite` and `Error` are supported here.")
exists_checked = if replace_existing.not && destination.exists then Error.throw (File_Error.Already_Exists destination)
exists_checked.if_not_error <|
json = create_fn destination
connection_description = create_fn destination
json = link_type.add_to_data_link_description connection_description
Data_Link.write_config destination json replace_existing
_ -> Error.throw (Illegal_Argument.Error "Currently a connection can only be saved as a Data Link into the Enso Cloud. Please provide an `Enso_File` as destination.")
Data_Link_Setup.Unavailable cause ->
Error.throw (Illegal_Argument.Error "Cannot save connection as Data Link: "+cause)
word = case link_type of
DB_Data_Link_Type.Database -> "connection"
DB_Data_Link_Type.Table _ -> "table"
DB_Data_Link_Type.Query _ -> "query"
DB_Data_Link_Type.SQL_Statement _ -> "query"
Error.throw (Illegal_Argument.Error "Cannot save "+word+" as Data Link: "+cause)
## PRIVATE
save_credentials_for_data_link data_link_location:Enso_File credentials:Credentials -> JS_Object =
@ -45,3 +53,82 @@ type Data_Link_Setup
# But we keep the username as-is - if it was in plain text, it will stay in plain text.
JS_Object.from_pairs [["username", secure_value_to_json credentials.username], ["password", secure_value_to_json secret_password]]
## PRIVATE
type DB_Data_Link_Type
## PRIVATE
Database
## PRIVATE
Table name:Text
## PRIVATE
Query query:Text
## PRIVATE
SQL_Statement sql_statement:SQL_Statement
## PRIVATE
Alters the connection description JSON, adding information dependent on
the data link type (table name or query to fetch, when applicable).
add_to_data_link_description self connection_description:JS_Object -> JS_Object =
as_js_object = self.to_js_object
if as_js_object.is_nothing then connection_description else
connection_description.set_value "table" as_js_object
## PRIVATE
Serializes this data link type to an appropriate JSON representation that
can be assigned to the `table` field in a data link.
to_js_object self = case self of
DB_Data_Link_Type.Database ->
Nothing
DB_Data_Link_Type.Table name ->
name
DB_Data_Link_Type.Query query ->
JS_Object.from_pairs [["query", query]]
DB_Data_Link_Type.SQL_Statement sql_statement ->
serialized = sql_statement.serialize ensure_roundtrip=True
serialized.if_not_error <|
## Instead of nesting the serialized query as a JSON object,
we store its JSON representation as a string - this will be
more friendly for the data link modal in the dashboard.
JS_Object.from_pairs [["sql_statement", serialized.to_json]]
## PRIVATE
Parses the `table` field of a data link JSON, returning the appropriate link type.
This is the inverse of `to_js_object`.
from_js value:Any -> DB_Data_Link_Type ! Illegal_Argument = case value of
Nothing ->
DB_Data_Link_Type.Database
table_name : Text ->
DB_Data_Link_Type.Table table_name
obj : JS_Object ->
fields = obj.field_names
parse_simple_query =
query = obj.get "query"
if query.is_a Text then DB_Data_Link_Type.Query query else
Error.throw (Illegal_Argument.Error "Invalid JSON inside of data link: expected `query` field to be Text but got: "+query.to_display_text)
parse_serialized_statement =
sql_statement_json = obj.get "sql_statement"
if sql_statement_json.is_a Text then DB_Data_Link_Type.SQL_Statement (SQL_Statement.deserialize sql_statement_json) else
Error.throw (Illegal_Argument.Error "Invalid JSON inside of data link: expected `sql_statement` field to be a Text containing JSON but got: "+sql_statement_json.to_display_text)
if fields == ["query"] then parse_simple_query else
if fields == ["sql_statement"] then parse_serialized_statement else
Error.throw (Illegal_Argument.Error "Invalid JSON inside of data link: expected exactly one field: `query` or `sql_statement`, but got: "+obj.to_display_text)
_ -> Error.throw (Illegal_Argument.Error "Invalid JSON inside of data link: expected Text or object but got: "+value.to_display_text)
## PRIVATE
Interprets this data link against the given connection.
The `Database` link will return the connection as-is, whereas the other
types will return a `DB_Table` for the specific table/query.
interpret self connection = case self of
DB_Data_Link_Type.Database ->
connection
DB_Data_Link_Type.Table name ->
connection.query (..Table_Name name)
DB_Data_Link_Type.Query raw_sql ->
connection.query (..Raw_SQL raw_sql)
DB_Data_Link_Type.SQL_Statement sql_statement ->
make_table_from_query connection sql_statement alias=""
. catch Any error-> Error.throw (Illegal_State.Error "Failed to read back the SQL statement from the data link: "+error.to_display_text)
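Reading such a data link back then yields either a `Connection` (the `Database` variant) or a `DB_Table` (the other variants), e.g. (hypothetical path; assumes `read` on the data link file performs this resolution):

example_read_link =
    # Returns a DB_Table if the link stored a table name or query.
    (Enso_File.root / "recent_orders.datalink") . read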

View File

@ -6,6 +6,7 @@ import project.Internal.IR.Internal_Column.Internal_Column
import project.Internal.IR.Order_Descriptor.Order_Descriptor
import project.Internal.IR.SQL_Expression.SQL_Expression
import project.SQL.SQL_Builder
import project.SQL_Statement.SQL_Statement
## PRIVATE
@ -22,7 +23,7 @@ type Context
- internal_temporary_keep_alive_reference: See `From_Spec.Table` for more
details.
for_table : Text -> Text -> Any -> Context
for_table table_name alias=table_name internal_temporary_keep_alive_reference=Nothing =
for_table table_name:Text alias:Text=table_name internal_temporary_keep_alive_reference=Nothing =
Context.Value (From_Spec.Table table_name alias internal_temporary_keep_alive_reference=internal_temporary_keep_alive_reference) [] [] [] Nothing []
## PRIVATE
@ -30,10 +31,9 @@ type Context
Creates a query context that runs a raw SQL query.
Arguments:
- raw_sql: The name of the table for which the context is being created.
- raw_sql: The SQL statement (either as a prepared statement or raw text).
- alias: An alias name to use for table within the query.
for_query : Text -> Text -> Context
for_query raw_sql alias =
for_query (raw_sql:Text|SQL_Statement) (alias:Text) -> Context =
Context.Value (From_Spec.Query raw_sql alias) [] [] [] Nothing []
## PRIVATE
@ -42,8 +42,7 @@ type Context
Arguments:
- subquery: The subquery to lift into a context.
for_subquery : From_Spec -> Context
for_subquery subquery =
for_subquery subquery:From_Spec -> Context =
Context.Value subquery [] [] [] Nothing []
## PRIVATE

View File

@ -4,6 +4,7 @@ import project.Internal.IR.Context.Context
import project.Internal.IR.Query.Query
import project.Internal.IR.SQL_Expression.SQL_Expression
import project.Internal.IR.SQL_Join_Kind.SQL_Join_Kind
import project.SQL_Statement.SQL_Statement
## PRIVATE
@ -31,12 +32,13 @@ type From_Spec
A query source that holds a raw SQL statement.
Arguments:
- raw_sql: the raw SQL code of a query used as a source for this table.
- raw_sql: the raw SQL code of a query used as a source for this table,
or a SQL_Statement representing SQL code with interpolated values.
- alias: the name by which the table can be referred to in other
parts of the query. This is especially useful, for example, in
self-joins, allowing one to differentiate between different instances
of the same table.
Query (raw_sql : Text) (alias : Text)
Query (raw_sql : Text | SQL_Statement) (alias : Text)
## PRIVATE

View File

@ -1,4 +1,5 @@
from Standard.Base import all
import Standard.Base.Errors.Illegal_State.Illegal_State
import Standard.Base.Runtime.State
import Standard.Table.Internal.Vector_Builder.Vector_Builder
@ -20,7 +21,7 @@ type SQL_Fragment
Arguments:
- code: A fragment of SQL code.
Code_Part code:Text
private Code_Part code:Text
## PRIVATE
@ -29,7 +30,7 @@ type SQL_Fragment
Arguments:
- object: A value that will be interpolated into the query.
Interpolation object:Any
private Interpolation object:Any
## PRIVATE
type SQL_Builder
@ -49,7 +50,7 @@ type SQL_Builder
code : Text -> SQL_Builder
code text =
vec = if text.is_empty then [] else [SQL_Fragment.Code_Part text]
SQL_Builder.Value (Vector_Builder.from_vector vec)
SQL_Builder.from_fragments vec
## PRIVATE
ADVANCED
@ -59,7 +60,12 @@ type SQL_Builder
- object: The object to be interpolated into the query as if it has the type
given by `sql_type`.
interpolation : Any -> SQL_Builder
interpolation object = SQL_Builder.Value (Vector_Builder.from_vector [SQL_Fragment.Interpolation object])
interpolation object = SQL_Builder.from_fragments [SQL_Fragment.Interpolation object]
## PRIVATE
Creates an SQL_Builder from a list of fragments.
from_fragments : Vector SQL_Fragment -> SQL_Builder
from_fragments fragments = SQL_Builder.Value (Vector_Builder.from_vector fragments)
## PRIVATE
ADVANCED
@ -86,7 +92,7 @@ type SQL_Builder
It can be used to concatenate parts of SQL code in O(1) time and at the
end build the actual query in linear time.
Value (fragments:(Vector_Builder SQL_Fragment))
private Value (fragments:(Vector_Builder SQL_Fragment))
## PRIVATE
ADVANCED
@ -118,6 +124,13 @@ type SQL_Builder
_ -> False
_ -> False
## PRIVATE
Extracts the interpolated constant from the builder.
This can only be called if `is_constant` returns `True`.
extract_constant self =
if self.is_constant.not then Panic.throw (Illegal_State.Error "`SQL_Builder.extract_constant` called on a builder that is not a constant.") else
self.fragments.vec.first.object
## PRIVATE
ADVANCED
Builds a SQL statement.

View File

@ -1,4 +1,5 @@
from Standard.Base import all
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import project.SQL.SQL_Fragment
@ -13,7 +14,7 @@ type SQL_Statement
The statement consists of SQL code with parameters and values that will be
interpolated for these parameters.
Value (internal_fragments:(Vector SQL_Fragment))
private Value (internal_fragments:(Vector SQL_Fragment))
## PRIVATE
ADVANCED
@ -68,15 +69,64 @@ type SQL_Statement
Returns a JS_Object representation of the statement.
to_js_object : JS_Object
to_js_object self =
self.serialize ensure_roundtrip=False
## PRIVATE
Serializes this SQL statement to JSON.
Arguments:
- ensure_roundtrip: If true, each interpolated object is checked to
ensure it can be deserialized back. If any object cannot make a
round-trip, an error is thrown. If false, no checks are performed.
serialize self (ensure_roundtrip : Boolean) -> JS_Object ! Unable_To_Serialize_SQL_Statement = Panic.recover Unable_To_Serialize_SQL_Statement <|
jsonify fragment = case fragment of
SQL_Fragment.Code_Part code -> JS_Object.from_pairs [["sql_code", code]]
SQL_Fragment.Code_Part code ->
JS_Object.from_pairs [["sql_code", code]]
SQL_Fragment.Interpolation obj ->
inner = JS_Object.from_pairs [["value", obj]]
JS_Object.from_pairs [["sql_interpolation", inner]]
obj_as_js = obj.to_js_object
if ensure_roundtrip then
reconstructed = Json.parse (Json.stringify obj_as_js)
valid_roundtrip = reconstructed == obj
if valid_roundtrip.not then
Panic.throw (Unable_To_Serialize_SQL_Statement.Error obj)
obj_as_js.if_not_error
JS_Object.from_pairs [["sql_interpolation", obj_as_js]]
fragments = self.internal_fragments.map jsonify
JS_Object.from_pairs [["query", fragments]]
result = fragments.if_not_error <| JS_Object.from_pairs [["query", fragments]]
result.if_not_error <|
if ensure_roundtrip then
Runtime.assert (SQL_Statement.deserialize result == self) ("SQL_Fragment serialization does not round-trip as expected: "+result.to_display_text+" did not deserialize back.")
result
## PRIVATE
deserialize (json : Text | JS_Object) -> SQL_Statement ! Illegal_Argument =
parsed_json = case json of
_ : JS_Object -> json
_ : Text ->
parsed = Json.parse json
if parsed.is_a JS_Object then parsed else
Error.throw (Illegal_Argument.Error "Error parsing SQL_Statement: expected a JSON object, but got: "+parsed.to_display_text+".")
fragments = parsed_json.get "query" if_missing=(Error.throw (Illegal_Argument.Error "Error parsing SQL_Statement: It should contain a `query` field. Fields were: "+parsed_json.field_names.to_display_text+"."))
if fragments.is_a Vector . not then Error.throw (Illegal_Argument.Error "Error parsing SQL_Statement: The `query` field should be an array, but got: "+fragments.to_display_text+".") else
parse_fragment fragment = case fragment of
_ : JS_Object ->
fields = fragment.field_names
if fields == ["sql_code"] then SQL_Fragment.Code_Part (fragment.get "sql_code") else
if fields == ["sql_interpolation"] then SQL_Fragment.Interpolation (fragment.get "sql_interpolation") else
Error.throw (Illegal_Argument.Error "Error parsing SQL_Statement: a fragment should contain exactly one field of either `sql_code` or `sql_interpolation`, but got: "+fragment.to_display_text+".")
_ -> Error.throw (Illegal_Argument.Error "Error parsing SQL_Statement: expected a `sql_code` or `sql_interpolation` object, but got: "+fragment.to_display_text+".")
SQL_Statement.Value (fragments.map parse_fragment)
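A round-trip sketch of `serialize` and `deserialize` (assumes `SQL_Builder.build` produces an `SQL_Statement`; the fragment values are hypothetical):

example_roundtrip =
    builder = SQL_Builder.code "SELECT 1000+" ++ SQL_Builder.interpolation 456
    stmt = builder.build
    json = stmt.serialize ensure_roundtrip=True
    # Expected to evaluate to True:
    SQL_Statement.deserialize json == stmt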
## PRIVATE
to_text self -> Text =
prepared = self.prepare
"SQL_Statement " + prepared.first + " with values " + prepared.second.to_text
## PRIVATE
type Unable_To_Serialize_SQL_Statement
## PRIVATE
Error obj
## PRIVATE
to_display_text self =
"Unable to serialize an SQL statement because it contains an interpolated object [" + self.obj.to_display_text + " (" + (Meta.type_of self.obj).to_text + ")] that cannot be deserialized back."

View File

@ -38,7 +38,7 @@ type SQLServer_Connection
## JDBC reports the table name length limit as 128, but it actually seems to be 116 for temp tables, so we override it
limited = Encoding_Limited_Naming_Properties.Instance Encoding.utf_8 limit=116 is_case_sensitive=True
modified_entity_naming_properties = Entity_Naming_Properties.Value for_table_names=limited for_column_names=jdbc_entity_naming_properties.for_column_names for_generated_column_names=jdbc_entity_naming_properties.for_generated_column_names
SQLServer_Connection.Value (Connection.new jdbc_connection SQLServer_Dialect.sqlserver modified_entity_naming_properties) make_new data_link_setup
SQLServer_Connection.Value (Connection.new jdbc_connection SQLServer_Dialect.sqlserver modified_entity_naming_properties data_link_setup) make_new
## PRIVATE
@ -47,8 +47,7 @@ type SQLServer_Connection
Arguments:
- connection: the underlying connection.
- make_new: a function that returns a new connection.
- data_link_setup: the setup for saving the connection as a data link.
Value connection make_new data_link_setup
Value connection make_new
## ICON close
Closes the connection releasing the underlying database resources
@ -298,8 +297,8 @@ type SQLServer_Connection
## ICON data_output
Creates a Data Link that will open the same connection.
@on_existing_file (Existing_File_Behavior.widget include_backup=False include_append=False)
save_as_data_link self destination (on_existing_file:Existing_File_Behavior = Existing_File_Behavior.Error) =
self.data_link_setup.save_as_data_link destination on_existing_file
save_as_data_link self destination (on_existing_file:Existing_File_Behavior = ..Error) =
self.base_connection.save_as_data_link destination on_existing_file
## PRIVATE
Converts this value to a JSON serializable object.

View File

@ -4,6 +4,7 @@ from Standard.Base.Enso_Cloud.Data_Link_Helpers import Data_Link_Source_Metadata
from Standard.Base.Enso_Cloud.Public_Utils import get_optional_field, get_required_field
import Standard.Database.Connection.Credentials.Credentials
import Standard.Database.Internal.Data_Link_Setup.DB_Data_Link_Type
import Standard.Database.Internal.DB_Data_Link_Helpers
import project.Connection.SQLServer_Details.SQLServer_Details
@ -11,12 +12,7 @@ import project.Connection.SQLServer_Details.SQLServer_Details
## PRIVATE
type SQLServer_Data_Link
## PRIVATE
A data-link returning a connection to the specified database.
Connection details:SQLServer_Details source:Data_Link_Source_Metadata
## PRIVATE
A data-link returning a query to a specific table within a database.
Table name:Text details:SQLServer_Details source:Data_Link_Source_Metadata
Value details:SQLServer_Details source:Data_Link_Source_Metadata link_type:DB_Data_Link_Type
## PRIVATE
parse json source -> SQLServer_Data_Link =
@ -30,11 +26,8 @@ type SQLServer_Data_Link
credentials = Credentials.Username_And_Password username password
details = SQLServer_Details.SQLServer host=host credentials=credentials port=port database=db_name
case get_optional_field "table" json expected_type=Text of
Nothing ->
SQLServer_Data_Link.Connection details source
table_name : Text ->
SQLServer_Data_Link.Table table_name details source
link_type = DB_Data_Link_Type.from_js (get_optional_field "table" json)
SQLServer_Data_Link.Value details source link_type
## PRIVATE
read self (format = Auto_Detect) (on_problems : Problem_Behavior) =
@ -42,7 +35,4 @@ type SQLServer_Data_Link
if format != Auto_Detect then Error.throw (Illegal_Argument.Error "Only Auto_Detect can be used with a SQLServer Data Link, as it points to a database.") else
default_options = DB_Data_Link_Helpers.data_link_connection_parameters self.source
connection = self.details.connect default_options allow_data_links=False
case self of
SQLServer_Data_Link.Connection _ _ -> connection
SQLServer_Data_Link.Table table_name _ _ ->
connection.query table_name
self.link_type.interpret connection

View File

@ -467,7 +467,7 @@ trim_ops =
_ ->
case chars.is_constant of
True ->
const = chars.fragments.vec.first.object
const = chars.extract_constant
if const.is_nothing || const.is_empty then SQL_Builder.code fn_name+"(" ++ input ++ ", " ++ whitespace ++ ")" else
SQL_Builder.code fn_name+"(" ++ input ++ ", " ++ chars ++ ")"
False ->

View File

@ -37,7 +37,7 @@ type Snowflake_Connection
Our generator always quotes identifiers, so we can rely on the case sensitivity.
This is the same as in Postgres.
entity_naming_properties = Entity_Naming_Properties.from_jdbc_connection jdbc_connection is_case_sensitive=True
Snowflake_Connection.Value (Connection.new jdbc_connection Snowflake_Dialect.snowflake entity_naming_properties) make_new data_link_setup
Snowflake_Connection.Value (Connection.new jdbc_connection Snowflake_Dialect.snowflake entity_naming_properties data_link_setup) make_new
## PRIVATE
@ -46,8 +46,7 @@ type Snowflake_Connection
Arguments:
- connection: the underlying connection.
- make_new: a function that returns a new connection.
- data_link_setup: the setup for saving the connection as a data link.
Value connection make_new data_link_setup
Value connection make_new
## ICON close
Closes the connection releasing the underlying database resources
@ -321,8 +320,8 @@ type Snowflake_Connection
## ICON data_output
Creates a Data Link that will open the same connection.
@on_existing_file (Existing_File_Behavior.widget include_backup=False include_append=False)
save_as_data_link self destination (on_existing_file:Existing_File_Behavior = Existing_File_Behavior.Error) =
self.data_link_setup.save_as_data_link destination on_existing_file
save_as_data_link self destination (on_existing_file:Existing_File_Behavior = ..Error) =
self.base_connection.save_as_data_link destination on_existing_file
## PRIVATE
Converts this value to a JSON serializable object.

View File

@ -4,6 +4,7 @@ from Standard.Base.Enso_Cloud.Data_Link_Helpers import Data_Link_Source_Metadata
from Standard.Base.Enso_Cloud.Public_Utils import get_optional_field, get_required_field
import Standard.Database.Connection.Credentials.Credentials
import Standard.Database.Internal.Data_Link_Setup.DB_Data_Link_Type
import Standard.Database.Internal.DB_Data_Link_Helpers
import project.Connection.Snowflake_Details.Snowflake_Details
@ -11,12 +12,7 @@ import project.Connection.Snowflake_Details.Snowflake_Details
## PRIVATE
type Snowflake_Data_Link
## PRIVATE
A data-link returning a connection to the specified database.
Connection details:Snowflake_Details source:Data_Link_Source_Metadata
## PRIVATE
A data-link returning a query to a specific table within a database.
Table name:Text details:Snowflake_Details source:Data_Link_Source_Metadata
Value details:Snowflake_Details source:Data_Link_Source_Metadata link_type:DB_Data_Link_Type
## PRIVATE
parse json source -> Snowflake_Data_Link =
@ -31,11 +27,8 @@ type Snowflake_Data_Link
credentials = Credentials.Username_And_Password username password
details = Snowflake_Details.Snowflake account=account database=db_name schema=schema warehouse=warehouse credentials=credentials
case get_optional_field "table" json expected_type=Text of
Nothing ->
Snowflake_Data_Link.Connection details source
table_name : Text ->
Snowflake_Data_Link.Table table_name details source
link_type = DB_Data_Link_Type.from_js (get_optional_field "table" json)
Snowflake_Data_Link.Value details source link_type
## PRIVATE
read self (format = Auto_Detect) (on_problems : Problem_Behavior) =
@ -43,7 +36,4 @@ type Snowflake_Data_Link
if format != Auto_Detect then Error.throw (Illegal_Argument.Error "Only Auto_Detect can be used with a Snowflake Data Link, as it points to a database.") else
default_options = DB_Data_Link_Helpers.data_link_connection_parameters self.source
connection = self.details.connect default_options allow_data_links=False
case self of
Snowflake_Data_Link.Connection _ _ -> connection
Snowflake_Data_Link.Table table_name _ _ ->
connection.query table_name
self.link_type.interpret connection

View File

@ -3,7 +3,6 @@ import Standard.Base.Errors.Empty_Error.Empty_Error
import Standard.Base.Errors.Common.Index_Out_Of_Bounds
import Standard.Base.Errors.Common.Not_Found
import Standard.Base.Errors.Common.Type_Error
import Standard.Base.Errors.Common.Unsupported_Argument_Types
import Standard.Base.Runtime.State
from Standard.Test import all
@ -169,7 +168,7 @@ add_specs suite_builder = suite_builder.group "List" group_builder->
txt.filter (..Less than="a") . should_equal List.Nil
txt.filter (..Greater than="b") . should_equal ["bbb", "cccc", "baaa", "ś"].to_list
txt.filter (..Between "b" "c") . should_equal ["bbb", "baaa"].to_list
Test.expect_panic_with (txt.filter (..Starts_With 42)) Unsupported_Argument_Types
Test.expect_panic Type_Error (txt.filter (..Starts_With 42))
["", Nothing, " ", "a"].to_list.filter (..Is_Empty) . should_equal ["", Nothing].to_list
["", Nothing, " ", "a"].to_list.filter (..Not_Empty) . should_equal [" ", "a"].to_list
@ -293,4 +292,3 @@ main filter=Nothing =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter filter

View File

@ -10,7 +10,6 @@ import Standard.Base.Errors.Common.Index_Out_Of_Bounds
import Standard.Base.Errors.Common.Missing_Argument
import Standard.Base.Errors.Common.Not_Found
import Standard.Base.Errors.Common.Type_Error
import Standard.Base.Errors.Common.Unsupported_Argument_Types
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.Errors.Unimplemented.Unimplemented
import Standard.Base.Runtime.Ref.Ref
@ -256,17 +255,17 @@ type_spec suite_builder name alter = suite_builder.group name group_builder->
txtvec.filter (..Contains 's\u0301') . should_equal ["ś"]
txtvec.filter (..Contains 'S\u0301' Case_Sensitivity.Sensitive) . should_equal []
txtvec.filter (..Contains 'S\u0301' Case_Sensitivity.Insensitive) . should_equal ["ś"]
Test.expect_panic_with (txtvec.filter (..Contains 42)) Unsupported_Argument_Types
Test.expect_panic Type_Error (txtvec.filter (..Contains 42))
txtvec.filter (..Starts_With "a") . should_equal ["aaa", "abab"]
txtvec.filter (..Starts_With "a" keep_or_remove=Filter_Action.Remove) . should_equal ["bbb", "cccc", "baaa", "ś"]
txtvec.filter (..Starts_With "A" Case_Sensitivity.Sensitive) . should_equal []
txtvec.filter (..Starts_With "A" Case_Sensitivity.Insensitive) . should_equal ["aaa", "abab"]
Test.expect_panic_with (txtvec.filter (..Starts_With 42)) Unsupported_Argument_Types
Test.expect_panic Type_Error (txtvec.filter (..Starts_With 42))
txtvec.filter (..Ends_With "a") . should_equal ["aaa", "baaa"]
txtvec.filter (..Ends_With "a" keep_or_remove=Filter_Action.Remove) . should_equal ["bbb", "abab", "cccc", "ś"]
txtvec.filter (..Ends_With "A" Case_Sensitivity.Sensitive) . should_equal []
txtvec.filter (..Ends_With "A" Case_Sensitivity.Insensitive) . should_equal ["aaa", "baaa"]
Test.expect_panic_with (txtvec.filter (..Ends_With 42)) Unsupported_Argument_Types
Test.expect_panic Type_Error (txtvec.filter (..Ends_With 42))
txtvec.filter (..Less than="a") . should_equal []
txtvec.filter (..Greater than="b") . should_equal ["bbb", "cccc", "baaa", "ś"]

View File

@ -0,0 +1,14 @@
{
"type": "Postgres_Connection",
"libraryName": "Standard.Database",
"host": "HOSTNAME",
"port": 12345,
"database_name": "DBNAME",
"credentials": {
"username": "USERNAME",
"password": "PASSWORD"
},
"table": {
"sql_statement": "{\"query\": [{\"sql_code\": \"SELECT 1000+\"}, {\"sql_interpolation\": 456}, {\"sql_code\": \" AS \\\"int\\\", \"}, {\"sql_interpolation\": \"my text\"}, {\"sql_code\": \" AS \\\"text\\\"\"}]}"
}
}
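For reference, the sql_statement above deserializes to the equivalent of SELECT 1000+456 AS "int", 'my text' AS "text". Once the HOSTNAME/USERNAME placeholders are substituted (as the PostgreSQL spec further below does via transform_file), reading the link should behave roughly like this sketch:

# A sketch; the spec substitutes the placeholder connection details first.
t = Data.read (enso_project.data / "datalinks" / "postgres-serialized-query.datalink")
t.column_names           # ["int", "text"]
t.at "int" . to_vector   # [1456]
t.at "text" . to_vector  # ['my text']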

View File

@ -0,0 +1,14 @@
{
"type": "Postgres_Connection",
"libraryName": "Standard.Database",
"host": "HOSTNAME",
"port": 12345,
"database_name": "DBNAME",
"credentials": {
"username": "USERNAME",
"password": "PASSWORD"
},
"table": {
"query": "SELECT 1+1 AS \"two\""
}
}
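This simpler `query` variant carries raw SQL text; opening the link is roughly equivalent to the following sketch, assuming a connection built from the same parameters and SQL_Query in scope:

connection.query (SQL_Query.Raw_SQL 'SELECT 1+1 AS "two"')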

View File

@ -45,11 +45,9 @@ add_specs suite_builder =
q1 = data.t1.filter (data.t1.at "A" == 42) . to_json
part1 = JS_Object.from_pairs [["sql_code", 'SELECT "T1"."A" AS "A", "T1"."B" AS "B", "T1"."C" AS "C" FROM "T1" AS "T1" WHERE (("T1"."A") = (']]
part2_sub = JS_Object.from_pairs [["value", 42]]
part2 = JS_Object.from_pairs [["sql_interpolation", part2_sub]]
part2 = JS_Object.from_pairs [["sql_interpolation", 42]]
part3 = JS_Object.from_pairs [["sql_code", ")) = ("]]
part4_sub = JS_Object.from_pairs [["value", True]]
part4 = JS_Object.from_pairs [["sql_interpolation", part4_sub]]
part4 = JS_Object.from_pairs [["sql_interpolation", True]]
part5 = JS_Object.from_pairs [["sql_code", ")"]]
expected = JS_Object.from_pairs [["query", [part1, part2, part3, part4, part5]]] . to_text
q1.should_equal expected
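The updated expectations reflect the new, flatter serialization format for interpolations:

# interp 42 now serializes as {"sql_interpolation": 42},
# where previously it was {"sql_interpolation": {"value": 42}}.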
@ -168,4 +166,3 @@ main filter=Nothing =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter filter

View File

@ -1,10 +1,12 @@
from Standard.Base import all
import Standard.Base.Errors.File_Error.File_Error
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.Errors.Illegal_State.Illegal_State
from Standard.Table import Table
from Standard.Database import all
import Standard.Database.DB_Table.DB_Table
from Standard.Test import all
@ -66,3 +68,85 @@ add_specs suite_builder prefix connection_details pending =
Panic.with_finalizer data_link_connection.close <|
r = data_link_connection.save_as_data_link other_data_link_location
r.should_fail_with Illegal_Argument
group_builder.specify "allows to save a data link to a database table or processed queries" <|
raw_connection = Database.connect connection_details
table_name = test_root.get.name
## The table cannot be temporary, as it will be 'viewed' through a separate Data Link connection.
A temporary table would not be visible to other connections on some backends.
table = (Table.new [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]]).select_into_database_table raw_connection table_name temporary=False
Panic.with_finalizer (raw_connection.drop_table table.name) <|
table_data_link = table.save_as_data_link (test_root.get / "table.datalink")
table_data_link.should_be_a Enso_File
Problems.assume_no_problems table_data_link
r1 = table_data_link.read
r1.should_be_a DB_Table
r1.column_names . should_equal ["X", "Y"]
r1.at "X" . to_vector . should_equal_ignoring_order [1, 2, 3]
## This data link should create a simple SQL query, without full interpolation serialization.
We cannot guarantee that, but we keep this test to ensure all paths are exercised.
selected_table = table.select_columns ["X"]
selected_table_data_link = selected_table.save_as_data_link (test_root.get / "table-selected.datalink")
selected_table_data_link.should_be_a Enso_File
Problems.assume_no_problems selected_table_data_link
r2 = selected_table_data_link.read
r2.should_be_a DB_Table
r2.column_names . should_equal ["X"]
r2.at "X" . to_vector . should_equal_ignoring_order [1, 2, 3]
complex_query_test_enabled = raw_connection.dialect.is_feature_supported ..Column_Operations
case complex_query_test_enabled of
False ->
IO.println "Skipping complex query data link tests, because the dialect does not support minimal column operations needed to prepare it."
True ->
queried_table = table
. set (table.at "X" * 10000 + 45) "Z"
. set (table.at "Y" + '...') "W"
queried_table_data_link = queried_table.save_as_data_link (test_root.get / "complex-query.datalink")
queried_table_data_link.should_be_a Enso_File
Problems.assume_no_problems queried_table_data_link
r3 = queried_table_data_link.read
r3.should_be_a DB_Table
r3.column_names . should_equal ["X", "Y", "Z", "W"]
m3 = r3.read
m3.at "Z" . to_vector . should_equal_ignoring_order [10045, 20045, 30045]
m3.at "W" . to_vector . should_equal_ignoring_order ['a...', 'b...', 'c...']
## We test cross-join because it returns a table whose internal name does not exist in the DB.
This used to break some logic, so we added this test to guard against regressions of the fix.
cross_join_test_enabled = raw_connection.dialect.is_feature_supported ..Cross_Join
case cross_join_test_enabled of
False ->
IO.println "Skipping cross-join data link tests, because the dialect does not support cross-join."
True ->
cross_joined_table = table.cross_join table
cross_joined_table_data_link = cross_joined_table.save_as_data_link (test_root.get / "cross-join.datalink")
cross_joined_table_data_link.should_be_a Enso_File
Problems.assume_no_problems cross_joined_table_data_link
group_builder.specify "warns if a query to a temporary table is saved as data link" <|
raw_connection = Database.connect connection_details
table_name = test_root.get.name
table = (Table.new [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]]).select_into_database_table raw_connection table_name temporary=True
table_data_link = table.save_as_data_link (test_root.get / "table-temporary.datalink")
table_data_link.should_be_a Enso_File
w1 = Problems.expect_only_warning Illegal_State table_data_link
w1.to_display_text . should_contain "saved query seems to refer to tables"
w1.to_display_text . should_contain "which are temporary"
complex_query_test_enabled = raw_connection.dialect.is_feature_supported ..Column_Operations
case complex_query_test_enabled of
False ->
IO.println "Skipping complex query data link tests, because the dialect does not support minimal column operations needed to prepare it."
True ->
queried_table = table
. set (table.at "X" * 10000 + 45) "Z"
. set (table.at "Y" + '...') "W"
queried_table_data_link = queried_table.save_as_data_link (test_root.get / "complex-query-temporary.datalink")
queried_table_data_link.should_be_a Enso_File
w2 = Problems.expect_only_warning Illegal_State queried_table_data_link
w2.to_display_text . should_contain "saved query seems to refer to tables"
w2.to_display_text . should_contain "which are temporary"
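A minimal sketch (with hypothetical table and file names) of the round-trip these specs exercise:

connection = Database.connect connection_details
table = connection.query "my_table"
data_link = table.save_as_data_link (test_root.get / "my_table.datalink")
restored = data_link.read    # a DB_Table backed by the same query
restored.column_names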

View File

@ -1002,6 +1002,8 @@ add_data_link_specs suite_builder =
connection_details = get_configured_connection_details
pending = if connection_details.is_nothing then "PostgreSQL test database is not configured. See README.md for instructions."
data_link_file = Temporary_Data_Link_File.make connection_details
## We have common tests in Save_Connection_Data_Link, but the tests here check specific examples,
e.g. data link formats that are also checked in the Dashboard tests, to ensure consistency.
suite_builder.group "[PostgreSQL] Data Link" pending=pending group_builder->
## datalink support
group_builder.specify "should be able to open a datalink setting up a connection to the database" <|
@ -1020,6 +1022,7 @@ add_data_link_specs suite_builder =
connection = Database.connect connection_details
Panic.with_finalizer connection.close <|
# We create the table that will then be accessed through the datalink, and ensure it's cleaned up afterwards.
connection.drop_table if_exists=True "DatalinkedTable"
example_table = Panic.rethrow <|
(Table.new [["X", [22]], ["Y", ["o"]]]).select_into_database_table connection "DatalinkedTable" temporary=False
Panic.with_finalizer (connection.drop_table example_table.name) <|
@ -1031,6 +1034,21 @@ add_data_link_specs suite_builder =
data_link_table.at "X" . to_vector . should_equal [22]
data_link_table.at "Y" . to_vector . should_equal ["o"]
## datalink support
group_builder.specify "should be able to open a datalink to a DB query" <|
table_data_link_file = transform_file (enso_project.data / "datalinks" / "postgres-simple-query.datalink") connection_details
data_link_table = Data.read table_data_link_file
data_link_table.should_be_a DB_Table
data_link_table.column_names . should_equal ["two"]
data_link_table.at "two" . to_vector . should_equal [2]
table_data_link_file_2 = transform_file (enso_project.data / "datalinks" / "postgres-serialized-query.datalink") connection_details
data_link_table_2 = Data.read table_data_link_file_2
data_link_table_2.should_be_a DB_Table
data_link_table_2.column_names . should_equal ["int", "text"]
data_link_table_2.at "int" . to_vector . should_equal [1456]
data_link_table_2.at "text" . to_vector . should_equal ["my text"]
## datalink support
group_builder.specify "will reject any format overrides or stream operations on the data link" <|
r1 = Data.read data_link_file.get ..Plain_Text

View File

@ -518,7 +518,7 @@ add_specs suite_builder setup make_new_connection persistent_connector=True =
db_table_3 = db_table_1.aggregate ["X"] [Aggregate_Column.Sum (expr "[Y]*[Y]") "C3"] . set (expr "[X] + 1") "X"
db_table_4 = db_table_2.join db_table_3 join_kind=Join_Kind.Left_Outer
db_table_4.is_trivial_query . should_fail_with Table_Not_Found
db_table_4.is_trivial_query . should_be_false
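# (After this change, is_trivial_query returns False rather than raising
# Table_Not_Found when the table's internal name does not exist in the DB.)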
copied_table = db_table_4.select_into_database_table data.connection (Name_Generator.random_name "copied-table") temporary=True primary_key=Nothing
copied_table.column_names . should_equal ["X", "Y", "C1", "C2", "Right X", "C3"]

View File

@ -4,6 +4,7 @@ from Standard.Test import all
import project.Helpers.Auto_Constructor_Instrumentation_Spec
import project.Helpers.Sorted_List_Index_Spec
import project.Helpers.SQL_Statement_Serialization
import project.Helpers.Unique_Naming_Strategy_Spec
import project.Helpers.Value_Type_Spec
@ -11,6 +12,7 @@ add_specs suite_builder =
Auto_Constructor_Instrumentation_Spec.add_specs suite_builder
Unique_Naming_Strategy_Spec.add_specs suite_builder
Sorted_List_Index_Spec.add_specs suite_builder
SQL_Statement_Serialization.add_specs suite_builder
Value_Type_Spec.add_specs suite_builder
main filter=Nothing =

View File

@ -0,0 +1,35 @@
from Standard.Base import all
import Standard.Database.SQL.SQL_Builder
import Standard.Database.SQL_Statement.SQL_Statement
import Standard.Database.SQL_Statement.Unable_To_Serialize_SQL_Statement
from Standard.Test import all
type My_Type
Value x
to_js_object self = JS_Object.from_pairs [["x", self.x]]
add_specs suite_builder = suite_builder.group "SQL_Statement serialization" group_builder->
code c = SQL_Builder.code c
interp x = SQL_Builder.interpolation x
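# `code` wraps literal SQL text and `interp` wraps a value to be bound;
# `++` concatenates fragments and `.build` yields the final SQL_Statement.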
group_builder.specify "should allow to serialize and deserialize a SQL_Statement" <|
big_integer = 1234567890123456789012345678901234567890
stmt1 = (code "SELECT A + " ++ interp 1 ++ code ", B + " ++ interp 2.5 ++ code " FROM table WHERE C = " ++ interp "str" ++ code " AND D = " ++ interp (Date_Time.new 2021 1 1 12 30) ++ code " AND E = " ++ interp big_integer) . build
json = stmt1.to_json
stmt2 = SQL_Statement.deserialize json
# After deserializing, the result should be equal to the original statement.
stmt2 . should_equal stmt1
group_builder.specify "will throw an error if the interpolation does not round-trip, unless ensure_roundtrip is false" <|
stmt = (code "SELECT A + " ++ interp (My_Type.Value 1) ++ code " FROM table") . build
stmt.to_js_object . should_equal (stmt.serialize (ensure_roundtrip=False))
stmt.serialize ensure_roundtrip=True . should_fail_with Unable_To_Serialize_SQL_Statement
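# My_Type serializes via to_js_object, but nothing records how to rebuild the
# original value, so with ensure_roundtrip=True serialization fails as above.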
main filter=Nothing =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter filter