Renaming various constructors and moving types around for Database. (#3715)

Repairing the constructor names following the types work. Some general tidying up as well.

- Remove `Standard.Database.Data.Column.Aggregate_Column_Builder`.
- Remove `Standard.Database.Data.Dialect.Dialect.Dialect_Data`.
- Remove unused imports and update some type definitions.
- Rename `Postgres.Postgres_Data` => `Postgres_Options.Postgres`.
- Rename `Redshift.Redshift_Data` => `Redshift_Options.Redshift`.
- Rename `SQLite.SQLite_Data` => `SQLite_Options.SQLite`.
- Rename `Credentials.Credentials_Data` => `Credentials.Username_And_Password`.
- Rename `Sql` to `SQL` across the board.
- Merge `Standard.Database.Data.Internal` into `Standard.Database.Internal`.
- Move dialects into `Internal` and merge the `make_concat` function from `Helpers` into `Base_Generator`. (A short migration sketch follows this list.)
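For orientation, a hypothetical before/after sketch of opening a connection under the renamed API (the host, database and credential values are placeholders, and `Database.connect` is assumed to be the public entry point shown in the diff below):

```
# Before this commit:
# conn = Database.connect (Postgres.Postgres_Data "localhost" 5432 "my_db")

# After this commit:
creds = Credentials.Username_And_Password "user" "secret"
conn = Database.connect (Postgres_Options.Postgres "localhost" 5432 "my_db" credentials=creds)
```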
James Dunkerley 2022-09-19 13:39:40 +01:00 committed by GitHub
parent a771e40002
commit d6346e9d66
36 changed files with 486 additions and 572 deletions

View File

@ -8,7 +8,7 @@
const textType = 'Builtins.Main.Text'
/** The module prefix added for unknown SQL types. */
const customSqlTypePrefix = 'Standard.Database.Data.Sql.Sql_Type.'
const customSqlTypePrefix = 'Standard.Database.Data.SQL.SQL_Type.'
/** Specifies opacity of interpolation background color. */
const interpolationBacgroundOpacity = 0.3
@ -87,7 +87,7 @@ class SqlVisualization extends Visualization {
constructor(api) {
super(api)
this.setPreprocessor('Standard.Visualization.Sql.Visualization', 'prepare_visualization')
this.setPreprocessor('Standard.Visualization.SQL.Visualization', 'prepare_visualization')
}
onDataReceived(data) {

View File

@ -19,7 +19,7 @@
from Standard.Base import all
import Standard.Base.Error.Common as Errors
from Standard.Base.Data.Text.Regex.Engine.Default as Default_Engine export Default
from Standard.Base.Data.Text.Regex.Engine.Default export Default
## The `Data.Text.Regex.Engine.Engine` interface.
type Engine

View File

@ -1,16 +1,16 @@
from Standard.Base import all
import Standard.Database.Data.Internal.IR
import Standard.Database.Data.Sql
import Standard.Database.Internal.IR
import Standard.Database.Data.SQL
import Standard.Database.Data.Table as Database_Table
import Standard.Table.Data.Table as Materialized_Table
from Standard.Table import Column_Selector, Column_Name_Mapping
from Standard.Database.Data.Sql import Sql_Type, Sql_Type_Data
from Standard.Database.Data.SQL import SQL_Type, Statement
from Standard.Database.Internal.JDBC_Connection import create_table_statement, handle_sql_errors
from Standard.Database.Internal.Result_Set import read_column, result_set_to_table
from Standard.Database.Errors import Sql_Error
from Standard.Database.Errors import SQL_Error
polyglot java import java.lang.UnsupportedOperationException
@ -59,10 +59,10 @@ type Connection
Arguments:
- database: The name of the database to connect to.
set_database : Text -> Connection ! Sql_Error
set_database : Text -> Connection ! SQL_Error
set_database self database =
if database == self.database then self else
Sql_Error.throw_sql_error "Changing database is not supported."
SQL_Error.throw_sql_error "Changing database is not supported."
## Returns the list of schemas for the connection within the current database (or catalog).
schemas : [Text]
@ -79,10 +79,10 @@ type Connection
Arguments:
- schema: The name of the schema to connect to.
set_schema : Text -> Connection ! Sql_Error
set_schema : Text -> Connection ! SQL_Error
set_schema self schema =
if schema == self.schema then self else
Sql_Error.throw_sql_error "Changing schema is not supported."
SQL_Error.throw_sql_error "Changing schema is not supported."
## Gets a list of the table types
table_types : [Text]
@ -116,11 +116,11 @@ type Connection
Executes a raw query and returns the result as an in-memory Table.
Arguments:
- query: either raw SQL code as Text or an instance of Sql.Statement
- query: either raw SQL code as Text or an instance of SQL.Statement
representing the query to execute.
- expected_types: an optional array of expected types of each column;
meant only for internal use.
execute_query : Text | Sql.Statement -> Vector Sql.Sql_Type -> Materialized_Table
execute_query : Text | Statement -> Vector SQL_Type -> Materialized_Table
execute_query self query expected_types=Nothing =
self.jdbc_connection.with_prepared_statement query stmt->
result_set_to_table stmt.executeQuery expected_types
@ -132,9 +132,9 @@ type Connection
returns 0 for other types of queries (like creating or altering tables).
Arguments:
- query: either raw SQL code as Text or an instance of Sql.Statement
- query: either raw SQL code as Text or an instance of SQL.Statement
representing the query to execute.
execute_update : Text | Sql.Statement -> Integer
execute_update : Text | SQL.Statement -> Integer
execute_update self query =
self.jdbc_connection.with_prepared_statement query stmt->
Panic.catch UnsupportedOperationException stmt.executeLargeUpdate _->
@ -147,10 +147,10 @@ type Connection
Arguments:
- table_name: The name of the table to fetch the column metadata for.
# fetch_columns : Text -> Vector [Text, Sql.Sql_Type]
# fetch_columns : Text -> Vector [Text, SQL_Type]
fetch_columns : Text -> Vector Any
fetch_columns self table_name =
query = IR.Select_All (IR.make_ctx_from table_name)
query = IR.Select_All (IR.context_for_table table_name)
compiled = self.dialect.generate_sql query
self.jdbc_connection.fetch_columns compiled
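A minimal usage sketch for the two entry points above, assuming `connection` is a live `Connection` and the table name is a placeholder:

```
materialized = connection.execute_query "SELECT * FROM items"
affected = connection.execute_update "DELETE FROM items WHERE id = 1"
```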

View File

@ -2,7 +2,7 @@ from Standard.Base import all
type Credentials
## Simple username and password type.
Credentials_Data username:Text password:Text
Username_And_Password username:Text password:Text
## Override `to_text` to mask the password field.
to_text : Text
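A sketch of the renamed constructor; per the note above, `to_text` masks the password field:

```
creds = Credentials.Username_And_Password "alice" "hunter2"
IO.println creds.to_text  # the password is masked in the printed form
```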

View File

@ -2,11 +2,12 @@ from Standard.Base import all
from Standard.Database.Connection.Connection_Options import Connection_Options, Connection_Options_Data
import Standard.Database.Connection.Postgres
import Standard.Database.Connection.SQLite
import Standard.Database.Connection.Redshift
from Standard.Database.Connection.Postgres_Options import Postgres_Options
from Standard.Database.Connection.SQLite_Options import SQLite_Options
from Standard.Database.Connection.Redshift_Options import Redshift_Options
from Standard.Database.Connection.Connection import Connection, Sql_Error
from Standard.Database.Connection.Connection import Connection
from Standard.Database.Errors import SQL_Error
## UNSTABLE
@ -15,6 +16,6 @@ from Standard.Database.Connection.Connection import Connection, Sql_Error
Arguments:
- details: Connection_Details to use to connect.
- options: Any overriding options to use.
connect : (Postgres|SQLite|Redshift) -> Connection_Options -> Connection ! Sql_Error
connect : (Postgres_Options|SQLite_Options|Redshift_Options) -> Connection_Options -> Connection ! SQL_Error
connect details options=Connection_Options_Data =
details.connect options
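For example, an in-memory SQLite connection could then be opened as follows (a sketch, assuming the imports shown above are in scope; `In_Memory` is the variant named in the `SQLite_Options` constructor signature):

```
conn = connect (SQLite_Options.SQLite In_Memory)
```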

View File

@ -5,7 +5,7 @@ from Standard.Base.Data.Numbers import Parse_Error_Data
import Standard.Database.Internal.Postgres.Postgres_Connection
import Standard.Database.Connection.Connection_Options
from Standard.Database.Connection.Credentials import Credentials, Credentials_Data
from Standard.Database.Connection.Credentials import Credentials, Username_And_Password
import Standard.Database.Connection.SSL_Mode
from Standard.Database.Connection.SSL_Mode import all
@ -16,7 +16,7 @@ import Standard.Database.Internal.Postgres.Pgpass
polyglot java import org.postgresql.Driver
type Postgres
type Postgres_Options
## Connect to a PostgreSQL database.
Arguments:
@ -27,7 +27,7 @@ type Postgres
- credentials: The credentials to use for the connection (defaults to PGPass or No Authentication).
- use_ssl: Whether to use SSL (defaults to `Prefer`).
- client_cert: The client certificate to use or `Nothing` if not needed.
Postgres_Data (host:Text=default_postgres_host) (port:Integer=default_postgres_port) (database:Text=default_postgres_database) (schema:Text="") (credentials:(Credentials|Nothing)=Nothing) (use_ssl:SSL_Mode=Prefer) (client_cert:(Client_Certificate|Nothing)=Nothing)
Postgres (host:Text=default_postgres_host) (port:Integer=default_postgres_port) (database:Text=default_postgres_database) (schema:Text="") (credentials:(Credentials|Nothing)=Nothing) (use_ssl:SSL_Mode=Prefer) (client_cert:(Client_Certificate|Nothing)=Nothing)
## Build the Connection resource.
@ -41,7 +41,7 @@ type Postgres
## Cannot use default argument values as it gets into an infinite loop if you do.
make_new database schema =
Postgres_Data self.host self.port (database.if_nothing self.database) (schema.if_nothing self.schema) self.credentials self.use_ssl self.client_cert . connect options
Postgres self.host self.port (database.if_nothing self.database) (schema.if_nothing self.schema) self.credentials self.use_ssl self.client_cert . connect options
Postgres_Connection.create self.jdbc_url properties make_new
@ -66,7 +66,7 @@ type Postgres
Pgpass.read self.host self.port self.database username
Pair_Data username password ->
[Pair_Data 'user' username, Pair_Data 'password' password]
Credentials_Data username password ->
Username_And_Password username password ->
[Pair_Data 'user' username, Pair_Data 'password' password]
ssl_properties = ssl_mode_to_jdbc_properties self.use_ssl

View File

@ -3,7 +3,7 @@ from Standard.Base import all
import Standard.Database.Data.Dialect
import Standard.Database.Internal.JDBC_Connection
import Standard.Database.Connection.Connection
from Standard.Database.Connection.Credentials import Credentials, Credentials_Data
from Standard.Database.Connection.Credentials import Credentials, Username_And_Password
import Standard.Database.Connection.Connection_Options
import Standard.Database.Connection.SSL_Mode
from Standard.Database.Connection.SSL_Mode import all
@ -14,7 +14,7 @@ polyglot java import com.amazon.redshift.jdbc.Driver
polyglot java import java.util.Properties
polyglot java import org.enso.database.JDBCProxy
type Redshift
type Redshift_Options
## Connect to an AWS Redshift database.
Arguments:
@ -24,7 +24,7 @@ type Redshift
- credentials: The credentials to use for the connection (defaults to PGPass or No Authentication).
- use_ssl: Whether to use SSL (defaults to `Require`).
- client_cert: The client certificate to use or `Nothing` if not needed.
Redshift_Data (host:Text) (port:Integer=5439) (schema:Text='') (credentials:Credentials|AWS_Credential|Nothing=Nothing) (use_ssl:(Disable|Require|Verify_CA|Full_Verification)=Require) (client_cert:Client_Certificate|Nothing=Nothing)
Redshift (host:Text) (port:Integer=5439) (schema:Text='') (credentials:Credentials|AWS_Credential|Nothing=Nothing) (use_ssl:(Disable|Require|Verify_CA|Full_Verification)=Require) (client_cert:Client_Certificate|Nothing=Nothing)
## Build the Connection resource.
@ -60,7 +60,7 @@ type Redshift
[Pair_Data 'user' db_user] + (if profile == '' then [] else [Pair_Data 'profile' profile])
AWS_Key db_user access_key secret_access_key ->
[Pair_Data 'user' db_user, Pair_Data 'AccessKeyID' access_key, Pair_Data 'SecretAccessKey' secret_access_key]
Credentials_Data username password ->
Username_And_Password username password ->
[Pair_Data 'user' username, Pair_Data 'password' password]
## Disabled as Redshift SSL settings are different to PostgreSQL.

View File

@ -3,12 +3,12 @@ from Standard.Base import all
import Standard.Database.Internal.SQLite.SQLite_Connection
import Standard.Database.Connection.Connection_Options
type SQLite
type SQLite_Options
## Connect to a SQLite DB File or InMemory DB.
Arguments:
- location: Location of the SQLite database to connect to.
SQLite_Data (location:(In_Memory|File|Text))
SQLite (location:(In_Memory|File|Text))
## Build the Connection resource.

View File

@ -1,13 +1,13 @@
from Standard.Base import all
import Standard.Database.Data.Internal.Helpers
import Standard.Database.Data.Internal.IR
import Standard.Database.Internal.Helpers
import Standard.Database.Internal.IR
import Standard.Database.Data.Table
import Standard.Table.Data.Column as Materialized_Column
import Standard.Table.Data.Sort_Column_Selector
import Standard.Table.Data.Sort_Column
from Standard.Database.Data.Sql import Sql_Type
from Standard.Database.Data.SQL import SQL_Type, Statement
from Standard.Database.Data.Table import Integrity_Error
from Standard.Database.Errors import Unsupported_Database_Operation_Error
@ -30,10 +30,7 @@ type Column
which they come. Combined expressions must come from the same context -
they must both have the same filtering, grouping etc. rules applied to be
able to be combined.
# type Column (name : Text) (connection : Connection)
# (sql_type : Sql_Type) (expression : IR.Expression)
# (context : IR.Context)
Column_Data name connection sql_type expression context
Column_Data name:Text connection:Connection sql_type:SQL_Type expression:IR.Expression context:IR.Context
## UNSTABLE
@ -96,8 +93,8 @@ type Column
## UNSTABLE
Returns an Sql statement that will be used for materializing this column.
to_sql : Sql.Statement
Returns an SQL statement that will be used for materializing this column.
to_sql : Statement
to_sql self = self.to_table.to_sql
## PRIVATE
@ -114,7 +111,7 @@ type Column
If not specified, the `new_type` is the same as the current one.
`operand_type` is only relevant if the operand is not a column, it
defaults to the current type if not provided.
make_binary_op : Text -> Text -> (Column | Any) -> (Sql_Type | Nothing) -> (Sql_Type | Nothing) -> Column
make_binary_op : Text -> Text -> (Column | Any) -> (SQL_Type | Nothing) -> (SQL_Type | Nothing) -> Column
make_binary_op self op_kind operand new_type=Nothing operand_type=Nothing =
actual_new_type = new_type.if_nothing self.sql_type
case operand of
@ -139,7 +136,7 @@ type Column
- op_kind: The kind of the unary operator.
- new_type: The type of the SQL column that results from applying the
operator.
make_unary_op : Text -> Text -> (Sql_Type | Nothing) -> Column
make_unary_op : Text -> Text -> (SQL_Type | Nothing) -> Column
make_unary_op self op_kind new_type=Nothing =
actual_new_type = new_type.if_nothing self.sql_type
new_expr = IR.Operation op_kind [self.expression]
@ -232,7 +229,7 @@ type Column
`other`. If `other` is a column, the comparison is performed pairwise
between corresponding elements of `self` and `other`.
== : Column | Any -> Column
== self other = self.make_binary_op "=" other new_type=Sql_Type.boolean
== self other = self.make_binary_op "=" other new_type=SQL_Type.boolean
## UNSTABLE
@ -245,7 +242,7 @@ type Column
`other`. If `other` is a column, the comparison is performed pairwise
between corresponding elements of `self` and `other`.
!= : Column | Any -> Column
!= self other = self.make_binary_op "!=" other new_type=Sql_Type.boolean
!= self other = self.make_binary_op "!=" other new_type=SQL_Type.boolean
## UNSTABLE
@ -258,7 +255,7 @@ type Column
`other`. If `other` is a column, the comparison is performed pairwise
between corresponding elements of `self` and `other`.
>= : Column | Any -> Column
>= self other = self.make_binary_op ">=" other new_type=Sql_Type.boolean
>= self other = self.make_binary_op ">=" other new_type=SQL_Type.boolean
## UNSTABLE
@ -271,7 +268,7 @@ type Column
`other`. If `other` is a column, the comparison is performed pairwise
between corresponding elements of `self` and `other`.
<= : Column | Any -> Column
<= self other = self.make_binary_op "<=" other new_type=Sql_Type.boolean
<= self other = self.make_binary_op "<=" other new_type=SQL_Type.boolean
## UNSTABLE
@ -284,7 +281,7 @@ type Column
`other`. If `other` is a column, the comparison is performed pairwise
between corresponding elements of `self` and `other`.
> : Column | Any -> Column
> self other = self.make_binary_op ">" other new_type=Sql_Type.boolean
> self other = self.make_binary_op ">" other new_type=SQL_Type.boolean
## UNSTABLE
@ -297,7 +294,7 @@ type Column
`other`. If `other` is a column, the comparison is performed pairwise
between corresponding elements of `self` and `other`.
< : Column | Any -> Column
< self other = self.make_binary_op "<" other new_type=Sql_Type.boolean
< self other = self.make_binary_op "<" other new_type=SQL_Type.boolean
## UNSTABLE
@ -390,7 +387,7 @@ type Column
Returns a column of booleans, with `True` items at the positions where
this column contains a `Nothing`.
is_missing : Column
is_missing self = self.make_unary_op "ISNULL" new_type=Sql_Type.boolean
is_missing self = self.make_unary_op "ISNULL" new_type=SQL_Type.boolean
## UNSTABLE
@ -502,7 +499,7 @@ type Column
missing value (a Nothing or a column with missing values), the behaviour
on these missing values is vendor specific.
starts_with : Column | Text -> Column
starts_with self other = self.make_binary_op "starts_with" other new_type=Sql_Type.boolean
starts_with self other = self.make_binary_op "starts_with" other new_type=SQL_Type.boolean
## UNSTABLE
@ -516,7 +513,7 @@ type Column
missing value (a Nothing or a column with missing values), the behaviour
on these missing values is vendor specific.
ends_with : Column | Text -> Column
ends_with self other = self.make_binary_op "ends_with" other new_type=Sql_Type.boolean
ends_with self other = self.make_binary_op "ends_with" other new_type=SQL_Type.boolean
## UNSTABLE
@ -530,94 +527,12 @@ type Column
missing value (a Nothing or a column with missing values), the behaviour
on these missing values is vendor specific.
contains : Column | Text -> Column
contains self other = self.make_binary_op "contains" other new_type=Sql_Type.boolean
contains self other = self.make_binary_op "contains" other new_type=SQL_Type.boolean
## PRIVATE
as_internal : IR.Internal_Column
as_internal self = IR.Internal_Column_Data self.name self.sql_type self.expression
type Aggregate_Column_Builder
## UNSTABLE
Wraps a column grouped by its index.
Arguments:
- name: The name of the column.
- connection: The connection with which the column is associated.
- sql_type: The SQL type of the aggregate column.
- expression: The expressions to apply to the column.
- context: The SQl context in which the column exists.
Allows performing aggregation operations on the contained values.
# type Aggregate_Column_Builder (name : Text) (connection : Connection)
# (sql_type : Sql_Type) (expression : IR.Expression)
# (context : IR.Context)
Aggregate_Column_Builder_Data name connection sql_type expression context
## UNSTABLE
Sums the values in each group.
Arguments:
- name_suffix: a suffix that will be appended to the original column name
to generate the resulting column name.
sum : Text -> Column
sum self name_suffix='_sum' =
make_aggregate self "SUM" name_suffix
## UNSTABLE
Computes the maximum element of each group.
Arguments:
- name_suffix: a suffix that will be appended to the original column name
to generate the resulting column name.
max : Text -> Column
max self name_suffix='_max' =
make_aggregate self "MAX" name_suffix
## UNSTABLE
Computes the minimum element of each group.
Arguments:
- name_suffix: a suffix that will be appended to the original column name
to generate the resulting column name.
min : Text -> Column
min self name_suffix='_min' =
make_aggregate self "MIN" name_suffix
## UNSTABLE
Computes the number of non-missing elements in each group.
Arguments:
- name_suffix: a suffix that will be appended to the original column name
to generate the resulting column name.
count : Text -> Column
count self name_suffix='_count' =
make_aggregate self "COUNT" name_suffix new_type=Sql_Type.integer
## UNSTABLE
Computes the mean of non-missing elements in each group.
Arguments:
- name_suffix: a suffix that will be appended to the original column name
to generate the resulting column name.
mean : Text -> Column
mean self name_suffix='_mean' =
make_aggregate self "AVG" name_suffix
## PRIVATE
A helper that returns the underlying column from before grouping.
ungrouped : Column
ungrouped self =
new_ctx = self.context.set_groups []
Column_Data self.name self.connection self.sql_type self.expression new_ctx
## PRIVATE
A helper method for creating an aggregated column by applying some
@ -628,7 +543,7 @@ type Aggregate_Column_Builder
- operation: The name of the aggregation operation.
- name_suffix: The suffix to apply to the name of the aggregate column.
- new_type: The SQL type of the result column.
make_aggregate : Column -> Text -> Text -> Sql_Type -> Column
make_aggregate : Column -> Text -> Text -> SQL_Type -> Column
make_aggregate column operation name_suffix="_agg" new_type=Nothing =
actual_new_type = new_type.if_nothing column.sql_type
expr = IR.Operation operation [column.expression]
@ -648,7 +563,7 @@ make_aggregate column operation name_suffix="_agg" new_type=Nothing =
- expected_type: The expected SQL type of the column.
- expr: The expression for the query.
- context: The context in which the query exists.
lift_aggregate : Text -> Connection -> Sql_Type -> IR.Expression -> IR.Context -> Column
lift_aggregate : Text -> Connection -> SQL_Type -> IR.Expression -> IR.Context -> Column
lift_aggregate new_name connection expected_type expr context =
# TODO [RW] This is a simple workaround for #1643 - we always wrap the
# aggregate into a subquery, thus making it safe to use it everywhere. A

View File

@ -2,11 +2,11 @@ from Standard.Base import all
import Standard.Base.Error.Common as Errors
from Standard.Table import Aggregate_Column
import Standard.Database.Data.Sql
import Standard.Database.Data.Internal.IR
import Standard.Database.Data.Dialect.Postgres
import Standard.Database.Data.Dialect.Redshift
import Standard.Database.Data.Dialect.SQLite
from Standard.Database.Data.SQL import Statement, SQL_Type
import Standard.Database.Internal.IR
import Standard.Database.Internal.Postgres.Postgres_Dialect
import Standard.Database.Internal.Redshift.Redshift_Dialect
import Standard.Database.Internal.SQLite.SQLite_Dialect
## PRIVATE
@ -15,21 +15,15 @@ import Standard.Database.Data.Dialect.SQLite
It encapsulates dialect-specific code generation details allowing us to
support differing SQL dialects.
type Dialect
## PRIVATE
This is a fake constructor to make the compiler accept this type
definition. It can and should be removed once interface definitions are
allowed.
Dialect_Data
## PRIVATE
Name of the dialect.
name : Text
name self = Errors.unimplemented "This is an interface only."
## PRIVATE
A function which generates SQL code from the internal representation
according to the specific dialect.
generate_sql : Query -> Sql.Statement
generate_sql : Query -> Statement
generate_sql self = Errors.unimplemented "This is an interface only."
## PRIVATE
@ -37,7 +31,7 @@ type Dialect
The provided aggregate is assumed to contain only already resolved columns.
You may need to transform it with `resolve_aggregate` first.
resolve_target_sql_type : Aggregate_Column -> Sql_Type
resolve_target_sql_type : Aggregate_Column -> SQL_Type
resolve_target_sql_type self = Errors.unimplemented "This is an interface only."
## PRIVATE
@ -52,18 +46,16 @@ type Dialect
The dialect of SQLite databases.
sqlite : Dialect
sqlite = SQLite.sqlite
sqlite = SQLite_Dialect.sqlite
## PRIVATE
The dialect of PostgreSQL databases.
postgres : Dialect
postgres = Postgres.postgres
postgres = Postgres_Dialect.postgres
## PRIVATE
The dialect of Redshift databases.
redshift : Dialect
redshift = Redshift.redshift
redshift = Redshift_Dialect.redshift
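A sketch of the relocated dialect accessors in use (assuming the `Standard.Database.Data.Dialect` import; the `query` IR value is a placeholder):

```
dialect = Dialect.postgres             # now backed by Internal.Postgres.Postgres_Dialect
compiled = dialect.generate_sql query  # returns a Statement
```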

View File

@ -1,49 +0,0 @@
from Standard.Base import all
import Standard.Database.Data.Sql
## PRIVATE
Arguments:
- make_raw_concat_expr: a function which takes two expressions - a (possibly
processed) column expression and a separator and returns a SQL expression
which concatenates the expressions with separators.
- make_contains_expr: a function which takes two expressions - a string to
search in and a substring to search for and returns an expression which is
true if the string contains the substring.
- has_quote: specifies whether it should expect an additional SQL argument
containing a quote character.
- args: the processed SQL arguments from the generator.
It takes 4 or 5 SQL arguments depending on `has_quote`.
The SQL arguments are following:
- the column expression to concatenate,
- the separator to use when joining the expressions,
- a prefix to prepend,
- a suffix to append,
- a quote character to use to escape separator if it is present inside of a
column expression; it should be provided only if `has_quote` is `True` and
must not be empty then. If the quote character occurs in the expression, it
is escaped by doubling each occurrence.
make_concat make_raw_concat_expr make_contains_expr has_quote args =
expected_args = if has_quote then 5 else 4
if args.length != expected_args then Error.throw (Illegal_State_Error_Data "Unexpected number of arguments for the concat operation.") else
expr = args.at 0
separator = args.at 1
prefix = args.at 2
suffix = args.at 3
append = Sql.code " || "
possibly_quoted = case has_quote of
True ->
quote = args.at 4
includes_separator = separator ++ Sql.code " != '' AND " ++ make_contains_expr expr separator
## We use the assumption that `has_quote` is True iff `quote` is not empty.
includes_quote = make_contains_expr expr quote
is_empty = expr ++ Sql.code " = ''"
needs_quoting = includes_separator.paren ++ Sql.code " OR " ++ includes_quote.paren ++ Sql.code " OR " ++ is_empty.paren
escaped = Sql.code "replace(" ++ expr ++ Sql.code ", " ++ quote ++ Sql.code ", " ++ quote ++ append ++ quote ++ Sql.code ")"
quoted = quote ++ append ++ escaped ++ append ++ quote
Sql.code "CASE WHEN " ++ needs_quoting ++ Sql.code " THEN " ++ quoted ++ Sql.code " ELSE " ++ expr ++ Sql.code " END"
False -> expr
transformed_expr = Sql.code "CASE WHEN " ++ expr ++ Sql.code " IS NULL THEN '' ELSE " ++ possibly_quoted.paren ++ Sql.code " END"
concatenated = make_raw_concat_expr transformed_expr separator
prefix.paren ++ append ++ concatenated ++ append ++ suffix.paren

View File

@ -19,7 +19,7 @@ empty = Builder_Data (Vector_Builder.empty)
- text: The raw SQL code.
code : Text -> Builder
code text =
vec = if text.is_empty then [] else [Sql_Code_Part text]
vec = if text.is_empty then [] else [SQL_Code_Part text]
Builder_Data (Vector_Builder.from_vector vec)
## UNSTABLE
@ -30,8 +30,8 @@ code text =
- sql_type: The expected SQL type of `object`.
- object: The object to be interpolated into the query as if it has the type
given by `sql_type`.
interpolation : Sql_Type -> Any -> Builder
interpolation sql_type object = Builder_Data (Vector_Builder.from_vector [Sql_Interpolation sql_type object])
interpolation : SQL_Type -> Any -> Builder
interpolation sql_type object = Builder_Data (Vector_Builder.from_vector [SQL_Interpolation sql_type object])
## UNSTABLE
@ -50,66 +50,66 @@ join separator statements =
(1.up_to statements.length . fold (statements.at 0) acc-> i-> acc ++ sep ++ statements.at i)
## Represents an internal SQL data-type.
type Sql_Type
type SQL_Type
## Represents an internal SQL data-type.
Arguments:
- typeid: a numerical type id, as defined in `java.sql.Types`.
- name: a database-specific type name, used for pretty printing.
Sql_Type_Data typeid name
SQL_Type_Data typeid name
== self that = case that of
Sql_Type_Data that_id _ ->
SQL_Type_Data that_id _ ->
self.typeid == that_id
_ -> False
## The SQL representation of `Boolean` type.
boolean : Sql_Type
boolean = Sql_Type_Data Types.BOOLEAN "BOOLEAN"
boolean : SQL_Type
boolean = SQL_Type_Data Types.BOOLEAN "BOOLEAN"
## The SQL representation of `Integer` type.
integer : Sql_Type
integer = Sql_Type_Data Types.INTEGER "INTEGER"
integer : SQL_Type
integer = SQL_Type_Data Types.INTEGER "INTEGER"
## The SQL representation of the `BIGINT` type.
bigint : Sql_Type
bigint = Sql_Type_Data Types.BIGINT "BIGINT"
bigint : SQL_Type
bigint = SQL_Type_Data Types.BIGINT "BIGINT"
## The SQL representation of the `SMALLINT` type.
smallint : Sql_Type
smallint = Sql_Type_Data Types.SMALLINT "SMALLINT"
smallint : SQL_Type
smallint = SQL_Type_Data Types.SMALLINT "SMALLINT"
## The SQL type representing decimal numbers.
decimal : Sql_Type
decimal = Sql_Type_Data Types.DECIMAL "DECIMAL"
decimal : SQL_Type
decimal = SQL_Type_Data Types.DECIMAL "DECIMAL"
## The SQL type representing decimal numbers.
real : Sql_Type
real = Sql_Type_Data Types.REAL "REAL"
real : SQL_Type
real = SQL_Type_Data Types.REAL "REAL"
## The SQL type representing double-precision floating-point numbers.
double : Sql_Type
double = Sql_Type_Data Types.DOUBLE "DOUBLE PRECISION"
double : SQL_Type
double = SQL_Type_Data Types.DOUBLE "DOUBLE PRECISION"
## The SQL type representing a general numeric type.
numeric : Sql_Type
numeric = Sql_Type_Data Types.NUMERIC "NUMERIC"
numeric : SQL_Type
numeric = SQL_Type_Data Types.NUMERIC "NUMERIC"
## The SQL type representing one of the supported textual types.
varchar : Sql_Type
varchar = Sql_Type_Data Types.VARCHAR "VARCHAR"
varchar : SQL_Type
varchar = SQL_Type_Data Types.VARCHAR "VARCHAR"
## UNSTABLE
The SQL type representing one of the supported textual types.
It seems that JDBC treats the `TEXT` and `VARCHAR` types as interchangeable.
text : Sql_Type
text = Sql_Type_Data Types.VARCHAR "VARCHAR"
text : SQL_Type
text = SQL_Type_Data Types.VARCHAR "VARCHAR"
## The SQL type representing a binary object.
blob : Sql_Type
blob = Sql_Type_Data Types.BLOB "BLOB"
blob : SQL_Type
blob = SQL_Type_Data Types.BLOB "BLOB"
## PRIVATE
@ -159,10 +159,10 @@ type Sql_Type
A fragment of a SQL query.
It can either be a Sql_Code_Part that represents raw SQL code or
Sql_Interpolation which represents an object that will be interpolated into
It can either be a SQL_Code_Part that represents raw SQL code or
SQL_Interpolation which represents an object that will be interpolated into
the query.
type Sql_Fragment
type SQL_Fragment
## UNSTABLE
@ -170,8 +170,7 @@ type Sql_Fragment
Arguments:
- code: A fragment of SQL code.
# type Sql_Code_Part (code : Text)
Sql_Code_Part code
SQL_Code_Part code:Text
## UNSTABLE
@ -182,8 +181,7 @@ type Sql_Fragment
- sql_type: The expected SQL type of `object`.
- object: A value that will be interpolated into the query, interpreted
as having the type `sql_type`.
# type Sql_Interpolation (sql_type : Sql_Type) (object : Any)
Sql_Interpolation sql_type object
SQL_Interpolation sql_type:SQL_Type object:Any
type Statement
@ -196,28 +194,27 @@ type Statement
The statement consists of SQL code with parameters and values that will be
interpolated for these parameters.
# type Statement (internal_fragments : Vector Sql_Fragment)
Statement_Data internal_fragments
Statement_Data (internal_fragments:(Vector SQL_Fragment))
## UNSTABLE
A vector of code fragments.
Consists of two types of values:
- Sql_Code_Part, representing parts of raw SQL code and
- Sql_Interpolation, representing objects that will be interpolated in
- SQL_Code_Part, representing parts of raw SQL code and
- SQL_Interpolation, representing objects that will be interpolated in
between the SQL code.
fragments : Vector Sql_Fragment
fragments : Vector SQL_Fragment
fragments self = self.internal_fragments
## UNSAFE
UNSTABLE
ADVANCED
This function returns a raw Sql string for the query, manually handling
This function returns a raw SQL string for the query, manually handling
the interpolation that is normally handled by the database engine itself.
It should be used EXTREMELY carefully - the returned Sql code should
It should be used EXTREMELY carefully - the returned SQL code should
always be read by a human to ensure that it does what is expected before
executing it in any REPLs or other database interfaces.
@ -225,9 +222,9 @@ type Statement
unsafe_to_raw_sql : Text
unsafe_to_raw_sql self =
strings = self.internal_fragments . map <| case _ of
Sql_Code_Part code -> code
SQL_Code_Part code -> code
# TODO at some point we may try more sophisticated serialization based on data type
Sql_Interpolation _ obj -> case obj of
SQL_Interpolation _ obj -> case obj of
Number -> obj.to_text
_ -> "'" + obj.to_text.replace "'" "''" + "'"
strings.join ""
@ -239,11 +236,11 @@ type Statement
# prepare : [Text, Vector Any]
prepare self =
to_code fragment = case fragment of
Sql_Code_Part code -> code
Sql_Interpolation _ _ -> "?"
SQL_Code_Part code -> code
SQL_Interpolation _ _ -> "?"
to_subst fragment = case fragment of
Sql_Code_Part _ -> []
Sql_Interpolation typ obj -> [[obj, typ]]
SQL_Code_Part _ -> []
SQL_Interpolation typ obj -> [[obj, typ]]
sql = self.fragments.map to_code . join ""
substitutions = self.fragments.flat_map to_subst
[sql, substitutions]
@ -254,8 +251,8 @@ type Statement
to_json : Json
to_json self =
jsonify fragment = case fragment of
Sql_Code_Part code -> Json.from_pairs [["sql_code", code]]
Sql_Interpolation typ obj ->
SQL_Code_Part code -> Json.from_pairs [["sql_code", code]]
SQL_Interpolation typ obj ->
inner = Json.from_pairs [["value", obj], ["expected_sql_type", typ.name]]
Json.from_pairs [["sql_interpolation", inner]]
fragments = Json.Array (self.internal_fragments.map jsonify)
@ -272,8 +269,7 @@ type Builder
It can be used to concatenate parts of SQL code in O(1) time and at the
end build the actual query in linear time.
# type Builder (fragments : Vector_Builder.Vector_Builder Sql_Fragment)
Builder_Data fragments
Builder_Data (fragments:(Vector_Builder.Vector_Builder SQL_Fragment))
## UNSTABLE
@ -329,29 +325,29 @@ type Builder
Arguments:
- fragments: The fragments to be merged together.
optimize_fragments : Vector Sql_Fragment -> Vector Sql_Fragment
optimize_fragments : Vector SQL_Fragment -> Vector SQL_Fragment
optimize_fragments fragments =
builder = Vector.new_builder
go elem =
last_part = State.get Sql_Code_Part
last_part = State.get SQL_Code_Part
case elem of
Sql_Code_Part code ->
SQL_Code_Part code ->
new_part = case last_part of
Nothing -> Sql_Code_Part code
Sql_Code_Part other -> Sql_Code_Part other+code
State.put Sql_Code_Part new_part
Sql_Interpolation _ _ ->
Nothing -> SQL_Code_Part code
SQL_Code_Part other -> SQL_Code_Part other+code
State.put SQL_Code_Part new_part
SQL_Interpolation _ _ ->
case last_part of
Nothing -> Nothing
Sql_Code_Part _ ->
SQL_Code_Part _ ->
builder.append last_part
State.put Sql_Code_Part Nothing
State.put SQL_Code_Part Nothing
builder.append elem
State.run Sql_Code_Part Nothing <|
State.run SQL_Code_Part Nothing <|
fragments.each go
last_part = State.get Sql_Code_Part
last_part = State.get SQL_Code_Part
case last_part of
Nothing -> Nothing
Sql_Code_Part _ -> builder.append last_part
SQL_Code_Part _ -> builder.append last_part
builder.to_vector
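A sketch of the renamed builder API end to end (module path as introduced by this commit; the query text is a placeholder):

```
from Standard.Database.Data.SQL import code, interpolation, SQL_Type

builder = code "SELECT * FROM t WHERE x = " ++ interpolation SQL_Type.integer 42
stmt = builder.build
stmt.prepare  # ["SELECT * FROM t WHERE x = ?", [[42, SQL_Type.integer]]]
```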

View File

@ -1,10 +1,10 @@
from Standard.Base import all
from Standard.Base.Error.Problem_Behavior import Report_Warning
import Standard.Database.Data.Internal.Helpers
import Standard.Database.Data.Internal.Aggregate_Helper
import Standard.Database.Data.Internal.IR
import Standard.Database.Data.Sql
import Standard.Database.Internal.Helpers
import Standard.Database.Internal.Aggregate_Helper
import Standard.Database.Internal.IR
from Standard.Database.Data.SQL import Statement, SQL_Type
import Standard.Table.Data.Column as Materialized_Column
import Standard.Table.Data.Table as Materialized_Table
@ -18,8 +18,8 @@ import Standard.Table.Internal.Table_Helpers
import Standard.Table.Internal.Problem_Builder
import Standard.Table.Internal.Aggregate_Column_Helper
from Standard.Database.Data.Column import Column, Aggregate_Column_Builder, Column_Data
from Standard.Database.Data.Internal.IR import Internal_Column, Internal_Column_Data
from Standard.Database.Data.Column import Column, Column_Data
from Standard.Database.Internal.IR import Internal_Column, Internal_Column_Data
from Standard.Database.Errors import Unsupported_Database_Operation_Error_Data
import Standard.Table.Data.Position
@ -38,10 +38,7 @@ type Table
- connection: The connection with which the table is associated.
- internal_columns: The internal representation of the table columns.
- context: The context associated with this table.
# type Table (name : Text) (connection : Connection)
# (internal_columns : Vector Internal_Column)
# (context : IR.Context)
Table_Data name connection internal_columns context
Table_Data name:Text connection:Connection (internal_columns:(Vector Internal_Column)) context:IR.Context
## UNSTABLE
@ -810,7 +807,7 @@ type Table
## UNSTABLE
Returns an SQL statement that will be used for materializing this table.
to_sql : Sql.Statement
to_sql : Statement
to_sql self =
cols = self.internal_columns.map (c -> [c.name, c.expression])
case cols.is_empty of
@ -1003,10 +1000,10 @@ type Integrity_Error
- connection: The connection to a database.
- table_name: The name of the table to get.
- columns: The names of the columns to get.
# make_table : Connection -> Text -> Vector [Text, Sql.Sql_Type] -> Table
# make_table : Connection -> Text -> Vector [Text, SQL_Type] -> Table
make_table : Connection -> Text -> Vector -> Table
make_table connection table_name columns =
ctx = IR.make_ctx_from table_name
ctx = IR.context_for_table table_name
cols = columns.map (p -> Internal_Column_Data p.first p.second (IR.Column table_name p.first))
Table_Data table_name connection cols ctx
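A sketch tying the renamed pieces together (assuming `table` is a database-backed `Table`):

```
stmt = table.to_sql                # a Statement, per the signature above
IO.println stmt.unsafe_to_raw_sql  # for human inspection only, as documented in SQL
```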

View File

@ -21,7 +21,7 @@ type Unsupported_Database_Operation_Error
to_display_text self =
"Unsupported database operation: " + self.message
type Sql_Error
type SQL_Error
## UNSTABLE
Indicates an error with executing a query, update or connecting to the
@ -31,7 +31,7 @@ type Sql_Error
- java_exception: The underlying exception.
- related_query (optional): A string representation of a query that this
error is related to.
Sql_Error_Data java_exception related_query=Nothing
SQL_Error_Data java_exception related_query=Nothing
## UNSTABLE
@ -50,11 +50,11 @@ type Sql_Error
## PRIVATE
Throws an error as if a SQL Exception was thrown.
throw_sql_error : Text -> Sql_Error
throw_sql_error : Text -> SQL_Error
throw_sql_error message =
Error.throw (Sql_Error_Data (SQLException.new message))
Error.throw (SQL_Error_Data (SQLException.new message))
type Sql_Timeout_Error
type SQL_Timeout_Error
## UNSTABLE
Indicates that an operation has timed out.
@ -63,7 +63,7 @@ type Sql_Timeout_Error
- java_exception: The underlying exception.
- related_query (optional): A string representation of a query that this
error is related to.
Sql_Timeout_Error_Data java_exception related_query=Nothing
SQL_Timeout_Error_Data java_exception related_query=Nothing
## UNSTABLE
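A sketch of raising the renamed error, mirroring the connection code earlier in this commit:

```
# Raises a dataflow error wrapping a fresh SQLException; related_query stays Nothing.
SQL_Error.throw_sql_error "Changing schema is not supported."
```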

View File

@ -2,8 +2,8 @@ from Standard.Base import all hiding First, Last
from Standard.Base.Data.Text.Text_Ordering import Text_Ordering_Data
from Standard.Table.Data.Aggregate_Column import all
import Standard.Database.Data.Internal.IR
from Standard.Database.Data.Sql import Sql_Type
import Standard.Database.Internal.IR
from Standard.Database.Data.SQL import SQL_Type
from Standard.Database.Errors import Unsupported_Database_Operation_Error_Data
## PRIVATE
@ -35,7 +35,7 @@ make_expression aggregate dialect =
Count_Nothing c _ -> IR.Operation "COUNT_IS_NULL" [c.expression]
Count_Not_Empty c _ -> IR.Operation "COUNT_NOT_EMPTY" [c.expression]
Count_Empty c _ -> IR.Operation "COUNT_EMPTY" [c.expression]
Percentile p c _ -> IR.Operation "PERCENTILE" [IR.Constant Sql_Type.double p, c.expression]
Percentile p c _ -> IR.Operation "PERCENTILE" [IR.Constant SQL_Type.double p, c.expression]
Mode c _ -> IR.Operation "MODE" [c.expression]
First c _ ignore_nothing order_by -> case is_non_empty_selector order_by of
False -> Error.throw (Unsupported_Database_Operation_Error_Data "`First` aggregation requires at least one `order_by` column.")
@ -59,10 +59,10 @@ make_expression aggregate dialect =
True -> IR.Operation "STDDEV_POP" [c.expression]
False -> IR.Operation "STDDEV_SAMP" [c.expression]
Concatenate c _ separator prefix suffix quote_char ->
base_args = [c.expression, IR.Constant Sql_Type.text separator, IR.Constant Sql_Type.text prefix, IR.Constant Sql_Type.text suffix]
base_args = [c.expression, IR.Constant SQL_Type.text separator, IR.Constant SQL_Type.text prefix, IR.Constant SQL_Type.text suffix]
case quote_char.is_empty of
True -> IR.Operation "CONCAT" base_args
False -> IR.Operation "CONCAT_QUOTE_IF_NEEDED" base_args+[IR.Constant Sql_Type.text quote_char]
False -> IR.Operation "CONCAT_QUOTE_IF_NEEDED" base_args+[IR.Constant SQL_Type.text quote_char]
Sum c _ -> IR.Operation "SUM" [c.expression]
Average c _ -> IR.Operation "AVG" [c.expression]
Median c _ -> IR.Operation "MEDIAN" [c.expression]
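A sketch of the IR produced by `make_expression` for a simple aggregate (`c` is a resolved column and `"total"` a placeholder output name):

```
expr = make_expression (Sum c "total") dialect
# expr is IR.Operation "SUM" [c.expression], per the mapping above
```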

View File

@ -1,9 +1,10 @@
from Standard.Base import all
import Standard.Database.Data.Sql
import Standard.Database.Data.Internal.IR
import Standard.Database.Internal.IR
from Standard.Database.Data.SQL import Builder, code
import Standard.Database.Data.SQL
from Standard.Database.Data.Sql import Sql_Type
from Standard.Database.Errors import Unsupported_Database_Operation_Error
type Internal_Dialect
@ -21,17 +22,14 @@ type Internal_Dialect
identifier name in such a way that it can be used in the query; that
usually consists of wrapping the name in quotes and escaping any quotes
within it.
# type Internal_Dialect (operation_map : Map Text (Vector Sql.Builder -> Sql.Builder))
# (identifier_wrapper : Text -> Sql.Builder)
Internal_Dialect_Data operation_map wrap_identifier
Internal_Dialect_Data (operation_map:(Map Text (Vector (Builder->Builder)))) (wrap_identifier:(Text->Builder))
## PRIVATE
Creates a copy of the dialect that supports additional operations or
overrides existing ones.
# extend_with : Vector [Text, Vector Sql.Builder -> Sql.Builder] -> Internal_Dialect
# extend_with : Vector [Text, Vector Builder -> Builder] -> Internal_Dialect
extend_with : Vector Any -> Internal_Dialect
extend_with self mappings =
new_map = mappings.fold self.operation_map (m -> el -> m.insert (el.at 0) (el.at 1))
@ -43,12 +41,12 @@ type Internal_Dialect
Arguments:
- name: The name of the binary operator.
make_binary_op : Text -> Vector Sql.Builder -> Sql.Builder
make_binary_op : Text -> Vector Builder -> Builder
make_binary_op name =
arguments ->
case arguments.length == 2 of
True ->
op = Sql.code " "+name+" "
op = code " "+name+" "
(arguments.at 0)++op++(arguments.at 1) . paren
False ->
Error.throw <| Illegal_State_Error_Data ("Invalid amount of arguments for operation " + name)
@ -59,12 +57,12 @@ make_binary_op name =
Arguments:
- name: The name of the unary operator.
make_unary_op : Text -> Vector Sql.Builder -> Sql.Builder
make_unary_op : Text -> Vector Builder -> Builder
make_unary_op name =
arguments ->
case arguments.length == 1 of
True ->
(Sql.code name+" ")++(arguments.at 0) . paren
(code name+" ")++(arguments.at 0) . paren
False ->
Error.throw <| Illegal_State_Error_Data ("Invalid amount of arguments for operation " + name)
@ -77,7 +75,7 @@ make_unary_op name =
- function: A function taking exactly one argument: the generated SQL code
for the argument of the operation, and returning the generated SQL code for
the whole operation.
lift_unary_op : Text -> (Sql.Builder -> Sql.Builder) -> [Text, (Vector Sql.Builder -> Sql.Builder)]
lift_unary_op : Text -> (Builder -> Builder) -> [Text, (Vector Builder -> Builder)]
lift_unary_op name function =
generator = arguments -> case arguments.length == 1 of
False -> Error.throw <| Illegal_State_Error_Data ("Invalid amount of arguments for operation " + name + ".")
@ -93,7 +91,7 @@ lift_unary_op name function =
- function: A function taking exactly two arguments: the generated SQL code
for the argument of the operation, and returning the generated SQL code for
the whole operation.
lift_binary_op : Text -> (Sql.Builder -> Sql.Builder -> Sql.Builder) -> [Text, (Vector Sql.Builder -> Sql.Builder)]
lift_binary_op : Text -> (Builder -> Builder -> Builder) -> [Text, (Vector Builder -> Builder)]
lift_binary_op name function =
generator = arguments -> case arguments.length == 2 of
False -> Error.throw <| Illegal_State_Error_Data ("Invalid amount of arguments for operation " + name + ".")
@ -107,12 +105,12 @@ lift_binary_op name function =
Arguments:
- name: The name of the unary operator.
make_right_unary_op : Text -> Vector Sql.Builder -> Sql.Builder
make_right_unary_op : Text -> Vector Builder -> Builder
make_right_unary_op name =
arguments ->
case arguments.length == 1 of
True ->
(arguments.at 0)++(Sql.code " "+name) . paren
(arguments.at 0)++(code " "+name) . paren
False ->
Error.throw ("Invalid amount of arguments for operation " + name)
@ -122,22 +120,22 @@ make_right_unary_op name =
Arguments:
- name: The name of the function.
make_function : Text -> Vector Sql.Builder -> Sql.Builder
make_function : Text -> Vector Builder -> Builder
make_function name =
arguments ->
(Sql.code name) ++ (Sql.join ", " arguments . paren)
(code name) ++ (SQL.join ", " arguments . paren)
## PRIVATE
A helper function to create an operation that takes no arguments.
Arguments:
- code: The code for the constant.
make_constant : Text -> Vector Sql.Builder -> Sql.Builder
make_constant code =
- sql_code: The SQL code for the constant.
make_constant : Text -> Vector Builder -> Builder
make_constant sql_code =
arguments ->
if arguments.not_empty then Error.throw <| Illegal_State_Error_Data "No arguments were expected" else
Sql.code code
code sql_code
## PRIVATE
@ -149,10 +147,10 @@ make_constant code =
This is the simplest way of escaping identifiers that should work across most
dialects.
wrap_in_quotes : Text -> Sql.Builder
wrap_in_quotes : Text -> Builder
wrap_in_quotes identifier =
escaped = identifier.replace '"' '""'
Sql.code '"'+escaped+'"'
code '"'+escaped+'"'
## PRIVATE
@ -180,12 +178,12 @@ base_dialect =
Arguments:
- dialect: The SQL dialect in which the expression is being generated.
- expr: The expression to generate SQL code for.
generate_expression : Internal_Dialect -> IR.Expression -> Sql.Builder
generate_expression : Internal_Dialect -> IR.Expression -> Builder
generate_expression dialect expr = case expr of
IR.Column origin name ->
dot = Sql.code '.'
dot = code '.'
dialect.wrap_identifier origin ++ dot ++ dialect.wrap_identifier name
IR.Constant sql_type value -> Sql.interpolation sql_type value
IR.Constant sql_type value -> SQL.interpolation sql_type value
IR.Operation kind arguments ->
op = dialect.operation_map.get_or_else kind (Error.throw <| Unsupported_Database_Operation_Error kind)
parsed_args = arguments.map (generate_expression dialect)
@ -200,10 +198,10 @@ generate_expression dialect expr = case expr of
Arguments:
- dialect: The dialect for which to add the alias.
- name: The name of the alias.
alias : Internal_Dialect -> Text -> Sql.Builder
alias : Internal_Dialect -> Text -> Builder
alias dialect name =
wrapped = dialect.wrap_identifier name
Sql.code " AS " ++ wrapped
code " AS " ++ wrapped
## PRIVATE
@ -212,7 +210,7 @@ alias dialect name =
Arguments:
- dialect: The SQL dialect for which the code is generated.
- from_spec: A description of the FROM clause.
generate_from_part : Internal_Dialect -> From_Spec -> Sql.Builder
generate_from_part : Internal_Dialect -> From_Spec -> Builder
generate_from_part dialect from_spec = case from_spec of
IR.From_Table name as_name ->
dialect.wrap_identifier name ++ alias dialect as_name
@ -224,8 +222,8 @@ generate_from_part dialect from_spec = case from_spec of
IR.Join_Left -> "LEFT JOIN"
IR.Join_Right -> "RIGHT JOIN"
IR.Join_Cross -> "CROSS JOIN"
ons = Sql.join " AND " (on.map (generate_expression dialect)) . prefix_if_present " ON "
left ++ (Sql.code " "+prefix+" ") ++ right ++ ons
ons = SQL.join " AND " (on.map (generate_expression dialect)) . prefix_if_present " ON "
left ++ (code " "+prefix+" ") ++ right ++ ons
IR.Sub_Query columns context as_name ->
sub = generate_query dialect (IR.Select columns context)
sub.paren ++ alias dialect as_name
@ -237,18 +235,18 @@ generate_from_part dialect from_spec = case from_spec of
Arguments:
- dialect: The SQL dialect for which the code is generated.
- order_descriptor: A description of the ORDER clause.
generate_order : Internal_Dialect -> Order_Descriptor -> Sql.Builder
generate_order : Internal_Dialect -> Order_Descriptor -> Builder
generate_order dialect order_descriptor =
order_suffix = case order_descriptor.direction of
Sort_Direction.Ascending -> Sql.code " ASC"
Sort_Direction.Descending -> Sql.code " DESC"
Sort_Direction.Ascending -> code " ASC"
Sort_Direction.Descending -> code " DESC"
nulls_suffix = case order_descriptor.nulls_order of
Nothing -> Sql.empty
IR.Nulls_First -> Sql.code " NULLS FIRST"
IR.Nulls_Last -> Sql.code " NULLS LAST"
Nothing -> SQL.empty
IR.Nulls_First -> code " NULLS FIRST"
IR.Nulls_Last -> code " NULLS LAST"
collation = case order_descriptor.collation of
Nothing -> Sql.empty
collation_name -> Sql.code ' COLLATE "'+collation_name+'"'
Nothing -> SQL.empty
collation_name -> code ' COLLATE "'+collation_name+'"'
base_expression = generate_expression dialect order_descriptor.expression
base_expression ++ collation ++ order_suffix ++ nulls_suffix
@ -260,19 +258,19 @@ generate_order dialect order_descriptor =
Arguments:
- dialect: The SQL dialect for which the code is being generated.
- ctx: A description of the SELECT clause.
generate_select_context : Internal_Dialect -> IR.Context -> Sql.Builder
generate_select_context : Internal_Dialect -> IR.Context -> Builder
generate_select_context dialect ctx =
gen_exprs exprs = exprs.map (generate_expression dialect)
from_part = generate_from_part dialect ctx.from_spec
where_part = (Sql.join " AND " (gen_exprs ctx.where_filters)) . prefix_if_present " WHERE "
group_part = (Sql.join ", " (gen_exprs ctx.groups)) . prefix_if_present " GROUP BY "
where_part = (SQL.join " AND " (gen_exprs ctx.where_filters)) . prefix_if_present " WHERE "
group_part = (SQL.join ", " (gen_exprs ctx.groups)) . prefix_if_present " GROUP BY "
limit_part = case ctx.limit of
Nothing -> Sql.empty
Integer -> Sql.code " LIMIT "+ctx.limit.to_text
Nothing -> SQL.empty
Integer -> code " LIMIT "+ctx.limit.to_text
orders = ctx.orders.map (generate_order dialect)
order_part = (Sql.join ", " orders) . prefix_if_present " ORDER BY "
(Sql.code " FROM ") ++ from_part ++ where_part ++ group_part ++ order_part ++ limit_part
order_part = (SQL.join ", " orders) . prefix_if_present " ORDER BY "
(code " FROM ") ++ from_part ++ where_part ++ group_part ++ order_part ++ limit_part
## PRIVATE
@ -283,13 +281,13 @@ generate_select_context dialect ctx =
- table_name: The name of the table into which the values are being inserted.
- pairs: The values to insert into the table, consisting of pairs of key, and
expression returning a value.
generate_insert_query : Internal_Dialect -> Text -> Vector Any -> Sql.Builder
generate_insert_query : Internal_Dialect -> Text -> Vector Any -> Builder
generate_insert_query dialect table_name pairs =
names = Sql.join ", " <| pairs.map (.first >> dialect.wrap_identifier)
values = Sql.join ", " <| pairs.map (.second >> generate_expression dialect)
names = SQL.join ", " <| pairs.map (.first >> dialect.wrap_identifier)
values = SQL.join ", " <| pairs.map (.second >> generate_expression dialect)
into = dialect.wrap_identifier table_name
Sql.code "INSERT INTO " ++ into ++ Sql.code " (" ++ names ++ Sql.code ") VALUES (" ++ values ++ Sql.code ")"
code "INSERT INTO " ++ into ++ code " (" ++ names ++ code ") VALUES (" ++ values ++ code ")"
## PRIVATE
@ -298,14 +296,60 @@ generate_insert_query dialect table_name pairs =
Arguments:
- dialect: The SQL dialect for which the code is being generated.
- query: An IR describing the query.
generate_query : Internal_Dialect -> IR.Query -> Sql.Builder
generate_query : Internal_Dialect -> IR.Query -> Builder
generate_query dialect query = case query of
IR.Select columns ctx ->
gen_column pair = (generate_expression dialect pair.second) ++ alias dialect pair.first
cols = Sql.join ", " (columns.map gen_column)
(Sql.code "SELECT ") ++ cols ++ generate_select_context dialect ctx
cols = SQL.join ", " (columns.map gen_column)
(code "SELECT ") ++ cols ++ generate_select_context dialect ctx
IR.Select_All ctx ->
(Sql.code "SELECT * ") ++ generate_select_context dialect ctx
(code "SELECT * ") ++ generate_select_context dialect ctx
IR.Insert table_name pairs ->
generate_insert_query dialect table_name pairs
_ -> Error.throw <| Unsupported_Database_Operation_Error "Unsupported query type."
## PRIVATE
Arguments:
- make_raw_concat_expr: a function which takes two expressions - a (possibly
processed) column expression and a separator and returns a SQL expression
which concatenates the expressions with separators.
- make_contains_expr: a function which takes two expressions - a string to
search in and a substring to search for and returns an expression which is
true if the string contains the substring.
- has_quote: specifies whether it should expect an additional SQL argument
containing a quote character.
- args: the processed SQL arguments from the generator.
It takes 4 or 5 SQL arguments depending on `has_quote`.
The SQL arguments are as follows:
- the column expression to concatenate,
- the separator to use when joining the expressions,
- a prefix to prepend,
- a suffix to append,
- a quote character used to escape the separator if it appears inside a
column expression; it should be provided only if `has_quote` is `True`,
and must be non-empty in that case. If the quote character occurs in the
expression, it is escaped by doubling each occurrence.
make_concat make_raw_concat_expr make_contains_expr has_quote args =
expected_args = if has_quote then 5 else 4
if args.length != expected_args then Error.throw (Illegal_State_Error_Data "Unexpected number of arguments for the concat operation.") else
expr = args.at 0
separator = args.at 1
prefix = args.at 2
suffix = args.at 3
append = code " || "
possibly_quoted = case has_quote of
True ->
quote = args.at 4
includes_separator = separator ++ code " != '' AND " ++ make_contains_expr expr separator
## We use the assumption that `has_quote` is True iff `quote` is not empty.
includes_quote = make_contains_expr expr quote
is_empty = expr ++ code " = ''"
needs_quoting = includes_separator.paren ++ code " OR " ++ includes_quote.paren ++ code " OR " ++ is_empty.paren
escaped = code "replace(" ++ expr ++ code ", " ++ quote ++ code ", " ++ quote ++ append ++ quote ++ code ")"
quoted = quote ++ append ++ escaped ++ append ++ quote
code "CASE WHEN " ++ needs_quoting ++ code " THEN " ++ quoted ++ code " ELSE " ++ expr ++ code " END"
False -> expr
transformed_expr = code "CASE WHEN " ++ expr ++ code " IS NULL THEN '' ELSE " ++ possibly_quoted.paren ++ code " END"
concatenated = make_raw_concat_expr transformed_expr separator
prefix.paren ++ append ++ concatenated ++ append ++ suffix.paren
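A sketch of supplying the two dialect-specific callbacks (the `string_agg`/`position` spellings are illustrative Postgres-flavoured assumptions, not part of this commit, and `expr`, `sep`, `prefix`, `suffix` stand for already-built `Builder` values):

```
make_raw_concat_expr expr separator =
    code "string_agg(" ++ expr ++ code ", " ++ separator ++ code ")"
make_contains_expr expr substring =
    code "position(" ++ substring ++ code " in " ++ expr ++ code ") > 0"

# has_quote=False, so exactly 4 arguments: expression, separator, prefix, suffix.
agg = make_concat make_raw_concat_expr make_contains_expr False [expr, sep, prefix, suffix]
```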

View File

@ -58,14 +58,14 @@ type Unsupported_Name_Error
## PRIVATE
This is used to check if the new name is safe for use in Sql queries.
This is used to check if the new name is safe for use in SQL queries.
Arguments:
- name: The name to check for safety.
In a future version we will decouple the internal Sql-safe names from the
In a future version we will decouple the internal SQL-safe names from the
external names shown to the user, but as a temporary solution we only allow
Sql-safe names for columns.
SQL-safe names for columns.
# TODO [RW] better name handling in Tables (#1513)
ensure_name_is_sane : Text -> Boolean ! Unsupported_Name_Error

View File

@ -1,4 +1,5 @@
from Standard.Base import all
from Standard.Database.Data.SQL import SQL_Type
## PRIVATE
@ -29,7 +30,7 @@ type Expression
It is usually inferred from the expression's context.
- value: the value to be interpolated; it should be a simple Number, Text
or other types that are serializable for JDBC.
Constant (sql_type : Sql.Sql_Type) (value : Any)
Constant (sql_type : SQL_Type) (value : Any)
## PRIVATE
@ -208,6 +209,18 @@ type From_Spec
the same table.
From_Table (table_name : Text) (alias : Text)
## PRIVATE
A query source that holds a raw SQL statement.
Arguments:
- raw_sql: the raw SQL code of a query used as a source for this table.
- alias: the name upon which the table can be referred to in other
parts of the query, this is especially useful for example in
self-joins, allowing to differentiate between different instances of
the same table.
From_Query (raw_sql : Text) (alias : Text)
## PRIVATE
A query source that performs a join operation on two sources.
@ -335,10 +348,22 @@ type Query
additional processing.
Arguments:
- table_name: The name of the tanle for which the context is being created.
make_ctx_from : Text -> Context
make_ctx_from table_name =
Context_Data (From_Table table_name table_name) [] [] [] [] Nothing
- table_name: The name of the table for which the context is being created.
- alias: An alias name to use for the table within the query.
context_for_table : Text -> Text -> Context
context_for_table table_name alias=table_name =
Context_Data (From_Table table_name alias) [] [] [] [] Nothing
## PRIVATE
Creates a query context that runs a raw SQL query.
Arguments:
- raw_sql: The raw SQL code of the query for which the context is being created.
- alias: An alias name to use for the table within the query.
context_for_query : Text -> Text -> Context
context_for_query raw_sql alias =
Context_Data (From_Query raw_sql alias) [] [] [] [] Nothing
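Taken together, the two constructors can be used as in this sketch (table and query names are hypothetical):

    ctx1 = IR.context_for_table "customers"                       # alias defaults to the table name
    ctx2 = IR.context_for_table "customers" "c2"                  # explicit alias, e.g. for a self-join
    ctx3 = IR.context_for_query "SELECT * FROM logs" "logs_view"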
## PRIVATE
@ -357,7 +382,7 @@ subquery_as_ctx subquery =
Arguments:
- sql_type: The SQL type of the value.
- x: The value to turn into a constant.
make_constant : Sql.Sql_Type -> Any -> Expression
make_constant : SQL_Type -> Any -> Expression
make_constant sql_type x =
Constant sql_type x
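For example (a sketch; `SQL_Type.integer` and `SQL_Type.text` are the same predefined types used elsewhere in this change):

    int_constant  = IR.make_constant SQL_Type.integer 42
    text_constant = IR.make_constant SQL_Type.text "Enso"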

View File

@ -3,10 +3,10 @@ from Standard.Base import all
import Standard.Base.Runtime.Resource
from Standard.Base.Runtime.Resource import Managed_Resource
import Standard.Database.Data.Sql
from Standard.Database.Data.Sql import Sql_Type, Sql_Type_Data, Statement, Statement_Data
from Standard.Database.Errors import Sql_Error, Sql_Error_Data, Sql_Timeout_Error, Sql_Timeout_Error_Data
import Standard.Database.Data.Internal.Base_Generator
import Standard.Database.Data.SQL
from Standard.Database.Data.SQL import SQL_Type, SQL_Type_Data, Statement, Statement_Data
from Standard.Database.Errors import SQL_Error, SQL_Error_Data, SQL_Timeout_Error, SQL_Timeout_Error_Data
import Standard.Database.Internal.Base_Generator
import Standard.Table.Data.Storage
import Standard.Database.Data.Table as Database_Table
import Standard.Table.Data.Table as Materialized_Table
@ -89,7 +89,7 @@ type JDBC_Connection
name = metadata.getColumnName ix+1
typeid = metadata.getColumnType ix+1
typename = metadata.getColumnTypeName ix+1
[name, Sql_Type_Data typeid typename]
[name, SQL_Type_Data typeid typename]
Vector.new metadata.getColumnCount resolve_column
@ -158,17 +158,17 @@ close_connection connection =
Arguments:
- action: The computation to execute. This computation may throw SQL errors.
handle_sql_errors : Any -> (Text | Nothing) -> Any ! (Sql_Error | Sql_Timeout_Error)
handle_sql_errors : Any -> (Text | Nothing) -> Any ! (SQL_Error | SQL_Timeout_Error)
handle_sql_errors ~action related_query=Nothing =
Panic.catch SQLException action caught_panic->
exc = caught_panic.payload.cause
case Java.is_instance exc SQLTimeoutException of
True -> Error.throw (Sql_Timeout_Error_Data exc related_query)
False -> Error.throw (Sql_Error_Data exc related_query)
True -> Error.throw (SQL_Timeout_Error_Data exc related_query)
False -> Error.throw (SQL_Error_Data exc related_query)
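A usage sketch (the query text and the `do_jdbc_work` action are hypothetical): any SQLException panic raised inside the action becomes a dataflow error carrying the related query.

    handle_sql_errors related_query="SELECT 1" <|
        do_jdbc_work "SELECT 1"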
## PRIVATE
Sets values inside of a prepared statement.
set_statement_values : PreparedStatement -> Vector (Pair Any Sql_Type) -> Nothing
set_statement_values : PreparedStatement -> Vector (Pair Any SQL_Type) -> Nothing
set_statement_values stmt holes =
holes.map_with_index ix-> obj->
position = ix + 1
@ -183,18 +183,18 @@ create_table_statement name table temporary =
column_types = table.columns.map col-> default_storage_type col.storage_type
column_names = table.columns.map .name
col_makers = column_names.zip column_types name-> typ->
Base_Generator.wrap_in_quotes name ++ Sql.code " " ++ Sql.code typ.name
Base_Generator.wrap_in_quotes name ++ SQL.code " " ++ SQL.code typ.name
create_prefix = if temporary then "CREATE TEMPORARY TABLE " else "CREATE TABLE "
(Sql.code create_prefix ++ Base_Generator.wrap_in_quotes name ++ Sql.code " (" ++ (Sql.join ", " col_makers) ++ Sql.code ")").build
(SQL.code create_prefix ++ Base_Generator.wrap_in_quotes name ++ SQL.code " (" ++ (SQL.join ", " col_makers) ++ SQL.code ")").build
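For a two-column in-memory table the built statement reads roughly as follows (a sketch; `people_table` is hypothetical, and it assumes `SQL_Type.text.name` renders as VARCHAR, consistent with the test fixtures later in this change):

    create_table_statement "people" people_table temporary=True
    # CREATE TEMPORARY TABLE "people" ("name" VARCHAR, "age" INTEGER)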
## PRIVATE
Returns the default database type corresponding to an in-memory storage type.
default_storage_type : Storage.Type -> Sql_Type
default_storage_type : Storage.Type -> SQL_Type
default_storage_type storage_type = case storage_type of
Storage.Text -> Sql_Type.text
Storage.Integer -> Sql_Type.integer
Storage.Decimal -> Sql_Type.double
Storage.Boolean -> Sql_Type.boolean
Storage.Text -> SQL_Type.text
Storage.Integer -> SQL_Type.integer
Storage.Decimal -> SQL_Type.double
Storage.Boolean -> SQL_Type.boolean
## Support for mixed type columns in Table upload is currently very limited,
falling back to treating everything as text.
Storage.Any -> Sql_Type.text
Storage.Any -> SQL_Type.text

View File

@ -1,6 +1,6 @@
from Standard.Base import all
from Standard.Database.Data.Sql import Sql_Type, Statement
from Standard.Database.Data.SQL import SQL_Type, Statement
import Standard.Database.Internal.JDBC_Connection
import Standard.Database.Data.Dialect
@ -9,7 +9,7 @@ from Standard.Database.Connection.Connection import Connection_Data
import Standard.Database.Data.Table as Database_Table
import Standard.Table.Data.Table as Materialized_Table
from Standard.Database.Errors import Sql_Error
from Standard.Database.Errors import SQL_Error
from Standard.Database.Internal.Result_Set import read_column
@ -56,7 +56,7 @@ type Postgres_Connection
Arguments:
- database: The name of the database to connect to.
set_database : Text -> Connection ! Sql_Error
set_database : Text -> Connection ! SQL_Error
set_database self database =
if database == self.database then self else
self.make_new database Nothing
@ -73,7 +73,7 @@ type Postgres_Connection
Arguments:
- schema: The name of the schema to connect to.
set_schema : Text -> Connection ! Sql_Error
set_schema : Text -> Connection ! SQL_Error
set_schema self schema =
if schema == self.schema then self else
self.make_new Nothing schema
@ -104,7 +104,7 @@ type Postgres_Connection
representing the query to execute.
- expected_types: an optional array of expected types of each column;
meant only for internal use.
execute_query : Text | Statement -> Vector Sql_Type -> Materialized_Table
execute_query : Text | Statement -> Vector SQL_Type -> Materialized_Table
execute_query self query expected_types=Nothing =
self.connection.execute_query query expected_types
@ -128,7 +128,7 @@ type Postgres_Connection
Arguments:
- table_name: The name of the table to fetch the column metadata for.
# fetch_columns : Text -> Vector [Text, Sql_Type]
# fetch_columns : Text -> Vector [Text, SQL_Type]
fetch_columns : Text -> Vector Any
fetch_columns self table_name =
self.connection.fetch_columns table_name

View File

@ -2,12 +2,11 @@ from Standard.Base import all hiding First, Last
import Standard.Base.Error.Common as Errors
from Standard.Table.Data.Aggregate_Column import all
from Standard.Database.Data.Sql import Sql_Type
import Standard.Database.Data.Sql
from Standard.Database.Data.SQL import SQL_Type, Statement, code
import Standard.Database.Data.SQL
import Standard.Database.Data.Dialect
import Standard.Database.Data.Dialect.Helpers
import Standard.Database.Data.Internal.Base_Generator
import Standard.Database.Data.Internal.IR
import Standard.Database.Internal.Base_Generator
import Standard.Database.Internal.IR
from Standard.Database.Errors import Unsupported_Database_Operation_Error
## PRIVATE
@ -17,7 +16,6 @@ postgres : Dialect
postgres =
Postgres_Dialect_Data make_internal_generator_dialect
## PRIVATE
The dialect of PostgreSQL databases.
@ -35,7 +33,7 @@ type Postgres_Dialect
## PRIVATE
A function which generates SQL code from the internal representation
according to the specific dialect.
generate_sql : Query -> Sql.Statement
generate_sql : Query -> Statement
generate_sql self query =
Base_Generator.generate_query self.internal_generator_dialect query . build
@ -44,7 +42,7 @@ type Postgres_Dialect
The provided aggregate is assumed to contain only already resolved columns.
You may need to transform it with `resolve_aggregate` first.
resolve_target_sql_type : Aggregate_Column -> Sql_Type
resolve_target_sql_type : Aggregate_Column -> SQL_Type
resolve_target_sql_type self aggregate = resolve_target_sql_type aggregate
## PRIVATE
@ -73,13 +71,13 @@ make_internal_generator_dialect =
You may need to transform it with `resolve_aggregate` first.
resolve_target_sql_type aggregate = case aggregate of
Group_By c _ -> c.sql_type
Count _ -> Sql_Type.bigint
Count_Distinct _ _ _ -> Sql_Type.bigint
Count_Not_Nothing _ _ -> Sql_Type.bigint
Count_Nothing _ _ -> Sql_Type.bigint
Count_Not_Empty _ _ -> Sql_Type.bigint
Count_Empty _ _ -> Sql_Type.bigint
Percentile _ _ _ -> Sql_Type.double
Count _ -> SQL_Type.bigint
Count_Distinct _ _ _ -> SQL_Type.bigint
Count_Not_Nothing _ _ -> SQL_Type.bigint
Count_Nothing _ _ -> SQL_Type.bigint
Count_Not_Empty _ _ -> SQL_Type.bigint
Count_Empty _ _ -> SQL_Type.bigint
Percentile _ _ _ -> SQL_Type.double
Mode c _ -> c.sql_type
First c _ _ _ -> c.sql_type
Last c _ _ _ -> c.sql_type
@ -87,56 +85,56 @@ resolve_target_sql_type aggregate = case aggregate of
Minimum c _ -> c.sql_type
Shortest c _ -> c.sql_type
Longest c _ -> c.sql_type
Standard_Deviation _ _ _ -> Sql_Type.double
Concatenate _ _ _ _ _ _ -> Sql_Type.text
Standard_Deviation _ _ _ -> SQL_Type.double
Concatenate _ _ _ _ _ _ -> SQL_Type.text
Sum c _ ->
if (c.sql_type == Sql_Type.integer) || (c.sql_type == Sql_Type.smallint) then Sql_Type.bigint else
if c.sql_type == Sql_Type.bigint then Sql_Type.numeric else
if (c.sql_type == SQL_Type.integer) || (c.sql_type == SQL_Type.smallint) then SQL_Type.bigint else
if c.sql_type == SQL_Type.bigint then SQL_Type.numeric else
c.sql_type
Average c _ ->
if c.sql_type.is_definitely_integer then Sql_Type.numeric else
if c.sql_type.is_definitely_double then Sql_Type.double else
if c.sql_type.is_definitely_integer then SQL_Type.numeric else
if c.sql_type.is_definitely_double then SQL_Type.double else
c.sql_type
Median _ _ -> Sql_Type.double
Median _ _ -> SQL_Type.double
## PRIVATE
agg_count_is_null = Base_Generator.lift_unary_op "COUNT_IS_NULL" arg->
Sql.code "COUNT(CASE WHEN " ++ arg.paren ++ Sql.code " IS NULL THEN 1 END)"
code "COUNT(CASE WHEN " ++ arg.paren ++ code " IS NULL THEN 1 END)"
## PRIVATE
agg_count_empty = Base_Generator.lift_unary_op "COUNT_EMPTY" arg->
Sql.code "COUNT(CASE WHEN (" ++ arg.paren ++ Sql.code " IS NULL) OR (" ++ arg.paren ++ Sql.code " = '') THEN 1 END)"
code "COUNT(CASE WHEN (" ++ arg.paren ++ code " IS NULL) OR (" ++ arg.paren ++ code " = '') THEN 1 END)"
## PRIVATE
agg_count_not_empty = Base_Generator.lift_unary_op "COUNT_NOT_EMPTY" arg->
Sql.code "COUNT(CASE WHEN (" ++ arg.paren ++ Sql.code " IS NOT NULL) AND (" ++ arg.paren ++ Sql.code " != '') THEN 1 END)"
code "COUNT(CASE WHEN (" ++ arg.paren ++ code " IS NOT NULL) AND (" ++ arg.paren ++ code " != '') THEN 1 END)"
## PRIVATE
agg_median = Base_Generator.lift_unary_op "MEDIAN" arg->
median = Sql.code "percentile_cont(0.5) WITHIN GROUP (ORDER BY " ++ arg ++ Sql.code ")"
median = code "percentile_cont(0.5) WITHIN GROUP (ORDER BY " ++ arg ++ code ")"
## TODO Technically, this check may not be necessary if the input column has
type INTEGER, because it is impossible to represent a NaN in that type.
However, currently the column type inference is not tested well enough to
rely on this, so we keep a uniform approach regardless of type. This
could be revisited when further work on column types takes place.
See issue: https://www.pivotaltracker.com/story/show/180854759
has_nan = Sql.code "bool_or(" ++ arg ++ Sql.code " = double precision 'NaN')"
Sql.code "CASE WHEN " ++ has_nan ++ Sql.code " THEN 'NaN' ELSE " ++ median ++ Sql.code " END"
has_nan = code "bool_or(" ++ arg ++ code " = double precision 'NaN')"
code "CASE WHEN " ++ has_nan ++ code " THEN 'NaN' ELSE " ++ median ++ code " END"
## PRIVATE
agg_mode = Base_Generator.lift_unary_op "MODE" arg->
Sql.code "mode() WITHIN GROUP (ORDER BY " ++ arg ++ Sql.code ")"
code "mode() WITHIN GROUP (ORDER BY " ++ arg ++ code ")"
agg_percentile = Base_Generator.lift_binary_op "PERCENTILE" p-> expr->
percentile = Sql.code "percentile_cont(" ++ p ++ Sql.code ") WITHIN GROUP (ORDER BY " ++ expr ++ Sql.code ")"
percentile = code "percentile_cont(" ++ p ++ code ") WITHIN GROUP (ORDER BY " ++ expr ++ code ")"
## TODO Technically, this check may not be necessary if the input column has
type INTEGER, because it is impossible to represent a NaN in that type.
However, currently the column type inference is not tested well enough to
rely on this, so we keep a uniform approach regardless of type. This
could be revisited when further work on column types takes place.
See issue: https://www.pivotaltracker.com/story/show/180854759
has_nan = Sql.code "bool_or(" ++ expr ++ Sql.code " = double precision 'NaN')"
Sql.code "CASE WHEN " ++ has_nan ++ Sql.code " THEN 'NaN' ELSE " ++ percentile ++ Sql.code " END"
has_nan = code "bool_or(" ++ expr ++ code " = double precision 'NaN')"
code "CASE WHEN " ++ has_nan ++ code " THEN 'NaN' ELSE " ++ percentile ++ code " END"
## PRIVATE
These are not written in the most efficient way, but in a way that makes them
@ -154,32 +152,32 @@ make_first_aggregator reverse ignore_null args =
result_expr = args.head
order_bys = args.tail
filter_clause = if ignore_null.not then Sql.code "" else
Sql.code " FILTER (WHERE " ++ result_expr.paren ++ Sql.code " IS NOT NULL)"
filter_clause = if ignore_null.not then code "" else
code " FILTER (WHERE " ++ result_expr.paren ++ code " IS NOT NULL)"
order_clause =
Sql.code " ORDER BY " ++ Sql.join "," order_bys
code " ORDER BY " ++ SQL.join "," order_bys
index_expr = case reverse of
True -> if ignore_null.not then Sql.code "COUNT(*)" else
Sql.code "COUNT(" ++ result_expr ++ Sql.code ")"
False -> Sql.code "1"
True -> if ignore_null.not then code "COUNT(*)" else
code "COUNT(" ++ result_expr ++ code ")"
False -> code "1"
Sql.code "(array_agg(" ++ result_expr.paren ++ order_clause ++ Sql.code ")" ++ filter_clause ++ Sql.code ")[" ++ index_expr ++ Sql.code "]"
code "(array_agg(" ++ result_expr.paren ++ order_clause ++ code ")" ++ filter_clause ++ code ")[" ++ index_expr ++ code "]"
agg_shortest = Base_Generator.lift_unary_op "SHORTEST" arg->
order_clause =
Sql.code " ORDER BY char_length(" ++ arg ++ Sql.code ") ASC NULLS LAST"
Sql.code "(array_agg(" ++ arg.paren ++ order_clause ++ Sql.code "))[1]"
code " ORDER BY char_length(" ++ arg ++ code ") ASC NULLS LAST"
code "(array_agg(" ++ arg.paren ++ order_clause ++ code "))[1]"
agg_longest = Base_Generator.lift_unary_op "LONGEST" arg->
order_clause =
Sql.code " ORDER BY char_length(" ++ arg ++ Sql.code ") DESC NULLS LAST"
Sql.code "(array_agg(" ++ arg.paren ++ order_clause ++ Sql.code "))[1]"
code " ORDER BY char_length(" ++ arg ++ code ") DESC NULLS LAST"
code "(array_agg(" ++ arg.paren ++ order_clause ++ code "))[1]"
## PRIVATE
concat_ops =
make_raw_concat_expr expr separator =
Sql.code "string_agg(" ++ expr ++ Sql.code ", " ++ separator ++ Sql.code ")"
concat = Helpers.make_concat make_raw_concat_expr make_contains_expr
code "string_agg(" ++ expr ++ code ", " ++ separator ++ code ")"
concat = Base_Generator.make_concat make_raw_concat_expr make_contains_expr
[["CONCAT", concat (has_quote=False)], ["CONCAT_QUOTE_IF_NEEDED", concat (has_quote=True)]]
@ -188,34 +186,34 @@ agg_count_distinct args = if args.is_empty then (Error.throw (Illegal_Argument_E
case args.length == 1 of
True ->
## A single null value will be skipped.
Sql.code "COUNT(DISTINCT " ++ args.first ++ Sql.code ")"
code "COUNT(DISTINCT " ++ args.first ++ code ")"
False ->
## A tuple of nulls is not a null, so it will not be skipped - but
we want to ignore all-null columns. So we manually filter them
out.
count = Sql.code "COUNT(DISTINCT (" ++ Sql.join ", " args ++ Sql.code "))"
are_nulls = args.map arg-> arg.paren ++ Sql.code " IS NULL"
all_nulls_filter = Sql.code " FILTER (WHERE NOT (" ++ Sql.join " AND " are_nulls ++ Sql.code "))"
count = code "COUNT(DISTINCT (" ++ SQL.join ", " args ++ code "))"
are_nulls = args.map arg-> arg.paren ++ code " IS NULL"
all_nulls_filter = code " FILTER (WHERE NOT (" ++ SQL.join " AND " are_nulls ++ code "))"
(count ++ all_nulls_filter).paren
## PRIVATE
agg_count_distinct_include_null args =
## If we always count as tuples, then even null fields are counted.
Sql.code "COUNT(DISTINCT (" ++ Sql.join ", " args ++ Sql.code ", 0))"
code "COUNT(DISTINCT (" ++ SQL.join ", " args ++ code ", 0))"
## PRIVATE
starts_with = Base_Generator.lift_binary_op "starts_with" str-> sub->
res = str ++ (Sql.code " LIKE CONCAT(") ++ sub ++ (Sql.code ", '%')")
res = str ++ (code " LIKE CONCAT(") ++ sub ++ (code ", '%')")
res.paren
## PRIVATE
ends_with = Base_Generator.lift_binary_op "ends_with" str-> sub->
res = str ++ (Sql.code " LIKE CONCAT('%', ") ++ sub ++ (Sql.code ")")
res = str ++ (code " LIKE CONCAT('%', ") ++ sub ++ (code ")")
res.paren
## PRIVATE
make_contains_expr expr substring =
Sql.code "position(" ++ substring ++ Sql.code " in " ++ expr ++ Sql.code ") > 0"
code "position(" ++ substring ++ code " in " ++ expr ++ code ") > 0"
## PRIVATE
contains = Base_Generator.lift_binary_op "contains" make_contains_expr

View File

@ -2,17 +2,17 @@ from Standard.Base import all
from Standard.Table import Aggregate_Column
import Standard.Database.Data.Sql
from Standard.Database.Data.SQL import SQL_Type, Statement
import Standard.Database.Data.Dialect
import Standard.Database.Data.Dialect.Postgres
import Standard.Database.Data.Internal.Base_Generator
import Standard.Database.Internal.Postgres.Postgres_Dialect
import Standard.Database.Internal.Base_Generator
## PRIVATE
The dialect for Redshift connections.
redshift : Dialect
redshift =
Redshift_Dialect_Data Postgres.make_internal_generator_dialect
Redshift_Dialect_Data Postgres_Dialect.make_internal_generator_dialect
## PRIVATE
@ -31,7 +31,7 @@ type Redshift_Dialect
## PRIVATE
A function which generates SQL code from the internal representation
according to the specific dialect.
generate_sql : Query -> Sql.Statement
generate_sql : Query -> Statement
generate_sql self query =
Base_Generator.generate_query self.internal_generator_dialect query . build
@ -40,9 +40,9 @@ type Redshift_Dialect
The provided aggregate is assumed to contain only already resolved columns.
You may need to transform it with `resolve_aggregate` first.
resolve_target_sql_type : Aggregate_Column -> Sql_Type
resolve_target_sql_type : Aggregate_Column -> SQL_Type
resolve_target_sql_type self aggregate =
Postgres.resolve_target_sql_type aggregate
Postgres_Dialect.resolve_target_sql_type aggregate
## PRIVATE
Prepares an ordering descriptor.
@ -51,4 +51,4 @@ type Redshift_Dialect
settings are supported by the given database backend.
prepare_order_descriptor : IR.Internal_Column -> Sort_Direction -> Text_Ordering -> IR.Order_Descriptor
prepare_order_descriptor self internal_column sort_direction text_ordering =
Postgres.make_order_descriptor internal_column sort_direction text_ordering
Postgres_Dialect.make_order_descriptor internal_column sort_direction text_ordering

View File

@ -2,7 +2,7 @@ from Standard.Base import all
import Standard.Table.Data.Table as Materialized_Table
import Standard.Table.Internal.Java_Exports
from Standard.Database.Data.Sql import Sql_Type, Sql_Type_Data
from Standard.Database.Data.SQL import SQL_Type, SQL_Type_Data
polyglot java import java.sql.ResultSet
@ -32,7 +32,7 @@ result_set_to_table result_set expected_types=Nothing =
Vector.new ncols ix->
typeid = metadata.getColumnType ix+1
name = metadata.getColumnTypeName ix+1
Sql_Type_Data typeid name
SQL_Type_Data typeid name
column_builders = column_types.map typ->
create_builder typ
go has_next = if has_next.not then Nothing else
@ -51,7 +51,7 @@ result_set_to_table result_set expected_types=Nothing =
Arguments:
- sql_type: The SQL type of the column to create a builder for.
create_builder : Sql_Type -> Builder
create_builder : SQL_Type -> Builder
create_builder sql_type =
initial_size = 10
if sql_type.is_definitely_boolean then Builder_Boolean (Java_Exports.make_bool_builder) else

View File

@ -1,6 +1,6 @@
from Standard.Base import all
from Standard.Database.Data.Sql import Sql_Type, Statement
from Standard.Database.Data.SQL import SQL_Type, Statement
import Standard.Database.Internal.JDBC_Connection
import Standard.Database.Data.Dialect
@ -9,7 +9,7 @@ from Standard.Database.Connection.Connection import Connection_Data
import Standard.Database.Data.Table as Database_Table
import Standard.Table.Data.Table as Materialized_Table
from Standard.Database.Errors import Sql_Error
from Standard.Database.Errors import SQL_Error
type SQLite_Connection
## PRIVATE
@ -49,10 +49,10 @@ type SQLite_Connection
Arguments:
- database: The name of the database to connect to.
set_database : Text -> Connection ! Sql_Error
set_database : Text -> Connection ! SQL_Error
set_database self database =
if database == self.database then self else
Sql_Error.throw_sql_error "Changing database is not supported."
SQL_Error.throw_sql_error "Changing database is not supported."
## Returns the list of schemas for the connection within the current database (or catalog).
schemas : [Text]
@ -66,10 +66,10 @@ type SQLite_Connection
Arguments:
- schema: The name of the schema to connect to.
set_schema : Text -> Connection ! Sql_Error
set_schema : Text -> Connection ! SQL_Error
set_schema self schema =
if schema == self.schema then self else
Sql_Error.throw_sql_error "Changing schema is not supported."
SQL_Error.throw_sql_error "Changing schema is not supported."
## Gets a list of the table types
table_types : [Text]
@ -97,7 +97,7 @@ type SQLite_Connection
representing the query to execute.
- expected_types: an optional array of expected types of each column;
meant only for internal use.
execute_query : Text | Statement -> Vector Sql_Type -> Materialized_Table
execute_query : Text | Statement -> Vector SQL_Type -> Materialized_Table
execute_query self query expected_types=Nothing =
self.connection.execute_query query expected_types
@ -121,7 +121,7 @@ type SQLite_Connection
Arguments:
- table_name: The name of the table to fetch the column metadata for.
# fetch_columns : Text -> Vector [Text, Sql_Type]
# fetch_columns : Text -> Vector [Text, SQL_Type]
fetch_columns : Text -> Vector Any
fetch_columns self table_name =
self.connection.fetch_columns table_name

View File

@ -1,12 +1,11 @@
from Standard.Base import all hiding First, Last
from Standard.Table.Data.Aggregate_Column import all
from Standard.Database.Data.Sql import Sql_Type
import Standard.Database.Data.Sql
import Standard.Database.Data.SQL
from Standard.Database.Data.SQL import SQL_Type, Statement, code
import Standard.Database.Data.Dialect
import Standard.Database.Data.Dialect.Helpers
import Standard.Database.Data.Internal.Base_Generator
import Standard.Database.Data.Internal.IR
import Standard.Database.Internal.Base_Generator
import Standard.Database.Internal.IR
from Standard.Database.Errors import Unsupported_Database_Operation_Error_Data
## PRIVATE
@ -33,7 +32,7 @@ type SQLite_Dialect
## PRIVATE
A function which generates SQL code from the internal representation
according to the specific dialect.
generate_sql : Query -> Sql.Statement
generate_sql : Query -> Statement
generate_sql self query =
Base_Generator.generate_query self.internal_generator_dialect query . build
@ -42,7 +41,7 @@ type SQLite_Dialect
The provided aggregate is assumed to contain only already resolved columns.
You may need to transform it with `resolve_aggregate` first.
resolve_target_sql_type : Aggregate_Column -> Sql_Type
resolve_target_sql_type : Aggregate_Column -> SQL_Type
resolve_target_sql_type self aggregate = resolve_target_sql_type aggregate
## PRIVATE
@ -80,14 +79,14 @@ make_internal_generator_dialect =
You may need to transform it with `resolve_aggregate` first.
resolve_target_sql_type aggregate = case aggregate of
Group_By c _ -> c.sql_type
Count _ -> Sql_Type.integer
Count _ -> SQL_Type.integer
Count_Distinct columns _ _ ->
if columns.length == 1 then Sql_Type.integer else
if columns.length == 1 then SQL_Type.integer else
unsupported "Count_Distinct on multiple columns"
Count_Not_Nothing _ _ -> Sql_Type.integer
Count_Nothing _ _ -> Sql_Type.integer
Count_Not_Empty _ _ -> Sql_Type.integer
Count_Empty _ _ -> Sql_Type.integer
Count_Not_Nothing _ _ -> SQL_Type.integer
Count_Nothing _ _ -> SQL_Type.integer
Count_Not_Empty _ _ -> SQL_Type.integer
Count_Empty _ _ -> SQL_Type.integer
Percentile _ _ _ -> unsupported "Percentile"
Mode _ _ -> unsupported "Mode"
First _ _ _ _ -> unsupported "First"
@ -96,10 +95,10 @@ resolve_target_sql_type aggregate = case aggregate of
Minimum c _ -> c.sql_type
Shortest _ _ -> unsupported "Shortest"
Longest _ _ -> unsupported "Longest"
Standard_Deviation _ _ _ -> Sql_Type.real
Concatenate _ _ _ _ _ _ -> Sql_Type.text
Standard_Deviation _ _ _ -> SQL_Type.real
Concatenate _ _ _ _ _ _ -> SQL_Type.text
Sum c _ -> c.sql_type
Average _ _ -> Sql_Type.real
Average _ _ -> SQL_Type.real
Median _ _ -> unsupported "Median"
## PRIVATE
@ -108,31 +107,31 @@ unsupported name =
## PRIVATE
agg_count_is_null = Base_Generator.lift_unary_op "COUNT_IS_NULL" arg->
Sql.code "COALESCE(SUM(" ++ arg.paren ++ Sql.code " IS NULL), 0)"
code "COALESCE(SUM(" ++ arg.paren ++ code " IS NULL), 0)"
## PRIVATE
agg_count_empty = Base_Generator.lift_unary_op "COUNT_EMPTY" arg->
Sql.code "COALESCE(SUM((" ++ arg.paren ++ Sql.code " IS NULL) OR (" ++ arg.paren ++ Sql.code " == '')), 0)"
code "COALESCE(SUM((" ++ arg.paren ++ code " IS NULL) OR (" ++ arg.paren ++ code " == '')), 0)"
## PRIVATE
agg_count_not_empty = Base_Generator.lift_unary_op "COUNT_NOT_EMPTY" arg->
Sql.code "COALESCE(SUM((" ++ arg.paren ++ Sql.code " IS NOT NULL) AND (" ++ arg.paren ++ Sql.code " != '')), 0)"
code "COALESCE(SUM((" ++ arg.paren ++ code " IS NOT NULL) AND (" ++ arg.paren ++ code " != '')), 0)"
## PRIVATE
agg_stddev_pop = Base_Generator.lift_unary_op "STDDEV_POP" arg->
sum_of_squares = Sql.code "SUM(" ++ arg.paren ++ Sql.code "*" ++ arg.paren ++ Sql.code ")"
square_of_sums = Sql.code "SUM(" ++ arg ++ Sql.code ") * SUM(" ++ arg ++ Sql.code ")"
n = Sql.code "CAST(COUNT(" ++ arg ++ Sql.code ") AS REAL)"
var = Sql.code "(" ++ sum_of_squares ++ Sql.code " - (" ++ square_of_sums ++ Sql.code " / " ++ n ++ Sql.code ")) / " ++ n
Sql.code "SQRT(" ++ var ++ Sql.code ")"
sum_of_squares = code "SUM(" ++ arg.paren ++ code "*" ++ arg.paren ++ code ")"
square_of_sums = code "SUM(" ++ arg ++ code ") * SUM(" ++ arg ++ code ")"
n = code "CAST(COUNT(" ++ arg ++ code ") AS REAL)"
var = code "(" ++ sum_of_squares ++ code " - (" ++ square_of_sums ++ code " / " ++ n ++ code ")) / " ++ n
code "SQRT(" ++ var ++ code ")"
## PRIVATE
agg_stddev_samp = Base_Generator.lift_unary_op "STDDEV_SAMP" arg->
sum_of_squares = Sql.code "SUM(" ++ arg.paren ++ Sql.code "*" ++ arg.paren ++ Sql.code ")"
square_of_sums = Sql.code "SUM(" ++ arg ++ Sql.code ") * SUM(" ++ arg ++ Sql.code ")"
n = Sql.code "CAST(COUNT(" ++ arg ++ Sql.code ") AS REAL)"
var = Sql.code "(" ++ sum_of_squares ++ Sql.code " - (" ++ square_of_sums ++ Sql.code " / " ++ n ++ Sql.code ")) / (" ++ n ++ Sql.code " - 1)"
Sql.code "SQRT(" ++ var ++ Sql.code ")"
sum_of_squares = code "SUM(" ++ arg.paren ++ code "*" ++ arg.paren ++ code ")"
square_of_sums = code "SUM(" ++ arg ++ code ") * SUM(" ++ arg ++ code ")"
n = code "CAST(COUNT(" ++ arg ++ code ") AS REAL)"
var = code "(" ++ sum_of_squares ++ code " - (" ++ square_of_sums ++ code " / " ++ n ++ code ")) / (" ++ n ++ code " - 1)"
code "SQRT(" ++ var ++ code ")"
## PRIVATE
This is a prototype that doesn't work correctly. Left for reference for
@ -152,46 +151,46 @@ window_aggregate window_type ignore_null args =
result_expr = args.head
order_exprs = args.tail
filter_clause = if ignore_null.not then Sql.code "" else
Sql.code " FILTER (WHERE " ++ result_expr.paren ++ Sql.code " IS NOT NULL)"
filter_clause = if ignore_null.not then code "" else
code " FILTER (WHERE " ++ result_expr.paren ++ code " IS NOT NULL)"
Sql.code window_type+"(" ++ result_expr ++ Sql.code ")" ++ filter_clause ++ Sql.code " OVER (ORDER BY " ++ Sql.join "," order_exprs ++ Sql.code ")"
code window_type+"(" ++ result_expr ++ code ")" ++ filter_clause ++ code " OVER (ORDER BY " ++ SQL.join "," order_exprs ++ code ")"
## PRIVATE
concat_ops =
make_raw_concat_expr expr separator =
Sql.code "group_concat(" ++ expr ++ Sql.code ", " ++ separator ++ Sql.code ")"
concat = Helpers.make_concat make_raw_concat_expr make_contains_expr
code "group_concat(" ++ expr ++ code ", " ++ separator ++ code ")"
concat = Base_Generator.make_concat make_raw_concat_expr make_contains_expr
[["CONCAT", concat (has_quote=False)], ["CONCAT_QUOTE_IF_NEEDED", concat (has_quote=True)]]
## PRIVATE
agg_count_distinct args = case args.length == 1 of
True -> Sql.code "COUNT(DISTINCT (" ++ args.first ++ Sql.code "))"
True -> code "COUNT(DISTINCT (" ++ args.first ++ code "))"
False -> Error.throw (Illegal_Argument_Error_Data "COUNT_DISTINCT supports only single arguments in SQLite.")
## PRIVATE
agg_count_distinct_include_null args = case args.length == 1 of
True ->
arg = args.first
count = Sql.code "COUNT(DISTINCT " ++ arg ++ Sql.code ")"
all_nulls_case = Sql.code "CASE WHEN COUNT(CASE WHEN " ++ arg ++ Sql.code "IS NULL THEN 1 END) > 0 THEN 1 ELSE 0 END"
count ++ Sql.code " + " ++ all_nulls_case
count = code "COUNT(DISTINCT " ++ arg ++ code ")"
all_nulls_case = code "CASE WHEN COUNT(CASE WHEN " ++ arg ++ code " IS NULL THEN 1 END) > 0 THEN 1 ELSE 0 END"
count ++ code " + " ++ all_nulls_case
False -> Error.throw (Illegal_Argument_Error_Data "COUNT_DISTINCT supports only single arguments in SQLite.")
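For a single column "x" the include-null variant produces (a sketch):

    COUNT(DISTINCT "x") + CASE WHEN COUNT(CASE WHEN "x" IS NULL THEN 1 END) > 0 THEN 1 ELSE 0 END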
## PRIVATE
starts_with = Base_Generator.lift_binary_op "starts_with" str-> sub->
res = str ++ (Sql.code " LIKE (") ++ sub ++ (Sql.code " || '%')")
res = str ++ (code " LIKE (") ++ sub ++ (code " || '%')")
res.paren
## PRIVATE
ends_with = Base_Generator.lift_binary_op "ends_with" str-> sub->
res = str ++ (Sql.code " LIKE ('%' || ") ++ sub ++ (Sql.code ")")
res = str ++ (code " LIKE ('%' || ") ++ sub ++ (code ")")
res.paren
## PRIVATE
make_contains_expr expr substring =
Sql.code "instr(" ++ expr ++ Sql.code ", " ++ substring ++ Sql.code ") > 0"
code "instr(" ++ expr ++ code ", " ++ substring ++ code ") > 0"
## PRIVATE
contains = Base_Generator.lift_binary_op "contains" make_contains_expr

View File

@ -1,6 +1,5 @@
import Standard.Database.Data.Table
import Standard.Database.Data.Column
import Standard.Database.Connection.Connection
import Standard.Database.Connection.Database
import Standard.Database.Connection.Credentials
@ -8,21 +7,20 @@ import Standard.Database.Connection.Client_Certificate
import Standard.Database.Connection.SSL_Mode
import Standard.Database.Connection.Connection_Options
import Standard.Database.Connection.Postgres
import Standard.Database.Connection.SQLite
import Standard.Database.Connection.Redshift
import Standard.Database.Connection.Postgres_Options
import Standard.Database.Connection.SQLite_Options
import Standard.Database.Connection.Redshift_Options
export Standard.Database.Data.Table
export Standard.Database.Data.Column
export Standard.Database.Connection.SSL_Mode
from Standard.Database.Connection.Connection export Sql_Error, Sql_Timeout_Error
from Standard.Database.Connection.Credentials export Credentials, Credentials_Data
from Standard.Database.Connection.Credentials export Credentials, Username_And_Password
from Standard.Database.Connection.Client_Certificate export Client_Certificate
from Standard.Database.Connection.Connection_Options export Connection_Options
from Standard.Database.Connection.Database export connect
from Standard.Database.Connection.Postgres export Postgres, Postgres_Data
from Standard.Database.Connection.SQLite export SQLite, SQLite_Data, In_Memory
from Standard.Database.Connection.Redshift export Redshift, Redshift_Data, AWS_Profile, AWS_Key
from Standard.Database.Connection.Postgres_Options export Postgres_Options, Postgres
from Standard.Database.Connection.SQLite_Options export SQLite_Options, SQLite, In_Memory
from Standard.Database.Connection.Redshift_Options export Redshift_Options, Redshift, AWS_Profile, AWS_Key

View File

@ -1,4 +1,5 @@
from Standard.Base import all
from Standard.Database.Data.SQL import SQL_Type
import Standard.Visualization.Helpers
@ -38,7 +39,7 @@ prepare_visualization x = Helpers.recover_errors <|
Expected Enso types are only inferred for some known SQL types. For unknown
types it will return `Nothing`.
find_expected_enso_type_for_sql : Sql_Type -> Text
find_expected_enso_type_for_sql : SQL_Type -> Text
find_expected_enso_type_for_sql sql_type =
if sql_type.is_definitely_integer then "Standard.Base.Data.Numbers.Integer" else
if sql_type.is_definitely_double then "Standard.Base.Data.Numbers.Decimal" else
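For example (a sketch; only the integer and double branches are visible in this hunk):

    find_expected_enso_type_for_sql SQL_Type.integer   # "Standard.Base.Data.Numbers.Integer"
    find_expected_enso_type_for_sql SQL_Type.double    # "Standard.Base.Data.Numbers.Decimal"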

View File

@ -47,8 +47,6 @@ prepare_visualization x max_rows=1000 = Helpers.recover_errors <| case x of
Dataframe_Column.Aggregate_Column_Data _ ->
ungrouped = Dataframe_Column.Column_Data x.java_column.getColumn
prepare_visualization ungrouped.to_table max_rows
Database_Column.Aggregate_Column_Builder_Data _ _ _ _ _ ->
prepare_visualization x.ungrouped.to_table max_rows
# TODO [RW] Should we truncate Vectors?
# We also visualize Vectors and arrays

View File

@ -6,7 +6,7 @@ from Standard.Table.Data.Aggregate_Column import all
from Standard.Table.Errors import No_Input_Columns_Selected, Missing_Input_Columns_Data, No_Such_Column_Error_Data
from Standard.Database import all
from Standard.Database.Data.Sql import Sql_Type
from Standard.Database.Data.SQL import SQL_Type
import Standard.Database.Data.Dialect
from Standard.Database.Data.Table import combine_names, fresh_names
from Standard.Database.Errors import Unsupported_Database_Operation_Error_Data
@ -17,9 +17,9 @@ import Standard.Test.Problems
import project.Database.Helpers.Fake_Test_Connection
spec =
int = Sql_Type.integer
bool = Sql_Type.boolean
str = Sql_Type.varchar
int = SQL_Type.integer
bool = SQL_Type.boolean
str = SQL_Type.varchar
test_connection =
table1 = ["T1", [["A", int], ["B", str], ["C", bool]]]
table2 = ["T2", [["D", int], ["E", int], ["F", bool]]]

View File

@ -2,10 +2,10 @@ from Standard.Base import all
import Standard.Table.Data.Table as Materialized_Table
import Standard.Database.Data.Table as Database_Table
from Standard.Database.Data.SQL import Statement, SQL_Type
type Fake_Test_Connection
# type Fake_Test_Connection (tables : Map Text (Vector [Text, Sql_Type]))
# (dialect : Text)
# type Fake_Test_Connection_Data (tables : Map Text (Vector [Text, SQL_Type])) (dialect : Text)
Fake_Test_Connection_Data tables dialect
## PRIVATE
@ -19,17 +19,17 @@ type Fake_Test_Connection
close self = Nothing
## PRIVATE
explain_query_plan : Sql.Statement -> Text
explain_query_plan : Statement -> Text
explain_query_plan self _ =
Error.throw "Materialization not supported on fake connection."
## PRIVATE
execute_query : Text | Sql.Statement -> Materialized_Table
execute_query : Text | Statement -> Materialized_Table
execute_query self _ =
Error.throw "Materialization not supported on fake connection."
## PRIVATE
execute_update : Text | Sql.Statement -> Integer
execute_update : Text | Statement -> Integer
execute_update self _ =
Error.throw "Materialization not supported on fake connection."

View File

@ -7,10 +7,10 @@ from Standard.Base.System.Process.Exit_Code import Exit_Success
import Standard.Table as Materialized_Table
from Standard.Table.Data.Aggregate_Column import all hiding First
from Standard.Database import all
import Standard.Database
from Standard.Database.Errors import Sql_Error
from Standard.Database.Data.Sql import Sql_Type
from Standard.Database import all
from Standard.Database.Errors import SQL_Error
from Standard.Database.Data.SQL import SQL_Type
from Standard.Database.Internal.Postgres.Pgpass import Pgpass_Entry_Data
import Standard.Database.Internal.Postgres.Pgpass
@ -121,33 +121,33 @@ postgres_specific_spec connection db_name pending =
t = connection.access_table name
Test.specify "Concatenate, Shortest and Longest" <|
r = t.aggregate [Concatenate "txt", Shortest "txt", Longest "txt"]
r.columns.at 0 . sql_type . should_equal Sql_Type.text
r.columns.at 1 . sql_type . should_equal Sql_Type.text
r.columns.at 2 . sql_type . should_equal Sql_Type.text
r.columns.at 0 . sql_type . should_equal SQL_Type.text
r.columns.at 1 . sql_type . should_equal SQL_Type.text
r.columns.at 2 . sql_type . should_equal SQL_Type.text
Test.specify "Counts" <|
r = t.aggregate [Count, Count_Empty "txt", Count_Not_Empty "txt", Count_Distinct "i1", Count_Not_Nothing "i2", Count_Nothing "i3"]
r.column_count . should_equal 6
r.columns.each column->
column.sql_type . should_equal Sql_Type.bigint
column.sql_type . should_equal SQL_Type.bigint
Test.specify "Sum" <|
r = t.aggregate [Sum "i1", Sum "i2", Sum "i3", Sum "i4", Sum "r1", Sum "r2"]
r.columns.at 0 . sql_type . should_equal Sql_Type.bigint
r.columns.at 1 . sql_type . should_equal Sql_Type.bigint
r.columns.at 2 . sql_type . should_equal Sql_Type.numeric
r.columns.at 3 . sql_type . should_equal Sql_Type.numeric
r.columns.at 4 . sql_type . should_equal Sql_Type.real
r.columns.at 5 . sql_type . should_equal Sql_Type.double
r.columns.at 0 . sql_type . should_equal SQL_Type.bigint
r.columns.at 1 . sql_type . should_equal SQL_Type.bigint
r.columns.at 2 . sql_type . should_equal SQL_Type.numeric
r.columns.at 3 . sql_type . should_equal SQL_Type.numeric
r.columns.at 4 . sql_type . should_equal SQL_Type.real
r.columns.at 5 . sql_type . should_equal SQL_Type.double
Test.specify "Average" <|
r = t.aggregate [Average "i1", Average "i2", Average "i3", Average "i4", Average "r1", Average "r2"]
r.columns.at 0 . sql_type . should_equal Sql_Type.numeric
r.columns.at 1 . sql_type . should_equal Sql_Type.numeric
r.columns.at 2 . sql_type . should_equal Sql_Type.numeric
r.columns.at 3 . sql_type . should_equal Sql_Type.numeric
r.columns.at 4 . sql_type . should_equal Sql_Type.double
r.columns.at 5 . sql_type . should_equal Sql_Type.double
r.columns.at 0 . sql_type . should_equal SQL_Type.numeric
r.columns.at 1 . sql_type . should_equal SQL_Type.numeric
r.columns.at 2 . sql_type . should_equal SQL_Type.numeric
r.columns.at 3 . sql_type . should_equal SQL_Type.numeric
r.columns.at 4 . sql_type . should_equal SQL_Type.double
r.columns.at 5 . sql_type . should_equal SQL_Type.double
connection.execute_update 'DROP TABLE "'+name+'"'
@ -198,29 +198,29 @@ table_spec =
ssl_pending = if ca_cert_file.is_nothing then "PostgreSQL SSL test not configured." else Nothing
Test.group "[PostgreSQL] SSL connectivity tests" pending=ssl_pending <|
Test.specify "should connect without ssl parameter" <|
Database.connect (Postgres_Data db_host db_port db_name credentials=(Credentials_Data db_user db_password)) . should_succeed
Database.connect (Postgres db_host db_port db_name credentials=(Username_And_Password db_user db_password)) . should_succeed
Test.specify "should connect, requiring SSL" <|
Database.connect (Postgres_Data db_host db_port db_name credentials=(Credentials_Data db_user db_password) use_ssl=SSL_Mode.Require) . should_succeed
Database.connect (Postgres db_host db_port db_name credentials=(Username_And_Password db_user db_password) use_ssl=SSL_Mode.Require) . should_succeed
Test.specify "should connect be able to verify the certificate" <|
Database.connect (Postgres_Data db_host db_port db_name credentials=(Credentials_Data db_user db_password) use_ssl=(SSL_Mode.Verify_CA ca_cert_file)) . should_succeed
Database.connect (Postgres db_host db_port db_name credentials=(Username_And_Password db_user db_password) use_ssl=(SSL_Mode.Verify_CA ca_cert_file)) . should_succeed
## The default certificate should not accept the self-signed certificate.
ca_fail = Database.connect (Postgres_Data db_host db_port db_name credentials=(Credentials_Data db_user db_password) use_ssl=SSL_Mode.Verify_CA)
ca_fail = Database.connect (Postgres db_host db_port db_name credentials=(Username_And_Password db_user db_password) use_ssl=SSL_Mode.Verify_CA)
ca_fail.is_error . should_equal True
ca_fail.catch Sql_Error . is_a Sql_Error . should_equal True
ca_fail.catch SQL_Error . is_a SQL_Error . should_equal True
Test.specify "should connect be able to verify the host name against the certificate" <|
Database.connect (Postgres_Data db_host db_port db_name credentials=(Credentials_Data db_user db_password) use_ssl=(SSL_Mode.Full_Verification ca_cert_file)) . should_succeed
Database.connect (Postgres db_host db_port db_name credentials=(Username_And_Password db_user db_password) use_ssl=(SSL_Mode.Full_Verification ca_cert_file)) . should_succeed
alternate_host = Environment.get "ENSO_DATABASE_TEST_ALTERNATE_HOST" . if_nothing <|
if db_host == "127.0.0.1" then "localhost" else Nothing
pending_alternate = if alternate_host.is_nothing then "Alternative host name not configured." else Nothing
Test.specify "should fail to connect with alternate host name not valid in certificate" pending=pending_alternate <|
ca_fail = Database.connect (Postgres_Data alternate_host db_port db_name credentials=(Credentials_Data db_user db_password) use_ssl=(SSL_Mode.Full_Verification ca_cert_file))
ca_fail = Database.connect (Postgres alternate_host db_port db_name credentials=(Username_And_Password db_user db_password) use_ssl=(SSL_Mode.Full_Verification ca_cert_file))
ca_fail.is_error . should_equal True
ca_fail.catch Sql_Error . is_a Sql_Error . should_equal True
ca_fail.catch SQL_Error . is_a SQL_Error . should_equal True
case db_name.is_nothing of
True ->
@ -228,7 +228,7 @@ table_spec =
connection = Error.throw message
run_tests connection pending=message
False ->
connection = Database.connect (Postgres_Data (db_host_port.at 0) db_port db_name credentials=(Credentials_Data db_user db_password))
connection = Database.connect (Postgres (db_host_port.at 0) db_port db_name credentials=(Username_And_Password db_user db_password))
run_tests connection db_name
@ -280,12 +280,12 @@ pgpass_spec = Test.group "[PostgreSQL] .pgpass" <|
connection_setup_spec = Test.group "[PostgreSQL] Connection setup" <|
Test.specify "should use environment variables as host, port and database defaults and fall back to hardcoded defaults" <|
c1 = Postgres_Data "example.com" 12345 "my_db"
c2 = Postgres_Data
c1 = Postgres "example.com" 12345 "my_db"
c2 = Postgres
c3 = Test_Environment.unsafe_with_environment_override "PGHOST" "192.168.0.1" <|
Test_Environment.unsafe_with_environment_override "PGPORT" "1000" <|
Test_Environment.unsafe_with_environment_override "PGDATABASE" "ensoDB" <|
Postgres_Data
Postgres
c1.host . should_equal "example.com"
c1.port . should_equal 12345
@ -305,7 +305,7 @@ connection_setup_spec = Test.group "[PostgreSQL] Connection setup" <|
## Currently we require the port to be numeric. When we support
Unix-sockets, we may lift that restriction.
c4 = Test_Environment.unsafe_with_environment_override "PGPORT" "foobar" <|
Postgres_Data
Postgres
c4.host . should_equal "localhost"
c4.port . should_equal 5432
c4.database . should_equal ""
@ -313,12 +313,12 @@ connection_setup_spec = Test.group "[PostgreSQL] Connection setup" <|
add_ssl props = props+[Pair_Data 'sslmode' 'prefer']
Test.specify "should use the given credentials" <|
c = Postgres_Data credentials=(Credentials_Data "myuser" "mypass")
c = Postgres credentials=(Username_And_Password "myuser" "mypass")
c.jdbc_url . should_equal "jdbc:postgresql://localhost:5432"
c.jdbc_properties . should_equal <| add_ssl [Pair_Data "user" "myuser", Pair_Data "password" "mypass"]
Test.specify "should fallback to environment variables and fill-out missing information based on the PGPASS file (if available)" <|
c1 = Postgres_Data
c1 = Postgres
c1.jdbc_url . should_equal "jdbc:postgresql://localhost:5432"
c1.jdbc_properties . should_equal <| add_ssl []
@ -329,9 +329,9 @@ connection_setup_spec = Test.group "[PostgreSQL] Connection setup" <|
Test_Environment.unsafe_with_environment_override "PGUSER" "someuser" <|
c1.jdbc_properties . should_equal <| add_ssl [Pair_Data "user" "someuser", Pair_Data "password" "somepassword"]
c2 = Postgres_Data "192.168.4.0" 1234 "foo"
c3 = Postgres_Data "::1" 55999 "database_name"
c4 = Postgres_Data "::1" 55999 "otherDB"
c2 = Postgres "192.168.4.0" 1234 "foo"
c3 = Postgres "::1" 55999 "database_name"
c4 = Postgres "::1" 55999 "otherDB"
c2.jdbc_properties . should_equal <| add_ssl []
c3.jdbc_properties . should_equal <| add_ssl []
c4.jdbc_properties . should_equal <| add_ssl []

View File

@ -3,9 +3,8 @@ import Standard.Base.Runtime.Ref
import Standard.Table as Materialized_Table
from Standard.Database import all
import Standard.Database
from Standard.Database.Connection.Connection import Sql_Error
from Standard.Database import all
import Standard.Test
@ -83,7 +82,7 @@ connect_via_json_config =
db_name = uri.at 2
user = creds.get 'db_user'
Redshift_Data db_uri db_port db_name credentials=(AWS_Key user access_key secret_key)
Redshift db_uri db_port db_name credentials=(AWS_Key user access_key secret_key)
connect_via_aws_environment db_host_port =
db_host_port_split = uri_parse db_host_port
@ -98,7 +97,7 @@ connect_via_aws_environment db_host_port =
credentials = if (access_key.is_nothing || secret_key.is_nothing) then AWS_Profile db_user (Environment.get "AWS_PROFILE" . if_nothing '') else
AWS_Key db_user access_key secret_key
Redshift_Data db_uri db_port db_name credentials=credentials
Redshift db_uri db_port db_name credentials=credentials
uri_parse uri =
host_db_split = uri.split '/'

View File

@ -3,9 +3,9 @@ import Standard.Base.Runtime.Ref
import Standard.Table as Materialized_Table
from Standard.Database import all
import Standard.Database
from Standard.Database.Errors import Sql_Error_Data
from Standard.Database import all
from Standard.Database.Errors import SQL_Error_Data
import Standard.Test
@ -27,8 +27,8 @@ sqlite_specific_spec connection =
Meta.is_same_object connection (connection.set_schema Nothing) . should_be_true
Test.specify "does not allow changing schema or database" <|
connection.set_schema "foo" . should_fail_with Sql_Error_Data
connection.set_database "foo" . should_fail_with Sql_Error_Data
connection.set_schema "foo" . should_fail_with SQL_Error_Data
connection.set_database "foo" . should_fail_with SQL_Error_Data
Test.group "[SQLite] Tables and Table Types" <|
tinfo = Name_Generator.random_name "TestTable"
@ -67,11 +67,11 @@ sqlite_specific_spec connection =
Test.group "[SQLite] Error Handling" <|
Test.specify "should wrap errors" <|
connection.execute_query "foobar" . should_fail_with Sql_Error_Data
connection.execute_update "foobar" . should_fail_with Sql_Error_Data
connection.execute_query "foobar" . should_fail_with SQL_Error_Data
connection.execute_update "foobar" . should_fail_with SQL_Error_Data
action = connection.execute_query "SELECT A FROM undefined_table"
action . should_fail_with Sql_Error_Data
action . should_fail_with SQL_Error_Data
action.catch.to_text . should_equal "There was an SQL error: '[SQLITE_ERROR] SQL error or missing database (no such table: undefined_table)'. [Query was: SELECT A FROM undefined_table]"
Test.group "[SQLite] Metadata" <|
@ -133,9 +133,9 @@ spec =
enso_project.data.create_directory
file = enso_project.data / "sqlite_test.db"
file.delete_if_exists
sqlite_spec (Database.connect (SQLite_Data file)) "[SQLite] "
sqlite_spec (Database.connect (SQLite file)) "[SQLite] "
file.delete
sqlite_spec (Database.connect (SQLite_Data In_Memory)) "[SQLite Memory] "
sqlite_spec (Database.connect (SQLite In_Memory)) "[SQLite Memory] "
main = Test.Suite.run_main spec

View File

@ -7,7 +7,7 @@ import project.Helpers_Spec
import project.Histogram_Spec
import project.Id_Spec
import project.Scatter_Plot_Spec
import project.Sql_Spec
import project.SQL_Spec
import project.Table_Spec
import project.Visualization_Spec
@ -17,6 +17,6 @@ main = Test.Suite.run_main <|
Histogram_Spec.spec
Id_Spec.spec
Scatter_Plot_Spec.spec
Sql_Spec.spec
SQL_Spec.spec
Table_Spec.spec
Visualization_Spec.spec

View File

@ -3,14 +3,14 @@ from Standard.Base import all
from Standard.Database import all
import Standard.Database
import Standard.Visualization.Sql.Visualization as Visualization
import Standard.Visualization.SQL.Visualization as Visualization
import Standard.Test
visualization_spec connection =
connection.execute_update 'CREATE TABLE "T" ("A" VARCHAR, "B" INTEGER, "C" INTEGER)'
t = connection.access_table "T"
Test.group "Sql Visualization" <|
Test.group "SQL Visualization" <|
Test.specify "should provide type metadata for interpolations" <|
q = t.where ((t.at "B" == 2) && (t.at "A" == True)) . at "C"
vis = Visualization.prepare_visualization q
@ -23,7 +23,7 @@ spec =
enso_project.data.create_directory
file = enso_project.data / "sqlite_test.db"
file.delete_if_exists
connection = Database.connect (SQLite_Data file)
connection = Database.connect (SQLite file)
visualization_spec connection
connection.close
file.delete

View File

@ -3,7 +3,7 @@ from Standard.Base import all
from Standard.Table import Table, Aggregate_Column
import Standard.Table
from Standard.Database import SQLite_Data
from Standard.Database import SQLite
import Standard.Database
import Standard.Database.Data.Table as Database_Table
@ -98,7 +98,7 @@ spec =
enso_project.data.create_directory
file = enso_project.data / "sqlite_test.db"
file.delete_if_exists
connection = Database.connect (SQLite_Data file)
connection = Database.connect (SQLite file)
visualization_spec connection
connection.close
file.delete