Sort out statics in Database. (#3774)

- Moved `Standard.Database.connect` into `Standard.Database.Database.connect`, so you can now just `from Standard.Database import ...`.
- Removed all `Dubious constructor export`s.
- Switched to using `project` for internal imports.
- Moved to using `Value` for private constructors, without re-exporting them.
- Exported types, not modules, from `Standard.Database`.
- Broke up `IR` into separate files (Context, Expression, From_Spec, Internal_Column, Join_Kind, Query).
- No longer use `IR.`; instead, go via the specific types.
- Broke up `SQL` into separate files (SQL_Type and SQL_Statement).

Additionally:
- Standard.Table: Moved `storage_types` into `Storage`.
- Standard.Table: Switched to using `project` for internal imports.
- Standard.Table.Excel: Renamed modules `Range` to `Excel_Range` and `Section` to `Excel_Section`.
- `Standard.Visualisation`: Switched to using `project` for internal imports.
- `Standard.Visualisation`: Moved to using `Value` for private constructors, without re-exporting them.

# Important Notes
- Have not cleared up the `Errors` yet.
- Have not switched to type pattern matching.
Parent commit: 841b2e6e7a · This commit: 9301f2dcc5
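For a concrete picture of the headline change, here is a minimal sketch of the intended call style after this PR. `Database.connect` and the `SQLite_Options.SQLite` constructor come from the diffs below; treating `SQLite_Options` as re-exported from `Standard.Database`, and the exact argument accepted by `SQLite`, are assumptions.

```
from Standard.Base import all
import Standard.Database.Database
from Standard.Database import SQLite_Options

example_connect =
    # `connect` now lives in `Standard.Database.Database` rather than at the top level.
    Database.connect (SQLite_Options.SQLite (File.new "data.db"))
```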
File: src/Connection/Client_Certificate.enso

@@ -1,9 +1,5 @@
 from Standard.Base import all

-# TODO Dubious constructor export
-from project.Connection.Client_Certificate.Client_Certificate import all
-from project.Connection.Client_Certificate.Client_Certificate export all
-
 type Client_Certificate
     ## Creates a new Client_Certificate object.

@@ -11,7 +7,7 @@ type Client_Certificate
        - cert_file: path to the client certificate file.
        - key_file: path to the client key file.
        - key_password: password for the client key file.
-    Client_Certificate_Data cert_file:(File|Text) key_file:(File|Text) (key_password:Text='')
+    Value cert_file:(File|Text) key_file:(File|Text) (key_password:Text='')

     ## PRIVATE
        Creates the JDBC properties for the client certificate.
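The `*_Data` → `Value` constructor rename above recurs throughout this PR. As a purely illustrative spelling of the new form (note that `Value` is intended to be private, so ordinary user code would not construct certificates this way):

```
cert = Client_Certificate.Value "client.crt" "client.key" key_password="secret"
```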
File: src/Connection/Connection.enso

@@ -1,25 +1,23 @@
 from Standard.Base import all

-import Standard.Database.Internal.IR
-import Standard.Database.Data.SQL
-import Standard.Database.Data.SQL_Query
-import Standard.Database.Data.Table as Database_Table
-
-import Standard.Table.Data.Table as Materialized_Table
 from Standard.Table import Column_Selector, Column_Name_Mapping
+import Standard.Table.Data.Table.Table as Materialized_Table

-from Standard.Database.Data.SQL import SQL_Type, Statement, Statement_Data
-from Standard.Database.Internal.JDBC_Connection import create_table_statement, handle_sql_errors
-from Standard.Database.Internal.Result_Set import read_column, result_set_to_table
-from Standard.Database.Errors import SQL_Error
+import project.Data.SQL_Query.SQL_Query
+import project.Data.SQL_Statement.SQL_Statement
+import project.Data.SQL_Type.SQL_Type
+import project.Data.Table as Database_Table
+import project.Internal.IR.Context.Context
+import project.Internal.IR.Expression.Expression
+import project.Internal.IR.Query.Query
+
+from project.Internal.Result_Set import read_column, result_set_to_table
+from project.Internal.JDBC_Connection import create_table_statement, handle_sql_errors
+from project.Errors import SQL_Error

 polyglot java import java.lang.UnsupportedOperationException
 polyglot java import java.util.UUID

-# TODO Dubious constructor export
-from project.Connection.Connection.Connection import all
-from project.Connection.Connection.Connection export all
-
 type Connection
     ## PRIVATE

@@ -29,7 +27,7 @@ type Connection
        - jdbc_connection: the resource managing the underlying JDBC
          connection.
        - dialect: the dialect associated with the database we are connected to.
-    Connection_Data jdbc_connection dialect
+    Value jdbc_connection dialect

     ## Closes the connection releasing the underlying database resources
        immediately instead of waiting for them to be automatically released.

@@ -94,7 +92,7 @@ type Connection
        - schema: The schema name to search in (defaults to current schema).
        - types: The table types to search for. The list of possible values can be obtained using the `table_types` method.
        - all_fields: Return all the fields in the metadata table.
-    tables : Text -> Text -> Text -> Vector -> Boolean -> Materialized_Table.Table
+    tables : Text -> Text -> Text -> Vector -> Boolean -> Materialized_Table
     tables self name_like=Nothing database=self.database schema=self.schema types=Nothing all_fields=False =
         types_array = if types.is_nothing then Nothing else types.to_array
         name_map = Map.from_vector [["TABLE_CAT", "Database"], ["TABLE_SCHEM", "Schema"], ["TABLE_NAME", "Name"], ["TABLE_TYPE", "Type"], ["REMARKS", "Description"], ["TYPE_CAT", "Type Database"], ["TYPE_SCHEM", "Type Schema"], ["TYPE_NAME", "Type Name"]]

@@ -121,11 +119,11 @@ type Connection
             SQL_Query.Raw_SQL raw_sql ->
                 columns = self.jdbc_connection.fetch_columns raw_sql
                 name = if alias == "" then (UUID.randomUUID.to_text) else alias
-                ctx = IR.context_for_query raw_sql name
+                ctx = Context.for_query raw_sql name
                 Database_Table.make_table self name columns ctx
             SQL_Query.Table_Name name ->
-                ctx = IR.context_for_table name (if alias == "" then name else alias)
-                columns = self.jdbc_connection.fetch_columns (self.dialect.generate_sql (IR.Select_All ctx))
+                ctx = Context.for_table name (if alias == "" then name else alias)
+                columns = self.jdbc_connection.fetch_columns (self.dialect.generate_sql (Query.Select_All ctx))
                 Database_Table.make_table self name columns ctx

     ## Execute the query and load the results into memory as a Table.

@@ -134,13 +132,13 @@ type Connection
        - query: name of the table or sql statement to query.
         If supplied as `Text`, the name is checked against the `tables` list to determine if it is a table or a query.
       - limit: the maximum number of rows to return.
-    read : Text | SQL_Query -> Text -> Integer | Nothing -> Materialized_Table.Table
+    read : Text | SQL_Query -> Text -> Integer | Nothing -> Materialized_Table
     read self query limit=Nothing =
         self.query query . read max_rows=limit

     ## PRIVATE
        Internal read function for a statement with optional types.
-    read_statement : Statement -> (Nothing | Vector SQL_Type) -> Materialized_Table.Table
+    read_statement : SQL_Statement -> (Nothing | Vector SQL_Type) -> Materialized_Table
     read_statement self statement expected_types=Nothing =
         self.jdbc_connection.with_prepared_statement statement stmt->
             result_set_to_table stmt.executeQuery expected_types

@@ -152,9 +150,9 @@ type Connection
        returns 0 for other types of queries (like creating or altering tables).

        Arguments:
-       - query: either raw SQL code as Text or an instance of SQL.Statement
+       - query: either raw SQL code as Text or an instance of SQL_Statement
         representing the query to execute.
-    execute_update : Text | SQL.Statement -> Integer
+    execute_update : Text | SQL_Statement -> Integer
     execute_update self query =
         self.jdbc_connection.with_prepared_statement query stmt->
             Panic.catch UnsupportedOperationException stmt.executeLargeUpdate _->

@@ -177,15 +175,15 @@ type Connection
          usually not be visible to other connections.
        - batch_size: Specifies how many rows should be uploaded in a single
          batch.
-    upload_table : Text -> Materialized_Table.Table -> Boolean -> Integer -> Database_Table.Table
+    upload_table : Text -> Materialized_Table -> Boolean -> Integer -> Database_Table.Table
     upload_table self name table temporary=True batch_size=1000 = Panic.recover Illegal_State_Error_Data <|
         create_sql = create_table_statement name table temporary
         create_table = self.execute_update create_sql

         db_table = if create_table.is_error then create_table else self.query (SQL_Query.Table_Name name)
         if db_table.is_error.not then
-            pairs = db_table.internal_columns.map col->[col.name, IR.Constant col.sql_type Nothing]
-            insert_query = self.dialect.generate_sql <| IR.Insert name pairs
+            pairs = db_table.internal_columns.map col->[col.name, Expression.Constant col.sql_type Nothing]
+            insert_query = self.dialect.generate_sql <| Query.Insert name pairs
             insert_template = insert_query.prepare.first
             self.jdbc_connection.load_table insert_template db_table table batch_size
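A sketch of how the reworked `Connection` surface reads after these hunks, assuming an already-established `connection`; the table name and SQL text are illustrative:

```
db_table = connection.query (SQL_Query.Table_Name "orders")
preview = connection.read (SQL_Query.Raw_SQL "SELECT * FROM orders") limit=100
affected = connection.execute_update "DELETE FROM orders WHERE status = 'stale'"
```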
File: src/Connection/Connection_Options.enso

@@ -1,12 +1,9 @@
 from Standard.Base import all

-# TODO Dubious constructor export
-from project.Connection.Connection_Options.Connection_Options import all
-from project.Connection.Connection_Options.Connection_Options export all
-
 type Connection_Options
     ## Hold a set of key value pairs used to configure the connection.
-    Connection_Options_Data options:Vector=[]
+    Value options:Vector=[]

     ## Merge the base set of options with the overrides in this object.
     merge : Vector -> Vector
File: src/Connection/Credentials.enso

@@ -1,9 +1,5 @@
 from Standard.Base import all

-# TODO Dubious constructor export
-from project.Connection.Credentials.Credentials import all
-from project.Connection.Credentials.Credentials export all
-
 type Credentials
     ## Simple username and password type.
     Username_And_Password username:Text password:Text
File: src/Database.enso

@@ -1,13 +1,12 @@
 from Standard.Base import all

-from Standard.Database.Connection.Connection_Options import Connection_Options, Connection_Options_Data
+import project.Connection.Connection_Options.Connection_Options
+import project.Connection.Postgres_Options.Postgres_Options
+import project.Connection.SQLite_Options.SQLite_Options
+import project.Connection.Redshift_Options.Redshift_Options
+import project.Connection.Connection.Connection

-from Standard.Database.Connection.Postgres_Options import Postgres_Options
-from Standard.Database.Connection.SQLite_Options import SQLite_Options
-from Standard.Database.Connection.Redshift_Options import Redshift_Options
-
-from Standard.Database.Connection.Connection import Connection
-from Standard.Database.Errors import SQL_Error
+from project.Errors import SQL_Error

 ## UNSTABLE

@@ -17,5 +16,5 @@ from Standard.Database.Errors import SQL_Error
    - details: Connection_Details to use to connect.
    - options: Any overriding options to use.
 connect : (Postgres_Options|SQLite_Options|Redshift_Options) -> Connection_Options -> Connection ! SQL_Error
-connect details options=Connection_Options_Data =
+connect details options=Connection_Options.Value =
     details.connect options
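With the new default in place, `connect` reads as below (a sketch: the `Postgres` argument order is taken from the next file, while the shape of the override pairs inside `Connection_Options.Value` is an assumption):

```
plain = connect (Postgres_Options.Postgres "localhost" 5432 "mydb")
tuned = connect (Postgres_Options.Postgres "localhost" 5432 "mydb") (Connection_Options.Value [["ssl", "true"]])
```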
File: src/Connection/Postgres_Options.enso

@@ -1,24 +1,17 @@
 from Standard.Base import all

 from Standard.Base.Data.Numbers import Parse_Error_Data

-import Standard.Database.Internal.Postgres.Postgres_Connection
-import Standard.Database.Connection.Connection_Options
-
-from Standard.Database.Connection.Credentials import Credentials, Username_And_Password
-
-import Standard.Database.Connection.SSL_Mode
-from Standard.Database.Connection.SSL_Mode import all
-
-import Standard.Database.Connection.Client_Certificate
-
-import Standard.Database.Internal.Postgres.Pgpass
+import project.Connection.Client_Certificate.Client_Certificate
+import project.Connection.Connection_Options.Connection_Options
+import project.Connection.Credentials.Credentials
+import project.Connection.SSL_Mode.SSL_Mode
+import project.Internal.Postgres.Postgres_Connection
+import project.Internal.Postgres.Pgpass

 polyglot java import org.postgresql.Driver

 # TODO Dubious constructor export
-from project.Connection.Postgres_Options.Postgres_Options import all
-from project.Connection.Postgres_Options.Postgres_Options export all
+from project.Connection.Postgres_Options.Postgres_Options import Postgres
+from project.Connection.Postgres_Options.Postgres_Options export Postgres

 type Postgres_Options
     ## Connect to a PostgreSQL database.

@@ -29,9 +22,9 @@ type Postgres_Options
        - database: The database to connect to. If empty, the default database will be used.
        - schema: The schema to connect to. If empty, the default schema will be used.
        - credentials: The credentials to use for the connection (defaults to PGPass or No Authentication).
-       - use_ssl: Whether to use SSL (defaults to `Prefer`).
+       - use_ssl: Whether to use SSL (defaults to `SSL_Mode.Prefer`).
        - client_cert: The client certificate to use or `Nothing` if not needed.
-    Postgres (host:Text=default_postgres_host) (port:Integer=default_postgres_port) (database:Text=default_postgres_database) (schema:Text="") (credentials:(Credentials|Nothing)=Nothing) (use_ssl:SSL_Mode=Prefer) (client_cert:(Client_Certificate|Nothing)=Nothing)
+    Postgres (host:Text=default_postgres_host) (port:Integer=default_postgres_port) (database:Text=default_postgres_database) (schema:Text="") (credentials:(Credentials|Nothing)=Nothing) (use_ssl:SSL_Mode=SSL_Mode.Prefer) (client_cert:(Client_Certificate|Nothing)=Nothing)

     ## Build the Connection resource.

@@ -45,7 +38,7 @@ type Postgres_Options

         ## Cannot use default argument values as gets in an infinite loop if you do.
         make_new database schema =
-            Postgres self.host self.port (database.if_nothing self.database) (schema.if_nothing self.schema) self.credentials self.use_ssl self.client_cert . connect options
+            Postgres_Options.Postgres self.host self.port (database.if_nothing self.database) (schema.if_nothing self.schema) self.credentials self.use_ssl self.client_cert . connect options

         Postgres_Connection.create self.jdbc_url properties make_new

@@ -70,7 +63,7 @@ type Postgres_Options
                 Pgpass.read self.host self.port self.database username
             Pair_Data username password ->
                 [Pair_Data 'user' username, Pair_Data 'password' password]
-            Username_And_Password username password ->
+            Credentials.Username_And_Password username password ->
                 [Pair_Data 'user' username, Pair_Data 'password' password]

         ssl_properties = ssl_mode_to_jdbc_properties self.use_ssl

@@ -85,13 +78,13 @@ type Postgres_Options
    connection.
 ssl_mode_to_jdbc_properties : SSL_Mode -> [Pair Text Text]
 ssl_mode_to_jdbc_properties use_ssl = case use_ssl of
-    Disable -> []
-    Prefer -> [Pair_Data 'sslmode' 'prefer']
-    Require -> [Pair_Data 'sslmode' 'require']
-    Verify_CA cert_file ->
+    SSL_Mode.Disable -> []
+    SSL_Mode.Prefer -> [Pair_Data 'sslmode' 'prefer']
+    SSL_Mode.Require -> [Pair_Data 'sslmode' 'require']
+    SSL_Mode.Verify_CA cert_file ->
         if cert_file.is_nothing then [Pair_Data 'sslmode' 'verify-ca'] else
             [Pair_Data 'sslmode' 'verify-ca', Pair_Data 'sslrootcert' (File.new cert_file).absolute.path]
-    Full_Verification cert_file ->
+    SSL_Mode.Full_Verification cert_file ->
         if cert_file.is_nothing then [Pair_Data 'sslmode' 'verify-full'] else
             [Pair_Data 'sslmode' 'verify-full', Pair_Data 'sslrootcert' (File.new cert_file).absolute.path]
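Since the `SSL_Mode` variants are now matched fully qualified, calls map directly onto the branches above; for example (paths illustrative):

```
ssl_mode_to_jdbc_properties SSL_Mode.Prefer
# => [Pair_Data 'sslmode' 'prefer']
ssl_mode_to_jdbc_properties (SSL_Mode.Verify_CA "root.crt")
# => [Pair_Data 'sslmode' 'verify-ca', Pair_Data 'sslrootcert' <absolute path of root.crt>]
```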
File: src/Connection/Redshift_Options.enso

@@ -1,22 +1,20 @@
 from Standard.Base import all

-import Standard.Database.Data.Dialect
-import Standard.Database.Internal.JDBC_Connection
-import Standard.Database.Connection.Connection
-from Standard.Database.Connection.Credentials import Credentials, Username_And_Password
-import Standard.Database.Connection.Connection_Options
-import Standard.Database.Connection.SSL_Mode
-from Standard.Database.Connection.SSL_Mode import all
-import Standard.Database.Connection.Client_Certificate
-import Standard.Database.Internal.Postgres.Pgpass
+import project.Connection.Client_Certificate.Client_Certificate
+import project.Connection.Connection.Connection
+import project.Connection.Connection_Options.Connection_Options
+import project.Connection.Credentials.Credentials
+import project.Connection.SSL_Mode.SSL_Mode
+import project.Data.Dialect
+import project.Internal.JDBC_Connection
+import project.Internal.Postgres.Pgpass

 polyglot java import com.amazon.redshift.jdbc.Driver
 polyglot java import java.util.Properties
 polyglot java import org.enso.database.JDBCProxy

 # TODO Dubious constructor export
-from project.Connection.Redshift_Options.Redshift_Options import all
-from project.Connection.Redshift_Options.Redshift_Options export all
+from project.Connection.Redshift_Options.Redshift_Options import Redshift
+from project.Connection.Redshift_Options.Redshift_Options export Redshift

 type Redshift_Options
     ## Connect to a AWS Redshift database.

@@ -26,9 +24,9 @@ type Redshift_Options
        - port: The port of the database server (defaults to 5432).
        - schema: The schema to connect to (if not provided or empty, the default schema will be used).
        - credentials: The credentials to use for the connection (defaults to PGPass or No Authentication).
-       - use_ssl: Whether to use SSL (defaults to `Require`).
+       - use_ssl: Whether to use SSL (defaults to `SSL_Mode.Require`).
        - client_cert: The client certificate to use or `Nothing` if not needed.
-    Redshift (host:Text) (port:Integer=5439) (schema:Text='') (credentials:Credentials|AWS_Credential|Nothing=Nothing) (use_ssl:(Disable|Require|Verify_CA|Full_Verification)=Require) (client_cert:Client_Certificate|Nothing=Nothing)
+    Redshift (host:Text) (port:Integer=5439) (schema:Text='') (credentials:Credentials|AWS_Credential|Nothing=Nothing) (use_ssl:SSL_Mode=SSL_Mode.Require) (client_cert:Client_Certificate|Nothing=Nothing)

     ## Build the Connection resource.

@@ -44,14 +42,13 @@ type Redshift_Options
             java_props.setProperty pair.first pair.second

         jdbc_connection = JDBC_Connection.create self.jdbc_url properties
-        Connection.Connection_Data jdbc_connection Dialect.redshift
+        Connection.Value jdbc_connection Dialect.redshift

     ## Provides the jdbc url for the connection.
     jdbc_url : Text
     jdbc_url self =
         prefix = case self.credentials of
-            AWS_Profile _ _ -> 'jdbc:redshift:iam://'
-            AWS_Key _ _ _ -> 'jdbc:redshift:iam://'
+            _ : AWS_Credential -> 'jdbc:redshift:iam://'
             _ -> 'jdbc:redshift://'
         prefix + self.host + ':' + self.port.to_text + (if self.schema == '' then '' else '/' + self.schema)

@@ -60,11 +57,11 @@ type Redshift_Options
     jdbc_properties self =
         credentials = case self.credentials of
             Nothing -> Pgpass.read self.host self.port self.schema
-            AWS_Profile db_user profile ->
+            AWS_Credential.Profile db_user profile ->
                 [Pair_Data 'user' db_user] + (if profile == '' then [] else [Pair_Data 'profile' profile])
-            AWS_Key db_user access_key secret_access_key ->
+            AWS_Credential.Key db_user access_key secret_access_key ->
                 [Pair_Data 'user' db_user, Pair_Data 'AccessKeyID' access_key, Pair_Data 'SecretAccessKey' secret_access_key]
-            Username_And_Password username password ->
+            Credentials.Username_And_Password username password ->
                 [Pair_Data 'user' username, Pair_Data 'password' password]

     ## Disabled as Redshift SSL settings are different to PostgreSQL.

@@ -75,17 +72,13 @@ type Redshift_Options

         credentials + ssl_properties + cert_properties

-# TODO Dubious constructor export
-from project.Connection.Redshift_Options.AWS_Credential import all
-from project.Connection.Redshift_Options.AWS_Credential export all
-
 type AWS_Credential
     ## Access Redshift using IAM via an AWS profile.

        Arguments:
        - db_user: Redshift username to connect as.
        - profile: AWS profile name (if empty uses default).
-    AWS_Profile db_user:Text profile:Text=''
+    Profile db_user:Text profile:Text=''

     ## Access Redshift using IAM via an AWS access key ID and secret access key.

@@ -93,4 +86,4 @@ type AWS_Credential
        - db_user: Redshift username to connect as.
        - access_key: AWS access key ID.
        - secret_access_key: AWS secret access key.
-    AWS_Key db_user:Text access_key:Text secret_access_key:Text
+    Key db_user:Text access_key:Text secret_access_key:Text
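A hypothetical Redshift configuration using the shortened `AWS_Credential` constructors from the hunks above (host and profile names are made up):

```
iam = AWS_Credential.Profile 'analyst' 'production'
options = Redshift_Options.Redshift 'example-cluster.abc123.eu-west-1.redshift.amazonaws.com' credentials=iam
```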
File: src/Connection/SQLite_Options.enso

@@ -1,11 +1,10 @@
 from Standard.Base import all

-import Standard.Database.Internal.SQLite.SQLite_Connection
-import Standard.Database.Connection.Connection_Options
+import project.Connection.Connection_Options
+import project.Internal.SQLite.SQLite_Connection

 # TODO Dubious constructor export
-from project.Connection.SQLite_Options.SQLite_Options import all
-from project.Connection.SQLite_Options.SQLite_Options export all
+from project.Connection.SQLite_Options.SQLite_Options import SQLite
+from project.Connection.SQLite_Options.SQLite_Options export SQLite

 type SQLite_Options
     ## Connect to a SQLite DB File or InMemory DB.
File: src/Connection/SSL_Mode.enso

@@ -1,9 +1,5 @@
 from Standard.Base import all

-# TODO Dubious constructor export
-from project.Connection.SSL_Mode.SSL_Mode import all
-from project.Connection.SSL_Mode.SSL_Mode export all
-
 type SSL_Mode
     ## Do not use SSL for the connection.
     Disable
File: src/Data/Column.enso

@@ -1,22 +1,19 @@
 from Standard.Base import all

-import Standard.Database.Internal.Helpers
-import Standard.Database.Internal.IR
-from Standard.Database.Data.Table import Table, freshen_columns
 from Standard.Table import Filter_Condition

-import Standard.Table.Data.Column.Column as Materialized_Column
-import Standard.Table.Data.Sort_Column_Selector.Sort_Column_Selector
-import Standard.Table.Data.Sort_Column.Sort_Column
+from Standard.Table import Sort_Column, Sort_Column_Selector
 import Standard.Table.Data.Value_Type.Value_Type
+import Standard.Table.Data.Column.Column as Materialized_Column

-from Standard.Database.Data.SQL import SQL_Type, Statement
-from Standard.Database.Data.Table import Integrity_Error
-from Standard.Database.Errors import Unsupported_Database_Operation_Error
+import project.Data.SQL_Statement.SQL_Statement
+import project.Data.SQL_Type.SQL_Type
+import project.Internal.Helpers
+import project.Internal.IR.Context.Context
+import project.Internal.IR.Expression.Expression
+import project.Internal.IR.Internal_Column.Internal_Column

 # TODO Dubious constructor export
 from project.Data.Column.Column import all
 from project.Data.Column.Column export all
+from project.Data.Table import Table, freshen_columns

+from project.Errors import Unsupported_Database_Operation_Error

 type Column

@@ -37,7 +34,7 @@ type Column
        which they come. Combined expressions must come from the same context -
        they must both have the same filtering, grouping etc. rules applied to be
        able to be combined.
-    Column_Data name:Text connection:Connection sql_type:SQL_Type expression:IR.Expression context:IR.Context
+    Value name:Text connection:Connection sql_type:SQL_Type expression:Expression context:Context

     ## UNSTABLE

@@ -72,7 +69,7 @@ type Column
        Converts this column into a single-column table.
     to_table : Table
     to_table self =
-        Table.Table_Data self.name self.connection [self.as_internal] self.context
+        Table.Value self.name self.connection [self.as_internal] self.context

     ## UNSTABLE

@@ -81,7 +78,7 @@ type Column
        Arguments:
        - max_rows: specifies a maximum amount of rows to fetch; if not set, all
         available rows are fetched.
-    read : (Nothing | Integer) -> Materialized_Column.Column
+    read : (Nothing | Integer) -> Materialized_Column
     read self max_rows=Nothing =
         self.to_table.read max_rows . at self.name

@@ -113,7 +110,7 @@ type Column
     ## UNSTABLE

        Returns an SQL statement that will be used for materializing this column.
-    to_sql : Statement
+    to_sql : SQL_Statement
     to_sql self = self.to_table.to_sql

     ## PRIVATE

@@ -134,18 +131,18 @@ type Column
     make_binary_op self op_kind operand new_type=Nothing operand_type=Nothing =
         actual_new_type = new_type.if_nothing self.sql_type
         case operand of
-            Column_Data _ _ _ other_expr _ ->
+            Column.Value _ _ _ other_expr _ ->
                 case Helpers.check_integrity self operand of
                     False ->
                         Error.throw <| Unsupported_Database_Operation_Error "Cannot compare columns coming from different contexts. Only columns of a single table can be compared."
                     True ->
-                        new_expr = IR.Operation op_kind [self.expression, other_expr]
-                        Column_Data self.name self.connection actual_new_type new_expr self.context
+                        new_expr = Expression.Operation op_kind [self.expression, other_expr]
+                        Column.Value self.name self.connection actual_new_type new_expr self.context
             _ ->
                 actual_operand_type = operand_type.if_nothing self.sql_type
-                other = IR.make_constant actual_operand_type operand
-                new_expr = IR.Operation op_kind [self.expression, other]
-                Column_Data self.name self.connection actual_new_type new_expr self.context
+                other = Expression.Constant actual_operand_type operand
+                new_expr = Expression.Operation op_kind [self.expression, other]
+                Column.Value self.name self.connection actual_new_type new_expr self.context

     ## PRIVATE

@@ -158,8 +155,8 @@ type Column
     make_unary_op : Text -> Text -> (SQL_Type | Nothing) -> Column
     make_unary_op self op_kind new_type=Nothing =
         actual_new_type = new_type.if_nothing self.sql_type
-        new_expr = IR.Operation op_kind [self.expression]
-        Column_Data self.name self.connection actual_new_type new_expr self.context
+        new_expr = Expression.Operation op_kind [self.expression]
+        Column.Value self.name self.connection actual_new_type new_expr self.context

     ## UNSTABLE

@@ -430,7 +427,7 @@ type Column
         case is_used_in_index of
             True -> Error.throw <| Illegal_State_Error_Data "Cannot rename the column to "+new_name+", because it has an index with the same name."
             False ->
-                Column_Data new_name self.connection self.sql_type self.expression self.context
+                Column.Value new_name self.connection self.sql_type self.expression self.context

     ## UNSTABLE

@@ -521,8 +518,8 @@ type Column
     contains self other = self.make_binary_op "contains" other new_type=SQL_Type.boolean

     ## PRIVATE
-    as_internal : IR.Internal_Column
-    as_internal self = IR.Internal_Column_Data self.name self.sql_type self.expression
+    as_internal : Internal_Column
+    as_internal self = Internal_Column.Value self.name self.sql_type self.expression

     ## PRIVATE

@@ -537,7 +534,7 @@ type Column
 make_aggregate : Column -> Text -> Text -> SQL_Type -> Column
 make_aggregate column operation name_suffix="_agg" new_type=Nothing =
     actual_new_type = new_type.if_nothing column.sql_type
-    expr = IR.Operation operation [column.expression]
+    expr = Expression.Operation operation [column.expression]
     case Helpers.ensure_name_is_sane name_suffix of
         True ->
             new_name = column.name + name_suffix

@@ -554,18 +551,18 @@ make_aggregate column operation name_suffix="_agg" new_type=Nothing =
    - expected_type: The expected SQL type of the column.
    - expr: The expression for the query.
    - context: The context in which the query exists.
-lift_aggregate : Text -> Connection -> SQL_Type -> IR.Expression -> IR.Context -> Column
+lift_aggregate : Text -> Connection -> SQL_Type -> Expression -> Context -> Column
 lift_aggregate new_name connection expected_type expr context =
     # TODO [RW] This is a simple workaround for #1643 - we always wrap the
     # aggregate into a subquery, thus making it safe to use it everywhere. A
     # more complex solution may be adopted at some point.
     ixes = freshen_columns [new_name] context.meta_index
-    col = IR.Internal_Column_Data new_name expected_type expr
+    col = Internal_Column.Value new_name expected_type expr
     setup = context.as_subquery new_name+"_sub" [[col], ixes]
     subquery = setup.first
     cols = setup.second
     new_col = cols.first.first
     new_ixes = cols.second
-    new_ctx = IR.subquery_as_ctx subquery . set_index new_ixes
-    Column_Data new_name connection new_col.sql_type new_col.expression new_ctx
+    new_ctx = Context.for_subquery subquery . set_index new_ixes
+    Column.Value new_name connection new_col.sql_type new_col.expression new_ctx
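To make the `IR.` → `Expression`/`Context` split concrete: for two columns of the same table, `make_binary_op` now builds roughly the following (a sketch using the constructors from this file; `col_a` and `col_b` are hypothetical columns sharing one context):

```
new_expr = Expression.Operation "=" [col_a.expression, col_b.expression]
compared = Column.Value col_a.name col_a.connection SQL_Type.boolean new_expr col_a.context
```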
File: src/Data/Dialect.enso

@@ -1,12 +1,15 @@
 from Standard.Base import all

 import Standard.Base.Error.Common as Errors

 from Standard.Table import Aggregate_Column
-from Standard.Database.Data.SQL import Statement, SQL_Type
-import Standard.Database.Internal.IR
-import Standard.Database.Internal.Postgres.Postgres_Dialect
-import Standard.Database.Internal.Redshift.Redshift_Dialect
-import Standard.Database.Internal.SQLite.SQLite_Dialect
+
+import project.Data.SQL_Statement.SQL_Statement
+import project.Data.SQL_Type.SQL_Type
+import project.Internal.IR.Internal_Column.Internal_Column
+import project.Internal.IR.Order_Descriptor.Order_Descriptor
+import project.Internal.Postgres.Postgres_Dialect
+import project.Internal.Redshift.Redshift_Dialect
+import project.Internal.SQLite.SQLite_Dialect

 ## PRIVATE

@@ -23,7 +26,7 @@ type Dialect
     ## PRIVATE
        A function which generates SQL code from the internal representation
        according to the specific dialect.
-    generate_sql : Query -> Statement
+    generate_sql : Query -> SQL_Statement
     generate_sql self = Errors.unimplemented "This is an interface only."

     ## PRIVATE

@@ -39,7 +42,7 @@ type Dialect

        One of the purposes of this method is to verify if the expected ordering
        settings are supported by the given database backend.
-    prepare_order_descriptor : IR.Internal_Column -> Sort_Direction -> Text_Ordering -> IR.Order_Descriptor
+    prepare_order_descriptor : Internal_Column -> Sort_Direction -> Text_Ordering -> Order_Descriptor
     prepare_order_descriptor self = Errors.unimplemented "This is an interface only."

     ## PRIVATE
File: src/Data/SQL.enso

@@ -2,13 +2,14 @@ from Standard.Base import all

 import Standard.Table.Internal.Vector_Builder.Vector_Builder

-polyglot java import java.sql.Types
+import project.Data.SQL_Type.SQL_Type
+import project.Data.SQL_Statement.SQL_Statement

 ## UNSTABLE

    Creates a Builder representing and empty code fragment.
 empty : Builder
-empty = Builder_Data (Vector_Builder.empty)
+empty = Builder.Value (Vector_Builder.empty)

 ## UNSTABLE

@@ -19,8 +20,8 @@ empty = Builder_Data (Vector_Builder.empty)
    - text: The raw SQL code.
 code : Text -> Builder
 code text =
-    vec = if text.is_empty then [] else [SQL_Code_Part text]
-    Builder_Data (Vector_Builder.from_vector vec)
+    vec = if text.is_empty then [] else [SQL_Fragment.Code_Part text]
+    Builder.Value (Vector_Builder.from_vector vec)

 ## UNSTABLE

@@ -31,7 +32,7 @@ code text =
    - object: The object to be interpolated into the query as if it has the type
      given by `sql_type`.
 interpolation : SQL_Type -> Any -> Builder
-interpolation sql_type object = Builder_Data (Vector_Builder.from_vector [SQL_Interpolation sql_type object])
+interpolation sql_type object = Builder.Value (Vector_Builder.from_vector [SQL_Fragment.Interpolation sql_type object])

 ## UNSTABLE

@@ -43,133 +44,19 @@ interpolation sql_type object = Builder_Data (Vector_Builder.from_vector [SQL_Interpolation sql_type object])
 join : Builder | Text -> Vector Builder -> Builder
 join separator statements =
     sep = case separator of
-        Builder_Data _ -> separator
+        Builder.Value _ -> separator
         _ -> code separator

     if statements.length == 0 then empty else
         (1.up_to statements.length . fold (statements.at 0) acc-> i-> acc ++ sep ++ statements.at i)

-# TODO Dubious constructor export
-from project.Data.SQL.SQL_Type import all
-from project.Data.SQL.SQL_Type export all
-
-## Represents an internal SQL data-type.
-type SQL_Type
-
-    ## Represents an internal SQL data-type.
-
-       Arguments:
-       - typeid: a numerical type id, as defined in `java.sql.Types`.
-       - name: a database-specific type name, used for pretty printing.
-    SQL_Type_Data typeid name
-
-    == self that = case that of
-        SQL_Type_Data that_id _ ->
-            self.typeid == that_id
-        _ -> False
-
-    ## The SQL representation of `Boolean` type.
-    boolean : SQL_Type
-    boolean = SQL_Type_Data Types.BOOLEAN "BOOLEAN"
-
-    ## The SQL representation of `Integer` type.
-    integer : SQL_Type
-    integer = SQL_Type_Data Types.INTEGER "INTEGER"
-
-    ## The SQL representation of the `BIGINT` type.
-    bigint : SQL_Type
-    bigint = SQL_Type_Data Types.BIGINT "BIGINT"
-
-    ## The SQL representation of the `SMALLINT` type.
-    smallint : SQL_Type
-    smallint = SQL_Type_Data Types.SMALLINT "SMALLINT"
-
-    ## The SQL type representing decimal numbers.
-    decimal : SQL_Type
-    decimal = SQL_Type_Data Types.DECIMAL "DECIMAL"
-
-    ## The SQL type representing decimal numbers.
-    real : SQL_Type
-    real = SQL_Type_Data Types.REAL "REAL"
-
-    ## The SQL type representing double-precision floating-point numbers.
-    double : SQL_Type
-    double = SQL_Type_Data Types.DOUBLE "DOUBLE PRECISION"
-
-    ## The SQL type representing a general numeric type.
-    numeric : SQL_Type
-    numeric = SQL_Type_Data Types.NUMERIC "NUMERIC"
-
-    ## The SQL type representing one of the suppported textual types.
-    varchar : SQL_Type
-    varchar = SQL_Type_Data Types.VARCHAR "VARCHAR"
-
-    ## UNSTABLE
-       The SQL type representing one of the suppported textual types.
-
-       It seems that JDBC treats the `TEXT` and `VARCHAR` types as interchangeable.
-    text : SQL_Type
-    text = SQL_Type_Data Types.VARCHAR "VARCHAR"
-
-    ## The SQL type representing a binary object.
-    blob : SQL_Type
-    blob = SQL_Type_Data Types.BLOB "BLOB"
-
-    ## PRIVATE
-
-       Returns True if this type represents an integer.
-
-       It only handles the standard types so it may return false negatives for
-       non-standard ones.
-    is_definitely_integer : Boolean
-    is_definitely_integer self =
-        [Types.INTEGER, Types.BIGINT, Types.SMALLINT, Types.TINYINT].contains self.typeid
-
-    ## PRIVATE
-
-       Returns True if this type represents a boolean.
-
-       It only handles the standard types so it may return false negatives for
-       non-standard ones.
-    is_definitely_boolean : Boolean
-    is_definitely_boolean self =
-        [Types.BOOLEAN, Types.BIT].contains self.typeid
-
-    ## PRIVATE
-
-       Returns True if this type represents a floating point number.
-
-       It only handles the standard types so it may return false negatives for
-       non-standard ones.
-    is_definitely_double : Boolean
-    is_definitely_double self =
-        [Types.FLOAT, Types.DOUBLE, Types.REAL].contains self.typeid
-
-    ## PRIVATE
-       Returns True if this type represents a Text.
-    is_definitely_text : Boolean
-    is_definitely_text self =
-        [Types.VARCHAR, Types.LONGVARCHAR, Types.NVARCHAR, Types.LONGNVARCHAR].contains self.typeid
-
-    ## PRIVATE
-       Returns True if this type represents a Text, using heuristics that may
-       match more possible types.
-    is_likely_text : Boolean
-    is_likely_text self =
-        self.is_definitely_text || self.name.contains "text" Text_Matcher.Case_Insensitive
-
-
-# TODO Dubious constructor export
-from project.Data.SQL.SQL_Fragment import all
-from project.Data.SQL.SQL_Fragment export all
-
 ## UNSTABLE

    A fragment of a SQL query.

-   It can either be a SQL_Code_Part that represents raw SQL code or
-   SQL_Interpolation which represents an object that will be interpolated into
-   the query.
+   It can either be a SQL_Fragment.Code_Part that represents raw SQL code or
+   SQL_Fragment.Interpolation which represents an object that will be
+   interpolated into the query.
 type SQL_Fragment

     ## UNSTABLE

@@ -178,7 +65,7 @@ type SQL_Fragment

        Arguments:
        - code: A fragment of SQL code.
-    SQL_Code_Part code:Text
+    Code_Part code:Text

     ## UNSTABLE

@@ -189,90 +76,7 @@ type SQL_Fragment
        - sql_type: The expected SQL type of `object`.
        - object: A value that will be interpolated into the query, interpreted
         as having the type `sql_type`.
-    SQL_Interpolation sql_type:SQL_Type object:Any
-
-# TODO Dubious constructor export
-from project.Data.SQL.Statement import all
-from project.Data.SQL.Statement export all
-
-type Statement
-
-    ## UNSTABLE
-
-       Represents a built SQL statement.
-
-       Arguments:
-       - internal_fragments: A vector of SQL code fragments.
-
-       The statement consists of SQL code with parameters and values that will be
-       interpolated for these parameters.
-    Statement_Data (internal_fragments:(Vector SQL_Fragment))
-
-    ## UNSTABLE
-
-       A vector of code fragments.
-
-       Consists of two types of values:
-       - SQL_Code_Part, representing parts of raw SQL code and
-       - SQL_Interpolation, representing objects that will be interpolated in
-         between the SQL code.
-    fragments : Vector SQL_Fragment
-    fragments self = self.internal_fragments
-
-    ## UNSAFE
-       UNSTABLE
-       ADVANCED
-
-       This function returns a raw SQL string for the query, manually handling
-       the interpolation that is normally handled by the database engine itself.
-
-       It should be used EXTREMELY carefully - the returned SQL code should
-       always be read by a human to ensure that it does what is expected before
-       executing it in any REPLs or other database interfaces.
-
-       It should NEVER be used in production code.
-    unsafe_to_raw_sql : Text
-    unsafe_to_raw_sql self =
-        strings = self.internal_fragments . map <| case _ of
-            SQL_Code_Part code -> code
-            # TODO at some point we may try more sophisticated serialization based on data type
-            SQL_Interpolation _ obj -> case obj of
-                Number -> obj.to_text
-                _ -> "'" + obj.to_text.replace "'" "''" + "'"
-        strings.join ""
-
-    ## UNSTABLE
-
-       Returns a pair consisting of the SQL code with holes for values and
-       a list for values that should be substituted.
-    # prepare : [Text, Vector Any]
-    prepare self =
-        to_code fragment = case fragment of
-            SQL_Code_Part code -> code
-            SQL_Interpolation _ _ -> "?"
-        to_subst fragment = case fragment of
-            SQL_Code_Part _ -> []
-            SQL_Interpolation typ obj -> [[obj, typ]]
-        sql = self.fragments.map to_code . join ""
-        substitutions = self.fragments.flat_map to_subst
-        [sql, substitutions]
-
-    ## UNSTABLE
-
-       Returns a JSON representation of the statement.
-    to_json : Json
-    to_json self =
-        jsonify fragment = case fragment of
-            SQL_Code_Part code -> Json.from_pairs [["sql_code", code]]
-            SQL_Interpolation typ obj ->
-                inner = Json.from_pairs [["value", obj], ["expected_sql_type", typ.name]]
-                Json.from_pairs [["sql_interpolation", inner]]
-        fragments = Json.Array (self.internal_fragments.map jsonify)
-        Json.from_pairs [["query", fragments]]
-
-# TODO Dubious constructor export
-from project.Data.SQL.Builder import all
-from project.Data.SQL.Builder export all
+    Interpolation sql_type:SQL_Type object:Any

 type Builder

@@ -285,7 +89,7 @@ type Builder

        It can be used to concatenate parts of SQL code in O(1) time and at the
        end build the actual query in linear time.
-    Builder_Data (fragments:(Vector_Builder SQL_Fragment))
+    Value (fragments:(Vector_Builder SQL_Fragment))

     ## UNSTABLE

@@ -295,8 +99,8 @@ type Builder
        - other: The code fragment to append to `self`.
     ++ : Builder -> Builder
     ++ self other = case other of
-        text : Text -> if text == "" then self else Builder_Data (self.fragments ++ (code text).fragments)
-        _ -> Builder_Data (self.fragments ++ other.fragments)
+        text : Text -> if text == "" then self else Builder.Value (self.fragments ++ (code text).fragments)
+        _ -> Builder.Value (self.fragments ++ other.fragments)

     ## UNSTABLE

@@ -307,10 +111,10 @@ type Builder
     ## UNSTABLE

        Builds a SQL statement.
-    build : Statement
+    build : SQL_Statement
     build self =
         fragments = optimize_fragments self.fragments.build
-        Statement_Data fragments
+        SQL_Statement.Value fragments

     ## UNSTABLE

@@ -329,7 +133,7 @@ type Builder
     prefix_if_present : Text | Builder -> Builder
     prefix_if_present self prefix =
         pref = case prefix of
-            Builder_Data _ -> prefix
+            _ : Builder -> prefix
             _ -> code prefix
         if self.is_empty then self else pref++self

@@ -344,25 +148,24 @@ optimize_fragments : Vector SQL_Fragment -> Vector SQL_Fragment
 optimize_fragments fragments =
     builder = Vector.new_builder
     go elem =
-        last_part = State.get SQL_Code_Part
+        last_part = State.get SQL_Fragment.Code_Part
         case elem of
-            SQL_Code_Part code ->
+            SQL_Fragment.Code_Part code ->
                 new_part = case last_part of
-                    Nothing -> SQL_Code_Part code
-                    SQL_Code_Part other -> SQL_Code_Part other+code
-                State.put SQL_Code_Part new_part
-            SQL_Interpolation _ _ ->
+                    Nothing -> SQL_Fragment.Code_Part code
+                    SQL_Fragment.Code_Part other -> SQL_Fragment.Code_Part other+code
+                State.put SQL_Fragment.Code_Part new_part
+            SQL_Fragment.Interpolation _ _ ->
                 case last_part of
                     Nothing -> Nothing
-                    SQL_Code_Part _ ->
+                    SQL_Fragment.Code_Part _ ->
                         builder.append last_part
-                        State.put SQL_Code_Part Nothing
+                        State.put SQL_Fragment.Code_Part Nothing
                 builder.append elem
-    State.run SQL_Code_Part Nothing <|
+    State.run SQL_Fragment.Code_Part Nothing <|
         fragments.each go
-    last_part = State.get SQL_Code_Part
+    last_part = State.get SQL_Fragment.Code_Part
     case last_part of
         Nothing -> Nothing
-        SQL_Code_Part _ -> builder.append last_part
+        SQL_Fragment.Code_Part _ -> builder.append last_part
     builder.to_vector
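Putting the `Builder` pieces together, a parameterised statement is assembled from `code` and `interpolation` fragments and then built (a sketch; the literal values are illustrative):

```
builder = code "SELECT * FROM orders WHERE id = " ++ interpolation SQL_Type.integer 42
statement = builder.build
statement.prepare
# => ["SELECT * FROM orders WHERE id = ?", [[42, SQL_Type.integer]]]
```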
File: src/Data/SQL_Query.enso

@@ -1,9 +1,5 @@
 from Standard.Base import Text

-# TODO Dubious constructor export
-from project.Data.SQL_Query.SQL_Query import all
-from project.Data.SQL_Query.SQL_Query export all
-
 type SQL_Query
     ## Query a whole table or view.
     Table_Name name:Text
File: src/Data/SQL_Statement.enso (new file)

@@ -0,0 +1,78 @@
+from Standard.Base import all
+
+import project.Data.SQL.SQL_Fragment
+
+type SQL_Statement
+
+    ## UNSTABLE
+
+       Represents a built SQL statement.
+
+       Arguments:
+       - internal_fragments: A vector of SQL code fragments.
+
+       The statement consists of SQL code with parameters and values that will be
+       interpolated for these parameters.
+    Value (internal_fragments:(Vector SQL_Fragment))
+
+    ## UNSTABLE
+
+       A vector of code fragments.
+
+       Consists of two types of values:
+       - SQL_Fragment.Code_Part, representing parts of raw SQL code and
+       - SQL_Fragment.Interpolation, representing objects that will be
+         interpolated in between the SQL code.
+    fragments : Vector SQL_Fragment
+    fragments self = self.internal_fragments
+
+    ## UNSAFE
+       UNSTABLE
+       ADVANCED
+
+       This function returns a raw SQL string for the query, manually handling
+       the interpolation that is normally handled by the database engine itself.
+
+       It should be used EXTREMELY carefully - the returned SQL code should
+       always be read by a human to ensure that it does what is expected before
+       executing it in any REPLs or other database interfaces.
+
+       It should NEVER be used in production code.
+    unsafe_to_raw_sql : Text
+    unsafe_to_raw_sql self =
+        strings = self.internal_fragments . map <| case _ of
+            SQL_Fragment.Code_Part code -> code
+            # TODO at some point we may try more sophisticated serialization based on data type
+            SQL_Fragment.Interpolation _ obj -> case obj of
+                Number -> obj.to_text
+                _ -> "'" + obj.to_text.replace "'" "''" + "'"
+        strings.join ""
+
+    ## UNSTABLE
+
+       Returns a pair consisting of the SQL code with holes for values and
+       a list for values that should be substituted.
+    # prepare : [Text, Vector Any]
+    prepare self =
+        to_code fragment = case fragment of
+            SQL_Fragment.Code_Part code -> code
+            SQL_Fragment.Interpolation _ _ -> "?"
+        to_subst fragment = case fragment of
+            SQL_Fragment.Code_Part _ -> []
+            SQL_Fragment.Interpolation typ obj -> [[obj, typ]]
+        sql = self.fragments.map to_code . join ""
+        substitutions = self.fragments.flat_map to_subst
+        [sql, substitutions]
+
+    ## UNSTABLE
+
+       Returns a JSON representation of the statement.
+    to_json : Json
+    to_json self =
+        jsonify fragment = case fragment of
+            SQL_Fragment.Code_Part code -> Json.from_pairs [["sql_code", code]]
+            SQL_Fragment.Interpolation typ obj ->
+                inner = Json.from_pairs [["value", obj], ["expected_sql_type", typ.name]]
+                Json.from_pairs [["sql_interpolation", inner]]
+        fragments = Json.Array (self.internal_fragments.map jsonify)
+        Json.from_pairs [["query", fragments]]
File: src/Data/SQL_Type.enso (new file)

@@ -0,0 +1,108 @@
+from Standard.Base import all
+
+polyglot java import java.sql.Types
+
+## Represents an internal SQL data-type.
+type SQL_Type
+
+    ## Represents an internal SQL data-type.
+
+       Arguments:
+       - typeid: a numerical type id, as defined in `java.sql.Types`.
+       - name: a database-specific type name, used for pretty printing.
+    Value typeid name
+
+    == self that = case that of
+        SQL_Type.Value that_id _ ->
+            self.typeid == that_id
+        _ -> False
+
+    ## The SQL representation of `Boolean` type.
+    boolean : SQL_Type
+    boolean = SQL_Type.Value Types.BOOLEAN "BOOLEAN"
+
+    ## The SQL representation of `Integer` type.
+    integer : SQL_Type
+    integer = SQL_Type.Value Types.INTEGER "INTEGER"
+
+    ## The SQL representation of the `BIGINT` type.
+    bigint : SQL_Type
+    bigint = SQL_Type.Value Types.BIGINT "BIGINT"
+
+    ## The SQL representation of the `SMALLINT` type.
+    smallint : SQL_Type
+    smallint = SQL_Type.Value Types.SMALLINT "SMALLINT"
+
+    ## The SQL type representing decimal numbers.
+    decimal : SQL_Type
+    decimal = SQL_Type.Value Types.DECIMAL "DECIMAL"
+
+    ## The SQL type representing decimal numbers.
+    real : SQL_Type
+    real = SQL_Type.Value Types.REAL "REAL"
+
+    ## The SQL type representing double-precision floating-point numbers.
+    double : SQL_Type
+    double = SQL_Type.Value Types.DOUBLE "DOUBLE PRECISION"
+
+    ## The SQL type representing a general numeric type.
+    numeric : SQL_Type
+    numeric = SQL_Type.Value Types.NUMERIC "NUMERIC"
+
+    ## The SQL type representing one of the suppported textual types.
+    varchar : SQL_Type
+    varchar = SQL_Type.Value Types.VARCHAR "VARCHAR"
+
+    ## UNSTABLE
+       The SQL type representing one of the suppported textual types.
+
+       It seems that JDBC treats the `TEXT` and `VARCHAR` types as interchangeable.
+    text : SQL_Type
+    text = SQL_Type.Value Types.VARCHAR "VARCHAR"
+
+    ## The SQL type representing a binary object.
+    blob : SQL_Type
+    blob = SQL_Type.Value Types.BLOB "BLOB"
+
+    ## PRIVATE
+
+       Returns True if this type represents an integer.
+
+       It only handles the standard types so it may return false negatives for
+       non-standard ones.
+    is_definitely_integer : Boolean
+    is_definitely_integer self =
+        [Types.INTEGER, Types.BIGINT, Types.SMALLINT, Types.TINYINT].contains self.typeid
+
+    ## PRIVATE
+
+       Returns True if this type represents a boolean.
+
+       It only handles the standard types so it may return false negatives for
+       non-standard ones.
+    is_definitely_boolean : Boolean
+    is_definitely_boolean self =
+        [Types.BOOLEAN, Types.BIT].contains self.typeid
+
+    ## PRIVATE
+
+       Returns True if this type represents a floating point number.
+
+       It only handles the standard types so it may return false negatives for
+       non-standard ones.
+    is_definitely_double : Boolean
+    is_definitely_double self =
+        [Types.FLOAT, Types.DOUBLE, Types.REAL].contains self.typeid
+
+    ## PRIVATE
+       Returns True if this type represents a Text.
+    is_definitely_text : Boolean
+    is_definitely_text self =
+        [Types.VARCHAR, Types.LONGVARCHAR, Types.NVARCHAR, Types.LONGNVARCHAR].contains self.typeid
+
+    ## PRIVATE
+       Returns True if this type represents a Text, using heuristics that may
+       match more possible types.
+    is_likely_text : Boolean
+    is_likely_text self =
+        self.is_definitely_text || self.name.contains "text" Text_Matcher.Case_Insensitive
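Note that `==` on `SQL_Type` compares only the `typeid`, so the `text` and `varchar` constants above compare equal (both are backed by `Types.VARCHAR`):

```
SQL_Type.text == SQL_Type.varchar
# => True
SQL_Type.bigint.is_definitely_integer
# => True
```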
File: src/Data/Table.enso

@@ -1,28 +1,29 @@
 from Standard.Base import all
 from Standard.Base.Error.Problem_Behavior import Report_Warning

-import Standard.Database.Internal.Helpers
-import Standard.Database.Internal.Aggregate_Helper
-import Standard.Database.Internal.IR
-from Standard.Database.Data.SQL_Query import Raw_SQL
-from Standard.Database.Data.SQL import Statement, SQL_Type
-
-from Standard.Table import Auto_Detect, Aggregate_Column, Data_Formatter, Column_Name_Mapping, Sort_Column_Selector, Sort_Column, Match_Columns, Position
-from Standard.Table.Data.Column import get_item_string
+from Standard.Table import Auto_Detect, Aggregate_Column, Data_Formatter, Column_Name_Mapping, Column_Selector, Sort_Column_Selector, Sort_Column, Match_Columns, Position
 import Standard.Table.Data.Table.Table as Materialized_Table
-from Standard.Table.Data.Table import print_table
-from Standard.Table.Data.Filter_Condition_Helpers import make_filter_column
-
-from Standard.Table.Errors import No_Index_Set_Error, No_Such_Column_Error, No_Such_Column_Error_Data
-from Standard.Table.Data.Column_Selector import Column_Selector

 import Standard.Table.Internal.Java_Exports
 import Standard.Table.Internal.Table_Helpers
 import Standard.Table.Internal.Problem_Builder.Problem_Builder
 import Standard.Table.Internal.Aggregate_Column_Helper
+from Standard.Table.Data.Column import get_item_string
+from Standard.Table.Data.Table import print_table
+from Standard.Table.Internal.Filter_Condition_Helpers import make_filter_column
+from Standard.Table.Errors import No_Index_Set_Error, No_Such_Column_Error, No_Such_Column_Error_Data

+import project.Data.Column.Column
+import project.Data.SQL_Statement.SQL_Statement
+import project.Data.SQL_Type.SQL_Type
+import project.Internal.Helpers
+import project.Internal.Aggregate_Helper
+import project.Internal.IR.Context.Context
+import project.Internal.IR.Expression.Expression
+import project.Internal.IR.From_Spec.From_Spec
+import project.Internal.IR.Internal_Column.Internal_Column
+import project.Internal.IR.Join_Kind.Join_Kind
+import project.Internal.IR.Query.Query

-from Standard.Database.Data.Column import Column, Column_Data
-from Standard.Database.Internal.IR import Internal_Column, Internal_Column_Data
-from Standard.Database.Errors import Unsupported_Database_Operation_Error_Data

 polyglot java import java.sql.JDBCType

@@ -39,7 +40,7 @@ type Table
        - connection: The connection with which the table is associated.
        - internal_columns: The internal representation of the table columns.
        - context: The context associated with this table.
-    Table_Data name:Text connection:Connection (internal_columns:(Vector Internal_Column)) context:IR.Context
+    Value name:Text connection:Connection (internal_columns:(Vector Internal_Column)) context:IR.Context

     ## UNSTABLE

@@ -335,7 +336,7 @@ type Table
             _ : Text -> Panic.rethrow (self.at column)
             _ ->
                 if Helpers.check_integrity self column then column else
-                    Panic.throw (Integrity_Error_Data "Column "+column.name)
+                    Panic.throw (Integrity_Error.Error "Column "+column.name)

     ## ALIAS Filter Rows

@@ -385,7 +386,7 @@ type Table
             _ : Column ->
                 mask filter_column = case Helpers.check_integrity self filter_column of
                     False ->
-                        Error.throw (Integrity_Error_Data "Column "+filter_column.name)
+                        Error.throw (Integrity_Error.Error "Column "+filter_column.name)
                     True ->
                         new_filters = self.context.where_filters + [filter_column.expression]
                         new_ctx = self.context.set_where_filters new_filters

@@ -401,7 +402,7 @@ type Table

     ## PRIVATE
     with_no_rows self =
-        false_expression = IR.Operation "=" [IR.Constant SQL_Type.integer 1, IR.Constant SQL_Type.integer 2]
+        false_expression = Expression.Operation "=" [Expression.Constant SQL_Type.integer 1, Expression.Constant SQL_Type.integer 2]
         new_filters = self.context.where_filters + [false_expression]
         new_ctx = self.context.set_where_filters new_filters
         self.updated_context new_ctx

@@ -483,7 +484,7 @@ type Table
         case is_used_in_index of
             True -> Error.throw <| Illegal_State_Error_Data "Cannot override column "+name+", because it is used as an index. Remove the index or use a different name."
             False ->
-                new_col = Internal_Column_Data name column.sql_type column.expression
+                new_col = Internal_Column.Value name column.sql_type column.expression
                 replace = self.internal_columns.exists (c -> c.name == name)
                 case replace of
                     True ->

@@ -659,7 +660,7 @@ type Table
         Panic.rethrow (Helpers.ensure_name_is_sane left_suffix && Helpers.ensure_name_is_sane right_suffix)
         if left_suffix == right_suffix then
             Panic.throw <| Illegal_State_Error_Data "left_suffix must be different from right_suffix"
-        kind = if drop_unmatched then IR.Join_Inner else IR.Join_Left
+        kind = if drop_unmatched then Join_Kind.Inner else Join_Kind.Left

         # Prepare the left and right pairs of indices along which the join will be performed.
         left_join_index : Vector Internal_Column

@@ -719,12 +720,12 @@ type Table
         new_columns = left_renamed_columns + right_renamed_columns

         on_exprs = left_new_join_index.zip right_new_join_index l-> r->
-            IR.Operation "=" [l.expression, r.expression]
-        new_from = IR.Join kind left_subquery right_subquery on_exprs
+            Expression.Operation "=" [l.expression, r.expression]
+        new_from = From_Spec.Join kind left_subquery right_subquery on_exprs
         new_limit = Nothing
-        new_ctx = IR.Context_Data new_from [] [] [] new_index new_limit
+        new_ctx = Context.Value new_from [] [] [] new_index new_limit

-        Table.Table_Data new_table_name self.connection new_columns new_ctx
+        Table.Value new_table_name self.connection new_columns new_ctx

     ## ALIAS group, summarize

@@ -763,7 +764,7 @@ type Table
             agg = p.second
             new_name = p.first
             Aggregate_Helper.make_aggregate_column self agg new_name . catch
-        partitioned = results.partition (_.is_a Internal_Column_Data)
+        partitioned = results.partition (_.is_a Internal_Column.Value)
         ## When working on join we may encounter further issues with having
            aggregate columns exposed directly, it may be useful to re-use
            the `lift_aggregate` method to push the aggregates into a

@@ -793,11 +794,11 @@ type Table
     ## DEPRECATED Will be replaced with `Incomplete_Columns` selector (to be used with `remove_columns`).
     drop_missing_columns : Table
     drop_missing_columns self =
-        rows_expr = IR.Operation "COUNT_ROWS" []
+        rows_expr = Expression.Operation "COUNT_ROWS" []
         all_rows_column_name = "row_count"
-        make_count_expr expr = IR.Operation "COUNT" [expr]
+        make_count_expr expr = Expression.Operation "COUNT" [expr]
         cols = self.internal_columns.map (c -> [c.name, make_count_expr c.expression])
-        query = IR.Select [[all_rows_column_name, rows_expr]]+cols self.context
+        query = Query.Select [[all_rows_column_name, rows_expr]]+cols self.context
         sql = self.connection.dialect.generate_sql query
         table = self.connection.read_statement sql
         all_rows = table.at all_rows_column_name . at 0

@@ -808,15 +809,15 @@ type Table
     ## Returns the amount of rows in this table.
     row_count : Integer
     row_count self = if self.internal_columns.is_empty then 0 else
-        expr = IR.Operation "COUNT_ROWS" []
+        expr = Expression.Operation "COUNT_ROWS" []
         column_name = "row_count"
         ## We need to keep some column in the subquery which will determine if
            the query is performing regular selection or aggregation. To avoid
            computing too much we do not pass all the columns but only the first
            one.
         setup = self.context.as_subquery self.name [[self.internal_columns.first]]
-        new_ctx = IR.subquery_as_ctx setup.first
-        query = IR.Select [[column_name, expr]] new_ctx
|
||||
new_ctx = Context.for_subquery setup.first
|
||||
query = Query.Select [[column_name, expr]] new_ctx
|
||||
sql = self.connection.dialect.generate_sql query
|
||||
table = self.connection.read_statement sql
|
||||
table.at column_name . at 0
|
||||
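
A short usage sketch, in the style of the `write` example further down in this file (the database file and table name are illustrative):

    example_row_count =
        connection = Database.connect (SQLite (File.new "db.sqlite"))
        table = connection.query (SQL_Query.Table_Name "Users")
        # Issues a COUNT query through the IR instead of materializing rows.
        table.row_count
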
@ -859,13 +860,13 @@ type Table
## UNSTABLE

Returns an SQL statement that will be used for materializing this table.
to_sql : Statement
to_sql : SQL_Statement
to_sql self =
cols = self.internal_columns.map (c -> [c.name, c.expression])
case cols.is_empty of
True -> Error.throw <| Unsupported_Database_Operation_Error_Data "Cannot generate SQL for a table with no columns."
False ->
query = IR.Select cols self.context
query = Query.Select cols self.context
self.connection.dialect.generate_sql query
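
A brief sketch of how this is typically consumed (assuming `table` was obtained from a connection as in the other examples in this file):

    example_to_sql table =
        # An `SQL_Statement` pairing the SQL text with its interpolated values.
        statement = table.to_sql
        statement
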

## Returns a Table describing this table's contents.
@ -881,10 +882,10 @@ type Table
Naively wrapping each column in a `COUNT(...)` will not
always work as aggregates cannot be nested.
setup = self.context.as_subquery self.name [self.internal_columns]
new_ctx = IR.subquery_as_ctx setup.first
new_ctx = Context.for_subquery setup.first
new_columns = setup.second.first.map column->
[column.name, IR.Operation "COUNT" [column.expression]]
query = IR.Select new_columns new_ctx
[column.name, Expression.Operation "COUNT" [column.expression]]
query = Query.Select new_columns new_ctx
self.connection.dialect.generate_sql query
count_table = self.connection.read_statement count_query
counts = if cols.is_empty then [] else count_table.columns.map c-> c.at 0
@ -904,7 +905,7 @@ type Table
# these distinctness assumptions, to avoid this renaming.
ixes = freshen_columns [internal.name] self.context.meta_index
new_ctx = self.context.set_index ixes
Column_Data internal.name self.connection internal.sql_type internal.expression new_ctx
Column.Value internal.name self.connection internal.sql_type internal.expression new_ctx

## PRIVATE

@ -913,7 +914,7 @@ type Table
Arguments:
- columns: The columns with which to update this table.
updated_columns : Vector Internal_Column -> Table
updated_columns self internal_columns = Table.Table_Data self.name self.connection internal_columns self.context
updated_columns self internal_columns = Table.Value self.name self.connection internal_columns self.context

## PRIVATE

@ -922,7 +923,7 @@ type Table
Arguments:
- ctx: The new context for this table.
updated_context : Context -> Table
updated_context self ctx = Table.Table_Data self.name self.connection self.internal_columns ctx
updated_context self ctx = Table.Value self.name self.connection self.internal_columns ctx

## PRIVATE

@ -932,7 +933,7 @@ type Table
- ctx: The new context for this table.
- internal_columns: The new columns to include in the table.
updated_context_and_columns : Context -> Vector Internal_Column -> Table
updated_context_and_columns self ctx internal_columns = Table.Table_Data self.name self.connection internal_columns ctx
updated_context_and_columns self ctx internal_columns = Table.Value self.name self.connection internal_columns ctx

## PRIVATE

@ -956,12 +957,12 @@ type Table
insert : Vector Any -> Nothing
insert self values =
table_name = case self.context.from_spec of
IR.From_Table name _ -> name
From_Spec.From_Table name _ -> name
_ -> Error.throw <| Illegal_State_Error_Data "Inserting can only be performed on tables as returned by `query`, any further processing is not allowed."
# TODO [RW] before removing the PRIVATE tag, add a check that no bad stuff was done to the table as described above
pairs = self.internal_columns.zip values col-> value->
[col.name, IR.Constant col.sql_type value]
query = self.connection.dialect.generate_sql <| IR.Insert table_name pairs
[col.name, Expression.Constant col.sql_type value]
query = self.connection.dialect.generate_sql <| Query.Insert table_name pairs
affected_rows = self.connection.execute_update query
case affected_rows == 1 of
False -> Error.throw <| Illegal_State_Error_Data "The update unexpectedly affected "+affected_rows.to_text+" rows."
@ -1016,21 +1017,17 @@ type Table
Write a database table to a CSV file.

import Standard.Examples
import Standard.Database
from Standard.Database import all

example_to_csv =
connection = Database.connect (SQLite (File.new "db.sqlite"))
table = connection.query (Table_Name "Table")
table = connection.query (SQL_Query.Table_Name "Table")
table.write (enso_project.data / "example_csv_output.csv")
write : File|Text -> File_Format -> Existing_File_Behavior -> Match_Columns -> Problem_Behavior -> Nothing ! Column_Mismatch | Illegal_Argument_Error | File_Not_Found | IO_Error
write self path format=Auto_Detect on_existing_file=Existing_File_Behavior.Backup match_columns=Match_Columns.By_Name on_problems=Report_Warning =
# TODO This should ideally be done in a streaming manner, or at least respect the row limits.
self.read.write path format on_existing_file match_columns on_problems

# TODO Dubious constructor export
from project.Data.Table.Integrity_Error import all
from project.Data.Table.Integrity_Error export all

type Integrity_Error

## UNSTABLE
@ -1039,7 +1036,7 @@ type Integrity_Error
contexts.

To use columns from different tables, you must first join them.
Integrity_Error_Data object_description
Error object_description

# Return a readable description of this error.
to_text : Text
@ -1057,11 +1054,11 @@ type Integrity_Error
- table_name: The name of the table to get.
- columns: The names of the columns to get.
- ctx: The context to use for the table.
# make_table : Connection -> Text -> Vector [Text, SQL_Type] -> IR.Context -> Table
make_table : Connection -> Text -> Vector -> IR.Context -> Table
# make_table : Connection -> Text -> Vector [Text, SQL_Type] -> Context -> Table
make_table : Connection -> Text -> Vector -> Context -> Table
make_table connection table_name columns ctx =
cols = columns.map (p -> Internal_Column_Data p.first p.second (IR.Column table_name p.first))
Table.Table_Data table_name connection cols ctx
cols = columns.map (p -> Internal_Column.Value p.first p.second (Expression.Column table_name p.first))
Table.Value table_name connection cols ctx

## PRIVATE

@ -1,9 +1,12 @@
from Standard.Base import all hiding First, Last

from Standard.Table.Data.Aggregate_Column.Aggregate_Column import all
import Standard.Database.Internal.IR
from Standard.Database.Data.SQL import SQL_Type
from Standard.Database.Errors import Unsupported_Database_Operation_Error_Data

import project.Data.SQL_Type.SQL_Type
import project.Internal.IR.Expression.Expression
import project.Internal.IR.Internal_Column.Internal_Column

from project.Errors import Unsupported_Database_Operation_Error_Data

## PRIVATE
Creates an `Internal_Column` that computes the specified statistic.
@ -11,57 +14,57 @@ from Standard.Database.Errors import Unsupported_Database_Operation_Error_Data

The provided `aggregate` is assumed to contain only already resolved columns.
You may need to transform it with `resolve_aggregate` first.
make_aggregate_column : Table -> Aggregate_Column -> Text -> IR.Internal_Column
make_aggregate_column : Table -> Aggregate_Column -> Text -> Internal_Column
make_aggregate_column table aggregate new_name =
sql_type = table.connection.dialect.resolve_target_sql_type aggregate
expression = make_expression aggregate table.connection.dialect
IR.Internal_Column_Data new_name sql_type expression
Internal_Column.Value new_name sql_type expression
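
For example, a hedged sketch (assuming `table` is a database Table and `resolved_average` is an `Average` aggregate whose column has already been resolved via `resolve_aggregate`):

    example_make_aggregate table resolved_average =
        # Yields an `Internal_Column` whose expression is an
        # `Expression.Operation "AVG" [...]` node, typed by the dialect.
        make_aggregate_column table resolved_average "mean_price"
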

## PRIVATE
Creates an Internal Representation of the expression that computes a
requested statistic.
make_expression : Aggregate_Column -> Dialect -> IR.Expression
make_expression : Aggregate_Column -> Dialect -> Expression
make_expression aggregate dialect =
is_non_empty_selector v = if v.is_nothing then False else v.columns.not_empty
case aggregate of
Group_By c _ -> c.expression
Count _ -> IR.Operation "COUNT_ROWS" []
Count _ -> Expression.Operation "COUNT_ROWS" []
Count_Distinct columns _ ignore_nothing -> if columns.is_empty then Error.throw (Illegal_Argument_Error "Count_Distinct must have at least one column.") else
case ignore_nothing of
True -> IR.Operation "COUNT_DISTINCT" (columns.map .expression)
False -> IR.Operation "COUNT_DISTINCT_INCLUDE_NULL" (columns.map .expression)
Count_Not_Nothing c _ -> IR.Operation "COUNT" [c.expression]
Count_Nothing c _ -> IR.Operation "COUNT_IS_NULL" [c.expression]
Count_Not_Empty c _ -> IR.Operation "COUNT_NOT_EMPTY" [c.expression]
Count_Empty c _ -> IR.Operation "COUNT_EMPTY" [c.expression]
Percentile p c _ -> IR.Operation "PERCENTILE" [IR.Constant SQL_Type.double p, c.expression]
Mode c _ -> IR.Operation "MODE" [c.expression]
True -> Expression.Operation "COUNT_DISTINCT" (columns.map .expression)
False -> Expression.Operation "COUNT_DISTINCT_INCLUDE_NULL" (columns.map .expression)
Count_Not_Nothing c _ -> Expression.Operation "COUNT" [c.expression]
Count_Nothing c _ -> Expression.Operation "COUNT_IS_NULL" [c.expression]
Count_Not_Empty c _ -> Expression.Operation "COUNT_NOT_EMPTY" [c.expression]
Count_Empty c _ -> Expression.Operation "COUNT_EMPTY" [c.expression]
Percentile p c _ -> Expression.Operation "PERCENTILE" [Expression.Constant SQL_Type.double p, c.expression]
Mode c _ -> Expression.Operation "MODE" [c.expression]
First c _ ignore_nothing order_by -> case is_non_empty_selector order_by of
False -> Error.throw (Unsupported_Database_Operation_Error_Data "`First` aggregation requires at least one `order_by` column.")
True ->
order_bys = order_by.columns.map c-> dialect.prepare_order_descriptor c.column.as_internal c.direction Text_Ordering.Default
case ignore_nothing of
False -> IR.Operation "FIRST" [c.expression]+order_bys
True -> IR.Operation "FIRST_NOT_NULL" [c.expression]+order_bys
False -> Expression.Operation "FIRST" [c.expression]+order_bys
True -> Expression.Operation "FIRST_NOT_NULL" [c.expression]+order_bys
Last c _ ignore_nothing order_by -> case is_non_empty_selector order_by of
False -> Error.throw (Unsupported_Database_Operation_Error_Data "`Last` aggregation requires at least one `order_by` column.")
True ->
order_bys = order_by.columns.map c-> dialect.prepare_order_descriptor c.column.as_internal c.direction Text_Ordering.Default
case ignore_nothing of
False -> IR.Operation "LAST" [c.expression]+order_bys
True -> IR.Operation "LAST_NOT_NULL" [c.expression]+order_bys
Maximum c _ -> IR.Operation "MAX" [c.expression]
Minimum c _ -> IR.Operation "MIN" [c.expression]
Shortest c _ -> IR.Operation "SHORTEST" [c.expression]
Longest c _ -> IR.Operation "LONGEST" [c.expression]
False -> Expression.Operation "LAST" [c.expression]+order_bys
True -> Expression.Operation "LAST_NOT_NULL" [c.expression]+order_bys
Maximum c _ -> Expression.Operation "MAX" [c.expression]
Minimum c _ -> Expression.Operation "MIN" [c.expression]
Shortest c _ -> Expression.Operation "SHORTEST" [c.expression]
Longest c _ -> Expression.Operation "LONGEST" [c.expression]
Standard_Deviation c _ population -> case population of
True -> IR.Operation "STDDEV_POP" [c.expression]
False -> IR.Operation "STDDEV_SAMP" [c.expression]
True -> Expression.Operation "STDDEV_POP" [c.expression]
False -> Expression.Operation "STDDEV_SAMP" [c.expression]
Concatenate c _ separator prefix suffix quote_char ->
base_args = [c.expression, IR.Constant SQL_Type.text separator, IR.Constant SQL_Type.text prefix, IR.Constant SQL_Type.text suffix]
base_args = [c.expression, Expression.Constant SQL_Type.text separator, Expression.Constant SQL_Type.text prefix, Expression.Constant SQL_Type.text suffix]
case quote_char.is_empty of
True -> IR.Operation "CONCAT" base_args
False -> IR.Operation "CONCAT_QUOTE_IF_NEEDED" base_args+[IR.Constant SQL_Type.text quote_char]
Sum c _ -> IR.Operation "SUM" [c.expression]
Average c _ -> IR.Operation "AVG" [c.expression]
Median c _ -> IR.Operation "MEDIAN" [c.expression]
True -> Expression.Operation "CONCAT" base_args
False -> Expression.Operation "CONCAT_QUOTE_IF_NEEDED" base_args+[Expression.Constant SQL_Type.text quote_char]
Sum c _ -> Expression.Operation "SUM" [c.expression]
Average c _ -> Expression.Operation "AVG" [c.expression]
Median c _ -> Expression.Operation "MEDIAN" [c.expression]
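
Concretely, the `Average` branch above only builds a plain IR node; rendering it as `AVG(...)` happens later in `Base_Generator`. A sketch, where `price` stands in for a hypothetical `Internal_Column`:

    example_avg_node price =
        Expression.Operation "AVG" [price.expression]
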

@ -1,15 +1,17 @@
from Standard.Base import all

import Standard.Database.Internal.IR
import project.Data.SQL
import project.Data.SQL.Builder
import project.Internal.IR.Context.Context
import project.Internal.IR.Expression.Expression
import project.Internal.IR.From_Spec.From_Spec
import project.Internal.IR.Join_Kind.Join_Kind
import project.Internal.IR.Order_Descriptor.Order_Descriptor
import project.Internal.IR.Nulls_Order.Nulls_Order
import project.Internal.IR.Query.Query
from project.Data.SQL import code

from Standard.Database.Data.SQL import Builder, code
import Standard.Database.Data.SQL

from Standard.Database.Errors import Unsupported_Database_Operation_Error

# TODO Dubious constructor export
from project.Internal.Base_Generator.Internal_Dialect import all
from project.Internal.Base_Generator.Internal_Dialect export all
from project.Errors import Unsupported_Database_Operation_Error

type Internal_Dialect

@ -26,7 +28,7 @@ type Internal_Dialect
identifier name in such a way that it can be used in the query; that
usually consists of wrapping the name in quotes and escaping any quotes
within it.
Internal_Dialect_Data (operation_map:(Map Text (Vector (Builder->Builder)))) (wrap_identifier:(Text->Builder))
Value (operation_map:(Map Text (Vector (Builder->Builder)))) (wrap_identifier:(Text->Builder))

## PRIVATE

@ -37,7 +39,7 @@ type Internal_Dialect
extend_with : Vector Any -> Internal_Dialect
extend_with self mappings =
new_map = mappings.fold self.operation_map (m -> el -> m.insert (el.at 0) (el.at 1))
Internal_Dialect_Data new_map self.wrap_identifier
Internal_Dialect.Value new_map self.wrap_identifier

## PRIVATE

@ -173,7 +175,7 @@ base_dialect =
counts = [fun "COUNT", ["COUNT_ROWS", make_constant "COUNT(*)"]]
nulls = [["ISNULL", make_right_unary_op "IS NULL"], ["FILLNULL", make_function "COALESCE"]]
base_map = Map.from_vector (arith + logic + compare + agg + nulls + counts)
Internal_Dialect_Data base_map wrap_in_quotes
Internal_Dialect.Value base_map wrap_in_quotes
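
A sketch of how a concrete dialect can specialize this base mapping via `extend_with` (the `"STARTS_WITH"` operation name is hypothetical; `make_function` and `base_dialect` are defined in this file):

    example_extended_dialect =
        # Each mapping is a pair of operation name and builder function.
        base_dialect.extend_with [["STARTS_WITH", make_function "STARTS_WITH"]]
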

## PRIVATE

@ -182,16 +184,16 @@ base_dialect =
Arguments:
- dialect: The SQL dialect in which the expression is being generated.
- expr: The expression to generate SQL code for.
generate_expression : Internal_Dialect -> IR.Expression -> Builder
generate_expression : Internal_Dialect -> Expression | Order_Descriptor -> Builder
generate_expression dialect expr = case expr of
IR.Column origin name ->
Expression.Column origin name ->
dialect.wrap_identifier origin ++ '.' ++ dialect.wrap_identifier name
IR.Constant sql_type value -> SQL.interpolation sql_type value
IR.Operation kind arguments ->
Expression.Constant sql_type value -> SQL.interpolation sql_type value
Expression.Operation kind arguments ->
op = dialect.operation_map.get_or_else kind (Error.throw <| Unsupported_Database_Operation_Error kind)
parsed_args = arguments.map (generate_expression dialect)
op parsed_args
IR.Order_Descriptor_Data _ _ _ _ -> generate_order dialect expr
_ : Order_Descriptor -> generate_order dialect expr
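
To trace the recursion, a hedged sketch (the rendered SQL text is illustrative and dialect-dependent):

    example_generate =
        expr = Expression.Operation "=" [Expression.Column "T" "a", Expression.Constant SQL_Type.integer 1]
        # Produces a Builder roughly equivalent to ("T"."a" = ?) with a
        # single interpolation slot for the constant.
        generate_expression base_dialect expr
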

## PRIVATE

@ -215,22 +217,22 @@ alias dialect name =
- from_spec: A description of the FROM clause.
generate_from_part : Internal_Dialect -> From_Spec -> Builder
generate_from_part dialect from_spec = case from_spec of
IR.From_Table name as_name ->
From_Spec.From_Table name as_name ->
dialect.wrap_identifier name ++ alias dialect as_name
IR.From_Query raw_sql as_name ->
From_Spec.From_Query raw_sql as_name ->
code raw_sql . paren ++ alias dialect as_name
IR.Join kind left_spec right_spec on ->
From_Spec.Join kind left_spec right_spec on ->
left = generate_from_part dialect left_spec
right = generate_from_part dialect right_spec
prefix = case kind of
IR.Join_Inner -> "INNER JOIN"
IR.Join_Left -> "LEFT JOIN"
IR.Join_Right -> "RIGHT JOIN"
IR.Join_Cross -> "CROSS JOIN"
Join_Kind.Inner -> "INNER JOIN"
Join_Kind.Left -> "LEFT JOIN"
Join_Kind.Right -> "RIGHT JOIN"
Join_Kind.Cross -> "CROSS JOIN"
ons = SQL.join " AND " (on.map (generate_expression dialect)) . prefix_if_present " ON "
left ++ (" " + prefix + " ") ++ right ++ ons
IR.Sub_Query columns context as_name ->
sub = generate_query dialect (IR.Select columns context)
From_Spec.Sub_Query columns context as_name ->
sub = generate_query dialect (Query.Select columns context)
sub.paren ++ alias dialect as_name

## PRIVATE
@ -247,8 +249,8 @@ generate_order dialect order_descriptor =
Sort_Direction.Descending -> " DESC"
nulls_suffix = case order_descriptor.nulls_order of
Nothing -> ""
IR.Nulls_First -> " NULLS FIRST"
IR.Nulls_Last -> " NULLS LAST"
Nulls_Order.First -> " NULLS FIRST"
Nulls_Order.Last -> " NULLS LAST"
collation = case order_descriptor.collation of
Nothing -> ""
collation_name -> ' COLLATE "' + collation_name + '"'
@ -263,7 +265,7 @@ generate_order dialect order_descriptor =
Arguments:
- dialect: The SQL dialect for which the code is being generated.
- ctx: A description of the SELECT clause.
generate_select_context : Internal_Dialect -> IR.Context -> Builder
generate_select_context : Internal_Dialect -> Context -> Builder
generate_select_context dialect ctx =
gen_exprs exprs = exprs.map (generate_expression dialect)
from_part = generate_from_part dialect ctx.from_spec
@ -301,15 +303,15 @@ generate_insert_query dialect table_name pairs =
Arguments:
- dialect: The SQL dialect for which the code is being generated.
- query: An IR describing the query.
generate_query : Internal_Dialect -> IR.Query -> Builder
generate_query : Internal_Dialect -> Query -> Builder
generate_query dialect query = case query of
IR.Select columns ctx ->
Query.Select columns ctx ->
gen_column pair = (generate_expression dialect pair.second) ++ alias dialect pair.first
cols = SQL.join ", " (columns.map gen_column)
code "SELECT " ++ cols ++ generate_select_context dialect ctx
IR.Select_All ctx ->
Query.Select_All ctx ->
code "SELECT * " ++ generate_select_context dialect ctx
IR.Insert table_name pairs ->
Query.Insert table_name pairs ->
generate_insert_query dialect table_name pairs
_ -> Error.throw <| Unsupported_Database_Operation_Error "Unsupported query type."
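
End to end, a hedged sketch of rendering a whole query (the table and column names, and the resulting SQL text, are illustrative):

    example_full_query =
        ctx = Context.for_table "Users"
        query = Query.Select [["id", Expression.Column "Users" "id"]] ctx
        # Roughly: SELECT "Users"."id" AS "id" FROM "Users" AS "Users"
        generate_query base_dialect query
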

@ -1,7 +1,7 @@
from Standard.Base import all

from Standard.Database.Data.Table import Table
from Standard.Database.Data.Column import Column
import project.Data.Table.Table
import project.Data.Column.Column

polyglot java import java.util.regex.Pattern

@ -36,10 +36,6 @@ unify_vector_singleton x = case x of
_ : Vector.Vector -> x
_ -> [x]

# TODO Dubious constructor export
from project.Internal.Helpers.Unsupported_Name_Error import all
from project.Internal.Helpers.Unsupported_Name_Error export all

## UNSTABLE

Signals that a name for a column or table is not supported.
@ -50,8 +46,8 @@ from project.Internal.Helpers.Unsupported_Name_Error export all
Currently the names can only include ASCII letters, numbers and the
underscore. This is a temporary limitation simplifying name handling. It will
be removed in a future version.
type Unsupported_Name_Error
Unsupported_Name_Error_Data text
type Unsupported_Name
Error text

## PRIVATE

@ -77,5 +73,5 @@ ensure_name_is_sane name =
is_safe =
Pattern.matches "[A-Za-z_0-9]+" name
if is_safe then True else
Error.throw <| Unsupported_Name_Error_Data (name + " is not a valid name for a column. Please use English letters, numbers and underscore only.")
Error.throw <| Unsupported_Name.Error (name + " is not a valid name for a column. Please use English letters, numbers and underscore only.")
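
For illustration (a sketch; behavior follows the definition above):

    example_name_checks =
        ensure_name_is_sane "total_2022"   # returns True
        ensure_name_is_sane "total price"  # dataflow error: Unsupported_Name.Error
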

@ -1,451 +0,0 @@
from Standard.Base import all
from Standard.Database.Data.SQL import SQL_Type

# TODO Dubious constructor export
from project.Internal.IR.Expression import all
from project.Internal.IR.Expression export all

## PRIVATE

The internal representation of an SQL expression which can be a column
reference, an interpolated constant or an operation that combines other
expressions.
type Expression

## PRIVATE

The internal representation of an SQL expression that gets a value from a
column.

Arguments:
- origin: the name of the entity (table or sub-query) that this column
originates from, it corresponds to the `alias` field in `from_spec`.
- name: the name of the column directly in the table or its alias in a
sub-query.
Column (origin : Text) (name : Text)

## PRIVATE

The internal representation of an SQL expression which is a constant to
be interpolated when building the query.

Arguments:
- sql_type: The SQL type that this object is going to be serialized to.
It is usually inferred from the expression's context.
- value: the value to be interpolated; it should be a simple Number, Text
or other types that are serializable for JDBC.
Constant (sql_type : SQL_Type) (value : Any)

## PRIVATE

The internal representation of an SQL expression built from an operation
and sub-expressions.

Arguments:
- kind: the name of the operation, these can be both functions or infix
operators, the actual implementation is determined by a specific
dialect.
- expressions: a list of expressions which are arguments to the operation;
different operations support different numbers of arguments.
Operation (kind : Text) (expressions : Vector Expression)

# TODO Dubious constructor export
from project.Internal.IR.Internal_Column import all
from project.Internal.IR.Internal_Column export all

type Internal_Column
## PRIVATE

An internal column structure.

Arguments:
- name: The column name.
- sql_type: The SQL type of the column.
- expression: An expression for applying to the column.
Internal_Column_Data name sql_type expression

## PRIVATE

Rename the internal column.

Arguments:
- new_name: The new name for the column.
rename : Text -> Internal_Column
rename self new_name = Internal_Column_Data new_name self.sql_type self.expression

# TODO Dubious constructor export
from project.Internal.IR.Context import all
from project.Internal.IR.Context export all

## PRIVATE

A context associated with an SQL query.
type Context

## PRIVATE

A context associated with an SQL query.

The expressions can only be computed in a context which specifies from where
their columns come and set filters and other settings for processing the
query.

Arguments:
- from_spec: the sources for the query, see `From_Spec` for more
details.
- where_filters: a list of expressions for filtering - only the rows
for which these expressions evaluate to true are included in the
result.
- orders: a list of ordering expressions, for each entry an ORDER BY
clause is added.
- groups: a list of grouping expressions, for each entry a GROUP BY is
added, the resulting query can then directly include only the
grouped-by columns or aggregate expressions.
- meta_index: a list of internal columns to use for joining or grouping.
- limit: an optional maximum number of elements that the query should
return.
Context_Data (from_spec : From_Spec) (where_filters : Vector Expression) (orders : Vector Order_Descriptor) (groups : Vector Expression) (meta_index : Vector Internal_Column) (limit : Nothing | Integer)

## PRIVATE

Returns a copy of the context with changed `meta_index`.

Arguments:
- new_index: The new index to set in the query.
set_index : Vector Internal_Column -> Context
set_index self new_index =
Context_Data self.from_spec self.where_filters self.orders self.groups new_index self.limit

## PRIVATE

Returns a copy of the context with changed `where_filters`.

Arguments:
- new_filters: The new filters to set in the query.
set_where_filters : Vector Expression -> Context
set_where_filters self new_filters =
Context_Data self.from_spec new_filters self.orders self.groups self.meta_index self.limit

## PRIVATE

Returns a copy of the context with changed `orders`.

Arguments:
- new_orders: The new ordering clauses to set in the query.
set_orders : Vector Order_Descriptor -> Context
set_orders self new_orders =
Context_Data self.from_spec self.where_filters new_orders self.groups self.meta_index self.limit

## PRIVATE

Returns a copy of the context with added `orders`.

The orderings are modified in such a way that the newly added ordering
takes precedence, but if any orderings were already present they are also
taken into account to break ties within the new ordering.

In practice this means that the old orderings are preserved, but the new
ones are added to the beginning of the list so that they take precedence.

Arguments:
- new_orders: The new ordering clauses to add to the query.
add_orders : Vector Order_Descriptor -> Context
add_orders self new_orders =
Context_Data self.from_spec self.where_filters new_orders+self.orders self.groups self.meta_index self.limit

## PRIVATE

Returns a copy of the context with changed `groups`.

Arguments:
- new_groups: The new grouping clauses to set in the query.
set_groups : Vector Expression -> Context
set_groups self new_groups =
Context_Data self.from_spec self.where_filters self.orders new_groups self.meta_index self.limit

## PRIVATE

Returns a copy of the context with changed `limit`.

Arguments:
- new_limit: The new limit to set in the query.
set_limit : (Nothing | Integer) -> Context
set_limit self new_limit =
Context_Data self.from_spec self.where_filters self.orders self.groups self.meta_index new_limit

## PRIVATE

'Lifts' this context into a subquery, so that the original context (with all filters etc.) is
encapsulated within the subquery and all external references passed as the second argument
refer directly to that subquery.

It takes a list of lists of columns that should be included in that subquery (this can for
example be the list of regular columns, the list of indices etc.)
It assumes that columns on these lists all have unique names.

It returns a new context and the lists transformed in such a way that each column corresponds
to one from the original list but it is valid in the new context.

This is useful as a preprocessing step between combining queries, for example in a join.
# as_subquery : Text -> Vector (Vector Internal_Column) -> [IR.Sub_Query, Vector (Vector Internal_Column)]
as_subquery : Text -> Vector Any -> Vector
as_subquery self alias column_lists =
rewrite_internal_column : Internal_Column -> Internal_Column
rewrite_internal_column column =
Internal_Column_Data column.name column.sql_type (IR.Column alias column.name)

new_columns = column_lists.map columns->
columns.map rewrite_internal_column

encapsulated_columns = column_lists.flat_map columns->
columns.map column-> [column.name, column.expression]
new_from = IR.Sub_Query encapsulated_columns self alias

[new_from, new_columns]


# TODO Dubious constructor export
from project.Internal.IR.From_Spec import all
from project.Internal.IR.From_Spec export all

## PRIVATE

Used as part of the context, specifies the sources of the query.
type From_Spec
## PRIVATE

A query source that takes data directly from a table in the
database.

Arguments:
- table_name: the full name identifying the table in the database.
- alias: the name upon which the table can be referred to in other
parts of the query, this is especially useful for example in
self-joins, allowing to differentiate between different instances of
the same table.
From_Table (table_name : Text) (alias : Text)

## PRIVATE

A query source that holds a raw SQL statement.

Arguments:
- raw_sql: the raw SQL code of a query used as a source for this table.
- alias: the name upon which the table can be referred to in other
parts of the query, this is especially useful for example in
self-joins, allowing to differentiate between different instances of
the same table.
From_Query (raw_sql : Text) (alias : Text)

## PRIVATE

A query source that performs a join operation on two sources.

Arguments:
- kind: the kind of the join.
- left_spec: the left data source to be joined.
- right_spec: the right data source to be joined.
- on: a list of expressions that will be used as join conditions, these
are usually equalities between expressions from the left and right
sources.
Join (kind : Join_Kind) (left_spec : From_Spec) (right_spec : From_Spec) (on : Vector Expression)

## PRIVATE

A query source consisting of a sub-query.

Arguments:
- columns: a list of pairs; each first element is the alias for the
column created in the sub-query and the second element is the
expression to be computed.
- context: the context for the sub-query.
- alias: the name upon which the results of this sub-query can be
referred to in other parts of the query.
Sub_Query (columns : Vector (Pair Text Expression)) (context : Context) (alias : Text)

# TODO Dubious constructor export
from project.Internal.IR.Join_Kind import all
from project.Internal.IR.Join_Kind export all

## PRIVATE

The kind of the join operation.
type Join_Kind
## PRIVATE

Inner join.

The result will contain only rows that had a match in both the left and
right source.
Join_Inner

## PRIVATE

Left join.

The result will contain all rows from the left source. Some of them may
be duplicated if they have multiple matches on the right. If a row from
the left source has no match on the right, it will be present exactly
once in the result and the fields corresponding to the right source will
be set to NULL.
Join_Left

## PRIVATE

Right join.

The result will contain all rows from the right source. Some of them may
be duplicated if they have multiple matches on the left. If a row from
the right source has no match on the left, it will be present exactly
once in the result and the fields corresponding to the left source will
be set to NULL.
Join_Right

## PRIVATE

Cross join.

The result will contain a cross product of rows from the left source with
the right source. Its `on` list should be empty; instead, `where_filters`
in the query can be used to filter the results.
Join_Cross

# TODO Dubious constructor export
from project.Internal.IR.Order_Descriptor import all
from project.Internal.IR.Order_Descriptor export all

## PRIVATE
type Order_Descriptor
Order_Descriptor_Data (expression : Expression) (direction : Sort_Direction) (nulls_order : Nothing | Nulls_Order = Nothing) (collation : Nothing | Text = Nothing)

# TODO Dubious constructor export
from project.Internal.IR.Nulls_Order import all
from project.Internal.IR.Nulls_Order export all

## PRIVATE

Specifies how null values should be handled within the ordering.
type Nulls_Order

## PRIVATE

Null values are included before any other values in the ordering.
Nulls_First

## PRIVATE

Null values are included after all other values in the ordering.
Nulls_Last

# TODO Dubious constructor export
from project.Internal.IR.Query import all
from project.Internal.IR.Query export all

## PRIVATE

Describes a full SQL query.
type Query

## PRIVATE

A Select SQL query.

Arguments:
- expressions: List of pairs specifying the columns to materialize; each
is a pair whose first element is the name of the materialized column
and the second element is the expression to compute.
- context: The query context, see `Context` for more detail.
Select (expressions : Vector (Pair Text Expression)) (context : Context)

## PRIVATE

A Select SQL query that gets all columns in a table.

Arguments:
- context: The query context, see `Context` for more detail.
Select_All context

## PRIVATE

An Insert SQL query that inserts a single row to the table.

Arguments:
- table_name: The name of the table to insert to.
- pairs: A list of pairs consisting of a column name and an expression.
Insert table_name pairs

## PRIVATE

Creates a query context that just fetches data from a table, without any
additional processing.

Arguments:
- table_name: The name of the table for which the context is being created.
- alias: An alias name to use for the table within the query.
context_for_table : Text -> Text -> Context
context_for_table table_name alias=table_name =
Context_Data (From_Table table_name alias) [] [] [] [] Nothing

## PRIVATE

Creates a query context that runs a raw SQL query.

Arguments:
- raw_sql: The raw SQL code of the query for which the context is being created.
- alias: An alias name to use for the table within the query.
context_for_query : Text -> Text -> Context
context_for_query raw_sql alias =
Context_Data (From_Query raw_sql alias) [] [] [] [] Nothing

## PRIVATE

Creates a Context from a Sub_Query.

Arguments:
- subquery: The subquery to lift into a context.
subquery_as_ctx : Sub_Query -> Context
subquery_as_ctx subquery =
Context_Data subquery [] [] [] [] Nothing

## PRIVATE

Creates an expression which is a simple constant to be interpolated.

Arguments:
- sql_type: The SQL type of the value.
- x: The value to turn into a constant.
make_constant : SQL_Type -> Any -> Expression
make_constant sql_type x =
Constant sql_type x

## PRIVATE

A helper function to substitute table names inside expressions.

Arguments:
- old_origin: The old table name.
- new_origin: The new table name.
- expr: The expression in which the substitution should be performed.

It is used for example when renaming a table during a join.
substitute_origin : Text -> Text -> Expression -> Expression
substitute_origin old_origin new_origin expr = case expr of
Column origin name ->
if origin == old_origin then Column new_origin name else expr
Constant _ _ -> expr
Operation kind exprs ->
Operation kind (exprs.map (substitute_origin old_origin new_origin))

## PRIVATE

Lifts a function mapping expressions into a function mapping internal columns
which applies the original function to their expressions, leaving other
fields as-is.

Arguments:
- f: The function to map over expressions.
- col: The column over which to apply `f`.
lift_expression_map : (Expression -> Expression) -> Internal_Column -> Internal_Column
lift_expression_map f col =
Internal_Column_Data col.name col.sql_type (f col.expression)
@ -0,0 +1,164 @@
from Standard.Base import all

import project.Internal.IR.Expression.Expression
import project.Internal.IR.From_Spec.From_Spec
import project.Internal.IR.Internal_Column.Internal_Column
import project.Internal.IR.Order_Descriptor.Order_Descriptor

## PRIVATE

A context associated with an SQL query.
type Context
## PRIVATE

Creates a query context that just fetches data from a table, without any
additional processing.

Arguments:
- table_name: The name of the table for which the context is being created.
- alias: An alias name to use for the table within the query.
for_table : Text -> Text -> Context
for_table table_name alias=table_name =
Context.Value (From_Spec.From_Table table_name alias) [] [] [] [] Nothing

## PRIVATE

Creates a query context that runs a raw SQL query.

Arguments:
- raw_sql: The raw SQL code of the query for which the context is being created.
- alias: An alias name to use for the table within the query.
for_query : Text -> Text -> Context
for_query raw_sql alias =
Context.Value (From_Spec.From_Query raw_sql alias) [] [] [] [] Nothing

## PRIVATE

Creates a Context from a Sub_Query.

Arguments:
- subquery: The subquery to lift into a context.
for_subquery : Sub_Query -> Context
for_subquery subquery =
Context.Value subquery [] [] [] [] Nothing

## PRIVATE

A context associated with an SQL query.

The expressions can only be computed in a context which specifies from where
their columns come and set filters and other settings for processing the
query.

Arguments:
- from_spec: the sources for the query, see `From_Spec` for more
details.
- where_filters: a list of expressions for filtering - only the rows
for which these expressions evaluate to true are included in the
result.
- orders: a list of ordering expressions, for each entry an ORDER BY
clause is added.
- groups: a list of grouping expressions, for each entry a GROUP BY is
added, the resulting query can then directly include only the
grouped-by columns or aggregate expressions.
- meta_index: a list of internal columns to use for joining or grouping.
- limit: an optional maximum number of elements that the query should
return.
Value (from_spec : From_Spec) (where_filters : Vector Expression) (orders : Vector Order_Descriptor) (groups : Vector Expression) (meta_index : Vector Internal_Column) (limit : Nothing | Integer)

## PRIVATE

Returns a copy of the context with changed `meta_index`.

Arguments:
- new_index: The new index to set in the query.
set_index : Vector Internal_Column -> Context
set_index self new_index =
Context.Value self.from_spec self.where_filters self.orders self.groups new_index self.limit

## PRIVATE

Returns a copy of the context with changed `where_filters`.

Arguments:
- new_filters: The new filters to set in the query.
set_where_filters : Vector Expression -> Context
set_where_filters self new_filters =
Context.Value self.from_spec new_filters self.orders self.groups self.meta_index self.limit

## PRIVATE

Returns a copy of the context with changed `orders`.

Arguments:
- new_orders: The new ordering clauses to set in the query.
set_orders : Vector Order_Descriptor -> Context
set_orders self new_orders =
Context.Value self.from_spec self.where_filters new_orders self.groups self.meta_index self.limit

## PRIVATE

Returns a copy of the context with added `orders`.

The orderings are modified in such a way that the newly added ordering
takes precedence, but if any orderings were already present they are also
taken into account to break ties within the new ordering.

In practice this means that the old orderings are preserved, but the new
ones are added to the beginning of the list so that they take precedence.

Arguments:
- new_orders: The new ordering clauses to add to the query.
add_orders : Vector Order_Descriptor -> Context
add_orders self new_orders =
Context.Value self.from_spec self.where_filters new_orders+self.orders self.groups self.meta_index self.limit

## PRIVATE

Returns a copy of the context with changed `groups`.

Arguments:
- new_groups: The new grouping clauses to set in the query.
set_groups : Vector Expression -> Context
set_groups self new_groups =
Context.Value self.from_spec self.where_filters self.orders new_groups self.meta_index self.limit

## PRIVATE

Returns a copy of the context with changed `limit`.

Arguments:
- new_limit: The new limit to set in the query.
set_limit : (Nothing | Integer) -> Context
set_limit self new_limit =
Context.Value self.from_spec self.where_filters self.orders self.groups self.meta_index new_limit

## PRIVATE

'Lifts' this context into a subquery, so that the original context (with all filters etc.) is
encapsulated within the subquery and all external references passed as the second argument
refer directly to that subquery.

It takes a list of lists of columns that should be included in that subquery (this can for
example be the list of regular columns, the list of indices etc.)
It assumes that columns on these lists all have unique names.

It returns a new context and the lists transformed in such a way that each column corresponds
to one from the original list but it is valid in the new context.

This is useful as a preprocessing step between combining queries, for example in a join.
# as_subquery : Text -> Vector (Vector Internal_Column) -> [From_Spec.Sub_Query, Vector (Vector Internal_Column)]
as_subquery : Text -> Vector Any -> Vector
as_subquery self alias column_lists =
rewrite_internal_column : Internal_Column -> Internal_Column
rewrite_internal_column column =
Internal_Column.Value column.name column.sql_type (Expression.Column alias column.name)

new_columns = column_lists.map columns->
columns.map rewrite_internal_column

encapsulated_columns = column_lists.flat_map columns->
columns.map column-> [column.name, column.expression]
new_from = From_Spec.Sub_Query encapsulated_columns self alias

[new_from, new_columns]
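
As a sketch of the data flow (hypothetical names; `ctx` is an existing `Context` and `cols` its columns):

    example_lift ctx cols =
        setup = ctx.as_subquery "t1" [cols]
        new_ctx = Context.for_subquery setup.first
        new_cols = setup.second.first
        # `new_cols` now reference columns of the subquery aliased as "t1",
        # while `new_ctx` wraps the original query as its FROM clause.
        [new_ctx, new_cols]
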
@ -0,0 +1,46 @@
from Standard.Base import all

import project.Data.SQL_Type.SQL_Type

## PRIVATE

The internal representation of an SQL expression which can be a column
reference, an interpolated constant or an operation that combines other
expressions.
type Expression
## PRIVATE

The internal representation of an SQL expression that gets a value from a
column.

Arguments:
- origin: the name of the entity (table or sub-query) that this column
originates from, it corresponds to the `alias` field in `from_spec`.
- name: the name of the column directly in the table or its alias in a
sub-query.
Column (origin : Text) (name : Text)

## PRIVATE

The internal representation of an SQL expression which is a constant to
be interpolated when building the query.

Arguments:
- sql_type: The SQL type that this object is going to be serialized to.
It is usually inferred from the expression's context.
- value: the value to be interpolated; it should be a simple Number, Text
or other types that are serializable for JDBC.
Constant (sql_type : SQL_Type) (value : Any)

## PRIVATE

The internal representation of an SQL expression built from an operation
and sub-expressions.

Arguments:
- kind: the name of the operation, these can be both functions or infix
operators, the actual implementation is determined by a specific
dialect.
- expressions: a list of expressions which are arguments to the operation;
different operations support different numbers of arguments.
Operation (kind : Text) (expressions : Vector Expression)
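
For orientation, a small sketch of how these constructors compose (the table alias "T" and the column name "price" are illustrative):

    # Represents `"T"."price" * 0.9` once rendered by a dialect.
    example_expression =
        Expression.Operation "*" [Expression.Column "T" "price", Expression.Constant SQL_Type.double 0.9]
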
@ -0,0 +1,60 @@
from Standard.Base import all

import project.Internal.IR.Context.Context
import project.Internal.IR.Expression.Expression
import project.Internal.IR.Join_Kind.Join_Kind

## PRIVATE

Used as part of the context, specifies the sources of the query.
type From_Spec
## PRIVATE

A query source that takes data directly from a table in the
database.

Arguments:
- table_name: the full name identifying the table in the database.
- alias: the name upon which the table can be referred to in other
parts of the query, this is especially useful for example in
self-joins, allowing to differentiate between different instances of
the same table.
From_Table (table_name : Text) (alias : Text)

## PRIVATE

A query source that holds a raw SQL statement.

Arguments:
- raw_sql: the raw SQL code of a query used as a source for this table.
- alias: the name upon which the table can be referred to in other
parts of the query, this is especially useful for example in
self-joins, allowing to differentiate between different instances of
the same table.
From_Query (raw_sql : Text) (alias : Text)

## PRIVATE

A query source that performs a join operation on two sources.

Arguments:
- kind: the kind of the join.
- left_spec: the left data source to be joined.
- right_spec: the right data source to be joined.
- on: a list of expressions that will be used as join conditions, these
are usually equalities between expressions from the left and right
sources.
Join (kind : Join_Kind) (left_spec : From_Spec) (right_spec : From_Spec) (on : Vector Expression)

## PRIVATE

A query source consisting of a sub-query.

Arguments:
- columns: a list of pairs; each first element is the alias for the
column created in the sub-query and the second element is the
expression to be computed.
- context: the context for the sub-query.
- alias: the name upon which the results of this sub-query can be
referred to in other parts of the query.
Sub_Query (columns : Vector (Pair Text Expression)) (context : Context) (alias : Text)
@ -0,0 +1,24 @@
from Standard.Base import all

import project.Data.SQL_Type.SQL_Type
import project.Internal.IR.Expression.Expression

type Internal_Column
## PRIVATE

An internal column structure.

Arguments:
- name: The column name.
- sql_type: The SQL type of the column.
- expression: An expression for applying to the column.
Value name:Text sql_type:SQL_Type expression:Expression

## PRIVATE

Rename the internal column.

Arguments:
- new_name: The new name for the column.
rename : Text -> Internal_Column
rename self new_name = Internal_Column.Value new_name self.sql_type self.expression
@ -0,0 +1,44 @@
from Standard.Base import all

## PRIVATE

The kind of the join operation.
type Join_Kind
## PRIVATE

Inner join.

The result will contain only rows that had a match in both the left and
right source.
Inner

## PRIVATE

Left join.

The result will contain all rows from the left source. Some of them may
be duplicated if they have multiple matches on the right. If a row from
the left source has no match on the right, it will be present exactly
once in the result and the fields corresponding to the right source will
be set to NULL.
Left

## PRIVATE

Right join.

The result will contain all rows from the right source. Some of them may
be duplicated if they have multiple matches on the left. If a row from
the right source has no match on the left, it will be present exactly
once in the result and the fields corresponding to the left source will
be set to NULL.
Right

## PRIVATE

Cross join.

The result will contain a cross product of rows from the left source with
the right source. Its `on` list should be empty; instead, `where_filters`
in the query can be used to filter the results.
Cross
@ -0,0 +1,16 @@
from Standard.Base import all

## PRIVATE

Specifies how null values should be handled within the ordering.
type Nulls_Order

## PRIVATE

Null values are included before any other values in the ordering.
First

## PRIVATE

Null values are included after all other values in the ordering.
Last
@ -0,0 +1,8 @@
from Standard.Base import all

import project.Internal.IR.Expression.Expression
import project.Internal.IR.Nulls_Order.Nulls_Order

## PRIVATE
type Order_Descriptor
Value (expression : Expression) (direction : Sort_Direction) (nulls_order : Nothing | Nulls_Order = Nothing) (collation : Nothing | Text = Nothing)
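A hedged construction sketch (`column` is a hypothetical Internal_Column):

    descriptor = Order_Descriptor.Value column.expression Sort_Direction.Ascending nulls_order=Nulls_Order.First collation=Nothing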
@ -0,0 +1,37 @@
from Standard.Base import all

import project.Internal.IR.Context.Context
import project.Internal.IR.Expression.Expression

## PRIVATE

Describes a full SQL query.
type Query

## PRIVATE

A Select SQL query.

Arguments:
- expressions: List of pairs specifying the columns to materialize; each
is a pair whose first element is the name of the materialized column
and the second element is the expression to compute.
- context: The query context, see `Context` for more detail.
Select (expressions : Vector (Pair Text Expression)) (context : Context)

## PRIVATE

A Select SQL query that gets all columns in a table.

Arguments:
- context: The query context, see `Context` for more detail.
Select_All context

## PRIVATE

An Insert SQL query that inserts a single row to the table.

Arguments:
- table_name: The name of the table to insert to.
- pairs: A list of pairs consisting of a column name and an expression.
Insert table_name pairs
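A hedged sketch of assembling these variants (the context and expressions are placeholders):

    select = Query.Select [Pair_Data "name" name_expression] context
    insert = Query.Insert "people" [Pair_Data "name" name_expression]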
@ -1,15 +1,18 @@
from Standard.Base import all

import Standard.Base.Runtime.Resource
from Standard.Base.Runtime.Resource import Managed_Resource

import Standard.Database.Data.SQL
from Standard.Database.Data.SQL import SQL_Type, SQL_Type_Data, Statement, Statement_Data
from Standard.Database.Errors import SQL_Error, SQL_Error_Data, SQL_Timeout_Error, SQL_Timeout_Error_Data
import Standard.Database.Internal.Base_Generator
import Standard.Table.Data.Storage.Storage
import Standard.Database.Data.Table as Database_Table
import Standard.Table.Data.Table as Materialized_Table
import Standard.Table.Data.Table.Table as Materialized_Table

import project.Data.SQL
import project.Data.SQL_Statement.SQL_Statement
import project.Data.SQL_Type.SQL_Type
import project.Internal.Base_Generator

import project.Data.Table.Table as Database_Table

from project.Errors import SQL_Error, SQL_Error_Data, SQL_Timeout_Error, SQL_Timeout_Error_Data

polyglot java import java.util.Properties

@ -20,13 +23,9 @@ polyglot java import java.sql.SQLTimeoutException

polyglot java import org.enso.database.JDBCProxy

# TODO Dubious constructor export
from project.Internal.JDBC_Connection.JDBC_Connection import all
from project.Internal.JDBC_Connection.JDBC_Connection export all

type JDBC_Connection
## PRIVATE
JDBC_Connection_Data connection_resource
Value connection_resource

## PRIVATE
Closes the connection releasing the underlying database resources
@ -61,7 +60,7 @@ type JDBC_Connection

Runs the provided action with a prepared statement, adding contextual
information to any thrown SQL errors.
with_prepared_statement : Text | Statement -> (PreparedStatement -> Any) -> Any
with_prepared_statement : Text | SQL_Statement -> (PreparedStatement -> Any) -> Any
with_prepared_statement self query action =
prepare template holes = self.connection_resource.with java_connection->
stmt = java_connection.prepareStatement template
@ -76,7 +75,7 @@ type JDBC_Connection

case query of
_ : Text -> go query []
Statement_Data _ ->
SQL_Statement.Value _ ->
compiled = query.prepare
go compiled.first compiled.second

@ -84,7 +83,7 @@ type JDBC_Connection

Given a prepared statement, gets the column names and types for the
result set.
fetch_columns : Text | Statement -> Any
fetch_columns : Text | SQL_Statement -> Any
fetch_columns self statement =
self.with_prepared_statement statement stmt->
metadata = stmt.executeQuery.getMetaData
@ -93,7 +92,7 @@ type JDBC_Connection
name = metadata.getColumnName ix+1
typeid = metadata.getColumnType ix+1
typename = metadata.getColumnTypeName ix+1
[name, SQL_Type_Data typeid typename]
[name, SQL_Type.Value typeid typename]

Vector.new metadata.getColumnCount resolve_column

@ -101,7 +100,7 @@ type JDBC_Connection

Given an insert query template and the associated Database_Table, and a
Materialized_Table of data, load to the database.
load_table : Text -> Database_Table.Table -> Materialized_Table.Table -> Integer -> Nothing
load_table : Text -> Database_Table -> Materialized_Table -> Integer -> Nothing
load_table self insert_template db_table table batch_size =
db_types = db_table.internal_columns.map .sql_type
self.with_connection java_connection->
@ -142,7 +141,7 @@ create url properties = handle_sql_errors <|
java_connection = JDBCProxy.getConnection url java_props

resource = Managed_Resource.register java_connection close_connection
JDBC_Connection_Data resource
JDBC_Connection.Value resource

## PRIVATE

@ -182,7 +181,7 @@ set_statement_values stmt holes =

## PRIVATE
Given a Materialized_Table, create a SQL statement to build the table.
create_table_statement : Text -> Materialized_Table.Table -> Boolean -> Statement
create_table_statement : Text -> Materialized_Table -> Boolean -> SQL_Statement
create_table_statement name table temporary =
column_types = table.columns.map col-> default_storage_type col.storage_type
column_names = table.columns.map .name
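A hedged usage sketch of the prepared-statement helper (the connection and query text are placeholders; the `stmt` methods come from `java.sql.PreparedStatement`):

    row_count = connection.with_prepared_statement "SELECT COUNT(*) FROM my_table" stmt->
        rs = stmt.executeQuery
        rs.next
        rs.getInt 1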
@ -35,13 +35,9 @@ read host port database username=Nothing =
Nothing -> []
entry -> [Pair_Data 'user' entry.username, Pair_Data 'password' entry.password]

# TODO Dubious constructor export
from project.Internal.Postgres.Pgpass.Pgpass_Entry import all
from project.Internal.Postgres.Pgpass.Pgpass_Entry export all

type Pgpass_Entry
## PRIVATE
Pgpass_Entry_Data host port database username password
Value host port database username password

## PRIVATE
matches : Text -> Text|Integer -> Text -> Text -> Boolean
@ -88,7 +84,7 @@ parse_file file =
if line.starts_with "#" || line.is_empty then Nothing else
elements = parse_line line
if elements.length != 5 then Nothing else
Pgpass_Entry_Data (elements.at 0) (elements.at 1) (elements.at 2) (elements.at 3) (elements.at 4)
Pgpass_Entry.Value (elements.at 0) (elements.at 1) (elements.at 2) (elements.at 3) (elements.at 4)

File.read_text file . lines . map parse . filter (x -> x.is_nothing.not)
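A hedged sketch of the entry type in use (all values are made up):

    entry = Pgpass_Entry.Value "localhost" 5432 "mydb" "alice" "secret"
    entry.matches "localhost" 5432 "mydb" "alice"    # expected True, assuming exact-match semantics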
@ -1,22 +1,18 @@
from Standard.Base import all

from Standard.Database.Data.SQL import SQL_Type, Statement
import Standard.Database.Data.SQL_Query
import Standard.Table.Data.Table.Table as Materialized_Table

import Standard.Database.Internal.JDBC_Connection
import Standard.Database.Data.Dialect
from Standard.Database.Connection.Connection import Connection_Data
import project.Connection.Connection.Connection
import project.Data.Dialect
import project.Data.SQL_Query.SQL_Query
import project.Data.SQL_Statement.SQL_Statement
import project.Data.SQL_Type.SQL_Type
import project.Data.Table.Table as Database_Table
import project.Internal.JDBC_Connection

import Standard.Database.Data.Table as Database_Table
import Standard.Table.Data.Table as Materialized_Table

from project.Internal.Result_Set import read_column

from Standard.Database.Errors import SQL_Error

from Standard.Database.Internal.Result_Set import read_column

# TODO Dubious constructor export
from project.Internal.Postgres.Postgres_Connection.Postgres_Connection import all
from project.Internal.Postgres.Postgres_Connection.Postgres_Connection export all
from project.Errors import SQL_Error

type Postgres_Connection

@ -27,7 +23,7 @@ type Postgres_Connection
Arguments:
- connection: the underlying connection.
- make_new: a function that returns a new connection.
Postgres_Connection_Data connection make_new
Value connection make_new

## Closes the connection releasing the underlying database resources
immediately instead of waiting for them to be automatically released.
@ -85,7 +81,7 @@ type Postgres_Connection
- schema: The schema name to search in (defaults to current schema).
- types: The table types to search for. The list of values can be obtained using the `table_types` method.
- all_fields: Return all the fields in the metadata table.
tables : Text -> Text -> Text -> Vector -> Boolean -> Materialized_Table.Table
tables : Text -> Text -> Text -> Vector -> Boolean -> Materialized_Table
tables self name_like=Nothing database=self.database schema=self.schema types=Nothing all_fields=False =
self.connection.tables name_like database schema types all_fields

@ -95,7 +91,7 @@ type Postgres_Connection
- query: name of the table or sql statement to query.
If supplied as `Text`, the name is checked against the `tables` list to determine if it is a table or a query.
- alias: optionally specify a friendly alias for the query.
query : Text | SQL_Query -> Text -> Database_Table.Table
query : Text | SQL_Query -> Text -> Database_Table
query self query alias="" = self.connection.query query alias

## Execute the query and load the results into memory as a Table.
@ -104,12 +100,12 @@ type Postgres_Connection
- query: name of the table or sql statement to query.
If supplied as `Text`, the name is checked against the `tables` list to determine if it is a table or a query.
- limit: the maximum number of rows to return.
read : Text | SQL_Query -> Integer | Nothing -> Materialized_Table.Table
read : Text | SQL_Query -> Integer | Nothing -> Materialized_Table
read self query limit=Nothing = self.connection.read query limit

## PRIVATE
Internal read function for a statement with optional types.
read_statement : Statement -> (Nothing | Vector SQL_Type) -> Materialized_Table.Table
read_statement : SQL_Statement -> (Nothing | Vector SQL_Type) -> Materialized_Table
read_statement self statement expected_types=Nothing =
self.connection.read_statement statement expected_types

@ -120,9 +116,9 @@ type Postgres_Connection
returns 0 for other types of queries (like creating or altering tables).

Arguments:
- query: either raw SQL code as Text or an instance of Statement
- query: either raw SQL code as Text or an instance of SQL_Statement
representing the query to execute.
execute_update : Text | Statement -> Integer
execute_update : Text | SQL_Statement -> Integer
execute_update self query =
self.connection.execute_update query

@ -143,7 +139,7 @@ type Postgres_Connection
usually not be visible to other connections.
- batch_size: Specifies how many rows should be uploaded in a single
batch.
upload_table : Text -> Materialized_Table.Table -> Boolean -> Integer -> Database_Table.Table
upload_table : Text -> Materialized_Table -> Boolean -> Integer -> Database_Table
upload_table self name table temporary=True batch_size=1000 = Panic.recover Illegal_State_Error <|
self.connection.upload_table name table temporary batch_size

@ -158,4 +154,4 @@ type Postgres_Connection
create : Text -> Vector -> (Text -> Text -> Postgres_Connection) -> Postgres_Connection
create url properties make_new =
jdbc_connection = JDBC_Connection.create url properties
Postgres_Connection_Data (Connection_Data jdbc_connection Dialect.postgres) make_new
Postgres_Connection.Value (Connection.Value jdbc_connection Dialect.postgres) make_new
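A hedged usage sketch of the public surface (the connection and table name are placeholders, and `SQL_Query.Table_Name` is assumed from the `SQL_Query` exports above):

    table = connection.query (SQL_Query.Table_Name "customers")
    preview = connection.read "SELECT * FROM customers" limit=100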
@ -1,24 +1,26 @@
from Standard.Base import all hiding First, Last

import Standard.Base.Error.Common as Errors

from Standard.Table.Data.Aggregate_Column.Aggregate_Column import all
from Standard.Database.Data.SQL import SQL_Type, Statement, code
import Standard.Database.Data.SQL
import Standard.Database.Data.Dialect
import Standard.Database.Internal.Base_Generator
import Standard.Database.Internal.IR
from Standard.Database.Errors import Unsupported_Database_Operation_Error

import project.Data.SQL
import project.Data.SQL_Statement.SQL_Statement
import project.Data.SQL_Type.SQL_Type
import project.Internal.Base_Generator
import project.Internal.IR.Expression.Expression
import project.Internal.IR.Internal_Column.Internal_Column
import project.Internal.IR.Order_Descriptor.Order_Descriptor
import project.Internal.IR.Nulls_Order.Nulls_Order

from project.Data.SQL import code
from project.Errors import Unsupported_Database_Operation_Error

## PRIVATE

The dialect of PostgreSQL databases.
postgres : Dialect
postgres : Postgres_Dialect
postgres =
Postgres_Dialect_Data make_internal_generator_dialect

# TODO Dubious constructor export
from project.Internal.Postgres.Postgres_Dialect.Postgres_Dialect import all
from project.Internal.Postgres.Postgres_Dialect.Postgres_Dialect export all
Postgres_Dialect.Value make_internal_generator_dialect

## PRIVATE

@ -27,7 +29,7 @@ type Postgres_Dialect
## PRIVATE

The dialect of PostgreSQL databases.
Postgres_Dialect_Data internal_generator_dialect
Value internal_generator_dialect

## PRIVATE
Name of the dialect.
@ -37,7 +39,7 @@ type Postgres_Dialect
## PRIVATE
A function which generates SQL code from the internal representation
according to the specific dialect.
generate_sql : Query -> Statement
generate_sql : Query -> SQL_Statement
generate_sql self query =
Base_Generator.generate_query self.internal_generator_dialect query . build

@ -54,7 +56,7 @@ type Postgres_Dialect

One of the purposes of this method is to verify if the expected ordering
settings are supported by the given database backend.
prepare_order_descriptor : IR.Internal_Column -> Sort_Direction -> Text_Ordering -> IR.Order_Descriptor
prepare_order_descriptor : Internal_Column -> Sort_Direction -> Text_Ordering -> Order_Descriptor
prepare_order_descriptor self internal_column sort_direction text_ordering =
make_order_descriptor internal_column sort_direction text_ordering

@ -225,23 +227,23 @@ contains = Base_Generator.lift_binary_op "contains" make_contains_expr
## PRIVATE
make_order_descriptor internal_column sort_direction text_ordering =
nulls = case sort_direction of
Sort_Direction.Ascending -> IR.Nulls_First
Sort_Direction.Descending -> IR.Nulls_Last
Sort_Direction.Ascending -> Nulls_Order.First
Sort_Direction.Descending -> Nulls_Order.Last
case internal_column.sql_type.is_likely_text of
True ->
## In the future we can modify this error to suggest using a custom defined collation.
if text_ordering.sort_digits_as_numbers then Error.throw (Unsupported_Database_Operation_Error "Natural ordering is currently not supported. You may need to materialize the Table to perform this operation.") else
case text_ordering.case_sensitivity of
Nothing ->
IR.Order_Descriptor_Data internal_column.expression sort_direction nulls_order=nulls collation=Nothing
Order_Descriptor.Value internal_column.expression sort_direction nulls_order=nulls collation=Nothing
Case_Sensitivity.Sensitive ->
IR.Order_Descriptor_Data internal_column.expression sort_direction nulls_order=nulls collation="ucs_basic"
Order_Descriptor.Value internal_column.expression sort_direction nulls_order=nulls collation="ucs_basic"
Case_Sensitivity.Insensitive locale -> case locale == Locale.default of
False ->
Error.throw (Unsupported_Database_Operation_Error "Case insensitive ordering with custom locale is currently not supported. You may need to materialize the Table to perform this operation.")
True ->
upper = IR.Operation "UPPER" [internal_column.expression]
folded_expression = IR.Operation "LOWER" [upper]
IR.Order_Descriptor_Data folded_expression sort_direction nulls_order=nulls collation=Nothing
upper = Expression.Operation "UPPER" [internal_column.expression]
folded_expression = Expression.Operation "LOWER" [upper]
Order_Descriptor.Value folded_expression sort_direction nulls_order=nulls collation=Nothing
False ->
IR.Order_Descriptor_Data internal_column.expression sort_direction nulls_order=nulls collation=Nothing
Order_Descriptor.Value internal_column.expression sort_direction nulls_order=nulls collation=Nothing
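Spelled out, the case-insensitive branch above orders by a case-folded expression; the same shape in isolation (`column` is a hypothetical Internal_Column):

    upper = Expression.Operation "UPPER" [column.expression]
    folded = Expression.Operation "LOWER" [upper]
    descriptor = Order_Descriptor.Value folded Sort_Direction.Ascending nulls_order=Nulls_Order.First collation=Nothing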
@ -2,21 +2,20 @@ from Standard.Base import all

from Standard.Table import Aggregate_Column

from Standard.Database.Data.SQL import SQL_Type, Statement
import Standard.Database.Data.Dialect
import Standard.Database.Internal.Postgres.Postgres_Dialect
import Standard.Database.Internal.Base_Generator
import project.Data.SQL_Statement.SQL_Statement
import project.Data.SQL_Type.SQL_Type
import project.Internal.Base_Generator
import project.Internal.IR.Internal_Column.Internal_Column
import project.Internal.IR.Order_Descriptor.Order_Descriptor
import project.Internal.Postgres.Postgres_Dialect


## PRIVATE

The dialect for Redshift connections.
redshift : Dialect
redshift : Redshift_Dialect
redshift =
Redshift_Dialect_Data Postgres_Dialect.make_internal_generator_dialect

# TODO Dubious constructor export
from project.Internal.Redshift.Redshift_Dialect.Redshift_Dialect import all
from project.Internal.Redshift.Redshift_Dialect.Redshift_Dialect export all
Redshift_Dialect.Value Postgres_Dialect.make_internal_generator_dialect

## PRIVATE

@ -25,7 +24,7 @@ type Redshift_Dialect
## PRIVATE

The dialect for Redshift connections.
Redshift_Dialect_Data internal_generator_dialect
Value internal_generator_dialect

## PRIVATE
Name of the dialect.
@ -35,7 +34,7 @@ type Redshift_Dialect
## PRIVATE
A function which generates SQL code from the internal representation
according to the specific dialect.
generate_sql : Query -> Statement
generate_sql : Query -> SQL_Statement
generate_sql self query =
Base_Generator.generate_query self.internal_generator_dialect query . build

@ -53,6 +52,6 @@ type Redshift_Dialect

One of the purposes of this method is to verify if the expected ordering
settings are supported by the given database backend.
prepare_order_descriptor : IR.Internal_Column -> Sort_Direction -> Text_Ordering -> IR.Order_Descriptor
prepare_order_descriptor : Internal_Column -> Sort_Direction -> Text_Ordering -> Order_Descriptor
prepare_order_descriptor self internal_column sort_direction text_ordering =
Postgres_Dialect.make_order_descriptor internal_column sort_direction text_ordering
@ -1,9 +1,10 @@
from Standard.Base import all

import Standard.Table.Data.Table as Materialized_Table
import Standard.Table.Data.Column as Materialized_Column
import Standard.Table.Data.Table.Table as Materialized_Table
import Standard.Table.Data.Column.Column as Materialized_Column
import Standard.Table.Internal.Java_Exports
from Standard.Database.Data.SQL import SQL_Type, SQL_Type_Data

import project.Data.SQL_Type.SQL_Type

polyglot java import java.sql.ResultSet

@ -24,7 +25,7 @@ read_column result_set column_name =

## PRIVATE
Converts a ResultSet into a Materialized_Table.
result_set_to_table : ResultSet -> (Vector | Nothing) -> Materialized_Table.Table
result_set_to_table : ResultSet -> (Vector | Nothing) -> Materialized_Table
result_set_to_table result_set expected_types=Nothing =
metadata = result_set.getMetaData
ncols = metadata.getColumnCount
@ -33,7 +34,7 @@ result_set_to_table result_set expected_types=Nothing =
Vector.new ncols ix->
typeid = metadata.getColumnType ix+1
name = metadata.getColumnTypeName ix+1
SQL_Type_Data typeid name
SQL_Type.Value typeid name
column_builders = column_types.map typ->
create_builder typ
go has_next = if has_next.not then Nothing else

@ -43,7 +44,7 @@ result_set_to_table result_set expected_types=Nothing =
go result_set.next
columns = column_builders.zip column_names builder-> name->
builder.make_column name
Materialized_Table.Table.new columns
Materialized_Table.new columns

## PRIVATE

@ -130,7 +131,7 @@ type Builder

Argument:
- name: The name of the column.
make_column : Text -> Materialized_Column.Column
make_column : Text -> Materialized_Column
make_column self name =
storage = self.java_builder.seal
Java_Exports.make_column name storage
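A hedged sketch tying this to the JDBC helpers above (connection and query text are placeholders):

    table = connection.with_prepared_statement "SELECT * FROM my_table" stmt->
        result_set_to_table stmt.executeQuery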
@ -1,20 +1,16 @@
from Standard.Base import all

from Standard.Database.Data.SQL import SQL_Type, Statement
import Standard.Database.Data.SQL_Query
import Standard.Table.Data.Table.Table as Materialized_Table

import Standard.Database.Internal.JDBC_Connection
import Standard.Database.Data.Dialect
from Standard.Database.Connection.Connection import Connection_Data
import project.Data.SQL_Query.SQL_Query
import project.Data.SQL_Type.SQL_Type
import project.Internal.JDBC_Connection
import project.Data.Dialect
import project.Connection.Connection.Connection
import project.Data.Table.Table as Database_Table

import Standard.Database.Data.Table as Database_Table
import Standard.Table.Data.Table as Materialized_Table

from Standard.Database.Errors import SQL_Error

# TODO Dubious constructor export
from project.Internal.SQLite.SQLite_Connection.SQLite_Connection import all
from project.Internal.SQLite.SQLite_Connection.SQLite_Connection export all
import project.Data.SQL_Statement.SQL_Statement
from project.Errors import SQL_Error

type SQLite_Connection
## PRIVATE
@ -23,7 +19,7 @@ type SQLite_Connection

Arguments:
- connection: the underlying connection.
SQLite_Connection_Data connection
Value connection

## Closes the connection releasing the underlying database resources
immediately instead of waiting for them to be automatically released.
@ -78,7 +74,7 @@ type SQLite_Connection
- schema: The schema name to search in (defaults to current schema).
- types: The table types to search for. The list of values can be obtained using the `table_types` method.
- all_fields: Return all the fields in the metadata table.
tables : Text -> Text -> Text -> Vector -> Boolean -> Materialized_Table.Table
tables : Text -> Text -> Text -> Vector -> Boolean -> Materialized_Table
tables self name_like=Nothing database=self.database schema=self.schema types=Nothing all_fields=False =
self.connection.tables name_like database schema types all_fields

@ -88,7 +84,7 @@ type SQLite_Connection
- query: name of the table or sql statement to query.
If supplied as `Text`, the name is checked against the `tables` list to determine if it is a table or a query.
- alias: optionally specify a friendly alias for the query.
query : Text | SQL_Query -> Text -> Database_Table.Table
query : Text | SQL_Query -> Text -> Database_Table
query self query alias="" = self.connection.query query alias

## Execute the query and load the results into memory as a Table.
@ -97,12 +93,12 @@ type SQLite_Connection
- query: name of the table or sql statement to query.
If supplied as `Text`, the name is checked against the `tables` list to determine if it is a table or a query.
- limit: the maximum number of rows to return.
read : Text | SQL_Query -> Integer | Nothing -> Materialized_Table.Table
read : Text | SQL_Query -> Integer | Nothing -> Materialized_Table
read self query limit=Nothing = self.connection.read query limit

## PRIVATE
Internal read function for a statement with optional types.
read_statement : Statement -> (Nothing | Vector SQL_Type) -> Materialized_Table.Table
read_statement : SQL_Statement -> (Nothing | Vector SQL_Type) -> Materialized_Table
read_statement self statement expected_types=Nothing =
self.connection.read_statement statement expected_types

@ -113,9 +109,9 @@ type SQLite_Connection
returns 0 for other types of queries (like creating or altering tables).

Arguments:
- query: either raw SQL code as Text or an instance of Statement
- query: either raw SQL code as Text or an instance of SQL_Statement
representing the query to execute.
execute_update : Text | Statement -> Integer
execute_update : Text | SQL_Statement -> Integer
execute_update self query =
self.connection.execute_update query

@ -136,7 +132,7 @@ type SQLite_Connection
usually not be visible to other connections.
- batch_size: Specifies how many rows should be uploaded in a single
batch.
upload_table : Text -> Materialized_Table.Table -> Boolean -> Integer -> Database_Table.Table
upload_table : Text -> Materialized_Table -> Boolean -> Integer -> Database_Table
upload_table self name table temporary=True batch_size=1000 = Panic.recover Illegal_State_Error <|
self.connection.upload_table name table temporary batch_size

@ -150,4 +146,4 @@ type SQLite_Connection
create : Text -> Vector -> SQLite_Connection
create url properties =
jdbc_connection = JDBC_Connection.create url properties
SQLite_Connection_Data (Connection_Data jdbc_connection Dialect.sqlite)
SQLite_Connection.Value (Connection.Value jdbc_connection Dialect.sqlite)
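A hedged sketch of round-tripping a local table through such a connection (names are placeholders):

    db_table = connection.upload_table "scratch_data" local_table temporary=True batch_size=1000
    back_in_memory = connection.read "scratch_data"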
@ -1,23 +1,23 @@
from Standard.Base import all hiding First, Last

from Standard.Table.Data.Aggregate_Column.Aggregate_Column import all
import Standard.Database.Data.SQL
from Standard.Database.Data.SQL import SQL_Type, Statement, code
import Standard.Database.Data.Dialect
import Standard.Database.Internal.Base_Generator
import Standard.Database.Internal.IR
from Standard.Database.Errors import Unsupported_Database_Operation_Error_Data

import project.Data.SQL
import project.Data.SQL_Statement.SQL_Statement
import project.Data.SQL_Type.SQL_Type
import project.Internal.Base_Generator
import project.Internal.IR.Internal_Column.Internal_Column
import project.Internal.IR.Order_Descriptor.Order_Descriptor

from project.Data.SQL import code
from project.Errors import Unsupported_Database_Operation_Error_Data

## PRIVATE

The dialect of SQLite databases.
sqlite : Dialect
sqlite : SQLite_Dialect
sqlite =
SQLite_Dialect_Data make_internal_generator_dialect

# TODO Dubious constructor export
from project.Internal.SQLite.SQLite_Dialect.SQLite_Dialect import all
from project.Internal.SQLite.SQLite_Dialect.SQLite_Dialect export all
SQLite_Dialect.Value make_internal_generator_dialect

## PRIVATE

@ -26,7 +26,7 @@ type SQLite_Dialect
## PRIVATE

The dialect of SQLite databases.
SQLite_Dialect_Data internal_generator_dialect
Value internal_generator_dialect

## PRIVATE
Name of the dialect.
@ -36,7 +36,7 @@ type SQLite_Dialect
## PRIVATE
A function which generates SQL code from the internal representation
according to the specific dialect.
generate_sql : Query -> Statement
generate_sql : Query -> SQL_Statement
generate_sql self query =
Base_Generator.generate_query self.internal_generator_dialect query . build

@ -53,22 +53,22 @@ type SQLite_Dialect

One of the purposes of this method is to verify if the expected ordering
settings are supported by the given database backend.
prepare_order_descriptor : IR.Internal_Column -> Sort_Direction -> Text_Ordering -> IR.Order_Descriptor
prepare_order_descriptor : Internal_Column -> Sort_Direction -> Text_Ordering -> Order_Descriptor
prepare_order_descriptor self internal_column sort_direction text_ordering = case internal_column.sql_type.is_likely_text of
True ->
if text_ordering.sort_digits_as_numbers then Error.throw (Unsupported_Database_Operation_Error_Data "Natural ordering is not supported by the SQLite backend. You may need to materialize the Table to perform this operation.") else
case text_ordering.case_sensitivity of
Nothing ->
IR.Order_Descriptor_Data internal_column.expression sort_direction collation=Nothing
Order_Descriptor.Value internal_column.expression sort_direction collation=Nothing
Case_Sensitivity.Sensitive ->
IR.Order_Descriptor_Data internal_column.expression sort_direction collation="BINARY"
Order_Descriptor.Value internal_column.expression sort_direction collation="BINARY"
Case_Sensitivity.Insensitive locale -> case locale == Locale.default of
False ->
Error.throw (Unsupported_Database_Operation_Error_Data "Case insensitive ordering with custom locale is not supported by the SQLite backend. You may need to materialize the Table to perform this operation.")
True ->
IR.Order_Descriptor_Data internal_column.expression sort_direction collation="NOCASE"
Order_Descriptor.Value internal_column.expression sort_direction collation="NOCASE"
False ->
IR.Order_Descriptor_Data internal_column.expression sort_direction collation=Nothing
Order_Descriptor.Value internal_column.expression sort_direction collation=Nothing

## PRIVATE
make_internal_generator_dialect =
@ -1,23 +1,24 @@
import Standard.Database.Data.SQL_Query
import Standard.Database.Connection.Database
import project.Connection.Database

import Standard.Database.Connection.Credentials
import Standard.Database.Connection.Client_Certificate
import Standard.Database.Connection.SSL_Mode
import Standard.Database.Connection.Connection_Options
import project.Connection.Credentials.Credentials
import project.Connection.Client_Certificate.Client_Certificate
import project.Connection.SSL_Mode.SSL_Mode
import project.Connection.Connection_Options.Connection_Options

import Standard.Database.Connection.Postgres_Options
import Standard.Database.Connection.SQLite_Options
import Standard.Database.Connection.Redshift_Options
import project.Connection.Postgres_Options
import project.Connection.SQLite_Options
import project.Connection.Redshift_Options
import project.Data.SQL_Query.SQL_Query

export Standard.Database.Connection.SSL_Mode
export project.Connection.Credentials.Credentials

from Standard.Database.Connection.Credentials export Credentials, Username_And_Password
from Standard.Database.Connection.Client_Certificate export Client_Certificate
from Standard.Database.Connection.Connection_Options export Connection_Options
export project.Connection.SSL_Mode.SSL_Mode
export project.Connection.Client_Certificate.Client_Certificate
export project.Connection.Connection_Options.Connection_Options

from Standard.Database.Connection.Database export connect
from Standard.Database.Connection.Postgres_Options export Postgres_Options, Postgres
from Standard.Database.Connection.SQLite_Options export SQLite_Options, SQLite, In_Memory
from Standard.Database.Connection.Redshift_Options export Redshift_Options, Redshift, AWS_Profile, AWS_Key
from Standard.Database.Data.SQL_Query export SQL_Query, Table_Name, Raw_SQL
export project.Connection.Database
export project.Data.SQL_Query.SQL_Query

from project.Connection.Postgres_Options export Postgres_Options, Postgres
from project.Connection.SQLite_Options export SQLite_Options, In_Memory, SQLite
from project.Connection.Redshift_Options export Redshift_Options, Redshift, AWS_Credential
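With these exports, the intended entry point reads as in this hedged sketch (the file name is a placeholder, and `File.new` is assumed from Standard.Base):

    from Standard.Database import all

    connection = Database.connect (SQLite (File.new "my_data.db"))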
@ -1,8 +1,8 @@
from Standard.Base import all

import Standard.Table.Data.Column.Column
import Standard.Table.Data.Column_Selector.Column_Selector
import Standard.Table.Data.Sort_Column_Selector.Sort_Column_Selector
import project.Data.Column.Column
import project.Data.Column_Selector.Column_Selector
import project.Data.Sort_Column_Selector.Sort_Column_Selector

## Defines an Aggregate Column
type Aggregate_Column
@ -2,10 +2,10 @@ from Standard.Base import all
import Standard.Base.Data.Ordering.Comparator
import Standard.Base.Data.Index_Sub_Range

import Standard.Table.Data.Table.Table
from Standard.Table.Data.Table import print_table
import Standard.Table.Data.Storage.Storage
import Standard.Table.Data.Value_Type.Value_Type
import project.Data.Table.Table
import project.Data.Storage.Storage
import project.Data.Value_Type.Value_Type
from project.Data.Table import print_table

from project.Errors import No_Index_Set_Error

@ -751,7 +751,7 @@ type Column
storage_type : Storage
storage_type self =
tp = self.java_column.getStorage.getType
storage_types.at tp . catch Index_Out_Of_Bounds_Error _->
Storage.types.at tp . catch Index_Out_Of_Bounds_Error _->
Panic.throw (Illegal_State_Error "Unknown storage type: "+tp.to_text)

## UNSTABLE TODO this is a prototype that will be revisited later on
@ -1139,20 +1139,6 @@ run_vectorized_unary_op column name fallback_fn =
rs = s.map name fallback_fn
Column.Column_Data (Java_Column.new "Result" ix rs)

## PRIVATE
Enumerates storage types in a way that is consistent with
`org.enso.table.data.Storage.Storage`, i.e.
`storage_type.at org.enso.table.data.Storage.Storage.LONG` will yield the
corresponding `Storage.Integer`.
storage_types : Vector Storage
storage_types = [Storage.Any, Storage.Integer, Storage.Decimal, Storage.Text, Storage.Boolean, Storage.Date, Storage.Time_Of_Day, Storage.Date_Time]

## PRIVATE

Keep this in sync with `org.enso.table.data.Storage.Storage.STRING`
storage_type_string : Integer
storage_type_string = 3

## PRIVATE

A helper function for converting a column storage to JSON.
@ -1,10 +1,9 @@
from Standard.Base import all
import Standard.Base.Error.Common as Errors

import Standard.Table.Data.Column_Type_Selection.Auto
import Standard.Table.Data.Storage.Storage

import Standard.Table.Internal.Parse_Values_Helper
import project.Data.Column_Type_Selection.Auto
import project.Data.Storage.Storage
import project.Internal.Parse_Values_Helper

polyglot java import org.enso.table.parsing.IntegerParser
polyglot java import org.enso.table.parsing.DecimalParser
@ -1,7 +1,7 @@
from Standard.Base import all

import Standard.Table.Data.Column.Column
import Standard.Table.Data.Sort_Column.Sort_Column
import project.Data.Column.Column
import project.Data.Sort_Column.Sort_Column

type Sort_Column_Selector
By_Name (columns : Vector (Sort_Column.Name | Text)) (matcher:Matcher=Text_Matcher.Case_Sensitive)
@ -24,3 +24,11 @@ type Storage

## A column storing arbitrary data.
Any

## PRIVATE
Enumerates storage types in a way that is consistent with
`org.enso.table.data.Storage.Storage`, i.e.
`storage_type.at org.enso.table.data.Storage.Storage.LONG` will yield the
corresponding `Storage.Integer`.
types : Vector Storage
types = [Storage.Any, Storage.Integer, Storage.Decimal, Storage.Text, Storage.Boolean, Storage.Date, Storage.Time_Of_Day, Storage.Date_Time]
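A hedged sketch of the lookup this enables (assuming `LONG` is the second Java constant, hence index 1):

    Storage.types.at 1    # Storage.Integer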
@ -6,30 +6,26 @@ import Standard.Base.Data.Ordering.Comparator
import Standard.Base.Data.Text.Case
import Standard.Base.System.Platform

import Standard.Table.Data.Column.Column
from Standard.Table.Data.Filter_Condition_Helpers import make_filter_column
import project.Data.Column.Column
import project.Data.Column_Name_Mapping.Column_Name_Mapping
import project.Data.Column_Selector.Column_Selector
import project.Data.Data_Formatter.Data_Formatter
import project.Data.Match_Columns.Match_Columns
import project.Data.Position.Position
import project.Data.Sort_Column_Selector.Sort_Column_Selector
import project.Data.Sort_Column.Sort_Column
import project.Data.Aggregate_Column.Aggregate_Column
import project.Internal.Table_Helpers
import project.Internal.Aggregate_Column_Helper
import project.Internal.Parse_Values_Helper
import project.Internal.Problem_Builder.Problem_Builder
import project.IO.Auto_Detect.Auto_Detect

import Standard.Table.Internal.Table_Helpers
import Standard.Table.Internal.Aggregate_Column_Helper
import Standard.Table.Internal.Parse_Values_Helper
import Standard.Table.Internal.Problem_Builder.Problem_Builder
from project.Data.Column_Type_Selection import Column_Type_Selection, Auto
from project.Delimited.Delimited_Format import Delimited
from project.Internal.Filter_Condition_Helpers import make_filter_column
from project.Errors import Missing_Input_Columns, Column_Indexes_Out_Of_Range, Duplicate_Type_Selector, No_Index_Set_Error, No_Such_Column_Error, No_Such_Column_Error_Data, No_Input_Columns_Selected, No_Output_Columns

import Standard.Table.IO.Auto_Detect.Auto_Detect

from Standard.Table.Delimited.Delimited_Format import Delimited_Format, Delimited

import Standard.Table.Data.Column_Selector.Column_Selector
from Standard.Table.Data.Column_Type_Selection import Column_Type_Selection, Auto
import Standard.Table.Data.Data_Formatter.Data_Formatter
from Standard.Table.Errors import Missing_Input_Columns, Column_Indexes_Out_Of_Range, Duplicate_Type_Selector, No_Index_Set_Error, No_Such_Column_Error, No_Such_Column_Error_Data, No_Input_Columns_Selected, No_Output_Columns
import Standard.Table.Data.Match_Columns.Match_Columns

from Standard.Table.Data.Column_Name_Mapping import Column_Name_Mapping
import Standard.Table.Data.Position.Position
import Standard.Table.Data.Sort_Column_Selector.Sort_Column_Selector
import Standard.Table.Data.Sort_Column.Sort_Column

import Standard.Table.Data.Aggregate_Column.Aggregate_Column
import Standard.Visualization

polyglot java import org.enso.table.data.table.Table as Java_Table
@ -2,16 +2,16 @@ from Standard.Base import all
from Standard.Base.Error.Problem_Behavior import Report_Warning

import project.Data.Table.Table
import project.Errors

from project.Delimited.Delimited_Format import Delimited_Format, Delimited
import project.Delimited.Delimited_Reader
import project.Delimited.Delimited_Writer

from project.Delimited.Delimited_Format import Delimited_Format, Delimited
from project.Errors import unimplemented

Table.from (that : Text) (format:Delimited_Format = Delimited '\t') (on_problems:Problem_Behavior=Report_Warning) =
if format.is_a Delimited then Delimited_Reader.read_text that format on_problems else
Errors.unimplemented "Table.from for fixed-width files is not yet implemented."
unimplemented "Table.from for fixed-width files is not yet implemented."

Text.from (that : Table) (format:Delimited_Format = Delimited '\t') =
if format.is_a Delimited then Delimited_Writer.write_text that format else
Errors.unimplemented "Text.from for fixed-width files is not yet implemented."
unimplemented "Text.from for fixed-width files is not yet implemented."
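A hedged round-trip sketch of the two conversions (literal data, default tab delimiter):

    table = Table.from 'a\tb\n1\t2'
    text = Text.from table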
@ -4,7 +4,7 @@

from Standard.Base import all

from Standard.Table.Errors import Invalid_Value_Type
from project.Errors import Invalid_Value_Type

## Type to represent the different sizes of integer or float possible within a database.
type Bits
@ -1,11 +1,11 @@
from Standard.Base import all

from Standard.Table.Data.Table import Table
from Standard.Table.Data.Data_Formatter import Data_Formatter
from Standard.Table.Data.Match_Columns import Match_Columns
import Standard.Table.Delimited.Delimited_Reader
import Standard.Table.Delimited.Delimited_Writer
import Standard.Table.Delimited.Quote_Style.Quote_Style
import project.Data.Table.Table
import project.Data.Data_Formatter.Data_Formatter
import project.Data.Match_Columns.Match_Columns
import project.Delimited.Delimited_Reader
import project.Delimited.Delimited_Writer
import project.Delimited.Quote_Style.Quote_Style

from project.Delimited.Delimited_Format.Delimited_Format import Delimited
from project.Delimited.Delimited_Format.Delimited_Format export Delimited
@ -2,12 +2,12 @@ from Standard.Base import all
import Standard.Base.Error.Common as Errors
from Standard.Base.Error.Problem_Behavior import Ignore, Report_Error

import Standard.Table.Data.Table.Table
import Standard.Table.Data.Data_Formatter.Data_Formatter
import Standard.Table.Delimited.Delimited_Format.Delimited_Format
import Standard.Table.Delimited.Quote_Style.Quote_Style
import project.Data.Table.Table
import project.Data.Data_Formatter.Data_Formatter
import project.Delimited.Delimited_Format.Delimited_Format
import project.Delimited.Quote_Style.Quote_Style

from Standard.Table.Errors import Duplicate_Output_Column_Names_Data, Invalid_Output_Column_Names_Data, Invalid_Row_Data, Mismatched_Quote, Parser_Error, Additional_Invalid_Rows_Data
from project.Errors import Duplicate_Output_Column_Names_Data, Invalid_Output_Column_Names_Data, Invalid_Row_Data, Mismatched_Quote, Parser_Error, Additional_Invalid_Rows_Data

polyglot java import org.enso.base.encoding.NewlineDetector
polyglot java import org.enso.table.read.DelimitedReader
@ -2,14 +2,15 @@ from Standard.Base import all
import Standard.Base.System
import Standard.Base.Error.Common as Errors

from Standard.Table.Data.Table import Table
from Standard.Table.Errors import Duplicate_Output_Column_Names, Invalid_Output_Column_Names, Invalid_Row, Mismatched_Quote, Parser_Error, Additional_Invalid_Rows, Column_Count_Mismatch, Column_Name_Mismatch
from Standard.Table.Data.Data_Formatter import Data_Formatter
from Standard.Table.Data.Storage import Storage
from Standard.Table.Delimited.Delimited_Format import Delimited_Format
from Standard.Table.Delimited.Quote_Style import Quote_Style
from Standard.Table.Delimited.Delimited_Reader import Detected_Headers, detect_metadata
from Standard.Table.Data.Match_Columns import Match_Columns
import project.Data.Table.Table
import project.Data.Data_Formatter.Data_Formatter
import project.Data.Match_Columns.Match_Columns
import project.Data.Storage.Storage
import project.Delimited.Delimited_Format.Delimited_Format
import project.Delimited.Quote_Style.Quote_Style

from project.Delimited.Delimited_Reader import Detected_Headers, detect_metadata
from project.Errors import Duplicate_Output_Column_Names, Invalid_Output_Column_Names, Invalid_Row, Mismatched_Quote, Parser_Error, Additional_Invalid_Rows, Column_Count_Mismatch, Column_Name_Mismatch

polyglot java import org.enso.table.write.DelimitedWriter
polyglot java import org.enso.table.write.WriteQuoteBehavior
@ -1,10 +1,10 @@
from Standard.Base import all

import Standard.Table.Data.Table.Table
import project.Data.Table.Table
import project.Excel.Excel_Reader
import project.Excel.Excel_Writer

from Standard.Table.Excel.Section import Excel_Section, Worksheet, Sheet_Names, Range_Names
import Standard.Table.Excel.Excel_Reader
import Standard.Table.Excel.Excel_Writer
from project.Excel.Excel_Section import Excel_Section, Worksheet, Sheet_Names, Range_Names

from project.Excel.Excel_Format.Excel_Format import Excel
from project.Excel.Excel_Format.Excel_Format export Excel
@ -1,12 +1,11 @@
from Standard.Base import all
import Standard.Base.System.File.Option

import Standard.Table.Data.Table.Table
import Standard.Table.Excel.Range.Excel_Range
import project.Data.Table.Table
import project.Excel.Excel_Range.Excel_Range

from Standard.Table.Excel.Section import Excel_Section, Sheet_Names, Range_Names, Worksheet, Cell_Range

from Standard.Table.Errors import Invalid_Location_Data, Duplicate_Output_Column_Names_Data, Invalid_Output_Column_Names_Data
from project.Excel.Excel_Section import Excel_Section, Sheet_Names, Range_Names, Worksheet, Cell_Range
from project.Errors import Invalid_Location_Data, Duplicate_Output_Column_Names_Data, Invalid_Output_Column_Names_Data

polyglot java import org.enso.table.excel.ExcelHeaders
polyglot java import org.enso.table.read.ExcelReader
@ -1,9 +1,9 @@
from Standard.Base import all

import Standard.Table.Excel.Range.Excel_Range
import project.Excel.Excel_Range.Excel_Range

from project.Excel.Section.Excel_Section import all
from project.Excel.Section.Excel_Section export all
from project.Excel.Excel_Section.Excel_Section import all
from project.Excel.Excel_Section.Excel_Section export all

type Excel_Section
## Gets a list of sheets within a workbook.
@ -1,13 +1,12 @@
from Standard.Base import all

import Standard.Table.Data.Table
import project.Data.Table
import project.Data.Match_Columns.Match_Columns
import project.Excel.Excel_Range.Excel_Range

from Standard.Table.Excel.Excel_Reader import handle_reader, make_java_headers
from Standard.Table.Excel.Section import Worksheet, Cell_Range
import Standard.Table.Excel.Range.Excel_Range

from Standard.Table.Errors import Invalid_Location_Data, Range_Exceeded_Data, Existing_Data_Data, Column_Count_Mismatch, Column_Name_Mismatch
import Standard.Table.Data.Match_Columns.Match_Columns
from project.Excel.Excel_Reader import handle_reader, make_java_headers
from project.Excel.Excel_Section import Worksheet, Cell_Range
from project.Errors import Invalid_Location_Data, Range_Exceeded_Data, Existing_Data_Data, Column_Count_Mismatch, Column_Name_Mismatch

polyglot java import org.enso.table.read.ExcelReader
polyglot java import org.enso.table.write.ExcelWriter
@ -1,8 +1,8 @@
from Standard.Base import Any, Problem_Behavior, Nothing, Error, Panic, Meta, File, File_Format, Plain_Text_Format, Bytes
from Standard.Base.Error.Common import Unsupported_File_Type, Unsupported_File_Type_Data, No_Such_Method_Error_Data, Illegal_Argument_Error_Data

import Standard.Table.Delimited.Delimited_Format.Delimited_Format
import Standard.Table.Excel.Excel_Format.Excel_Format
import project.Delimited.Delimited_Format.Delimited_Format
import project.Excel.Excel_Format.Excel_Format

## PRIVATE
Set of File_Format types for read files.
@ -2,7 +2,7 @@ from Standard.Base import Any, Text, Problem_Behavior, Nothing, Error, Panic, Me
from Standard.Base.Error.Problem_Behavior import Report_Warning
from Standard.Base.Error.Common import Unsupported_File_Type_Data, No_Such_Method_Error_Data, Illegal_Argument_Error_Data

import Standard.Table.IO.Auto_Detect.Auto_Detect
import project.IO.Auto_Detect.Auto_Detect

## ALIAS Read Text File, Read File
@ -1,20 +1,16 @@
from Standard.Base import all hiding First, Last

import Standard.Table.Data.Column.Column

from Standard.Table.Data.Aggregate_Column.Aggregate_Column import all
import Standard.Table.Data.Column_Selector.Column_Selector

import Standard.Table.Internal.Problem_Builder.Problem_Builder
import Standard.Table.Internal.Unique_Name_Strategy.Unique_Name_Strategy
import Standard.Table.Internal.Table_Helpers

import Standard.Table.Data.Sort_Column_Selector.Sort_Column_Selector
import Standard.Table.Data.Sort_Column.Sort_Column

import Standard.Base.Data.Ordering.Comparator

from Standard.Table.Errors import Missing_Input_Columns_Data, Column_Indexes_Out_Of_Range, No_Output_Columns, Duplicate_Output_Column_Names_Data, Invalid_Output_Column_Names_Data, Invalid_Aggregation_Data, Floating_Point_Grouping_Data, Unquoted_Delimiter_Data, Additional_Warnings_Data
import project.Data.Column.Column
import project.Data.Column_Selector.Column_Selector
import project.Data.Sort_Column.Sort_Column
import project.Data.Sort_Column_Selector.Sort_Column_Selector
import project.Internal.Problem_Builder.Problem_Builder
import project.Internal.Table_Helpers
import project.Internal.Unique_Name_Strategy.Unique_Name_Strategy

from project.Data.Aggregate_Column.Aggregate_Column import all
from project.Errors import Missing_Input_Columns_Data, Column_Indexes_Out_Of_Range, No_Output_Columns, Duplicate_Output_Column_Names_Data, Invalid_Output_Column_Names_Data, Invalid_Aggregation_Data, Floating_Point_Grouping_Data, Unquoted_Delimiter_Data, Additional_Warnings_Data

polyglot java import org.enso.table.aggregations.Aggregator
polyglot java import org.enso.table.aggregations.Concatenate as ConcatenateAggregator
@ -1,6 +1,6 @@
from Standard.Base import all

from Standard.Table.Data.Value_Type import Value_Type
import project.Data.Value_Type.Value_Type

from Standard.Base.Data.Filter_Condition.Filter_Condition import all
@ -1,9 +1,9 @@
from Standard.Base import all

import Standard.Table.Data.Column.Column
polyglot java import org.enso.table.data.table.Column as Java_Column
import project.Data.Column.Column
import project.Data.Table.Table

import Standard.Table.Data.Table.Table
polyglot java import org.enso.table.data.table.Column as Java_Column
polyglot java import org.enso.table.data.table.Table as Java_Table

polyglot java import org.enso.table.data.index.DefaultIndex
@ -1,6 +1,6 @@
from Standard.Base import all

from Standard.Table.Errors import Invalid_Format_Data, Leading_Zeros_Data
from project.Errors import Invalid_Format_Data, Leading_Zeros_Data

polyglot java import org.enso.table.parsing.problems.InvalidFormat
polyglot java import org.enso.table.parsing.problems.LeadingZeros
@ -2,9 +2,9 @@ from Standard.Base import all
from Standard.Base.Error.Problem_Behavior import Report_Warning
import Standard.Base.Runtime.Ref

import Standard.Table.Internal.Vector_Builder.Vector_Builder
import project.Internal.Vector_Builder.Vector_Builder

from Standard.Table.Errors import Missing_Input_Columns_Data, Column_Indexes_Out_Of_Range_Data, No_Output_Columns, Duplicate_Column_Selectors_Data, Input_Indices_Already_Matched_Data, Too_Many_Column_Names_Provided, Duplicate_Output_Column_Names, Invalid_Output_Column_Names, Column_Matched_By_Multiple_Selectors_Data
from project.Errors import Missing_Input_Columns_Data, Column_Indexes_Out_Of_Range_Data, No_Output_Columns, Duplicate_Column_Selectors_Data, Input_Indices_Already_Matched_Data, Too_Many_Column_Names_Provided, Duplicate_Output_Column_Names, Invalid_Output_Column_Names, Column_Matched_By_Multiple_Selectors_Data

type Problem_Builder
Value oob_indices duplicate_column_selectors input_indices_already_matched missing_input_columns other
@ -4,14 +4,15 @@ import Standard.Base.Data.Ordering.Vector_Lexicographic_Order
from Standard.Base.Data.Text.Text_Ordering import Text_Ordering
from Standard.Base.Error.Problem_Behavior import Report_Warning

import Standard.Table.Data.Position.Position
from Standard.Table.Errors import Missing_Input_Columns_Data, No_Output_Columns, Too_Many_Column_Names_Provided_Data, Duplicate_Output_Column_Names_Data, Invalid_Output_Column_Names_Data, No_Input_Columns_Selected
import Standard.Table.Data.Column_Selector.Column_Selector
import Standard.Table.Data.Column_Name_Mapping.Column_Name_Mapping
import Standard.Table.Internal.Unique_Name_Strategy.Unique_Name_Strategy
import Standard.Table.Internal.Problem_Builder.Problem_Builder
import Standard.Table.Data.Sort_Column_Selector.Sort_Column_Selector
import Standard.Table.Data.Sort_Column.Sort_Column
import project.Data.Position.Position
import project.Data.Column_Name_Mapping.Column_Name_Mapping
import project.Data.Column_Selector.Column_Selector
import project.Data.Sort_Column_Selector.Sort_Column_Selector
import project.Data.Sort_Column.Sort_Column
import project.Internal.Problem_Builder.Problem_Builder
import project.Internal.Unique_Name_Strategy.Unique_Name_Strategy

from project.Errors import Missing_Input_Columns_Data, No_Output_Columns, Too_Many_Column_Names_Provided_Data, Duplicate_Output_Column_Names_Data, Invalid_Output_Column_Names_Data, No_Input_Columns_Selected

polyglot java import java.util.HashSet
@ -20,8 +20,8 @@ import project.Delimited.Quote_Style.Quote_Style
import project.Delimited.Delimited_Format
import project.Data.Table_Conversions

import project.Excel.Section
import project.Excel.Range.Excel_Range
import project.Excel.Excel_Section
import project.Excel.Excel_Range.Excel_Range
import project.Excel.Excel_Format

export project.Data.Table.Table
@ -33,7 +33,6 @@ export project.Data.Column_Name_Mapping.Column_Name_Mapping
export project.Data.Match_Columns.Match_Columns
export project.Data.Position.Position
export project.Data.Aggregate_Column.Aggregate_Column
export project.Data.Filter_Condition.Filter_Condition

export project.IO.File_Read
export project.IO.Auto_Detect.Auto_Detect
@ -43,8 +42,8 @@ from project.Delimited.Delimited_Format export Delimited_Format, Delimited
export project.Data.Table_Conversions

from project.Excel.Excel_Format export Excel_Format, Excel
from project.Excel.Section export Excel_Section, Sheet_Names, Range_Names, Worksheet, Cell_Range
export project.Excel.Range.Excel_Range
from project.Excel.Excel_Section export Excel_Section, Sheet_Names, Range_Names, Worksheet, Cell_Range
export project.Excel.Excel_Range.Excel_Range

export project.Data.Data_Formatter.Data_Formatter
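With `Section` and `Range` renamed to `Excel_Section` and `Excel_Range` and re-exported from `Standard.Table`, downstream code imports the new names directly; a minimal sketch assuming only the re-exports listed above (the text-address argument to `Cell_Range` is an assumption, not taken from this commit):

    from Standard.Table import Excel, Excel_Section, Worksheet, Cell_Range

    # The section constructors are re-exported unqualified as well:
    sheet_section = Worksheet "Sheet1"
    range_section = Cell_Range "Sheet1!A1:B10"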
@ -9,16 +9,12 @@ from Standard.Base import all
   - `path`: The path to which the file is being uploaded.
file_uploading : (File.File | Text) -> File.File ! File_Being_Uploaded
file_uploading path =
    err = File_Being_Uploaded_Data <| case path of
    err = File_Being_Uploaded.Value <| case path of
        _ : Text -> path
        _ : File.File -> path.path
        _ -> ""
    Error.throw err

# TODO Dubious constructor export
from project.File_Upload.File_Being_Uploaded import all
from project.File_Upload.File_Being_Uploaded export all

## UNSTABLE

   Represents that a file is being uploaded to the given `file_path`.
@ -26,4 +22,4 @@ from project.File_Upload.File_Being_Uploaded export all

   Arguments:
   - file_path: The path at which the file is being uploaded.
type File_Being_Uploaded
    File_Being_Uploaded_Data file_path
    Value file_path
@ -1,8 +1,8 @@
from Standard.Base import all

from Standard.Table import Table
import Standard.Test
import Standard.Visualization.Helpers

import project.Helpers

## PRIVATE
@ -1,7 +1,7 @@
from Standard.Base import all

from Standard.Table import Table, Column
import Standard.Visualization.Helpers
import project.Helpers

## PRIVATE
@ -23,7 +23,7 @@ Table.value_column self =
type Update

    ## PRIVATE
    Update_Data values label
    Value values label

    ## PRIVATE
@ -43,12 +43,12 @@ from_table table =
    col = table.value_column
    label = col.name.catch_ Nothing
    values = col.to_vector.catch_ []
    Update.Update_Data values label
    Update.Value values label

## PRIVATE
from_vector : Vector -> Update
from_vector vector =
    Update.Update_Data vector Nothing
    Update.Value vector Nothing

## PRIVATE
from_value : Any -> Update
@ -1,16 +1,13 @@
from Standard.Base import all
import Standard.Base

# TODO Dubious constructor export
from project.Id.Id import all
from project.Id.Id export all

## An ID used by the visualization system to identify different ways of
   displaying data.
type Id
    ## A builtin visualization, implemented in the graphical interface and not
       imported from any library.
    Builtin name

    ## A visualization implemented in a library.
    Library project name
@ -18,8 +15,8 @@ type Id
       interface.
    to_json self =
        project = case self of
            Builtin _ -> Nothing
            Library project _ ->
            Id.Builtin _ -> Nothing
            Id.Library project _ ->
                full_name = project.namespace + "." + project.name
                Base.Json.from_pairs [["name", full_name]]
        Base.Json.from_pairs [["library", project], ["name", self.name]]
@ -29,56 +26,56 @@

   An identifier for the builtin JSON visualization
json : Id
json = Builtin "JSON"
json = Id.Builtin "JSON"

## UNSTABLE
   ADVANCED

   An identifier for the builtin Scatter Plot visualization
scatter_plot : Id
scatter_plot = Builtin "Scatter Plot"
scatter_plot = Id.Builtin "Scatter Plot"

## UNSTABLE
   ADVANCED

   An identifier for the builtin Histogram visualization
histogram : Id
histogram = Builtin "Histogram"
histogram = Id.Builtin "Histogram"

## UNSTABLE
   ADVANCED

   An identifier for the builtin Heatmap visualization
heatmap : Id
heatmap = Builtin "Heatmap"
heatmap = Id.Builtin "Heatmap"

## UNSTABLE
   ADVANCED

   An identifier for the builtin Table visualization
table : Id
table = Builtin "Table"
table = Id.Builtin "Table"

## UNSTABLE
   ADVANCED

   An identifier for the builtin SQL Query visualization
sql_query : Id
sql_query = Builtin "SQL Query"
sql_query = Id.Builtin "SQL Query"

## UNSTABLE
   ADVANCED

   An identifier for the builtin Geo Map visualization
geo_map : Id
geo_map = Builtin "Geo Map"
geo_map = Id.Builtin "Geo Map"

## UNSTABLE
   ADVANCED

   An identifier for the builtin Image visualization
image : Id
image = Builtin "Image"
image = Id.Builtin "Image"

## UNSTABLE
   ADVANCED
@ -97,7 +94,7 @@ image = Builtin "Image"

       example_id = Visualization.Id.from_module Base "My Visualization"
from_module module visualization_name =
    Library module.enso_project visualization_name
    Id.Library module.enso_project visualization_name

## UNSTABLE
   ADVANCED
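After this change, `Id` constructors are referenced through the type both when constructing and when pattern matching; a minimal sketch (the `describe` helper is illustrative, not part of this commit):

    describe : Id -> Text
    describe id = case id of
        Id.Builtin name -> "builtin: " + name
        Id.Library _ name -> "library: " + name

    example = describe (Id.Builtin "Histogram")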
@ -1,8 +1,8 @@
from Standard.Base import all

import Standard.Visualization.File_Upload
import Standard.Visualization.Id
import Standard.Visualization.Preprocessor
import project.File_Upload
import project.Id
import project.Preprocessor

from Standard.Visualization.File_Upload export file_uploading
export Standard.Visualization.Id
from project.File_Upload export file_uploading
export project.Id
@ -1,7 +1,8 @@
from Standard.Base import all
from Standard.Database.Data.SQL import SQL_Type

import Standard.Visualization.Helpers
import Standard.Database.Data.SQL_Type.SQL_Type

import project.Helpers

## PRIVATE
@ -1,9 +1,9 @@
from Standard.Base import all
from Standard.Base.Data.Index_Sub_Range import Sample

from Standard.Table import Table, Column

import Standard.Visualization.Helpers
from Standard.Base.Data.Index_Sub_Range import Sample
import project.Helpers

## PRIVATE
@ -23,10 +23,6 @@ axis_field = 'axis'
label_field : Text
label_field = 'label'

# TODO Dubious constructor export
from project.Scatter_Plot.Point_Data import all
from project.Scatter_Plot.Point_Data export all

## PRIVATE

   Represents a recognized point data field for a scatter plot visualization.
@ -54,7 +50,7 @@ type Point_Data

       Returns all recognized point data fields.
    all_fields : Vector
    all_fields = [X,Y,Color,Shape,Label,Size]
    all_fields = [Point_Data.X, Point_Data.Y, Point_Data.Color, Point_Data.Shape, Point_Data.Label, Point_Data.Size]

    ## PRIVATE
    recognized_names : Vector
@ -71,9 +67,9 @@ type Point_Data
    ## PRIVATE
    fallback_column : Table -> Column ! No_Fallback_Column
    fallback_column self table = case self of
        X -> table.index.catch_ <| Point_Data.iota table.row_count
        Y ->
            x_column = X.lookup_in table
        Point_Data.X -> table.index.catch_ <| Point_Data.iota table.row_count
        Point_Data.Y ->
            x_column = Point_Data.X.lookup_in table
            candidates = table.all_columns
            is_good_enough c = c.is_numeric && c.name != x_column.name
            is_good c = is_good_enough c && (self.is_recognized c).not
@ -124,8 +120,8 @@ Table.axes self =
        col_name = field.lookup_in self . name
        label = Json.from_pairs [[label_field, col_name]]
        [field.name, label]
    x_axis = describe_axis X
    y_axis = describe_axis Y
    x_axis = describe_axis Point_Data.X
    y_axis = describe_axis Point_Data.Y
    is_valid axis_pair =
        label = axis_pair.at 1
        label.is_valid && (self.all_columns.length > 0)
@ -136,7 +132,7 @@ Table.axes self =
Vector.Vector.point_data : Vector -> Object
Vector.Vector.point_data self =
    self.map_with_index <| i-> elem->
        Json.from_pairs [[X.name,i],[Y.name,elem]]
        Json.from_pairs [[Point_Data.X.name, i], [Point_Data.Y.name, elem]]

## PRIVATE
bound_data bounds data = case bounds of
@ -153,12 +149,8 @@ bound_data bounds data = case bounds of

        min_x<=x && x<=max_x && min_y<=y && y<=max_y

# TODO Dubious constructor export
from project.Scatter_Plot.Extreme import all
from project.Scatter_Plot.Extreme export all

type Extreme
    Extreme_Data min_x max_x min_y max_y
    Value min_x max_x min_y max_y

## PRIVATE
limit_data limit data = case limit of
@ -172,11 +164,11 @@ limit_data limit data = case limit of
            new_min_y = if y current.min_y.second > y point then [idx, point] else current.min_y
            new_max_x = if x current.max_x.second < x point then [idx, point] else current.max_x
            new_max_y = if y current.max_y.second < y point then [idx, point] else current.max_y
            Extreme_Data new_min_x new_max_x new_min_y new_max_y
            Extreme.Value new_min_x new_max_x new_min_y new_max_y

        first = [0, data.first]
        bounds = case data.fold_with_index (Extreme_Data first first first first) update_extreme of
            Extreme_Data min_x max_x min_y max_y -> [min_x, max_x, min_y, max_y]
        bounds = case data.fold_with_index (Extreme.Value first first first first) update_extreme of
            Extreme.Value min_x max_x min_y max_y -> [min_x, max_x, min_y, max_y]
            _ -> []
        extreme = Map.from_vector bounds . values
@ -199,9 +191,9 @@ json_from_vector vec bounds limit =

## PRIVATE

   Default preprocessor for the scatterplot visualization.
   Default preprocessor for the scatter-plot visualization.

   Generates JSON text describing the scatterplot visualization.
   Generates JSON text describing the scatter-plot visualization.

   Arguments:
   - value: the value to be visualized.
@ -214,4 +206,3 @@ process_to_json_text value bounds=Nothing limit=Nothing =
        _ -> json_from_vector value.to_vector bounds limit

    json.to_text
@ -7,7 +7,7 @@ import Standard.Database.Data.Column.Column as Database_Column

import Standard.Table.Data.Column_Selector.Column_Selector

import Standard.Visualization.Helpers
import project.Helpers

# TODO add an initial offset to fully support lazy visualizations
@ -22,7 +22,7 @@ import Standard.Visualization.Helpers
   In case of Database backed data, it materializes a fragment of the data.
prepare_visualization : Any -> Integer -> Json
prepare_visualization x max_rows=1000 = Helpers.recover_errors <| case x of
    Dataframe_Table.Table_Data _ ->
    _ : Dataframe_Table ->
        dataframe = x.take (First max_rows)
        all_rows_count = x.row_count
        included_rows = dataframe.row_count
@ -30,7 +30,7 @@ prepare_visualization x max_rows=1000 = Helpers.recover_errors <| case x of
            Dataframe_Column.from_vector "" (Vector.new included_rows i->i)
        make_json dataframe [index] all_rows_count

    Database_Table.Table_Data _ _ _ _ ->
    _ : Database_Table ->
        # Materialize a table with indices as normal columns (because dataframe does not support multi-indexing).
        df = x.reset_index.read max_rows
        # Then split into actual columns and indices.
@ -40,9 +40,9 @@ prepare_visualization x max_rows=1000 = Helpers.recover_errors <| case x of
        make_json vis_df indices all_rows_count

    # We display columns as 1-column tables.
    Dataframe_Column.Column_Data _ ->
    _ : Dataframe_Column ->
        prepare_visualization x.to_table max_rows
    Database_Column.Column_Data _ _ _ _ _ ->
    _ : Database_Column ->
        prepare_visualization x.to_table max_rows

# TODO [RW] Should we truncate Vectors?
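The dispatch above also moves from matching on concrete constructors (`Table_Data _`, `Column_Data _ ...`) to type patterns (`_ : Type`), so it no longer depends on constructor names or arity; a minimal sketch of the pattern on builtin types (illustrative, not from this commit):

    describe_value v = case v of
        _ : Text -> "a text"
        _ : Integer -> "an integer"
        _ -> "something else"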
@ -1,6 +1,6 @@
from Standard.Base import all
from Standard.Table import Column
import Standard.Table.Data.Column as Column_Module
import Standard.Table.Data.Column.Empty_Error
import Standard.Table.Data.Storage.Storage
import Standard.Examples
@ -13,14 +13,14 @@ spec = Test.group "Columns" <|
    empty_column = Column.from_vector "Test" []

    Test.specify "should correctly map storage types" <|
        Column_Module.storage_types.at Java_Storage.Type.LONG . should_equal Storage.Integer
        Column_Module.storage_types.at Java_Storage.Type.DOUBLE . should_equal Storage.Decimal
        Column_Module.storage_types.at Java_Storage.Type.STRING . should_equal Storage.Text
        Column_Module.storage_types.at Java_Storage.Type.BOOL . should_equal Storage.Boolean
        Column_Module.storage_types.at Java_Storage.Type.OBJECT . should_equal Storage.Any
        Column_Module.storage_types.at Java_Storage.Type.DATE . should_equal Storage.Date
        Column_Module.storage_types.at Java_Storage.Type.TIME_OF_DAY . should_equal Storage.Time_Of_Day
        Column_Module.storage_types.at Java_Storage.Type.DATE_TIME . should_equal Storage.Date_Time
        Storage.types.at Java_Storage.Type.LONG . should_equal Storage.Integer
        Storage.types.at Java_Storage.Type.DOUBLE . should_equal Storage.Decimal
        Storage.types.at Java_Storage.Type.STRING . should_equal Storage.Text
        Storage.types.at Java_Storage.Type.BOOL . should_equal Storage.Boolean
        Storage.types.at Java_Storage.Type.OBJECT . should_equal Storage.Any
        Storage.types.at Java_Storage.Type.DATE . should_equal Storage.Date
        Storage.types.at Java_Storage.Type.TIME_OF_DAY . should_equal Storage.Time_Of_Day
        Storage.types.at Java_Storage.Type.DATE_TIME . should_equal Storage.Date_Time

    Test.specify "should allow getting specific elements" <|
        test_column.at 0 . should_equal 1
@ -48,12 +48,12 @@ spec = Test.group "Columns" <|
    Test.specify "should be able to get the first / head element" <|
        test_column.first . should_equal 1
        test_column.head . should_equal 1
        empty_column.first.should_fail_with Column_Module.Empty_Error
        empty_column.head.should_fail_with Column_Module.Empty_Error
        empty_column.first.should_fail_with Empty_Error
        empty_column.head.should_fail_with Empty_Error

    Test.specify "should be able to get the last element" <|
        test_column.last . should_equal 6
        empty_column.last.should_fail_with Column_Module.Empty_Error
        empty_column.last.should_fail_with Empty_Error

    Test.specify "should be able to be reversed" <|
        expected_1 = Column.from_vector "Test" [6, 4, 2, 5, 3, 1]
@ -5,8 +5,8 @@ from Standard.Table.Data.Aggregate_Column.Aggregate_Column import all
from Standard.Table.Errors import No_Input_Columns_Selected, Missing_Input_Columns_Data, No_Such_Column_Error_Data

from Standard.Database import all
from Standard.Database.Data.SQL import SQL_Type
import Standard.Database.Data.Dialect
import Standard.Database.Data.SQL_Type.SQL_Type
from Standard.Database.Data.Table import combine_names, fresh_names
from Standard.Database.Errors import Unsupported_Database_Operation_Error_Data
@ -25,7 +25,7 @@ spec =
        table3 = ["T3", [["A", int], ["E", bool], ["F", int]]]
        tables = Map.from_vector [table1, table2, table3]
        Fake_Test_Connection.make Dialect.sqlite tables
    t1 = test_connection.query (Table_Name "T1")
    t1 = test_connection.query (SQL_Query.Table_Name "T1")
    Test.group "[Codegen] JSON serialization" <|
        Test.specify "should serialize Tables and Columns to their SQL representation" <|
            q1 = t1.filter (t1.at "A" == 42) . to_json
@ -99,8 +99,8 @@ spec =
            c2.to_sql.prepare . should_equal ['SELECT "T1"."B" AS "B" FROM "T1" AS "T1" WHERE ("T1"."A" = "T1"."C")', []]

    Test.group "[Codegen] Joining Tables" <|
        t2 = test_connection.query (Table_Name "T2")
        t3 = test_connection.query (Table_Name "T3")
        t2 = test_connection.query (SQL_Query.Table_Name "T2")
        t3 = test_connection.query (SQL_Query.Table_Name "T3")
        Test.specify "should allow joining tables index-on-index" <|
            r1 = t1.set_index 'A' . join (t2.set_index 'D')
            r1.to_sql.prepare . should_equal ['SELECT "T1"."B" AS "B", "T1"."C" AS "C", "T2"."E" AS "E", "T2"."F" AS "F" FROM (SELECT "T1"."B" AS "B", "T1"."C" AS "C", "T1"."A" AS "A", "T1"."A" AS "A_1" FROM "T1" AS "T1") AS "T1" LEFT JOIN (SELECT "T2"."E" AS "E", "T2"."F" AS "F", "T2"."D" AS "D" FROM "T2" AS "T2") AS "T2" ON ("T1"."A_1" = "T2"."D")', []]
@ -123,8 +123,8 @@ spec =
        table2 = ["T2", [["X", int], ["A", int], ["B", int]]]
        tables = Map.from_vector [table1, table2]
        Fake_Test_Connection.make Dialect.sqlite tables
    t1 = connection.query (Table_Name "T1")
    t2 = connection.query (Table_Name "T2")
    t1 = connection.query (SQL_Query.Table_Name "T1")
    t2 = connection.query (SQL_Query.Table_Name "T2")
    (t1.set_index "X").join (t2.set_index "X") . should_fail_with Illegal_State_Error_Data

    Test.specify "should ensure that name suffixes are distinct" <|
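The tests now name the `SQL_Query` constructors in full instead of relying on the previously re-exported `Table_Name` and `Raw_SQL`; a minimal sketch against any connection (the table name and query are illustrative):

    t = connection.query (SQL_Query.Table_Name "T1")
    one = connection.read (SQL_Query.Raw_SQL "SELECT 1")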
@ -1,10 +1,11 @@
from Standard.Base import all

import Standard.Table.Data.Table as Materialized_Table
from Standard.Database import SQL_Query, Raw_SQL, Table_Name
import Standard.Table.Data.Table.Table as Materialized_Table
from Standard.Database import SQL_Query
import Standard.Database.Data.Table as Database_Table
from Standard.Database.Data.SQL import Statement, SQL_Type
import Standard.Database.Internal.IR
import Standard.Database.Data.SQL_Statement.SQL_Statement
import Standard.Database.Data.SQL_Type.SQL_Type
import Standard.Database.Internal.IR.Context.Context

type Fake_Test_Connection
    # type Fake_Test_Connection.Value (tables : Map Text (Vector [Text, SQL_Type])) (dialect : Text)
@ -16,14 +17,14 @@ type Fake_Test_Connection
       - query: name of the table or sql statement to query.
         If supplied as `Text`, the name is checked against the `tables` list to determine if it is a table or a query.
       - alias: optionally specify a friendly alias for the query.
    query : Text | SQL_Query -> Text -> Database_Table
    query : Text | SQL_Query -> Text -> Database_Table.Table
    query self query alias="" = case query of
        Text -> self.query (Table_Name query) alias
        Raw_SQL _ ->
        Text -> self.query (SQL_Query.Table_Name query) alias
        SQL_Query.Raw_SQL _ ->
            Error.throw (Illegal_Argument_Error "Cannot query a fake connection with raw SQL")
        Table_Name name ->
        SQL_Query.Table_Name name ->
            columns = self.tables.get name
            Database_Table.make_table self name columns (IR.context_for_table name)
            Database_Table.make_table self name columns (Context.for_table name)

    ## Execute the query and load the results into memory as a Table.

@ -32,7 +33,7 @@ type Fake_Test_Connection
         If supplied as `Text`, the name is checked against the `tables` list to determine if it is a table or a query.
       - alias: optionally specify a friendly alias for the query.
       - limit: the maximum number of rows to return (default 1000).
    read : Text | SQL_Query -> Text -> Integer | Nothing -> Materialized_Table.Table
    read : Text | SQL_Query -> Text -> Integer | Nothing -> Materialized_Table
    read self _ _="" _=Nothing =
        Error.throw "Materialization not supported on fake connection."

@ -41,7 +42,7 @@ type Fake_Test_Connection
    close self = Nothing

    ## PRIVATE
    execute_update : Text | Statement -> Integer
    execute_update : Text | SQL_Statement -> Integer
    execute_update self _ =
        Error.throw "Materialization not supported on fake connection."
@ -7,11 +7,10 @@ from Standard.Base.System.Process.Exit_Code import Exit_Success
from Standard.Table import Table
from Standard.Table.Data.Aggregate_Column.Aggregate_Column import all hiding First

import Standard.Database
from Standard.Database import all
from Standard.Database import Database, Postgres, SQL_Query, Credentials, SSL_Mode
from Standard.Database.Errors import SQL_Error
from Standard.Database.Data.SQL import SQL_Type
from Standard.Database.Internal.Postgres.Pgpass import Pgpass_Entry_Data
import Standard.Database.Data.SQL_Type.SQL_Type

import Standard.Database.Internal.Postgres.Pgpass

import Standard.Test
@ -39,14 +38,14 @@ postgres_specific_spec connection db_name pending =

    Test.specify "should allow changing schema" <|
        new_connection = connection.set_schema "information_schema"
        new_schema = new_connection.read (Raw_SQL "SELECT current_schema()") . at 0 . to_vector . first
        new_schema = new_connection.read (SQL_Query.Raw_SQL "SELECT current_schema()") . at 0 . to_vector . first
        new_schema . should_equal "information_schema"

    databases = connection.databases.filter d->((d!=db_name) && (d!='rdsadmin'))
    pending_database = if databases.length != 0 then Nothing else "Cannot test changing database unless two databases are defined."
    Test.specify "should allow changing database" pending=pending_database <|
        new_connection = connection.set_database databases.first
        new_database = new_connection.read (Raw_SQL "SELECT current_database()") . at 0 . to_vector . first
        new_database = new_connection.read (SQL_Query.Raw_SQL "SELECT current_database()") . at 0 . to_vector . first
        new_database . should_equal databases.first

    Test.group "[PostgreSQL] Tables and Table Types" <|
@ -91,7 +90,7 @@ postgres_specific_spec connection db_name pending =
    Test.group "[PostgreSQL] Info" pending=pending <|
        tinfo = Name_Generator.random_name "Tinfo"
        connection.execute_update 'CREATE TEMPORARY TABLE "'+tinfo+'" ("strs" VARCHAR, "ints" INTEGER, "bools" BOOLEAN, "reals" REAL, "doubles" DOUBLE PRECISION)'
        t = connection.query (Table_Name tinfo)
        t = connection.query (SQL_Query.Table_Name tinfo)
        t.insert ["a", Nothing, False, 1.2, 0.000000000001]
        t.insert ["abc", Nothing, Nothing, 1.3, Nothing]
        t.insert ["def", 42, True, 1.4, 10]
@ -118,7 +117,7 @@ postgres_specific_spec connection db_name pending =
    Test.group "[PostgreSQL] Table.aggregate should correctly infer result types" pending=pending <|
        name = Name_Generator.random_name "Ttypes"
        connection.execute_update 'CREATE TEMPORARY TABLE "'+name+'" ("txt" VARCHAR, "i1" SMALLINT, "i2" INT, "i3" BIGINT, "i4" NUMERIC, "r1" REAL, "r2" DOUBLE PRECISION, "bools" BOOLEAN)'
        t = connection.query (Table_Name name)
        t = connection.query (SQL_Query.Table_Name name)
        Test.specify "Concatenate, Shortest and Longest" <|
            r = t.aggregate [Concatenate "txt", Shortest "txt", Longest "txt"]
            r.columns.at 0 . sql_type . should_equal SQL_Type.text
@ -198,27 +197,27 @@ table_spec =
    ssl_pending = if ca_cert_file.is_nothing then "PostgreSQL SSL test not configured." else Nothing
    Test.group "[PostgreSQL] SSL connectivity tests" pending=ssl_pending <|
        Test.specify "should connect without ssl parameter" <|
            Database.connect (Postgres db_host db_port db_name credentials=(Username_And_Password db_user db_password)) . should_succeed
            Database.connect (Postgres db_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password)) . should_succeed

        Test.specify "should connect, requiring SSL" <|
            Database.connect (Postgres db_host db_port db_name credentials=(Username_And_Password db_user db_password) use_ssl=SSL_Mode.Require) . should_succeed
            Database.connect (Postgres db_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password) use_ssl=SSL_Mode.Require) . should_succeed

        Test.specify "should connect and be able to verify the certificate" <|
            Database.connect (Postgres db_host db_port db_name credentials=(Username_And_Password db_user db_password) use_ssl=(SSL_Mode.Verify_CA ca_cert_file)) . should_succeed
            Database.connect (Postgres db_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password) use_ssl=(SSL_Mode.Verify_CA ca_cert_file)) . should_succeed

            ## The default certificate should not accept the self-signed certificate.
            ca_fail = Database.connect (Postgres db_host db_port db_name credentials=(Username_And_Password db_user db_password) use_ssl=SSL_Mode.Verify_CA)
            ca_fail = Database.connect (Postgres db_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password) use_ssl=SSL_Mode.Verify_CA)
            ca_fail.is_error . should_equal True
            ca_fail.catch SQL_Error . is_a SQL_Error . should_equal True

        Test.specify "should connect and be able to verify the host name against the certificate" <|
            Database.connect (Postgres db_host db_port db_name credentials=(Username_And_Password db_user db_password) use_ssl=(SSL_Mode.Full_Verification ca_cert_file)) . should_succeed
            Database.connect (Postgres db_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password) use_ssl=(SSL_Mode.Full_Verification ca_cert_file)) . should_succeed

        alternate_host = Environment.get "ENSO_DATABASE_TEST_ALTERNATE_HOST" . if_nothing <|
            if db_host == "127.0.0.1" then "localhost" else Nothing
        pending_alternate = if alternate_host.is_nothing then "Alternative host name not configured." else Nothing
        Test.specify "should fail to connect with alternate host name not valid in certificate" pending=pending_alternate <|
            ca_fail = Database.connect (Postgres alternate_host db_port db_name credentials=(Username_And_Password db_user db_password) use_ssl=(SSL_Mode.Full_Verification ca_cert_file))
            ca_fail = Database.connect (Postgres alternate_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password) use_ssl=(SSL_Mode.Full_Verification ca_cert_file))
            ca_fail.is_error . should_equal True
            ca_fail.catch SQL_Error . is_a SQL_Error . should_equal True

@ -228,7 +227,7 @@ table_spec =
            connection = Error.throw message
            run_tests connection pending=message
        False ->
            connection = Database.connect (Postgres (db_host_port.at 0) db_port db_name credentials=(Username_And_Password db_user db_password))
            connection = Database.connect (Postgres (db_host_port.at 0) db_port db_name credentials=(Credentials.Username_And_Password db_user db_password))
            run_tests connection db_name
@ -240,18 +239,18 @@ pgpass_spec = Test.group "[PostgreSQL] .pgpass" <|
    Test.specify "should correctly parse the file, including escapes, blank lines and comments" <|
        result = Pgpass.parse_file pgpass_file
        result.length . should_equal 12
        e1 = Pgpass_Entry_Data "localhost" "5432" "postgres" "postgres" "postgres"
        e2 = Pgpass_Entry_Data "192.168.4.0" "1234" "foo" "bar" "baz"
        e3 = Pgpass_Entry_Data "host with : semicolons in it? what?" "*" "*" "*" "well yes, that is possible, the :password: can contain those as well"
        e4 = Pgpass_Entry_Data ":" ":" ":" ":" ":"
        e5 = Pgpass_Entry_Data "you can escape an escape too: see \\" "*" "*" "*" "yes it is possible"
        e6 = Pgpass_Entry_Data "other escapes like \n or \? " "*" "*" "*" "are just parsed as-is"
        e7 = Pgpass_Entry_Data "a trailing escape character" "*" "*" "*" "is treated as a regular slash\"
        e8 = Pgpass_Entry_Data "passwords should preserve leading space" "*" "*" "*" " pass"
        e9 = Pgpass_Entry_Data "\:" "*" "*" "*" "\:"
        e10 = Pgpass_Entry_Data "::1" "*" "database_name" "user_that_has_no_password" ""
        e11 = Pgpass_Entry_Data "*" "*" "*" "*" "fallback_password"
        e12 = Pgpass_Entry_Data "order_matters" "1234" "this" "will_still_match_the_fallback_password" "not_this_one"
        e1 = Pgpass.Pgpass_Entry.Value "localhost" "5432" "postgres" "postgres" "postgres"
        e2 = Pgpass.Pgpass_Entry.Value "192.168.4.0" "1234" "foo" "bar" "baz"
        e3 = Pgpass.Pgpass_Entry.Value "host with : semicolons in it? what?" "*" "*" "*" "well yes, that is possible, the :password: can contain those as well"
        e4 = Pgpass.Pgpass_Entry.Value ":" ":" ":" ":" ":"
        e5 = Pgpass.Pgpass_Entry.Value "you can escape an escape too: see \\" "*" "*" "*" "yes it is possible"
        e6 = Pgpass.Pgpass_Entry.Value "other escapes like \n or \? " "*" "*" "*" "are just parsed as-is"
        e7 = Pgpass.Pgpass_Entry.Value "a trailing escape character" "*" "*" "*" "is treated as a regular slash\"
        e8 = Pgpass.Pgpass_Entry.Value "passwords should preserve leading space" "*" "*" "*" " pass"
        e9 = Pgpass.Pgpass_Entry.Value "\:" "*" "*" "*" "\:"
        e10 = Pgpass.Pgpass_Entry.Value "::1" "*" "database_name" "user_that_has_no_password" ""
        e11 = Pgpass.Pgpass_Entry.Value "*" "*" "*" "*" "fallback_password"
        e12 = Pgpass.Pgpass_Entry.Value "order_matters" "1234" "this" "will_still_match_the_fallback_password" "not_this_one"
        entries = [e1, e2, e3, e4, e5, e6, e7, e8, e9, e10, e11, e12]
        result.should_equal entries
@ -313,7 +312,7 @@ connection_setup_spec = Test.group "[PostgreSQL] Connection setup" <|

    add_ssl props = props+[Pair_Data 'sslmode' 'prefer']
    Test.specify "should use the given credentials" <|
        c = Postgres credentials=(Username_And_Password "myuser" "mypass")
        c = Postgres credentials=(Credentials.Username_And_Password "myuser" "mypass")
        c.jdbc_url . should_equal "jdbc:postgresql://localhost:5432"
        c.jdbc_properties . should_equal <| add_ssl [Pair_Data "user" "myuser", Pair_Data "password" "mypass"]
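Putting the credential changes together, a connection now spells out the `Credentials` constructor explicitly; a minimal sketch (host, port, database name and credentials are illustrative):

    connection = Database.connect (Postgres "localhost" 5432 "my_db" credentials=(Credentials.Username_And_Password "myuser" "mypass"))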
@ -3,8 +3,7 @@ import Standard.Base.Runtime.Ref

from Standard.Table import Table

import Standard.Database
from Standard.Database import all
from Standard.Database import Database, Redshift, AWS_Credential, SQL_Query

import Standard.Test
@ -17,7 +16,7 @@ redshift_specific_spec connection pending =
    Test.group "[Redshift] Info" pending=pending <|
        tinfo = Name_Generator.random_name "Tinfo"
        connection.execute_update 'CREATE TEMPORARY TABLE "'+tinfo+'" ("strs" VARCHAR, "ints" INTEGER, "bools" BOOLEAN, "reals" REAL)'
        t = connection.query (Table_Name tinfo)
        t = connection.query (SQL_Query.Table_Name tinfo)
        t.insert ["a", Nothing, False, 1.2]
        t.insert ["abc", Nothing, Nothing, 1.3]
        t.insert ["def", 42, True, 1.4]
@ -82,7 +81,7 @@ connect_via_json_config =
    db_name = uri.at 2

    user = creds.get 'db_user'
    Redshift db_uri db_port db_name credentials=(AWS_Key user access_key secret_key)
    Redshift db_uri db_port db_name credentials=(AWS_Credential.Key user access_key secret_key)

connect_via_aws_environment db_host_port =
    db_host_port_split = uri_parse db_host_port
@ -94,8 +93,8 @@ connect_via_aws_environment db_host_port =
    access_key = Environment.get "AWS_ACCESS_KEY_ID"
    secret_key = Environment.get "AWS_SECRET_ACCESS_KEY"

    credentials = if (access_key.is_nothing || secret_key.is_nothing) then AWS_Profile db_user (Environment.get "AWS_PROFILE" . if_nothing '') else
        AWS_Key db_user access_key secret_key
    credentials = if (access_key.is_nothing || secret_key.is_nothing) then AWS_Credential.Profile db_user (Environment.get "AWS_PROFILE" . if_nothing '') else
        AWS_Credential.Key db_user access_key secret_key

    Redshift db_uri db_port db_name credentials=credentials
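The same qualification applies on the Redshift side, where credentials now go through `AWS_Credential`; a minimal sketch (all identifiers illustrative):

    credentials = AWS_Credential.Key "db_user" "my_access_key" "my_secret_key"
    connection = Database.connect (Redshift "db-host" 5439 "my_db" credentials=credentials)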
@ -3,8 +3,7 @@ import Standard.Base.Runtime.Ref

from Standard.Table import Table

import Standard.Database
from Standard.Database import all
from Standard.Database import Database, SQLite, In_Memory, SQL_Query
from Standard.Database.Errors import SQL_Error_Data

import Standard.Test
@ -67,17 +66,17 @@ sqlite_specific_spec connection =

    Test.group "[SQLite] Error Handling" <|
        Test.specify "should wrap errors" <|
            connection.read (Raw_SQL "foobar") . should_fail_with SQL_Error_Data
            connection.read (SQL_Query.Raw_SQL "foobar") . should_fail_with SQL_Error_Data
            connection.execute_update "foobar" . should_fail_with SQL_Error_Data

            action = connection.read (Raw_SQL "SELECT A FROM undefined_table")
            action = connection.read (SQL_Query.Raw_SQL "SELECT A FROM undefined_table")
            action . should_fail_with SQL_Error_Data
            action.catch.to_text . should_equal "There was an SQL error: '[SQLITE_ERROR] SQL error or missing database (no such table: undefined_table)'. [Query was: SELECT A FROM undefined_table]"

    Test.group "[SQLite] Metadata" <|
        tinfo = Name_Generator.random_name "Tinfo"
        connection.execute_update 'CREATE TABLE "'+tinfo+'" ("strs" VARCHAR, "ints" INTEGER, "bools" BOOLEAN, "reals" REAL)'
        t = connection.query (Table_Name tinfo)
        t = connection.query (SQL_Query.Table_Name tinfo)
        t.insert ["a", Nothing, False, 1.2]
        t.insert ["abc", Nothing, Nothing, 1.3]
        t.insert ["def", 42, True, 1.4]
@ -1,7 +1,6 @@
from Standard.Base import all

from Standard.Database import all
import Standard.Database
from Standard.Database import Database, SQLite, SQL_Query

import Standard.Visualization.SQL.Visualization as Visualization
@ -9,7 +8,7 @@ import Standard.Test

visualization_spec connection =
    connection.execute_update 'CREATE TABLE "T" ("A" VARCHAR, "B" INTEGER, "C" INTEGER)'
    t = connection.query (Table_Name "T")
    t = connection.query (SQL_Query.Table_Name "T")
    Test.group "SQL Visualization" <|
        Test.specify "should provide type metadata for interpolations" <|
            q = t.filter ((t.at "B" == 2) && (t.at "A" == True)) . at "C"
@ -3,7 +3,7 @@ from Standard.Base import all
from Standard.Table import Table, Aggregate_Column

from Standard.Database import SQLite
import Standard.Database
from Standard.Database import Database
import Standard.Database.Data.Table.Table as Database_Table

import Standard.Visualization.Table.Visualization as Visualization
@ -32,7 +32,7 @@ visualization_spec connection =

    Test.group "Table Visualization" <|
        Test.specify "should wrap internal errors" <|
            bad_table = Database_Table.Table_Data Nothing Nothing Nothing Nothing
            bad_table = Database_Table.Value Nothing Nothing Nothing Nothing
            vis = Visualization.prepare_visualization bad_table 2
            json = Json.from_pairs [["error", "Method `meta_index` of Nothing could not be found."]]
            vis . should_equal json.to_text
@ -6,11 +6,11 @@ import Standard.Visualization

import Standard.Test

from Standard.Visualization.File_Upload import File_Being_Uploaded_Data
import Standard.Visualization.File_Upload.File_Being_Uploaded

spec = Test.group "File uploads" <|
    Test.specify "should be able to be signalled as uploading" <|
        Visualization.file_uploading "file" . should_fail_with File_Being_Uploaded_Data
        Visualization.file_uploading "file" . should_fail_with File_Being_Uploaded.Value

    Test.specify "should work whether a textual or file path is provided" <|
        result_file = Visualization.file_uploading Examples.csv . catch
@ -18,5 +18,3 @@ spec = Test.group "File uploads" <|

        result_text = Visualization.file_uploading Examples.csv_path . catch
        result_text.file_path . should_equal Examples.csv_path