Separating Redshift connector from Database library into a new AWS library (#6550)

Related to #5777
Radosław Waśko 2023-05-04 19:36:51 +02:00 committed by GitHub
parent a832c5e2bb
commit 41a8257e8d
111 changed files with 403 additions and 190 deletions
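
In user code, the split means the Redshift connector is now imported from the new `Standard.AWS` library instead of `Standard.Database`; the test and sample updates further down in this diff show the pattern. A minimal before/after sketch (imports only, taken from this diff):

# Before this change:
from Standard.Database import Database, Redshift, AWS_Credential, SQL_Query

# After this change, the Redshift connector and AWS credentials come from Standard.AWS:
from Standard.Database import Database, SQL_Query
from Standard.AWS import Redshift, AWS_Credential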

View File

@ -408,6 +408,7 @@
`Text.write`.][6459]
- [Implemented `create_database_table` allowing saving queries as database
tables.][6467]
- [Moved `Redshift` connector into a separate `AWS` library.][6550]
[debug-shortcuts]:
https://github.com/enso-org/enso/blob/develop/app/gui/docs/product/shortcuts.md#debug
@ -614,6 +615,7 @@
[6429]: https://github.com/enso-org/enso/pull/6429
[6459]: https://github.com/enso-org/enso/pull/6459
[6467]: https://github.com/enso-org/enso/pull/6467
[6550]: https://github.com/enso-org/enso/pull/6550
#### Enso Compiler

View File

@ -131,7 +131,8 @@ GatherLicenses.distributions := Seq(
),
makeStdLibDistribution("Table", Distribution.sbtProjects(`std-table`)),
makeStdLibDistribution("Database", Distribution.sbtProjects(`std-database`)),
makeStdLibDistribution("Image", Distribution.sbtProjects(`std-image`))
makeStdLibDistribution("Image", Distribution.sbtProjects(`std-image`)),
makeStdLibDistribution("AWS", Distribution.sbtProjects(`std-aws`))
)
GatherLicenses.licenseConfigurations := Set("compile")
@ -295,6 +296,7 @@ lazy val enso = (project in file("."))
`std-google-api`,
`std-image`,
`std-table`,
`std-aws`,
`simple-httpbin`,
`enso-test-java-helpers`
)
@ -1376,6 +1378,7 @@ lazy val runtime = (project in file("engine/runtime"))
.dependsOn(`std-database` / Compile / packageBin)
.dependsOn(`std-google-api` / Compile / packageBin)
.dependsOn(`std-table` / Compile / packageBin)
.dependsOn(`std-aws` / Compile / packageBin)
.value
)
.settings(
@ -1905,6 +1908,8 @@ val `google-api-polyglot-root` =
stdLibComponentRoot("Google_Api") / "polyglot" / "java"
val `database-polyglot-root` =
stdLibComponentRoot("Database") / "polyglot" / "java"
val `std-aws-polyglot-root` =
stdLibComponentRoot("AWS") / "polyglot" / "java"
lazy val `std-base` = project
.in(file("std-bits") / "base")
@ -2065,13 +2070,9 @@ lazy val `std-database` = project
Compile / packageBin / artifactPath :=
`database-polyglot-root` / "std-database.jar",
libraryDependencies ++= Seq(
"org.netbeans.api" % "org-openide-util-lookup" % netbeansApiVersion % "provided",
"org.xerial" % "sqlite-jdbc" % sqliteVersion,
"org.postgresql" % "postgresql" % "42.4.0",
"com.amazon.redshift" % "redshift-jdbc42" % "2.1.0.9",
"com.amazonaws" % "aws-java-sdk-core" % "1.12.273",
"com.amazonaws" % "aws-java-sdk-redshift" % "1.12.273",
"com.amazonaws" % "aws-java-sdk-sts" % "1.12.273"
"org.netbeans.api" % "org-openide-util-lookup" % netbeansApiVersion % "provided",
"org.xerial" % "sqlite-jdbc" % sqliteVersion,
"org.postgresql" % "postgresql" % "42.4.0"
),
Compile / packageBin := Def.task {
val result = (Compile / packageBin).value
@ -2086,6 +2087,37 @@ lazy val `std-database` = project
}.value
)
.dependsOn(`std-base` % "provided")
.dependsOn(`std-table` % "provided")
lazy val `std-aws` = project
.in(file("std-bits") / "aws")
.settings(
frgaalJavaCompilerSetting,
autoScalaLibrary := false,
Compile / packageBin / artifactPath :=
`std-aws-polyglot-root` / "std-aws.jar",
libraryDependencies ++= Seq(
"org.netbeans.api" % "org-openide-util-lookup" % netbeansApiVersion % "provided",
"com.amazon.redshift" % "redshift-jdbc42" % "2.1.0.9",
"com.amazonaws" % "aws-java-sdk-core" % "1.12.273",
"com.amazonaws" % "aws-java-sdk-redshift" % "1.12.273",
"com.amazonaws" % "aws-java-sdk-sts" % "1.12.273"
),
Compile / packageBin := Def.task {
val result = (Compile / packageBin).value
val _ = StdBits
.copyDependencies(
`std-aws-polyglot-root`,
Seq("std-aws.jar"),
ignoreScalaLibrary = true
)
.value
result
}.value
)
.dependsOn(`std-base` % "provided")
.dependsOn(`std-table` % "provided")
.dependsOn(`std-database` % "provided")
/* Note [Native Image Workaround for GraalVM 20.2]
* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@ -2191,7 +2223,14 @@ runEngineDistribution := {
val allStdBitsSuffix = List("All", "AllWithIndex")
val stdBitsProjects =
List("Base", "Database", "Google_Api", "Image", "Table") ++ allStdBitsSuffix
List(
"AWS",
"Base",
"Database",
"Google_Api",
"Image",
"Table"
) ++ allStdBitsSuffix
val allStdBits: Parser[String] =
stdBitsProjects.map(v => v: Parser[String]).reduce(_ | _)
@ -2240,6 +2279,8 @@ pkgStdLibInternal := Def.inputTask {
(`std-table` / Compile / packageBin).value
case "TestHelpers" =>
(`enso-test-java-helpers` / Compile / packageBin).value
case "AWS" =>
(`std-aws` / Compile / packageBin).value
case _ if buildAllCmd =>
(`std-base` / Compile / packageBin).value
(`enso-test-java-helpers` / Compile / packageBin).value
@ -2247,6 +2288,7 @@ pkgStdLibInternal := Def.inputTask {
(`std-database` / Compile / packageBin).value
(`std-image` / Compile / packageBin).value
(`std-google-api` / Compile / packageBin).value
(`std-aws` / Compile / packageBin).value
case _ =>
}
val libs =

View File

@ -1,10 +1,2 @@
/************************************************************************
* Licensed under Public Domain (CC0) *
* *
* To the extent possible under law, the person who associated CC0 with *
* this code has waived all copyright and related or neighboring *
* rights to this code. *
* *
* You should have received a copy of the CC0 legalcode along with this *
* work. If not, see <http://creativecommons.org/publicdomain/zero/1.0/>.*
************************************************************************/
See https://github.com/reactive-streams/reactive-streams-jvm for more information.

View File

@ -0,0 +1,77 @@
Enso
Copyright 2020 - 2023 New Byte Order sp. z o. o.
'redshift-jdbc42', licensed under the Apache License, Version 2.0, is distributed with the AWS.
The license information can be found along with the copyright notices.
Copyright notices related to this dependency can be found in the directory `com.amazon.redshift.redshift-jdbc42-2.1.0.9`.
'aws-java-sdk-core', licensed under the Apache License, Version 2.0, is distributed with the AWS.
The license file can be found at `licenses/APACHE2.0`.
Copyright notices related to this dependency can be found in the directory `com.amazonaws.aws-java-sdk-core-1.12.273`.
'aws-java-sdk-redshift', licensed under the Apache License, Version 2.0, is distributed with the AWS.
The license file can be found at `licenses/APACHE2.0`.
Copyright notices related to this dependency can be found in the directory `com.amazonaws.aws-java-sdk-redshift-1.12.273`.
'aws-java-sdk-sts', licensed under the Apache License, Version 2.0, is distributed with the AWS.
The license file can be found at `licenses/APACHE2.0`.
Copyright notices related to this dependency can be found in the directory `com.amazonaws.aws-java-sdk-sts-1.12.273`.
'jmespath-java', licensed under the Apache License, Version 2.0, is distributed with the AWS.
The license file can be found at `licenses/APACHE2.0`.
Copyright notices related to this dependency can be found in the directory `com.amazonaws.jmespath-java-1.12.273`.
'jackson-annotations', licensed under the The Apache Software License, Version 2.0, is distributed with the AWS.
The license information can be found along with the copyright notices.
Copyright notices related to this dependency can be found in the directory `com.fasterxml.jackson.core.jackson-annotations-2.12.6`.
'jackson-core', licensed under the The Apache Software License, Version 2.0, is distributed with the AWS.
The license information can be found along with the copyright notices.
Copyright notices related to this dependency can be found in the directory `com.fasterxml.jackson.core.jackson-core-2.12.6`.
'jackson-databind', licensed under the The Apache Software License, Version 2.0, is distributed with the AWS.
The license information can be found along with the copyright notices.
Copyright notices related to this dependency can be found in the directory `com.fasterxml.jackson.core.jackson-databind-2.12.6.1`.
'jackson-dataformat-cbor', licensed under the The Apache Software License, Version 2.0, is distributed with the AWS.
The license file can be found at `licenses/APACHE2.0`.
Copyright notices related to this dependency can be found in the directory `com.fasterxml.jackson.dataformat.jackson-dataformat-cbor-2.12.6`.
'commons-codec', licensed under the Apache License, Version 2.0, is distributed with the AWS.
The license file can be found at `licenses/APACHE2.0`.
Copyright notices related to this dependency can be found in the directory `commons-codec.commons-codec-1.15`.
'commons-logging', licensed under the The Apache Software License, Version 2.0, is distributed with the AWS.
The license file can be found at `licenses/APACHE2.0`.
Copyright notices related to this dependency can be found in the directory `commons-logging.commons-logging-1.1.3`.
'joda-time', licensed under the Apache 2, is distributed with the AWS.
The license information can be found along with the copyright notices.
Copyright notices related to this dependency can be found in the directory `joda-time.joda-time-2.8.1`.
'httpclient', licensed under the Apache License, Version 2.0, is distributed with the AWS.
The license file can be found at `licenses/APACHE2.0`.
Copyright notices related to this dependency can be found in the directory `org.apache.httpcomponents.httpclient-4.5.13`.
'httpcore', licensed under the Apache License, Version 2.0, is distributed with the AWS.
The license file can be found at `licenses/APACHE2.0`.
Copyright notices related to this dependency can be found in the directory `org.apache.httpcomponents.httpcore-4.4.13`.
'ion-java', licensed under the The Apache License, Version 2.0, is distributed with the AWS.
The license file can be found at `licenses/APACHE2.0`.
Copyright notices related to this dependency can be found in the directory `software.amazon.ion.ion-java-1.0.2`.

View File

@ -0,0 +1,10 @@
name: AWS
namespace: Standard
version: 0.0.0-dev
license: APLv2
authors:
- name: Enso Team
email: contact@enso.org
maintainers:
- name: Enso Team
email: contact@enso.org

View File

@ -4,31 +4,32 @@ import Standard.Table.Internal.Naming_Helpers.Naming_Helpers
from Standard.Table import Aggregate_Column
from Standard.Table import Value_Type
import project.Connection.Connection.Connection
import project.Data.Dialect
import project.Data.SQL.Builder
import project.Data.SQL_Statement.SQL_Statement
import project.Data.SQL_Type.SQL_Type
import project.Data.Table.Table
import project.Internal.Base_Generator
import project.Internal.Column_Fetcher.Column_Fetcher
import project.Internal.Column_Fetcher as Column_Fetcher_Module
import project.Internal.Error_Mapper.Error_Mapper
import project.Internal.IR.Context.Context
import project.Internal.IR.From_Spec.From_Spec
import project.Internal.IR.Internal_Column.Internal_Column
import project.Internal.IR.SQL_Expression.SQL_Expression
import project.Internal.IR.SQL_Join_Kind.SQL_Join_Kind
import project.Internal.IR.Order_Descriptor.Order_Descriptor
import project.Internal.IR.Query.Query
import project.Internal.Postgres.Postgres_Dialect
import project.Internal.Common.Database_Join_Helper
import project.Internal.Postgres.Postgres_Type_Mapping.Postgres_Type_Mapping
import project.Internal.Redshift.Redshift_Error_Mapper.Redshift_Error_Mapper
import project.Internal.SQL_Type_Mapping.SQL_Type_Mapping
import project.Internal.SQL_Type_Reference.SQL_Type_Reference
import project.Internal.Statement_Setter.Statement_Setter
from project.Errors import Unsupported_Database_Operation
import Standard.Database.Connection.Connection.Connection
import Standard.Database.Data.Dialect
import Standard.Database.Data.SQL.Builder
import Standard.Database.Data.SQL_Statement.SQL_Statement
import Standard.Database.Data.SQL_Type.SQL_Type
import Standard.Database.Data.Table.Table
import Standard.Database.Internal.Base_Generator
import Standard.Database.Internal.Column_Fetcher.Column_Fetcher
import Standard.Database.Internal.Column_Fetcher as Column_Fetcher_Module
import Standard.Database.Internal.Error_Mapper.Error_Mapper
import Standard.Database.Internal.IR.Context.Context
import Standard.Database.Internal.IR.From_Spec.From_Spec
import Standard.Database.Internal.IR.Internal_Column.Internal_Column
import Standard.Database.Internal.IR.SQL_Expression.SQL_Expression
import Standard.Database.Internal.IR.SQL_Join_Kind.SQL_Join_Kind
import Standard.Database.Internal.IR.Order_Descriptor.Order_Descriptor
import Standard.Database.Internal.IR.Query.Query
import Standard.Database.Internal.Postgres.Postgres_Dialect
import Standard.Database.Internal.Common.Database_Join_Helper
import Standard.Database.Internal.Postgres.Postgres_Type_Mapping.Postgres_Type_Mapping
import Standard.Database.Internal.SQL_Type_Mapping.SQL_Type_Mapping
import Standard.Database.Internal.SQL_Type_Reference.SQL_Type_Reference
import Standard.Database.Internal.Statement_Setter.Statement_Setter
from Standard.Database.Errors import Unsupported_Database_Operation
import project.Database.Redshift.Internal.Redshift_Error_Mapper.Redshift_Error_Mapper
## PRIVATE

View File

@ -1,6 +1,6 @@
from Standard.Base import all
from project.Errors import SQL_Error
from Standard.Database.Errors import SQL_Error
## PRIVATE
type Redshift_Error_Mapper

View File

@ -1,19 +1,19 @@
from Standard.Base import all
import project.Connection.Client_Certificate.Client_Certificate
import project.Connection.Connection.Connection
import project.Connection.Connection_Options.Connection_Options
import project.Connection.Credentials.Credentials
import project.Connection.SSL_Mode.SSL_Mode
import project.Data.Dialect
import project.Internal.JDBC_Connection
import project.Internal.Postgres.Pgpass
import Standard.Database.Connection.Client_Certificate.Client_Certificate
import Standard.Database.Connection.Connection.Connection
import Standard.Database.Connection.Connection_Options.Connection_Options
import Standard.Database.Connection.Credentials.Credentials
import Standard.Database.Connection.SSL_Mode.SSL_Mode
import Standard.Database.Internal.JDBC_Connection
import Standard.Database.Internal.Postgres.Pgpass
import project.Database.Redshift.Internal.Redshift_Dialect
polyglot java import com.amazon.redshift.jdbc.Driver
polyglot java import java.util.Properties
polyglot java import org.enso.database.JDBCProxy
type Redshift_Options
type Redshift_Details
## Connect to an AWS Redshift database.
Arguments:
@ -40,7 +40,7 @@ type Redshift_Options
java_props.setProperty pair.first pair.second
jdbc_connection = JDBC_Connection.create self.jdbc_url properties
Connection.Value jdbc_connection Dialect.redshift
Connection.Value jdbc_connection Redshift_Dialect.redshift
## PRIVATE
Provides the jdbc url for the connection.
@ -49,7 +49,7 @@ type Redshift_Options
prefix = case self.credentials of
_ : AWS_Credential -> 'jdbc:redshift:iam://'
_ -> 'jdbc:redshift://'
prefix + self.host + ':' + self.port.to_text + (if self.schema == '' then '' else '/' + self.schema)
prefix + self.host + ':' + self.port.to_text + '/' + self.schema
## PRIVATE
Provides the properties for the connection.
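
Based on the `connect` and `jdbc_url` logic above and the `(Redshift host=_ port=_)` default constructor registered later in this diff, user code would connect roughly as in the following sketch; the endpoint and port are placeholders and any remaining `Redshift` arguments are assumed to keep their defaults:

from Standard.Base import all
from Standard.Database import Database
from Standard.AWS import Redshift

main =
    # Placeholder endpoint; with an AWS_Credential the prefix becomes
    # 'jdbc:redshift:iam://' as implemented in jdbc_url above.
    details = Redshift host="example-cluster.eu-west-1.redshift.amazonaws.com" port=5439
    connection = Database.connect details
    # Listing tables is assumed from the widget test further down in this diff.
    connection.tables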

View File

@ -0,0 +1,7 @@
import project.Database.Redshift.Redshift_Details.Redshift_Details
import project.Database.Redshift.Redshift_Details.AWS_Credential
export project.Database.Redshift.Redshift_Details.Redshift_Details
export project.Database.Redshift.Redshift_Details.AWS_Credential
from project.Database.Redshift.Redshift_Details.Redshift_Details export Redshift

View File

@ -1,76 +1,6 @@
Enso
Copyright 2020 - 2023 New Byte Order sp. z o. o.
'redshift-jdbc42', licensed under the Apache License, Version 2.0, is distributed with the Database.
The license information can be found along with the copyright notices.
Copyright notices related to this dependency can be found in the directory `com.amazon.redshift.redshift-jdbc42-2.1.0.9`.
'aws-java-sdk-core', licensed under the Apache License, Version 2.0, is distributed with the Database.
The license file can be found at `licenses/APACHE2.0`.
Copyright notices related to this dependency can be found in the directory `com.amazonaws.aws-java-sdk-core-1.12.273`.
'aws-java-sdk-redshift', licensed under the Apache License, Version 2.0, is distributed with the Database.
The license file can be found at `licenses/APACHE2.0`.
Copyright notices related to this dependency can be found in the directory `com.amazonaws.aws-java-sdk-redshift-1.12.273`.
'aws-java-sdk-sts', licensed under the Apache License, Version 2.0, is distributed with the Database.
The license file can be found at `licenses/APACHE2.0`.
Copyright notices related to this dependency can be found in the directory `com.amazonaws.aws-java-sdk-sts-1.12.273`.
'jmespath-java', licensed under the Apache License, Version 2.0, is distributed with the Database.
The license file can be found at `licenses/APACHE2.0`.
Copyright notices related to this dependency can be found in the directory `com.amazonaws.jmespath-java-1.12.273`.
'jackson-annotations', licensed under the The Apache Software License, Version 2.0, is distributed with the Database.
The license information can be found along with the copyright notices.
Copyright notices related to this dependency can be found in the directory `com.fasterxml.jackson.core.jackson-annotations-2.12.6`.
'jackson-core', licensed under the The Apache Software License, Version 2.0, is distributed with the Database.
The license information can be found along with the copyright notices.
Copyright notices related to this dependency can be found in the directory `com.fasterxml.jackson.core.jackson-core-2.12.6`.
'jackson-databind', licensed under the The Apache Software License, Version 2.0, is distributed with the Database.
The license information can be found along with the copyright notices.
Copyright notices related to this dependency can be found in the directory `com.fasterxml.jackson.core.jackson-databind-2.12.6.1`.
'jackson-dataformat-cbor', licensed under the The Apache Software License, Version 2.0, is distributed with the Database.
The license file can be found at `licenses/APACHE2.0`.
Copyright notices related to this dependency can be found in the directory `com.fasterxml.jackson.dataformat.jackson-dataformat-cbor-2.12.6`.
'commons-codec', licensed under the Apache License, Version 2.0, is distributed with the Database.
The license file can be found at `licenses/APACHE2.0`.
Copyright notices related to this dependency can be found in the directory `commons-codec.commons-codec-1.15`.
'commons-logging', licensed under the The Apache Software License, Version 2.0, is distributed with the Database.
The license file can be found at `licenses/APACHE2.0`.
Copyright notices related to this dependency can be found in the directory `commons-logging.commons-logging-1.1.3`.
'joda-time', licensed under the Apache 2, is distributed with the Database.
The license information can be found along with the copyright notices.
Copyright notices related to this dependency can be found in the directory `joda-time.joda-time-2.8.1`.
'httpclient', licensed under the Apache License, Version 2.0, is distributed with the Database.
The license file can be found at `licenses/APACHE2.0`.
Copyright notices related to this dependency can be found in the directory `org.apache.httpcomponents.httpclient-4.5.13`.
'httpcore', licensed under the Apache License, Version 2.0, is distributed with the Database.
The license file can be found at `licenses/APACHE2.0`.
Copyright notices related to this dependency can be found in the directory `org.apache.httpcomponents.httpcore-4.4.13`.
'checker-qual', licensed under the The MIT License, is distributed with the Database.
The license information can be found along with the copyright notices.
Copyright notices related to this dependency can be found in the directory `org.checkerframework.checker-qual-3.5.0`.
@ -85,8 +15,3 @@ Copyright notices related to this dependency can be found in the directory `org.
The license information can be found along with the copyright notices.
Copyright notices related to this dependency can be found in the directory `org.xerial.sqlite-jdbc-3.41.2.1`.
'ion-java', licensed under the The Apache License, Version 2.0, is distributed with the Database.
The license file can be found at `licenses/APACHE2.0`.
Copyright notices related to this dependency can be found in the directory `software.amazon.ion.ion-java-1.0.2`.

View File

@ -0,0 +1,23 @@
from Standard.Base import all
import Standard.Base.Errors.Unimplemented.Unimplemented
import project.Connection.Connection.Connection
import project.Connection.Connection_Options.Connection_Options
from project.Errors import SQL_Error
## Specifies the connection details for the database.
This is an interface that is implemented by particular database types, like
`Postgres_Details`, `SQLite_Details` etc.
type Connection_Details
## PRIVATE
Build the Connection resource.
Arguments:
- options: Overrides for the connection properties.
connect : Connection_Options -> Connection ! SQL_Error
connect self options =
_ = options
Unimplemented.throw "This is an interface only."

View File

@ -1,7 +1,9 @@
from Standard.Base import all
type Connection_Options
## Hold a set of key value pairs used to configure the connection.
## Additional customization options for the JDBC connection.
Hold a set of key value pairs used to configure the connection.
Value options:Vector=[]
## PRIVATE
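
For context, each key-value pair in `Connection_Options` is forwarded to the JDBC driver as a `Properties` entry (see the `java_props.setProperty` call in `Redshift_Details` above), so the keys must be properties the chosen driver understands. A small sketch with an illustrative property name:

from Standard.Database import all

# "loginTimeout" is only an illustrative JDBC driver property, not prescribed
# by this PR. The options value is passed as the second argument of
# Database.connect.
example_options = Connection_Options.Value [["loginTimeout", "10"]]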

View File

@ -1,20 +1,36 @@
from Standard.Base import all
from Standard.Base.Metadata.Widget import Single_Choice
from Standard.Base.Metadata.Choice import Option
import Standard.Base.Metadata.Display
import project.Connection.Connection_Details.Connection_Details
import project.Connection.Connection_Options.Connection_Options
import project.Connection.Postgres_Options.Postgres_Options
import project.Connection.SQLite_Options.SQLite_Options
import project.Connection.Redshift_Options.Redshift_Options
import project.Connection.Connection.Connection
from project.Errors import SQL_Error
polyglot java import org.enso.database.DatabaseConnectionDetailsSPI
## UNSTABLE
Tries to connect to the database.
Arguments:
- details: Connection_Details to use to connect.
- options: Any overriding options to use.
connect : (Postgres_Options|SQLite_Options|Redshift_Options) -> Connection_Options -> Connection ! SQL_Error
- details: `Connection_Details` specifying the database to connect to.
- options: Additional custom connection options for the JDBC connection.
@details connection_details_widget
connect : Connection_Details -> Connection_Options -> Connection ! SQL_Error
connect details options=Connection_Options.Value =
details.connect options
## PRIVATE
connection_details_widget : Single_Choice
connection_details_widget =
default_constructors = Vector.from_polyglot_array <|
DatabaseConnectionDetailsSPI.get_default_constructors False
choices = default_constructors.map pair->
name = pair.first
code = pair.second
Option name code
Single_Choice display=Display.Always values=choices
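
With the signature widened to `Connection_Details`, the same entry point serves every backend that registers itself through the SPI below; a minimal SQLite sketch (the file name is a placeholder, and `location=` mirrors the `(SQLite location=_)` default-constructor code registered by `SQLiteConnectionDetailsSPI` later in this diff):

from Standard.Base import all
from Standard.Database import all

main =
    # A placeholder database file inside the current project's data directory.
    f = enso_project.data / "example.db"
    connection = Database.connect (SQLite location=f)
    connection.tables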

View File

@ -11,7 +11,7 @@ import project.Internal.Postgres.Pgpass
polyglot java import org.postgresql.Driver
type Postgres_Options
type Postgres_Details
## Connect to a PostgreSQL database.
Arguments:
@ -41,7 +41,7 @@ type Postgres_Options
## Cannot use default argument values as gets in an infinite loop if you do.
make_new database schema =
Postgres_Options.Postgres self.host self.port (database.if_nothing self.database) (schema.if_nothing self.schema) self.credentials self.use_ssl self.client_cert . connect options
Postgres_Details.Postgres self.host self.port (database.if_nothing self.database) (schema.if_nothing self.schema) self.credentials self.use_ssl self.client_cert . connect options
Postgres_Connection.create self.jdbc_url properties make_new

View File

@ -3,7 +3,7 @@ from Standard.Base import all
import project.Connection.Connection_Options.Connection_Options
import project.Internal.SQLite.SQLite_Connection
type SQLite_Options
type SQLite_Details
## Connect to a SQLite DB File or InMemory DB.
Arguments:

View File

@ -2,7 +2,7 @@ from Standard.Base import all
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import project.Connection.Database
import project.Connection.SQLite_Options.SQLite_Options
import project.Connection.SQLite_Details.SQLite_Details
## Read the file to a `SQLite_Connection` from a `.db` or `.sqlite` file
type SQLite_Format
@ -30,4 +30,4 @@ type SQLite_Format
read : File -> Problem_Behavior -> Any
read self file on_problems =
_ = [on_problems]
Database.connect (SQLite_Options.SQLite file)
Database.connect (SQLite_Details.SQLite file)

View File

@ -19,7 +19,6 @@ import project.Internal.IR.Order_Descriptor.Order_Descriptor
import project.Internal.IR.Query.Query
import project.Internal.IR.SQL_Expression.SQL_Expression
import project.Internal.Postgres.Postgres_Dialect
import project.Internal.Redshift.Redshift_Dialect
import project.Internal.SQLite.SQLite_Dialect
import project.Internal.SQL_Type_Mapping.SQL_Type_Mapping
import project.Internal.SQL_Type_Reference.SQL_Type_Reference
@ -213,12 +212,6 @@ sqlite = SQLite_Dialect.sqlite
postgres : Dialect
postgres = Postgres_Dialect.postgres
## PRIVATE
The dialect of Redshift databases.
redshift : Dialect
redshift = Redshift_Dialect.redshift
## PRIVATE
default_fetch_types_query dialect expression context =
empty_context = context.add_where_filters [SQL_Expression.Literal "FALSE"]

View File

@ -3,33 +3,27 @@ import project.Connection.Client_Certificate.Client_Certificate
import project.Connection.Connection_Options.Connection_Options
import project.Connection.Credentials.Credentials
import project.Connection.Database
import project.Connection.Postgres_Options.Postgres_Options
import project.Connection.Redshift_Options.Redshift_Options
import project.Connection.Redshift_Options.AWS_Credential
import project.Connection.SQLite_Options.SQLite_Options
import project.Connection.SQLite_Options.In_Memory
import project.Connection.Postgres_Details.Postgres_Details
import project.Connection.SQLite_Details.SQLite_Details
import project.Connection.SQLite_Details.In_Memory
import project.Connection.SSL_Mode.SSL_Mode
import project.Data.SQL_Query.SQL_Query
import project.Extensions.Upload_Table
from project.Connection.Postgres_Options.Postgres_Options import Postgres
from project.Connection.Redshift_Options.Redshift_Options import Redshift
from project.Connection.SQLite_Options.SQLite_Options import SQLite
from project.Connection.Postgres_Details.Postgres_Details import Postgres
from project.Connection.SQLite_Details.SQLite_Details import SQLite
export project.Connection.Client_Certificate.Client_Certificate
export project.Connection.Connection_Options.Connection_Options
export project.Connection.Credentials.Credentials
export project.Connection.Database
export project.Connection.Postgres_Options.Postgres_Options
export project.Connection.Redshift_Options.Redshift_Options
export project.Connection.Redshift_Options.AWS_Credential
export project.Connection.SQLite_Options.SQLite_Options
export project.Connection.SQLite_Options.In_Memory
export project.Connection.Postgres_Details.Postgres_Details
export project.Connection.SQLite_Details.SQLite_Details
export project.Connection.SQLite_Details.In_Memory
export project.Connection.SSL_Mode.SSL_Mode
export project.Data.SQL_Query.SQL_Query
export project.Extensions.Upload_Table
from project.Connection.Postgres_Options.Postgres_Options export Postgres
from project.Connection.Redshift_Options.Redshift_Options export Redshift
from project.Connection.SQLite_Options.SQLite_Options export SQLite
from project.Connection.Postgres_Details.Postgres_Details export Postgres
from project.Connection.SQLite_Details.SQLite_Details export SQLite

View File

@ -1,6 +1,7 @@
from Standard.Base import all
from Standard.Table import all
from Standard.Database import all
from Standard.AWS import all
main =
operator1 = "Press TAB key to create a new node"

View File

@ -1,8 +1,8 @@
import com.typesafe.sbt.SbtLicenseReport.autoImportImpl.{
import sbtlicensereport.SbtLicenseReport.autoImportImpl.{
licenseOverrides,
licenseSelection
}
import com.typesafe.sbt.license
import sbtlicensereport.license
import sbt.Keys.{ivyModule, streams, update, updateClassifiers}
import sbt.{File, Project}
import src.main.scala.licenses.{

View File

@ -12,6 +12,7 @@ object Editions {
"Standard.Test",
"Standard.Table",
"Standard.Database",
"Standard.AWS",
"Standard.Image",
"Standard.Geo",
"Standard.Visualization",

View File

@ -1,6 +1,6 @@
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "1.1.0")
addSbtPlugin("ch.epfl.scala" % "sbt-bloop" % "1.5.3")
addSbtPlugin("com.typesafe.sbt" % "sbt-license-report" % "1.2.0")
addSbtPlugin("com.github.sbt" % "sbt-license-report" % "1.3.0")
addSbtPlugin("com.lightbend.sbt" % "sbt-java-formatter" % "0.7.0")
addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6")
addSbtPlugin("com.simplytyped" % "sbt-antlr4" % "0.8.3")

View File

@ -2,7 +2,7 @@ package src.main.scala.licenses
import java.nio.file.Path
import com.typesafe.sbt.license.{DepModuleInfo, LicenseInfo}
import sbtlicensereport.license.{DepModuleInfo, LicenseInfo}
import src.main.scala.licenses.report.Review
/** Defines a way to access sources of a dependency.

View File

@ -1,6 +1,6 @@
package src.main.scala.licenses
import com.typesafe.sbt.license.LicenseReport
import sbtlicensereport.license.LicenseReport
import sbt.File
import sbt.librarymanagement.UpdateReport

View File

@ -1,6 +1,6 @@
package src.main.scala.licenses.frontend
import com.typesafe.sbt.license.DepModuleInfo
import sbtlicensereport.license.DepModuleInfo
import src.main.scala.licenses.DependencyInformation
/** Filters out irrelevant dependencies.

View File

@ -2,7 +2,7 @@ package src.main.scala.licenses.frontend
import java.nio.file.Path
import com.typesafe.sbt.license.{DepLicense, DepModuleInfo}
import sbtlicensereport.license.{DepLicense, DepModuleInfo}
import org.apache.ivy.core.resolve.IvyNode
import sbt.Compile
import sbt.internal.util.ManagedLogger

View File

@ -0,0 +1,26 @@
package org.enso.aws.database;
import org.enso.database.DatabaseConnectionDetailsSPI;
@org.openide.util.lookup.ServiceProvider(service = DatabaseConnectionDetailsSPI.class)
public class RedshiftConnectionDetailsSPI extends DatabaseConnectionDetailsSPI {
@Override
protected String getModuleName() {
return "Standard.AWS.Database.Redshift.Redshift_Details";
}
@Override
protected String getTypeName() {
return "Redshift_Details";
}
@Override
protected String getCodeForDefaultConstructor() {
return "(Redshift host=_ port=_)";
}
@Override
protected String getUserFacingConnectionName() {
return "Redshift";
}
}

View File

@ -0,0 +1,41 @@
package org.enso.database;
import java.util.ServiceLoader;
public abstract class DatabaseConnectionDetailsSPI {
private static final ServiceLoader<DatabaseConnectionDetailsSPI> loader =
ServiceLoader.load(
DatabaseConnectionDetailsSPI.class, DatabaseConnectionDetailsSPI.class.getClassLoader());
/**
* Returns an array of pairs, where the first element is the user facing connection name and the
* second element is a string representing the code to insert to create a default connection
* instance. That code may contain `_` placeholders for expected arguments.
*/
public static String[][] get_default_constructors(boolean refresh) {
if (refresh) {
loader.reload();
}
return loader.stream()
.map(
provider -> {
var spi = provider.get();
return new String[] {
spi.getUserFacingConnectionName(), spi.getCodeForDefaultConstructor()
};
})
.toArray(String[][]::new);
}
/** The module in which the connection details type is defined. */
protected abstract String getModuleName();
/** The name of the connection details type. */
protected abstract String getTypeName();
/** Default code that can be used to construct a default instance of the connection details. */
protected abstract String getCodeForDefaultConstructor();
/** The user facing name of the connection. */
protected abstract String getUserFacingConnectionName();
}

View File

@ -0,0 +1,24 @@
package org.enso.database;
@org.openide.util.lookup.ServiceProvider(service = DatabaseConnectionDetailsSPI.class)
public class PostgresConnectionDetailsSPI extends DatabaseConnectionDetailsSPI {
@Override
protected String getModuleName() {
return "Standard.Database.Connection.Postgres_Details";
}
@Override
protected String getTypeName() {
return "Postgres_Details";
}
@Override
protected String getCodeForDefaultConstructor() {
return "(Postgres)";
}
@Override
protected String getUserFacingConnectionName() {
return "Postgres";
}
}

View File

@ -0,0 +1,24 @@
package org.enso.database;
@org.openide.util.lookup.ServiceProvider(service = DatabaseConnectionDetailsSPI.class)
public class SQLiteConnectionDetailsSPI extends DatabaseConnectionDetailsSPI {
@Override
protected String getModuleName() {
return "Standard.Database.Connection.SQLite_Details";
}
@Override
protected String getTypeName() {
return "SQLite_Details";
}
@Override
protected String getCodeForDefaultConstructor() {
return "(SQLite location=_)";
}
@Override
protected String getUserFacingConnectionName() {
return "SQLite";
}
}

View File

@ -4,7 +4,9 @@ import Standard.Base.Runtime.Ref.Ref
import Standard.Table.Data.Type.Value_Type.Bits
from Standard.Table import Table, Value_Type
from Standard.Database import Database, Redshift, AWS_Credential, SQL_Query
from Standard.Database import Database, SQL_Query
from Standard.AWS import Redshift, AWS_Credential
from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions

View File

@ -15,7 +15,7 @@ spec =
f = enso_project.data / "short.txt"
f.delete_if_exists
f.exists.should_be_false
"Cup".write f
"Cup".write f . should_succeed
f.with_input_stream [File_Access.Read] stream->
stream.with_stream_decoder Encoding.utf_8 reporting_stream_decoder->
reporting_stream_decoder.read.should_equal 67
@ -29,7 +29,7 @@ spec =
f = enso_project.data / "transient" / "varying_chunks.txt"
fragment = 'Hello 😎🚀🚧!'
contents = 1.up_to 1000 . map _->fragment . join '\n'
contents.write f
contents.write f . should_succeed
all_codepoints = Vector.new_builder
read_chars decoder n =
case read_characters decoder n of
@ -77,7 +77,7 @@ spec =
Test.specify "should allow reading a UTF-8 file" <|
f = enso_project.data / "transient" / "utf8.txt"
encoding = Encoding.utf_8
((0.up_to 100).map _->'Hello World!' . join '\n').write f
((0.up_to 100).map _->'Hello World!' . join '\n').write f . should_succeed
expected_contents = f.read_text
contents = read_file_one_by_one f encoding expected_contents.length
contents.should_equal expected_contents

View File

@ -5,6 +5,8 @@ import Standard.Base.Metadata.Widget
import Standard.Base.Metadata.Display
from Standard.Database import all
# This ensures that the Redshift connection details are available in the widget.
from Standard.AWS import all
import Standard.Visualization.Widgets
@ -20,8 +22,8 @@ spec =
Test.group "Widgets for In-Database Connection with table types" <|
Test.specify "works for `tables`" <|
result = Widgets.get_widget_json connection "tables" ["types"]
result.contains "'TABLE'" . should_be_true
result.contains "'VIEW'" . should_be_true
result.should_contain "'TABLE'"
result.should_contain "'VIEW'"
Test.group "Widgets for In-Database Connection with table name sets" <|
Test.specify "works for `query` and `read`" <|
@ -44,4 +46,11 @@ spec =
expect = [["column", Widget.Single_Choice choices Nothing Display.Always]] . to_json
Widgets.get_widget_json mock_table "filter" ["column"] . should_equal expect
Test.group "Widgets for Database" <|
Test.specify "works for `connect`" <|
result = Widgets.get_widget_json Database "connect" ["details"]
result.should_contain "SQLite"
result.should_contain "Postgres"
result.should_contain "Redshift"
main = Test_Suite.run_main spec

View File

@ -0,0 +1,2 @@
#license
/FasterXML/jackson-dataformats-binary/blob/2.16/LICENSE

View File

@ -0,0 +1,3 @@
2F8678B00965B3AF1B2A31D12012CD7B7DD9C91ED3DF66311284EFAB1ED25EBB
F0685E9E9F5315627AEDD222C040620631BE0146BA5C093F750E8E0CDEC8E493
0

Some files were not shown because too many files have changed in this diff.