Initial Tableau Reading Support (#10733)

- Adds `Hyper_File`, allowing reading of a Tableau Hyper file.
- Can read the schema and table list.
- Can read the structure of a table.
- Can read data into an Enso Table.
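
A minimal usage sketch of the new API (not part of the commit itself), assuming a local `names.hyper` extract containing an `Extract` table, as used by the tests in this commit:

from Standard.Base import all
from Standard.Tableau import Hyper_File

main =
    hyper = Hyper_File.new (File.new "names.hyper")
    IO.println hyper.schemas
    hyper.tables . each t-> IO.println t.to_display_text
    # Read the whole table into an in-memory Enso Table.
    table = hyper.read "Extract"
    IO.println table.row_count
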
James Dunkerley 2024-08-07 10:23:05 +01:00 committed by GitHub
parent 71285e6ff8
commit b8c036c476
41 changed files with 1570 additions and 11 deletions

View File

@@ -7,10 +7,12 @@
- [Relative paths are now resolved relative to the project location, also in the
Cloud.][10660]
- [Added Newline option to Text_Cleanse/Text_Replace.][10761]
- [Support for reading from Tableau Hyper files.][10733]
[10614]: https://github.com/enso-org/enso/pull/10614
[10660]: https://github.com/enso-org/enso/pull/10660
[10761]: https://github.com/enso-org/enso/pull/10761
[10733]: https://github.com/enso-org/enso/pull/10733
# Enso 2023.3

build.sbt (105 changed lines)
View File

@@ -18,6 +18,7 @@ import src.main.scala.licenses.{
import JPMSPlugin.autoImport._
import java.io.File
import java.nio.file.Files
import java.nio.file.Paths
// ============================================================================
@@ -167,7 +168,11 @@ GatherLicenses.distributions := Seq(
"Snowflake",
Distribution.sbtProjects(`std-snowflake`)
),
makeStdLibDistribution("Microsoft", Distribution.sbtProjects(`std-microsoft`))
makeStdLibDistribution(
"Microsoft",
Distribution.sbtProjects(`std-microsoft`)
),
makeStdLibDistribution("Tableau", Distribution.sbtProjects(`std-tableau`))
)
GatherLicenses.licenseConfigurations := Set("compile")
@@ -351,6 +356,7 @@ lazy val enso = (project in file("."))
`std-aws`,
`std-snowflake`,
`std-microsoft`,
`std-tableau`,
`http-test-helper`,
`enso-test-java-helpers`,
`exploratory-benchmark-java-helpers`,
@@ -526,6 +532,7 @@ val poiOoxmlVersion = "5.2.3"
val redshiftVersion = "2.1.0.15"
val univocityParsersVersion = "2.9.1"
val xmlbeansVersion = "5.1.1"
val tableauVersion = "0.0.19691.r2d7e5bc8"
// === ZIO ====================================================================
@@ -570,6 +577,7 @@ val apacheArrowVersion = "14.0.1"
val snowflakeJDBCVersion = "3.15.0"
val mssqlserverJDBCVersion = "12.6.2.jre11"
val jsoniterVersion = "2.28.5"
val jnaVersion = "5.14.0"
// ============================================================================
// === Utility methods =====================================================
@@ -1947,6 +1955,7 @@ lazy val runtime = (project in file("engine/runtime"))
.dependsOn(`std-aws` / Compile / packageBin)
.dependsOn(`std-snowflake` / Compile / packageBin)
.dependsOn(`std-microsoft` / Compile / packageBin)
.dependsOn(`std-tableau` / Compile / packageBin)
.value
)
.dependsOn(`common-polyglot-core-utils`)
@@ -3267,6 +3276,8 @@ val `std-snowflake-polyglot-root` =
stdLibComponentRoot("Snowflake") / "polyglot" / "java"
val `std-microsoft-polyglot-root` =
stdLibComponentRoot("Microsoft") / "polyglot" / "java"
val `std-tableau-polyglot-root` =
stdLibComponentRoot("Tableau") / "polyglot" / "java"
lazy val `std-base` = project
.in(file("std-bits") / "base")
@@ -3604,6 +3615,95 @@ lazy val `std-microsoft` = project
.dependsOn(`std-table` % "provided")
.dependsOn(`std-database` % "provided")
lazy val `std-tableau` = project
.in(file("std-bits") / "tableau")
.settings(
frgaalJavaCompilerSetting,
autoScalaLibrary := false,
unmanagedExternalZip := {
val platform = if (Platform.isWindows) {
"windows"
} else if (Platform.isMacOS) {
"macos"
} else if (Platform.isLinux) {
"linux"
}
val arch = if (Platform.isArm64) {
"arm64"
} else {
"x86_64"
}
new URI(
s"https://downloads.tableau.com/tssoftware/tableauhyperapi-java-$platform-$arch-release-main.$tableauVersion.zip"
).toURL()
},
fetchZipToUnmanaged := {
val unmanagedDirectory = (Compile / unmanagedBase).value
val logger = state.value.log
if (IO.listFiles(unmanagedDirectory).size < 2) { // Heuristic, should have at least hyperapi jar and os-specific one.
logger.log(
Level.Info,
"std-tableau's unmanaged dependencies are not up-to-date. fetching..."
)
unmanagedDirectory.mkdirs()
val unmanagedPath = unmanagedDirectory.toPath
IO.withTemporaryDirectory(
tmp => {
val files = IO.unzipURL(
unmanagedExternalZip.value,
tmp,
f =>
f.endsWith(".jar") && !f.contains("gradle") && !f.contains(
"javadoc"
) && !f.contains("jna")
)
files.map { f =>
IO.move(f, unmanagedPath.resolve(f.getName).toFile)
Attributed.blank(unmanagedPath.resolve(f.getName).toFile)
}.toSeq
},
keepDirectory = false
)
} else {
Seq[Attributed[File]]()
}
},
Compile / unmanagedClasspath := Def.task {
val additionalFiles: Seq[Attributed[File]] = fetchZipToUnmanaged.value
val result = (Compile / unmanagedClasspath).value
result ++ additionalFiles
}.value,
Compile / unmanagedJars := (Compile / unmanagedJars)
.dependsOn(fetchZipToUnmanaged)
.value,
Compile / packageBin / artifactPath :=
`std-tableau-polyglot-root` / "std-tableau.jar",
libraryDependencies ++= Seq(
"org.netbeans.api" % "org-openide-util-lookup" % netbeansApiVersion % "provided",
"net.java.dev.jna" % "jna-platform" % jnaVersion
),
Compile / packageBin := Def.task {
val result = (Compile / packageBin).value
val _ = StdBits
.copyDependencies(
`std-tableau-polyglot-root`,
Seq("std-tableau.jar"),
ignoreScalaLibrary = true
)
.value
result
}.value
)
.dependsOn(`std-base` % "provided")
.dependsOn(`std-table` % "provided")
lazy val fetchZipToUnmanaged =
taskKey[Seq[Attributed[File]]](
"Download zip file from an `unmanagedExternalZip` url and unpack jars to unmanaged libs directory"
)
lazy val unmanagedExternalZip =
settingKey[URL]("URL to zip file with dependencies")
/* Note [Native Image Workaround for GraalVM 20.2]
* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* In GraalVM 20.2 the Native Image build of even simple Scala programs has
@@ -3824,6 +3924,8 @@ pkgStdLibInternal := Def.inputTask {
(`std-snowflake` / Compile / packageBin).value
case "Microsoft" =>
(`std-microsoft` / Compile / packageBin).value
case "Tableau" =>
(`std-tableau` / Compile / packageBin).value
case _ if buildAllCmd =>
(`std-base` / Compile / packageBin).value
(`enso-test-java-helpers` / Compile / packageBin).value
@@ -3836,6 +3938,7 @@ pkgStdLibInternal := Def.inputTask {
(`std-aws` / Compile / packageBin).value
(`std-snowflake` / Compile / packageBin).value
(`std-microsoft` / Compile / packageBin).value
(`std-tableau` / Compile / packageBin).value
case _ =>
}
val libs =

View File

@@ -162,7 +162,7 @@ type JS_Object
JS_Object.Value object_node (make_field_names object_node)
## PRIVATE
Creates a Jackon_Object from a list of key-value pairs.
Creates a Jackson_Object from a list of key-value pairs.
Keys must be `Text` values.
Values will be recursively converted to JSON serializable as needed.
from_pairs : Vector -> JS_Object

View File

@@ -0,0 +1,3 @@
Copyright (c) 2016 - 2023, Salesforce, Inc. and its licensors. All rights reserved.
Protected by U.S. Patents and Trademarks as noted at http://www.tableau.com/ip; Patents pending.

View File

@@ -0,0 +1,12 @@
Enso
Copyright 2020 - 2024 New Byte Order sp. z o. o.
'jna', licensed under the Apache-2.0, is distributed with the Tableau.
The license file can be found at `licenses/APACHE2.0`.
Copyright notices related to this dependency can be found in the directory `net.java.dev.jna.jna-5.14.0`.
'jna-platform', licensed under the Apache-2.0, is distributed with the Tableau.
The license file can be found at `licenses/APACHE2.0`.
Copyright notices related to this dependency can be found in the directory `net.java.dev.jna.jna-platform-5.14.0`.

View File

@@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@@ -0,0 +1,29 @@
Copyright (c) 2007 Timothy Wall, All Rights Reserved
Copyright (c) 2007 Wayne Meissner, All Rights Reserved
Copyright (c) 2007-2008 Timothy Wall, All Rights Reserved
Copyright (c) 2007-2012 Timothy Wall, All Rights Reserved
Copyright (c) 2007-2013 Timothy Wall, All Rights Reserved
Copyright (c) 2007-2015 Timothy Wall, All Rights Reserved
Copyright (c) 2009 Timothy Wall, All Rights Reserved
Copyright (c) 2011 Timothy Wall, All Rights Reserved
Copyright (c) 2012 Timothy Wall, All Rights Reserved
Copyright (c) 2017 Matthias Bläsing, All Rights Reserved
Copyright (c) 2018 Matthias Bläsing
Copyright (c) 2019 Matthias Bläsing, All Rights Reserved
Copyright (c) 2021, Matthias Bläsing, All Rights Reserved
Copyright (c) 2022 Carlos Ballesteros, All Rights Reserved
Copyright 2007 Timothy Wall

View File

@@ -0,0 +1,93 @@
Conversion code in this class Copyright 2002-2004 Apache Software Foundation.
Copyright (c) 2007 Olivier Chafik
Copyright (c) 2007 Olivier Chafik, All Rights Reserved
Copyright (c) 2007 Timothy Wall, All Rights Reserved
Copyright (c) 2007, 2013 Timothy Wall, Markus Karg, All Rights Reserved
Copyright (c) 2007-2008 Timothy Wall, All Rights Reserved
Copyright (c) 2007-2013 Timothy Wall, All Rights Reserved
Copyright (c) 2008 Timothy Wall, All Rights Reserved
Copyright (c) 2010 Daniel Doubrovkine, All Rights Reserved
Copyright (c) 2010 EugineLev, All Rights Reserved
Copyright (c) 2010 Timothy Wall, All Rights Reserved
Copyright (c) 2010, 2013 Daniel Doubrovkine, Markus Karg, All Rights Reserved
Copyright (c) 2010,2011 Daniel Doubrovkine, All Rights Reserved
Copyright (c) 2011 Daniel Doubrovkine, All Rights Reserved
Copyright (c) 2011 Denis Tulskiy
Copyright (c) 2011 Timothy Wall, All Rights Reserved
Copyright (c) 2012 Tobias Wolf, All Rights Reserved
Copyright (c) 2013 Ralf Hamberger, Markus Karg, All Rights Reserved
Copyright (c) 2013 Tobias Wolf, All Rights Reserved
Copyright (c) 2014 Dr David H. Akehurst (itemis), All Rights Reserved
Copyright (c) 2014 Reinhard Pointner, All Rights Reserved
Copyright (c) 2015 Adam Marcionek, All Rights Reserved
Copyright (c) 2015 Andreas "PAX" L\u00FCck, All Rights Reserved
Copyright (c) 2015 Daniel Widdis
Copyright (c) 2015 Goldstein Lyor, 2021 Daniel Widdis, All Rights Reserved
Copyright (c) 2015 Goldstein Lyor, All Rights Reserved
Copyright (c) 2015 Michael Freeman, All Rights Reserved
Copyright (c) 2016 Adam Marcionek, All Rights Reserved
Copyright (c) 2016 Minoru Sakamoto, All Rights Reserved
Copyright (c) 2017 Daniel Widdis, All Rights Reserved
Copyright (c) 2017 Matthias Bläsing, All Rights Reserved
Copyright (c) 2018 Daniel Widdis, All Rights Reserved
Copyright (c) 2018 Matthias Bläsing, All Rights Reserved
Copyright (c) 2018 Roshan Muralidharan, All Rights Reserved
Copyright (c) 2018 Václav Haisman, All Rights Reserved
Copyright (c) 2018, 2021 Daniel Widdis, All Rights Reserved
Copyright (c) 2018,2020,2021 Daniel Widdis, All Rights Reserved
Copyright (c) 2019 Daniel Widdis
Copyright (c) 2019 Daniel Widdis, All Rights Reserved
Copyright (c) 2019 Keve Müller
Copyright (c) 2019, 2021 Daniel Widdis
Copyright (c) 2020 Daniel Widdis, All Rights Reserved
Copyright (c) 2020 Torbjörn Svensson, All Rights Reserved
Copyright (c) 2022 Daniel Widdis, All Rights Reserved
Copyright (c) 2023 Reinhard Pointner, All Rights Reserved
Copyright 2010 Digital Rapids Corp.
Copyright 2014 Martin Steiger

View File

@@ -0,0 +1,10 @@
name: Tableau
namespace: Standard
version: 0.0.0-dev
license: APLv2
authors:
- name: Enso Team
email: contact@enso.org
maintainers:
- name: Enso Team
email: contact@enso.org

View File

@@ -0,0 +1,16 @@
from Standard.Base import all
from Standard.Table import Value_Type
## An Enso representation of a Column in a Tableau Hyper Table.
type Hyper_Column
Value name:Text value_type:Value_Type nullable:Boolean
## PRIVATE
to_display_text : Text
to_display_text self = self.name + " (" + self.value_type.to_display_text + ")"
## PRIVATE
to_js_object : JS_Object
to_js_object self =
JS_Object.from_pairs [["type", "Hyper_Column"], ["name", self.name], ["value_type", self.value_type], ["nullable", self.nullable]]

View File

@@ -0,0 +1,18 @@
from Standard.Base import all
polyglot java import org.enso.tableau.HyperQueryError
polyglot java import org.enso.tableau.HyperTableNotFound
## Error when a Table is not found in a Hyper File.
type Table_Not_Found
Error schema:Text name:Text
## Error when a query fails.
type Query_Failed
Error message:Text query:Text
## PRIVATE
private handle_java_exceptions ~action =
Panic.catch HyperTableNotFound handler=(c-> Error.throw (Table_Not_Found.Error c.payload.getSchema c.payload.getName)) <|
Panic.catch HyperQueryError handler=(c-> Error.throw (Query_Failed.Error c.payload.getMessage c.payload.getQuery)) <|
action
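
A minimal sketch (not from the commit) of how these panics surface as dataflow errors, assuming the `names.hyper` test fixture and a deliberately missing table name:

from Standard.Base import all
from Standard.Tableau import Hyper_File
import Standard.Tableau.Hyper_Errors.Table_Not_Found

main =
    r = Hyper_File.new (File.new "names.hyper") . read "No_Such_Table"
    # `read` returns a dataflow error carrying Table_Not_Found.
    IO.println r.is_error
    IO.println r.catch.to_display_text
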

View File

@@ -0,0 +1,101 @@
from Standard.Base import all
import Standard.Base.Errors.File_Error.File_Error
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
from Standard.Base.Metadata.Choice import Option
from Standard.Base.Metadata.Widget import Single_Choice
from Standard.Table import Table
import Standard.Table.Rows_To_Read.Rows_To_Read
import project.Hyper_Table.Hyper_Table
import project.Hyper_Errors.Table_Not_Found
polyglot java import org.enso.tableau.HyperReader
## Represents a Tableau Hyper Extract file.
type Hyper_File
## ICON data_input
Creates a Hyper_File.
Arguments:
- file: The file to read.
- schema: The schema to read or `*` for all schemas.
new : File -> Text -> Hyper_File
new file:File schema:Text='*' =
if schema == "" then Error.throw (Illegal_Argument.Error "Schema cannot be empty.") else
Hyper_File.Value file schema
## PRIVATE
A representation of a Tableau Hyper Extract file.
private Value file:File internal_schema:Text
## ICON metadata
Returns the list of schemas within the Hyper file.
schemas : Vector Text
schemas self = File_Error.handle_java_exceptions self.file <|
array = HyperReader.readSchemas self.file.path
Vector.from_polyglot_array array
## ICON metadata
Returns the name of the current schema.
`*` represents all schemas.
schema : Text
schema self = self.internal_schema
## ICON data_input
Returns a new Hyper_File with the specified schema set as default.
Arguments:
- schema: The name of the schema to connect to.
@schema (hyper -> make_schema_selector hyper True)
set_schema : Text -> Hyper_File
set_schema self schema =
if schema == self.schema then self else
if schema == "" then Error.throw (Illegal_Argument.Error "Schema cannot be empty.") else
Hyper_File.Value self.file schema
## GROUP Standard.Base.Metadata
ICON metadata
Returns the list of tables within the specified schema of the Hyper file.
@schema (hyper -> make_schema_selector hyper True)
tables : Text -> Vector Hyper_Table
tables self schema:Text=self.schema = if schema == "" then self.tables self.schema else
File_Error.handle_java_exceptions self.file <|
array = case schema of
"*" -> HyperReader.listTablesAllSchemas self.file.path
_ -> HyperReader.listTables self.file.path schema
array.map t-> Hyper_Table.Value self t.schema t.name
## ALIAS sheet, get
GROUP Standard.Base.Input
ICON data_input
Read a table from the Hyper_File into a Table.
Arguments:
- table: table name to read from.
- schema: the schema to read from.
- limit: the maximum number of rows to read.
@table make_table_selector
@schema (hyper -> make_schema_selector hyper True)
@limit Rows_To_Read.default_widget
read : Text -> Text -> Rows_To_Read -> Table
read self (table : Text) (schema : Text = self.schema) (limit : Rows_To_Read = ..All_Rows) = case schema of
"" -> self.read table self.schema limit
"*" ->
table_to_read = self.tables.find if_missing=Nothing t-> t.table == table
if table_to_read.is_nothing then Error.throw (Table_Not_Found.Error "*" table) else
table_to_read.read limit
_ -> Hyper_Table.Value self table schema . read limit
## PRIVATE
make_schema_selector hyper_file:Hyper_File include_any:Boolean=False =
schemas = hyper_file.schemas.map t-> Option t t.pretty
any_entry = if include_any then [Option "<Any Schema>" "'*'"] else []
Single_Choice values=schemas+any_entry
## PRIVATE
make_table_selector hyper_file:Hyper_File cache=Nothing =
schema = cache.if_not_nothing <| cache "schema"
used_schema = if schema == "" || schema == Nothing then hyper_file.schema else schema
tables = hyper_file.tables used_schema . map t-> Option t.table t.table.pretty
Single_Choice values=tables

View File

@@ -0,0 +1,94 @@
from Standard.Base import all
import Standard.Base.Errors.File_Error.File_Error
from Standard.Table import Column, Table, Value_Type
import Standard.Table.Rows_To_Read.Rows_To_Read
import Standard.Table.Internal.Java_Problems
import project.Hyper_Column.Hyper_Column
import project.Hyper_File.Hyper_File
import project.Hyper_Errors
polyglot java import java.sql.Types
polyglot java import org.enso.tableau.HyperReader
polyglot java import org.enso.tableau.HyperTableColumn
## An Enso representation of a Tableau Hyper Table.
type Hyper_Table
## PRIVATE
Represents a Tableau Hyper Table.
private Value file:Hyper_File internal_schema:Text internal_table:Text
## The schema of the table.
schema : Text
schema self = self.internal_schema
## The name of the table.
table : Text
table self = self.internal_table
## PRIVATE
to_display_text : Text
to_display_text self = self.table + " (" + self.schema + ")"
## PRIVATE
to_js_object : JS_Object
to_js_object self =
JS_Object.from_pairs [["type", "Hyper_Table"], ["schema", self.schema], ["table", self.table], ["file", self.file.file.path]]
## ICON metadata
Reads the columns of the table.
column_metadata : Vector Hyper_Column
column_metadata self = File_Error.handle_java_exceptions self.file.file <| Hyper_Errors.handle_java_exceptions <|
array = HyperReader.readStructure self.file.file.path self.schema self.table
array.map column->
value_type = case column.typeID of
Types.BOOLEAN -> Value_Type.Boolean
Types.BIGINT -> Value_Type.Integer ..Bits_64
Types.SMALLINT -> Value_Type.Integer ..Bits_16
Types.INTEGER -> Value_Type.Integer ..Bits_32
Types.NUMERIC ->
precision = if column.precision.isEmpty then Nothing else column.precision.getAsInt
scale = if column.scale.isEmpty then Nothing else column.scale.getAsInt
Value_Type.Decimal precision scale
Types.FLOAT -> Value_Type.Float ..Bits_32
Types.DOUBLE -> Value_Type.Float ..Bits_64
Types.VARCHAR ->
length = if column.length.isEmpty then Nothing else column.length.getAsInt
Value_Type.Char length variable_length=True
Types.CHAR ->
length = if column.length.isEmpty then Nothing else column.length.getAsInt
Value_Type.Char length variable_length=False
Types.DATE -> Value_Type.Date
Types.TIME -> Value_Type.Time
Types.TIMESTAMP -> Value_Type.Date_Time with_timezone=False
Types.TIMESTAMP_WITH_TIMEZONE -> Value_Type.Date_Time with_timezone=True
HyperTableColumn.JSON -> Value_Type.Unsupported_Data_Type "JSON" JS_Object
HyperTableColumn.INTERVAL -> Value_Type.Unsupported_Data_Type "INTERVAL" Duration
_ -> Value_Type.Unsupported_Data_Type "Unknown" Any
Hyper_Column.Value column.name value_type column.nullable
## GROUP Standard.Base.Metadata
ICON metadata
Returns a Table describing this table's contents.
The table lists all columns and value types of each column.
column_info : Table
column_info self =
cols = self.column_metadata
Table.new [["Column", cols.map .name], ["Value Type", cols.map .value_type], ["Nullable", cols.map .nullable]]
## GROUP Standard.Base.Input
ICON data_input
Reads the table into an Enso Table.
Arguments:
- max_rows: specifies the maximum number of rows to read.
@max_rows Rows_To_Read.default_widget
read : Rows_To_Read -> Table
read self (max_rows : Rows_To_Read = ..All_Rows) = File_Error.handle_java_exceptions self.file.file <| Hyper_Errors.handle_java_exceptions <|
Java_Problems.with_problem_aggregator Problem_Behavior.Report_Warning java_problem_aggregator->
row_count = if max_rows == Rows_To_Read.All_Rows then Nothing else max_rows.rows
java_columns = HyperReader.readTable self.file.file.path self.schema self.table row_count java_problem_aggregator
enso_columns = java_columns.map c-> Column.from_storage c.getName c.getStorage
Table.new enso_columns
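
Taken together, `column_metadata`, `column_info` and `read` cover both inspection and ingestion. A minimal sketch, again assuming the `names.hyper` fixture:

from Standard.Base import all
from Standard.Tableau import Hyper_File

main =
    table = Hyper_File.new (File.new "names.hyper") . tables . first
    # One row per column: name, Value_Type and nullability.
    IO.println table.column_info
    # Only fetch the first ten rows from the Hyper process.
    preview = table.read (..First 10)
    IO.println preview.row_count
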

View File

@@ -0,0 +1,5 @@
from Standard.Base import all
export project.Hyper_Column.Hyper_Column
export project.Hyper_File.Hyper_File
export project.Hyper_Table.Hyper_Table

View File

@@ -0,0 +1,57 @@
from Standard.Base import all
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.System.File.Generic.Writable_File.Writable_File
import Standard.Base.System.File_Format_Metadata.File_Format_Metadata
import Standard.Base.System.Input_Stream.Input_Stream
from Standard.Base.Metadata.Choice import Option
import project.Hyper_File.Hyper_File
## Read the file to a `Hyper_File` object.
type Tableau_Format
## Read the file to a `Hyper_File` object.
Arguments:
- schema: The schema to read or `*` for all schemas.
Hyper_File (schema:Text='*')
## PRIVATE
Resolve an unresolved constructor to the actual type.
resolve : Function -> Tableau_Format | Nothing
resolve constructor =
Panic.catch Any (constructor:Tableau_Format) _->Nothing
## PRIVATE
ADVANCED
If the File_Format supports reading from the file, return a configured instance.
for_read : File_Format_Metadata -> Tableau_Format | Nothing
for_read file:File_Format_Metadata =
case file.guess_extension of
".hyper" -> Tableau_Format.Hyper_File
_ -> Nothing
## PRIVATE
If this File_Format should be used for writing to that file, return a configured instance.
Not currently supported.
for_file_write : Writable_File -> Tableau_Format | Nothing
for_file_write file:Writable_File =
_ = [file]
Nothing
## PRIVATE
get_dropdown_options : Vector Option
get_dropdown_options = [Option "Tableau Hyper" "..Hyper_File"]
## PRIVATE
Implements `File.read` for this `File_Format`.
read : File -> Problem_Behavior -> Any
read self file on_problems:Problem_Behavior =
_ = [on_problems]
Hyper_File.new file self.schema
## PRIVATE
Implements decoding the format from a stream.
read_stream : Input_Stream -> File_Format_Metadata -> Any
read_stream self stream:Input_Stream (metadata : File_Format_Metadata) =
_ = [stream, metadata]
Error.throw (Illegal_Argument.Error "Cannot connect to a Hyper file backed by a stream. Save it to a local file first.")
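
Because `for_read` matches on the `.hyper` extension, reading such a file directly should resolve to this format and yield a `Hyper_File` without naming it explicitly. A minimal sketch, assuming the SPI registration shown later in this commit is picked up:

from Standard.Base import all
from Standard.Tableau import Hyper_File

main =
    # Resolved to Tableau_Format.Hyper_File via the file extension.
    hyper = File.new "names.hyper" . read
    IO.println hyper.schema
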

View File

@@ -20,7 +20,8 @@ object Editions {
"Standard.Searcher",
"Standard.Google_Api",
"Standard.Snowflake",
"Standard.Microsoft"
"Standard.Microsoft",
"Standard.Tableau"
)
case class ContribLibrary(name: String, version: String)

View File

@@ -48,14 +48,14 @@ object StdBits {
!graalVmOrgs.contains(orgName)
})
)
val unmanagedFiles = (Compile / unmanagedJars).value.map(_.data)
val relevantFiles =
libraryUpdates
.select(
configuration = configFilter,
module = graalModuleFilter,
artifact = DependencyFilter.artifactFilter()
)
) ++ unmanagedFiles
val dependencyStore =
streams.value.cacheStoreFactory.make("std-bits-dependencies")
Tracked.diffInputs(dependencyStore, FileInfo.hash)(relevantFiles.toSet) {
@@ -131,4 +131,5 @@ object StdBits {
log.info(s"No changes detected for '$name' package")
}
}
}

View File

@@ -11,12 +11,12 @@ public class LongArrayList {
backingStorage = new long[32];
}
// ** Gets the number of elements in the list. */
/** Gets the number of elements in the list. */
public int getSize() {
return lastIndex + 1;
}
// ** Gets an element from the list. */
/** Gets an element from the list. */
public long get(int index) {
if (index > lastIndex) {
throw new IndexOutOfBoundsException(index);
@@ -24,12 +24,12 @@ public class LongArrayList {
return backingStorage[index];
}
// ** Gets an element from the list. */
/** Gets an element from the list. */
public long getOrLast(int index) {
return backingStorage[Math.min(index, lastIndex)];
}
// ** Adds an element to the list. */
/** Adds an element to the list. */
public void add(long x) {
int index;

View File

@@ -53,7 +53,7 @@ public final class EnsoSecretHelper extends SecretValueResolver {
}
}
// ** Makes a request with secrets in the query string or headers. **//
/** Makes a request with secrets in the query string or headers. * */
public static EnsoHttpResponse makeRequest(
HttpClient client,
Builder builder,

View File

@@ -14,7 +14,7 @@ public class JoinResult {
this.rightIndices = rightIndices;
}
// ** Represents a pair of indices of matched rows. -1 means an unmatched row.*/
/** Represents a pair of indices of matched rows. -1 means an unmatched row. */
public record RowPair(int leftIndex, int rightIndex) {}
public OrderMask getLeftOrderMask() {

View File

@@ -0,0 +1,14 @@
package org.enso.tableau;
public class HyperQueryError extends RuntimeException {
private final String query;
public HyperQueryError(String message, String query, Throwable cause) {
super(message, cause);
this.query = query;
}
public String getQuery() {
return query;
}
}

View File

@@ -0,0 +1,232 @@
package org.enso.tableau;
import com.tableau.hyperapi.*;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.channels.Channels;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.IntStream;
import org.enso.table.data.table.Column;
import org.enso.table.problems.ProblemAggregator;
/** Class responsible for reading from Tableau Hyper files. */
public class HyperReader {
public static final Path HYPER_PATH = Path.of(getHyperPath());
private static HyperProcess process;
private static final Logger LOGGER = Logger.getLogger("enso-hyper-reader");
private static String getHyperPath() {
if (System.getenv("HYPER_PATH") != null) {
return System.getenv("HYPER_PATH");
}
if (System.getenv("ENSO_DATA_DIRECTORY") != null) {
return System.getenv("ENSO_DATA_DIRECTORY") + "/hyper";
} else {
return switch (OSPlatform.CurrentPlatform) {
case WINDOWS -> System.getenv("LocalAppData") + "/enso/hyper";
case MAC_ARM64, MAX_X64 -> System.getProperty("user.home")
+ "/Library/Application Support/org.enso/hyper";
case LINUX, OTHER -> System.getProperty("user.home") + "/.local/share/enso/hyper";
};
}
}
private static HyperProcess getProcess() throws IOException {
// Check if the hyper directory exists; if not, create it.
if (!Files.exists(HYPER_PATH)) {
try {
Files.createDirectories(HYPER_PATH);
} catch (Exception e) {
throw new IOException("Failed to create Hyper directory: " + HYPER_PATH, e);
}
}
// Check if there are any files in the hyper directory; if not, download them.
try (var files = Files.list(HYPER_PATH)) {
if (files.findAny().isEmpty()) {
switch (OSPlatform.CurrentPlatform) {
case WINDOWS -> downloadHyper(
"https://enso-data-samples.s3.us-west-1.amazonaws.com/tableau/hyperd.exe",
"hyperd.exe",
false);
case MAC_ARM64 -> downloadHyper(
"https://enso-data-samples.s3.us-west-1.amazonaws.com/tableau/macos-arm64/hyperd",
"hyperd",
true);
case MAX_X64 -> downloadHyper(
"https://enso-data-samples.s3.us-west-1.amazonaws.com/tableau/macos-x64/hyperd",
"hyperd",
true);
case LINUX -> downloadHyper(
"https://enso-data-samples.s3.us-west-1.amazonaws.com/tableau/linux/hyperd",
"hyperd",
true);
case OTHER -> throw new IOException(
"Unsupported platform: " + OSPlatform.CurrentPlatform);
}
}
} catch (Exception e) {
throw new IOException("Failed to download hyperd.", e);
}
// Start hyper process.
if (process == null || !process.isOpen()) {
var contextClassLoader = Thread.currentThread().getContextClassLoader();
try {
Thread.currentThread().setContextClassLoader(HyperReader.class.getClassLoader());
LOGGER.log(Level.INFO, "Starting Hyper process: " + HYPER_PATH + ".");
try {
process = new HyperProcess(HYPER_PATH, Telemetry.DO_NOT_SEND_USAGE_DATA_TO_TABLEAU);
} catch (Throwable ioe) {
LOGGER.log(Level.SEVERE, "Failed to start Hyper process.", ioe);
throw new IOException("Failed to start Hyper process.", ioe);
}
} finally {
Thread.currentThread().setContextClassLoader(contextClassLoader);
}
}
return process;
}
private static void downloadHyper(String uri, String fileName, boolean setExecutable)
throws IOException, URISyntaxException {
LOGGER.log(Level.INFO, "Downloading Hyper from: " + uri);
var hyperdFile = HYPER_PATH.resolve(fileName).toFile();
var url = new URI(uri);
var readChannel = Channels.newChannel(url.toURL().openStream());
try (var fos = new FileOutputStream(hyperdFile)) {
var writeChannel = fos.getChannel();
writeChannel.transferFrom(readChannel, 0, Long.MAX_VALUE);
}
if (setExecutable) {
hyperdFile.setExecutable(true);
}
}
private static Connection getConnection(String path) throws IOException {
var process = getProcess();
try {
return new Connection(process.getEndpoint(), path, CreateMode.NONE);
} catch (HyperException e) {
if (e.getMessage().contains("The database does not exist")) {
throw new FileNotFoundException("Database not found: " + path);
} else {
throw new IOException("Failed to open database: " + path, e);
}
}
}
public static String[] readSchemas(String path) throws IOException {
try (var connection = getConnection(path)) {
var catalog = connection.getCatalog();
return catalog.getSchemaNames().stream()
.map(s -> s.getName().getUnescaped())
.toArray(String[]::new);
}
}
public static HyperTable[] listTablesAllSchemas(String path) throws IOException {
try (var connection = getConnection(path)) {
var catalog = connection.getCatalog();
return listTablesImpl(catalog, catalog.getSchemaNames());
}
}
public static HyperTable[] listTables(String path, String schemaName) throws IOException {
var schemaNames = List.of(new SchemaName(schemaName));
try (var connection = getConnection(path)) {
var catalog = connection.getCatalog();
return listTablesImpl(catalog, schemaNames);
}
}
private static HyperTable[] listTablesImpl(Catalog catalog, List<SchemaName> schemaNames) {
var output = new ArrayList<HyperTable>();
for (var schemaName : schemaNames) {
var tables = catalog.getTableNames(schemaName);
for (var table : tables) {
output.add(
new HyperTable(schemaName.getName().getUnescaped(), table.getName().getUnescaped()));
}
}
return output.toArray(HyperTable[]::new);
}
public static HyperTableColumn[] readStructure(String path, String schemaName, String tableName)
throws IOException {
var tableNameObject = new TableName(new SchemaName(schemaName), tableName);
try (var connection = getConnection(path)) {
return readStructureInternal(connection, tableNameObject);
}
}
private static HyperTableColumn[] readStructureInternal(
Connection connection, TableName tableNameObject) {
try {
var catalog = connection.getCatalog();
var definition = catalog.getTableDefinition(tableNameObject);
var columns = definition.getColumns();
return IntStream.range(0, columns.size())
.mapToObj(i -> HyperTableColumn.fromHyperColumn(i, columns.get(i)))
.toArray(HyperTableColumn[]::new);
} catch (HyperException e) {
if (e.getMessage().contains(" does not exist: ")) {
var schemaObject = tableNameObject.getSchemaName();
var schemaName =
schemaObject.isPresent() ? schemaObject.get().getName().getUnescaped() : "";
throw new HyperTableNotFound(schemaName, tableNameObject.getName().getUnescaped(), e);
} else {
throw new HyperQueryError(e.getMessage(), "TABLE_INFO " + tableNameObject, e);
}
}
}
public static Column[] readTable(
String path,
String schemaName,
String tableName,
Integer rowLimit,
ProblemAggregator problemAggregator)
throws IOException {
var tableNameObject = new TableName(new SchemaName(schemaName), tableName);
var query = "SELECT * FROM " + tableNameObject + (rowLimit == null ? "" : " LIMIT " + rowLimit);
try (var connection = getConnection(path)) {
var columns = readStructureInternal(connection, tableNameObject);
var builders =
Arrays.stream(columns)
.map(
c ->
TableColumnBuilder.create(
c, rowLimit == null ? 1000 : rowLimit, problemAggregator))
.toList();
var result = connection.executeQuery(query);
while (result.nextRow()) {
builders.forEach(b -> b.append(result));
}
var storages = builders.stream().map(TableColumnBuilder::seal).toList();
return IntStream.range(0, columns.length)
.mapToObj(i -> new Column(columns[i].name(), storages.get(i)))
.toArray(Column[]::new);
} catch (HyperException e) {
if (e.getMessage().contains(" does not exist: ")) {
throw new HyperTableNotFound(schemaName, tableName, e);
} else {
throw new HyperQueryError(e.getMessage(), query, e);
}
}
}
}

View File

@@ -0,0 +1,4 @@
package org.enso.tableau;
/** Record type for representing a Hyper table. */
public record HyperTable(String schema, String name) {}

View File

@@ -0,0 +1,54 @@
package org.enso.tableau;
import com.tableau.hyperapi.Nullability;
import com.tableau.hyperapi.TableDefinition;
import com.tableau.hyperapi.TypeTag;
import java.sql.Types;
import java.util.OptionalInt;
public record HyperTableColumn(
int index,
String name,
int typeID,
boolean nullable,
OptionalInt length,
OptionalInt precision,
OptionalInt scale) {
/** Type ID for JSON data. */
public static final int JSON = 10001;
/** Type ID for INTERVAL data. */
public static final int INTERVAL = 10002;
static HyperTableColumn fromHyperColumn(int index, TableDefinition.Column hyperColumn) {
return new HyperTableColumn(
index,
hyperColumn.getName().getUnescaped(),
mapTypeTag(hyperColumn.getType().getTag()),
hyperColumn.getNullability().equals(Nullability.NULLABLE),
hyperColumn.getType().getMaxLength(),
hyperColumn.getType().getPrecision(),
hyperColumn.getType().getScale());
}
private static int mapTypeTag(TypeTag tag) {
return switch (tag) {
case BOOL -> Types.BOOLEAN;
case BIG_INT -> Types.BIGINT;
case SMALL_INT -> Types.SMALLINT;
case INT -> Types.INTEGER;
case NUMERIC -> Types.NUMERIC;
case FLOAT -> Types.FLOAT;
case DOUBLE -> Types.DOUBLE;
case TEXT, VARCHAR -> Types.VARCHAR;
case CHAR -> Types.CHAR;
case DATE -> Types.DATE;
case TIME -> Types.TIME;
case TIMESTAMP -> Types.TIMESTAMP;
case TIMESTAMP_TZ -> Types.TIMESTAMP_WITH_TIMEZONE;
case JSON -> JSON;
case INTERVAL -> INTERVAL;
default -> Types.OTHER;
};
}
}

View File

@@ -0,0 +1,20 @@
package org.enso.tableau;
public class HyperTableNotFound extends RuntimeException {
private final String schema;
private final String name;
public HyperTableNotFound(String schema, String name, Throwable cause) {
super("The table " + schema + "." + name + " does not exist.", cause);
this.schema = schema;
this.name = name;
}
public String getSchema() {
return schema;
}
public String getName() {
return name;
}
}

View File

@@ -0,0 +1,30 @@
package org.enso.tableau;
public enum OSPlatform {
WINDOWS,
MAC_ARM64,
MAX_X64,
LINUX,
OTHER;
/** Returns the current platform. */
public static final OSPlatform CurrentPlatform = getPlatform();
private static OSPlatform getPlatform() {
var osName = System.getProperty("os.name").toUpperCase();
if (osName.contains("WIN")) {
return OSPlatform.WINDOWS;
} else if (osName.contains("MAC")) {
var osArch = System.getProperty("os.arch").toUpperCase();
if (osArch.contains("ARM64") || osArch.contains("AARCH64")) {
return OSPlatform.MAC_ARM64;
} else {
return OSPlatform.MAX_X64;
}
} else if (osName.contains("LINUX")) {
return OSPlatform.LINUX;
} else {
return OSPlatform.OTHER;
}
}
}

View File

@@ -0,0 +1,215 @@
package org.enso.tableau;
import com.tableau.hyperapi.Result;
import java.sql.Types;
import java.time.Duration;
import java.time.Period;
import java.time.ZoneId;
import java.util.function.Consumer;
import org.enso.table.data.column.builder.BigDecimalBuilder;
import org.enso.table.data.column.builder.BigIntegerBuilder;
import org.enso.table.data.column.builder.BoolBuilder;
import org.enso.table.data.column.builder.Builder;
import org.enso.table.data.column.builder.DateBuilder;
import org.enso.table.data.column.builder.DateTimeBuilder;
import org.enso.table.data.column.builder.InferredBuilder;
import org.enso.table.data.column.builder.NumericBuilder;
import org.enso.table.data.column.builder.ObjectBuilder;
import org.enso.table.data.column.builder.StringBuilder;
import org.enso.table.data.column.builder.TimeOfDayBuilder;
import org.enso.table.data.column.storage.Storage;
import org.enso.table.data.column.storage.type.IntegerType;
import org.enso.table.data.column.storage.type.TextType;
import org.enso.table.problems.ProblemAggregator;
/** A builder for a single column of a table. */
record TableColumnBuilder(Builder builder, Consumer<Result> appendMethod) {
private static Consumer<Result> nullAppender(Builder builder, int index, Consumer<Result> inner) {
return r -> {
if (r.isNull(index)) {
builder.appendNulls(1);
} else {
inner.accept(r);
}
};
}
/**
* Convert a Tableau Interval into either a Duration or a Period (with fallback to String if
* needed).
*/
private static Object readInterval(Result r, int index) {
var interval = r.getInterval(index);
if (interval.getMonths() == 0 && interval.getYears() == 0) {
// Treat as a Duration
long seconds =
((interval.getDays() * 24L + interval.getHours()) * 60 + interval.getMinutes()) * 60
+ interval.getSeconds();
return Duration.ofNanos(seconds * 1_000_000_000L + interval.getMicroseconds() * 1_000L);
} else if (interval.getHours() == 0
&& interval.getMinutes() == 0
&& interval.getSeconds() == 0
&& interval.getMicroseconds() == 0) {
// Treat as a Period
return Period.of(interval.getYears(), interval.getMonths(), interval.getDays());
} else {
// Can't do better than toString
return interval.toString();
}
}
public static TableColumnBuilder create(
HyperTableColumn column, int initialRowCount, ProblemAggregator problemAggregator) {
switch (column.typeID()) {
case Types.BOOLEAN:
var boolBuilder = new BoolBuilder(initialRowCount);
return new TableColumnBuilder(
boolBuilder,
nullAppender(
boolBuilder,
column.index(),
r -> boolBuilder.appendBoolean(r.getBool(column.index()))));
case Types.BIGINT:
var longBuilder =
NumericBuilder.createLongBuilder(
initialRowCount, IntegerType.INT_64, problemAggregator);
return new TableColumnBuilder(
longBuilder,
nullAppender(
longBuilder,
column.index(),
r -> longBuilder.appendLong(r.getLong(column.index()))));
case Types.INTEGER:
var intBuilder =
NumericBuilder.createLongBuilder(
initialRowCount, IntegerType.INT_32, problemAggregator);
return new TableColumnBuilder(
intBuilder,
nullAppender(
intBuilder, column.index(), r -> intBuilder.appendLong(r.getInt(column.index()))));
case Types.SMALLINT:
var shortBuilder =
NumericBuilder.createLongBuilder(
initialRowCount, IntegerType.INT_16, problemAggregator);
return new TableColumnBuilder(
shortBuilder,
nullAppender(
shortBuilder,
column.index(),
r -> shortBuilder.appendLong(r.getShort(column.index()))));
case Types.NUMERIC:
if (column.scale().isEmpty()) {
throw new IllegalArgumentException("NUMERIC column must have a scale.");
}
if (column.scale().getAsInt() == 0) {
var bigIntBuilder = new BigIntegerBuilder(initialRowCount, problemAggregator);
return new TableColumnBuilder(
bigIntBuilder,
nullAppender(
bigIntBuilder,
column.index(),
r -> bigIntBuilder.append(r.getBigDecimal(column.index()).toBigInteger())));
} else {
var bigDecimalBuilder = new BigDecimalBuilder(initialRowCount);
return new TableColumnBuilder(
bigDecimalBuilder,
nullAppender(
bigDecimalBuilder,
column.index(),
r -> bigDecimalBuilder.append(r.getBigDecimal(column.index()))));
}
case Types.FLOAT:
var floatBuilder = NumericBuilder.createDoubleBuilder(initialRowCount, problemAggregator);
return new TableColumnBuilder(
floatBuilder,
nullAppender(
floatBuilder,
column.index(),
r -> floatBuilder.appendDouble(r.getFloat(column.index()))));
case Types.DOUBLE:
var doubleBuilder = NumericBuilder.createDoubleBuilder(initialRowCount, problemAggregator);
return new TableColumnBuilder(
doubleBuilder,
nullAppender(
doubleBuilder,
column.index(),
r -> doubleBuilder.appendDouble(r.getDouble(column.index()))));
case Types.VARCHAR, Types.CHAR:
var textType =
column.length().isEmpty()
? new TextType(-1, false)
: new TextType(column.length().getAsInt(), column.typeID() == Types.CHAR);
var textBuilder = new StringBuilder(initialRowCount, textType);
return new TableColumnBuilder(
textBuilder,
nullAppender(
textBuilder, column.index(), r -> textBuilder.append(r.getString(column.index()))));
case Types.DATE:
var dateBuilder = new DateBuilder(initialRowCount);
return new TableColumnBuilder(
dateBuilder,
nullAppender(
dateBuilder,
column.index(),
r -> dateBuilder.appendDate(r.getLocalDate(column.index()))));
case Types.TIME:
var timeBuilder = new TimeOfDayBuilder(initialRowCount);
return new TableColumnBuilder(
timeBuilder,
nullAppender(
timeBuilder,
column.index(),
r -> timeBuilder.append(r.getLocalTime(column.index()))));
case Types.TIMESTAMP:
var dateTimeBuilder = new DateTimeBuilder(initialRowCount);
return new TableColumnBuilder(
dateTimeBuilder,
nullAppender(
dateTimeBuilder,
column.index(),
r ->
dateTimeBuilder.append(
r.getLocalDateTime(column.index()).atZone(ZoneId.systemDefault()))));
case Types.TIMESTAMP_WITH_TIMEZONE:
var dateTimeTzBuilder = new DateTimeBuilder(initialRowCount);
return new TableColumnBuilder(
dateTimeTzBuilder,
nullAppender(
dateTimeTzBuilder,
column.index(),
r -> dateTimeTzBuilder.append(r.getZonedDateTime(column.index()))));
case HyperTableColumn.JSON:
var jsonBuilder = new ObjectBuilder(initialRowCount);
return new TableColumnBuilder(
jsonBuilder,
nullAppender(
jsonBuilder, column.index(), r -> jsonBuilder.append(r.getString(column.index()))));
case HyperTableColumn.INTERVAL:
var intervalBuilder = new InferredBuilder(initialRowCount, problemAggregator);
return new TableColumnBuilder(
intervalBuilder,
nullAppender(
intervalBuilder,
column.index(),
r -> intervalBuilder.append(readInterval(r, column.index()))));
case Types.OTHER:
var mixedBuilder = new ObjectBuilder(initialRowCount);
return new TableColumnBuilder(
mixedBuilder,
nullAppender(
mixedBuilder,
column.index(),
r -> mixedBuilder.append(r.getObject(column.index()))));
}
throw new IllegalArgumentException("Unsupported column type: " + column.typeID());
}
public void append(Result result) {
appendMethod.accept(result);
}
public Storage<?> seal() {
return builder.seal();
}
}

View File

@@ -0,0 +1,21 @@
package org.enso.tableau;
import org.enso.base.file_format.FileFormatSPI;
@org.openide.util.lookup.ServiceProvider(service = FileFormatSPI.class)
public class TableauFormatSPI extends FileFormatSPI {
@Override
protected String getModuleName() {
return "Standard.Tableau.Tableau_Format";
}
@Override
protected String getTypeName() {
return "Tableau_Format";
}
@Override
protected String getDataLinkFormatName() {
return "tableau";
}
}

Binary file not shown.

Binary file not shown.

View File

@@ -0,0 +1,7 @@
name: Tableau_Tests
namespace: enso_dev
version: 0.0.1
license: MIT
author: enso-dev@enso.org
maintainer: enso-dev@enso.org
prefer-local-libraries: true

View File

@@ -0,0 +1,15 @@
from Standard.Base import all
from Standard.Test import Test
import project.Read_Spec
import project.Structure_Spec
add_specs suite_builder =
Structure_Spec.add_specs suite_builder
Read_Spec.add_specs suite_builder
main filter=Nothing =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter filter

View File

@@ -0,0 +1,66 @@
from Standard.Base import all
import Standard.Base.Errors.File_Error.File_Error
from Standard.Table import all
from Standard.Tableau import Hyper_File
import Standard.Tableau.Hyper_Errors.Table_Not_Found
from Standard.Test import all
main filter=Nothing =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter filter
add_specs suite_builder = suite_builder.group "Read Tables" group_builder->
no_file = enso_project.data / "no_file.hyper"
names_file = enso_project.data / "names.hyper"
names_table = Hyper_File.new names_file . tables . first
check_names_table table rows=14 =
table.row_count.should_equal rows
table.columns.map .name . should_equal ["first_name", "last_name", "age", "Calculation1"]
table.columns.map .value_type . should_equal ([Value_Type.Char Nothing variable_length=True, Value_Type.Char Nothing variable_length=True, Value_Type.Integer ..Bits_64, Value_Type.Integer ..Bits_64].take rows)
table.at "first_name" . to_vector . should_equal (["James", "John", "Robert", "Michael", "William", "David", "Richard", "Joseph", "Charles", "Thomas", "Daniel", Nothing, "Anthony", Nothing].take rows)
table.at "last_name" . to_vector . should_equal (["Smith", "Johnson", "Williams", "Jones", "Brown", "Davis", "Miller", "Wilson", "Moore", "Taylor", "Anderson", "Garcia", Nothing, Nothing].take rows)
table.at "age" . to_vector . should_equal ([39, 40, 41, 42, 43, 44, 39, 9, 10, 11, 12, 1, 2, 25].take rows)
group_builder.specify "should be able to read the structure of a table" <|
metadata = names_table.column_metadata
metadata.length.should_equal 4
metadata.map .name . should_equal ["first_name", "last_name", "age", "Calculation1"]
metadata.map .value_type . should_equal [Value_Type.Char Nothing variable_length=True, Value_Type.Char Nothing variable_length=True, Value_Type.Integer ..Bits_64, Value_Type.Integer ..Bits_64]
metadata.map .nullable . should_equal [True, True, True, True]
group_builder.specify "should be able to read the column_info of a table" <|
metadata = names_table.column_info
metadata.row_count.should_equal 4
metadata.columns.map .name . should_equal ["Column", "Value Type", "Nullable"]
metadata.at "Column" . to_vector . should_equal ["first_name", "last_name", "age", "Calculation1"]
metadata.at "Value Type" . to_vector . should_equal [Value_Type.Char Nothing variable_length=True, Value_Type.Char Nothing variable_length=True, Value_Type.Integer ..Bits_64, Value_Type.Integer ..Bits_64]
group_builder.specify "should be able to read a table" <|
table = names_table.read
check_names_table table
table_2 = Hyper_File.new names_file . read "Extract"
check_names_table table_2
group_builder.specify "should be able to read a table with limited rows" <|
table = names_table.read (..First 5)
check_names_table table 5
table_2 = Hyper_File.new names_file . read "Extract" limit=(..First 7)
check_names_table table_2 7
group_builder.specify "should handle a missing file when reading a table" <|
r1 = Hyper_File.new no_file . read "Extract"
r1.should_fail_with File_Error
r1.catch.should_be_a File_Error.Not_Found
group_builder.specify "should handle a missing table when reading a table" <|
r1 = Hyper_File.new names_file . read "NoTable"
r1.should_fail_with Table_Not_Found
r2 = Hyper_File.new names_file . read "NoTable" schema="Extract"
r2.should_fail_with Table_Not_Found

View File

@@ -0,0 +1,64 @@
from Standard.Base import all
import Standard.Base.Errors.File_Error.File_Error
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
from Standard.Tableau import Hyper_File
from Standard.Test import all
main filter=Nothing =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter filter
add_specs suite_builder = suite_builder.group "Structure" group_builder->
no_file = enso_project.data / "no_file.hyper"
empty_file = enso_project.data / "empty.hyper"
names_file = enso_project.data / "names.hyper"
group_builder.specify "should default to all schema" <|
Hyper_File.new names_file . schema . should_equal "*"
group_builder.specify "should be able to list schema" <|
schemas = Hyper_File.new names_file . schemas
schemas.sort.should_equal ["Extract", "public"]
schemas_2 = Hyper_File.new empty_file . schemas
schemas_2.should_equal ["public"]
group_builder.specify "should handle a missing file when listing schema" <|
r1 = Hyper_File.new no_file . schemas
r1.should_fail_with File_Error
r1.catch.should_be_a File_Error.Not_Found
group_builder.specify "should default to set schema" <|
Hyper_File.new names_file schema="Extract" . schema . should_equal "Extract"
Hyper_File.new names_file . set_schema "Extract" . schema . should_equal "Extract"
Hyper_File.new names_file schema="*" . schema . should_equal "*"
Hyper_File.new names_file schema="*" . set_schema "Extract" . schema . should_equal "Extract"
group_builder.specify "should reject invalid schema to set schema" <|
Hyper_File.new names_file schema="" . should_fail_with Illegal_Argument
Hyper_File.new names_file . set_schema "" . should_fail_with Illegal_Argument
group_builder.specify "should be able to list table" <|
tables = Hyper_File.new names_file . tables
tables.length.should_equal 1
tables.first.schema.should_equal "Extract"
tables.first.table.should_equal "Extract"
tables_2 = Hyper_File.new names_file schema="Extract" . tables
tables_2.length.should_equal 1
tables_2.first.schema.should_equal "Extract"
tables_2.first.table.should_equal "Extract"
tables_3 = Hyper_File.new names_file schema="public" . tables
tables_3.length.should_equal 0
tables_4 = Hyper_File.new names_file schema="DoesNotExist" . tables
tables_4.length.should_equal 0
group_builder.specify "should handle a missing file when listing tables" <|
r1 = Hyper_File.new no_file . tables
r1.should_fail_with File_Error
r1.catch.should_be_a File_Error.Not_Found

View File

@@ -0,0 +1,3 @@
Copyright (c) 2016 - 2023, Salesforce, Inc. and its licensors. All rights reserved.
Protected by U.S. Patents and Trademarks as noted at http://www.tableau.com/ip; Patents pending.

View File

@@ -0,0 +1,15 @@
Copyright (c) 2007 Timothy Wall, All Rights Reserved
Copyright (c) 2007 Wayne Meissner, All Rights Reserved
Copyright (c) 2007-2008 Timothy Wall, All Rights Reserved
Copyright (c) 2007-2012 Timothy Wall, All Rights Reserved
Copyright (c) 2007-2013 Timothy Wall, All Rights Reserved
Copyright (c) 2007-2015 Timothy Wall, All Rights Reserved
Copyright (c) 2009 Timothy Wall, All Rights Reserved
Copyright (c) 2011 Timothy Wall, All Rights Reserved
Copyright (c) 2012 Timothy Wall, All Rights Reserved
Copyright (c) 2017 Matthias Bläsing, All Rights Reserved
Copyright (c) 2018 Matthias Bläsing
Copyright (c) 2019 Matthias Bläsing, All Rights Reserved
Copyright (c) 2021, Matthias Bläsing, All Rights Reserved
Copyright (c) 2022 Carlos Ballesteros, All Rights Reserved
Copyright 2007 Timothy Wall

View File

@@ -0,0 +1,2 @@
case 61: return XA_COPYRIGHT;
Atom XA_COPYRIGHT = new Atom(61);

View File

@@ -0,0 +1,47 @@
Copyright (c) 2007 Olivier Chafik
Copyright (c) 2007 Olivier Chafik, All Rights Reserved
Copyright (c) 2007 Timothy Wall, All Rights Reserved
Copyright (c) 2007, 2013 Timothy Wall, Markus Karg, All Rights Reserved
Copyright (c) 2007-2008 Timothy Wall, All Rights Reserved
Copyright (c) 2007-2013 Timothy Wall, All Rights Reserved
Copyright (c) 2008 Timothy Wall, All Rights Reserved
Copyright (c) 2010 Daniel Doubrovkine, All Rights Reserved
Copyright (c) 2010 EugineLev, All Rights Reserved
Copyright (c) 2010 Timothy Wall, All Rights Reserved
Copyright (c) 2010, 2013 Daniel Doubrovkine, Markus Karg, All Rights Reserved
Copyright (c) 2010,2011 Daniel Doubrovkine, All Rights Reserved
Copyright (c) 2011 Daniel Doubrovkine, All Rights Reserved
Copyright (c) 2011 Denis Tulskiy
Copyright (c) 2011 Timothy Wall, All Rights Reserved
Copyright (c) 2012 Tobias Wolf, All Rights Reserved
Copyright (c) 2013 Ralf Hamberger, Markus Karg, All Rights Reserved
Copyright (c) 2013 Tobias Wolf, All Rights Reserved
Copyright (c) 2014 Dr David H. Akehurst (itemis), All Rights Reserved
Copyright (c) 2014 Reinhard Pointner, All Rights Reserved
Copyright (c) 2015 Adam Marcionek, All Rights Reserved
Copyright (c) 2015 Andreas "PAX" L\u00FCck, All Rights Reserved
Copyright (c) 2015 Daniel Widdis
Copyright (c) 2015 Goldstein Lyor, 2021 Daniel Widdis, All Rights Reserved
Copyright (c) 2015 Goldstein Lyor, All Rights Reserved
Copyright (c) 2015 Michael Freeman, All Rights Reserved
Copyright (c) 2016 Adam Marcionek, All Rights Reserved
Copyright (c) 2016 Minoru Sakamoto, All Rights Reserved
Copyright (c) 2017 Daniel Widdis, All Rights Reserved
Copyright (c) 2017 Matthias Bläsing, All Rights Reserved
Copyright (c) 2018 Daniel Widdis, All Rights Reserved
Copyright (c) 2018 Matthias Bläsing, All Rights Reserved
Copyright (c) 2018 Roshan Muralidharan, All Rights Reserved
Copyright (c) 2018 Václav Haisman, All Rights Reserved
Copyright (c) 2018, 2021 Daniel Widdis, All Rights Reserved
Copyright (c) 2018,2020,2021 Daniel Widdis, All Rights Reserved
Copyright (c) 2019 Daniel Widdis
Copyright (c) 2019 Daniel Widdis, All Rights Reserved
Copyright (c) 2019 Keve Müller
Copyright (c) 2019, 2021 Daniel Widdis
Copyright (c) 2020 Daniel Widdis, All Rights Reserved
Copyright (c) 2020 Torbjörn Svensson, All Rights Reserved
Copyright (c) 2022 Daniel Widdis, All Rights Reserved
Copyright (c) 2023 Reinhard Pointner, All Rights Reserved
Copyright 2010 Digital Rapids Corp.
Copyright 2014 Martin Steiger
Conversion code in this class Copyright 2002-2004 Apache Software Foundation.

View File

@@ -0,0 +1,3 @@
B499CA6488883A07DDC6EE0844F6FD12155E8E36FA8721714C6572E1388728F5
F43F163A7B66354A2F2EC3F4B9121806D0A2B596D44578AFF2EEF363E8CE2AE4
0

View File

@@ -0,0 +1 @@
tools/legal-review/license-texts/APACHE2.0