Mirror of https://github.com/enso-org/enso.git, synced 2024-11-27 05:23:48 +03:00
Tidy up the public module level statics (#6032)
Tidies up a lot of PUBLIC module-level statics: marks some as PRIVATE and turns others into methods of their types.
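The pattern is the same throughout: a module-level binding either gets a `## PRIVATE` doc tag or becomes a method of the type it belongs to, so call sites go through the type (for example `SQL.code` and `SQL.join` become `Builder.code` and `Builder.join`). A minimal sketch of the second variant, following the `Data.SQL` changes in this diff (simplified):

```
# Before: PUBLIC module-level static in Data/SQL.enso, called as SQL.code.
code : Text -> Builder
code text =
    vec = if text.is_empty then [] else [SQL_Fragment.Code_Part text]
    Builder.Value (Vector_Builder.from_vector vec)

# After: the same function lives on the Builder type and is called as Builder.code.
type Builder
    ## Creates a Builder representing a code fragment containing the specified
       raw code.
    code : Text -> Builder
    code text =
        vec = if text.is_empty then [] else [SQL_Fragment.Code_Part text]
        Builder.Value (Vector_Builder.from_vector vec)
```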
This commit is contained in:
parent 1b30a5275f
commit dd009fd1af
.github/CODEOWNERS (vendored): 1 change
@@ -23,6 +23,7 @@ Cargo.toml
# GUI
/app/gui/ @MichaelMauderer @wdanilo @farmaazon @mwu-tow @kazcw
/app/gui/view/ @MichaelMauderer @wdanilo @farmaazon @kazcw
+/app/gui/view/graph-editor/src/builtin/visualization/java_script/ @MichaelMauderer @wdanilo @farmaazon @kazcw @jdunkerley
/app/ide-desktop/ @MichaelMauderer @wdanilo @kazcw

# Engine (old)
@@ -160,7 +160,7 @@ class TableVisualization extends Visualization {
rowData = parsedData.json
dataTruncated = parsedData.all_rows_count !== parsedData.json.length
} else if (parsedData.json != null && isObjectMatrix(parsedData.json)) {
-let firstKeys = Object.keys(data[0])
+let firstKeys = Object.keys(parsedData.json[0])
columnDefs = firstKeys.map(field => ({ field }))
rowData = parsedData.json.map(obj =>
firstKeys.reduce((acc, key) => ({ ...acc, [key]: toRender(obj[key]) }), {})
@@ -268,23 +268,37 @@ make_field_name_selector : JS_Object -> Display -> Single_Choice
make_field_name_selector js_object display=Display.Always =
Single_Choice display=display values=(js_object.field_names.map n->(Option n n.pretty))

+## PRIVATE
+Make a new JavaScript object.
foreign js new_object = """
return {}

+## PRIVATE
+Parse a text value into JavaScript object.
foreign js json_parse text = """
return JSON.parse(text)

+## PRIVATE
+Convert a JavaScript object to a text value.
foreign js json_stringify js_object = """
return JSON.stringify(js_object)

+## PRIVATE
+Check a JavaScript object has a given property.
foreign js has_property js_object key = """
return js_object.hasOwnProperty(key)

+## PRIVATE
+Get a value from a JavaScript object.
foreign js get_value object key = """
return object[key]

+## PRIVATE
+Set a value on a JavaScript object and return the new object.
foreign js set_value object key value = """
return {...object, [key]: value}

+## PRIVATE
+Gets all the property names of a JavaScript object.
foreign js get_property_names object = """
return Object.getOwnPropertyNames(object)
@@ -10,7 +10,7 @@ from project.Data.Boolean import True, False

polyglot java import org.enso.base.ObjectComparator

-## ADVANCED
+## PRIVATE
Creates a Java Comparator object which can call back to Enso for comparison
of non-primitive types.

@@ -26,7 +26,7 @@ new custom_comparator=Nothing =
Nothing -> ObjectComparator.getInstance (comparator_to_java Ordering.compare)
_ -> ObjectComparator.new (comparator_to_java custom_comparator)

-## ADVANCED
+## PRIVATE
Create a Java Comparator with the specified Text_Ordering

Arguments:
@@ -13,7 +13,7 @@ import project.Error.Error
import project.Meta
import project.Nothing.Nothing
import project.Panic.Panic
-import project.Polyglot
+import project.Polyglot.Polyglot

from project.Data.Boolean import Boolean, True, False

@@ -22,7 +22,6 @@ import project.Math
import project.Meta
import project.Nothing.Nothing
import project.Panic.Panic
-import project.Polyglot

from project.Data.Boolean import Boolean, True, False
from project.Data.Time.Date_Time import ensure_in_epoch
@@ -14,8 +14,8 @@ import project.Math
import project.Meta
import project.Nothing.Nothing
import project.Panic.Panic
-import project.Polyglot
import project.Polyglot.Java
+import project.Polyglot.Polyglot
import project.Runtime
import project.System
import project.System.Environment
@@ -44,8 +44,8 @@ export project.Math
export project.Meta
export project.Nothing.Nothing
export project.Panic.Panic
-export project.Polyglot
export project.Polyglot.Java
+export project.Polyglot.Polyglot
export project.Runtime
export project.System
export project.System.Environment
@@ -14,7 +14,6 @@ import project.Nothing.Nothing
import project.Polyglot.Java

import project.Error.Error as Base_Error
-import project.Polyglot as Base_Polyglot

from project.Data.Boolean import Boolean, True, False

@@ -1,18 +1,17 @@
## A module representing interactions with polyglot languages.
Polyglot is a term that refers to other languages (such as Java) that are
running on the same JVM.

import project.Any.Any
import project.Data.Array.Array
import project.Data.Boolean.Boolean
import project.Data.Numbers.Integer
import project.Data.Text.Text
import project.Data.Vector.Vector
+import project.Nothing.Nothing
import project.Runtime.Source_Location.Source_Location

@Builtin_Type
type Polyglot

## Reads the number of elements in a given polyglot array object.

Arguments:
@@ -115,3 +114,47 @@ is_language_installed language_name = @Builtin_Method "Polyglot.is_language_inst
Returns the executable name of a polyglot object.
get_executable_name : Any -> Text
get_executable_name value = @Builtin_Method "Polyglot.get_executable_name"
+
+## Utilities for working with Java polyglot objects.
+type Java
+## Adds the provided entry to the host class path.
+
+Arguments:
+- path: The java classpath entry to add.
+
+Use of the actual polyglot imports system should be preferred to use of
+this method.
+
+> Example
+Adding Random to the classpath.
+
+Java.add_to_class_path "java.util.Random"
+add_to_class_path : Text -> Nothing
+add_to_class_path path = @Builtin_Method "Java.add_to_class_path"
+
+## Looks up a java symbol on the classpath by name.
+
+Arguments:
+- name: The name of the java symbol to look up.
+
+Use of the actual polyglot imports system should be preferred to use of
+this method.
+
+> Example
+Look up java's Random class.
+
+Java.lookup_class "java.util.Random"
+lookup_class : Text -> Any
+lookup_class name = @Builtin_Method "Java.lookup_class"
+
+## PRIVATE
+
+Checks whether an object is an instance of a given class.
+
+Arguments:
+- object: The object to check for class membership.
+- class: The java class to check for membership in.
+is_instance : Any -> Any -> Boolean
+is_instance object class =
+class_object = class.class
+class_object.isInstance object
@@ -1,48 +0,0 @@
-## Utilities for working with Java polyglot objects.
-
-import project.Any.Any
-import project.Data.Boolean.Boolean
-import project.Data.Text.Text
-import project.Nothing.Nothing
-
-## Adds the provided entry to the host class path.
-
-Arguments:
-- path: The java classpath entry to add.
-
-Use of the actual polyglot imports system should be preferred to use of
-this method.
-
-> Example
-Adding Random to the classpath.
-
-Java.add_to_class_path "java.util.Random"
-add_to_class_path : Text -> Nothing
-add_to_class_path path = @Builtin_Method "Java.add_to_class_path"
-
-## Looks up a java symbol on the classpath by name.
-
-Arguments:
-- name: The name of the java symbol to look up.
-
-Use of the actual polyglot imports system should be preferred to use of
-this method.
-
-> Example
-Look up java's Random class.
-
-Java.lookup_class "java.util.Random"
-lookup_class : Text -> Any
-lookup_class name = @Builtin_Method "Java.lookup_class"
-
-## PRIVATE
-
-Checks whether an object is an instance of a given class.
-
-Arguments:
-- object: The object to check for class membership.
-- class: The java class to check for membership in.
-is_instance : Any -> Any -> Boolean
-is_instance object class =
-class_object = class.class
-class_object.isInstance object
@@ -3,7 +3,7 @@ import project.Data.Array.Array
import project.Data.Text.Text
import project.Data.Vector.Vector
import project.Nothing.Nothing
-import project.Polyglot
+import project.Polyglot.Polyglot
import project.Runtime.Source_Location.Source_Location

from project.Data.Index_Sub_Range.Index_Sub_Range import First, Last
@@ -7,7 +7,7 @@ import project.Data.Pair.Pair
import project.Data.Vector.Vector
import project.Error.Error
import project.Nothing.Nothing
-import project.Polyglot
+import project.Polyglot.Polyglot
import project.Runtime
import project.Runtime.Source_Location.Source_Location
import project.Runtime.Stack_Trace_Element
@@ -6,51 +6,6 @@ import Standard.Table.Internal.Vector_Builder.Vector_Builder
import project.Data.SQL_Type.SQL_Type
import project.Data.SQL_Statement.SQL_Statement

-## UNSTABLE
-
-Creates a Builder representing and empty code fragment.
-empty : Builder
-empty = Builder.Value (Vector_Builder.empty)
-
-## UNSTABLE
-
-Creates a Builder representing a code fragment containing the specified raw
-code.
-
-Arguments:
-- text: The raw SQL code.
-code : Text -> Builder
-code text =
-vec = if text.is_empty then [] else [SQL_Fragment.Code_Part text]
-Builder.Value (Vector_Builder.from_vector vec)
-
-## UNSTABLE
-
-Creates a Builder representing an interpolation of the given object.
-
-Arguments:
-- sql_type: The expected SQL type of `object`.
-- object: The object to be interpolated into the query as if it has the type
-given by `sql_type`.
-interpolation : SQL_Type -> Any -> Builder
-interpolation sql_type object = Builder.Value (Vector_Builder.from_vector [SQL_Fragment.Interpolation sql_type object])
-
-## UNSTABLE
-
-Joins a vector of code fragments with the provided separator.
-
-Arguments:
-- separator: The separator to use when joining the code fragments.
-- statements: The SQL statements to join using `separator`.
-join : Builder | Text -> Vector Builder -> Builder
-join separator statements =
-sep = case separator of
-Builder.Value _ -> separator
-_ -> code separator
-
-if statements.length == 0 then empty else
-(1.up_to statements.length . fold (statements.at 0) acc-> i-> acc ++ sep ++ statements.at i)
-
## UNSTABLE

A fragment of a SQL query.
@@ -59,7 +14,6 @@ join separator statements =
SQL_Fragment.Interpolation which represents an object that will be
interpolated into the query.
type SQL_Fragment

## UNSTABLE

A SQL fragment that represents raw SQL code.
@@ -80,6 +34,42 @@ type SQL_Fragment
Interpolation sql_type:SQL_Type object:Any

type Builder
+## Creates a Builder representing and empty code fragment.
+empty : Builder
+empty = Builder.Value (Vector_Builder.empty)
+
+## Creates a Builder representing a code fragment containing the specified raw
+code.
+
+Arguments:
+- text: The raw SQL code.
+code : Text -> Builder
+code text =
+vec = if text.is_empty then [] else [SQL_Fragment.Code_Part text]
+Builder.Value (Vector_Builder.from_vector vec)
+
+## Creates a Builder representing an interpolation of the given object.
+
+Arguments:
+- sql_type: The expected SQL type of `object`.
+- object: The object to be interpolated into the query as if it has the type
+given by `sql_type`.
+interpolation : SQL_Type -> Any -> Builder
+interpolation sql_type object = Builder.Value (Vector_Builder.from_vector [SQL_Fragment.Interpolation sql_type object])
+
+## Joins a vector of code fragments with the provided separator.
+
+Arguments:
+- separator: The separator to use when joining the code fragments.
+- statements: The SQL statements to join using `separator`.
+join : Builder | Text -> Vector Builder -> Builder
+join separator statements =
+sep = case separator of
+Builder.Value _ -> separator
+_ -> Builder.code separator
+
+if statements.length == 0 then Builder.empty else
+(1.up_to statements.length . fold (statements.at 0) acc-> i-> acc ++ sep ++ statements.at i)
+
## PRIVATE

@@ -100,7 +90,7 @@ type Builder
- other: The code fragment to append to `self`.
++ : Builder -> Builder
++ self other = case other of
-text : Text -> if text == "" then self else Builder.Value (self.fragments ++ (code text).fragments)
+text : Text -> if text == "" then self else Builder.Value (self.fragments ++ (Builder.code text).fragments)
_ -> Builder.Value (self.fragments ++ other.fragments)

## UNSTABLE
@@ -121,7 +111,7 @@ type Builder

Wraps the code fragment in parentheses.
paren : Builder
-paren self = code "(" ++ self ++ ")"
+paren self = Builder.code "(" ++ self ++ ")"

## UNSTABLE

@@ -135,7 +125,7 @@ type Builder
prefix_if_present self prefix =
pref = case prefix of
_ : Builder -> prefix
-_ -> code prefix
+_ -> Builder.code prefix
if self.is_empty then self else pref++self

## PRIVATE
@@ -1548,9 +1548,8 @@ type Table
Arguments:
- connection: The connection to a database.
- table_name: The name of the table to get.
-- columns: The names of the columns to get.
+- columns: List of columns to fetch. Each column is represented by a pair of column name and its expected SQL Type.
- ctx: The context to use for the table.
-# make_table : Connection -> Text -> Vector [Text, SQL_Type] -> Context -> Table
make_table : Connection -> Text -> Vector -> Context -> Table
make_table connection table_name columns ctx =
if columns.is_empty then Error.throw (Illegal_State.Error "Unexpectedly attempting to create a Database Table with no columns. This is a bug in the Database library.") else
@@ -1,7 +1,6 @@
from Standard.Base import all
import Standard.Base.Errors.Illegal_State.Illegal_State

-import project.Data.SQL
import project.Data.SQL.Builder
import project.Internal.IR.Context.Context
import project.Internal.IR.SQL_Expression.SQL_Expression
@@ -10,7 +9,6 @@ import project.Internal.IR.SQL_Join_Kind.SQL_Join_Kind
import project.Internal.IR.Order_Descriptor.Order_Descriptor
import project.Internal.IR.Nulls_Order.Nulls_Order
import project.Internal.IR.Query.Query
-from project.Data.SQL import code

from project.Errors import Unsupported_Database_Operation

@@ -69,7 +67,7 @@ make_unary_op name =
arguments ->
case arguments.length == 1 of
True ->
-(code name+" ")++(arguments.at 0) . paren
+(Builder.code name+" ")++(arguments.at 0) . paren
False ->
Error.throw <| Illegal_State.Error ("Invalid amount of arguments for operation " + name)

@@ -130,7 +128,7 @@ make_right_unary_op name =
make_function : Text -> (Vector Builder -> Builder)
make_function name =
arguments ->
-(code name) ++ (SQL.join ", " arguments . paren)
+(Builder.code name) ++ (Builder.join ", " arguments . paren)

## PRIVATE

@@ -142,7 +140,7 @@ make_constant : Text -> (Vector Builder -> Builder)
make_constant sql_code =
arguments ->
if arguments.not_empty then Error.throw <| Illegal_State.Error "No arguments were expected" else
-code sql_code
+Builder.code sql_code

## PRIVATE

@@ -157,7 +155,7 @@ make_constant sql_code =
wrap_in_quotes : Text -> Builder
wrap_in_quotes identifier =
escaped = identifier.replace '"' '""'
-code '"'+escaped+'"'
+Builder.code '"'+escaped+'"'

## PRIVATE

@@ -197,7 +195,7 @@ make_iif arguments = case arguments.length of
expr = arguments.at 0
when_true = arguments.at 1
when_false = arguments.at 2
-(code "CASE WHEN" ++ expr ++ " THEN " ++ when_true ++ " WHEN " ++ expr ++ " IS NULL THEN NULL ELSE " ++ when_false ++ " END").paren
+(Builder.code "CASE WHEN" ++ expr ++ " THEN " ++ when_true ++ " WHEN " ++ expr ++ " IS NULL THEN NULL ELSE " ++ when_false ++ " END").paren
_ ->
Error.throw <| Illegal_State.Error ("Invalid amount of arguments for operation IIF")

@@ -218,14 +216,14 @@ make_is_in arguments = case arguments.length of
0 -> Error.throw <| Illegal_State.Error ("The operation IS_IN requires at least one argument.")
## If only the self argument is provided, no value will ever be in the empty list, so we just short circuit to false.
`IN ()` would be more meaningful, but it is a syntax error.
-1 -> code 'FALSE' . paren
+1 -> Builder.code 'FALSE' . paren
_ ->
expr = arguments.first
list = arguments.drop 1
-is_in = expr ++ " IN (" ++ (SQL.join ", " list) ++ ")"
+is_in = expr ++ " IN (" ++ (Builder.join ", " list) ++ ")"
## We ensure that even `NULL IN (...)` is coalesced to False, so that
negation will work as expected.
-code "COALESCE(" ++ is_in ++ ", FALSE)"
+Builder.code "COALESCE(" ++ is_in ++ ", FALSE)"

## PRIVATE
make_is_in_column : Vector Builder -> Builder
@@ -234,9 +232,9 @@ make_is_in_column arguments = case arguments.length of
expr = arguments.at 0
in_query = arguments.at 1
has_nulls_query = arguments.at 2
-is_in = code "COALESCE(" ++ expr ++ " IN (" ++ in_query ++ "), FALSE)"
+is_in = Builder.code "COALESCE(" ++ expr ++ " IN (" ++ in_query ++ "), FALSE)"
has_nulls = has_nulls_query.paren ++ " = TRUE"
-code "CASE WHEN " ++ expr ++ " IS NULL THEN " ++ has_nulls ++ " ELSE " ++ is_in ++ " END"
+Builder.code "CASE WHEN " ++ expr ++ " IS NULL THEN " ++ has_nulls ++ " ELSE " ++ is_in ++ " END"
_ -> Error.throw <| Illegal_State.Error ("The operation IS_IN_COLUMN requires at exactly 3 arguments: the expression, the IN subquery, the subquery checking for nulls.")

## PRIVATE
@@ -250,7 +248,7 @@ generate_expression : Internal_Dialect -> SQL_Expression | Order_Descriptor | Qu
generate_expression dialect expr = case expr of
SQL_Expression.Column origin name ->
dialect.wrap_identifier origin ++ '.' ++ dialect.wrap_identifier name
-SQL_Expression.Constant sql_type value -> SQL.interpolation sql_type value
+SQL_Expression.Constant sql_type value -> Builder.interpolation sql_type value
SQL_Expression.Operation kind arguments ->
op = dialect.operation_map.get kind (Error.throw <| Unsupported_Database_Operation.Error kind)
parsed_args = arguments.map (generate_expression dialect)
@@ -269,7 +267,7 @@ generate_expression dialect expr = case expr of
alias : Internal_Dialect -> Text -> Builder
alias dialect name =
wrapped = dialect.wrap_identifier name
-code " AS " ++ wrapped
+Builder.code " AS " ++ wrapped

## PRIVATE

@@ -283,11 +281,11 @@ generate_from_part dialect from_spec = case from_spec of
From_Spec.Table name as_name ->
dialect.wrap_identifier name ++ alias dialect as_name
From_Spec.Query raw_sql as_name ->
-code raw_sql . paren ++ alias dialect as_name
+Builder.code raw_sql . paren ++ alias dialect as_name
From_Spec.Join kind left_spec right_spec on ->
left = generate_from_part dialect left_spec
right = generate_from_part dialect right_spec
-ons = SQL.join " AND " (on.map (generate_expression dialect)) . prefix_if_present " ON "
+ons = Builder.join " AND " (on.map (generate_expression dialect)) . prefix_if_present " ON "
left ++ (" " + kind.to_sql + " ") ++ right ++ ons
From_Spec.Sub_Query columns context as_name ->
sub = generate_query dialect (Query.Select columns context)
@@ -295,7 +293,7 @@ generate_from_part dialect from_spec = case from_spec of

## PRIVATE
fold_case = lift_unary_op "FOLD_CASE" arg->
-code "LOWER(UPPER(" ++ arg ++ "))"
+Builder.code "LOWER(UPPER(" ++ arg ++ "))"

## PRIVATE
make_case_sensitive = lift_unary_op "MAKE_CASE_SENSITIVE" _->
@@ -303,7 +301,7 @@ make_case_sensitive = lift_unary_op "MAKE_CASE_SENSITIVE" _->

## PRIVATE
simple_equals_ignore_case = Base_Generator.lift_binary_op "equals_ignore_case" a-> b->
-code "LOWER(UPPER(" ++ a ++ ")) = LOWER(UPPER(" ++ b ++ "))"
+Builder.code "LOWER(UPPER(" ++ a ++ ")) = LOWER(UPPER(" ++ b ++ "))"

## PRIVATE
make_equals a b =
@@ -347,15 +345,15 @@ generate_select_context : Internal_Dialect -> Context -> Builder
generate_select_context dialect ctx =
gen_exprs exprs = exprs.map (generate_expression dialect)
from_part = generate_from_part dialect ctx.from_spec
-where_part = (SQL.join " AND " (gen_exprs ctx.where_filters)) . prefix_if_present " WHERE "
+where_part = (Builder.join " AND " (gen_exprs ctx.where_filters)) . prefix_if_present " WHERE "
-group_part = (SQL.join ", " (gen_exprs ctx.groups)) . prefix_if_present " GROUP BY "
+group_part = (Builder.join ", " (gen_exprs ctx.groups)) . prefix_if_present " GROUP BY "
limit_part = case ctx.limit of
Nothing -> ""
_ : Integer -> " LIMIT " + ctx.limit.to_text

orders = ctx.orders.map (generate_order dialect)
-order_part = (SQL.join ", " orders) . prefix_if_present " ORDER BY "
+order_part = (Builder.join ", " orders) . prefix_if_present " ORDER BY "
-(code " FROM ") ++ from_part ++ where_part ++ group_part ++ order_part ++ limit_part
+(Builder.code " FROM ") ++ from_part ++ where_part ++ group_part ++ order_part ++ limit_part

## PRIVATE

@@ -368,11 +366,11 @@ generate_select_context dialect ctx =
expression returning a value.
generate_insert_query : Internal_Dialect -> Text -> Vector Any -> Builder
generate_insert_query dialect table_name pairs =
-names = SQL.join ", " <| pairs.map (.first >> dialect.wrap_identifier)
+names = Builder.join ", " <| pairs.map (.first >> dialect.wrap_identifier)
-values = SQL.join ", " <| pairs.map (.second >> generate_expression dialect)
+values = Builder.join ", " <| pairs.map (.second >> generate_expression dialect)
into = dialect.wrap_identifier table_name

-code "INSERT INTO " ++ into ++ " (" ++ names ++ ") VALUES (" ++ values ++ ")"
+Builder.code "INSERT INTO " ++ into ++ " (" ++ names ++ ") VALUES (" ++ values ++ ")"

## PRIVATE

@@ -386,15 +384,15 @@ generate_query dialect query = case query of
Query.Select columns ctx ->
gen_column pair = (generate_expression dialect pair.second) ++ alias dialect pair.first
cols = case columns of
-Nothing -> code "*"
+Nothing -> Builder.code "*"
-_ -> SQL.join ", " (columns.map gen_column)
+_ -> Builder.join ", " (columns.map gen_column)
prefix = case ctx.distinct_on of
-Nothing -> code ""
+Nothing -> Builder.code ""
expressions : Vector ->
# TODO I just realised this does not make sense in other backends than Postgres, so we should probably fail in such cases; probably rewrite into a generic modifier? or a transform?
-generated = SQL.join ", " (expressions.map (generate_expression dialect))
+generated = Builder.join ", " (expressions.map (generate_expression dialect))
-code "DISTINCT ON (" ++ generated ++ ") "
+Builder.code "DISTINCT ON (" ++ generated ++ ") "
-code "SELECT " ++ prefix ++ cols ++ generate_select_context dialect ctx
+Builder.code "SELECT " ++ prefix ++ cols ++ generate_select_context dialect ctx
Query.Insert table_name pairs ->
generate_insert_query dialect table_name pairs
_ -> Error.throw <| Unsupported_Database_Operation.Error "Unsupported query type."
@@ -437,10 +435,10 @@ make_concat make_raw_concat_expr make_contains_expr has_quote args =
includes_quote = make_contains_expr expr quote
is_empty = expr ++ " = ''"
needs_quoting = includes_separator.paren ++ " OR " ++ includes_quote.paren ++ " OR " ++ is_empty.paren
-escaped = code "replace(" ++ expr ++ ", " ++ quote ++ ", " ++ quote ++ append ++ quote ++ ")"
+escaped = Builder.code "replace(" ++ expr ++ ", " ++ quote ++ ", " ++ quote ++ append ++ quote ++ ")"
quoted = quote ++ append ++ escaped ++ append ++ quote
-code "CASE WHEN " ++ needs_quoting ++ " THEN " ++ quoted ++ " ELSE " ++ expr ++ " END"
+Builder.code "CASE WHEN " ++ needs_quoting ++ " THEN " ++ quoted ++ " ELSE " ++ expr ++ " END"
False -> expr
-transformed_expr = code "CASE WHEN " ++ expr ++ " IS NULL THEN '' ELSE " ++ possibly_quoted.paren ++ " END"
+transformed_expr = Builder.code "CASE WHEN " ++ expr ++ " IS NULL THEN '' ELSE " ++ possibly_quoted.paren ++ " END"
concatenated = make_raw_concat_expr transformed_expr separator
prefix.paren ++ append ++ concatenated ++ append ++ suffix.paren
@@ -78,6 +78,7 @@ type Join_Subquery_Setup
self.old_columns.zip self.new_columns old-> new->
[old.name, new]

+## PRIVATE
prepare_subqueries : Table -> Table -> Boolean -> Boolean -> Pair Join_Subquery_Setup
prepare_subqueries left right needs_left_indicator needs_right_indicator =
## If a self-join, make sure we are able to distinguish the left and
@@ -5,7 +5,7 @@ import Standard.Base.Runtime.Managed_Resource.Managed_Resource
import Standard.Table.Data.Storage.Storage
import Standard.Table.Data.Table.Table as Materialized_Table

-import project.Data.SQL
+import project.Data.SQL.Builder
import project.Data.SQL_Statement.SQL_Statement
import project.Data.SQL_Type.SQL_Type
import project.Internal.Base_Generator
@@ -195,8 +195,8 @@ create_table_statement name table temporary =
column_names = table.columns.map .name
col_makers = column_names.zip column_types name-> typ->
Base_Generator.wrap_in_quotes name ++ " " ++ typ.name
-create_prefix = SQL.code <| if temporary then "CREATE TEMPORARY TABLE " else "CREATE TABLE "
+create_prefix = Builder.code <| if temporary then "CREATE TEMPORARY TABLE " else "CREATE TABLE "
-(create_prefix ++ Base_Generator.wrap_in_quotes name ++ " (" ++ (SQL.join ", " col_makers) ++ ")").build
+(create_prefix ++ Base_Generator.wrap_in_quotes name ++ " (" ++ (Builder.join ", " col_makers) ++ ")").build

## PRIVATE
Returns the default database type corresponding to an in-memory storage type.
@@ -9,7 +9,7 @@ import Standard.Table.Internal.Problem_Builder.Problem_Builder
from Standard.Table.Data.Aggregate_Column.Aggregate_Column import all

import project.Connection.Connection.Connection
-import project.Data.SQL
+import project.Data.SQL.Builder
import project.Data.SQL_Statement.SQL_Statement
import project.Data.SQL_Type.SQL_Type
import project.Data.Table.Table
@@ -25,7 +25,6 @@ import project.Internal.IR.Nulls_Order.Nulls_Order
import project.Internal.IR.SQL_Join_Kind.SQL_Join_Kind
import project.Internal.IR.Query.Query

-from project.Data.SQL import code
from project.Errors import Unsupported_Database_Operation

## PRIVATE
@@ -152,42 +151,43 @@ resolve_target_sql_type aggregate = case aggregate of

## PRIVATE
agg_count_is_null = Base_Generator.lift_unary_op "COUNT_IS_NULL" arg->
-code "COUNT(CASE WHEN " ++ arg.paren ++ " IS NULL THEN 1 END)"
+Builder.code "COUNT(CASE WHEN " ++ arg.paren ++ " IS NULL THEN 1 END)"

## PRIVATE
agg_count_empty = Base_Generator.lift_unary_op "COUNT_EMPTY" arg->
-code "COUNT(CASE WHEN (" ++ arg.paren ++ " IS NULL) OR (" ++ arg.paren ++ " = '') THEN 1 END)"
+Builder.code "COUNT(CASE WHEN (" ++ arg.paren ++ " IS NULL) OR (" ++ arg.paren ++ " = '') THEN 1 END)"

## PRIVATE
agg_count_not_empty = Base_Generator.lift_unary_op "COUNT_NOT_EMPTY" arg->
-code "COUNT(CASE WHEN (" ++ arg.paren ++ " IS NOT NULL) AND (" ++ arg.paren ++ " != '') THEN 1 END)"
+Builder.code "COUNT(CASE WHEN (" ++ arg.paren ++ " IS NOT NULL) AND (" ++ arg.paren ++ " != '') THEN 1 END)"

## PRIVATE
agg_median = Base_Generator.lift_unary_op "MEDIAN" arg->
-median = code "percentile_cont(0.5) WITHIN GROUP (ORDER BY " ++ arg ++ ")"
+median = Builder.code "percentile_cont(0.5) WITHIN GROUP (ORDER BY " ++ arg ++ ")"
## TODO Technically, this check may not be necessary if the input column has
type INTEGER, because it is impossible to represent a NaN in that type.
However, currently the column type inference is not tested well-enough to
rely on this, so leaving an uniform approach regardless of type. This
could be revisited when further work on column types takes place.
See issue: https://www.pivotaltracker.com/story/show/180854759
-has_nan = code "bool_or(" ++ arg ++ " = double precision 'NaN')"
+has_nan = Builder.code "bool_or(" ++ arg ++ " = double precision 'NaN')"
-code "CASE WHEN " ++ has_nan ++ " THEN 'NaN' ELSE " ++ median ++ " END"
+Builder.code "CASE WHEN " ++ has_nan ++ " THEN 'NaN' ELSE " ++ median ++ " END"

## PRIVATE
agg_mode = Base_Generator.lift_unary_op "MODE" arg->
-code "mode() WITHIN GROUP (ORDER BY " ++ arg ++ ")"
+Builder.code "mode() WITHIN GROUP (ORDER BY " ++ arg ++ ")"

+## PRIVATE
agg_percentile = Base_Generator.lift_binary_op "PERCENTILE" p-> expr->
-percentile = code "percentile_cont(" ++ p ++ ") WITHIN GROUP (ORDER BY " ++ expr ++ ")"
+percentile = Builder.code "percentile_cont(" ++ p ++ ") WITHIN GROUP (ORDER BY " ++ expr ++ ")"
## TODO Technically, this check may not be necessary if the input column has
type INTEGER, because it is impossible to represent a NaN in that type.
However, currently the column type inference is not tested well-enough to
rely on this, so leaving an uniform approach regardless of type. This
could be revisited when further work on column types takes place.
See issue: https://www.pivotaltracker.com/story/show/180854759
-has_nan = code "bool_or(" ++ expr ++ " = double precision 'NaN')"
+has_nan = Builder.code "bool_or(" ++ expr ++ " = double precision 'NaN')"
-code "CASE WHEN " ++ has_nan ++ " THEN 'NaN' ELSE " ++ percentile ++ " END"
+Builder.code "CASE WHEN " ++ has_nan ++ " THEN 'NaN' ELSE " ++ percentile ++ " END"

## PRIVATE
These are written in a not most-efficient way, but a way that makes them
@@ -200,36 +200,39 @@ first_last_aggregators =
last_not_null = make_first_aggregator reverse=True ignore_null=True
[["FIRST", first], ["FIRST_NOT_NULL", first_not_null], ["LAST", last], ["LAST_NOT_NULL", last_not_null]]

+## PRIVATE
make_first_aggregator reverse ignore_null args =
if args.length < 2 then Error.throw (Illegal_State.Error "Insufficient number of arguments for the operation.") else
result_expr = args.first
order_bys = args.drop 1

filter_clause = if ignore_null.not then "" else
-code " FILTER (WHERE " ++ result_expr.paren ++ " IS NOT NULL)"
+Builder.code " FILTER (WHERE " ++ result_expr.paren ++ " IS NOT NULL)"
order_clause =
-code " ORDER BY " ++ SQL.join "," order_bys
+Builder.code " ORDER BY " ++ Builder.join "," order_bys
index_expr = case reverse of
True -> if ignore_null.not then "COUNT(*)" else
-code "COUNT(" ++ result_expr ++ ")"
+Builder.code "COUNT(" ++ result_expr ++ ")"
False -> "1"

-code "(array_agg(" ++ result_expr.paren ++ order_clause ++ ")" ++ filter_clause ++ ")[" ++ index_expr ++ "]"
+Builder.code "(array_agg(" ++ result_expr.paren ++ order_clause ++ ")" ++ filter_clause ++ ")[" ++ index_expr ++ "]"

+## PRIVATE
agg_shortest = Base_Generator.lift_unary_op "SHORTEST" arg->
order_clause =
-code " ORDER BY char_length(" ++ arg ++ ") ASC NULLS LAST"
+Builder.code " ORDER BY char_length(" ++ arg ++ ") ASC NULLS LAST"
-code "(array_agg(" ++ arg.paren ++ order_clause ++ "))[1]"
+Builder.code "(array_agg(" ++ arg.paren ++ order_clause ++ "))[1]"

+## PRIVATE
agg_longest = Base_Generator.lift_unary_op "LONGEST" arg->
order_clause =
-code " ORDER BY char_length(" ++ arg ++ ") DESC NULLS LAST"
+Builder.code " ORDER BY char_length(" ++ arg ++ ") DESC NULLS LAST"
-code "(array_agg(" ++ arg.paren ++ order_clause ++ "))[1]"
+Builder.code "(array_agg(" ++ arg.paren ++ order_clause ++ "))[1]"

## PRIVATE
concat_ops =
make_raw_concat_expr expr separator =
-code "string_agg(" ++ expr ++ ", " ++ separator ++ ")"
+Builder.code "string_agg(" ++ expr ++ ", " ++ separator ++ ")"
concat = Base_Generator.make_concat make_raw_concat_expr make_contains_expr
[["CONCAT", concat (has_quote=False)], ["CONCAT_QUOTE_IF_NEEDED", concat (has_quote=True)]]

@@ -239,24 +242,24 @@ agg_count_distinct args = if args.is_empty then (Error.throw (Illegal_Argument.E
case args.length == 1 of
True ->
## A single null value will be skipped.
-code "COUNT(DISTINCT " ++ args.first ++ ")"
+Builder.code "COUNT(DISTINCT " ++ args.first ++ ")"
False ->
## A tuple of nulls is not a null, so it will not be skipped - but
we want to ignore all-null columns. So we manually filter them
out.
-count = code "COUNT(DISTINCT (" ++ SQL.join ", " args ++ "))"
+count = Builder.code "COUNT(DISTINCT (" ++ Builder.join ", " args ++ "))"
are_nulls = args.map arg-> arg.paren ++ " IS NULL"
-all_nulls_filter = code " FILTER (WHERE NOT (" ++ SQL.join " AND " are_nulls ++ "))"
+all_nulls_filter = Builder.code " FILTER (WHERE NOT (" ++ Builder.join " AND " are_nulls ++ "))"
(count ++ all_nulls_filter).paren

## PRIVATE
agg_count_distinct_include_null args =
## If we always count as tuples, then even null fields are counted.
-code "COUNT(DISTINCT (" ++ SQL.join ", " args ++ ", 0))"
+Builder.code "COUNT(DISTINCT (" ++ Builder.join ", " args ++ ", 0))"

## PRIVATE
starts_with = Base_Generator.lift_binary_op "starts_with" str-> sub->
-res = code "starts_with(" ++ str ++ "," ++ sub ++ ")"
+res = Builder.code "starts_with(" ++ str ++ "," ++ sub ++ ")"
res.paren

## PRIVATE
@@ -266,11 +269,11 @@ ends_with = Base_Generator.lift_binary_op "ends_with" str-> sub->

## PRIVATE
make_case_sensitive = Base_Generator.lift_unary_op "MAKE_CASE_SENSITIVE" arg->
-code "((" ++ arg ++ ') COLLATE "ucs_basic")'
+Builder.code "((" ++ arg ++ ') COLLATE "ucs_basic")'

## PRIVATE
make_contains_expr expr substring =
-code "position(" ++ substring ++ " in " ++ expr ++ ") > 0"
+Builder.code "position(" ++ substring ++ " in " ++ expr ++ ") > 0"

## PRIVATE
contains = Base_Generator.lift_binary_op "contains" make_contains_expr
|
|||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
bool_or = Base_Generator.lift_unary_op "BOOL_OR" arg->
|
bool_or = Base_Generator.lift_unary_op "BOOL_OR" arg->
|
||||||
code "bool_or(" ++ arg ++ ")"
|
Builder.code "bool_or(" ++ arg ++ ")"
|
||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
decimal_div = Base_Generator.lift_binary_op "/" x-> y->
|
decimal_div = Base_Generator.lift_binary_op "/" x-> y->
|
||||||
code "CAST(" ++ x ++ " AS double precision) / CAST(" ++ y ++ " AS double precision)"
|
Builder.code "CAST(" ++ x ++ " AS double precision) / CAST(" ++ y ++ " AS double precision)"
|
||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
mod_op = Base_Generator.lift_binary_op "mod" x-> y->
|
mod_op = Base_Generator.lift_binary_op "mod" x-> y->
|
||||||
|
@@ -8,7 +8,7 @@ import Standard.Table.Internal.Problem_Builder.Problem_Builder
from Standard.Table.Data.Aggregate_Column.Aggregate_Column import all

import project.Connection.Connection.Connection
-import project.Data.SQL
+import project.Data.SQL.Builder
import project.Data.SQL_Statement.SQL_Statement
import project.Data.SQL_Type.SQL_Type
import project.Data.Table.Table
@@ -22,7 +22,6 @@ import project.Internal.IR.Query.Query
import project.Internal.Common.Database_Distinct_Helper
import project.Internal.Common.Database_Join_Helper

-from project.Data.SQL import code
from project.Errors import Unsupported_Database_Operation

## PRIVATE
@@ -168,31 +167,31 @@ unsupported name =

## PRIVATE
agg_count_is_null = Base_Generator.lift_unary_op "COUNT_IS_NULL" arg->
-code "COALESCE(SUM(" ++ arg.paren ++ " IS NULL), 0)"
+Builder.code "COALESCE(SUM(" ++ arg.paren ++ " IS NULL), 0)"

## PRIVATE
agg_count_empty = Base_Generator.lift_unary_op "COUNT_EMPTY" arg->
-code "COALESCE(SUM((" ++ arg.paren ++ " IS NULL) OR (" ++ arg.paren ++ " == '')), 0)"
+Builder.code "COALESCE(SUM((" ++ arg.paren ++ " IS NULL) OR (" ++ arg.paren ++ " == '')), 0)"

## PRIVATE
agg_count_not_empty = Base_Generator.lift_unary_op "COUNT_NOT_EMPTY" arg->
-code "COALESCE(SUM((" ++ arg.paren ++ " IS NOT NULL) AND (" ++ arg.paren ++ " != '')), 0)"
+Builder.code "COALESCE(SUM((" ++ arg.paren ++ " IS NOT NULL) AND (" ++ arg.paren ++ " != '')), 0)"

## PRIVATE
agg_stddev_pop = Base_Generator.lift_unary_op "STDDEV_POP" arg->
-sum_of_squares = code "SUM(" ++ arg.paren ++ "*" ++ arg.paren ++ ")"
+sum_of_squares = Builder.code "SUM(" ++ arg.paren ++ "*" ++ arg.paren ++ ")"
-square_of_sums = code "SUM(" ++ arg ++ ") * SUM(" ++ arg ++ ")"
+square_of_sums = Builder.code "SUM(" ++ arg ++ ") * SUM(" ++ arg ++ ")"
-n = code "CAST(COUNT(" ++ arg ++ ") AS REAL)"
+n = Builder.code "CAST(COUNT(" ++ arg ++ ") AS REAL)"
-var = code "(" ++ sum_of_squares ++ " - (" ++ square_of_sums ++ " / " ++ n ++ ")) / " ++ n
+var = Builder.code "(" ++ sum_of_squares ++ " - (" ++ square_of_sums ++ " / " ++ n ++ ")) / " ++ n
-code "SQRT(" ++ var ++ ")"
+Builder.code "SQRT(" ++ var ++ ")"

## PRIVATE
agg_stddev_samp = Base_Generator.lift_unary_op "STDDEV_SAMP" arg->
-sum_of_squares = code "SUM(" ++ arg.paren ++ "*" ++ arg.paren ++ ")"
+sum_of_squares = Builder.code "SUM(" ++ arg.paren ++ "*" ++ arg.paren ++ ")"
-square_of_sums = code "SUM(" ++ arg ++ ") * SUM(" ++ arg ++ ")"
+square_of_sums = Builder.code "SUM(" ++ arg ++ ") * SUM(" ++ arg ++ ")"
-n = code "CAST(COUNT(" ++ arg ++ ") AS REAL)"
+n = Builder.code "CAST(COUNT(" ++ arg ++ ") AS REAL)"
-var = code "(" ++ sum_of_squares ++ " - (" ++ square_of_sums ++ " / " ++ n ++ ")) / (" ++ n ++ " - 1)"
+var = Builder.code "(" ++ sum_of_squares ++ " - (" ++ square_of_sums ++ " / " ++ n ++ ")) / (" ++ n ++ " - 1)"
-code "SQRT(" ++ var ++ ")"
+Builder.code "SQRT(" ++ var ++ ")"

## PRIVATE
This is a prototype that doesn't work correctly. Left for reference for
@ -212,30 +211,30 @@ window_aggregate window_type ignore_null args =
|
|||||||
result_expr = args.first
|
result_expr = args.first
|
||||||
order_exprs = args.drop 1
|
order_exprs = args.drop 1
|
||||||
|
|
||||||
filter_clause = if ignore_null.not then code "" else
|
filter_clause = if ignore_null.not then Builder.code "" else
|
||||||
code " FILTER (WHERE " ++ result_expr.paren ++ " IS NOT NULL)"
|
Builder.code " FILTER (WHERE " ++ result_expr.paren ++ " IS NOT NULL)"
|
||||||
|
|
||||||
code window_type+"(" ++ result_expr ++ ")" ++ filter_clause ++ " OVER (ORDER BY " ++ SQL.join "," order_exprs ++ ")"
|
Builder.code window_type+"(" ++ result_expr ++ ")" ++ filter_clause ++ " OVER (ORDER BY " ++ Builder.join "," order_exprs ++ ")"
|
||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
concat_ops =
|
concat_ops =
|
||||||
make_raw_concat_expr expr separator =
|
make_raw_concat_expr expr separator =
|
||||||
code "group_concat(" ++ expr ++ ", " ++ separator ++ ")"
|
Builder.code "group_concat(" ++ expr ++ ", " ++ separator ++ ")"
|
||||||
concat = Base_Generator.make_concat make_raw_concat_expr make_contains_expr
|
concat = Base_Generator.make_concat make_raw_concat_expr make_contains_expr
|
||||||
[["CONCAT", concat (has_quote=False)], ["CONCAT_QUOTE_IF_NEEDED", concat (has_quote=True)]]
|
[["CONCAT", concat (has_quote=False)], ["CONCAT_QUOTE_IF_NEEDED", concat (has_quote=True)]]
|
||||||
|
|
||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
agg_count_distinct args = case args.length == 1 of
|
agg_count_distinct args = case args.length == 1 of
|
||||||
True -> code "COUNT(DISTINCT (" ++ args.first ++ "))"
|
True -> Builder.code "COUNT(DISTINCT (" ++ args.first ++ "))"
|
||||||
False -> Error.throw (Illegal_Argument.Error "COUNT_DISTINCT supports only single arguments in SQLite.")
|
False -> Error.throw (Illegal_Argument.Error "COUNT_DISTINCT supports only single arguments in SQLite.")
|
||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
agg_count_distinct_include_null args = case args.length == 1 of
|
agg_count_distinct_include_null args = case args.length == 1 of
|
||||||
True ->
|
True ->
|
||||||
arg = args.first
|
arg = args.first
|
||||||
count = code "COUNT(DISTINCT " ++ arg ++ ")"
|
count = Builder.code "COUNT(DISTINCT " ++ arg ++ ")"
|
||||||
all_nulls_case = code "CASE WHEN COUNT(CASE WHEN " ++ arg ++ "IS NULL THEN 1 END) > 0 THEN 1 ELSE 0 END"
|
all_nulls_case = Builder.code "CASE WHEN COUNT(CASE WHEN " ++ arg ++ "IS NULL THEN 1 END) > 0 THEN 1 ELSE 0 END"
|
||||||
count ++ " + " ++ all_nulls_case
|
count ++ " + " ++ all_nulls_case
|
||||||
False -> Error.throw (Illegal_Argument.Error "COUNT_DISTINCT supports only single arguments in SQLite.")
|
False -> Error.throw (Illegal_Argument.Error "COUNT_DISTINCT supports only single arguments in SQLite.")
|
||||||
|
|
||||||
@ -251,22 +250,22 @@ ends_with = Base_Generator.lift_binary_op "ends_with" str-> sub->
|
|||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
make_case_sensitive = Base_Generator.lift_unary_op "MAKE_CASE_SENSITIVE" arg->
|
make_case_sensitive = Base_Generator.lift_unary_op "MAKE_CASE_SENSITIVE" arg->
|
||||||
code "((" ++ arg ++ ") COLLATE BINARY)"
|
Builder.code "((" ++ arg ++ ") COLLATE BINARY)"
|
||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
make_contains_expr expr substring =
|
make_contains_expr expr substring =
|
||||||
code "instr(" ++ expr ++ ", " ++ substring ++ ") > 0"
|
Builder.code "instr(" ++ expr ++ ", " ++ substring ++ ") > 0"
|
||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
contains = Base_Generator.lift_binary_op "contains" make_contains_expr
|
contains = Base_Generator.lift_binary_op "contains" make_contains_expr
|
||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
bool_or = Base_Generator.lift_unary_op "BOOL_OR" arg->
|
bool_or = Base_Generator.lift_unary_op "BOOL_OR" arg->
|
||||||
code "max(" ++ arg ++ ")"
|
Builder.code "max(" ++ arg ++ ")"
|
||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
decimal_div = Base_Generator.lift_binary_op "/" x-> y->
|
decimal_div = Base_Generator.lift_binary_op "/" x-> y->
|
||||||
code "CAST(" ++ x ++ " AS REAL) / CAST(" ++ y ++ " AS REAL)"
|
Builder.code "CAST(" ++ x ++ " AS REAL) / CAST(" ++ y ++ " AS REAL)"
|
||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
mod_op = Base_Generator.lift_binary_op "mod" x-> y->
|
mod_op = Base_Generator.lift_binary_op "mod" x-> y->
|
||||||
|
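Note: with `from project.Data.SQL import code` dropped, every SQL fragment in this dialect is now built through the module brought in by `import project.Data.SQL.Builder`. A minimal sketch of the resulting style for a unary operation (the "MY_MAX" name is hypothetical, chosen only to show the shape; it mirrors the `bool_or` definition above):

    ## PRIVATE
    my_max = Base_Generator.lift_unary_op "MY_MAX" arg->
        # `code` is no longer imported unqualified, so the call is qualified with Builder.
        Builder.code "max(" ++ arg ++ ")"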
@@ -3,7 +3,8 @@ import Standard.Base.Errors.Illegal_Argument.Illegal_Argument

 import project.Data.Image.Image

-## List comes from org.opencv.imgcodecs.Imgcodecs#imread doc comment.
+## PRIVATE
+List comes from org.opencv.imgcodecs.Imgcodecs#imread doc comment.
 supported = [".bmp", ".dib", ".jpeg", ".jpg", ".jpe", ".jp2", ".png", ".webp", ".pbm", ".pgm", ".ppm", ".pxm", ".pnm", ".pfm", ".sr", ".ras", ".tiff", ".tif", ".exr", ".hdr", ".pic"]

 ## Read the file to a `Image` from a supported file format.
@@ -5,9 +5,9 @@ import Standard.Base.Errors.Unimplemented.Unimplemented

 import project.Data.Table.Table
 import project.Delimited.Delimited_Format.Delimited_Format
-import project.Delimited.Delimited_Reader
-import project.Delimited.Delimited_Writer
 import project.Errors.Invalid_JSON_Format
+import project.Internal.Delimited_Reader
+import project.Internal.Delimited_Writer

 Table.from (that : Text) (format:Delimited_Format = Delimited_Format.Delimited '\t') (on_problems:Problem_Behavior=Report_Warning) =
 case format of
@@ -4,9 +4,9 @@ import Standard.Base.Network.HTTP.Response.Response
 import project.Data.Table.Table
 import project.Data.Data_Formatter.Data_Formatter
 import project.Data.Match_Columns.Match_Columns
-import project.Delimited.Delimited_Reader
-import project.Delimited.Delimited_Writer
 import project.Delimited.Quote_Style.Quote_Style
+import project.Internal.Delimited_Reader
+import project.Internal.Delimited_Writer

 ## Read delimited files such as CSVs into a Table.
 type Delimited_Format
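Note: the delimited reader and writer modules referenced above now live under `Internal`. Assuming the usual Enso rule that the last segment of an import path is what the code refers to, call sites keep the short `Delimited_Reader`/`Delimited_Writer` names and only the import line changes; a condensed sketch using the `read_text` entry point shown later in this commit:

    import project.Internal.Delimited_Reader

    # `text`, `format` and `on_problems` are placeholders, shown only for illustration.
    table = Delimited_Reader.read_text text format on_problems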
@@ -3,10 +3,10 @@ import Standard.Base.Errors.Illegal_Argument.Illegal_Argument

 import project.Data.Table.Table
 import project.Data.Match_Columns.Match_Columns
-import project.Excel.Excel_Reader
 import project.Excel.Excel_Workbook.Excel_Workbook
 import project.Excel.Excel_Section.Excel_Section
-import project.Excel.Excel_Writer
+import project.Internal.Excel_Reader
+import project.Internal.Excel_Writer

 ## PRIVATE
 Resolve the xls_format setting to a boolean.
@@ -9,7 +9,7 @@ import Standard.Base.Metadata.Display

 import project.Data.Table.Table
 import project.Excel.Excel_Range.Excel_Range
-import project.Excel.Excel_Reader
+import project.Internal.Excel_Reader

 polyglot java import org.enso.table.read.ExcelReader
 polyglot java import org.apache.poi.ss.usermodel.Workbook
@@ -110,7 +110,8 @@ default_aggregate_column_name aggregate_column include_column=True =
 c = aggregate_column.column
 prefix + " " + (if include_column then c.name else "")

-## Utility function to check if all same column
+## PRIVATE
+Utility function to check if all aggregates are operating on the same source column.
 all_same_column : Vector Aggregate_Column -> Boolean
 all_same_column aggregates =
 is_not_count c = case c of
@@ -24,7 +24,8 @@ polyglot java import org.enso.table.parsing.TypeInferringParser
 polyglot java import org.enso.table.read.QuoteStrippingParser
 polyglot java import org.enso.table.parsing.problems.MismatchedQuote

-## Reads a delimited file according to the provided format.
+## PRIVATE
+Reads a delimited file according to the provided format.

 Arguments:
 - format: The specification of the delimited file format.
@@ -45,6 +46,7 @@ read_file format file on_problems =
 result.catch Mismatched_Quote error->
 Error.throw (File_Error.Corrupted_Format file error.to_display_text error)

+## PRIVATE
 read_text : Text -> Delimited_Format -> Problem_Behavior -> Table
 read_text text format on_problems =
 java_reader = StringReader.new text
@@ -134,6 +136,7 @@ type Detected_Headers
 ## Indicates that the file exists but no headers have been found, so only positional column matching is possible.
 None (column_count : Integer)

+## PRIVATE
 type Detected_File_Metadata
 ## PRIVATE
 An internal type representing metadata describing the format of a specific
@@ -9,8 +9,8 @@ import project.Data.Match_Columns.Match_Columns
 import project.Data.Storage.Storage
 import project.Delimited.Delimited_Format.Delimited_Format
 import project.Delimited.Quote_Style.Quote_Style
-import project.Delimited.Delimited_Reader
+import project.Internal.Delimited_Reader
-import project.Delimited.Delimited_Reader.Detected_Headers
+import project.Internal.Delimited_Reader.Detected_Headers
 import project.Internal.Java_Problems

 from project.Errors import Column_Count_Mismatch, Column_Name_Mismatch
@@ -24,7 +24,8 @@ polyglot java import java.io.StringWriter
 polyglot java import java.io.IOException
 polyglot java import java.io.Writer

-## Writes a delimited file according to the provided format.
+## PRIVATE
+Writes a delimited file according to the provided format.

 Arguments:
 - table: The table to serialize.
@@ -30,8 +30,7 @@ prepare_reader_table on_problems result_with_problems =

 ## PRIVATE
 Convert Boolean|Infer to the correct HeaderBehavior
-# TODO[DB] Fix composite types #183857386
-# make_java_headers : (Boolean | Infer) -> ExcelHeaders.HeaderBehavior
+make_java_headers : (Boolean | Infer) -> ExcelHeaders.HeaderBehavior
 make_java_headers headers = case headers of
 True -> ExcelHeaders.HeaderBehavior.USE_FIRST_ROW_AS_HEADERS
 Infer -> ExcelHeaders.HeaderBehavior.INFER
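Note: the previously commented-out composite-type signature is now real, and the mapping is exactly what the case branches above spell out, e.g.:

    # make_java_headers True  returns ExcelHeaders.HeaderBehavior.USE_FIRST_ROW_AS_HEADERS
    # make_java_headers Infer returns ExcelHeaders.HeaderBehavior.INFER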
@@ -6,7 +6,7 @@ import project.Data.Table.Table
 import project.Data.Match_Columns.Match_Columns
 import project.Excel.Excel_Range.Excel_Range
 import project.Excel.Excel_Section.Excel_Section
-import project.Excel.Excel_Reader
+import project.Internal.Excel_Reader
 from project.Errors import Invalid_Location, Range_Exceeded, Existing_Data, Column_Count_Mismatch, Column_Name_Mismatch

 polyglot java import org.enso.table.read.ExcelReader
@@ -1,7 +1,6 @@
 from Standard.Base import all

-## UNSTABLE
-
+## PRIVATE
 A function that throws an error to indicate that a file is being uploaded to
 `path`.

@@ -143,6 +143,8 @@ Vector.default_visualization self = Id.table
 Vector.to_default_visualization_data : Text
 Vector.to_default_visualization_data self = render_vector self

+## PRIVATE
+Internal function to convert a Vector to a JSON string.
 render_vector object depth=0 max_depth=5 max_length=100 =
 case object of
 _ : Vector -> if depth == max_depth then "[...]" else
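Note: `Vector.to_default_visualization_data` above simply delegates to `render_vector`, so a call such as the one below yields the JSON text for the vector, with nesting beyond `max_depth` collapsed to "[...]" as the branch above shows:

    [1, 2, 3].to_default_visualization_data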
@@ -36,7 +36,8 @@ type Table_Update
 Json.from_pairs [chunks, table_specification_update]


-## Return a sub-window of a table. The window is defined by a cell row/col and line/chunk
+## PRIVATE
+Return a sub-window of a table. The window is defined by a cell row/col and line/chunk
 coordinate as origin and the extent of the window in text chunks and lines. The size of
 a chunk (the characters in it) is defined by `chunk_width`. The output is formatted as a message
 that can be sent to the IDE's lazy text visualisation.
@@ -88,7 +89,8 @@ compute_table_update table table_cell_position text_window_position text_window_
 layout = Table_Specification_Update.Value row_heights column_widths column_names []
 Table_Update.Value chunks layout

-## Returns a vector that contains a pairs of row index and vector of corresponding lines indices.
+## PRIVATE
+Returns a vector that contains a pairs of row index and vector of corresponding lines indices.
 compute_vertical_indices table start_row end_row start_line lines_to_get =
 ## agg is a Vector of `[processed_lines, initial_offset, result_indices]`
 process_line agg row_ix =
@@ -114,7 +116,8 @@ compute_vertical_indices table start_row end_row start_line lines_to_get =
 result_agg = row_ix_iter.to_vector.fold agg process_line
 result_agg.get 2 . flatten

-## Compute the text chunks for the row/line defined by the given indices limited to the given
+## PRIVATE
+Compute the text chunks for the row/line defined by the given indices limited to the given
 column indices. The number of chunks to get is defined by `chunks_to_get`.
 get_chunks_for_row table row_ix line_ix initial_chunk_offset column_range chunk_size chunks_to_get =
 process_cell agg column_ix =
@@ -136,27 +139,32 @@ get_chunks_for_row table row_ix line_ix initial_chunk_offset column_range chunk_
 if column_indices == [] then [] else
 (fold_map [0, initial_chunk_offset] process_cell column_indices).flatten

-## Return the max value in the given vector.
+## PRIVATE
+Return the max value in the given vector.
 max : Vector Integer -> Integer
 max vector =
 vector.fold 0 (l -> r -> Math.max l r)

-## Return the longest line in the given text.
+## PRIVATE
+Return the longest line in the given text.
 get_longest_line : Text -> Integer
 get_longest_line text =
 max (text.lines.map (line -> line.length))

-## Return the length of the longest line in the given column.
+## PRIVATE
+Return the length of the longest line in the given column.
 get_column_width column =
 max (column.to_vector.map (x -> get_longest_line x.to_text))

-## Return the height of the row defined by the given index.
+## PRIVATE
+Return the height of the row defined by the given index.
 get_row_height table row_ix =
 columns = table.columns
 row = columns.map (column -> column.at row_ix)
 max (row.map (x -> x.to_text.lines.length))

-## Return the index of the first item in the given vector that brings the cummulative sum of items
+## PRIVATE
+Return the index of the first item in the given vector that brings the cummulative sum of items
 above the target value. If no such item exists, return `Nothing`.
 find_first_over_cum_sum : Vector Integer -> Integer -> Integer | Nothing
 find_first_over_cum_sum items target =
@@ -166,7 +174,8 @@ find_first_over_cum_sum items target =
 Not_Found -> Nothing
 value -> value.get 0

-## Return the index of the column that is at the end of the given text width, when starting from the
+## PRIVATE
+Return the index of the column that is at the end of the given text width, when starting from the
 given start column index.
 find_end_column table start_column_ix chunks chunk_size =
 table_columns_count = table.columns.length
@@ -178,7 +187,8 @@ find_end_column table start_column_ix chunks chunk_size =
 Nothing -> table_columns_count - 1
 value -> value + start_column_ix

-## Return the index of the row that is at the end of the given text height, when starting from the
+## PRIVATE
+Return the index of the row that is at the end of the given text height, when starting from the
 given start row index.
 find_end_row table start_row_ix max_height =
 table_row_count = (table.columns.get 0).length
@@ -189,7 +199,8 @@ find_end_row table start_row_ix max_height =
 Nothing -> table_row_count - 1
 value -> value + start_row_ix

-## Helper for fold_map that takes a function, an accumulator value and the current item and returns
+## PRIVATE
+Helper for fold_map that takes a function, an accumulator value and the current item and returns
 a tuple of the new accumulator value and the result of the function.
 fold_map_inner f acc item =
 previous_mappings = acc.first
@@ -200,20 +211,23 @@ fold_map_inner f acc item =
 new_mappings = previous_mappings + [current_mapping]
 Pair.new new_mappings new_acc_value

-## Map a function over a vectors, but also pass on a accumulator value from one step to the next.
+## PRIVATE
+Map a function over a vectors, but also pass on a accumulator value from one step to the next.
 The function must return a tuple of the result of the function and the new accumulator value.
 fold_map acc f iterable =
 result = iterable.fold (Pair.new [] acc) (fold_map_inner f)
 result.first

-## Return a vector of the cumulative sum of the given vector.
+## PRIVATE
+Return a vector of the cumulative sum of the given vector.
 map_to_cumulative_sum iterable =
 map_running_sums previous_sum current =
 running_sum = previous_sum + current
 [running_sum, running_sum]
 fold_map 0 map_running_sums iterable

-## Return the given vector where each item is mapped to itself and its index in the vector.
+## PRIVATE
+Return the given vector where each item is mapped to itself and its index in the vector.
 enumerate : Vector Any -> Vector Any
 enumerate vector =
 (0.up_to vector.length).to_vector.zip vector
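Note: the small helpers marked PRIVATE above compose into the lazy-table window computation. A sketch of what two of them produce, with inputs chosen purely for illustration and the results inferred from the folds shown in this hunk:

    # fold_map threads the running total as the accumulator, giving cumulative sums.
    map_to_cumulative_sum [1, 2, 3]    # expected [1, 3, 6]

    # enumerate zips each element with its index from 0.up_to vector.length.
    enumerate ['a', 'b', 'c']          # expected [[0, 'a'], [1, 'b'], [2, 'c']]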
@@ -3,7 +3,6 @@ from Standard.Base import all
 from Standard.Base.Data.Text.Extensions import slice_text

 ## PRIVATE
-
 Message to be sent to the IDE.
 type Message

@@ -19,7 +18,8 @@ type Message
 max_line_length = ["longest_line", self.max_line_length]
 Json.from_pairs [chunks, line_count, max_line_length]

-## Return a sub-window of a string. The window is defined by line/chunk coordinates. The size of
+## PRIVATE
+Return a sub-window of a string. The window is defined by line/chunk coordinates. The size of
 a chunk is defined by `chunk_width`. The output is formatted as a message that can be sent to
 the IDE's lazy text visualisation.
 get_lazy_visualisation_text_window text pos size chunk_width =
@@ -112,12 +112,13 @@ class PolyglotTest extends InterpreterTest {
 "fail to match on Polyglot symbol when imported everything from stdlib" in {
 val code =
 """from Standard.Base import all
+|import Standard.Base.Polyglot as Polyglot_Module
 |polyglot java import java.util.Random
 |
 |main =
 | random_gen = Random.new
 | case random_gen of
-| Polyglot -> IO.println "OK"
+| Polyglot_Module -> IO.println "OK"
 | _ -> IO.println "FAIL"
 |""".stripMargin
 eval(code)
@@ -127,7 +128,7 @@ class PolyglotTest extends InterpreterTest {

 "match on Polyglot type when explicitly importing everything from Polyglot module" in {
 val code =
-"""from Standard.Base.Polyglot import all
+"""import Standard.Base.Polyglot.Polyglot
 |import Standard.Base.IO
 |polyglot java import java.util.Random
 |
@@ -110,10 +110,10 @@ spec = Test.group "Pattern Matches" <|
 Test.specify "should be able to match on the Polyglot type" <|
 random_gen = Random.new
 case random_gen of
-Polyglot.Polyglot -> Nothing
+Polyglot -> Nothing
 _ -> Test.fail "Expected a polyglot object to match."
-case Polyglot.Polyglot of
+case Polyglot of
-Polyglot.Polyglot -> Nothing
+Polyglot -> Nothing
 _ -> Test.fail "Expected the Polyglot constructor to match."
 Test.specify "should be able to match on the Any type" <|
 value_1 = 1.23143
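Note: because `Polyglot` is now re-exported as a type from `Standard.Base`, polyglot values can be matched without qualifying the module, exactly as the updated spec above does. A condensed standalone sketch built only from constructs appearing in this commit:

    from Standard.Base import all
    polyglot java import java.util.Random

    main =
        case Random.new of
            Polyglot -> IO.println "matched a polyglot value"
            _ -> IO.println "no match"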
@@ -1,5 +1,4 @@
 polyglot java import java.time.LocalDate
-import project.Polyglot

 new year (month = 1) (day = 1) = (LocalDate.of year month day) . internal_local_date

@@ -31,12 +31,12 @@ from project.Data.Numbers export Number, Integer
 import project.Data.Vector.Vector
 export project.Data.Vector.Vector

-import project.Polyglot
-export project.Polyglot
-
 import project.Polyglot.Java
 export project.Polyglot.Java

+import project.Polyglot.Polyglot
+export project.Polyglot.Polyglot
+
 import project.Runtime
 export project.Runtime

@@ -1,6 +1,5 @@
 @Builtin_Type
 type Polyglot
-
 get_array_size array = @Builtin_Method "Polyglot.get_array_size"
 execute callable arguments = @Builtin_Method "Polyglot.execute"
 get_member object member_name = @Builtin_Method "Polyglot.get_member"
@@ -11,3 +10,6 @@ has_source_location value = @Builtin_Method "Polyglot.has_source_location"
 get_source_location value = @Builtin_Method "Polyglot.get_source_location"
 is_language_installed language_name = @Builtin_Method "Polyglot.is_language_installed"
 get_executable_name = @Builtin_Method "Polyglot.get_executable_name"
+
+type Java
+lookup_class name = @Builtin_Method "Java.lookup_class"
@@ -1 +0,0 @@
-lookup_class name = @Builtin_Method "Java.lookup_class"
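Note: with the one-line `Java.enso` removed, the `Java` type now lives next to `Polyglot` and stays re-exported from `Standard.Base` (see the export list above), so class lookup is still reached through the same name. A minimal sketch, with the class name chosen only for illustration:

    from Standard.Base import all

    main =
        # lookup_class is the builtin method shown in the hunk above.
        random_class = Java.lookup_class "java.util.Random"
        IO.println random_class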