From 12267c6d98e59a635f58ed0cfad023563bda8586 Mon Sep 17 00:00:00 2001 From: Sergei Garin Date: Mon, 28 Oct 2024 17:44:23 +0300 Subject: [PATCH 01/43] Batch refetches for different `listDirectory` queries (#11380) This PR moves invalidation of the `listDirectory` queries into a separate `useQuery` to sync mutliple invalidations into a single one --- app/gui/src/dashboard/layouts/AssetsTable.tsx | 30 ++++++++++--------- 1 file changed, 16 insertions(+), 14 deletions(-) diff --git a/app/gui/src/dashboard/layouts/AssetsTable.tsx b/app/gui/src/dashboard/layouts/AssetsTable.tsx index 63106feba8..9bbf3bff99 100644 --- a/app/gui/src/dashboard/layouts/AssetsTable.tsx +++ b/app/gui/src/dashboard/layouts/AssetsTable.tsx @@ -20,6 +20,7 @@ import { queryOptions, useMutation, useQueries, + useQuery, useQueryClient, useSuspenseQuery, } from '@tanstack/react-query' @@ -465,24 +466,11 @@ export default function AssetsTable(props: AssetsTableProps) { } }, - refetchInterval: - enableAssetsTableBackgroundRefresh ? assetsTableBackgroundRefreshInterval : false, - refetchOnMount: 'always', - refetchIntervalInBackground: false, - refetchOnWindowFocus: true, - enabled: !hidden, meta: { persist: false }, }), ), - [ - hidden, - backend, - category, - expandedDirectoryIds, - assetsTableBackgroundRefreshInterval, - enableAssetsTableBackgroundRefresh, - ], + [hidden, backend, category, expandedDirectoryIds], ), combine: (results) => { const rootQuery = results[expandedDirectoryIds.indexOf(rootDirectory.id)] @@ -509,6 +497,20 @@ export default function AssetsTable(props: AssetsTableProps) { }, }) + // We use a different query to refetch the directory data in the background. + // This reduces the amount of rerenders by batching them together, so they happen less often. + useQuery({ + queryKey: [backend.type, 'refetchListDirectory'], + queryFn: () => queryClient.refetchQueries({ queryKey: [backend.type, 'listDirectory'] }), + refetchInterval: + enableAssetsTableBackgroundRefresh ? 
assetsTableBackgroundRefreshInterval : false, + refetchOnMount: 'always', + refetchIntervalInBackground: false, + refetchOnWindowFocus: true, + enabled: !hidden, + meta: { persist: false }, + }) + /** Return type of the query function for the listDirectory query. */ type DirectoryQuery = typeof directories.rootDirectory.data From 5bf064f97f0fe08453c04dbf9dbf250abdb06a2d Mon Sep 17 00:00:00 2001 From: Sergei Garin Date: Mon, 28 Oct 2024 18:53:40 +0300 Subject: [PATCH 02/43] Open enso devtools on call of toggleDevtools() (#11423) This PR changes the behavior of `toggleDevtools()` function and shows `ensoDevtools` with `tanstack` devtools --- app/gui/src/dashboard/App.tsx | 12 +++---- .../components/Devtools/EnsoDevtools.tsx | 8 +++++ .../Devtools/EnsoDevtoolsProvider.tsx | 34 ++++++++++++++++++- .../Devtools/ReactQueryDevtools.tsx | 9 ++--- app/gui/src/dashboard/index.tsx | 32 +++++++++-------- 5 files changed, 65 insertions(+), 30 deletions(-) diff --git a/app/gui/src/dashboard/App.tsx b/app/gui/src/dashboard/App.tsx index 7b43c605ff..0cd3820ce0 100644 --- a/app/gui/src/dashboard/App.tsx +++ b/app/gui/src/dashboard/App.tsx @@ -519,13 +519,11 @@ function AppRouter(props: AppRouterProps) { {routes} - {detect.IS_DEV_MODE && ( - - - - - - )} + + + + + diff --git a/app/gui/src/dashboard/components/Devtools/EnsoDevtools.tsx b/app/gui/src/dashboard/components/Devtools/EnsoDevtools.tsx index fbe1a7f1e6..93148def8d 100644 --- a/app/gui/src/dashboard/components/Devtools/EnsoDevtools.tsx +++ b/app/gui/src/dashboard/components/Devtools/EnsoDevtools.tsx @@ -24,6 +24,7 @@ import { useEnableVersionChecker, usePaywallDevtools, useSetEnableVersionChecker, + useShowDevtools, } from './EnsoDevtoolsProvider' import * as ariaComponents from '#/components/AriaComponents' @@ -54,6 +55,9 @@ export function EnsoDevtools() { const { authQueryKey, session } = authProvider.useAuth() const queryClient = reactQuery.useQueryClient() const { getFeature } = billing.usePaywallFeatures() + + const 
showDevtools = useShowDevtools() + const { features, setFeature } = usePaywallDevtools() const enableVersionChecker = useEnableVersionChecker() const setEnableVersionChecker = useSetEnableVersionChecker() @@ -66,6 +70,10 @@ export function EnsoDevtools() { const featureFlags = useFeatureFlags() const setFeatureFlags = useSetFeatureFlags() + if (!showDevtools) { + return null + } + return ( diff --git a/app/gui/src/dashboard/components/Devtools/EnsoDevtoolsProvider.tsx b/app/gui/src/dashboard/components/Devtools/EnsoDevtoolsProvider.tsx index 213db13bb8..4aa7064103 100644 --- a/app/gui/src/dashboard/components/Devtools/EnsoDevtoolsProvider.tsx +++ b/app/gui/src/dashboard/components/Devtools/EnsoDevtoolsProvider.tsx @@ -3,6 +3,8 @@ * This file provides a zustand store that contains the state of the Enso devtools. */ import type { PaywallFeatureName } from '#/hooks/billing' +import { IS_DEV_MODE } from 'enso-common/src/detect' +import * as React from 'react' import * as zustand from 'zustand' /** Configuration for a paywall feature. */ @@ -16,13 +18,23 @@ export interface PaywallDevtoolsFeatureConfiguration { /** The state of this zustand store. 
*/ interface EnsoDevtoolsStore { + readonly showDevtools: boolean + readonly setShowDevtools: (showDevtools: boolean) => void + readonly toggleDevtools: () => void readonly showVersionChecker: boolean | null readonly paywallFeatures: Record readonly setPaywallFeature: (feature: PaywallFeatureName, isForceEnabled: boolean | null) => void readonly setEnableVersionChecker: (showVersionChecker: boolean | null) => void } -const ensoDevtoolsStore = zustand.createStore((set) => ({ +export const ensoDevtoolsStore = zustand.createStore((set) => ({ + showDevtools: IS_DEV_MODE, + setShowDevtools: (showDevtools) => { + set({ showDevtools }) + }, + toggleDevtools: () => { + set(({ showDevtools }) => ({ showDevtools: !showDevtools })) + }, showVersionChecker: false, paywallFeatures: { share: { isForceEnabled: null }, @@ -67,3 +79,23 @@ export function usePaywallDevtools() { setFeature: state.setPaywallFeature, })) } + +/** A hook that provides access to the show devtools state. */ +export function useShowDevtools() { + return zustand.useStore(ensoDevtoolsStore, (state) => state.showDevtools) +} + +// ================================= +// === DevtoolsProvider === +// ================================= + +/** + * Provide the Enso devtools to the app. 
+ */ +export function DevtoolsProvider(props: { children: React.ReactNode }) { + React.useEffect(() => { + window.toggleDevtools = ensoDevtoolsStore.getState().toggleDevtools + }, []) + + return <>{props.children} +} diff --git a/app/gui/src/dashboard/components/Devtools/ReactQueryDevtools.tsx b/app/gui/src/dashboard/components/Devtools/ReactQueryDevtools.tsx index e786fec979..6ddb0664d5 100644 --- a/app/gui/src/dashboard/components/Devtools/ReactQueryDevtools.tsx +++ b/app/gui/src/dashboard/components/Devtools/ReactQueryDevtools.tsx @@ -4,6 +4,7 @@ import * as React from 'react' import * as reactQuery from '@tanstack/react-query' import * as reactQueryDevtools from '@tanstack/react-query-devtools' import * as errorBoundary from 'react-error-boundary' +import { useShowDevtools } from './EnsoDevtoolsProvider' const ReactQueryDevtoolsProduction = React.lazy(() => import('@tanstack/react-query-devtools/build/modern/production.js').then((d) => ({ @@ -13,19 +14,13 @@ const ReactQueryDevtoolsProduction = React.lazy(() => /** Show the React Query Devtools and provide the ability to show them in production. */ export function ReactQueryDevtools() { - const [showDevtools, setShowDevtools] = React.useState(false) + const showDevtools = useShowDevtools() // It is safer to pass the client directly to the devtools // since there might be a chance that we have multiple versions of `react-query`, // in case we forget to update the devtools, npm messes up the versions, // or there are hoisting issues. 
const client = reactQuery.useQueryClient() - React.useEffect(() => { - window.toggleDevtools = () => { - setShowDevtools((old) => !old) - } - }, []) - return ( { diff --git a/app/gui/src/dashboard/index.tsx b/app/gui/src/dashboard/index.tsx index 943408afe5..3dfdcb9931 100644 --- a/app/gui/src/dashboard/index.tsx +++ b/app/gui/src/dashboard/index.tsx @@ -21,7 +21,7 @@ import LoggerProvider, { type Logger } from '#/providers/LoggerProvider' import LoadingScreen from '#/pages/authentication/LoadingScreen' -import { ReactQueryDevtools } from '#/components/Devtools' +import { DevtoolsProvider, ReactQueryDevtools } from '#/components/Devtools' import { ErrorBoundary } from '#/components/ErrorBoundary' import { OfflineNotificationManager } from '#/components/OfflineNotificationManager' import { Suspense } from '#/components/Suspense' @@ -113,21 +113,23 @@ export function run(props: DashboardProps) { reactDOM.createRoot(root).render( - - }> - - - - - - - - - - - + + + }> + + + + + + + + + + + - + + , ) From 78d9e3484062fc3e83db4ceab308edae981a3f44 Mon Sep 17 00:00:00 2001 From: James Dunkerley Date: Mon, 28 Oct 2024 20:20:06 +0000 Subject: [PATCH 03/43] Excel before 1900 and AWS signed requests. 
(#11373) --- CHANGELOG.md | 2 + .../lib/Standard/AWS/0.0.0-dev/src/AWS.enso | 114 +++++++ .../Standard/AWS/0.0.0-dev/src/Errors.enso | 9 + .../lib/Standard/AWS/0.0.0-dev/src/Main.enso | 1 + .../lib/Standard/Base/0.0.0-dev/src/Data.enso | 6 +- .../Base/0.0.0-dev/src/Network/HTTP.enso | 199 +++++++------ .../0.0.0-dev/src/Network/HTTP/Header.enso | 20 +- .../src/Extensions/Excel_Extensions.enso | 23 ++ .../Standard/Table/0.0.0-dev/src/Main.enso | 1 + .../main/java/org/enso/aws/ClientBuilder.java | 14 + .../java/org/enso/aws/SignedHttpClient.java | 280 ++++++++++++++++++ .../base/net/http/MultipartBodyBuilder.java | 28 +- .../base/net/http/UrlencodedBodyBuilder.java | 17 +- .../java/org/enso/table/excel/ExcelRow.java | 35 ++- .../java/org/enso/table/excel/ExcelUtils.java | 86 ++++++ .../org/enso/table/write/ExcelWriter.java | 21 +- test/Base_Tests/src/Network/Http_Spec.enso | 24 +- test/Table_Tests/data/OlderDates.xlsx | Bin 0 -> 9123 bytes test/Table_Tests/src/IO/Excel_Spec.enso | 39 +++ 19 files changed, 785 insertions(+), 134 deletions(-) create mode 100644 distribution/lib/Standard/AWS/0.0.0-dev/src/AWS.enso create mode 100644 distribution/lib/Standard/Table/0.0.0-dev/src/Extensions/Excel_Extensions.enso create mode 100644 std-bits/aws/src/main/java/org/enso/aws/SignedHttpClient.java create mode 100644 std-bits/table/src/main/java/org/enso/table/excel/ExcelUtils.java create mode 100644 test/Table_Tests/data/OlderDates.xlsx diff --git a/CHANGELOG.md b/CHANGELOG.md index d18cbd44b8..570388a6a1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -28,10 +28,12 @@ - [The user may set description and labels of an Enso Cloud asset programmatically.][11255] - [DB_Table may be saved as a Data Link.][11371] +- [Support for dates before 1900 in Excel and signed AWS requests.][11373] [11235]: https://github.com/enso-org/enso/pull/11235 [11255]: https://github.com/enso-org/enso/pull/11255 [11371]: https://github.com/enso-org/enso/pull/11371 +[11373]: 
https://github.com/enso-org/enso/pull/11373 #### Enso Language & Runtime diff --git a/distribution/lib/Standard/AWS/0.0.0-dev/src/AWS.enso b/distribution/lib/Standard/AWS/0.0.0-dev/src/AWS.enso new file mode 100644 index 0000000000..1101b8fa9c --- /dev/null +++ b/distribution/lib/Standard/AWS/0.0.0-dev/src/AWS.enso @@ -0,0 +1,114 @@ +from Standard.Base import all +import Standard.Base.Errors.Common.Missing_Argument +import Standard.Base.Network.HTTP.HTTP_Error.HTTP_Error +import Standard.Base.Network.HTTP.Request.Request +import Standard.Base.Network.HTTP.Request_Body.Request_Body +import Standard.Base.Network.HTTP.Request_Error +import Standard.Base.Network.HTTP.Response.Response +from Standard.Base.Metadata.Widget import Text_Input +from Standard.Base.Network.HTTP import if_fetch_method, if_post_method, internal_http_client, with_hash_and_client + +import project.AWS_Credential.AWS_Credential +import project.AWS_Region.AWS_Region +import project.Errors.Invalid_AWS_URI + +polyglot java import org.enso.aws.ClientBuilder + +## Methods for interacting with AWS services. +type AWS + ## ALIAS download, http get + ICON data_input + Fetches from an AWS URI signing the request with the necessary headers, + and returns the response, parsing the body if the content-type is + recognised. Returns an error if the status code does not represent a + successful response. + + Arguments: + - method: The HTTP method to use. Must be one of `HTTP_Method.Get`, + `HTTP_Method.Head`, `HTTP_Method.Delete`, `HTTP_Method.Options`. + Defaults to `HTTP_Method.Get`. + - headers: The headers to send with the request. Defaults to an empty + vector. + - format: The format to use for interpreting the response. + Defaults to `Auto_Detect`. If `Raw_Response` is selected or if the + format cannot be determined automatically, a raw HTTP `Response` will + be returned. + - credentials: The credentials to use for signing the request. Defaults + to the default AWS credentials. 
+ - region_service: The region and service to use for signing the request. + Defaults to the region and service parsed from the URI. + @uri (Text_Input display=..Always) + @format File_Format.default_widget + @headers Header.default_widget + @credentials AWS_Credential.default_widget + signed_fetch : URI -> HTTP_Method -> (Vector (Header | Pair Text Text)) -> File_Format -> AWS_Credential -> AWS_Region_Service -> Any + signed_fetch (uri:URI=(Missing_Argument.throw "uri")) (method:HTTP_Method=..Get) (headers:(Vector (Header | Pair Text Text))=[]) (format = Auto_Detect) credentials:AWS_Credential=..Default (region_service:AWS_Region_Service=(AWS.resolve_region_and_service uri)) = if_fetch_method method <| + request = Request.new method uri (Header.unify_vector headers) Request_Body.Empty + http = with_hash_and_client HTTP.new hash_method=AWS.hash_bytes make_client=(_make_client credentials region_service) + raw_response = http.request request + raw_response.decode format=format if_unsupported=raw_response.with_materialized_body + + ## ALIAS http post, upload + ICON data_upload + Writes the provided data to the provided AWS URI signing the request with + the necessary headers. Returns the response, parsing the body if the + content-type is recognised. Returns an error if the status code does not + represent a successful response. + + Arguments: + - uri: The URI to fetch. + - body: The data to write. See `Supported Body Types` below. + - method: The HTTP method to use. Must be one of `HTTP_Method.Post`, + `HTTP_Method.Put`, `HTTP_Method.Patch`. Defaults to `HTTP_Method.Post`. + - headers: The headers to send with the request. Defaults to an empty + vector. + - response_format: The format to use for interpreting the response. + Defaults to `Auto_Detect`. If `Raw_Response` is selected or if the + format cannot be determined automatically, a raw HTTP `Response` will + be returned. + - credentials: The credentials to use for signing the request. 
Defaults + to the default AWS credentials. + - region_service: The region and service to use for signing the request. + Defaults to the region and service parsed from the URI. + @uri (Text_Input display=..Always) + @format File_Format.default_widget + @headers Header.default_widget + @credentials AWS_Credential.default_widget + signed_post : (URI | Text) -> Request_Body -> HTTP_Method -> Vector (Header | Pair Text Text) -> Response ! Request_Error | HTTP_Error + signed_post (uri:URI=(Missing_Argument.throw "uri")) (body:Request_Body=..Empty) (method:HTTP_Method=..Post) (headers:(Vector (Header | Pair Text Text))=[]) (response_format = Auto_Detect) credentials:AWS_Credential=..Default (region_service:AWS_Region_Service=(AWS.resolve_region_and_service uri)) = if_post_method method <| + request = Request.new method uri (Header.unify_vector headers) body + http = with_hash_and_client HTTP.new hash_method=AWS.hash_bytes make_client=(_make_client credentials region_service) + raw_response = http.request request + raw_response.decode format=response_format if_unsupported=raw_response.with_materialized_body + + ## PRIVATE + Hash a Vector of bytes using SHA256 (as used by AWS). + hash_bytes : Vector Integer -> Text + hash_bytes bytes:Vector = ClientBuilder.getSHA256 bytes + + ## Resolve the region and service from an AWS based URI. + Splits a standard form AWS URI into the region and service. + + The URI must be in the forms: + - `https://(*.)..amazonaws.com`. + - `https://(*.)..amazonaws.com`. + + Arguments: + - uri: The URI to resolve. + resolve_region_and_service : URI -> AWS_Region_Service + resolve_region_and_service (uri:URI=(Missing_Argument.throw "uri")) = + region_regex = regex "^(([a-z]{2}-[^.]+?-\d+)|(global))$" + domain = uri.host.split '.' 
+ if (domain.length < 4 || (domain.at -1) != "com" || (domain.at -2) != "amazonaws") then Error.throw (Invalid_AWS_URI.Error domain.length.to_text+":"+uri.to_text) else + if (domain.at -3).match region_regex then AWS_Region_Service.Region_Service region=(domain.at -3) service=(domain.at -4) else + if (domain.at -4).match region_regex then AWS_Region_Service.Region_Service region=(domain.at -4) service=(domain.at -3) else + Error.throw (Invalid_AWS_URI.Error domain.to_display_text) + +## Holds the region and service of an AWS URI. +type AWS_Region_Service + ## Holds the region and service of an AWS URI. + Region_Service region:Text service:Text + +private _make_client credentials region_service http hash = + builder = ClientBuilder.new credentials.as_java (AWS_Region.Region region_service.region).as_java + builder.createSignedClient region_service.region region_service.service (internal_http_client http "") hash diff --git a/distribution/lib/Standard/AWS/0.0.0-dev/src/Errors.enso b/distribution/lib/Standard/AWS/0.0.0-dev/src/Errors.enso index 32c90451dc..b71822295b 100644 --- a/distribution/lib/Standard/AWS/0.0.0-dev/src/Errors.enso +++ b/distribution/lib/Standard/AWS/0.0.0-dev/src/Errors.enso @@ -2,6 +2,15 @@ from Standard.Base import all polyglot java import software.amazon.awssdk.core.exception.SdkClientException +## An invalid URI was provided. 
+type Invalid_AWS_URI + ## PRIVATE + Error uri:Text + + ## PRIVATE + to_display_text : Text + to_display_text self = "Invalid AWS URI: " + self.uri + ## An error in the core AWS SDK type AWS_SDK_Error ## PRIVATE diff --git a/distribution/lib/Standard/AWS/0.0.0-dev/src/Main.enso b/distribution/lib/Standard/AWS/0.0.0-dev/src/Main.enso index 1060a50baf..beb96125f9 100644 --- a/distribution/lib/Standard/AWS/0.0.0-dev/src/Main.enso +++ b/distribution/lib/Standard/AWS/0.0.0-dev/src/Main.enso @@ -1,3 +1,4 @@ +export project.AWS.AWS export project.AWS_Credential.AWS_Credential export project.AWS_Region.AWS_Region export project.Database.Redshift.Redshift_Details.Redshift_Details diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso index a862b6f4bd..b6886fa047 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso @@ -195,7 +195,7 @@ list (directory:(Text | File)=enso_project.root) (name_filter:Text="") recursive import Standard.Base.Data file = enso_project.data / "spreadsheet.xls" Data.fetch URL . body . write file -@uri Text_Input +@uri (Text_Input display=..Always) @format Data_Read_Helpers.format_widget_with_raw_response @headers Header.default_widget fetch : (URI | Text) -> HTTP_Method -> Vector (Header | Pair Text Text) -> File_Format -> Any ! Request_Error | HTTP_Error @@ -322,7 +322,7 @@ fetch (uri:(URI | Text)=(Missing_Argument.throw "uri")) (method:HTTP_Method=..Ge test_file = enso_project.data / "sample.txt" form_data = Dictionary.from_vector [["key", "val"], ["a_file", test_file]] response = Data.post url_post (Request_Body.Form_Data form_data url_encoded=True) -@uri Text_Input +@uri (Text_Input display=..Always) @headers Header.default_widget @response_format Data_Read_Helpers.format_widget_with_raw_response post : (URI | Text) -> Request_Body -> HTTP_Method -> Vector (Header | Pair Text Text) -> File_Format -> Any ! 
Request_Error | HTTP_Error @@ -342,7 +342,7 @@ post (uri:(URI | Text)=(Missing_Argument.throw "uri")) (body:Request_Body=..Empt `HTTP_Method.Head`, `HTTP_Method.Delete`, `HTTP_Method.Options`. Defaults to `HTTP_Method.Get`. - headers: The headers to send with the request. Defaults to an empty vector. -@uri Text_Input +@uri (Text_Input display=..Always) @headers Header.default_widget download : (URI | Text) -> Writable_File -> HTTP_Method -> Vector (Header | Pair Text Text) -> File ! Request_Error | HTTP_Error download (uri:(URI | Text)=(Missing_Argument.throw "uri")) file:Writable_File (method:HTTP_Method=..Get) (headers:(Vector (Header | Pair Text Text))=[]) = diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP.enso index 5d8575906e..34511c97e7 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP.enso @@ -11,6 +11,7 @@ import project.Enso_Cloud.Enso_Secret.Enso_Secret import project.Error.Error import project.Errors.Common.Forbidden_Operation import project.Errors.Illegal_Argument.Illegal_Argument +import project.Errors.Unimplemented.Unimplemented import project.Function.Function import project.Meta import project.Network.HTTP.Header.Header @@ -28,6 +29,7 @@ import project.Runtime.Context import project.System.File.File from project.Data.Boolean import Boolean, False, True from project.Data.Json.Extensions import all +from project.Data.Text.Extensions import all polyglot java import java.lang.IllegalArgumentException polyglot java import java.io.IOException @@ -48,6 +50,20 @@ polyglot java import org.enso.base.net.http.MultipartBodyBuilder polyglot java import org.enso.base.net.http.UrlencodedBodyBuilder type HTTP + ## PRIVATE + Static helper for get-like methods + fetch : (URI | Text) -> HTTP_Method -> Vector (Header | Pair Text Text) -> Response ! 
Request_Error | HTTP_Error + fetch (uri:(URI | Text)) (method:HTTP_Method=..Get) (headers:(Vector (Header | Pair Text Text))=[]) = if_fetch_method method <| + request = Request.new method uri (Header.unify_vector headers) Request_Body.Empty + HTTP.new.request request + + ## PRIVATE + Static helper for post-like methods + post : (URI | Text) -> Request_Body -> HTTP_Method -> Vector (Header | Pair Text Text) -> Response ! Request_Error | HTTP_Error + post (uri:(URI | Text)) (body:Request_Body=Request_Body.Empty) (method:HTTP_Method=HTTP_Method.Post) (headers:(Vector (Header | Pair Text Text))=[]) = if_post_method method <| + request = Request.new method uri (Header.unify_vector headers) body + HTTP.new.request request + ## PRIVATE ADVANCED Create a new instance of the HTTP client. @@ -76,7 +92,7 @@ type HTTP example_new = HTTP.new (timeout = (Duration.new seconds=30)) (proxy = Proxy.Address "example.com" 8080) new : Duration -> Boolean -> Proxy -> HTTP_Version -> HTTP - new (timeout:Duration=(Duration.new seconds=10)) (follow_redirects:Boolean=True) (proxy:Proxy=Proxy.System) (version:HTTP_Version=HTTP_Version.HTTP_2) = + new (timeout:Duration=(Duration.new seconds=10)) (follow_redirects:Boolean=True) (proxy:Proxy=..System) (version:HTTP_Version=..HTTP_2) = HTTP.Value timeout follow_redirects proxy version Nothing ## PRIVATE @@ -90,7 +106,9 @@ type HTTP - custom_ssl_context: A custom SSL context to use for requests, or Nothing if the default should be used. For most use cases, it is recommended to use the default. - Value timeout follow_redirects proxy version custom_ssl_context + - hash_method: The hash method to use for body hashing. + - make_client: Creates the Java HTTPClient. 
+ private Value timeout follow_redirects:Boolean proxy:Proxy version:HTTP_Version custom_ssl_context hash_method=Nothing make_client=internal_http_client ## ADVANCED ICON data_download @@ -107,7 +125,7 @@ type HTTP request self req error_on_failure_code=True = # Prevent request if the method is a write-like method and output context is disabled. check_output_context ~action = - if fetch_methods.contains req.method || Context.Output.is_enabled then action else + if (if_fetch_method req.method True if_not=Context.Output.is_enabled) then action else Error.throw (Forbidden_Operation.Error ("As writing is disabled, " + req.method.to_text + " request not sent. Press the Write button ▶ to send it.")) handle_request_error = handler caught_panic = @@ -116,66 +134,25 @@ type HTTP Panic.catch IllegalArgumentException handler=handler <| Panic.catch IOException handler=handler handle_request_error <| Illegal_Argument.handle_java_exception <| check_output_context <| - headers = resolve_headers req + headers = _resolve_headers req headers.if_not_error <| - body_publisher_and_boundary = resolve_body_to_publisher_and_boundary req.body - body_publisher_and_boundary.if_not_error <| + resolved_body = _resolve_body req.body self.hash_method + resolved_body.if_not_error <| # Create builder and set method and body builder = HttpRequest.newBuilder - builder.method req.method.to_http_method_name body_publisher_and_boundary.first + builder.method req.method.to_http_method_name resolved_body.publisher # Create Unified Header list - boundary = body_publisher_and_boundary.second - boundary_header_list = if boundary.is_nothing then [] else [Header.multipart_form_data boundary] + boundary_header_list = if resolved_body.boundary.is_nothing then [] else [Header.multipart_form_data resolved_body.boundary] all_headers = headers + boundary_header_list mapped_headers = all_headers.map on_problems=No_Wrap .to_java_pair - response = Response.Value (EnsoSecretHelper.makeRequest self.internal_http_client 
builder req.uri.to_java_representation mapped_headers) + response = Response.Value (EnsoSecretHelper.makeRequest (self.make_client self resolved_body.hash) builder req.uri.to_java_representation mapped_headers) if error_on_failure_code.not || response.code.is_success then response else body = response.body.decode_as_text.catch Any _->"" message = if body.is_empty then Nothing else body Error.throw (HTTP_Error.Status_Error response.code message response.uri) - ## PRIVATE - Static helper for get-like methods - fetch : (URI | Text) -> HTTP_Method -> Vector (Header | Pair Text Text) -> Response ! Request_Error | HTTP_Error - fetch (uri:(URI | Text)) (method:HTTP_Method=..Get) (headers:(Vector (Header | Pair Text Text))=[]) = - check_method fetch_methods method <| - request = Request.new method uri (parse_headers headers) Request_Body.Empty - HTTP.new.request request - - ## PRIVATE - Static helper for post-like methods - post : (URI | Text) -> Request_Body -> HTTP_Method -> Vector (Header | Pair Text Text) -> Response ! Request_Error | HTTP_Error - post (uri:(URI | Text)) (body:Request_Body=Request_Body.Empty) (method:HTTP_Method=HTTP_Method.Post) (headers:(Vector (Header | Pair Text Text))=[]) = - check_method post_methods method <| - request = Request.new method uri (parse_headers headers) body - HTTP.new.request request - - ## PRIVATE - - Build an HTTP client. 
- internal_http_client : HttpClient - internal_http_client self = - builder = HttpClient.newBuilder.connectTimeout self.timeout - - redirect_policy = if self.follow_redirects then HttpClient.Redirect.ALWAYS else HttpClient.Redirect.NEVER - builder.followRedirects redirect_policy - - case self.proxy of - Proxy.Address proxy_host proxy_port -> builder.proxy (ProxySelector.of (InetSocketAddress.new proxy_host proxy_port)) - Proxy.System -> builder.proxy ProxySelector.getDefault - Proxy.None -> Nothing - - case self.version of - HTTP_Version.HTTP_1_1 -> builder.version HttpClient.Version.HTTP_1_1 - HTTP_Version.HTTP_2 -> builder.version HttpClient.Version.HTTP_2 - - if self.custom_ssl_context.is_nothing.not then - builder.sslContext self.custom_ssl_context - - builder.build - ## PRIVATE ADVANCED Create a copy of the HTTP client with a custom SSL context. @@ -183,23 +160,14 @@ type HTTP set_custom_ssl_context self ssl_context = HTTP.Value self.timeout self.follow_redirects self.proxy self.version ssl_context -## PRIVATE -parse_headers : Vector (Header | Pair Text Text) -> Vector Header -parse_headers headers = - headers . map on_problems=No_Wrap h-> case h of - _ : Vector -> Header.new (h.at 0) (h.at 1) - _ : Pair -> Header.new (h.at 0) (h.at 1) - _ : Function -> h:Header - _ : Header -> h - _ -> Error.throw (Illegal_Argument.Error "Invalid header type - all values must be Vector, Pair or Header (got "+(Meta.get_simple_type_name h)+").") - ## PRIVATE If either encoding or content type is specified in the Request_Body, that is used as the content type header. If encoding is specified without content type, "text/plain" is used as the content type. It is an error to specify the content type in both the request body and the header list. If the body is not Request_Body.Empty, and no content type is specified, a default is used. -resolve_headers : Request -> Vector Header -resolve_headers req = + Not explicitly private as allows direct testing. 
+_resolve_headers : Request -> Vector Header +_resolve_headers req = is_content_type_header h = h.name . equals_ignore_case Header.content_type_header_name # Check for content type and encoding in the Request_Body. @@ -235,66 +203,103 @@ resolve_headers req = all_headers + default_content_type ## PRIVATE - Generate body publisher and optional form content boundary -resolve_body_to_publisher_and_boundary : Request_Body -> Pair BodyPublisher Text -resolve_body_to_publisher_and_boundary body:Request_Body = +type Resolved_Body + private Value publisher:BodyPublisher boundary:Text|Nothing hash:Text|Nothing + +## PRIVATE + Generate body publisher, optional form content boundary and optionally hash from the body +_resolve_body : Request_Body -> Function | Nothing -> Resolved_Body +private _resolve_body body:Request_Body hash_function = body_publishers = HttpRequest.BodyPublishers case body of Request_Body.Text text encoding _ -> body_publisher = case encoding of Nothing -> body_publishers.ofString text _ : Encoding -> body_publishers.ofString text encoding.to_java_charset - Pair.new body_publisher Nothing + hash = if hash_function.is_nothing then "" else hash_function (text.bytes (encoding.if_nothing Encoding.utf_8)) + Resolved_Body.Value body_publisher Nothing hash Request_Body.Json x -> json = x.to_json - json.if_not_error <| - Pair.new (body_publishers.ofString json) Nothing + hash = if hash_function.is_nothing then "" else hash_function json.bytes + json.if_not_error <| Resolved_Body.Value (body_publishers.ofString json) Nothing hash Request_Body.Binary file -> path = File_Utils.toPath file.path - Pair.new (body_publishers.ofFile path) Nothing + ## ToDo: Support hashing a file. + hash = if hash_function.is_nothing then "" else Unimplemented.throw "Hashing a file body is not yet supported." 
+ Resolved_Body.Value (body_publishers.ofFile path) Nothing hash Request_Body.Form_Data form_data url_encoded -> - build_form_body_publisher form_data url_encoded + _resolve_form_body form_data url_encoded hash_function Request_Body.Empty -> - Pair.new (body_publishers.noBody) Nothing + hash = if hash_function.is_nothing then "" else hash_function [] + Resolved_Body.Value body_publishers.noBody Nothing hash _ -> - Error.throw (Illegal_Argument.Error ("Unsupported POST body: " + body.to_display_text + "; this is a bug in the Data library")) - + Error.throw (Illegal_Argument.Error ("Unsupported POST body: " + body.to_display_text + "; this is a bug library.")) ## PRIVATE - Build a BodyPublisher from the given form data. The pair's second value is a content boundary in the case of a `multipart/form-data` form; otherwise, Nothing -build_form_body_publisher : Dictionary Text (Text | File) -> Boolean -> Pair BodyPublisher Text -build_form_body_publisher (form_data:(Dictionary Text (Text | File))) (url_encoded:Boolean=False) = case url_encoded of +_resolve_form_body : Dictionary Text (Text | File) -> Boolean -> Function | Nothing -> Resolved_Body +private _resolve_form_body (form_data:(Dictionary Text (Text | File))) (url_encoded:Boolean=False) hash_function = case url_encoded of True -> body_builder = UrlencodedBodyBuilder.new form_data.map_with_key key-> value-> case value of _ : Text -> body_builder.add_part_text key value _ : File -> body_builder.add_part_file key value.path - Pair.new body_builder.build Nothing + publisher = body_builder.build + hash = if hash_function.is_nothing then "" else hash_function body_builder.getContents.bytes + Resolved_Body.Value publisher Nothing hash False -> body_builder = MultipartBodyBuilder.new form_data.map_with_key key-> value-> case value of _ : Text -> body_builder.add_part_text key value _ : File -> body_builder.add_part_file key value.path - boundary = body_builder.get_boundary - Pair.new body_builder.build boundary + 
publisher = body_builder.build + hash = if hash_function.is_nothing then "" else hash_function body_builder.getContents + Resolved_Body.Value publisher body_builder.get_boundary hash ## PRIVATE -fetch_methods : Hashset HTTP_Method -fetch_methods = Hashset.from_vector [HTTP_Method.Get, HTTP_Method.Head, HTTP_Method.Options] +if_fetch_method : HTTP_Method -> Function -> Any -> Any ! Illegal_Argument +if_fetch_method method:HTTP_Method ~action ~if_not=(Error.throw (Illegal_Argument.Error ("Unsupported method " + method.to_display_text))) = + if [HTTP_Method.Get, HTTP_Method.Head, HTTP_Method.Options].contains method then action else + if_not ## PRIVATE -post_methods : Hashset HTTP_Method -post_methods = Hashset.from_vector [HTTP_Method.Post, HTTP_Method.Put, HTTP_Method.Patch, HTTP_Method.Delete] +if_post_method : HTTP_Method -> Function -> Any -> Any ! Illegal_Argument +if_post_method method:HTTP_Method ~action ~if_not=(Error.throw (Illegal_Argument.Error ("Unsupported method " + method.to_display_text))) = + if [HTTP_Method.Post, HTTP_Method.Put, HTTP_Method.Patch, HTTP_Method.Delete].contains method then action else + if_not ## PRIVATE -check_method : Hashset HTTP_Method -> Any -> Any -> Any ! Illegal_Argument -check_method allowed_methods method ~action = - if allowed_methods.contains method then action else - Error.throw (Illegal_Argument.Error ("Unsupported method " + method.to_display_text)) + Build a custom HTTP with hash function and make_client function. +with_hash_and_client : HTTP -> Function -> Function -> HTTP +with_hash_and_client http hash_method make_client = + HTTP.Value http.timeout http.follow_redirects http.proxy http.version http.custom_ssl_context hash_method make_client + +## PRIVATE + Build a Java HttpClient with the given settings. 
+internal_http_client : HTTP -> Text -> HttpClient +internal_http_client http hash = + _ = hash + builder = HttpClient.newBuilder.connectTimeout http.timeout + + redirect_policy = if http.follow_redirects then HttpClient.Redirect.ALWAYS else HttpClient.Redirect.NEVER + builder.followRedirects redirect_policy + + case http.proxy of + Proxy.Address proxy_host proxy_port -> builder.proxy (ProxySelector.of (InetSocketAddress.new proxy_host proxy_port)) + Proxy.System -> builder.proxy ProxySelector.getDefault + Proxy.None -> Nothing + + case http.version of + HTTP_Version.HTTP_1_1 -> builder.version HttpClient.Version.HTTP_1_1 + HTTP_Version.HTTP_2 -> builder.version HttpClient.Version.HTTP_2 + + if http.custom_ssl_context.is_nothing.not then + builder.sslContext http.custom_ssl_context + + builder.build ## PRIVATE An error when sending an HTTP request. @@ -314,3 +319,23 @@ type Request_Error Nothing -> "" _ -> " " + self.message self.error_type + " error when sending request." + description_text + +## PRIVATE + Access the HTTP's timeout (for testing purposes). +get_timeout : HTTP -> Duration +get_timeout http:HTTP = http.timeout + +## PRIVATE + Access the HTTP's follow_redirects (for testing purposes). +get_follow_redirects : HTTP -> Boolean +get_follow_redirects http:HTTP = http.follow_redirects + +## PRIVATE + Access the HTTP's proxy (for testing purposes). +get_proxy : HTTP -> Proxy +get_proxy http:HTTP = http.proxy + +## PRIVATE + Access the HTTP's version (for testing purposes). 
+get_version : HTTP -> HTTP_Version +get_version http:HTTP = http.version diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Header.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Header.enso index 0ebcb985d6..c8357c65bf 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Header.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Header.enso @@ -1,8 +1,14 @@ import project.Data.Numbers.Integer +import project.Data.Pair.Pair import project.Data.Text.Encoding.Encoding import project.Data.Text.Text +import project.Data.Vector.No_Wrap +import project.Data.Vector.Vector import project.Enso_Cloud.Enso_Secret.Derived_Secret_Value import project.Enso_Cloud.Enso_Secret.Enso_Secret +import project.Error.Error +import project.Errors.Illegal_Argument.Illegal_Argument +import project.Function.Function import project.Meta import project.Metadata.Display import project.Metadata.Widget @@ -20,7 +26,18 @@ polyglot java import org.graalvm.collections.Pair as Java_Pair type Header ## PRIVATE + Normalize a vector of `Header`, `Pair`s or `Vector`s into a vector of + `Header` values. + unify_vector : Vector (Header | Pair Text Text | Vector) -> Vector Header + unify_vector headers:Vector = + headers . map on_problems=No_Wrap h-> case h of + _ : Vector -> Header.new (h.at 0) (h.at 1) + _ : Pair -> Header.new (h.at 0) (h.at 1) + _ : Function -> h:Header + _ : Header -> h + _ -> Error.throw (Illegal_Argument.Error "Invalid header type - all values must be Vector, Pair or Header (got "+(Meta.get_simple_type_name h)+").") + ## PRIVATE A type representing a header. 
Arguments: @@ -45,7 +62,8 @@ type Header example_new = Header.new "My_Header" "my header's value" @value make_text_secret_selector new : Text -> Text | Enso_Secret | Derived_Secret_Value -> Header - new name:Text value:(Text | Enso_Secret | Derived_Secret_Value) = Header.Value name value + new name:Text value:(Text | Enso_Secret | Derived_Secret_Value) = + Header.Value name value ## ICON text_input Create an "Accept" header. diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Extensions/Excel_Extensions.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Extensions/Excel_Extensions.enso new file mode 100644 index 0000000000..15246e3ab0 --- /dev/null +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Extensions/Excel_Extensions.enso @@ -0,0 +1,23 @@ +from Standard.Base import all +import Standard.Base.Errors.Illegal_Argument.Illegal_Argument + +polyglot java import org.enso.table.excel.ExcelUtils + +## GROUP Standard.Base.Conversions + ICON date_and_time + Converts an Excel date to a `Date`. +Date.from_excel : Integer -> Date +Date.from_excel excel_date:Integer = case excel_date of + 60 -> Error.throw (Illegal_Argument.Error "29th February 1900 does not exist.") + 0 -> Error.throw (Illegal_Argument.Error "0 is not a valid Excel date.") + _ -> ExcelUtils.fromExcelDateTime excel_date + +## GROUP Standard.Base.Conversions + ICON date_and_time + Converts an Excel date time to a `Date_Time`. 
+Date_Time.from_excel : Number -> Date_Time +Date_Time.from_excel excel_date:Number = + if excel_date >= 60 && excel_date < 61 then Error.throw (Illegal_Argument.Error "29th February 1900 does not exist.") else + if excel_date >= 0 && excel_date < 1 then Error.throw (Illegal_Argument.Error "0 is not a valid Excel date.") else + raw_date = ExcelUtils.fromExcelDateTime excel_date + if raw_date.is_a Date then raw_date.to_date_time else raw_date diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Main.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Main.enso index 6a93b20fb2..ab01be628e 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Main.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Main.enso @@ -25,6 +25,7 @@ export project.Excel.Excel_Workbook.Excel_Workbook export project.Expression.expr export project.Extensions.Column_Vector_Extensions.to_column +export project.Extensions.Excel_Extensions.from_excel export project.Extensions.Table_Conversions.from_objects export project.Extensions.Table_Conversions.parse_to_table export project.Extensions.Table_Conversions.to_table diff --git a/std-bits/aws/src/main/java/org/enso/aws/ClientBuilder.java b/std-bits/aws/src/main/java/org/enso/aws/ClientBuilder.java index cda1edf0d5..f7cba219cb 100644 --- a/std-bits/aws/src/main/java/org/enso/aws/ClientBuilder.java +++ b/std-bits/aws/src/main/java/org/enso/aws/ClientBuilder.java @@ -1,6 +1,7 @@ package org.enso.aws; import java.net.URI; +import java.net.http.HttpClient; import java.util.function.Supplier; import org.enso.base.enso_cloud.ExternalLibrarySecretHelper; import org.enso.base.enso_cloud.HideableValue; @@ -59,6 +60,19 @@ public class ClientBuilder { .build(); } + /** + * Builds an HttpClient that will sign requests and payloads using the AWSv4 Signature algorithm. 
+ */ + public HttpClient createSignedClient( + String regionName, String serviceName, HttpClient baseClient, String bodySHA256) { + return new SignedHttpClient( + regionName, serviceName, unsafeBuildCredentialProvider(), baseClient, bodySHA256); + } + + public static String getSHA256(byte[] rawData) { + return SignedHttpClient.getSHA256(rawData); + } + /** * Instantiates an S3Client configured in such a way that it can query buckets regardless of their * region. diff --git a/std-bits/aws/src/main/java/org/enso/aws/SignedHttpClient.java b/std-bits/aws/src/main/java/org/enso/aws/SignedHttpClient.java new file mode 100644 index 0000000000..df3c59a759 --- /dev/null +++ b/std-bits/aws/src/main/java/org/enso/aws/SignedHttpClient.java @@ -0,0 +1,280 @@ +package org.enso.aws; + +import java.io.IOException; +import java.net.Authenticator; +import java.net.CookieHandler; +import java.net.ProxySelector; +import java.net.URL; +import java.net.URLEncoder; +import java.net.http.HttpClient; +import java.net.http.HttpRequest; +import java.net.http.HttpResponse; +import java.nio.charset.StandardCharsets; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.time.Duration; +import java.time.LocalDate; +import java.time.ZoneId; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; +import java.util.*; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.Executor; +import java.util.stream.Collectors; +import javax.crypto.Mac; +import javax.net.ssl.SSLContext; +import javax.net.ssl.SSLParameters; +import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider; + +/** + * Wraps an HttpClient to sign requests with AWS signature v4. Designed to be called by + * EnsoSecretHelper.makeRequest. 
+ */ +class SignedHttpClient extends HttpClient { + private static final String SCHEME = "AWS4"; + private static final String ALGORITHM = "HMAC-SHA256"; + private static final String TERMINATOR = "aws4_request"; + + private final String regionName; + private final String serviceName; + private final AwsCredentialsProvider credentialsProvider; + private final HttpClient parent; + private final String bodyHash; + + SignedHttpClient( + String regionName, + String serviceName, + AwsCredentialsProvider credentialsProvider, + HttpClient parent, + String bodyHash) { + this.regionName = regionName; + this.serviceName = serviceName; + this.credentialsProvider = credentialsProvider; + this.parent = parent; + this.bodyHash = bodyHash; + } + + @Override + public Optional cookieHandler() { + return parent.cookieHandler(); + } + + @Override + public Optional connectTimeout() { + return parent.connectTimeout(); + } + + @Override + public Redirect followRedirects() { + return parent.followRedirects(); + } + + @Override + public Optional proxy() { + return parent.proxy(); + } + + @Override + public SSLContext sslContext() { + return parent.sslContext(); + } + + @Override + public SSLParameters sslParameters() { + return parent.sslParameters(); + } + + @Override + public Optional authenticator() { + return parent.authenticator(); + } + + @Override + public Version version() { + return parent.version(); + } + + @Override + public Optional executor() { + return parent.executor(); + } + + @Override + public CompletableFuture> sendAsync( + HttpRequest request, HttpResponse.BodyHandler responseBodyHandler) { + throw new UnsupportedOperationException("Not implemented"); + } + + @Override + public CompletableFuture> sendAsync( + HttpRequest request, + HttpResponse.BodyHandler responseBodyHandler, + HttpResponse.PushPromiseHandler pushPromiseHandler) { + throw new UnsupportedOperationException("Not implemented"); + } + + @Override + public HttpResponse send( + HttpRequest request, 
HttpResponse.BodyHandler responseBodyHandler) + throws IOException, InterruptedException { + URL url = request.uri().toURL(); + + var headerMap = request.headers().map(); + var output = new HashMap(); + + var bodyPublisher = request.bodyPublisher().orElse(HttpRequest.BodyPublishers.noBody()); + long bodyLength = bodyPublisher.contentLength(); + output.put("content-length", bodyLength == 0 ? "" : Long.toString(bodyLength)); + + output.put("x-amz-content-sha256", bodyHash); + + output.put( + "x-amz-date", + ZonedDateTime.now(ZoneId.of("UTC")) + .format(DateTimeFormatter.ofPattern("yyyyMMdd'T'HHmmss'Z'"))); + + int port = url.getPort(); + String hostHeader = url.getHost() + (port == -1 ? "" : ":" + port); + output.put("Host", hostHeader); + + // Create canonical headers + var sortedHeaders = new ArrayList(); + sortedHeaders.addAll(headerMap.keySet()); + sortedHeaders.addAll(output.keySet()); + sortedHeaders.sort(String.CASE_INSENSITIVE_ORDER); + var canonicalHeaderNames = + sortedHeaders.stream().map(String::toLowerCase).collect(Collectors.joining(";")); + var canonicalHeaders = + sortedHeaders.stream() + .map( + k -> + k.toLowerCase().replaceAll("\\s+", " ") + + ":" + + output.getOrDefault( + k, headerMap.containsKey(k) ? headerMap.get(k).get(0) : null)) + .collect(Collectors.joining("\n")); + + // Create canonical query string. 
+ var queryParameters = ""; + if (url.getQuery() != null) { + var parameters = Arrays.stream(url.getQuery().split("&")).map(p -> p.split("=", 2)); + queryParameters = + parameters + .sorted(Comparator.comparing(l -> l[0])) + .map(p -> urlEncode(p[0]) + "=" + urlEncode(p[1])) + .collect(Collectors.joining("&")); + } + + // Create canonical request + var canonicalPath = url.getPath(); + if (!canonicalPath.startsWith("/")) { + canonicalPath = "/" + canonicalPath; + } + canonicalPath = urlEncode(canonicalPath).replace("%2F", "/"); + var canonicalRequest = + String.join( + "\n", + request.method(), + canonicalPath, + queryParameters, + canonicalHeaders, + "", + canonicalHeaderNames, + bodyHash); + var canonicalRequestHash = getSHA256(canonicalRequest.getBytes(StandardCharsets.UTF_8)); + + // Need the credentials + var credentials = credentialsProvider.resolveCredentials(); + + // Create signing string + String dateStamp = LocalDate.now().format(DateTimeFormatter.BASIC_ISO_DATE); + String scope = dateStamp + "/" + regionName + "/" + serviceName + "/" + TERMINATOR; + String toSign = + String.join( + "\n", SCHEME + "-" + ALGORITHM, output.get("x-amz-date"), scope, canonicalRequestHash); + var signature = + sign( + SCHEME + credentials.secretAccessKey(), + dateStamp, + regionName, + serviceName, + TERMINATOR, + toSign); + + // Build the authorization header + var authorizationHeader = + SCHEME + + "-" + + ALGORITHM + + " " + + "Credential=" + + credentials.accessKeyId() + + "/" + + scope + + ", " + + "SignedHeaders=" + + canonicalHeaderNames + + ", " + + "Signature=" + + signature; + output.put("Authorization", authorizationHeader); + + // Build a new request with the additional headers + output.remove("Host"); + output.remove("content-length"); + var newBuilder = HttpRequest.newBuilder(request, (n, v) -> !output.containsKey(n)); + output.keySet().forEach(n -> newBuilder.header(n, output.get(n))); + + // Send the request + return parent.send(newBuilder.build(), 
responseBodyHandler); + } + + private static String sign(String init, String... values) { + try { + var mac = Mac.getInstance("HmacSHA256"); + byte[] key = init.getBytes(StandardCharsets.UTF_8); + try { + for (String value : values) { + mac.init(new javax.crypto.spec.SecretKeySpec(key, ALGORITHM)); + key = mac.doFinal(value.getBytes(StandardCharsets.UTF_8)); + } + return bytesToHex(key); + } catch (java.security.InvalidKeyException e) { + throw new RuntimeException("Failed to sign the request.", e); + } + } catch (NoSuchAlgorithmException e) { + throw new RuntimeException("Failed to get HMAC-SHA-256 algorithm.", e); + } + } + + /** + * Returns the SHA-256 hash of the given data. + * + * @param rawData the data to hash + * @return the SHA-256 hash of the data + */ + static String getSHA256(byte[] rawData) { + try { + byte[] hash = MessageDigest.getInstance("SHA-256").digest(rawData); + return bytesToHex(hash); + } catch (NoSuchAlgorithmException e) { + throw new RuntimeException("Failed to get SHA-256 algorithm.", e); + } + } + + private static String bytesToHex(byte[] hash) { + StringBuilder hexString = new StringBuilder(2 * hash.length); + for (byte b : hash) { + String hex = Integer.toHexString(0xff & b); + if (hex.length() == 1) { + hexString.append('0'); + } + hexString.append(hex); + } + return hexString.toString(); + } + + private static String urlEncode(String input) { + return URLEncoder.encode(input, StandardCharsets.UTF_8); + } +} diff --git a/std-bits/base/src/main/java/org/enso/base/net/http/MultipartBodyBuilder.java b/std-bits/base/src/main/java/org/enso/base/net/http/MultipartBodyBuilder.java index e7e54e74eb..4ac1074e9e 100644 --- a/std-bits/base/src/main/java/org/enso/base/net/http/MultipartBodyBuilder.java +++ b/std-bits/base/src/main/java/org/enso/base/net/http/MultipartBodyBuilder.java @@ -23,13 +23,39 @@ public class MultipartBodyBuilder { * @return the body publisher. 
*/ public HttpRequest.BodyPublisher build() { - if (partsSpecificationList.size() == 0) { + if (partsSpecificationList.isEmpty()) { throw new IllegalStateException("Must have at least one part to build multipart message."); } addFinalBoundaryPart(); return HttpRequest.BodyPublishers.ofByteArrays(PartsIterator::new); } + /** + * Get the content of the multipart form. The form needs to be built before this. + * + * @return the content of the multipart form. + */ + public byte[] getContents() { + if (partsSpecificationList.isEmpty() + || partsSpecificationList.get(partsSpecificationList.size() - 1).type + != PartsSpecification.TYPE.FINAL_BOUNDARY) { + throw new IllegalStateException( + "Must have built the MultipartBodyBuilder, before calling getContents."); + } + + var iterator = new PartsIterator(); + byte[] output = new byte[0]; + while (iterator.hasNext()) { + byte[] part = iterator.next(); + byte[] newOutput = new byte[output.length + part.length]; + System.arraycopy(output, 0, newOutput, 0, output.length); + System.arraycopy(part, 0, newOutput, output.length, part.length); + output = newOutput; + } + + return output; + } + /** * Get the multipart boundary separator. * diff --git a/std-bits/base/src/main/java/org/enso/base/net/http/UrlencodedBodyBuilder.java b/std-bits/base/src/main/java/org/enso/base/net/http/UrlencodedBodyBuilder.java index 4865471c1d..4244ecb272 100644 --- a/std-bits/base/src/main/java/org/enso/base/net/http/UrlencodedBodyBuilder.java +++ b/std-bits/base/src/main/java/org/enso/base/net/http/UrlencodedBodyBuilder.java @@ -12,6 +12,7 @@ import java.util.ArrayList; public final class UrlencodedBodyBuilder { private final ArrayList parts = new ArrayList<>(); + private String contents = null; /** * Create HTTP body publisher for an url-encoded form data. @@ -19,10 +20,24 @@ public final class UrlencodedBodyBuilder { * @return the body publisher. 
*/ public HttpRequest.BodyPublisher build() { - String contents = String.join("&", parts); + contents = String.join("&", parts); return HttpRequest.BodyPublishers.ofString(contents); } + /** + * Get the contents of the form data. + * + * @return the contents. + */ + public String getContents() { + if (contents == null) { + throw new IllegalStateException( + "Must have built the UrlencodedBodyBuilder, before calling getContents."); + } + + return contents; + } + /** * Add text field to the form. * diff --git a/std-bits/table/src/main/java/org/enso/table/excel/ExcelRow.java b/std-bits/table/src/main/java/org/enso/table/excel/ExcelRow.java index e99d62596b..5e2b25fe32 100644 --- a/std-bits/table/src/main/java/org/enso/table/excel/ExcelRow.java +++ b/std-bits/table/src/main/java/org/enso/table/excel/ExcelRow.java @@ -1,8 +1,14 @@ package org.enso.table.excel; -import java.time.LocalDateTime; +import java.time.LocalDate; import java.time.ZoneId; -import org.apache.poi.ss.usermodel.*; +import org.apache.poi.ss.usermodel.Cell; +import org.apache.poi.ss.usermodel.CellType; +import org.apache.poi.ss.usermodel.DataFormatter; +import org.apache.poi.ss.usermodel.DateUtil; +import org.apache.poi.ss.usermodel.ExcelNumberFormat; +import org.apache.poi.ss.usermodel.FormulaError; +import org.apache.poi.ss.usermodel.Row; import org.graalvm.polyglot.Context; /** Wrapper class to handle Excel rows. */ @@ -37,20 +43,21 @@ public class ExcelRow { switch (cellType) { case NUMERIC: double dblValue = cell.getNumericCellValue(); - if (DateUtil.isCellDateFormatted(cell) && DateUtil.isValidExcelDate(dblValue)) { - var dateTime = DateUtil.getLocalDateTime(dblValue); - if (dateTime.isBefore(LocalDateTime.of(1900, 1, 2, 0, 0))) { - // Excel stores times as if they are on the 1st January 1900. - // Due to the 1900 leap year bug might be 31st December 1899. 
- return dateTime.toLocalTime(); + var nf = ExcelNumberFormat.from(cell, null); + if (nf != null && DateUtil.isADateFormat(nf.getIdx(), nf.getFormat())) { + var temporal = ExcelUtils.fromExcelDateTime(dblValue); + if (temporal == null) { + return null; } - if (dateTime.getHour() == 0 && dateTime.getMinute() == 0 && dateTime.getSecond() == 0) { - var dateFormat = cell.getCellStyle().getDataFormatString(); - if (!dateFormat.contains("h") && !dateFormat.contains("H")) { - return dateTime.toLocalDate(); + return switch (temporal) { + case LocalDate date -> { + var dateFormat = cell.getCellStyle().getDataFormatString(); + yield (dateFormat.contains("h") || dateFormat.contains("H")) + ? date.atStartOfDay(ZoneId.systemDefault()) + : date; } - } - return dateTime.atZone(ZoneId.systemDefault()); + default -> temporal; + }; } else { if (dblValue == (long) dblValue) { return (long) dblValue; diff --git a/std-bits/table/src/main/java/org/enso/table/excel/ExcelUtils.java b/std-bits/table/src/main/java/org/enso/table/excel/ExcelUtils.java new file mode 100644 index 0000000000..6b1a432982 --- /dev/null +++ b/std-bits/table/src/main/java/org/enso/table/excel/ExcelUtils.java @@ -0,0 +1,86 @@ +package org.enso.table.excel; + +import java.time.*; +import java.time.temporal.ChronoUnit; +import java.time.temporal.Temporal; + +public class ExcelUtils { + // The epoch for Excel date-time values. Due to 1900-02-29 being a valid date + // in Excel, it is actually 1899-12-30. Excel dates are counted from 1 being + // 1900-01-01. + private static final LocalDate EPOCH_1900 = LocalDate.of(1899, 12, 30); + private static final long MILLIS_PER_DAY = 24 * 60 * 60 * 1000L; + + /** Converts an Excel date-time value to a {@link Temporal}. */ + public static Temporal fromExcelDateTime(double value) { + // Excel treats 1900-02-29 as a valid date, which it is not a valid date. 
+ if (value >= 60 && value < 61) { + return null; + } + + // For days before 1900-01-01, Stored as milliseconds before 1900-01-01. + long days = (long) value; + + // Extract the milliseconds part of the value. + long millis = (long) ((value - days) * MILLIS_PER_DAY + (value < 0 ? -0.5 : 0.5)); + if (millis < 0) { + millis += MILLIS_PER_DAY; + } + if (millis != 0 && value < 0) { + days--; + } + + // Excel stores times as 0 to 1. + if (days == 0) { + return LocalTime.ofNanoOfDay(millis * 1000000); + } + + int shift = 0; + if (days > 0 && days < 60) { + // Due to a bug in Excel, 1900-02-29 is treated as a valid date. + // So within the first two months of 1900, the epoch needs to be 1 day later. + shift = 1; + } else if (days < 0) { + // For days before 1900-01-01, Excel has no representation. + // 0 is 1900-01-00 in Excel. + // We make -1 as 1899-12-31, -2 as 1899-12-30, etc. + // This needs the shift to be 2 days later. + shift = 2; + } + LocalDate date = EPOCH_1900.plusDays(days + shift); + + return millis < 1000 + ? date + : date.atTime(LocalTime.ofNanoOfDay(millis * 1000000)).atZone(ZoneId.systemDefault()); + } + + /** Converts a {@link Temporal} to an Excel date-time value. */ + public static double toExcelDateTime(Temporal temporal) { + return switch (temporal) { + case ZonedDateTime zonedDateTime -> toExcelDateTime(zonedDateTime.toLocalDateTime()); + case LocalDateTime dateTime -> toExcelDateTime(dateTime.toLocalDate()) + + toExcelDateTime(dateTime.toLocalTime()); + case LocalDate date -> { + long days = ChronoUnit.DAYS.between(EPOCH_1900, date); + + if (date.getYear() == 1900 && date.getMonthValue() < 3) { + // Due to a bug in Excel, 1900-02-29 is treated as a valid date. + // So within the first two months of 1900, the epoch needs to be 1 day later. + days--; + } + if (date.getYear() < 1900) { + // For days before 1900-01-01, Excel has no representation. + // 0 is 1900-01-00 in Excel. + // We make -1 as 1899-12-31, -2 as 1899-12-30, etc. 
+ // This means the epoch needs to be 2 days later. + days -= 2; + } + + yield days; + } + case LocalTime time -> time.toNanoOfDay() / 1000000.0 / MILLIS_PER_DAY; + default -> throw new IllegalArgumentException( + "Unsupported Temporal type: " + temporal.getClass()); + }; + } +} diff --git a/std-bits/table/src/main/java/org/enso/table/write/ExcelWriter.java b/std-bits/table/src/main/java/org/enso/table/write/ExcelWriter.java index 29e7dea7fa..9378761216 100644 --- a/std-bits/table/src/main/java/org/enso/table/write/ExcelWriter.java +++ b/std-bits/table/src/main/java/org/enso/table/write/ExcelWriter.java @@ -1,8 +1,8 @@ package org.enso.table.write; import java.time.LocalDate; -import java.time.LocalDateTime; import java.time.LocalTime; +import java.time.ZonedDateTime; import java.util.Arrays; import java.util.function.Function; import org.apache.poi.ss.usermodel.Cell; @@ -14,7 +14,6 @@ import org.apache.poi.ss.usermodel.Sheet; import org.apache.poi.ss.usermodel.Workbook; import org.enso.table.data.column.storage.BoolStorage; import org.enso.table.data.column.storage.Storage; -import org.enso.table.data.column.storage.datetime.DateTimeStorage; import org.enso.table.data.column.storage.numeric.AbstractLongStorage; import org.enso.table.data.column.storage.numeric.DoubleStorage; import org.enso.table.data.table.Column; @@ -24,16 +23,11 @@ import org.enso.table.error.ColumnNameMismatchException; import org.enso.table.error.ExistingDataException; import org.enso.table.error.InvalidLocationException; import org.enso.table.error.RangeExceededException; -import org.enso.table.excel.ExcelHeaders; -import org.enso.table.excel.ExcelRange; -import org.enso.table.excel.ExcelRow; -import org.enso.table.excel.ExcelSheet; +import org.enso.table.excel.*; import org.enso.table.util.ColumnMapper; import org.enso.table.util.NameDeduplicator; public class ExcelWriter { - private static final double SECONDS_IN_A_DAY = 86400.0; - private static Function ensoToTextCallback; public static 
Function getEnsoToTextCallback() { @@ -499,9 +493,6 @@ public class ExcelWriter { cell.setCellValue(longStorage.getItem(j)); } else if (storage instanceof BoolStorage boolStorage) { cell.setCellValue(boolStorage.getItem(j)); - } else if (storage instanceof DateTimeStorage dateTimeStorage) { - cell.setCellValue(dateTimeStorage.getItem(j).toLocalDateTime()); - cell.setCellStyle(getDateTimeStyle(workbook, "yyyy-MM-dd HH:mm:ss")); } else { Object value = storage.getItemBoxed(j); switch (value) { @@ -509,16 +500,16 @@ public class ExcelWriter { case Boolean b -> cell.setCellValue(b); case Double d -> cell.setCellValue(d); case Long l -> cell.setCellValue(l); - case LocalDateTime ldt -> { - cell.setCellValue(ldt); + case ZonedDateTime zdt -> { + cell.setCellValue(ExcelUtils.toExcelDateTime(zdt)); cell.setCellStyle(getDateTimeStyle(workbook, "yyyy-MM-dd HH:mm:ss")); } case LocalDate ld -> { - cell.setCellValue(ld); + cell.setCellValue(ExcelUtils.toExcelDateTime(ld)); cell.setCellStyle(getDateTimeStyle(workbook, "yyyy-MM-dd")); } case LocalTime lt -> { - cell.setCellValue(lt.toSecondOfDay() / SECONDS_IN_A_DAY); + cell.setCellValue(ExcelUtils.toExcelDateTime(lt)); cell.setCellStyle(getDateTimeStyle(workbook, "HH:mm:ss")); } default -> { diff --git a/test/Base_Tests/src/Network/Http_Spec.enso b/test/Base_Tests/src/Network/Http_Spec.enso index c3a03d5a8c..bcf256f71f 100644 --- a/test/Base_Tests/src/Network/Http_Spec.enso +++ b/test/Base_Tests/src/Network/Http_Spec.enso @@ -10,7 +10,7 @@ import Standard.Base.Network.HTTP.Request_Body.Request_Body import Standard.Base.Network.HTTP.Request_Error import Standard.Base.Network.Proxy.Proxy import Standard.Base.Runtime.Context -from Standard.Base.Network.HTTP import resolve_headers +from Standard.Base.Network.HTTP import _resolve_headers, get_follow_redirects, get_proxy, get_timeout, get_version from Standard.Test import all from Standard.Test.Execution_Context_Helpers import run_with_and_without_output @@ -65,11 +65,11 @@ add_specs 
suite_builder = suite_builder.group "HTTP client" pending=pending_has_url group_builder-> group_builder.specify "should create HTTP client with timeout setting" <| http = HTTP.new (timeout = (Duration.new seconds=30)) - http.timeout.should_equal (Duration.new seconds=30) + (get_timeout http).should_equal (Duration.new seconds=30) group_builder.specify "should create HTTP client with follow_redirects setting" <| http = HTTP.new (follow_redirects = False) - http.follow_redirects.should_equal False + (get_follow_redirects http).should_equal False Test.with_retries <| r = http.request (Request.new HTTP_Method.Get base_url_with_slash+"test_redirect") @@ -80,12 +80,12 @@ add_specs suite_builder = group_builder.specify "should create HTTP client with proxy setting" <| proxy_setting = Proxy.Address "example.com" 80 http = HTTP.new (proxy = proxy_setting) - http.proxy.should_equal proxy_setting + (get_proxy http).should_equal proxy_setting group_builder.specify "should create HTTP client with version setting" <| version_setting = HTTP_Version.HTTP_2 http = HTTP.new (version = version_setting) - http.version.should_equal version_setting + (get_version http).should_equal version_setting url_get = base_url_with_slash.if_not_nothing <| base_url_with_slash + "get" suite_builder.group "fetch" pending=pending_has_url group_builder-> @@ -559,30 +559,30 @@ add_specs suite_builder = suite_builder.group "Header resolution" group_builder-> group_builder.specify "Default content type and encoding" <| expected = [Header.content_type "text/plain; charset=UTF-8"] - resolve_headers (Request.new HTTP_Method.Get "" [] (Request_Body.Text "")) . should_equal_ignoring_order expected + _resolve_headers (Request.new HTTP_Method.Get "" [] (Request_Body.Text "")) . 
should_equal_ignoring_order expected group_builder.specify "Content type specified in body" <| expected = [Header.content_type "application/json; charset=UTF-8"] - resolve_headers (Request.new HTTP_Method.Get "" [] (Request_Body.Text "" content_type="application/json")) . should_equal_ignoring_order expected + _resolve_headers (Request.new HTTP_Method.Get "" [] (Request_Body.Text "" content_type="application/json")) . should_equal_ignoring_order expected group_builder.specify "Content type specified in header list" <| expected = [Header.content_type "application/json"] - resolve_headers (Request.new HTTP_Method.Get "" [Header.content_type "application/json"] (Request_Body.Text "")) . should_equal_ignoring_order expected + _resolve_headers (Request.new HTTP_Method.Get "" [Header.content_type "application/json"] (Request_Body.Text "")) . should_equal_ignoring_order expected group_builder.specify "Text encoding specified in body" <| expected = [Header.content_type "text/plain; charset=UTF-16LE"] - resolve_headers (Request.new HTTP_Method.Get "" [] (Request_Body.Text "" encoding=Encoding.utf_16_le)) . should_equal_ignoring_order expected + _resolve_headers (Request.new HTTP_Method.Get "" [] (Request_Body.Text "" encoding=Encoding.utf_16_le)) . should_equal_ignoring_order expected group_builder.specify "Can't specify content type in both places" <| - resolve_headers (Request.new HTTP_Method.Get "" [Header.content_type "application/json"] (Request_Body.Text "" content_type="text/plain")) . should_fail_with Illegal_Argument + _resolve_headers (Request.new HTTP_Method.Get "" [Header.content_type "application/json"] (Request_Body.Text "" content_type="text/plain")) . should_fail_with Illegal_Argument group_builder.specify "Custom header" <| expected = [Header.new "some" "header", Header.content_type "application/json; charset=UTF-8"] - resolve_headers (Request.new HTTP_Method.Get "" [Header.new "some" "header"] (Request_Body.Text "" content_type="application/json")) . 
should_equal_ignoring_order expected + _resolve_headers (Request.new HTTP_Method.Get "" [Header.new "some" "header"] (Request_Body.Text "" content_type="application/json")) . should_equal_ignoring_order expected group_builder.specify "Multiple content types in header list are ok" <| expected = [Header.content_type "application/json", Header.content_type "text/plain"] - resolve_headers (Request.new HTTP_Method.Get "" [Header.content_type "application/json", Header.content_type "text/plain"] (Request_Body.Text "")) . should_equal_ignoring_order expected + _resolve_headers (Request.new HTTP_Method.Get "" [Header.content_type "application/json", Header.content_type "text/plain"] (Request_Body.Text "")) . should_equal_ignoring_order expected suite_builder.group "Http Error handling" group_builder-> group_builder.specify "should be able to handle request errors" <| diff --git a/test/Table_Tests/data/OlderDates.xlsx b/test/Table_Tests/data/OlderDates.xlsx new file mode 100644 index 0000000000000000000000000000000000000000..c73b322331a3f6b03191160b97a493e7b8cf856a GIT binary patch literal 9123 zcmeHtgEiprPHv=QxAwzddOCupI-HJ4*l(e*zbV@oPsnmD$zW3+7 z-23|r-gkfI>^T|02P1^003wKx$KTm2Lu2B9|Zs)0-z&8 zq@hmkR!;6FIzG-;ZbqEmPaJ6qkP%t(0f_MH|JVMDpFq{~PPJ}s>(&-hWmQsp#hT=ox9!cIOm#*UZ#a-Yjifm~js>IY zi4-4HSi*+BT;*Cl2-ocr!eX39dNX(O!~V^JUq|d4jO&oArThtS%nox_^Ej)-vmy7t$cnHS z>Dplo4ya1sV;Muk$8-pu&7iB6rLVu<5?jv;O)9klax3_~XISVa*a<>X9~~k{R!N}k zeTP8Ny02J2+CqZx34+Z}`V{Hv)7RC+u6`d?4qj&hwbn0J<QeXx5tE6Y0+$0xCAVut(thnM8k9@GMVm=3xO&~*A^|l1#?b~{Zu%4Wj0&8BSa6P- zxLP^7adH0a|HsY$VnY7q)~k|L)&Jn;Rb>D9)M6sOl(LtkLL044V6fr>UPD|V1NmY* zGX=g5*;6F>pm%{+gY%2PxZQr*vo-#zXN1I}^o?HC5$Qi%JkVH~T+`%Ss#ZS`cut*8 zoxPA(@?r6Om%v)tT=r6VV3}TS@<^r@f0R?741=hcB7y|;BE+y))nLi|q6T3~O8cNX z;&s!b{N0q%te}~+k{ulJC}EZTi5K_!-OO#~YW({hX-~hC>1x{w+r2i=brqrWF|l;) zIF`w3C%X3MP|NPuqT?sL;2o9kW6VDdGOXu68p`qc@Cc*3vj1=(^ci*865L(=lO)IH zTb%SraDu@78Xo+~csp`=L0uipp-_jPPOM1x2{elv|3=L27U8>S50eCu4(}#TwU(%^ z-L#Wd(I84G80DkmWKrhXvR6bR=PPn1df+261Umsk3|b$k 
z%(2(PV}2myD0{JfqQO=-;AMEZPJC51D@eJ;+Ad7ZjAhypDm6XTyd?}?6mvoAC;htN z&u9i{px(?JNVXRwC`K)RQr@nwhjYe2L}TtS68PycuaGDWEjpTEX4r|bk!FrT!$*34 zjC40WjE3w~C3c^|G%`8^&R7;0$zes>!zuUgkt91W4{xR8_<07LX%ILD&aORpgDbH` zdyMStIxoMUAifDy&C5sC*m3uh8KoU4^3rGGW$+TeoPEY~_Sla&)?&Up7kd7#v;4r< zn>!v>~y03tbM?jLx>Y?uieMdl;~6Gh`>(Nd&9YYU8I2XU?09ie+DpIyZI-o7 zn5^FH0q}1{sHZMX}5l`@8mkA<|w^P$`&#Wtl+sZp}Bfb4uzP)PLhsGTQTf zWH{5r;hZ7^pd-Nf^oQU1E35wWK?v~p5Pt1{cdJSsvj4!1Eqfk*^K|^Z3vYl%gp2jK zR>qU8z+mU+LMrBHkbn)CgORo9dfyopf(8u3ZuNUad@=S?&*FB3*?9oj1F`9Y%B<4K!XB)o@Xy(lsv@0s)t zA#I782-6;UPMkhkx{9MUDT*7(ZpOP)1SeO$>4Lb z93H9zjwMpDN$t$ez*I#HC-@H}B&o;`&uog1p$%TVa5uVD&2X3FaFm_xz+%13DB>5d zdxPcpLlW!~5x)7xifcFKN;Q54dD-qk@{n`pr98>;OLgM|b0=S}@CbeK7UR_j%J-rC zZ84^mR5(!c#UG~m&xApJQ-}}-c4f<#<^AdPk4t2oNr8bFvn^iBNj8-*nh-yU)Q8bh zPhZ8(A@|+qXOx^=!B33!s|{yPBpeWC{m#cUz(pptqBwf#INF`m;c9OS=DJQvYL`8L ze9Y|W;crvtL#AS0{`i$VcC*;WKa%LJ*HT#>fxck9wE=b7icl_w4jDaQZ$r*JTu4qS ztUC@gd}i^);=KWIs8qU>$qd68y#I=J5x^SiWr8!j@ zCJ6=H#69hpJA2ksY2xuRiuu6(8E)K~V{Sfjm&HYBJ4X}EyMEdipP1=Ek!SLL!ZQJW zcA1cXx0KJWPmk5zLFf+)&Ss7Z1(v_b$OKur0ykgX?v{WAsGFY7R+($=t9#no^?70* z`#A)@yPtLa!+3v&`B6aufs#x&`cA@_Hr{4JnVk&qTT~>01K*z6?v~(sxyeV6RqN*x zI+1H9XdX;Fvng*eK|Q^A!pvxTn`U1G)^i@l5zWc8wblZe+Vp;6EbIPc1WmL2ZSo7v zc2bQ3mSL@tQ5F6dv!ZIM^@AQzI$6zfD6cUaAI_T?eZ?Py31Sb|WtZl(VTPL?uxgo? zkr;^j3AH}hW)C3r=0HY`UZ01L!p4E&6ZjAGMsrc;RB};4!5)1#wUAb$N=05XD5F-Q z3?G%?i0MvYH{>EQx^I-EQnTgOceq|IV0a^cn+&l>I0?}uxspF= zZo7Mk#(+ktqsyWmr>CE671XN+k;RmW&r{c;akY=8Frabm=%PO-jL8=0+5o4%QV8ik zI!Ann-{Sq}Toab;2$`8Ha3v8j$pTXqQ-;%Q!&{La4cTd_bA7ref(E794GtYB&qjV} zL3qFrWz^a+VnE}4OdK@mI z%2PAuR1Jy|Z#5UPdaf2ZQ5@v{r@fXYoJ~Y?ZzJ)z!J@R1;^RmgE()9RKyJ* z3q;OvUevjNe}aeE2Zk&LL)~>iq%=nn)EAjx96{h(_3JQvfe7-ft|siq6u-(_p6-@c3nBF-A-T5bjwW}CEcNM z1vyiuZ~0X$k+;cO#4??g3i7oy=POO7h22Nqf~*Q70SPIH3r>7WsVC}WP3hSFZj24I z=H`_-6JmnzG5DooPV9%nyyRyGsnX=ce{8ljtoCNxR3@! 
z(?u1upPx8WIhC9)k@N%40!(sMnN!9(AvesRk*wq6)7jGmn)RU1-RdPa$IfdP@`;_1 z0k>ImJwnch=)4yOc;Y!ZCRH}LRFb4a-&OG_k=lMSPB%va;8@CRE@&s*Q&m8I=fmXM#ZUgG$a0oyfL4^-nc}{1P^pnf-)o!uED=V{ znzstczyj}y0wh5q|tVZ zIcLwm>vw~irk~Xm;ev8vw)oxo^GTdG7rHz zW1`8W@8!Xm=g|rnZYw4Q?5T;nq5U5jC}jJ}B$L=!fob>IOd^*(k=Dy-`#FS=B`;oY zkkGJE$59Y3a`37Wl09tOPeUoCMU+^L)5E@bHe`Z3%{ByKITw5ed&lO<7D#AYd&n|| z5$jfN`YFS)x8-$M)n0o}r_RvVjyO%2EI2jEw_w)XX@#YLgVLW*a{c*JWRZu?%X^aE zsk;c5yXBWBWz*x}g1KQ8dEJvw6eN@90 z7XgnEX>C_LPs_Fm81D`k#Q5du{TSg=sc2>rrZ z%G%Ln@UQKfaO@cyKfJ!T{&Am@M6MvW-AK83Am(XB0FYK-cU^_KYzB?i=?qIabjr}9 zLLfbuyrR)c;96_ZKnP1QKniiAl(xt20i#Xh`HyIY6pcwV>z!^ZPOk#O66uD|irOoc z>%`yKpyVq;S*9BXEt)tNy^J_yUy?zH%EJNWenW@|_lz@~3xuoNK9XS=w~K1^lT={;E%{FM-tq7}xJPbS_vEnc*wQp5Q+`XNbAG zI+RTjVOz+5iEaMmOelh)Dd4i1D><*hx(uBA8Ybu0PQ+Hh=5-CG4w$!a*{Yc1N?Wy` za~pEj#B>qHyr~-#N9B2gQr!WoRu)J0ig)SW(C?z+&boQ7Py;6OYilJIRSIFS zw^jCBSfIX-<-dC?N#01*vG95i$xowKlKA3Nkj*h@#1S~Y^mZ>_Mf#a%=Dmx&`JT$T zZ;eW!cJ&#kDQuajb?&v;69dEMEmhWK+^$fmg62|@{%6+dO8{CCrvYriX~Hf2Y9pCohDf+cXGv!81chh_LqtE zzPmeR7zyT)=3azx0}g$o6-T_oe%!uwC^Ts$1O8_s^Kg=h9AWLVWX{?tCgj; zyQ`g(%`Z|q#t&h3apTLLhu-2_zbeO*VTi`+DQ5Ga{ydJFsd%6wd%F}BMZ-DTLeXp; zkPz2u)<>HZKF|yZ*C}Bg>*+drP0k@%>s{fWg3bXSkwc`aoiz9fhJaDf*N_;CGJr?)FKmb$$+ zB;fPhc}isqxLUZbCP(d^=*-pi!KqW<3zJyU7iO-FTg%hO>#Jwh%b72ya&{=OP`@}V zaSUx1SNPA*jbBW;*K+!Q&)6DgU-b~vzu|?reamSW64J>!^@YSqcHmv64GW~3cNSpezwL6LwyGNwCHVqjl3>`Id{mA*~WST{(+R96I8h1JpB>z05!E z$poe)@K@&V+(QvTd_(EOE-O~r%32gM7BG@^Z@7k@X73n`xnh}UzQPX(2dP%Sq-N@A zmef+q?`oqhdRg~FdsmU z!D8AD|M?cH)Uk!5p?yP!u0Jw+s5$1-G3CNieVz@ty`xq%8VsIV$C)Xe1 zo%RO6XKvtiC;Y!^n3=Ql{}c?aS^sI-$wTnMDBPSYTuE<$JTTE;4)RC=KOo8!lYq%> zCcw>}8>9tkuWXW|k`dWFkDFV?-Z!?+Rs~E;N!^#GElGK7iSNj61irUs=i}$j$;&{b zv~CeB<%rM|E(X*%CMH+d0Pr=UgQGIbUMNO4Ml^I{yh4IjWK2gitFn<+4jMGap9SEA zY`Em+DbC`D5e^nw+pwVa87SWha1t~)DmfYwj@(ri_^{t1jTEetk5C++>Tjh4dM~#fMyL<-8k2{<+CPi5*UUm@?0Yxl`AEZy z0`e_qK=(-(<{;NFO!ja>E0)LK0@F4pywb#4AyA`P4&;fXc{Z2jVF8J^1_TNwdfD|6q|NoQk zcjeqox&D@9g#G{i#6J?Scct7-5&V{NOz^J>gS!Im?n!NixA 
z^bhFWaO$pvyD`dd3EX7AB>XL6xr_e0U;T{*09wfbfPZ+|yYRmo&A-AasQv=~j|tUK VMuF?b&$2!i;62>Tj?(-*`#(+&niBv3 literal 0 HcmV?d00001 diff --git a/test/Table_Tests/src/IO/Excel_Spec.enso b/test/Table_Tests/src/IO/Excel_Spec.enso index f8821df659..63f66f552b 100644 --- a/test/Table_Tests/src/IO/Excel_Spec.enso +++ b/test/Table_Tests/src/IO/Excel_Spec.enso @@ -9,6 +9,7 @@ import Standard.Base.Runtime.Managed_Resource.Managed_Resource import Standard.Base.Runtime.Ref.Ref from Standard.Table import Table, Match_Columns, Excel_Format, Excel_Range, Data_Formatter, Delimited_Format, Excel_Workbook, Value_Type +from Standard.Table.Extensions.Excel_Extensions import all from Standard.Table.Errors import Invalid_Column_Names, Duplicate_Output_Column_Names, Invalid_Location, Range_Exceeded, Existing_Data, Column_Count_Mismatch, Column_Name_Mismatch, Empty_Sheet @@ -1025,6 +1026,44 @@ add_specs suite_builder = table.at "InvMixDates" . to_vector . should_equal [(Date_Time.new 1997 7 25), (Date_Time.new 1993 5 3 10 58 45 millisecond=980), (Date_Time.new 2010 8 1 17 9 29 millisecond=923), (Date_Time.new 1988 7 12 12 39 20 millisecond=185), (Date_Time.new 2009 10 22)] table.at "Mixed" . to_vector . should_equal [(Date.new 1997 7 25), (Date_Time.new 1993 5 3 10 58 45 millisecond=980), (Date_Time.new 2010 8 1 17 9 29 millisecond=923), (Time_Of_Day.new 18 39 24 millisecond=572), 85] + group_builder.specify "should be able to read dates before 1900-01-01 (treating 1899-12-31 as -1)" <| + table = (enso_project.data / "OlderDates.xlsx") . read ..Sheet + table.row_count . should_equal 15 + table.column_names . should_equal ["Num","Date","Date13","NumVal"] + table.at "Date" . to_vector . 
should_equal [(Date.new 1899 12 28),(Date.new 1899 12 29),(Date.new 1899 12 30),(Date.new 1899 12 31),(Date.new 1900 1 1),(Date.new 1900 1 2),(Date.new 1900 1 3),(Date.new 1900 1 4),(Date.new 1900 1 5),(Date.new 1900 2 28),Nothing,(Date.new 1900 3 1),(Date.new 1900 3 2),(Date.new 1900 3 3),(Date.new 1900 3 4)] + table.at "Date13" . to_vector . should_equal [(Date_Time.new 1899 12 28 hour=13),(Date_Time.new 1899 12 29 hour=13),(Date_Time.new 1899 12 30 hour=13),(Date_Time.new 1899 12 31 hour=13),(Date_Time.new 1900 1 1 hour=13),(Date_Time.new 1900 1 2 hour=13),(Date_Time.new 1900 1 3 hour=13),(Date_Time.new 1900 1 4 hour=13),(Date_Time.new 1900 1 5 hour=13),(Date_Time.new 1900 2 28 hour=13),Nothing,(Date_Time.new 1900 3 1 hour=13),(Date_Time.new 1900 3 2 hour=13),(Date_Time.new 1900 3 3 hour=13),(Date_Time.new 1900 3 4 hour=13)] + + group_builder.specify "should be able to write and then read dates before 1900-01-01 (treating 1899-12-31 as -1)" <| + numbers = [-100,-1,1,2,50,61,100] + dates = numbers.map n-> (Date.new 1899 12 31).date_add n ..Day + date_times = dates.map d-> d.to_date_time (Time_Of_Day.new 12 34 56) + table = Table.new [["Num", numbers], ["Date", dates], ["DateTimes", date_times]] + file = enso_project.data / "transient" / "TestOlderDates.xlsx" + file.delete_if_exists . should_succeed + table.write file . should_succeed + read_table = file.read ..Sheet + read_table.should_equal table + + group_builder.specify "should be able to convert Excel dates from Integer to Date" <| + numbers = [-100,-1,1,2,50,61,100] + dates = [Date.new 1899 09 23, Date.new 1899 12 31, Date.new 1900 01 01, Date.new 1900 01 02, Date.new 1900 02 19, Date.new 1900 03 01, Date.new 1900 04 09] + parsed = numbers.map n-> Date.from_excel n + parsed . should_equal dates + + Date.from_excel 0 . should_fail_with Illegal_Argument + Date.from_excel 60 . 
should_fail_with Illegal_Argument + + group_builder.specify "should be able to convert Excel dates from Integer to Date" <| + numbers = [-100,-1,1,2,50,61,100] + dates = [Date.new 1899 09 23, Date.new 1899 12 31, Date.new 1900 01 01, Date.new 1900 01 02, Date.new 1900 02 19, Date.new 1900 03 01, Date.new 1900 04 09] + numbers_2 = numbers.map_with_index i->n-> n+(1+i*2)/24 + date_times = dates.map_with_index i->d-> (d.to_date_time (Time_Of_Day.new hour=1+i*2)) + parsed = numbers_2.map n-> Date_Time.from_excel n + parsed . should_equal date_times + + Date_Time.from_excel 0.25 . should_fail_with Illegal_Argument + Date_Time.from_excel 60.45 . should_fail_with Illegal_Argument + ci_pending = if Environment.get "CI" != Nothing then "This test takes a lot of time so it is disabled on CI." group_builder.specify "should be able to write and read a big XLSX file (>110MB)" pending=ci_pending <| n = 10^6 From 71acb83e7f0c6ef28e955b8f6d4920a0b675e7f0 Mon Sep 17 00:00:00 2001 From: Jaroslav Tulach Date: Tue, 29 Oct 2024 11:23:22 +0100 Subject: [PATCH 04/43] Removing useless `GraphOccurrence.Global` & some encapsulation (#11419) Work on #11365 made me realize that `GraphOccurrence.Global` isn't really used anywhere. Simplifying the code by removing it and associated methods. 
--- .../inline/InlineCompilerBenchmark.java | 4 +- .../inline/InlineCompilerErrorBenchmark.java | 4 +- .../inline/InlineContextResource.java | 23 --- .../inline/InlineContextResourceFactory.java | 15 +- .../benchmarks/inline/InlineSource.java | 2 +- .../pass/analyse/PassPersistance.java | 1 - .../enso/compiler/context/InlineContext.scala | 8 +- .../compiler/pass/analyse/AliasAnalysis.scala | 2 - .../pass/analyse/alias/graph/Graph.scala | 176 +++++++----------- .../analyse/alias/graph/GraphOccurrence.scala | 13 +- .../analyse/test}/AliasAnalysisTest.scala | 2 +- .../test}/FramePointerAnalysisTest.scala | 2 +- 12 files changed, 86 insertions(+), 166 deletions(-) delete mode 100644 engine/runtime-benchmarks/src/main/java/org/enso/compiler/benchmarks/inline/InlineContextResource.java rename engine/runtime-integration-tests/src/test/scala/org/enso/compiler/{test/pass/analyse => pass/analyse/test}/AliasAnalysisTest.scala (99%) rename engine/runtime-integration-tests/src/test/scala/org/enso/compiler/{test/pass/analyse => pass/analyse/test}/FramePointerAnalysisTest.scala (99%) diff --git a/engine/runtime-benchmarks/src/main/java/org/enso/compiler/benchmarks/inline/InlineCompilerBenchmark.java b/engine/runtime-benchmarks/src/main/java/org/enso/compiler/benchmarks/inline/InlineCompilerBenchmark.java index dcf60186d5..5496f5c49a 100644 --- a/engine/runtime-benchmarks/src/main/java/org/enso/compiler/benchmarks/inline/InlineCompilerBenchmark.java +++ b/engine/runtime-benchmarks/src/main/java/org/enso/compiler/benchmarks/inline/InlineCompilerBenchmark.java @@ -75,8 +75,8 @@ public class InlineCompilerBenchmark { @Benchmark public void longExpression(Blackhole blackhole) throws IOException { - try (InlineContextResource resource = inlineSource.inlineContextFactory().create()) { - var tuppleOpt = compiler.runInline(longExpression, resource.inlineContext()); + try (var inlineCtx = inlineSource.inlineContextFactory().create()) { + var tuppleOpt = compiler.runInline(longExpression, 
inlineCtx); if (tuppleOpt.isEmpty()) { throw new AssertionError("Unexpected: inline compilation should succeed"); } diff --git a/engine/runtime-benchmarks/src/main/java/org/enso/compiler/benchmarks/inline/InlineCompilerErrorBenchmark.java b/engine/runtime-benchmarks/src/main/java/org/enso/compiler/benchmarks/inline/InlineCompilerErrorBenchmark.java index f0bfde5c07..fc640c088c 100644 --- a/engine/runtime-benchmarks/src/main/java/org/enso/compiler/benchmarks/inline/InlineCompilerErrorBenchmark.java +++ b/engine/runtime-benchmarks/src/main/java/org/enso/compiler/benchmarks/inline/InlineCompilerErrorBenchmark.java @@ -80,8 +80,8 @@ public class InlineCompilerErrorBenchmark { @Benchmark public void expressionWithErrors(Blackhole blackhole) throws IOException { - try (InlineContextResource resource = mainInlineContextResourceFactory.create()) { - var tuppleOpt = compiler.runInline(expressionWithErrors, resource.inlineContext()); + try (var inlineCtx = mainInlineContextResourceFactory.create()) { + var tuppleOpt = compiler.runInline(expressionWithErrors, inlineCtx); blackhole.consume(tuppleOpt); } } diff --git a/engine/runtime-benchmarks/src/main/java/org/enso/compiler/benchmarks/inline/InlineContextResource.java b/engine/runtime-benchmarks/src/main/java/org/enso/compiler/benchmarks/inline/InlineContextResource.java deleted file mode 100644 index 9144a01d38..0000000000 --- a/engine/runtime-benchmarks/src/main/java/org/enso/compiler/benchmarks/inline/InlineContextResource.java +++ /dev/null @@ -1,23 +0,0 @@ -package org.enso.compiler.benchmarks.inline; - -import java.io.IOException; -import org.enso.compiler.context.InlineContext; - -/** - * InlineContextResource ensures that the underlying InlineContext is properly cleaned up after - * usage. 
- * - * @param inlineContext InlineContext for the main method - */ -public record InlineContextResource(InlineContext inlineContext) implements AutoCloseable { - @Override - public void close() throws IOException { - inlineContext - .localScope() - .foreach( - s -> { - s.scope().removeScopeFromParent(); - return null; - }); - } -} diff --git a/engine/runtime-benchmarks/src/main/java/org/enso/compiler/benchmarks/inline/InlineContextResourceFactory.java b/engine/runtime-benchmarks/src/main/java/org/enso/compiler/benchmarks/inline/InlineContextResourceFactory.java index bf3c997f18..ed358d55ff 100644 --- a/engine/runtime-benchmarks/src/main/java/org/enso/compiler/benchmarks/inline/InlineContextResourceFactory.java +++ b/engine/runtime-benchmarks/src/main/java/org/enso/compiler/benchmarks/inline/InlineContextResourceFactory.java @@ -13,16 +13,15 @@ public record InlineContextResourceFactory( EnsoContext ensoCtx, PackageRepository pkgRepository) { - public InlineContextResource create() { + public InlineContext create() { var mainFunc = moduleScope.getMethodForType(assocTypeReceiver, "main"); var mainFuncRootNode = (MethodRootNode) mainFunc.getCallTarget().getRootNode(); var mainLocalScope = mainFuncRootNode.getLocalScope(); - return new InlineContextResource( - InlineContext.fromJava( - mainLocalScope.createChild(), - moduleScope.getModule().asCompilerModule(), - scala.Option.apply(false), - ensoCtx.getCompilerConfig(), - scala.Option.apply(pkgRepository))); + return InlineContext.fromJava( + mainLocalScope.createChild(), + moduleScope.getModule().asCompilerModule(), + scala.Option.apply(false), + ensoCtx.getCompilerConfig(), + scala.Option.apply(pkgRepository)); } } diff --git a/engine/runtime-benchmarks/src/main/java/org/enso/compiler/benchmarks/inline/InlineSource.java b/engine/runtime-benchmarks/src/main/java/org/enso/compiler/benchmarks/inline/InlineSource.java index 33892531aa..a4f2e6a646 100644 --- 
a/engine/runtime-benchmarks/src/main/java/org/enso/compiler/benchmarks/inline/InlineSource.java +++ b/engine/runtime-benchmarks/src/main/java/org/enso/compiler/benchmarks/inline/InlineSource.java @@ -4,7 +4,7 @@ import java.util.Set; record InlineSource( String source, - // InlineContextResource for the main method + // InlineContext for the main method InlineContextResourceFactory inlineContextFactory, // Local variables in main method Set localVarNames) {} diff --git a/engine/runtime-compiler/src/main/java/org/enso/compiler/pass/analyse/PassPersistance.java b/engine/runtime-compiler/src/main/java/org/enso/compiler/pass/analyse/PassPersistance.java index c88bbc586a..c4634ba8bb 100644 --- a/engine/runtime-compiler/src/main/java/org/enso/compiler/pass/analyse/PassPersistance.java +++ b/engine/runtime-compiler/src/main/java/org/enso/compiler/pass/analyse/PassPersistance.java @@ -143,7 +143,6 @@ public final class PassPersistance { var nextIdCounter = in.readInt(); var g = new Graph(rootScope, nextIdCounter, links); - g.freeze(); return g; } diff --git a/engine/runtime-compiler/src/main/scala/org/enso/compiler/context/InlineContext.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/context/InlineContext.scala index 3599e6198f..af96f97820 100644 --- a/engine/runtime-compiler/src/main/scala/org/enso/compiler/context/InlineContext.scala +++ b/engine/runtime-compiler/src/main/scala/org/enso/compiler/context/InlineContext.scala @@ -25,9 +25,15 @@ case class InlineContext( freshNameSupply: Option[FreshNameSupply] = None, passConfiguration: Option[PassConfiguration] = None, pkgRepo: Option[PackageRepository] = None -) { +) extends AutoCloseable { def bindingsAnalysis(): BindingsMap = module.bindingsAnalysis() def getModule() = module.module + + def close(): Unit = { + this.localScope + .foreach(_.scope.removeScopeFromParent()) + } + } object InlineContext { diff --git a/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/analyse/AliasAnalysis.scala 
b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/analyse/AliasAnalysis.scala index a5d465d5ee..e851b84b18 100644 --- a/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/analyse/AliasAnalysis.scala +++ b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/analyse/AliasAnalysis.scala @@ -784,8 +784,6 @@ case object AliasAnalysis extends IRPass { parentScope.add(occurrence) if (!isConstructorNameInPatternContext && !name.isMethod) { graph.resolveLocalUsage(occurrence) - } else { - graph.resolveGlobalUsage(occurrence) } } name.updateMetadata( diff --git a/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/analyse/alias/graph/Graph.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/analyse/alias/graph/Graph.scala index 21abf4d890..6292a94047 100644 --- a/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/analyse/alias/graph/Graph.scala +++ b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/analyse/alias/graph/Graph.scala @@ -1,4 +1,6 @@ -package org.enso.compiler.pass.analyse.alias.graph +package org.enso.compiler +package pass.analyse +package alias.graph import org.enso.compiler.core.CompilerError import org.enso.compiler.debug.Debug @@ -7,68 +9,53 @@ import org.enso.compiler.pass.analyse.alias.graph.Graph.Scope import scala.collection.immutable.HashMap import scala.collection.mutable import scala.reflect.ClassTag +import scala.annotation.unused /** A graph containing aliasing information for a given root scope in Enso. 
*/ sealed class Graph( val rootScope: Graph.Scope = new Graph.Scope(), private var _nextIdCounter: Int = 0, private var links: Set[Graph.Link] = Set() -) extends Serializable { +) { private var sourceLinks: Map[Graph.Id, Set[Graph.Link]] = new HashMap() private var targetLinks: Map[Graph.Id, Set[Graph.Link]] = new HashMap() - private var frozen: Boolean = false { links.foreach(addSourceTargetLink) } - private var globalSymbols: Map[Graph.Symbol, GraphOccurrence.Global] = - Map() - /** @return the next counter value */ def nextIdCounter: Int = _nextIdCounter /** @return a deep structural copy of `this` */ - def deepCopy( + final def deepCopy( scope_mapping: mutable.Map[Scope, Scope] = mutable.Map() ): Graph = { val copy = new Graph( this.rootScope.deepCopy(scope_mapping), - this.nextIdCounter + this._nextIdCounter ) - copy.links = this.links - copy.sourceLinks = this.sourceLinks - copy.targetLinks = this.targetLinks - copy.globalSymbols = this.globalSymbols + copy.links = this.links + copy.sourceLinks = this.sourceLinks + copy.targetLinks = this.targetLinks copy } - def getLinks(): Set[Graph.Link] = links + private[analyse] def getLinks(): Set[Graph.Link] = links - def freeze(): Unit = { - frozen = true - } - - /** Registers a requested global symbol in the aliasing scope. - * - * @param sym the symbol occurrence - */ - def addGlobalSymbol(sym: GraphOccurrence.Global): Unit = { - org.enso.common.Asserts.assertInJvm(!frozen) - if (!globalSymbols.contains(sym.symbol)) { - globalSymbols = globalSymbols + (sym.symbol -> sym) - } + final def freeze(): Unit = { + _nextIdCounter = -1 } /** Creates a deep copy of the aliasing graph structure. 
* * @return a copy of the graph structure */ - def copy: Graph = { + private[analyse] def copy: Graph = { val graph = new Graph( rootScope.deepCopy(mutable.Map()), - nextIdCounter + _nextIdCounter ) graph.links = links graph.sourceLinks = sourceLinks @@ -93,8 +80,11 @@ sealed class Graph( * * @return a unique identifier for this graph */ - def nextId(): Graph.Id = { + final def nextId(): Graph.Id = { val nextId = _nextIdCounter + if (nextId < 0) { + throw new IllegalStateException("Cannot emit new IDs. Frozen!") + } _nextIdCounter += 1 nextId } @@ -105,7 +95,7 @@ sealed class Graph( * @param occurrence the symbol usage * @return the link, if it exists */ - def resolveLocalUsage( + final def resolveLocalUsage( occurrence: GraphOccurrence.Use ): Option[Graph.Link] = { scopeFor(occurrence.id).flatMap(_.resolveUsage(occurrence).map { link => @@ -126,24 +116,6 @@ sealed class Graph( ) } - /** Resolves any links for the given usage of a symbol, assuming the symbol - * is global (i.e. method, constructor etc.) - * - * @param occurrence the symbol usage - * @return the link, if it exists - */ - def resolveGlobalUsage( - occurrence: GraphOccurrence.Use - ): Option[Graph.Link] = { - scopeFor(occurrence.id) match { - case Some(scope) => - globalSymbols - .get(occurrence.symbol) - .map(g => Graph.Link(occurrence.id, scope.scopesToRoot + 1, g.id)) - case None => None - } - } - /** Returns a string representation of the graph. 
* * @return a string representation of `this` @@ -155,7 +127,7 @@ sealed class Graph( * * @return a pretty-printed string representation of the graph */ - def pprint: String = { + @unused private def pretty: String = { val original = toString Debug.pretty(original) } @@ -165,7 +137,7 @@ sealed class Graph( * @param id the identifier for the symbol * @return a list of links in which `id` occurs */ - def linksFor(id: Graph.Id): Set[Graph.Link] = { + final def linksFor(id: Graph.Id): Set[Graph.Link] = { sourceLinks.getOrElse(id, Set.empty[Graph.Link]) ++ targetLinks.getOrElse( id, Set() @@ -179,7 +151,7 @@ sealed class Graph( * @tparam T the role in which `symbol` should occur * @return a set of all links in which `symbol` occurs with role `T` */ - def linksFor[T <: GraphOccurrence: ClassTag]( + private[analyse] def linksFor[T <: GraphOccurrence: ClassTag]( symbol: Graph.Symbol ): Set[Graph.Link] = { val idsForSym = rootScope.symbolToIds[T](symbol) @@ -195,7 +167,7 @@ sealed class Graph( * @param id the occurrence identifier * @return the occurrence for `id`, if it exists */ - def getOccurrence(id: Graph.Id): Option[GraphOccurrence] = + final def getOccurrence(id: Graph.Id): Option[GraphOccurrence] = scopeFor(id).flatMap(_.getOccurrence(id)) /** Gets the link from an id to the definition of the symbol it represents. 
@@ -203,7 +175,7 @@ sealed class Graph( * @param id the identifier to find the definition link for * @return the definition link for `id` if it exists */ - def defLinkFor(id: Graph.Id): Option[Graph.Link] = { + final def defLinkFor(id: Graph.Id): Option[Graph.Link] = { linksFor(id).find { edge => val occ = getOccurrence(edge.target) occ match { @@ -218,7 +190,7 @@ sealed class Graph( * @param id the id to find the scope for * @return the scope where `id` occurs */ - def scopeFor(id: Graph.Id): Option[Graph.Scope] = { + final def scopeFor(id: Graph.Id): Option[Graph.Scope] = { rootScope.scopeFor(id) } @@ -228,7 +200,7 @@ sealed class Graph( * @tparam T the role in which `symbol` occurs * @return all the scopes where `symbol` occurs with role `T` */ - def scopesFor[T <: GraphOccurrence: ClassTag]( + private[analyse] def scopesFor[T <: GraphOccurrence: ClassTag]( symbol: Graph.Symbol ): List[Graph.Scope] = { rootScope.scopesForSymbol[T](symbol) @@ -239,7 +211,7 @@ sealed class Graph( * @return the number of scopes that are either this scope or children of * it */ - def numScopes: Int = { + private[analyse] def numScopes: Int = { rootScope.scopeCount } @@ -247,7 +219,7 @@ sealed class Graph( * * @return the maximum nesting depth of scopes through this scope. 
*/ - def nesting: Int = { + private[analyse] def nesting: Int = { rootScope.maxNesting } @@ -256,7 +228,7 @@ sealed class Graph( * @param id the occurrence identifier * @return `true` if `id` shadows other bindings, otherwise `false` */ - def canShadow(id: Graph.Id): Boolean = { + private[analyse] def canShadow(id: Graph.Id): Boolean = { scopeFor(id) .flatMap( _.getOccurrence(id).flatMap { @@ -277,15 +249,14 @@ sealed class Graph( * @param definition the definition to find the 'shadowees' of * @return the bindings shadowed by `definition` */ - def knownShadowedDefinitions( + final def knownShadowedDefinitions( definition: GraphOccurrence ): Set[GraphOccurrence] = { def getShadowedIds( scope: Graph.Scope ): Set[GraphOccurrence] = { scope.occurrences.values.collect { - case d: GraphOccurrence.Def if d.symbol == definition.symbol => d - case g: GraphOccurrence.Global if g.symbol == definition.symbol => g + case d: GraphOccurrence.Def if d.symbol == definition.symbol => d } ++ scope.parent.map(getShadowedIds).getOrElse(Set()) }.toSet @@ -295,27 +266,15 @@ sealed class Graph( case Some(scope) => getShadowedIds(scope) // + globals case None => Set() } - case _: GraphOccurrence.Global => Set() - case _: GraphOccurrence.Use => Set() + case _: GraphOccurrence.Use => Set() } } - /** Determines if the provided id is linked to a binding that shadows - * another binding. - * - * @param id the identifier to check - * @return `true` if the definition of the symbol for `id` shadows another - * binding for the same symbol, `false`, otherwise - */ - def linkedToShadowingBinding(id: Graph.Id): Boolean = { - defLinkFor(id).isDefined - } - /** Gets all symbols defined in the graph. 
* * @return the set of symbols defined in this graph */ - def symbols: Set[Graph.Symbol] = { + private[analyse] def symbols: Set[Graph.Symbol] = { rootScope.symbols } @@ -326,7 +285,7 @@ sealed class Graph( * @tparam T the role in which `symbol` should occur * @return a list of identifiers for that symbol */ - def symbolToIds[T <: GraphOccurrence: ClassTag]( + private[analyse] def symbolToIds[T <: GraphOccurrence: ClassTag]( symbol: Graph.Symbol ): List[Graph.Id] = { rootScope.symbolToIds[T](symbol) @@ -337,7 +296,7 @@ sealed class Graph( * @param id the identifier of an occurrence * @return the symbol associated with `id`, if it exists */ - def idToSymbol( + private[analyse] def idToSymbol( id: Graph.Id ): Option[Graph.Symbol] = { rootScope.idToSymbol(id) @@ -362,7 +321,7 @@ object Graph { var childScopes: List[Scope] = List(), var occurrences: Map[Id, GraphOccurrence] = HashMap(), var allDefinitions: List[GraphOccurrence.Def] = List() - ) extends Serializable { + ) { var parent: Option[Scope] = None @@ -372,7 +331,7 @@ object Graph { * * @return the number of scopes from this scope to the root */ - def scopesToRoot: Int = { + private[analyse] def scopesToRoot: Int = { parent.flatMap(scope => Some(scope.scopesToRoot + 1)).getOrElse(0) } @@ -382,7 +341,7 @@ object Graph { * * @return this scope with parent scope set */ - def withParent(parentScope: Scope): this.type = { + final def withParent(parentScope: Scope): this.type = { org.enso.common.Asserts.assertInJvm(parent.isEmpty) this.parent = Some(parentScope) this @@ -393,7 +352,7 @@ object Graph { * * @return a copy of `this` */ - def deepCopy( + final def deepCopy( mapping: mutable.Map[Scope, Scope] = mutable.Map() ): Scope = { mapping.get(this) match { @@ -435,7 +394,7 @@ object Graph { * * @return a scope that is a child of `this` */ - def addChild(): Scope = { + final def addChild(): Scope = { val scope = new Scope() scope.parent = Some(this) childScopes ::= scope @@ -447,7 +406,7 @@ object Graph { * * @param 
occurrence the occurrence to add */ - def add(occurrence: GraphOccurrence): Unit = { + final def add(occurrence: GraphOccurrence): Unit = { if (occurrences.contains(occurrence.id)) { throw new CompilerError( s"Multiple occurrences found for ID ${occurrence.id}." @@ -462,7 +421,7 @@ object Graph { * * @param definition The definition to add. */ - def addDefinition(definition: GraphOccurrence.Def): Unit = { + final def addDefinition(definition: GraphOccurrence.Def): Unit = { allDefinitions = allDefinitions ++ List(definition) } @@ -472,7 +431,9 @@ object Graph { * @param id the occurrence identifier * @return the occurrence for `id`, if it exists */ - def getOccurrence(id: Graph.Id): Option[GraphOccurrence] = { + private[analyse] def getOccurrence( + id: Graph.Id + ): Option[GraphOccurrence] = { occurrences.get(id) } @@ -483,7 +444,7 @@ object Graph { * @tparam T the role for the symbol * @return the occurrences for `name`, if they exist */ - def getOccurrences[T <: GraphOccurrence: ClassTag]( + private[analyse] def getOccurrences[T <: GraphOccurrence: ClassTag]( symbol: Graph.Symbol ): Set[GraphOccurrence] = { occurrences.values.collect { @@ -491,18 +452,6 @@ object Graph { }.toSet } - /** Unsafely gets the occurrence for the provided ID in the current scope. - * - * Please note that this will crash if the ID is not defined in this - * scope. - * - * @param id the occurrence identifier - * @return the occurrence for `id` - */ - def unsafeGetOccurrence(id: Graph.Id): GraphOccurrence = { - getOccurrence(id).get - } - /** Checks whether a symbol occurs in a given role in the current scope. 
* * @param symbol the symbol to check for @@ -510,7 +459,7 @@ object Graph { * @return `true` if `symbol` occurs in role `T` in this scope, `false` * otherwise */ - def hasSymbolOccurrenceAs[T <: GraphOccurrence: ClassTag]( + final def hasSymbolOccurrenceAs[T <: GraphOccurrence: ClassTag]( symbol: Graph.Symbol ): Boolean = { occurrences.values.collectFirst { @@ -526,7 +475,7 @@ object Graph { * @return the link from `occurrence` to the definition of that symbol, if it * exists */ - def resolveUsage( + private[analyse] def resolveUsage( occurrence: GraphOccurrence.Use, parentCounter: Int = 0 ): Option[Graph.Link] = { @@ -556,7 +505,7 @@ object Graph { * @return the number of scopes that are either this scope or children of * it */ - def scopeCount: Int = { + private[analyse] def scopeCount: Int = { childScopes.map(_.scopeCount).sum + 1 } @@ -564,7 +513,7 @@ object Graph { * * @return the maximum nesting depth of scopes through this scope. */ - def maxNesting: Int = { + private[analyse] def maxNesting: Int = { childScopes.map(_.maxNesting).foldLeft(0)(Math.max) + 1 } @@ -573,7 +522,7 @@ object Graph { * @param id the id to find the scope for * @return the scope where `id` occurs */ - def scopeFor(id: Graph.Id): Option[Scope] = { + private[analyse] def scopeFor(id: Graph.Id): Option[Scope] = { if (!occurrences.contains(id)) { if (childScopes.isEmpty) { None @@ -611,7 +560,7 @@ object Graph { * @param n the number of scopes to walk up * @return the n-th parent of `this` scope, if present */ - def nThParent(n: Int): Option[Scope] = { + private[analyse] def nThParent(n: Int): Option[Scope] = { if (n == 0) Some(this) else this.parent.flatMap(_.nThParent(n - 1)) } @@ -624,7 +573,7 @@ object Graph { * @tparam T the role in which `name` occurs * @return all the scopes where `name` occurs with role `T` */ - def scopesForSymbol[T <: GraphOccurrence: ClassTag]( + private[analyse] def scopesForSymbol[T <: GraphOccurrence: ClassTag]( symbol: Graph.Symbol ): List[Scope] = { val 
occursInThisScope = hasSymbolOccurrenceAs[T](symbol) @@ -643,7 +592,7 @@ object Graph { * * @return the set of symbols */ - def symbols: Set[Graph.Symbol] = { + private[analyse] def symbols: Set[Graph.Symbol] = { val symbolsInThis = occurrences.values.map(_.symbol).toSet val symbolsInChildScopes = childScopes.flatMap(_.symbols) @@ -657,7 +606,7 @@ object Graph { * @tparam T the role in which `symbol` should occur * @return a list of identifiers for that symbol */ - def symbolToIds[T <: GraphOccurrence: ClassTag]( + private[analyse] def symbolToIds[T <: GraphOccurrence: ClassTag]( symbol: Graph.Symbol ): List[Graph.Id] = { val scopes = @@ -670,7 +619,7 @@ object Graph { * @param id the identifier of an occurrence * @return the symbol associated with `id`, if it exists */ - def idToSymbol( + private[analyse] def idToSymbol( id: Graph.Id ): Option[Graph.Symbol] = { scopeFor(id).flatMap(_.getOccurrence(id)).map(_.symbol) @@ -681,7 +630,7 @@ object Graph { * @param scope the potential parent scope * @return `true` if `this` is a child of `scope`, otherwise `false` */ - def isChildOf(scope: Scope): Boolean = { + private[analyse] def isChildOf(scope: Scope): Boolean = { val isDirectChildOf = scope.childScopes.contains(this) val isChildOfChildren = scope.childScopes @@ -697,7 +646,7 @@ object Graph { /** Disassociates this Scope from its parent. 
*/ - def removeScopeFromParent(): Unit = { + private[compiler] def removeScopeFromParent(): Unit = { org.enso.common.Asserts.assertInJvm(this.parent.nonEmpty) this.parent.foreach(_.removeScopeFromParent(this)) } @@ -712,6 +661,9 @@ object Graph { * @param scopeCount the number of scopes that the link traverses * @param target the target ID of the link in the graph */ - sealed case class Link(source: Id, scopeCount: Int, target: Id) - extends Serializable + sealed private[analyse] case class Link( + source: Id, + scopeCount: Int, + target: Id + ) {} } diff --git a/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/analyse/alias/graph/GraphOccurrence.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/analyse/alias/graph/GraphOccurrence.scala index 545da729e1..d23feb1cf2 100644 --- a/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/analyse/alias/graph/GraphOccurrence.scala +++ b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/analyse/alias/graph/GraphOccurrence.scala @@ -9,7 +9,7 @@ import java.util.UUID * Note that this is not present in the metadata attached to the [[org.enso.compiler.core.IR]] elements, * but only in the alias [[Graph]]. */ -sealed trait GraphOccurrence extends Serializable { +sealed trait GraphOccurrence { val id: Id val symbol: Graph.Symbol } @@ -51,15 +51,4 @@ object GraphOccurrence { identifier: UUID @Identifier, externalId: Option[UUID @ExternalID] ) extends GraphOccurrence - - // TODO [AA] At some point the analysis should make use of these. - /** Represents a global symbol that has been _asked for_ in the program. 
- * - * @param id the identifier of the name in the graph - * @param symbol the text of the name - */ - sealed case class Global( - override val id: Id, - override val symbol: Graph.Symbol - ) extends GraphOccurrence } diff --git a/engine/runtime-integration-tests/src/test/scala/org/enso/compiler/test/pass/analyse/AliasAnalysisTest.scala b/engine/runtime-integration-tests/src/test/scala/org/enso/compiler/pass/analyse/test/AliasAnalysisTest.scala similarity index 99% rename from engine/runtime-integration-tests/src/test/scala/org/enso/compiler/test/pass/analyse/AliasAnalysisTest.scala rename to engine/runtime-integration-tests/src/test/scala/org/enso/compiler/pass/analyse/test/AliasAnalysisTest.scala index da8d9d197f..526fa34b72 100644 --- a/engine/runtime-integration-tests/src/test/scala/org/enso/compiler/test/pass/analyse/AliasAnalysisTest.scala +++ b/engine/runtime-integration-tests/src/test/scala/org/enso/compiler/pass/analyse/test/AliasAnalysisTest.scala @@ -1,4 +1,4 @@ -package org.enso.compiler.test.pass.analyse +package org.enso.compiler.pass.analyse.test import org.enso.compiler.Passes import org.enso.compiler.context.{FreshNameSupply, InlineContext, ModuleContext} diff --git a/engine/runtime-integration-tests/src/test/scala/org/enso/compiler/test/pass/analyse/FramePointerAnalysisTest.scala b/engine/runtime-integration-tests/src/test/scala/org/enso/compiler/pass/analyse/test/FramePointerAnalysisTest.scala similarity index 99% rename from engine/runtime-integration-tests/src/test/scala/org/enso/compiler/test/pass/analyse/FramePointerAnalysisTest.scala rename to engine/runtime-integration-tests/src/test/scala/org/enso/compiler/pass/analyse/test/FramePointerAnalysisTest.scala index dde696864f..5a5561e04a 100644 --- a/engine/runtime-integration-tests/src/test/scala/org/enso/compiler/test/pass/analyse/FramePointerAnalysisTest.scala +++ b/engine/runtime-integration-tests/src/test/scala/org/enso/compiler/pass/analyse/test/FramePointerAnalysisTest.scala @@ -1,4 
+1,4 @@ -package org.enso.compiler.test.pass.analyse +package org.enso.compiler.pass.analyse.test import org.enso.compiler.Passes import org.enso.compiler.pass.analyse.FramePointer From 5b8bc86cf91a3523a159df89b7b703d25bc7d777 Mon Sep 17 00:00:00 2001 From: Gregory Michael Travis Date: Tue, 29 Oct 2024 08:48:11 -0400 Subject: [PATCH 05/43] Clean up SQLite file after the SQLite file tests have run. (#11416) --- test/Table_Tests/src/Database/SQLite_Spec.enso | 3 +++ 1 file changed, 3 insertions(+) diff --git a/test/Table_Tests/src/Database/SQLite_Spec.enso b/test/Table_Tests/src/Database/SQLite_Spec.enso index e658f5d917..c71ae0ebb7 100644 --- a/test/Table_Tests/src/Database/SQLite_Spec.enso +++ b/test/Table_Tests/src/Database/SQLite_Spec.enso @@ -424,6 +424,9 @@ add_specs suite_builder = suite_builder.group "SQLite_Format should allow connecting to SQLite files" group_builder-> data = File_Connection.setup database_file + group_builder.teardown <| + data.teardown + group_builder.specify "should recognise a SQLite database file" <| Auto_Detect.get_reading_format data.file . 
should_be_a SQLite_Format From 15575b495a0b2039313d4ae411b567355ee396c2 Mon Sep 17 00:00:00 2001 From: Jaroslav Tulach Date: Tue, 29 Oct 2024 15:47:12 +0100 Subject: [PATCH 06/43] Skeletal PanicExceptionTest and more logging when AssertionError happens (#11393) --- .../builtin/error/PanicExceptionTest.java | 59 +++++++++++++++++++ .../interpreter/runtime/data/text/Text.java | 17 ++++++ .../runtime/error/PanicException.java | 15 ++++- 3 files changed, 90 insertions(+), 1 deletion(-) create mode 100644 engine/runtime-integration-tests/src/test/java/org/enso/interpreter/node/expression/builtin/error/PanicExceptionTest.java diff --git a/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/node/expression/builtin/error/PanicExceptionTest.java b/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/node/expression/builtin/error/PanicExceptionTest.java new file mode 100644 index 0000000000..3492a97352 --- /dev/null +++ b/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/node/expression/builtin/error/PanicExceptionTest.java @@ -0,0 +1,59 @@ +package org.enso.interpreter.node.expression.builtin.error; + +import static org.junit.Assert.assertEquals; + +import com.oracle.truffle.api.interop.InteropLibrary; +import org.enso.interpreter.node.expression.builtin.interop.syntax.HostValueToEnsoNode; +import org.enso.interpreter.runtime.data.text.Text; +import org.enso.interpreter.runtime.error.PanicException; +import org.enso.test.utils.ContextUtils; +import org.enso.test.utils.TestRootNode; +import org.graalvm.polyglot.Context; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +public class PanicExceptionTest { + + private static final InteropLibrary interop = InteropLibrary.getUncached(); + + private static Context context; + private static CatchPanicNode catchPanicNode; + private static HostValueToEnsoNode hostValueToEnsoNode; + private static TestRootNode testRootNode; + + @BeforeClass + public 
static void initContextAndData() { + context = ContextUtils.createDefaultContext(); + ContextUtils.executeInContext( + context, + () -> { + catchPanicNode = CatchPanicNode.build(); + hostValueToEnsoNode = HostValueToEnsoNode.build(); + testRootNode = new TestRootNode(); + testRootNode.insertChildren(catchPanicNode, hostValueToEnsoNode); + return null; + }); + } + + @AfterClass + public static void disposeContext() { + context.close(); + context = null; + } + + @Test + public void panicExceptionMessageForAssertionError() throws Exception { + ContextUtils.executeInContext( + context, + () -> { + var text = Text.create("Some text for the exception"); + var thrown = new java.lang.AssertionError(text.toString()); + var ex = new PanicException(text, thrown, null); + assertEquals(text.toString(), ex.getMessage()); + var msg = InteropLibrary.getUncached().getExceptionMessage(ex); + assertEquals(text, msg); + return null; + }); + } +} diff --git a/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/text/Text.java b/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/text/Text.java index b9ba191c7c..5a882bf17e 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/text/Text.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/text/Text.java @@ -285,4 +285,21 @@ public final class Text implements EnsoObject { } return result; } + + @Override + public int hashCode() { + int hash = 7 * toString().hashCode(); + return hash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj instanceof Text other) { + return this.toString().equals(other.toString()); + } + return false; + } } diff --git a/engine/runtime/src/main/java/org/enso/interpreter/runtime/error/PanicException.java b/engine/runtime/src/main/java/org/enso/interpreter/runtime/error/PanicException.java index 91930a1e2d..c85698eef4 100644 --- 
a/engine/runtime/src/main/java/org/enso/interpreter/runtime/error/PanicException.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/runtime/error/PanicException.java @@ -23,6 +23,7 @@ import org.enso.interpreter.runtime.callable.UnresolvedSymbol; import org.enso.interpreter.runtime.callable.argument.CallArgumentInfo; import org.enso.interpreter.runtime.data.EnsoObject; import org.enso.interpreter.runtime.data.Type; +import org.enso.interpreter.runtime.data.atom.Atom; import org.enso.interpreter.runtime.data.text.Text; import org.enso.interpreter.runtime.library.dispatch.TypesLibrary; import org.enso.interpreter.runtime.state.State; @@ -87,7 +88,19 @@ public final class PanicException extends AbstractTruffleException implements En var info = library.getExceptionMessage(this); msg = library.asString(info); } catch (StackOverflowError | AssertionError | UnsupportedMessageException e) { - logger().atError().log("Cannot compute message for " + payload, e); + var l = logger(); + l.atError().log("Cannot compute message for " + payload, e); + l.error("Exception location: " + getLocation()); + if (getLocation() != null) { + l.error(" location source: " + getLocation().getEncapsulatingSourceSection()); + l.error(" location class: " + getLocation().getClass().getName()); + l.error(" location string: " + getLocation()); + } + l.error(" payload class: " + payload.getClass().getName()); + if (payload instanceof Atom atom) { + l.error(" payload cons: " + atom.getConstructor()); + l.error(" payload type: " + atom.getConstructor().getType()); + } msg = TypeToDisplayTextNode.getUncached().execute(payload); } cacheMessage = msg; From 74220f243ae72f8d5c98b45a20bed2c79ce62378 Mon Sep 17 00:00:00 2001 From: Dmitry Bushev Date: Tue, 29 Oct 2024 18:33:53 +0300 Subject: [PATCH 07/43] Dependency tracking between nodes is too coarse grained (#11428) close #11237 Changelog: - update: implement special case for a line removal when calculating the changeset # Important Notes Note that 
the graph is still re-calculated when the node is re-added (by pressing `ctrl-z`). The reason is that the engine processes edits on the textual level and there is not enough information to do similar workarounds. The issue becomes irrelevant when we switch to the direct tree manipulation in Ydoc. https://github.com/user-attachments/assets/c85afde8-6386-44df-82b5-6fb0cca5205b --- .../instrument/ChangesetBuilder.scala | 88 ++++++++++++++++--- .../test/context/ChangesetBuilderTest.scala | 22 +++++ 2 files changed, 100 insertions(+), 10 deletions(-) diff --git a/engine/runtime-instrument-common/src/main/scala/org/enso/interpreter/instrument/ChangesetBuilder.scala b/engine/runtime-instrument-common/src/main/scala/org/enso/interpreter/instrument/ChangesetBuilder.scala index c872efd9fe..8f6b45f1d9 100644 --- a/engine/runtime-instrument-common/src/main/scala/org/enso/interpreter/instrument/ChangesetBuilder.scala +++ b/engine/runtime-instrument-common/src/main/scala/org/enso/interpreter/instrument/ChangesetBuilder.scala @@ -190,10 +190,19 @@ final class ChangesetBuilder[A: TextEditor: IndexedSource]( val edit = edits.dequeue() val locationEdit = ChangesetBuilder.toLocationEdit(edit, source) var invalidatedSet = - ChangesetBuilder.invalidated(tree, locationEdit.location, true) + ChangesetBuilder.invalidated( + tree, + locationEdit.location, + locationEdit.isNodeRemoved, + true + ) if (invalidatedSet.isEmpty) { - invalidatedSet = - ChangesetBuilder.invalidated(tree, locationEdit.location, false) + invalidatedSet = ChangesetBuilder.invalidated( + tree, + locationEdit.location, + locationEdit.isNodeRemoved, + false + ) } val newTree = ChangesetBuilder.updateLocations(tree, locationEdit) val newSource = TextEditor[A].edit(source, edit) @@ -260,8 +269,13 @@ object ChangesetBuilder { * * @param location the location of the edit * @param length the length of the inserted text + * @param isNodeRemoved the flag indicating that the edit removes a node */ - private case class 
LocationEdit(location: Location, length: Int) { + private case class LocationEdit( + location: Location, + length: Int, + isNodeRemoved: Boolean + ) { /** The difference in length between the edited text and the inserted text. * Determines how much the rest of the text will be shifted after applying @@ -409,19 +423,50 @@ object ChangesetBuilder { /** Calculate the invalidated subset of the tree affected by the edit by * comparing the source locations. * + * The `isNodeRemoved` argument covers the case when the user removes a + * single line, for example: + * + * {{{ + * 0|main = + * | + * 1| x = 0 + * | ^^^^^^ + * 2| y = 1 + * |^^^^ + * 3| y + * }}} + * + * In this case, when removing the line (1) `x = 0`, the expression `y = 1` + * on the line (2) should not be affected by the edit because it causes + * invalidation of all the subsequent expressions in the body of `main` + * function. Instead, the algorithm detects that only the `x` variable name + * was changed and later finds all its usages through the `DataflowAnalysis` + * metadata. Also note that we ignore the right hand side of the `x = ...` + * binding because the removal of rhs expression does not affect other + * expressions in the `main` body, while the usage of a common symbol, i.e. + * `foo`: + * {{{ + * x = foo + * y = foo + * }}} + * will lead to the invalidation of the `y` expression as well (when looking + * for dynamic usages of the `foo` symbol) which is unwanted. 
+ * * @param tree the source tree * @param edit the location of the edit + * @param isNodeRemoved flag indicating that the edit removes a single node * @return the invalidated nodes of the tree */ private def invalidated( tree: Tree, edit: Location, + isNodeRemoved: Boolean, onlyLeafs: Boolean ): Tree = { val invalidated = mutable.TreeSet[ChangesetBuilder.Node]() tree.iterator.foreach { node => if (!onlyLeafs || node.leaf) { - if (intersect(edit, node)) { + if (intersect(edit, node, isNodeRemoved)) { invalidated += node tree -= node } @@ -438,12 +483,14 @@ object ChangesetBuilder { */ private def intersect( edit: Location, - node: ChangesetBuilder.Node + node: ChangesetBuilder.Node, + isNodeRemoved: Boolean ): Boolean = { - intersect(edit, node.location) + if (isNodeRemoved) intersectWhenNodeRemoved(edit, node.location) + else intersect(edit, node.location) } - /** Check if the node location intersects the edit location. + /** Check if the node location intersects or borders with the edit location. * * @param edit location of the edit * @param node location of the node @@ -456,7 +503,23 @@ object ChangesetBuilder { inside(edit.end, node) } - /** Check if the character position index is inside the location. + /** Check if the node location intersects the edit that removes the line. + * + * In this case we assume that the edit removes the binding + * `name = expression`, and we only interested in detecting the `name` part. + * + * @param edit location of the edit + * @param node location of the node + * @return true if the node and edit locations are intersecting + */ + private def intersectWhenNodeRemoved( + edit: Location, + node: Location + ): Boolean = { + node.start == edit.start && node.end < edit.end + } + + /** Check if the character position index is inside or on the border of the location. 
* * @param index the character position * @param location the location @@ -476,7 +539,12 @@ object ChangesetBuilder { edit: TextEdit, source: A ): LocationEdit = { - LocationEdit(toLocation(edit, source), edit.text.length) + def isSameOffset: Boolean = + edit.range.end.character == edit.range.start.character + def isAcrossLines: Boolean = + edit.range.end.line > edit.range.start.line + val isNodeRemoved = edit.text.isEmpty && isSameOffset && isAcrossLines + LocationEdit(toLocation(edit, source), edit.text.length, isNodeRemoved) } /** Convert [[TextEdit]] location to [[Location]] in the provided source. diff --git a/engine/runtime-instrument-common/src/test/scala/org/enso/compiler/test/context/ChangesetBuilderTest.scala b/engine/runtime-instrument-common/src/test/scala/org/enso/compiler/test/context/ChangesetBuilderTest.scala index 8579d3b93d..76015824f6 100644 --- a/engine/runtime-instrument-common/src/test/scala/org/enso/compiler/test/context/ChangesetBuilderTest.scala +++ b/engine/runtime-instrument-common/src/test/scala/org/enso/compiler/test/context/ChangesetBuilderTest.scala @@ -221,6 +221,27 @@ class ChangesetBuilderTest ) } + "multiline remove node" in { + val code = + """x -> + | y = foo 5 + | z = foo 7 + | y + x""".stripMargin.linesIterator.mkString("\n") + val edit = TextEdit(Range(Position(2, 4), Position(3, 4)), "") + + val ir = code + .preprocessExpression(freshInlineContext) + .get + .asInstanceOf[Function.Lambda] + + val secondLine = ir.body.children()(1).asInstanceOf[Expression.Binding] + val zName = secondLine.name + + invalidated(ir, code, edit) should contain theSameElementsAs Seq( + zName.getId + ) + } + "multiline insert line 1" in { val code = """x -> @@ -434,6 +455,7 @@ class ChangesetBuilderTest atCode ) } + } def findIR(ir: IR, uuid: String): IR = { From 442123bba0d0497fdaa7e3cfd06c6ae8cef733ba Mon Sep 17 00:00:00 2001 From: Gregory Michael Travis Date: Tue, 29 Oct 2024 18:39:32 -0400 Subject: [PATCH 08/43] Fix `Float.parse` benchmark 
regression (#11402) --- .../Base/0.0.0-dev/src/Data/Numbers.enso | 21 ++++++++++++------- test/Base_Tests/src/Data/Numbers_Spec.enso | 2 +- 2 files changed, 15 insertions(+), 8 deletions(-) diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Numbers.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Numbers.enso index dfc0f655e5..91a40f88d2 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Numbers.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Numbers.enso @@ -794,13 +794,20 @@ type Float Float.parse "(123,456,789.654)" format="###,###.##;(###,###.##)" # => -123456789.654 parse : Text -> Locale | Nothing -> Float ! Number_Parse_Error ! Illegal_Argument - parse text locale:Locale=Locale.default format:Text="" -> Float ! Number_Parse_Error ! Illegal_Argument = - Illegal_Argument.handle_java_exception <| - # `getInstance` returns `DecimalFormat` or a subclass of `DecimalFormat`. - decimal_format = NumberFormat.getInstance locale.java_locale - decimal_format.applyLocalizedPattern format - Panic.catch ParseException (decimal_format.parse text) _-> - Error.throw (Number_Parse_Error.Error text) + parse text (locale : Locale | Nothing = Nothing) (format : Text | Nothing = Nothing) -> Float ! Number_Parse_Error ! Illegal_Argument = + case locale.is_nothing && format.is_nothing of + True -> + Panic.catch NumberFormatException (Double.parseDouble text) _-> + Error.throw (Number_Parse_Error.Error text) + False -> + Illegal_Argument.handle_java_exception <| + defaulted_locale = locale.if_nothing Locale.default + defaulted_format = format.if_nothing "" + # `getInstance` returns `DecimalFormat` or a subclass of `DecimalFormat`. 
+ decimal_format = NumberFormat.getInstance defaulted_locale.java_locale + decimal_format.applyLocalizedPattern defaulted_format + Panic.catch ParseException (decimal_format.parse text) _-> + Error.throw (Number_Parse_Error.Error text) ## ICON input_number diff --git a/test/Base_Tests/src/Data/Numbers_Spec.enso b/test/Base_Tests/src/Data/Numbers_Spec.enso index e86ed651ac..4a66dc988a 100644 --- a/test/Base_Tests/src/Data/Numbers_Spec.enso +++ b/test/Base_Tests/src/Data/Numbers_Spec.enso @@ -303,7 +303,7 @@ add_specs suite_builder = Float.parse "aaaa" l . should_fail_with Number_Parse_Error group_builder.specify "should parse correctly with format and/or locale" <| - Float.parse "123,456,789.87654" . should_equal 123456789.87654 + Float.parse "123,456,789.87654" locale=Locale.default . should_equal 123456789.87654 Float.parse "123.456.789,87654" locale=Locale.italy . should_equal 123456789.87654 Float.parse "123,456,789.88" format="#,###.##" . should_equal 123456789.88 From 39c44e7adbc4cc47bec207bd50a4f6bfc8a0d03c Mon Sep 17 00:00:00 2001 From: Adam Obuchowicz Date: Wed, 30 Oct 2024 10:34:58 +0100 Subject: [PATCH 09/43] Table Input Widget: Size persistence (#11435) Fixes #10861 Every widget may set metadata on its AST. Because once widget picker will be implemented a single AST node may have many possible widgets, their settings are kept in a map keyed by their name/key. 
--- CHANGELOG.md | 3 ++ .../components/GraphEditor/NodeWidgetTree.vue | 25 +++++++++------ .../GraphEditor/widgets/WidgetFunction.vue | 5 +++ .../GraphEditor/widgets/WidgetTableEditor.vue | 28 ++++++++++++++-- .../project-view/providers/widgetRegistry.ts | 25 ++++++++------- app/gui/src/project-view/util/ast/reactive.ts | 12 ++++++- app/ydoc-server/src/edits.ts | 17 +++++++--- app/ydoc-server/src/fileFormat.ts | 6 ++-- app/ydoc-server/src/languageServerSession.ts | 32 +++++++++++++------ app/ydoc-shared/src/ast/mutableModule.ts | 32 +++++++++++++++++-- app/ydoc-shared/src/ast/tree.ts | 23 +++++++++++++ 11 files changed, 164 insertions(+), 44 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 570388a6a1..97754e3e87 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,6 +12,8 @@ - [Changed the way of adding new column in Table Input Widget][11388]. The "virtual column" is replaced with an explicit (+) button. - [New dropdown-based component menu][11398]. +- [Size of Table Input Widget is preserved and restored after project + re-opening][11435] [11151]: https://github.com/enso-org/enso/pull/11151 [11271]: https://github.com/enso-org/enso/pull/11271 @@ -20,6 +22,7 @@ [11383]: https://github.com/enso-org/enso/pull/11383 [11388]: https://github.com/enso-org/enso/pull/11388 [11398]: https://github.com/enso-org/enso/pull/11398 +[11435]: https://github.com/enso-org/enso/pull/11435 #### Enso Standard Library diff --git a/app/gui/src/project-view/components/GraphEditor/NodeWidgetTree.vue b/app/gui/src/project-view/components/GraphEditor/NodeWidgetTree.vue index c16fd3fc12..ea545d34b7 100644 --- a/app/gui/src/project-view/components/GraphEditor/NodeWidgetTree.vue +++ b/app/gui/src/project-view/components/GraphEditor/NodeWidgetTree.vue @@ -67,16 +67,23 @@ function handleWidgetUpdates(update: WidgetUpdate) { selectNode() const edit = update.edit ?? 
graph.startEdit() if (update.portUpdate) { - const { value, origin } = update.portUpdate + const { origin } = update.portUpdate if (Ast.isAstId(origin)) { - const ast = - value instanceof Ast.Ast ? value - : value == null ? Ast.Wildcard.new(edit) - : undefined - if (ast) { - edit.replaceValue(origin, ast) - } else if (typeof value === 'string') { - edit.tryGet(origin)?.syncToCode(value) + if ('value' in update.portUpdate) { + const value = update.portUpdate.value + const ast = + value instanceof Ast.Ast ? value + : value == null ? Ast.Wildcard.new(edit) + : undefined + if (ast) { + edit.replaceValue(origin, ast) + } else if (typeof value === 'string') { + edit.tryGet(origin)?.syncToCode(value) + } + } + if ('metadata' in update.portUpdate) { + const { metadataKey, metadata } = update.portUpdate + edit.tryGet(origin)?.setWidgetMetadata(metadataKey, metadata) } } else { console.error(`[UPDATE ${origin}] Invalid top-level origin. Expected expression ID.`) diff --git a/app/gui/src/project-view/components/GraphEditor/widgets/WidgetFunction.vue b/app/gui/src/project-view/components/GraphEditor/widgets/WidgetFunction.vue index 2616049e19..386ff90e0a 100644 --- a/app/gui/src/project-view/components/GraphEditor/widgets/WidgetFunction.vue +++ b/app/gui/src/project-view/components/GraphEditor/widgets/WidgetFunction.vue @@ -82,6 +82,11 @@ const innerInput = computed(() => { function handleArgUpdate(update: WidgetUpdate): boolean { const app = application.value if (update.portUpdate && app instanceof ArgumentApplication) { + if (!('value' in update.portUpdate)) { + if (!Ast.isAstId(update.portUpdate.origin)) + console.error('Tried to set metadata on arg placeholder. This is not implemented yet!') + return false + } const { value, origin } = update.portUpdate const edit = update.edit ?? graph.startEdit() // Find the updated argument by matching origin port/expression with the appropriate argument. 
diff --git a/app/gui/src/project-view/components/GraphEditor/widgets/WidgetTableEditor.vue b/app/gui/src/project-view/components/GraphEditor/widgets/WidgetTableEditor.vue index da5a17f3f2..7733a7f444 100644 --- a/app/gui/src/project-view/components/GraphEditor/widgets/WidgetTableEditor.vue +++ b/app/gui/src/project-view/components/GraphEditor/widgets/WidgetTableEditor.vue @@ -29,12 +29,29 @@ import type { } from 'ag-grid-enterprise' import { computed, markRaw, ref } from 'vue' import type { ComponentExposed } from 'vue-component-type-helpers' +import { z } from 'zod' const props = defineProps(widgetProps(widgetDefinition)) const graph = useGraphStore() const suggestionDb = useSuggestionDbStore() const grid = ref>>() +const configSchema = z.object({ size: z.object({ x: z.number(), y: z.number() }) }) +type Config = z.infer + +const DEFAULT_CFG: Config = { size: { x: 200, y: 150 } } + +const config = computed(() => { + const configObj = props.input.value.widgetMetadata('WidgetTableEditor') + if (configObj == null) return DEFAULT_CFG + const parsed = configSchema.safeParse(configObj) + if (parsed.success) return parsed.data + else { + console.warn('Table Editor Widget: could not read config; invalid format: ', parsed.error) + return DEFAULT_CFG + } +}) + const { rowData, columnDefs, moveColumn, moveRow, pasteFromClipboard } = useTableNewArgument( () => props.input, graph, @@ -131,15 +148,22 @@ const headerEditHandler = new HeaderEditing() // === Resizing === -const size = ref(new Vec2(200, 150)) const graphNav = injectGraphNavigator() +const size = computed(() => Vec2.FromXY(config.value.size)) + const clientBounds = computed({ get() { return new Rect(Vec2.Zero, size.value.scale(graphNav.scale)) }, set(value) { - size.value = new Vec2(value.width / graphNav.scale, value.height / graphNav.scale) + props.onUpdate({ + portUpdate: { + origin: props.input.portId, + metadataKey: 'WidgetTableEditor', + metadata: { size: { x: value.width / graphNav.scale, y: value.height / 
graphNav.scale } }, + }, + }) }, }) diff --git a/app/gui/src/project-view/providers/widgetRegistry.ts b/app/gui/src/project-view/providers/widgetRegistry.ts index c196783702..3f4afbd3a5 100644 --- a/app/gui/src/project-view/providers/widgetRegistry.ts +++ b/app/gui/src/project-view/providers/widgetRegistry.ts @@ -12,7 +12,7 @@ import type { WidgetEditHandlerParent } from './widgetRegistry/editHandler' export type WidgetComponent = Component> export namespace WidgetInput { - /** TODO: Add docs */ + /** Create a basic {@link WidgetInput } from AST node. */ export function FromAst(ast: A): WidgetInput & { value: A } { return { portId: ast.id, @@ -20,7 +20,7 @@ export namespace WidgetInput { } } - /** TODO: Add docs */ + /** Create a basic {@link WidgetInput } from AST node with enforced port. */ export function FromAstWithPort( ast: A, ): WidgetInput & { value: A } { @@ -31,7 +31,7 @@ export namespace WidgetInput { } } - /** TODO: Add docs */ + /** A string representation of widget's value - the code in case of AST value. */ export function valueRepr(input: WidgetInput): string | undefined { if (typeof input.value === 'string') return input.value else return input.value?.code() @@ -56,24 +56,24 @@ export namespace WidgetInput { isPlaceholder(input) || input.value instanceof nodeType } - /** TODO: Add docs */ + /** Check if input's value is existing AST node (not placeholder or token). */ export function isAst(input: WidgetInput): input is WidgetInput & { value: Ast.Ast } { return input.value instanceof Ast.Ast } - /** Rule out token inputs. */ + /** Check if input's value is existing AST node or placeholder. Rule out token inputs. */ export function isAstOrPlaceholder( input: WidgetInput, ): input is WidgetInput & { value: Ast.Ast | string | undefined } { return isPlaceholder(input) || isAst(input) } - /** TODO: Add docs */ + /** Check if input's value is an AST token. 
*/ export function isToken(input: WidgetInput): input is WidgetInput & { value: Ast.Token } { return input.value instanceof Ast.Token } - /** TODO: Add docs */ + /** Check if input's value is an AST which potentially may be a function call. */ export function isFunctionCall( input: WidgetInput, ): input is WidgetInput & { value: Ast.App | Ast.Ident | Ast.PropertyAccess | Ast.OprApp } { @@ -163,15 +163,18 @@ export interface WidgetProps { * port may not represent any existing AST node) with `edit` containing any additional modifications * (like inserting necessary imports). * + * The same way widgets may set their metadata (as this is also technically an AST modification). + * Every widget type should set it's name as `metadataKey`. + * * The handlers interested in a specific port update should apply it using received edit. The edit * is committed in {@link NodeWidgetTree}. */ export interface WidgetUpdate { edit?: MutableModule | undefined - portUpdate?: { - value: Ast.Owned | string | undefined - origin: PortId - } + portUpdate?: { origin: PortId } & ( + | { value: Ast.Owned | string | undefined } + | { metadataKey: string; metadata: unknown } + ) } /** diff --git a/app/gui/src/project-view/util/ast/reactive.ts b/app/gui/src/project-view/util/ast/reactive.ts index 5223dc1ed1..aa8e596d01 100644 --- a/app/gui/src/project-view/util/ast/reactive.ts +++ b/app/gui/src/project-view/util/ast/reactive.ts @@ -2,13 +2,23 @@ import { markRaw, shallowReactive } from 'vue' import { MutableModule } from 'ydoc-shared/ast' import * as Y from 'yjs' -/** TODO: Add docs */ +/** + * Make AST structures inside the module reactive (including the node's and widgets' metadata). + * + * Note that non-Ast structured fields (e.g. ArgumentDefinition) are not themselves reactive -- + * an access is tracked when obtaining the object from the Ast, not when accessing the inner + * object's fields. 
+ */ export function reactiveModule(doc: Y.Doc, onCleanup: (f: () => void) => void): MutableModule { const module = markRaw(new MutableModule(doc)) const handle = module.observe((update) => { update.nodesAdded.forEach((astId) => { const fields = module.get(astId).fields ;(fields as any)._map = shallowReactive((fields as any)._map) + const metadata = fields.get('metadata') + ;(metadata as any)._map = shallowReactive((metadata as any)._map) + const widgetsMetadata = metadata.get('widget') + ;(widgetsMetadata as any)._map = shallowReactive((widgetsMetadata as any)._map) }) }) onCleanup(() => module.unobserve(handle)) diff --git a/app/ydoc-server/src/edits.ts b/app/ydoc-server/src/edits.ts index 8274e0412d..2e72a1b79b 100644 --- a/app/ydoc-server/src/edits.ts +++ b/app/ydoc-server/src/edits.ts @@ -38,7 +38,7 @@ const MAX_SIZE_FOR_NORMAL_DIFF = 30000 interface AppliedUpdates { newCode: string | undefined newIdMap: IdMap | undefined - newMetadata: fileFormat.IdeMetadata['node'] | undefined + newMetadata: fileFormat.IdeMetadata | undefined } /** Return an object containing updated versions of relevant fields, given an update payload. */ @@ -49,7 +49,7 @@ export function applyDocumentUpdates( ): AppliedUpdates { const codeChanged = update.nodesUpdated.size || update.nodesAdded.size || update.nodesDeleted.size let idsChanged = false - let metadataChanged = false + let metadataChanged = update.widgetMetadataUpdated.size > 0 for (const { changes } of update.metadataUpdated) { for (const [key] of changes) { if (key === 'externalId') { @@ -63,7 +63,7 @@ export function applyDocumentUpdates( let newIdMap = undefined let newCode = undefined - let newMetadata = undefined + let newMetadata: fileFormat.IdeMetadata | undefined = undefined const syncModule = new MutableModule(doc.ydoc) const root = syncModule.root() @@ -76,19 +76,26 @@ export function applyDocumentUpdates( if (codeChanged || idsChanged || metadataChanged) { // Update the metadata object. 
// Depth-first key order keeps diffs small. - newMetadata = {} satisfies fileFormat.IdeMetadata['node'] + newMetadata = { node: {}, widget: {} } root.visitRecursiveAst(ast => { let pos = ast.nodeMetadata.get('position') const vis = ast.nodeMetadata.get('visualization') const colorOverride = ast.nodeMetadata.get('colorOverride') if (vis && !pos) pos = { x: 0, y: 0 } if (pos) { - newMetadata![ast.externalId] = { + newMetadata!.node[ast.externalId] = { position: { vector: [Math.round(pos.x), Math.round(-pos.y)] }, visualization: vis && translateVisualizationToFile(vis), colorOverride, } } + const widgets = ast.widgetsMetadata() + if (!widgets.entries().next().done) { + if (newMetadata!.widget == null) newMetadata!.widget = {} + newMetadata!.widget[ast.externalId] = Object.fromEntries( + widgets.entries() as IterableIterator<[string, Record]>, + ) + } }) } diff --git a/app/ydoc-server/src/fileFormat.ts b/app/ydoc-server/src/fileFormat.ts index 6cf6c87874..213219e5c9 100644 --- a/app/ydoc-server/src/fileFormat.ts +++ b/app/ydoc-server/src/fileFormat.ts @@ -34,15 +34,12 @@ export const nodeMetadata = z }) .passthrough() -export type ImportMetadata = z.infer -export const importMetadata = z.object({}).passthrough() - export type IdeMetadata = z.infer export const ideMetadata = z .object({ node: z.record(z.string().uuid(), nodeMetadata), - import: z.record(z.string(), importMetadata), snapshot: z.string().optional(), + widget: z.optional(z.record(z.string().uuid(), z.record(z.string(), z.unknown()))), }) .passthrough() .default(() => defaultMetadata().ide) @@ -87,6 +84,7 @@ function defaultMetadata() { ide: { node: {}, import: {}, + widget: {}, }, } } diff --git a/app/ydoc-server/src/languageServerSession.ts b/app/ydoc-server/src/languageServerSession.ts index 80f7f42cad..98d7923d3b 100644 --- a/app/ydoc-server/src/languageServerSession.ts +++ b/app/ydoc-server/src/languageServerSession.ts @@ -474,12 +474,14 @@ class ModulePersistence extends ObservableV2<{ removed: () => 
void }> { private static getIdMapToPersist( idMap: IdMap | undefined, - metadata: fileFormat.IdeMetadata['node'], + metadata: fileFormat.IdeMetadata, ): IdMap | undefined { if (idMap === undefined) { return } else { - const entriesIntersection = idMap.entries().filter(([, id]) => id in metadata) + const entriesIntersection = idMap + .entries() + .filter(([, id]) => id in metadata.node || id in (metadata.widget ?? {})) return new IdMap(entriesIntersection) } } @@ -496,7 +498,7 @@ class ModulePersistence extends ObservableV2<{ removed: () => void }> { synced: EnsoFileParts, newCode: string | undefined, newIdMap: IdMap | undefined, - newMetadata: fileFormat.IdeMetadata['node'] | undefined, + newMetadata: fileFormat.IdeMetadata | undefined, ) { if (this.syncedContent == null || this.syncedVersion == null) return @@ -508,14 +510,13 @@ class ModulePersistence extends ObservableV2<{ removed: () => void }> { json.stringify({ ...this.syncedMeta, ide: { - ...this.syncedMeta.ide, + ...newMetadata, ...newSnapshot, - node: newMetadata, }, }) const idMapToPersist = (newIdMap || newMetadata) && - ModulePersistence.getIdMapToPersist(newIdMap, newMetadata ?? this.syncedMeta.ide.node) + ModulePersistence.getIdMapToPersist(newIdMap, newMetadata ?? this.syncedMeta.ide) const newIdMapToPersistJson = idMapToPersist && serializeIdMap(idMapToPersist) const code = newCode ?? 
synced.code const newContent = combineFileParts({ @@ -566,7 +567,7 @@ class ModulePersistence extends ObservableV2<{ removed: () => void }> { if (!result.ok) return handleError(result.error) this.syncedContent = newContent this.syncedVersion = newVersion - if (newMetadata) this.syncedMeta.ide.node = newMetadata + if (newMetadata) this.syncedMeta.ide = newMetadata if (newCode) this.syncedCode = newCode if (newIdMapToPersistJson) this.syncedIdMap = newIdMapToPersistJson if (newMetadataJson) this.syncedMetaJson = newMetadataJson @@ -583,6 +584,7 @@ class ModulePersistence extends ObservableV2<{ removed: () => void }> { const { code, idMapJson, metadataJson } = contentsReceived const metadata = fileFormat.tryParseMetadataOrFallback(metadataJson) const nodeMeta = Object.entries(metadata.ide.node) + const widgetMeta = Object.entries(metadata.ide.widget ?? {}) let parsedSpans let parsedIdMap @@ -646,7 +648,7 @@ class ModulePersistence extends ObservableV2<{ removed: () => void }> { (code !== this.syncedCode || idMapJson !== this.syncedIdMap || metadataJson !== this.syncedMetaJson) && - nodeMeta.length !== 0 + (nodeMeta.length !== 0 || widgetMeta.length !== 0) ) { const externalIdToAst = new Map() astRoot.visitRecursiveAst(ast => { @@ -671,6 +673,18 @@ class ModulePersistence extends ObservableV2<{ removed: () => void }> { const newColorOverride = meta.colorOverride if (oldColorOverride !== newColorOverride) metadata.set('colorOverride', newColorOverride) } + for (const [id, meta] of widgetMeta) { + if (typeof id !== 'string') continue + const ast = externalIdToAst.get(id as ExternalId) + if (!ast) { + missing.add(id) + continue + } + const widgetsMetadata = syncModule.getVersion(ast).mutableWidgetsMetadata() + for (const [widgetKey, widgetMeta] of Object.entries(meta)) { + widgetsMetadata.set(widgetKey, widgetMeta) + } + } } this.syncedCode = code @@ -685,7 +699,7 @@ class ModulePersistence extends ObservableV2<{ removed: () => void }> { contentsReceived, this.syncedCode 
?? undefined, unsyncedIdMap, - this.syncedMeta?.ide?.node, + this.syncedMeta?.ide, ) } diff --git a/app/ydoc-shared/src/ast/mutableModule.ts b/app/ydoc-shared/src/ast/mutableModule.ts index a8f3d3bec9..6271b05453 100644 --- a/app/ydoc-shared/src/ast/mutableModule.ts +++ b/app/ydoc-shared/src/ast/mutableModule.ts @@ -40,6 +40,7 @@ export interface ModuleUpdate { nodesUpdated: Set updateRoots: Set metadataUpdated: { id: AstId; changes: Map }[] + widgetMetadataUpdated: Set origin: Origin | undefined } @@ -280,6 +281,12 @@ export class MutableModule implements Module { metadata.get(key as any), ]) updateBuilder.updateMetadata(id, changes) + } else if (event.target.parent.parent.parent === this.nodes) { + // Updates to some specific widget's metadata + const id = event.target.parent.parent.get('id') + assertAstId(id) + if (!this.nodes.get(id)) continue + updateBuilder.updateWidgets(id) } } return updateBuilder.finish() @@ -351,6 +358,7 @@ export class MutableModule implements Module { const metadata = new Y.Map() as unknown as FixedMap const metadataFields = setAll(metadata, { externalId: externalId ?? 
newExternalId(), + widget: new Y.Map(), }) const fields = setAll(map_, { id, @@ -437,7 +445,11 @@ class UpdateBuilder { readonly nodesAdded = new Set() readonly nodesDeleted = new Set() readonly nodesUpdated = new Set() - readonly metadataUpdated: { id: AstId; changes: Map }[] = [] + readonly metadataUpdated: { + id: AstId + changes: Map + }[] = [] + readonly widgetMetadataUpdated = new Set() readonly origin: Origin | undefined private readonly module: Module @@ -471,15 +483,29 @@ class UpdateBuilder { } } if (fieldsChanged) this.nodesUpdated.add(id) - if (metadataChanges) this.metadataUpdated.push({ id, changes: metadataChanges }) + if (metadataChanges) { + this.metadataUpdated.push({ id, changes: metadataChanges }) + if (metadataChanges.has('widget')) { + this.widgetMetadataUpdated.add(id) + } + } } updateMetadata(id: AstId, changes: Iterable) { const changeMap = new Map() - for (const [key, value] of changes) changeMap.set(key, value) + for (const [key, value] of changes) { + changeMap.set(key, value) + if (key === 'widget') { + this.widgetMetadataUpdated.add(id) + } + } this.metadataUpdated.push({ id, changes: changeMap }) } + updateWidgets(id: AstId) { + this.widgetMetadataUpdated.add(id) + } + deleteNode(id: AstId) { this.nodesDeleted.add(id) } diff --git a/app/ydoc-shared/src/ast/tree.ts b/app/ydoc-shared/src/ast/tree.ts index 75a76c7106..267e8fd4b6 100644 --- a/app/ydoc-shared/src/ast/tree.ts +++ b/app/ydoc-shared/src/ast/tree.ts @@ -1,4 +1,5 @@ /* eslint-disable @typescript-eslint/no-unsafe-declaration-merging */ +import * as Y from 'yjs' import type { Identifier, IdentifierOrOperatorIdentifier, @@ -53,6 +54,7 @@ export type AstId = string & { [brandAstId]: never } /** @internal */ export interface MetadataFields { externalId: ExternalId + widget: Y.Map } export interface NodeMetadataFields { position?: { x: number; y: number } | undefined @@ -66,6 +68,7 @@ const nodeMetadataKeys = allKeys({ }) export type NodeMetadata = FixedMapView export type 
MutableNodeMetadata = FixedMap + /** @internal */ interface RawAstFields { id: AstId @@ -105,6 +108,16 @@ export abstract class Ast { return metadata as FixedMapView } + /** Get metadata of all widgets assigned to this node. */ + widgetsMetadata(): FixedMapView> { + return this.fields.get('metadata').get('widget') + } + + /** Get metadata of given widget assigned to this node. */ + widgetMetadata(widgetKey: string): DeepReadonly | undefined { + return this.fields.get('metadata').get('widget').get(widgetKey) + } + /** Returns a JSON-compatible object containing all metadata properties. */ serializeMetadata(): MetadataFields & NodeMetadataFields { return this.fields.get('metadata').toJSON() as any @@ -249,6 +262,16 @@ export abstract class MutableAst extends Ast { this.fields.get('metadata').set('externalId', id) } + /** Set the widget's new metadata. */ + setWidgetMetadata(widgetKey: string, widgetMetadata: unknown) { + this.fields.get('metadata').get('widget').set(widgetKey, widgetMetadata) + } + + /** Get map of all widget's metadata. */ + mutableWidgetsMetadata() { + return this.fields.get('metadata').get('widget') + } + /** TODO: Add docs */ mutableNodeMetadata(): MutableNodeMetadata { const metadata = this.fields.get('metadata') From 10d76ca6148f2efb931bbc651d50ee465eb36071 Mon Sep 17 00:00:00 2001 From: Nikita Pekin Date: Wed, 30 Oct 2024 13:15:37 +0200 Subject: [PATCH 10/43] chore(flake.nix): Add missing macOS-specific dependencies of `enso-formatter` (#11430) When compiling the `enso-formatter` binary for use in `~/.cargo/bin` (for the `cloud-v2` repo), the formatter requires some additional dependencies that are not currently provided in the nix build environment. This PR adds those dependencies to `flake.nix` so that `enso-formatter` compiles successfully. 
cc @somebody1234 --- flake.nix | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/flake.nix b/flake.nix index cb7ec8a6bf..61d5e39f3a 100644 --- a/flake.nix +++ b/flake.nix @@ -30,7 +30,11 @@ buildInputs = with pkgs; [ # === Graal dependencies === libxcrypt-legacy - ]; + ] ++ (if !isOnLinux then [ + # === macOS-specific dependencies === + darwin.apple_sdk.frameworks.IOKit # Required by `enso-formatter`. + darwin.apple_sdk.frameworks.Security # Required by `enso-formatter`. + ] else [ ]); packages = with pkgs; [ # === TypeScript dependencies === From dc50a7e3691c8e73da0fe1bfd598427f371daff6 Mon Sep 17 00:00:00 2001 From: Gregory Michael Travis Date: Wed, 30 Oct 2024 08:50:35 -0400 Subject: [PATCH 11/43] HTTP response caching, with TTL and LRU logic (#11342) --- CHANGELOG.md | 3 + .../lib/Standard/Base/0.0.0-dev/src/Data.enso | 50 ++- .../src/Enso_Cloud/Internal/Utils.enso | 3 +- .../Base/0.0.0-dev/src/Errors/Common.enso | 23 ++ .../src/Internal/Data_Read_Helpers.enso | 6 +- .../Base/0.0.0-dev/src/Network/HTTP.enso | 47 ++- .../src/Network/HTTP/Cache_Policy.enso | 19 + .../main/java/org/enso/base/Stream_Utils.java | 24 ++ .../java/org/enso/base/cache/LRUCache.java | 353 ++++++++++++++++++ .../base/cache/ResponseTooLargeException.java | 15 + .../enso_cloud/EnsoHTTPResponseCache.java | 163 ++++++++ .../base/enso_cloud/EnsoSecretHelper.java | 138 +++++-- test/Base_Tests/src/Network/Http_Spec.enso | 7 +- test/Table_Tests/src/IO/Fetch_Spec.enso | 341 +++++++++++++++++ .../org/enso/shttp/HTTPTestHelperServer.java | 1 + .../test_helpers/DownloadTestHandler.java | 58 +++ 16 files changed, 1204 insertions(+), 47 deletions(-) create mode 100644 distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Cache_Policy.enso create mode 100644 std-bits/base/src/main/java/org/enso/base/cache/LRUCache.java create mode 100644 std-bits/base/src/main/java/org/enso/base/cache/ResponseTooLargeException.java create mode 100644 
std-bits/base/src/main/java/org/enso/base/enso_cloud/EnsoHTTPResponseCache.java create mode 100644 tools/http-test-helper/src/main/java/org/enso/shttp/test_helpers/DownloadTestHandler.java diff --git a/CHANGELOG.md b/CHANGELOG.md index 97754e3e87..9e54b5f091 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -106,6 +106,8 @@ range.][11135] - [Added `format` parameter to `Decimal.parse`.][11205] - [Added `format` parameter to `Float.parse`.][11229] +- [Implemented a cache for HTTP data requests, as well as a per-file response + size limit.][11342] [10614]: https://github.com/enso-org/enso/pull/10614 [10660]: https://github.com/enso-org/enso/pull/10660 @@ -121,6 +123,7 @@ [11135]: https://github.com/enso-org/enso/pull/11135 [11205]: https://github.com/enso-org/enso/pull/11205 [11229]: https://github.com/enso-org/enso/pull/11229 +[11342]: https://github.com/enso-org/enso/pull/11342 #### Enso Language & Runtime diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso index b6886fa047..2123bd0f58 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso @@ -12,6 +12,7 @@ import project.Errors.Illegal_Argument.Illegal_Argument import project.Errors.Problem_Behavior.Problem_Behavior import project.Internal.Data_Read_Helpers import project.Meta +import project.Network.HTTP.Cache_Policy.Cache_Policy import project.Network.HTTP.Header.Header import project.Network.HTTP.HTTP import project.Network.HTTP.HTTP_Error.HTTP_Error @@ -51,6 +52,19 @@ from project.System.File_Format import Auto_Detect, File_Format If set to `Report_Error`, the operation fails with a dataflow error. If set to `Ignore`, the operation proceeds without errors or warnings. + ! Request Caching + + Responses to HTTP data requests are cached, and additional requests for the + same resources will use the cache, saving a round-trip call to the remote + server. 
Two resources are considered the same if the URIs and request + headers are the same. Header order does not affect sameness. + + The cache respects the "max-age" and "Age" response headers; see + `Data.fetch` for more details. + + The cached values are retained as long as the project remains open. Closing + a project will clear the cache. + > Example Read the first sheet of an XLSX from disk and convert it into a table. @@ -72,7 +86,7 @@ read : Text | URI | File -> File_Format -> Problem_Behavior -> Any ! File_Error read path=(Missing_Argument.throw "path") format=Auto_Detect (on_problems : Problem_Behavior = ..Report_Warning) = case path of _ : Text -> if Data_Read_Helpers.looks_like_uri path then Data_Read_Helpers.fetch_following_data_links path format=format else read (File.new path) format on_problems - uri : URI -> Data_Read_Helpers.fetch_following_data_links uri format=format + uri : URI -> fetch uri format=format _ -> file_obj = File.new path if file_obj.is_directory then Error.throw (Illegal_Argument.Error "Cannot `read` a directory, use `Data.list`.") else @@ -183,6 +197,32 @@ list (directory:(Text | File)=enso_project.root) (name_filter:Text="") recursive Defaults to `Auto_Detect`. If `Raw_Response` is selected or if the format cannot be determined automatically, a raw HTTP `Response` will be returned. + ! Request Caching + + Responses to HTTP data requests are cached, and additional requests for the + same resources will use the cache, saving a round-trip call to the remote + server. Two resources are considered the same if the URIs and request + headers are the same. Header order does not affect sameness. + + The cached values are retained as long as the project remains open. Closing + a project will clear the cache. + + The cache respects the "max-age" and "Age" response headers received from + remote servers. These headers are used to determine if the cached value is + fresh or stale. 
If it is stale, the cached value is removed and a request + is made again to the remote servers. + + The following limits are imposed on values stored in the cache: + - Single file limit: a single file can be no more than 10M. + - Total cache size limit: the entire cache can be no more than 10G. + + For data responses over the single file limit, you can use `Data.download` + to download the file locally. Download sizes are not constrained by either + limit. + + If the entire cache goes over the total cache size limit, the + least-recently-used entries are removed. + > Example Read from an HTTP endpoint. @@ -198,9 +238,9 @@ list (directory:(Text | File)=enso_project.root) (name_filter:Text="") recursive @uri (Text_Input display=..Always) @format Data_Read_Helpers.format_widget_with_raw_response @headers Header.default_widget -fetch : (URI | Text) -> HTTP_Method -> Vector (Header | Pair Text Text) -> File_Format -> Any ! Request_Error | HTTP_Error -fetch (uri:(URI | Text)=(Missing_Argument.throw "uri")) (method:HTTP_Method=..Get) (headers:(Vector (Header | Pair Text Text))=[]) (format = Auto_Detect) = - Data_Read_Helpers.fetch_following_data_links uri method headers (Data_Read_Helpers.handle_legacy_format "fetch" "format" format) +fetch : (URI | Text) -> HTTP_Method -> Vector (Header | Pair Text Text) -> File_Format -> Cache_Policy -> Any ! Request_Error | HTTP_Error +fetch (uri:(URI | Text)=(Missing_Argument.throw "uri")) (method:HTTP_Method=..Get) (headers:(Vector (Header | Pair Text Text))=[]) (format = Auto_Detect) (cache_policy:Cache_Policy = ..Default) = + Data_Read_Helpers.fetch_following_data_links uri method headers (Data_Read_Helpers.handle_legacy_format "fetch" "format" format) cache_policy=cache_policy ## ALIAS http post, upload GROUP Output @@ -347,7 +387,7 @@ post (uri:(URI | Text)=(Missing_Argument.throw "uri")) (body:Request_Body=..Empt download : (URI | Text) -> Writable_File -> HTTP_Method -> Vector (Header | Pair Text Text) -> File ! 
Request_Error | HTTP_Error download (uri:(URI | Text)=(Missing_Argument.throw "uri")) file:Writable_File (method:HTTP_Method=..Get) (headers:(Vector (Header | Pair Text Text))=[]) = Context.Output.if_enabled disabled_message="As writing is disabled, cannot download to a file. Press the Write button ▶ to perform the operation." panic=False <| - response = HTTP.fetch uri method headers + response = HTTP.fetch uri method headers cache_policy=Cache_Policy.No_Cache case Data_Link.is_data_link response.body.metadata of True -> # If the resource was a data link, we follow it, download the target data and try to write it to a file. diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Utils.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Utils.enso index 73dd820165..c7b973b73e 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Utils.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Utils.enso @@ -12,6 +12,7 @@ import project.Enso_Cloud.Errors.Not_Logged_In import project.Enso_Cloud.Internal.Authentication import project.Error.Error import project.Function.Function +import project.Network.HTTP.Cache_Policy.Cache_Policy import project.Network.HTTP.Header.Header import project.Network.HTTP.HTTP import project.Network.HTTP.HTTP_Error.HTTP_Error @@ -95,7 +96,7 @@ http_request (method : HTTP_Method) (url : URI) (body : Request_Body = ..Empty) all_headers = [authorization_header] + additional_headers as_connection_error err = Error.throw (Enso_Cloud_Error.Connection_Error err) - response = HTTP.new.request (Request.new method url headers=all_headers body=body) error_on_failure_code=False + response = HTTP.new.request (Request.new method url headers=all_headers body=body) cache_policy=..No_Cache error_on_failure_code=False . catch HTTP_Error as_connection_error . 
catch Request_Error as_connection_error if response.is_error && (retries > 0) then http_request method url body additional_headers error_handlers (retries - 1) else diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Errors/Common.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Errors/Common.enso index d1f6f53c3b..86c132f88b 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Errors/Common.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Errors/Common.enso @@ -15,6 +15,7 @@ polyglot java import java.lang.ArithmeticException polyglot java import java.lang.ClassCastException polyglot java import java.lang.OutOfMemoryError polyglot java import org.enso.base.CompareException +polyglot java import org.enso.base.cache.ResponseTooLargeException ## An error indicating that no value was found. type Not_Found @@ -553,3 +554,25 @@ type Out_Of_Range to_text self = extra = if self.message.is_nothing then "" else ": "+self.message.to_text "(Out_Of_Range (value = "+self.value.to_text+")" + extra + ")" + +## Indicates that the response from a remote endpoint is over the size limit. +type Response_Too_Large + ## PRIVATE + Error limit:Integer + + ## PRIVATE + Create a human-readable version of the error. + to_display_text : Text + to_display_text self = + suggestion = " Use `Data.fetch` with `cache_policy=No_Cache`, or use `Data.download` to fetch the data to a local file, and `Data.read` to read the file." + "Response too large: response size is over the limit ("+self.limit.to_text+")" + suggestion + + ## PRIVATE + to_text : Text + to_text self = + "(Response_Too_Large (limit = "+self.limit.to_text+")" + ")" + + ## PRIVATE + Convert the Java exception to an Enso dataflow error. 
+ handle_java_exception ~action = + Panic.catch ResponseTooLargeException action (cause-> Error.throw (Response_Too_Large.Error cause.payload.getLimit)) diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Internal/Data_Read_Helpers.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Internal/Data_Read_Helpers.enso index 37d879f0a4..fa00cdafa2 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Internal/Data_Read_Helpers.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Internal/Data_Read_Helpers.enso @@ -10,10 +10,12 @@ import project.Errors.Deprecated.Deprecated import project.Errors.Problem_Behavior.Problem_Behavior import project.Metadata.Display import project.Metadata.Widget +import project.Network.HTTP.Cache_Policy.Cache_Policy import project.Network.HTTP.HTTP import project.Network.HTTP.HTTP_Error.HTTP_Error import project.Network.HTTP.HTTP_Method.HTTP_Method import project.Network.URI.URI +import project.Nothing.Nothing import project.Warning.Warning from project.Data import Raw_Response from project.Data.Boolean import Boolean, False, True @@ -31,9 +33,9 @@ looks_like_uri path:Text -> Boolean = ## PRIVATE A common implementation for fetching a resource and decoding it, following encountered data links. 
-fetch_following_data_links (uri:URI) (method:HTTP_Method = HTTP_Method.Get) (headers:Vector = []) format = +fetch_following_data_links (uri:URI) (method:HTTP_Method = ..Get) (headers:Vector = []) format (cache_policy:Cache_Policy = ..Default) = fetch_and_decode = - response = HTTP.fetch uri method headers + response = HTTP.fetch uri method headers cache_policy=cache_policy decode_http_response_following_data_links response format error_handler attempt = diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP.enso index 34511c97e7..5f115b906d 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP.enso @@ -1,19 +1,24 @@ import project.Any.Any import project.Data.Dictionary.Dictionary import project.Data.Hashset.Hashset +import project.Data.Numbers.Integer import project.Data.Pair.Pair +import project.Data.Sort_Direction.Sort_Direction import project.Data.Text.Encoding.Encoding import project.Data.Text.Text +import project.Data.Time.Date_Time.Date_Time import project.Data.Time.Duration.Duration import project.Data.Vector.No_Wrap import project.Data.Vector.Vector import project.Enso_Cloud.Enso_Secret.Enso_Secret import project.Error.Error import project.Errors.Common.Forbidden_Operation +import project.Errors.Common.Response_Too_Large import project.Errors.Illegal_Argument.Illegal_Argument import project.Errors.Unimplemented.Unimplemented import project.Function.Function import project.Meta +import project.Network.HTTP.Cache_Policy.Cache_Policy import project.Network.HTTP.Header.Header import project.Network.HTTP.HTTP_Error.HTTP_Error import project.Network.HTTP.HTTP_Method.HTTP_Method @@ -44,6 +49,7 @@ polyglot java import java.net.http.HttpRequest.Builder polyglot java import java.net.InetSocketAddress polyglot java import java.net.ProxySelector polyglot java import javax.net.ssl.SSLContext +polyglot 
java import org.enso.base.enso_cloud.EnsoHTTPResponseCache polyglot java import org.enso.base.enso_cloud.EnsoSecretHelper polyglot java import org.enso.base.file_system.File_Utils polyglot java import org.enso.base.net.http.MultipartBodyBuilder @@ -52,10 +58,14 @@ polyglot java import org.enso.base.net.http.UrlencodedBodyBuilder type HTTP ## PRIVATE Static helper for get-like methods - fetch : (URI | Text) -> HTTP_Method -> Vector (Header | Pair Text Text) -> Response ! Request_Error | HTTP_Error - fetch (uri:(URI | Text)) (method:HTTP_Method=..Get) (headers:(Vector (Header | Pair Text Text))=[]) = if_fetch_method method <| + + ! Response caching + + See `Data.fetch` for information about response caching. + fetch : (URI | Text) -> HTTP_Method -> Vector (Header | Pair Text Text) -> Cache_Policy -> Response ! Request_Error | HTTP_Error + fetch (uri:(URI | Text)) (method:HTTP_Method=..Get) (headers:(Vector (Header | Pair Text Text))=[]) (cache_policy:Cache_Policy = ..Default) = if_fetch_method method <| request = Request.new method uri (Header.unify_vector headers) Request_Body.Empty - HTTP.new.request request + HTTP.new.request request cache_policy=cache_policy ## PRIVATE Static helper for post-like methods @@ -117,23 +127,33 @@ type HTTP Please note, this must be closed after use (either directly or via the helpers on Response_Body). + ! Response caching + + See `Data.fetch` for information about response caching. + Arguments: - req: The HTTP request to send using `self` HTTP client. - error_on_failure_code: Whether or not to throw an error if the response code is not a success code. - request : Request -> Boolean -> Response ! Request_Error | HTTP_Error | Illegal_Argument - request self req error_on_failure_code=True = + request : Request -> Boolean -> Cache_Policy -> Response ! 
Request_Error | HTTP_Error | Illegal_Argument | Response_Too_Large + request self req error_on_failure_code=True (cache_policy:Cache_Policy = ..Default) = # Prevent request if the method is a write-like method and output context is disabled. check_output_context ~action = if (if_fetch_method req.method True if_not=Context.Output.is_enabled) then action else Error.throw (Forbidden_Operation.Error ("As writing is disabled, " + req.method.to_text + " request not sent. Press the Write button ▶ to send it.")) + # You can only explicitly mention the cache for GET requests. + check_cache_policy ~action = + cache_policy_value_ok = req.method == HTTP_Method.Get || cache_policy != Cache_Policy.Use_Cache + if cache_policy_value_ok then action else + Error.throw (Illegal_Argument.Error "Cannot specify cache policy for a "+req.method.to_text+" request") + handle_request_error = handler caught_panic = exception = caught_panic.payload Error.throw (Request_Error.Error (Meta.type_of exception . to_text) exception.getMessage) Panic.catch IllegalArgumentException handler=handler <| Panic.catch IOException handler=handler - handle_request_error <| Illegal_Argument.handle_java_exception <| check_output_context <| + handle_request_error <| Illegal_Argument.handle_java_exception <| check_output_context <| check_cache_policy <| Response_Too_Large.handle_java_exception <| headers = _resolve_headers req headers.if_not_error <| resolved_body = _resolve_body req.body self.hash_method @@ -147,12 +167,25 @@ type HTTP all_headers = headers + boundary_header_list mapped_headers = all_headers.map on_problems=No_Wrap .to_java_pair - response = Response.Value (EnsoSecretHelper.makeRequest (self.make_client self resolved_body.hash) builder req.uri.to_java_representation mapped_headers) + response = Response.Value (EnsoSecretHelper.makeRequest (self.make_client self resolved_body.hash) builder req.uri.to_java_representation mapped_headers (cache_policy.should_use_cache req)) if 
error_on_failure_code.not || response.code.is_success then response else body = response.body.decode_as_text.catch Any _->"" message = if body.is_empty then Nothing else body Error.throw (HTTP_Error.Status_Error response.code message response.uri) + ## ALIAS flush + ICON temp + Clear the HTTP request cache. + + > Example + Clear the HTTP response cache. + + import Standard.Base.Network.HTTP + + HTTP.clear_response_cache + clear_response_cache : Nothing + clear_response_cache -> Nothing = EnsoHTTPResponseCache.clear + ## PRIVATE ADVANCED Create a copy of the HTTP client with a custom SSL context. diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Cache_Policy.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Cache_Policy.enso new file mode 100644 index 0000000000..cad1bcece4 --- /dev/null +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Cache_Policy.enso @@ -0,0 +1,19 @@ +import project.Network.HTTP.HTTP_Method.HTTP_Method +import project.Network.HTTP.Request.Request +from project.Data.Boolean import Boolean, False, True + +type Cache_Policy + ## Use the default policy for the HTTP method of the request. + Default + + ## Use the response cache. + Use_Cache + + ## Don't use the response cache. 
+ No_Cache + + # Default to using the cache for GET requests, unless explicitly disabled + should_use_cache self request:Request -> Boolean = case self of + Cache_Policy.Default -> if request.method == HTTP_Method.Get then True else False + Cache_Policy.Use_Cache -> True + Cache_Policy.No_Cache -> False diff --git a/std-bits/base/src/main/java/org/enso/base/Stream_Utils.java b/std-bits/base/src/main/java/org/enso/base/Stream_Utils.java index 04bd9e3be0..cad4db4e96 100644 --- a/std-bits/base/src/main/java/org/enso/base/Stream_Utils.java +++ b/std-bits/base/src/main/java/org/enso/base/Stream_Utils.java @@ -160,4 +160,28 @@ public final class Stream_Utils { outputStreamLike.write(b, off, len); } } + + /** + * Copies the contents of the input stream to the output stream. If the number of bytes copied is + * greater than maxLength, abort the copy and return false; otherwise return true. + */ + public static boolean limitedCopy( + InputStream inputStream, OutputStream outputStream, long maxLength) throws IOException { + byte buffer[] = new byte[4096]; + long numBytesRead = 0; + while (true) { + int n = inputStream.read(buffer); + if (n <= 0) { + break; + } + if (numBytesRead + n <= maxLength) { + outputStream.write(buffer, 0, n); + } + numBytesRead += n; + if (numBytesRead > maxLength) { + return false; + } + } + return true; + } } diff --git a/std-bits/base/src/main/java/org/enso/base/cache/LRUCache.java b/std-bits/base/src/main/java/org/enso/base/cache/LRUCache.java new file mode 100644 index 0000000000..df1329da6a --- /dev/null +++ b/std-bits/base/src/main/java/org/enso/base/cache/LRUCache.java @@ -0,0 +1,353 @@ +package org.enso.base.cache; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.time.Duration; +import java.time.ZonedDateTime; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import
java.util.Map; +import java.util.Optional; +import java.util.SortedSet; +import java.util.TreeSet; +import java.util.function.Predicate; +import java.util.logging.Level; +import java.util.logging.Logger; +import java.util.stream.Collectors; +import org.enso.base.Stream_Utils; + +/** + * LRUCache is a cache for data presented via InputStreams. Files are deleted on JVM exit. + * + *

It puts limits on the size of files that can be requested, and on the total cache size, + * deleting entries to make space for new ones. All cache files are set to be deleted automatically + * on JVM exit. + * + * @param Additional metadata to associate with the data. + */ +public class LRUCache { + private static final Logger logger = Logger.getLogger(LRUCache.class.getName()); + + private final long maxFileSize; + private final long maxTotalCacheSize; + + private final CacheTestParameters cacheTestParameters = new CacheTestParameters(); + + private final Map> cache = new HashMap<>(); + private final Map lastUsed = new HashMap<>(); + + public LRUCache(long maxFileSize, long maxTotalCacheSize) { + this.maxFileSize = maxFileSize; + this.maxTotalCacheSize = maxTotalCacheSize; + } + + public CacheResult getResult(ItemBuilder itemBuilder) + throws IOException, InterruptedException, ResponseTooLargeException { + String cacheKey = itemBuilder.makeCacheKey(); + if (cache.containsKey(cacheKey)) { + return getResultForCacheEntry(cacheKey); + } else { + return makeRequestAndCache(cacheKey, itemBuilder); + } + } + + /** + * IOExceptions thrown by the HTTP request are propagated; IOExceptions thrown while storing the + * data in the cache are caught, and the request is re-issued without caching. + */ + private CacheResult makeRequestAndCache(String cacheKey, ItemBuilder itemBuilder) + throws IOException, InterruptedException, ResponseTooLargeException { + assert !cache.containsKey(cacheKey); + + Item item = itemBuilder.buildItem(); + + if (!item.shouldCache()) { + return new CacheResult<>(item.stream(), item.metadata()); + } + + if (item.sizeMaybe.isPresent()) { + long size = item.sizeMaybe().get(); + if (size > getMaxFileSize()) { + throw new ResponseTooLargeException(getMaxFileSize()); + } + makeRoomFor(size); + } + + try { + // Download the response data. 
+ File responseData = downloadResponseData(cacheKey, item); + M metadata = item.metadata(); + long size = responseData.length(); + ZonedDateTime expiry = getNow().plus(Duration.ofSeconds(item.ttl().get())); + + // Create a cache entry. + var cacheEntry = new CacheEntry<>(responseData, metadata, size, expiry); + cache.put(cacheKey, cacheEntry); + markCacheEntryUsed(cacheKey); + + // Clear out old entries to satisfy the total cache size limit. This might + // be necessary here if we didn't receive a correct content size value. + removeFilesToSatisfyLimit(); + + return getResultForCacheEntry(cacheKey); + } catch (IOException e) { + logger.log( + Level.WARNING, "Failure storing cache entry; will re-execute without caching: {}", e); + // Re-issue the request since we don't know if we've consumed any of the response. + Item rerequested = itemBuilder.buildItem(); + return new CacheResult<>(rerequested.stream(), rerequested.metadata()); + } + } + + /** Mark cache entry used and return a stream reading from the cache file. */ + private CacheResult getResultForCacheEntry(String cacheKey) throws IOException { + markCacheEntryUsed(cacheKey); + return new CacheResult<>( + new FileInputStream(cache.get(cacheKey).responseData), cache.get(cacheKey).metadata()); + } + + /** + * Read the repsonse data from the remote server into the cache file. If the downloaded data is + * over the file size limit, throw a ResponseTooLargeException. + */ + private File downloadResponseData(String cacheKey, Item item) + throws IOException, ResponseTooLargeException { + File temp = File.createTempFile("LRUCache-" + cacheKey, ""); + temp.deleteOnExit(); + var inputStream = item.stream(); + var outputStream = new FileOutputStream(temp); + boolean successful = false; + try { + // Limit the download to getMaxFileSize(). 
+ boolean sizeOK = Stream_Utils.limitedCopy(inputStream, outputStream, getMaxFileSize()); + + if (sizeOK) { + successful = true; + return temp; + } else { + throw new ResponseTooLargeException(getMaxFileSize()); + } + } finally { + outputStream.close(); + if (!successful) { + if (!temp.delete()) { + logger.log(Level.WARNING, "Unable to delete cache file (key {})", cacheKey); + } + } + } + } + + /** Mark the entry with the current time, to maintain LRU data. */ + private void markCacheEntryUsed(String cacheKey) { + lastUsed.put(cacheKey, getNow()); + } + + /** Remove all cache entries (and their files) that have passed their TTL. */ + private void removeStaleEntries() { + var now = getNow(); + removeCacheEntriesByPredicate(e -> e.expiry().isBefore(now)); + } + + /** Remove all cache entries (and their files). */ + public void clear() { + removeCacheEntriesByPredicate(e -> true); + } + + /** Remove all cache entries (and their cache files) that match the predicate. */ + private void removeCacheEntriesByPredicate(Predicate> predicate) { + List>> toRemove = + cache.entrySet().stream() + .filter(me -> predicate.test(me.getValue())) + .collect(Collectors.toList()); + removeCacheEntries(toRemove); + } + + /** Remove a set of cache entries. */ + private void removeCacheEntries(List>> toRemove) { + for (var entry : toRemove) { + removeCacheEntry(entry); + } + } + + /** Remove a cache entry: from `cache`, `lastUsed`, and the filesystem. */ + private void removeCacheEntry(Map.Entry> toRemove) { + var key = toRemove.getKey(); + var value = toRemove.getValue(); + cache.remove(key); + lastUsed.remove(key); + removeCacheFile(key, value); + } + + /** Remove a cache file. */ + private void removeCacheFile(String key, CacheEntry cacheEntry) { + boolean removed = cacheEntry.responseData.delete(); + if (!removed) { + logger.log(Level.WARNING, "Unable to delete cache file for key {0}", key); + } + } + + /** Remove least-recently used entries until there is enough room for a new file. 
*/ + private void makeRoomFor(long newFileSize) { + removeStaleEntries(); + + long totalSize = getTotalCacheSize() + newFileSize; + long maxTotalCacheSize = getMaxTotalCacheSize(); + if (totalSize <= maxTotalCacheSize) { + return; + } + + // Remove least-recently used entries first. + var sortedEntries = getSortedEntries(); + var toRemove = new ArrayList>>(); + for (var mapEntry : sortedEntries) { + if (totalSize <= maxTotalCacheSize) { + break; + } + toRemove.add(mapEntry); + totalSize -= mapEntry.getValue().size(); + } + assert totalSize <= maxTotalCacheSize; + removeCacheEntries(toRemove); + } + + private SortedSet>> getSortedEntries() { + var sortedEntries = new TreeSet>>(cacheEntryLRUComparator); + sortedEntries.addAll(cache.entrySet()); + return sortedEntries; + } + + /** Remove least-recently used entries until the total cache size is under the limit. */ + private void removeFilesToSatisfyLimit() { + makeRoomFor(0L); + } + + private long getTotalCacheSize() { + return cache.values().stream().collect(Collectors.summingLong(e -> e.size())); + } + + private long getMaxFileSize() { + return cacheTestParameters.getMaxFileSizeOverrideTestOnly().orElse(maxFileSize); + } + + private long getMaxTotalCacheSize() { + return cacheTestParameters.getMaxTotalCacheSizeOverrideTestOnly().orElse(maxTotalCacheSize); + } + + public int getNumEntries() { + return cache.size(); + } + + public List getFileSizesTestOnly() { + return new ArrayList<>( + cache.values().stream().map(CacheEntry::size).collect(Collectors.toList())); + } + + private ZonedDateTime getNow() { + return cacheTestParameters.getNowOverrideTestOnly().orElse(ZonedDateTime.now()); + } + + /** Return a set of parameters that can be used to modify settings for testing purposes. 
*/ + public CacheTestParameters getCacheTestParameters() { + return cacheTestParameters; + } + + private record CacheEntry(File responseData, M metadata, long size, ZonedDateTime expiry) {} + + /** + * A record to define the contents and properties of something to be cached. + * + * @param stream The InputStream providing the contents of the thing to be cached. + * @param sizeMaybe (Optional) The size of the data provided by the InputStream + * @param ttl (Optional) The time for which the data is fresh. If the returned Item has a TTL of + * 0, the item will not be cahced at all. + */ + public record Item( + InputStream stream, M metadata, Optional sizeMaybe, Optional ttl) { + + public boolean shouldCache() { + return ttl.isPresent(); + } + } + + public record CacheResult(InputStream inputStream, M metadata) {} + + /** Wraps code that creates an Item to be cached. */ + public interface ItemBuilder { + /** Generate a unique key for the Item */ + String makeCacheKey(); + + /** + * Creates the Item to be cached. Returning an Item with no TTL indicates that the data should + * not be cached. This is only called when the Item is not already present in the cache. + */ + Item buildItem() throws IOException, InterruptedException; + } + + private final Comparator>> cacheEntryLRUComparator = + Comparator.comparing(me -> lastUsed.get(me.getKey())); + + /** A set of parameters that can be used to modify cache settings for testing purposes. */ + public class CacheTestParameters { + /** This value is used for the current time when testing TTL expiration logic. */ + private Optional nowOverrideTestOnly = Optional.empty(); + + /** + * Used for testing file and cache size limits. These cannot be set to values larger than the + * real limits. 
+ */ + private Optional maxFileSizeOverrideTestOnly = Optional.empty(); + + private Optional maxTotalCacheSizeOverrideTestOnly = Optional.empty(); + + public Optional getNowOverrideTestOnly() { + return nowOverrideTestOnly; + } + + public void setNowOverrideTestOnly(ZonedDateTime nowOverride) { + nowOverrideTestOnly = Optional.of(nowOverride); + } + + public void clearNowOverrideTestOnly() { + nowOverrideTestOnly = Optional.empty(); + } + + public Optional getMaxFileSizeOverrideTestOnly() { + return maxFileSizeOverrideTestOnly; + } + + public void setMaxFileSizeOverrideTestOnly(long maxFileSizeOverrideTestOnly_) { + if (maxFileSizeOverrideTestOnly_ > maxFileSize) { + throw new IllegalArgumentException( + "Cannot set the (test-only) maximum file size to more than the allowed limit of " + + maxFileSize); + } + maxFileSizeOverrideTestOnly = Optional.of(maxFileSizeOverrideTestOnly_); + } + + public void clearMaxFileSizeOverrideTestOnly() { + maxFileSizeOverrideTestOnly = Optional.empty(); + } + + public Optional getMaxTotalCacheSizeOverrideTestOnly() { + return maxTotalCacheSizeOverrideTestOnly; + } + + public void setMaxTotalCacheSizeOverrideTestOnly(long maxTotalCacheSizeOverrideTestOnly_) { + if (maxTotalCacheSizeOverrideTestOnly_ > maxTotalCacheSize) { + throw new IllegalArgumentException( + "Cannot set the (test-only) total cache size to more than the allowed limit of " + + maxTotalCacheSize); + } + maxTotalCacheSizeOverrideTestOnly = Optional.of(maxTotalCacheSizeOverrideTestOnly_); + } + + public void clearMaxTotalCacheSizeOverrideTestOnly() { + maxTotalCacheSizeOverrideTestOnly = Optional.empty(); + } + } +} diff --git a/std-bits/base/src/main/java/org/enso/base/cache/ResponseTooLargeException.java b/std-bits/base/src/main/java/org/enso/base/cache/ResponseTooLargeException.java new file mode 100644 index 0000000000..4e7bddf7cb --- /dev/null +++ b/std-bits/base/src/main/java/org/enso/base/cache/ResponseTooLargeException.java @@ -0,0 +1,15 @@ +package 
/**
 * Thrown when a response's size exceeds a configured limit — either the per-file limit or the
 * total cache size limit.
 */
public class ResponseTooLargeException extends Exception {
  // The size limit, in bytes, that was exceeded.
  private final long limit;

  /**
   * @param limit the size limit, in bytes, that the response exceeded
   */
  public ResponseTooLargeException(long limit) {
    // Fixed typo in the user-facing message ("repsonse" -> "response").
    super("Response too large: response size is over the limit (" + limit + ")");

    this.limit = limit;
  }

  /** Returns the size limit, in bytes, that was exceeded. */
  public long getLimit() {
    return limit;
  }
}

It uses LRUCache, so it also puts limits on the size of files that can be requested, and on + * the total cache size, deleting entries to make space for new ones. All cache files are set to be + * deleted automatically on JVM exit. + * + *

Without caching, EnsoHttpResponse contains an InputStream providing the response data. When + * there is a cache hit, this stream reads from the local file storing the cached data. When there + * is no cache hit, the InputStream is connected directly to the remote server. + */ +public class EnsoHTTPResponseCache { + // 1 year. + private static final int DEFAULT_TTL_SECONDS = 31536000; + private static final long MAX_FILE_SIZE = 2L * 1024 * 1024 * 1024; + private static final long MAX_TOTAL_CACHE_SIZE = 20L * 1024 * 1024 * 1024; + + private static final LRUCache lruCache = + new LRUCache<>(MAX_FILE_SIZE, MAX_TOTAL_CACHE_SIZE); + + public static EnsoHttpResponse makeRequest(RequestMaker requestMaker) + throws IOException, InterruptedException, ResponseTooLargeException { + var itemBuilder = new ItemBuilder(requestMaker); + + LRUCache.CacheResult cacheResult = lruCache.getResult(itemBuilder); + + return requestMaker.reconstructResponseFromCachedStream( + cacheResult.inputStream(), cacheResult.metadata()); + } + + public static class ItemBuilder implements LRUCache.ItemBuilder { + private final RequestMaker requestMaker; + + ItemBuilder(RequestMaker requestMaker) { + this.requestMaker = requestMaker; + } + + @Override + public String makeCacheKey() { + return requestMaker.hashKey(); + } + + /** Only HTTP 200 responses are cached; all others are returned uncached. */ + @Override + public LRUCache.Item buildItem() throws IOException, InterruptedException { + var response = requestMaker.makeRequest(); + + if (response.statusCode() != 200) { + // Don't cache non-200 repsonses. 
+ return new LRUCache.Item<>( + response.body(), + new Metadata(response.headers(), response.statusCode()), + Optional.empty(), + Optional.empty()); + } else { + InputStream inputStream = response.body(); + var metadata = new Metadata(response.headers(), response.statusCode()); + var sizeMaybe = getResponseDataSize(response.headers()); + int ttl = calculateTTL(response.headers()); + return new LRUCache.Item<>(inputStream, metadata, sizeMaybe, Optional.of(ttl)); + } + } + } + + /** Get the size of the response data, if available. */ + private static Optional getResponseDataSize(HttpHeaders headers) { + return headers.firstValue("content-length").map(Long::parseLong); + } + + /** + * We define the TTL as the amount of time that the response should be considered fresh. + * + *

Define t0 as the time at which the content was generated on the origin server. + * + *

Define t1 as the time at which the current request was handled, either by the origin server + * or an intervening cache. + * + *

The 'Age' header, if present is (t1 - t0). + * + *

The 'max-age' value in the 'Cache-Control' header, if present, is the origin server's + * definition of how long the response should be considered fresh. + * + *

If 'max-age' and 'Age' are both present, we set TTL = max-age - Age. If only 'max-age' is + * present, we set TTL = max-age. If neither are present, we use a default. + */ + private static int calculateTTL(HttpHeaders headers) { + Integer maxAge = getMaxAge(headers); + if (maxAge == null) { + return DEFAULT_TTL_SECONDS; + } else { + int age = headers.firstValue("age").map(Integer::parseInt).orElse(0); + return maxAge - age; + } + } + + private static Integer getMaxAge(HttpHeaders headers) { + var cacheControlMaybe = headers.firstValue("cache-control"); + Integer maxAge = null; + if (cacheControlMaybe.isPresent()) { + var cacheControl = cacheControlMaybe.get(); + var cacheControlEntries = cacheControl.split(","); + for (var entry : cacheControlEntries) { + if (entry.trim().toLowerCase().startsWith("max-age")) { + var maxAgeBinding = entry.split("="); + if (maxAgeBinding.length > 1) { + maxAge = Integer.valueOf(maxAgeBinding[1]); + } + break; + } + } + } + return maxAge; + } + + public static void clear() { + lruCache.clear(); + } + + public static int getNumEntries() { + return lruCache.getNumEntries(); + } + + public static List getFileSizesTestOnly() { + return lruCache.getFileSizesTestOnly(); + } + + /** Return a set of parameters that can be used to modify settings for testing purposes. */ + public static LRUCache.CacheTestParameters getCacheTestParameters() { + return lruCache.getCacheTestParameters(); + } + + public interface RequestMaker { + /** Executes the HTTP request and returns the response. */ + EnsoHttpResponse makeRequest() throws IOException, InterruptedException; + + /** + * Returns a hash key that can be used to uniquely identify this request. This will be used to + * decide if the `run` method should be executed, or if a cached response will be returned. The + * hash key should not be reversible. 
+ */ + String hashKey(); + + /** + * When a cached response is returned, instead of executing `makeRequest`, this method is used + * to construct the response. + */ + EnsoHttpResponse reconstructResponseFromCachedStream( + InputStream inputStream, Metadata metadata); + } + + public record Metadata(HttpHeaders headers, int statusCode) {} +} diff --git a/std-bits/base/src/main/java/org/enso/base/enso_cloud/EnsoSecretHelper.java b/std-bits/base/src/main/java/org/enso/base/enso_cloud/EnsoSecretHelper.java index 86e9371787..3e4717fee5 100644 --- a/std-bits/base/src/main/java/org/enso/base/enso_cloud/EnsoSecretHelper.java +++ b/std-bits/base/src/main/java/org/enso/base/enso_cloud/EnsoSecretHelper.java @@ -1,6 +1,7 @@ package org.enso.base.enso_cloud; import java.io.IOException; +import java.io.InputStream; import java.net.URI; import java.net.URISyntaxException; import java.net.http.HttpClient; @@ -9,8 +10,12 @@ import java.net.http.HttpResponse; import java.sql.Connection; import java.sql.DriverManager; import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Comparator; import java.util.List; import java.util.Properties; +import org.enso.base.cache.ResponseTooLargeException; import org.enso.base.net.URISchematic; import org.enso.base.net.URIWithSecrets; import org.graalvm.collections.Pair; @@ -58,44 +63,119 @@ public final class EnsoSecretHelper extends SecretValueResolver { HttpClient client, Builder builder, URIWithSecrets uri, - List> headers) - throws IllegalArgumentException, IOException, InterruptedException { + List> headers, + boolean useCache) + throws IllegalArgumentException, + IOException, + InterruptedException, + ResponseTooLargeException { // Build a new URI with the query arguments. 
URI resolvedURI = resolveURI(uri); - URI renderedURI = uri.render(); - boolean hasSecrets = - uri.containsSecrets() || headers.stream().anyMatch(p -> p.getRight().containsSecrets()); - if (hasSecrets) { - if (resolvedURI.getScheme() == null) { - throw new IllegalArgumentException("The URI must have a scheme."); - } + List> resolvedHeaders = + headers.stream() + .map( + pair -> { + return Pair.create(pair.getLeft(), resolveValue(pair.getRight())); + }) + .toList(); - if (!resolvedURI.getScheme().equalsIgnoreCase("https")) { - throw new IllegalArgumentException( - "Secrets are not allowed in HTTP connections, use HTTPS instead."); - } + var requestMaker = + new RequestMaker(client, builder, uri, resolvedURI, headers, resolvedHeaders); + + if (!useCache) { + return requestMaker.makeRequest(); + } else { + return EnsoHTTPResponseCache.makeRequest(requestMaker); } - - builder.uri(resolvedURI); - - // Resolve the header arguments. - for (Pair header : headers) { - builder.header(header.getLeft(), resolveValue(header.getRight())); - } - - // Build and Send the request. 
- var httpRequest = builder.build(); - var bodyHandler = HttpResponse.BodyHandlers.ofInputStream(); - var javaResponse = client.send(httpRequest, bodyHandler); - - // Extract parts of the response - return new EnsoHttpResponse( - renderedURI, javaResponse.headers(), javaResponse.body(), javaResponse.statusCode()); } public static void deleteSecretFromCache(String secretId) { EnsoSecretReader.removeFromCache(secretId); } + + private static class RequestMaker implements EnsoHTTPResponseCache.RequestMaker { + private final HttpClient client; + private final Builder builder; + private final URIWithSecrets uri; + private final URI resolvedURI; + private final List> headers; + private final List> resolvedHeaders; + + RequestMaker( + HttpClient client, + Builder builder, + URIWithSecrets uri, + URI resolvedURI, + List> headers, + List> resolvedHeaders) { + this.client = client; + this.builder = builder; + this.uri = uri; + this.resolvedURI = resolvedURI; + this.headers = headers; + this.resolvedHeaders = resolvedHeaders; + } + + @Override + public EnsoHttpResponse makeRequest() throws IOException, InterruptedException { + boolean hasSecrets = + uri.containsSecrets() || headers.stream().anyMatch(p -> p.getRight().containsSecrets()); + if (hasSecrets) { + if (resolvedURI.getScheme() == null) { + throw new IllegalArgumentException("The URI must have a scheme."); + } + + if (!resolvedURI.getScheme().equalsIgnoreCase("https")) { + throw new IllegalArgumentException( + "Secrets are not allowed in HTTP connections, use HTTPS instead."); + } + } + + builder.uri(resolvedURI); + + for (Pair resolvedHeader : resolvedHeaders) { + builder.header(resolvedHeader.getLeft(), resolvedHeader.getRight()); + } + + // Build and Send the request. 
+ var httpRequest = builder.build(); + var bodyHandler = HttpResponse.BodyHandlers.ofInputStream(); + var javaResponse = client.send(httpRequest, bodyHandler); + + URI renderedURI = uri.render(); + + return new EnsoHttpResponse( + renderedURI, javaResponse.headers(), javaResponse.body(), javaResponse.statusCode()); + } + + /** Sorts the header by header name and value. */ + @Override + public String hashKey() { + var sortedHeaders = resolvedHeaders.stream().sorted(headerNameComparator).toList(); + List keyStrings = new ArrayList<>(sortedHeaders.size() + 1); + keyStrings.add(resolvedURI.toString()); + + for (Pair resolvedHeader : sortedHeaders) { + keyStrings.add(resolvedHeader.getLeft()); + keyStrings.add(resolvedHeader.getRight()); + } + + return Integer.toString(Arrays.deepHashCode(keyStrings.toArray())); + } + + @Override + public EnsoHttpResponse reconstructResponseFromCachedStream( + InputStream inputStream, EnsoHTTPResponseCache.Metadata metadata) { + URI renderedURI = uri.render(); + + return new EnsoHttpResponse( + renderedURI, metadata.headers(), inputStream, metadata.statusCode()); + } + } + + private static final Comparator> headerNameComparator = + Comparator.comparing((Pair pair) -> pair.getLeft()) + .thenComparing(Comparator.comparing(pair -> pair.getRight())); } diff --git a/test/Base_Tests/src/Network/Http_Spec.enso b/test/Base_Tests/src/Network/Http_Spec.enso index bcf256f71f..b72bb11750 100644 --- a/test/Base_Tests/src/Network/Http_Spec.enso +++ b/test/Base_Tests/src/Network/Http_Spec.enso @@ -3,6 +3,7 @@ from Standard.Base import all import Standard.Base.Errors.Common.Forbidden_Operation import Standard.Base.Errors.Common.Syntax_Error import Standard.Base.Errors.Illegal_Argument.Illegal_Argument +import Standard.Base.Network.HTTP.Cache_Policy.Cache_Policy import Standard.Base.Network.HTTP.HTTP_Error.HTTP_Error import Standard.Base.Network.HTTP.Request.Request import Standard.Base.Network.HTTP.Response.Response @@ -184,11 +185,11 @@ add_specs 
suite_builder = group_builder.specify "can select the version" <| Test.with_retries <| req = Request.get url_get - r2 = HTTP.new version=HTTP_Version.HTTP_2 . request req . decode_as_json + r2 = HTTP.new version=HTTP_Version.HTTP_2 . request req cache_policy=..No_Cache . decode_as_json r2.at "headers" . at "Connection" . should_equal "Upgrade, HTTP2-Settings" r2.at "headers" . at "Http2-Settings" . should_contain "AA" - r1 = HTTP.new version=HTTP_Version.HTTP_1_1 . request req . decode_as_json + r1 = HTTP.new version=HTTP_Version.HTTP_1_1 . request req cache_policy=..No_Cache . decode_as_json header_names = r1.at "headers" . field_names . map (s-> s.to_case Case.Lower) header_names.should_not_contain "connection" header_names.should_not_contain "http2-settings" @@ -606,7 +607,7 @@ add_specs suite_builder = r1.should_be_a Response group_builder.specify "should be able to handle server crash that closes stream abruptly" pending=pending_has_url <| - err = Data.fetch (base_url_with_slash+"crash?type=stream") + err = Data.fetch (base_url_with_slash+"crash?type=stream") cache_policy=..No_Cache err.should_fail_with HTTP_Error err.catch.message . 
should_equal "An IO error has occurred: java.io.IOException: closed" diff --git a/test/Table_Tests/src/IO/Fetch_Spec.enso b/test/Table_Tests/src/IO/Fetch_Spec.enso index d1943e9da6..125d84123f 100644 --- a/test/Table_Tests/src/IO/Fetch_Spec.enso +++ b/test/Table_Tests/src/IO/Fetch_Spec.enso @@ -1,18 +1,28 @@ from Standard.Base import all import Standard.Base.Data.Base_64.Base_64 +import Standard.Base.Errors.Common.Response_Too_Large import Standard.Base.Errors.File_Error.File_Error +import Standard.Base.Errors.Illegal_Argument.Illegal_Argument +import Standard.Base.Network.HTTP.Cache_Policy.Cache_Policy +import Standard.Base.Network.HTTP.Request.Request +import Standard.Base.Network.HTTP.Request_Body.Request_Body import Standard.Base.Network.HTTP.Response.Response import Standard.Base.Runtime.Context +import Standard.Base.Runtime.Ref.Ref from Standard.Table import all import Standard.Table.Errors.Invalid_JSON_Format from Standard.Test import all +import enso_dev.Base_Tests.Network.Enso_Cloud.Cloud_Tests_Setup.Cloud_Tests_Setup from enso_dev.Base_Tests.Network.Http.Http_Test_Setup import base_url_with_slash, pending_has_url import project.Util +polyglot java import java.lang.IllegalArgumentException +polyglot java import org.enso.base.enso_cloud.EnsoHTTPResponseCache + main filter=Nothing = suite = Test.build suite_builder-> add_specs suite_builder @@ -94,3 +104,334 @@ add_specs suite_builder = # Reinterpreting as TSV: r4 = (uri.add_query_argument "Content-Type" "text/tab-separated-values").fetch r4.should_equal (Table.from_rows ["Column 1"] [["A,B"], ["1,x"], ["3,y"]]) + + suite_builder.group "Response caching" pending=pending_has_url group_builder-> + get_num_response_cache_entries = + EnsoHTTPResponseCache.getNumEntries + with_counts ~action = + before_count = get_num_response_cache_entries + action + after_count = get_num_response_cache_entries + [before_count, after_count] + + reset_size_limits = + 
EnsoHTTPResponseCache.getCacheTestParameters.clearMaxFileSizeOverrideTestOnly + EnsoHTTPResponseCache.getCacheTestParameters.clearMaxTotalCacheSizeOverrideTestOnly + + expect_counts expected_counts ~action = + counts = with_counts action + counts . should_equal expected_counts frames_to_skip=1 + + get_cache_file_sizes : Vector Integer + get_cache_file_sizes -> Vector Integer = + Vector.from_polyglot_array EnsoHTTPResponseCache.getFileSizesTestOnly . sort Sort_Direction.Ascending + + url0 = base_url_with_slash+'test_download?max-age=16&length=10' + url1 = base_url_with_slash+'test_download?max-age=16&length=20' + url_post = base_url_with_slash + "post" + headers0 = [Header.new "A-Header" "a-header-value", Header.new "A-Header" "a-header-value"] + headers1 = [Header.new "A-Header" "a-different-header-value", Header.new "A-Header" "a-header-value"] + + # Run the request(s) twice and confirm the results are the same + check_same_results ~action = + results = 0.up_to 2 . map (_-> action) + results.distinct.length . should_equal 1 + + group_builder.specify "Cache should return the same repsonse" pending=pending_has_url <| Test.with_retries <| + HTTP.clear_response_cache + + check_same_results <| + HTTP.fetch url0 . decode_as_text + get_num_response_cache_entries . should_equal 1 + check_same_results <| + HTTP.fetch url1 . decode_as_text + get_num_response_cache_entries . should_equal 2 + + HTTP.clear_response_cache + + HTTP.fetch url0 cache_policy=Cache_Policy.Use_Cache . decode_as_text + HTTP.fetch url0 cache_policy=Cache_Policy.Use_Cache . decode_as_text + url1_body_1 = HTTP.fetch url1 cache_policy=Cache_Policy.Use_Cache . decode_as_text + HTTP.fetch url1 cache_policy=Cache_Policy.Use_Cache . decode_as_text . should_equal url1_body_1 + get_num_response_cache_entries . should_equal 2 + + HTTP.clear_response_cache + + url0_body_2 = HTTP.fetch url0 cache_policy=Cache_Policy.No_Cache . decode_as_text + HTTP.fetch url0 cache_policy=Cache_Policy.No_Cache . decode_as_text . 
should_not_equal url0_body_2 + url1_body_2 = HTTP.fetch url1 cache_policy=Cache_Policy.No_Cache . decode_as_text + HTTP.fetch url1 cache_policy=Cache_Policy.No_Cache . decode_as_text . should_not_equal url1_body_2 + get_num_response_cache_entries . should_equal 0 + + group_builder.specify "Cache should handle many entries" pending=pending_has_url <| Test.with_retries <| + count = 20 + + HTTP.clear_response_cache + check_same_results <| + 0.up_to count . map i-> + HTTP.fetch base_url_with_slash+"test_download?length="+i.to_text . decode_as_text + get_num_response_cache_entries . should_equal count + + HTTP.clear_response_cache + check_same_results <| + 0.up_to count . each i-> + headers = [Header.new "A-Header" "a-header-value-"+i.to_text] + HTTP.fetch base_url_with_slash+"test_download?length=8" headers=headers . decode_as_text + get_num_response_cache_entries . should_equal count + + group_builder.specify "Cache policy should work for HTTP.fetch" pending=pending_has_url <| Test.with_retries <| + HTTP.clear_response_cache + expect_counts [0, 0] <| + HTTP.fetch url0 cache_policy=Cache_Policy.No_Cache + HTTP.fetch url1 cache_policy=Cache_Policy.No_Cache + expect_counts [0, 2] <| + HTTP.fetch url0 cache_policy=Cache_Policy.Use_Cache + HTTP.fetch url1 cache_policy=Cache_Policy.Use_Cache + HTTP.clear_response_cache + expect_counts [0, 2] <| + HTTP.fetch url0 + HTTP.fetch url1 + + group_builder.specify "Cache policy should work for Data.fetch" pending=pending_has_url <| Test.with_retries <| + HTTP.clear_response_cache + expect_counts [0, 0] <| + Data.fetch url0 cache_policy=Cache_Policy.No_Cache + Data.fetch url1 cache_policy=Cache_Policy.No_Cache + expect_counts [0, 2] <| + Data.fetch url0 cache_policy=Cache_Policy.Use_Cache + Data.fetch url1 cache_policy=Cache_Policy.Use_Cache + HTTP.clear_response_cache + expect_counts [0, 2] <| + Data.fetch url0 + Data.fetch url1 + + group_builder.specify "Should not cache Data.download" pending=pending_has_url <| Test.with_retries 
<| + target_file = enso_project.data / "transient" / "my_download0.txt" + + HTTP.clear_response_cache + target_file.delete_if_exists + + Data.download url0 target_file + get_num_response_cache_entries . should_equal 0 + + target_file.delete_if_exists + + group_builder.specify "Data.download is not affected by caching limits" pending=pending_has_url <| Test.with_retries <| + target_file = enso_project.data / "transient" / "my_download0.txt" + Panic.with_finalizer reset_size_limits <| + EnsoHTTPResponseCache.getCacheTestParameters.setMaxTotalCacheSizeOverrideTestOnly 120 + EnsoHTTPResponseCache.getCacheTestParameters.setMaxFileSizeOverrideTestOnly 100 + Data.download base_url_with_slash+"test_download?length=200" target_file + target_file.read.length . should_equal 200 + target_file.delete_if_exists + + group_builder.specify "Should not cache for methods other than GET" pending=pending_has_url <| Test.with_retries <| + HTTP.clear_response_cache + + expect_counts [0, 0] <| + Data.post url_post (Request_Body.Text "hello world") + + group_builder.specify "HTTP request with a non-GET method should reject a cache_policy=Use_Cache argument" pending=pending_has_url <| Test.with_retries <| + request = Request.new HTTP_Method.Post url_post [] Request_Body.Empty + HTTP.new.request request cache_policy=Cache_Policy.Use_Cache . should_fail_with Illegal_Argument + + group_builder.specify "HTTP request with a non-GET method should not reject a cache_policy=No_Cache argument" pending=pending_has_url <| Test.with_retries <| + request = Request.new HTTP_Method.Post url_post [] Request_Body.Empty + HTTP.new.request request cache_policy=Cache_Policy.No_Cache . should_succeed + + group_builder.specify "Should be able to clear caches" pending=pending_has_url <| Test.with_retries <| + HTTP.clear_response_cache + Data.fetch url0 + get_num_response_cache_entries . should_equal 1 + HTTP.clear_response_cache + get_num_response_cache_entries . 
should_equal 0 + + group_builder.specify "Cache key should depend on the headers" pending=pending_has_url <| Test.with_retries <| + HTTP.clear_response_cache + expect_counts [0, 2] <| + Data.fetch url0 headers=headers0 + Data.fetch url0 headers=headers1 + Data.fetch url0 headers=headers1 + Data.fetch url0 headers=headers0 + Data.fetch url0 headers=headers0 + Data.fetch url0 headers=headers1 + + group_builder.specify "Cache key should not depend on header order" pending=pending_has_url <| Test.with_retries <| + HTTP.clear_response_cache + header0 = Header.new "Abc" "eef" + header1 = Header.new "Abc" "def" + header2 = Header.new "Ghi" "jkl" + orders = [[header0, header1, header2], [header1, header2, header0], [header2, header1, header0]] + responses = orders.map headers-> + Data.fetch url0 headers=headers . decode_as_text + get_num_response_cache_entries . should_equal 1 + responses.distinct.length . should_equal 1 + + ## Fetching the trigger uri causes stale entries to be removed, since the + uri is always different and so the caching and cleanup logic is run. + fake_now = Date_Time.now + trigger_uri_serial = Ref.new 0 + make_trigger_uri = + serial = trigger_uri_serial.get + trigger_uri_serial.modify (_ + 1) + base_url_with_slash+'test_download?max-age=10000&length=50&abc='+serial.to_text + set_time_and_get_count advance_secs = + EnsoHTTPResponseCache.getCacheTestParameters.setNowOverrideTestOnly (fake_now + (Duration.new seconds=advance_secs)) + trigger_uri = make_trigger_uri + Data.fetch trigger_uri + get_num_response_cache_entries + fake_time_resetter = + EnsoHTTPResponseCache.getCacheTestParameters.clearNowOverrideTestOnly + + group_builder.specify "The cache should expire stale entries" pending=pending_has_url <| Test.with_retries <| + HTTP.clear_response_cache + + set_time_and_get_count 0 # Initialize fake now. 
+ + Data.fetch base_url_with_slash+'test_download?max-age=100&length=50' + Data.fetch base_url_with_slash+'test_download?max-age=200&length=50' + Data.fetch base_url_with_slash+'test_download?max-age=200&length=51' + Data.fetch base_url_with_slash+'test_download?max-age=300&length=50' + + Panic.with_finalizer fake_time_resetter <| + ## The count will increase by 1 each time, but decrease by the + number of entries removed + set_time_and_get_count 0 . should_equal 6 + set_time_and_get_count 90 . should_equal 7 + set_time_and_get_count 110 . should_equal 7 + set_time_and_get_count 190 . should_equal 8 + set_time_and_get_count 202 . should_equal 7 + set_time_and_get_count 292 . should_equal 8 + set_time_and_get_count 301 . should_equal 8 + + group_builder.specify "The cache should use the Age response header" pending=pending_has_url <| Test.with_retries <| + HTTP.clear_response_cache + + set_time_and_get_count 0 # Initialize fake now. + + Data.fetch base_url_with_slash+'test_download?max-age=100&age=50&length=50' # ttl 50 + Data.fetch base_url_with_slash+'test_download?max-age=100&age=30&length=50' # ttl 70 + Data.fetch base_url_with_slash+'test_download?max-age=120&age=50&length=50' # ttl 70 + Data.fetch base_url_with_slash+'test_download?max-age=70&&length=50' # ttl 70 + Data.fetch base_url_with_slash+'test_download?max-age=160&age=70&length=50' # ttl 90 + + Panic.with_finalizer fake_time_resetter <| + ## The count will increase by 1 each time, but decrease by the + number of entries removed + set_time_and_get_count 0 . should_equal 7 + set_time_and_get_count 40 . should_equal 8 + set_time_and_get_count 51 . should_equal 8 + set_time_and_get_count 68 . should_equal 9 + set_time_and_get_count 72 . should_equal 7 + set_time_and_get_count 88 . should_equal 8 + set_time_and_get_count 93 . 
should_equal 8 + + download size = + Data.fetch base_url_with_slash+'test_download?length='+size.to_text + + group_builder.specify "Will remove old cache files to keep the total cache size under the total cache size limit" pending=pending_has_url <| Test.with_retries <| + Panic.with_finalizer reset_size_limits <| + reset_size_limits + EnsoHTTPResponseCache.getCacheTestParameters.setMaxTotalCacheSizeOverrideTestOnly 100 + + download 30 + download 50 + download 10 + get_cache_file_sizes . should_equal_ignoring_order [10, 30, 50] + download 20 + get_cache_file_sizes . should_equal_ignoring_order [10, 20, 50] + download 40 + get_cache_file_sizes . should_equal_ignoring_order [10, 20, 40] + download 35 + get_cache_file_sizes . should_equal_ignoring_order [20, 35, 40] + + group_builder.specify "Will remove old cache files based on how recently they were used" pending=pending_has_url <| Test.with_retries <| + Panic.with_finalizer reset_size_limits <| + reset_size_limits + EnsoHTTPResponseCache.getCacheTestParameters.setMaxTotalCacheSizeOverrideTestOnly 100 + + download 30 + download 50 + download 10 + get_cache_file_sizes . should_equal_ignoring_order [10, 30, 50] + # Use 30 again so it's considered more recently used. + download 30 + get_cache_file_sizes . should_equal_ignoring_order [10, 30, 50] + download 20 + get_cache_file_sizes . should_equal_ignoring_order [10, 20, 30] + download 45 + get_cache_file_sizes . should_equal_ignoring_order [20, 30, 45] + + group_builder.specify "Will not cache a file with a content length greater than the single file limit" pending=pending_has_url <| Test.with_retries <| + Panic.with_finalizer reset_size_limits <| + reset_size_limits + EnsoHTTPResponseCache.getCacheTestParameters.setMaxFileSizeOverrideTestOnly 100 + download 110 . 
should_fail_with (Response_Too_Large.Error 100) + + + group_builder.specify "Will not cache a file without a content length, but which is greater than the single file limit" pending=pending_has_url <| Test.with_retries <| + HTTP.clear_response_cache + Panic.with_finalizer reset_size_limits <| + reset_size_limits + EnsoHTTPResponseCache.getCacheTestParameters.setMaxFileSizeOverrideTestOnly 100 + url = base_url_with_slash+'test_download?omit-content-length=1&length=110' + Data.fetch url . should_fail_with (Response_Too_Large.Error 100) + + group_builder.specify "Should not cache if the request fails" pending=pending_has_url <| Test.with_retries <| + HTTP.clear_response_cache + + HTTP.fetch url0 + get_num_response_cache_entries . should_equal 1 + HTTP.fetch base_url_with_slash+'crash' + get_num_response_cache_entries . should_equal 1 + HTTP.fetch base_url_with_slash+'nonexistent_endpoint' + get_num_response_cache_entries . should_equal 1 + + cloud_setup = Cloud_Tests_Setup.prepare + + group_builder.specify "Should work with secrets in the URI" pending=pending_has_url <| Test.with_retries <| + cloud_setup.with_prepared_environment <| + secret1 = Enso_Secret.create "http-cache-secret-1-"+Random.uuid "My Value" + secret2 = Enso_Secret.create "http-cache-secret-2-"+Random.uuid "Some Value" + cleanup = + secret1.delete + secret2.delete + Panic.with_finalizer cleanup <| + # Requests differ only in secrets in URI. + url1 = URI.from 'https://httpbin.org/bytes/50' + . add_query_argument "arg1" secret1 + . add_query_argument "arg2" "plain value" + uri2 = URI.from 'https://httpbin.org/bytes/50' + . add_query_argument "arg1" secret2 + . add_query_argument "arg2" "plain value" + + HTTP.clear_response_cache + HTTP.fetch url1 + get_num_response_cache_entries . should_equal 1 + HTTP.fetch uri2 + get_num_response_cache_entries . 
should_equal 2 + + group_builder.specify "Should work with secrets in the headers" pending=pending_has_url <| Test.with_retries <| + cloud_setup.with_prepared_environment <| + secret1 = Enso_Secret.create "http-cache-secret-1-"+Random.uuid "My Value" + secret2 = Enso_Secret.create "http-cache-secret-2-"+Random.uuid "Some Value" + cleanup = + secret1.delete + secret2.delete + Panic.with_finalizer cleanup <| + # Requests differ only in secrets in headers. + uri = URI.from 'https://httpbin.org/bytes/50' + headers1 = [Header.new "A-Header" secret1] + headers2 = [Header.new "A-Header" secret2] + + HTTP.clear_response_cache + HTTP.fetch headers=headers1 uri + get_num_response_cache_entries . should_equal 1 + HTTP.fetch headers=headers2 uri + get_num_response_cache_entries . should_equal 2 + + group_builder.specify "Should not be able to set the cache limits higher than the real limits" pending=pending_has_url <| Test.with_retries <| + Test.expect_panic IllegalArgumentException <| + EnsoHTTPResponseCache.getCacheTestParameters.setMaxFileSizeOverrideTestOnly (2 * 1024 * 1024 * 1024 + 1) . should_fail_with Illegal_Argument + Test.expect_panic IllegalArgumentException <| + EnsoHTTPResponseCache.getCacheTestParameters.setMaxTotalCacheSizeOverrideTestOnly (20 * 1024 * 1024 * 1024 + 1) . 
should_fail_with Illegal_Argument diff --git a/tools/http-test-helper/src/main/java/org/enso/shttp/HTTPTestHelperServer.java b/tools/http-test-helper/src/main/java/org/enso/shttp/HTTPTestHelperServer.java index a1f2227061..bc92132a09 100644 --- a/tools/http-test-helper/src/main/java/org/enso/shttp/HTTPTestHelperServer.java +++ b/tools/http-test-helper/src/main/java/org/enso/shttp/HTTPTestHelperServer.java @@ -98,6 +98,7 @@ public class HTTPTestHelperServer { server.addHandler("/test_basic_auth", new BasicAuthTestHandler()); server.addHandler("/crash", new CrashingTestHandler()); server.addHandler("/test_redirect", new RedirectTestHandler("/testfiles/js.txt")); + server.addHandler("/test_download", new DownloadTestHandler()); // Cloud mock if (cloudMockSetup != null) { diff --git a/tools/http-test-helper/src/main/java/org/enso/shttp/test_helpers/DownloadTestHandler.java b/tools/http-test-helper/src/main/java/org/enso/shttp/test_helpers/DownloadTestHandler.java new file mode 100644 index 0000000000..85c306ebb5 --- /dev/null +++ b/tools/http-test-helper/src/main/java/org/enso/shttp/test_helpers/DownloadTestHandler.java @@ -0,0 +1,58 @@ +package org.enso.shttp.test_helpers; + +import com.sun.net.httpserver.HttpExchange; +import java.io.IOException; +import java.io.OutputStream; +import java.net.URI; +import java.util.Random; +import org.apache.http.client.utils.URIBuilder; +import org.enso.shttp.SimpleHttpHandler; + +/** + * A handler that generates a data response, with optional max-age and Age headers. The data + * response consists of a string of random letters of the requested length. 
+ */ +public class DownloadTestHandler extends SimpleHttpHandler { + private Random random = new Random(42); + + @Override + protected void doHandle(HttpExchange exchange) throws IOException { + URI uri = exchange.getRequestURI(); + URIBuilder builder = new URIBuilder(uri); + + int length = 10; + String maxAge = null; + String age = null; + boolean omitContentLength = false; + for (var queryPair : builder.getQueryParams()) { + switch (queryPair.getName()) { + case "length" -> length = Integer.parseInt(queryPair.getValue()); + case "max-age" -> maxAge = queryPair.getValue(); + case "age" -> age = queryPair.getValue(); + case "omit-content-length" -> omitContentLength = true; + default -> {} + } + } + + byte responseData[] = new byte[length]; + for (int i = 0; i < length; ++i) { + responseData[i] = (byte) (97 + random.nextInt(26)); + } + + if (maxAge != null) { + exchange.getResponseHeaders().add("Cache-Control", "max-age=" + maxAge); + } + + if (age != null) { + exchange.getResponseHeaders().add("Age", age.toString()); + } + + long contentLength = omitContentLength ? 0 : responseData.length; + exchange.sendResponseHeaders(200, contentLength); + + try (OutputStream os = exchange.getResponseBody()) { + os.write(responseData); + } + exchange.close(); + } +} From 29f1f0d4e1926f49bf181ec3bdc0c13bafbf0093 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Grabarz?= Date: Wed, 30 Oct 2024 14:48:59 +0100 Subject: [PATCH 12/43] Add IDE version to window title. (#11446) Fixes #10966 ![image](https://github.com/user-attachments/assets/a46e38e9-1ff3-4eb4-9a88-901d684dc205) --- CHANGELOG.md | 2 ++ app/common/src/appConfig.js | 2 +- app/gui/index.html | 2 +- app/gui/vite.config.ts | 5 +++++ 4 files changed, 9 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9e54b5f091..920a52e90d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,6 +14,7 @@ - [New dropdown-based component menu][11398]. 
- [Size of Table Input Widget is preserved and restored after project re-opening][11435] +- [Added application version to the title bar.][11446] [11151]: https://github.com/enso-org/enso/pull/11151 [11271]: https://github.com/enso-org/enso/pull/11271 @@ -23,6 +24,7 @@ [11388]: https://github.com/enso-org/enso/pull/11388 [11398]: https://github.com/enso-org/enso/pull/11398 [11435]: https://github.com/enso-org/enso/pull/11435 +[11446]: https://github.com/enso-org/enso/pull/11446 #### Enso Standard Library diff --git a/app/common/src/appConfig.js b/app/common/src/appConfig.js index 8ad2ea51f0..5dc227bad4 100644 --- a/app/common/src/appConfig.js +++ b/app/common/src/appConfig.js @@ -44,7 +44,7 @@ export async function readEnvironmentFromFile() { if (!isProduction || entries.length > 0) { Object.assign(process.env, variables) } - process.env.ENSO_CLOUD_DASHBOARD_VERSION ??= buildInfo.version + process.env.ENSO_CLOUD_DASHBOARD_VERSION ??= buildInfo.version ?? '0.0.0-dev' process.env.ENSO_CLOUD_DASHBOARD_COMMIT_HASH ??= buildInfo.commit } catch (error) { process.env.ENSO_CLOUD_DASHBOARD_VERSION ??= buildInfo.version diff --git a/app/gui/index.html b/app/gui/index.html index 055ad35bde..7c05be49a9 100644 --- a/app/gui/index.html +++ b/app/gui/index.html @@ -37,7 +37,7 @@ maximum-scale = 1.0, user-scalable = no" /> - Enso Analytics + Enso %ENSO_IDE_VERSION%

diff --git a/app/gui/vite.config.ts b/app/gui/vite.config.ts index d28babae70..e85808cce5 100644 --- a/app/gui/vite.config.ts +++ b/app/gui/vite.config.ts @@ -26,6 +26,10 @@ await readEnvironmentFromFile() const entrypoint = process.env.E2E === 'true' ? './src/project-view/e2e-entrypoint.ts' : './src/entrypoint.ts' +// NOTE(Frizi): This rename is for the sake of forward compatibility with not yet merged config refactor on bazel branch, +// and because Vite's HTML env replacements only work with import.meta.env variables, not defines. +process.env.ENSO_IDE_VERSION = process.env.ENSO_CLOUD_DASHBOARD_VERSION + // https://vitejs.dev/config/ export default defineConfig({ root: fileURLToPath(new URL('.', import.meta.url)), @@ -65,6 +69,7 @@ export default defineConfig({ '#': fileURLToPath(new URL('./src/dashboard', import.meta.url)), }, }, + envPrefix: 'ENSO_IDE_', define: { ...getDefines(), IS_CLOUD_BUILD: JSON.stringify(IS_CLOUD_BUILD), From 6566b2da2ff36bb0b677754331915f6cec9c826b Mon Sep 17 00:00:00 2001 From: Ilya Bogdanov Date: Wed, 30 Oct 2024 18:14:28 +0400 Subject: [PATCH 13/43] Fix help panel (#11421) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes #11392 The issue was caused by somewhat cumbersome logic of updating/overriding displayed suggestion. I’m not sure if it even was working correctly at any time. 
https://github.com/user-attachments/assets/51e6fcd4-2198-40a2-86e7-8fcfa8b8b8d5 --- .../components/ComponentDocumentation.vue | 26 +++++++++---------- .../components/DocumentationPanel.vue | 4 +-- .../components/DocumentationPanel/history.ts | 26 ++++++++++++++----- .../project-view/components/GraphEditor.vue | 6 +---- 4 files changed, 35 insertions(+), 27 deletions(-) diff --git a/app/gui/src/project-view/components/ComponentDocumentation.vue b/app/gui/src/project-view/components/ComponentDocumentation.vue index 5a91a5b85a..c28c3cadff 100644 --- a/app/gui/src/project-view/components/ComponentDocumentation.vue +++ b/app/gui/src/project-view/components/ComponentDocumentation.vue @@ -2,12 +2,12 @@ import DocumentationPanel from '@/components/DocumentationPanel.vue' import { injectGraphSelection } from '@/providers/graphSelection' import { useGraphStore } from '@/stores/graph' -import { computed } from 'vue' +import { computed, watch } from 'vue' import type { SuggestionId } from 'ydoc-shared/languageServerTypes/suggestions' -import { Err, Ok } from 'ydoc-shared/util/data/result' +import { Err, Ok, unwrapOr } from 'ydoc-shared/util/data/result' -const props = defineProps<{ displayedSuggestionId: SuggestionId | null }>() -const emit = defineEmits<{ 'update:displayedSuggestionId': [SuggestionId] }>() +// A displayed component can be overridren by this model, e.g. when the user clicks links in the documenation. +const overrideDisplayed = defineModel({ default: null }) const selection = injectGraphSelection() const graphStore = useGraphStore() @@ -19,20 +19,20 @@ function docsForSelection() { return Ok(suggestionId) } -const displayedId = computed(() => - props.displayedSuggestionId != null ? Ok(props.displayedSuggestionId) : docsForSelection(), -) +const docs = computed(() => docsForSelection()) +// When the selection changes, we cancel the displayed suggestion override that can be in place. 
+watch(docs, (_) => (overrideDisplayed.value = null)) + +const displayedId = computed(() => overrideDisplayed.value ?? unwrapOr(docs.value, null)) diff --git a/app/gui/src/project-view/components/RecordControl.vue b/app/gui/src/project-view/components/RecordControl.vue index 8bc5f30e70..8f8e50ef34 100644 --- a/app/gui/src/project-view/components/RecordControl.vue +++ b/app/gui/src/project-view/components/RecordControl.vue @@ -1,13 +1,14 @@ diff --git a/app/gui/src/project-view/components/StandaloneButton.vue b/app/gui/src/project-view/components/StandaloneButton.vue new file mode 100644 index 0000000000..4c965dd46c --- /dev/null +++ b/app/gui/src/project-view/components/StandaloneButton.vue @@ -0,0 +1,26 @@ + + + + + diff --git a/app/gui/src/project-view/components/SvgButton.vue b/app/gui/src/project-view/components/SvgButton.vue index bbab27dff7..068b3f758e 100644 --- a/app/gui/src/project-view/components/SvgButton.vue +++ b/app/gui/src/project-view/components/SvgButton.vue @@ -5,7 +5,7 @@ import type { URLString } from '@/util/data/urlString' import type { Icon } from '@/util/iconName' const _props = defineProps<{ - name: Icon | URLString + name?: Icon | URLString | undefined label?: string | undefined disabled?: boolean title?: string | undefined @@ -14,7 +14,7 @@ const _props = defineProps<{ From 610ee5fdec4541453560d5ec93a5242ca514f4d5 Mon Sep 17 00:00:00 2001 From: James Dunkerley Date: Thu, 31 Oct 2024 10:04:52 +0000 Subject: [PATCH 17/43] Expand and improve `pretty` for core data types, vector and table. (#11438) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - ✅ Alter default `Any.pretty` so constructor is prefixed with type name (as needed now). ![image](https://github.com/user-attachments/assets/72d5ff2f-b567-47e2-becf-2e4acd4d089d) - ✅ Tests for `pretty` on `Date`. - `pretty` for ✅ `Date_Time` and ✅ `Time_Of_Day` improved to not have as much noise. - `pretty` for ✅ `Period`, ✅ `Date_Range` and ✅ `Range`. 
- Added custom `pretty` for ✅ `Vector` and ✅ `Array` as built-in method doesn't call through to overrides. - Added custom `pretty` for ✅ `Column` and ✅ `Table`. - Bug fix for `pretty` in `Time_Zone` so calls through to `pretty` of the zone_id to ensure safely escaped. - Initial `default_widget` for `Date` and `Time_Of_Day`. - Improve widget for `Date.to_date_time`. ![image](https://github.com/user-attachments/assets/18bc1d88-8ea9-42d0-8a9c-bc873e5d6835) - `to_text`, `to_display_text` and `pretty` for `Enso_Secret` ![image](https://github.com/user-attachments/assets/d850c109-d1af-4b6f-a450-013c4d137805) - private constructor for `Enso_Secret` as can't be correctly built directly. - Use `_` for the testing methods in `HTTP` to clarify they shouldn't be used in general code. --- .../lib/Standard/Base/0.0.0-dev/src/Any.enso | 19 ++++++--- .../Base/0.0.0-dev/src/Data/Array.enso | 7 ++++ .../Base/0.0.0-dev/src/Data/Range.enso | 17 ++++++++ .../Base/0.0.0-dev/src/Data/Time/Date.enso | 29 +++++++++++--- .../0.0.0-dev/src/Data/Time/Date_Range.enso | 17 +++++++- .../0.0.0-dev/src/Data/Time/Date_Time.enso | 32 ++++++++++----- .../Base/0.0.0-dev/src/Data/Time/Period.enso | 20 ++++++++++ .../0.0.0-dev/src/Data/Time/Time_Of_Day.enso | 40 ++++++++++++++----- .../0.0.0-dev/src/Data/Time/Time_Zone.enso | 7 ++-- .../Base/0.0.0-dev/src/Data/Vector.enso | 8 ++++ .../0.0.0-dev/src/Enso_Cloud/Enso_File.enso | 1 + .../0.0.0-dev/src/Enso_Cloud/Enso_Secret.enso | 37 ++++++++++++++++- .../Base/0.0.0-dev/src/Network/HTTP.enso | 16 ++++---- .../0.0.0-dev/src/Network/HTTP/Response.enso | 2 +- .../src/Network/HTTP/Response_Body.enso | 2 +- .../0.0.0-dev/src/System/File_Format.enso | 2 +- .../Standard/Table/0.0.0-dev/src/Column.enso | 7 ++++ .../Standard/Table/0.0.0-dev/src/Table.enso | 6 +++ .../Visualization/0.0.0-dev/src/Widgets.enso | 8 +++- .../builtin/text/AnyPrettyNode.java | 2 +- test/Base_Tests/src/Data/Array_Spec.enso | 8 ++++ test/Base_Tests/src/Data/Range_Spec.enso | 19 +++++++++ 
.../src/Data/Time/Date_Range_Spec.enso | 8 +++- test/Base_Tests/src/Data/Time/Date_Spec.enso | 6 +++ .../src/Data/Time/Date_Time_Spec.enso | 13 ++++++ .../src/Data/Time/Day_Of_Week_Spec.enso | 5 +++ .../Base_Tests/src/Data/Time/Period_Spec.enso | 13 ++++++ .../src/Data/Time/Time_Of_Day_Spec.enso | 11 +++++ test/Base_Tests/src/Data/Vector_Spec.enso | 6 +++ test/Base_Tests/src/Network/Http_Spec.enso | 10 ++--- .../src/In_Memory/Column_Spec.enso | 15 +++++++ .../Table_Tests/src/In_Memory/Table_Spec.enso | 18 +++++++++ 32 files changed, 354 insertions(+), 57 deletions(-) diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Any.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Any.enso index 670972c87b..0f0196181e 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Any.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Any.enso @@ -78,14 +78,23 @@ type Any to_text : Text to_text self = @Builtin_Method "Any.to_text" - ## ICON convert - Generic conversion of an arbitrary Enso value to a corresponding human-readable - representation. + ## GROUP convert + ICON enso_logo + Convert the value to a corresponding Enso code representation. > Example - Getting a human-readable representation of the number 7. + Getting the Enso code of the number 7. - 7.to_text + 7.pretty + ## Returns a Text + 7 + + > Example + Getting the Enso code of the text Hello World!. + + "Hello World!".pretty + ## Returns a Text + 'Hello World!' 
pretty : Text pretty self = @Builtin_Method "Any.pretty" diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Array.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Array.enso index e8478abde9..bf1390ab66 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Array.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Array.enso @@ -795,6 +795,13 @@ type Array to_display_text : Text to_display_text self = self.short_display_text max_entries=40 + ## PRIVATE + GROUP convert + ICON enso_logo + Convert the value to a corresponding Enso code representation. + pretty : Text + pretty self = self.map .pretty . join ", " "[" "]" + ## ICON column_add Combines all the elements of a non-empty array using a binary operation. If the array is empty, it returns `if_empty`. diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Range.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Range.enso index e2e457c7ee..8c588cb434 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Range.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Range.enso @@ -574,6 +574,23 @@ type Range step = if self.step.abs == 1 then "" else " by " + self.step.to_display_text start + step + "]" + ## PRIVATE + GROUP convert + ICON enso_logo + Convert the value to a corresponding Enso code representation. + + > Example + Getting the Enso code of the range 1 until 29. + + 1.up_to 29 . 
pretty + ## Returns a Text + Range.new 1 29 + pretty : Text + pretty self = + start = self.start.pretty + end = self.end.pretty + "Range.new " + start + " " + end + (if self.step.abs == 1 then "" else " step=" + self.step.abs.pretty) + ## PRIVATE throw_zero_step_error = Error.throw (Illegal_State.Error "A range with step = 0 is ill-formed.") diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date.enso index 408aded586..cb25b88edb 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date.enso @@ -26,7 +26,8 @@ import project.Panic.Panic from project.Data.Boolean import Boolean, False, True from project.Data.Text.Extensions import all from project.Data.Time.Date_Time import ensure_in_epoch -from project.Metadata import Display, Widget +from project.Metadata import Display, make_single_choice, Widget +from project.Metadata.Choice import Option from project.Widget_Helpers import make_date_format_selector polyglot java import java.lang.ArithmeticException @@ -335,7 +336,7 @@ type Date Arguments: - period: the period to add to self. next : Date_Period -> Date - next self period=Date_Period.Day = self + period.to_period + next self period:Date_Period=..Day = self + period.to_period ## GROUP DateTime ICON time @@ -347,7 +348,7 @@ type Date Arguments: - period: the period to add to self. previous : Date_Period -> Date - previous self period=Date_Period.Day = self - period.to_period + previous self period:Date_Period=..Day = self - period.to_period ## GROUP DateTime ICON time @@ -492,6 +493,8 @@ type Date from Standard.Base import Date, Time_Of_Day, Time_Zone example_to_time = Date.new 2020 2 3 . 
to_date_time Time_Of_Day.new Time_Zone.utc + @time_of_day (Time_Of_Day.default_widget include_now=False) + @zone Time_Zone.default_widget to_date_time : Time_Of_Day -> Time_Zone -> Date_Time to_date_time self (time_of_day=Time_Of_Day.new) (zone=Time_Zone.system) = Time_Utils.make_zoned_date_time self time_of_day zone @@ -827,9 +830,25 @@ type Date format.format_date self ## PRIVATE - Convert to a Enso code representation of this Date. + GROUP convert + ICON enso_logo + Convert the value to a corresponding Enso code representation. + + > Example + Getting the Enso code of the date 29-October-2024. + + (Date.new 2024 10 29).pretty + ## Returns a Text + Date.new 2024 10 29 pretty : Text - pretty self = "(Date.new " + self.year.to_text + " " + self.month.to_text + " " + self.day.to_text + ")" + pretty self = "Date.new " + self.year.to_text + " " + self.month.to_text + " " + self.day.to_text + + ## PRIVATE + Gets the default drop down option for Date. + default_widget : Boolean -> Widget + default_widget (include_today:Boolean=False) = + options = [Option "" "Date.new"] + (if include_today then [Option "" "Date.today"] else []) + Widget.Single_Choice values=options display=Display.When_Modified ## PRIVATE week_days_between start end = diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Range.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Range.enso index d0fba4049f..e51d9f1206 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Range.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Range.enso @@ -101,9 +101,22 @@ type Date_Range start + step + "]" ## PRIVATE - Convert to a human-readable representation. + GROUP convert + ICON enso_logo + Convert the value to a corresponding Enso code representation. + + > Example + Getting the Enso code of the date range 10-September-2024 until + 29-October-2024. + + (Date.new 2024 09 10).up_to (Date.new 2024 10 29) . 
pretty + ## Returns a Text + Date_Range.new (Date.new 2024 09 10) (Date.new 2024 10 29) pretty : Text - pretty self = self.to_text + pretty self = + start = self.start.pretty + end = self.end.pretty + "Date_Range.new (" + start + ") (" + end + (if self.step == (Period.new days=1) then ")" else ") (" + self.step.pretty + ")") ## PRIVATE Converts this value to a JSON serializable object. diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Time.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Time.enso index fe47ceb5e3..92a1bb70fd 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Time.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Time.enso @@ -836,18 +836,28 @@ type Date_Time self.format "yyyy-MM-dd "+time_format+zone_format ## PRIVATE - Convert to a Enso code representation of this Time_Of_Day. - pretty : Text - pretty self = "(Date_Time.new " + self.year.to_text + " " + self.month.to_text + " " + self.day.to_text - + (if self.hour == 0 then "" else " hour="+self.hour.to_text) - + (if self.minute == 0 then "" else " minute="+self.minute.to_text) - + (if self.second == 0 then "" else " second="+self.second.to_text) - + (if self.millisecond == 0 then "" else " millisecond="+self.millisecond.to_text) - + (if self.microsecond == 0 then "" else " microsecond="+self.microsecond.to_text) - + (if self.nanosecond == 0 then "" else " nanosecond="+self.nanosecond.to_text) - + (if self.zone == Time_Zone.system then "" else " zone="+self.zone.pretty) - + ")" + GROUP convert + ICON enso_logo + Convert the value to a corresponding Enso code representation. + > Example + Getting the Enso code of the date 29-October-2024 12:34. 
+ + (Date_Time.new 2024 10 29 12 34).pretty + ## Returns a Text + Date_Time.new 2024 10 29 12 34 + pretty : Text + pretty self = + parts = Vector.build builder-> + builder.append ("Date_Time.new " + self.year.to_text + " " + self.month.to_text + " " + self.day.to_text) + if self.hour != 0 then builder.append ((if builder.length!=1 then " hour=" else " ") + self.hour.to_text) + if self.minute != 0 then builder.append ((if builder.length!=2 then " minute=" else " ") + self.minute.to_text) + if self.second != 0 then builder.append ((if builder.length!=3 then " second=" else " ") + self.second.to_text) + if self.millisecond != 0 then builder.append ((if builder.length!=4 then " millisecond=" else " ") + self.millisecond.to_text) + if self.microsecond != 0 then builder.append ((if builder.length!=5 then " microsecond=" else " ") + self.microsecond.to_text) + if self.nanosecond != 0 then builder.append ((if builder.length!=6 then " nanosecond=" else " ") + self.nanosecond.to_text) + if self.zone != Time_Zone.system then builder.append ((if builder.length!=7 then " zone=(" else " (") + self.zone.pretty + ")") + parts.join "" ## PRIVATE Convert to a JavaScript Object representing a Date_Time. diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Period.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Period.enso index 653688641e..3199648b14 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Period.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Period.enso @@ -209,6 +209,26 @@ type Period if self.days==0 . not then builder.append ["days", self.days] JS_Object.from_pairs v + ## PRIVATE + GROUP convert + ICON enso_logo + Convert the value to a corresponding Enso code representation. + + > Example + Getting the Enso code of the period 1 month and 2 days. 
+ + (Period.new months=1 days=2).pretty + ## Returns a Text + Time_Of_Day.new 12 34 millisecond=500 + pretty : Text + pretty self = + parts = Vector.build builder-> + builder.append "Period.new" + if self.years != 0 then builder.append ((if builder.length!=1 then " years=" else " ") + self.years.to_text) + if self.months != 0 then builder.append ((if builder.length!=2 then " months=" else " ") + self.months.to_text) + if self.days != 0 then builder.append ((if builder.length!=3 then " days=" else " ") + self.days.to_text) + parts.join "" + ## PRIVATE catch_java_exceptions operation ~action = handle_arithmetic_exception caught_panic = diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Time_Of_Day.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Time_Of_Day.enso index b49ea45eae..efd8e18ecf 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Time_Of_Day.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Time_Of_Day.enso @@ -11,6 +11,7 @@ import project.Data.Time.Duration.Duration import project.Data.Time.Period.Period import project.Data.Time.Time_Period.Time_Period import project.Data.Time.Time_Zone.Time_Zone +import project.Data.Vector.Vector import project.Error.Error import project.Errors.Common.Type_Error import project.Errors.Illegal_Argument.Illegal_Argument @@ -20,7 +21,8 @@ import project.Nothing.Nothing import project.Panic.Panic from project.Data.Boolean import Boolean, False, True from project.Data.Text.Extensions import all -from project.Metadata import Display, Widget +from project.Metadata import Display, make_single_choice, Widget +from project.Metadata.Choice import Option from project.Widget_Helpers import make_time_format_selector polyglot java import java.lang.Exception as JException @@ -492,16 +494,34 @@ type Time_Of_Day format.format_time self ## PRIVATE - Convert to a Enso code representation of this Time_Of_Day. 
+ GROUP convert + ICON enso_logo + Convert the value to a corresponding Enso code representation. + + > Example + Getting the Enso code of the time 12:34:00.5 + + (Time_Of_Day.new 12 34 0 500).pretty + ## Returns a Text + Time_Of_Day.new 12 34 millisecond=500 pretty : Text - pretty self = "(Time_Of_Day.new" - + (if self.hour == 0 then "" else " hour="+self.hour.to_text) - + (if self.minute == 0 then "" else " minute="+self.minute.to_text) - + (if self.second == 0 then "" else " second="+self.second.to_text) - + (if self.millisecond == 0 then "" else " millisecond="+self.millisecond.to_text) - + (if self.microsecond == 0 then "" else " microsecond="+self.microsecond.to_text) - + (if self.nanosecond == 0 then "" else " nanosecond="+self.nanosecond.to_text) - + ")" + pretty self = + parts = Vector.build builder-> + builder.append "Time_Of_Day.new" + if self.hour != 0 then builder.append ((if builder.length!=1 then " hour=" else " ") + self.hour.to_text) + if self.minute != 0 then builder.append ((if builder.length!=2 then " minute=" else " ") + self.minute.to_text) + if self.second != 0 then builder.append ((if builder.length!=3 then " second=" else " ") + self.second.to_text) + if self.millisecond != 0 then builder.append ((if builder.length!=4 then " millisecond=" else " ") + self.millisecond.to_text) + if self.microsecond != 0 then builder.append ((if builder.length!=5 then " microsecond=" else " ") + self.microsecond.to_text) + if self.nanosecond != 0 then builder.append ((if builder.length!=6 then " nanosecond=" else " ") + self.nanosecond.to_text) + parts.join "" + + ## PRIVATE + Gets the default drop down option for Time_Of_Day. 
+ default_widget : Boolean -> Widget + default_widget (include_now:Boolean=False) = + options = [Option "" "Time_Of_Day.new"] + (if include_now then [Option "" "Time_Of_Day.now"] else []) + Widget.Single_Choice values=options display=Display.When_Modified ## PRIVATE Time_Of_Day.from (that:JS_Object) = diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Time_Zone.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Time_Zone.enso index 9133bfa966..75c55383eb 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Time_Zone.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Time_Zone.enso @@ -214,10 +214,11 @@ type Time_Zone zone_names = Time_Utils.getZoneNames ## PRIVATE - Convert to a Enso code representation of this Time_Of_Day. + GROUP convert + ICON enso_logo + Convert the value to a corresponding Enso code representation. pretty : Text - pretty self = "(Time_Zone.parse '" + self.zone_id + "')" - + pretty self = "Time_Zone.parse " + self.zone_id.pretty ## PRIVATE Time_Zone.from (that:JS_Object) = diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Vector.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Vector.enso index 13d71a160c..3880dafc53 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Vector.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Vector.enso @@ -878,6 +878,14 @@ type Vector a short_display_text self (max_entries : Integer = 10) = Array_Like_Helpers.short_display_text self max_entries + ## PRIVATE + GROUP convert + ICON enso_logo + Convert the value to a corresponding Enso code representation. + pretty : Text + pretty self = self.map .pretty . 
join ", " "[" "]" + + ## ALIAS append, concatenate, union GROUP Operators ICON union diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Enso_File.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Enso_File.enso index f5fc798054..bb95c33e56 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Enso_File.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Enso_File.enso @@ -530,6 +530,7 @@ type Enso_File "Enso_File "+self.path ## PRIVATE + Converts the file descriptor to a JSON object. to_js_object : JS_Object to_js_object self = JS_Object.from_pairs [["type", "Enso_File"], ["constructor", "new"], ["path", self.path.to_text]] diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Enso_Secret.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Enso_Secret.enso index 538dface7d..33ee5cb134 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Enso_Secret.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Enso_Secret.enso @@ -31,7 +31,19 @@ polyglot java import org.enso.base.enso_cloud.HideableValue.SecretValue ## A reference to a secret stored in the Enso Cloud. type Enso_Secret ## PRIVATE - Value name:Text id:Text path:Enso_Path + private Value internal_name:Text id:Text internal_path:Enso_Path + + ## GROUP Metadata + ICON metadata + The name of the secret. + name : Text + name self = self.internal_name + + ## GROUP Metadata + ICON metadata + The path of the secret. + path : Text + path self = self.internal_path.to_text ## GROUP Output ICON edit @@ -146,6 +158,29 @@ type Enso_Secret EnsoSecretHelper.deleteSecretFromCache self.id self + ## PRIVATE + Returns a text representation of the secret. + to_text : Text + to_text self = "Enso_Secret " + self.path.to_text + + ## PRIVATE + Returns a display text representation of the secret. 
+ to_display_text : Text + to_display_text self = "Enso_Secret {" + self.name + "}" + + ## PRIVATE + Converts the secret to a JSON object. + to_js_object : JS_Object + to_js_object self = + JS_Object.from_pairs [["type", "Enso_Secret"], ["constructor", "get"], ["path", self.path.to_text]] + + ## PRIVATE + GROUP convert + ICON enso_logo + Convert the value to a corresponding Enso code representation. + pretty : Text + pretty self = "Enso_Secret.get " + self.path.to_text.pretty + ## PRIVATE type Enso_Secret_Error ## PRIVATE diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP.enso index 5f115b906d..a5f5aa5c08 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP.enso @@ -355,20 +355,20 @@ type Request_Error ## PRIVATE Access the HTTP's timeout (for testing purposes). -get_timeout : HTTP -> Duration -get_timeout http:HTTP = http.timeout +_get_timeout : HTTP -> Duration +_get_timeout http:HTTP = http.timeout ## PRIVATE Access the HTTP's follow_redirects (for testing purposes). -get_follow_redirects : HTTP -> Boolean -get_follow_redirects http:HTTP = http.follow_redirects +_get_follow_redirects : HTTP -> Boolean +_get_follow_redirects http:HTTP = http.follow_redirects ## PRIVATE Access the HTTP's proxy (for testing purposes). -get_proxy : HTTP -> Proxy -get_proxy http:HTTP = http.proxy +_get_proxy : HTTP -> Proxy +_get_proxy http:HTTP = http.proxy ## PRIVATE Access the HTTP's version (for testing purposes). 
-get_version : HTTP -> HTTP_Version -get_version http:HTTP = http.version +_get_version : HTTP -> HTTP_Version +_get_version http:HTTP = http.version diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response.enso index 8c79be9291..2b09db5597 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response.enso @@ -193,7 +193,7 @@ type Response example_write = Data.fetch Examples.geo_data_url . write Examples.scratch_file - @path (Widget.Text_Input display=Display.Always) + @file (Widget.Text_Input display=Display.Always) write : Writable_File -> Existing_File_Behavior -> File write self file:Writable_File on_existing_file=Existing_File_Behavior.Backup = self.body.write file on_existing_file diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response_Body.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response_Body.enso index a7c3477f08..31cd11eda6 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response_Body.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response_Body.enso @@ -180,7 +180,7 @@ type Response_Body example_write = Examples.get_geo_data.write Examples.scratch_file - @path (Widget.Text_Input display=Display.Always) + @file (Widget.Text_Input display=Display.Always) write : Writable_File -> Existing_File_Behavior -> File write self file:Writable_File on_existing_file=Existing_File_Behavior.Backup = self.with_stream body_stream-> diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/System/File_Format.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/System/File_Format.enso index 312cc5c701..85a0522c77 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/System/File_Format.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/System/File_Format.enso @@ -232,7 +232,7 @@ type 
Bytes type JSON_Format ## PRIVATE Resolve an unresolved constructor to the actual type. - resolve : Function -> Bytes | Nothing + resolve : Function -> JSON_Format | Nothing resolve constructor = _ = constructor Nothing diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Column.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Column.enso index 53cc25ab3e..5a838234ee 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Column.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Column.enso @@ -2589,6 +2589,13 @@ type Column data = Statistic.running self.to_vector statistic Column.from_vector name data + ## PRIVATE + pretty : Text + pretty self = + name = self.name.pretty + data = self.to_vector.pretty + "Column.from_vector " + name + " " + data + ## PRIVATE Folds the vectorized operation over the provided column and values. When more diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso index 9fd946da01..91ac28b900 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso @@ -3739,6 +3739,12 @@ type Table if merged_columns.is_empty then problem_builder_for_unification.raise_no_output_columns_with_cause else Table.new merged_columns + ## PRIVATE + pretty : Text + pretty self = + data = self.columns.map c->("[" + c.name.pretty + ", " + c.to_vector.pretty + "]") . join ", " + "Table.new [" + data + "]" + ## PRIVATE A helper to create a new table consisting of slices of the original table. 
slice_ranges table ranges = diff --git a/distribution/lib/Standard/Visualization/0.0.0-dev/src/Widgets.enso b/distribution/lib/Standard/Visualization/0.0.0-dev/src/Widgets.enso index 0a20b2b63f..1a9c60c4db 100644 --- a/distribution/lib/Standard/Visualization/0.0.0-dev/src/Widgets.enso +++ b/distribution/lib/Standard/Visualization/0.0.0-dev/src/Widgets.enso @@ -1,5 +1,7 @@ from Standard.Base import all +import Standard.Base.Metadata.Widget import Standard.Base.Errors.Common.Not_Invokable +from Standard.Base.Logging import all from Standard.Base.Meta import Instrumentor from Standard.Table import all @@ -20,7 +22,11 @@ get_widget_json value call_name argument_names uuids="{}" = uuid:Text -> Instrumentor.uuid uuid _ -> Nothing - read_annotation argument = + log_panic argument err = + Widget.log_message "Failed for "+argument+": "+err.payload.to_display_text ..Warning + Nothing + + read_annotation argument = Panic.catch Any handler=(log_panic argument) <| annotation = Warning.clear <| Meta.get_annotation value call_name argument return_target err = err.payload.target Panic.catch Not_Invokable handler=return_target diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/text/AnyPrettyNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/text/AnyPrettyNode.java index 00b2444f56..2d13018047 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/text/AnyPrettyNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/text/AnyPrettyNode.java @@ -47,7 +47,7 @@ public abstract class AnyPrettyNode extends Node { @CompilerDirectives.TruffleBoundary private Text consName(AtomConstructor constructor) { - return Text.create(constructor.getDisplayName()); + return Text.create(constructor.getType().getName() + "." 
+ constructor.getName()); } @CompilerDirectives.TruffleBoundary diff --git a/test/Base_Tests/src/Data/Array_Spec.enso b/test/Base_Tests/src/Data/Array_Spec.enso index 74799f5f26..d08ad876e3 100644 --- a/test/Base_Tests/src/Data/Array_Spec.enso +++ b/test/Base_Tests/src/Data/Array_Spec.enso @@ -49,6 +49,14 @@ add_specs suite_builder = make_enso_array [] . reduce (+) . should_fail_with (Empty_Error.Error Array) make_enso_array [] . reduce (+) 0 . should_equal 0 + group_builder.specify "should have a well-defined debug-printing method" <| + ## Enso arrays should be coded as Vectors when Enso code is generated. + make_enso_array [] . pretty . should_equal "[]" + make_enso_array [1,2,3] . pretty . should_equal "[1, 2, 3]" + make_enso_array [Nothing] . pretty . should_equal "[Nothing]" + make_enso_array [True, False, 'a'] . pretty . should_equal "[True, False, 'a']" + make_enso_array [Date.new 2022 1 1] . pretty . should_equal "[Date.new 2022 1 1]" + suite_builder.group "Compare functionality with Vector" group_builder-> group_builder.specify "compare methods" <| vector_methods = Meta.meta Vector . methods . sort diff --git a/test/Base_Tests/src/Data/Range_Spec.enso b/test/Base_Tests/src/Data/Range_Spec.enso index 719b0a636d..a80a5cd52a 100644 --- a/test/Base_Tests/src/Data/Range_Spec.enso +++ b/test/Base_Tests/src/Data/Range_Spec.enso @@ -7,6 +7,7 @@ import Standard.Base.Errors.Common.Type_Error import Standard.Base.Errors.Common.Unsupported_Argument_Types import Standard.Base.Errors.Illegal_Argument.Illegal_Argument import Standard.Base.Errors.Illegal_State.Illegal_State +import Standard.Base.Runtime.Debug from Standard.Test import all @@ -559,6 +560,24 @@ add_specs suite_builder = suite_builder.group "Range" group_builder-> invalid_range . find _->True . should_fail_with Illegal_State invalid_range . contains 0 . should_fail_with Illegal_State + group_builder.specify "should define friendly text representations" <| + range = 1.up_to 100 + range_2 = 0.up_to 10 . 
with_step 2 + range_3 = 20.down_to 0 . with_step 3 + + range.to_text . should_equal "(Between 1 100 1)" + range_2.to_text . should_equal "(Between 0 10 2)" + range_3.to_text . should_equal "(Between 20 0 -3)" + + range.to_display_text . should_equal "[1 .. 100]" + range_2.to_display_text . should_equal "[0 .. 10 by 2]" + range_3.to_display_text . should_equal "[20 .. 0 by -3]" + + range.pretty . should_equal "Range.new 1 100" + range_2.pretty . should_equal "Range.new 0 10 step=2" + range_3.pretty . should_equal "Range.new 20 0 step=3" + Debug.eval range_3.pretty . should_equal range_3 + main filter=Nothing = suite = Test.build suite_builder-> add_specs suite_builder diff --git a/test/Base_Tests/src/Data/Time/Date_Range_Spec.enso b/test/Base_Tests/src/Data/Time/Date_Range_Spec.enso index 7c4ebf7efc..b2b469ae1c 100644 --- a/test/Base_Tests/src/Data/Time/Date_Range_Spec.enso +++ b/test/Base_Tests/src/Data/Time/Date_Range_Spec.enso @@ -2,6 +2,7 @@ from Standard.Base import all import Standard.Base.Errors.Common.Type_Error import Standard.Base.Errors.Empty_Error.Empty_Error import Standard.Base.Errors.Illegal_Argument.Illegal_Argument +import Standard.Base.Runtime.Debug from Standard.Test import all @@ -194,8 +195,11 @@ add_specs suite_builder = r1.to_text . should_equal '(Date_Range from 2020-02-28 up to 2020-03-02)' r2.to_text . should_equal '(Date_Range from 2020-03-20 down to 2020-02-29 by 7D)' - r1.pretty . should_equal r1.to_text - r2.pretty . should_equal r2.to_text + r1.pretty . should_equal 'Date_Range.new (Date.new 2020 2 28) (Date.new 2020 3 2)' + (Debug.eval r1.pretty) . should_equal r1 + + r2.pretty . should_equal 'Date_Range.new (Date.new 2020 3 20) (Date.new 2020 2 29) (Period.new days=7)' + (Debug.eval r2.pretty) . should_equal r2 r1.to_display_text . should_equal '[2020-02-28 .. 2020-03-02]' r2.to_display_text . should_equal '[2020-03-20 .. 
2020-02-29 by -7D]' diff --git a/test/Base_Tests/src/Data/Time/Date_Spec.enso b/test/Base_Tests/src/Data/Time/Date_Spec.enso index 4666558917..31f93695dc 100644 --- a/test/Base_Tests/src/Data/Time/Date_Spec.enso +++ b/test/Base_Tests/src/Data/Time/Date_Spec.enso @@ -2,6 +2,7 @@ from Standard.Base import all import Standard.Base.Errors.Common.Incomparable_Values import Standard.Base.Errors.Common.Type_Error import Standard.Base.Errors.Time_Error.Time_Error +import Standard.Base.Runtime.Debug from Standard.Test import all @@ -96,6 +97,11 @@ spec_with suite_builder name create_new_date parse_date pending=Nothing = datetime.date . should_equal date datetime.time_of_day . should_equal time + group_builder.specify "should convert to Enso code" <| + date = create_new_date 2001 12 21 + date.pretty . should_equal "Date.new 2001 12 21" + Debug.eval date.pretty . should_equal date + group_builder.specify "should convert to Json" <| date = create_new_date 2001 12 21 date.to_json.should_equal <| diff --git a/test/Base_Tests/src/Data/Time/Date_Time_Spec.enso b/test/Base_Tests/src/Data/Time/Date_Time_Spec.enso index b5617f7210..328a2429af 100644 --- a/test/Base_Tests/src/Data/Time/Date_Time_Spec.enso +++ b/test/Base_Tests/src/Data/Time/Date_Time_Spec.enso @@ -2,6 +2,7 @@ from Standard.Base import all import Standard.Base.Errors.Common.Incomparable_Values import Standard.Base.Errors.Common.Type_Error import Standard.Base.Errors.Time_Error.Time_Error +import Standard.Base.Runtime.Debug from Standard.Test import all @@ -119,6 +120,18 @@ spec_with suite_builder name create_new_datetime parse_datetime nanoseconds_loss text = create_new_datetime 1970 (zone = Time_Zone.utc) . to_text text . should_equal "1970-01-01 00:00:00Z[UTC]" + group_builder.specify "should convert to Enso code" <| + create_new_datetime 1970 . pretty . should_equal "Date_Time.new 1970 1 1" + create_new_datetime 1923 9 24 . pretty . should_equal "Date_Time.new 1923 9 24" + create_new_datetime 1923 9 24 12 20 44 . 
pretty . should_equal "Date_Time.new 1923 9 24 12 20 44" + if nanoseconds_loss_in_precision.not then + create_new_datetime 1923 9 24 12 20 nanosecond=500000000 . pretty . should_equal "Date_Time.new 1923 9 24 12 20 millisecond=500" + create_new_datetime 1923 9 24 12 20 nanosecond=500000 . pretty . should_equal "Date_Time.new 1923 9 24 12 20 microsecond=500" + create_new_datetime 1923 9 24 12 20 nanosecond=500 . pretty . should_equal "Date_Time.new 1923 9 24 12 20 nanosecond=500" + + date_time = create_new_datetime 1970 12 21 11 23 45 nanosecond=123456789 zone=Time_Zone.utc + Debug.eval date_time.pretty . should_equal date_time + group_builder.specify "should convert to Json" <| time = create_new_datetime 1970 12 21 (zone = Time_Zone.utc) time.to_json.should_equal <| diff --git a/test/Base_Tests/src/Data/Time/Day_Of_Week_Spec.enso b/test/Base_Tests/src/Data/Time/Day_Of_Week_Spec.enso index 32f3a14aa4..76d6a0145a 100644 --- a/test/Base_Tests/src/Data/Time/Day_Of_Week_Spec.enso +++ b/test/Base_Tests/src/Data/Time/Day_Of_Week_Spec.enso @@ -1,4 +1,5 @@ from Standard.Base import all +import Standard.Base.Runtime.Debug from Standard.Test import all @@ -11,6 +12,10 @@ add_specs suite_builder = Day_Of_Week.Friday.to_integer . should_equal 6 Day_Of_Week.Saturday.to_integer . should_equal 7 + group_builder.specify "should be able to convert to Enso code" <| + Day_Of_Week.Sunday.pretty . should_equal "Day_Of_Week.Sunday" + Debug.eval Day_Of_Week.Wednesday.pretty . should_equal Day_Of_Week.Wednesday + group_builder.specify "should be able to convert from an Integer" <| Day_Of_Week.from 1 . should_equal Day_Of_Week.Sunday Day_Of_Week.from 4 . 
should_equal Day_Of_Week.Wednesday diff --git a/test/Base_Tests/src/Data/Time/Period_Spec.enso b/test/Base_Tests/src/Data/Time/Period_Spec.enso index f61c4abfbb..a1c841add9 100644 --- a/test/Base_Tests/src/Data/Time/Period_Spec.enso +++ b/test/Base_Tests/src/Data/Time/Period_Spec.enso @@ -1,5 +1,6 @@ from Standard.Base import all import Standard.Base.Errors.Common.Incomparable_Values +import Standard.Base.Runtime.Debug from Standard.Test import all @@ -50,6 +51,18 @@ add_specs suite_builder = Period.new years=2 days=3 . to_display_text . should_equal "2Y 0M 3D" Period.new days=18 . to_display_text . should_equal "18D" + group_builder.specify "should render to Enso code" <| + Period.new . pretty . should_equal "Period.new" + Period.new years=2 . pretty . should_equal "Period.new 2" + Period.new months=24 . pretty . should_equal "Period.new months=24" + Period.new months=4 . pretty . should_equal "Period.new months=4" + Period.new years=1 months=6 . pretty . should_equal "Period.new 1 6" + Period.new years=2 days=3 . pretty . should_equal "Period.new 2 days=3" + Period.new days=18 . pretty . should_equal "Period.new days=18" + + period = Period.new years=2 days=3 + Debug.eval period.pretty . 
should_equal period + main filter=Nothing = suite = Test.build suite_builder-> add_specs suite_builder diff --git a/test/Base_Tests/src/Data/Time/Time_Of_Day_Spec.enso b/test/Base_Tests/src/Data/Time/Time_Of_Day_Spec.enso index 23cf7ffd8d..e4a88f8967 100644 --- a/test/Base_Tests/src/Data/Time/Time_Of_Day_Spec.enso +++ b/test/Base_Tests/src/Data/Time/Time_Of_Day_Spec.enso @@ -3,6 +3,7 @@ import Standard.Base.Errors.Common.Incomparable_Values import Standard.Base.Errors.Common.Type_Error import Standard.Base.Errors.Illegal_Argument.Illegal_Argument import Standard.Base.Errors.Time_Error.Time_Error +import Standard.Base.Runtime.Debug from Standard.Test import all @@ -58,6 +59,16 @@ specWith suite_builder name create_new_time parse_time nanoseconds_loss_in_preci text = create_new_time 12 20 44 . to_text text . should_equal "12:20:44" + group_builder.specify "should convert to Enso code" <| + create_new_time 12 20 . pretty . should_equal "Time_Of_Day.new 12 20" + create_new_time 12 20 44 . pretty . should_equal "Time_Of_Day.new 12 20 44" + create_new_time 12 20 0 500000000 . pretty . should_equal "Time_Of_Day.new 12 20 millisecond=500" + create_new_time 12 20 0 500000 . pretty . should_equal "Time_Of_Day.new 12 20 microsecond=500" + if nanoseconds_loss_in_precision.not then create_new_time 12 20 0 500 . pretty . should_equal "Time_Of_Day.new 12 20 nanosecond=500" + + time = create_new_time 12 20 0 500000 + Debug.eval time.pretty . 
should_equal time + group_builder.specify "should convert to Json" <| time = create_new_time 1 2 3 time.to_json.should_equal <| diff --git a/test/Base_Tests/src/Data/Vector_Spec.enso b/test/Base_Tests/src/Data/Vector_Spec.enso index fca7f5bbf0..97fba3a36a 100644 --- a/test/Base_Tests/src/Data/Vector_Spec.enso +++ b/test/Base_Tests/src/Data/Vector_Spec.enso @@ -12,6 +12,7 @@ import Standard.Base.Errors.Common.Not_Found import Standard.Base.Errors.Common.Type_Error import Standard.Base.Errors.Illegal_Argument.Illegal_Argument import Standard.Base.Errors.Unimplemented.Unimplemented +import Standard.Base.Runtime.Debug import Standard.Base.Runtime.Ref.Ref import Standard.Base.Runtime.State from Standard.Base.Panic import Wrapped_Dataflow_Error @@ -1243,6 +1244,11 @@ add_specs suite_builder = [Nothing].pretty.should_equal "[Nothing]" [True, False, 'a'].pretty . should_equal "[True, False, 'a']" [Foo.Value True].pretty . should_equal "[(Foo.Value True)]" + [Date.new 2022 1 1].pretty . should_equal "[Date.new 2022 1 1]" + + mixed = [1, 2, 'a', (Foo.Value True), Date.new 2022 1 1, Nothing] + mixed.pretty . should_equal "[1, 2, 'a', (Foo.Value True), Date.new 2022 1 1, Nothing]" + Debug.eval (mixed.pretty) . 
should_equal [1, 2, 'a', Foo.Value True, Date.new 2022 1 1, Nothing] type_spec suite_builder "Use Vector as vectors" identity type_spec suite_builder "Use Array as vectors" (v -> v.to_array) diff --git a/test/Base_Tests/src/Network/Http_Spec.enso b/test/Base_Tests/src/Network/Http_Spec.enso index b72bb11750..9a453ceb15 100644 --- a/test/Base_Tests/src/Network/Http_Spec.enso +++ b/test/Base_Tests/src/Network/Http_Spec.enso @@ -11,7 +11,7 @@ import Standard.Base.Network.HTTP.Request_Body.Request_Body import Standard.Base.Network.HTTP.Request_Error import Standard.Base.Network.Proxy.Proxy import Standard.Base.Runtime.Context -from Standard.Base.Network.HTTP import _resolve_headers, get_follow_redirects, get_proxy, get_timeout, get_version +from Standard.Base.Network.HTTP import _resolve_headers, _get_follow_redirects, _get_proxy, _get_timeout, _get_version from Standard.Test import all from Standard.Test.Execution_Context_Helpers import run_with_and_without_output @@ -66,11 +66,11 @@ add_specs suite_builder = suite_builder.group "HTTP client" pending=pending_has_url group_builder-> group_builder.specify "should create HTTP client with timeout setting" <| http = HTTP.new (timeout = (Duration.new seconds=30)) - (get_timeout http).should_equal (Duration.new seconds=30) + (_get_timeout http).should_equal (Duration.new seconds=30) group_builder.specify "should create HTTP client with follow_redirects setting" <| http = HTTP.new (follow_redirects = False) - (get_follow_redirects http).should_equal False + (_get_follow_redirects http).should_equal False Test.with_retries <| r = http.request (Request.new HTTP_Method.Get base_url_with_slash+"test_redirect") @@ -81,12 +81,12 @@ add_specs suite_builder = group_builder.specify "should create HTTP client with proxy setting" <| proxy_setting = Proxy.Address "example.com" 80 http = HTTP.new (proxy = proxy_setting) - (get_proxy http).should_equal proxy_setting + (_get_proxy http).should_equal proxy_setting group_builder.specify 
"should create HTTP client with version setting" <| version_setting = HTTP_Version.HTTP_2 http = HTTP.new (version = version_setting) - (get_version http).should_equal version_setting + (_get_version http).should_equal version_setting url_get = base_url_with_slash.if_not_nothing <| base_url_with_slash + "get" suite_builder.group "fetch" pending=pending_has_url group_builder-> diff --git a/test/Table_Tests/src/In_Memory/Column_Spec.enso b/test/Table_Tests/src/In_Memory/Column_Spec.enso index f788d10fed..025c47abb0 100644 --- a/test/Table_Tests/src/In_Memory/Column_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Column_Spec.enso @@ -1,4 +1,5 @@ from Standard.Base import all +import Standard.Base.Runtime.Debug import project.Util @@ -257,6 +258,20 @@ add_specs suite_builder = r2 = Column.from_vector "X" [] (Value_Type.Char size=0 variable_length=True) r2.should_fail_with Illegal_Argument + group_builder.specify "should be able to serialize to Enso code" <| + c1 = Column.from_vector "X" [1, 2] Value_Type.Float + c1.pretty . should_equal 'Column.from_vector \'X\' [1.0, 2.0]' + Debug.eval c1.pretty . should_equal c1 + + c2 = Column.from_vector "X" ["a", 42] + c2.pretty . should_equal 'Column.from_vector \'X\' [\'a\', 42]' + + c3 = Column.from_vector "X" ["aaa", "bbb"] + c3.pretty . should_equal 'Column.from_vector \'X\' [\'aaa\', \'bbb\']' + + c4 = Column.from_vector "X" [Time_Of_Day.new 10 11 12, Time_Of_Day.new 11 30] + c4.pretty . should_equal 'Column.from_vector \'X\' [Time_Of_Day.new 10 11 12, Time_Of_Day.new 11 30]' + suite_builder.group "Rounding" group_builder-> group_builder.specify "should be able to round a column of decimals" <| Column.from_vector "foo" [1.2, 2.3, 2.5, 3.6] . round . 
should_equal (Column.from_vector "round([foo])" [1, 2, 3, 4]) diff --git a/test/Table_Tests/src/In_Memory/Table_Spec.enso b/test/Table_Tests/src/In_Memory/Table_Spec.enso index 50ea986314..fecd48c0ff 100644 --- a/test/Table_Tests/src/In_Memory/Table_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Table_Spec.enso @@ -4,6 +4,7 @@ import Standard.Base.Errors.Common.Incomparable_Values import Standard.Base.Errors.Common.Index_Out_Of_Bounds import Standard.Base.Errors.Common.Type_Error import Standard.Base.Errors.Illegal_Argument.Illegal_Argument +import Standard.Base.Runtime.Debug from Standard.Table import Table, Column, Sort_Column, Aggregate_Column, Blank_Selector, Value_Type from Standard.Table.Errors import Invalid_Column_Names, Duplicate_Output_Column_Names, No_Input_Columns_Selected, Missing_Input_Columns, No_Such_Column, Floating_Point_Equality, Invalid_Value_Type, Row_Count_Mismatch @@ -88,6 +89,23 @@ add_specs suite_builder = r2.at "foo" . to_vector . should_equal [] r2.at "bar" . to_vector . should_equal [] + group_builder.specify "should allow creating Enso code from a Table" <| + r = Table.new [["foo", [1, 2, 3]], ["bar", [False, True, False]]] + r.pretty . should_equal "Table.new [['foo', [1, 2, 3]], ['bar', [False, True, False]]]" + Debug.eval r.pretty . should_equal r + + r2 = Table.new [["foo", []], ["bar", []]] + r2.pretty . should_equal "Table.new [['foo', []], ['bar', []]]" + Debug.eval r2.pretty . should_equal r2 + + r3 = Table.new [["date", [Date.new 2022 8 27, Date.new 1999 1 1]], ["time", [Time_Of_Day.new 18, Time_Of_Day.new 1 2 34]]] + r3.pretty . should_equal "Table.new [['date', [Date.new 2022 8 27, Date.new 1999 1 1]], ['time', [Time_Of_Day.new 18, Time_Of_Day.new 1 2 34]]]" + Debug.eval r3.pretty . 
should_equal r3 + + r4 = Table.new [["foo", [1, 2, 3]], ["bar", [False, True, False]], ["date", [Date.new 2022 8 27, Date.new 1999 1 1, Date.new 2012 1 23]], ["time", [Time_Of_Day.new 18, Time_Of_Day.new 1 2 34, Time_Of_Day.new 12 0]]] + r4.pretty . should_equal "Table.new [['foo', [1, 2, 3]], ['bar', [False, True, False]], ['date', [Date.new 2022 8 27, Date.new 1999 1 1, Date.new 2012 1 23]], ['time', [Time_Of_Day.new 18, Time_Of_Day.new 1 2 34, Time_Of_Day.new 12]]]" + Debug.eval r4.pretty . should_equal r4 + group_builder.specify "should handle error scenarios gracefully" <| Table.new [["X", [1,2,3]], ["Y", [4]]] . should_fail_with Illegal_Argument Table.new [["X", [1]], ["X", [2]]] . should_fail_with Illegal_Argument From c6e87c2a1739ac13cc386deef1cca576c355e838 Mon Sep 17 00:00:00 2001 From: somebody1234 Date: Thu, 31 Oct 2024 20:36:10 +1000 Subject: [PATCH 18/43] Optimize asset table rendering (#11382) - Depends on: - #11380 - Some optimizations for re-rendering assets table: - Omit `visibilities` from `state` in favor of passing each `AssetRow`'s `visibility` directly to the row. This minimizes spurious `state` updates. - Pass `id` and `parentId` to `AssetRow` instead of the entire object. This ensures that re-fetches do not force a re-render of the `AssetRow` - we are no longer passing a reference to the object, so we are now comparing by string comparison (which is almost always stable). 
# Important Notes None --- app/common/package.json | 8 +- app/common/src/services/Backend.ts | 80 ++++-- app/common/src/utilities/data/object.ts | 9 + .../components/dashboard/AssetRow.tsx | 236 ++++++------------ .../dashboard/DatalinkNameColumn.tsx | 23 +- .../dashboard/DirectoryNameColumn.tsx | 43 ++-- .../components/dashboard/FileNameColumn.tsx | 31 +-- .../dashboard/ProjectNameColumn.tsx | 62 ++--- .../components/dashboard/SecretNameColumn.tsx | 16 +- .../dashboard/components/dashboard/column.ts | 7 +- .../dashboard/column/DocsColumn.tsx | 3 +- .../dashboard/column/LabelsColumn.tsx | 43 +--- .../dashboard/column/ModifiedColumn.tsx | 19 +- .../dashboard/column/SharedWithColumn.tsx | 7 +- .../src/dashboard/events/AssetEventType.ts | 1 - app/gui/src/dashboard/events/assetEvent.ts | 7 - app/gui/src/dashboard/hooks/backendHooks.ts | 63 +++-- .../dashboard/layouts/AssetContextMenu.tsx | 68 ++--- app/gui/src/dashboard/layouts/AssetPanel.tsx | 12 +- .../layouts/AssetProjectSessions.tsx | 10 +- .../src/dashboard/layouts/AssetProperties.tsx | 25 +- .../layouts/AssetVersions/AssetVersion.tsx | 14 +- .../layouts/AssetVersions/AssetVersions.tsx | 15 +- app/gui/src/dashboard/layouts/AssetsTable.tsx | 171 +++++++------ .../dashboard/pages/dashboard/Dashboard.tsx | 18 +- .../src/dashboard/services/LocalBackend.ts | 11 +- .../src/dashboard/utilities/AssetTreeNode.ts | 3 - pnpm-lock.yaml | 22 +- 28 files changed, 470 insertions(+), 557 deletions(-) diff --git a/app/common/package.json b/app/common/package.json index 2ab252dbb0..d3bb6c880e 100644 --- a/app/common/package.json +++ b/app/common/package.json @@ -34,11 +34,11 @@ "@tanstack/vue-query": ">= 5.54.0 < 5.56.0" }, "dependencies": { - "idb-keyval": "^6.2.1", - "react": "^18.3.1", "@tanstack/query-persist-client-core": "^5.54.0", "@tanstack/vue-query": ">= 5.54.0 < 5.56.0", - "vue": "^3.5.2", - "vitest": "^1.3.1" + "idb-keyval": "^6.2.1", + "react": "^18.3.1", + "vitest": "^1.3.1", + "vue": "^3.5.2" } } diff --git 
a/app/common/src/services/Backend.ts b/app/common/src/services/Backend.ts index b690abcc8c..d2d575b802 100644 --- a/app/common/src/services/Backend.ts +++ b/app/common/src/services/Backend.ts @@ -762,8 +762,9 @@ export const ASSET_TYPE_ORDER: Readonly> = { * Metadata uniquely identifying a directory entry. * These can be Projects, Files, Secrets, or other directories. */ -export interface BaseAsset { - readonly id: AssetId +export interface Asset { + readonly type: Type + readonly id: IdType[Type] readonly title: string readonly modifiedAt: dateTime.Rfc3339DateTime /** @@ -774,16 +775,10 @@ export interface BaseAsset { readonly permissions: readonly AssetPermission[] | null readonly labels: readonly LabelName[] | null readonly description: string | null -} - -/** - * Metadata uniquely identifying a directory entry. - * These can be Projects, Files, Secrets, or other directories. - */ -export interface Asset extends BaseAsset { - readonly type: Type - readonly id: IdType[Type] readonly projectState: Type extends AssetType.project ? ProjectStateType : null + readonly extension: Type extends AssetType.file ? string : null + readonly parentsPath: string + readonly virtualParentsPath: string } /** A convenience alias for {@link Asset}<{@link AssetType.directory}>. */ @@ -823,11 +818,19 @@ export function createRootDirectoryAsset(directoryId: DirectoryId): DirectoryAss parentId: DirectoryId(''), permissions: [], projectState: null, + extension: null, labels: [], description: null, + parentsPath: '', + virtualParentsPath: '', } } +/** Extract the file extension from a file name. */ +function fileExtension(fileNameOrPath: string) { + return fileNameOrPath.match(/[.]([^.]+?)$/)?.[1] ?? '' +} + /** Creates a {@link FileAsset} using the given values. 
*/ export function createPlaceholderFileAsset( title: string, @@ -842,8 +845,11 @@ export function createPlaceholderFileAsset( permissions: assetPermissions, modifiedAt: dateTime.toRfc3339(new Date()), projectState: null, + extension: fileExtension(title), labels: [], description: null, + parentsPath: '', + virtualParentsPath: '', } } @@ -868,8 +874,11 @@ export function createPlaceholderProjectAsset( ...(organization != null ? { openedBy: organization.email } : {}), ...(path != null ? { path } : {}), }, + extension: null, labels: [], description: null, + parentsPath: '', + virtualParentsPath: '', } } @@ -881,16 +890,24 @@ export function createSpecialLoadingAsset(directoryId: DirectoryId): SpecialLoad return { type: AssetType.specialLoading, title: '', - id: LoadingAssetId(uniqueString.uniqueString()), + id: LoadingAssetId(`${AssetType.specialLoading}-${uniqueString.uniqueString()}`), modifiedAt: dateTime.toRfc3339(new Date()), parentId: directoryId, permissions: [], projectState: null, + extension: null, labels: [], description: null, + parentsPath: '', + virtualParentsPath: '', } } +/** Whether a given {@link string} is an {@link LoadingAssetId}. */ +export function isLoadingAssetId(id: string): id is LoadingAssetId { + return id.startsWith(`${AssetType.specialLoading}-`) +} + /** * Creates a {@link SpecialEmptyAsset}, with all irrelevant fields initialized to default * values. @@ -899,16 +916,24 @@ export function createSpecialEmptyAsset(directoryId: DirectoryId): SpecialEmptyA return { type: AssetType.specialEmpty, title: '', - id: EmptyAssetId(uniqueString.uniqueString()), + id: EmptyAssetId(`${AssetType.specialEmpty}-${uniqueString.uniqueString()}`), modifiedAt: dateTime.toRfc3339(new Date()), parentId: directoryId, permissions: [], projectState: null, + extension: null, labels: [], description: null, + parentsPath: '', + virtualParentsPath: '', } } +/** Whether a given {@link string} is an {@link EmptyAssetId}. 
*/ +export function isEmptyAssetId(id: string): id is EmptyAssetId { + return id.startsWith(`${AssetType.specialEmpty}-`) +} + /** * Creates a {@link SpecialErrorAsset}, with all irrelevant fields initialized to default * values. @@ -917,16 +942,24 @@ export function createSpecialErrorAsset(directoryId: DirectoryId): SpecialErrorA return { type: AssetType.specialError, title: '', - id: ErrorAssetId(uniqueString.uniqueString()), + id: ErrorAssetId(`${AssetType.specialError}-${uniqueString.uniqueString()}`), modifiedAt: dateTime.toRfc3339(new Date()), parentId: directoryId, permissions: [], projectState: null, + extension: null, labels: [], description: null, + parentsPath: '', + virtualParentsPath: '', } } +/** Whether a given {@link string} is an {@link ErrorAssetId}. */ +export function isErrorAssetId(id: string): id is ErrorAssetId { + return id.startsWith(`${AssetType.specialError}-`) +} + /** Any object with a `type` field matching the given `AssetType`. */ interface HasType { readonly type: Type @@ -1386,6 +1419,25 @@ export function extractProjectExtension(name: string) { return { basename: basename ?? name, extension: extension ?? '' } } +/** Check whether a pending rename is valid. */ +export function isNewTitleValid( + item: AnyAsset, + newTitle: string, + siblings?: readonly AnyAsset[] | null, +) { + siblings ??= [] + return ( + newTitle !== '' && + newTitle !== item.title && + siblings.every(sibling => { + const isSelf = sibling.id === item.id + const hasSameType = sibling.type === item.type + const hasSameTitle = sibling.title === newTitle + return !(!isSelf && hasSameType && hasSameTitle) + }) + ) +} + /** Network error class. 
*/ export class NetworkError extends Error { /** diff --git a/app/common/src/utilities/data/object.ts b/app/common/src/utilities/data/object.ts index ac8934358a..f8010da8ae 100644 --- a/app/common/src/utilities/data/object.ts +++ b/app/common/src/utilities/data/object.ts @@ -57,6 +57,15 @@ export function unsafeMutable(object: T): { -readonly [K in ke // === unsafeEntries === // ===================== +/** + * Return the entries of an object. UNSAFE only when it is possible for an object to have + * extra keys. + */ +export function unsafeKeys(object: T): readonly (keyof T)[] { + // @ts-expect-error This is intentionally a wrapper function with a different type. + return Object.keys(object) +} + /** * Return the entries of an object. UNSAFE only when it is possible for an object to have * extra keys. diff --git a/app/gui/src/dashboard/components/dashboard/AssetRow.tsx b/app/gui/src/dashboard/components/dashboard/AssetRow.tsx index 099779de37..623c001601 100644 --- a/app/gui/src/dashboard/components/dashboard/AssetRow.tsx +++ b/app/gui/src/dashboard/components/dashboard/AssetRow.tsx @@ -8,7 +8,6 @@ import BlankIcon from '#/assets/blank.svg' import * as dragAndDropHooks from '#/hooks/dragAndDropHooks' import { useEventCallback } from '#/hooks/eventCallbackHooks' -import * as setAssetHooks from '#/hooks/setAssetHooks' import { useDriveStore, useSetSelectedKeys } from '#/providers/DriveProvider' import * as modalProvider from '#/providers/ModalProvider' @@ -30,10 +29,12 @@ import * as localBackend from '#/services/LocalBackend' import * as backendModule from '#/services/Backend' import { Text } from '#/components/AriaComponents' +import type { AssetEvent } from '#/events/assetEvent' import { useCutAndPaste } from '#/events/assetListEvent' import { backendMutationOptions, backendQueryOptions, + useAssetPassiveListenerStrict, useBackendMutationState, } from '#/hooks/backendHooks' import { createGetProjectDetailsQuery } from '#/hooks/projectHooks' @@ -69,9 +70,8 @@ const 
DRAG_EXPAND_DELAY_MS = 500 /** Common properties for state and setters passed to event handlers on an {@link AssetRow}. */ export interface AssetRowInnerProps { - readonly key: backendModule.AssetId - readonly item: assetTreeNode.AnyAssetTreeNode - readonly setItem: React.Dispatch> + readonly asset: backendModule.AnyAsset + readonly path: string readonly state: assetsTable.AssetsTableState readonly rowState: assetsTable.AssetRowState readonly setRowState: React.Dispatch> @@ -80,64 +80,57 @@ export interface AssetRowInnerProps { /** Props for an {@link AssetRow}. */ export interface AssetRowProps { readonly isOpened: boolean - readonly item: assetTreeNode.AnyAssetTreeNode + readonly visibility: Visibility | undefined + readonly id: backendModule.AssetId + readonly parentId: backendModule.DirectoryId + readonly path: string + readonly initialAssetEvents: readonly AssetEvent[] | null + readonly depth: number readonly state: assetsTable.AssetsTableState readonly hidden: boolean readonly columns: columnUtils.Column[] readonly isKeyboardSelected: boolean - readonly grabKeyboardFocus: (item: assetTreeNode.AnyAssetTreeNode) => void + readonly grabKeyboardFocus: (item: backendModule.AnyAsset) => void readonly onClick: (props: AssetRowInnerProps, event: React.MouseEvent) => void - readonly select: (item: assetTreeNode.AnyAssetTreeNode) => void + readonly select: (item: backendModule.AnyAsset) => void readonly onDragStart?: ( event: React.DragEvent, - item: assetTreeNode.AnyAssetTreeNode, + item: backendModule.AnyAsset, ) => void readonly onDragOver?: ( event: React.DragEvent, - item: assetTreeNode.AnyAssetTreeNode, + item: backendModule.AnyAsset, ) => void readonly onDragLeave?: ( event: React.DragEvent, - item: assetTreeNode.AnyAssetTreeNode, + item: backendModule.AnyAsset, ) => void readonly onDragEnd?: ( event: React.DragEvent, - item: assetTreeNode.AnyAssetTreeNode, + item: backendModule.AnyAsset, ) => void readonly onDrop?: ( event: React.DragEvent, - item: 
assetTreeNode.AnyAssetTreeNode, + item: backendModule.AnyAsset, ) => void - readonly updateAssetRef: React.RefObject< - Record void> - > } /** A row containing an {@link backendModule.AnyAsset}. */ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { - const { isKeyboardSelected, isOpened, select, state, columns, onClick } = props - const { item: rawItem, hidden: hiddenRaw, updateAssetRef, grabKeyboardFocus } = props - const { - nodeMap, - doToggleDirectoryExpansion, - doCopy, - doCut, - doPaste, - doDelete: doDeleteRaw, - doRestore, - doMove, - category, - } = state - const { scrollContainerRef, rootDirectoryId, backend } = state - const { visibilities } = state + const { id, parentId, isKeyboardSelected, isOpened, select, state, columns, onClick } = props + const { path, hidden: hiddenRaw, grabKeyboardFocus, visibility: visibilityRaw, depth } = props + const { initialAssetEvents } = props + const { nodeMap, doCopy, doCut, doPaste, doDelete: doDeleteRaw } = state + const { doRestore, doMove, category, scrollContainerRef, rootDirectoryId, backend } = state + const { doToggleDirectoryExpansion } = state - const [item, setItem] = React.useState(rawItem) + const asset = useAssetPassiveListenerStrict(backend.type, id, parentId, category) const driveStore = useDriveStore() const queryClient = useQueryClient() const { user } = useFullUserSession() const setSelectedKeys = useSetSelectedKeys() const selected = useStore(driveStore, ({ visuallySelectedKeys, selectedKeys }) => - (visuallySelectedKeys ?? selectedKeys).has(item.key), + (visuallySelectedKeys ?? 
selectedKeys).has(id), ) const isSoleSelected = useStore( driveStore, @@ -157,7 +150,6 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { const rootRef = React.useRef(null) const dragOverTimeoutHandle = React.useRef(null) const grabKeyboardFocusRef = useSyncRef(grabKeyboardFocus) - const asset = item.item const [innerRowState, setRowState] = React.useState( assetRowUtils.INITIAL_ROW_STATE, ) @@ -185,11 +177,13 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { const isCloud = isCloudCategory(category) const { data: projectState } = useQuery({ - // This is SAFE, as `isOpened` is only true for projects. - // eslint-disable-next-line no-restricted-syntax - ...createGetProjectDetailsQuery.createPassiveListener(item.item.id as backendModule.ProjectId), + ...createGetProjectDetailsQuery.createPassiveListener( + // This is SAFE, as `isOpened` is only true for projects. + // eslint-disable-next-line no-restricted-syntax + asset.id as backendModule.ProjectId, + ), select: (data) => data?.state.type, - enabled: item.type === backendModule.AssetType.project, + enabled: asset.type === backendModule.AssetType.project, }) const toastAndLog = useToastAndLog() @@ -197,9 +191,8 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { const createPermissionMutation = useMutation(backendMutationOptions(backend, 'createPermission')) const associateTagMutation = useMutation(backendMutationOptions(backend, 'associateTag')) - const outerVisibility = visibilities.get(item.key) const insertionVisibility = useStore(driveStore, (driveState) => - driveState.pasteData?.type === 'move' && driveState.pasteData.data.ids.has(item.key) ? + driveState.pasteData?.type === 'move' && driveState.pasteData.data.ids.has(id) ? 
Visibility.faded : Visibility.visible, ) @@ -210,27 +203,15 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { createPermissionVariables.actorsIds[0] === user.userId const visibility = isRemovingSelf ? Visibility.hidden - : outerVisibility === Visibility.visible ? insertionVisibility - : outerVisibility ?? insertionVisibility + : visibilityRaw === Visibility.visible ? insertionVisibility + : visibilityRaw ?? insertionVisibility const hidden = isDeleting || isRestoring || hiddenRaw || visibility === Visibility.hidden const setSelected = useEventCallback((newSelected: boolean) => { const { selectedKeys } = driveStore.getState() - setSelectedKeys(set.withPresence(selectedKeys, item.key, newSelected)) + setSelectedKeys(set.withPresence(selectedKeys, id, newSelected)) }) - React.useEffect(() => { - setItem(rawItem) - }, [rawItem]) - - const rawItemRef = useSyncRef(rawItem) - React.useEffect(() => { - // Mutation is HIGHLY INADVISABLE in React, however it is useful here as we want to update the - // parent's state while avoiding re-rendering the parent. 
- rawItemRef.current.item = asset - }, [asset, rawItemRef]) - const setAsset = setAssetHooks.useSetAsset(asset, setItem) - React.useEffect(() => { if (selected && insertionVisibility !== Visibility.visible) { setSelected(false) @@ -240,30 +221,15 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { React.useEffect(() => { if (isKeyboardSelected) { rootRef.current?.focus() - grabKeyboardFocusRef.current(item) + grabKeyboardFocusRef.current(asset) } - }, [grabKeyboardFocusRef, isKeyboardSelected, item]) - - React.useImperativeHandle(updateAssetRef, () => ({ setAsset, item })) - - if (updateAssetRef.current) { - updateAssetRef.current[item.item.id] = setAsset - } - - React.useEffect(() => { - return () => { - if (updateAssetRef.current) { - // eslint-disable-next-line react-hooks/exhaustive-deps, @typescript-eslint/no-dynamic-delete - delete updateAssetRef.current[item.item.id] - } - } - }, [item.item.id, updateAssetRef]) + }, [grabKeyboardFocusRef, isKeyboardSelected, asset]) const doDelete = React.useCallback( (forever = false) => { - void doDeleteRaw(item.item, forever) + void doDeleteRaw(asset, forever) }, - [doDeleteRaw, item.item], + [doDeleteRaw, asset], ) const clearDragState = React.useCallback(() => { @@ -276,8 +242,7 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { }, []) const onDragOver = (event: React.DragEvent) => { - const directoryKey = - item.item.type === backendModule.AssetType.directory ? item.key : item.directoryKey + const directoryKey = asset.type === backendModule.AssetType.directory ? 
id : parentId const payload = drag.ASSET_ROWS.lookup(event) const isPayloadMatch = payload != null && payload.every((innerItem) => innerItem.key !== directoryKey) @@ -287,8 +252,8 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { } else { if (nodeMap.current !== nodeParentKeysRef.current?.nodeMap.deref()) { const parentKeys = new Map( - Array.from(nodeMap.current.entries()).map(([id, otherAsset]) => [ - id, + Array.from(nodeMap.current.entries()).map(([otherId, otherAsset]) => [ + otherId, otherAsset.directoryKey, ]), ) @@ -303,7 +268,7 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { return true } else { // Assume user path; check permissions - const permission = permissions.tryFindSelfPermission(user, item.item.permissions) + const permission = permissions.tryFindSelfPermission(user, asset.permissions) return ( permission != null && permissions.canPermissionModifyDirectoryContents(permission.permission) @@ -314,7 +279,7 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { })() if ((isPayloadMatch && canPaste) || event.dataTransfer.types.includes('Files')) { event.preventDefault() - if (item.item.type === backendModule.AssetType.directory && state.category.type !== 'trash') { + if (asset.type === backendModule.AssetType.directory && state.category.type !== 'trash') { setIsDraggedOver(true) } } @@ -323,26 +288,26 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { eventListProvider.useAssetEventListener(async (event) => { switch (event.type) { case AssetEventType.move: { - if (event.ids.has(item.key)) { - await doMove(event.newParentKey, item.item) + if (event.ids.has(id)) { + await doMove(event.newParentKey, asset) } break } case AssetEventType.delete: { - if (event.ids.has(item.key)) { + if (event.ids.has(id)) { doDelete(false) } break } case AssetEventType.deleteForever: { - if (event.ids.has(item.key)) { + if (event.ids.has(id)) { doDelete(true) } 
break } case AssetEventType.restore: { - if (event.ids.has(item.key)) { - await doRestore(item.item) + if (event.ids.has(id)) { + await doRestore(asset) } break } @@ -436,7 +401,7 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { actorsIds: [user.userId], }, ]) - dispatchAssetListEvent({ type: AssetListEventType.delete, key: item.key }) + dispatchAssetListEvent({ type: AssetListEventType.delete, key: id }) } catch (error) { toastAndLog(null, error) } @@ -444,7 +409,7 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { break } case AssetEventType.temporarilyAddLabels: { - const labels = event.ids.has(item.key) ? event.labelNames : set.EMPTY_SET + const labels = event.ids.has(id) ? event.labelNames : set.EMPTY_SET setRowState((oldRowState) => ( oldRowState.temporarilyAddedLabels === labels && @@ -459,7 +424,7 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { break } case AssetEventType.temporarilyRemoveLabels: { - const labels = event.ids.has(item.key) ? event.labelNames : set.EMPTY_SET + const labels = event.ids.has(id) ? event.labelNames : set.EMPTY_SET setRowState((oldRowState) => ( oldRowState.temporarilyAddedLabels === set.EMPTY_SET && @@ -481,18 +446,16 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { ) const labels = asset.labels if ( - event.ids.has(item.key) && + event.ids.has(id) && (labels == null || [...event.labelNames].some((label) => !labels.includes(label))) ) { const newLabels = [ ...(labels ?? 
[]), ...[...event.labelNames].filter((label) => labels?.includes(label) !== true), ] - setAsset(object.merger({ labels: newLabels })) try { await associateTagMutation.mutateAsync([asset.id, newLabels, asset.title]) } catch (error) { - setAsset(object.merger({ labels })) toastAndLog(null, error) } } @@ -506,47 +469,24 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { ) const labels = asset.labels if ( - event.ids.has(item.key) && + event.ids.has(id) && labels != null && [...event.labelNames].some((label) => labels.includes(label)) ) { const newLabels = labels.filter((label) => !event.labelNames.has(label)) - setAsset(object.merger({ labels: newLabels })) try { await associateTagMutation.mutateAsync([asset.id, newLabels, asset.title]) } catch (error) { - setAsset(object.merger({ labels })) toastAndLog(null, error) } } break } - case AssetEventType.deleteLabel: { - setAsset((oldAsset) => { - const oldLabels = oldAsset.labels ?? [] - const labels: backendModule.LabelName[] = [] - - for (const label of oldLabels) { - if (label !== event.labelName) { - labels.push(label) - } - } - - return oldLabels.length !== labels.length ? 
object.merge(oldAsset, { labels }) : oldAsset - }) - break - } - case AssetEventType.setItem: { - if (asset.id === event.id) { - setAsset(event.valueOrUpdater) - } - break - } default: { return } } - }, item.initialAssetEvents) + }, initialAssetEvents) switch (asset.type) { case backendModule.AssetType.directory: @@ -555,9 +495,8 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { case backendModule.AssetType.datalink: case backendModule.AssetType.secret: { const innerProps: AssetRowInnerProps = { - key: item.key, - item, - setItem, + asset, + path, state, rowState, setRowState, @@ -608,7 +547,7 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { unsetModal() onClick(innerProps, event) if ( - item.type === backendModule.AssetType.directory && + asset.type === backendModule.AssetType.directory && eventModule.isDoubleClick(event) && !rowState.isEditingName ) { @@ -617,7 +556,7 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { window.setTimeout(() => { setSelected(false) }) - doToggleDirectoryExpansion(item.item.id, item.key) + doToggleDirectoryExpansion(asset.id, asset.id) } }} onContextMenu={(event) => { @@ -625,7 +564,7 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { event.preventDefault() event.stopPropagation() if (!selected) { - select(item) + select(asset) } setModal( { if (dragOverTimeoutHandle.current != null) { window.clearTimeout(dragOverTimeoutHandle.current) } - if (item.type === backendModule.AssetType.directory) { + if (asset.type === backendModule.AssetType.directory) { dragOverTimeoutHandle.current = window.setTimeout(() => { - doToggleDirectoryExpansion(item.item.id, item.key, true) + doToggleDirectoryExpansion(asset.id, asset.id, true) }, DRAG_EXPAND_DELAY_MS) } // Required because `dragover` does not fire on `mouseenter`. 
- props.onDragOver?.(event, item) + props.onDragOver?.(event, asset) onDragOver(event) }} onDragOver={(event) => { if (state.category.type === 'trash') { event.dataTransfer.dropEffect = 'none' } - props.onDragOver?.(event, item) + props.onDragOver?.(event, asset) onDragOver(event) }} onDragEnd={(event) => { clearDragState() - props.onDragEnd?.(event, item) + props.onDragEnd?.(event, asset) }} onDragLeave={(event) => { if ( @@ -694,30 +633,28 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { ) { clearDragState() } - props.onDragLeave?.(event, item) + props.onDragLeave?.(event, asset) }} onDrop={(event) => { if (state.category.type !== 'trash') { - props.onDrop?.(event, item) + props.onDrop?.(event, asset) clearDragState() - const [directoryKey, directoryId] = - item.type === backendModule.AssetType.directory ? - [item.key, item.item.id] - : [item.directoryKey, item.directoryId] + const directoryId = + asset.type === backendModule.AssetType.directory ? asset.id : parentId const payload = drag.ASSET_ROWS.lookup(event) if ( payload != null && - payload.every((innerItem) => innerItem.key !== directoryKey) + payload.every((innerItem) => innerItem.key !== directoryId) ) { event.preventDefault() event.stopPropagation() unsetModal() - doToggleDirectoryExpansion(directoryId, directoryKey, true) + doToggleDirectoryExpansion(directoryId, directoryId, true) const ids = payload .filter((payloadItem) => payloadItem.asset.parentId !== directoryId) .map((dragItem) => dragItem.key) cutAndPaste( - directoryKey, + directoryId, directoryId, { backendType: backend.type, ids: new Set(ids), category }, nodeMap.current, @@ -725,10 +662,10 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { } else if (event.dataTransfer.types.includes('Files')) { event.preventDefault() event.stopPropagation() - doToggleDirectoryExpansion(directoryId, directoryKey, true) + doToggleDirectoryExpansion(directoryId, directoryId, true) 
dispatchAssetListEvent({ type: AssetListEventType.uploadFiles, - parentKey: directoryKey, + parentKey: directoryId, parentId: directoryId, files: Array.from(event.dataTransfer.files), }) @@ -741,11 +678,11 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { return (