diff --git a/.github/workflows/extra-nightly-tests.yml b/.github/workflows/extra-nightly-tests.yml
index 120c3dc722..f74abe4aa3 100644
--- a/.github/workflows/extra-nightly-tests.yml
+++ b/.github/workflows/extra-nightly-tests.yml
@@ -44,6 +44,11 @@ jobs:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
       - run: ./run backend test std-snowflake
         env:
+          ENSO_CLOUD_COGNITO_REGION: ${{ secrets.ENSO_CLOUD_COGNITO_REGION }}
+          ENSO_CLOUD_COGNITO_USER_POOL_ID: ${{ secrets.ENSO_CLOUD_COGNITO_USER_POOL_ID }}
+          ENSO_CLOUD_COGNITO_USER_POOL_WEB_CLIENT_ID: ${{ secrets.ENSO_CLOUD_COGNITO_USER_POOL_WEB_CLIENT_ID }}
+          ENSO_CLOUD_TEST_ACCOUNT_PASSWORD: ${{ secrets.ENSO_CLOUD_TEST_ACCOUNT_PASSWORD }}
+          ENSO_CLOUD_TEST_ACCOUNT_USERNAME: ${{ secrets.ENSO_CLOUD_TEST_ACCOUNT_USERNAME }}
           ENSO_SNOWFLAKE_ACCOUNT: ${{ secrets.ENSO_SNOWFLAKE_ACCOUNT }}
           ENSO_SNOWFLAKE_DATABASE: ${{ secrets.ENSO_SNOWFLAKE_DATABASE }}
           ENSO_SNOWFLAKE_PASSWORD: ${{ secrets.ENSO_SNOWFLAKE_PASSWORD }}
@@ -75,5 +80,69 @@ jobs:
       GRAAL_EDITION: GraalVM CE
     permissions:
       checks: write
+  enso-build-ci-gen-job-standard-library-tests-graal-vm-ce-linux-amd64:
+    name: Standard Library Tests (GraalVM CE) (linux, amd64)
+    runs-on:
+      - self-hosted
+      - Linux
+    steps:
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Installing wasm-pack
+        uses: jetli/wasm-pack-action@v0.4.0
+        with:
+          version: v0.10.2
+      - name: Expose Artifact API and context information.
+        uses: actions/github-script@v7
+        with:
+          script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
+      - name: Checking out the repository
+        uses: actions/checkout@v4
+        with:
+          clean: false
+          submodules: recursive
+      - name: Build Script Setup
+        run: ./run --help || (git clean -ffdx && ./run --help)
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      - if: "(contains(github.event.pull_request.labels.*.name, 'CI: Clean build required') || inputs.clean_build_required)"
+        name: Clean before
+        run: ./run git-clean
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      - run: ./run backend test standard-library
+        env:
+          ENSO_CLOUD_COGNITO_REGION: ${{ secrets.ENSO_CLOUD_COGNITO_REGION }}
+          ENSO_CLOUD_COGNITO_USER_POOL_ID: ${{ secrets.ENSO_CLOUD_COGNITO_USER_POOL_ID }}
+          ENSO_CLOUD_COGNITO_USER_POOL_WEB_CLIENT_ID: ${{ secrets.ENSO_CLOUD_COGNITO_USER_POOL_WEB_CLIENT_ID }}
+          ENSO_CLOUD_TEST_ACCOUNT_PASSWORD: ${{ secrets.ENSO_CLOUD_TEST_ACCOUNT_PASSWORD }}
+          ENSO_CLOUD_TEST_ACCOUNT_USERNAME: ${{ secrets.ENSO_CLOUD_TEST_ACCOUNT_USERNAME }}
+          ENSO_LIB_S3_AWS_ACCESS_KEY_ID: ${{ secrets.ENSO_LIB_S3_AWS_ACCESS_KEY_ID }}
+          ENSO_LIB_S3_AWS_REGION: ${{ secrets.ENSO_LIB_S3_AWS_REGION }}
+          ENSO_LIB_S3_AWS_SECRET_ACCESS_KEY: ${{ secrets.ENSO_LIB_S3_AWS_SECRET_ACCESS_KEY }}
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      - if: (success() || failure()) && github.event.pull_request.head.repo.full_name == github.repository
+        name: Standard Library Test Reporter
+        uses: dorny/test-reporter@v1
+        with:
+          max-annotations: 50
+          name: Standard Library Tests Report (GraalVM CE, linux, amd64)
+          path: ${{ env.ENSO_TEST_JUNIT_DIR }}/*/*.xml
+          path-replace-backslashes: true
+          reporter: java-junit
+      - if: failure() && runner.os == 'Windows'
+        name: List files if failed (Windows)
+        run: Get-ChildItem -Force -Recurse
+      - if: failure() && runner.os != 'Windows'
+        name: List files if failed (non-Windows)
+        run: ls -lAR
+      - if: "(always()) && (contains(github.event.pull_request.labels.*.name, 'CI: Clean build required') || inputs.clean_build_required)"
+        name: Clean after
+        run: ./run git-clean
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+    env:
+      GRAAL_EDITION: GraalVM CE
+    permissions:
+      checks: write
 env:
   ENSO_BUILD_SKIP_VERSION_CHECK: "true"
diff --git a/.vscode/settings.json b/.vscode/settings.json
index 0baf6fa290..66b38a7efe 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -1,6 +1,6 @@
 {
   "rust-analyzer.linkedProjects": [
-    "./app/gui2/rust-ffi/Cargo.toml"
+    "./app/rust-ffi/Cargo.toml"
   ],
   "vue.complete.casing.status": false,
   "vue.complete.casing.props": "camel",
@@ -19,7 +19,7 @@
   },
   "eslint.workingDirectories": [
     "./app/gui2",
-    "./app/gui2/ide-desktop"
+    "./app/ide-desktop"
   ],
   "files.watcherExclude": {
     "**/target": true
diff --git a/build/build/src/ci_gen.rs b/build/build/src/ci_gen.rs
index 8754e17535..b7c2132e03 100644
--- a/build/build/src/ci_gen.rs
+++ b/build/build/src/ci_gen.rs
@@ -111,6 +111,13 @@ pub mod secret {
     /// Static token for admin requests on our Lambdas.
     pub const ENSO_ADMIN_TOKEN: &str = "ENSO_ADMIN_TOKEN";
 
+    // === Enso Cloud Test Account ===
+    pub const ENSO_CLOUD_COGNITO_USER_POOL_WEB_CLIENT_ID: &str =
+        "ENSO_CLOUD_COGNITO_USER_POOL_WEB_CLIENT_ID";
+    pub const ENSO_CLOUD_COGNITO_USER_POOL_ID: &str = "ENSO_CLOUD_COGNITO_USER_POOL_ID";
+    pub const ENSO_CLOUD_COGNITO_REGION: &str = "ENSO_CLOUD_COGNITO_REGION";
+    pub const ENSO_CLOUD_TEST_ACCOUNT_USERNAME: &str = "ENSO_CLOUD_TEST_ACCOUNT_USERNAME";
+    pub const ENSO_CLOUD_TEST_ACCOUNT_PASSWORD: &str = "ENSO_CLOUD_TEST_ACCOUNT_PASSWORD";
 
     // === Apple Code Signing & Notarization ===
     pub const APPLE_CODE_SIGNING_CERT: &str = "APPLE_CODE_SIGNING_CERT";
@@ -538,7 +545,7 @@ pub fn add_backend_checks(
 ) {
     workflow.add(target, job::CiCheckBackend { graal_edition });
     workflow.add(target, job::JvmTests { graal_edition });
-    workflow.add(target, job::StandardLibraryTests { graal_edition });
+    workflow.add(target, job::StandardLibraryTests { graal_edition, cloud_tests_enabled: false });
 }
 
 pub fn workflow_call_job(name: impl Into<String>, path: impl Into<String>) -> Job {
@@ -702,6 +709,10 @@ pub fn extra_nightly_tests() -> Result<Workflow> {
     // behavior.
     let target = (OS::Linux, Arch::X86_64);
     workflow.add(target, job::SnowflakeTests {});
+    workflow.add(target, job::StandardLibraryTests {
+        graal_edition: graalvm::Edition::Community,
+        cloud_tests_enabled: true,
+    });
 
     Ok(workflow)
 }
diff --git a/build/build/src/ci_gen/job.rs b/build/build/src/ci_gen/job.rs
index 1e0fccc920..d03880dfec 100644
--- a/build/build/src/ci_gen/job.rs
+++ b/build/build/src/ci_gen/job.rs
@@ -211,14 +211,39 @@ impl JobArchetype for JvmTests {
     }
 }
 
+fn enable_cloud_tests(step: Step) -> Step {
+    step.with_secret_exposed_as(
+        secret::ENSO_CLOUD_COGNITO_USER_POOL_ID,
+        crate::cloud_tests::env::ci_config::ENSO_CLOUD_COGNITO_USER_POOL_ID,
+    )
+    .with_secret_exposed_as(
+        secret::ENSO_CLOUD_COGNITO_USER_POOL_WEB_CLIENT_ID,
+        crate::cloud_tests::env::ci_config::ENSO_CLOUD_COGNITO_USER_POOL_WEB_CLIENT_ID,
+    )
+    .with_secret_exposed_as(
+        secret::ENSO_CLOUD_COGNITO_REGION,
+        crate::cloud_tests::env::ci_config::ENSO_CLOUD_COGNITO_REGION,
+    )
+    .with_secret_exposed_as(
+        secret::ENSO_CLOUD_TEST_ACCOUNT_USERNAME,
+        crate::cloud_tests::env::ci_config::ENSO_CLOUD_TEST_ACCOUNT_USERNAME,
+    )
+    .with_secret_exposed_as(
+        secret::ENSO_CLOUD_TEST_ACCOUNT_PASSWORD,
+        crate::cloud_tests::env::ci_config::ENSO_CLOUD_TEST_ACCOUNT_PASSWORD,
+    )
+}
+
 #[derive(Clone, Copy, Debug)]
 pub struct StandardLibraryTests {
-    pub graal_edition: graalvm::Edition,
+    pub graal_edition:       graalvm::Edition,
+    pub cloud_tests_enabled: bool,
 }
 
 impl JobArchetype for StandardLibraryTests {
     fn job(&self, target: Target) -> Job {
         let graal_edition = self.graal_edition;
+        let should_enable_cloud_tests = self.cloud_tests_enabled;
         let job_name = format!("Standard Library Tests ({graal_edition})");
         let mut job = RunStepsBuilder::new("backend test standard-library")
             .customize(move |step| {
@@ -235,7 +260,14 @@ impl JobArchetype for StandardLibraryTests {
                         secret::ENSO_LIB_S3_AWS_SECRET_ACCESS_KEY,
                         crate::libraries_tests::s3::env::ENSO_LIB_S3_AWS_SECRET_ACCESS_KEY,
                     );
-                vec![main_step, step::stdlib_test_reporter(target, graal_edition)]
+
+                let updated_main_step = if should_enable_cloud_tests {
+                    enable_cloud_tests(main_step)
+                } else {
+                    main_step
+                };
+
+                vec![updated_main_step, step::stdlib_test_reporter(target, graal_edition)]
             })
             .build_job(job_name, target)
             .with_permission(Permission::Checks, Access::Write);
@@ -291,8 +323,11 @@ impl JobArchetype for SnowflakeTests {
                         secret::ENSO_SNOWFLAKE_WAREHOUSE,
                         crate::libraries_tests::snowflake::env::ENSO_SNOWFLAKE_WAREHOUSE,
                     );
+
+                let updated_main_step = enable_cloud_tests(main_step);
+
                 vec![
-                    main_step,
+                    updated_main_step,
                     step::extra_stdlib_test_reporter(target, GRAAL_EDITION_FOR_EXTRA_TESTS),
                 ]
             })
diff --git a/build/build/src/cloud_tests/env.rs b/build/build/src/cloud_tests/env.rs
new file mode 100644
index 0000000000..69dffc85f1
--- /dev/null
+++ b/build/build/src/cloud_tests/env.rs
@@ -0,0 +1,39 @@
+//! Environment variables used to configure the Enso Cloud integration tests.
+
+use ide_ci::define_env_var;
+
+pub mod ci_config {
+    use super::*;
+
+    define_env_var! {
+        /// Username for an Enso Cloud account used for running Cloud integration tests.
+        ENSO_CLOUD_TEST_ACCOUNT_USERNAME, String;
+
+        /// Password for an Enso Cloud account used for running Cloud integration tests.
+        ENSO_CLOUD_TEST_ACCOUNT_PASSWORD, String;
+
+        /// The Client ID of the User Pool for the Enso Cloud Cognito auth flow.
+        ENSO_CLOUD_COGNITO_USER_POOL_WEB_CLIENT_ID, String;
+
+        /// The User Pool ID for the Enso Cloud Cognito auth flow.
+        ENSO_CLOUD_COGNITO_USER_POOL_ID, String;
+
+        /// The region used for the Cognito auth flow.
+        ENSO_CLOUD_COGNITO_REGION, String;
+    }
+}
+
+pub mod test_controls {
+    use super::*;
+
+    define_env_var! {
+        /// Locates an Enso Cloud credentials file used in tests.
+        ENSO_CLOUD_CREDENTIALS_FILE, String;
+
+        /// Denotes the URI of the Enso Cloud API deployment to be used in tests.
+        ENSO_CLOUD_API_URI, String;
+
+        /// A flag that tells the test suite to run applicable tests against the cloud environment instead of just a mock.
+        ENSO_RUN_REAL_CLOUD_TEST, String;
+    }
+}
diff --git a/build/build/src/cloud_tests/mod.rs b/build/build/src/cloud_tests/mod.rs
new file mode 100644
index 0000000000..72e4806c1f
--- /dev/null
+++ b/build/build/src/cloud_tests/mod.rs
@@ -0,0 +1,166 @@
+//! Module that allows creating an Enso Cloud-compatible credentials file from
+//! a configuration stored in environment variables.
+
+pub mod env;
+
+use anyhow::Ok;
+use tempfile::NamedTempFile;
+
+use crate::prelude::*;
+use std::fs::File;
+use std::io::Write;
+
+pub fn build_auth_config_from_environment() -> Result<AuthConfig> {
+    let web_client_id = env::ci_config::ENSO_CLOUD_COGNITO_USER_POOL_WEB_CLIENT_ID.get()?;
+    let pool_id = env::ci_config::ENSO_CLOUD_COGNITO_USER_POOL_ID.get()?;
+    let region = env::ci_config::ENSO_CLOUD_COGNITO_REGION.get()?;
+    let username = env::ci_config::ENSO_CLOUD_TEST_ACCOUNT_USERNAME.get()?;
+    let password = env::ci_config::ENSO_CLOUD_TEST_ACCOUNT_PASSWORD.get()?;
+    Ok(AuthConfig { web_client_id, user_pool_id: pool_id, region, username, password })
+}
+
+pub async fn prepare_credentials_file(auth_config: AuthConfig) -> Result<NamedTempFile> {
+    let credentials = build_credentials(auth_config).await?;
+    let credentials_temp_file = NamedTempFile::with_prefix("enso-cloud-credentials")?;
+    save_credentials(&credentials, credentials_temp_file.path())?;
+    Ok(credentials_temp_file)
+}
+
+#[derive(Debug)]
+pub struct AuthConfig {
+    web_client_id: String,
+    user_pool_id: String,
+    region: String,
+    username: String,
+    password: String,
+}
+
+struct Credentials {
+    client_id: String,
+    access_token: String,
+    refresh_token: String,
+    refresh_url: String,
+    expire_at: String,
+}
+
+async fn build_credentials(config: AuthConfig) -> Result<Credentials> {
+    if !is_aws_cli_installed().await {
+        return Err(anyhow!("AWS CLI is not installed. If you want the build script to generate the Enso Cloud credentials file, you must install the AWS CLI."));
+    }
+
+    // We capture the timestamp from before the authentication: it is better to report an expiry
+    // that is slightly too early than one that is too late and have a downstream user mistakenly
+    // use an expired token.
+    let now_before_auth = chrono::Utc::now();
+    let mut command = aws_command();
+    command
+        .args(["cognito-idp", "initiate-auth"])
+        .args(["--region", &config.region])
+        .args(["--auth-flow", "USER_PASSWORD_AUTH"])
+        .args([
+            "--auth-parameters",
+            &format!("USERNAME={},PASSWORD={}", config.username, config.password),
+        ])
+        .args(["--client-id", &config.web_client_id]);
+
+    let stdout = command.run_stdout().await?;
+    let cognito_response = parse_cognito_response(&stdout)?;
+
+    let expire_at = now_before_auth + chrono::Duration::seconds(cognito_response.expires_in);
+    let expire_at_str = expire_at.to_rfc3339();
+    let refresh_url =
+        format!("https://cognito-idp.{}.amazonaws.com/{}", config.region, config.user_pool_id);
+    Ok(Credentials {
+        client_id: config.web_client_id.to_string(),
+        access_token: cognito_response.access_token,
+        refresh_token: cognito_response.refresh_token,
+        expire_at: expire_at_str,
+        refresh_url,
+    })
+}
+
+async fn is_aws_cli_installed() -> bool {
+    let mut command = aws_command();
+    command.arg("--version");
+    command.run_ok().await.is_ok()
+}
+
+fn aws_command() -> Command {
+    Command::new("aws")
+}
+
+struct CognitoResponse {
+    access_token: String,
+    refresh_token: String,
+    expires_in: i64,
+}
+
+fn parse_cognito_response(response: &str) -> Result<CognitoResponse> {
+    let json: serde_json::Value = serde_json::from_str(response)?;
+    let root_mapping = unpack_object(&json)?;
+    let authentication_result_mapping =
+        unpack_object(get_or_fail(root_mapping, "AuthenticationResult")?)?;
+    let token_type = unpack_string(get_or_fail(authentication_result_mapping, "TokenType")?)?;
+    if token_type != "Bearer" {
+        return Err(anyhow!("Expected token type 'Bearer', but got: {}", token_type));
+    }
+
+    let access_token = unpack_string(get_or_fail(authentication_result_mapping, "AccessToken")?)?;
+    let refresh_token = unpack_string(get_or_fail(authentication_result_mapping, "RefreshToken")?)?;
+    let expires_in = unpack_integer(get_or_fail(authentication_result_mapping, "ExpiresIn")?)?;
+
+    Ok(CognitoResponse {
+        access_token: access_token.to_string(),
+        refresh_token: refresh_token.to_string(),
+        expires_in,
+    })
+}
+
+fn get_or_fail<'a>(
+    mapping: &'a serde_json::Map<String, serde_json::Value>,
+    key: &str,
+) -> Result<&'a serde_json::Value> {
+    match mapping.get(key) {
+        Some(value) => Ok(value),
+        None => Err(anyhow!("Missing key when deserializing JSON: {}", key)),
+    }
+}
+
+fn unpack_object(value: &serde_json::Value) -> Result<&serde_json::Map<String, serde_json::Value>> {
+    if let serde_json::Value::Object(mapping) = value {
+        Ok(mapping)
+    } else {
+        Err(anyhow!("Expected JSON object, but got: {:?}", value))
+    }
+}
+
+fn unpack_string(value: &serde_json::Value) -> Result<&String> {
+    if let serde_json::Value::String(string) = value {
+        Ok(string)
+    } else {
+        Err(anyhow!("Expected JSON string, but got: {:?}", value))
+    }
+}
+
+fn unpack_integer(value: &serde_json::Value) -> Result<i64> {
+    if let serde_json::Value::Number(number) = value {
+        Ok(number.as_i64().ok_or_else(|| anyhow!("Expected JSON integer, but got: {:?}", value))?)
+    } else {
+        Err(anyhow!("Expected JSON integer, but got: {:?}", value))
+    }
+}
+
+fn save_credentials(credentials: &Credentials, path: &Path) -> Result<()> {
+    let json = serde_json::json! {
+        {
+            "client_id": credentials.client_id,
+            "access_token": credentials.access_token,
+            "refresh_token": credentials.refresh_token,
+            "refresh_url": credentials.refresh_url,
+            "expire_at": credentials.expire_at,
+        }
+    };
+    let mut file = File::create(path)?;
+    file.write_all(json.to_string().as_bytes())?;
+    Ok(())
+}
diff --git a/build/build/src/enso.rs b/build/build/src/enso.rs
index 3f04aa85fa..e923b350a4 100644
--- a/build/build/src/enso.rs
+++ b/build/build/src/enso.rs
@@ -1,3 +1,4 @@
+use crate::cloud_tests;
 use crate::prelude::*;
 
 use crate::engine::StandardLibraryTestsSelection;
@@ -13,6 +14,7 @@ use crate::sqlserver;
 use crate::sqlserver::EndpointConfiguration as SQLServerEndpointConfiguration;
 use crate::sqlserver::SQLServer;
 
+use ide_ci::env::accessor::TypedVariable;
 use ide_ci::future::AsyncPolicy;
 use ide_ci::programs::docker::ContainerId;
 
@@ -96,7 +98,12 @@ impl BuiltEnso {
         benchmarks
     }
 
-    pub fn run_test(&self, test_path: impl AsRef<Path>, ir_caches: IrCaches) -> Result<Command> {
+    pub fn run_test(
+        &self,
+        test_path: impl AsRef<Path>,
+        ir_caches: IrCaches,
+        environment_overrides: Vec<(String, String)>,
+    ) -> Result<Command> {
         let mut command = self.cmd()?;
         let base_working_directory = test_path.try_parent()?;
         command
@@ -107,6 +114,11 @@ impl BuiltEnso {
             // This flag enables assertions in the JVM. Some of our stdlib tests had in the past
             // failed on Graal/Truffle assertions, so we want to have them triggered.
             .set_env(JAVA_OPTS, &ide_ci::programs::java::Option::EnableAssertions.as_ref())?;
+
+        for (k, v) in environment_overrides {
+            command.env(k, &v);
+        }
+
         if test_path.as_str().contains("_Internal_") {
             command.arg("--disable-private-check");
         }
@@ -160,6 +172,18 @@ impl BuiltEnso {
                 only.iter().any(|test| test.contains("Microsoft_Tests")),
         };
 
+        let cloud_credentials_file = match cloud_tests::build_auth_config_from_environment() {
+            Ok(config) => {
+                let file = cloud_tests::prepare_credentials_file(config).await?;
+                info!("Enso Cloud authentication (for cloud integration tests) is enabled.");
+                Some(file)
+            }
+            Err(err) => {
+                info!("Enso Cloud authentication (for cloud integration tests) is skipped, because of: {}", err);
+                None
+            }
+        };
+
         let _httpbin = crate::httpbin::get_and_spawn_httpbin_on_free_port(sbt).await?;
 
         let _postgres = match TARGET_OS {
@@ -210,8 +234,25 @@
             _ => None,
         };
 
+        let mut environment_overrides: Vec<(String, String)> = vec![];
+        if let Some(credentials_file) = cloud_credentials_file.as_ref() {
+            let path_as_str = credentials_file.path().to_str();
+            let path = path_as_str
+                .ok_or_else(|| anyhow!("Path to credentials file is not valid UTF-8"))?;
+            environment_overrides.push((
+                cloud_tests::env::test_controls::ENSO_CLOUD_CREDENTIALS_FILE.name().to_string(),
+                path.to_string(),
+            ));
+            // We do not set ENSO_CLOUD_API_URI - we rely on the default, or any existing overrides.
+            environment_overrides.push((
+                cloud_tests::env::test_controls::ENSO_RUN_REAL_CLOUD_TEST.name().to_string(),
+                "1".to_string(),
+            ));
+        };
+
         let futures = std_tests.into_iter().map(|test_path| {
-            let command = self.run_test(test_path, ir_caches);
+            let command: std::result::Result<Command, anyhow::Error> =
+                self.run_test(test_path, ir_caches, environment_overrides.clone());
             async move { command?.run_ok().await }
         });
 
@@ -219,6 +260,8 @@
         // Could share them with Arc but then scenario of multiple test runs being run in parallel
        // should be handled, e.g. avoiding port collisions.
         let results = ide_ci::future::join_all(futures, async_policy).await;
+        // Only drop the credentials file after all tests have finished.
+        drop(cloud_credentials_file);
         let errors = results.into_iter().filter_map(Result::err).collect::<Vec<_>>();
         if errors.is_empty() {
             Ok(())
diff --git a/build/build/src/lib.rs b/build/build/src/lib.rs
index 589ff6dd3c..2c6a624ebd 100644
--- a/build/build/src/lib.rs
+++ b/build/build/src/lib.rs
@@ -21,6 +21,7 @@ pub mod aws;
 pub mod changelog;
 pub mod ci;
 pub mod ci_gen;
+pub mod cloud_tests;
 pub mod config;
 pub mod context;
 pub mod engine;
diff --git a/test/Base_Tests/src/System/File_Spec.enso b/test/Base_Tests/src/System/File_Spec.enso
index 67c0cd9cf8..25fa8b41ee 100644
--- a/test/Base_Tests/src/System/File_Spec.enso
+++ b/test/Base_Tests/src/System/File_Spec.enso
@@ -366,12 +366,12 @@ add_specs suite_builder =
         current_project_root = enso_project.root
         base_directory = current_project_root.parent
-        is_correct_working_directory = (File.current_directory . normalize . path) == current_project_root.absolute.normalize.path
+        is_correct_working_directory = File.current_directory.absolute.normalize.path == base_directory.absolute.normalize.path
         group_builder.specify "will resolve relative paths relative to the currently running project" pending=(if is_correct_working_directory.not then "The working directory is not set-up as expected, so this test cannot run. Please run the tests using `ensoup` to ensure the working directory is correct.") <|
-            root = File.new "."
-            root.should_be_a File
+            dot = File.new "."
+            dot.should_be_a File
             # The `.` path should resolve to the base path
-            root.absolute.normalize.path . should_equal base_directory.absolute.normalize.path
+            dot.absolute.normalize.path . should_equal base_directory.absolute.normalize.path
 
             expected_file = base_directory / "abc" / "def.txt"
             f = File.new "abc/def.txt"
@@ -396,7 +396,8 @@ add_specs suite_builder =
             Test_Environment.unsafe_with_environment_override "ENSO_CLOUD_PROJECT_DIRECTORY_PATH" subdir.path <|
                 # Flush caches to ensure fresh dir is used
                 Enso_User.flush_caches
-                action
+                Test.with_clue "(running with ENSO_CLOUD_PROJECT_DIRECTORY_PATH set to "+subdir.path+") " <|
+                    action
 
         group_builder.specify "will resolve relative paths as Cloud paths if running in the Cloud" pending=cloud_setup.real_cloud_pending <|
             with_temporary_cloud_root <|