Build script cleanups (#9931)

This PR removes unused, commented-out, or otherwise spurious code from the build scripts. Dependencies were also reviewed and cleaned up.

No functional changes intended.
This commit is contained in:
Michał Wawrzyniec Urbańczyk 2024-05-13 15:56:28 +02:00 committed by GitHub
parent 2ed83f2fb5
commit b76e4d678d
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
43 changed files with 0 additions and 990 deletions

22
Cargo.lock generated
View File

@ -1341,7 +1341,6 @@ dependencies = [
"aws-sdk-ecr",
"aws-sdk-s3",
"base64 0.13.1",
"byte-unit",
"bytes",
"chrono",
"clap 4.5.4",
@ -1354,7 +1353,6 @@ dependencies = [
"enso-font",
"enso-install-config",
"futures",
"futures-util",
"glob",
"handlebars",
"heck 0.4.1",
@ -1414,20 +1412,15 @@ name = "enso-build-cli"
version = "0.1.0"
dependencies = [
"anyhow",
"byte-unit",
"clap 4.5.4",
"derivative",
"enso-build",
"enso-build-base",
"enso-formatter",
"futures-util",
"glob",
"humantime",
"ide-ci",
"octocrab",
"tempfile",
"tokio",
"toml 0.5.11",
"tracing",
]
@ -1514,20 +1507,14 @@ dependencies = [
"chrono",
"dirs",
"embed-resource",
"enso-build-base",
"enso-install-config",
"flate2",
"ide-ci",
"indicatif",
"mslnk",
"named-lock",
"native-windows-gui",
"self-replace",
"serde_json",
"strum",
"sysinfo",
"tar",
"tokio",
"tracing",
"tracing-subscriber",
"windows 0.53.0",
@ -1555,7 +1542,6 @@ dependencies = [
"byte-unit",
"chrono",
"embed-resource",
"enso-build-base",
"enso-install",
"enso-install-config",
"flate2",
@ -1564,11 +1550,6 @@ dependencies = [
"serde_json",
"sysinfo",
"tar",
"tokio",
"tracing",
"tracing-subscriber",
"winapi",
"windows 0.52.0",
]
[[package]]
@ -1723,8 +1704,6 @@ name = "enso-uninstaller"
version = "0.1.0"
dependencies = [
"anyhow",
"enso-build",
"enso-build-base",
"enso-install",
"enso-install-config",
"ide-ci",
@ -2414,7 +2393,6 @@ dependencies = [
"pathdiff",
"platforms",
"portpicker",
"rand 0.8.5",
"regex",
"reqwest",
"semver",

View File

@ -11,14 +11,12 @@ aws-sdk-ecr = "0.21.0"
aws-sdk-s3 = "0.21.0"
base64 = "0.13.0"
bytes = { workspace = true }
byte-unit = { workspace = true }
chrono = { workspace = true }
clap = { workspace = true }
derivative = { workspace = true }
derive_more = { workspace = true }
dirs = { workspace = true }
futures = { workspace = true }
futures-util = "0.3.17"
glob = "0.3.0"
handlebars = "4.3.5"
heck = "0.4.0"

View File

@ -1,21 +0,0 @@
use enso_build::prelude::*;
use ide_ci::github::setup_octocrab;
use ide_ci::github::Repo;
use octocrab::models::ReleaseId;
/// Ad-hoc debug utility: fetch one specific release (by its numeric id) from the
/// staging repository and dump it to stderr for inspection.
#[tokio::main]
async fn main() -> Result {
    let client = setup_octocrab().await?;
    let staging_repo = Repo::from_str("enso-org/enso-staging")?;
    let release = staging_repo
        .handle(&client)
        .repos()
        .releases()
        .get_by_id(ReleaseId(59585385))
        .await?;
    dbg!(&release);
    Ok(())
}

View File

@ -1,31 +0,0 @@
use enso_build::prelude::*;
use aws_sdk_s3::model::ObjectCannedAcl;
use aws_sdk_s3::types::ByteStream;
use aws_sdk_s3::Client;
use enso_build::aws::s3::BucketContext;
use enso_build::aws::EDITIONS_BUCKET_NAME;
/// Ad-hoc debug utility: upload the currently running executable to the editions
/// S3 bucket (under the `enso` prefix) and dump the upload result to stderr.
#[tokio::main]
async fn main() -> Result {
    let aws_config = dbg!(aws_config::load_from_env().await);
    let bucket = BucketContext {
        client: Client::new(&aws_config),
        bucket: EDITIONS_BUCKET_NAME.to_string(),
        upload_acl: ObjectCannedAcl::PublicRead,
        key_prefix: Some("enso".into()),
    };
    let destination_key = "test_file.exe";
    let payload = ByteStream::from_path(&std::env::current_exe()?).await?;
    dbg!(bucket.put(destination_key, payload).await?);
    Ok(())
}

View File

@ -128,15 +128,6 @@ mod tests {
use super::*;
// #[tokio::test]
// async fn aaa() -> Result {
// let repo = RepoContext::from_str("enso-org/enso")?;
// let paths =
// Paths::new_version(r"H:\NBO\enso", Version::parse("2022.1.1-nightly.2022-01-28")?)?;
// update_manifest(&repo, &paths).await?;
// Ok(())
// }
#[test]
fn updating_manifest() -> Result {
let old_nightly = serde_yaml::from_str::<Manifest>(

View File

@ -1,14 +0,0 @@
// const fs = require('fs')
//
// const path = 'build.sbt'
// const version = process.argv[2]
// const edition = process.argv[3]
//
// const content = fs.readFileSync(path, { encoding: 'utf-8' })
// const updated = content
// .replace(/val ensoVersion.*= ".*"/, 'val ensoVersion = "' + version + '"')
// .replace(/val currentEdition.*= ".*"/, 'val currentEdition = "' + edition + '"')
// fs.writeFileSync(path, updated)
//
// console.log('Updated build version to ' + version)
// console.log('Updated build edition to ' + edition)

View File

@ -9,7 +9,6 @@ use crate::version::ENSO_VERSION;
use anyhow::Context;
use ide_ci::env::known::electron_builder::WindowsSigningCredentials;
use ide_ci::io::download_all;
use ide_ci::program::command::FallibleManipulator;
use ide_ci::programs::node::NpmCommand;
use ide_ci::programs::Npm;
@ -36,11 +35,6 @@ lazy_static! {
pub static ref BUILD_INFO: PathBuf = PathBuf::from("build.json");
}
pub const IDE_ASSETS_URL: &str =
"https://github.com/enso-org/ide-assets/archive/refs/heads/main.zip";
pub const ARCHIVED_ASSET_FILE: &str = "ide-assets-main/content/assets/";
pub mod env {
use super::*;
@ -148,16 +142,6 @@ impl FallibleManipulator for IconsArtifacts {
}
}
/// Fill the directory under `output_path` with the assets.
pub async fn download_js_assets(output_path: impl AsRef<Path>) -> Result {
let output = output_path.as_ref();
let archived_asset_prefix = PathBuf::from(ARCHIVED_ASSET_FILE);
let archive = download_all(IDE_ASSETS_URL).await?;
let mut archive = zip::ZipArchive::new(std::io::Cursor::new(archive))?;
ide_ci::archive::zip::extract_subtree(&mut archive, &archived_asset_prefix, output)?;
Ok(())
}
/// Get a relative path to the Project Manager executable in the PM bundle.
pub fn path_to_executable_in_pm_bundle(
artifact: &generated::ProjectManagerBundle,
@ -370,15 +354,3 @@ impl IdeDesktop {
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::*;
#[tokio::test]
async fn download_test() -> Result {
let temp = TempDir::new()?;
download_js_assets(temp.path()).await?;
Ok(())
}
}

View File

@ -33,7 +33,6 @@ pub mod prelude {
}
pub mod aws;
pub mod bump_version;
pub mod changelog;
pub mod ci;
pub mod ci_gen;

View File

@ -226,27 +226,4 @@ mod tests {
std::mem::forget(child);
Ok(())
}
#[tokio::test]
#[ignore]
async fn test_postgres() -> Result {
// let config = Configuration {
// postgres_container: ContainerId("something".into()),
// endpoint: EndpointConfiguration::deduce()?,
// version: "latest".into(),
// user: "test".into(),
// password: "test".into(),
// database_name: "test".into(),
// };
// let child = Postgresql::start(config).await?;
// std::mem::forget(child);
// // let mut httpbin = get_and_spawn_httpbin_on_free_port().await?;
// Command::new("cmd")
// .args(["/c",
// "H:\\NBO\\enso2\\built-distribution\\enso-engine-0.2.32-SNAPSHOT-windows-amd64\\enso-0.2.
// 32-SNAPSHOT\\bin\\enso", "--no-ir-caches", "--run",
// "H:\\NBO\\enso2\\test\\Database_Tests"]).run_ok().await?; httpbin.process.kill().
// await?;
Ok(())
}
}

View File

@ -25,10 +25,8 @@ use octocrab::models::repos::Asset;
// ==============
pub mod backend;
pub mod engine;
pub mod gui;
pub mod ide;
pub mod project_manager;
pub mod runtime;
pub mod wasm;
@ -256,20 +254,6 @@ pub trait IsTarget: Clone + Debug + Sized + Send + Sync + 'static {
todo!("Not implemented for target {self:?}!")
}
// /// Upload the artifact as an asset to the GitHub release.
// fn upload_asset(
// &self,
// release_handle: ReleaseHandle,
// output: impl Future<Output = Result<Self::Artifact>> + Send + 'static,
// ) -> BoxFuture<'static, Result> {
// async move {
// let artifact = output.await?;
// release_handle.upload_compressed_dir(&artifact).await?;
// Ok(())
// }
// .boxed()
// }
fn download_asset(
&self,
context: Context,

View File

@ -1,73 +0,0 @@
// use crate::prelude::*;
//
// use crate::engine::BuildConfigurationFlags;
// use crate::project::Context;
// use crate::project::IsArtifact;
// use crate::project::IsTarget;
//
// use ide_ci::goodie::GoodieDatabase;
// use ide_ci::ok_ready_boxed;
//
// pub use crate::project::backend::BuildInput;
// use crate::source::BuildTargetJob;
// use crate::source::WithDestination;
//
// #[derive(Clone, Debug)]
// pub struct Artifact {
// pub root: PathBuf,
// }
//
// impl AsRef<Path> for Artifact {
// fn as_ref(&self) -> &Path {
// &self.root
// }
// }
//
// impl IsArtifact for Artifact {}
//
//
// #[derive(Clone, Copy, Debug, PartialEq)]
// pub struct Engine;
//
// impl IsTarget for Engine {
// type BuildInput = BuildInput;
// type Artifact = Artifact;
//
// fn artifact_name(&self) -> String {
// "Enso Engine".into()
// }
//
// fn adapt_artifact(self, path: impl AsRef<Path>) -> BoxFuture<'static, Result<Self::Artifact>>
// { ok_ready_boxed(Artifact { root: path.as_ref().into() })
// }
//
// fn build_internal(
// &self,
// context: Context,
// job: BuildTargetJob<Self>,
// ) -> BoxFuture<'static, Result<Self::Artifact>> {
// let WithDestination { inner, destination } = job;
// let this = self.clone();
// async move {
// let paths = crate::paths::Paths::new_versions(&inner.repo_root, inner.versions)?;
// let context = crate::engine::context::RunContext {
// operation: crate::engine::Operation::Build,
// goodies: GoodieDatabase::new()?,
// config: BuildConfigurationFlags {
// clean_repo: false,
// build_engine_package: true,
// ..crate::engine::NIGHTLY
// }
// .into(),
// inner: context,
// paths,
// };
// let artifacts = context.build().await?;
// let engine_distribution =
// artifacts.packages.engine.context("Missing Engine Distribution!")?;
// ide_ci::fs::mirror_directory(&engine_distribution.dir, &destination).await?;
// this.adapt_artifact(destination).await
// }
// .boxed()
// }
// }

View File

@ -1,72 +0,0 @@
// use crate::prelude::*;
//
// use crate::engine::BuildConfigurationFlags;
// use crate::project::Context;
// use crate::project::IsArtifact;
// use crate::project::IsTarget;
//
// use ide_ci::goodie::GoodieDatabase;
// use ide_ci::ok_ready_boxed;
//
// pub use crate::project::backend::BuildInput;
// use crate::source::BuildTargetJob;
// use crate::source::WithDestination;
//
// #[derive(Clone, Debug)]
// pub struct Artifact {
// pub root: PathBuf,
// }
//
// impl AsRef<Path> for Artifact {
// fn as_ref(&self) -> &Path {
// &self.root
// }
// }
//
// impl IsArtifact for Artifact {}
//
// #[derive(Clone, Copy, Debug, PartialEq)]
// pub struct ProjectManager;
//
// impl IsTarget for ProjectManager {
// type BuildInput = BuildInput;
// type Artifact = Artifact;
//
// fn artifact_name(&self) -> String {
// "Enso Project Manager".into()
// }
//
// fn adapt_artifact(self, path: impl AsRef<Path>) -> BoxFuture<'static, Result<Self::Artifact>>
// { ok_ready_boxed(Artifact { root: path.as_ref().into() })
// }
//
// fn build_internal(
// &self,
// context: Context,
// job: BuildTargetJob<Self>,
// ) -> BoxFuture<'static, Result<Self::Artifact>> {
// let WithDestination { inner, destination } = job;
// let this = self.clone();
// async move {
// let paths = crate::paths::Paths::new_versions(&inner.repo_root, inner.versions)?;
// let context = crate::engine::context::RunContext {
// operation: crate::engine::Operation::Build,
// goodies: GoodieDatabase::new()?,
// config: BuildConfigurationFlags {
// clean_repo: false,
// build_project_manager_package: true,
// ..crate::engine::NIGHTLY
// }
// .into(),
// inner: context,
// paths,
// };
// let artifacts = context.build().await?;
// let engine_distribution =
// artifacts.packages.project_manager.context("Missing Project Manager package!")?;
// ide_ci::fs::mirror_directory(&engine_distribution.dir, &destination).await?;
// this.adapt_artifact(destination).await
// }
// .boxed()
// }
// }

View File

@ -157,7 +157,6 @@ pub async fn test_all(repo_root: PathBuf, browsers: &[Browser]) -> Result {
.apply(&wasm_pack::TestFlags::Headless)
.apply_iter(browser_flags.iter().copied())
.env("WASM_BINDGEN_TEST_TIMEOUT", "300")
// .args(&wasm_pack_args)
.arg(member.strip_prefix(&repo_root).with_context(|| {
format!(
"Failed to strip prefix {} from {}. Is the test part of the repository?",

View File

@ -41,7 +41,6 @@ pathdiff = "0.2.1"
path-slash = "0.2.1"
platforms = { workspace = true }
portpicker = { workspace = true }
rand = "0.8.4"
regex = { workspace = true }
reqwest = { workspace = true }
semver = { workspace = true }

View File

@ -15,7 +15,6 @@ use tempfile::tempdir;
// === Export ===
// ==============
pub mod artifact;
pub mod context;
pub mod download;
pub mod models;

View File

@ -114,13 +114,8 @@ pub struct ContainerEntry {
impl ContainerEntry {
pub fn relative_path(&self) -> PathBuf {
//ensure!(self.path.is_relative(), "Path {} is not relative.", self.path.display());
// First part is artifact name.
let path_iter = self.path.iter().skip(1);
// ensure!(
// path_iter.next() == Some(&OsStr::new(artifact_name)),
// "Entry path does not start with an artifact name."
// );
PathBuf::from_iter(path_iter)
}
}

View File

@ -36,16 +36,8 @@ pub mod endpoints {
artifact_name: impl AsRef<str>,
) -> Result<CreateArtifactResponse> {
let body = CreateArtifactRequest::new(artifact_name.as_ref(), None);
//
// dbg!(&self.json_client);
// dbg!(serde_json::to_string(&body)?);
let request = json_client.post(artifact_url).json(&body).build()?;
// dbg!(&request);
// TODO retry
let response = json_client.execute(request).await?;
// dbg!(&response);
// let status = response.status();
check_response_json(response, |status, err| match status {
StatusCode::FORBIDDEN => err.context(
"Artifact storage quota has been hit. Unable to upload any new artifacts.",
@ -130,11 +122,7 @@ pub mod endpoints {
bin_client: &reqwest::Client,
artifact_location: Url,
) -> Result<Pin<Box<dyn AsyncRead + Send>>> {
// debug!("Downloading {} to {}.", artifact_location, destination.as_ref().display());
// let file = tokio::fs::File::create(destination);
let response = crate::io::web::execute(bin_client.get(artifact_location)).await?;
// let expected_size = decode_content_length(response.headers());
let is_gzipped = response
.headers()
.get(reqwest::header::ACCEPT_ENCODING)
@ -144,10 +132,8 @@ pub mod endpoints {
if is_gzipped {
let decoded_stream = async_compression::tokio::bufread::GzipDecoder::new(reader);
Ok(Box::pin(decoded_stream) as Pin<Box<dyn AsyncRead + Send>>)
// tokio::io::copy(&mut decoded_stream, &mut file.await?).await?;
} else {
Ok(Box::pin(reader) as Pin<Box<dyn AsyncRead + Send>>)
// tokio::io::copy(&mut reader, &mut destination).await?;
}
}
}
@ -216,7 +202,6 @@ pub async fn check_response(
response: Response,
additional_context: impl FnOnce(StatusCode, anyhow::Error) -> anyhow::Error,
) -> Result<Bytes> {
// dbg!(&response);
let status = response.status();
if !status.is_success() {
let mut err = anyhow!("Server replied with status {}.", status);

View File

@ -1 +0,0 @@

View File

@ -5,12 +5,6 @@ use std::fmt::Write;
pub trait CommandExt {
// fn run_ok(&mut self, program: &impl Program) -> BoxFuture<'static, Result<()>>;
//
// fn output_ok(&mut self) -> BoxFuture<'static, Result<Output>>;
// // TODO: `spawn` but does logs like some other methods. They all need a naming unification
// pass. fn spawn_nicer(&mut self) -> Result<Child>;
fn as_std(&self) -> &std::process::Command;
fn describe(&self) -> String {
@ -55,24 +49,4 @@ impl CommandExt for tokio::process::Command {
fn as_std(&self) -> &std::process::Command {
self.as_std()
}
// fn run_ok(&mut self) -> BoxFuture<'static, Result<()>> {
// let pretty = self.describe();
// debug!("Will run: {}", pretty);
// let status = self.status();
// async move { status.await?.exit_ok().context(format!("Command failed: {}", pretty)) }
// .boxed()
// }
//
// fn output_ok(&mut self) -> BoxFuture<'static, Result<Output>> {
// let pretty = self.describe();
// debug!("Will run: {}", pretty);
// let output = self.output();
// async move { output.await.context(format!("Command failed: {}", pretty)) }.boxed()
// }
//
// fn spawn_nicer(&mut self) -> Result<Child> {
// let pretty = self.describe();
// debug!("Spawning {}", pretty);
// self.spawn().context(format!("Failed to spawn: {}", pretty))
// }
}

View File

@ -6,7 +6,6 @@ use anyhow::Context;
pub trait OutputExt {
fn single_line_stdout(&self) -> Result<String>;
//fn run_ok(&self) -> Result;
fn describe(&self) -> String;
fn stdout_as_str(&self) -> Result<&str>;
@ -23,9 +22,6 @@ impl OutputExt for std::process::Output {
}
}
// fn run_ok(&self) -> Result {
// self.status.exit_ok().with_context(|| self.describe())
// }
fn describe(&self) -> String {
format!(
"Stdout:\n{:?}\n\nStderr:\n{:?}\n",

View File

@ -70,24 +70,3 @@ where
/// Build a future that runs `f` only when `should_do` holds; otherwise the
/// resulting [`OptionFuture`] resolves to `None` without constructing `f`'s future.
pub fn perhaps<F: Future>(should_do: bool, f: impl FnOnce() -> F) -> OptionFuture<F> {
    let maybe_future = if should_do { Some(f()) } else { None };
    maybe_future.into()
}
// pub fn perhaps_spawn_try<'a, F>(
// should_do: bool,
// f: impl FnOnce() -> F + 'a,
// ) -> BoxFuture<'static, Result<Option<F::Ok>>>
// where
// F: TryFuture + Send + 'static,
// F::Ok: Send + 'static,
// F::Error: Send + Sync + 'static,
// anyhow::Error: From<F::Error>,
// {
// let job = should_do.then(|| tokio::spawn(f().into_future()));
// async move {
// if let Some(job) = job {
// Ok(Some(job.await??))
// } else {
// Ok(None)
// }
// }
// .boxed()
// }

View File

@ -144,14 +144,3 @@ pub async fn complete_tasks() -> Result {
debug!("All pending tasks have been completed.");
Ok(())
}
//
// pub fn complete_tasks(rt: &Runtime) -> Result {
// info!("Waiting for remaining tasks to complete.");
// while let tasks = std::mem::replace(&mut GLOBAL.lock().unwrap().ongoing_tasks, default()) &&
// !tasks.is_empty() { let tasks = try_join_all(tasks, AsyncPolicy::FutureParallelism);
// rt.block_on(tasks)?;
// }
// Ok(())
// }

View File

@ -1,8 +0,0 @@
//! Top-level module for [goodie](`crate::Goodie`) implementations.
// ==============
// === Export ===
// ==============
pub mod shader_tools;

View File

@ -1,133 +0,0 @@
// use crate::prelude::*;
//
// use crate::goodie::GoodieDatabase;
//
// use crate::fs::expect_dir;
// use crate::fs::expect_file;
// use crate::io::download_and_extract;
// use crate::programs::Bash;
// use lazy_static::lazy_static;
// use std::env::consts::EXE_EXTENSION;
// use std::env::consts::EXE_SUFFIX;
//
// lazy_static! {
// pub static ref PROGRAM_NAME: String = format!("{}-musl-gcc{}", filename_stem(), EXE_SUFFIX);
// }
//
// pub struct Gcc;
//
// impl Program for Gcc {
// fn executable_name(&self) -> &'static str {
// &PROGRAM_NAME
// }
// }
//
// pub struct Musl;
//
// pub struct Instance {
// directory: PathBuf,
// }
//
// impl crate::goodie::Instance for Instance {
// fn add_to_environment(&self) -> anyhow::Result<()> {
// crate::env::set_var("TOOLCHAIN_DIR", &self.directory);
// crate::env::prepend_to_path(self.directory.join("bin"))
// }
// }
//
// #[async_trait]
// impl Goodie for Musl {
// const NAME: &'static str = "musl libc toolchain";
// type Instance = Instance;
//
// async fn is_already_available(&self) -> Result<bool> {
// Ok(Gcc.lookup().is_ok())
// }
//
// async fn lookup(&self, database: &GoodieDatabase) -> Result<Self::Instance> {
// database.find_dir("musl").map(|directory| Instance { directory })
// }
//
// async fn install(&self, database: &GoodieDatabase) -> Result<Self::Instance> {
// // Reportedly for my "convenience". :(
// let archive_format = if TARGET_OS == OS::Windows { "zip" } else { "tgz" };
// let url = format!(
// "https://more.musl.cc/10.2.1/x86_64-linux-musl/{}.{}",
// filename_stem(),
// archive_format
// );
// // let url = format!("https://musl.cc/{}.{}", filename_stem(), archive_format);
// let downloaded_dir = database.root_directory.join(filename_stem());
// let target_dir = database.root_directory.join("musl");
// crate::fs::reset_dir(&downloaded_dir)?;
// crate::fs::reset_dir(&target_dir)?;
// // let result = (async move || -> Result {
// crate::io::download_and_extract(url.clone(), &database.root_directory).await?;
// add_zlib(&downloaded_dir).await?;
// // Ok(())
// // })().await;
// // if result.is_err() {
// // crate::io::remove_dir_if_exists(&downloaded_dir)?;
// // crate::io::remove_dir_if_exists(&target_dir)?;
// // };
// // result?;
// std::fs::rename(downloaded_dir, target_dir)?;
// self.lookup(database).await
// }
// }
//
// pub async fn add_zlib(musl_toolchain: &Path) -> Result {
// let temp = tempfile::tempdir()?;
// let zlib_url = Url::from_str("http://www.zlib.net/zlib-1.2.11.tar.gz")?;
// let zlib_dirname = PathBuf::from("zlib-1.2.11");
// download_and_extract(zlib_url, &temp).await?;
// let zlib_path = temp.path().join(zlib_dirname);
// expect_dir(&zlib_path)?;
// let gcc_path = musl_toolchain.join_iter(["bin",
// "gcc"]).with_appended_extension(EXE_EXTENSION); expect_file(&gcc_path)?;
//
// Bash.run_command()?
// .arg("./configure --prefix=$TOOLCHAIN_DIR --static && make && make install")
// .env("CC", &gcc_path)
// .env("TOOLCHAIN_DIR", musl_toolchain)
// .current_dir(&zlib_path)
// .run_ok()
// .await?;
//
// Ok(())
// }
//
// pub fn target_path() -> String {
// let os_name = match TARGET_OS {
// OS::Linux => "linux",
// OS::Windows => "w64",
// other_os => unimplemented!("System `{}` is not supported!", other_os),
// };
//
// let arch_name = match TARGET_ARCH {
// Arch::X86_64 => "x86_64",
// Arch::AArch64 => "aarch64",
// other_arch => unimplemented!("Architecture `{}` is not supported!", other_arch),
// };
//
// let name: &[&str] = if TARGET_OS == OS::Windows { &[] } else { &["musl"] };
// [arch_name, os_name].iter().chain(name).join("-")
// }
//
// pub fn filename_stem() -> String {
// format!("{}-native", target_path())
// }
//
// #[cfg(test)]
// mod tests {
// use super::*;
//
// #[tokio::test]
// async fn musl_get_test() -> Result {
// let db = GoodieDatabase::new()?;
// db.require(&Musl).await?;
// Ok(())
// }
// }

View File

@ -1,37 +0,0 @@
// use crate::goodie::GoodieDatabase;
// use crate::prelude::*;
// use crate::programs;
//
// pub struct Sbt;
//
// pub struct Instance {
// directory: PathBuf,
// }
//
// impl crate::goodie::Instance for Instance {
// fn add_to_environment(&self) -> anyhow::Result<()> {
// crate::env::prepend_to_path(self.directory.join("bin"))
// }
// }
//
// #[async_trait]
// impl Goodie for Sbt {
// const NAME: &'static str = "SBT";
// type Instance = Instance;
//
// async fn is_already_available(&self) -> Result<bool> {
// Ok(programs::Sbt.lookup().is_ok())
// }
//
// async fn lookup(&self, database: &GoodieDatabase) -> Result<Self::Instance> {
// database.find_dir("sbt").map(|directory| Instance { directory })
// }
//
// async fn install(&self, database: &GoodieDatabase) -> Result<Self::Instance> {
// let url = "https://github.com/sbt/sbt/releases/download/v1.5.5/sbt-1.5.5.tgz";
// crate::io::download_and_extract(url.clone(), &database.root_directory).await?;
// self.lookup(database).await
// }
// }

View File

@ -1,79 +0,0 @@
//! Shader Tools is our collection of tools for working with shaders.
//!
//! The included programs are:
//! * [glslc](Glslc);
//! * [spirv-opt](SpirvOpt);
//! * [spirv-cross](SpirvCross).
//!
//! This module only deals with downloading and activating the tools. The code for building and
//! uploading the tools package is in the `enso-build-shader-tools` crate.
use crate::prelude::*;
use crate::cache::goodie;
use crate::cache::Cache;
use crate::env::known::PATH;
use crate::github::RepoRef;
use crate::programs::shaderc::Glslc;
use crate::programs::shaderc::SpirvOpt;
use crate::programs::spirv_cross::SpirvCross;
// =================
// === Constants ===
// =================
/// Repository where we store releases of the shader tools.
pub const SHADER_TOOLS_REPO: RepoRef = RepoRef { owner: "enso-org", name: "shader-tools" };
/// Version of the shader tools package that we download.
pub const VERSION: Version = Version::new(0, 2, 0);
// =========================
// === Asset description ===
// =========================
/// Name of the released asset that contains the shader tools package for the given OS.
pub fn asset_name(os: OS) -> String {
    // Only x64 binaries are published at the moment, so the architecture is hardcoded.
    format!("shader-tools-{}-{}.tar.gz", os, Arch::X86_64)
}
// =========================
// === Goodie definition ===
// =========================
/// Goodie that downloads and activates our prebuilt shader-tools package.
#[derive(Clone, Copy, Debug, Default)]
pub struct ShaderTools;

impl Goodie for ShaderTools {
    fn get(&self, cache: &Cache) -> BoxFuture<'static, Result<PathBuf>> {
        let url = SHADER_TOOLS_REPO.url().and_then(|url_base| {
            let asset = asset_name(TARGET_OS);
            let suffix = format!("releases/download/{VERSION}/{asset}");
            url_base
                .join(&suffix)
                // BUGFIX: the message must be built with `format!` — previously a plain
                // string literal was passed, so `{suffix}` / `{url_base}` appeared
                // literally (uninterpolated) in the error message.
                .with_context(|| format!("Failed to append suffix {suffix} to URL {url_base}"))
        });
        goodie::download_try_url(url, cache)
    }

    fn is_active(&self) -> BoxFuture<'static, Result<bool>> {
        async move {
            // The package counts as active only when all three tools resolve on PATH.
            let glslc = Glslc.lookup();
            let spirv_cross = SpirvCross.lookup();
            let spirv_opt = SpirvOpt.lookup();
            Ok(glslc.is_ok() && spirv_cross.is_ok() && spirv_opt.is_ok())
        }
        .boxed()
    }

    fn activation_env_changes(&self, package_path: &Path) -> Result<Vec<crate::env::Modification>> {
        // The package ships its executables under `bin`; prepend that to PATH.
        let path = package_path.join_iter(["bin"]);
        let path = crate::env::Modification::prepend_path(&PATH, path);
        Ok(vec![path])
    }
}

View File

@ -28,7 +28,6 @@
pub mod actions;
pub mod archive;
pub mod buffer;
pub mod cache;
pub mod ci;
pub mod deploy;
@ -39,7 +38,6 @@ pub mod fs;
pub mod future;
pub mod github;
pub mod global;
pub mod goodies;
pub mod io;
pub mod log;
pub mod models;

View File

@ -12,7 +12,6 @@ pub mod location;
pub mod resolver;
pub mod shell;
pub mod version;
pub mod with_cwd;
pub use command::Command;
pub use resolver::Resolver;

View File

@ -232,23 +232,6 @@ pub trait IsCommandWrapper {
self
}
// fn spawn(&mut self) -> Result<Child> {
// self.borrow_mut_command().spawn().anyhow_err()
// }
//
//
// fn status(&mut self) -> BoxFuture<'static, Result<ExitStatus>> {
// let fut = self.borrow_mut_command().status();
// async move { fut.await.anyhow_err() }.boxed()
// }
//
// fn output(&mut self) -> BoxFuture<'static, Result<Output>> {
// let fut = self.borrow_mut_command().output();
// async move { fut.await.anyhow_err() }.boxed()
// }
/// Value-based variant of [`Self::current_dir`], for convenience.
fn with_current_dir(self, dir: impl AsRef<Path>) -> Self
where Self: Sized {
@ -424,16 +407,6 @@ impl Command {
}
})
}
// pub fn status(&mut self) -> BoxFuture<'static, Result<ExitStatus>> {
// let fut = self.borrow_mut_command().status();
// async move { fut.await.anyhow_err() }.boxed()
// }
//
// pub fn output(&mut self) -> BoxFuture<'static, Result<Output>> {
// let fut = self.borrow_mut_command().output();
// async move { fut.await.anyhow_err() }.boxed()
// }
}
impl Command {
@ -533,60 +506,3 @@ impl<T: Manipulator> Manipulator for Option<T> {
pub trait FallibleManipulator {
fn try_applying<C: IsCommandWrapper + ?Sized>(&self, command: &mut C) -> Result;
}
#[cfg(test)]
mod tests {
// use super::*;
// use crate::global::new_spinner;
// // use crate::global::println;
// use tokio::io::AsyncBufReadExt;
// use tokio::io::AsyncRead;
// use tokio::io::BufReader;
// use tokio::process::ChildStdout;
// use tokio::task::JoinHandle;
// pub fn spawn_log_processor(
// prefix: String,
// out: impl AsyncRead + Send + Unpin + 'static,
// ) -> JoinHandle<Result> {
// tokio::task::spawn(async move {
// let bufread = BufReader::new(out);
// let mut lines = bufread.lines();
// while let Some(line) = lines.next_line().await? {
// println(format!("{} {}", prefix, line))
// }
// println(format!("{} {}", prefix, "<ENDUT>"));
// Result::Ok(())
// })
// }U
//
// pub fn spawn_logged(cmd: &mut Command) {
// cmd.stdout(Stdio::piped());
// cmd.stderr(Stdio::piped());
// }
//
// #[tokio::test]
// async fn test_cmd_out_interception() -> Result {
// pretty_env_logger::init();
// let mut cmd = Command::new("cargo");
// cmd.arg("update");
// cmd.stdout(Stdio::piped());
// cmd.stderr(Stdio::piped());
//
// let mut child = cmd.spawn()?;
// spawn_log_processor("[out]".into(), child.stdout.take().unwrap());
// spawn_log_processor("[err]".into(), child.stderr.take().unwrap());
// let bar = new_spinner(format!("Running {:?}", cmd));
// child.wait().await?;
// Ok(())
// }
//
// #[tokio::test]
// async fn spawning() -> Result {
// println!("Start");
// tokio::process::Command::new("python").spawn()?.wait().await?;
// println!("Finish");
// Ok(())
// }
}

View File

@ -1,26 +0,0 @@
use crate::prelude::*;
/// Wrapper over a program that invokes it with a given working directory.
#[derive(Clone, Debug, Default)]
pub struct WithCwd<T> {
    pub working_directory: Option<PathBuf>,
    pub underlying_program: T,
}

impl<T> WithCwd<T> {
    /// Wrap `underlying_program`, pinning its invocations to `working_directory`.
    pub fn new(underlying_program: T, working_directory: impl Into<PathBuf>) -> Self {
        let working_directory = Some(working_directory.into());
        Self { working_directory, underlying_program }
    }
}

impl<T: Program> Program for WithCwd<T> {
    /// Delegates to the wrapped program's executable name.
    fn executable_name(&self) -> &str {
        self.underlying_program.executable_name()
    }

    /// The directory every invocation runs in, if one was configured.
    fn current_directory(&self) -> Option<PathBuf> {
        self.working_directory.as_ref().cloned()
    }
}

View File

@ -23,9 +23,7 @@ pub mod rustup;
pub mod sbt;
pub mod seven_zip;
pub mod sh;
pub mod shaderc;
pub mod signtool;
pub mod spirv_cross;
pub mod strip;
pub mod tar;
pub mod vs;

View File

@ -1,36 +0,0 @@
//! A collection of tools, libraries, and tests for Vulkan shader compilation.
//!
//! See the [GitHub repository](https://github.com/google/shaderc) for more information.
use crate::prelude::*;
// =============
// === glslc ===
// =============
/// A command-line GLSL/HLSL to SPIR-V compiler with Clang-compatible arguments.
#[derive(Clone, Copy, Debug, Default)]
pub struct Glslc;

impl Program for Glslc {
    /// The executable is expected to be discoverable under this name (on `PATH`).
    fn executable_name(&self) -> &'static str {
        "glslc"
    }
}
// =================
// === spirv-opt ===
// =================
/// SPIR-V Optimizer.
#[derive(Clone, Copy, Debug, Default)]
pub struct SpirvOpt;

impl Program for SpirvOpt {
    /// The executable is expected to be discoverable under this name (on `PATH`).
    fn executable_name(&self) -> &'static str {
        "spirv-opt"
    }
}

View File

@ -1,16 +0,0 @@
//! Wrapper for [spirv-cross](https://github.com/KhronosGroup/SPIRV-Cross).
use crate::prelude::Program;
/// SPIRV-Cross is a practical tool and library for performing reflection on SPIR-V and
/// disassembling SPIR-V back to high level languages.
#[derive(Clone, Copy, Debug, Default)]
pub struct SpirvCross;

impl Program for SpirvCross {
    /// The executable is expected to be discoverable under this name (on `PATH`).
    fn executable_name(&self) -> &'static str {
        "spirv-cross"
    }
}

View File

@ -7,20 +7,15 @@ default-run = "enso-build-cli"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
anyhow = { workspace = true }
byte-unit = { workspace = true }
clap = { workspace = true }
derivative = { workspace = true }
enso-build-base = { path = "../base" }
enso-build = { path = "../build" }
enso-formatter = { path = "../enso-formatter" }
futures-util = "0.3.17"
glob = "0.3.0"
humantime = "2.1.0"
ide-ci = { path = "../ci_utils" }
octocrab = { workspace = true }
tempfile = "3.2.0"
tokio = { workspace = true }
toml = "0.5.9"
tracing = { workspace = true }
[lints]

View File

@ -23,12 +23,10 @@ use octocrab::models::RunId;
// ==============
pub mod backend;
pub mod engine;
pub mod git_clean;
pub mod gui;
pub mod ide;
pub mod java_gen;
pub mod project_manager;
pub mod release;
pub mod runtime;
pub mod wasm;

View File

@ -1,17 +0,0 @@
// use enso_build::prelude::*;
//
// use crate::arg::Source;
// use crate::source_args_hlp;
// use clap::Args;
// // use enso_build::project::engine::Engine;
//
// source_args_hlp!(Engine, "engine", BuildInput);
//
// #[derive(Args, Clone, Debug, PartialEq)]
// pub struct BuildInput {}
//
// #[derive(Args, Clone, Debug)]
// pub struct Target {
// #[clap(flatten)]
// pub source: Source<Engine>,
// }

View File

@ -1,18 +0,0 @@
// use enso_build::prelude::*;
//
// use crate::arg::Source;
// use crate::source_args_hlp;
// use enso_build::project::project_manager::ProjectManager;
//
// use clap::Args;
//
// source_args_hlp!(ProjectManager, "project-manager", BuildInput);
//
// #[derive(Args, Clone, Debug, PartialEq)]
// pub struct BuildInput {}
//
// #[derive(Args, Clone, Debug)]
// pub struct Target {
// #[clap(flatten)]
// pub source: Source<ProjectManager>,
// }

View File

@ -1,24 +0,0 @@
use enso_build::prelude::*;
use ide_ci::actions::workflow::MessageLevel;
#[tokio::main]
async fn main() -> Result {
setup_logging()?;
ide_ci::actions::workflow::debug("Debug");
ide_ci::actions::workflow::message(MessageLevel::Debug, "Debug2");
ide_ci::actions::workflow::message(MessageLevel::Notice, "Notice");
ide_ci::actions::workflow::message(MessageLevel::Warning, "Warning");
ide_ci::actions::workflow::message(MessageLevel::Error, "Error");
println!("Hello");
trace!("Hello");
debug!("Hello");
info!("Hello");
warn!("Hello");
error!("Hello");
Ok(())
}

View File

@ -1,87 +0,0 @@
//! This script is used to disable the `wasm-opt` optimization in the crates that can be used as
//! WASM entry points. Unfortunately, wasm-opt does not allow for disabling wasm-opt through a
//! command line flag, so we have to disable it by setting an appropriate flag in each Cargo.toml.
// === Features ===
#![feature(associated_type_bounds)]
use enso_build_cli::prelude::*;
use enso_build::paths::parent_cargo_toml;
use enso_build::repo::deduce_repository_path;
/// Path in the Cargo.toml file where the `wasm-opt` flag is stored.
///
/// This flag controls whether wasm-pack shall invoke wasm-opt on the generated wasm file.
const WASM_OPT_PATH: [&str; 6] =
    ["package", "metadata", "wasm-pack", "profile", "release", "wasm-opt"];
/// TOML snippet that disables wasm-opt when appended to a Cargo.toml.
pub fn suffix_that_disables_wasm_opt() -> String {
    // Split the key path into the final key and the table that contains it,
    // e.g. `wasm-opt` under `package.metadata.wasm-pack.profile.release`.
    let (key, table_path) = WASM_OPT_PATH.split_last().unwrap();
    let table = table_path.join(".");
    format!(
        r#"
# Stop wasm-pack from running wasm-opt, because we run it from our build scripts in order to customize options.
[{table}]
{key} = false"#
    )
}
/// Check whether the Rust source file under the given path contains a WASM
/// entry point, i.e. the text of the `#[entry_point` attribute.
pub fn contains_entry_point(path: impl AsRef<Path>) -> Result<bool> {
    // Textual scan — no parsing; a match in a comment would also count.
    let source = ide_ci::fs::read_to_string(path)?;
    Ok(source.contains("#[entry_point"))
}
/// Retrieve a nested TOML item by indexing with each key in turn.
///
/// Returns `None` as soon as any key along the path is missing.
pub fn traverse(
    item: &toml::Value,
    keys: impl IntoIterator<Item: AsRef<str>>,
) -> Option<&toml::Value> {
    let mut current = item;
    for key in keys {
        current = current.get(key.as_ref())?;
    }
    Some(current)
}
/// Check if the given (parsed) Cargo.toml has already disabled wasm-opt.
///
/// Returns `true` only when the `wasm-opt` key exists and is explicitly set
/// to the boolean `false`; a missing key or non-boolean value yields `false`.
fn has_wasm_opt_disabled(document: &toml::Value) -> bool {
    // `== Some(false)` replaces `Option::contains(&false)`: `Option::contains`
    // was an unstable API (feature `option_result_contains`) that this file
    // does not enable and that was never stabilized. Behavior is identical.
    traverse(document, WASM_OPT_PATH).and_then(toml::Value::as_bool) == Some(false)
}
/// Disable wasm-opt in the Cargo.toml file.
///
/// Appends the disabling snippet to the manifest; does nothing if wasm-opt is
/// already disabled there. Panics if `path` is not an existing `Cargo.toml`.
fn disable_wasm_opt_in_cargo_toml(path: impl AsRef<Path>) -> Result {
    let path = path.as_ref();
    assert!(path.is_file());
    assert_eq!(path.file_name().unwrap(), "Cargo.toml");
    let manifest = toml::Value::from_str(&ide_ci::fs::read_to_string(path)?)?;
    if has_wasm_opt_disabled(&manifest) {
        info!("wasm-opt is already disabled in {}", path.display());
    } else {
        info!("Disabling wasm-opt in {}", path.display());
        ide_ci::fs::append(path, suffix_that_disables_wasm_opt())?;
    }
    Ok(())
}
// Entry point: scan the repository for WASM entry-point crates and patch each
// of their manifests to disable wasm-opt.
#[tokio::main]
async fn main() -> Result {
    setup_logging()?;
    let repo_root = deduce_repository_path()?;
    // Glob over every Rust source file in the repository.
    let sources_glob =
        PathBuf::from_iter([repo_root.as_str(), "**", "*.rs"]).display().to_string();
    info!("Searching for Rust source files in {}", sources_glob);
    let source_files = glob::glob(&sources_glob)?.try_collect_vec()?;
    info!("Completed source discovery. Found {} files.", source_files.len());
    // Keep only files that textually contain a WASM entry point.
    let entry_points: Vec<_> = source_files.into_iter().try_filter(|p| contains_entry_point(p))?;
    info!("{} of them are entry points.", entry_points.len());
    // Map each entry point to its owning crate manifest; BTreeSet deduplicates.
    let manifests: BTreeSet<_> = entry_points.into_iter().try_map(parent_cargo_toml)?;
    info!("They belong to {} crates.", manifests.len());
    for manifest in &manifests {
        disable_wasm_opt_in_cargo_toml(manifest)?;
    }
    Ok(())
}

View File

@ -8,18 +8,12 @@ edition = "2021"
[dependencies]
chrono = { workspace = true }
dirs = { workspace = true }
enso-build-base = { path = "../base" }
enso-install-config = { path = "config" }
flate2 = { workspace = true }
ide-ci = { path = "../ci_utils" }
indicatif = { workspace = true }
named-lock = "0.4.1"
self-replace = "1.3.7"
serde_json = { workspace = true }
strum = { workspace = true }
sysinfo = { workspace = true }
tar = { workspace = true }
tokio = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
windows = { version = "0.53.0", features = [

View File

@ -7,7 +7,6 @@ edition = "2021"
anyhow = { workspace = true }
byte-unit = { workspace = true }
chrono = { workspace = true }
enso-build-base = { path = "../../base" }
enso-install = { path = ".." }
enso-install-config = { path = "../config" }
flate2 = { workspace = true }
@ -15,11 +14,6 @@ ide-ci = { path = "../../ci_utils" }
serde_json = { workspace = true }
sysinfo = { workspace = true }
tar = { workspace = true }
tokio = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
windows = { workspace = true }
winapi = "0.3"
[target.'cfg(windows)'.dependencies]
native-windows-gui = { workspace = true }

View File

@ -7,8 +7,6 @@ edition = "2021"
[dependencies]
anyhow = { workspace = true }
enso-build-base = { path = "../../base" }
enso-build = { path = "../../build" }
enso-install = { path = ".." }
enso-install-config = { path = "../config" }
ide-ci = { path = "../../ci_utils" }