Package MSVC CRT redistributables in with the backend native images. (#4019)
This commit is contained in:
parent 74659301e7
commit 97ab0d7d5a

Cargo.lock (generated): 95 lines changed
@@ -1525,6 +1525,24 @@ version = "2.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3ee2393c4a91429dffb4bedf19f4d6abf27d8a732c8ce4980305d782e5426d57"

[[package]]
name = "dataview"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "47a802a2cad0ff4dfc4f3110da174b7a6928c315cae523e88638cfb72941b4d5"
dependencies = [
 "derive_pod",
]

[[package]]
name = "dataview"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "50eb3a329e19d78c3a3dfa4ec5a51ecb84fa3a20c06edad04be25356018218f9"
dependencies = [
 "derive_pod",
]

[[package]]
name = "deadpool"
version = "0.9.5"
@@ -1656,6 +1674,27 @@ dependencies = [
 "wasm-bindgen",
]

[[package]]
name = "dependency_runner"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e37adbef02511efe61ff835c8e30d8d5c6a00e4688184512d831a371f2615466"
dependencies = [
 "anyhow",
 "clap 3.2.23",
 "dataview 1.0.1",
 "fs-err",
 "msvc-demangler",
 "ntapi",
 "pelite",
 "regex",
 "roxmltree",
 "serde",
 "serde_json",
 "thiserror",
 "winapi 0.3.9",
]

[[package]]
name = "derivative"
version = "2.2.0"
@@ -1680,6 +1719,12 @@ dependencies = [
 "syn",
]

[[package]]
name = "derive_pod"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2ea6706d74fca54e15f1d40b5cf7fe7f764aaec61352a9fcec58fe27e042fc8"

[[package]]
name = "difference"
version = "2.0.0"
@@ -3340,6 +3385,12 @@ version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "85dcb89d2b10c5f6133de2efd8c11959ce9dbb46a2f7a4cab208c4eeda6ce1ab"

[[package]]
name = "fs-err"
version = "2.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0845fa252299212f0389d64ba26f34fa32cfe41588355f21ed507c59a0f64541"

[[package]]
name = "fs_extra"
version = "1.2.0"
@@ -3968,6 +4019,7 @@ dependencies = [
 "convert_case 0.6.0",
 "cron",
 "data-encoding",
 "dependency_runner",
 "derivative",
 "derive_more",
 "dirs",
@@ -4749,6 +4801,15 @@ dependencies = [
 "syn",
]

[[package]]
name = "msvc-demangler"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfb67c6dd0fa9b00619c41c5700b6f92d5f418be49b45ddb9970fbd4569df3c8"
dependencies = [
 "bitflags",
]

[[package]]
name = "multi-map"
version = "1.3.0"
@@ -4860,6 +4921,12 @@ dependencies = [
 "static_assertions",
]

[[package]]
name = "no-std-compat"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b93853da6d84c2e3c7d730d6473e8817692dd89be387eb01b94d7f108ecb5b8c"

[[package]]
name = "nom"
version = "7.1.1"
@@ -5323,6 +5390,25 @@ version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8835116a5c179084a830efb3adc117ab007512b535bc1a21c991d3b32a6b44dd"

[[package]]
name = "pelite"
version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a821dd5a5c4744099b50dc94a6a381c8b4b007f4d80da5334428e220945319b"
dependencies = [
 "dataview 0.1.2",
 "libc",
 "no-std-compat",
 "pelite-macros",
 "winapi 0.3.9",
]

[[package]]
name = "pelite-macros"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a7cf3f8ecebb0f4895f4892a8be0a0dc81b498f9d56735cb769dc31bf00815b"

[[package]]
name = "pem"
version = "1.1.0"
@@ -5998,6 +6084,15 @@ dependencies = [
 "winapi 0.3.9",
]

[[package]]
name = "roxmltree"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "112908c3ac4711a1554b3948432ecaf5f061a951aa326977b63f7f72a86a4c0e"
dependencies = [
 "xmlparser",
]

[[package]]
name = "rustc-demangle"
version = "0.1.21"
@@ -1,4 +1,4 @@
# This file is used to generate `target/debug/build/enso-build-<hash>/out/paths.rs`.
# This file is used to generate `target/<profile>/build/enso-build-<hash>/out/paths.rs`.
# Generation logic is in `ci_utils/src/paths.rs`.

<repo_root>/:
@@ -22,11 +22,26 @@
build/:
prettier/:
built-distribution/:
? path: "enso-engine-<triple>"
enso-engine-<triple>/:
? path: enso-<version>/
type: engine_package
"enso-bundle-<triple>":
var: engine_package
enso-project-manager-<triple>/:
? path: enso/
type: project_manager_package
var: project_manager_package
enso-launcher-<triple>/:
? path: enso/
type: launcher_package
var: launcher_package
enso-bundle-<triple>/:
? path: enso/
type: launcher_bundle
var: launcher_bundle
"project-manager-bundle-<triple>":
enso:
? path: enso/
type: project_manager_bundle
var: project_manager_bundle
dist/:
gui/:
assets/:
@@ -46,6 +61,8 @@
distribution/:
editions/:
<edition>.yaml:
enso.bundle.template:
launcher-manifest.yaml:
engine/:
runner-native/:
src/:
@@ -62,6 +79,9 @@
java/:
target/:
generated-java/:
parser-upload/:
test-results/:
scala-parser.js:
test/:
Benchmarks/:
tools/:
@@ -75,24 +95,39 @@
runner: # The runner native image (Linux only).
CHANGELOG.md:

project-manager/:
# Launcher Package
{ path: enso/, var: launcher_package }:
bin/:
enso<exe>:

# Project Manager Package
{ path: enso/, var: project_manager_package }:
bin/:
project-manager<exe>:

# Project Manager Bundle
# https://enso.org/docs/developer/enso/distribution/bundles.html#project-manager-bundle
{ path: project-manager/, var: project_manager_bundle }:
bin/:
project-manager<exe>:
dist/:
<version>/:
runtime/:
.enso.bundle:

# Engine Package
# Engine Package aka Enso Version Package
# https://enso.org/docs/developer/enso/distribution/distribution.html#layout-of-an-enso-version-package
{ path: enso-<version>/, var: engine_package }:
bin/:
components/:
editions/:
lib/:
Standard/:
manifest.yaml:

# Engine Bundle
# Launcher Bundle aka Portable Enso Distribution
# https://enso.org/docs/developer/enso/distribution/distribution.html#portable-enso-distribution-layout
{ path: enso/, var: engine_bundle }:
{ path: enso/, var: launcher_bundle }:
dist/:
<version>/:
edition/:
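The YAML tree above is consumed by the `paths.rs` generator, which turns every node into a strongly typed path struct. A minimal sketch of how such generated paths are used (the accessor chain mirrors the ones appearing later in this diff; the `enso_build` crate name is an assumption inferred from the `enso-build-<hash>` output path in the comment above):

// Sketch only, not part of this commit.
use enso_build::paths::generated;

fn engine_package_dir(repo_root: &generated::RepoRoot) -> std::path::PathBuf {
    // `built-distribution/enso-engine-<triple>/enso-<version>/` from the YAML tree
    // becomes a chain of generated fields:
    repo_root.built_distribution.enso_engine_triple.engine_package.to_path_buf()
}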
@@ -7,11 +7,12 @@ use crate::prelude::*;
use crate::get_graal_version;
use crate::get_java_major_version;
use crate::paths::generated;
use crate::paths::ComponentPaths;
use crate::paths::Paths;

use artifact::IsArtifact;
use bundle::IsBundle;
use ide_ci::future::AsyncPolicy;
use ide_ci::github::Repo;
use package::IsPackage;
use std::collections::BTreeSet;


@@ -19,9 +20,11 @@ use std::collections::BTreeSet;
// === Export ===
// ==============

pub mod artifact;
pub mod bundle;
pub mod context;
pub mod env;
pub mod package;
pub mod sbt;

pub use context::RunContext;
@@ -246,100 +249,51 @@ pub enum Operation {

#[derive(Clone, PartialEq, Eq, Debug, Default)]
pub struct BuiltArtifacts {
pub packages: BuiltPackageArtifacts,
pub bundles: BuiltBundleArtifacts,
pub engine_package: Option<generated::EnginePackage>,
pub launcher_package: Option<generated::LauncherPackage>,
pub project_manager_package: Option<generated::ProjectManagerPackage>,
pub launcher_bundle: Option<generated::LauncherBundle>,
pub project_manager_bundle: Option<generated::ProjectManagerBundle>,
}

#[derive(Clone, PartialEq, Eq, Debug, Default)]
pub struct BuiltPackageArtifacts {
pub engine: Option<ComponentPaths>,
pub launcher: Option<ComponentPaths>,
pub project_manager: Option<ComponentPaths>,
}

impl BuiltPackageArtifacts {
pub fn iter(&self) -> impl IntoIterator<Item = &ComponentPaths> {
[&self.engine, &self.launcher, &self.project_manager].into_iter().flat_map(|b| b.iter())
impl BuiltArtifacts {
pub fn packages(&self) -> Vec<&dyn IsPackage> {
let mut packages = Vec::<&dyn IsPackage>::new();
if let Some(engine) = &self.engine_package {
packages.push(engine);
}
}

impl IntoIterator for BuiltPackageArtifacts {
type Item = ComponentPaths;
type IntoIter = std::iter::Flatten<std::array::IntoIter<Option<ComponentPaths>, 3_usize>>;

fn into_iter(self) -> Self::IntoIter {
[self.engine, self.launcher, self.project_manager].into_iter().flatten()
if let Some(launcher) = &self.launcher_package {
packages.push(launcher);
}
}

#[derive(Clone, PartialEq, Eq, Debug, Default)]
pub struct BuiltBundleArtifacts {
pub launcher: Option<ComponentPaths>,
pub project_manager: Option<ComponentPaths>,
}

impl BuiltBundleArtifacts {
pub fn iter(&self) -> impl IntoIterator<Item = &ComponentPaths> {
[&self.project_manager, &self.launcher].into_iter().flat_map(|b| b.iter())
}
}

impl IntoIterator for BuiltBundleArtifacts {
type Item = ComponentPaths;
type IntoIter = std::iter::Flatten<std::array::IntoIter<Option<ComponentPaths>, 2_usize>>;

fn into_iter(self) -> Self::IntoIter {
[self.launcher, self.project_manager].into_iter().flatten()
}
}

pub async fn create_packages(paths: &Paths) -> Result<Vec<PathBuf>> {
let mut ret = Vec::new();
if paths.launcher.root.exists() {
debug!("Packaging launcher.");
ret.push(package_component(&paths.launcher).await?);
}
Ok(ret)
}

#[async_trait]
trait ComponentPathExt {
async fn pack(&self) -> Result;
fn clear(&self) -> Result;
}

#[async_trait]
impl ComponentPathExt for ComponentPaths {
async fn pack(&self) -> Result {
ide_ci::archive::create(&self.artifact_archive, [&self.dir]).await
}
fn clear(&self) -> Result {
ide_ci::fs::remove_dir_if_exists(&self.root)?;
ide_ci::fs::remove_file_if_exists(&self.artifact_archive)
}
}

pub async fn package_component(paths: &ComponentPaths) -> Result<PathBuf> {
#[cfg(not(target_os = "windows"))]
{
let pattern = paths
.dir
.join_iter(["bin", "*"])
.with_extension(std::env::consts::EXE_EXTENSION)
.display()
.to_string();
for binary in glob::glob(&pattern)? {
ide_ci::fs::allow_owner_execute(binary?)?;
if let Some(project_manager) = &self.project_manager_package {
packages.push(project_manager);
}
packages
}

ide_ci::archive::create(&paths.artifact_archive, [&paths.root]).await?;
Ok(paths.artifact_archive.clone())
pub fn bundles(&self) -> Vec<&dyn IsBundle> {
let mut bundles = Vec::<&dyn IsBundle>::new();
if let Some(launcher) = &self.launcher_bundle {
bundles.push(launcher);
}
if let Some(project_manager) = &self.project_manager_bundle {
bundles.push(project_manager);
}
bundles
}

pub fn artifacts(&self) -> Vec<&dyn IsArtifact> {
let mut artifacts = Vec::<&dyn IsArtifact>::new();
for package in self.packages() {
artifacts.push(package);
}
for bundle in self.bundles() {
artifacts.push(bundle);
}
artifacts
}
}

//////////////////////////////////


pub async fn deduce_graal(
client: Octocrab,
build_sbt: &generated::RepoRootBuildSbt,
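For context, a sketch of how the reworked `BuiltArtifacts` accessors are meant to be consumed; the same pattern appears in the release-upload code further down this diff. Written as if inside the build crate, so the prelude and `ReleaseHandle` are assumed to be in scope:

// Sketch only, not part of this commit.
use crate::prelude::*;
use crate::engine::BuiltArtifacts;
use ide_ci::github::release::ReleaseHandle;

pub async fn upload_all(artifacts: &BuiltArtifacts, release: ReleaseHandle) -> Result {
    // Packages and bundles are both `IsArtifact`s, so they share `upload_as_asset`.
    for package in artifacts.packages() {
        package.upload_as_asset(release.clone()).await?;
    }
    for bundle in artifacts.bundles() {
        bundle.upload_as_asset(release.clone()).await?;
    }
    Ok(())
}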
build/build/src/engine/artifact.rs (new file, 72 lines)
@@ -0,0 +1,72 @@
use crate::prelude::*;

use ide_ci::github::release::ReleaseHandle;
use octocrab::models::repos::Asset;



#[derive(Clone, Copy, Debug)]
pub enum ArtifactKind {
    EnginePackage,
    ProjectManagerPackage,
    LauncherPackage,
    ProjectManagerBundle,
    LauncherBundle,
}

/// A standalone SBT-generated artifact.
///
/// Either a package or a bundle with one of our backend components.
pub trait IsArtifact: AsRef<Path> + Send + Sync {
    /// Get the kind of this artifact.
    fn kind(&self) -> ArtifactKind;

    /// Remove the artifact from the disk.
    fn clear(&self) -> Result {
        ide_ci::fs::remove_dir_if_exists(self)
    }

    /// Get a filename stem for the compressed artifact.
    ///
    /// It will be used for naming release assets, so this should include the target triple.
    fn asset_file_stem(&self) -> Result<OsString> {
        // By the convention, the parent directory to the artifact bears its asset name.
        Ok(self.as_ref().try_parent()?.try_file_name()?.to_os_string())
    }

    fn upload_as_asset(&self, release: ReleaseHandle) -> BoxFuture<'static, Result<Asset>> {
        let path = self.as_ref().to_path_buf();
        let name = self.asset_file_stem();
        async move { release.upload_compressed_dir_as(path, name?).await }.boxed()
    }
}

impl IsArtifact for crate::paths::generated::EnginePackage {
    fn kind(&self) -> ArtifactKind {
        ArtifactKind::EnginePackage
    }
}

impl IsArtifact for crate::paths::generated::ProjectManagerPackage {
    fn kind(&self) -> ArtifactKind {
        ArtifactKind::ProjectManagerPackage
    }
}

impl IsArtifact for crate::paths::generated::ProjectManagerBundle {
    fn kind(&self) -> ArtifactKind {
        ArtifactKind::ProjectManagerBundle
    }
}

impl IsArtifact for crate::paths::generated::LauncherPackage {
    fn kind(&self) -> ArtifactKind {
        ArtifactKind::LauncherPackage
    }
}

impl IsArtifact for crate::paths::generated::LauncherBundle {
    fn kind(&self) -> ArtifactKind {
        ArtifactKind::LauncherBundle
    }
}
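A small, self-contained illustration of the naming convention that `asset_file_stem` relies on; this uses plain `std` instead of the `try_parent`/`try_file_name` helpers from `ide_ci`, and the example layout is taken from the doc comments in this commit:

// Sketch only: the parent directory of a package bears the asset name.
use std::ffi::OsString;
use std::path::Path;

fn asset_stem_of(package_dir: &Path) -> Option<OsString> {
    package_dir.parent()?.file_name().map(|name| name.to_os_string())
}

fn main() {
    // built-distribution/enso-engine-<triple>/enso-<version> from the example paths above.
    let dir = Path::new(
        "built-distribution/enso-engine-0.0.0-SNAPSHOT.2022-01-19-windows-amd64/enso-0.0.0-SNAPSHOT.2022-01-19",
    );
    assert_eq!(
        asset_stem_of(dir).unwrap(),
        OsString::from("enso-engine-0.0.0-SNAPSHOT.2022-01-19-windows-amd64")
    );
}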
@@ -1,94 +1,118 @@
use crate::prelude::*;

use crate::engine::ComponentPathExt;
use crate::paths::ComponentPaths;
use crate::paths::Paths;
use crate::engine::artifact::IsArtifact;
use crate::paths::generated::RepoRoot;

use anyhow::Context;
use ide_ci::programs::java::JAVA_HOME;
use ide_ci::cache::goodie::graalvm::locate_graal;



/// Bundle is like a [package][crate::paths::IsPackage] but with additional components bundled to
/// make it redistributable.
///
/// See the [official docs](https://enso.org/docs/developer/enso/distribution/bundles.html).
#[async_trait]
pub trait Bundle {
const PREFIX: &'static str;
const DIRNAME: &'static str;

fn base_distribution(paths: &Paths) -> &ComponentPaths;

fn suggest_paths(paths: &Paths) -> ComponentPaths {
ComponentPaths::new(&paths.build_dist_root, Self::PREFIX, Self::DIRNAME, &paths.triple)
pub trait IsBundle: AsRef<Path> + IsArtifact {
fn clear(&self) -> Result {
ide_ci::fs::remove_dir_if_exists(self.as_ref())
}

async fn create(paths: &Paths) -> Result<ComponentPaths> {
let bundle = Self::suggest_paths(paths);
/// Path to the directory where GraalVM is placed.
fn graalvm_dir(&self) -> PathBuf;

bundle.clear()?;
/// Path to the directory where Engine package is placed.
fn engine_dir(&self) -> PathBuf;

let base_component = Self::base_distribution(paths);
ide_ci::fs::copy(&base_component.root, &bundle.root)?;
/// Path to the component that will be used as a bundle base.
fn base_component(&self, repo_root: &RepoRoot) -> PathBuf;

/// Path to the bundle marker file.
///
/// It is a file used by bundled executable to discern whether it is running from a bundle or
/// from a regular installation.
fn distribution_marker(&self) -> PathBuf;

/// Creates a bundle for a given component. This requires already built:
/// * the base component package (e.g. launcher package for launcher bundle);
/// * the engine package;
/// * the GraalVM package.
/// *
///
/// `bundle_dir` is like:
/// ```text
/// H:\NBO\enso\built-distribution\enso-engine-0.0.0-SNAPSHOT.2022-01-19-windows-amd64\enso-0.0.0-SNAPSHOT.2022-01-19
/// ```
fn create(&self, repo_root: &RepoRoot) -> BoxFuture<'static, Result> {
let bundle_dir = self.as_ref().to_path_buf();
let base_component = self.base_component(repo_root);
let engine_src_path =
repo_root.built_distribution.enso_engine_triple.engine_package.clone();
let engine_target_dir = self.engine_dir();
let graalvm_dir = self.graalvm_dir();
let distribution_marker = self.distribution_marker();

async move {
ide_ci::fs::tokio::remove_dir_if_exists(&bundle_dir).await?;
// Start with bundled component.
ide_ci::fs::copy(&base_component, bundle_dir)?;
// Add engine.
let bundled_engine_dir = bundle.dir.join("dist").join(paths.version().to_string());
place_component_at(&paths.engine, &bundled_engine_dir).await?;

ide_ci::fs::mirror_directory(&engine_src_path, &engine_target_dir).await?;
// Add GraalVM runtime.
place_graal_under(bundle.dir.join("runtime")).await?;

place_graal_under(graalvm_dir).await?;
// Add portable distribution marker.
ide_ci::fs::copy(
paths.repo_root.join_iter(["distribution", "enso.bundle.template"]),
bundle.dir.join(".enso.bundle"),
)?;
Ok(bundle)
ide_ci::fs::create(distribution_marker)?;
Ok(())
}
.boxed()
}
}

#[derive(Clone, Copy, Debug)]
pub struct Launcher;
impl Bundle for Launcher {
const PREFIX: &'static str = "enso-bundle";
const DIRNAME: &'static str = "enso";
fn base_distribution(paths: &Paths) -> &ComponentPaths {
&paths.launcher
impl IsBundle for crate::paths::generated::ProjectManagerBundle {
fn graalvm_dir(&self) -> PathBuf {
self.runtime.path.clone()
}
}

#[derive(Clone, Copy, Debug)]
pub struct ProjectManager;
impl Bundle for ProjectManager {
const PREFIX: &'static str = "project-manager-bundle";
const DIRNAME: &'static str = "enso";
fn base_distribution(paths: &Paths) -> &ComponentPaths {
&paths.project_manager
fn engine_dir(&self) -> PathBuf {
self.dist.version.to_path_buf()
}
}

#[context("Placing a GraalVM package under {}", target_directory.as_ref().display())]
pub async fn place_graal_under(target_directory: impl AsRef<Path>) -> Result {
let graal_path = {
let java_home = JAVA_HOME.get()?;
if TARGET_OS == OS::MacOS {
// On macOS we need to drop trailing `/Contents/Home` from the path.
java_home
.parent()
.and_then(|p| p.parent())
.context(format!("Invalid Java home for macOS: {}", java_home.display()))?
fn base_component(&self, repo_root: &RepoRoot) -> PathBuf {
repo_root
.built_distribution
.enso_project_manager_triple
.project_manager_package
.to_path_buf()
} else {
java_home
}
};
let graal_dirname = graal_path
.file_name()
.context(anyhow!("Invalid Graal Path deduced from JAVA_HOME: {}", graal_path.display()))?;

fn distribution_marker(&self) -> PathBuf {
self.enso_bundle.to_path_buf()
}
}

impl IsBundle for crate::paths::generated::LauncherBundle {
fn graalvm_dir(&self) -> PathBuf {
self.runtime.path.clone()
}

fn engine_dir(&self) -> PathBuf {
self.dist.version.to_path_buf()
}

fn base_component(&self, repo_root: &RepoRoot) -> PathBuf {
repo_root.built_distribution.enso_launcher_triple.launcher_package.to_path_buf()
}

fn distribution_marker(&self) -> PathBuf {
self.enso_portable.to_path_buf()
}
}

/// Places a copy of the GraalVM's installation directory in the target directory.
///
/// The GraalVM installation will be located using [`locate_graal`] function.
#[context("Failed to place a GraalVM package under {}.", target_directory.as_ref().display())]
pub async fn place_graal_under(target_directory: impl AsRef<Path>) -> Result {
let graal_path = locate_graal()?;
let graal_dirname = graal_path.try_file_name()?;
ide_ci::fs::mirror_directory(&graal_path, target_directory.as_ref().join(graal_dirname)).await
}

#[context("Placing a Enso Engine package in {}", target_engine_dir.as_ref().display())]
pub async fn place_component_at(
engine_paths: &ComponentPaths,
target_engine_dir: impl AsRef<Path>,
) -> Result {
ide_ci::fs::mirror_directory(&engine_paths.dir, &target_engine_dir).await
}
@@ -1,14 +1,12 @@
use crate::prelude::*;

use crate::engine;
use crate::engine::bundle::Bundle;
use crate::engine::download_project_templates;
use crate::engine::env;
use crate::engine::sbt::SbtCommandProvider;
use crate::engine::Benchmarks;
use crate::engine::BuildConfigurationResolved;
use crate::engine::BuiltArtifacts;
use crate::engine::ComponentPathExt;
use crate::engine::Operation;
use crate::engine::ReleaseCommand;
use crate::engine::ReleaseOperation;
@@ -20,6 +18,7 @@ use crate::enso::IrCaches;
use crate::paths::cache_directory;
use crate::paths::Paths;
use crate::paths::TargetTriple;
use crate::paths::ENSO_TEST_JUNIT_DIR;
use crate::project::ProcessWrapper;

use ide_ci::actions::workflow::is_in_env;
@@ -30,6 +29,8 @@ use ide_ci::programs::graal;
use ide_ci::programs::sbt;
use ide_ci::programs::Flatc;
use ide_ci::programs::Sbt;
use std::env::consts::DLL_EXTENSION;
use std::env::consts::EXE_EXTENSION;
use sysinfo::SystemExt;


@@ -75,6 +76,34 @@ impl RunContext {
Ok(context)
}

pub fn expected_artifacts(&self) -> BuiltArtifacts {
BuiltArtifacts {
engine_package: self.config.build_engine_package.then(|| {
self.repo_root.built_distribution.enso_engine_triple.engine_package.clone()
}),
launcher_package: self.config.build_launcher_package.then(|| {
self.repo_root.built_distribution.enso_launcher_triple.launcher_package.clone()
}),
project_manager_package: self.config.build_project_manager_package.then(|| {
self.repo_root
.built_distribution
.enso_project_manager_triple
.project_manager_package
.clone()
}),
launcher_bundle: self.config.build_launcher_bundle.then(|| {
self.repo_root.built_distribution.enso_bundle_triple.launcher_bundle.clone()
}),
project_manager_bundle: self.config.build_project_manager_bundle.then(|| {
self.repo_root
.built_distribution
.project_manager_bundle_triple
.project_manager_bundle
.clone()
}),
}
}

/// Check that required programs are present (if not, installs them, if supported). Set
/// environment variables for the build to follow.
pub async fn prepare_build_env(&self) -> Result {
@@ -179,8 +208,6 @@ impl RunContext {
}

pub async fn build(&self) -> Result<BuiltArtifacts> {
let mut ret = BuiltArtifacts::default();

self.prepare_build_env().await?;
if ide_ci::ci::run_in_ci() {
// On CI we remove IR caches. They might contain invalid or outdated data, as are using
@@ -196,7 +223,10 @@ impl RunContext {

if self.config.test_standard_library {
// If we run tests, make sure that old and new results won't end up mixed together.
ide_ci::fs::reset_dir(&self.paths.test_results)?;
let test_results_dir = ENSO_TEST_JUNIT_DIR
.get()
.unwrap_or_else(|_| self.paths.repo_root.target.test_results.path.clone());
ide_ci::fs::reset_dir(test_results_dir)?;
}

// Workaround for incremental compilation issue, as suggested by kustosz.
@@ -255,13 +285,13 @@ impl RunContext {
let build_native_runner =
self.config.build_engine_package() && big_memory_machine && TARGET_OS != OS::Windows;


if big_memory_machine {
let mut tasks = vec![];

if self.config.build_engine_package() {
tasks.push("buildEngineDistribution");
tasks.push("engine-runner/assembly");
ret.packages.engine = Some(self.paths.engine.clone());
}
if build_native_runner {
tasks.push("engine-runner/buildNativeImage");
@@ -275,12 +305,10 @@ impl RunContext {

if self.config.build_project_manager_package() {
tasks.push("buildProjectManagerDistribution");
ret.packages.project_manager = Some(self.paths.project_manager.clone());
}

if self.config.build_launcher_package() {
tasks.push("buildLauncherDistribution");
ret.packages.launcher = Some(self.paths.launcher.clone());
}

// This just compiles benchmarks, not run them. At least we'll know that they can be
@@ -343,6 +371,34 @@ impl RunContext {
sbt.call_arg(task).await?;
}
}
} // End of Sbt run.

let ret = self.expected_artifacts();

// Native images built by GraalVM on Windows use MSVC build tools. Thus, the generated
// binaries have MSVC CRT (C++ standard library) linked. This means that we need to ship
// the MSVC CRT DLLs along with the binaries.
if TARGET_OS == OS::Windows {
debug!("Copying MSVC CRT DLLs to the distribution.");
for package in ret.packages() {
let package_dir = package.dir();
let binary_extensions = [EXE_EXTENSION, DLL_EXTENSION];
let binaries = binary_extensions
.into_iter()
.map(|extension| {
let pattern = package_dir.join_iter(["**", "*"]).with_extension(extension);
glob::glob(pattern.as_str())?.try_collect_vec()
})
.try_collect_vec()?
.into_iter()
.flatten()
.collect_vec();

debug!(?binaries, "Found executables in the package.");
for binary in binaries {
ide_ci::packaging::add_msvc_redist_dependencies(&binary).await?;
}
}
}

let enso = BuiltEnso { paths: self.paths.clone() };
@@ -388,8 +444,8 @@ impl RunContext {
// Build the Parser JS Bundle
sbt.call_arg("syntaxJS/fullOptJS").await?;
ide_ci::fs::copy_to(
self.paths.target.join("scala-parser.js"),
self.paths.target.join("parser-upload"),
&self.paths.repo_root.target.scala_parser_js,
&self.paths.repo_root.target.parser_upload,
)?;
}

@@ -399,10 +455,12 @@ impl RunContext {
}

if self.config.build_engine_package() {
let std_libs = self.paths.engine.dir.join("lib").join("Standard");
let std_libs =
&self.repo_root.built_distribution.enso_engine_triple.engine_package.lib.standard;
// let std_libs = self.paths.engine.dir.join("lib").join("Standard");
// Compile the Standard Libraries (Unix)
debug!("Compiling standard libraries under {}", std_libs.display());
for entry in ide_ci::fs::read_dir(&std_libs)? {
for entry in ide_ci::fs::read_dir(std_libs)? {
let entry = entry?;
let target = entry.path().join(self.paths.version().to_string());
enso.compile_lib(target)?.run_ok().await?;
@@ -432,6 +490,7 @@ impl RunContext {
// );
// }


// Verify License Packages in Distributions
// FIXME apparently this does not work on Windows due to some CRLF issues?
if self.config.verify_packages && TARGET_OS != OS::Windows {
@@ -442,23 +501,18 @@ impl RunContext {
test $engineversion = $refversion || (echo "Tag version $refversion and the engine version $engineversion do not match" && false)
*/

if self.config.build_engine_package() {
sbt.verify_generated_package("engine", &self.paths.engine.dir).await?;
}
if self.config.build_launcher_package() {
sbt.verify_generated_package("launcher", &self.paths.launcher.dir).await?;
}
if self.config.build_project_manager_package() {
sbt.verify_generated_package("project-manager", &self.paths.project_manager.dir)
.await?;
for package in ret.packages() {
package.verify_package_sbt(&sbt).await?;
}
if self.config.build_engine_package {
for libname in ["Base", "Table", "Image", "Database"] {
let lib_path = self
.paths
.engine
.dir
.join_iter(["lib", "Standard", libname])
.repo_root
.built_distribution
.enso_engine_triple
.engine_package
.lib
.join_iter(["Standard", libname])
.join(self.paths.version().to_string());
sbt.verify_generated_package(libname, lib_path).await?;
}
@@ -483,14 +537,8 @@ impl RunContext {
}
}

if self.config.build_launcher_bundle {
ret.bundles.launcher =
Some(crate::engine::bundle::Launcher::create(&self.paths).await?);
}

if self.config.build_project_manager_bundle {
ret.bundles.project_manager =
Some(crate::engine::bundle::ProjectManager::create(&self.paths).await?);
for bundle in ret.bundles() {
bundle.create(&self.repo_root).await?;
}

Ok(ret)
@@ -507,13 +555,11 @@ impl RunContext {
repo,
release_id,
);
for package in artifacts.packages.into_iter() {
package.pack().await?;
release.upload_asset_file(package.artifact_archive).await?;
for package in artifacts.packages() {
package.upload_as_asset(release.clone()).await?;
}
for bundle in artifacts.bundles.into_iter() {
bundle.pack().await?;
release.upload_asset_file(bundle.artifact_archive).await?;
for bundle in artifacts.bundles() {
bundle.upload_as_asset(release.clone()).await?;
}
if TARGET_OS == OS::Linux {
release.upload_asset_file(self.paths.manifest_file()).await?;
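The Windows branch above is the heart of this commit; as a standalone sketch it boils down to the following (same calls, pulled out of `RunContext::build` for readability; assumes the `enso-build` prelude and the `ide_ci` helpers added by this commit):

// Sketch only, not part of this commit.
use crate::prelude::*;

/// Copy the MSVC CRT DLLs next to every executable and dynamic library in the package.
async fn bundle_msvc_crt_into(package_dir: &Path) -> Result {
    use std::env::consts::{DLL_EXTENSION, EXE_EXTENSION};
    for extension in [EXE_EXTENSION, DLL_EXTENSION] {
        let pattern = package_dir.join_iter(["**", "*"]).with_extension(extension);
        for binary in glob::glob(pattern.as_str())? {
            ide_ci::packaging::add_msvc_redist_dependencies(&binary?).await?;
        }
    }
    Ok(())
}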
build/build/src/engine/package.rs (new file, 48 lines)
@@ -0,0 +1,48 @@
use crate::prelude::*;

use crate::engine::artifact::IsArtifact;
use crate::engine::sbt::SbtCommandProvider;



/// Package is a minimal artifact with some backend component.
pub trait IsPackage: IsArtifact {
    /// Get the package name that is recognized by the SBT build scripts.
    ///
    /// It can be used e.g. to verify the package by invoking `enso/verifyGeneratedPackage` task.
    fn sbt_package_name(&self) -> &str;

    /// Primary directory of the package.
    ///
    /// E.g. for the Engine package it is like
    /// `H:\NBO\enso\built-distribution\enso-engine-0.0.0-SNAPSHOT.2022-01-19-windows-amd64\enso-0.
    /// 0.0-SNAPSHOT.2022-01-19`.
    fn dir(&self) -> &Path {
        self.as_ref()
    }

    /// Invokes `enso/verifyGeneratedPackage` task on this package.
    fn verify_package_sbt(&self, sbt: &crate::engine::sbt::Context) -> BoxFuture<'static, Result> {
        let package_name = self.sbt_package_name();
        let dir = self.dir();
        sbt.verify_generated_package(package_name, dir)
    }
}

impl IsPackage for crate::paths::generated::EnginePackage {
    fn sbt_package_name(&self) -> &str {
        "engine"
    }
}

impl IsPackage for crate::paths::generated::ProjectManagerPackage {
    fn sbt_package_name(&self) -> &str {
        "project-manager"
    }
}

impl IsPackage for crate::paths::generated::LauncherPackage {
    fn sbt_package_name(&self) -> &str {
        "launcher"
    }
}
@@ -67,7 +67,7 @@ pub struct BuiltEnso {
impl BuiltEnso {
pub fn wrapper_script_path(&self) -> PathBuf {
let filename = format!("enso{}", if TARGET_OS == OS::Windows { ".bat" } else { "" });
self.paths.engine.dir.join_iter(["bin", &filename])
self.paths.repo_root.built_distribution.enso_engine_triple.engine_package.bin.join(filename)
}

pub async fn run_benchmarks(&self, opt: BenchmarkOptions) -> Result {
@@ -1,4 +1,6 @@
// === Features ===
#![feature(trait_upcasting)]
#![feature(try_blocks)]
#![feature(hash_set_entry)]
#![feature(type_alias_impl_trait)]
#![feature(trait_alias)]
@@ -3,6 +3,7 @@ use crate::prelude::*;
use crate::version::Versions;

use std::env::consts::EXE_EXTENSION;
use std::env::consts::EXE_SUFFIX;
use std::fmt::Formatter;


@@ -43,58 +44,14 @@ pub const LIBRARIES_TO_TEST: [&str; 6] = [
"Visualization_Tests",
];

pub const ARCHIVE_EXTENSION: &str = match TARGET_OS {
OS::Windows => "zip",
_ => "tar.gz",
};

pub fn new_repo_root(repo_root: impl Into<PathBuf>, triple: &TargetTriple) -> generated::RepoRoot {
generated::RepoRoot::new_root(repo_root, triple.to_string(), triple.versions.edition_name())
}

#[derive(Clone, PartialEq, Eq, Debug, Default)]
pub struct ComponentPaths {
// e.g. `enso-engine-0.0.0-SNAPSHOT.2022-01-19-windows-amd64`
pub name: PathBuf,
// e.g. H:\NBO\enso\built-distribution\enso-engine-0.0.0-SNAPSHOT.2022-01-19-windows-amd64
pub root: PathBuf,
// e.g. H:\NBO\enso\built-distribution\enso-engine-0.0.0-SNAPSHOT.2022-01-19-windows-amd64\
// enso-0.0.0-SNAPSHOT.2022-01-19
pub dir: PathBuf,
// e.g. H:\NBO\enso\built-distribution\enso-engine-0.0.0-SNAPSHOT.2022-01-19-windows-amd64.zip
pub artifact_archive: PathBuf,
}

impl ComponentPaths {
pub fn new(
build_root: &Path, // e.g. H:\NBO\enso\built-distribution
name_prefix: &str,
dirname: &str,
triple: &TargetTriple,
) -> Self {
let name = PathBuf::from(iformat!("{name_prefix}-{triple.engine()}"));
let root = build_root.join(&name);
let dir = root.join(dirname);
let artifact_archive = root.with_appended_extension(ARCHIVE_EXTENSION);
Self { name, root, dir, artifact_archive }
}

pub async fn emit_to_actions(&self, prefix: &str) -> Result {
let paths = [
("NAME", &self.name),
("ROOT", &self.root),
("DIR", &self.dir),
("ARCHIVE", &self.artifact_archive),
];
for (what, path) in paths {
ide_ci::actions::workflow::set_env(
&iformat!("{prefix}_DIST_{what}"),
&path.to_string_lossy(),
generated::RepoRoot::new_root(
repo_root,
triple.versions.edition_name(),
EXE_SUFFIX,
triple.to_string(),
triple.versions.version.to_string(),
)
.await?;
}
Ok(())
}
}

pub fn pretty_print_arch(arch: Arch) -> &'static str {
@@ -147,13 +104,10 @@ impl Display for TargetTriple {
#[derive(Clone, Debug)]
pub struct Paths {
pub repo_root: generated::RepoRoot,
pub build_dist_root: PathBuf,
pub target: PathBuf,
pub launcher: ComponentPaths,
pub engine: ComponentPaths,
pub project_manager: ComponentPaths,
// pub launcher: ComponentPaths,
// pub engine: ComponentPaths,
// pub project_manager: ComponentPaths,
pub triple: TargetTriple,
pub test_results: PathBuf,
}

impl Paths {
@@ -165,28 +119,7 @@ impl Paths {
pub fn new_triple(repo_root: impl Into<PathBuf>, triple: TargetTriple) -> Result<Self> {
let repo_root: PathBuf = repo_root.into().absolutize()?.into();
let repo_root = new_repo_root(repo_root, &triple);
let build_dist_root = repo_root.join("built-distribution");
let target = repo_root.join("target");
let launcher = ComponentPaths::new(&build_dist_root, "enso-launcher", "enso", &triple);
let engine = ComponentPaths::new(
&build_dist_root,
"enso-engine",
&format!("enso-{}", &triple.versions.version),
&triple,
);
let project_manager =
ComponentPaths::new(&build_dist_root, "enso-project-manager", "enso", &triple);
let test_results = target.join("test-results");
Ok(Paths {
repo_root,
build_dist_root,
target,
launcher,
engine,
project_manager,
triple,
test_results,
})
Ok(Paths { repo_root, triple })
}

/// Create a new set of paths for building the Enso with a given version number.
@@ -204,18 +137,9 @@ impl Paths {
/// Sets the environment variables in the current process and in GitHub Actions Runner (if being
/// run in its environment), so future steps of the job also have access to them.
pub async fn emit_env_to_actions(&self) -> Result {
let components = [
("ENGINE", &self.engine),
("LAUNCHER", &self.launcher),
("PROJECTMANAGER", &self.project_manager),
];

for (prefix, paths) in components {
paths.emit_to_actions(prefix).await?;
}

ide_ci::actions::workflow::set_env("TARGET_DIR", &self.target.to_string_lossy()).await?;
ENSO_TEST_JUNIT_DIR.set_workflow_env(self.test_results.as_path()).await?;
// TODO [mwu]: Check if TARGET_DIR is needed. If so, create a strongly typed wrapper.
// ide_ci::actions::workflow::set_env("TARGET_DIR", &self.repo_root.target).await?;
ENSO_TEST_JUNIT_DIR.set_workflow_env(self.repo_root.target.test_results.as_ref()).await?;
Ok(())
}

@@ -236,18 +160,21 @@ impl Paths {
}

pub fn manifest_file(&self) -> PathBuf {
self.engine.dir.join("manifest.yaml")
self.repo_root
.built_distribution
.enso_engine_triple
.engine_package
.manifest_yaml
.to_path_buf()
}

pub fn launcher_manifest_file(&self) -> PathBuf {
self.distribution().join("launcher-manifest.yaml")
self.repo_root.distribution.launcher_manifest_yaml.to_path_buf()
}

// e.g. enso2\distribution\editions\2021.20-SNAPSHOT.yaml
pub fn edition_file(&self) -> PathBuf {
self.distribution()
.join_iter(["editions", &self.edition_name()])
.with_appended_extension("yaml")
self.repo_root.distribution.editions.edition_yaml.to_path_buf()
}

pub async fn upload_edition_file_artifact(&self) -> Result {
@@ -23,7 +23,7 @@ pub mod project_manager {
}
}

pub fn spawn_from(bundle: &crate::paths::generated::ProjectManager) -> Command {
pub fn spawn_from(bundle: &crate::paths::generated::ProjectManagerBundle) -> Command {
let binary_path = bundle.bin.project_managerexe.as_path();
let mut command = <ProjectManager as Program>::Command::new(binary_path);
// We do this, because Project Manager runs until newline is input. We need to create a pipe
@@ -46,7 +46,7 @@ impl BuildInput {
pub struct Artifact {
/// Location of the Project Manager distribution.
#[derivative(Debug(format_with = "std::fmt::Display::fmt"))]
pub path: crate::paths::generated::ProjectManager,
pub path: crate::paths::generated::ProjectManagerBundle,
/// Versions of Engine that are bundled in this Project Manager distribution.
///
/// Technically a Project Manager bundle can be shipped with arbitrary number of Enso Engine
@@ -71,7 +71,7 @@ impl IsArtifact for Artifact {}
/// distribution.
#[context("Failed to list bundled engine versions: {}", project_manager_bundle)]
pub async fn bundled_engine_versions(
project_manager_bundle: &crate::paths::generated::ProjectManager,
project_manager_bundle: &crate::paths::generated::ProjectManagerBundle,
) -> Result<Vec<Version>> {
let mut ret = vec![];

@@ -111,12 +111,22 @@ impl IsTarget for Backend {
}

fn adapt_artifact(self, path: impl AsRef<Path>) -> BoxFuture<'static, Result<Self::Artifact>> {
let path = crate::paths::generated::ProjectManager::new_root(
path.as_ref(),
self.target_os.exe_suffix(),
let exe_suffix = self.target_os.exe_suffix().to_owned();
let path = path.as_ref().to_owned();
let provisional_path = crate::paths::generated::ProjectManagerBundle::new_root(
&path,
&exe_suffix,
"<unknown version>",
);
async move {
let engine_versions = bundled_engine_versions(&path).await?;
let engine_versions = bundled_engine_versions(&provisional_path).await?;
let path = crate::paths::generated::ProjectManagerBundle::new_root(
&path,
&exe_suffix,
engine_versions
.last()
.map_or_else(|| "<unknown version>".to_string(), |v| v.to_string()),
);
Ok(Artifact { path, engine_versions })
}
.boxed()
@@ -143,8 +153,8 @@ impl IsTarget for Backend {
let context = inner.prepare_context(context, config)?;
let artifacts = context.build().await?;
let project_manager =
artifacts.bundles.project_manager.context("Missing project manager bundle!")?;
ide_ci::fs::mirror_directory(&project_manager.dir, &destination).await?;
artifacts.project_manager_bundle.context("Missing project manager bundle!")?;
ide_ci::fs::mirror_directory(&project_manager, &destination).await?;
this.adapt_artifact(destination).await
}
.boxed()
@@ -2,6 +2,7 @@

use crate::prelude::*;

use crate::engine::package::IsPackage;
use crate::engine::BuildConfigurationFlags;
use crate::paths::generated::EnginePackage;
use crate::paths::TargetTriple;
@@ -55,12 +56,10 @@ impl IsTarget for Runtime {
context
.and_then_async(|context| async move {
let artifacts = context.build().await?;
let engine_package = artifacts
.packages
.engine
.context("Failed to find engine package artifacts.")?;
ide_ci::fs::mirror_directory(&engine_package.dir, &destination).await?;
this.adapt_artifact(engine_package.dir).await
let engine_package =
artifacts.engine_package.context("Failed to find engine package artifacts.")?;
ide_ci::fs::mirror_directory(engine_package.dir(), &destination).await?;
this.adapt_artifact(engine_package.dir()).await
})
.boxed()
}
@@ -1,7 +1,8 @@
use crate::prelude::*;

use crate::paths::TargetTriple;
use crate::paths::ARCHIVE_EXTENSION;

use ide_ci::github::release::ARCHIVE_EXTENSION;


@@ -25,6 +25,7 @@ use octocrab::models::repos::Release;
use octocrab::params::repos::Reference;
use reqwest::Response;
use serde_json::json;
use std::env::consts::EXE_SUFFIX;
use tempfile::tempdir;


@@ -187,7 +188,13 @@ pub async fn get_engine_package<R: IsRepo>(
triple: &TargetTriple,
) -> Result<generated::EnginePackage> {
let release_id = crate::env::ENSO_RELEASE_ID.get()?;
let package_name = generated::RepoRootBuiltDistribution::new_root(".", triple.to_string())
let package_name = generated::RepoRootBuiltDistribution::new_root(
".",
triple.versions.edition_name(),
EXE_SUFFIX,
triple.to_string(),
triple.versions.version.to_string(),
)
.enso_engine_triple
.file_name()
.context("Failed to get Engine Package name.")?
@@ -18,6 +18,7 @@ chrono = { version = "0.4.19", features = ["serde"] }
clap = { version = "3.1.5", features = ["derive", "env"] }
cron = "0.12.0"
data-encoding = "2.3.2"
dependency_runner = "1.0.0"
derivative = "2.2.0"
derive_more = "0.99"
dirs = "4.0.0"
@@ -151,7 +151,7 @@ pub async fn upload_compressed_directory(
let archive_path = tempdir.path().join(format!("{artifact_name}.tar.gz"));

info!("Packing {} to {}", path_to_upload.as_ref().display(), archive_path.display());
crate::archive::compress_directory(&archive_path, path_to_upload).await?;
crate::archive::compress_directory_contents(&archive_path, path_to_upload).await?;

info!("Starting upload of {artifact_name}.");
upload_single_file(&archive_path, artifact_name).await?;
@@ -34,7 +34,7 @@ define_env_var! {
/// The name of the person or app that initiated the workflow. For example, `octocat`.
GITHUB_ACTOR, String;

/// Returns the API URL. For example: https://api.github.com.
/// Returns the API URL. For example: <https://api.github.com>.
GITHUB_API_URL, Url;

/// The name of the base ref or target branch of the pull request in a workflow run. This is
@@ -58,7 +58,7 @@ define_env_var! {
/// For example, `/github/workflow/event.json`.
GITHUB_EVENT_PATH, PathBuf;

/// Returns the GraphQL API URL. For example: https://api.github.com/graphql.
/// Returns the GraphQL API URL. For example: <https://api.github.com/graphql>.
GITHUB_GRAPHQL_URL, Url;

/// The head ref or source branch of the pull request in a workflow run. This property is only
@@ -117,7 +117,7 @@ define_env_var! {
/// change if you re-run the workflow run. For example, 3.
GITHUB_RUN_NUMBER, usize;

/// The URL of the GitHub server. For example: https://github.com.
/// The URL of the GitHub server. For example: <https://github.com>.
GITHUB_SERVER_URL, Url;

/// The commit SHA that triggered the workflow. The value of this commit SHA depends on the
@@ -230,7 +230,7 @@ pub struct Push {

/// Common branch-related fields between some event triggers.
///
/// See: https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#onpull_requestpull_request_targetbranchesbranches-ignore
/// See: <https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#onpull_requestpull_request_targetbranchesbranches-ignore>
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct Branches {
@@ -246,7 +246,7 @@ impl Branches {
}
}

/// See: https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request
/// See: <https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request>
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum PullRequestActivityType {
@@ -110,12 +110,15 @@ pub fn is_archive_name(path: impl AsRef<Path>) -> bool {
Format::from_filename(path).is_ok()
}

/// Create an archive with directory contents.
///
/// Note that the archive will contain directory's children, not the directory itself.
#[tracing::instrument(
name="Packing directory.",
skip_all,
fields(src=%root_directory.as_ref().display(), dest=%output_archive.as_ref().display()),
err)]
pub async fn compress_directory(
pub async fn compress_directory_contents(
output_archive: impl AsRef<Path>,
root_directory: impl AsRef<Path>,
) -> Result {
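The rename above matters because `ide_ci` now has two archiving entry points with different root handling; a sketch contrasting them (written as if inside the `ide_ci` crate, with hypothetical output file names):

// Sketch only, not part of this commit.
use crate::prelude::*;

async fn pack_both_ways(dir: &Path) -> Result {
    // `archive::create` keeps `dir` itself as the top-level entry of the archive.
    crate::archive::create("with-root.tar.gz", [dir]).await?;
    // `compress_directory_contents` packs only the children of `dir`.
    crate::archive::compress_directory_contents("contents-only.tar.gz", dir).await?;
    Ok(())
}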
build/ci_utils/src/cache/goodie/graalvm.rs (vendored, 14 lines changed)
@@ -114,6 +114,20 @@ impl GraalVM {
}
}

/// Locate the directory with GraalVM installation.
///
/// It is deduced based on [`JAVA_HOME`] environment variable. Exact logic is os-specific.
#[context("Failed to locate GraalVM installation.")]
pub fn locate_graal() -> Result<PathBuf> {
let java_home = JAVA_HOME.get()?;
Ok(if TARGET_OS == OS::MacOS {
// On macOS we need to drop trailing `/Contents/Home` from the path.
java_home.try_parent()?.try_parent()?.to_path_buf()
} else {
java_home
})
}

#[cfg(test)]
mod tests {
use super::*;
@@ -1,3 +1,5 @@
//! Code supporting dealing with GitHub releases and their assets.

use crate::prelude::*;

use crate::github::Repo;
@@ -11,6 +13,15 @@ use tracing::instrument;



/// The extensions that will be used for the archives in the GitHub release assets.
///
/// On Windows we use `.zip`, because it has out-of-the-box support in the Explorer.
/// On other platforms we use `.tar.gz`, because it is a good default.
pub const ARCHIVE_EXTENSION: &str = match TARGET_OS {
OS::Windows => "zip",
_ => "tar.gz",
};

/// Types that uniquely identify a release and can be used to fetch it from GitHub.
pub trait IsRelease: Debug {
/// The release ID.
@@ -23,6 +34,7 @@ pub trait IsRelease: Debug {
fn octocrab(&self) -> &Octocrab;
}

/// Information about release that we can provide from the [`IsRelease`] trait.
#[async_trait]
pub trait IsReleaseExt: IsRelease + Sync {
/// Upload a new asset to the release.
@@ -61,12 +73,10 @@ pub trait IsReleaseExt: IsRelease + Sync {
#[instrument(skip_all, fields(source = %path.as_ref().display()))]
async fn upload_asset_file(&self, path: impl AsRef<Path> + Send) -> Result<Asset> {
let error_msg =
format!("Failed to upload an asset from the file under {}", path.as_ref().display());
format!("Failed to upload an asset from the file under {}.", path.as_ref().display());
async move {
let path = path.as_ref().to_path_buf();
let asset_name = path.file_name().with_context(|| {
format!("The given path {} does not contain a filename.", path.display())
})?;
let asset_name = path.try_file_name()?;
let content_type = new_mime_guess::from_path(&path).first_or_octet_stream();
let file_size = crate::fs::tokio::metadata(&path).await?.len();
let file = crate::fs::tokio::open_stream(&path).await?;
@@ -77,15 +87,32 @@ pub trait IsReleaseExt: IsRelease + Sync {
.context(error_msg)
}

async fn upload_compressed_dir(&self, path: impl AsRef<Path> + Send) -> Result<Asset> {
let dir_to_upload = path.as_ref();
/// Upload given directory as a release asset.
///
/// The given filename will be used, with appended [platform-specific
/// extension](ARCHIVE_EXTENSION).
async fn upload_compressed_dir_as(
&self,
dir_to_upload: impl AsRef<Path> + Send,
custom_name: impl AsRef<Path> + Send,
) -> Result<Asset> {
let dir_to_upload = dir_to_upload.as_ref();
let temp_dir = tempfile::tempdir()?;
let archive_path =
dir_to_upload.with_parent(temp_dir.path()).with_appended_extension("tar.gz");
crate::archive::compress_directory(&archive_path, &dir_to_upload).await?;
custom_name.with_parent(temp_dir.path()).with_appended_extension(ARCHIVE_EXTENSION);
crate::archive::create(&archive_path, [&dir_to_upload]).await?;
self.upload_asset_file(archive_path).await
}

/// Upload given directory as a release asset.
///
/// The archive name will be deduced from the directory name.
async fn upload_compressed_dir(&self, path: impl AsRef<Path> + Send) -> Result<Asset> {
let output_filename_stem = path.try_file_name()?.to_owned();
self.upload_compressed_dir_as(path, output_filename_stem).await
}

/// Get the information about the release.
async fn get(&self) -> Result<Release> {
self.octocrab()
.repos(self.repo().owner(), self.repo().name())
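A sketch of how the new `upload_compressed_dir_as` helper pairs with the artifact naming convention introduced in this commit (written as if inside the `ide_ci` crate; `release` is any `IsReleaseExt` implementor):

// Sketch only, not part of this commit.
use crate::prelude::*;
use crate::github::release::IsReleaseExt;

async fn publish_package(release: impl IsReleaseExt, package_dir: &Path) -> Result {
    // Name the asset after the package's parent directory; `upload_compressed_dir_as`
    // appends the platform-specific ARCHIVE_EXTENSION (".zip" on Windows, ".tar.gz" elsewhere).
    let stem = package_dir.try_parent()?.try_file_name()?.to_os_string();
    release.upload_compressed_dir_as(package_dir, stem).await?;
    Ok(())
}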
@@ -59,6 +59,7 @@ pub mod io;
pub mod log;
pub mod models;
pub mod os;
pub mod packaging;
pub mod path;
pub mod paths;
pub mod platform;
build/ci_utils/src/packaging.rs (new file, 72 lines)
@ -0,0 +1,72 @@
use crate::prelude::*;

use crate::fs::copy_to;
use crate::programs::vs;

use dependency_runner::executable::Executables;
use unicase::UniCase;



/// List of dynamically loaded dependencies.
///
/// While the code itself is portable, it supports only Windows binaries (PE files).
pub async fn pe_dependencies(binary: impl AsRef<Path>) -> Result<Executables> {
let binary = binary.as_ref().to_path_buf();
tokio::task::spawn_blocking(move || {
let mut query =
dependency_runner::query::LookupQuery::deduce_from_executable_location(&binary)
.with_context(|| {
format!("Failed to prepare dependency query for binary {}.", binary.display())
})?;
query.parameters.extract_symbols = true;
let lookup_path = dependency_runner::path::LookupPath::deduce(&query);
dependency_runner::runner::run(&query, &lookup_path).with_context(|| {
format!("Failed to run dependency query for binary {}.", binary.display())
})
})
.await?
}

/// Place all the required MSVC CRT dependencies next to the binary.
#[context("Failed to copy MSVC CRT dependencies for binary `{}`.", binary.display())]
pub async fn add_msvc_redist_dependencies(binary: &Path) -> Result {
let binary = binary.absolutize()?;
let binary_dir = binary.try_parent()?;
let dependencies = pe_dependencies(&binary).await?;
let msvc_redist_dlls = vs::redist::list_crt_dlls(vs::Platforms::local()?).await?;
// map filename -> full path of redist dll
// Be careful about casing! Windows is case-insensitive.
let msvc_redist_dlls: HashMap<_, _> = msvc_redist_dlls
.iter()
.flat_map(|path| path.file_name().map(|filename| (UniCase::new(filename.as_str()), path)))
.collect();
for dependency in dependencies.sorted_by_first_appearance() {
let filename = dependency.dllname.as_str();
if let Some(redist_path) = msvc_redist_dlls.get(&UniCase::new(filename)) {
trace!("{} is a redist dll: {}.", filename, redist_path.display());
copy_to(redist_path, binary_dir)?;
} else {
trace!("{} is not a redist dll.", filename);
}
}
Ok(())
}

#[cfg(test)]
mod tests {
use super::*;
use crate::programs::vs;

#[tokio::test]
#[ignore]
async fn lookup_dependencies() -> Result {
setup_logging()?;
vs::apply_dev_environment().await?;
let binary = Path::new(
r"H:\NBO\enso5\built-distribution\enso-project-manager-2022.1.1-dev-windows-amd64\enso\bin\project-manager.exe",
);
add_msvc_redist_dependencies(binary).await?;
Ok(())
}
}
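A minimal sketch of how this new module is meant to be driven during packaging (illustrative only, not part of the commit; the function name is made up, and the sketch assumes the crate prelude plus the items defined in this file and in `programs::vs`):

// Hypothetical packaging step for a Windows native image.
async fn bundle_crt(binary: &Path) -> Result {
    // `vcvarsall.bat` must have populated VCToolsRedistDir first.
    crate::programs::vs::apply_dev_environment().await?;
    // Copies the CRT DLLs (e.g. vcruntime140.dll) next to the binary,
    // based on its PE import table.
    add_msvc_redist_dependencies(binary).await?;
    Ok(())
}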
@ -8,6 +8,7 @@ use proc_macro2::Span;
use proc_macro2::TokenStream;
use quote::quote;
use regex::Regex;
use std::cell::OnceCell;
use std::collections::BTreeSet;
use std::iter::zip;
@ -110,11 +111,12 @@ pub fn get_string(
pub struct Generator<'a> {
all_nodes: &'a [&'a Node],
stack: Vec<&'a Node>,
empty_set: BTreeSet<Ident>,
}

impl<'a> Generator<'a> {
pub fn new(all_nodes: &'a [&'a Node]) -> Self {
Self { all_nodes, stack: default() }
Self { all_nodes, stack: default(), empty_set: default() }
}

pub fn resolve(&self, r#type: &str) -> Result<&Node> {
@ -164,7 +166,7 @@ impl<'a> Generator<'a> {
let children_struct =
children.iter().map(|child| child.field_type(full_path)).collect_vec();

let parameter_vars = last_node.all_parameters_vars();
let parameter_vars = last_node.all_parameters_vars(self)?;
let own_parameter_vars = last_node.own_parameter_vars();
let parent_parameter_vars: BTreeSet<_> =
full_path.iter().flat_map(|n| n.own_parameter_vars()).collect();
@ -173,8 +175,11 @@ impl<'a> Generator<'a> {
let child_parameter_vars: BTreeSet<_> = last_node
.children()
.iter()
.flat_map(|node| node.parameters.iter())
.map(to_ident)
.map(|node| node.all_parameters_vars(self))
.try_collect_vec()?
.into_iter()
.flatten()
.cloned()
.collect();
let all_parameters = {
let mut v = parent_parameter_vars;
@ -195,21 +200,23 @@ impl<'a> Generator<'a> {
let children_init = zip(last_node.children(), &children_struct)
.map(|(child, children_struct)| {
if let Some(r#_type) = child.r#type.as_ref() {
// FIXME this should resolve target type and use its parameters
Result::Ok(if let Some(r#_type) = child.r#type.as_ref() {
let resolved_type = self.resolve(r#_type)?;
let child_formatter = child.path_formatter();
let child_child_parameters = child.children_parameters();
println!("Resolved type: {}", resolved_type.path_formatter());
info!("Resolved type: {}", resolved_type.path_formatter());
let child_child_parameters = resolved_type.children_parameters(self)?;
quote! {
#children_struct::new_root(path.join(#child_formatter), #(&#child_child_parameters),*)
}
} else {
let child_parameters = child.all_parameters_vars();
let child_parameters = child.all_parameters_vars(self)?;
quote! {
#children_struct::new_under(&path, #(&#child_parameters),*)
}
}
})
.collect_vec();
})
.try_collect_vec()?;

let opt_conversions = if parameter_vars.is_empty() {
quote! {
@ -306,7 +313,7 @@ pub struct Node {
#[shrinkwrap(main_field)]
value: String,
/// All parameters needed for this node (directly and for the children).
parameters: BTreeSet<String>, // Wasteful but paths won't be that huge.
parameters: OnceCell<BTreeSet<Ident>>, // Wasteful but paths won't be that huge.
/// The name that replaces value in variable-like contexts.
/// Basically, we might not want to use the file path name as the name in the code.
var_name: Option<String>,
@ -351,8 +358,32 @@ impl Node {
}
}

pub fn all_parameters_vars(&self) -> BTreeSet<Ident> {
self.parameters.iter().sorted().map(to_ident).collect()
pub fn type_dependent_parameters_vars<'a>(
&'a self,
g: &'a Generator,
) -> Result<&'a BTreeSet<Ident>> {
if let Some(r#type) = &self.r#type {
let resolved_type = g.resolve(r#type)?;
resolved_type.all_parameters_vars(g)
} else {
Ok(&g.empty_set)
}
}

pub fn all_parameters_vars(&self, g: &Generator) -> Result<&BTreeSet<Ident>> {
self.parameters.get_or_try_init(|| {
let mut ret = BTreeSet::new();
for child in self.children() {
ret.extend(child.all_parameters_vars(g)?.clone());
}
ret.extend(self.own_parameter_vars());
ret.extend(self.type_dependent_parameters_vars(g)?.clone());
Ok(ret)
})
// let mut ret = BTreeSet::new();
// ret.extend(self.parameters.iter().sorted().map(to_ident));
// ret.extend(self.type_dependent_parameters_vars(g)?);
// Ok(ret)
}

pub fn own_parameters(&self) -> impl IntoIterator<Item = &str> {
@ -363,8 +394,25 @@ impl Node {
self.own_parameters().into_iter().map(to_ident).collect()
}

pub fn children_parameters(&self) -> BTreeSet<Ident> {
self.children().iter().flat_map(|child| child.parameters.iter()).map(to_ident).collect()
pub fn children_parameters(&self, g: &Generator) -> Result<BTreeSet<Ident>> {
let mut ret = BTreeSet::new();
for child in self.children() {
ret.extend(child.all_parameters_vars(g)?.clone());
}
Ok(ret)
// let resolved_type_params = if let Some(r#type) = &self.r#type {
// if let Ok(r#type) = g.resolve(r#type) {
// // TODO: This might not work for parameters that are type-introduced in the
// subtree // of the resolved type.
// r#type.all_parameters_vars(g).iter().map(to_ident).collect_vec()
// } else {
// warn!(%r#type, "Failed to resolve type.");
// default()
// }
// } else {
// default()
// };
// direct_child_params.chain(resolved_type_params).collect()
}

pub fn children(&self) -> &[Node] {
@ -440,18 +488,18 @@ pub fn generate(forest: Vec<Node>) -> Result<TokenStream> {
generator.generate()
}

pub fn collect_parameters(value: &mut Node) {
let mut child_parameters = BTreeSet::new();
for child in value.children_mut() {
collect_parameters(child);
child_parameters.extend(child.parameters.clone());
}

let own_parameters = PARAMETER.find_all(&value.value).into_iter().map(ToString::to_string);
value.parameters.extend(own_parameters);
value.parameters.extend(child_parameters);
debug!("{} has {} parameters", value.value, value.parameters.len());
}
// pub fn collect_parameters(value: &mut Node) {
// let mut child_parameters = BTreeSet::new();
// for child in value.children_mut() {
// collect_parameters(child);
// child_parameters.extend(child.parameters.clone());
// }
//
// let own_parameters = PARAMETER.find_all(&value.value).into_iter().map(ToString::to_string);
// value.parameters.extend(own_parameters);
// value.parameters.extend(child_parameters);
// debug!("{} has {} parameters", value.value, value.parameters.len());
// }

pub fn convert(value: &serde_yaml::Value) -> Result<Vec<Node>> {
match value {
@ -464,7 +512,7 @@ pub fn convert(value: &serde_yaml::Value) -> Result<Vec<Node>> {
node.add_child(child);
}
}
collect_parameters(&mut node);
// collect_parameters(&mut node);
ret.push(node)
}
Ok(ret)
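The key change in this file is that `Node::parameters` becomes a lazily computed, cached set (`OnceCell` plus `get_or_try_init`), resolved through the `Generator`. Below is a standalone sketch of that memoization pattern; it is illustrative only, uses the `once_cell` crate (the `std::cell::OnceCell::get_or_try_init` used in the diff was still an unstable std API at the time), and the `TreeNode` type and field names are made up.

use once_cell::unsync::OnceCell;
use std::collections::BTreeSet;

struct TreeNode {
    own: BTreeSet<String>,
    children: Vec<TreeNode>,
    // Cached union of `own` and all children's parameters.
    all: OnceCell<BTreeSet<String>>,
}

impl TreeNode {
    fn all_parameters(&self) -> Result<&BTreeSet<String>, String> {
        // Computed at most once; later calls return the cached reference.
        self.all.get_or_try_init(|| {
            let mut ret = self.own.clone();
            for child in &self.children {
                ret.extend(child.all_parameters()?.iter().cloned());
            }
            Ok(ret)
        })
    }
}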
@ -16,7 +16,7 @@ pub mod fmt;
/// Extra flags that Cargo invokes rustc with.
///
/// See: https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-reads
/// See: <https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-reads>
pub const CARGO_ENCODED_RUSTFLAGS: Separated =
Separated { separator: "\x1F", name: "CARGO_ENCODED_RUSTFLAGS" };
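For context, `CARGO_ENCODED_RUSTFLAGS` carries individual `rustc` flags joined by the ASCII 0x1F (unit separator) character, which is what the `separator: "\x1F"` above encodes. A tiny illustrative helper (the function name is made up):

// Joins rustc flags the way Cargo expects to find them in CARGO_ENCODED_RUSTFLAGS.
fn encode_rustflags<'a>(flags: impl IntoIterator<Item = &'a str>) -> String {
    flags.into_iter().collect::<Vec<_>>().join("\x1F")
}

// encode_rustflags(["--cfg", "tokio_unstable"]) == "--cfg\u{1F}tokio_unstable"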
@ -10,3 +10,16 @@ impl Program for Npx {
"npx"
}
}

#[cfg(test)]
mod tests {
use super::*;

#[tokio::test]
#[ignore]
async fn call_npx() -> Result {
setup_logging()?;
Npx.cmd()?.run_ok().await?;
Ok(())
}
}
@ -266,7 +266,7 @@ impl Tar {
#[cfg(test)]
pub mod tests {
use super::*;
use crate::archive::compress_directory;
use crate::archive::compress_directory_contents;
use crate::archive::extract_to;
use crate::log::setup_logging;
@ -296,7 +296,7 @@ pub mod tests {
let linked_temp = archive_temp.path().join("linked");
symlink::symlink_dir(temp.path(), &linked_temp)?;

compress_directory(&archive_path, &linked_temp).await?;
compress_directory_contents(&archive_path, &linked_temp).await?;
assert!(archive_path.exists());
assert!(archive_path.metadata()?.len() > 0);
@ -1,9 +1,66 @@
use crate::prelude::*;

use crate::define_env_var;
use crate::env;
use crate::programs::cmd;
use crate::programs::vswhere::VsWhere;


// ==============
// === Export ===
// ==============

pub mod redist;



/// Path components from [VS installation
/// root](crate::programs::vswhere::InstanceInfo::installation_path) to the developer command file.
/// See: <https://learn.microsoft.com/en-us/cpp/build/building-on-the-command-line?view=msvc-170#developer_command_file_locations>
pub const VC_VARS_ALL_PATH: [&str; 4] = ["VC", "Auxiliary", "Build", "vcvarsall.bat"];

/// Platforms that can serve as host/target of the MSVC toolchain.
///
/// Can be used as an argument to `vcvarsall.bat`.
#[derive(Clone, Copy, Debug, strum::Display, strum::EnumString)]
pub enum Platforms {
X64,
X86,
// In theory there's also an ARM64 platform, but we don't support it for Windows at the moment.
}

impl Platforms {
/// Get the native platform for the local system (host).
pub fn local() -> Result<Self> {
Ok(match TARGET_ARCH {
Arch::X86_64 => Platforms::X64,
Arch::X86 => Platforms::X86,
_ => bail!("Unsupported target architecture: {}.", TARGET_ARCH),
})
}
}

impl From<Platforms> for Arch {
fn from(platform: Platforms) -> Self {
match platform {
Platforms::X64 => Arch::X86_64,
Platforms::X86 => Arch::X86,
}
}
}

define_env_var! {
/// Target platform architecture. Expected to be the same as `VSCMD_ARG_TGT_ARCH`.
Platform, Platforms;

/// Location with MSVC CRT redistributables.
///
/// E.g. `%VCINSTALLDIR%Redist\MSVC\x.y.z`.
VCToolsRedistDir, PathBuf;

/// The telemetry introduces undesired dependency on Power Shell.
VSCMD_SKIP_SENDTELEMETRY, bool;
}

/// Microsoft C/C++ Optimizing compiler.
///
@ -18,21 +75,49 @@ impl Program for Cl {
}
}

pub async fn apply_dev_environment() -> Result {
/// Get the path to the `vcvarsall.bat` for the local VS installation.
///
/// Relies on `vswhere` to find installed VS instances.
pub async fn vc_vars_all_path() -> Result<PathBuf> {
let msvc = VsWhere::msvc().await?;
let path = msvc.installation_path.join_iter(["VC", "Auxiliary", "Build", "vcvarsall.bat"]);
let changes = cmd::compare_env(|command| {
// The telemetry introduces undesired dependency on Power Shell. We should not need it to
// just set a few environment variables.
command.arg(path).arg("x64").env("VSCMD_SKIP_SENDTELEMETRY", "true")
// See the official documentation:
// https://learn.microsoft.com/en-us/cpp/build/building-on-the-command-line#developer_command_file_locations
Ok(msvc.installation_path.join_iter(VC_VARS_ALL_PATH))
}

/// Capture changes in environment variables that are introduced by `vcvarsall.bat`.
///
/// The `vcvarsall.bat` script is part of Microsoft Visual C++ Build Tools. It is used to set up
/// environment variables for the MSVC compiler toolchain.
///
/// This function requires that [`vswhere`](VsWhere) is available in the `PATH`. It will be used to
/// locate installations of MSVC Build Tools / compiler.
pub async fn retrieve_dev_environment() -> Result<Vec<env::Modification>> {
let path = vc_vars_all_path().await?;
let platform = Platforms::local()?;

cmd::compare_env(|command| {
command
.arg(path)
.arg(platform.to_string())
.set_env(VSCMD_SKIP_SENDTELEMETRY, &true)
// Safety: this can only fail if `true` fails to pretty print, which is not possible.
.unwrap()
})
.await?;
for change in changes {
change.apply()?;
.await
}

/// Modifies the environment of the current process, as if `vcvarsall.bat` was executed.
pub async fn apply_dev_environment() -> Result {
let modifications = retrieve_dev_environment().await?;
for modification in modifications {
modification.apply()?;
}
Ok(())
}

/// A major Visual Studio version.
///
/// Serialization follows the VS Where `productLineVersion` format.
#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Debug, Serialize, Deserialize)]
pub enum Version {
@ -43,12 +128,3 @@ pub enum Version {
#[serde(rename = "2022")]
VS2022,
}

#[tokio::test]
#[ignore]
async fn foo() -> Result {
// let old_vars = dbg!(std::env::vars_os().map(|(name, _)| name).collect_vec());
apply_dev_environment().await?;
// let new_vars = dbg!(std::env::vars_os().collect_vec());
Ok(())
}
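Taken together, the additions to this module are meant to be used roughly as follows (illustrative only, not part of the commit; the function name is made up and the sketch assumes the crate prelude plus the items defined above):

// Hypothetical smoke test: enter the MSVC developer environment in-process,
// then read one of the variables declared via `define_env_var!` above.
async fn msvc_environment_sketch() -> Result {
    apply_dev_environment().await?;
    // After `vcvarsall.bat` ran, the VS scripts should have set this variable.
    let redist_dir = VCToolsRedistDir.get()?;
    debug!("MSVC CRT redistributables live under {}", redist_dir.display());
    Ok(())
}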
56
build/ci_utils/src/programs/vs/redist.rs
Normal file
@ -0,0 +1,56 @@
//! Support for dealing with Microsoft Visual C++ Redistributables.
//!
//! See <https://learn.microsoft.com/en-us/cpp/windows/redistributing-visual-cpp-files> for the
//! official documentation.

use crate::prelude::*;

use crate::programs::vs;
use crate::programs::vs::VCToolsRedistDir;



/// Get the directory with MSVC redistributable files for a given platform.
///
/// This requires the [`VCToolsRedistDir`] environment variable to be set, e.g. by having invoked
/// [`apply_dev_environment`].
pub fn locate_dir(platform: vs::Platforms) -> Result<PathBuf> {
let redist_path = VCToolsRedistDir.get()?;
let platform_segment = platform.to_string();
Ok(redist_path.join(platform_segment))
}

/// Obtain a list of all Visual C++ runtime (CRT) redistributable DLLs for a given target.
///
/// This requires the [`VCToolsRedistDir`] environment variable to be set, e.g. by having invoked
/// [`apply_dev_environment`].
///
/// Note that this does not include the C runtime (UCRT), which is now a part of Windows.
#[context("Failed to locate MSVC CRT redistributables for {platform}.")]
pub async fn list_crt_dlls(platform: vs::Platforms) -> Result<Vec<PathBuf>> {
let redist_dir = locate_dir(platform)?;
// The first component is "*.CRT" because we only want to redistribute CRT DLLs. If we matched
// all folders, we would also get other libraries, like MFC.
let redist_binaries_glob = redist_dir.join_iter(["*.crt", "**", "*.dll"]);
// Be careful about case! Windows is case-insensitive.
let glob_options = glob::MatchOptions { case_sensitive: false, ..default() };
glob::glob_with(redist_binaries_glob.as_str(), glob_options)?.try_collect().with_context(|| {
format!("Failed to list CRT redistributables in {}.", redist_binaries_glob.display())
})
}

#[cfg(test)]
mod tests {
use super::*;
use crate::programs::vs::apply_dev_environment;

#[tokio::test]
#[ignore]
async fn test_listing_dlls() -> Result {
setup_logging()?;
apply_dev_environment().await?;
let dlls = list_crt_dlls(vs::Platforms::local()?).await?;
dbg!(&dlls);
Ok(())
}
}
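An illustrative caller for this module (not part of the commit; it mirrors the ignored test above and reuses `copy_to` from `crate::fs`, as `packaging.rs` does; the function name is made up):

// Copy every CRT redistributable DLL for the host platform into `target_dir`.
async fn copy_crt_dlls_to(target_dir: &Path) -> Result {
    let platform = vs::Platforms::local()?;
    for dll in list_crt_dlls(platform).await? {
        crate::fs::copy_to(&dll, target_dir)?;
    }
    Ok(())
}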
@ -1,34 +1,33 @@
//! Wrapper for the [`vswhere`](https://github.com/microsoft/vswhere) program.

use crate::prelude::*;



/// Wrapper for the [`vswhere`](https://github.com/microsoft/vswhere) program.
///
/// It is typically installed as part of the Visual Studio installation.
#[derive(Clone, Copy, Debug)]
pub struct VsWhere;

impl Program for VsWhere {
fn default_locations(&self) -> Vec<PathBuf> {
let dir_opt = crate::platform::win::program_files_x86()
.map(|program_files| program_files.join("Microsoft Visual Studio").join("Installer"));
Vec::from_iter(dir_opt)
}

fn executable_name(&self) -> &'static str {
"vswhere"
}

fn default_locations(&self) -> Vec<PathBuf> {
if let Ok(program_files) = crate::platform::win::program_files_x86() {
let subdir = ["Microsoft Visual Studio", "Installer"];
vec![program_files.join_iter(subdir)]
} else {
default()
}
}
}

impl VsWhere {
pub async fn find_all_with(component: Component) -> Result<Vec<InstanceInfo>> {
let mut command = VsWhere.cmd()?;
command
.args(Option::Format(Format::Json).format_arguments())
.args(Option::Required(vec![component]).format_arguments())
.args(Option::ForceUTF8.format_arguments());

let stdout = command.run_stdout().await?;
serde_json::from_str(&stdout).anyhow_err()
}

/// Finds an installation of Visual Studio that has the given component installed.
#[context("Failed to find Visual Studio installation with component {component}.")]
pub async fn find_with(component: Component) -> Result<InstanceInfo> {
let mut command = VsWhere.cmd()?;
command
@ -60,7 +59,7 @@ impl VsWhere {
#[serde(rename_all = "camelCase")]
pub struct InstanceInfo {
pub install_date: chrono::DateTime<chrono::Utc>,
/// Example: C:\\Program Files\\Microsoft Visual Studio\\2022\\Community
/// Example: `C:\Program Files\Microsoft Visual Studio\2022\Community`
pub installation_path: PathBuf,
pub installation_version: String,
pub is_prerelease: bool,