Rust bump, reduce dependencies (#10803)

Updated rust version, removed some unnecessary or problematic dependencies. Ported some changes from bazel branch.
This commit is contained in:
Paweł Grabarz 2024-08-14 01:16:55 +02:00 committed by GitHub
parent a8551c7268
commit f14b79f8cf
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
134 changed files with 1153 additions and 2801 deletions

1663
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -9,7 +9,6 @@ resolver = "2"
members = [
"app/rust-ffi",
"build/cli",
"build/macros/proc-macro",
"build/ci-gen",
"build/cli",
"build/install",
@ -28,12 +27,6 @@ members = [
"tools/language-server/wstest",
]
# We are using a version with extended functionality. The changes have been PR'd upstream:
# https://github.com/rustwasm/console_error_panic_hook/pull/24
# Remove this patch when the issue is resolved.
[patch.crates-io]
console_error_panic_hook = { git = 'https://github.com/enso-org/console_error_panic_hook' }
[profile.dev]
opt-level = 0
lto = false
@ -73,7 +66,6 @@ opt-level = 2
inherits = "dev"
opt-level = 1
lto = false
debug = "line-tables-only"
debug-assertions = true
[workspace.lints.rust]
@ -106,6 +98,8 @@ directories = { version = "5.0.1" }
dirs = { version = "5.0.1" }
flate2 = { version = "1.0.28" }
indicatif = { version = "0.17.7", features = ["tokio"] }
mime = "0.3.16"
new_mime_guess = "4.0.1"
multimap = { version = "0.9.1" }
native-windows-gui = { version = "1.0.13" }
nix = { version = "0.27.1" }
@ -113,10 +107,9 @@ octocrab = { git = "https://github.com/enso-org/octocrab", default-features = fa
"rustls",
] }
path-absolutize = "3.1.1"
platforms = { version = "3.2.0", features = ["serde"] }
portpicker = { version = "0.1.1" }
regex = { version = "1.6.0" }
serde = { version = "1.0.130", features = ["derive", "rc"] }
serde = { version = "1", features = ["derive", "rc"] }
serde_yaml = { version = "0.9.16" }
sha2 = { version = "0.10.8" }
sysinfo = { version = "0.30.7" }
@ -127,26 +120,22 @@ tokio-util = { version = "0.7.10", features = ["full"] }
tracing = { version = "0.1.40" }
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
walkdir = { version = "2.5.0" }
wasm-bindgen = { version = "0.2.92", features = [] }
wasm-bindgen = { version = "0.2.92", default-features = false }
wasm-bindgen-test = { version = "0.3.34" }
windows = { version = "0.52.0", features = ["Win32", "Win32_UI", "Win32_UI_Shell", "Win32_System", "Win32_System_LibraryLoader", "Win32_Foundation", "Win32_System_Com"] }
windows = { version = "0.52.0", features = ["Win32_UI", "Win32_UI_Shell", "Win32_System_LibraryLoader", "Win32_System_Com"] }
winreg = { version = "0.52.0" }
anyhow = { version = "1.0.66" }
failure = { version = "0.1.8" }
derive_more = { version = "0.99" }
derive_more = { version = "1.0", features = ["index", "index_mut", "deref", "deref_mut", "display", "from", "into", "as_ref", "add", "add_assign"] }
boolinator = { version = "2.4.0" }
derivative = { version = "2.2" }
futures = { version = "0.3" }
futures = { version = "0.3", default-features = false, features = ["std", "executor"]}
futures-util = { version = "0.3", default-features = false }
itertools = { version = "0.12.1" }
lazy_static = { version = "1.4" }
serde_json = { version = "1.0", features = ["raw_value"] }
owned_ttf_parser = { version = "0.15.1" }
convert_case = { version = "0.6.0" }
rustybuzz = { version = "0.5.1" }
bincode = { version = "2.0.0-rc.1" }
byte-unit = { version = "5.1.4", features = ["serde"] }
bytes = { version = "1.1.0" }
matches = { version = "0.1" }
console_error_panic_hook = { version = "0.1.6" }
reqwest = { version = "0.11.27", default-features = false, features = [
"rustls-tls",
@ -161,14 +150,6 @@ syn = { version = "2.0", features = [
"visit",
"visit-mut",
] }
syn_1 = { package = "syn", version = "1.0", features = [
"full",
"extra-traits",
"printing",
"parsing",
"visit",
"visit-mut",
] }
quote = { version = "1.0.23" }
semver = { version = "1.0.0", features = ["serde"] }
strum = { version = "0.26.2", features = ["derive"] }

View File

@ -7,11 +7,10 @@ edition = "2021"
[dependencies]
anyhow = { workspace = true }
fn-error-context = "0.2.0"
futures-util = "0.3.24"
futures-util = { workspace = true }
futures = { workspace = true }
serde = "1.0.145"
serde = { workspace = true }
serde_json = { workspace = true }
serde_yaml = { workspace = true }
tracing = { workspace = true }
[lints]

View File

@ -5,13 +5,10 @@
// === Export ===
// ==============
pub mod from_string;
pub mod future;
pub mod iterator;
pub mod maps;
pub mod option;
pub mod os_str;
pub mod path;
pub mod pathbuf;
pub mod result;
pub mod str;

View File

@ -1,41 +0,0 @@
//! Module with utilities for converting string-like values into other types.
use crate::prelude::*;
use anyhow::Context;
use std::any::type_name;
/// An equivalent of the standard library's `std::str::FromStr` trait, but with nice error messages.
pub trait FromString: Sized {
/// Parse a string into a value of this type. See: [`std::str::FromStr::from_str`].
fn from_str(s: &str) -> Result<Self>;
/// Parse a string into a value of this type and then convert it to `R`.
fn parse_into<R>(text: impl AsRef<str>) -> Result<R>
where
Self: TryInto<R>,
<Self as TryInto<R>>::Error: Into<anyhow::Error>, {
let value = Self::from_str(text.as_ref())?;
// Attach the source and target type names so conversion failures are self-describing.
value.try_into().anyhow_err().context(format!(
"Failed to convert {} => {}.",
type_name::<Self>(),
type_name::<R>(),
))
}
}
// Blanket implementation: any `std::str::FromStr` type whose error converts into
// `anyhow::Error` gets the richer error messages for free.
impl<T> FromString for T
where
T: std::str::FromStr,
T::Err: Into<anyhow::Error>,
{
fn from_str(text: &str) -> Result<Self> {
text.parse::<T>().anyhow_err().context(format!(
r#"Failed to parse "{}" as {}."#,
text,
type_name::<T>()
))
}
}

View File

@ -2,30 +2,17 @@
use crate::prelude::*;
use futures_util::future::ErrInto;
use futures_util::future::Map;
use futures_util::future::MapErr;
use futures_util::future::MapOk;
use futures_util::stream;
use futures_util::FutureExt as _;
use futures_util::TryFutureExt as _;
/// Extension methods for [`Future`].
pub trait FutureExt: Future {
/// Discard the result of this future.
fn void(self) -> Map<Self, fn(Self::Output) -> ()>
where Self: Sized {
self.map(drop)
}
}
pub trait FutureExt: Future {}
impl<T: ?Sized> FutureExt for T where T: Future {}
type FlattenResultFn<T, E> =
fn(std::result::Result<std::result::Result<T, E>, E>) -> std::result::Result<T, E>;
/// Extension methods for [`TryFuture`], i.e. the Result-yielding [`Future`]
pub trait TryFutureExt: TryFuture {
/// Discard the result of successful future.
@ -55,42 +42,6 @@ pub trait TryFutureExt: TryFuture {
C: Display + Send + Sync + 'static, {
self.into_future().map(|res| res.with_context(context)).boxed()
}
/// Convert the error type of this future to [`anyhow::Error`].
fn anyhow_err(self) -> MapErr<Self, fn(Self::Error) -> anyhow::Error>
where
Self: Sized,
// TODO: we should rely on `into` rather than `from`
anyhow::Error: From<Self::Error>, {
self.map_err(anyhow::Error::from)
}
/// If the future is successful, apply the function to the result and return the new future.
fn and_then_sync<T2, E2, F>(
self,
f: F,
) -> Map<MapOk<ErrInto<Self, E2>, F>, FlattenResultFn<T2, E2>>
where
Self: Sized,
F: FnOnce(Self::Ok) -> std::result::Result<T2, E2>,
Self::Error: Into<E2>,
{
self.err_into().map_ok(f).map(std::result::Result::flatten)
}
}
impl<T: ?Sized> TryFutureExt for T where T: TryFuture {}
/// Extension methods for [`TryStream`], i.e. a [`Stream`] that produces [`Result`]s.
pub trait TryStreamExt: TryStream {
/// Wrap all the errors into [`anyhow::Error`].
fn anyhow_err(self) -> stream::MapErr<Self, fn(Self::Error) -> anyhow::Error>
where
Self: Sized,
// TODO: we should rely on `into` rather than `from`
anyhow::Error: From<Self::Error>, {
self.map_err(anyhow::Error::from)
}
}
impl<T: ?Sized> TryStreamExt for T where T: TryStream {}

View File

@ -1,66 +0,0 @@
//! Extension methods for `Iterator` and `Iterator`-like types.
use crate::prelude::*;
use std::iter::Rev;
use std::iter::Take;
/// Extension methods for `Iterator` and `Iterator`-like types.
pub trait IteratorExt: Iterator {
/// Filter items using a fallible predicate, collecting the surviving items into `R`.
/// Iteration stops early, returning the first `Err` the predicate produces.
fn try_filter<R>(mut self, mut f: impl FnMut(&Self::Item) -> Result<bool>) -> Result<R>
where
Self: Sized,
R: Default + Extend<Self::Item> + Sized, {
self.try_fold(default(), |mut acc: R, item| {
// `then_some` yields `Some(item)` only when the predicate returned `Ok(true)`.
acc.extend(f(&item)?.then_some(item));
Ok(acc)
})
}
/// Map items using a fallible function, collecting the outputs into `R`.
/// Iteration stops early, returning the first `Err` the function produces.
fn try_map<R, U>(mut self, mut f: impl FnMut(Self::Item) -> Result<U>) -> Result<R>
where
Self: Sized,
R: Default + Extend<U> + Sized, {
self.try_fold(default(), |mut acc: R, item| {
acc.extend_one(f(item)?);
Ok(acc)
})
}
}
impl<I: Iterator> IteratorExt for I {}
/// Extension methods for iterators over `Result` items.
pub trait TryIteratorExt: Iterator {
/// The result of successful iteration.
type Ok;
/// Collects the results of the iterator into a `Result<Vec<_>>`.
fn try_collect_vec(self) -> Result<Vec<Self::Ok>>;
}
impl<T, U, E> TryIteratorExt for T
where
T: Iterator<Item = std::result::Result<U, E>>,
E: Into<anyhow::Error>,
{
type Ok = U;
fn try_collect_vec(self) -> Result<Vec<U>> {
// Short-circuits on the first `Err`, converting it to `anyhow::Error`.
self.map(|i| i.anyhow_err()).collect::<Result<Vec<_>>>()
}
}
#[allow(missing_docs)]
pub trait ExactDoubleEndedIteratorExt: ExactSizeIterator + DoubleEndedIterator + Sized {
/// Take the last n elements of the iterator.
fn take_last_n(self, n: usize) -> Rev<Take<Rev<Self>>> {
self.rev().take(n).rev()
}
}
impl<T> ExactDoubleEndedIteratorExt for T where T: ExactSizeIterator + DoubleEndedIterator {}

View File

@ -62,20 +62,7 @@ pub trait PathExt: AsRef<Path> {
fn write_as_json<T: Serialize>(&self, value: &T) -> Result {
trace!("Writing JSON to {}.", self.as_ref().display());
let file = crate::fs::create(self)?;
serde_json::to_writer(file, value).anyhow_err()
}
/// Parse this file's contents as a YAML-serialized value.
fn read_to_yaml<T: DeserializeOwned>(&self) -> Result<T> {
let content = crate::fs::read_to_string(self)?;
serde_yaml::from_str(&content).anyhow_err()
}
/// Write this file with a YAML-serialized value.
fn write_as_yaml<T: Serialize>(&self, value: &T) -> Result {
trace!("Writing YAML to {}.", self.as_ref().display());
let file = crate::fs::create(self)?;
serde_yaml::to_writer(file, value).anyhow_err()
Ok(serde_json::to_writer(file, value)?)
}
/// Get the path as `str`.
@ -163,25 +150,6 @@ pub trait PathExt: AsRef<Path> {
impl<T: AsRef<Path>> PathExt for T {}
/// A method that outputs a path to a formatter using [`Path::display`].
///
/// This is useful in combination with macros like `Derivative`, as demonstrated in the example
/// below.
///
/// # Example
/// ```ignore
/// #[derive(Derivative)]
/// #[derivative(Debug)]
/// pub struct Foo {
/// #[derivative(Debug(format_with = "display_fmt"))]
/// path: PathBuf,
/// }
/// ```
pub fn display_fmt(path: &Path, f: &mut Formatter) -> std::fmt::Result {
Display::fmt(&path.display(), f)
}
/// A result of splitting a path into its filename components.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct SplitFilename<'a> {

View File

@ -9,19 +9,6 @@ use std::future::Ready;
/// Extension methods for [`Result`].
pub trait ResultExt<T, E>: Sized {
/// Maps the value and wraps it as a [`Future`].
#[allow(clippy::type_complexity)]
fn map_async<'a, T2, F, Fut>(
self,
f: F,
) -> Either<
futures::future::Map<Fut, fn(T2) -> std::result::Result<T2, E>>,
Ready<std::result::Result<T2, E>>,
>
where
F: FnOnce(T) -> Fut,
Fut: Future<Output = T2> + 'a;
/// Maps the `Ok` value to a [`Future`] value. If the result is `Err`, the error is returned
/// as a [`std::future::Ready`] future.
fn and_then_async<'a, T2, E2, F, Fut>(
@ -35,63 +22,12 @@ pub trait ResultExt<T, E>: Sized {
T2: Send + 'a,
E2: Send + 'a;
/// Executes another future if this is an error. The error value is passed to a closure to
/// create this subsequent future.
fn or_else_async<F, Fut>(self, f: F) -> Either<Ready<Self>, futures::future::IntoFuture<Fut>>
where
F: FnOnce(E) -> Fut,
Fut: TryFuture<Ok = T, Error = E>;
/// Convert the error type to [`anyhow::Error`].
///
/// If there are additional context-specific information, use [`context`] instead.
fn anyhow_err(self) -> Result<T>
where E: Into<anyhow::Error>;
/// Convert the `[Result]<[Future]>` to `Future<Result>`.
fn flatten_fut(
self,
) -> Either<Ready<std::result::Result<T::Ok, T::Error>>, futures::future::IntoFuture<T>>
where T: TryFuture<Error: From<E>>;
/// Checks if the result is `Ok` and contains the given value.
fn contains<U>(&self, x: &U) -> bool
where U: PartialEq<T>;
}
impl<T, E> ResultExt<T, E> for std::result::Result<T, E> {
fn map_async<'a, T2, F, Fut>(
self,
f: F,
) -> Either<
futures::future::Map<Fut, fn(T2) -> std::result::Result<T2, E>>,
Ready<std::result::Result<T2, E>>,
>
where
F: FnOnce(T) -> Fut,
Fut: Future<Output = T2> + 'a,
{
match self {
Ok(v) => f(v).map(Ok as fn(T2) -> std::result::Result<T2, E>).left_future(),
Err(e) => ready(Err(e)).right_future(),
}
}
fn or_else_async<'a, F, Fut>(
self,
f: F,
) -> Either<Ready<Self>, futures::future::IntoFuture<Fut>>
where
F: FnOnce(E) -> Fut,
Fut: TryFuture<Ok = T, Error = E>,
{
match self {
Ok(v) => ready(Ok(v)).left_future(),
Err(e) => f(e).into_future().right_future(),
}
}
fn and_then_async<'a, T2, E2, F, Fut>(
self,
f: F,
@ -109,21 +45,6 @@ impl<T, E> ResultExt<T, E> for std::result::Result<T, E> {
}
}
fn anyhow_err(self) -> Result<T>
where E: Into<anyhow::Error> {
self.map_err(E::into)
}
fn flatten_fut(
self,
) -> Either<Ready<std::result::Result<T::Ok, T::Error>>, futures::future::IntoFuture<T>>
where T: TryFuture<Error: From<E>> {
match self {
Ok(fut) => fut.into_future().right_future(),
Err(e) => ready(Err(T::Error::from(e))).left_future(),
}
}
fn contains<U>(&self, x: &U) -> bool
where U: PartialEq<T> {
match self {

View File

@ -1,34 +0,0 @@
//! Extensions for string-like types.
use crate::prelude::*;
/// Extension methods for strings and similar types.
pub trait StrLikeExt {
/// Convenience variant of `FromString::from_str`.
///
/// Should be preferred over [`str::parse`] due to better error messages.
// FIXME: This needs better name! However, we cannot use `parse` as it conflicts with
// `str::parse`. As a method on `str`, it would take priority over an extension trait.
fn parse2<T: FromString>(&self) -> Result<T>;
/// Convenience variant of `FromString::parse_into`.
fn parse_through<T, R>(&self) -> Result<R>
where
T: FromString + TryInto<R>,
<T as TryInto<R>>::Error: Into<anyhow::Error>;
}
// Blanket implementation for anything that can be viewed as `&str`.
impl<S: AsRef<str>> StrLikeExt for S {
fn parse2<U: FromString>(&self) -> Result<U> {
U::from_str(self.as_ref())
}
fn parse_through<T, R>(&self) -> Result<R>
where
T: FromString + TryInto<R>,
<T as TryInto<R>>::Error: Into<anyhow::Error>, {
T::parse_into(self.as_ref())
}
}

View File

@ -49,12 +49,6 @@ pub fn create(path: impl AsRef<Path>) -> Result<std::fs::File> {
wrappers::create(&path)
}
/// Read the file content and parse it using [`FromString`].
#[context("Failed to read the file: {}", path.as_ref().display())]
pub fn read_string_into<T: FromString>(path: impl AsRef<Path>) -> Result<T> {
read_to_string(&path)?.parse2()
}
/// Create a directory (and all missing parent directories),
///
/// Does not fail when a directory already exists.
@ -63,7 +57,7 @@ pub fn create_dir_if_missing(path: impl AsRef<Path>) -> Result {
let result = std::fs::create_dir_all(&path);
match result {
Err(e) if e.kind() == std::io::ErrorKind::AlreadyExists => Ok(()),
result => result.anyhow_err(),
result => Ok(result?),
}
}
@ -89,7 +83,7 @@ pub fn remove_dir_if_exists(path: impl AsRef<Path>) -> Result {
let result = std::fs::remove_dir_all(&path);
match result {
Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(()),
result => result.anyhow_err(),
result => Ok(result?),
}
}
@ -102,7 +96,7 @@ pub fn remove_file_if_exists(path: impl AsRef<Path>) -> Result<()> {
let result = std::fs::remove_file(&path);
match result {
Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(()),
result => result.anyhow_err(),
result => Ok(result?),
}
}

View File

@ -18,31 +18,31 @@ use std::fs::Metadata;
/// See [std::fs::metadata].
#[context("Failed to obtain metadata for file: {}", path.as_ref().display())]
pub fn metadata<P: AsRef<Path>>(path: P) -> Result<Metadata> {
std::fs::metadata(&path).anyhow_err()
Ok(std::fs::metadata(&path)?)
}
/// See [std::fs::symlink_metadata].
#[context("Failed to obtain symlink metadata for file: {}", path.as_ref().display())]
pub fn symlink_metadata<P: AsRef<Path>>(path: P) -> Result<Metadata> {
std::fs::symlink_metadata(&path).anyhow_err()
Ok(std::fs::symlink_metadata(&path)?)
}
/// See [std::fs::copy].
#[context("Failed to copy file from {} to {}", from.as_ref().display(), to.as_ref().display())]
pub fn copy(from: impl AsRef<Path>, to: impl AsRef<Path>) -> Result<u64> {
std::fs::copy(&from, &to).anyhow_err()
Ok(std::fs::copy(&from, &to)?)
}
/// See [std::fs::rename].
#[context("Failed to rename file from {} to {}", from.as_ref().display(), to.as_ref().display())]
pub fn rename(from: impl AsRef<Path>, to: impl AsRef<Path>) -> Result {
std::fs::rename(&from, &to).anyhow_err()
Ok(std::fs::rename(&from, &to)?)
}
/// See [std::fs::read].
#[context("Failed to read the file: {}", path.as_ref().display())]
pub fn read(path: impl AsRef<Path>) -> Result<Vec<u8>> {
std::fs::read(&path).anyhow_err()
Ok(std::fs::read(&path)?)
}
/// See [std::fs::read_dir].
@ -60,41 +60,41 @@ pub fn read_dir(path: impl AsRef<Path>) -> Result<impl Iterator<Item = Result<Di
/// See [std::fs::read_to_string].
#[context("Failed to read the file: {}", path.as_ref().display())]
pub fn read_to_string(path: impl AsRef<Path>) -> Result<String> {
std::fs::read_to_string(&path).anyhow_err()
Ok(std::fs::read_to_string(&path)?)
}
/// See [std::fs::write].
#[context("Failed to write path: {}", path.as_ref().display())]
pub fn write(path: impl AsRef<Path>, contents: impl AsRef<[u8]>) -> Result {
std::fs::write(&path, contents).anyhow_err()
Ok(std::fs::write(&path, contents)?)
}
/// See [std::fs::File::open].
#[context("Failed to open path for reading: {}", path.as_ref().display())]
pub fn open(path: impl AsRef<Path>) -> Result<File> {
File::open(&path).anyhow_err()
Ok(File::open(&path)?)
}
/// See [std::fs::File::create].
#[context("Failed to open path for writing: {}", path.as_ref().display())]
pub fn create(path: impl AsRef<Path>) -> Result<File> {
File::create(&path).anyhow_err()
Ok(File::create(&path)?)
}
/// See [std::fs::canonicalize].
#[context("Failed to canonicalize path: {}", path.as_ref().display())]
pub fn canonicalize(path: impl AsRef<Path>) -> Result<PathBuf> {
std::fs::canonicalize(&path).anyhow_err()
Ok(std::fs::canonicalize(&path)?)
}
/// See [std::fs::create_dir_all].
#[context("Failed to create missing directories no path: {}", path.as_ref().display())]
pub fn create_dir_all(path: impl AsRef<Path>) -> Result {
std::fs::create_dir_all(&path).anyhow_err()
Ok(std::fs::create_dir_all(&path)?)
}
/// See [std::fs::set_permissions].
#[context("Failed to permissions on file: {}", path.as_ref().display())]
pub fn set_permissions(path: impl AsRef<Path>, perm: std::fs::Permissions) -> Result {
std::fs::set_permissions(&path, perm).anyhow_err()
Ok(std::fs::set_permissions(&path, perm)?)
}

View File

@ -3,10 +3,6 @@
//!
//! Currently it is employed by the native build scripts code.
// === Features ===
#![feature(result_flattening)]
#![feature(associated_type_bounds)]
#![feature(extend_one)]
// === Non-Standard Linter Configuration ===
#![warn(missing_docs)]
@ -54,19 +50,17 @@ pub mod prelude {
pub use std::path::PathBuf;
pub use std::pin::pin;
pub use std::pin::Pin;
pub use std::str::FromStr;
pub use std::sync::Arc;
pub use crate::extensions::from_string::FromString;
// pub use crate::extensions::from_string::FromString;
pub use crate::extensions::future::FutureExt as _;
pub use crate::extensions::future::TryFutureExt as _;
pub use crate::extensions::iterator::IteratorExt as _;
pub use crate::extensions::iterator::TryIteratorExt as _;
pub use crate::extensions::option::OptionExt as _;
pub use crate::extensions::os_str::OsStrExt as _;
pub use crate::extensions::path::PathExt as _;
pub use crate::extensions::pathbuf::PathBufExt as _;
pub use crate::extensions::result::ResultExt as _;
pub use crate::extensions::str::StrLikeExt as _;
pub use anyhow::anyhow;
pub use anyhow::bail;
@ -77,7 +71,6 @@ pub mod prelude {
pub use futures_util::select;
pub use futures_util::stream::BoxStream;
pub use futures_util::try_join;
pub use futures_util::AsyncWrite;
pub use futures_util::FutureExt as _;
pub use futures_util::Stream;
pub use futures_util::StreamExt as _;

View File

@ -13,7 +13,7 @@ base64 = "0.13.0"
bytes = { workspace = true }
chrono = { workspace = true }
clap = { workspace = true }
derivative = { workspace = true }
derive-where = { workspace = true }
derive_more = { workspace = true }
dirs = { workspace = true }
futures = { workspace = true }
@ -25,8 +25,8 @@ enso-enso-font = { path = "../../lib/rust/enso-font" }
enso-font = { path = "../../lib/rust/font" }
enso-install-config = { path = "../install/config" }
ide-ci = { path = "../ci_utils" }
mime = "0.3.16"
new_mime_guess = "4.0.1"
mime = { workspace = true }
new_mime_guess = { workspace = true }
octocrab = { workspace = true }
path-slash = "0.2.1"
port_check = "0.1.5"

View File

@ -25,10 +25,10 @@ pub async fn client_from_env() -> aws_sdk_s3::Client {
}
/// Everything we need to get/put files to S3.
#[derive(Clone, Derivative)]
#[derivative(Debug)]
#[derive(Clone)]
#[derive_where(Debug)]
pub struct BucketContext {
#[derivative(Debug = "ignore")]
#[derive_where(skip)]
pub client: aws_sdk_s3::Client,
pub bucket: String,
pub upload_acl: ObjectCannedAcl,
@ -127,7 +127,7 @@ impl BucketContext {
pub async fn get_yaml<T: DeserializeOwned>(&self, path: &str) -> Result<T> {
let text = self.get(path).await?.collect().await?;
serde_yaml::from_reader(text.reader()).anyhow_err()
Ok(serde_yaml::from_reader(text.reader())?)
}
pub async fn put_yaml(&self, path: &str, data: &impl Serialize) -> Result<PutObjectOutput> {
@ -226,8 +226,8 @@ mod tests {
assert_eq!(headers.content_type.to_string().as_str(), expected_type);
}
case("wasm_imports.js.gz", Some("gzip"), "application/javascript");
case("index.js", None, "application/javascript");
case("wasm_imports.js.gz", Some("gzip"), "text/javascript");
case("index.js", None, "text/javascript");
case("style.css", None, "text/css");
case("ide.wasm", None, "application/wasm");
case("ide.wasm.gz", Some("gzip"), "application/wasm");

View File

@ -234,8 +234,7 @@ pub fn cleaning_step(
}
/// Data needed to generate a typical sequence of CI steps invoking `./run` script.
#[derive(Derivative)]
#[derivative(Debug)]
#[derive_where(Debug)]
pub struct RunStepsBuilder {
/// The command passed to `./run` script.
pub run_command: String,
@ -244,7 +243,7 @@ pub struct RunStepsBuilder {
/// Customize the step that runs the command.
///
/// Allows replacing the run step with one or more custom steps.
#[derivative(Debug = "ignore")]
#[derive_where(skip)]
pub customize: Option<Box<dyn FnOnce(Step) -> Vec<Step>>>,
}
@ -339,7 +338,6 @@ pub fn runs_on(os: OS, runner_type: RunnerType) -> Vec<RunnerLabel> {
(OS::Linux, RunnerType::GitHubHosted) => vec![RunnerLabel::LinuxLatest],
(OS::MacOS, RunnerType::SelfHosted) => vec![RunnerLabel::SelfHosted, RunnerLabel::MacOS],
(OS::MacOS, RunnerType::GitHubHosted) => vec![RunnerLabel::MacOSLatest],
_ => panic!("Unsupported OS and runner type combination: {os} {runner_type}."),
}
}

View File

@ -82,8 +82,8 @@ impl TryFrom<ConfigRaw> for Config {
let mut required_versions = HashMap::new();
for (program, version_req) in value.required_versions {
required_versions.insert(
<RecognizedProgram as FromString>::from_str(&program)?,
<VersionReq as FromString>::from_str(&version_req)?,
RecognizedProgram::from_str(&program)?,
VersionReq::from_str(&version_req)?,
);
}

View File

@ -2,7 +2,6 @@ use crate::prelude::*;
use crate::paths::TargetTriple;
use derivative::Derivative;
use ide_ci::github;
use octocrab::models::repos::Release;
use octocrab::models::ReleaseId;
@ -10,8 +9,7 @@ use octocrab::models::ReleaseId;
/// The basic, common information available in this application.
#[derive(Clone, Derivative, derive_more::Deref)]
#[derivative(Debug)]
#[derive(Clone, Debug, derive_more::Deref)]
pub struct BuildContext {
#[deref]
pub inner: crate::project::Context,
@ -51,7 +49,7 @@ impl BuildContext {
let repository = self.remote_repo_handle();
let designator_cp = designator.clone();
async move {
let release = if let Ok(id) = designator.parse2::<ReleaseId>() {
let release = if let Ok(id) = designator.parse::<ReleaseId>() {
repository.find_release_by_id(id).await?
} else {
match designator.as_str() {

View File

@ -74,7 +74,7 @@ pub async fn download_project_templates(client: reqwest::Client, enso_root: Path
}
}
let _result = ide_ci::future::try_join_all(futures, AsyncPolicy::FutureParallelism).await?;
let _result = futures::future::try_join_all(futures).await?;
debug!("Completed downloading templates");
Ok(())
}

View File

@ -49,8 +49,8 @@ pub fn format_option_variant<T>(value: &Option<T>, f: &mut Formatter) -> std::fm
}
}
#[derive(derive_more::Deref, derive_more::DerefMut, derivative::Derivative)]
#[derivative(Debug)]
#[derive(derive_more::Deref, derive_more::DerefMut)]
#[derive_where(Debug)]
pub struct RunContext {
#[deref]
#[deref_mut]
@ -59,7 +59,7 @@ pub struct RunContext {
pub paths: Paths,
/// If set, the engine package (used for creating bundles) will be obtained through this
/// provider rather than built from source along the other Engine components.
#[derivative(Debug(format_with = "format_option_variant"))]
#[derive_where(skip)]
pub external_runtime: Option<Arc<EnginePackageProvider>>,
}
@ -354,16 +354,14 @@ impl RunContext {
for package in ret.packages() {
let package_dir = package.dir();
let binary_extensions = [EXE_EXTENSION, DLL_EXTENSION];
let binaries = binary_extensions
let binaries: Vec<PathBuf> = binary_extensions
.into_iter()
.map(|extension| {
.flat_map(|extension| {
let pattern = package_dir.join_iter(["**", "*"]).with_extension(extension);
glob::glob(pattern.as_str())?.try_collect_vec()
glob::glob(pattern.as_str()).expect("Incorrect glob pattern")
})
.try_collect_vec()?
.into_iter()
.flatten()
.collect_vec();
.map(|p| p.map(|p| p.to_owned()))
.try_collect()?;
debug!(?binaries, "Found executables in the package.");
for binary in binaries {

View File

@ -17,7 +17,6 @@ use sha2::Digest;
use std::process::Stdio;
use tempfile::TempDir;
// ==============
// === Export ===
// ==============
@ -176,7 +175,6 @@ pub fn target_os_flag(os: OS) -> Result<&'static str> {
OS::Windows => Ok("--win"),
OS::Linux => Ok("--linux"),
OS::MacOS => Ok("--mac"),
_ => bail!("Not supported target for Electron client: {os}."),
}
}
@ -210,12 +208,12 @@ impl FallibleManipulator for ProjectManagerInfo {
}
}
#[derive(Clone, Derivative)]
#[derivative(Debug)]
#[derive(Clone)]
#[derive_where(Debug)]
pub struct IdeDesktop {
pub build_sbt: generated::RepoRootBuildSbt,
pub repo_root: generated::RepoRoot,
#[derivative(Debug = "ignore")]
#[derive_where(skip)]
pub octocrab: Octocrab,
pub cache: ide_ci::cache::Cache,
}

View File

@ -28,17 +28,17 @@ pub async fn install_html_fonts(
/// [`font-style`]: https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/font-style
#[derive(Debug, Display, Copy, Clone)]
pub enum FontStyle {
#[display(fmt = "normal")]
#[display("normal")]
Normal,
#[display(fmt = "italic")]
#[display("italic")]
Italic,
#[display(fmt = "oblique")]
#[display("oblique")]
Oblique,
/// Angle is in degrees, between -90 and 90.
#[display(fmt = "oblique {_0}deg")]
#[display("oblique {_0}deg")]
ObliqueWithAngle(f64),
/// Angles are in degrees, between -90 and 90.
#[display(fmt = "oblique {_0}deg {_1}deg")]
#[display("oblique {_0}deg {_1}deg")]
ObliqueWithAngleRange(f64, f64),
}

View File

@ -63,12 +63,12 @@ impl Family {
// ====================
/// Description of the job to download the fonts.
#[derive(Derivative, Clone)]
#[derivative(Debug)]
#[derive(Clone)]
#[derive_where(Debug)]
pub struct DownloadFont {
pub family: Family,
/// Possible authentication to GitHub (to get bigger rate limit).
#[derivative(Debug = "ignore")]
#[derive_where(skip)]
pub octocrab: Octocrab,
}
@ -121,7 +121,7 @@ impl Storable for DownloadFont {
) -> BoxFuture<'static, Result<Self::Output>> {
async move {
for font in &mut metadata {
*font = cache.join(&font);
*font = cache.join(&*font);
}
Ok(metadata)
}

View File

@ -1,13 +1,10 @@
// === Features ===
#![feature(try_blocks)]
#![feature(hash_set_entry)]
#![feature(type_alias_impl_trait)]
#![feature(trait_alias)]
#![feature(let_chains)]
#![feature(exit_status_error)]
#![feature(async_closure)]
#![feature(associated_type_bounds)]
#![feature(result_flattening)]
#![feature(associated_type_defaults)]
#![feature(duration_constants)]
#![feature(slice_take)]
@ -67,7 +64,7 @@ pub fn get_enso_version(build_sbt_contents: &str) -> Result<Version> {
// The `expect` below will not fail due to the regex definition, as is ensured by unit test.
.expect("Missing subcapture #1 with version despite matching the regex.")
.as_str();
Version::parse(version_string).anyhow_err()
Ok(Version::parse(version_string)?)
}
pub fn get_string_assignment_value(
@ -91,17 +88,17 @@ pub fn get_string_assignment_value(
/// Get version of Enso from the `build.sbt` file contents.
pub fn get_graal_version(build_sbt_contents: &str) -> Result<Version> {
get_string_assignment_value(build_sbt_contents, "graalVersion")?.parse2()
Ok(get_string_assignment_value(build_sbt_contents, "graalVersion")?.parse()?)
}
/// Get version of GraalVM packages from the `build.sbt` file contents.
pub fn get_graal_packages_version(build_sbt_contents: &str) -> Result<Version> {
get_string_assignment_value(build_sbt_contents, "graalMavenPackagesVersion")?.parse2()
Ok(get_string_assignment_value(build_sbt_contents, "graalMavenPackagesVersion")?.parse()?)
}
/// Get version of GraalVM packages from the `build.sbt` file contents.
pub fn get_flatbuffers_version(build_sbt_contents: &str) -> Result<Version> {
get_string_assignment_value(build_sbt_contents, "flatbuffersVersion")?.parse2()
Ok(get_string_assignment_value(build_sbt_contents, "flatbuffersVersion")?.parse()?)
}
#[cfg(test)]

View File

@ -49,7 +49,7 @@ pub fn discover_standard_library_tests(repo_root: &generated::RepoRoot) -> Resul
glob::glob(glob_pattern.as_str())?
// Package manifest path -> Parent directory.
.map(|package_path_result| Result::Ok(package_path_result?.try_parent()?.to_path_buf()))
.try_collect_vec()
.try_collect()
}
pub fn new_repo_root(repo_root: impl Into<PathBuf>, triple: &TargetTriple) -> generated::RepoRoot {
@ -62,15 +62,7 @@ pub fn new_repo_root(repo_root: impl Into<PathBuf>, triple: &TargetTriple) -> ge
)
}
pub fn pretty_print_arch(arch: Arch) -> &'static str {
match arch {
Arch::X86_64 => "amd64",
Arch::AArch64 => "aarch64",
_ => panic!("Unrecognized architecture {arch}"),
}
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
#[derive(Clone, Debug, Serialize, PartialEq, Eq)]
pub struct TargetTriple {
pub os: OS,
pub arch: Arch,
@ -90,16 +82,11 @@ impl TargetTriple {
pub fn engine(&self) -> Self {
self.clone()
}
/// Pretty prints architecture for our packages. Conform to GraalVM scheme as well.
pub fn arch(&self) -> &'static str {
pretty_print_arch(self.arch)
}
}
impl Display for TargetTriple {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!(f, "{}-{}-{}", self.versions.version, self.os, self.arch())
write!(f, "{}-{}-{}", self.versions.version, self.os, self.arch)
}
}

View File

@ -186,7 +186,7 @@ impl Postgresql {
let mut cmd = Docker.run_cmd(&opts)?;
cmd.stderr(Stdio::piped());
cmd.kill_on_drop(true);
let mut child = cmd.spawn().anyhow_err()?;
let mut child = cmd.spawn()?;
let stderr = child
.stderr
.ok_or_else(|| anyhow!("Failed to access standard output of the spawned process!"))?;

View File

@ -11,7 +11,6 @@ use crate::source::Source;
use crate::source::WatchTargetJob;
use crate::source::WithDestination;
use derivative::Derivative;
use ide_ci::actions::artifacts;
use ide_ci::cache;
use ide_ci::cache::Cache;
@ -46,13 +45,13 @@ pub fn path_to_extract() -> Option<PathBuf> {
pub trait IsArtifact: Clone + AsRef<Path> + Debug + Sized + Send + Sync + 'static {}
/// Plain artifact is just a folder with... things.
#[derive(Clone, Derivative)]
#[derivative(Debug)]
#[derive(Clone)]
#[derive_where(Debug)]
pub struct PlainArtifact<T> {
/// Directory path.
pub path: PathBuf,
/// Phantom, so we can tell artifacts of different projects apart.
#[derivative(Debug = "ignore")]
#[derive_where(skip)]
pub phantom: PhantomData<T>,
}
@ -71,24 +70,25 @@ impl<T> PlainArtifact<T> {
}
/// State available to all project-related operations.
#[derive(Clone, Derivative)]
#[derivative(Debug)]
#[derive(Clone)]
#[derive_where(Debug)]
pub struct Context {
/// GitHub API client.
///
/// If authenticated, it will count API rate limits against our identity and allow operations
/// like managing releases or downloading CI run artifacts.
#[derivative(Debug = "ignore")]
#[derive_where(skip)]
pub octocrab: Octocrab,
/// Stores things like downloaded release assets to save time.
#[derive_where(skip)]
pub cache: Cache,
/// Directory being an `enso` repository's working copy.
///
/// The directory is not required to be a git repository. It is allowed to use source tarballs
/// as well.
#[derivative(Debug(format_with = "std::fmt::Display::fmt"))]
#[derive_where(skip)]
pub repo_root: crate::paths::generated::RepoRoot,
}
@ -314,7 +314,8 @@ pub trait ProcessWrapper {
ide_ci::extensions::child::ChildExt::wait_ok(self.inner()).boxed()
}
fn kill(&mut self) -> BoxFuture<Result> {
self.inner().kill().anyhow_err().boxed()
let f = self.inner().kill();
async { Ok(f.await?) }.boxed()
}
}

View File

@ -1,7 +1,6 @@
use crate::prelude::*;
use crate::engine::BuildConfigurationFlags;
use crate::paths::pretty_print_arch;
use crate::paths::TargetTriple;
use crate::project::Context;
use crate::project::IsArtifact;
@ -9,18 +8,16 @@ use crate::project::IsTarget;
use crate::source::WithDestination;
use crate::version::Versions;
use derivative::Derivative;
use ide_ci::archive::is_archive_name;
use ide_ci::extensions::os::OsExt;
use octocrab::models::repos::Asset;
#[derive(Clone, Derivative)]
#[derivative(Debug)]
#[derive(Clone)]
#[derive_where(Debug)]
pub struct BuildInput {
pub versions: Versions,
#[derivative(Debug = "ignore")]
#[derive_where(skip)]
pub external_runtime: Option<Arc<crate::engine::context::EnginePackageProvider>>,
}
@ -40,11 +37,9 @@ impl BuildInput {
}
}
#[derive(Clone, Derivative)]
#[derivative(Debug)]
#[derive(Clone, Debug)]
pub struct Artifact {
/// Location of the Project Manager distribution.
#[derivative(Debug(format_with = "std::fmt::Display::fmt"))]
pub path: crate::paths::generated::ProjectManagerBundle,
/// Versions of Engine that are bundled in this Project Manager distribution.
///
@ -54,7 +49,6 @@ pub struct Artifact {
///
/// Artifacts built with [`ProjectManager::build`] will have exactly one engine
/// bundled.
#[derivative(Debug(format_with = "ide_ci::fmt::display_list"))]
pub engine_versions: Vec<Version>,
}
@ -88,7 +82,7 @@ pub async fn bundled_engine_versions(
let mut dir_reader = ide_ci::fs::tokio::read_dir(&project_manager_bundle.dist).await?;
while let Some(entry) = dir_reader.try_next().await? {
if ide_ci::fs::tokio::metadata(&entry.path()).await?.is_dir() {
if ide_ci::fs::tokio::metadata(entry.path()).await?.is_dir() {
ret.push(Version::from_str(entry.file_name().as_str())?);
}
}
@ -104,7 +98,7 @@ impl Backend {
pub fn matches_platform(&self, name: &str) -> bool {
// Sample name: "project-manager-bundle-2022.1.1-nightly.2022-04-16-linux-amd64.tar.gz"
let os_matches = name.contains(self.target_os.as_str());
let arch_matches = name.contains(pretty_print_arch(TARGET_ARCH));
let arch_matches = name.contains(TARGET_ARCH.as_str());
os_matches && arch_matches
}
}

View File

@ -106,14 +106,11 @@ impl IsTarget for Gui {
// =================
// === BuildInfo ===
// =================
#[derive(Clone, Derivative, Serialize, Deserialize)]
#[derivative(Debug)]
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct BuildInfo {
pub commit: String,
#[derivative(Debug(format_with = "std::fmt::Display::fmt"))]
pub version: Version,
#[derivative(Debug(format_with = "std::fmt::Display::fmt"))]
pub engine_version: Version,
pub name: String,
}

View File

@ -34,7 +34,6 @@ impl Artifact {
OS::Linux => "enso",
OS::MacOS => "Enso.app",
OS::Windows => "Enso.exe",
_ => todo!("{target_os}-{target_arch} combination is not supported"),
}
.into();
@ -79,14 +78,12 @@ impl Artifact {
}
}
#[derive(derivative::Derivative)]
#[derivative(Debug)]
#[derive_where(Debug)]
pub struct BuildInput {
#[derivative(Debug(format_with = "std::fmt::Display::fmt"))]
pub version: Version,
#[derivative(Debug = "ignore")]
#[derive_where(skip)]
pub project_manager: BoxFuture<'static, Result<crate::project::backend::Artifact>>,
#[derivative(Debug = "ignore")]
#[derive_where(skip)]
pub gui: BoxFuture<'static, Result<crate::project::gui::Artifact>>,
pub electron_target: Option<String>,
/// The name base used to generate CI run artifact names.
@ -141,6 +138,5 @@ pub fn electron_image_filename(target_os: OS, target_arch: Arch, version: &Versi
OS::Linux => format!("enso-linux-{arch_string}-{version}.AppImage"),
OS::MacOS => format!("enso-mac-{arch_string}-{version}.dmg"),
OS::Windows => format!("enso-win-{arch_string}-{version}.exe"),
_ => todo!("{target_os}-{target_arch} combination is not supported"),
}
}

View File

@ -16,8 +16,7 @@ use crate::version::Versions;
const ARTIFACT_NAME: &str = "runtime";
#[derive(Clone, Derivative)]
#[derivative(Debug)]
#[derive(Clone, Debug)]
pub struct BuildInput {
pub versions: Versions,
}

View File

@ -13,27 +13,5 @@ pub fn url(target: &TargetTriple) -> Result<Url> {
asset = format!("project-manager-bundle-{target}"),
ext = ide_ci::github::release::archive_extension(),
);
Url::parse(&url_text).anyhow_err()
}
pub async fn ensure_present(dist_path: impl AsRef<Path>, target: &TargetTriple) -> Result {
// Check if already done
let build_info_file = dist_path.as_ref().join("installed-enso-version");
let old_info = dbg!(build_info_file.read_to_json::<TargetTriple>());
if old_info.contains(target) {
debug!(
"Project Manager in version {target} is already installed, according to {info}.",
info = build_info_file.display()
);
} else {
// We remove the build info file to avoid misinformation if the build is interrupted during
// the call to `download_project_manager`.
ide_ci::fs::remove_if_exists(&build_info_file)?;
let url = url(target)?;
ide_ci::io::download_and_extract(url, &dist_path).await?;
ide_ci::fs::allow_owner_execute(crate::paths::project_manager(&dist_path))?;
build_info_file.write_as_json(&target)?;
}
Ok(())
Ok(Url::parse(&url_text)?)
}

View File

@ -3,7 +3,6 @@ use crate::prelude::*;
use crate::project::IsTarget;
use crate::project::IsWatchable;
use derivative::Derivative;
use ide_ci::github::Repo;
use octocrab::models::AssetId;
use octocrab::models::RunId;
@ -11,16 +10,11 @@ use octocrab::models::RunId;
/// Denotes an external source from which a target artifact can be obtained.
#[derive(Clone, Derivative)]
#[derivative(Debug)]
#[derive(Clone, Debug)]
pub enum ExternalSource {
#[derivative(Debug = "transparent")]
OngoingCiRun(OngoingCiRunSource),
#[derivative(Debug = "transparent")]
CiRun(CiRunSource),
#[derivative(Debug = "transparent")]
LocalFile(PathBuf),
#[derivative(Debug = "transparent")]
Release(ReleaseSource),
}
@ -40,14 +34,11 @@ pub struct BuildSource<Target: IsTarget> {
}
/// Describes how to get a target.
#[derive(Derivative)]
#[derivative(Debug)]
#[derive(Debug)]
pub enum Source<Target: IsTarget> {
/// Build the target locally from the sources.
#[derivative(Debug = "transparent")]
BuildLocally(BuildSource<Target>),
/// Download the target from an external source.
#[derivative(Debug = "transparent")]
External(ExternalSource),
}
@ -56,22 +47,16 @@ pub struct OngoingCiRunSource {
pub artifact_name: String,
}
#[derive(Clone, Derivative)]
#[derivative(Debug)]
#[derive(Clone, Debug)]
pub struct CiRunSource {
#[derivative(Debug(format_with = "std::fmt::Display::fmt"))]
pub repository: Repo,
#[derivative(Debug(format_with = "std::fmt::Display::fmt"))]
pub run_id: RunId,
pub artifact_name: String,
}
#[derive(Clone, Derivative)]
#[derivative(Debug)]
#[derive(Clone, Debug)]
pub struct ReleaseSource {
#[derivative(Debug(format_with = "std::fmt::Display::fmt"))]
pub repository: Repo,
#[derivative(Debug(format_with = "std::fmt::Display::fmt"))]
pub asset_id: AssetId,
}

View File

@ -185,7 +185,7 @@ impl SQLServer {
let mut cmd = Docker.run_cmd(&opts)?;
cmd.stdout(Stdio::piped());
cmd.kill_on_drop(true);
let mut child = cmd.spawn().anyhow_err()?;
let mut child = cmd.spawn()?;
let stdout = child
.stdout
.take()

View File

@ -8,7 +8,6 @@ use crate::prelude::*;
use anyhow::Context;
use chrono::Datelike;
use derivative::Derivative;
use ide_ci::define_env_var;
use ide_ci::env::accessor::TypedVariable;
use ide_ci::github;
@ -51,7 +50,7 @@ pub const RC_BUILD_PREFIX: &str = "rc";
/// Check if the given GitHub release matches the provided kind.
pub fn is_release_of_kind(release: &Release, kind: Kind) -> bool {
matches!(release.tag_name.parse2(), Ok(version) if kind.matches(&version))
matches!(release.tag_name.parse(), Ok(version) if kind.matches(&version))
}
/// List all releases in the GitHub repository that are of a given kind.
@ -76,14 +75,12 @@ pub async fn latest_nightly_release(repo: &github::repo::Handle<impl IsRepo>) ->
/// Keeps the version of Enso, edition name and whether this version should be treated as a release.
///
/// Basically this is everything that is needed to define the version of the build.
#[derive(Clone, Derivative, Serialize, Deserialize, Deref, PartialEq, Eq)]
#[derivative(Debug)]
#[derive(Clone, Serialize, Deserialize, Deref, PartialEq, Eq, Debug)]
pub struct Versions {
/// The version of Enso.
///
/// Currently it also doubles as the edition name. In future we might want to separate them.
#[deref]
#[derivative(Debug(format_with = "std::fmt::Display::fmt"))]
pub version: Version,
/// Whether this version should be treated as a release.
@ -117,7 +114,7 @@ impl Versions {
}
pub fn local_prerelease() -> Result<Prerelease> {
Prerelease::new(LOCAL_BUILD_PREFIX).anyhow_err()
Ok(Prerelease::new(LOCAL_BUILD_PREFIX)?)
}
/// Get a git tag that should be applied to a commit released as this version.
@ -202,7 +199,7 @@ pub fn increment_rc_version(version: &Version) -> Result<Version> {
ensure!(Kind::Rc.matches(version), "Version is not an RC version: {}.", version);
match version.pre.split('.').collect_vec().as_slice() {
[RC_BUILD_PREFIX, index] => {
let index = index.parse2::<u32>().context("Parsing RC index.")?;
let index = index.parse::<u32>().context("Parsing RC index.")?;
let pre = generate_rc_prerelease(index + 1)?;
Ok(Version { pre, ..version.clone() })
}
@ -233,7 +230,7 @@ pub fn same_core_version(a: &Version, b: &Version) -> bool {
}
pub fn generate_rc_prerelease(index: u32) -> Result<Prerelease> {
Prerelease::from_str(&format!("{RC_BUILD_PREFIX}.{index}"))
Ok(Prerelease::from_str(&format!("{RC_BUILD_PREFIX}.{index}"))?)
}
#[instrument(ret)]

View File

@ -53,12 +53,12 @@ impl TryFrom<&Prerelease> for NightlyPrerelease {
"Not a nightly build."
);
ensure!(identifiers.len() == 4 || identifiers.len() == 5, "Wrong number of identifiers.");
let year = identifiers.get(1).context("Missing year")?.parse2().context("Invalid year")?;
let year = identifiers.get(1).context("Missing year")?.parse().context("Invalid year")?;
let month =
identifiers.get(2).context("Missing month")?.parse2().context("Invalid month")?;
let day = identifiers.get(3).context("Missing day")?.parse2().context("Invalid day")?;
identifiers.get(2).context("Missing month")?.parse().context("Invalid month")?;
let day = identifiers.get(3).context("Missing day")?.parse().context("Invalid day")?;
let index =
identifiers.get(4).map(|index| index.parse2()).transpose().context("Invalid index")?;
identifiers.get(4).map(|index| index.parse()).transpose().context("Invalid index")?;
let date = chrono::NaiveDate::from_ymd_opt(year, month, day)
.with_context(|| format!("Invalid date: {year}-{month}-{day}"))?;
Ok(Self::new(date, index))
@ -88,7 +88,7 @@ impl TryInto<Prerelease> for NightlyPrerelease {
fn try_into(self) -> std::result::Result<Prerelease, Self::Error> {
let as_string = self.to_string();
Prerelease::from_str(&as_string)
Ok(Prerelease::from_str(&as_string)?)
}
}

View File

@ -9,13 +9,12 @@ anyhow = { workspace = true }
async-compression = { version = "0.3.12", features = ["tokio", "gzip"] }
async-trait = "0.1.78"
bincode = "1.3.3"
byte-unit = { workspace = true }
bytes = { workspace = true }
chrono = { workspace = true }
clap = { workspace = true }
data-encoding = "2.3.2"
dependency_runner = "1.1.0"
derivative = { workspace = true }
derive-where = { workspace = true }
derive_more = { workspace = true }
dirs = { workspace = true }
enso-build-base = { path = "../base" }
@ -24,7 +23,7 @@ flate2 = { workspace = true }
flume = "0.10.10"
fs_extra = "1.3.0"
futures = { workspace = true }
futures-util = "0.3.17"
futures-util = { workspace = true }
glob = "0.3.0"
headers = "0.3.7"
heck = "0.4.0"
@ -32,14 +31,13 @@ http-serde = "1.1.0"
indicatif = { workspace = true }
itertools = { workspace = true }
lazy_static = { workspace = true }
mime = "0.3.16"
mime = { workspace = true }
multimap = { workspace = true }
new_mime_guess = "4.0.0"
new_mime_guess = { workspace = true }
octocrab = { workspace = true }
path-absolutize = { workspace = true }
pathdiff = "0.2.1"
path-slash = "0.2.1"
platforms = { workspace = true }
portpicker = { workspace = true }
regex = { workspace = true }
reqwest = { workspace = true }
@ -47,7 +45,7 @@ semver = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
serde_yaml = { workspace = true }
sha2 = "0.10.2"
sha2 = { workspace = true }
strum = { workspace = true }
symlink = "0.1.0"
sysinfo = { workspace = true }
@ -67,7 +65,6 @@ zip = { version = "0.6.2", default-features = false, features = ["deflate"] }
base64 = "0.21.0"
[dev-dependencies]
warp = { version = "0.3.2", default-features = false }
wiremock = "0.5.10"
[lints]

View File

@ -35,7 +35,7 @@ impl Context {
let url_text = format!(
"{runtime_url}_apis/pipelines/workflows/{run_id}/artifacts?api-version={api_version}"
);
Url::parse(&url_text).anyhow_err()
Ok(Url::parse(&url_text)?)
}
pub fn prepare_client(&self, accept_mime: Mime) -> Result<ClientBuilder> {
@ -53,10 +53,10 @@ impl Context {
}
pub fn json_client(&self) -> Result<Client> {
self.prepare_client(mime::APPLICATION_JSON)?
Ok(self
.prepare_client(mime::APPLICATION_JSON)?
.default_content_type(mime::APPLICATION_JSON)
.build()
.anyhow_err()
.build()?)
}
pub fn upload_client(&self) -> Result<Client> {
@ -65,19 +65,19 @@ impl Context {
let mut headers = HeaderMap::new();
headers.insert(reqwest::header::CONNECTION, HeaderValue::from_static("Keep-Alive"));
headers.insert("Keep-Alive", keep_alive_seconds.into());
self.prepare_client(mime::APPLICATION_OCTET_STREAM)?
Ok(self
.prepare_client(mime::APPLICATION_OCTET_STREAM)?
.default_content_type(mime::APPLICATION_JSON)
.default_headers(headers)
.build()
.anyhow_err()
.build()?)
}
pub fn download_client(&self) -> Result<Client> {
self.prepare_client(mime::APPLICATION_OCTET_STREAM)?
Ok(self
.prepare_client(mime::APPLICATION_OCTET_STREAM)?
.default_content_type(mime::APPLICATION_JSON)
.keep_alive(10)
.default_header(ACCEPT_ENCODING, HeaderValue::try_from("gzip").unwrap())
.build()
.anyhow_err()
.build()?)
}
}

View File

@ -95,7 +95,7 @@ pub mod endpoints {
.json::<serde_json::Value>()
.await?;
debug!("{}", serde_json::to_string_pretty(&body)?);
serde_json::from_value(body).anyhow_err()
Ok(serde_json::from_value(body)?)
}
#[context("Failed to finalize upload of the artifact `{}`.", artifact_name.as_ref())]

View File

@ -169,12 +169,10 @@ pub async fn upload_worker(
debug!("Upload worker finished.");
}
#[derive(Derivative)]
#[derivative(Debug)]
#[derive_where(Debug)]
pub struct FileUploader {
#[derivative(Debug(format_with = "std::fmt::Display::fmt"))]
pub url: Url,
#[derivative(Debug = "ignore")]
#[derive_where(skip)]
pub client: Client,
pub artifact_name: PathBuf,
pub chunk_size: usize,
@ -275,40 +273,3 @@ pub struct UploadResult {
pub successful_upload_size: usize,
pub total_size: usize,
}
#[cfg(test)]
mod tests {
use super::*;
use crate::actions::artifacts;
use crate::actions::artifacts::models::CreateArtifactResponse;
#[tokio::test]
#[ignore]
async fn test_upload() -> Result {
use warp::Filter;
setup_logging().ok();
let response1 = CreateArtifactResponse {
name: "test-artifact".to_string(),
url: "http://localhost:8080/artifacts/test-artifact".try_into()?,
container_id: 1,
size: 0,
file_container_resource_url: "http://localhost:8080/artifacts/test-artifact/files"
.try_into()?,
r#type: "file".to_string(),
expires_on: default(),
signed_content: None,
};
let routes = warp::any().map(move || serde_json::to_string(&response1).unwrap());
tokio::spawn(warp::serve(routes).run(([127, 0, 0, 1], 8080)));
debug!("Hello!");
crate::env::set_var("ACTIONS_RUNTIME_URL", "http://localhost:8080");
crate::env::set_var("ACTIONS_RUNTIME_TOKEN", "test-token");
crate::env::set_var("GITHUB_RUN_ID", "123");
let result = artifacts::upload_single_file("file", "name").await;
dbg!(result)?;
Ok(())
}
}

View File

@ -803,7 +803,7 @@ impl Strategy {
name: impl Into<String>,
values: impl IntoIterator<Item: Serialize>,
) -> Result<&mut Self> {
let values = values.into_iter().map(serde_json::to_value).try_collect_vec()?;
let values = values.into_iter().map(serde_json::to_value).try_collect()?;
self.matrix.insert(name.into(), serde_json::Value::Array(values));
Ok(self)
}

View File

@ -43,12 +43,11 @@ pub fn extract_files_sync<R: Read>(
// ===============
/// A `tar` archive.
#[derive(Derivative)]
#[derivative(Debug)]
#[derive_where(Debug)]
pub struct Archive {
/// The path that the `file` originated from. This is stored for error reporting.
path: Box<Path>,
#[derivative(Debug = "ignore")]
#[derive_where(skip)]
file: tar::Archive<GzDecoder<File>>,
}

View File

@ -11,7 +11,7 @@ use zip::read::ZipFile;
pub use ::zip::*;
pub fn open(path: impl AsRef<Path>) -> Result<ZipArchive<std::fs::File>> {
ZipArchive::new(crate::fs::open(path)?).anyhow_err()
Ok(ZipArchive::new(crate::fs::open(path)?)?)
}
#[context("Failed to extract in-memory archive to {}.", output_dir.as_ref().display())]

View File

@ -7,7 +7,6 @@ use crate::io::web::filename_from_response;
use crate::io::web::handle_error_response;
use crate::io::web::stream_response_to_file;
use derivative::Derivative;
use headers::HeaderMap;
use reqwest::Client;
use reqwest::ClientBuilder;
@ -16,10 +15,8 @@ use reqwest::Response;
#[derive(Clone, Derivative, Serialize, Deserialize)]
#[derivative(Debug)]
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Key {
#[derivative(Debug(format_with = "std::fmt::Display::fmt"))]
pub url: Url,
/// We keep this as part of the key, as some GitHub API endpoints change their meaning based on

View File

@ -96,9 +96,9 @@ impl GraalPy {
mod tests {
use crate::cache::goodie::graalpy::graalpy_version_from_str;
use crate::cache::goodie::graalpy::GraalPy;
use crate::Arch;
use crate::OS;
use octocrab::Octocrab;
use platforms::Arch;
use platforms::OS;
use semver::Version;
#[test]

View File

@ -33,7 +33,7 @@ pub enum Edition {
Enterprise,
}
impl std::str::FromStr for Edition {
impl FromStr for Edition {
type Err = anyhow::Error;
fn from_str(s: &str) -> Result<Self> {
@ -177,7 +177,6 @@ impl GraalVM {
OS::Linux => "linux",
OS::Windows => "windows",
OS::MacOS => "macos",
other_os => unimplemented!("System `{}` is not supported!", other_os),
};
let arch_name = match self.arch {
Arch::X86_64 => "x64",

View File

@ -18,7 +18,7 @@ pub struct Sbt;
impl Goodie for Sbt {
fn get(&self, cache: &Cache) -> BoxFuture<'static, Result<PathBuf>> {
goodie::download_try_url(Url::from_str(DOWNLOAD_URL_TEXT), cache)
goodie::download_try_url(Url::from_str(DOWNLOAD_URL_TEXT).map_err(Into::into), cache)
}
fn is_active(&self) -> BoxFuture<'static, Result<bool>> {

View File

@ -98,7 +98,7 @@ impl<Variable: TypedVariable, Value: AsRef<Variable::Borrowed>> FallibleManipula
#[derive(Clone, Copy, Debug, Display, Ord, PartialOrd, Eq, PartialEq)]
pub struct PathBufVariable(pub &'static str);
impl const From<&'static str> for PathBufVariable {
impl From<&'static str> for PathBufVariable {
fn from(value: &'static str) -> Self {
PathBufVariable(value)
}
@ -120,7 +120,7 @@ impl TypedVariable for PathBufVariable {
type Value = PathBuf;
type Borrowed = Path;
fn parse(&self, value: &str) -> Result<Self::Value> {
PathBuf::from_str(value)
Ok(PathBuf::from_str(value)?)
}
fn generate(&self, value: &Self::Borrowed) -> Result<String> {
value
@ -144,7 +144,7 @@ impl<Value, Borrowed: ?Sized> From<&'static str> for SimpleVariable<Value, Borro
}
}
impl<Value, Borrowed: ?Sized> const AsRef<str> for SimpleVariable<Value, Borrowed> {
impl<Value, Borrowed: ?Sized> AsRef<str> for SimpleVariable<Value, Borrowed> {
fn as_ref(&self) -> &str {
self.name
}
@ -180,13 +180,13 @@ impl<Value, Borrowed: ?Sized> RawVariable for SimpleVariable<Value, Borrowed> {
}
}
impl<Value: FromString, Borrowed: ToString + ?Sized> TypedVariable
for SimpleVariable<Value, Borrowed>
impl<Value: FromStr, Borrowed: ToString + ?Sized> TypedVariable for SimpleVariable<Value, Borrowed>
where Value::Err: Into<anyhow::Error>
{
type Value = Value;
type Borrowed = Borrowed;
fn parse(&self, value: &str) -> Result<Self::Value> {
Value::from_str(value)
Value::from_str(value).map_err(Into::into)
}
fn generate(&self, value: &Self::Borrowed) -> Result<String> {
Ok(Borrowed::to_string(value))

View File

@ -72,7 +72,7 @@ pub enum CscLink {
Data(Vec<u8>),
}
impl std::str::FromStr for CscLink {
impl FromStr for CscLink {
type Err = anyhow::Error;
#[context("Failed to parse CSC link from '{csc_link}'.")]

View File

@ -6,7 +6,6 @@ pub mod child;
pub mod clap;
pub mod command;
pub mod octocrab;
pub mod os;
pub mod output;
pub mod reqwest;
pub mod version;

View File

@ -17,7 +17,7 @@ pub trait ChildExt {
impl ChildExt for tokio::process::Child {
fn wait_ok(&mut self) -> BoxFuture<Result> {
async move { self.wait().await?.exit_ok().anyhow_err() }.boxed()
async move { Ok(self.wait().await?.exit_ok()?) }.boxed()
}
fn kill_subtree(&self) {

View File

@ -1,75 +0,0 @@
use crate::prelude::*;
/// A bunch of constant literals associated with a given OS. Follows the convention of constants
/// defined in [`std::env::consts`] module.
#[const_trait]
pub trait OsExt: Copy {
fn exe_suffix(self) -> &'static str;
fn exe_extension(self) -> &'static str;
fn dll_prefix(self) -> &'static str;
fn dll_extension(self) -> &'static str;
fn dll_suffix(self) -> &'static str;
}
impl const OsExt for OS {
fn exe_suffix(self) -> &'static str {
match self {
OS::Windows => ".exe",
OS::Linux => "",
OS::MacOS => "",
_ => todo!(),
}
}
fn exe_extension(self) -> &'static str {
match self {
OS::Windows => "exe",
OS::Linux => "",
OS::MacOS => "",
_ => todo!(),
}
}
fn dll_prefix(self) -> &'static str {
match self {
OS::Windows => "",
OS::Linux => "lib",
OS::MacOS => "lib",
_ => todo!(),
}
}
fn dll_extension(self) -> &'static str {
match self {
OS::Windows => "dll",
OS::Linux => "so",
OS::MacOS => "dylib",
_ => todo!(),
}
}
fn dll_suffix(self) -> &'static str {
match self {
OS::Windows => ".dll",
OS::Linux => ".so",
OS::MacOS => ".dylib",
_ => todo!(),
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn same_on_target() {
assert_eq!(std::env::consts::DLL_EXTENSION, TARGET_OS.dll_extension());
assert_eq!(std::env::consts::DLL_PREFIX, TARGET_OS.dll_prefix());
assert_eq!(std::env::consts::DLL_SUFFIX, TARGET_OS.dll_suffix());
assert_eq!(std::env::consts::EXE_EXTENSION, TARGET_OS.exe_extension());
assert_eq!(std::env::consts::EXE_SUFFIX, TARGET_OS.exe_suffix());
}
}

View File

@ -3,8 +3,6 @@
use crate::prelude::*;
use async_compression::tokio::bufread::GzipEncoder;
use async_compression::Level;
use fs_extra::dir::CopyOptions;
use fs_extra::error::ErrorKind;
@ -82,19 +80,6 @@ pub async fn mirror_directory(source: impl AsRef<Path>, destination: impl AsRef<
}
}
/// Get the size of a file after gzip compression.
pub async fn compressed_size(path: impl AsRef<Path>) -> Result<byte_unit::Byte> {
// Read the file in chunks of 4MB. Our wasm files are usually way bigger than that, so this
// buffer gives very significant speedup over the default 8KB chunks.
const READER_CAPACITY: usize = 4096 * 1024;
let file = tokio::open(&path).await?;
let buf_file = ::tokio::io::BufReader::with_capacity(READER_CAPACITY, file);
let encoded_stream = GzipEncoder::with_quality(buf_file, Level::Best);
crate::io::read_length(encoded_stream).await.map(into)
}
/// Copy the file to the destination path, unless the file already exists and has the same content.
///
/// If the directory is passed as the source, it will be copied recursively.
@ -108,7 +93,7 @@ pub async fn copy_if_different(source: impl AsRef<Path>, target: impl AsRef<Path
}
let walkdir = walkdir::WalkDir::new(&source);
let entries = walkdir.into_iter().try_collect_vec()?;
let entries: Vec<_> = walkdir.into_iter().try_collect()?;
for entry in entries.into_iter().filter(|e| e.file_type().is_file()) {
let entry_path = entry.path();
let relative_path = pathdiff::diff_paths(entry_path, &source)
@ -125,7 +110,7 @@ pub async fn copy_if_different(source: impl AsRef<Path>, target: impl AsRef<Path
pub fn symlink_auto(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> Result {
create_parent_dir_if_missing(&dst)?;
debug!("Creating symlink {} <= {}", src.as_ref().display(), dst.as_ref().display());
symlink::symlink_auto(&src, &dst).anyhow_err()
Ok(symlink::symlink_auto(&src, &dst)?)
}
/// Remove a symlink to a directory if it exists.
@ -134,7 +119,7 @@ pub fn remove_symlink_dir_if_exists(path: impl AsRef<Path>) -> Result {
let result = symlink::remove_symlink_dir(&path);
match result {
Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(()),
ret => ret.anyhow_err(),
ret => Ok(ret?),
}
}

View File

@ -34,7 +34,7 @@ pub async fn create_dir_if_missing(path: impl AsRef<Path>) -> Result {
}
result => {
trace!("Created directory: {}", path.as_ref().display());
result.anyhow_err()
Ok(result?)
}
}
}
@ -55,7 +55,7 @@ pub async fn create_parent_dir_if_missing(path: impl AsRef<Path>) -> Result<Path
#[context("Failed to write file: {}", path.as_ref().display())]
pub async fn write(path: impl AsRef<Path>, contents: impl AsRef<[u8]>) -> Result {
create_parent_dir_if_missing(&path).await?;
crate::fs::wrappers::tokio::write(&path, &contents).await.anyhow_err()
crate::fs::wrappers::tokio::write(&path, &contents).await
}
pub async fn copy_to_file(
@ -63,7 +63,7 @@ pub async fn copy_to_file(
output_path: impl AsRef<Path>,
) -> Result<u64> {
let mut output = create(output_path).await?;
tokio::io::copy(&mut content, &mut output).await.anyhow_err()
Ok(tokio::io::copy(&mut content, &mut output).await?)
}
/// Remove a directory with all its subtree.
@ -184,7 +184,7 @@ pub async fn remove_file_if_exists(path: impl AsRef<Path>) -> Result<()> {
let result = tokio::fs::remove_file(&path).await;
match result {
Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(()),
result => result.anyhow_err(),
result => Ok(result?),
}
}

View File

@ -8,7 +8,7 @@ use tokio_util::io::ReaderStream;
pub fn metadata<P: AsRef<Path>>(path: P) -> BoxFuture<'static, Result<std::fs::Metadata>> {
let path = path.as_ref().to_owned();
tokio::fs::metadata(path).anyhow_err().boxed()
async { Ok(tokio::fs::metadata(path).await?) }.boxed()
}
@ -36,7 +36,7 @@ pub fn symlink_metadata<P: AsRef<Path>>(path: P) -> BoxFuture<'static, Result<st
#[context("Failed to open path for reading: {}", path.as_ref().display())]
pub async fn open(path: impl AsRef<Path>) -> Result<File> {
File::open(&path).await.anyhow_err()
Ok(File::open(&path).await?)
}
pub fn open_stream(path: impl AsRef<Path>) -> BoxFuture<'static, Result<ReaderStream<File>>> {
@ -53,12 +53,12 @@ pub fn open_stream(path: impl AsRef<Path>) -> BoxFuture<'static, Result<ReaderSt
#[context("Failed to open path for writing: {}", path.as_ref().display())]
pub async fn create(path: impl AsRef<Path>) -> Result<File> {
File::create(&path).await.anyhow_err()
Ok(File::create(&path).await?)
}
#[context("Failed to create missing directories no path: {}", path.as_ref().display())]
pub async fn create_dir_all(path: impl AsRef<Path>) -> Result {
tokio::fs::create_dir_all(&path).await.anyhow_err()
Ok(tokio::fs::create_dir_all(&path).await?)
}
pub async fn read_dir(
@ -79,12 +79,12 @@ pub async fn read_dir(
#[context("Failed to remove directory with the subtree: {}", path.as_ref().display())]
pub async fn remove_dir_all(path: impl AsRef<Path>) -> Result {
tokio::fs::remove_dir_all(&path).await.anyhow_err()
Ok(tokio::fs::remove_dir_all(&path).await?)
}
#[context("Failed to write file: {}", path.as_ref().display())]
pub async fn write(path: impl AsRef<Path>, contents: impl AsRef<[u8]>) -> Result {
tokio::fs::write(&path, &contents).await.anyhow_err()
Ok(tokio::fs::write(&path, &contents).await?)
}
#[context("Failed to read file: {}", path.as_ref().display())]
@ -97,11 +97,11 @@ pub async fn read<P: AsRef<Path>>(path: P) -> Result<Vec<u8>> {
#[context("Failed to read the file: {}", path.as_ref().display())]
pub async fn read_to_string(path: impl AsRef<Path>) -> Result<String> {
tokio::fs::read_to_string(&path).await.anyhow_err()
Ok(tokio::fs::read_to_string(&path).await?)
}
/// See [`tokio::fs::set_permissions`].
#[context("Failed to set permissions {:?} for file: {}", permissions, path.as_ref().display())]
pub async fn set_permissions(path: impl AsRef<Path>, permissions: std::fs::Permissions) -> Result {
tokio::fs::set_permissions(&path, permissions.clone()).await.anyhow_err()
Ok(tokio::fs::set_permissions(&path, permissions.clone()).await?)
}

View File

@ -1,21 +1,10 @@
use crate::prelude::*;
use futures_util::future::OptionFuture;
pub fn receiver_to_stream<T>(
mut receiver: tokio::sync::mpsc::Receiver<T>,
) -> impl Stream<Item = T> {
futures::stream::poll_fn(move |ctx| receiver.poll_recv(ctx))
}
use futures::future::OptionFuture;
#[derive(Copy, Clone, Debug)]
pub enum AsyncPolicy {
Sequential,
FutureParallelism,
TaskParallelism,
}
pub async fn join_all<I, F, T, E>(futures: I, parallel: AsyncPolicy) -> Vec<Result<T>>
@ -28,42 +17,15 @@ where
AsyncPolicy::Sequential => {
let mut ret = Vec::new();
for future in futures {
ret.push(future.await.anyhow_err());
ret.push(future.await.map_err(Into::into));
}
ret
}
AsyncPolicy::FutureParallelism =>
futures::future::join_all(futures).await.into_iter().map(|r| r.anyhow_err()).collect(),
AsyncPolicy::TaskParallelism => {
let tasks = futures
.into_iter()
.map(|future| async move { tokio::task::spawn(future).await?.anyhow_err() });
futures::future::join_all(tasks).await
}
}
}
pub async fn try_join_all<I, F, T, E>(futures: I, parallel: AsyncPolicy) -> Result<Vec<T>>
where
I: IntoIterator<Item = F>,
F: Future<Output = std::result::Result<T, E>> + Send + 'static,
T: Send + 'static,
E: Into<anyhow::Error> + Send + 'static, {
match parallel {
AsyncPolicy::Sequential => {
let mut ret = Vec::new();
for future in futures {
ret.push(future.await.anyhow_err()?);
}
Ok(ret)
}
AsyncPolicy::FutureParallelism => futures::future::try_join_all(futures).await.anyhow_err(),
AsyncPolicy::TaskParallelism => {
let tasks = futures
.into_iter()
.map(|future| async move { tokio::task::spawn(future).await?.anyhow_err() });
futures::future::try_join_all(tasks).await
}
AsyncPolicy::FutureParallelism => futures::future::join_all(futures)
.await
.into_iter()
.map(|r| r.map_err(Into::into))
.collect(),
}
}

View File

@ -126,7 +126,7 @@ pub trait IsOrganization {
/// The organization's URL.
fn url(&self) -> Result<Url> {
let url_text = format!("https://github.com/{}", self.name());
Url::from_str(&url_text)
Ok(Url::from_str(&url_text)?)
}
}
@ -162,12 +162,10 @@ pub async fn latest_runner_url(octocrab: &Octocrab, os: OS) -> Result<Url> {
OS::Linux => "linux",
OS::Windows => "win",
OS::MacOS => "osx",
other_os => unimplemented!("System `{}` is not yet supported!", other_os),
};
let arch_name = match TARGET_ARCH {
Arch::X86_64 => "x64",
Arch::Arm => "arm",
Arch::AArch64 => "arm64",
other_arch => unimplemented!("Architecture `{}` is not yet supported!", other_arch),
};
@ -188,9 +186,5 @@ pub async fn fetch_runner(octocrab: &Octocrab, os: OS, output_dir: impl AsRef<Pa
pub fn create_client(pat: impl AsRef<str>) -> Result<reqwest::Client> {
let mut header_map = reqwest::header::HeaderMap::new();
header_map.append(reqwest::header::AUTHORIZATION, format!("Bearer {}", pat.as_ref()).parse()?);
reqwest::Client::builder()
.user_agent("enso-build")
.default_headers(header_map)
.build()
.anyhow_err()
Ok(reqwest::Client::builder().user_agent("enso-build").default_headers(header_map).build()?)
}

View File

@ -228,12 +228,12 @@ pub trait IsReleaseExt: IsRelease + Sync {
/// Get the information about the release.
async fn get(&self) -> Result<Release> {
self.octocrab()
Ok(self
.octocrab()
.repos(self.repo().owner(), self.repo().name())
.releases()
.get_by_id(self.id())
.await
.anyhow_err()
.await?)
}
async fn publish(&self) -> Result<Release> {
@ -251,13 +251,12 @@ pub trait IsReleaseExt: IsRelease + Sync {
impl<T> IsReleaseExt for T where T: IsRelease + Sync {}
/// A release on GitHub.
#[derive(Clone, Derivative)]
#[derivative(Debug)]
#[derive(Clone)]
#[derive_where(Debug)]
pub struct Handle {
#[derivative(Debug(format_with = "std::fmt::Display::fmt"))]
pub repo: Repo,
pub id: ReleaseId,
#[derivative(Debug = "ignore")]
#[derive_where(skip)]
pub octocrab: Octocrab,
}

View File

@ -27,7 +27,7 @@ use reqwest::Response;
///
/// See also [`RepoRef`] for a non-owning equivalent.
#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize, derive_more::Display)]
#[display(fmt = "{owner}/{name}")]
#[display("{owner}/{name}")]
pub struct Repo {
/// Owner - an organization's or user's name.
pub owner: String,
@ -46,7 +46,7 @@ impl IsRepo for Repo {
}
/// Parse from strings in format "owner/name". Opposite of [`Display`].
impl std::str::FromStr for Repo {
impl FromStr for Repo {
type Err = anyhow::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
@ -78,7 +78,7 @@ impl Repo {
///
/// Particularly useful for defining `const` repositories.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Deserialize, Serialize, derive_more::Display)]
#[display(fmt = "{owner}/{name}")]
#[display("{owner}/{name}")]
pub struct RepoRef<'a> {
/// Owner - an organization's or user's name.
pub owner: &'a str,
@ -161,11 +161,11 @@ pub trait IsRepo: Display {
/// A handle to a specific GitHub repository.
///
/// It includes a client (so also an authentication token) and a repository.
#[derive(Derivative, Clone)]
#[derivative(Debug)]
#[derive(Clone)]
#[derive_where(Debug; Repo)]
pub struct Handle<Repo> {
/// Octocrab client (includes authentication token).
#[derivative(Debug = "ignore")]
#[derive_where(skip)]
pub octocrab: Octocrab,
/// Repository designation.
pub repo: Repo,

View File

@ -1,8 +1,5 @@
use crate::prelude::*;
use crate::future::try_join_all;
use crate::future::AsyncPolicy;
use indicatif::MultiProgress;
use indicatif::ProgressBar;
use indicatif::WeakProgressBar;
@ -29,15 +26,14 @@ pub fn store_static_text(text: impl AsRef<str>) -> &'static str {
const REFRESHES_PER_SECOND: u32 = 100;
#[derive(derivative::Derivative)]
#[derivative(Debug)]
#[derive_where(Debug)]
struct GlobalState {
/// A globally-shared reference to the multi-progress bar.
///
/// All progress bars must be added to this multi-progress bar. This ensures that the progress
/// bars are displayed in a way that does not interfere with tracing log output.
mp: MultiProgress,
#[derivative(Debug = "ignore")]
#[derive_where(skip)]
bars: Vec<WeakProgressBar>,
_tick_thread: std::thread::JoinHandle<()>,
ongoing_tasks: Vec<JoinHandle<Result>>,
@ -139,7 +135,7 @@ pub async fn complete_tasks() -> Result {
break;
}
info!("Found {} tasks to wait upon.", tasks.len());
try_join_all(tasks, AsyncPolicy::FutureParallelism).await?;
futures::future::try_join_all(tasks).await?;
}
debug!("All pending tasks have been completed.");
Ok(())

View File

@ -21,7 +21,7 @@ pub mod web;
/// Inputs content is discarded.
pub async fn read_length(mut read: impl AsyncRead + Unpin) -> Result<u64> {
let mut sink = tokio::io::sink();
tokio::io::copy(&mut read, &mut sink).anyhow_err().await
Ok(tokio::io::copy(&mut read, &mut sink).await?)
}
/// Get the the response body as a byte stream.

View File

@ -1,14 +1,11 @@
// === Features ===
#![allow(incomplete_features)]
#![feature(try_blocks)]
#![feature(result_flattening)]
#![feature(const_fmt_arguments_new)]
#![feature(hash_set_entry)]
#![feature(let_chains)]
#![feature(min_specialization)]
#![feature(exit_status_error)]
#![feature(associated_type_defaults)]
#![feature(associated_type_bounds)]
#![feature(exact_size_is_empty)]
#![feature(async_closure)]
#![feature(type_alias_impl_trait)]
@ -16,8 +13,6 @@
#![feature(string_remove_matches)]
#![feature(duration_constants)]
#![feature(const_trait_impl)]
#![feature(extend_one)]
#![feature(lazy_cell)]
// === Non-Standard Linter Configuration ===
#![warn(unused_qualifications)]
@ -60,14 +55,12 @@ pub mod prelude {
pub use async_trait::async_trait;
pub use bytes::Bytes;
pub use derivative::Derivative;
pub use derive_more::Display;
pub use derive_where::derive_where;
pub use itertools::Itertools;
pub use lazy_static::lazy_static;
pub use octocrab::Octocrab;
pub use path_absolutize::*;
pub use platforms::target::Arch;
pub use platforms::target::OS;
pub use semver::Version;
pub use tokio::io::AsyncWriteExt as _;
pub use url::Url;
@ -79,6 +72,8 @@ pub mod prelude {
pub use crate::github::release::IsRelease;
pub use crate::github::repo::IsRepo;
pub use crate::log::setup_logging;
pub use crate::os::target::Arch;
pub use crate::os::target::OS;
pub use crate::os::target::TARGET_ARCH;
pub use crate::os::target::TARGET_OS;
pub use crate::program::command::provider::CommandProvider;

View File

@ -1,189 +1,91 @@
//! Constants describing the target operating system and architecture.
//!
//! Based upon the [`platforms-2.0.0`](https://docs.rs/platforms/2.0.0/platforms/target/enum.OS.html) crate. Future crate versions dropped support for this in favor
//! of [`std::env::consts::OS`] and [`std::env::consts::ARCH`] -- these however are just string
//! literals, rather than enums (that are much more useful compile-time). Thus we put back this
//! functionality here.
use crate::prelude::*;
use std::fmt;
pub const TARGET_ARCH: Arch = Arch::from_str(std::env::consts::ARCH);
pub const TARGET_OS: OS = OS::from_str(std::env::consts::OS);
#[derive(Debug, PartialEq, Eq, Clone, Copy, serde::Serialize, serde::Deserialize)]
pub enum Arch {
AArch64,
Wasm32,
X86,
X86_64,
}
// Copied from platforms-2.0.0 crate
// https://github.com/rustsec/rustsec
//
// Copyright (c) 2018-2020 The Rust Secure Code Working Group
//
// Permission is hereby granted, free of charge, to any
// person obtaining a copy of this software and associated
// documentation files (the "Software"), to deal in the
// Software without restriction, including without
// limitation the rights to use, copy, modify, merge,
// publish, distribute, sublicense, and/or sell copies of
// the Software, and to permit persons to whom the Software
// is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice
// shall be included in all copies or substantial portions
// of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
impl Arch {
pub const fn from_str(s: &str) -> Arch {
match s.as_bytes() {
b"aarch64" => Arch::AArch64,
b"wasm32" => Arch::Wasm32,
b"x86" => Arch::X86,
b"x86_64" => Arch::X86_64,
_ => panic!("Unsupported target architecture."),
}
}
pub const fn as_str(self) -> &'static str {
match self {
Arch::AArch64 => "aarch64",
Arch::Wasm32 => "wasm32",
Arch::X86 => "x86",
Arch::X86_64 => "amd64",
}
}
}
// Detect and expose `target_os` as a constant
// Whether this is a good idea is somewhat debatable
impl fmt::Display for Arch {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(self.as_str())
}
}
#[derive(Debug, PartialEq, Eq, Clone, Copy, serde::Serialize, serde::Deserialize)]
pub enum OS {
Windows,
Linux,
MacOS,
}
impl fmt::Display for OS {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(self.as_str())
}
}
#[cfg(target_arch = "aarch64")]
/// `target_arch` when building this crate: `x86_64`
pub const TARGET_ARCH: Arch = Arch::AArch64;
impl OS {
pub const fn from_str(s: &str) -> OS {
match s.as_bytes() {
b"windows" => OS::Windows,
b"linux" => OS::Linux,
b"macos" => OS::MacOS,
_ => panic!("Unsupported target OS."),
}
}
#[cfg(target_arch = "arm")]
/// `target_arch` when building this crate: `arm`
pub const TARGET_ARCH: Arch = Arch::Arm;
pub const fn as_str(self) -> &'static str {
match self {
OS::Windows => "windows",
OS::Linux => "linux",
OS::MacOS => "macos",
}
}
#[cfg(target_arch = "asmjs")]
/// `target_arch` when building this crate: `asmjs`
pub const TARGET_ARCH: Arch = Arch::AsmJs;
pub const fn exe_suffix(self) -> &'static str {
match self {
OS::Windows => ".exe",
OS::Linux => "",
OS::MacOS => "",
}
}
}
#[cfg(target_arch = "mips")]
/// `target_arch` when building this crate: `mips`
pub const TARGET_ARCH: Arch = Arch::Mips;
#[cfg(test)]
mod tests {
use super::*;
#[cfg(target_arch = "mips64")]
/// `target_arch` when building this crate: `mips64`
pub const TARGET_ARCH: Arch = Arch::Mips64;
#[cfg(target_arch = "msp430")]
/// `target_arch` when building this crate: `msp430`
pub const TARGET_ARCH: Arch = Arch::Msp430;
#[cfg(target_arch = "nvptx64")]
/// `target_arch` when building this crate: `nvptx64`
pub const TARGET_ARCH: Arch = Arch::Nvptx64;
#[cfg(target_arch = "powerpc")]
/// `target_arch` when building this crate: `powerpc`
pub const TARGET_ARCH: Arch = Arch::PowerPc;
#[cfg(target_arch = "powerpc64")]
/// `target_arch` when building this crate: `powerpc64`
pub const TARGET_ARCH: Arch = Arch::PowerPc64;
#[cfg(target_arch = "riscv")]
/// `target_arch` when building this crate: `riscv`
pub const TARGET_ARCH: Arch = Arch::RiscV;
#[cfg(target_arch = "s390x")]
/// `target_arch` when building this crate: `s390x`
pub const TARGET_ARCH: Arch = Arch::S390X;
#[cfg(target_arch = "sparc")]
/// `target_arch` when building this crate: `sparc`
pub const TARGET_ARCH: Arch = Arch::Sparc;
#[cfg(target_arch = "sparc64")]
/// `target_arch` when building this crate: `sparc64`
pub const TARGET_ARCH: Arch = Arch::Sparc64;
#[cfg(target_arch = "wasm32")]
/// `target_arch` when building this crate: `wasm32`
pub const TARGET_ARCH: Arch = Arch::Wasm32;
#[cfg(target_arch = "x86")]
/// `target_arch` when building this crate: `x86`
pub const TARGET_ARCH: Arch = Arch::X86;
#[cfg(target_arch = "x86_64")]
/// `target_arch` when building this crate: `x86_64`
pub const TARGET_ARCH: Arch = Arch::X86_64;
#[cfg(target_os = "android")]
/// `target_os` when building this crate: `android`
pub const TARGET_OS: OS = OS::Android;
#[cfg(target_os = "cuda")]
/// `target_os` when building this crate: `cuda`
pub const TARGET_OS: OS = OS::Cuda;
#[cfg(target_os = "dragonfly")]
/// `target_os` when building this crate: `dragonfly`
pub const TARGET_OS: OS = OS::Dragonfly;
#[cfg(target_os = "emscripten")]
/// `target_os` when building this crate: `emscripten`
pub const TARGET_OS: OS = OS::Emscripten;
#[cfg(target_os = "freebsd")]
/// `target_os` when building this crate: `freebsd`
pub const TARGET_OS: OS = OS::FreeBSD;
#[cfg(target_os = "fuchsia")]
/// `target_os` when building this crate: `fuchsia`
pub const TARGET_OS: OS = OS::Fuchsia;
#[cfg(target_os = "haiku")]
/// `target_os` when building this crate: `haiku`
pub const TARGET_OS: OS = OS::Haiku;
#[cfg(target_os = "hermit")]
/// `target_os` when building this crate: `hermit`
pub const TARGET_OS: OS = OS::Hermit;
#[cfg(target_os = "illumos")]
/// `target_os` when building this crate: `illumos`
pub const TARGET_OS: OS = OS::Illumos;
#[cfg(target_os = "ios")]
/// `target_os` when building this crate: `ios`
pub const TARGET_OS: OS = OS::iOS;
#[cfg(target_os = "linux")]
/// `target_os` when building this crate: `linux`
pub const TARGET_OS: OS = OS::Linux;
#[cfg(target_os = "macos")]
/// `target_os` when building this crate: `macos`
pub const TARGET_OS: OS = OS::MacOS;
#[cfg(target_os = "netbsd")]
/// `target_os` when building this crate: `netbsd`
pub const TARGET_OS: OS = OS::NetBSD;
#[cfg(target_os = "openbsd")]
/// `target_os` when building this crate: `openbsd`
pub const TARGET_OS: OS = OS::OpenBSD;
#[cfg(target_os = "redox")]
/// `target_os` when building this crate: `redox`
pub const TARGET_OS: OS = OS::Redox;
#[cfg(target_os = "solaris")]
/// `target_os` when building this crate: `solaris`
pub const TARGET_OS: OS = OS::Solaris;
#[cfg(target_os = "tvos")]
/// `target_os` when building this crate: `tvos`
pub const TARGET_OS: OS = OS::TvOS;
#[cfg(target_os = "wasi")]
/// `target_os` when building this crate: `wasi`
pub const TARGET_OS: OS = OS::Wasi;
#[cfg(target_os = "windows")]
/// `target_os` when building this crate: `windows`
pub const TARGET_OS: OS = OS::Windows;
#[cfg(target_os = "vxworks")]
/// `target_os` when building this crate: `vxworks`
pub const TARGET_OS: OS = OS::VxWorks;
#[test]
fn same_on_target() {
assert_eq!(std::env::consts::EXE_SUFFIX, TARGET_OS.exe_suffix());
}
}

View File

@ -63,7 +63,7 @@ pub trait Program: Sized + 'static {
///
/// The lookup locations are program-defined, they typically include Path environment variable
/// and program-specific default locations.
fn lookup(&self) -> anyhow::Result<Location<Self>> {
fn lookup(&self) -> Result<Location<Self>> {
Resolver::<Self>::new(self.executable_names(), self.default_locations())?
.lookup()
.map(Location::new)
@ -112,7 +112,7 @@ pub trait Program: Sized + 'static {
}
fn handle_exit_status(status: std::process::ExitStatus) -> Result {
status.exit_ok().anyhow_err()
Ok(status.exit_ok()?)
}
/// Command that prints to stdout the version of given program.

View File

@ -81,7 +81,7 @@ pub trait MyCommand<P: Program>: BorrowMut<Command> + From<Command> + Into<Comma
}
fn spawn(&mut self) -> Result<Child> {
self.borrow_mut().spawn().anyhow_err()
self.borrow_mut().spawn()
}
}
@ -298,7 +298,7 @@ impl Debug for Command {
impl Command {
pub fn new<S: AsRef<OsStr>>(program: S) -> Command {
let inner = tokio::process::Command::new(program);
let status_checker = Arc::new(|status: ExitStatus| status.exit_ok().anyhow_err());
let status_checker = Arc::new(|status: ExitStatus| Ok(status.exit_ok()?));
Self { inner, status_checker, pretty_name: None }
}

View File

@ -28,7 +28,7 @@ impl IsVersion for Version {
let matched =
SEMVER_REGEX.find(text).context("No semver-like substring found within the text.")?;
let version_text = matched.as_str();
Version::from_str(version_text)
Ok(Version::from_str(version_text)?)
}
}

View File

@ -99,8 +99,8 @@ pub async fn compare_env(
f(&mut cmd);
add_next_command(&mut cmd, ["set"]);
let output = cmd.output_ok().await?;
let outputs =
split_command_outputs(&output.stdout).map(std::str::from_utf8).try_collect_vec()?;
let outputs: Vec<_> =
split_command_outputs(&output.stdout).map(std::str::from_utf8).try_collect()?;
ensure!(outputs.len() == 3, "Expected outputs from all 3 commands!");

View File

@ -578,7 +578,7 @@ impl RunOptions {
#[derive(Clone, Display, Debug, PartialEq, Eq, Hash)]
pub struct ImageId(pub String);
impl std::str::FromStr for ImageId {
impl FromStr for ImageId {
type Err = anyhow::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
@ -589,7 +589,7 @@ impl std::str::FromStr for ImageId {
#[derive(Clone, Debug, Display, Deref, AsRef)]
pub struct ContainerId(pub String);
impl std::str::FromStr for ContainerId {
impl FromStr for ContainerId {
type Err = anyhow::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
@ -646,7 +646,7 @@ mod tests {
fn get_kernel_version() -> Result<u32> {
let ret = sysinfo::System::kernel_version()
.with_context(|| "Failed to get OS kernel version.")?
.parse2()?;
.parse()?;
debug!("OS kernel version: {ret}.");
Ok(ret)
}

View File

@ -207,7 +207,7 @@ impl Context {
let hash = fields.next().context("Missing hash.")?;
let refs = fields.next().context("Missing refs.")?;
let refs = refs_from_decoration(&refs);
let refs = refs.into_iter().map(|s| s.parse2()).try_collect()?;
let refs = refs.into_iter().map(|s| s.parse()).try_collect()?;
Ok(LogEntry { hash, refs })
})
.try_collect()
@ -333,14 +333,14 @@ pub struct RemoteLsEntry {
}
/// Construct from a line of output of `git ls-remote`.
impl std::str::FromStr for RemoteLsEntry {
impl FromStr for RemoteLsEntry {
type Err = anyhow::Error;
#[context("Failed to parse remote ls entry from string: {}", line)]
fn from_str(line: &str) -> std::result::Result<Self, Self::Err> {
let mut parts = line.split_whitespace();
let hash = parts.next().context("Missing hash")?.to_string();
let r#ref = parts.next().context("Missing reference")?.parse2()?;
let r#ref = parts.next().context("Missing reference")?.parse()?;
ensure!(parts.next().is_none(), "Unexpected trailing extra parts.");
Ok(Self { hash, r#ref })
}

View File

@ -12,8 +12,8 @@ use crate::prelude::*;
///
/// # Examples
/// ```
/// use enso_build_base::prelude::FromString;
/// use ide_ci::programs::git::Ref;
/// use std::str::FromStr;
/// let reference = Ref::from_str("refs/heads/master").unwrap();
/// assert_eq!(reference, Ref::Branch { name: "master".into() });
/// ```
@ -51,7 +51,7 @@ pub enum Ref {
},
}
impl std::str::FromStr for Ref {
impl FromStr for Ref {
type Err = anyhow::Error;
/// Parse the reference from the full decorated name, like `refs/heads/main`.

View File

@ -77,11 +77,11 @@ mod tests {
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Deref)]
pub struct LanguageVersion(pub u8);
impl std::str::FromStr for LanguageVersion {
impl FromStr for LanguageVersion {
type Err = anyhow::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
s.parse2::<u8>().map(LanguageVersion)
Ok(LanguageVersion(s.parse()?))
}
}

View File

@ -15,7 +15,7 @@ impl Program for Robocopy {
fn handle_exit_status(status: std::process::ExitStatus) -> Result {
match status.code() {
None => status.exit_ok().anyhow_err(),
None => Ok(status.exit_ok()?),
Some(code) if code >= 8 => bail!("Exit with code {}.", code),
Some(_) => Ok(()),
}

View File

@ -56,10 +56,10 @@ impl Program for WasmOpt {
#[derive(Clone, Copy, Debug, Display, PartialEq, PartialOrd, Deref, Eq)]
pub struct Version(pub u32);
impl std::str::FromStr for Version {
type Err = <u32 as std::str::FromStr>::Err;
impl FromStr for Version {
type Err = <u32 as FromStr>::Err;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
std::str::FromStr::from_str(s).map(Self)
Ok(Self(s.parse()?))
}
}
@ -68,7 +68,7 @@ impl IsVersion for Version {
let number_regex = regex::Regex::new(r#"\d+"#)?;
let number_match = number_regex.find(text).context("No number in the given text.")?;
let number_text = number_match.as_str();
number_text.parse2()
Ok(number_text.parse()?)
}
}

View File

@ -112,11 +112,12 @@ pub mod via_string {
ser.collect_str(value)
}
/// Deserializer, that uses [`FromString`] trait.
/// Deserializer, that uses [`FromStr`] trait.
pub fn deserialize<'de, D, T>(de: D) -> std::result::Result<T, D::Error>
where
D: Deserializer<'de>,
T: FromString, {
T: FromStr,
T::Err: Display, {
let text = String::deserialize(de)?;
T::from_str(&text).map_err(D::Error::custom)
}
@ -138,11 +139,12 @@ pub mod via_string_opt {
}
}
/// Deserializer, that uses [`FromString`] trait.
/// Deserializer, that uses [`FromStr`] trait.
pub fn deserialize<'de, D, T>(de: D) -> std::result::Result<Option<T>, D::Error>
where
D: Deserializer<'de>,
T: FromString, {
T: FromStr,
T::Err: Display, {
let text = Option::<String>::deserialize(de)?;
if let Some(text) = text {
T::from_str(&text).map(Some).map_err(D::Error::custom)

View File

@ -1,22 +1,18 @@
[package]
name = "enso-build-cli"
version = "0.1.0"
edition = "2021"
version = "0.1.0"
default-run = "enso-build-cli"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
anyhow = { workspace = true }
clap = { workspace = true }
derivative = { workspace = true }
derive-where = { workspace = true }
enso-build-base = { path = "../base" }
enso-build = { path = "../build" }
enso-formatter = { path = "../enso-formatter" }
futures-util = "0.3.17"
futures-util = { workspace = true }
ide-ci = { path = "../ci_utils" }
octocrab = { workspace = true }
tokio = { workspace = true }
tracing = { workspace = true }
[lints]
workspace = true

View File

@ -10,8 +10,7 @@ use clap::Args;
use clap::Parser;
use clap::Subcommand;
use clap::ValueEnum;
use derivative::Derivative;
use enso_build_base::extensions::path::display_fmt;
use derive_where::derive_where;
use ide_ci::cache;
use ide_ci::github::Repo;
use octocrab::models::RunId;
@ -184,8 +183,8 @@ pub struct Cli {
/// Describe where to get a target artifacts from.
///
/// This is the CLI representation of a [crate::source::Source] for a given target.
#[derive(Args, Clone, Debug, Derivative)]
#[derivative(PartialEq)]
#[derive(Args, Clone, Debug)]
#[derive_where(PartialEq)]
#[group(skip)]
pub struct Source<Target: IsTargetSource> {
/// How the given target should be acquired.
@ -220,11 +219,9 @@ pub struct Source<Target: IsTargetSource> {
pub release: Option<String>,
/// Used when `SourceKind::Build` is used.
#[derivative(PartialEq(bound = ""))]
#[clap(flatten)]
pub build_args: BuildDescription<Target>,
#[derivative(PartialEq(bound = ""))]
#[clap(flatten)]
pub output_path: OutputPath<Target>,
}
@ -245,15 +242,14 @@ pub enum SourceKind {
}
/// Strongly typed argument for an output directory of a given build target.
#[derive(Args, Clone, Derivative)]
#[derive(Args, Clone)]
#[group(skip)]
#[derivative(Debug, PartialEq)]
#[derive_where(Debug, PartialEq)]
pub struct OutputPath<Target: IsTargetSource> {
/// Directory where artifacts should be placed.
#[derivative(Debug(format_with = "display_fmt"))]
#[clap(name = Target::OUTPUT_PATH_NAME, long, value_parser(normalize_path), default_value = Target::DEFAULT_OUTPUT_PATH, enso_env())]
pub output_path: PathBuf,
#[derivative(Debug = "ignore", PartialEq(bound = ""))]
#[derive_where(skip(Debug))]
#[allow(missing_docs)]
#[clap(skip)]
pub phantom: PhantomData<Target>,
@ -265,11 +261,10 @@ impl<Target: IsTargetSource> AsRef<Path> for OutputPath<Target> {
}
}
#[derive(Args, Clone, Derivative)]
#[derive(Args, Clone)]
#[group(skip)]
#[derivative(Debug, PartialEq)]
#[derive_where(Debug, PartialEq)]
pub struct BuildDescription<Target: IsTargetSource> {
#[derivative(PartialEq(bound = ""))]
#[clap(flatten)]
pub input: Target::BuildInput,
// Cumbersome way of defining a bool argument that can take explicit value.
@ -286,9 +281,8 @@ pub struct BuildDescription<Target: IsTargetSource> {
pub upload_artifact: bool,
}
#[derive(Args, Clone, PartialEq, Derivative)]
#[derive(Args, Clone, PartialEq, Debug)]
#[group(skip)]
#[derivative(Debug)]
pub struct BuildJob<Target: IsTargetSource> {
#[clap(flatten)]
pub input: BuildDescription<Target>,
@ -296,9 +290,8 @@ pub struct BuildJob<Target: IsTargetSource> {
pub output_path: OutputPath<Target>,
}
#[derive(Args, Clone, PartialEq, Derivative)]
#[derive(Args, Clone, PartialEq, Debug)]
#[group(skip)]
#[derivative(Debug)]
pub struct WatchJob<Target: IsWatchableSource> {
#[clap(flatten)]
pub build: BuildJob<Target>,
@ -308,8 +301,5 @@ pub struct WatchJob<Target: IsWatchableSource> {
/// Clap parser supporting a given set of [`OS`] values.
pub fn possible_os_parser(possible_os: &[OS]) -> impl TypedValueParser<Value = OS> {
PossibleValuesParser::new(possible_os.iter().map(|os| os.as_str()))
// Unwrap below is safe, because it is symmetric to the `as_str` conversion above, and
// we'll get only the values that were generated from the `possible_os` array.
.map(|s| s.parse::<OS>().unwrap())
PossibleValuesParser::new(possible_os.iter().map(|os| os.as_str())).map(|s| OS::from_str(&s))
}

View File

@ -14,16 +14,13 @@ use octocrab::models::ReleaseId;
source_args_hlp!(Target, "ide", BuildInput);
#[derive(Args, Clone, Debug, Derivative)]
#[derive(Args, Clone, Debug, PartialEq)]
#[group(skip)]
#[derivative(PartialEq)]
pub struct BuildInput {
#[derivative(PartialEq(bound = ""))]
#[clap(flatten)]
pub gui: Source<Gui>,
#[clap(flatten)]
pub project_manager: Source<Backend>,
#[derivative(PartialEq(bound = ""))]
#[clap(flatten)]
pub output_path: OutputPath<Target>,
/// Override the default target for electron-builder. E.g. pass `dir` for unpacked directory

View File

@ -31,7 +31,6 @@ use crate::arg::WatchJob;
use anyhow::Context;
use arg::BuildDescription;
use clap::Parser;
use derivative::Derivative;
use enso_build::config::Config;
use enso_build::context::BuildContext;
use enso_build::engine::context::EnginePackageProvider;
@ -91,8 +90,7 @@ define_env_var! {
}
/// The basic, common information available in this application.
#[derive(Clone, Derivative)]
#[derivative(Debug)]
#[derive(Clone, Debug)]
pub struct Processor {
pub context: BuildContext,
}
@ -192,12 +190,12 @@ impl Processor {
) -> BoxFuture<'static, Result<ReleaseSource>> {
let repository = self.remote_repo.clone();
let release = self.resolve_release_designator(designator);
release
.and_then_sync(move |release| {
let asset = target.find_asset(&release)?;
Ok(ReleaseSource { repository, asset_id: asset.id })
})
.boxed()
async move {
let release = release.await?;
let asset = target.find_asset(&release)?;
Ok(ReleaseSource { repository, asset_id: asset.id })
}
.boxed()
}
pub fn js_build_info(&self) -> BoxFuture<'static, Result<gui::BuildInfo>> {
@ -640,22 +638,21 @@ impl Resolvable for Backend {
) -> BoxFuture<'static, Result<<Self as IsTarget>::BuildInput>> {
let arg::backend::BuildInput { runtime } = from;
let versions = ctx.triple.versions.clone();
let context = ctx.context.inner.clone();
ctx.resolve(Runtime, runtime)
.and_then_sync(|runtime| {
let external_runtime = runtime.to_external().map(move |external| {
Arc::new(move || {
Runtime
.get_external(context.clone(), external.clone())
.map_ok(|artifact| artifact.into_inner())
.boxed()
}) as Arc<EnginePackageProvider>
});
Ok(backend::BuildInput { external_runtime, versions })
})
.boxed()
let runtime_future = ctx.resolve(Runtime, runtime);
async {
let runtime = runtime_future.await?;
let external_runtime = runtime.to_external().map(move |external| {
Arc::new(move || {
Runtime
.get_external(context.clone(), external.clone())
.map_ok(|artifact| artifact.into_inner())
.boxed()
}) as Arc<EnginePackageProvider>
});
Ok(backend::BuildInput { external_runtime, versions })
}
.boxed()
}
}

View File

@ -14,7 +14,6 @@
#![deny(keyword_idents)]
#![deny(macro_use_extern_crate)]
#![deny(missing_abi)]
#![deny(pointer_structural_match)]
#![deny(unsafe_op_in_unsafe_fn)]
#![deny(unconditional_recursion)]
#![warn(absolute_paths_not_starting_with_crate)]
@ -420,7 +419,7 @@ pub fn discover_paths_internal(
let is_main_dir = dir_name.contains(&"bin"); // || dir_name == Some(OsStr::new("tests"));
let sub_paths = fs::read_dir(path)?;
for sub_path in sub_paths {
discover_paths_internal(vec, &sub_path?.path(), is_main_dir)?;
discover_paths_internal(vec, sub_path?.path(), is_main_dir)?;
}
} else if md.is_file() && path.extension().contains(&"rs") {
let is_main_file = path

View File

@ -16,15 +16,8 @@ strum = { workspace = true }
sysinfo = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
windows = { version = "0.53.0", features = [
"Win32",
"Win32_UI",
"Win32_UI_Shell",
"Win32_System",
"Win32_System_LibraryLoader",
"Win32_Foundation",
"Win32_System_Com",
] }
windows = { workspace = true }
derive_more = { workspace = true }
[target.'cfg(windows)'.dependencies]
mslnk = "0.1.8"

View File

@ -5,7 +5,7 @@ edition = "2021"
[dependencies]
anyhow = { workspace = true }
byte-unit = { workspace = true }
bytesize = "1.3.0"
chrono = { workspace = true }
enso-install = { path = ".." }
enso-install-config = { path = "../config" }

View File

@ -1,8 +1,5 @@
//! This crate implements the Windows installer for the Enso IDE.
// === Features ===
#![feature(lazy_cell)]
use enso_install::prelude::*;
use enso_install::access_built_time_env;

View File

@ -222,15 +222,15 @@ pub fn check_disk_space(
.find(|disk| path.starts_with(disk.mount_point()))
.context("No disk information found for the installation directory.")?;
let required_space = byte_unit::Byte::from_u64(bytes_required);
let free_space = byte_unit::Byte::from_u64(disk.available_space());
let required_space = bytesize::ByteSize(bytes_required);
let free_space = bytesize::ByteSize(disk.available_space());
if free_space < required_space {
let msg = format!(
"Not enough disk space on {} to install. Required: {:.2}, available: {:.2}.",
disk.mount_point().display(),
required_space.get_appropriate_unit(byte_unit::UnitType::Binary),
free_space.get_appropriate_unit(byte_unit::UnitType::Binary)
required_space,
free_space
);
return Ok(Some(msg));
}

View File

@ -1,10 +1,5 @@
//! This crate is linked in both by the installer and the uninstaller.
// === Features ===
#![feature(lazy_cell)]
pub mod prelude {
pub use ide_ci::prelude::*;

View File

@ -10,7 +10,7 @@ anyhow = { workspace = true }
enso-install = { path = ".." }
enso-install-config = { path = "../config" }
ide-ci = { path = "../../ci_utils" }
self-replace = "1.3.7"
self-replace = "1.4.0"
sysinfo = { workspace = true }
tokio = { workspace = true }

View File

@ -13,7 +13,7 @@ proc-macro2 = { workspace = true }
quote = { workspace = true }
regex = { workspace = true }
serde_yaml = { workspace = true }
syn_1 = { workspace = true }
syn = { workspace = true }
derive_more = { workspace = true }
[lints]

View File

@ -1,25 +1 @@
// === Features ===
#![feature(const_trait_impl)]
#![feature(string_remove_matches)]
#![feature(once_cell_try)]
mod prelude {
pub use derive_more::*;
pub use enso_build_base::prelude::*;
pub use convert_case::Case;
pub use convert_case::Casing;
pub use itertools::Itertools;
pub use proc_macro2::Span;
pub use proc_macro2::TokenStream;
pub use quote::quote;
pub use syn::Data;
pub use syn::DeriveInput;
pub use syn::Ident;
pub use syn_1 as syn;
}
pub mod paths;
pub mod program_args;

View File

@ -1,7 +1,14 @@
use crate::prelude::*;
use convert_case::Case;
use convert_case::Casing;
use derive_more::*;
use enso_build_base::prelude::*;
use itertools::Itertools;
use proc_macro2::Span;
use proc_macro2::TokenStream;
use quote::quote;
use syn::Ident;
use regex::Regex;
use std::cell::OnceCell;
use std::iter::zip;
@ -12,9 +19,7 @@ fn normalize_ident(ident: impl AsRef<str>, case: Case) -> Ident {
let normalized_text = if base == "." {
String::from("Paths")
} else {
let mut ret = base.replace(|c| matches!(c, '-' | '.' | ' '), "_");
ret.remove_matches(|c| matches!(c, '<' | '>'));
ret
base.replace(['-', '.', ' '], "_").replace(['<', '>'], "")
};
Ident::new(&normalized_text.to_case(case), Span::call_site())
@ -103,12 +108,11 @@ pub fn get_string(
pub struct Generator<'a> {
all_nodes: &'a [&'a Node],
stack: Vec<&'a Node>,
empty_set: BTreeSet<Ident>,
}
impl<'a> Generator<'a> {
pub fn new(all_nodes: &'a [&'a Node]) -> Self {
Self { all_nodes, stack: default(), empty_set: default() }
Self { all_nodes, stack: default() }
}
pub fn resolve(&self, r#type: &str) -> Result<&Node> {
@ -160,24 +164,16 @@ impl<'a> Generator<'a> {
let parameter_vars = last_node.all_parameters_vars(self)?;
let own_parameter_vars = last_node.own_parameter_vars();
let parent_parameter_vars: BTreeSet<_> =
full_path.iter().flat_map(|n| n.own_parameter_vars()).collect();
let child_parameter_vars: BTreeSet<_> = last_node
.children()
let mut child_parameter_vars = BTreeSet::new();
for node in last_node.children() {
child_parameter_vars.extend(node.all_parameters_vars(self)?.iter().cloned())
}
let all_parameters: BTreeSet<_> = full_path
.iter()
.map(|node| node.all_parameters_vars(self))
.try_collect_vec()?
.into_iter()
.flatten()
.cloned()
.flat_map(|n| n.own_parameter_vars())
.chain(child_parameter_vars.iter().cloned())
.collect();
let all_parameters = {
let mut v = parent_parameter_vars;
v.extend(child_parameter_vars.clone());
v
};
let mut segment_names = vec![];
for i in 0..full_path.len() {
@ -190,7 +186,7 @@ impl<'a> Generator<'a> {
});
}
let children_init = zip(last_node.children(), &children_struct)
let children_init: Vec<_> = zip(last_node.children(), &children_struct)
.map(|(child, children_struct)| {
Result::Ok(if let Some(r#_type) = child.r#type.as_ref() {
let resolved_type = self.resolve(r#_type)?;
@ -208,7 +204,7 @@ impl<'a> Generator<'a> {
}
})
})
.try_collect_vec()?;
.try_collect()?;
let opt_conversions = if parameter_vars.is_empty() {
quote! {
@ -303,24 +299,21 @@ impl<'a> Generator<'a> {
#[derive(Clone, Debug, PartialEq, Deref)]
pub struct Node {
#[deref]
value: String,
/// All parameters needed for this node (directly and for the children).
parameters: OnceCell<BTreeSet<Ident>>, // Wasteful but paths won't be that huge.
value: String,
/// The name that replaces value in variable-like contexts.
/// Basically, we might not want to use the file path name as a name in the code.
var_name: Option<String>,
shape: Shape,
r#type: Option<String>,
var_name: Option<String>,
shape: Shape,
r#type: Option<String>,
}
impl Node {
pub fn new(value: impl AsRef<str>) -> Self {
let shape = Shape::new(value.as_ref());
let value = value.as_ref().trim_end_matches('/').to_string();
let parameters = default();
let r#type = default();
let var_name = default();
Self { var_name, parameters, shape, value, r#type }
Self { var_name, shape, value, r#type }
}
#[context("Failed to process node from key: {}", serde_yaml::to_string(value).unwrap())]
@ -353,29 +346,23 @@ impl Node {
pub fn type_dependent_parameters_vars<'a>(
&'a self,
g: &'a Generator,
) -> Result<&'a BTreeSet<Ident>> {
) -> Result<BTreeSet<Ident>> {
if let Some(r#type) = &self.r#type {
let resolved_type = g.resolve(r#type)?;
resolved_type.all_parameters_vars(g)
} else {
Ok(&g.empty_set)
Ok(default())
}
}
pub fn all_parameters_vars(&self, g: &Generator) -> Result<&BTreeSet<Ident>> {
self.parameters.get_or_try_init(|| {
let mut ret = BTreeSet::new();
for child in self.children() {
ret.extend(child.all_parameters_vars(g)?.clone());
}
ret.extend(self.own_parameter_vars());
ret.extend(self.type_dependent_parameters_vars(g)?.clone());
Ok(ret)
})
// let mut ret = BTreeSet::new();
// ret.extend(self.parameters.iter().sorted().map(to_ident));
// ret.extend(self.type_dependent_parameters_vars(g)?);
// Ok(ret)
pub fn all_parameters_vars(&self, g: &Generator) -> Result<BTreeSet<Ident>> {
let mut ret = BTreeSet::new();
for child in self.children() {
ret.extend(child.all_parameters_vars(g)?);
}
ret.extend(self.own_parameter_vars());
ret.extend(self.type_dependent_parameters_vars(g)?);
Ok(ret)
}
pub fn own_parameters(&self) -> impl IntoIterator<Item = &str> {
@ -389,22 +376,9 @@ impl Node {
pub fn children_parameters(&self, g: &Generator) -> Result<BTreeSet<Ident>> {
let mut ret = BTreeSet::new();
for child in self.children() {
ret.extend(child.all_parameters_vars(g)?.clone());
ret.extend(child.all_parameters_vars(g)?);
}
Ok(ret)
// let resolved_type_params = if let Some(r#type) = &self.r#type {
// if let Ok(r#type) = g.resolve(r#type) {
// // TODO: This might not work for parameters that are type-introduced in the
// subtree // of the resolved type.
// r#type.all_parameters_vars(g).iter().map(to_ident).collect_vec()
// } else {
// warn!(%r#type, "Failed to resolve type.");
// default()
// }
// } else {
// default()
// };
// direct_child_params.chain(resolved_type_params).collect()
}
pub fn children(&self) -> &[Node] {

View File

@ -1,214 +0,0 @@
use crate::prelude::*;
/// Describes how enum variants are rendered as command-line flags.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Policy {
    /// The case used when printing a variant name as a `--flag`.
    pub flag_case: Case,
    /// Optional custom format override.
    /// NOTE(review): this field appears unread in this file — confirm intended use.
    pub format: Option<String>,
}

impl Default for Policy {
    fn default() -> Self {
        Self::default_const()
    }
}

impl Policy {
    /// A `const`-evaluable default: kebab-case flags, no custom format.
    pub const fn default_const() -> Self {
        Self { flag_case: Case::Kebab, format: None }
    }
}
/// Code generator for the `Arg` derive macro.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Generator<'a> {
    /// The parsed derive input the macro was invoked on.
    pub input: &'a DeriveInput,
    /// Stack of formatting policies; the last entry is the active one.
    pub policy: Vec<Policy>,
}

impl<'a> Generator<'a> {
    /// Create a generator with the default formatting policy active.
    pub fn new(input: &'a DeriveInput) -> Self {
        Self { input, policy: vec![Default::default()] }
    }

    /// The currently active policy, falling back to the default when the stack is empty.
    pub fn current_policy(&self) -> &Policy {
        static DEFAULT_POLICY: Policy = Policy::default_const();
        self.policy.last().unwrap_or(&DEFAULT_POLICY)
    }

    /// Render `name` as a command-line flag, e.g. `BarBaz` -> `--bar-baz`.
    pub fn format_flag(&mut self, name: impl ToString) -> String {
        format!("--{}", name.to_string().to_case(self.current_policy().flag_case))
    }

    /// Produce the derive expansion. Only enums are supported.
    pub fn generate(self) -> TokenStream {
        match &self.input.data {
            Data::Enum(e) => EnumGenerator::new(self, e).generate(),
            // Structs and unions are not (yet) supported by this derive.
            _ => unimplemented!("`Arg` can only be derived for enums"),
        }
    }
}
/// Generator handling `enum` inputs of the `Arg` derive.
#[derive(Clone, Debug, PartialEq, Eq, Deref, DerefMut)]
pub struct EnumGenerator<'a> {
    /// Shared generator state (derive input and policy stack).
    #[deref]
    #[deref_mut]
    pub generator: Generator<'a>,
    /// The enum portion of the derive input's AST.
    pub enum_data: &'a syn::DataEnum,
}

impl<'a> EnumGenerator<'a> {
    /// Pair a generator with the enum data it will expand.
    pub fn new(generator: Generator<'a>, enum_data: &'a syn::DataEnum) -> Self {
        Self { generator, enum_data }
    }

    /// Generate output for enum where all variants are units.
    ///
    /// In such case every variant can be converted to OsStr.
    /// An iterator is just a single occurrence of the string.
    pub fn generate_plain(&mut self) -> TokenStream {
        let name = &self.generator.input.ident;
        let variant_names =
            self.enum_data.variants.iter().map(|variant| &variant.ident).collect_vec();
        // Each variant becomes a `--flag` string rendered per the active case policy.
        let flags = variant_names.iter().map(|v| self.format_flag(v)).collect_vec();
        quote! {
            impl AsRef<std::ffi::OsStr> for #name {
                fn as_ref(&self) -> &std::ffi::OsStr {
                    match self {
                        #( #name::#variant_names => #flags, )*
                    }.as_ref()
                }
            }

            impl IntoIterator for #name {
                type Item = std::ffi::OsString;
                type IntoIter = std::iter::Once<std::ffi::OsString>;
                fn into_iter(self) -> Self::IntoIter {
                    std::iter::once(self.as_ref().to_owned())
                }
            }
        }
    }

    /// Generate arm that matches a variant with zero or one field and outputs `Vec<OsString>`.
    pub fn generate_arm_with_field(&mut self, variant: &syn::Variant) -> TokenStream {
        // Collect the token trees of `#[arg(...)]` attributes attached to this variant.
        let relevant_attrs = variant
            .attrs
            .iter()
            .filter_map(|attr| attr.path.is_ident("arg").then_some(&attr.tokens))
            .collect_vec();
        // dbg!(&relevant_attrs.iter().map(|t| t.to_string()).collect_vec());
        // NOTE(review): parsed but currently unused — presumably groundwork for
        // supporting `#[arg(format = …)]`; confirm before removing.
        let _relevant_attrs_as_expr = relevant_attrs
            .iter()
            .filter_map(|tokens| syn::parse2::<syn::ExprAssign>((*tokens).clone()).ok())
            .collect_vec();
        // dbg!(relevant_attrs_as_expr);
        let name = &self.generator.input.ident;
        let variant_name = &variant.ident;
        let flag = self.format_flag(variant_name);
        if let Some(_field) = variant.fields.iter().next() {
            // let field_type = &field.ty;
            // Variant with a payload: emit the flag followed by the field value.
            quote! {
                #name::#variant_name(field) => {
                    let mut result = Vec::new();
                    result.push(#flag.into());
                    let os_str: &OsStr = field.as_ref();
                    result.push(os_str.into());
                    result.into_iter()
                }
            }
        } else {
            // Unit variant: the flag alone.
            quote! {
                #name::#variant_name => vec![#flag.into()].into_iter()
            }
        }
    }

    /// Generate output for enum where variant can have fields.
    pub fn generate_with_fields(&mut self) -> TokenStream {
        let name = &self.generator.input.ident;
        let arms = self.enum_data.variants.iter().map(|v| self.generate_arm_with_field(v));
        quote! {
            impl IntoIterator for #name {
                type Item = std::ffi::OsString;
                type IntoIter = std::vec::IntoIter<std::ffi::OsString>;
                fn into_iter(self) -> Self::IntoIter {
                    match self {
                        #( #arms, )*
                    }
                }
            }
        }
    }

    /// Dispatch to the unit-only or field-aware expansion depending on the variants.
    pub fn generate(&mut self) -> TokenStream {
        // If all variants are unit variants, we just pretty print their names.
        if self.enum_data.variants.iter().all(|v| v.fields.is_empty()) {
            self.generate_plain()
        } else {
            self.generate_with_fields()
        }
    }
}
/// Entry point of the `Arg` derive expansion: builds a generator over the
/// parsed input and returns the emitted token stream.
pub fn derive(input: DeriveInput) -> Result<TokenStream> {
    Ok(Generator::new(&input).generate())
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Exploratory test: check that an enum with `#[arg]` attributes tokenizes; kept ignored.
    #[test]
    #[ignore]
    fn foo() -> Result {
        let code = "enum Foo {
#[arg]
Bar,
#[arg]
Baz,
#[arg]
Quux,
}";
        let _token_stream = syn::parse_str::<TokenStream>(code)?;
        Ok(())
    }

    /// Structure with AST of parenthesized sequence of assignments.
    ///
    /// For example, `(a = 1, b = ToString::to_string)`.
    #[derive(Debug, Clone)]
    pub struct Assignments {
        /// The surrounding parentheses token.
        pub paren_token: syn::token::Paren,
        /// The comma-separated `lhs = rhs` expressions inside the parentheses.
        pub assignments: syn::punctuated::Punctuated<syn::ExprAssign, syn::Token![,]>,
    }

    impl syn::parse::Parse for Assignments {
        fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
            let content;
            let paren_token = syn::parenthesized!(content in input);
            let assignments = content.parse_terminated(syn::ExprAssign::parse)?;
            Ok(Self { paren_token, assignments })
        }
    }

    /// Exploratory test: parse an attribute-style assignment list; kept ignored.
    #[test]
    #[ignore]
    fn parse_attribute() -> Result {
        let attribute = r#"(format = ToString :: to_string)"#;
        let token_stream = syn::parse_str::<TokenStream>(attribute)?;
        dbg!(&token_stream);
        let foo = syn::parse2::<Assignments>(token_stream)?;
        dbg!(foo);
        // let attribute = syn::parse2::<syn::Attribute>(token_stream)?;
        // dbg!(attribute);
        Ok(())
    }
}

View File

@ -1,20 +0,0 @@
[package]
name = "enso-build-macros"
version = "0.1.0"
edition = "2021"
[lib]
proc-macro = true
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
enso-build-base = { path = "../../base" }
enso-build-macros-lib = { path = "../lib" }
syn_1 = { workspace = true }
[dev-dependencies]
itertools = { workspace = true }
proc-macro2 = { workspace = true }
[lints]
workspace = true

View File

@ -1,23 +0,0 @@
use syn_1 as syn;
/// Derive macro entry point for `#[derive(Arg)]`; the actual expansion is
/// implemented in `enso_build_macros_lib::program_args::derive`.
///
/// Panics (at compile time) when the derivation fails.
#[proc_macro_derive(Arg, attributes(arg))]
pub fn derive_arg_fn(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let input = syn::parse_macro_input!(item as syn::DeriveInput);
    enso_build_macros_lib::program_args::derive(input)
        .unwrap_or_else(|err| panic!("Failed to derive program argument: {err:?}"))
        .into()
}
/// This macro takes a string literal with YAML description of file tree and generates wrapper
/// classes. See the tests for this crate for usage examples.
///
/// Panics (at compile time) when the YAML description cannot be processed.
#[proc_macro]
pub fn make_paths(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let input = syn::parse_macro_input!(item as syn::LitStr);
    let input = input.value();
    enso_build_macros_lib::paths::process(input.as_bytes())
        // .inspect(|tt| println!("Generated: {:}", tt))
        .unwrap_or_else(|err| panic!("Failed to generate path types: {err:?}"))
        .into()
}

View File

@ -1,24 +0,0 @@
use enso_build_base::prelude::*;
/// Path wrapper types generated from a YAML description via `make_paths!`.
// NOTE(review): this diff view may have stripped the YAML literal's indentation,
// which is significant to YAML nesting — verify against the original file.
mod paths {
    use enso_build_macros::make_paths;

    make_paths! {
        r#"
<root>/:
item:
item-<root>.bar:"#
    }
}
/// Check that `make_paths!` produced nested structs whose `path` fields
/// substitute the `<root>` parameter (here bound to "name").
#[test]
fn test_path_generation() -> Result {
    let paths = paths::Root::new("name");
    assert_eq!(paths.path, PathBuf::from("name"));
    assert_eq!(paths.item.path, PathBuf::from_iter(["name", "item"]));
    // The `<root>` parameter is substituted inside a file name as well.
    assert_eq!(paths.item_root_bar.path, PathBuf::from_iter(["name", "item-name.bar"]));
    Ok(())
}

View File

@ -1,39 +0,0 @@
// === Non-Standard Linter Configuration ===
#![allow(clippy::disallowed_names)]
use enso_build_base::prelude::*;
use itertools::Itertools;
use std::str::FromStr;
use syn_1 as syn;
/// Sample enum used to exercise the `Arg` derive on unit-only variants.
#[derive(Clone, Copy, Debug, enso_build_macros::Arg)]
pub enum Foo {
    Foo,
    BarBaz,
}
/// Each unit variant should render as a single kebab-cased `--flag`,
/// both via `AsRef<OsStr>` and via `IntoIterator`.
#[test]
fn hello() {
    let cases = [(Foo::Foo, "--foo"), (Foo::BarBaz, "--bar-baz")];
    for (variant, expected) in cases {
        assert_eq!(variant.as_ref(), OsStr::new(expected));
        let rendered = variant.into_iter().collect_vec();
        assert_eq!(rendered, vec![OsString::from(expected)]);
    }
}
/// Exploratory test: parse `foo = ToString::to_string` as a `syn::ExprAssign`.
#[test]
fn experiment_with_parsing() -> Result {
    let code = "foo = ToString::to_string";
    let token_stream = proc_macro2::TokenStream::from_str(code).unwrap();
    dbg!(&token_stream);
    let foo = syn::parse2::<syn::ExprAssign>(token_stream).unwrap();
    dbg!(&foo);
    Ok(())
}

View File

@ -1,32 +0,0 @@
use enso_build_base::prelude::*;
use itertools::Itertools;
/// Sample enum with payload-carrying variants for the `Arg` derive.
#[derive(Clone, Debug, enso_build_macros::Arg)]
pub enum Foo {
    Bar,
    BarBaz(String),
    HogeHoge(OsString),
    // #[arg(format = ToString::to_string)]
    // TaraPon(u32),
}
/// A fieldless variant yields just its flag; a variant with a field yields
/// the flag followed by the field rendered as an `OsString`.
#[test]
fn test_argument_formatting() {
    let bar = Foo::Bar;
    assert_eq!(bar.into_iter().collect_vec(), vec![OsString::from("--bar")]);

    let bar_baz = Foo::BarBaz("foo".into());
    assert_eq!(bar_baz.into_iter().collect_vec(), vec![
        OsString::from("--bar-baz"),
        OsString::from("foo")
    ]);

    let hoge_hoge = Foo::HogeHoge(OsString::from("foo"));
    assert_eq!(hoge_hoge.into_iter().collect_vec(), vec![
        OsString::from("--hoge-hoge"),
        OsString::from("foo")
    ]);
}

View File

@ -1,23 +0,0 @@
// use enso_build_base::prelude::*;
//
// use itertools::Itertools;
//
// #[derive(enso_build_macros::Arg)]
// pub enum Foo {
// Bar,
// #[arg(format = ToString::to_string)]
// TaraPon(u32),
// }
//
// #[test]
// fn test_argument_formatting() {
// let bar = Foo::Bar;
// assert_eq!(bar.into_iter().collect_vec(), vec![OsString::from("--bar")]);
//
// let tara_pon = Foo::TaraPon(42);
// assert_eq!(tara_pon.into_iter().collect_vec(), vec![
// OsString::from("--tara-pon"),
// OsString::from("42")
// ]);
// }

View File

@ -1,24 +0,0 @@
[package]
name = "enso-data-structures"
version = "0.2.0"
authors = ["Enso Team <contact@luna-lang.org>"]
edition = "2021"
description = "A collection of useful data structures."
readme = "README.md"
homepage = "https://github.com/enso-org/enso/lib/rust/data"
repository = "https://github.com/enso-org/enso"
license-file = "../../LICENSE"
keywords = []
categories = ["data-structures"]
publish = true
[lib]
crate-type = ["rlib", "cdylib"]
[dependencies]
enso-prelude = { path = "../prelude" }
serde = { workspace = true }
failure = { workspace = true }
[lints]
workspace = true

View File

@ -1,3 +0,0 @@
# Enso Data Structures
A collection of useful data structures.

View File

@ -1,15 +0,0 @@
//! Library of general data structures.
// === Non-Standard Linter Configuration ===
#![deny(unconditional_recursion)]
#![warn(missing_docs)]
#![warn(trivial_casts)]
// ==============
// === Export ===
// ==============
pub mod im_list;
pub use enso_prelude as prelude;

View File

@ -18,7 +18,7 @@ crate-type = ["rlib"]
[dependencies]
proc-macro2 = { workspace = true }
quote = { workspace = true }
syn_1 = { workspace = true }
syn = { workspace = true }
[lints]
workspace = true

View File

@ -7,7 +7,6 @@
use proc_macro2::TokenTree;
use syn::WhereClause;
use syn::WherePredicate;
use syn_1 as syn;

Some files were not shown because too many files have changed in this diff Show More