Precompute MSDFs (#5811)

Precompute MSDFs for all ASCII glyphs; after this, we no longer spend any time on MSDF computations when loading or interacting with the example projects.

Shader precompilation (during build) is now also parallel; with many cores and an SSD it is practically instant.
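The parallelism is presumably per shader program: each extracted program is optimized by the external toolchain independently, so the jobs can run concurrently. A rough sketch of that shape, with hypothetical helper names rather than the actual `ensogl-pack` build code:

```rust
use anyhow::Result;

/// Hypothetical: run the external optimizer toolchain (glslc, spirv-opt, spirv-cross) for one
/// extracted shader program.
async fn optimize_shader(name: String) -> Result<()> {
    let _ = name;
    Ok(())
}

/// Optimize every extracted shader concurrently instead of one after another.
async fn optimize_all(shaders: Vec<String>) -> Result<()> {
    let tasks: Vec<_> = shaders.into_iter().map(|s| tokio::spawn(optimize_shader(s))).collect();
    for task in tasks {
        task.await??;
    }
    Ok(())
}
```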

Closes #5722.

# Important Notes
- The *dynamic-assets* mechanism now used for MSDF data and shaders is versatile, and could be used to pre-seed any other computation-intensive runtime cache (see the sketch below).
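For illustration, pre-seeding another cache would mean adding one more builder module next to the `fonts` and `shaders` modules in `ensogl-dynamic-assets`, plus an entry in `get_dynamic_assets_sources` and a matching arm in `set_dynamic_asset`. A minimal sketch of such a builder follows; the module and its internals are hypothetical, only the font and shader builders exist in this PR:

```rust
use enso_prelude::*;
use ensogl_core::system::web::JsValue;
use ensogl_core::system::web::Map;

/// Gather the sources for the expensive computation. This runs during the build, inside the
/// asset extractor, which writes the returned files into the `dynamic-assets` directory.
pub fn gather() -> JsValue {
    let assets = Map::new();
    // Add one entry per asset key; each value is a `Map` from file name to file contents.
    assets.into()
}

/// Install a pre-built asset at early runtime, before any main entry point runs.
pub fn set(key: String, files: HashMap<String, Vec<u8>>) {
    // Decode `files` here and populate the runtime cache identified by `key`.
    let _ = (key, files);
}
```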
Kaz Wesley 2023-03-10 04:59:56 -08:00 committed by GitHub
parent 0f1d591bc1
commit 023d8ac239
37 changed files with 1478 additions and 453 deletions

Cargo.lock (generated)

@ -1859,6 +1859,13 @@ dependencies = [
"enso-prelude",
]
[[package]]
name = "enso-bitmap"
version = "0.1.0"
dependencies = [
"thiserror",
]
[[package]]
name = "enso-build"
version = "0.1.0"
@ -2167,6 +2174,7 @@ dependencies = [
"ensogl",
"ensogl-component",
"ensogl-drop-manager",
"ensogl-dynamic-assets",
"ensogl-examples",
"ensogl-hardcoded-theme",
"ensogl-text-msdf",
@ -2591,6 +2599,7 @@ dependencies = [
"ensogl-drop-down",
"ensogl-drop-down-menu",
"ensogl-drop-manager",
"ensogl-dynamic-assets",
"ensogl-file-browser",
"ensogl-flame-graph",
"ensogl-grid-view",
@ -2690,6 +2699,20 @@ dependencies = [
"web-sys",
]
[[package]]
name = "ensogl-dynamic-assets"
version = "0.1.0"
dependencies = [
"anyhow",
"enso-bitmap",
"enso-prelude",
"enso-shapely",
"ensogl-core",
"ensogl-text",
"js-sys",
"wasm-bindgen",
]
[[package]]
name = "ensogl-example-animation"
version = "0.1.0"
@ -3009,6 +3032,7 @@ dependencies = [
"enso-prelude",
"enso-shapely",
"ensogl-core",
"ensogl-text",
]
[[package]]
@ -3038,11 +3062,15 @@ dependencies = [
name = "ensogl-pack"
version = "0.1.0"
dependencies = [
"enso-bitmap",
"enso-prelude",
"fs_extra",
"futures 0.3.26",
"ide-ci",
"manifest-dir-macros",
"regex",
"serde",
"serde_json",
"tempfile",
"tokio",
"walkdir",
@ -3123,6 +3151,7 @@ version = "0.1.0"
dependencies = [
"bincode 2.0.0-rc.2",
"const_format",
"enso-bitmap",
"enso-frp",
"enso-prelude",
"enso-shapely",
@ -3138,6 +3167,7 @@ dependencies = [
"rand_chacha 0.3.1",
"rustybuzz",
"serde",
"serde_json",
"wasm-bindgen-test",
"xi-rope",
]
@ -3168,6 +3198,7 @@ dependencies = [
name = "ensogl-text-msdf"
version = "0.1.0"
dependencies = [
"enso-bitmap",
"enso-build-utilities",
"enso-prelude",
"enso-profiler",


@ -27,6 +27,7 @@ enso-suggestion-database = { path = "suggestion-database" }
ensogl = { path = "../../lib/rust/ensogl" }
ensogl-examples = { path = "../../lib/rust/ensogl/examples" }
ensogl-component = { path = "../../lib/rust/ensogl/component" }
ensogl-dynamic-assets = { path = "../../lib/rust/ensogl/component/dynamic-assets" }
ensogl-text-msdf = { path = "../../lib/rust/ensogl/component/text/src/font/msdf" }
ensogl-hardcoded-theme = { path = "../../lib/rust/ensogl/app/theme/hardcoded" }
ensogl-drop-manager = { path = "../../lib/rust/ensogl/component/drop-manager" }


@ -45,6 +45,16 @@ pub enum Metadata {
BackendMessage(backend::Message),
/// Performance stats gathered from the EnsoGL rendering engine.
RenderStats(ensogl_core::debug::StatsData),
/// Any other metadata type.
///
/// The types defined above are handled specially by `enso-profiler-enso-data` tools: E.g. the
/// RPC events and `RenderStats` are displayed in different ways by the `profiling_run_graph`
/// entry point.
///
/// Other types are logged purely so that they can be seen in the event logs, e.g. when
/// inspecting a log with the `measurements` tool.
#[serde(other)]
Other,
}
impl Display for Metadata {
@ -54,6 +64,7 @@ impl Display for Metadata {
Metadata::RpcRequest(method) => f.collect_str(&method.to_string()),
Metadata::BackendMessage(backend::Message { endpoint, .. }) => f.collect_str(endpoint),
Metadata::RenderStats(stats) => f.collect_str(&format!("{stats:#?}")),
Metadata::Other => f.collect_str("<value>"),
}
}
}


@ -126,14 +126,16 @@ pub mod prelude {
pub use wasm_bindgen_test::wasm_bindgen_test_configure;
}
// Those imports are required to have all examples entry points visible in IDE.
// These imports are required to have all entry points (such as examples) and `before_main`
// functions (such as the dynamic-asset loader) available in the IDE.
#[allow(unused_imports)]
mod examples {
mod imported_for_entry_points {
use enso_debug_scene::*;
use ensogl_dynamic_assets::*;
use ensogl_examples::*;
}
#[allow(unused_imports)]
use examples::*;
use imported_for_entry_points::*;
mod profile_workflow;


@ -87,7 +87,7 @@ class Main {
loader: {
wasmUrl: 'pkg-opt.wasm',
jsUrl: 'pkg.js',
shadersUrl: 'shaders',
assetsUrl: 'dynamic-assets',
},
},
inputConfig


@ -46,7 +46,7 @@
dist/:
gui/:
assets/:
shaders/: # Optimized shaders that contain main function code only.
dynamic-assets/: # Assets used by the WASM application.
pkg.js: # The `pkg.js` artifact of wasm-pack WITH bundled snippets.
pkg.js.map: # The sourcemap mapping to `pkg.js` generated by wasm-pack.
pkg-opt.wasm: # The optimized WASM artifact.
@ -56,7 +56,7 @@
# Final WASM artifacts in `dist` directory.
wasm/:
shaders/: # Optimized shaders that contain main function code only.
dynamic-assets/: # Assets used by the WASM application.
index.js: # The main JS bundle to load WASM and JS wasm-pack bundles.
index.d.ts: # TypeScript types interface file.
index.js.map: # The sourcemap mapping to `index.js`.


@ -426,7 +426,7 @@ impl Artifact {
// consider whether they should be shipped or not.
let RepoRootDistWasm {
path: _,
shaders,
dynamic_assets,
index_js: _,
index_d_ts: _,
index_js_map: _,
@ -434,7 +434,7 @@ impl Artifact {
pkg_wasm: _,
pkg_opt_wasm,
} = &self.0;
vec![shaders.as_path(), pkg_js.as_path(), pkg_opt_wasm.as_path()]
vec![dynamic_assets.as_path(), pkg_js.as_path(), pkg_opt_wasm.as_path()]
}
pub fn symlink_ensogl_dist(&self, linked_dist: &RepoRootTargetEnsoglPackLinkedDist) -> Result {


@ -281,8 +281,11 @@ pub async fn upload_gui_to_cloud(
let bucket = crate::aws::s3::gui::context(version).await?;
// Some files we upload as-is, some gzipped. This seems somewhat arbitrary now.
let files_to_upload =
[assets.pkg_opt_wasm.as_path(), assets.style_css.as_path(), assets.shaders.as_path()];
let files_to_upload = [
assets.pkg_opt_wasm.as_path(),
assets.style_css.as_path(),
assets.dynamic_assets.as_path(),
];
let files_to_upload_gzipped = [assets.index_js.as_path(), assets.pkg_js.as_path()];
for file in files_to_upload.iter() {


@ -0,0 +1,10 @@
[package]
name = "enso-bitmap"
version = "0.1.0"
authors = ["Enso Team <contact@enso.org>"]
edition = "2021"
[features]
[dependencies]
thiserror = "1"

lib/rust/bitmap/src/lib.rs (new file)

@ -0,0 +1,121 @@
//! Library for working with simple uncompressed bitmaps.
// === Features ===
// === Standard Linter Configuration ===
#![deny(non_ascii_idents)]
#![warn(unsafe_code)]
#![allow(clippy::bool_to_int_with_if)]
#![allow(clippy::let_and_return)]
// === Non-Standard Linter Configuration ===
#![allow(clippy::option_map_unit_fn)]
#![allow(clippy::precedence)]
#![allow(dead_code)]
#![deny(unconditional_recursion)]
#![warn(missing_copy_implementations)]
#![warn(missing_debug_implementations)]
#![warn(missing_docs)]
#![warn(trivial_casts)]
#![warn(trivial_numeric_casts)]
#![warn(unused_import_braces)]
#![warn(unused_qualifications)]
use thiserror::Error;
// ======================
// === Raw Image Data ===
// ======================
/// Raw image data.
#[derive(Debug, Clone)]
pub struct Image {
/// Number of pixels in each row.
pub width: usize,
/// Number of rows.
pub height: usize,
/// Pixel data (RGB).
pub data: Vec<u8>,
}
// ==================
// === PPM Format ===
// ==================
/// If this is `false`, pixel data will be encoded and decoded more efficiently, but
/// non-standardly. Other programs will be able to display the files, though the values will be
/// inverted.
///
/// PPM uses a 0=white convention for pixel data, which is somewhat expensive to convert to.
const CONFORMANT_PPM: bool = false;
impl Image {
/// Encode in the PPM format. [PPM][1] is a particularly simple bitmap format that is almost as
/// easy to read and write as serializing the raw pixel data; compared to just serializing, it
/// has the advantage that the files can be inspected with common tools.
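///
/// For example, a 2-pixel-wide, 1-pixel-high image encodes as the ASCII header `P6\n2 1\n255\n`
/// followed by 6 bytes of channel data (3 channels per pixel).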
///
/// [1]: https://netpbm.sourceforge.net/doc/ppm.html
pub fn encode_ppm(&self) -> Vec<u8> {
debug_assert_eq!(self.height * self.width * CHANNELS, self.data.len());
let mut out = Vec::new();
let width = self.width;
let height = self.height;
let header = format!("{PPM_MAGIC}\n{width} {height}\n255\n");
out.extend(header.bytes());
out.extend(self.data.iter().map(ppm_value));
out
}
/// Decode from the PPM format.
pub fn decode_ppm(data: &[u8]) -> Result<Self, Error> {
const HEADER_LINES: usize = 3;
const BODY_LINE: usize = 1;
let mut lines = data.splitn(HEADER_LINES + BODY_LINE, |&x| x == b'\n');
let magic = lines.next().ok_or(Error::Truncated)?;
if magic != PPM_MAGIC.as_bytes() {
return Err(Error::WrongFormat);
}
const DIMENSIONS: usize = 2;
let dimensions = lines.next().ok_or(Error::Truncated)?;
let dimensions = std::str::from_utf8(dimensions).map_err(|_| Error::Invalid)?;
let (width, height) = dimensions.split_once(' ').ok_or(Error::Truncated)?;
let height = height.parse().map_err(|_| Error::Invalid)?;
let width = width.parse().map_err(|_| Error::Invalid)?;
let num_shades = lines.next().ok_or(Error::Truncated)?;
if num_shades != b"255" {
return Err(Error::Invalid);
}
let data: Vec<_> = lines.next().ok_or(Error::Truncated)?.iter().map(ppm_value).collect();
debug_assert_eq!(height * width * CHANNELS, data.len());
Ok(Self { width, height, data })
}
}
/// Map a channel value to or from PPM encoding, which is inverted from most formats.
fn ppm_value(x: &u8) -> u8 {
if CONFORMANT_PPM {
255 - x
} else {
*x
}
}
/// Encoding/decoding errors.
#[derive(Error, Copy, Clone, Debug, PartialEq, Eq)]
pub enum Error {
/// The file does not appear to be in the expected format.
#[error("The file does not appear to be in the expected format.")]
WrongFormat,
/// The file is invalid or uses unsupported features.
#[error("The file is invalid or uses unsupported features.")]
Invalid,
/// The file is invalid or may be truncated.
#[error("The file is invalid or may be truncated.")]
Truncated,
}
/// The first bytes of a PPM file.
const PPM_MAGIC: &str = "P6";
const CHANNELS: usize = 3;


@ -9,5 +9,6 @@ crate-type = ["rlib", "cdylib"]
[dependencies]
ensogl-core = { path = "../../../core" }
ensogl-text = { path = "../../../component/text" }
enso-shapely = { path = "../../../../shapely" }
enso-prelude = { path = "../../../../prelude" }


@ -19,6 +19,7 @@ use enso_prelude::*;
use enso_shapely::before_main;
use ensogl_core::prelude::ImString;
use ensogl_text::font::DEFAULT_FONT;
@ -215,7 +216,7 @@ define_themes! { [light:0, dark:1]
dimmed = Rgb::from_base_255(160.0, 163.0, 165.0), Rgb::from_base_255(160.0, 163.0, 165.0);
padding = 16.0, 16.0;
text {
font = "mplus1p", "mplus1p";
font = DEFAULT_FONT, DEFAULT_FONT;
y_offset = 8.0, 8.0;
y_offset_header = 5.0, 5.0;
x_offset_header = 0.0, 0.0;
@ -316,7 +317,7 @@ define_themes! { [light:0, dark:1]
entry {
margin = 1.0, 1.0;
hover_color = Rgba(0.0, 0.0, 0.0, 0.0), Rgba(0.0, 0.0, 0.0, 0.0);
font = "mplus1p", "mplus1p";
font = DEFAULT_FONT, DEFAULT_FONT;
text_y_offset = 6.0, 6.0;
text_padding_left = 0.0, 0.0;
text_size = 11.5, 11.5;
@ -686,7 +687,7 @@ define_themes! { [light:0, dark:1]
text {
offset = 00.0, 00.0;
size = 12.0, 12.0;
font = "default", "default";
font = DEFAULT_FONT, DEFAULT_FONT;
}
padding_outer = 20.0, 20.0;
padding_inner_x = 10.0, 10.0;


@ -9,6 +9,7 @@ ensogl-button = { path = "button" }
ensogl-drop-down-menu = { path = "drop-down-menu" }
ensogl-drop-down = { path = "drop-down" }
ensogl-drop-manager = { path = "drop-manager" }
ensogl-dynamic-assets = { path = "dynamic-assets" }
ensogl-file-browser = { path = "file-browser" }
ensogl-flame-graph = { path = "flame-graph" }
ensogl-label = { path = "label" }


@ -0,0 +1,15 @@
[package]
name = "ensogl-dynamic-assets"
version = "0.1.0"
authors = ["Enso Team <contact@enso.org>"]
edition = "2021"
[dependencies]
anyhow = { workspace = true }
enso-bitmap = { path = "../../../bitmap" }
enso-prelude = { path = "../../../prelude" }
enso-shapely = { path = "../../../shapely" }
ensogl-core = { path = "../../core" }
ensogl-text = { path = "../text" }
js-sys = { workspace = true }
wasm-bindgen = { workspace = true }


@ -0,0 +1,170 @@
//! Pre-seed the cache of MSDF data for fast font rendering.
//!
//! During build, while running the app for asset-extraction, we load the MSDF data cache with
//! common glyphs. We serialize this data into two asset files for each font: an image containing
//! the MSDF data itself, and a metadata file identifying the glyphs in the image and providing
//! per-glyph MSDF parameters.
use enso_prelude::*;
use ensogl_core::system::web::JsCast;
use ensogl_core::system::web::JsValue;
use ensogl_core::system::web::Map;
use ensogl_text::font;
use ensogl_text::font::Font;
// =================
// === Constants ===
// =================
/// The printable characters in the ASCII subset of Unicode. This is the same as the set of keys
/// on a US-ANSI keyboard.
const ASCII_PRINTABLE_CHARS: &str = concat!(
" !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ",
"[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~"
);
/// The glyphs to include in the pre-built atlas loaded at application startup.
const PRELOAD_GLYPHS: &[&str] = &[ASCII_PRINTABLE_CHARS];
/// The variations to be pre-built for each glyph, for each typeface.
const PRELOAD_VARIATIONS: &[font::NonVariableFaceHeader] = &[
font::NonVariableFaceHeader::new(
font::Width::Normal,
font::Weight::Normal,
font::Style::Normal,
),
font::NonVariableFaceHeader::new(
font::Width::Normal,
font::Weight::Medium,
font::Style::Normal,
),
font::NonVariableFaceHeader::new(font::Width::Normal, font::Weight::Bold, font::Style::Normal),
font::NonVariableFaceHeader::new(
font::Width::Normal,
font::Weight::ExtraBold,
font::Style::Normal,
),
];
/// The typefaces for which atlases should be pre-built.
const PRELOAD_TYPEFACES: &[&str] = &[font::DEFAULT_FONT_MONO, font::DEFAULT_FONT];
/// Path within the asset directory to store the glyph atlas image.
const ATLAS_FILE: &str = "atlas.ppm";
/// Path within the asset directory to store the glyph metadata.
const METADATA_FILE: &str = "metadata.json";
// =================
// === Interface ===
// =================
/// Build atlas sources, and return as JavaScript data.
pub fn build_atlases() -> JsValue {
let fonts = Map::new();
for font_name in PRELOAD_TYPEFACES {
match build_atlas(font_name) {
Ok(font) => {
fonts.set(&font_name.to_string().into(), &font.into());
}
Err(e) => error!("Failed to build atlas for font: {e}"),
}
}
fonts.into()
}
/// Load an atlas from JavaScript data.
pub fn set_atlas(font: String, data: HashMap<String, Vec<u8>>) {
try_set_atlas(font, data).unwrap_or_else(|e| error!("Failed to load font atlas: {e}"));
}
fn try_set_atlas(font: String, mut data: HashMap<String, Vec<u8>>) -> anyhow::Result<()> {
let atlas = data.remove(ATLAS_FILE).ok_or_else(|| anyhow!("Atlas file not found."))?;
let metadata = String::from_utf8(
data.remove(METADATA_FILE).ok_or_else(|| anyhow!("Metadata file not found."))?,
)?;
load_atlas(font, atlas, metadata)
}
// ==================
// === Atlas Data ===
// ==================
/// MSDF data for a set of glyphs, ready to be rendered.
#[derive(Debug)]
pub struct Atlas {
atlas: js_sys::ArrayBuffer,
metadata: String,
}
impl From<Atlas> for JsValue {
fn from(value: Atlas) -> Self {
Map::new()
.set(&ATLAS_FILE.into(), &value.atlas.into())
.set(&METADATA_FILE.into(), &value.metadata.into())
.into()
}
}
impl TryFrom<JsValue> for Atlas {
type Error = anyhow::Error;
fn try_from(value: JsValue) -> anyhow::Result<Self> {
let map = Map::from(value);
let atlas = map.get(&ATLAS_FILE.into());
let metadata = map.get(&METADATA_FILE.into());
let atlas = atlas.dyn_into().unwrap();
let metadata = metadata.as_string().unwrap();
Ok(Self { atlas, metadata })
}
}
// =======================================
// === Creating Atlases at Build-Time ===
// =======================================
/// Generate MSDF data for a font.
fn build_atlas(name: &str) -> anyhow::Result<Atlas> {
let fonts = font::Embedded::new();
let font = fonts.load_font(name.into()).ok_or_else(|| anyhow!("Failed to load font."))?;
let font = match font {
Font::NonVariable(font) => font,
Font::Variable(_) =>
return Err(anyhow!("Atlas cache pre-seeding for variable fonts is not supported.")),
};
for variation in PRELOAD_VARIATIONS {
for glyphs in PRELOAD_GLYPHS {
font.prepare_glyphs_for_text(variation, glyphs).unwrap_or_else(|e| {
warn!("Failed to load specified variation for font `{name}`: {e}")
});
}
let unknown_glyph = font::GlyphId::default();
font.prepare_glyph_by_id(variation, unknown_glyph);
}
let cache = font.cache_snapshot();
let atlas = cache.atlas.encode_ppm();
let atlas = js_sys::Uint8Array::from(&atlas[..]).buffer();
let metadata = cache.glyphs;
Ok(Atlas { atlas, metadata })
}
// =========================================
// === Loading Atlases at Early Run-Time ===
// =========================================
/// Attach the given MSDF data to a font to enable efficient rendering.
fn load_atlas(font: String, atlas: Vec<u8>, glyphs: String) -> anyhow::Result<()> {
let atlas = enso_bitmap::Image::decode_ppm(&atlas)?;
let snapshot = Rc::new(font::CacheSnapshot { atlas, glyphs });
let name = ensogl_text::font::Name::from(font);
font::PREBUILT_ATLASES.with_borrow_mut(|atlases| atlases.insert(name, snapshot));
Ok(())
}


@ -0,0 +1,95 @@
//! *Dynamic assets* are assets that are built by running the application itself. This enables certain
//! types of performance optimization:
//!
//! # Cache pre-seeding
//!
//! When the application uses a cache for an expensive computation, in some cases it is possible to
//! serialize the result of the work, and load it at startup to reduce work done at runtime. Dynamic
//! assets make this process simple to apply to any type of cache.
//!
//! # Offline shader optimization
//!
//! The application generates many shader programs; these programs can be optimized by external
//! tools. However, the programs are generated at runtime, so build-time optimization requires
//! dynamic analysis. Dynamic asset extraction fills this role.
#![feature(local_key_cell_methods)]
#![feature(let_chains)]
#![cfg(target_arch = "wasm32")]
// === Standard Linter Configuration ===
#![deny(non_ascii_idents)]
#![warn(unsafe_code)]
#![allow(clippy::bool_to_int_with_if)]
#![allow(clippy::let_and_return)]
// === Non-Standard Linter Configuration ===
#![allow(clippy::option_map_unit_fn)]
#![allow(clippy::precedence)]
#![allow(dead_code)]
#![deny(unconditional_recursion)]
#![warn(missing_copy_implementations)]
#![warn(missing_debug_implementations)]
#![warn(missing_docs)]
#![warn(trivial_casts)]
#![warn(trivial_numeric_casts)]
#![warn(unused_import_braces)]
#![warn(unused_qualifications)]
use enso_prelude::*;
use enso_shapely::before_main;
use ensogl_core::system::js;
use ensogl_core::system::web::Closure;
use ensogl_core::system::web::JsCast;
use ensogl_core::system::web::JsValue;
use ensogl_core::system::web::Map;
pub mod fonts;
pub mod shaders;
// ======================
// === Dynamic Assets ===
// ======================
/// Register the functions to get and set dynamic assets, to be invoked from JavaScript.
#[before_main]
pub fn register_dynamic_assets_fns() {
let js_app = js::app_or_panic();
let closure = Closure::new(get_dynamic_assets_sources);
js_app.register_get_dynamic_assets_sources_rust_fn(&closure);
mem::forget(closure);
let closure = Closure::new(set_dynamic_asset);
js_app.register_set_dynamic_asset_rust_fn(&closure);
mem::forget(closure);
}
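/// Gather the sources of every dynamic asset, keyed by builder name (`font`, `shader`), for the
/// build-time asset extractor.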
fn get_dynamic_assets_sources() -> JsValue {
let builders = Map::new();
builders.set(&"font".to_string().into(), &fonts::build_atlases());
builders.set(&"shader".to_string().into(), &shaders::gather());
builders.into()
}
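/// Install a single pre-built dynamic asset at early runtime, dispatching on its builder name.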
fn set_dynamic_asset(builder: JsValue, key: JsValue, asset: JsValue) {
try_set_dynamic_asset(builder, key, asset)
.unwrap_or_else(|e| error!("Setting dynamic asset: {e}"));
}
fn try_set_dynamic_asset(builder: JsValue, key: JsValue, asset: JsValue) -> anyhow::Result<()> {
let builder = builder.as_string().unwrap();
let key = key.as_string().unwrap();
let asset: Map = asset.dyn_into().unwrap();
info!("Loading a dynamic asset of type `{builder}`: `{key}`.");
let mut asset_ = HashMap::new();
asset.for_each(&mut |value: JsValue, key: JsValue| {
asset_.insert(key.as_string().unwrap(), js_sys::Uint8Array::new(&value).to_vec());
});
match builder.as_ref() {
"font" => fonts::set_atlas(key, asset_),
"shader" => shaders::set(key, asset_),
_ => anyhow::bail!("Unknown builder."),
}
Ok(())
}


@ -0,0 +1,53 @@
//! Offline optimization of runtime-generated shader programs.
use enso_prelude::*;
use ensogl_core::system::web::JsValue;
use ensogl_core::system::web::Map;
// =================
// === Constants ===
// =================
/// Path within the asset directory to store the vertex shader.
const VERTEX_FILE: &str = "vertex.glsl";
/// Path within the asset directory to store the fragment shader.
const FRAGMENT_FILE: &str = "fragment.glsl";
// ===============
// === Shaders ===
// ===============
/// Gather unoptimized shader code for precompilation.
pub fn gather() -> JsValue {
let map = Map::new();
let shaders = ensogl_core::display::world::gather_shaders();
let vertex_file = VERTEX_FILE.into();
let fragment_file = FRAGMENT_FILE.into();
for (path, shader) in shaders {
let assets = Map::new();
assets.set(&vertex_file, &shader.vertex.into());
assets.set(&fragment_file, &shader.fragment.into());
map.set(&path.into(), &assets.into());
}
map.into()
}
/// Set optimized shader code (at early runtime).
pub fn set(key: String, value: HashMap<String, Vec<u8>>) {
try_set(key, value).unwrap_or_else(|e| error!("Failed to load shader: {e}."));
}
fn try_set(key: String, mut value: HashMap<String, Vec<u8>>) -> anyhow::Result<()> {
let vertex = String::from_utf8(
value.remove(VERTEX_FILE).ok_or_else(|| anyhow!("Missing vertex file."))?,
)?;
let fragment = String::from_utf8(
value.remove(FRAGMENT_FILE).ok_or_else(|| anyhow!("Missing fragment file."))?,
)?;
ensogl_core::display::world::set_shader_code(key, vertex, fragment);
Ok(())
}


@ -8,6 +8,7 @@ edition = "2021"
crate-type = ["rlib", "cdylib"]
[dependencies]
enso-bitmap = { path = "../../../bitmap" }
enso-frp = { path = "../../../frp" }
enso-prelude = { path = "../../../prelude" }
enso-shapely = { path = "../../../shapely" }
@ -21,6 +22,7 @@ xi-rope = { version = "0.3.0" }
owned_ttf_parser = { workspace = true }
bincode = { workspace = true }
serde = { version = "1", features = ["rc"] }
serde_json = { workspace = true }
ordered-float = { workspace = true }
ensogl-text-font-family = { path = "src/font/family" }
rustybuzz = { workspace = true }
@ -29,3 +31,7 @@ rustybuzz = { workspace = true }
rand = { version = "0.8.5", default-features = false }
rand_chacha = "0.3.1"
wasm-bindgen-test = { workspace = true }
# Stop wasm-pack from running wasm-opt, because we run it from our build scripts in order to customize options.
[package.metadata.wasm-pack.profile.release]
wasm-opt = false


@ -9,7 +9,6 @@ use ensogl_core::system::gpu;
#[cfg(target_arch = "wasm32")]
use ensogl_core::system::gpu::texture;
use ensogl_core::system::web::platform;
use ensogl_text_embedded_fonts::Embedded;
use ensogl_text_msdf as msdf;
use ordered_float::NotNan;
use owned_ttf_parser as ttf;
@ -45,11 +44,11 @@ pub use ttf::Width;
/// most web browsers (you cannot define `@font-face` in CSS for multiple faces of the same file).
const TTF_FONT_FACE_INDEX: u32 = 0;
/// A string literal that means a default non-monospace font.
pub const DEFAULT_FONT: &str = "default";
/// The name of the default proportional font family.
pub const DEFAULT_FONT: &str = "mplus1p";
/// A string literal that means a default monospace font.
pub const DEFAULT_FONT_MONO: &str = "default-mono";
/// The name of the default monospace font family.
pub const DEFAULT_FONT_MONO: &str = "dejavusansmono";
@ -226,21 +225,6 @@ pub struct Face {
pub ttf: ttf::OwnedFace,
}
impl Face {
/// Load the font face from memory. Corrupted faces will be reported.
fn load_from_memory(name: &str, embedded: &Embedded) -> Option<Face> {
let result = Self::load_from_memory_internal(name, embedded);
result.map_err(|err| error!("Error parsing font: {}", err)).ok()
}
fn load_from_memory_internal(name: &str, embedded: &Embedded) -> anyhow::Result<Face> {
let data = embedded.data.get(name).ok_or_else(|| anyhow!("Font '{}' not found", name))?;
let ttf = ttf::OwnedFace::from_vec((**data).into(), TTF_FONT_FACE_INDEX)?;
let msdf = msdf::OwnedFace::load_from_memory(data)?;
Ok(Face { msdf, ttf })
}
}
// ==============
@ -310,7 +294,7 @@ impl NonVariableFamily {
/// ignored.
fn load_all_faces(&self, embedded: &Embedded) {
for (header, file_name) in &self.definition.map {
if let Some(face) = Face::load_from_memory(file_name, embedded) {
if let Some(face) = embedded.load_face(file_name) {
self.faces.borrow_mut().insert(*header, face);
}
}
@ -350,7 +334,7 @@ impl VariableFamily {
/// Load all font faces from the embedded font data. Corrupted faces will be reported and
/// ignored.
fn load_all_faces(&self, embedded: &Embedded) {
if let Some(face) = Face::load_from_memory(&self.definition.file_name, embedded) {
if let Some(face) = embedded.load_face(&self.definition.file_name) {
// Set default variation axes during face initialization. This is needed to make some
// fonts appear on the screen. In case some axes are not found, warnings will be
// silenced.
@ -668,6 +652,11 @@ impl<F: Family> FontTemplate<F> {
glyph_id: GlyphId,
face: &Face,
) -> GlyphRenderInfo {
log_miss(GlyphCacheMiss {
face: self.name.normalized.clone(),
variations: format!("{variations:?}"),
glyph_id: glyph_id.0,
});
self.family.update_msdfgen_variations(variations);
let render_info = GlyphRenderInfo::load(&face.msdf, glyph_id, &self.atlas);
if !self.cache.borrow().contains_key(variations) {
@ -714,6 +703,162 @@ impl<F: Family> FontTemplate<F> {
// ===============
// === Caching ===
// ===============
thread_local! {
/// Atlases loaded at application startup.
pub static PREBUILT_ATLASES: RefCell<HashMap<Name, Rc<CacheSnapshot>>> = default();
}
/// Cached rendering information for a font.
#[derive(Debug)]
pub struct CacheSnapshot {
/// The MSDF atlas pixel data.
pub atlas: enso_bitmap::Image,
/// Index of glyphs found in [`atlas`].
pub glyphs: String,
}
impl FontTemplate<NonVariableFamily> {
/// Return the current glyph cache data.
pub fn cache_snapshot(&self) -> CacheSnapshot {
let atlas = self.atlas.to_image();
let cache: HashMap<String, _> = self
.cache
.borrow()
.iter()
.map(|(variation, info)| {
let glyphs: HashMap<String, GlyphRenderInfo> =
info.glyphs.iter().map(|(id, data)| (id.0.to_string(), *data)).collect();
(serialize_variation(variation), glyphs)
})
.collect();
let glyphs = serde_json::to_string(&cache);
// Serialization can only fail if the types are not serializable to JSON, so this will
// either succeed consistently or fail consistently. [`unwrap`] it so that if it ever breaks,
// we'll catch it.
let glyphs = glyphs.unwrap();
CacheSnapshot { atlas, glyphs }
}
/// Populate the cache with the given data.
pub fn load_cache(&self, snapshot: &CacheSnapshot) -> anyhow::Result<()> {
self.atlas.set_data(snapshot.atlas.clone());
let cache: HashMap<String, HashMap<String, GlyphRenderInfo>> =
serde_json::from_str(&snapshot.glyphs)?;
*self.cache.borrow_mut() = cache
.into_iter()
.map(|(variation, info)| {
let kerning = default();
let glyphs = info
.into_iter()
.map(|(id, data)| Ok((GlyphId(id.parse()?), data)))
.collect::<anyhow::Result<_>>()?;
Ok((deserialize_variation(&variation)?, FontDataCache { kerning, glyphs }))
})
.collect::<anyhow::Result<_>>()?;
Ok(())
}
/// Load the glyphs for the given text into the cache.
pub fn prepare_glyphs_for_text(
&self,
variations: &NonVariableFaceHeader,
glyphs: &str,
) -> anyhow::Result<()> {
let faces = self.family.faces.borrow();
let face = faces
.get(variations)
.ok_or_else(|| anyhow!("No face found for variations: {variations:?}."))?;
let ttf_face = face.ttf.as_face_ref();
// This is safe. Unwrap should be removed after rustybuzz is fixed:
// https://github.com/RazrFalcon/rustybuzz/issues/52
let buzz_face = rustybuzz::Face::from_face(ttf_face.clone()).unwrap();
let mut buffer = rustybuzz::UnicodeBuffer::new();
buffer.push_str(glyphs);
let shaped = rustybuzz::shape(&buzz_face, &[], buffer);
for info in shaped.glyph_infos() {
let id = GlyphId(info.glyph_id as u16);
// Load it into the cache.
let _ = self.glyph_info(variations, id);
}
Ok(())
}
/// Load the glyph with the given ID into the cache.
pub fn prepare_glyph_by_id(&self, variations: &NonVariableFaceHeader, id: GlyphId) {
// Load it into the cache.
let _ = self.glyph_info(variations, id);
}
}
// === Serialization Helpers, Because `ttf_parser` Doesn't `derive` Them ===
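/// Encode a face variation as a `{width}-{weight}-{style}` key; e.g. a normal-width, bold, italic
/// face becomes `"Normal-700-Italic"`.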
fn serialize_variation(variation: &NonVariableFaceHeader) -> String {
let width = match variation.width {
Width::UltraCondensed => "UltraCondensed",
Width::ExtraCondensed => "ExtraCondensed",
Width::Condensed => "Condensed",
Width::SemiCondensed => "SemiCondensed",
Width::Normal => "Normal",
Width::SemiExpanded => "SemiExpanded",
Width::Expanded => "Expanded",
Width::ExtraExpanded => "ExtraExpanded",
Width::UltraExpanded => "UltraExpanded",
};
let weight = variation.weight.to_number().to_string();
let style = match variation.style {
Style::Normal => "Normal",
Style::Italic => "Italic",
Style::Oblique => "Oblique",
};
format!("{width}-{weight}-{style}")
}
fn deserialize_variation(variation: &str) -> anyhow::Result<NonVariableFaceHeader> {
let mut parts = variation.splitn(3, '-');
let bad_variation = || anyhow!("Malformed variation specifier: {variation}");
let width = match parts.next().ok_or_else(bad_variation)? {
"UltraCondensed" => Width::UltraCondensed,
"ExtraCondensed" => Width::ExtraCondensed,
"Condensed" => Width::Condensed,
"SemiCondensed" => Width::SemiCondensed,
"Normal" => Width::Normal,
"SemiExpanded" => Width::SemiExpanded,
"Expanded" => Width::Expanded,
"ExtraExpanded" => Width::ExtraExpanded,
"UltraExpanded" => Width::UltraExpanded,
width => anyhow::bail!("Unexpected font width: `{width}`."),
};
let weight = Weight::from(parts.next().ok_or_else(bad_variation)?.parse::<u16>()?);
let style = match parts.next().ok_or_else(bad_variation)? {
"Normal" => Style::Normal,
"Italic" => Style::Italic,
"Oblique" => Style::Oblique,
style => anyhow::bail!("Unexpected font style: `{style}`."),
};
Ok(NonVariableFaceHeader { width, weight, style })
}
// === Cache Logging ===
/// A glyph that was not found in the MSDF data cache.
#[derive(Debug, serde::Serialize)]
#[allow(dead_code)]
pub struct GlyphCacheMiss {
face: String,
variations: String,
glyph_id: u16,
}
profiler::metadata_logger!("GlyphCacheMiss", log_miss(GlyphCacheMiss));
// =======================
// === FontWithGpuData ===
// =======================
@ -818,9 +963,8 @@ impl {
Entry::Occupied (entry) => Some(entry.get().clone_ref()),
Entry::Vacant (entry) => {
debug!("Loading font: {:?}", name);
let definition = self.embedded.definitions.get(&name)?;
let hinting = Hinting::for_font(&name);
let font = load_from_embedded_registry(name, definition, &self.embedded);
let font = self.embedded.load_font(name)?;
let font = FontWithGpuData::new(font, hinting, &self.scene);
entry.insert(font.clone_ref());
Some(font)
@ -833,7 +977,7 @@ impl Registry {
/// Constructor.
pub fn init_and_load_embedded_fonts(scene: &scene::Scene) -> Registry {
let scene = scene.clone_ref();
let embedded = Embedded::init_and_load_embedded_fonts();
let embedded = Embedded::new();
let fonts = HashMap::new();
let data = RegistryData { scene, embedded, fonts };
let rc = Rc::new(RefCell::new(data));
@ -847,25 +991,6 @@ impl scene::Extension for Registry {
}
}
fn load_from_embedded_registry(
name: Name,
definition: &family::Definition,
embedded: &Embedded,
) -> Font {
match definition {
family::Definition::NonVariable(definition) => {
let family = NonVariableFamily::from(definition);
family.load_all_faces(embedded);
NonVariableFont::new(name, family).into()
}
family::Definition::Variable(definition) => {
let family = VariableFamily::from(definition);
family.load_all_faces(embedded);
VariableFont::new(name, family).into()
}
}
}
// ===============
@ -877,7 +1002,7 @@ fn load_from_embedded_registry(
/// [`glyph::FUNCTIONS`]).
#[allow(missing_docs)]
#[derive(Clone, Copy, Debug)]
pub struct Hinting {
struct Hinting {
opacity_increase: f32,
opacity_exponent: f32,
}
@ -904,3 +1029,60 @@ impl Default for Hinting {
Self { opacity_increase: 0.0, opacity_exponent: 1.0 }
}
}
// =========================
// === Embedded Registry ===
// =========================
/// A registry of font data built into the application.
#[derive(Debug, Default)]
pub struct Embedded {
definitions: HashMap<Name, family::Definition>,
data: HashMap<&'static str, &'static [u8]>,
}
impl Embedded {
/// Load the registry.
pub fn new() -> Self {
let fonts = ensogl_text_embedded_fonts::Embedded::init_and_load_embedded_fonts();
let ensogl_text_embedded_fonts::Embedded { definitions, data } = fonts;
Self { definitions, data }
}
/// Load a font from the registry.
pub fn load_font(&self, name: Name) -> Option<Font> {
self.definitions.get(&name).map(|definition| match definition {
family::Definition::NonVariable(definition) => {
let family = NonVariableFamily::from(definition);
family.load_all_faces(self);
let cache = PREBUILT_ATLASES.with_borrow_mut(|atlases| atlases.get(&name).cloned());
let font = NonVariableFont::new(name, family);
if let Some(cache) = cache {
font.load_cache(&cache)
.unwrap_or_else(|e| error!("Failed to load cached font data: {e}."));
}
font.into()
}
family::Definition::Variable(definition) => {
let family = VariableFamily::from(definition);
family.load_all_faces(self);
VariableFont::new(name, family).into()
}
})
}
/// Load the font face from memory. Corrupted faces will be reported.
fn load_face(&self, name: &str) -> Option<Face> {
let result = self.load_face_internal(name);
result.map_err(|err| error!("Error parsing font: {}", err)).ok()
}
fn load_face_internal(&self, name: &str) -> anyhow::Result<Face> {
let data = self.data.get(name).ok_or_else(|| anyhow!("Font '{}' not found", name))?;
let ttf = ttf::OwnedFace::from_vec((**data).into(), TTF_FONT_FACE_INDEX)?;
let msdf = msdf::OwnedFace::load_from_memory(data)?;
Ok(Face { msdf, ttf })
}
}


@ -131,9 +131,7 @@ pub fn embedded_family_definitions_ext() -> HashMap<family::Name, family::Defini
"DejaVuSansMono-Bold.ttf".to_string(),
),
]));
map.insert("dejavusans".into(), dejavusans.clone());
map.insert("dejavusansmono".into(), dejavusansmono.clone());
map.insert("default".into(), dejavusans);
map.insert("default-mono".into(), dejavusansmono);
map.insert("dejavusans".into(), dejavusans);
map.insert("dejavusansmono".into(), dejavusansmono);
map
}


@ -187,7 +187,7 @@ pub struct NonVariableFaceHeader {
impl NonVariableFaceHeader {
/// Constructor.
pub fn new(width: Width, weight: Weight, style: Style) -> Self {
pub const fn new(width: Width, weight: Weight, style: Style) -> Self {
Self { width, weight, style }
}


@ -8,6 +8,7 @@ edition = "2021"
crate-type = ["cdylib", "rlib"]
[dependencies]
enso-bitmap = { path = "../../../../../../bitmap" }
enso-prelude = { path = "../../../../../../prelude" }
enso-profiler = { path = "../../../../../../profiler" }
enso-types = { path = "../../../../../../types" }


@ -4,8 +4,6 @@ use crate::prelude::*;
use crate::Msdf;
use serde;
// ===============
@ -17,7 +15,7 @@ use serde;
/// This structure keeps texture data in 8-bit-per-channel RGB format, which is ready to be passed
/// to WebGL `texImage2D`. The texture contains MSDFs for all loaded glyphs, organized in a
/// vertical column.
#[derive(Clone, CloneRef, Debug, Default, serde::Serialize, serde::Deserialize)]
#[derive(Clone, CloneRef, Debug, Default)]
pub struct Texture {
/// The plain pixel data of this texture.
data: Rc<RefCell<Vec<u8>>>,
@ -74,6 +72,21 @@ impl Texture {
let clamped_to_byte = scaled_to_byte.clamp(UNSIGNED_BYTE_MIN, UNSIGNED_BYTE_MAX);
clamped_to_byte as u8
}
/// Get the raw pixel data.
pub fn to_image(&self) -> enso_bitmap::Image {
let width = Self::WIDTH;
let height = self.rows();
let data = self.data.borrow().clone();
enso_bitmap::Image { width, height, data }
}
/// Set the raw pixel data.
#[profile(Debug)]
pub fn set_data(&self, image: enso_bitmap::Image) {
debug_assert_eq!(image.width, Self::WIDTH);
*self.data.borrow_mut() = image.data;
}
}


@ -19,6 +19,7 @@
#![feature(once_cell)]
#![feature(is_sorted)]
#![feature(array_windows)]
#![feature(local_key_cell_methods)]
// === Standard Linter Configuration ===
#![deny(non_ascii_idents)]
#![warn(unsafe_code)]


@ -25,7 +25,6 @@ use crate::display::shape::primitive::glsl;
use crate::display::symbol::registry::RunMode;
use crate::display::symbol::registry::SymbolRegistry;
use crate::system::gpu::shader;
use crate::system::js;
use crate::system::web;
use enso_types::unit2::Duration;
@ -151,53 +150,14 @@ thread_local! {
pub static PRECOMPILED_SHADERS: RefCell<HashMap<String, PrecompiledShader>> = default();
}
/// Registers in JS a closure to acquire non-optimized shaders code and to set back optimized
/// shaders code.
#[before_main]
pub fn register_get_and_set_shaders_fns() {
let js_app = js::app_or_panic();
let closure = Closure::new(|| {
let map = gather_shaders();
let js_map = web::Map::new();
for (key, code) in map {
let value = web::Object::new();
web::Reflect::set(&value, &"vertex".into(), &code.vertex.into()).unwrap();
web::Reflect::set(&value, &"fragment".into(), &code.fragment.into()).unwrap();
js_map.set(&key.into(), &value);
}
js_map.into()
/// Set optimized shader code.
pub fn set_shader_code(key: String, vertex: String, fragment: String) {
let vertex = strip_instance_declarations(&vertex);
let precompiled_shader = PrecompiledShader(shader::Code { vertex, fragment });
debug!("Registering precompiled shaders for '{key}'.");
PRECOMPILED_SHADERS.with_borrow_mut(move |map| {
map.insert(key, precompiled_shader);
});
js_app.register_get_shaders_rust_fn(&closure);
mem::forget(closure);
let closure = Closure::new(|value: JsValue| {
if extract_shaders_from_js(value).err().is_some() {
warn!("Internal error. Downloaded shaders are provided in a wrong format.")
}
});
js_app.register_set_shaders_rust_fn(&closure);
mem::forget(closure);
}
/// Extract optimized shaders code from the JS value.
fn extract_shaders_from_js(value: JsValue) -> Result<(), JsValue> {
let map = value.dyn_into::<web::Map>()?;
for opt_entry in map.entries() {
let entry = opt_entry?.dyn_into::<web::Array>()?;
let key: String = entry.get(0).dyn_into::<web::JsString>()?.into();
let value = entry.get(1).dyn_into::<web::Object>()?;
let vertex_field = web::Reflect::get(&value, &"vertex".into())?;
let fragment_field = web::Reflect::get(&value, &"fragment".into())?;
let vertex: String = vertex_field.dyn_into::<web::JsString>()?.into();
let fragment: String = fragment_field.dyn_into::<web::JsString>()?.into();
let vertex = strip_instance_declarations(&vertex);
let precompiled_shader = PrecompiledShader(shader::Code { vertex, fragment });
debug!("Registering precompiled shaders for '{key}'.");
PRECOMPILED_SHADERS.with_borrow_mut(move |map| {
map.insert(key, precompiled_shader);
});
}
Ok(())
}
/// Remove initial instance variable declarations.
@ -233,7 +193,8 @@ fn strip_instance_declarations(input: &str) -> String {
code
}
fn gather_shaders() -> HashMap<&'static str, shader::Code> {
/// Collect the un-optimized shader code for all the shapes used by the application.
pub fn gather_shaders() -> HashMap<&'static str, shader::Code> {
with_context(|t| t.run_mode.set(RunMode::ShaderExtraction));
let mut map = HashMap::new();
SHAPES_DEFINITIONS.with(|shapes| {


@ -24,17 +24,23 @@ pub mod js_bindings {
pub type Config;
pub type Param;
/// Register in JS a closure to get non-precompiled shaders from Rust.
/// Register in JS a closure to get sources of dynamic assets from Rust.
#[allow(unsafe_code)]
#[wasm_bindgen(method)]
#[wasm_bindgen(js_name = registerGetShadersRustFn)]
pub fn register_get_shaders_rust_fn(this: &App, closure: &Closure<dyn FnMut() -> JsValue>);
#[wasm_bindgen(js_name = registerGetDynamicAssetsSourcesRustFn)]
pub fn register_get_dynamic_assets_sources_rust_fn(
this: &App,
_closure: &Closure<dyn FnMut() -> JsValue>,
);
/// Register in JS a closure to set precompiled shaders in Rust.
/// Register in JS a closure to set dynamic assets in Rust.
#[allow(unsafe_code)]
#[wasm_bindgen(method)]
#[wasm_bindgen(js_name = registerSetShadersRustFn)]
pub fn register_set_shaders_rust_fn(this: &App, closure: &Closure<dyn FnMut(JsValue)>);
#[wasm_bindgen(js_name = registerSetDynamicAssetRustFn)]
pub fn register_set_dynamic_asset_rust_fn(
this: &App,
_closure: &Closure<dyn FnMut(JsValue, JsValue, JsValue)>,
);
/// Show a spinner covering the whole viewport.
#[allow(unsafe_code)]
@ -60,8 +66,16 @@ pub mod js_bindings {
mock_data! { Param => JsValue }
impl App {
pub fn register_get_shaders_rust_fn(&self, _closure: &Closure<dyn FnMut() -> JsValue>) {}
pub fn register_set_shaders_rust_fn(&self, _closure: &Closure<dyn FnMut(JsValue)>) {}
pub fn register_get_dynamic_assets_sources_rust_fn(
&self,
_closure: &Closure<dyn FnMut() -> JsValue>,
) {
}
pub fn register_set_dynamic_asset_rust_fn(
&self,
_closure: &Closure<dyn FnMut(JsValue, JsValue, JsValue)>,
) {
}
pub fn show_progress_indicator(&self, _progress: f32) {}
pub fn hide_progress_indicator(&self) {}


@ -175,7 +175,7 @@ fn init(app: Application) {
// This is a testing string left here for convenience.
// area.set_content("aஓbc🧑🏾de\nfghij\nklmno\npqrst\n01234\n56789");
area.set_content(content);
area.set_font("mplus1p");
area.set_font(ensogl_text::font::DEFAULT_FONT);
area.set_property_default(color::Rgba::black());
area.deprecated_focus();
area.hover();


@ -8,11 +8,15 @@ edition = "2021"
crate-type = ["rlib"]
[dependencies]
futures = { version = "0.3" }
ide-ci = { path = "../../../../build/ci_utils" }
manifest-dir-macros = "0.1.16"
regex = { workspace = true }
serde = { version = "1.0.130", features = ["derive"] }
serde_json = { workspace = true }
tempfile = "3"
tokio = { workspace = true }
fs_extra = "1.2.0"
walkdir = "2"
enso-bitmap = { path = "../../bitmap" }
enso-prelude = { path = "../../prelude" }


@ -17,7 +17,7 @@
"scripts": {
"typecheck": "npx tsc --noEmit",
"build": "npx --yes tsup src/runner/index.ts --format=cjs --dts --sourcemap",
"build-shader-extractor": "npx --yes tsup --format=cjs --target=esnext src/shader-extractor/shader-extractor.ts --dts --sourcemap",
"build-asset-extractor": "npx --yes tsup --format=cjs --target=esnext src/asset-extractor/asset-extractor.ts --dts --sourcemap",
"build-runtime-libs": "npx --yes esbuild --bundle --platform=node --format=cjs src/runtime-libs/runtime-libs.ts",
"lint": "npx --yes eslint src"
},


@ -52,9 +52,7 @@ interface ParseArgsOptionConfig {
export class Args {
[key: string]: Option<string | boolean>
help = new Option('Print help message.', false)
outDir = new Option<string>(
'The directory the extracted non-optimized shaders will be written to.'
)
outDir = new Option<string>('The directory the extracted asset sources will be written to.')
}
export class ArgParser {


@ -0,0 +1,71 @@
/** @file Tool for extracting sources of dynamic assets from compiled WASM binaries. */
import path from 'path'
import * as args from 'asset-extractor/args'
import * as fs from 'asset-extractor/fs'
import * as log from 'runner/log'
import * as name from 'runner/name'
import * as runner from 'runner/index'
// ===========
// === App ===
// ===========
/** The main application. It loads the WASM file from disk, runs before-main entry points, extracts
 * asset sources, and saves them to files. */
class App extends runner.App {
override async loadWasm() {
const mainJsUrl = path.join(__dirname, this.config.groups.loader.options.jsUrl.value)
const mainWasmUrl = path.join(__dirname, this.config.groups.loader.options.wasmUrl.value)
const mainJs = await fs.readFile(mainJsUrl, 'utf8')
const mainWasm = await fs.readFile(mainWasmUrl)
this.wasm = await this.compileAndRunWasm(mainJs, mainWasm)
}
async extractAssets(outDir: string) {
await log.Task.asyncRun('Extracting dynamic assets source code.', async () => {
// Clear the extracted-sources directory before getting new sources.
// If getting sources fails we leave the directory empty, not outdated.
await fs.rm(outDir, { recursive: true, force: true })
await fs.mkdir(outDir)
const assetsMap = this.getAssetSources()
if (assetsMap) {
await log.Task.asyncRun(`Writing assets to '${outDir}'.`, async () => {
for (const [builder, asset] of assetsMap) {
for (const [key, files] of asset) {
const dirPath = path.join(outDir, builder, key)
await fs.mkdir(dirPath, { recursive: true })
for (const [name, data] of files) {
const filePath = path.join(dirPath, name)
await fs.writeFile(`${filePath}`, Buffer.from(data))
}
}
}
})
}
})
}
override async run(): Promise<void> {
const parser = args.parse()
const outDir = parser.args.outDir.value
if (outDir) {
await log.Task.asyncRun('Running the program.', async () => {
await app.loadAndInitWasm()
const r = app.runBeforeMainEntryPoints().then(() => {
return app.extractAssets(outDir)
})
await r
})
} else {
parser.printHelpAndExit(1)
}
}
}
// ============
// === Main ===
// ============
const app = new App()
void app.run()


@ -24,9 +24,9 @@
"description": "The URL of the JS pkg file generated by ensogl-pack.",
"primary": false
},
"shadersUrl": {
"value": "shaders",
"description": "The URL of pre-compiled the shaders directory.",
"assetsUrl": {
"value": "assets",
"description": "The URL of the dynamic assets directory.",
"primary": false
},
"downloadToInitRatio": {


@ -35,12 +35,13 @@ class Files<T> {
pkgJs: T
/** Main WASM file that contains the compiled WASM code. */
pkgWasm: T
/** Precompiled shaders files. */
shaders = new Shaders<T>()
/** Dynamic assets. */
assets: T[]
constructor(pkgJs: T, pkgWasm: T) {
constructor(pkgJs: T, pkgWasm: T, assets: T[]) {
this.pkgJs = pkgJs
this.pkgWasm = pkgWasm
this.assets = assets
}
async mapAndAwaitAll<S>(f: (t: T) => Promise<S>): Promise<Files<S>> {
@ -55,74 +56,62 @@ class Files<T> {
/** Converts the structure fields to an array. */
toArray(): T[] {
return [this.pkgJs, this.pkgWasm, ...this.shaders.toArray()]
return [this.pkgJs, this.pkgWasm, ...this.assets]
}
/** Assign array values to the structure fields. The elements order should be the same as the
* output of the `toArray` function. */
fromArray<S>(array: S[]): Files<S> | null {
const [pkgJs, pkgWasm, ...shaders] = array
const [pkgJs, pkgWasm, ...assets] = array
if (pkgJs != null && pkgWasm != null) {
const files = new Files<S>(pkgJs, pkgWasm)
files.shaders = this.shaders.fromArray(shaders) ?? new Shaders()
return files
return new Files<S>(pkgJs, pkgWasm, assets)
} else {
return null
}
}
}
/** Mapping between a shader identifier and precompiled shader sources. */
class Shaders<T> {
map = new Map<string, Shader<T>>()
class AssetDefinition {
dir: string
files: string[]
async mapAndAwaitAll<S>(f: (t: T) => Promise<S>): Promise<Shaders<S>> {
const mapped = await Promise.all(this.toArray().map(f))
const out = this.fromArray(mapped)
if (out != null) {
return out
} else {
log.panic()
}
}
/** Converts the structure fields to an array. The shader names are not preserved. */
toArray(): T[] {
return Array.from(this.map.values()).flatMap(shader => shader.toArray())
}
/** Assign array values to the structure fields. The elements order should be the same as the
* output of the `toArray` function. The shader names will be preserved and assigned to the
* input values in order. */
fromArray<S>(arr: S[]): Shaders<S> | null {
const shaders = new Shaders<S>()
const keys = Array.from(this.map.keys())
const tuples = array.arrayIntoTuples(arr)
if (tuples == null) {
log.panic()
} else {
for (const [key, [vertex, fragment]] of array.zip(keys, tuples)) {
const shader = new Shader(vertex, fragment)
shaders.map.set(key, shader)
}
return shaders
}
constructor(dir: string, files: string[]) {
this.dir = dir
this.files = files
}
}
/** Precompiled shader sources */
class Shader<T> {
vertex: T
fragment: T
class Assets<T> {
assets: Asset<T>[]
constructor(vertex: T, fragment: T) {
this.vertex = vertex
this.fragment = fragment
constructor(assets: Asset<T>[]) {
this.assets = assets
}
/** Converts the structure fields to an array. The shader names are not preserved. */
toArray(): T[] {
return [this.vertex, this.fragment]
async mapAndAwaitAll<S>(f: (t: T) => Promise<S>): Promise<Assets<S>> {
const assets = await Promise.all(this.assets.map(asset => asset.mapAndAwaitAll(f)))
return new Assets(assets)
}
}
class Asset<T> {
type: string
key: string
data: Map<string, T>
constructor(type: string, key: string, data: Map<string, T>) {
this.type = type
this.key = key
this.data = data
}
async mapAndAwaitAll<S>(f: (t: T) => Promise<S>): Promise<Asset<S>> {
const mapValue: ([k, v]: [string, T]) => Promise<[string, S]> = async ([k, v]) => [
k,
await f(v),
]
const data = new Map(await Promise.all(Array.from(this.data, mapValue)))
return new Asset(this.type, this.key, data)
}
}
@ -197,7 +186,7 @@ export class App {
config: config.Options
wasm: any = null
loader: wasm.Loader | null = null
shaders: Shaders<string> | null = null
assets: Assets<ArrayBuffer> | null = null
wasmFunctions: string[] = []
beforeMainEntryPoints = new Map<string, wasm.BeforeMainEntryPoint>()
mainEntryPoints = new Map<string, wasm.EntryPoint>()
@ -230,16 +219,16 @@ export class App {
host.exportGlobal({ ensoglApp: this })
}
/** Registers the Rust function that extracts the shader definitions. */
registerGetShadersRustFn(fn: GetShadersFn) {
logger.log(`Registering 'getShadersFn'.`)
rustGetShadersFn = fn
/** Registers the Rust function that extracts asset source files. */
registerGetDynamicAssetsSourcesRustFn(fn: GetAssetsSourcesFn) {
logger.log(`Registering 'getAssetsSourcesFn'.`)
rustGetAssetsSourcesFn = fn
}
/** Registers the Rust function that injects the shader definitions. */
registerSetShadersRustFn(fn: SetShadersFn) {
logger.log(`Registering 'setShadersFn'.`)
rustSetShadersFn = fn
/** Registers the Rust function that injects dynamic assets. */
registerSetDynamicAssetRustFn(fn: SetAssetFn) {
logger.log(`Registering 'setAssetFn'.`)
rustSetAssetFn = fn
}
/** Log the message on the remote server. */
@ -317,42 +306,56 @@ export class App {
async loadWasm() {
const loader = new wasm.Loader(this.config)
const shadersUrl = this.config.groups.loader.options.shadersUrl.value
const shadersNames = await log.Task.asyncRunCollapsed(
'Downloading shaders list.',
const assetsUrl = this.config.groups.loader.options.assetsUrl.value
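// The manifest maps builder name -> asset key -> { dir, files }; e.g. each `font` entry lists
// `atlas.ppm` and `metadata.json`, the file names produced by the Rust-side font builder.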
const manifest = await log.Task.asyncRunCollapsed(
'Downloading assets manifest.',
async () => {
const shadersListResponse = await fetch(`${shadersUrl}/list.txt`)
const shadersList = await shadersListResponse.text()
return shadersList.split('\n').filter(line => line.length > 0)
const manifestResponse = await fetch(`${assetsUrl}/manifest.json`)
const manifest: Record<
string,
Record<string, AssetDefinition>
> = await manifestResponse.json()
return manifest
}
)
const assetsUrls: string[] = []
const assetsInfo: Asset<number>[] = []
for (const [type, typeAssets] of Object.entries(manifest)) {
for (const [key, asset] of Object.entries(typeAssets)) {
const toUrl = (name: string) => {
const index = assetsUrls.length
assetsUrls.push(`${assetsUrl}/${type}/${asset.dir}/${name}`)
return index
}
const urls = new Map(asset.files.map(name => [name, toUrl(name)]))
assetsInfo.push(new Asset<number>(type, key, urls))
}
}
const files = new Files(
this.config.groups.loader.options.jsUrl.value,
this.config.groups.loader.options.wasmUrl.value
this.config.groups.loader.options.wasmUrl.value,
assetsUrls
)
for (const mangledName of shadersNames) {
const unmangledName = name.unmangle(mangledName)
const vertexUrl = `${shadersUrl}/${mangledName}.vertex.glsl`
const fragmentUrl = `${shadersUrl}/${mangledName}.fragment.glsl`
files.shaders.map.set(unmangledName, new Shader(vertexUrl, fragmentUrl))
}
const responses = await files.mapAndAwaitAll(url => fetch(url))
const responsesArray = responses.toArray()
loader.load(responsesArray)
loader.load(responses.toArray())
const downloadSize = loader.showTotalBytes()
const task = log.Task.startCollapsed(`Downloading application files (${downloadSize}).`)
void loader.done.then(() => task.end())
for (const file of files.toArray()) {
logger.log(`Downloading '${file}'.`)
}
void loader.done.then(() => task.end())
const assetsResponses = responses.assets
const assetsBlobs = await Promise.all(
assetsResponses.map(response => response.blob().then(blob => blob.arrayBuffer()))
)
const assets = assetsInfo.map(info => {
const data = new Map(Array.from(info.data, ([k, i]) => [k, assetsBlobs[i]!]))
return new Asset(info.type, info.key, data)
})
const pkgJs = await responses.pkgJs.text()
this.loader = loader
this.wasm = await this.compileAndRunWasm(pkgJs, responses.pkgWasm)
this.shaders = await responses.shaders.mapAndAwaitAll(t => t.text())
this.assets = new Assets(assets)
}
/** Loads the WASM binary and its dependencies. After the files are fetched, the WASM module is
@ -422,13 +425,19 @@ export class App {
}
}
/** Run both before main entry points and main entry point. */
/** Run both before-main entry points and main entry point. */
async runEntryPoints() {
const entryPointName = this.config.groups.startup.options.entry.value
const entryPoint = this.mainEntryPoints.get(entryPointName)
if (entryPoint) {
await this.runBeforeMainEntryPoints()
if (this.shaders) this.setShaders(this.shaders.map)
log.Task.runCollapsed(`Sending dynamic assets to Rust.`, () => {
if (this.assets) {
for (const asset of this.assets.assets) {
this.setAsset(asset.type, asset.key, asset.data)
}
}
})
if (this.loader) this.loader.destroy()
logger.log(`Running the main entry point '${entryPoint.displayName()}'.`)
const fn = this.wasm[entryPoint.name()]
@ -558,30 +567,31 @@ export class App {
console.log('%c' + msg2, msgCSS)
}
/* Get not optimized shaders from WASM. */
getShaders(): Map<string, { vertex: string; fragment: string }> | null {
return log.Task.run('Getting shaders from Rust.', () => {
if (!rustGetShadersFn) {
logger.error('The Rust shader extraction function was not registered.')
getAssetSources(): Map<string, Map<string, Map<string, ArrayBuffer>>> | null {
return log.Task.run('Getting dynamic asset sources from Rust.', () => {
if (!rustGetAssetsSourcesFn) {
logger.error('The Rust dynamic asset sources function was not registered.')
return null
} else {
const result = rustGetShadersFn()
logger.log(`Got ${result.size} shader definitions.`)
const resultUnmangled = rustGetAssetsSourcesFn()
const mangleKeys = <T>(map: Map<string, T>) =>
new Map(Array.from(map, ([key, value]) => [name.mangle(key), value]))
const result = new Map(
Array.from(resultUnmangled, ([key, value]) => [key, mangleKeys(value)])
)
logger.log(`Got ${result.size} asset definitions.`)
return result
}
})
}
/* Set optimized shaders in WASM. */
setShaders(map: Map<string, { vertex: string; fragment: string }>) {
log.Task.runCollapsed(`Sending ${map.size} shaders to Rust.`, () => {
if (!rustSetShadersFn) {
logger.error('The Rust shader injection function was not registered.')
} else {
logger.log(`Setting ${map.size} shader definitions.`)
rustSetShadersFn(map)
}
})
setAsset(builder: string, keyMangled: string, data: Map<string, ArrayBuffer>) {
if (!rustSetAssetFn) {
logger.error('The Rust asset injection function was not registered.')
} else {
const key = name.unmangle(keyMangled)
rustSetAssetFn(builder, key, data)
}
}
}
@ -589,22 +599,8 @@ export class App {
// === App Initialization ===
// ==========================
type GetShadersFn = () => Map<string, { vertex: string; fragment: string }>
type SetShadersFn = (map: Map<string, { vertex: string; fragment: string }>) => void
type GetAssetsSourcesFn = () => Map<string, Map<string, Map<string, ArrayBuffer>>>
type SetAssetFn = (builder: string, key: string, data: Map<string, ArrayBuffer>) => void
let rustGetShadersFn: null | GetShadersFn = null
let rustSetShadersFn: null | SetShadersFn = null
/** Registers the Rust function that extracts the shader definitions. */
function registerGetShadersRustFn(fn: GetShadersFn) {
logger.log(`Registering 'getShadersFn'.`)
rustGetShadersFn = fn
}
/** Registers the Rust function that injects the shader definitions. */
function registerSetShadersRustFn(fn: SetShadersFn) {
logger.log(`Registering 'setShadersFn'.`)
rustSetShadersFn = fn
}
host.exportGlobal({ registerGetShadersRustFn, registerSetShadersRustFn })
let rustGetAssetsSourcesFn: null | GetAssetsSourcesFn = null
let rustSetAssetFn: null | SetAssetFn = null


@ -1,69 +0,0 @@
/** @file Tool for extracting shaders of EnsoGL shapes from compiled WASM binaries. */
import path from 'path'
import * as args from 'shader-extractor/args'
import * as fs from 'shader-extractor/fs'
import * as log from 'runner/log'
import * as name from 'runner/name'
import * as runner from 'runner/index'
// ===========
// === App ===
// ===========
/** The main application. It loads the WASM file from disk, runs before-main entry points, extracts
 * non-optimized shaders, and saves them to files. */
class App extends runner.App {
override async loadWasm() {
const mainJsUrl = path.join(__dirname, this.config.groups.loader.options.jsUrl.value)
const mainWasmUrl = path.join(__dirname, this.config.groups.loader.options.wasmUrl.value)
const mainJs = await fs.readFile(mainJsUrl, 'utf8')
const mainWasm = await fs.readFile(mainWasmUrl)
this.wasm = await this.compileAndRunWasm(mainJs, mainWasm)
}
async extractShaders(outDir: string) {
await log.Task.asyncRun('Extracting shaders code.', async () => {
const shadersMap = this.getShaders()
if (shadersMap) {
await log.Task.asyncRun(`Writing shaders to '${outDir}'.`, async () => {
await fs.rm(outDir, { recursive: true, force: true })
await fs.mkdir(outDir)
const fileNames = []
for (const [codePath, code] of shadersMap) {
const fileName = name.mangle(codePath)
const filePath = path.join(outDir, fileName)
await fs.writeFile(`${filePath}.vertex.glsl`, code.vertex)
await fs.writeFile(`${filePath}.fragment.glsl`, code.fragment)
fileNames.push(fileName)
}
const fileListPath = path.join(outDir, 'list.txt')
await fs.writeFile(fileListPath, fileNames.join('\n'))
})
}
})
}
override async run(): Promise<void> {
const parser = args.parse()
const outDir = parser.args.outDir.value
if (outDir) {
await log.Task.asyncRun('Running the program.', async () => {
await app.loadAndInitWasm()
const r = app.runBeforeMainEntryPoints().then(() => {
return app.extractShaders(outDir)
})
await r
})
} else {
parser.printHelpAndExit(1)
}
}
}
// ============
// === Main ===
// ============
const app = new App()
void app.run()


@ -0,0 +1,402 @@
//! Building dynamic assets (assets which require the application to be run to generate their
//! sources).
//!
//! The essential operation, producing a directory of outputs from a directory of inputs, is
//! implemented by each builder (e.g. [`Builder::Shader`], [`Builder::Font`]).
//!
//! As builders can take some time to run, a caching mechanism is used to avoid unnecessary
//! rebuilds. Caching is achieved by making population of the output directory an idempotent process:
//! Paths within the output directory are dependent on the *content* of the corresponding input
//! files, so that if a calculated output path already exists, it is already up-to-date; otherwise,
//! it must be built. This design may be familiar to users of the Nix or Guix package managers.
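To make the caching scheme above concrete, here is a minimal, self-contained sketch of content-addressed output naming (an editorial illustration, not part of this crate; the helper name, asset key, and paths are hypothetical). It mirrors the hashing approach used by `survey_asset_sources` further down in this file:

// Sketch: derive a content-addressed output directory for one asset.
use std::collections::hash_map::DefaultHasher;
use std::collections::BTreeMap;
use std::hash::{Hash, Hasher};
use std::path::{Path, PathBuf};

/// Hypothetical helper: the directory name is `<key>-<hash of all input contents>`, so an
/// existing output directory is, by construction, already up-to-date.
fn cached_output_dir(out_root: &Path, asset_key: &str, inputs: &BTreeMap<String, Vec<u8>>) -> PathBuf {
    let mut file_hashes = BTreeMap::new();
    for (file_name, contents) in inputs {
        let mut hasher = DefaultHasher::new();
        contents.hash(&mut hasher);
        file_hashes.insert(file_name.clone(), hasher.finish());
    }
    let mut asset_hasher = DefaultHasher::new();
    file_hashes.hash(&mut asset_hasher);
    out_root.join(format!("{asset_key}-{:x}", asset_hasher.finish()))
}

fn main() {
    let mut inputs = BTreeMap::new();
    inputs.insert("fragment.glsl".to_string(), b"void main() {}".to_vec());
    let dir = cached_output_dir(Path::new("dist/dynamic-assets/shader"), "rect_view", &inputs);
    // If `dir` already exists, the asset is up-to-date; otherwise a builder must populate it.
    println!("{}", dir.display());
}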
use crate::Paths;
use enso_prelude::anyhow;
use ide_ci::prelude::*;
use ide_ci::programs::shaderc::Glslc;
use ide_ci::programs::shaderc::SpirvOpt;
use ide_ci::programs::spirv_cross::SpirvCross;
use std::hash::Hasher;
// =============
// === Build ===
// =============
/// Bring the dynamic assets up-to-date, for the current asset sources. This consists of:
/// - Scan the asset source directory tree, hashing the input files.
/// - Update the assets:
/// - For each asset-source directory, determine an output directory based on the input's name
///   and the hashes of its files.
/// - If that output directory doesn't exist, run the builder (determined by the top-level
///   directory in which the asset was found, e.g. `shader`) and populate the directory.
/// - Generate a manifest, identifying the current assets and the paths of their files.
pub async fn build(paths: &Paths) -> Result<()> {
info!("Building dynamic assets.");
let sources = survey_asset_sources(paths)?;
let assets = update_assets(paths, &sources).await?;
let manifest = serde_json::to_string(&assets)?;
ide_ci::fs::tokio::write(&paths.target.ensogl_pack.dist.dynamic_assets.manifest, manifest)
.await?;
gc_assets(paths, &assets)?;
Ok(())
}
// ===============
// === Builder ===
// ===============
/// Identifies an asset type, which determines how it is built.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize)]
#[serde(rename_all = "lowercase")]
enum Builder {
Font,
Shader,
}
impl Builder {
fn dir_name<'a>(self) -> &'a str {
self.into()
}
async fn build_asset(
self,
input_dir: &Path,
input_files: &[String],
output_dir: &Path,
tmp: &Path,
) -> Result<()> {
match self {
Builder::Font => build_font(input_dir, input_files, output_dir).await,
Builder::Shader => build_shader(input_dir, input_files, output_dir, tmp).await,
}
}
}
impl TryFrom<&str> for Builder {
type Error = anyhow::Error;
fn try_from(value: &str) -> std::result::Result<Self, Self::Error> {
match value {
"font" => Ok(Builder::Font),
"shader" => Ok(Builder::Shader),
other => Err(anyhow!("Unknown builder: {other:?}")),
}
}
}
impl From<Builder> for &'static str {
fn from(value: Builder) -> Self {
match value {
Builder::Font => "font",
Builder::Shader => "shader",
}
}
}
// ====================
// === Build Inputs ===
// ====================
/// The inputs to a builder.
struct AssetSources {
asset_key: String,
input_files: Vec<String>,
inputs_hash: u64,
}
impl AssetSources {
/// The output directory name for the asset.
fn dir_name(&self) -> String {
let key = &self.asset_key;
let hash = self.inputs_hash;
format!("{key}-{hash:x}")
}
}
// =====================
// === Build Outputs ===
// =====================
/// The outputs of a builder.
#[derive(Serialize)]
struct Asset {
dir: String,
files: Vec<String>,
}
/// The outputs of all builders.
type AssetManifest = BTreeMap<Builder, BTreeMap<String, Asset>>;
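For orientation, a hedged illustration of the manifest shape this type serializes to (builder name → asset key → `{dir, files}`); the keys, hash suffixes, and file names below are hypothetical, constructed with `serde_json` rather than taken from a real build:

use serde_json::json;

fn main() {
    // Hypothetical `manifest.json` content: one shader asset and one font asset.
    let manifest = json!({
        "shader": {
            "rect_view": {
                "dir": "rect_view-1f3a5c7e9b2d4f60",
                "files": ["vertex.glsl", "fragment.glsl"]
            }
        },
        "font": {
            "default": {
                "dir": "default-0badc0ffee123456",
                "files": ["atlas.png", "metadata.json"]
            }
        }
    });
    println!("{}", serde_json::to_string_pretty(&manifest).unwrap());
}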
// ================
// === Building ===
// ================
/// Scan the sources found in the asset sources directory.
///
/// Returns, for each [`Builder`] (e.g. shader or font), for each asset directory found, an
/// [`AssetSources`] object identifying the asset key (i.e. the name of its directory), its input
/// files, and a hash covering all its input files.
fn survey_asset_sources(paths: &Paths) -> Result<HashMap<Builder, Vec<AssetSources>>> {
let dir = ide_ci::fs::read_dir(&paths.target.ensogl_pack.dynamic_assets)?;
let mut asset_sources: HashMap<_, Vec<_>> = HashMap::new();
let mut buf = Vec::new();
for entry in dir {
let entry = entry?;
let builder = Builder::try_from(entry.file_name().to_string_lossy().as_ref())?;
let builder_dir = ide_ci::fs::read_dir(entry.path())?;
let builder_sources = asset_sources.entry(builder).or_default();
for entry in builder_dir {
let entry = entry?;
let asset_key = entry.file_name().to_string_lossy().to_string();
let dir = ide_ci::fs::read_dir(entry.path())?;
let mut file_hashes = BTreeMap::new();
for entry in dir {
let entry = entry?;
let file_name = entry.file_name().to_string_lossy().to_string();
let path = entry.path();
buf.clear();
ide_ci::fs::open(path)?.read_to_end(&mut buf)?;
let mut file_hasher = std::collections::hash_map::DefaultHasher::new();
buf.hash(&mut file_hasher);
file_hashes.insert(file_name, file_hasher.finish());
}
let mut asset_hasher = std::collections::hash_map::DefaultHasher::new();
file_hashes.hash(&mut asset_hasher);
let inputs_hash = asset_hasher.finish();
let input_files = file_hashes.into_keys().collect();
builder_sources.push(AssetSources { asset_key, input_files, inputs_hash });
}
}
Ok(asset_sources)
}
/// Generate any assets not found up-to-date in the cache.
///
/// If an output directory already exists, it can be assumed to be up-to-date (because the output
/// path is dependent on the input data), and is used as-is. Otherwise, [`build_asset`] runs the
/// appropriate builder to generate the output directory. In either case, a summary of the files
/// present in the output directory is produced; these summaries are assembled into an
/// [`AssetManifest`].
///
/// When asset builders need to be invoked, they are all run in parallel.
async fn update_assets(
paths: &Paths,
sources: &HashMap<Builder, Vec<AssetSources>>,
) -> Result<AssetManifest> {
let out = &paths.target.ensogl_pack.dist.dynamic_assets;
ide_ci::fs::create_dir_if_missing(out)?;
let mut assets: AssetManifest = BTreeMap::new();
let mut deferred_assets: BTreeMap<Builder, Vec<_>> = BTreeMap::new();
for (&builder, builder_sources) in sources {
let out = out.join(builder.dir_name());
ide_ci::fs::create_dir_if_missing(&out)?;
for source_specification in builder_sources {
let out = out.join(source_specification.dir_name());
let key = source_specification.asset_key.clone();
match std::fs::try_exists(&out)? {
false => {
info!("Rebuilding asset: `{}`.", out.display());
let builder_assets = deferred_assets.entry(builder).or_default();
let build = build_asset(paths, builder, source_specification);
builder_assets.push(async move { Ok((key, build.await?)) });
}
true => {
debug!("Skipping clean asset: `{}`.", out.display());
let builder_assets = assets.entry(builder).or_default();
let asset = survey_asset(paths, builder, source_specification)?;
builder_assets.insert(key, asset);
}
};
}
}
for (builder, deferred_assets) in deferred_assets.into_iter() {
let deferred_assets = futures::future::join_all(deferred_assets).await;
let deferred_assets: Result<Vec<_>> = deferred_assets.into_iter().collect();
assets.entry(builder).or_default().extend(deferred_assets?);
}
Ok(assets)
}
/// Generate an asset from the given sources.
///
/// Set up paths (as described in the [`crate`] docs): run the appropriate [`Builder`]; move its
/// output from a temporary path into its final location (note that outputs are not built directly
/// in their final location, because directories found in the output tree are assumed to
/// accurately represent the results of running the specified builder for the specified inputs;
/// creating the output directory in its complete state ensures that if a build process is
/// interrupted, incomplete artifacts are never used).
async fn build_asset(
paths: &Paths,
builder: Builder,
source_specification: &AssetSources,
) -> Result<Asset> {
let input_dir = paths
.target
.ensogl_pack
.dynamic_assets
.join(builder.dir_name())
.join(&source_specification.asset_key);
let tmp_output_dir = paths
.target
.ensogl_pack
.dist
.dynamic_assets
.join(builder.dir_name())
.join(&source_specification.asset_key);
tokio::fs::create_dir(&tmp_output_dir).await?;
let work_path = paths
.target
.ensogl_pack
.dynamic_assets
.join(builder.dir_name())
.join(format!("{}.work", source_specification.asset_key));
builder
.build_asset(&input_dir, &source_specification.input_files, &tmp_output_dir, &work_path)
.await?;
let output_dir = paths
.target
.ensogl_pack
.dist
.dynamic_assets
.join(builder.dir_name())
.join(source_specification.dir_name());
tokio::fs::rename(tmp_output_dir, output_dir).await?;
survey_asset(paths, builder, source_specification)
}
/// Identify the files present in an asset directory.
fn survey_asset(
paths: &Paths,
builder: Builder,
source_specification: &AssetSources,
) -> Result<Asset> {
let dir = source_specification.dir_name();
let path = paths.target.ensogl_pack.dist.dynamic_assets.join(builder.dir_name()).join(&dir);
let mut files = Vec::new();
for entry in ide_ci::fs::read_dir(&path)? {
files.push(entry?.file_name().to_string_lossy().to_string());
}
Ok(Asset { dir, files })
}
/// Remove any assets not present in the manifest.
fn gc_assets(paths: &Paths, assets: &AssetManifest) -> Result<()> {
let is_not_manifest = |entry: &std::io::Result<std::fs::DirEntry>| {
entry
.as_ref()
.map(|entry| entry.path() != paths.target.ensogl_pack.dist.dynamic_assets.manifest)
.unwrap_or(true)
};
for entry in paths.target.ensogl_pack.dist.dynamic_assets.read_dir()?.filter(is_not_manifest) {
let entry = entry?;
let path = entry.path();
let builder = Builder::try_from(entry.file_name().to_string_lossy().as_ref()).ok();
let assets = builder.and_then(|builder| assets.get(&builder));
match assets {
Some(assets) => {
let assets: HashSet<_> = assets.values().map(|asset| asset.dir.as_ref()).collect();
for entry in path.read_dir()? {
let entry = entry?;
let path = entry.path();
if !assets.contains(entry.file_name().to_string_lossy().as_ref()) {
info!("Cleaning unused asset at `{}`.", path.display());
ide_ci::fs::remove_if_exists(path)?;
}
}
}
_ => {
info!("Cleaning unused builder at `{}`.", path.display());
ide_ci::fs::remove_if_exists(path)?;
}
}
}
Ok(())
}
// =============
// === Fonts ===
// =============
async fn build_font(input_dir: &Path, input_files: &[String], output_dir: &Path) -> Result<()> {
for file_name in input_files {
crate::copy(input_dir.join(file_name), output_dir.join(file_name))?;
}
Ok(())
}
// ===============
// === Shaders ===
// ===============
/// Build optimized shaders by using `glslc`, `spirv-opt` and `spirv-cross`.
async fn build_shader(
input_dir: &Path,
input_files: &[String],
output_dir: &Path,
work_dir: &Path,
) -> Result<()> {
ide_ci::fs::tokio::create_dir_if_missing(work_dir).await?;
info!("Optimizing `{}`.", input_dir.file_name().unwrap_or_default().to_string_lossy());
for glsl_file_name in input_files {
let glsl_path = input_dir.join(glsl_file_name);
let work_path = work_dir.join(glsl_file_name);
let stage_path = work_path.with_extension("");
let stage =
stage_path.file_name().ok_or_else(|| anyhow!("Empty stage path."))?.to_string_lossy();
let spv_path = stage_path.with_appended_extension("spv");
let spv_opt_path = stage_path.with_appended_extension("opt.spv");
let glsl_opt_path = stage_path.with_appended_extension("opt.glsl");
let glsl_opt_dist_path = output_dir.join(glsl_file_name);
let spv_path = spv_path.as_str();
let glsl_path = glsl_path.as_str();
let shader_stage = &format!("-fshader-stage={stage}");
let glslc_args = ["--target-env=opengl", shader_stage, "-o", spv_path, glsl_path];
let spirv_opt_args = ["-O", "-o", spv_opt_path.as_str(), spv_path.as_str()];
let spirv_cross_args = ["--output", glsl_opt_path.as_str(), spv_opt_path.as_str()];
Glslc.cmd()?.args(glslc_args).run_ok().await?;
SpirvOpt.cmd()?.args(spirv_opt_args).run_ok().await?;
SpirvCross.cmd()?.args(spirv_cross_args).run_ok().await?;
let content =
ide_ci::fs::tokio::read_to_string(&glsl_opt_path).await?.replace("\r\n", "\n");
let extract_err = || format!("Failed to process shader '{}'.", glsl_opt_path.as_str());
let code = extract_main_shader_code(&content).with_context(extract_err)?;
ide_ci::fs::tokio::write(&glsl_opt_dist_path, code).await?;
}
Ok(())
}
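As a hedged, std-only sketch of the naming scheme used above (the real code relies on `ide_ci` path helpers such as `with_appended_extension`; the directory and file names here are illustrative): the shader stage is taken from the input file stem, and each tool writes its result next to it in the work directory:

use std::path::Path;

fn main() {
    let work_dir = Path::new("dynamic-assets/shader/rect_view.work"); // hypothetical work dir
    let work_path = work_dir.join("fragment.glsl");
    let stage_path = work_path.with_extension(""); // ".../fragment"
    let stage = stage_path.file_name().unwrap().to_string_lossy().to_string(); // "fragment"
    let spv = format!("{}.spv", stage_path.display()); // written by glslc
    let spv_opt = format!("{}.opt.spv", stage_path.display()); // written by spirv-opt
    let glsl_opt = format!("{}.opt.glsl", stage_path.display()); // written by spirv-cross
    println!("-fshader-stage={stage}\n{spv}\n{spv_opt}\n{glsl_opt}");
}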
/// Read the optimized shader code, extract the main function body and preserve all top-level
/// variable declarations.
fn extract_main_shader_code(code: &str) -> Result<String> {
let main_start_str = "void main()\n{";
let main_end_str = "}";
let main_fn_find_err = "Failed to find main function.";
let main_start = code.find(main_start_str).with_context(|| main_fn_find_err)?;
let main_end = code.rfind(main_end_str).with_context(|| main_fn_find_err)?;
let before_main = &code[..main_start];
let declarations: Vec<&str> = before_main
.lines()
.filter_map(|line| {
let version_def = line.starts_with("#version ");
let precision_def = line.starts_with("precision ");
let layout_def = line.starts_with("layout(");
let def = version_def || precision_def || layout_def;
(!def).then_some(line)
})
.collect();
let declarations = declarations.join("\n");
let main_content = &code[main_start + main_start_str.len()..main_end];
Ok(format!("{declarations}\n{main_content}"))
}
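A hedged, test-style usage sketch for `extract_main_shader_code`; the shader text is made up, and the sketch assumes it lives in the same module as the function above:

#[test]
fn extract_main_shader_code_keeps_declarations_and_main_body() {
    let optimized =
        "#version 300 es\nprecision highp float;\nout vec4 color;\nuniform float time;\nvoid main()\n{\n    color = vec4(time);\n}\n";
    let code = extract_main_shader_code(optimized).unwrap();
    // `#version`/`precision`/`layout(` lines are dropped; the remaining declarations and the body
    // of `main` are kept.
    assert_eq!(code, "out vec4 color;\nuniform float time;\n\n    color = vec4(time);\n");
}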


@ -1,6 +1,6 @@
//! EnsoGL Pack compiles Rust sources, precompile shaders of EnsoGL app, and outputs JS WASM loader,
//! additional JS runtime utilities, and a set of optimized shaders. It is a wrapper for `wasm-pack`
//! tool.
//! EnsoGL Pack compiles Rust sources, builds the dynamic assets of the EnsoGL app (including
//! optimized shaders and pre-seeded caches), and outputs the JS WASM loader, additional JS runtime
//! utilities, and a set of optimized dynamic assets. It is a wrapper for the `wasm-pack` tool.
//!
//! # Compilation process.
//! When run, the following file tree will be created/used. The files/directories marked with '*'
@ -25,13 +25,15 @@
//! │ ├─ runtime-libs.js | Bundled `this_crate/js/runtime-libs`.
//! │ ╰─ snippets | Rust-extracted JS snippets.
//! │ ╰─ <name>.js | A single Rust-extracted JS snippet.
//! ├─ shaders | Not optimized shaders sources extracted from WASM bundle.
//! │ ├─ list.txt | List of extracted not optimized shaders (no extensions).
//! │ ├─ <name>.<stage>.glsl | A single not optimized shader. (Stage = vertex|fragment).
//! │ ╰─ ...
//! ├─ shaders-hash | Not optimized shader hashes. Used to detect changes.
//! │ ├─ <name>.<stage>.hash | A single not optimized shader hash.
//! │ ╰─ ...
//! ├─ dynamic-assets | Dynamic asset sources extracted from WASM bundle.
//! │ ├─ shader | Pre-compiled shaders.
//! │ │ ├─ <key> | Asset sources (the GLSL file).
//! │ │ ├─ <key>.work | Intermediate files produced by the shader compiler.
//! │ │ ╰─ ...
//! │ ├─ font | Pre-generated MSDF data.
//! │ │ ├─ <key> | Asset sources (the glyph atlas image, and metadata).
//! │ │ ╰─ ...
//! │ ╰─ <type>...
//! ├─ runtime-libs
//! │ ╰─ runtime-libs.js
//! ├─ linked-dist | Either symlink to dist or to the gui artifacts.
@ -39,16 +41,21 @@
//! * ├─ index.js | The main JS bundle to load WASM and JS wasm-pack bundles.
//! ├─ index.js.map | The sourcemap mapping to sources in TypeScript.
//! ** ├─ index.d.ts | TypeScript types interface file.
//! ├─ shader-extractor.js | Node program to extract non optimized shaders from WASM.
//! ├─ shader-extractor.js.map | The sourcemap mapping to sources in TypeScript.
//! ├─ shader-extractor.d.ts | TypeScript types interface file.
//! ├─ asset-extractor.js | Node program to extract asset sources from WASM.
//! ├─ asset-extractor.js.map | The sourcemap mapping to sources in TypeScript.
//! ├─ asset-extractor.d.ts | TypeScript types interface file.
//! * ├─ pkg.js | The `pkg.js` artifact of wasm-pack WITH bundled snippets.
//! ├─ pkg.js.map | The sourcemap mapping to `pkg.js` generated by wasm-pack.
//! * ├─ pkg.wasm | The `pkg_bg.wasm` artifact of wasm-pack.
//! * ╰─ shaders | Optimized shaders that contain main function code only.
//! ├─ list.txt | List of optimized shaders (no extensions).
//! ├─ <name>.<stage>.glsl | A single optimized shader. (Stage = vertex|fragment).
//! ╰─ ...
//! * ╰─ dynamic-assets | Built dynamic assets.
//! ├─ manifest.json | An index of all the assets and their files.
//! ├─ shader | Pre-compiled shaders.
//! │ ├─ <key> | A subdirectory for each asset.
//! │ ╰─ ...
//! ├─ font | Pre-generated MSDF data.
//! │ ├─ <key> | A subdirectory for each asset.
//! │ ╰─ ...
//! ╰─ <type>...
//! ```
//!
//! The high-level app compilation process is summarized below:
@ -78,23 +85,24 @@
//! `target/ensogl-pack/wasm-pack/index.ts`. This is the main file which when compiled glues
//! `pkg.js`, `snippets`, and `runtime-libs.js` into a single bundle.
//!
//! 4. The program `target/ensogl-pack/dist/shader-extractor.js` is run. It loads
//! `target/dist/pkg.wasm` and writes non-optimized shader code to `target/ensogl-pack/shaders`.
//! 4. The program `target/ensogl-pack/dist/asset-extractor.js` is run. It loads
//! `target/dist/pkg.wasm` and writes asset sources to `target/ensogl-pack/dynamic-assets`.
//!
//! 5. For each shader, the hash of its code is computed and compared to the hash stored in
//! `target/ensogl-pack/shaders-hash`. If the hash did not exist or is different, the shader is
//! optimized by using `glslc`, spirv-opt`, and `spirv-cross`, and the result is written to
//! `dist/shaders`.
//! 5. For each asset, its inputs are hashed and an output directory is determined based on its
//! name and input hash. If the output directory doesn't already exist, the asset is built, and the
//! result is written to `dist/dynamic-assets`. The manifest is rebuilt to reflect the current set
//! of asset outputs, and any outdated output directories are removed.
//!
//! 6. The `target/ensogl-pack/wasm-pack/index.ts` is compiled to
//! `target/ensogl-pack/dis/index.js`. It is then compiled to `target/ensogl-pack/dist/index.js`.
//! `target/ensogl-pack/dist/index.js`.
//!
//!
//!
//! # Runtime process.
//! When `target/dist/index.js` is run:
//!
//! 1. The following files are downloaded from a server: `target/dist/{pkg.js, pkg.wasm, shaders}`.
//! 1. The following files are downloaded from a server:
//! `target/dist/{pkg.js, pkg.wasm, dynamic-assets}`.
//! 2. The code from `pkg.js` is run to compile the WASM file.
//! 3. All before-main entry points are run.
//! 4. Optimized shaders are uploaded to the EnsoGL application.
@ -102,19 +110,17 @@
// === Features ===
#![feature(async_closure)]
#![feature(fs_try_exists)]
// === Standard Linter Configuration ===
#![deny(non_ascii_idents)]
#![warn(unsafe_code)]
#![warn(missing_docs)]
#![allow(clippy::bool_to_int_with_if)]
#![allow(clippy::let_and_return)]
use ide_ci::prelude::*;
use enso_prelude::calculate_hash;
use ide_ci::program::EMPTY_ARGS;
use ide_ci::programs::shaderc::Glslc;
use ide_ci::programs::shaderc::SpirvOpt;
use ide_ci::programs::spirv_cross::SpirvCross;
use ide_ci::programs::wasm_pack::WasmPackCommand;
use manifest_dir_macros::path;
use std::env;
@ -122,6 +128,9 @@ use std::path::Path;
use std::path::PathBuf;
use walkdir::WalkDir;
pub mod assets;
// ==============
// === Export ===
@ -196,6 +205,7 @@ macro_rules! define_paths {
)*};
}
/// Paths used during build.
pub mod paths {
use super::*;
define_paths! {
@ -216,16 +226,11 @@ pub mod paths {
}
TargetEnsoglPack {
wasm_pack: TargetEnsoglPackWasmPack,
shaders: TargetEnsoglPackShaders,
shaders_hash: PathBuf,
runtime_libs: TargetEnsoglPackRuntimeLibs,
dist: TargetEnsoglPackDist,
linked_dist: PathBuf,
}
TargetEnsoglPackShaders {
list: PathBuf,
wasm_pack: TargetEnsoglPackWasmPack,
dynamic_assets: PathBuf,
runtime_libs: TargetEnsoglPackRuntimeLibs,
dist: TargetEnsoglPackDist,
linked_dist: PathBuf,
}
TargetEnsoglPackRuntimeLibs {
@ -241,14 +246,14 @@ pub mod paths {
TargetEnsoglPackDist {
app: PathBuf,
shader_extractor: PathBuf,
pkg_js: PathBuf,
asset_extractor: PathBuf,
pkg_js: PathBuf,
main_wasm: PathBuf,
shaders: TargetEnsoglPackDistShaders,
dynamic_assets: TargetEnsoglPackDistDynamicAssets,
}
TargetEnsoglPackDistShaders {
list: PathBuf,
TargetEnsoglPackDistDynamicAssets {
manifest: PathBuf,
}
}
}
@ -256,6 +261,7 @@ pub mod paths {
const WASM_PACK_OUT_NAME: &str = "pkg";
impl Paths {
/// Create a set of path values.
pub async fn new() -> Result<Self> {
let mut p = Paths::default();
let current_cargo_path = Path::new(path!("Cargo.toml"));
@ -275,25 +281,26 @@ impl Paths {
p.target.ensogl_pack.wasm_pack.pkg_js = p.target.ensogl_pack.wasm_pack.join(pkg_js);
p.target.ensogl_pack.wasm_pack.runtime_libs =
p.target.ensogl_pack.wasm_pack.join("runtime-libs.js");
p.target.ensogl_pack.shaders.root = p.target.ensogl_pack.join("shaders");
p.target.ensogl_pack.shaders.list = p.target.ensogl_pack.shaders.join("list.txt");
p.target.ensogl_pack.shaders_hash = p.target.ensogl_pack.join("shaders-hash");
p.target.ensogl_pack.dynamic_assets = p.target.ensogl_pack.join("dynamic-assets");
p.target.ensogl_pack.runtime_libs.root = p.target.ensogl_pack.join("runtime-libs");
p.target.ensogl_pack.runtime_libs.runtime_libs =
p.target.ensogl_pack.runtime_libs.join("runtime-libs.js");
p.target.ensogl_pack.dist.root = p.target.ensogl_pack.join("dist");
p.target.ensogl_pack.linked_dist = p.target.ensogl_pack.join("linked-dist");
p.target.ensogl_pack.dist.app = p.target.ensogl_pack.dist.join("index.js");
p.target.ensogl_pack.dist.shader_extractor =
p.target.ensogl_pack.dist.join("shader-extractor.js");
p.target.ensogl_pack.dist.asset_extractor =
p.target.ensogl_pack.dist.join("asset-extractor.js");
p.target.ensogl_pack.dist.pkg_js = p.target.ensogl_pack.dist.join("pkg.js");
p.target.ensogl_pack.dist.main_wasm = p.target.ensogl_pack.dist.join("pkg.wasm");
p.target.ensogl_pack.dist.shaders.root = p.target.ensogl_pack.dist.join("shaders");
p.target.ensogl_pack.dist.shaders.list = p.target.ensogl_pack.dist.shaders.join("list.txt");
p.target.ensogl_pack.dist.dynamic_assets.root =
p.target.ensogl_pack.dist.join("dynamic-assets");
p.target.ensogl_pack.dist.dynamic_assets.manifest =
p.target.ensogl_pack.dist.dynamic_assets.join("manifest.json");
Ok(p)
}
}
/// Returns the workspace directory (repo root).
pub async fn workspace_dir() -> Result<PathBuf> {
use ide_ci::programs::cargo;
use ide_ci::programs::Cargo;
@ -310,6 +317,7 @@ pub async fn workspace_dir() -> Result<PathBuf> {
}
// =============
// === Build ===
// =============
@ -366,7 +374,7 @@ async fn compile_this_crate_ts_sources(paths: &Paths) -> Result<()> {
let args = ["--", &format!("--out-dir={}", paths.target.ensogl_pack.dist.display())];
run_script("build", &args).await?;
let args = ["--", &format!("--out-dir={}", paths.target.ensogl_pack.dist.display())];
run_script("build-shader-extractor", &args).await?;
run_script("build-asset-extractor", &args).await?;
println!("BUILD build-runtime-libs");
let args = ["--", &format!("--outdir={}", paths.target.ensogl_pack.runtime_libs.display())];
run_script("build-runtime-libs", &args).await?;
@ -427,94 +435,18 @@ async fn compile_wasm_pack_artifacts(pwd: &Path, pkg_js: &Path, out: &Path) -> R
.await
}
/// Extract non-optimized shaders from the WASM artifact.
async fn extract_shaders(paths: &Paths) -> Result<()> {
info!("Extracting shaders from generated WASM file.");
/// Extract asset sources from the WASM artifact.
async fn extract_assets(paths: &Paths) -> Result<()> {
info!("Extracting asset sources from generated WASM file.");
ide_ci::programs::Node
.cmd()?
.arg(&paths.target.ensogl_pack.dist.shader_extractor)
.arg(&paths.target.ensogl_pack.dist.asset_extractor)
.arg("--out-dir")
.arg(&paths.target.ensogl_pack.shaders)
.arg(&paths.target.ensogl_pack.dynamic_assets)
.run_ok()
.await
}
/// Optimize the extracted shaders by using `glslc`, `spirv-opt` and `spirv-cross`.
async fn optimize_shaders(paths: &Paths) -> Result<()> {
info!("Optimizing extracted shaders.");
ide_ci::fs::create_dir_if_missing(&paths.target.ensogl_pack.dist.shaders)?;
let stages = ["vertex", "fragment"];
let shaders_list = ide_ci::fs::read_to_string(&paths.target.ensogl_pack.shaders.list)?;
let shaders_prefixes: Vec<_> = shaders_list.lines().collect();
for shader_prefix in shaders_prefixes {
info!("Optimizing '{shader_prefix}'.");
for stage in stages {
let base_path = paths.target.ensogl_pack.shaders.join(shader_prefix);
let base_path = base_path.display();
let stage_path = format!("{base_path}.{stage}");
let glsl_path = stage_path.with_appended_extension("glsl");
let spv_path = stage_path.with_appended_extension("spv");
let spv_opt_path = stage_path.with_appended_extension("opt.spv");
let glsl_opt_path = stage_path.with_appended_extension("opt.glsl");
let glsl_file_name = format!("{shader_prefix}.{stage}.glsl");
let hash_file_name = format!("{shader_prefix}.{stage}.hash");
let glsl_opt_dist_path = paths.target.ensogl_pack.dist.shaders.join(&glsl_file_name);
let hash_path = paths.target.ensogl_pack.shaders_hash.join(&hash_file_name);
let content = ide_ci::fs::read_to_string(&glsl_path)?;
let old_hash = ide_ci::fs::read_to_string(&hash_path).ok();
let hash = calculate_hash(&content).to_string();
if let Some(old_hash) = old_hash {
if old_hash == hash {
info!("Skipping '{shader_prefix}.{stage}' because it has not changed.");
continue;
}
}
ide_ci::fs::write(&hash_path, hash)?;
let spv_path = spv_path.as_str();
let glsl_path = glsl_path.as_str();
let shader_stage = &format!("-fshader-stage={stage}");
let glslc_args = ["--target-env=opengl", shader_stage, "-o", spv_path, glsl_path];
let spirv_opt_args = ["-O", "-o", spv_opt_path.as_str(), spv_path.as_str()];
let spirv_cross_args = ["--output", glsl_opt_path.as_str(), spv_opt_path.as_str()];
Glslc.cmd()?.args(glslc_args).run_ok().await?;
SpirvOpt.cmd()?.args(spirv_opt_args).run_ok().await?;
SpirvCross.cmd()?.args(spirv_cross_args).run_ok().await?;
let content = ide_ci::fs::read_to_string(&glsl_opt_path)?.replace("\r\n", "\n");
let extract_err = || format!("Failed to process shader '{}'.", glsl_opt_path.as_str());
let code = extract_main_shader_code(&content).with_context(extract_err)?;
ide_ci::fs::write(&glsl_opt_dist_path, code)?;
}
}
ide_ci::fs::write(&paths.target.ensogl_pack.dist.shaders.list, &shaders_list)
}
/// Read the optimized shader code, extract the main function body and preserve all top-level
/// variable declarations.
fn extract_main_shader_code(code: &str) -> Result<String> {
let main_start_str = "void main()\n{";
let main_end_str = "}";
let main_fn_find_err = "Failed to find main function.";
let main_start = code.find(main_start_str).with_context(|| main_fn_find_err)?;
let main_end = code.rfind(main_end_str).with_context(|| main_fn_find_err)?;
let before_main = &code[..main_start];
let declarations: Vec<&str> = before_main
.lines()
.filter_map(|line| {
let version_def = line.starts_with("#version ");
let precision_def = line.starts_with("precision ");
let layout_def = line.starts_with("layout(");
let def = version_def || precision_def || layout_def;
(!def).then_some(line)
})
.collect();
let declarations = declarations.join("\n");
let main_content = &code[main_start + main_start_str.len()..main_end];
Ok(format!("{declarations}\n{main_content}"))
}
/// Wrapper over `wasm-pack build` command.
///
/// # Arguments
@ -528,8 +460,8 @@ pub async fn build(
let paths = Paths::new().await?;
compile_this_crate_ts_sources(&paths).await?;
run_wasm_pack(&paths, provider).await?;
extract_shaders(&paths).await?;
optimize_shaders(&paths).await?;
extract_assets(&paths).await?;
assets::build(&paths).await?;
let out_dir = Path::new(&outputs.out_dir);
ide_ci::fs::copy(&paths.target.ensogl_pack.dist, out_dir)?;
ide_ci::fs::remove_symlink_dir_if_exists(&paths.target.ensogl_pack.linked_dist)?;