mirror of https://github.com/tauri-apps/tauri.git
synced 2025-01-04 17:18:56 +03:00

commit 8661e3e24d (parent: dbc2873e82)
16  .changes/no-long-c-build-deps.md  Normal file
@@ -0,0 +1,16 @@
---
"tauri": patch
"tauri-codegen": patch
"tauri-macros": patch
"tauri-utils": patch
---

Replace multiple dependencies whose C code compiled concurrently and caused
the other ones to bloat compile time significantly.

* `zstd` -> `brotli`
* `blake3` -> a vendored version of the blake3 reference
* `ring` -> `getrandom`

See https://github.com/tauri-apps/tauri/pull/3773 for more information about
these specific choices.
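The `zstd` -> `brotli` swap shows up twice in the diffs below: tauri-codegen compresses each embedded asset at build time, and tauri-utils inflates it again at runtime. A minimal round-trip sketch of the two `brotli` 3.x entry points involved (`BrotliCompress` takes over for `zstd::stream::copy_encode`, `BrotliDecompress` for `zstd::decode_all`); the quality value mirrors the release-profile setting picked in this commit:

```rust
use brotli::enc::backward_references::BrotliEncoderParams;
use std::io::Cursor;

fn main() -> std::io::Result<()> {
  let original = b"<html>embedded asset</html>".to_vec();

  // Build side (tauri-codegen): stream the asset through the brotli encoder.
  let mut params = BrotliEncoderParams::default();
  params.quality = 9; // release-profile setting from this commit
  let mut compressed = Vec::new();
  brotli::BrotliCompress(&mut Cursor::new(&original), &mut compressed, &params)?;

  // Run side (tauri-utils): inflate the embedded bytes back out.
  let mut decompressed = Vec::with_capacity(compressed.len());
  brotli::BrotliDecompress(&mut Cursor::new(&compressed), &mut decompressed)?;

  assert_eq!(original, decompressed);
  Ok(())
}
```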
core/tauri-codegen/Cargo.toml

@@ -15,7 +15,6 @@ readme = "README.md"
 [dependencies]
 sha2 = "0.10"
 base64 = "0.13"
-blake3 = { version = "1.3", features = [ "rayon" ] }
 proc-macro2 = "1"
 quote = "1"
 serde = { version = "1", features = [ "derive" ] }
@@ -23,7 +22,7 @@ serde_json = "1"
 tauri-utils = { version = "1.0.0-rc.3", path = "../tauri-utils", features = [ "build" ] }
 thiserror = "1"
 walkdir = "2"
-zstd = { version = "0.11", optional = true }
+brotli = { version = "3", optional = true, default-features = false, features = ["std"] }
 regex = { version = "1.5.5", optional = true }
 uuid = { version = "0.8", features = [ "v4" ] }
 
@@ -35,8 +34,7 @@ png = "0.17"
 
 [features]
 default = [ "compression" ]
-compression = [ "zstd", "tauri-utils/compression" ]
+compression = [ "brotli", "tauri-utils/compression" ]
 isolation = [ "tauri-utils/isolation" ]
-__isolation-docs = [ "tauri-utils/__isolation-docs" ]
 shell-scope = [ "regex" ]
 config-json5 = [ "tauri-utils/config-json5" ]
core/tauri-codegen/src/context.rs

@@ -50,7 +50,7 @@ fn load_csp(document: &mut NodeRef, key: &AssetKey, csp_hashes: &mut CspHashes)
 fn map_core_assets(
   options: &AssetOptions,
 ) -> impl Fn(&AssetKey, &Path, &mut Vec<u8>, &mut CspHashes) -> Result<(), EmbeddedAssetsError> {
-  #[cfg(any(feature = "isolation", feature = "__isolation-docs"))]
+  #[cfg(feature = "isolation")]
   let pattern = tauri_utils::html::PatternObject::from(&options.pattern);
   let csp = options.csp;
   move |key, path, input, csp_hashes| {
@@ -60,7 +60,7 @@ fn map_core_assets(
       if csp {
         load_csp(&mut document, key, csp_hashes);
 
-        #[cfg(any(feature = "isolation", feature = "__isolation-docs"))]
+        #[cfg(feature = "isolation")]
         if let tauri_utils::html::PatternObject::Isolation { .. } = &pattern {
           // create the csp for the isolation iframe styling now, to make the runtime less complex
           let mut hasher = Sha256::new();
@@ -78,7 +78,7 @@ fn map_core_assets(
   }
 }
 
-#[cfg(any(feature = "isolation", feature = "__isolation-docs"))]
+#[cfg(feature = "isolation")]
 fn map_isolation(
   _options: &AssetOptions,
   dir: PathBuf,
@@ -284,7 +284,7 @@ pub fn context_codegen(data: ContextData) -> Result<TokenStream, EmbeddedAssetsE
 
   let pattern = match &options.pattern {
     PatternKind::Brownfield => quote!(#root::Pattern::Brownfield(std::marker::PhantomData)),
-    #[cfg(any(feature = "isolation", feature = "__isolation-docs"))]
+    #[cfg(feature = "isolation")]
     PatternKind::Isolation { dir } => {
       let dir = config_parent.join(dir);
       if !dir.exists() {
core/tauri-codegen/src/embedded_assets.rs

@@ -7,6 +7,7 @@ use quote::{quote, ToTokens, TokenStreamExt};
 use sha2::{Digest, Sha256};
 use std::{
   collections::HashMap,
+  fmt::Write,
   fs::File,
   path::{Path, PathBuf},
 };
@@ -15,12 +16,12 @@ use tauri_utils::config::PatternKind;
 use thiserror::Error;
 use walkdir::{DirEntry, WalkDir};
 
+#[cfg(feature = "compression")]
+use brotli::enc::backward_references::BrotliEncoderParams;
+
 /// The subdirectory inside the target directory we want to place assets.
 const TARGET_PATH: &str = "tauri-codegen-assets";
 
-/// The minimum size needed for the hasher to use multiple threads.
-const MULTI_HASH_SIZE_LIMIT: usize = 131_072; // 128KiB
-
 /// (key, (original filepath, compressed file path))
 type Asset = (AssetKey, (PathBuf, PathBuf));
 
@@ -40,6 +41,9 @@ pub enum EmbeddedAssetsError {
     error: std::io::Error,
   },
 
+  #[error("failed to create hex from bytes because {0}")]
+  Hex(std::fmt::Error),
+
   #[error("invalid prefix {prefix} used while including path {path}")]
   PrefixInvalid { prefix: PathBuf, path: PathBuf },
 
@@ -182,7 +186,7 @@ pub struct AssetOptions {
   pub(crate) csp: bool,
   pub(crate) pattern: PatternKind,
   pub(crate) freeze_prototype: bool,
-  #[cfg(any(feature = "isolation", feature = "__isolation-docs"))]
+  #[cfg(feature = "isolation")]
   pub(crate) isolation_schema: String,
 }
 
@@ -193,7 +197,7 @@ impl AssetOptions {
       csp: false,
       pattern,
       freeze_prototype: false,
-      #[cfg(any(feature = "isolation", feature = "__isolation-docs"))]
+      #[cfg(feature = "isolation")]
       isolation_schema: format!("isolation-{}", uuid::Uuid::new_v4()),
     }
   }
@@ -246,13 +250,19 @@ impl EmbeddedAssets {
 
   /// Use highest compression level for release, the fastest one for everything else
   #[cfg(feature = "compression")]
-  fn compression_level() -> i32 {
-    let levels = zstd::compression_level_range();
+  fn compression_settings() -> BrotliEncoderParams {
+    let mut settings = BrotliEncoderParams::default();
+
+    // the following compression levels are hand-picked and are not min-maxed.
+    // they have a good balance of runtime vs size for the respective profile goals.
+    // see the "brotli" section of this comment https://github.com/tauri-apps/tauri/issues/3571#issuecomment-1054847558
     if cfg!(debug_assertions) {
-      *levels.start()
+      settings.quality = 2
     } else {
-      *levels.end()
+      settings.quality = 9
     }
+
+    settings
   }
 
   /// Compress a file and spit out the information in a [`HashMap`] friendly form.
@@ -291,20 +301,24 @@ impl EmbeddedAssets {
 
     // get a hash of the input - allows for caching existing files
    let hash = {
-      let mut hasher = blake3::Hasher::new();
-      if input.len() < MULTI_HASH_SIZE_LIMIT {
-        hasher.update(&input);
-      } else {
-        hasher.update_rayon(&input);
-      }
-      hasher.finalize().to_hex()
+      let mut hasher = crate::vendor::blake3_reference::Hasher::default();
+      hasher.update(&input);
+
+      let mut bytes = [0u8; 32];
+      hasher.finalize(&mut bytes);
+
+      let mut hex = String::with_capacity(2 * bytes.len());
+      for b in bytes {
+        write!(hex, "{:02x}", b).map_err(EmbeddedAssetsError::Hex)?;
+      }
+      hex
     };
 
     // use the content hash to determine filename, keep extensions that exist
     let out_path = if let Some(ext) = path.extension().and_then(|e| e.to_str()) {
       out_dir.join(format!("{}.{}", hash, ext))
     } else {
-      out_dir.join(hash.to_string())
+      out_dir.join(hash)
     };
 
     // only compress and write to the file if it doesn't already exist.
@@ -328,13 +342,16 @@ impl EmbeddedAssets {
     }
 
     #[cfg(feature = "compression")]
-    // entirely write input to the output file path with compression
-    zstd::stream::copy_encode(&*input, out_file, Self::compression_level()).map_err(|error| {
-      EmbeddedAssetsError::AssetWrite {
-        path: path.to_owned(),
-        error,
-      }
-    })?;
+    {
+      let mut input = std::io::Cursor::new(input);
+      // entirely write input to the output file path with compression
+      brotli::BrotliCompress(&mut input, &mut out_file, &Self::compression_settings()).map_err(
+        |error| EmbeddedAssetsError::AssetWrite {
+          path: path.to_owned(),
+          error,
+        },
+      )?;
+    }
 
     Ok((key, (path.into(), out_path)))
core/tauri-codegen/src/lib.rs

@@ -11,6 +11,8 @@ pub use tauri_utils::config::{parse::ConfigError, Config};
 
 mod context;
 pub mod embedded_assets;
+#[doc(hidden)]
+pub mod vendor;
 
 /// Represents all the errors that can happen while reading the config during codegen.
 #[derive(Debug, thiserror::Error)]
377  core/tauri-codegen/src/vendor/blake3_reference.rs  vendored  Normal file
@@ -0,0 +1,377 @@
// Copyright 2019-2021 Tauri Programme within The Commons Conservancy
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT

//! This is a lightly modified version of the BLAKE3 reference implementation.
//! The changes applied are to remove unused item warnings due to using it
//! vendored along with some minor clippy suggestions. No logic changes. I
//! suggest diffing against the original to find all the changes.
//!
//! ## Original Header
//! This is the reference implementation of BLAKE3. It is used for testing and
//! as a readable example of the algorithms involved. Section 5.1 of [the BLAKE3
//! spec](https://github.com/BLAKE3-team/BLAKE3-specs/blob/master/blake3.pdf)
//! discusses this implementation. You can render docs for this implementation
//! by running `cargo doc --open` in this directory.
//!
//! # Example
//!
//! ```
//! let mut hasher = tauri_codegen::vendor::blake3_reference::Hasher::new();
//! hasher.update(b"abc");
//! hasher.update(b"def");
//! let mut hash = [0; 32];
//! hasher.finalize(&mut hash);
//! let mut extended_hash = [0; 500];
//! hasher.finalize(&mut extended_hash);
//! assert_eq!(hash, extended_hash[..32]);
//! ```
//!
//! CC0-1.0 OR Apache-2.0

use core::cmp::min;
use core::convert::TryInto;

const OUT_LEN: usize = 32;
const BLOCK_LEN: usize = 64;
const CHUNK_LEN: usize = 1024;

const CHUNK_START: u32 = 1 << 0;
const CHUNK_END: u32 = 1 << 1;
const PARENT: u32 = 1 << 2;
const ROOT: u32 = 1 << 3;

const IV: [u32; 8] = [
  0x6A09E667, 0xBB67AE85, 0x3C6EF372, 0xA54FF53A, 0x510E527F, 0x9B05688C, 0x1F83D9AB, 0x5BE0CD19,
];

const MSG_PERMUTATION: [usize; 16] = [2, 6, 3, 10, 7, 0, 4, 13, 1, 11, 12, 5, 9, 14, 15, 8];

// The mixing function, G, which mixes either a column or a diagonal.
fn g(state: &mut [u32; 16], a: usize, b: usize, c: usize, d: usize, mx: u32, my: u32) {
  state[a] = state[a].wrapping_add(state[b]).wrapping_add(mx);
  state[d] = (state[d] ^ state[a]).rotate_right(16);
  state[c] = state[c].wrapping_add(state[d]);
  state[b] = (state[b] ^ state[c]).rotate_right(12);
  state[a] = state[a].wrapping_add(state[b]).wrapping_add(my);
  state[d] = (state[d] ^ state[a]).rotate_right(8);
  state[c] = state[c].wrapping_add(state[d]);
  state[b] = (state[b] ^ state[c]).rotate_right(7);
}

fn round(state: &mut [u32; 16], m: &[u32; 16]) {
  // Mix the columns.
  g(state, 0, 4, 8, 12, m[0], m[1]);
  g(state, 1, 5, 9, 13, m[2], m[3]);
  g(state, 2, 6, 10, 14, m[4], m[5]);
  g(state, 3, 7, 11, 15, m[6], m[7]);
  // Mix the diagonals.
  g(state, 0, 5, 10, 15, m[8], m[9]);
  g(state, 1, 6, 11, 12, m[10], m[11]);
  g(state, 2, 7, 8, 13, m[12], m[13]);
  g(state, 3, 4, 9, 14, m[14], m[15]);
}

fn permute(m: &mut [u32; 16]) {
  let mut permuted = [0; 16];
  for i in 0..16 {
    permuted[i] = m[MSG_PERMUTATION[i]];
  }
  *m = permuted;
}

fn compress(
  chaining_value: &[u32; 8],
  block_words: &[u32; 16],
  counter: u64,
  block_len: u32,
  flags: u32,
) -> [u32; 16] {
  let mut state = [
    chaining_value[0],
    chaining_value[1],
    chaining_value[2],
    chaining_value[3],
    chaining_value[4],
    chaining_value[5],
    chaining_value[6],
    chaining_value[7],
    IV[0],
    IV[1],
    IV[2],
    IV[3],
    counter as u32,
    (counter >> 32) as u32,
    block_len,
    flags,
  ];
  let mut block = *block_words;

  round(&mut state, &block); // round 1
  permute(&mut block);
  round(&mut state, &block); // round 2
  permute(&mut block);
  round(&mut state, &block); // round 3
  permute(&mut block);
  round(&mut state, &block); // round 4
  permute(&mut block);
  round(&mut state, &block); // round 5
  permute(&mut block);
  round(&mut state, &block); // round 6
  permute(&mut block);
  round(&mut state, &block); // round 7

  for i in 0..8 {
    state[i] ^= state[i + 8];
    state[i + 8] ^= chaining_value[i];
  }
  state
}

fn first_8_words(compression_output: [u32; 16]) -> [u32; 8] {
  compression_output[0..8].try_into().unwrap()
}

fn words_from_little_endian_bytes(bytes: &[u8], words: &mut [u32]) {
  debug_assert_eq!(bytes.len(), 4 * words.len());
  for (four_bytes, word) in bytes.chunks_exact(4).zip(words) {
    *word = u32::from_le_bytes(four_bytes.try_into().unwrap());
  }
}

// Each chunk or parent node can produce either an 8-word chaining value or, by
// setting the ROOT flag, any number of final output bytes. The Output struct
// captures the state just prior to choosing between those two possibilities.
struct Output {
  input_chaining_value: [u32; 8],
  block_words: [u32; 16],
  counter: u64,
  block_len: u32,
  flags: u32,
}

impl Output {
  fn chaining_value(&self) -> [u32; 8] {
    first_8_words(compress(
      &self.input_chaining_value,
      &self.block_words,
      self.counter,
      self.block_len,
      self.flags,
    ))
  }

  fn root_output_bytes(&self, out_slice: &mut [u8]) {
    for (output_block_counter, out_block) in (0u64..).zip(out_slice.chunks_mut(2 * OUT_LEN)) {
      let words = compress(
        &self.input_chaining_value,
        &self.block_words,
        output_block_counter,
        self.block_len,
        self.flags | ROOT,
      );
      // The output length might not be a multiple of 4.
      for (word, out_word) in words.iter().zip(out_block.chunks_mut(4)) {
        out_word.copy_from_slice(&word.to_le_bytes()[..out_word.len()]);
      }
    }
  }
}

struct ChunkState {
  chaining_value: [u32; 8],
  chunk_counter: u64,
  block: [u8; BLOCK_LEN],
  block_len: u8,
  blocks_compressed: u8,
  flags: u32,
}

impl ChunkState {
  fn new(key_words: [u32; 8], chunk_counter: u64, flags: u32) -> Self {
    Self {
      chaining_value: key_words,
      chunk_counter,
      block: [0; BLOCK_LEN],
      block_len: 0,
      blocks_compressed: 0,
      flags,
    }
  }

  fn len(&self) -> usize {
    BLOCK_LEN * self.blocks_compressed as usize + self.block_len as usize
  }

  fn start_flag(&self) -> u32 {
    if self.blocks_compressed == 0 {
      CHUNK_START
    } else {
      0
    }
  }

  fn update(&mut self, mut input: &[u8]) {
    while !input.is_empty() {
      // If the block buffer is full, compress it and clear it. More
      // input is coming, so this compression is not CHUNK_END.
      if self.block_len as usize == BLOCK_LEN {
        let mut block_words = [0; 16];
        words_from_little_endian_bytes(&self.block, &mut block_words);
        self.chaining_value = first_8_words(compress(
          &self.chaining_value,
          &block_words,
          self.chunk_counter,
          BLOCK_LEN as u32,
          self.flags | self.start_flag(),
        ));
        self.blocks_compressed += 1;
        self.block = [0; BLOCK_LEN];
        self.block_len = 0;
      }

      // Copy input bytes into the block buffer.
      let want = BLOCK_LEN - self.block_len as usize;
      let take = min(want, input.len());
      self.block[self.block_len as usize..][..take].copy_from_slice(&input[..take]);
      self.block_len += take as u8;
      input = &input[take..];
    }
  }

  fn output(&self) -> Output {
    let mut block_words = [0; 16];
    words_from_little_endian_bytes(&self.block, &mut block_words);
    Output {
      input_chaining_value: self.chaining_value,
      block_words,
      counter: self.chunk_counter,
      block_len: self.block_len as u32,
      flags: self.flags | self.start_flag() | CHUNK_END,
    }
  }
}

fn parent_output(
  left_child_cv: [u32; 8],
  right_child_cv: [u32; 8],
  key_words: [u32; 8],
  flags: u32,
) -> Output {
  let mut block_words = [0; 16];
  block_words[..8].copy_from_slice(&left_child_cv);
  block_words[8..].copy_from_slice(&right_child_cv);
  Output {
    input_chaining_value: key_words,
    block_words,
    counter: 0,                  // Always 0 for parent nodes.
    block_len: BLOCK_LEN as u32, // Always BLOCK_LEN (64) for parent nodes.
    flags: PARENT | flags,
  }
}

fn parent_cv(
  left_child_cv: [u32; 8],
  right_child_cv: [u32; 8],
  key_words: [u32; 8],
  flags: u32,
) -> [u32; 8] {
  parent_output(left_child_cv, right_child_cv, key_words, flags).chaining_value()
}

/// An incremental hasher that can accept any number of writes.
pub struct Hasher {
  chunk_state: ChunkState,
  key_words: [u32; 8],
  cv_stack: [[u32; 8]; 54], // Space for 54 subtree chaining values:
  cv_stack_len: u8,         // 2^54 * CHUNK_LEN = 2^64
  flags: u32,
}

impl Hasher {
  fn new_internal(key_words: [u32; 8], flags: u32) -> Self {
    Self {
      chunk_state: ChunkState::new(key_words, 0, flags),
      key_words,
      cv_stack: [[0; 8]; 54],
      cv_stack_len: 0,
      flags,
    }
  }

  /// Construct a new `Hasher` for the regular hash function.
  pub fn new() -> Self {
    Self::new_internal(IV, 0)
  }

  fn push_stack(&mut self, cv: [u32; 8]) {
    self.cv_stack[self.cv_stack_len as usize] = cv;
    self.cv_stack_len += 1;
  }

  fn pop_stack(&mut self) -> [u32; 8] {
    self.cv_stack_len -= 1;
    self.cv_stack[self.cv_stack_len as usize]
  }

  // Section 5.1.2 of the BLAKE3 spec explains this algorithm in more detail.
  fn add_chunk_chaining_value(&mut self, mut new_cv: [u32; 8], mut total_chunks: u64) {
    // This chunk might complete some subtrees. For each completed subtree,
    // its left child will be the current top entry in the CV stack, and
    // its right child will be the current value of `new_cv`. Pop each left
    // child off the stack, merge it with `new_cv`, and overwrite `new_cv`
    // with the result. After all these merges, push the final value of
    // `new_cv` onto the stack. The number of completed subtrees is given
    // by the number of trailing 0-bits in the new total number of chunks.
    while total_chunks & 1 == 0 {
      new_cv = parent_cv(self.pop_stack(), new_cv, self.key_words, self.flags);
      total_chunks >>= 1;
    }
    self.push_stack(new_cv);
  }

  /// Add input to the hash state. This can be called any number of times.
  pub fn update(&mut self, mut input: &[u8]) {
    while !input.is_empty() {
      // If the current chunk is complete, finalize it and reset the
      // chunk state. More input is coming, so this chunk is not ROOT.
      if self.chunk_state.len() == CHUNK_LEN {
        let chunk_cv = self.chunk_state.output().chaining_value();
        let total_chunks = self.chunk_state.chunk_counter + 1;
        self.add_chunk_chaining_value(chunk_cv, total_chunks);
        self.chunk_state = ChunkState::new(self.key_words, total_chunks, self.flags);
      }

      // Compress input bytes into the current chunk state.
      let want = CHUNK_LEN - self.chunk_state.len();
      let take = min(want, input.len());
      self.chunk_state.update(&input[..take]);
      input = &input[take..];
    }
  }

  /// Finalize the hash and write any number of output bytes.
  pub fn finalize(&self, out_slice: &mut [u8]) {
    // Starting with the Output from the current chunk, compute all the
    // parent chaining values along the right edge of the tree, until we
    // have the root Output.
    let mut output = self.chunk_state.output();
    let mut parent_nodes_remaining = self.cv_stack_len as usize;
    while parent_nodes_remaining > 0 {
      parent_nodes_remaining -= 1;
      output = parent_output(
        self.cv_stack[parent_nodes_remaining],
        output.chaining_value(),
        self.key_words,
        self.flags,
      );
    }
    output.root_output_bytes(out_slice);
  }
}

impl Default for Hasher {
  fn default() -> Self {
    Self::new()
  }
}
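Because the vendored reference implementation replaces the `blake3` crate in a path that names cached output files, a quick check against a published BLAKE3 test vector is a cheap safety net. A minimal sketch (the digest below is the well-known BLAKE3 hash of empty input, quoted from memory; re-verify against the official test vectors before relying on it); the hex loop mirrors the one in `embedded_assets.rs` above:

```rust
use std::fmt::Write;

/// Hash `input` with the vendored reference hasher and hex-encode the
/// 32-byte digest, the same way `embedded_assets.rs` derives filenames.
fn blake3_hex(input: &[u8]) -> String {
  let mut hasher = tauri_codegen::vendor::blake3_reference::Hasher::default();
  hasher.update(input);

  let mut bytes = [0u8; 32];
  hasher.finalize(&mut bytes);

  let mut hex = String::with_capacity(2 * bytes.len());
  for b in bytes {
    write!(hex, "{:02x}", b).expect("writing to a String cannot fail");
  }
  hex
}

fn main() {
  // Published BLAKE3 test vector for empty input.
  assert_eq!(
    blake3_hex(b""),
    "af1349b9f5f9a1a6a0404dee36dcc9499bcb25c9adc112b7cc9a93cae41f3262"
  );
}
```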
11  core/tauri-codegen/src/vendor/mod.rs  vendored  Normal file

@@ -0,0 +1,11 @@
// Copyright 2019-2021 Tauri Programme within The Commons Conservancy
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT

//! Manual vendored dependencies - NOT STABLE.
//!
//! This module and all submodules are not considered part of the public
//! api. They can and will change at any time for any reason in any
//! version.

pub mod blake3_reference;
core/tauri-macros/Cargo.toml

@@ -27,6 +27,5 @@ tauri-utils = { version = "1.0.0-rc.3", path = "../tauri-utils" }
 custom-protocol = [ ]
 compression = [ "tauri-codegen/compression" ]
 isolation = [ "tauri-codegen/isolation" ]
-__isolation-docs = [ "tauri-codegen/__isolation-docs" ]
 shell-scope = [ "tauri-codegen/shell-scope" ]
 config-json5 = [ "tauri-codegen/config-json5", "tauri-utils/config-json5" ]
core/tauri-utils/Cargo.toml

@@ -16,7 +16,7 @@ serde = { version = "1.0", features = [ "derive" ] }
 serde_json = "1.0"
 thiserror = "1.0.30"
 phf = { version = "0.10", features = [ "macros" ] }
-zstd = { version = "0.11", optional = true }
+brotli = { version = "3", optional = true, default-features = false, features = ["std"] }
 url = { version = "2.2", features = [ "serde" ] }
 kuchiki = "0.8"
 html5ever = "0.25"
@@ -25,8 +25,7 @@ quote = { version = "1.0", optional = true }
 schemars = { version = "0.8", features = [ "url" ], optional = true }
 serde_with = "1.12"
 aes-gcm = { version = "0.9", optional = true }
-ring = { version = "0.16", optional = true, features = [ "std" ] }
-once_cell = { version = "1.10", optional = true }
+getrandom = { version = "0.2", optional = true, features = [ "std" ] }
 serialize-to-javascript = "=0.1.1"
 ctor = "0.1"
 json5 = { version = "0.4", optional = true }
@@ -39,10 +38,9 @@ heck = "0.4"
 
 [features]
 build = [ "proc-macro2", "quote" ]
-compression = [ "zstd" ]
+compression = [ "brotli" ]
 schema = [ "schemars" ]
-isolation = [ "aes-gcm", "ring", "once_cell" ]
-__isolation-docs = [ "aes-gcm", "once_cell" ]
+isolation = [ "aes-gcm", "getrandom" ]
 process-relaunch-dangerous-allow-symlink-macos = [ ]
 config-json5 = [ "json5" ]
 resources = [ "glob", "walkdir" ]
core/tauri-utils/src/assets.rs

@@ -144,8 +144,12 @@ impl Assets for EmbeddedAssets {
     self
       .assets
       .get(key.as_ref())
-      .copied()
-      .map(zstd::decode_all)
+      .map(|&(mut asdf)| {
+        // with the exception of extremely small files, output should usually be
+        // at least as large as the compressed version.
+        let mut buf = Vec::with_capacity(asdf.len());
+        brotli::BrotliDecompress(&mut asdf, &mut buf).map(|()| buf)
+      })
       .and_then(Result::ok)
       .map(Cow::Owned)
   }
core/tauri-utils/src/config.rs

@@ -1691,7 +1691,7 @@ pub enum PatternKind {
   /// Brownfield pattern.
   Brownfield,
   /// Isolation pattern. Recommended for security purposes.
-  #[cfg(any(feature = "isolation", feature = "__isolation-docs"))]
+  #[cfg(feature = "isolation")]
   Isolation {
     /// The dir containing the index.html file that contains the secure isolation application.
     dir: PathBuf,
@@ -1768,7 +1768,7 @@ impl TauriConfig {
     if self.macos_private_api {
       features.push("macos-private-api");
     }
-    #[cfg(any(feature = "isolation", feature = "__isolation-docs"))]
+    #[cfg(feature = "isolation")]
     if let PatternKind::Isolation { .. } = self.pattern {
       features.push("isolation");
     }
@@ -2431,7 +2431,7 @@ mod build {
 
     tokens.append_all(match self {
       Self::Brownfield => quote! { #prefix::Brownfield },
-      #[cfg(any(feature = "isolation", feature = "__isolation-docs"))]
+      #[cfg(feature = "isolation")]
      Self::Isolation { dir } => {
        let dir = path_buf_lit(dir);
        quote! { #prefix::Isolation { dir: #dir } }
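The last hunk above sits inside `config.rs`'s `mod build`, where config values are turned into tokens that the codegen macro splices into the generated context. A self-contained sketch of the same `ToTokens` pattern, using a hypothetical stand-in enum; the `::tauri::Pattern` path and the string-based `PathBuf` literal (replacing tauri's internal `path_buf_lit` helper) are both assumptions for illustration:

```rust
use proc_macro2::TokenStream;
use quote::{quote, ToTokens, TokenStreamExt};
use std::path::PathBuf;

// Hypothetical stand-in for tauri's PatternKind; the variant shapes
// mirror the hunk above.
enum PatternKind {
  Brownfield,
  Isolation { dir: PathBuf },
}

impl ToTokens for PatternKind {
  fn to_tokens(&self, tokens: &mut TokenStream) {
    // In the real code `#prefix` points at the runtime crate's Pattern type.
    let prefix = quote! { ::tauri::Pattern };
    tokens.append_all(match self {
      Self::Brownfield => quote! { #prefix::Brownfield },
      Self::Isolation { dir } => {
        // Stand-in for the internal `path_buf_lit` helper: emit the path
        // as a string literal and rebuild the PathBuf at runtime.
        let dir = dir.to_string_lossy().to_string();
        quote! { #prefix::Isolation { dir: ::std::path::PathBuf::from(#dir) } }
      }
    })
  }
}

fn main() {
  let kind = PatternKind::Isolation { dir: PathBuf::from("isolation-dist") };
  println!("{}", quote! { #kind });
}
```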
core/tauri-utils/src/html.rs

@@ -10,11 +10,11 @@ use html5ever::{interface::QualName, namespace_url, ns, tendril::TendrilSink, Lo
 pub use kuchiki::NodeRef;
 use kuchiki::{Attribute, ExpandedName};
 use serde::Serialize;
-#[cfg(any(feature = "isolation", feature = "__isolation-docs"))]
+#[cfg(feature = "isolation")]
 use serialize_to_javascript::DefaultTemplate;
 
 use crate::config::PatternKind;
-#[cfg(any(feature = "isolation", feature = "__isolation-docs"))]
+#[cfg(feature = "isolation")]
 use crate::pattern::isolation::IsolationJavascriptCodegen;
 
 /// The token used on the CSP tag content.
@@ -115,7 +115,7 @@ impl From<&PatternKind> for PatternObject {
   fn from(pattern_kind: &PatternKind) -> Self {
     match pattern_kind {
       PatternKind::Brownfield => Self::Brownfield,
-      #[cfg(any(feature = "isolation", feature = "__isolation-docs"))]
+      #[cfg(feature = "isolation")]
       PatternKind::Isolation { .. } => Self::Isolation {
         side: IsolationSide::default(),
       },
@@ -142,7 +142,7 @@ impl Default for IsolationSide {
 /// Injects the Isolation JavaScript to a codegen time document.
 ///
 /// Note: This function is not considered part of the stable API.
-#[cfg(any(feature = "isolation", feature = "__isolation-docs"))]
+#[cfg(feature = "isolation")]
 pub fn inject_codegen_isolation_script(document: &mut NodeRef) {
   with_head(document, |head| {
     let script = NodeRef::new_element(QualName::new(None, ns!(html), "script".into()), None);
core/tauri-utils/src/pattern/isolation.rs

@@ -9,51 +9,9 @@ use std::string::FromUtf8Error;
 
 use aes_gcm::aead::Aead;
 use aes_gcm::{aead::NewAead, Aes256Gcm, Nonce};
-use once_cell::sync::OnceCell;
+use getrandom::{getrandom, Error as CsprngError};
 use serialize_to_javascript::{default_template, Template};
 
-#[cfg(not(feature = "isolation"))]
-mod ring_impl {
-  #[cfg(not(feature = "__isolation-docs"))]
-  compile_error!(
-    "Isolation random number generator was used without enabling the `isolation` feature."
-  );
-
-  pub struct Unspecified;
-
-  pub struct SystemRandom;
-
-  impl SystemRandom {
-    pub fn new() -> Self {
-      unimplemented!()
-    }
-  }
-
-  pub struct Random;
-
-  impl Random {
-    pub fn expose(self) -> [u8; 32] {
-      unimplemented!()
-    }
-  }
-
-  pub fn rand_generate(_rng: &SystemRandom) -> Result<Random, super::Error> {
-    unimplemented!()
-  }
-}
-
-#[cfg(feature = "isolation")]
-mod ring_impl {
-  pub use ring::error::Unspecified;
-  pub use ring::rand::generate as rand_generate;
-  pub use ring::rand::SystemRandom;
-}
-
-use ring_impl::*;
-
-/// Cryptographically secure pseudo-random number generator.
-static RNG: OnceCell<SystemRandom> = OnceCell::new();
-
 /// The style for the isolation iframe.
 pub const IFRAME_STYLE: &str = "#__tauri_isolation__ { display: none !important }";
 
@@ -62,8 +20,8 @@ pub const IFRAME_STYLE: &str = "#__tauri_isolation__ { display: none !important
 #[non_exhaustive]
 pub enum Error {
   /// Something went wrong with the CSPRNG.
-  #[error("Unspecified CSPRNG error")]
-  Csprng,
+  #[error("CSPRNG error")]
+  Csprng(#[from] CsprngError),
 
   /// Something went wrong with decrypting an AES-GCM payload
   #[error("AES-GCM")]
@@ -82,12 +40,6 @@ pub enum Error {
   Json(#[from] serde_json::Error),
 }
 
-impl From<Unspecified> for Error {
-  fn from(_: Unspecified) -> Self {
-    Self::Csprng
-  }
-}
-
 /// A formatted AES-GCM cipher instance along with the key used to initialize it.
 #[derive(Clone)]
 pub struct AesGcmPair {
@@ -103,8 +55,8 @@ impl Debug for AesGcmPair {
 
 impl AesGcmPair {
   fn new() -> Result<Self, Error> {
-    let rng = RNG.get_or_init(SystemRandom::new);
-    let raw: [u8; 32] = ring_impl::rand_generate(rng)?.expose();
+    let mut raw = [0u8; 32];
+    getrandom(&mut raw)?;
     let key = aes_gcm::Key::from_slice(&raw);
     Ok(Self {
       raw,
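The net effect of the `ring` -> `getrandom` swap in `AesGcmPair::new` is that key material now comes straight from the OS CSPRNG, with no cached `SystemRandom` state and no shim module for the docs-only build. A minimal sketch of the new call shape, reusing only the calls visible in the hunk above (assuming `aes-gcm` 0.9 with its `NewAead` trait, as in the Cargo.toml diff):

```rust
use aes_gcm::{aead::NewAead, Aes256Gcm};
use getrandom::getrandom;

/// Fill 32 bytes from the OS CSPRNG and build an AES-256-GCM cipher
/// from them, as `AesGcmPair::new` does after this commit.
fn new_cipher() -> Result<(Aes256Gcm, [u8; 32]), getrandom::Error> {
  let mut raw = [0u8; 32];
  getrandom(&mut raw)?;
  let key = aes_gcm::Key::from_slice(&raw);
  Ok((Aes256Gcm::new(key), raw))
}
```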
core/tauri-utils/src/pattern/mod.rs

@@ -3,5 +3,5 @@
 // SPDX-License-Identifier: MIT
 
 /// Handling the Tauri "Isolation" Pattern.
-#[cfg(any(feature = "isolation", feature = "__isolation-docs"))]
+#[cfg(feature = "isolation")]
 pub mod isolation;
core/tauri/Cargo.toml

@@ -22,7 +22,6 @@ version = "1.0.0-rc.4"
 no-default-features = true
 features = [
   "wry",
-  "__isolation-docs",
   "custom-protocol",
   "api-all",
   "cli",
@@ -124,10 +123,6 @@ compression = [ "tauri-macros/compression", "tauri-utils/compression" ]
 wry = [ "tauri-runtime-wry" ]
 objc-exception = [ "tauri-runtime-wry/objc-exception" ]
 isolation = [ "tauri-utils/isolation", "tauri-macros/isolation" ]
-__isolation-docs = [
-  "tauri-utils/__isolation-docs",
-  "tauri-macros/__isolation-docs"
-]
 custom-protocol = [ "tauri-macros/custom-protocol" ]
 updater = [
   "minisign-verify",
90  core/tests/restart/Cargo.lock  generated
@@ -38,18 +38,6 @@ version = "1.0.53"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "94a45b455c14666b85fc40a019e8ab9eb75e3a124e05494f5397122bc9eb06e0"
 
-[[package]]
-name = "arrayref"
-version = "0.3.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a4c527152e37cf757a3f78aae5a06fbeefdb07ccc535c980a3208ee3060dd544"
-
-[[package]]
-name = "arrayvec"
-version = "0.7.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6"
-
 [[package]]
 name = "atk"
 version = "0.15.1"
@@ -101,21 +89,6 @@ version = "1.3.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
 
-[[package]]
-name = "blake3"
-version = "1.3.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a08e53fc5a564bb15bfe6fae56bd71522205f1f91893f9c0116edad6496c183f"
-dependencies = [
- "arrayref",
- "arrayvec",
- "cc",
- "cfg-if",
- "constant_time_eq",
- "digest",
- "rayon",
-]
-
 [[package]]
 name = "block"
 version = "0.1.6"
@@ -265,12 +238,6 @@ dependencies = [
  "objc",
 ]
 
-[[package]]
-name = "constant_time_eq"
-version = "0.1.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc"
-
 [[package]]
 name = "convert_case"
 version = "0.4.0"
@@ -346,30 +313,6 @@ dependencies = [
  "crossbeam-utils",
 ]
 
-[[package]]
-name = "crossbeam-deque"
-version = "0.8.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e"
-dependencies = [
- "cfg-if",
- "crossbeam-epoch",
- "crossbeam-utils",
-]
-
-[[package]]
-name = "crossbeam-epoch"
-version = "0.9.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c00d6d2ea26e8b151d99093005cb442fb9a37aeaca582a03ec70946f49ab5ed9"
-dependencies = [
- "cfg-if",
- "crossbeam-utils",
- "lazy_static",
- "memoffset",
- "scopeguard",
-]
-
 [[package]]
 name = "crossbeam-utils"
 version = "0.8.7"
@@ -543,7 +486,6 @@ checksum = "f2fb860ca6fafa5552fb6d0e816a69c8e49f0908bf524e30a90d97c85892d506"
 dependencies = [
  "block-buffer",
  "crypto-common",
- "subtle",
 ]
 
 [[package]]
@@ -1980,31 +1922,6 @@ dependencies = [
  "cty",
 ]
 
-[[package]]
-name = "rayon"
-version = "1.5.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c06aca804d41dbc8ba42dfd964f0d01334eceb64314b9ecf7c5fad5188a06d90"
-dependencies = [
- "autocfg",
- "crossbeam-deque",
- "either",
- "rayon-core",
-]
-
-[[package]]
-name = "rayon-core"
-version = "1.9.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d78120e2c850279833f1dd3582f730c4ab53ed95aeaaaa862a2a5c71b1656d8e"
-dependencies = [
- "crossbeam-channel",
- "crossbeam-deque",
- "crossbeam-utils",
- "lazy_static",
- "num_cpus",
-]
-
 [[package]]
 name = "redox_syscall"
 version = "0.2.10"
@@ -2381,12 +2298,6 @@ dependencies = [
  "syn",
 ]
 
-[[package]]
-name = "subtle"
-version = "2.4.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601"
-
 [[package]]
 name = "syn"
 version = "1.0.86"
@@ -2552,7 +2463,6 @@ name = "tauri-codegen"
 version = "1.0.0-rc.3"
 dependencies = [
  "base64",
- "blake3",
 "ico",
 "png 0.17.5",
 "proc-macro2",