refactor(bundler): remove unused fs utils, add http utils (#11716)

Fabian-Lars 2024-11-19 00:23:20 +01:00 committed by GitHub
parent d86aaccb0b
commit 72feaf99fc
21 changed files with 499 additions and 714 deletions
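Besides the Cargo.lock version bumps at the top, the hunks below move the bundler's shared helpers out of bundle::common, bundle::path_utils, and bundle::windows::util into a new crate-level utils module: file-system helpers land in utils::fs_utils, the download/hash/zip helpers in utils::http_utils, and CommandExt plus is_retina sit at the utils root. A minimal sketch of how a call site changes, using the paths and signatures from the hunks below (the wrapper function itself is illustrative, not part of the commit):

// Before: helpers came from bundle::common and bundle::path_utils, e.g.
//   common::copy_file(&bin_path, bin_dir.join(bin.name()))?;
//   path_utils::create(app_dir_path, true)?;

// After: the same operations go through the new utils module.
use std::path::Path;

use crate::utils::fs_utils;

fn stage_binary(bin_path: &Path, app_dir: &Path) -> crate::Result<()> {
    // create_dir replaces path_utils::create; `true` erases a stale directory first.
    fs_utils::create_dir(app_dir, true)?;
    // copy_file now takes two plain &Path arguments instead of AsRef<Path> generics.
    fs_utils::copy_file(bin_path, &app_dir.join("app"))?;
    Ok(())
}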

Cargo.lock (generated): 28 lines changed
View File

@ -570,9 +570,9 @@ dependencies = [
[[package]]
name = "avif-serialize"
version = "0.8.2"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e335041290c43101ca215eed6f43ec437eb5a42125573f600fc3fa42b9bddd62"
checksum = "876c75a42f6364451a033496a14c44bffe41f5f4a8236f697391f11024e596d2"
dependencies = [
"arrayvec",
]
@ -4113,9 +4113,9 @@ dependencies = [
[[package]]
name = "image"
version = "0.25.5"
version = "0.25.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd6f44aed642f18953a158afeb30206f4d50da59fbc66ecb53c66488de73563b"
checksum = "bc144d44a31d753b02ce64093d532f55ff8dc4ebf2ffb8a63c0dda691385acae"
dependencies = [
"bytemuck",
"byteorder-lite",
@ -7138,9 +7138,9 @@ dependencies = [
[[package]]
name = "ravif"
version = "0.11.11"
version = "0.11.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2413fd96bd0ea5cdeeb37eaf446a22e6ed7b981d792828721e74ded1980a45c6"
checksum = "bc13288f5ab39e6d7c9d501759712e6969fcc9734220846fc9ed26cae2cc4234"
dependencies = [
"avif-serialize",
"imgref",
@ -7987,9 +7987,9 @@ checksum = "a3f0bf26fd526d2a95683cd0f87bf103b8539e2ca1ef48ce002d67aad59aa0b4"
[[package]]
name = "serde"
version = "1.0.215"
version = "1.0.214"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6513c1ad0b11a9376da888e3e0baa0077f1aed55c17f50e7b2397136129fb88f"
checksum = "f55c3193aca71c12ad7890f1785d2b73e1b9f63a0bbc353c08ef26fe03fc56b5"
dependencies = [
"serde_derive",
]
@ -8051,9 +8051,9 @@ dependencies = [
[[package]]
name = "serde_derive"
version = "1.0.215"
version = "1.0.214"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ad1e866f866923f252f05c889987993144fb74e722403468a4ebd70c3cd756c0"
checksum = "de523f781f095e28fa605cdce0f8307e451cc0fd14e2eb4cd2e98a355b147766"
dependencies = [
"proc-macro2",
"quote",
@ -8921,9 +8921,9 @@ dependencies = [
[[package]]
name = "tao"
version = "0.30.6"
version = "0.30.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "833b4d43383d76d5078d72f3acd977f47eb5b6751eb40baa665d13828e7b79df"
checksum = "6682a07cf5bab0b8a2bd20d0a542917ab928b5edb75ebd4eda6b05cbaab872da"
dependencies = [
"bitflags 2.6.0",
"cocoa 0.26.0",
@ -8977,9 +8977,9 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
[[package]]
name = "tar"
version = "0.4.43"
version = "0.4.42"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c65998313f8e17d0d553d28f91a0df93e4dbbbf770279c7bc21ca0f09ea1a1f6"
checksum = "4ff6c40d3aedb5e06b57c6f669ad17ab063dd1e63d977c6a88e7f4dfa4f04020"
dependencies = [
"filetime",
"libc",

View File

@ -4,12 +4,10 @@
// SPDX-License-Identifier: MIT
mod category;
mod common;
#[cfg(target_os = "linux")]
mod linux;
#[cfg(target_os = "macos")]
mod macos;
mod path_utils;
mod platform;
mod settings;
mod updater_bundle;

View File

@ -3,14 +3,12 @@
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT
use super::{
super::{
common::{self, CommandExt},
path_utils,
},
debian,
use super::debian;
use crate::{
bundle::settings::Arch,
utils::{fs_utils, CommandExt},
Settings,
};
use crate::{bundle::settings::Arch, Settings};
use anyhow::Context;
use handlebars::Handlebars;
use std::{
@ -56,7 +54,7 @@ pub fn bundle_project(settings: &Settings) -> crate::Result<Vec<PathBuf>> {
// generate deb_folder structure
let (data_dir, icons) = debian::generate_data(&settings, &package_dir)
.with_context(|| "Failed to build data folders and files")?;
common::copy_custom_files(&settings.appimage().files, &data_dir)
fs_utils::copy_custom_files(&settings.appimage().files, &data_dir)
.with_context(|| "Failed to copy custom files")?;
let output_path = settings.project_out_directory().join("bundle/appimage");
@ -72,7 +70,7 @@ pub fn bundle_project(settings: &Settings) -> crate::Result<Vec<PathBuf>> {
arch
);
let appimage_path = output_path.join(&appimage_filename);
path_utils::create(app_dir_path, true)?;
fs_utils::create_dir(&app_dir_path, true)?;
// setup data to insert into shell script
let mut sh_map = BTreeMap::new();

View File

@ -23,8 +23,8 @@
// metadata, as well as generating the md5sums file. Currently we do not
// generate postinst or prerm files.
use super::{super::common, freedesktop};
use crate::{bundle::settings::Arch, Settings};
use super::freedesktop;
use crate::{bundle::settings::Arch, utils::fs_utils, Settings};
use anyhow::Context;
use flate2::{write::GzEncoder, Compression};
use tar::HeaderMode;
@ -73,7 +73,7 @@ pub fn bundle_project(settings: &Settings) -> crate::Result<Vec<PathBuf>> {
let (data_dir, _) = generate_data(settings, &package_dir)
.with_context(|| "Failed to build data folders and files")?;
common::copy_custom_files(&settings.deb().files, &data_dir)
fs_utils::copy_custom_files(&settings.deb().files, &data_dir)
.with_context(|| "Failed to copy custom files")?;
// Generate control files.
@ -113,7 +113,7 @@ pub fn generate_data(
for bin in settings.binaries() {
let bin_path = settings.binary_path(bin);
common::copy_file(&bin_path, bin_dir.join(bin.name()))
fs_utils::copy_file(&bin_path, &bin_dir.join(bin.name()))
.with_context(|| format!("Failed to copy binary from {bin_path:?}"))?;
}
@ -141,7 +141,7 @@ fn generate_changelog_file(settings: &Settings, data_dir: &Path) -> crate::Resul
let product_name = settings.product_name();
let dest_path = data_dir.join(format!("usr/share/doc/{product_name}/changelog.gz"));
let changelog_file = common::create_file(&dest_path)?;
let changelog_file = fs_utils::create_file(&dest_path)?;
let mut gzip_encoder = GzEncoder::new(changelog_file, Compression::new(9));
io::copy(&mut src_file, &mut gzip_encoder)?;
@ -161,7 +161,7 @@ fn generate_control_file(
// For more information about the format of this file, see
// https://www.debian.org/doc/debian-policy/ch-controlfields.html
let dest_path = control_dir.join("control");
let mut file = common::create_file(&dest_path)?;
let mut file = fs_utils::create_file(&dest_path)?;
let package = heck::AsKebabCase(settings.product_name());
writeln!(file, "Package: {}", package)?;
writeln!(file, "Version: {}", settings.version_string())?;
@ -294,7 +294,7 @@ fn create_script_file_from_path(from: &PathBuf, to: &PathBuf) -> crate::Result<(
/// for each file within the `data_dir`.
fn generate_md5sums(control_dir: &Path, data_dir: &Path) -> crate::Result<()> {
let md5sums_path = control_dir.join("md5sums");
let mut md5sums_file = common::create_file(&md5sums_path)?;
let mut md5sums_file = fs_utils::create_file(&md5sums_path)?;
for entry in WalkDir::new(data_dir) {
let entry = entry?;
let path = entry.path();
@ -327,7 +327,7 @@ fn copy_resource_files(settings: &Settings, data_dir: &Path) -> crate::Result<()
/// Create an empty file at the given path, creating any parent directories as
/// needed, then write `data` into the file.
fn create_file_with_data<P: AsRef<Path>>(path: P, data: &str) -> crate::Result<()> {
let mut file = common::create_file(path.as_ref())?;
let mut file = fs_utils::create_file(path.as_ref())?;
file.write_all(data.as_bytes())?;
file.flush()?;
Ok(())
@ -376,7 +376,7 @@ fn create_tar_from_dir<P: AsRef<Path>, W: Write>(src_dir: P, dest_file: W) -> cr
fn tar_and_gzip_dir<P: AsRef<Path>>(src_dir: P) -> crate::Result<PathBuf> {
let src_dir = src_dir.as_ref();
let dest_path = src_dir.with_extension("tar.gz");
let dest_file = common::create_file(&dest_path)?;
let dest_file = fs_utils::create_file(&dest_path)?;
let gzip_encoder = GzEncoder::new(dest_file, Compression::default());
let gzip_encoder = create_tar_from_dir(src_dir, gzip_encoder)?;
let mut dest_file = gzip_encoder.finish()?;
@ -387,7 +387,7 @@ fn tar_and_gzip_dir<P: AsRef<Path>>(src_dir: P) -> crate::Result<PathBuf> {
/// Creates an `ar` archive from the given source files and writes it to the
/// given destination path.
fn create_archive(srcs: Vec<PathBuf>, dest: &Path) -> crate::Result<()> {
let mut builder = ar::Builder::new(common::create_file(dest)?);
let mut builder = ar::Builder::new(fs_utils::create_file(dest)?);
for path in &srcs {
builder.append_path(path)?;
}

View File

@ -26,8 +26,10 @@ use handlebars::Handlebars;
use image::{self, codecs::png::PngDecoder, ImageDecoder};
use serde::Serialize;
use crate::bundle::common;
use crate::Settings;
use crate::{
utils::{self, fs_utils},
Settings,
};
#[derive(PartialEq, Eq, PartialOrd, Ord)]
pub struct Icon {
@ -65,7 +67,7 @@ pub fn list_icon_files(
let decoder = PngDecoder::new(BufReader::new(File::open(&icon_path)?))?;
let width = decoder.dimensions().0;
let height = decoder.dimensions().1;
let is_high_density = common::is_retina(&icon_path);
let is_high_density = utils::is_retina(&icon_path);
let dest_path = get_dest_path(width, height, is_high_density);
Icon {
width,
@ -84,7 +86,7 @@ pub fn list_icon_files(
pub fn copy_icon_files(settings: &Settings, data_dir: &Path) -> crate::Result<Vec<Icon>> {
let icons = list_icon_files(settings, data_dir)?;
for (icon, src) in &icons {
common::copy_file(src, &icon.path)?;
fs_utils::copy_file(src, &icon.path)?;
}
Ok(icons.into_keys().collect())
@ -105,7 +107,7 @@ pub fn generate_desktop_file(
let path = PathBuf::from("usr/share/applications").join(desktop_file_name);
let dest_path = PathBuf::from("/").join(&path);
let file_path = data_dir.join(&path);
let file = &mut common::create_file(&file_path)?;
let file = &mut fs_utils::create_file(&file_path)?;
let mut handlebars = Handlebars::new();
handlebars.register_escape_fn(handlebars::no_escape);

View File

@ -23,11 +23,13 @@
// files into the `Contents` directory of the bundle.
use super::{
super::common::{self, CommandExt},
icon::create_icns_file,
sign::{notarize, notarize_auth, sign, NotarizeAuthError, SignTarget},
};
use crate::Settings;
use crate::{
utils::{fs_utils, CommandExt},
Settings,
};
use anyhow::Context;
@ -157,7 +159,7 @@ fn copy_binaries_to_bundle(
for bin in settings.binaries() {
let bin_path = settings.binary_path(bin);
let dest_path = dest_dir.join(bin.name());
common::copy_file(&bin_path, &dest_path)
fs_utils::copy_file(&bin_path, &dest_path)
.with_context(|| format!("Failed to copy binary from {:?}", bin_path))?;
paths.push(dest_path);
}
@ -173,10 +175,10 @@ fn copy_custom_files_to_bundle(bundle_directory: &Path, settings: &Settings) ->
contents_path
};
if path.is_file() {
common::copy_file(path, bundle_directory.join(contents_path))
fs_utils::copy_file(path, &bundle_directory.join(contents_path))
.with_context(|| format!("Failed to copy file {:?} to {:?}", path, contents_path))?;
} else {
common::copy_dir(path, &bundle_directory.join(contents_path))
fs_utils::copy_dir(path, &bundle_directory.join(contents_path))
.with_context(|| format!("Failed to copy directory {:?} to {:?}", path, contents_path))?;
}
}
@ -349,7 +351,7 @@ fn copy_framework_from(dest_dir: &Path, framework: &str, src_dir: &Path) -> crat
let src_name = format!("{}.framework", framework);
let src_path = src_dir.join(&src_name);
if src_path.exists() {
common::copy_dir(&src_path, &dest_dir.join(&src_name))?;
fs_utils::copy_dir(&src_path, &dest_dir.join(&src_name))?;
Ok(true)
} else {
Ok(false)
@ -382,7 +384,7 @@ fn copy_frameworks_to_bundle(
.file_name()
.expect("Couldn't get framework filename");
let dest_path = dest_dir.join(src_name);
common::copy_dir(&src_path, &dest_path)?;
fs_utils::copy_dir(&src_path, &dest_path)?;
add_framework_sign_path(&src_path, &dest_path, &mut paths);
continue;
} else if framework.ends_with(".dylib") {
@ -395,7 +397,7 @@ fn copy_frameworks_to_bundle(
}
let src_name = src_path.file_name().expect("Couldn't get library filename");
let dest_path = dest_dir.join(src_name);
common::copy_file(&src_path, &dest_path)?;
fs_utils::copy_file(&src_path, &dest_path)?;
paths.push(SignTarget {
path: dest_path,
is_an_executable: false,

View File

@ -5,7 +5,8 @@
use super::{app, icon::create_icns_file};
use crate::{
bundle::{common::CommandExt, settings::Arch, Bundle},
bundle::{settings::Arch, Bundle},
utils::CommandExt,
PackageType, Settings,
};

View File

@ -3,7 +3,8 @@
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT
use crate::bundle::{common, Settings};
use crate::bundle::Settings;
use crate::utils::{self, fs_utils};
use std::{
cmp::min,
ffi::OsStr,
@ -28,7 +29,7 @@ pub fn create_icns_file(out_dir: &Path, settings: &Settings) -> crate::Result<Op
if icon_path.extension() == Some(OsStr::new("icns")) {
let mut dest_path = out_dir.to_path_buf();
dest_path.push(icon_path.file_name().expect("Could not get icon filename"));
common::copy_file(&icon_path, &dest_path)?;
fs_utils::copy_file(&icon_path, &dest_path)?;
return Ok(Some(dest_path));
}
}
@ -63,7 +64,7 @@ pub fn create_icns_file(out_dir: &Path, settings: &Settings) -> crate::Result<Op
for icon_path in settings.icon_files() {
let icon_path = icon_path?;
let icon = image::open(&icon_path)?;
let density = if common::is_retina(&icon_path) { 2 } else { 1 };
let density = if utils::is_retina(&icon_path) { 2 } else { 1 };
let (w, h) = icon.dimensions();
let orig_size = min(w, h);
let next_size_down = 2f32.powf((orig_size as f32).log2().floor()) as u32;

View File

@ -13,7 +13,10 @@
// See https://developer.apple.com/go/?id=bundle-structure for a full
// explanation.
use crate::{bundle::common, Settings};
use crate::{
utils::{self, fs_utils},
Settings,
};
use anyhow::Context;
use image::{codecs::png::PngDecoder, GenericImageView, ImageDecoder};
@ -50,7 +53,7 @@ pub fn bundle_project(settings: &Settings) -> crate::Result<Vec<PathBuf>> {
for src in settings.resource_files() {
let src = src?;
let dest = app_bundle_path.join(tauri_utils::resources::resource_relpath(&src));
common::copy_file(&src, &dest)
fs_utils::copy_file(&src, &dest)
.with_context(|| format!("Failed to copy resource file {:?}", src))?;
}
@ -61,7 +64,7 @@ pub fn bundle_project(settings: &Settings) -> crate::Result<Vec<PathBuf>> {
for bin in settings.binaries() {
let bin_path = settings.binary_path(bin);
common::copy_file(&bin_path, app_bundle_path.join(bin.name()))
fs_utils::copy_file(&bin_path, &app_bundle_path.join(bin.name()))
.with_context(|| format!("Failed to copy binary from {:?}", bin_path))?;
}
@ -93,11 +96,11 @@ fn generate_icon_files(bundle_dir: &Path, settings: &Settings) -> crate::Result<
let decoder = PngDecoder::new(BufReader::new(File::open(&icon_path)?))?;
let width = decoder.dimensions().0;
let height = decoder.dimensions().1;
let is_retina = common::is_retina(&icon_path);
let is_retina = utils::is_retina(&icon_path);
if !sizes.contains(&(width, height, is_retina)) {
sizes.insert((width, height, is_retina));
let dest_path = get_dest_path(width, height, is_retina);
common::copy_file(&icon_path, &dest_path)?;
fs_utils::copy_file(&icon_path, &dest_path)?;
}
}
// Fall back to non-PNG files for any missing sizes.
@ -121,12 +124,12 @@ fn generate_icon_files(bundle_dir: &Path, settings: &Settings) -> crate::Result<
} else {
let icon = image::open(&icon_path)?;
let (width, height) = icon.dimensions();
let is_retina = common::is_retina(&icon_path);
let is_retina = utils::is_retina(&icon_path);
if !sizes.contains(&(width, height, is_retina)) {
sizes.insert((width, height, is_retina));
let dest_path = get_dest_path(width, height, is_retina);
icon.write_to(
&mut common::create_file(&dest_path)?,
&mut fs_utils::create_file(&dest_path)?,
image::ImageFormat::Png,
)?;
}
@ -142,7 +145,7 @@ fn generate_info_plist(
settings: &Settings,
icon_filenames: &[String],
) -> crate::Result<()> {
let file = &mut common::create_file(&bundle_dir.join("Info.plist"))?;
let file = &mut fs_utils::create_file(&bundle_dir.join("Info.plist"))?;
writeln!(
file,
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\

View File

@ -1,287 +0,0 @@
// Copyright 2016-2019 Cargo-Bundle developers <https://github.com/burtonageo/cargo-bundle>
// Copyright 2019-2024 Tauri Programme within The Commons Conservancy
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT
use std::{
fs::{create_dir, create_dir_all, read_dir, remove_dir_all},
path::{Path, PathBuf},
};
/// Directory options.
#[derive(Default, Clone)]
pub struct DirOpts {
pub depth: u64,
}
/// File options.
pub struct FileOpts {
pub overwrite: bool,
pub skip: bool,
#[allow(dead_code)]
pub buffer_size: usize,
}
/// Copy options.
#[derive(Clone)]
pub struct Options {
pub overwrite: bool,
pub skip: bool,
pub buffer_size: usize,
pub copy_files: bool,
pub content_only: bool,
pub depth: u64,
}
/// Directory information descriptor
pub struct DirInfo {
pub size: u64,
pub files: Vec<String>,
pub directories: Vec<String>,
}
impl Default for Options {
fn default() -> Options {
Options {
overwrite: false,
skip: false,
buffer_size: 64000,
copy_files: false,
content_only: false,
depth: 0,
}
}
}
impl Default for FileOpts {
fn default() -> FileOpts {
FileOpts {
overwrite: false,
skip: false,
buffer_size: 64000,
}
}
}
/// Creates the given directory path,
/// erasing it first if specified.
pub fn create<P>(path: P, erase: bool) -> crate::Result<()>
where
P: AsRef<Path>,
{
if erase && path.as_ref().exists() {
remove(&path)?;
}
Ok(create_dir(&path)?)
}
/// Creates all of the directories of the specified path,
/// erasing it first if specified.
pub fn create_all<P>(path: P, erase: bool) -> crate::Result<()>
where
P: AsRef<Path>,
{
if erase && path.as_ref().exists() {
remove(&path)?;
}
Ok(create_dir_all(&path)?)
}
/// Removes the directory if it exists.
pub fn remove<P: AsRef<Path>>(path: P) -> crate::Result<()> {
if path.as_ref().exists() {
Ok(remove_dir_all(path)?)
} else {
Ok(())
}
}
/// Copy file with the given options.
pub fn copy_file<P, Q>(from: P, to: Q, options: &FileOpts) -> crate::Result<u64>
where
P: AsRef<Path>,
Q: AsRef<Path>,
{
let from = from.as_ref();
if !from.exists() {
if let Some(msg) = from.to_str() {
let msg = format!("Path \"{msg}\" does not exist or you don't have access");
return Err(crate::Error::PathUtilError(msg));
}
return Err(crate::Error::PathUtilError(
"Path does not exist or you don't have access!".to_owned(),
));
}
if !from.is_file() {
if let Some(msg) = from.to_str() {
let msg = format!("Path \"{msg}\" is not a file!");
return Err(crate::Error::PathUtilError(msg));
}
return Err(crate::Error::PathUtilError(
"Path is not a file!".to_owned(),
));
}
if !options.overwrite && to.as_ref().exists() {
if options.skip {
return Ok(0);
}
if let Some(msg) = to.as_ref().to_str() {
let msg = format!("Path \"{msg}\" is exist");
return Err(crate::Error::PathUtilError(msg));
}
}
Ok(std::fs::copy(from, to)?)
}
/// Copies the directory with the given options.
#[allow(dead_code)]
pub fn copy<P, Q>(from: P, to: Q, options: &Options) -> crate::Result<u64>
where
P: AsRef<Path>,
Q: AsRef<Path>,
{
let from = from.as_ref();
if !from.exists() {
if let Some(msg) = from.to_str() {
let msg = format!("Path \"{msg}\" does not exist or you don't have access!");
return Err(crate::Error::PathUtilError(msg));
}
return Err(crate::Error::PathUtilError(
"Path does not exist or you don't have access".to_owned(),
));
}
if !from.is_dir() {
if let Some(msg) = from.to_str() {
let msg = format!("Path \"{msg}\" is not a directory!");
return Err(crate::Error::PathUtilError(msg));
}
return Err(crate::Error::PathUtilError(
"Path is not a directory".to_owned(),
));
}
let dir_name = if let Some(val) = from.components().last() {
val.as_os_str()
} else {
return Err(crate::Error::PathUtilError(
"Invalid Folder form".to_owned(),
));
};
let mut to: PathBuf = to.as_ref().to_path_buf();
if !options.content_only && (!options.copy_files || to.exists()) {
to.push(dir_name);
}
let mut read_options = DirOpts::default();
if options.depth > 0 {
read_options.depth = options.depth;
}
let dir_content = get_dir_info(from, &read_options)?;
for directory in dir_content.directories {
let tmp_to = Path::new(&directory).strip_prefix(from)?;
let dir = to.join(tmp_to);
if !dir.exists() {
if options.copy_files {
create_all(dir, false)?;
} else {
create(dir, false)?;
}
}
}
let mut result: u64 = 0;
for file in dir_content.files {
let to = to.to_path_buf();
let tp = Path::new(&file).strip_prefix(from)?;
let path = to.join(tp);
let file_options = FileOpts {
overwrite: options.overwrite,
skip: options.skip,
buffer_size: options.buffer_size,
};
let mut result_copy: crate::Result<u64>;
let mut work = true;
while work {
#[allow(clippy::needless_borrow)]
{
result_copy = copy_file(&file, &path, &file_options);
}
match result_copy {
Ok(val) => {
result += val;
work = false;
}
Err(err) => {
let err_msg = err.to_string();
return Err(crate::Error::PathUtilError(err_msg));
}
}
}
}
Ok(result)
}
/// Gets the DirInfo from the directory path with the given options.
pub fn get_dir_info<P>(path: P, options: &DirOpts) -> crate::Result<DirInfo>
where
P: AsRef<Path>,
{
let depth = if options.depth == 0 {
0
} else {
options.depth + 1
};
_get_dir_info(path, depth)
}
/// Gets the DirInfo from the directory with the given depth.
fn _get_dir_info<P>(path: P, mut depth: u64) -> crate::Result<DirInfo>
where
P: AsRef<Path>,
{
let mut directories = Vec::new();
let mut files = Vec::new();
let mut size = 0;
let item = path.as_ref().to_str();
if item.is_none() {
return Err(crate::Error::PathUtilError("Invalid Path".to_owned()));
}
let item = item.expect("Item had no data").to_string();
if path.as_ref().is_dir() {
directories.push(item);
if depth == 0 || depth > 1 {
if depth > 1 {
depth -= 1;
}
for entry in read_dir(&path)? {
let _path = entry?.path();
match _get_dir_info(_path, depth) {
Ok(items) => {
let mut _files = items.files;
let mut _directories = items.directories;
size += items.size;
files.append(&mut _files);
directories.append(&mut _directories);
}
Err(err) => return Err(err),
}
}
}
} else {
size = path.as_ref().metadata()?.len();
files.push(item);
}
Ok(DirInfo {
size,
files,
directories,
})
}
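The file deleted above is the old path_utils module: directory and copy helpers driven by option structs (DirOpts, FileOpts, Options) plus a recursive get_dir_info walker, most of which the bundler never called; that is what the commit title's "remove unused fs utils" appears to refer to. The one non-trivial call site, the WiX icon copy further down, drops its FileOpts block because the replacement fs_utils::copy_file takes just two paths. A sketch of that before/after (the wrapper name is illustrative):

use std::path::Path;

use crate::utils::fs_utils;

// Before (from the WiX hunk below):
//   copy_file(icon_path, &icon_target_path,
//             &FileOpts { overwrite: true, ..Default::default() })?;
// After: no options struct; both arguments are plain &Path.
fn copy_icon_sketch(icon_path: &Path, icon_target_path: &Path) -> crate::Result<()> {
    fs_utils::copy_file(icon_path, icon_target_path)?;
    Ok(())
}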

View File

@ -3,7 +3,7 @@
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT
use super::common::CommandExt;
use crate::utils::CommandExt;
use std::process::Command;
// Copyright 2019-2024 Tauri Programme within The Commons Conservancy

View File

@ -4,7 +4,7 @@
// SPDX-License-Identifier: MIT
use super::category::AppCategory;
use crate::bundle::{common, platform::target_triple};
use crate::{bundle::platform::target_triple, utils::fs_utils};
use anyhow::Context;
pub use tauri_utils::config::WebviewInstallMode;
use tauri_utils::{
@ -235,7 +235,7 @@ pub struct RpmSettings {
/// in order for the package to be installed.
pub conflicts: Option<Vec<String>>,
/// The list of RPM dependencies your application supersedes - if this package is installed,
/// packages listed as “obsoletes” will be automatically removed (if they are present).
/// packages listed as "obsoletes" will be automatically removed (if they are present).
pub obsoletes: Option<Vec<String>>,
/// The RPM release tag.
pub release: String,
@ -1064,7 +1064,7 @@ impl Settings {
.to_string_lossy()
.replace(&format!("-{}", self.target), ""),
);
common::copy_file(&src, &dest)?;
fs_utils::copy_file(&src, &dest)?;
paths.push(dest);
}
Ok(paths)
@ -1075,7 +1075,7 @@ impl Settings {
for resource in self.resource_files().iter() {
let resource = resource?;
let dest = path.join(resource.target());
common::copy_file(resource.path(), dest)?;
fs_utils::copy_file(resource.path(), &dest)?;
}
Ok(())
}

View File

@ -3,8 +3,6 @@
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT
use super::common;
use crate::{
bundle::{
windows::{
@ -13,6 +11,7 @@ use crate::{
},
Bundle,
},
utils::fs_utils,
Settings,
};
use tauri_utils::display_path;
@ -210,7 +209,7 @@ fn bundle_update_windows(settings: &Settings, bundles: &[Bundle]) -> crate::Resu
pub fn create_zip(src_file: &Path, dst_file: &Path) -> crate::Result<PathBuf> {
let parent_dir = dst_file.parent().expect("No data in parent");
fs::create_dir_all(parent_dir)?;
let writer = common::create_file(dst_file)?;
let writer = fs_utils::create_file(dst_file)?;
let file_name = src_file
.file_name()
@ -235,7 +234,7 @@ pub fn create_zip(src_file: &Path, dst_file: &Path) -> crate::Result<PathBuf> {
fn create_tar(src_dir: &Path, dest_path: &Path) -> crate::Result<PathBuf> {
use flate2::{write::GzEncoder, Compression};
let dest_file = common::create_file(dest_path)?;
let dest_file = fs_utils::create_file(dest_path)?;
let gzip_encoder = GzEncoder::new(dest_file, Compression::default());
let gzip_encoder = create_tar_from_src(src_dir, gzip_encoder)?;

View File

@ -3,17 +3,22 @@
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT
use crate::bundle::{
common::CommandExt,
path_utils::{copy_file, FileOpts},
settings::{Arch, Settings},
windows::{
sign::try_sign,
util::{
download_and_verify, download_webview2_bootstrapper, download_webview2_offline_installer,
extract_zip, HashAlgorithm, WIX_OUTPUT_FOLDER_NAME, WIX_UPDATER_OUTPUT_FOLDER_NAME,
use crate::{
bundle::{
settings::{Arch, Settings},
windows::{
sign::try_sign,
util::{
download_webview2_bootstrapper, download_webview2_offline_installer,
WIX_OUTPUT_FOLDER_NAME, WIX_UPDATER_OUTPUT_FOLDER_NAME,
},
},
},
utils::{
fs_utils::copy_file,
http_utils::{download_and_verify, extract_zip, HashAlgorithm},
CommandExt,
},
};
use anyhow::{bail, Context};
use handlebars::{html_escape, to_json, Handlebars};
@ -198,14 +203,7 @@ fn copy_icon(settings: &Settings, filename: &str, path: &Path) -> crate::Result<
let icon_path = std::env::current_dir()?.join(path);
copy_file(
icon_path,
&icon_target_path,
&FileOpts {
overwrite: true,
..Default::default()
},
)?;
copy_file(&icon_path, &icon_target_path)?;
Ok(icon_target_path)
}

View File

@ -2,17 +2,21 @@
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT
use crate::bundle::settings::Arch;
use crate::bundle::windows::sign::{sign_command, try_sign};
use crate::{
bundle::{
common::CommandExt,
windows::util::{
download_and_verify, download_webview2_bootstrapper, download_webview2_offline_installer,
verify_file_hash, HashAlgorithm, NSIS_OUTPUT_FOLDER_NAME, NSIS_UPDATER_OUTPUT_FOLDER_NAME,
settings::Arch,
windows::{
sign::{sign_command, try_sign},
util::{
download_webview2_bootstrapper, download_webview2_offline_installer,
NSIS_OUTPUT_FOLDER_NAME, NSIS_UPDATER_OUTPUT_FOLDER_NAME,
},
},
},
utils::{
http_utils::{download_and_verify, verify_file_hash, HashAlgorithm},
CommandExt,
},
Settings,
};
use tauri_utils::display_path;
@ -108,7 +112,7 @@ fn get_and_extract_nsis(nsis_toolset_path: &Path, _tauri_tools_path: &Path) -> c
{
let data = download_and_verify(NSIS_URL, NSIS_SHA1, HashAlgorithm::Sha1)?;
log::info!("extracting NSIS");
crate::bundle::windows::util::extract_zip(&data, _tauri_tools_path)?;
crate::utils::http_utils::extract_zip(&data, _tauri_tools_path)?;
fs::rename(_tauri_tools_path.join("nsis-3.08"), nsis_toolset_path)?;
}

View File

@ -6,7 +6,7 @@
use crate::bundle::settings::CustomSignCommandSettings;
#[cfg(windows)]
use crate::bundle::windows::util;
use crate::{bundle::common::CommandExt, Settings};
use crate::{utils::CommandExt, Settings};
#[cfg(windows)]
use std::path::PathBuf;
#[cfg(windows)]

View File

@ -3,15 +3,11 @@
// SPDX-License-Identifier: MIT
use std::{
fs::{create_dir_all, File},
io::{Cursor, Read, Write},
fs::create_dir_all,
path::{Path, PathBuf},
};
use regex::Regex;
use sha2::Digest;
use url::Url;
use zip::ZipArchive;
use crate::utils::http_utils::download;
pub const WEBVIEW2_BOOTSTRAPPER_URL: &str = "https://go.microsoft.com/fwlink/p/?LinkId=2124703";
pub const WEBVIEW2_OFFLINE_INSTALLER_X86_URL: &str =
@ -69,148 +65,6 @@ pub fn download_webview2_offline_installer(base_path: &Path, arch: &str) -> crat
Ok(file_path)
}
fn generate_github_mirror_url_from_template(github_url: &str) -> Option<String> {
std::env::var("TAURI_BUNDLER_TOOLS_GITHUB_MIRROR_TEMPLATE")
.ok()
.and_then(|template| {
let re =
Regex::new(r"https://github.com/([^/]+)/([^/]+)/releases/download/([^/]+)/(.*)").unwrap();
re.captures(github_url).map(|caps| {
template
.replace("<owner>", &caps[1])
.replace("<repo>", &caps[2])
.replace("<version>", &caps[3])
.replace("<asset>", &caps[4])
})
})
}
fn generate_github_mirror_url_from_base(github_url: &str) -> Option<String> {
std::env::var("TAURI_BUNDLER_TOOLS_GITHUB_MIRROR")
.ok()
.and_then(|cdn| Url::parse(&cdn).ok())
.map(|mut cdn| {
cdn.set_path(github_url);
cdn.to_string()
})
}
fn generate_github_alternative_url(url: &str) -> Option<(ureq::Agent, String)> {
if !url.starts_with("https://github.com/") {
return None;
}
generate_github_mirror_url_from_template(url)
.or_else(|| generate_github_mirror_url_from_base(url))
.map(|alt_url| (ureq::AgentBuilder::new().build(), alt_url))
}
fn create_agent_and_url(url: &str) -> (ureq::Agent, String) {
generate_github_alternative_url(url).unwrap_or((
ureq::AgentBuilder::new().try_proxy_from_env(true).build(),
url.to_owned(),
))
}
pub fn download(url: &str) -> crate::Result<Vec<u8>> {
let (agent, final_url) = create_agent_and_url(url);
log::info!(action = "Downloading"; "{}", final_url);
let response = agent.get(&final_url).call().map_err(Box::new)?;
let mut bytes = Vec::new();
response.into_reader().read_to_end(&mut bytes)?;
Ok(bytes)
}
#[derive(Clone, Copy)]
pub enum HashAlgorithm {
#[cfg(target_os = "windows")]
Sha256,
Sha1,
}
/// Downloads a file and verifies it against the expected hash.
pub fn download_and_verify(
url: &str,
hash: &str,
hash_algorithm: HashAlgorithm,
) -> crate::Result<Vec<u8>> {
let data = download(url)?;
log::info!("validating hash");
verify_hash(&data, hash, hash_algorithm)?;
Ok(data)
}
pub fn verify_hash(data: &[u8], hash: &str, hash_algorithm: HashAlgorithm) -> crate::Result<()> {
match hash_algorithm {
#[cfg(target_os = "windows")]
HashAlgorithm::Sha256 => {
let hasher = sha2::Sha256::new();
verify_data_with_hasher(data, hash, hasher)
}
HashAlgorithm::Sha1 => {
let hasher = sha1::Sha1::new();
verify_data_with_hasher(data, hash, hasher)
}
}
}
fn verify_data_with_hasher(data: &[u8], hash: &str, mut hasher: impl Digest) -> crate::Result<()> {
hasher.update(data);
let url_hash = hasher.finalize().to_vec();
let expected_hash = hex::decode(hash)?;
if expected_hash == url_hash {
Ok(())
} else {
Err(crate::Error::HashError)
}
}
pub fn verify_file_hash<P: AsRef<Path>>(
path: P,
hash: &str,
hash_algorithm: HashAlgorithm,
) -> crate::Result<()> {
let data = std::fs::read(path)?;
verify_hash(&data, hash, hash_algorithm)
}
/// Extracts a zip archive from memory into the given directory.
#[allow(dead_code)]
pub fn extract_zip(data: &[u8], path: &Path) -> crate::Result<()> {
let cursor = Cursor::new(data);
let mut zipa = ZipArchive::new(cursor)?;
for i in 0..zipa.len() {
let mut file = zipa.by_index(i)?;
if let Some(name) = file.enclosed_name() {
let dest_path = path.join(name);
if file.is_dir() {
create_dir_all(&dest_path)?;
continue;
}
let parent = dest_path.parent().expect("Failed to get parent");
if !parent.exists() {
create_dir_all(parent)?;
}
let mut buff: Vec<u8> = Vec::new();
file.read_to_end(&mut buff)?;
let mut fileout = File::create(dest_path).expect("Failed to open file");
fileout.write_all(&buff)?;
}
}
Ok(())
}
#[cfg(target_os = "windows")]
pub fn os_bitness<'a>() -> Option<&'a str> {
use windows_sys::Win32::System::SystemInformation::{
@ -225,57 +79,3 @@ pub fn os_bitness<'a>() -> Option<&'a str> {
_ => None,
}
}
#[cfg(test)]
mod tests {
use super::generate_github_mirror_url_from_template;
use std::env;
const GITHUB_ASSET_URL: &str =
"https://github.com/wixtoolset/wix3/releases/download/wix3112rtm/wix311-binaries.zip";
const NON_GITHUB_ASSET_URL: &str = "https://someotherwebsite.com/somefile.zip";
#[test]
fn test_generate_mirror_url_no_env_var() {
env::remove_var("TAURI_BUNDLER_TOOLS_GITHUB_MIRROR_TEMPLATE");
assert!(generate_github_mirror_url_from_template(GITHUB_ASSET_URL).is_none());
}
#[test]
fn test_generate_mirror_url_non_github_url() {
env::set_var(
"TAURI_BUNDLER_TOOLS_GITHUB_MIRROR_TEMPLATE",
"https://mirror.example.com/<owner>/<repo>/releases/download/<version>/<asset>",
);
assert!(generate_github_mirror_url_from_template(NON_GITHUB_ASSET_URL).is_none());
}
struct TestCase {
template: &'static str,
expected_url: &'static str,
}
#[test]
fn test_generate_mirror_url_correctly() {
let test_cases = vec![
TestCase {
template: "https://mirror.example.com/<owner>/<repo>/releases/download/<version>/<asset>",
expected_url: "https://mirror.example.com/wixtoolset/wix3/releases/download/wix3112rtm/wix311-binaries.zip",
},
TestCase {
template: "https://mirror.example.com/<asset>",
expected_url: "https://mirror.example.com/wix311-binaries.zip",
},
];
for case in test_cases {
env::set_var("TAURI_BUNDLER_TOOLS_GITHUB_MIRROR_TEMPLATE", case.template);
assert_eq!(
generate_github_mirror_url_from_template(GITHUB_ASSET_URL),
Some(case.expected_url.to_string())
);
}
}
}

View File

@ -25,5 +25,6 @@
/// The bundle API.
pub mod bundle;
mod error;
mod utils;
pub use bundle::*;
pub use error::{Error, Result};

View File

@ -4,28 +4,11 @@
// SPDX-License-Identifier: MIT
use std::{
ffi::OsStr,
fs::{self, File},
io::{self, BufRead, BufReader, BufWriter},
io::{self, BufWriter},
path::Path,
process::{Command, ExitStatus, Output, Stdio},
sync::{Arc, Mutex},
};
/// Returns true if the path has a filename indicating that it is a high-density
/// "retina" icon. Specifically, returns true the file stem ends with
/// "@2x" (a convention specified by the [Apple developer docs](
/// <https://developer.apple.com/library/mac/documentation/GraphicsAnimation/Conceptual/HighResolutionOSX/Optimizing/Optimizing.html>)).
#[allow(dead_code)]
pub fn is_retina<P: AsRef<Path>>(path: P) -> bool {
path
.as_ref()
.file_stem()
.and_then(OsStr::to_str)
.map(|stem| stem.ends_with("@2x"))
.unwrap_or(false)
}
/// Creates a new file at the given path, creating any parent directories as
/// needed.
pub fn create_file(path: &Path) -> crate::Result<BufWriter<File>> {
@ -36,6 +19,36 @@ pub fn create_file(path: &Path) -> crate::Result<BufWriter<File>> {
Ok(BufWriter::new(file))
}
/// Creates the given directory path,
/// erasing it first if specified.
#[allow(dead_code)]
pub fn create_dir(path: &Path, erase: bool) -> crate::Result<()> {
if erase && path.exists() {
remove_dir_all(path)?;
}
Ok(fs::create_dir(path)?)
}
/// Creates all of the directories of the specified path,
/// erasing it first if specified.
#[allow(dead_code)]
pub fn create_dir_all(path: &Path, erase: bool) -> crate::Result<()> {
if erase && path.exists() {
remove_dir_all(path)?;
}
Ok(fs::create_dir_all(path)?)
}
/// Removes the directory and its contents if it exists.
#[allow(dead_code)]
pub fn remove_dir_all(path: &Path) -> crate::Result<()> {
if path.exists() {
Ok(fs::remove_dir_all(path)?)
} else {
Ok(())
}
}
/// Makes a symbolic link to a directory.
#[cfg(unix)]
#[allow(dead_code)]
@ -63,11 +76,9 @@ fn symlink_file(src: &Path, dst: &Path) -> io::Result<()> {
}
/// Copies a regular file from one path to another, creating any parent
/// directories of the destination path as necessary. Fails if the source path
/// is a directory or doesn't exist.
pub fn copy_file(from: impl AsRef<Path>, to: impl AsRef<Path>) -> crate::Result<()> {
let from = from.as_ref();
let to = to.as_ref();
pub fn copy_file(from: &Path, to: &Path) -> crate::Result<()> {
if !from.exists() {
return Err(crate::Error::GenericError(format!(
"{from:?} does not exist"
@ -151,7 +162,7 @@ pub fn copy_custom_files(
pkg_path
};
if path.is_file() {
copy_file(path, data_dir.join(pkg_path))?;
copy_file(path, &data_dir.join(pkg_path))?;
} else {
copy_dir(path, &data_dir.join(pkg_path))?;
}
@ -159,93 +170,10 @@ pub fn copy_custom_files(
Ok(())
}
pub trait CommandExt {
// The `piped` function sets the stdout and stderr to properly
// show the command output in the Node.js wrapper.
fn piped(&mut self) -> std::io::Result<ExitStatus>;
fn output_ok(&mut self) -> crate::Result<Output>;
}
impl CommandExt for Command {
fn piped(&mut self) -> std::io::Result<ExitStatus> {
self.stdin(os_pipe::dup_stdin()?);
self.stdout(os_pipe::dup_stdout()?);
self.stderr(os_pipe::dup_stderr()?);
let program = self.get_program().to_string_lossy().into_owned();
log::debug!(action = "Running"; "Command `{} {}`", program, self.get_args().map(|arg| arg.to_string_lossy()).fold(String::new(), |acc, arg| format!("{acc} {arg}")));
self.status().map_err(Into::into)
}
fn output_ok(&mut self) -> crate::Result<Output> {
let program = self.get_program().to_string_lossy().into_owned();
log::debug!(action = "Running"; "Command `{} {}`", program, self.get_args().map(|arg| arg.to_string_lossy()).fold(String::new(), |acc, arg| format!("{acc} {arg}")));
self.stdout(Stdio::piped());
self.stderr(Stdio::piped());
let mut child = self.spawn()?;
let mut stdout = child.stdout.take().map(BufReader::new).unwrap();
let stdout_lines = Arc::new(Mutex::new(Vec::new()));
let stdout_lines_ = stdout_lines.clone();
std::thread::spawn(move || {
let mut line = String::new();
let mut lines = stdout_lines_.lock().unwrap();
loop {
line.clear();
match stdout.read_line(&mut line) {
Ok(0) => break,
Ok(_) => {
log::debug!(action = "stdout"; "{}", line.trim_end());
lines.extend(line.as_bytes().to_vec());
}
Err(_) => (),
}
}
});
let mut stderr = child.stderr.take().map(BufReader::new).unwrap();
let stderr_lines = Arc::new(Mutex::new(Vec::new()));
let stderr_lines_ = stderr_lines.clone();
std::thread::spawn(move || {
let mut line = String::new();
let mut lines = stderr_lines_.lock().unwrap();
loop {
line.clear();
match stderr.read_line(&mut line) {
Ok(0) => break,
Ok(_) => {
log::debug!(action = "stderr"; "{}", line.trim_end());
lines.extend(line.as_bytes().to_vec());
}
Err(_) => (),
}
}
});
let status = child.wait()?;
let output = Output {
status,
stdout: std::mem::take(&mut *stdout_lines.lock().unwrap()),
stderr: std::mem::take(&mut *stderr_lines.lock().unwrap()),
};
if output.status.success() {
Ok(output)
} else {
Err(crate::Error::GenericError(format!(
"failed to run {program}"
)))
}
}
}
#[cfg(test)]
mod tests {
use super::{create_file, is_retina};
use std::{io::Write, path::PathBuf};
use tauri_utils::resources::resource_relpath;
use super::create_file;
use std::io::Write;
#[test]
fn create_file_with_parent_dirs() {
@ -263,6 +191,8 @@ mod tests {
#[cfg(not(windows))]
#[test]
fn copy_dir_with_symlinks() {
use std::path::PathBuf;
// Create a directory structure that looks like this:
// ${TMP}/orig/
// sub/
@ -310,26 +240,4 @@ mod tests {
b"Hello, world!\n"
);
}
#[test]
fn retina_icon_paths() {
assert!(!is_retina("data/icons/512x512.png"));
assert!(is_retina("data/icons/512x512@2x.png"));
}
#[test]
fn resource_relative_paths() {
assert_eq!(
resource_relpath(&PathBuf::from("./data/images/button.png")),
PathBuf::from("data/images/button.png")
);
assert_eq!(
resource_relpath(&PathBuf::from("../../images/wheel.png")),
PathBuf::from("_up_/_up_/images/wheel.png")
);
assert_eq!(
resource_relpath(&PathBuf::from("/home/ferris/crab.png")),
PathBuf::from("_root_/home/ferris/crab.png")
);
}
}
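The hunk above also gives the surviving file-system helpers their new shape: create_file keeps creating parent directories, copy_file is narrowed to plain &Path arguments (still creating missing parents of the destination), and small create_dir / create_dir_all / remove_dir_all wrappers with an erase flag replace the deleted path_utils equivalents. A minimal sketch of how they combine (the output layout and file names are made up for illustration):

use std::path::Path;

use crate::utils::fs_utils;

fn prepare_bundle_dir(out_dir: &Path, icon_src: &Path) -> crate::Result<()> {
    // Recreate the output directory from scratch: `true` erases a previous run.
    fs_utils::create_dir(out_dir, true)?;
    // Nested paths go through create_dir_all; `false` keeps whatever exists.
    fs_utils::create_dir_all(&out_dir.join("icons/hicolor"), false)?;
    // copy_file creates any missing parent directories of the destination.
    fs_utils::copy_file(icon_src, &out_dir.join("icons/hicolor/app.png"))?;
    Ok(())
}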

View File

@ -0,0 +1,216 @@
// Copyright 2016-2019 Cargo-Bundle developers <https://github.com/burtonageo/cargo-bundle>
// Copyright 2019-2024 Tauri Programme within The Commons Conservancy
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT
use std::{
fs::{create_dir_all, File},
io::{Cursor, Read, Write},
path::Path,
};
use regex::Regex;
use sha2::Digest;
use url::Url;
use zip::ZipArchive;
fn generate_github_mirror_url_from_template(github_url: &str) -> Option<String> {
std::env::var("TAURI_BUNDLER_TOOLS_GITHUB_MIRROR_TEMPLATE")
.ok()
.and_then(|template| {
let re =
Regex::new(r"https://github.com/([^/]+)/([^/]+)/releases/download/([^/]+)/(.*)").unwrap();
re.captures(github_url).map(|caps| {
template
.replace("<owner>", &caps[1])
.replace("<repo>", &caps[2])
.replace("<version>", &caps[3])
.replace("<asset>", &caps[4])
})
})
}
fn generate_github_mirror_url_from_base(github_url: &str) -> Option<String> {
std::env::var("TAURI_BUNDLER_TOOLS_GITHUB_MIRROR")
.ok()
.and_then(|cdn| Url::parse(&cdn).ok())
.map(|mut cdn| {
cdn.set_path(github_url);
cdn.to_string()
})
}
fn generate_github_alternative_url(url: &str) -> Option<(ureq::Agent, String)> {
if !url.starts_with("https://github.com/") {
return None;
}
generate_github_mirror_url_from_template(url)
.or_else(|| generate_github_mirror_url_from_base(url))
.map(|alt_url| (ureq::AgentBuilder::new().build(), alt_url))
}
fn create_agent_and_url(url: &str) -> (ureq::Agent, String) {
generate_github_alternative_url(url).unwrap_or((
ureq::AgentBuilder::new().try_proxy_from_env(true).build(),
url.to_owned(),
))
}
#[allow(dead_code)]
pub fn download(url: &str) -> crate::Result<Vec<u8>> {
let (agent, final_url) = create_agent_and_url(url);
log::info!(action = "Downloading"; "{}", final_url);
let response = agent.get(&final_url).call().map_err(Box::new)?;
let mut bytes = Vec::new();
response.into_reader().read_to_end(&mut bytes)?;
Ok(bytes)
}
#[allow(dead_code)]
#[derive(Clone, Copy)]
pub enum HashAlgorithm {
#[cfg(target_os = "windows")]
Sha256,
Sha1,
}
/// Downloads a file and verifies it against the expected hash.
#[allow(dead_code)]
pub fn download_and_verify(
url: &str,
hash: &str,
hash_algorithm: HashAlgorithm,
) -> crate::Result<Vec<u8>> {
let data = download(url)?;
log::info!("validating hash");
verify_hash(&data, hash, hash_algorithm)?;
Ok(data)
}
#[allow(dead_code)]
pub fn verify_hash(data: &[u8], hash: &str, hash_algorithm: HashAlgorithm) -> crate::Result<()> {
match hash_algorithm {
#[cfg(target_os = "windows")]
HashAlgorithm::Sha256 => {
let hasher = sha2::Sha256::new();
verify_data_with_hasher(data, hash, hasher)
}
HashAlgorithm::Sha1 => {
let hasher = sha1::Sha1::new();
verify_data_with_hasher(data, hash, hasher)
}
}
}
fn verify_data_with_hasher(data: &[u8], hash: &str, mut hasher: impl Digest) -> crate::Result<()> {
hasher.update(data);
let url_hash = hasher.finalize().to_vec();
let expected_hash = hex::decode(hash)?;
if expected_hash == url_hash {
Ok(())
} else {
Err(crate::Error::HashError)
}
}
#[allow(dead_code)]
pub fn verify_file_hash<P: AsRef<Path>>(
path: P,
hash: &str,
hash_algorithm: HashAlgorithm,
) -> crate::Result<()> {
let data = std::fs::read(path)?;
verify_hash(&data, hash, hash_algorithm)
}
/// Extracts a zip archive from memory into the given directory.
#[allow(dead_code)]
pub fn extract_zip(data: &[u8], path: &Path) -> crate::Result<()> {
let cursor = Cursor::new(data);
let mut zipa = ZipArchive::new(cursor)?;
for i in 0..zipa.len() {
let mut file = zipa.by_index(i)?;
if let Some(name) = file.enclosed_name() {
let dest_path = path.join(name);
if file.is_dir() {
create_dir_all(&dest_path)?;
continue;
}
let parent = dest_path.parent().expect("Failed to get parent");
if !parent.exists() {
create_dir_all(parent)?;
}
let mut buff: Vec<u8> = Vec::new();
file.read_to_end(&mut buff)?;
let mut fileout = File::create(dest_path).expect("Failed to open file");
fileout.write_all(&buff)?;
}
}
Ok(())
}
#[cfg(test)]
mod tests {
use super::generate_github_mirror_url_from_template;
use std::env;
const GITHUB_ASSET_URL: &str =
"https://github.com/wixtoolset/wix3/releases/download/wix3112rtm/wix311-binaries.zip";
const NON_GITHUB_ASSET_URL: &str = "https://someotherwebsite.com/somefile.zip";
#[test]
fn test_generate_mirror_url_no_env_var() {
env::remove_var("TAURI_BUNDLER_TOOLS_GITHUB_MIRROR_TEMPLATE");
assert!(generate_github_mirror_url_from_template(GITHUB_ASSET_URL).is_none());
}
#[test]
fn test_generate_mirror_url_non_github_url() {
env::set_var(
"TAURI_BUNDLER_TOOLS_GITHUB_MIRROR_TEMPLATE",
"https://mirror.example.com/<owner>/<repo>/releases/download/<version>/<asset>",
);
assert!(generate_github_mirror_url_from_template(NON_GITHUB_ASSET_URL).is_none());
}
struct TestCase {
template: &'static str,
expected_url: &'static str,
}
#[test]
fn test_generate_mirror_url_correctly() {
let test_cases = vec![
TestCase {
template: "https://mirror.example.com/<owner>/<repo>/releases/download/<version>/<asset>",
expected_url: "https://mirror.example.com/wixtoolset/wix3/releases/download/wix3112rtm/wix311-binaries.zip",
},
TestCase {
template: "https://mirror.example.com/<asset>",
expected_url: "https://mirror.example.com/wix311-binaries.zip",
},
];
for case in test_cases {
env::set_var("TAURI_BUNDLER_TOOLS_GITHUB_MIRROR_TEMPLATE", case.template);
assert_eq!(
generate_github_mirror_url_from_template(GITHUB_ASSET_URL),
Some(case.expected_url.to_string())
);
}
}
}
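The new http_utils module above collects what bundle::windows::util used to provide: download() optionally rewrites GitHub release URLs through the TAURI_BUNDLER_TOOLS_GITHUB_MIRROR_TEMPLATE or TAURI_BUNDLER_TOOLS_GITHUB_MIRROR environment variables, download_and_verify() checks the result against a SHA-1 (or, on Windows, SHA-256) hash, and extract_zip() unpacks an in-memory archive. The NSIS hunk earlier in the diff follows exactly this pattern; a sketch with placeholder values (the URL and hash below are not real):

use std::path::Path;

use crate::utils::http_utils::{download_and_verify, extract_zip, HashAlgorithm};

fn fetch_tool(tools_dir: &Path) -> crate::Result<()> {
    // GitHub URLs may be redirected to a mirror if the env vars above are set.
    let data = download_and_verify(
        "https://github.com/example/tool/releases/download/v1.0/tool.zip", // placeholder
        "da39a3ee5e6b4b0d3255bfef95601890afd80709",                        // placeholder SHA-1
        HashAlgorithm::Sha1,
    )?;
    // The archive stays in memory and is unpacked straight into tools_dir.
    extract_zip(&data, tools_dir)
}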

View File

@ -0,0 +1,141 @@
// Copyright 2016-2019 Cargo-Bundle developers <https://github.com/burtonageo/cargo-bundle>
// Copyright 2019-2024 Tauri Programme within The Commons Conservancy
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT
use std::{
ffi::OsStr,
io::{BufRead, BufReader},
path::Path,
process::{Command, ExitStatus, Output, Stdio},
sync::{Arc, Mutex},
};
pub mod fs_utils;
pub mod http_utils;
/// Returns true if the path has a filename indicating that it is a high-density
/// "retina" icon. Specifically, returns true the file stem ends with
/// "@2x" (a convention specified by the [Apple developer docs](
/// <https://developer.apple.com/library/mac/documentation/GraphicsAnimation/Conceptual/HighResolutionOSX/Optimizing/Optimizing.html>)).
#[allow(dead_code)]
pub fn is_retina(path: &Path) -> bool {
path
.file_stem()
.and_then(OsStr::to_str)
.map(|stem| stem.ends_with("@2x"))
.unwrap_or(false)
}
pub trait CommandExt {
// The `piped` function sets the stdout and stderr to properly
// show the command output in the Node.js wrapper.
fn piped(&mut self) -> std::io::Result<ExitStatus>;
fn output_ok(&mut self) -> crate::Result<Output>;
}
impl CommandExt for Command {
fn piped(&mut self) -> std::io::Result<ExitStatus> {
self.stdin(os_pipe::dup_stdin()?);
self.stdout(os_pipe::dup_stdout()?);
self.stderr(os_pipe::dup_stderr()?);
let program = self.get_program().to_string_lossy().into_owned();
log::debug!(action = "Running"; "Command `{} {}`", program, self.get_args().map(|arg| arg.to_string_lossy()).fold(String::new(), |acc, arg| format!("{acc} {arg}")));
self.status().map_err(Into::into)
}
fn output_ok(&mut self) -> crate::Result<Output> {
let program = self.get_program().to_string_lossy().into_owned();
log::debug!(action = "Running"; "Command `{} {}`", program, self.get_args().map(|arg| arg.to_string_lossy()).fold(String::new(), |acc, arg| format!("{acc} {arg}")));
self.stdout(Stdio::piped());
self.stderr(Stdio::piped());
let mut child = self.spawn()?;
let mut stdout = child.stdout.take().map(BufReader::new).unwrap();
let stdout_lines = Arc::new(Mutex::new(Vec::new()));
let stdout_lines_ = stdout_lines.clone();
std::thread::spawn(move || {
let mut line = String::new();
let mut lines = stdout_lines_.lock().unwrap();
loop {
line.clear();
match stdout.read_line(&mut line) {
Ok(0) => break,
Ok(_) => {
log::debug!(action = "stdout"; "{}", line.trim_end());
lines.extend(line.as_bytes().to_vec());
}
Err(_) => (),
}
}
});
let mut stderr = child.stderr.take().map(BufReader::new).unwrap();
let stderr_lines = Arc::new(Mutex::new(Vec::new()));
let stderr_lines_ = stderr_lines.clone();
std::thread::spawn(move || {
let mut line = String::new();
let mut lines = stderr_lines_.lock().unwrap();
loop {
line.clear();
match stderr.read_line(&mut line) {
Ok(0) => break,
Ok(_) => {
log::debug!(action = "stderr"; "{}", line.trim_end());
lines.extend(line.as_bytes().to_vec());
}
Err(_) => (),
}
}
});
let status = child.wait()?;
let output = Output {
status,
stdout: std::mem::take(&mut *stdout_lines.lock().unwrap()),
stderr: std::mem::take(&mut *stderr_lines.lock().unwrap()),
};
if output.status.success() {
Ok(output)
} else {
Err(crate::Error::GenericError(format!(
"failed to run {program}"
)))
}
}
}
#[cfg(test)]
mod tests {
use std::path::{Path, PathBuf};
use tauri_utils::resources::resource_relpath;
use super::is_retina;
#[test]
fn retina_icon_paths() {
assert!(!is_retina(Path::new("data/icons/512x512.png")));
assert!(is_retina(Path::new("data/icons/512x512@2x.png")));
}
#[test]
fn resource_relative_paths() {
assert_eq!(
resource_relpath(Path::new("./data/images/button.png")),
PathBuf::from("data/images/button.png")
);
assert_eq!(
resource_relpath(Path::new("../../images/wheel.png")),
PathBuf::from("_up_/_up_/images/wheel.png")
);
assert_eq!(
resource_relpath(Path::new("/home/ferris/crab.png")),
PathBuf::from("_root_/home/ferris/crab.png")
);
}
}
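The last new file keeps the pieces that are neither file-system nor HTTP related: is_retina for @2x icon detection and the CommandExt trait, both lifted out of bundle::common (is_retina now takes &Path directly). A sketch of invoking an external tool through it (the echo command is arbitrary):

use std::process::Command;

use crate::utils::CommandExt;

fn run_tool() -> crate::Result<()> {
    // output_ok captures stdout/stderr, logs each line at debug level, and maps
    // a non-zero exit status to crate::Error::GenericError.
    let output = Command::new("echo").arg("hello").output_ok()?;
    println!("{}", String::from_utf8_lossy(&output.stdout));
    Ok(())
}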