Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
977 changes: 0 additions & 977 deletions crates/surge-core/src/releases/delta.rs

This file was deleted.

147 changes: 147 additions & 0 deletions crates/surge-core/src/releases/delta/archive.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,147 @@
use std::io::Write;

use crate::diff::chunked::{ChunkedDiffOptions, chunked_bsdiff, chunked_bspatch};
use crate::diff::wrapper::{bsdiff_buffers, bspatch_buffers};
use crate::error::{Result, SurgeError};

pub(super) const LEGACY_ARCHIVE_BSDIFF_MAGIC: &[u8; 4] = b"ATB4";
pub(super) const LEGACY_ARCHIVE_CHUNKED_MAGIC: &[u8; 4] = b"ATC4";
pub(super) const ARCHIVE_BSDIFF_MAGIC: &[u8; 4] = b"ATB5";
pub(super) const ARCHIVE_CHUNKED_MAGIC: &[u8; 4] = b"ATC5";

const ARCHIVE_PATCH_HEADER_LEN: usize = 12;
const LEGACY_ARCHIVE_PATCH_HEADER_LEN: usize = 8;

/// Builds a whole-archive bsdiff patch between two zstd-compressed archives.
///
/// Both archives are decompressed first so the diff runs over the raw tar
/// bytes, then the patch is wrapped in the current `ATB5` header, which
/// records `compression_level` and `zstd_workers` so the patched archive can
/// be re-encoded with the same settings on apply.
pub fn build_archive_bsdiff_patch(
    older_archive: &[u8],
    newer_archive: &[u8],
    compression_level: i32,
    zstd_workers: u32,
) -> Result<Vec<u8>> {
    let old_tar = decode_archive_bytes(older_archive)?;
    let new_tar = decode_archive_bytes(newer_archive)?;
    let diff = bsdiff_buffers(&old_tar, &new_tar)?;
    let payload = encode_archive_patch_payload(*ARCHIVE_BSDIFF_MAGIC, compression_level, zstd_workers, &diff);
    Ok(payload)
}

/// Builds a whole-archive chunked-bsdiff patch between two zstd-compressed
/// archives.
///
/// Like the plain bsdiff variant, but the diff is produced by
/// `chunked_bsdiff` using the caller-supplied `opts`. The result is wrapped
/// in the current `ATC5` header carrying `compression_level` and
/// `zstd_workers` for re-encoding on apply.
pub fn build_archive_chunked_patch(
    older_archive: &[u8],
    newer_archive: &[u8],
    compression_level: i32,
    zstd_workers: u32,
    opts: &ChunkedDiffOptions,
) -> Result<Vec<u8>> {
    let old_tar = decode_archive_bytes(older_archive)?;
    let new_tar = decode_archive_bytes(newer_archive)?;
    let diff = chunked_bsdiff(&old_tar, &new_tar, opts)?;
    let payload = encode_archive_patch_payload(*ARCHIVE_CHUNKED_MAGIC, compression_level, zstd_workers, &diff);
    Ok(payload)
}

/// Applies a whole-archive bsdiff patch and returns the re-encoded archive.
///
/// The older archive is decompressed, the patch header is parsed (accepting
/// the current `ATB5` magic, the legacy `ATB4` magic, and a raw `BSDIFF40`
/// legacy payload), bspatch is applied to the tar bytes, and the result is
/// zstd-encoded again at the level/worker count recorded in the header.
pub(super) fn apply_archive_bsdiff_patch(older: &[u8], patch: &[u8]) -> Result<Vec<u8>> {
    let old_tar = decode_archive_bytes(older)?;
    let (level, workers, body) = decode_archive_patch_payload(
        patch,
        *ARCHIVE_BSDIFF_MAGIC,
        Some(*LEGACY_ARCHIVE_BSDIFF_MAGIC),
        Some(b"BSDIFF40"),
    )?;
    let new_tar = bspatch_buffers(&old_tar, body)?;
    encode_archive_bytes(&new_tar, level, workers)
}

/// Applies a whole-archive chunked-bsdiff patch and returns the re-encoded
/// archive.
///
/// Accepts the current `ATC5` magic, the legacy `ATC4` magic, and a raw
/// `CSDF` legacy payload. Default `ChunkedDiffOptions` are used on the apply
/// side; the output is zstd-encoded at the level/worker count recorded in
/// the patch header.
pub(super) fn apply_archive_chunked_patch(older: &[u8], patch: &[u8]) -> Result<Vec<u8>> {
    let old_tar = decode_archive_bytes(older)?;
    let (level, workers, body) = decode_archive_patch_payload(
        patch,
        *ARCHIVE_CHUNKED_MAGIC,
        Some(*LEGACY_ARCHIVE_CHUNKED_MAGIC),
        Some(b"CSDF"),
    )?;
    let new_tar = chunked_bspatch(&old_tar, body, &ChunkedDiffOptions::default())?;
    encode_archive_bytes(&new_tar, level, workers)
}

/// Prefixes `patch` with the archive patch header: 4-byte magic, little-endian
/// `i32` compression level, little-endian `u32` zstd worker count.
fn encode_archive_patch_payload(magic: [u8; 4], compression_level: i32, zstd_workers: u32, patch: &[u8]) -> Vec<u8> {
    let header = magic
        .into_iter()
        .chain(compression_level.to_le_bytes())
        .chain(zstd_workers.to_le_bytes());
    // One allocation: header length is ARCHIVE_PATCH_HEADER_LEN by construction.
    let mut out = Vec::with_capacity(ARCHIVE_PATCH_HEADER_LEN + patch.len());
    out.extend(header);
    out.extend_from_slice(patch);
    out
}

/// Parses an archive patch payload and returns
/// `(compression_level, zstd_workers, patch_bytes)`.
///
/// Current layout (`ARCHIVE_PATCH_HEADER_LEN` = 12 bytes): 4-byte
/// `expected_magic`, little-endian `i32` compression level, little-endian
/// `u32` zstd worker count, then the patch bytes. The legacy layout
/// (`LEGACY_ARCHIVE_PATCH_HEADER_LEN` = 8 bytes) has no worker field: it is
/// recognized by `legacy_magic` plus `legacy_payload_magic` at offset 8 and
/// reported with a worker count of 0.
///
/// # Errors
/// Returns `SurgeError::Update` when the payload is truncated, carries an
/// unknown magic, or a legacy-magic payload does not contain the expected
/// inner patch magic.
pub(super) fn decode_archive_patch_payload<'a>(
    data: &'a [u8],
    expected_magic: [u8; 4],
    legacy_magic: Option<[u8; 4]>,
    legacy_payload_magic: Option<&'static [u8]>,
) -> Result<(i32, u32, &'a [u8])> {
    if data.len() < LEGACY_ARCHIVE_PATCH_HEADER_LEN {
        return Err(SurgeError::Update("Archive delta payload is truncated".to_string()));
    }
    let matches_expected = data.starts_with(&expected_magic);
    let matches_legacy = legacy_magic.is_some_and(|magic| data.starts_with(&magic));
    if !matches_expected && !matches_legacy {
        return Err(SurgeError::Update("Archive delta payload magic is invalid".to_string()));
    }

    // Both layouts store the compression level immediately after the magic.
    let level_offset = expected_magic.len();
    let compression_level = i32::from_le_bytes(
        data[level_offset..level_offset + std::mem::size_of::<i32>()]
            .try_into()
            .map_err(|_| SurgeError::Update("Archive delta payload header is invalid".to_string()))?,
    );

    if matches_legacy {
        let legacy_payload_offset = level_offset + std::mem::size_of::<i32>();
        if legacy_payload_magic.is_some_and(|magic| data[legacy_payload_offset..].starts_with(magic)) {
            // Legacy headers predate multi-threaded encoding: report 0 workers.
            return Ok((compression_level, 0, &data[legacy_payload_offset..]));
        }
        // Fix: a legacy-magic payload whose inner patch magic does not match
        // previously fell through to the 12-byte parse below, misreading the
        // first patch bytes as the worker count and returning a payload slice
        // shifted by 4 bytes. Reject it explicitly instead.
        return Err(SurgeError::Update(
            "Legacy archive delta payload is invalid".to_string(),
        ));
    }

    if data.len() < ARCHIVE_PATCH_HEADER_LEN {
        return Err(SurgeError::Update(
            "Archive delta payload header is invalid".to_string(),
        ));
    }

    let worker_offset = level_offset + std::mem::size_of::<i32>();
    let zstd_workers = u32::from_le_bytes(
        data[worker_offset..ARCHIVE_PATCH_HEADER_LEN]
            .try_into()
            .map_err(|_| SurgeError::Update("Archive delta payload header is invalid".to_string()))?,
    );
    Ok((compression_level, zstd_workers, &data[ARCHIVE_PATCH_HEADER_LEN..]))
}

/// Decompresses a zstd-encoded archive into its raw bytes.
fn decode_archive_bytes(data: &[u8]) -> Result<Vec<u8>> {
    match zstd::decode_all(data) {
        Ok(bytes) => Ok(bytes),
        Err(e) => Err(SurgeError::Archive(format!("Failed to decode archive bytes: {e}"))),
    }
}

/// Compresses `data` with zstd at `compression_level`, using multi-threaded
/// encoding when `zstd_workers > 1`.
fn encode_archive_bytes(data: &[u8], compression_level: i32, zstd_workers: u32) -> Result<Vec<u8>> {
    // Single-threaded path: the one-shot helper is equivalent and simpler.
    if zstd_workers <= 1 {
        return zstd::encode_all(data, compression_level)
            .map_err(|e| SurgeError::Archive(format!("Failed to encode archive bytes: {e}")));
    }

    // Multi-threaded path: stream through an explicit encoder so the worker
    // count can be configured before any data is written.
    let mut encoder = zstd::Encoder::new(Vec::new(), compression_level)
        .map_err(|e| SurgeError::Archive(format!("Failed to create zstd encoder: {e}")))?;
    encoder
        .multithread(zstd_workers)
        .map_err(|e| SurgeError::Archive(format!("Failed to enable multi-threaded zstd: {e}")))?;
    encoder
        .write_all(data)
        .map_err(|e| SurgeError::Archive(format!("Failed to encode archive bytes: {e}")))?;
    encoder
        .finish()
        .map_err(|e| SurgeError::Archive(format!("Failed to finalize zstd encoder: {e}")))
}
69 changes: 69 additions & 0 deletions crates/surge-core/src/releases/delta/format.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
use crate::diff::chunked::has_magic_prefix;
use crate::releases::manifest::{
COMPRESSION_ZSTD, DIFF_ALGORITHM_BSDIFF, DIFF_ALGORITHM_FILE_OPS, DeltaArtifact, PATCH_FORMAT_BSDIFF4,
PATCH_FORMAT_BSDIFF4_ARCHIVE_V3, PATCH_FORMAT_CHUNKED_BSDIFF_ARCHIVE_V3, PATCH_FORMAT_CHUNKED_BSDIFF_V1,
PATCH_FORMAT_SPARSE_FILE_OPS_V1,
};

use super::archive::{
ARCHIVE_BSDIFF_MAGIC, ARCHIVE_CHUNKED_MAGIC, LEGACY_ARCHIVE_BSDIFF_MAGIC, LEGACY_ARCHIVE_CHUNKED_MAGIC,
};
use super::sparse_ops::SPARSE_FILE_OPS_MAGIC;

/// Trims `value` and returns it, falling back to `default` when the trimmed
/// string is empty.
pub(super) fn normalized_or_default<'a>(value: &'a str, default: &'a str) -> &'a str {
    match value.trim() {
        "" => default,
        trimmed => trimmed,
    }
}

/// Returns `true` when `data` begins with an archive-bsdiff magic — either
/// the current `ATB5` or the legacy `ATB4`.
#[must_use]
pub fn has_archive_bsdiff_magic_prefix(data: &[u8]) -> bool {
    [ARCHIVE_BSDIFF_MAGIC, LEGACY_ARCHIVE_BSDIFF_MAGIC]
        .into_iter()
        .any(|magic| data.starts_with(magic))
}

/// Returns `true` when `data` begins with an archive-chunked magic — either
/// the current `ATC5` or the legacy `ATC4`.
#[must_use]
pub fn has_archive_chunked_magic_prefix(data: &[u8]) -> bool {
    [ARCHIVE_CHUNKED_MAGIC, LEGACY_ARCHIVE_CHUNKED_MAGIC]
        .into_iter()
        .any(|magic| data.starts_with(magic))
}

/// Returns `true` when `data` begins with the sparse file-ops payload magic.
#[must_use]
pub fn has_sparse_file_ops_magic_prefix(data: &[u8]) -> bool {
    data.starts_with(SPARSE_FILE_OPS_MAGIC)
}

/// Identifies the patch format from a payload's magic prefix.
///
/// Returns `None` when no known magic matches. Probe order is significant and
/// preserved: the more specific container formats (sparse file-ops, archive
/// chunked, archive bsdiff) are tested before the generic chunked-bsdiff
/// magic.
#[must_use]
pub fn patch_format_from_magic_prefix(data: &[u8]) -> Option<&'static str> {
    if has_sparse_file_ops_magic_prefix(data) {
        Some(PATCH_FORMAT_SPARSE_FILE_OPS_V1)
    } else if has_archive_chunked_magic_prefix(data) {
        Some(PATCH_FORMAT_CHUNKED_BSDIFF_ARCHIVE_V3)
    } else if has_archive_bsdiff_magic_prefix(data) {
        Some(PATCH_FORMAT_BSDIFF4_ARCHIVE_V3)
    } else if has_magic_prefix(data) {
        Some(PATCH_FORMAT_CHUNKED_BSDIFF_V1)
    } else {
        None
    }
}

/// Reports whether this build can apply the given delta artifact.
///
/// All comparisons are ASCII-case-insensitive and blank fields fall back to
/// defaults (`bsdiff4` patch format, `zstd` compression, `bsdiff` algorithm).
/// Sparse file-ops deltas accept an empty or `file-ops` algorithm; every
/// other supported format requires the bsdiff algorithm.
#[must_use]
pub fn is_supported_delta(delta: &DeltaArtifact) -> bool {
    let patch_format = normalized_or_default(&delta.patch_format, PATCH_FORMAT_BSDIFF4);
    let compression = normalized_or_default(&delta.compression, COMPRESSION_ZSTD);

    // zstd is the only supported compression for every format.
    if !compression.eq_ignore_ascii_case(COMPRESSION_ZSTD) {
        return false;
    }

    if patch_format.eq_ignore_ascii_case(PATCH_FORMAT_SPARSE_FILE_OPS_V1) {
        let algorithm = delta.algorithm.trim();
        return algorithm.is_empty() || algorithm.eq_ignore_ascii_case(DIFF_ALGORITHM_FILE_OPS);
    }

    let algorithm = normalized_or_default(&delta.algorithm, DIFF_ALGORITHM_BSDIFF);
    let format_supported = [
        PATCH_FORMAT_BSDIFF4,
        PATCH_FORMAT_CHUNKED_BSDIFF_V1,
        PATCH_FORMAT_BSDIFF4_ARCHIVE_V3,
        PATCH_FORMAT_CHUNKED_BSDIFF_ARCHIVE_V3,
    ]
    .iter()
    .any(|format| patch_format.eq_ignore_ascii_case(format));

    algorithm.eq_ignore_ascii_case(DIFF_ALGORITHM_BSDIFF) && format_supported
}
163 changes: 163 additions & 0 deletions crates/surge-core/src/releases/delta/fs_apply.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,163 @@
use std::fs;
use std::path::{Component, Path, PathBuf};

use crate::crypto::sha256::sha256_hex_file;
use crate::diff::chunked::chunked_bspatch_file;
use crate::error::{Result, SurgeError};

use super::sparse_ops::SparseFileOp;

/// Applies a sequence of sparse file operations beneath `root`.
///
/// `payloads` is a single concatenated byte buffer; `WriteFile` and
/// `PatchFile` ops address into it via `payload_offset`/`payload_len`.
/// Operations are applied strictly in order, so the producer must emit
/// prerequisites (e.g. an `EnsureDir`) before ops that depend on them.
/// Fails fast on the first error, leaving earlier operations applied.
pub(super) fn apply_sparse_file_ops(root: &Path, ops: &[SparseFileOp], payloads: &[u8]) -> Result<()> {
    for op in ops {
        match op {
            SparseFileOp::Delete { path } => {
                // Idempotent: a missing target is not an error.
                let target = resolve_relative_path(root, path)?;
                remove_path_if_exists(&target)?;
            }
            SparseFileOp::EnsureDir { path, mode } => {
                let target = resolve_relative_path(root, path)?;
                fs::create_dir_all(&target)?;
                set_mode(&target, *mode)?;
            }
            SparseFileOp::SetMode { path, mode } => {
                let target = resolve_relative_path(root, path)?;
                set_mode(&target, *mode)?;
            }
            SparseFileOp::WriteFile {
                path,
                mode,
                payload_offset,
                payload_len,
                sha256,
            } => {
                // Replace whatever currently occupies the path (file, dir, or
                // symlink) with the payload bytes, then verify by hash.
                let target = resolve_relative_path(root, path)?;
                let payload = payload_slice(payloads, *payload_offset, *payload_len)?;
                remove_path_if_exists(&target)?;
                if let Some(parent) = target.parent() {
                    fs::create_dir_all(parent)?;
                }
                fs::write(&target, payload)?;
                set_mode(&target, *mode)?;
                verify_file_sha256(&target, sha256)?;
            }
            SparseFileOp::PatchFile {
                path,
                mode,
                payload_offset,
                payload_len,
                basis_sha256,
                sha256,
            } => {
                let target = resolve_relative_path(root, path)?;
                // The on-disk basis must match the hash the patch was built
                // against; otherwise the patched output would be garbage.
                verify_file_sha256(&target, basis_sha256)?;
                let patch_bytes = payload_slice(payloads, *payload_offset, *payload_len)?;
                // Patch into a hidden sibling temp file (clearing any stale
                // leftover first), then swap it into place and verify.
                let temp_path = patched_temp_path(&target);
                if temp_path.exists() {
                    fs::remove_file(&temp_path)?;
                }
                chunked_bspatch_file(&target, patch_bytes, &temp_path)?;
                fs::remove_file(&target)?;
                fs::rename(&temp_path, &target)?;
                set_mode(&target, *mode)?;
                verify_file_sha256(&target, sha256)?;
            }
            SparseFileOp::WriteSymlink { path, target } => {
                // Symlink target strings are written verbatim; only the link
                // path itself is resolved against (and confined to) root.
                let link_path = resolve_relative_path(root, path)?;
                remove_path_if_exists(&link_path)?;
                if let Some(parent) = link_path.parent() {
                    fs::create_dir_all(parent)?;
                }
                create_symlink(target, &link_path)?;
            }
        }
    }
    Ok(())
}

/// Joins `relative` onto `root`, rejecting any path component that is not a
/// plain segment (so `..`, absolute prefixes, and root/current-dir markers
/// cannot escape `root`).
fn resolve_relative_path(root: &Path, relative: &str) -> Result<PathBuf> {
    let mut resolved = root.to_path_buf();
    for component in Path::new(relative).components() {
        let Component::Normal(segment) = component else {
            return Err(SurgeError::Update(format!("Invalid sparse delta path '{relative}'")));
        };
        resolved.push(segment);
    }
    Ok(resolved)
}

/// Returns the sub-slice of `payloads` at `[offset, offset + len)`, with
/// checked conversion to `usize` and overflow/bounds validation.
fn payload_slice(payloads: &[u8], offset: u64, len: u64) -> Result<&[u8]> {
    let Ok(start) = usize::try_from(offset) else {
        return Err(SurgeError::Update("Sparse delta payload offset exceeds platform limits".to_string()));
    };
    let Ok(count) = usize::try_from(len) else {
        return Err(SurgeError::Update("Sparse delta payload length exceeds platform limits".to_string()));
    };
    let Some(end) = start.checked_add(count) else {
        return Err(SurgeError::Update("Sparse delta payload range overflows".to_string()));
    };
    // `get` performs the bounds check so an out-of-range request is an error,
    // never a panic.
    match payloads.get(start..end) {
        Some(slice) => Ok(slice),
        None => Err(SurgeError::Update("Sparse delta payload range is invalid".to_string())),
    }
}

/// Verifies that the file at `path` hashes to `expected_sha256`.
///
/// A blank (empty after trim) expectation means "no hash recorded" and is
/// accepted without reading the file.
fn verify_file_sha256(path: &Path, expected_sha256: &str) -> Result<()> {
    let expected = expected_sha256.trim();
    if expected.is_empty() {
        return Ok(());
    }
    let actual = sha256_hex_file(path)?;
    if actual == expected {
        Ok(())
    } else {
        Err(SurgeError::Update(format!(
            "Sparse delta file hash mismatch for '{}': expected {expected}, got {actual}",
            path.display()
        )))
    }
}

/// Removes whatever exists at `path` (file, directory tree, or symlink);
/// succeeds silently when nothing is there.
fn remove_path_if_exists(path: &Path) -> Result<()> {
    // symlink_metadata does not follow links, so a symlink is removed itself
    // (as a file) rather than its target.
    if let Ok(metadata) = fs::symlink_metadata(path) {
        if metadata.file_type().is_dir() {
            fs::remove_dir_all(path)?;
        } else {
            fs::remove_file(path)?;
        }
    }
    Ok(())
}

/// Builds the hidden sibling temp path used while patching `target`:
/// `.<file_name>.surge-patch` in the same directory ("patched" when the
/// target has no file name).
fn patched_temp_path(target: &Path) -> PathBuf {
    let stem = match target.file_name() {
        Some(name) => name.to_string_lossy().into_owned(),
        None => "patched".to_string(),
    };
    target.with_file_name(format!(".{stem}.surge-patch"))
}

/// Sets the full Unix permission bits on `path` to `mode`.
///
/// # Errors
/// Propagates the I/O error when the path does not exist or permissions
/// cannot be changed.
#[cfg(unix)]
fn set_mode(path: &Path, mode: u32) -> Result<()> {
    use std::os::unix::fs::PermissionsExt;

    // Build the permissions directly from `mode` instead of stat-ing the file
    // first: `set_mode` overwrote every permission bit anyway, so the prior
    // metadata read only added a syscall and a read/modify race window.
    fs::set_permissions(path, fs::Permissions::from_mode(mode))?;
    Ok(())
}

/// No-op on non-Unix targets: there are no POSIX permission bits to apply.
#[cfg(not(unix))]
fn set_mode(_path: &Path, _mode: u32) -> Result<()> {
    Ok(())
}

/// Creates a symbolic link at `link_path` pointing at `target` (Unix).
/// The target string is used verbatim and is not resolved or validated here.
#[cfg(unix)]
fn create_symlink(target: &str, link_path: &Path) -> Result<()> {
    std::os::unix::fs::symlink(target, link_path)?;
    Ok(())
}

/// Creates a symbolic link at `link_path` pointing at `target` (Windows).
///
/// NOTE(review): `symlink_file` creates a *file* symlink; if `target` can
/// ever be a directory this produces a link Windows treats as broken —
/// confirm the sparse-ops producer never emits directory symlinks. Symlink
/// creation on Windows may also require elevated privileges or Developer
/// Mode, in which case this returns a permission error.
#[cfg(windows)]
fn create_symlink(target: &str, link_path: &Path) -> Result<()> {
    std::os::windows::fs::symlink_file(target, link_path)?;
    Ok(())
}
Loading