feat(08-01): add directory support to pack/unpack/inspect

- Implement collect_entries() with recursive directory traversal (DFS preorder)
- pack() handles mixed file and directory arguments with relative paths
- Directory entries stored with entry_type=1, zero-length crypto fields
- unpack() creates directory hierarchy and restores Unix mode bits
- inspect() displays entry type (dir/file) and octal permissions
- Update cli.rs doc comments for directory support

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
NikitolProject
2026-02-26 21:47:15 +03:00
parent 4e25d19ff5
commit 7820c18622
2 changed files with 286 additions and 114 deletions


@@ -3,6 +3,7 @@ use std::io::{Read, Seek, SeekFrom, Write};
use std::path::{Path, PathBuf};
use rand::Rng;
use std::os::unix::fs::PermissionsExt;
use crate::compression;
use crate::crypto;
@@ -52,84 +53,211 @@ fn read_archive_metadata(file: &mut fs::File) -> anyhow::Result<(Header, Vec<Toc
Ok((header, entries))
}
/// Get Unix permission bits (lower 12 bits of mode_t) for a path.
fn get_permissions(path: &Path) -> anyhow::Result<u16> {
let metadata = fs::metadata(path)?;
Ok((metadata.permissions().mode() & 0o7777) as u16)
}
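// Illustrative note (not part of this commit): the 0o7777 mask keeps the
// permission, setuid/setgid, and sticky bits and strips the file-type bits of
// st_mode. For example, a regular file with mode 0o100644 is stored as 0o644,
// and a setgid directory with mode 0o042775 is stored as 0o2775.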
/// Process a single file through the crypto pipeline, returning a ProcessedFile.
fn process_file(
file_path: &Path,
name: String,
permissions: u16,
no_compress: &[String],
rng: &mut impl Rng,
) -> anyhow::Result<ProcessedFile> {
let data = fs::read(file_path)?;
// Validate file size <= u32::MAX
anyhow::ensure!(
data.len() <= u32::MAX as usize,
"File too large: {} ({} bytes exceeds 4 GB limit)",
file_path.display(),
data.len()
);
// Step 1: SHA-256 of original data
let sha256 = crypto::sha256_hash(&data);
// Step 2: Determine compression and compress if needed
let should_compress = compression::should_compress(&name, no_compress);
let (compressed_data, compression_flag) = if should_compress {
let compressed = compression::compress(&data)?;
(compressed, 1u8)
} else {
(data.clone(), 0u8)
};
let original_size = data.len() as u32;
let compressed_size = compressed_data.len() as u32;
// Step 3: Generate random IV
let iv = crypto::generate_iv();
// Step 4: Encrypt
let ciphertext = crypto::encrypt_data(&compressed_data, &KEY, &iv);
let encrypted_size = ciphertext.len() as u32;
// Step 5: Compute HMAC over IV || ciphertext
let hmac = crypto::compute_hmac(&KEY, &iv, &ciphertext);
// Step 6: Generate decoy padding (FORMAT.md Section 9.3)
let padding_after: u16 = rng.random_range(64..=4096);
let mut padding_bytes = vec![0u8; padding_after as usize];
rand::Fill::fill(&mut padding_bytes[..], rng);
Ok(ProcessedFile {
name,
entry_type: 0,
permissions,
original_size,
compressed_size,
encrypted_size,
iv,
hmac,
sha256,
compression_flag,
ciphertext,
padding_after,
padding_bytes,
})
}
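// A minimal sketch (not part of this commit) of how process_file could be
// exercised from a test module in the same file; the temp-file name and
// contents are hypothetical.
#[cfg(test)]
mod process_file_sketch {
    use super::*;

    #[test]
    fn file_entry_shape() -> anyhow::Result<()> {
        let path = std::env::temp_dir().join("pf_sketch.txt");
        std::fs::write(&path, b"hello world")?;
        let mut rng = rand::rng();
        let pf = process_file(&path, "pf_sketch.txt".into(), 0o644, &[], &mut rng)?;
        assert_eq!(pf.entry_type, 0);
        assert_eq!(pf.original_size, 11);
        assert!(pf.padding_after >= 64 && pf.padding_after <= 4096);
        std::fs::remove_file(&path)?;
        Ok(())
    }
}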
/// Create a ProcessedFile for a directory entry (no data block).
fn make_directory_entry(name: String, permissions: u16) -> ProcessedFile {
ProcessedFile {
name,
entry_type: 1,
permissions,
original_size: 0,
compressed_size: 0,
encrypted_size: 0,
iv: [0u8; 16],
hmac: [0u8; 32],
sha256: [0u8; 32],
compression_flag: 0,
ciphertext: Vec::new(),
padding_after: 0,
padding_bytes: Vec::new(),
}
}
/// Recursively collect all entries (directories and files) from a directory path.
///
/// Entries are emitted in parent-before-child order (DFS preorder).
/// The base_name is the top-level directory name used as prefix for all relative paths.
fn collect_directory_entries(
dir_path: &Path,
base_name: &str,
no_compress: &[String],
rng: &mut impl Rng,
) -> anyhow::Result<Vec<ProcessedFile>> {
let mut entries = Vec::new();
// Add the directory itself first (parent-before-child)
let dir_perms = get_permissions(dir_path)?;
entries.push(make_directory_entry(base_name.to_string(), dir_perms));
// Collect children sorted by name for deterministic ordering
let mut children: Vec<fs::DirEntry> = fs::read_dir(dir_path)?
.collect::<Result<Vec<_>, _>>()?;
children.sort_by_key(|e| e.file_name());
for child in children {
let child_path = child.path();
let child_name = format!(
"{}/{}",
base_name,
child.file_name().to_str()
.ok_or_else(|| anyhow::anyhow!("Non-UTF-8 filename: {}", child_path.display()))?
);
if child_path.is_dir() {
// Recurse into subdirectory
let sub_entries = collect_directory_entries(
&child_path,
&child_name,
no_compress,
rng,
)?;
entries.extend(sub_entries);
} else {
// Process file
let file_perms = get_permissions(&child_path)?;
let pf = process_file(&child_path, child_name, file_perms, no_compress, rng)?;
entries.push(pf);
}
}
Ok(entries)
}
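// Illustrative only (not part of the diff): for a hypothetical input directory
// `docs/` containing `a.txt` and a subdirectory `sub/` holding `b.txt`, the
// entries are emitted in this parent-before-child order, sorted by name at
// each level:
//   docs            (dir)
//   docs/a.txt      (file)
//   docs/sub        (dir)
//   docs/sub/b.txt  (file)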
/// Collect all entries from input paths (files and directories).
///
/// For files: each is processed through the crypto pipeline, named by filename only.
/// For directories: recursively collects all children with relative paths.
fn collect_entries(
inputs: &[PathBuf],
no_compress: &[String],
rng: &mut impl Rng,
) -> anyhow::Result<Vec<ProcessedFile>> {
let mut processed = Vec::new();
for input_path in inputs {
if input_path.is_dir() {
// Get the directory's own name for the archive prefix
let dir_name = input_path
.file_name()
.ok_or_else(|| anyhow::anyhow!("Invalid directory path: {}", input_path.display()))?
.to_str()
.ok_or_else(|| anyhow::anyhow!("Non-UTF-8 directory name: {}", input_path.display()))?
.to_string();
let dir_entries = collect_directory_entries(
input_path,
&dir_name,
no_compress,
rng,
)?;
processed.extend(dir_entries);
} else {
// Single file: use just the filename
let name = input_path
.file_name()
.ok_or_else(|| anyhow::anyhow!("Invalid file path: {}", input_path.display()))?
.to_str()
.ok_or_else(|| anyhow::anyhow!("Non-UTF-8 filename: {}", input_path.display()))?
.to_string();
let file_perms = get_permissions(input_path)?;
let pf = process_file(input_path, name, file_perms, no_compress, rng)?;
processed.push(pf);
}
}
Ok(processed)
}
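// Sketch (not part of the commit): with hypothetical inputs ["notes.txt", "docs"],
// collect_entries yields "notes.txt" as a bare-filename entry followed by the
// "docs" directory tree with "docs/"-prefixed relative paths, in input order.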
/// Pack files and directories into an encrypted archive.
///
/// Two-pass algorithm with full obfuscation:
/// Pass 1: Read, hash, compress, encrypt each file; generate decoy padding.
///         Directories are stored as zero-length entries.
/// Pass 2: Encrypt TOC, compute offsets, XOR header, write archive.
pub fn pack(files: &[PathBuf], output: &Path, no_compress: &[String]) -> anyhow::Result<()> {
anyhow::ensure!(!files.is_empty(), "No input files specified");
let mut rng = rand::rng();
// --- Pass 1: Collect and process all entries ---
let processed = collect_entries(files, no_compress, &mut rng)?;
anyhow::ensure!(!processed.is_empty(), "No entries to archive");
// Count files and directories
let file_count = processed.iter().filter(|pf| pf.entry_type == 0).count();
let dir_count = processed.iter().filter(|pf| pf.entry_type == 1).count();
// --- Pass 2: Compute offsets and write archive ---
@@ -171,12 +299,18 @@ pub fn pack(files: &[PathBuf], output: &Path, no_compress: &[String]) -> anyhow:
let toc_offset = HEADER_SIZE;
// Compute data offsets (accounting for encrypted TOC size and padding).
// Directory entries are skipped (no data block).
let data_block_start = toc_offset + encrypted_toc_size;
let mut data_offsets: Vec<u32> = Vec::with_capacity(processed.len());
let mut current_offset = data_block_start;
for pf in &processed {
if pf.entry_type == 1 {
// Directory: no data block, offset is 0
data_offsets.push(0);
} else {
data_offsets.push(current_offset);
current_offset += pf.encrypted_size + pf.padding_after as u32;
}
}
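// Worked example (illustrative values, not from the diff): with
// data_block_start = 4096 and entries [dir, file A (encrypted 1000 B, padding
// 100 B), file B (encrypted 2000 B, padding 64 B)], data_offsets becomes
// [0, 4096, 5196] and current_offset ends at 7260.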
// Now re-serialize TOC with correct data_offsets
@@ -233,16 +367,21 @@ pub fn pack(files: &[PathBuf], output: &Path, no_compress: &[String]) -> anyhow:
// Write encrypted TOC
out_file.write_all(&final_encrypted_toc)?;
// Write data blocks with interleaved decoy padding (skip directory entries)
for pf in &processed {
if pf.entry_type == 1 {
continue; // directories have no data block
}
out_file.write_all(&pf.ciphertext)?;
out_file.write_all(&pf.padding_bytes)?;
}
let total_bytes = current_offset;
println!(
"Packed {} entries ({} files, {} directories) into {} ({} bytes)",
processed.len(),
file_count,
dir_count,
output.display(),
total_bytes
);
@@ -269,41 +408,49 @@ pub fn inspect(archive: &Path) -> anyhow::Result<()> {
println!("Archive: {}", filename); println!("Archive: {}", filename);
println!("Version: {}", header.version); println!("Version: {}", header.version);
println!("Flags: 0x{:02X}", header.flags); println!("Flags: 0x{:02X}", header.flags);
println!("Files: {}", header.file_count); println!("Entries: {}", header.file_count);
println!("TOC offset: {}", header.toc_offset); println!("TOC offset: {}", header.toc_offset);
println!("TOC size: {}", header.toc_size); println!("TOC size: {}", header.toc_size);
println!(); println!();
// Print each file entry // Print each entry
let mut total_original: u64 = 0; let mut total_original: u64 = 0;
for (i, entry) in entries.iter().enumerate() { for (i, entry) in entries.iter().enumerate() {
let compression_str = if entry.compression_flag == 1 { let type_str = if entry.entry_type == 1 { "dir" } else { "file" };
"yes" let perms_str = format!("{:04o}", entry.permissions);
} else {
"no"
};
println!("[{}] {}", i, entry.name); println!("[{}] {} ({}, {})", i, entry.name, type_str, perms_str);
println!(" Original: {} bytes", entry.original_size); println!(" Permissions: {}", perms_str);
println!(" Compressed: {} bytes", entry.compressed_size);
println!(" Encrypted: {} bytes", entry.encrypted_size);
println!(" Offset: {}", entry.data_offset);
println!(" Compression: {}", compression_str);
println!(" Padding after: {} bytes", entry.padding_after);
println!(
" IV: {}",
entry.iv.iter().map(|b| format!("{:02x}", b)).collect::<String>()
);
println!(
" HMAC: {}",
entry.hmac.iter().map(|b| format!("{:02x}", b)).collect::<String>()
);
println!(
" SHA-256: {}",
entry.sha256.iter().map(|b| format!("{:02x}", b)).collect::<String>()
);
total_original += entry.original_size as u64; if entry.entry_type == 0 {
// File entry: show size and crypto details
let compression_str = if entry.compression_flag == 1 {
"yes"
} else {
"no"
};
println!(" Original: {} bytes", entry.original_size);
println!(" Compressed: {} bytes", entry.compressed_size);
println!(" Encrypted: {} bytes", entry.encrypted_size);
println!(" Offset: {}", entry.data_offset);
println!(" Compression: {}", compression_str);
println!(" Padding after: {} bytes", entry.padding_after);
println!(
" IV: {}",
entry.iv.iter().map(|b| format!("{:02x}", b)).collect::<String>()
);
println!(
" HMAC: {}",
entry.hmac.iter().map(|b| format!("{:02x}", b)).collect::<String>()
);
println!(
" SHA-256: {}",
entry.sha256.iter().map(|b| format!("{:02x}", b)).collect::<String>()
);
total_original += entry.original_size as u64;
}
} }
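// Roughly what the new listing looks like (illustrative entry names and values,
// not taken from the diff):
//   [0] docs (dir, 0755)
//     Permissions: 0755
//   [1] docs/a.txt (file, 0644)
//     Permissions: 0644
//     Original: 11 bytes
//     ...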
println!();
@@ -312,12 +459,15 @@ pub fn inspect(archive: &Path) -> anyhow::Result<()> {
Ok(())
}
/// Unpack an encrypted archive, extracting all files and directories with
/// HMAC and SHA-256 verification, and Unix permission restoration.
///
/// Follows FORMAT.md Section 10 decode order:
/// 1. Read header with XOR bootstrapping
/// 2. Read and decrypt TOC entries
/// 3. For each entry:
///    - Directory: create directory, set permissions
///    - File: seek to data_offset, verify HMAC, decrypt, decompress, verify SHA-256, write, set permissions
pub fn unpack(archive: &Path, output_dir: &Path) -> anyhow::Result<()> {
let mut file = fs::File::open(archive)?;
@@ -327,7 +477,7 @@ pub fn unpack(archive: &Path, output_dir: &Path) -> anyhow::Result<()> {
// Create output directory
fs::create_dir_all(output_dir)?;
let entry_count = entries.len();
let mut error_count: usize = 0;
let mut success_count: usize = 0;
@@ -335,13 +485,34 @@ pub fn unpack(archive: &Path, output_dir: &Path) -> anyhow::Result<()> {
// Sanitize filename: reject directory traversal
if entry.name.starts_with('/') || entry.name.contains("..") {
eprintln!(
"Skipping entry with unsafe name: {} (directory traversal attempt)",
entry.name
);
error_count += 1;
continue;
}
let output_path = output_dir.join(&entry.name);
if entry.entry_type == 1 {
// Directory entry: create and set permissions
fs::create_dir_all(&output_path)?;
fs::set_permissions(
&output_path,
fs::Permissions::from_mode(entry.permissions as u32),
)?;
println!("Created directory: {}", entry.name);
success_count += 1;
continue;
}
// File entry: extract with full verification pipeline
// Create parent directories if name contains path separators
if let Some(parent) = output_path.parent() {
fs::create_dir_all(parent)?;
}
// Seek to data_offset and read ciphertext
file.seek(SeekFrom::Start(entry.data_offset as u64))?;
let mut ciphertext = vec![0u8; entry.encrypted_size as usize];
@@ -389,25 +560,26 @@ pub fn unpack(archive: &Path, output_dir: &Path) -> anyhow::Result<()> {
// Still write the file per spec
}
// Step 5: Write file
fs::write(&output_path, &decompressed)?;
// Step 6: Set file permissions
fs::set_permissions(
&output_path,
fs::Permissions::from_mode(entry.permissions as u32),
)?;
println!("Extracted: {} ({} bytes)", entry.name, entry.original_size);
success_count += 1;
}
println!(
"Extracted {}/{} entries",
success_count, entry_count
);
if error_count > 0 {
anyhow::bail!("{} entry(ies) had verification errors", error_count);
}
Ok(()) Ok(())
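// Round-trip sketch (not part of this commit), with hypothetical paths; both
// calls use the public signatures above:
//
//     pack(&[PathBuf::from("docs")], Path::new("docs.arc"), &[])?;
//     unpack(Path::new("docs.arc"), Path::new("restored"))?;
//
// Afterwards, restored/docs mirrors the original tree, including Unix mode bits.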


@@ -11,9 +11,9 @@ pub struct Cli {
#[derive(Subcommand)]
pub enum Commands {
/// Pack files and directories into an encrypted archive
Pack {
/// Input files and directories to archive
#[arg(required = true)]
files: Vec<PathBuf>,
/// Output archive file