-
Notifications
You must be signed in to change notification settings - Fork 62
pipelined extraction #236
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Open
cosmicexplorer
wants to merge
33
commits into
zip-rs:master
Choose a base branch
from
cosmicexplorer:pipelined-extract-v2
base: master
Could not load branches
Branch not found: {{ refName }}
Loading
Could not load tags
Nothing to show
Loading
Are you sure you want to change the base?
Some commits from the old base branch may be removed from the timeline,
and old review comments may become outdated.
Open
pipelined extraction #236
Changes from all commits
Commits
Show all changes
33 commits
Select commit
Hold shift + click to select a range
2c02588
pipelined extraction
cosmicexplorer a1734b1
bzip2 support needed for benchmark test
cosmicexplorer 3f0a887
more review comments
cosmicexplorer ec63cdd
fix merge errors
cosmicexplorer c311784
correctly handle backslashes in entry names (i.e. don't)
cosmicexplorer 8efb409
make PathSplitError avoid consing a String until necessary
cosmicexplorer 3c19c28
add repro_old423 test for pipelining
cosmicexplorer 6ccecd0
silence dead code warnings for windows
cosmicexplorer 7332eb6
fix ci error
cosmicexplorer c9dd876
avoid erroring for top-level directory entries
cosmicexplorer 17d611e
use num_cpus by default for parallelism
cosmicexplorer 5bd512e
we spawn three threads per chunk
cosmicexplorer 8dd83d3
add dynamically-generated test archive
cosmicexplorer 7ea1b85
remove some lint ignores
cosmicexplorer 6f9d3d6
add back default features for displaydoc and update version
cosmicexplorer f4c43cb
add FIXME for follow-up work to support absolute paths
cosmicexplorer 1ae44f0
box each level of the b-tree together with its values
cosmicexplorer c92d018
impl From<DirEntry<...>> for FSEntry
cosmicexplorer d6da333
make some tests return Result
cosmicexplorer 41381b1
simplify some btreemap creation
cosmicexplorer 4fc051a
move handle_creation module to a separate file
cosmicexplorer 730f18f
downgrade HandleCreationError to io::Error
cosmicexplorer 2f8e5b4
use ByAddress over ZipDataHandle
cosmicexplorer 0378ab9
fix lint error
cosmicexplorer 295c7f0
make an assert into a debug assert
cosmicexplorer 436570f
remove extraneous error case
cosmicexplorer a22c53b
replace unsafe transmutes with Pod methods
cosmicexplorer 24c4425
add dead code ignore
cosmicexplorer 11321bd
use if let for matching
cosmicexplorer 06f7b89
fix dead code ignores
cosmicexplorer d6bbc9c
add note about shared future dependency task DAG
cosmicexplorer 7bbe4ae
Merge branch 'master' into pipelined-extract-v2
Pr0methean 1369bfe
Merge branch 'master' into pipelined-extract-v2
Pr0methean File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,149 @@ | ||
use bencher::{benchmark_group, benchmark_main}; | ||
|
||
use bencher::Bencher; | ||
use lazy_static::lazy_static; | ||
use tempdir::TempDir; | ||
use tempfile::tempfile; | ||
|
||
use std::fs; | ||
use std::path::Path; | ||
use std::sync::{Arc, Mutex}; | ||
|
||
use zip::result::ZipResult; | ||
use zip::write::ZipWriter; | ||
use zip::ZipArchive; | ||
|
||
#[cfg(all(feature = "parallelism", unix))] | ||
use zip::read::{split_extract, ExtractionParameters}; | ||
|
||
/* This archive has a set of entries repeated 20x: | ||
* - 200K random data, stored uncompressed (CompressionMethod::Stored) | ||
* - 246K text data (the project gutenberg html version of king lear) | ||
* (CompressionMethod::Bzip2, compression level 1) (project gutenberg ebooks are public domain) | ||
* | ||
* The full archive file is 5.3MB. | ||
*/ | ||
fn static_test_archive() -> ZipResult<ZipArchive<fs::File>> { | ||
assert!( | ||
cfg!(feature = "bzip2"), | ||
"this test archive requires bzip2 support" | ||
); | ||
let path = | ||
Path::new(env!("CARGO_MANIFEST_DIR")).join("tests/data/stored-and-compressed-text.zip"); | ||
let file = fs::File::open(path)?; | ||
ZipArchive::new(file) | ||
} | ||
|
||
lazy_static! {
    // Lazily-opened shared handle onto the checked-in 5.3MB test archive.
    // Wrapped in Arc<Mutex<..>> so the several benchmark functions can share
    // one archive instance while still obtaining mutable access to it.
    // Panics on first access if the fixture file is missing or malformed.
    static ref STATIC_TEST_ARCHIVE: Arc<Mutex<ZipArchive<fs::File>>> = {
        let archive = static_test_archive().unwrap();
        Arc::new(Mutex::new(archive))
    };
}
|
||
/* This archive is generated dynamically, in order to scale with the number of reported CPUs. | ||
* - We want at least 768 files (4 per VCPU on EC2 *.48xlarge instances) to run in CI. | ||
* - We want to retain the interspersed random/text entries from static_test_archive(). | ||
* | ||
* We will copy over entries from the static archive repeatedly until we reach the desired file | ||
* count. | ||
*/ | ||
fn dynamic_test_archive(src_archive: &mut ZipArchive<fs::File>) -> ZipResult<ZipArchive<fs::File>> { | ||
let desired_num_entries: usize = num_cpus::get() * 4; | ||
let mut output_archive = ZipWriter::new(tempfile()?); | ||
|
||
for (src_index, output_index) in (0..src_archive.len()).cycle().zip(0..desired_num_entries) { | ||
let src_file = src_archive.by_index_raw(src_index)?; | ||
let output_name = if src_file.name().starts_with("random-") { | ||
format!("random-{output_index}.dat") | ||
} else { | ||
assert!(src_file.name().starts_with("text-")); | ||
format!("text-{output_index}.dat") | ||
}; | ||
output_archive.raw_copy_file_rename(src_file, output_name)?; | ||
} | ||
|
||
output_archive.finish_into_readable() | ||
} | ||
|
||
lazy_static! {
    // CPU-count-scaled archive built once, on first access, by copying entries
    // out of STATIC_TEST_ARCHIVE (see dynamic_test_archive()). Shares the same
    // Arc<Mutex<..>> shape so benchmarks can treat both archives uniformly.
    static ref DYNAMIC_TEST_ARCHIVE: Arc<Mutex<ZipArchive<fs::File>>> = {
        let mut src = STATIC_TEST_ARCHIVE.lock().unwrap();
        let archive = dynamic_test_archive(&mut src).unwrap();
        Arc::new(Mutex::new(archive))
    };
}
|
||
fn do_extract_basic(bench: &mut Bencher, archive: &mut ZipArchive<fs::File>) { | ||
let total_size: u64 = archive.decompressed_size().unwrap().try_into().unwrap(); | ||
|
||
let parent = TempDir::new("zip-extract").unwrap(); | ||
|
||
bench.bytes = total_size; | ||
bench.bench_n(1, |bench| { | ||
bench.iter(move || { | ||
let outdir = TempDir::new_in(parent.path(), "bench-subdir") | ||
.unwrap() | ||
.into_path(); | ||
archive.extract(outdir).unwrap(); | ||
}); | ||
}); | ||
} | ||
|
||
fn extract_basic_static(bench: &mut Bencher) { | ||
let mut archive = STATIC_TEST_ARCHIVE.lock().unwrap(); | ||
do_extract_basic(bench, &mut archive); | ||
} | ||
|
||
fn extract_basic_dynamic(bench: &mut Bencher) { | ||
let mut archive = DYNAMIC_TEST_ARCHIVE.lock().unwrap(); | ||
do_extract_basic(bench, &mut archive); | ||
} | ||
|
||
/// Benchmark pipelined (`split_extract`) extraction of `archive` into a fresh
/// temp directory. Reports throughput via `bench.bytes` (total decompressed size).
#[cfg(all(feature = "parallelism", unix))]
fn do_extract_split(bench: &mut Bencher, archive: &ZipArchive<fs::File>) {
    let total_size: u64 = archive.decompressed_size().unwrap().try_into().unwrap();

    // Dedicate roughly a third of the CPUs to decompression; the pipeline
    // spawns additional threads per chunk for the other stages, which is
    // presumably what occupies the remaining cores — confirm against
    // split_extract's documentation. Clamp to at least 1 so hosts with fewer
    // than 3 CPUs don't request a zero-sized decompression pool (integer
    // division would otherwise yield 0 when num_cpus::get() < 3).
    let params = ExtractionParameters {
        decompression_threads: (num_cpus::get() / 3).max(1),
        ..Default::default()
    };

    // One parent temp dir for the whole benchmark; each iteration extracts
    // into its own fresh subdirectory so prior output never interferes.
    let parent = TempDir::new("zip-extract").unwrap();

    bench.bytes = total_size;
    bench.bench_n(1, |bench| {
        bench.iter(move || {
            let outdir = TempDir::new_in(parent.path(), "bench-subdir")
                .unwrap()
                .into_path();
            split_extract(archive, &outdir, params.clone()).unwrap();
        });
    });
}
|
||
/// Pipelined extraction over the checked-in static fixture archive.
#[cfg(all(feature = "parallelism", unix))]
fn extract_split_static(bench: &mut Bencher) {
    do_extract_split(bench, &STATIC_TEST_ARCHIVE.lock().unwrap());
}
|
||
/// Pipelined extraction over the CPU-count-scaled generated archive.
#[cfg(all(feature = "parallelism", unix))]
fn extract_split_dynamic(bench: &mut Bencher) {
    do_extract_split(bench, &DYNAMIC_TEST_ARCHIVE.lock().unwrap());
}
|
||
// Without parallelism support (or off unix) only the sequential extraction
// benchmarks are registered.
#[cfg(not(all(feature = "parallelism", unix)))]
benchmark_group!(benches, extract_basic_static, extract_basic_dynamic);

// With parallelism on unix, the split (pipelined) extraction benchmarks are
// registered as well, so both strategies run against the same archives.
#[cfg(all(feature = "parallelism", unix))]
benchmark_group!(
    benches,
    extract_basic_static,
    extract_basic_dynamic,
    extract_split_static,
    extract_split_dynamic
);

benchmark_main!(benches);
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.
Oops, something went wrong.
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
What will the other 2/3 of the CPUs be doing? Also, does this need to be clamped to at least 1?