Auto merge of #1020 - jelford:support_partial_downloads, r=brson
Support partial downloads

This PR should close #889

A couple of implementation details:

Only added support for the curl backend; it was previously discussed that there's an intention to get rid of rustup's own download code, and the default feature set uses curl anyway, so hopefully this is okay.

Added new testing to the download crate - while it's there, it makes sense to have a test. Since we're using curl's "resume" functionality, I figured it's probably fine to just use file:// URLs for the test cases. I previously tested using a small hyper-based HTTP server, but that feels like overkill.

For hashing files, I've set the buffer size to 2^15 - just because that's what strace tells me `sha256sum` uses on my local PC. It seems much slower than that command, though, and it's not obvious why, so maybe I've done something silly here.

Finally, and maybe most controversially, I haven't done anything about cleaning up aborted partials. I don't really know when a good time is to do this, but here are a couple of suggestions that I'd be happy to implement:

* Every run, just check the download cache for any files > 7 days old and smoke them
* On self-update, as that seems like a natural time for generic "maintenance" sorts of operations

As I mentioned in my last PR, the same disclaimer applies: I haven't written much Rust, so I fully expect you will see some problems (also very happy to accept style criticisms). I accidentally ran `rustfmt` on some things, so apologies for the noise (I can revert, but... maybe it's worth having anyway?).
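As an editorial aside, here is a minimal, hedged sketch of the resume technique the commit message describes, using the curl crate directly; this is not the code in this PR, and the function name download_with_resume and the error handling are made up for illustration. The idea is to measure any partial file already on disk, pass that offset to Easy::resume_from so curl requests only the remaining bytes, and append the newly received data.

use std::fs::OpenOptions;
use std::io::Write;
use std::path::Path;

use curl::easy::Easy;

// Sketch only: resume a download into `target`, continuing from whatever
// partial data is already there.
fn download_with_resume(url: &str, target: &Path) -> Result<(), Box<dyn std::error::Error>> {
    // Length of any partial file we already have; 0 means start from scratch.
    let existing_len = std::fs::metadata(target).map(|m| m.len()).unwrap_or(0);

    // Open in append mode so resumed bytes land after the partial data.
    let mut file = OpenOptions::new()
        .create(true)
        .append(true)
        .open(target)?;

    let mut handle = Easy::new();
    handle.url(url)?;
    // Ask curl to resume the transfer from this byte offset.
    handle.resume_from(existing_len)?;

    {
        let mut transfer = handle.transfer();
        transfer.write_function(|data| {
            // Each chunk curl hands us is appended to the target file.
            file.write_all(data).expect("writing downloaded chunk");
            Ok(data.len())
        })?;
        transfer.perform()?;
    }
    Ok(())
}

The real rustup code additionally feeds each chunk to progress/hashing callbacks, which is what the tests below exercise via the Event::DownloadDataReceived notification.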
Showing 10 changed files with 243 additions and 51 deletions.
@@ -0,0 +1,98 @@
#![cfg(feature = "curl-backend")]

extern crate download;
extern crate tempdir;
extern crate url;

use std::sync::{Arc, Mutex};
use std::fs::{self, File};
use std::io::{self, Read};
use std::path::Path;

use tempdir::TempDir;
use url::Url;

use download::*;

fn tmp_dir() -> TempDir {
    TempDir::new("rustup-download-test-").expect("creating tempdir for test")
}

fn file_contents(path: &Path) -> String {
    let mut result = String::new();
    File::open(&path).unwrap().read_to_string(&mut result).expect("reading test result file");
    result
}


pub fn write_file(path: &Path, contents: &str) {
    let mut file = fs::OpenOptions::new()
        .write(true)
        .truncate(true)
        .create(true)
        .open(path)
        .expect("writing test data");

    io::Write::write_all(&mut file, contents.as_bytes()).expect("writing test data");

    file.sync_data().expect("writing test data");
}

#[test]
fn partially_downloaded_file_gets_resumed_from_byte_offset() {
    let tmpdir = tmp_dir();
    let from_path = tmpdir.path().join("download-source");
    write_file(&from_path, "xxx45");

    let target_path = tmpdir.path().join("downloaded");
    write_file(&target_path, "123");

    let from_url = Url::from_file_path(&from_path).unwrap();
    download_to_path_with_backend(
        Backend::Curl,
        &from_url,
        &target_path,
        true,
        None)
        .expect("Test download failed");

    assert_eq!(file_contents(&target_path), "12345");
}

#[test]
fn callback_gets_all_data_as_if_the_download_happened_all_at_once() {
    let tmpdir = tmp_dir();

    let from_path = tmpdir.path().join("download-source");
    write_file(&from_path, "xxx45");

    let target_path = tmpdir.path().join("downloaded");
    write_file(&target_path, "123");

    let from_url = Url::from_file_path(&from_path).unwrap();

    let received_in_callback = Arc::new(Mutex::new(Vec::new()));

    download_to_path_with_backend(Backend::Curl,
                                  &from_url,
                                  &target_path,
                                  true,
                                  Some(&|msg| {
                                      match msg {
                                          Event::DownloadDataReceived(data) => {
                                              for b in data.iter() {
                                                  received_in_callback.lock().unwrap().push(b.clone());
                                              }
                                          }
                                          _ => {}
                                      }

                                      Ok(())
                                  }))
        .expect("Test download failed");

    let ref observed_bytes = *received_in_callback.lock().unwrap();
    assert_eq!(observed_bytes, &vec![b'1', b'2', b'3', b'4', b'5']);
    assert_eq!(file_contents(&target_path), "12345");
}