
Commit b552078

Merge pull request #2499 from rbtcollins/bug-2490
Fix #2490: Don't fail artificially on large files
2 parents 640bedc + 99ff000 commit b552078

File tree

1 file changed (+12 −1 lines changed)


src/dist/component/package.rs

Lines changed: 12 additions & 1 deletion
@@ -196,6 +196,8 @@ impl MemoryBudget {
            }
        };
 
+        // Future us: this can be removed when IO chunking within a single file is possible: it just helps generate good
+        // errors rather than allocator-failure panics when we hit the large file on a RAM limited system.
         if max_file_size > unpack_ram {
             panic!("RUSTUP_UNPACK_RAM must be larger than {}", max_file_size);
         }
@@ -356,7 +358,16 @@ fn unpack_without_first_dir<'a, R: Read>(
 
         let size = entry.header().size()?;
         if size > MAX_FILE_SIZE {
-            return Err(format!("File too big {} {}", relpath.display(), size).into());
+            // If we cannot tell the user we will either succeed (great), or fail (and we may get a bug report), either
+            // way, we will most likely get reports from users about this, so the possible set of custom builds etc that
+            // don't report are not a great concern.
+            if let Some(notify_handler) = notify_handler {
+                notify_handler(Notification::Error(format!(
+                    "File too big {} {}",
+                    relpath.display(),
+                    size
+                )));
+            }
         }
         while size > budget.available() as u64 {
             for mut item in Vec::from_iter(io_executor.completed()) {
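
For readers following the change, below is a minimal, self-contained sketch of the two ideas in the diff: failing early with an actionable RUSTUP_UNPACK_RAM message (first hunk) and reporting an oversized file through an optional notification callback instead of returning an error (second hunk). The Notification enum, the helper names, and the size constants here are illustrative assumptions, not the actual rustup code.

use std::env;

// Illustrative stand-in for rustup's notification type.
enum Notification {
    Error(String),
}

// Assumed per-file limit for this sketch; the real constant lives in package.rs.
const MAX_FILE_SIZE: u64 = 200 * 1024 * 1024;

// First hunk's idea: panic with a clear, actionable message up front instead of
// hitting an allocator-failure panic later on a RAM-limited system.
fn unpack_ram_budget(max_file_size: usize, default_budget: usize) -> usize {
    let unpack_ram = env::var("RUSTUP_UNPACK_RAM")
        .ok()
        .and_then(|s| s.parse::<usize>().ok())
        .unwrap_or(default_budget);
    if max_file_size > unpack_ram {
        panic!("RUSTUP_UNPACK_RAM must be larger than {}", max_file_size);
    }
    unpack_ram
}

// Second hunk's idea: surface an oversized entry through the optional callback
// and keep going, so the unpack either succeeds or fails with a genuine error.
fn check_entry_size(size: u64, path: &str, notify_handler: Option<&dyn Fn(Notification)>) {
    if size > MAX_FILE_SIZE {
        if let Some(notify_handler) = notify_handler {
            notify_handler(Notification::Error(format!("File too big {} {}", path, size)));
        }
    }
}

fn main() {
    let budget = unpack_ram_budget(50 * 1024 * 1024, 500 * 1024 * 1024);
    let handler = |n: Notification| match n {
        Notification::Error(msg) => eprintln!("error: {}", msg),
    };
    // An entry over the limit now produces a warning rather than aborting the unpack.
    check_entry_size(300 * 1024 * 1024, "lib/rustlib/big.rlib", Some(&handler));
    println!("unpack budget: {} bytes", budget);
}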

0 commit comments
