Skip to content

Commit b5b0376

Browse files
committed
adapt for failure->anyhow migration
1 parent 94ddc05 commit b5b0376

File tree

4 files changed

+14
-21
lines changed

4 files changed

+14
-21
lines changed

src/docbuilder/rustwide_builder.rs

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,6 @@ use crate::docbuilder::{crates::crates_from_path, Limits};
77
use crate::error::Result;
88
use crate::index::api::ReleaseData;
99
use crate::repositories::RepositoryStatsUpdater;
10-
use crate::storage::CompressionAlgorithms;
1110
use crate::storage::{rustdoc_archive_path, source_archive_path};
1211
use crate::utils::{copy_dir_all, parse_rustc_version, CargoMetadata};
1312
use crate::{db::blacklist::is_blacklisted, utils::MetadataPackage};

src/storage/archive_index.rs

Lines changed: 4 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
use crate::error::Result;
22
use crate::storage::{compression::CompressionAlgorithm, FileRange};
3-
use failure::ResultExt;
3+
use anyhow::{bail, Context as _};
44
use serde::{Deserialize, Serialize};
55
use std::collections::HashMap;
66
use std::io;
@@ -28,15 +28,11 @@ pub(crate) struct Index {
2828

2929
impl Index {
3030
pub(crate) fn load(reader: impl io::Read) -> Result<Index> {
31-
serde_cbor::from_reader(reader)
32-
.context("deserialization error")
33-
.map_err(Into::into)
31+
serde_cbor::from_reader(reader).context("deserialization error")
3432
}
3533

3634
pub(crate) fn save(&self, writer: impl io::Write) -> Result<()> {
37-
serde_cbor::to_writer(writer, self)
38-
.context("serialization error")
39-
.map_err(Into::into)
35+
serde_cbor::to_writer(writer, self).context("serialization error")
4036
}
4137

4238
pub(crate) fn new_from_zip<R: io::Read + io::Seek>(zipfile: &mut R) -> Result<Index> {
@@ -56,7 +52,7 @@ impl Index {
5652
),
5753
compression: match zf.compression() {
5854
zip::CompressionMethod::Bzip2 => CompressionAlgorithm::Bzip2,
59-
c => failure::bail!("unsupported compression algorithm {} in zip-file", c),
55+
c => bail!("unsupported compression algorithm {} in zip-file", c),
6056
},
6157
},
6258
);

src/storage/compression.rs

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,7 @@
11
use anyhow::Error;
22
use bzip2::read::{BzDecoder, BzEncoder};
33
use bzip2::Compression;
4-
use failure::Error;
54
use serde::{Deserialize, Serialize};
6-
use std::{collections::HashSet, fmt, io::Read};
75
use std::{
86
collections::HashSet,
97
fmt,

src/storage/mod.rs

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ use self::database::DatabaseBackend;
88
use self::s3::S3Backend;
99
use crate::error::Result;
1010
use crate::{db::Pool, Config, Metrics};
11-
use anyhow::ensure;
11+
use anyhow::{anyhow, ensure};
1212
use chrono::{DateTime, Utc};
1313
use path_slash::PathExt;
1414
use std::{
@@ -145,7 +145,7 @@ impl Storage {
145145
version: &str,
146146
path: &str,
147147
archive_storage: bool,
148-
) -> Result<Blob, Error> {
148+
) -> Result<Blob> {
149149
Ok(if archive_storage {
150150
self.get_from_archive(
151151
&rustdoc_archive_path(name, version),
@@ -165,7 +165,7 @@ impl Storage {
165165
version: &str,
166166
path: &str,
167167
archive_storage: bool,
168-
) -> Result<Blob, Error> {
168+
) -> Result<Blob> {
169169
Ok(if archive_storage {
170170
self.get_from_archive(
171171
&source_archive_path(name, version),
@@ -184,7 +184,7 @@ impl Storage {
184184
version: &str,
185185
path: &str,
186186
archive_storage: bool,
187-
) -> Result<bool, Error> {
187+
) -> Result<bool> {
188188
Ok(if archive_storage {
189189
self.exists_in_archive(&rustdoc_archive_path(name, version), path)?
190190
} else {
@@ -194,7 +194,7 @@ impl Storage {
194194
})
195195
}
196196

197-
pub(crate) fn exists_in_archive(&self, archive_path: &str, path: &str) -> Result<bool, Error> {
197+
pub(crate) fn exists_in_archive(&self, archive_path: &str, path: &str) -> Result<bool> {
198198
let index = self.get_index_for(archive_path)?;
199199
Ok(index.find_file(path).is_ok())
200200
}
@@ -217,7 +217,7 @@ impl Storage {
217217
max_size: usize,
218218
range: FileRange,
219219
compression: Option<CompressionAlgorithm>,
220-
) -> Result<Blob, Error> {
220+
) -> Result<Blob> {
221221
let mut blob = match &self.backend {
222222
StorageBackend::Database(db) => db.get(path, max_size, Some(range)),
223223
StorageBackend::S3(s3) => s3.get(path, max_size, Some(range)),
@@ -232,7 +232,7 @@ impl Storage {
232232
Ok(blob)
233233
}
234234

235-
fn get_index_for(&self, archive_path: &str) -> Result<archive_index::Index, Error> {
235+
fn get_index_for(&self, archive_path: &str) -> Result<archive_index::Index> {
236236
// remote/folder/and/x.zip.index
237237
let remote_index_path = format!("{}.index", archive_path);
238238
let local_index_path = self
@@ -249,7 +249,7 @@ impl Storage {
249249
fs::create_dir_all(
250250
local_index_path
251251
.parent()
252-
.ok_or_else(|| err_msg("index path without parent"))?,
252+
.ok_or_else(|| anyhow!("index path without parent"))?,
253253
)?;
254254
let mut file = fs::File::create(&local_index_path)?;
255255
file.write_all(&index_content)?;
@@ -263,7 +263,7 @@ impl Storage {
263263
archive_path: &str,
264264
path: &str,
265265
max_size: usize,
266-
) -> Result<Blob, Error> {
266+
) -> Result<Blob> {
267267
let index = self.get_index_for(archive_path)?;
268268
let info = index.find_file(path)?;
269269

@@ -288,7 +288,7 @@ impl Storage {
288288
&self,
289289
archive_path: &str,
290290
root_dir: &Path,
291-
) -> Result<(HashMap<PathBuf, String>, CompressionAlgorithm), Error> {
291+
) -> Result<(HashMap<PathBuf, String>, CompressionAlgorithm)> {
292292
let mut file_paths = HashMap::new();
293293

294294
// We are only using the `zip` library to create the archives and the matching

0 commit comments

Comments (0)