@@ -33,7 +33,7 @@ use std::path::{Path, PathBuf};
 use std::collections::{HashMap, HashSet};
 use std::collections::hash_map::Entry;
 
-use url::{Url, UrlParser};
+use url::Url;
 
 use Redirect::*;
 
@@ -92,7 +92,7 @@ fn walk(cache: &mut Cache, root: &Path, dir: &Path, url: &mut Url, errors: &mut
     for entry in t!(dir.read_dir()).map(|e| t!(e)) {
         let path = entry.path();
         let kind = t!(entry.file_type());
-        url.path_mut().unwrap().push(entry.file_name().into_string().unwrap());
+        url.path_segments_mut().unwrap().push(entry.file_name().to_str().unwrap());
         if kind.is_dir() {
             walk(cache, root, &path, url, errors);
         } else {
@@ -104,7 +104,7 @@ fn walk(cache: &mut Cache, root: &Path, dir: &Path, url: &mut Url, errors: &mut
                 entry.source = String::new();
             }
         }
-        url.path_mut().unwrap().pop();
+        url.path_segments_mut().unwrap().pop();
     }
 }
 
@@ -138,9 +138,6 @@ fn check(cache: &mut Cache,
         return None;
     }
 
-    let mut parser = UrlParser::new();
-    parser.base_url(base);
-
     let res = load_file(cache, root, PathBuf::from(file), SkipRedirect);
     let (pretty_file, contents) = match res {
         Ok(res) => res,
@@ -162,7 +159,7 @@ fn check(cache: &mut Cache,
         }
         // Once we've plucked out the URL, parse it using our base url and
         // then try to extract a file path.
-        let (parsed_url, path) = match url_to_file_path(&parser, url) {
+        let (parsed_url, path) = match url_to_file_path(&base, url) {
            Some((url, path)) => (url, PathBuf::from(path)),
            None => {
                *errors = true;
@@ -203,7 +200,7 @@ fn check(cache: &mut Cache,
            Err(LoadError::IsRedirect) => unreachable!(),
        };
 
-        if let Some(ref fragment) = parsed_url.fragment {
+        if let Some(ref fragment) = parsed_url.fragment() {
            // Fragments like `#1-6` are most likely line numbers to be
            // interpreted by javascript, so we're ignoring these
            if fragment.splitn(2, '-')
@@ -214,7 +211,7 @@ fn check(cache: &mut Cache,
            let entry = &mut cache.get_mut(&pretty_path).unwrap();
            entry.parse_ids(&pretty_path, &contents, errors);
 
-            if !entry.ids.contains(fragment) {
+            if !entry.ids.contains(*fragment) {
                *errors = true;
                print!("{}:{}: broken link fragment ",
                       pretty_file.display(),
@@ -271,10 +268,8 @@ fn load_file(cache: &mut Cache,
        }
    };
    let base = Url::from_file_path(&file).unwrap();
-    let mut parser = UrlParser::new();
-    parser.base_url(&base);
 
-    match maybe_redirect.and_then(|url| url_to_file_path(&parser, &url)) {
+    match maybe_redirect.and_then(|url| url_to_file_path(&base, &url)) {
        Some((_, redirect_file)) => {
            let path = PathBuf::from(redirect_file);
            load_file(cache, root, path, FromRedirect(true))
@@ -299,8 +294,8 @@ fn maybe_redirect(source: &str) -> Option<String> {
     })
 }
 
-fn url_to_file_path(parser: &UrlParser, url: &str) -> Option<(Url, PathBuf)> {
-    parser.parse(url)
+fn url_to_file_path(parser: &Url, url: &str) -> Option<(Url, PathBuf)> {
+    parser.join(url)
        .ok()
        .and_then(|parsed_url| parsed_url.to_file_path().ok().map(|f| (parsed_url, f)))
 }
0 commit comments