use hyper_tls::HttpsConnector;
use std::io::Write;
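/// Entry point: crawls wallhaven listing pages and saves every wallpaper
/// found along the way into `./output`.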
#[tokio::main]
async fn main() {
    // The base URL is expected to be a wallhaven listing or search page; the
    // cookie is optional and only needed for content behind a login.
    let u = "https://wallhaven.cc";
    let cookie = "";
    // Make sure the download directory exists before any file is written.
    std::fs::create_dir_all("./output").unwrap();
    crawl(u, cookie).await;
}
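/// Walks listing pages starting from `base`, following each page's
/// "next page" link until none is left.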
async fn crawl(base: &str, cookie: &str) {
    // Append the page parameter with the correct separator: `&` when the base
    // URL already carries a query string, `?` otherwise.
    let separator = if base.contains('?') { '&' } else { '?' };
    let mut next_page = Some(format!("{}{}page=1", base, separator));
    while let Some(url) = next_page {
        next_page = process_one(&url, cookie).await;
    }
}
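/// Fetches one listing page, downloads every wallpaper linked from it, and
/// returns the URL of the next listing page, if any.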
async fn process_one(u: &str, cookie: &str) -> Option<String> {
    // Fetch the page once and parse both the next-page link and the image
    // links out of the same body, instead of requesting it twice.
    let body = do_request(u, cookie).await;
    let next_page_url = get_next_page_url(&body);
    let urls = parse_image_url(&body);
    println!("next: {:?}", next_page_url);
    println!("urls: {:?}", urls);
    for url in urls {
        if download(&url, cookie).await {
            // Throttle between downloads with the async sleep;
            // `std::thread::sleep` would block the executor thread.
            tokio::time::sleep(std::time::Duration::from_millis(500)).await;
        }
    }
    next_page_url
}
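/// Extracts the "next page" URL from a listing page body, if the pagination
/// element contains one.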
fn get_next_page_url(body: &str) -> Option<String> {
    // CSS class selector grammar: ".class_name" (the pagination area also
    // carries classes such as ".thumb-listing-page-num").
    let document = scraper::Html::parse_document(body);
    let next_pager_selector = scraper::Selector::parse(".next").unwrap();
    document
        .select(&next_pager_selector)
        .next()
        .and_then(|next_pager| next_pager.value().attr("href"))
        .map(String::from)
}
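/// Performs a GET request with the given cookie and returns the raw response
/// body bytes.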
async fn do_request_bytes(u: &str, cookie: &str) -> Vec<u8> {
    let https = HttpsConnector::new();
    let client = hyper::Client::builder().build::<_, hyper::Body>(https);
    let request = hyper::Request::get(u)
        .header("Cookie", cookie)
        .body(hyper::Body::empty())
        .unwrap();
    let response = match client.request(request).await {
        Ok(response) => response,
        Err(e) => panic!("do request error for {}: {:?}", u, e),
    };
    let body = hyper::body::to_bytes(response.into_body()).await.unwrap();
    body.to_vec()
}
/// Performs a GET request and decodes the body as text. Lossy decoding
/// replaces any invalid UTF-8 instead of invoking the undefined behavior of
/// `from_utf8_unchecked`, which is fine for the HTML pages parsed here.
async fn do_request(u: &str, cookie: &str) -> String {
    String::from_utf8_lossy(&do_request_bytes(u, cookie).await).into_owned()
}
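/// Collects the detail-page links (anchors with the `.preview` class) from a
/// listing page body.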
fn parse_image_url(body: &str) -> Vec<String> {
    let document = scraper::Html::parse_document(body);
    let preview_selector = scraper::Selector::parse(".preview").unwrap();
    document
        .select(&preview_selector)
        .filter_map(|element| element.value().attr("href"))
        .map(String::from)
        .collect()
}
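/// Fetches a wallpaper detail page, resolves the full-size image from its
/// `<img id="wallpaper">` element, and saves it under `./output`. Returns
/// `true` only when a new file was actually downloaded.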
async fn download(address: &str, cookie: &str) -> bool {
    println!("address: {}", address);
    let body = do_request(address, cookie).await;
    // Resolve the image URL with an id selector instead of fragile string
    // searching. The document is parsed in its own scope so it is dropped
    // before the next await point.
    let src = {
        let document = scraper::Html::parse_document(&body);
        let wallpaper_selector = scraper::Selector::parse("#wallpaper").unwrap();
        match document
            .select(&wallpaper_selector)
            .next()
            .and_then(|img| img.value().attr("src"))
        {
            Some(src) => src.to_string(),
            None => return false,
        }
    };
    println!("wallpaper src: {}", src);
    // Name the local file after the last path segment of the page URL.
    let slash_index = match address.rfind('/') {
        Some(index) => index,
        None => return false,
    };
    let name = format!("./output/{}", &address[slash_index + 1..]);
    // Skip files that were already downloaded on a previous run.
    if std::path::Path::new(&name).exists() {
        return false;
    }
    // Fetch the image as raw bytes; it is binary data, not UTF-8 text.
    let image = do_request_bytes(&src, cookie).await;
    save(&name, &image).await;
    true
}
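/// Writes the downloaded bytes to disk at `name`.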
async fn save(name: &str, body: &[u8]) {
    let mut file = std::fs::File::create(name).unwrap();
    // `write_all` returns a `Result`; ignoring it would silently drop errors.
    file.write_all(body).unwrap();
}