I want to write a program that crawls the web like a spider. I use tokio::spawn hoping that every URL gets fetched asynchronously, but when I call reqwest inside a loop under tokio, it seems to behave as if it were single-threaded. Rust's async is different from JavaScript's, and I don't really understand how it works. I've been puzzling over this for days, so I'm asking here. Here is the code:
use tokio::task;
use scraper::{Html, Selector};
use url::Url;

#[tokio::main]
async fn main() {
    // URLs already crawled and URLs still waiting to be crawled.
    let mut visited_url: Vec<String> = Vec::new();
    let mut visiting_url: Vec<String> = Vec::new();
    visiting_url.push("https://baike.baidu.com/".to_string());

    let client = reqwest::Client::builder()
        .user_agent("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36")
        .build().unwrap();

    // Everything runs inside this single spawned task.
    let fut = tokio::spawn(async move {
        loop {
            let cclient = &client;
            // Take the next URL off the frontier (panics if it is empty).
            let main_url = visiting_url.pop().unwrap();
            println!("{}", main_url);
            visited_url.push(main_url.clone());

            // Fetch the page and read the body.
            let resp = cclient.get(&main_url).send().await.unwrap();
            let status_code = resp.status().as_u16();
            let html = resp.text().await.unwrap();

            // Parse the HTML and push every absolute link we have not seen yet.
            let document = Html::parse_document(&html);
            for element in document.select(&Selector::parse("[href]").unwrap()) {
                let href = element.value().attr("href").unwrap();
                let abs_url = Url::parse(&main_url).unwrap().join(href).unwrap().as_str().to_string();
                if !visited_url.contains(&abs_url) {
                    visiting_url.push(abs_url);
                }
            }
            println!("{}", status_code);
        }
        // task::yield_now().await;
    });
    task::unconstrained(fut).await;
}
and the Cargo.toml:
[dependencies]
tokio = { version = "1.4.0", features = ["full"]}
reqwest = { version = "0.11", features = [] }
scraper = "0.12.0"
url = "2.2.1"
I changed my original code like this, but the speed improvement is still small.
use reqwest::Client;
use tokio::task;
use scraper::{Html, Selector};
use url::Url;

// Requires lazy_static = "1" added to [dependencies].
#[macro_use]
extern crate lazy_static;
use std::sync::Mutex;

lazy_static! {
    // Global crawl state shared by every task.
    static ref visited_url: Mutex<Vec<String>> = Mutex::new(vec![]);
    static ref visiting_url: Mutex<Vec<String>> = Mutex::new(vec![]);
}

// Fetch one URL from the frontier and push the links it contains.
// Takes the Client by value so the future is 'static and can be spawned.
async fn get(client: Client) {
    // Panics if the frontier happens to be empty at this moment.
    let main_url = visiting_url.lock().unwrap().pop().unwrap();
    println!("{}", main_url);
    visited_url.lock().unwrap().push(main_url.clone());

    let resp = client.get(&main_url).send().await.unwrap();
    let status_code = resp.status().as_u16();
    let html = resp.text().await.unwrap();

    let document = Html::parse_document(&html);
    for element in document.select(&Selector::parse("[href]").unwrap()) {
        let href = element.value().attr("href").unwrap();
        let abs_url = Url::parse(&main_url).unwrap().join(href).unwrap().as_str().to_string();
        if !visited_url.lock().unwrap().contains(&abs_url) {
            visiting_url.lock().unwrap().push(abs_url);
        }
    }
    println!("{}", status_code);
}

#[tokio::main]
async fn main() {
    visiting_url.lock().unwrap().push("https://baike.baidu.com/".to_string());
    let client = reqwest::Client::builder()
        .user_agent("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36")
        .build().unwrap();
    loop {
        // get(client.clone()).await; // the sequential version I tried first
        // Spawn a new task on every iteration; the clones share one connection pool.
        tokio::spawn(get(client.clone()));
    }
}
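Is part of the problem that my main loop spawns tasks faster than the frontier fills, so most of them just panic on the empty queue? To check whether the requests can overlap at all, I also sketched a version with a fixed pool of workers over a shared queue. The worker count of 8 and the seed URLs are arbitrary placeholders, and the link-extraction part is left out.

use std::sync::{Arc, Mutex};
use reqwest::Client;

#[tokio::main]
async fn main() {
    let client = Client::new();
    // Shared frontier; these seed URLs are placeholders.
    let queue = Arc::new(Mutex::new(vec![
        "https://example.com/".to_string(),
        "https://example.org/".to_string(),
        "https://example.net/".to_string(),
    ]));

    let mut workers = Vec::new();
    for _ in 0..8 {
        let client = client.clone();
        let queue = Arc::clone(&queue);
        workers.push(tokio::spawn(async move {
            loop {
                // Take one URL; the lock guard is dropped before any await.
                let next = queue.lock().unwrap().pop();
                let url = match next {
                    Some(u) => u,
                    None => break, // nothing left to do
                };
                match client.get(&url).send().await {
                    Ok(resp) => println!("{} -> {}", url, resp.status()),
                    Err(e) => eprintln!("{}: {}", url, e),
                }
                // Parsing the page and pushing new links would go here.
            }
        }));
    }

    // Wait for every worker to finish.
    for w in workers {
        w.await.unwrap();
    }
}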