Mirror of https://github.com/Syfaro/fuzzysearch.git (synced 2024-11-23 15:22:31 +00:00)
Make async.
commit 97246f502a (parent ba0aca1c80)
Cargo.lock (generated): 810 lines changed; diff suppressed because it is too large.
Cargo.toml

@@ -5,8 +5,9 @@ authors = ["Syfaro <syfaro@huefox.com>"]
 edition = "2018"
 
 [dependencies]
-reqwest = "0.9"
+reqwest = { git = "https://github.com/seanmonstar/reqwest" }
 postgres = { version = "0.17.0-alpha.2", features = ["with-chrono-0_4"] }
+tokio = { version = "0.2", features = ["macros", "time"] }
 tokio-postgres = { version = "0.5.0-alpha.2" }
 r2d2_postgres = " 0.16.0-alpha.2"
 r2d2 = "0.8"
@@ -14,4 +15,4 @@ chrono = "0.4"
 
 [dependencies.furaffinity-rs]
 git = "https://git.huefox.com/syfaro/furaffinity-rs"
-rev = "35478cb1402b26842900546f5cb1130238866c22"
+rev = "d6d5a61bfe041e8ecbbe135c43dcbb577961e2f3"
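The new tokio dependency is what the src/main.rs changes below rely on: the "macros" feature provides the #[tokio::main] attribute and the "time" feature provides tokio::time::delay_for. A minimal sketch of those two pieces working together, assuming tokio 0.2:

    // Minimal sketch, tokio 0.2: "macros" gives #[tokio::main],
    // "time" gives tokio::time::delay_for.
    #[tokio::main]
    async fn main() {
        // Pause asynchronously instead of blocking the thread.
        tokio::time::delay_for(std::time::Duration::from_secs(1)).await;
        println!("waited one second without blocking the executor");
    }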
src/main.rs: 20 lines changed
@@ -71,11 +71,7 @@ fn insert_submission(
         .collect();
 
     let hash = sub.hash.clone();
-
-    let url = match &sub.content {
-        furaffinity_rs::Content::Image(url) => url,
-        furaffinity_rs::Content::Flash(url) => url,
-    };
+    let url = sub.content.url();
 
     client.execute("INSERT INTO submission (id, artist_id, url, filename, hash, rating, posted_at, description, hash1, hash2, hash3, hash4, hash5, hash6, hash7, hash8) VALUES ($1, $2, $3, $4, decode($5, 'base64'), $6, $7, $8, get_byte(decode($5, 'base64'), 0), get_byte(decode($5, 'base64'), 1), get_byte(decode($5, 'base64'), 2), get_byte(decode($5, 'base64'), 3), get_byte(decode($5, 'base64'), 4), get_byte(decode($5, 'base64'), 5), get_byte(decode($5, 'base64'), 6), get_byte(decode($5, 'base64'), 7))", &[
         &sub.id, &artist_id, &url, &sub.filename, &hash, &sub.rating.serialize(), &sub.posted_at, &sub.description,
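The hand-written match over furaffinity_rs::Content is replaced by a url() accessor, presumably provided by the newer furaffinity-rs revision pinned in Cargo.toml. A hypothetical sketch of what such an accessor could look like (the real definition in furaffinity-rs may differ):

    // Hypothetical sketch only; not the actual furaffinity-rs source.
    pub enum Content {
        Image(String),
        Flash(String),
    }

    impl Content {
        /// Returns the URL for either content variant.
        pub fn url(&self) -> &str {
            match self {
                Content::Image(url) | Content::Flash(url) => url,
            }
        }
    }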
@@ -84,6 +80,7 @@ fn insert_submission(
     let stmt = client.prepare(
         "INSERT INTO tag_to_post (tag_id, post_id) VALUES ($1, $2) ON CONFLICT DO NOTHING",
     )?;
+
     for tag_id in tag_ids {
         client.execute(&stmt, &[&tag_id, &sub.id])?;
     }
@@ -95,7 +92,8 @@ fn insert_null_submission(client: &mut Client, id: i32) -> Result<u64, postgres:
     client.execute("INSERT INTO SUBMISSION (id) VALUES ($1)", &[&id])
 }
 
-fn main() {
+#[tokio::main]
+async fn main() {
     let (cookie_a, cookie_b) = (
         std::env::var("FA_A").expect("missing fa cookie a"),
         std::env::var("FA_B").expect("missing fa cookie b"),
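The #[tokio::main] attribute lets main stay a single async fn; conceptually it wraps the body in a runtime, roughly like this simplified sketch under tokio 0.2:

    // Rough expansion of #[tokio::main] under tokio 0.2 (simplified sketch).
    fn main() {
        let mut rt = tokio::runtime::Runtime::new().expect("failed to build tokio runtime");
        rt.block_on(async {
            // ...original body of async fn main() goes here...
        });
    }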
@@ -115,19 +113,19 @@ fn main() {
     'main: loop {
         let mut client = pool.get().unwrap();
 
-        let latest_id = fa.latest_id().expect("unable to get latest id");
+        let latest_id = fa.latest_id().await.expect("unable to get latest id");
 
         for id in ids_to_check(&mut client, latest_id) {
             'attempt: for attempt in 0..3 {
                 if !has_submission(&mut client, id) {
                     println!("loading submission {}", id);
 
-                    let sub = match fa.get_submission(id) {
+                    let sub = match fa.get_submission(id).await {
                         Ok(sub) => sub,
                         Err(e) => {
                             println!("got error: {:?}, retry {}", e.message, e.retry);
                             if e.retry {
-                                std::thread::sleep(std::time::Duration::from_secs(attempt + 1));
+                                tokio::time::delay_for(std::time::Duration::from_secs(attempt + 1)).await;
                                 continue 'attempt;
                             } else {
                                 println!("unrecoverable, exiting");
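Swapping std::thread::sleep for tokio::time::delay_for means the retry backoff yields to the tokio executor instead of parking an OS thread. A condensed sketch of the same linear-backoff retry pattern, with a hypothetical fetch function standing in for fa.get_submission:

    use std::time::Duration;

    // Hypothetical stand-in for fa.get_submission(id).await.
    async fn fetch(id: i32) -> Result<String, String> {
        Ok(format!("submission {}", id))
    }

    async fn fetch_with_retry(id: i32) -> Option<String> {
        for attempt in 0..3u64 {
            match fetch(id).await {
                Ok(sub) => return Some(sub),
                Err(e) => {
                    println!("got error: {:?}, retrying", e);
                    // Linear backoff: 1s, 2s, 3s; awaiting yields instead of blocking.
                    tokio::time::delay_for(Duration::from_secs(attempt + 1)).await;
                }
            }
        }
        None
    }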
@@ -145,7 +143,7 @@ fn main() {
                         }
                     };
 
-                    let sub = match fa.calc_image_hash(sub.clone()) {
+                    let sub = match fa.calc_image_hash(sub.clone()).await {
                         Ok(sub) => sub,
                         Err(e) => {
                             println!("unable to hash image: {:?}", e);
@@ -164,6 +162,6 @@ fn main() {
 
         println!("completed fetch, waiting a minute before loading more");
 
-        std::thread::sleep(std::time::Duration::from_secs(60));
+        tokio::time::delay_for(std::time::Duration::from_secs(60)).await;
     }
 }
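Note that tokio::time::delay_for is the tokio 0.2 name for this timer; later tokio releases renamed it, so on tokio 1.x the equivalent of the one-minute pause would be:

    // tokio 1.x equivalent of the tokio 0.2 delay_for call above.
    tokio::time::sleep(std::time::Duration::from_secs(60)).await;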