api fetch as trait with caching
wyatt-avilla committed Jun 18, 2024
1 parent 9a319fe commit a0c8262
Showing 7 changed files with 189 additions and 158 deletions.
1 change: 1 addition & 0 deletions Cargo.toml
@@ -6,6 +6,7 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
cached = {version = "0.51.4", features = ["async"]}
dotenv = "0.15.0"
regex = "1.10.5"
reqwest = "0.12.4"
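The new `cached` dependency is pulled in with its `async` feature enabled, which is what lets the crate's `#[once]` macro, used in the files below, wrap `async fn` fetchers.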
9 changes: 8 additions & 1 deletion src/dynamic_content.rs
@@ -1,4 +1,3 @@
pub mod cache;
mod github;
mod goodreads;
mod lastfm;
@@ -8,3 +7,11 @@ pub use github::Commit;
pub use goodreads::Book;
pub use lastfm::Song;
pub use letterboxd::Movie;

pub trait ApiRefresh {
    type Content;

    async fn fetch_newest(
        n: u32,
    ) -> Result<std::vec::Vec<Self::Content>, Box<dyn std::error::Error>>;
}
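The trait gives each content source a uniform entry point, so callers never touch the per-site API and scraping details. A minimal sketch of how it might be consumed (the `render_feed` function, the count of 5, and the `use` path are illustrative, not part of this commit):

    use dynamic_content::{ApiRefresh, Commit, Song};

    // Hypothetical caller: every implementor shares the same signature,
    // so fetching from different sources reads identically.
    async fn render_feed() -> Result<(), Box<dyn std::error::Error>> {
        let commits: Vec<Commit> = Commit::fetch_newest(5).await?;
        let songs: Vec<Song> = Song::fetch_newest(5).await?;
        println!("{} commits, {} songs", commits.len(), songs.len());
        Ok(())
    }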
7 changes: 0 additions & 7 deletions src/dynamic_content/cache.rs

This file was deleted.

92 changes: 50 additions & 42 deletions src/dynamic_content/github.rs
@@ -1,7 +1,9 @@
use super::cache::ApiRefresh;
use super::ApiRefresh;
use cached::proc_macro::once;
use reqwest::{self, header};
use url::Url;

#[derive(Clone)]
pub struct Commit {
    pub message: String,
    pub url: Url,
@@ -13,46 +15,52 @@ impl ApiRefresh for Commit {
    type Content = Commit;

    async fn fetch_newest(n: u32) -> Result<std::vec::Vec<Commit>, Box<dyn std::error::Error>> {
        let username = std::env::var("GH_USERNAME")?;

        let url = format!("https://api.github.com/users/{username}/events");

        let client = reqwest::Client::new();
        let response = client
            .get(&url)
            .header(header::USER_AGENT, "pulse")
            .send()
            .await?
            .text()
            .await?;

        let json: serde_json::Value = serde_json::from_str(&response)
            .map_err(|err| Box::new(err) as Box<dyn std::error::Error>)?;

        let events = match json.as_array() {
            Some(events) => events.clone(),
            None => return Ok(Vec::new()),
        };

        let commits: Vec<_> = events
            .iter()
            .filter(|&event| event["type"] == "PushEvent")
            .cloned()
            .collect();

        Ok(commits
            .iter()
            .filter_map(|commit| {
                Some(Commit {
                    message: commit["payload"]["commits"][0]["message"]
                        .as_str()?
                        .to_string(),
                    url: Url::parse(commit["payload"]["commits"][0]["url"].as_str()?).ok()?,
                    repository_name: commit["repo"]["name"].as_str()?.to_string(),
                    repository_link: Url::parse(commit["repo"]["url"].as_str()?).ok()?,
                })
            })
            .take(n as usize)
            .collect())
        fetch_newest_commits(n).await
    }
}

// 15 min
#[once(result = true, time = 900)]
async fn fetch_newest_commits(n: u32) -> Result<std::vec::Vec<Commit>, Box<dyn std::error::Error>> {
    let username = std::env::var("GH_USERNAME")?;

    let url = format!("https://api.github.com/users/{username}/events");

    let client = reqwest::Client::new();
    let response = client
        .get(&url)
        .header(header::USER_AGENT, "pulse")
        .send()
        .await?
        .text()
        .await?;

    let json: serde_json::Value = serde_json::from_str(&response)
        .map_err(|err| Box::new(err) as Box<dyn std::error::Error>)?;

    let events = match json.as_array() {
        Some(events) => events.clone(),
        None => return Ok(Vec::new()),
    };

    let commits: Vec<_> = events
        .iter()
        .filter(|&event| event["type"] == "PushEvent")
        .cloned()
        .collect();

    Ok(commits
        .iter()
        .filter_map(|commit| {
            Some(Commit {
                message: commit["payload"]["commits"][0]["message"]
                    .as_str()?
                    .to_string(),
                url: Url::parse(commit["payload"]["commits"][0]["url"].as_str()?).ok()?,
                repository_name: commit["repo"]["name"].as_str()?.to_string(),
                repository_link: Url::parse(commit["repo"]["url"].as_str()?).ok()?,
            })
        })
        .take(n as usize)
        .collect())
}
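`#[once]` from the `cached` crate memoizes a single value for the whole function rather than keying the cache on arguments: `result = true` stores only `Ok` values (an `Err` is returned to the caller and leaves the cache empty, so the next call retries), and `time = 900` expires the slot after 900 seconds. This is also why the commit derives `Clone` on `Commit`, `Book`, and `Song`: the cached `Vec` is cloned out to each caller. One consequence of single-slot caching is that whichever `n` arrives first determines the stored list, and a later call with a different `n` gets that same list until the TTL lapses. A standalone sketch of the pattern (the function name and 60-second TTL are illustrative):

    use cached::proc_macro::once;

    // One global slot: the first Ok value is reused for every caller,
    // regardless of `n`, until 60 seconds pass. Err values are not stored.
    #[once(result = true, time = 60)]
    async fn expensive_lookup(n: u32) -> Result<Vec<u32>, Box<dyn std::error::Error>> {
        Ok((0..n).collect())
    }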
68 changes: 38 additions & 30 deletions src/dynamic_content/goodreads.rs
@@ -1,8 +1,10 @@
use super::cache::ApiRefresh;
use super::ApiRefresh;
use cached::proc_macro::once;
use regex::Regex;
use scraper::{Html, Selector};
use url::{ParseError, Url};

#[derive(Clone)]
pub struct Book {
    pub title: String,
    pub author: String,
@@ -33,39 +35,45 @@ impl ApiRefresh for Book {
    type Content = Book;

    async fn fetch_newest(n: u32) -> Result<std::vec::Vec<Book>, Box<dyn std::error::Error>> {
        let shelf = std::env::var("GOODREADS_SHELF")?;
        let html = Html::parse_document(
            &reqwest::get(&shelf)
                .await
                .map_err(|err| Box::new(err) as Box<dyn std::error::Error>)?
                .text()
                .await
                .map_err(|err| Box::new(err) as Box<dyn std::error::Error>)?,
        );

        let row_selector = Selector::parse(r"tr.bookalike.review").unwrap();

        let title_selector = Selector::parse(r"td.field.title a").unwrap();
        let author_selector = Selector::parse(r"td.field.author a").unwrap();

        Ok(html
            .select(&row_selector)
            .filter_map(|row| {
                let title_element = row.select(&title_selector).next()?;
                let title_href = title_element.value().attr("href")?;

                let author_element = row.select(&author_selector).next()?;
                let author_href = author_element.value().attr("href")?;

                Some(Book {
                    title: clean_text(&title_element.text().collect::<Vec<_>>().concat()),
                    author: swap_name_order(&author_element.text().collect::<Vec<_>>().concat())
                        .ok()?,
                    title_url: create_goodreads_url(title_href).ok()?,
                    author_url: create_goodreads_url(author_href).ok()?,
                })
            })
            .take(n as usize)
            .collect())
        fetch_newest_books(n).await
    }
}

// 1 day
#[once(result = true, time = 86400)]
async fn fetch_newest_books(n: u32) -> Result<std::vec::Vec<Book>, Box<dyn std::error::Error>> {
    let shelf = std::env::var("GOODREADS_SHELF")?;
    let html = Html::parse_document(
        &reqwest::get(&shelf)
            .await
            .map_err(|err| Box::new(err) as Box<dyn std::error::Error>)?
            .text()
            .await
            .map_err(|err| Box::new(err) as Box<dyn std::error::Error>)?,
    );

    let row_selector = Selector::parse(r"tr.bookalike.review").unwrap();

    let title_selector = Selector::parse(r"td.field.title a").unwrap();
    let author_selector = Selector::parse(r"td.field.author a").unwrap();

    Ok(html
        .select(&row_selector)
        .filter_map(|row| {
            let title_element = row.select(&title_selector).next()?;
            let title_href = title_element.value().attr("href")?;

            let author_element = row.select(&author_selector).next()?;
            let author_href = author_element.value().attr("href")?;

            Some(Book {
                title: clean_text(&title_element.text().collect::<Vec<_>>().concat()),
                author: swap_name_order(&author_element.text().collect::<Vec<_>>().concat())
                    .ok()?,
                title_url: create_goodreads_url(title_href).ok()?,
                author_url: create_goodreads_url(author_href).ok()?,
            })
        })
        .take(n as usize)
        .collect())
}
78 changes: 42 additions & 36 deletions src/dynamic_content/lastfm.rs
@@ -1,6 +1,8 @@
use super::cache::ApiRefresh;
use super::ApiRefresh;
use cached::proc_macro::once;
use url::Url;

#[derive(Clone)]
pub struct Song {
    pub title: String,
    pub artist_name: String,
@@ -13,41 +15,45 @@ impl ApiRefresh for Song {
    type Content = Song;

    async fn fetch_newest(n: u32) -> Result<std::vec::Vec<Song>, Box<dyn std::error::Error>> {
        let key = std::env::var("LASTFM_KEY")?;
        let username = std::env::var("LASTFM_USERNAME")?;

        let url = format!("https://ws.audioscrobbler.com/2.0/?method=user.getrecenttracks&user={username}&api_key={key}&format=json");

        let response = reqwest::get(&url)
            .await
            .map_err(|err| Box::new(err) as Box<dyn std::error::Error>)?
            .text()
            .await
            .map_err(|err| Box::new(err) as Box<dyn std::error::Error>)?;

        let json: serde_json::Value = serde_json::from_str(&response)
            .map_err(|err| Box::new(err) as Box<dyn std::error::Error>)?;

        let tracks = match json["recenttracks"]["track"].as_array() {
            Some(tracks) => tracks.clone(),
            None => return Ok(Vec::new()),
        };

        Ok(tracks
            .iter()
            .filter_map(|track| {
                Some(Song {
                    title: track["name"].as_str()?.to_string(),
                    artist_name: track["artist"]["#text"].as_str()?.to_string(),
                    album_name: track["album"]["#text"].as_str()?.to_string(),
                    album_image: Url::parse(
                        track["image"].as_array()?.get(1)?.get("#text")?.as_str()?,
                    )
                    .ok()?,
                    url: Url::parse(track["url"].as_str()?).ok()?,
                })
            })
            .take(n as usize)
            .collect())
        fetch_newest_songs(n).await
    }
}

// 20 min
#[once(result = true, time = 1200)]
async fn fetch_newest_songs(n: u32) -> Result<std::vec::Vec<Song>, Box<dyn std::error::Error>> {
    let key = std::env::var("LASTFM_KEY")?;
    let username = std::env::var("LASTFM_USERNAME")?;

    let url = format!("https://ws.audioscrobbler.com/2.0/?method=user.getrecenttracks&user={username}&api_key={key}&format=json");

    let response = reqwest::get(&url)
        .await
        .map_err(|err| Box::new(err) as Box<dyn std::error::Error>)?
        .text()
        .await
        .map_err(|err| Box::new(err) as Box<dyn std::error::Error>)?;

    let json: serde_json::Value = serde_json::from_str(&response)
        .map_err(|err| Box::new(err) as Box<dyn std::error::Error>)?;

    let tracks = match json["recenttracks"]["track"].as_array() {
        Some(tracks) => tracks.clone(),
        None => return Ok(Vec::new()),
    };

    Ok(tracks
        .iter()
        .filter_map(|track| {
            Some(Song {
                title: track["name"].as_str()?.to_string(),
                artist_name: track["artist"]["#text"].as_str()?.to_string(),
                album_name: track["album"]["#text"].as_str()?.to_string(),
                album_image: Url::parse(track["image"].as_array()?.get(1)?.get("#text")?.as_str()?)
                    .ok()?,
                url: Url::parse(track["url"].as_str()?).ok()?,
            })
        })
        .take(n as usize)
        .collect())
}
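The cache windows are tuned per source: 15 minutes for GitHub events, 20 minutes for Last.fm scrobbles, and a full day for the Goodreads shelf, presumably matching how quickly each feed actually changes.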
(diff for the seventh changed file not loaded)
