deploy config

This commit is contained in:
lelgenio 2024-06-22 13:42:24 -03:00
parent da40e48b19
commit d0a7e7ec88
10 changed files with 231 additions and 39 deletions

View file

@@ -1,7 +1,8 @@
use askama::Template;
use axum::extract::State;
use time::Date;
use crate::sources;
use crate::{sources, AppState};
#[derive(Template)]
#[template(path = "index.html")]
@@ -22,18 +23,53 @@ impl TimeSince {
}
#[axum::debug_handler]
pub async fn get() -> HomeTemplate {
pub async fn get(state: State<AppState>) -> HomeTemplate {
let mut t = vec![];
for source in sources::sources() {
let url = source.url();
let Ok(res) = (reqwest::get(url)).await else {
tracing::error!("fetch error");
continue;
let mut cache = state.0.get_cache.lock().await;
let now = time::OffsetDateTime::now_utc();
let needs_update = match cache.get(&url) {
None => {
tracing::info!("Value is not present in cache");
true
}
Some((cached_time, _)) => {
let other_day = cached_time.to_julian_day() != now.to_julian_day();
let other_hour = cached_time.hour() != now.hour();
if other_day {
tracing::info!("Value is from another day");
}
if other_hour {
tracing::info!("Value is from another hour");
}
other_day || other_hour
}
};
let Ok(text) = res.text().await else {
tracing::error!("fetch decode text error");
if needs_update {
tracing::info!("Need update cache");
let Ok(res) = (reqwest::get(url.clone())).await else {
tracing::error!("fetch error");
continue;
};
let Ok(text) = res.text().await else {
tracing::error!("fetch decode text error");
continue;
};
tracing::info!("Cache updated");
cache.insert(url.clone(), (now, text));
}
let Some((_, text)) = cache.get(&url) else {
tracing::error!("filling cache error");
continue;
};

View file

@@ -1,8 +1,9 @@
use std::{future::Future, pin::Pin};
use std::{collections::HashMap, future::Future, pin::Pin, sync::Arc};
use anyhow::Result;
use axum::{routing::get, Router};
use tokio::net::TcpListener;
use time::OffsetDateTime;
use tokio::{net::TcpListener, sync::Mutex};
use tower_http::services::ServeDir;
mod controllers;
@@ -11,7 +12,12 @@ mod sources;
fn routes() -> Router {
Router::new()
.route("/", get(controllers::home::get))
.fallback_service(ServeDir::new("./static"))
.with_state(AppState::default())
}
#[derive(Default, Clone)]
pub struct AppState {
get_cache: Arc<Mutex<HashMap<String, (OffsetDateTime, String)>>>,
}
pub struct Config {
@@ -26,7 +32,11 @@ pub struct RunningServer {
pub async fn run(config: Config) -> Result<RunningServer> {
setup_tracing();
let router = routes().layer(tower_http::trace::TraceLayer::new_for_http());
let static_dir = std::env::var("WARTHUNDER_LEAK_STATIC_DIR").unwrap_or("./static".to_string());
let router = routes()
.fallback_service(ServeDir::new(static_dir))
.layer(tower_http::trace::TraceLayer::new_for_http());
let tcp_listener = TcpListener::bind(format!("0.0.0.0:{}", config.port)).await?;
@@ -44,7 +54,12 @@ pub async fn run(config: Config) -> Result<RunningServer> {
pub fn setup_tracing() {
use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
let log_filter = std::env::var("WARTHUNDER_LOG").unwrap_or_else(|_| "warthunder_leak_counter=debug,warn".into());
eprintln!("RUST_LOG: {log_filter}");
tracing_subscriber::registry()
.with(tracing_subscriber::EnvFilter::new(log_filter))
.with(tracing_subscriber::fmt::layer())
.try_init()
.ok();

View file

@@ -1,5 +1,5 @@
use anyhow::Result;
use warthunder_confidential_document_leak_counter::{run, Config};
use warthunder_leak_counter::{run, Config};
#[tokio::main]
async fn main() -> Result<()> {

View file

@@ -6,7 +6,7 @@ pub trait Source {
/// Return the URL to query
fn url(&self) -> String;
/// Given the content of the url figure out the date of the latest leak
fn latest_leak(&self, html: String) -> Result<time::Date>;
fn latest_leak(&self, html: &str) -> Result<time::Date>;
}
pub fn sources() -> Vec<Box<dyn Source + Send>> {

View file

@@ -1,4 +1,4 @@
use std::{str::FromStr, time::Instant};
use std::{str::FromStr};
use super::Source;
use anyhow::{bail, Context, Result};
@@ -12,8 +12,8 @@ impl Source for Wikipedia {
"https://en.wikipedia.org/wiki/War_Thunder".to_string()
}
fn latest_leak(&self, html: String) -> Result<time::Date> {
let soup = soup::Soup::new(&html);
fn latest_leak(&self, html: &str) -> Result<time::Date> {
let soup = soup::Soup::new(html);
let tables = soup.tag("table").find_all();
@@ -82,7 +82,7 @@ fn parse_wikipedia_date(text: &str) -> Result<time::Date> {
fn test_wikipedia_html_parse() {
let html = std::fs::read_to_string("./data/wikipedia.html").unwrap();
let real = Wikipedia.latest_leak(html).unwrap();
let real = Wikipedia.latest_leak(&html).unwrap();
let expected = time::Date::from_calendar_date(2023, time::Month::December, 12).unwrap();
assert_eq!(expected, real);
@@ -108,8 +108,4 @@ fn test_wikipedia_date_parse() {
parse_wikipedia_date("October 2021").unwrap(),
time::Date::from_calendar_date(2021, time::Month::October, 1).unwrap()
);
assert_eq!(
parse_wikipedia_date("october 2021").unwrap(),
time::Date::from_calendar_date(2021, time::Month::October, 1).unwrap()
);
}