init
commit da40e48b19
.direnv/bin/nix-direnv-reload (Executable file, 19 lines)
@@ -0,0 +1,19 @@
#!/usr/bin/env bash
set -e
if [[ ! -d "/home/lelgenio/projects/git/warthunder-confidential-document-leak-counter" ]]; then
  echo "Cannot find source directory; Did you move it?"
  echo "(Looking for "/home/lelgenio/projects/git/warthunder-confidential-document-leak-counter")"
  echo 'Cannot force reload with this script - use "direnv reload" manually and then try again'
  exit 1
fi

# rebuild the cache forcefully
_nix_direnv_force_reload=1 direnv exec "/home/lelgenio/projects/git/warthunder-confidential-document-leak-counter" true

# Update the mtime for .envrc.
# This will cause direnv to reload again - but without re-building.
touch "/home/lelgenio/projects/git/warthunder-confidential-document-leak-counter/.envrc"

# Also update the timestamp of whatever profile_rc we have.
# This makes sure that we know we are up to date.
touch -r "/home/lelgenio/projects/git/warthunder-confidential-document-leak-counter/.envrc" "/home/lelgenio/projects/git/warthunder-confidential-document-leak-counter/.direnv"/*.rc
.gitignore (vendored, Normal file, 2 lines)
@@ -0,0 +1,2 @@
/target
.direnv
Cargo.lock (generated, Normal file, 2369 lines)
File diff suppressed because it is too large.
Cargo.toml (Normal file, 21 lines)
@@ -0,0 +1,21 @@
[package]
name = "warthunder-confidential-document-leak-counter"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
anyhow = "1.0.86"
askama = { version = "0.12.1", features = ["with-axum"] }
askama_axum = "0.4.0"
axum = { version = "0.7.5", features = ["ws", "macros"] }
regex = "1.10.5"
reqwest = "0.12.5"
soup = "0.5.1"
time = "0.3.36"
tokio = { version = "1.38.0", features = ["full"] }
tower-http = { version = "0.5.2", features = ["trace", "fs"] }
tracing = "0.1.40"
tracing-subscriber = "0.3.18"
data/wikipedia.html (Normal file, 1282 lines)
File diff suppressed because one or more lines are too long.
flake.lock (Normal file, 61 lines)
@@ -0,0 +1,61 @@
{
  "nodes": {
    "flake-utils": {
      "inputs": {
        "systems": "systems"
      },
      "locked": {
        "lastModified": 1681202837,
        "narHash": "sha256-H+Rh19JDwRtpVPAWp64F+rlEtxUWBAQW28eAi3SRSzg=",
        "owner": "numtide",
        "repo": "flake-utils",
        "rev": "cfacdce06f30d2b68473a46042957675eebb3401",
        "type": "github"
      },
      "original": {
        "owner": "numtide",
        "repo": "flake-utils",
        "type": "github"
      }
    },
    "nixpkgs": {
      "locked": {
        "lastModified": 1719010183,
        "narHash": "sha256-8HMWaqpyjbVeEsmy/A2H6VFtW/Wr71vkPLnpTiAXu+8=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "0f620ca71fa69abb411a6c78739a9b171a0a95a6",
        "type": "github"
      },
      "original": {
        "owner": "NixOS",
        "ref": "release-24.05",
        "repo": "nixpkgs",
        "type": "github"
      }
    },
    "root": {
      "inputs": {
        "flake-utils": "flake-utils",
        "nixpkgs": "nixpkgs"
      }
    },
    "systems": {
      "locked": {
        "lastModified": 1681028828,
        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
        "owner": "nix-systems",
        "repo": "default",
        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
        "type": "github"
      },
      "original": {
        "owner": "nix-systems",
        "repo": "default",
        "type": "github"
      }
    }
  },
  "root": "root",
  "version": 7
}
flake.nix (Normal file, 38 lines)
@@ -0,0 +1,38 @@
{
  inputs = {
    nixpkgs.url = "github:NixOS/nixpkgs/release-24.05";

    flake-utils.url = "github:numtide/flake-utils";
  };

  outputs =
    {
      self,
      nixpkgs,
      flake-utils,
      ...
    }:
    flake-utils.lib.eachDefaultSystem (
      system:
      let
        pkgs = import nixpkgs { inherit system; };
        inherit (pkgs) lib;
      in
      {
        devShells.default = pkgs.mkShell {
          nativeBuildInputs = with pkgs; [
            rustc
            cargo
            rustfmt
            rust-analyzer
            clippy
            cargo-feature
            cargo-watch
            pkg-config
            openssl
            curl
          ];
        };
      }
    );
}
scripts/update-test-sources.sh (Executable file, 8 lines)
@@ -0,0 +1,8 @@
#!/bin/sh

set -xe

SCRIPT_PATH="$(realpath "$0")"
ROOT="$(dirname "$(dirname "$SCRIPT_PATH")")"

curl https://en.wikipedia.org/wiki/War_Thunder > "$ROOT/data/wikipedia.html"
src/controllers/home.rs (Normal file, 59 lines)
@@ -0,0 +1,59 @@
use askama::Template;
use time::Date;

use crate::sources;

#[derive(Template)]
#[template(path = "index.html")]
pub struct HomeTemplate {
    time_since: TimeSince,
}

pub struct TimeSince {
    days: i32,
}

impl TimeSince {
    fn from_interval(leak: &Date, now: &Date) -> Self {
        Self {
            days: now.to_julian_day() - leak.to_julian_day(),
        }
    }
}

#[axum::debug_handler]
pub async fn get() -> HomeTemplate {
    let mut t = vec![];

    for source in sources::sources() {
        let url = source.url();
        let Ok(res) = (reqwest::get(url)).await else {
            tracing::error!("fetch error");
            continue;
        };

        let Ok(text) = res.text().await else {
            tracing::error!("fetch decode text error");
            continue;
        };

        let Ok(last) = source.latest_leak(text) else {
            tracing::error!("source decode error");
            continue;
        };

        t.push(last);
    }

    let last = t
        .into_iter()
        .max()
        .unwrap_or(time::Date::from_calendar_date(2021, time::Month::July, 14).unwrap());

    let now = time::OffsetDateTime::now_utc();
    let now = time::Date::from_calendar_date(now.year(), now.month(), now.day()).unwrap();

    HomeTemplate {
        time_since: TimeSince::from_interval(&last, &now),
    }
}
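Note on the arithmetic: TimeSince::from_interval is just the difference of the two dates' Julian day numbers, so it counts whole days regardless of month boundaries. A quick illustrative check (not part of this commit):

#[test]
fn julian_day_difference_counts_whole_days() {
    // A leak on 2023-12-12 viewed on 2024-01-01 is 20 days ago.
    let leak = time::Date::from_calendar_date(2023, time::Month::December, 12).unwrap();
    let now = time::Date::from_calendar_date(2024, time::Month::January, 1).unwrap();
    assert_eq!(now.to_julian_day() - leak.to_julian_day(), 20);
}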
src/controllers/mod.rs (Normal file, 1 line)
@@ -0,0 +1 @@
pub mod home;
src/lib.rs (Normal file, 51 lines)
@@ -0,0 +1,51 @@
use std::{future::Future, pin::Pin};

use anyhow::Result;
use axum::{routing::get, Router};
use tokio::net::TcpListener;
use tower_http::services::ServeDir;

mod controllers;
mod sources;

fn routes() -> Router {
    Router::new()
        .route("/", get(controllers::home::get))
        .fallback_service(ServeDir::new("./static"))
}

pub struct Config {
    pub port: u16,
}

pub struct RunningServer {
    pub port: u16,
    pub server: Pin<Box<dyn Future<Output = anyhow::Result<()>> + Send>>,
}

pub async fn run(config: Config) -> Result<RunningServer> {
    setup_tracing();

    let router = routes().layer(tower_http::trace::TraceLayer::new_for_http());

    let tcp_listener = TcpListener::bind(format!("0.0.0.0:{}", config.port)).await?;

    let port = tcp_listener.local_addr()?.port();

    tracing::info!("Listening on http://localhost:{port}");
    let server = Box::pin(async move {
        axum::serve(tcp_listener, router).await?;
        Ok(())
    });

    Ok(RunningServer { port, server })
}

pub fn setup_tracing() {
    use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};

    tracing_subscriber::registry()
        .with(tracing_subscriber::fmt::layer())
        .try_init()
        .ok();
}
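Because run() returns the bound port alongside the server future, a caller can pass port 0 and let the OS pick a free one. A minimal sketch of how that might be exercised from a test inside this crate (illustrative only; the test name and the reqwest probe are assumptions, not part of this commit):

#[tokio::test]
async fn serves_on_an_ephemeral_port() {
    // Port 0 asks the OS for any free port; run() reports the real port back.
    let running = run(Config { port: 0 }).await.unwrap();

    // Drive the server in the background while probing it.
    tokio::spawn(running.server);

    let res = reqwest::get(format!("http://localhost:{}", running.port))
        .await
        .unwrap();
    assert!(res.status().is_success());
}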
src/main.rs (Normal file, 16 lines)
@@ -0,0 +1,16 @@
use anyhow::Result;
use warthunder_confidential_document_leak_counter::{run, Config};

#[tokio::main]
async fn main() -> Result<()> {
    let config = Config {
        port: std::env::var("WARTHUNDER_LEAK_SERVE_PORT")
            .ok()
            .and_then(|p| p.parse().ok())
            .unwrap_or(8000u16),
    };

    run(config).await?.server.await?;

    Ok(())
}
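The port lookup here degrades silently: a missing or unparsable WARTHUNDER_LEAK_SERVE_PORT falls back to 8000 rather than erroring out. A hypothetical helper mirroring that chain, with a small test (illustrative only, not part of this commit):

// Mirrors the .and_then(parse).unwrap_or(8000) chain used in main().
fn port_or_default(var: Option<&str>) -> u16 {
    var.and_then(|p| p.parse().ok()).unwrap_or(8000u16)
}

#[test]
fn falls_back_to_8000_on_missing_or_bad_port() {
    assert_eq!(port_or_default(None), 8000);
    assert_eq!(port_or_default(Some("not-a-port")), 8000);
    assert_eq!(port_or_default(Some("3000")), 3000);
}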
src/sources/mod.rs (Normal file, 14 lines)
@@ -0,0 +1,14 @@
use anyhow::Result;

mod wikipedia;

pub trait Source {
    /// Return the URL to query
    fn url(&self) -> String;
    /// Given the content of the url figure out the date of the latest leak
    fn latest_leak(&self, html: String) -> Result<time::Date>;
}

pub fn sources() -> Vec<Box<dyn Source + Send>> {
    vec![Box::new(wikipedia::Wikipedia)]
}
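The Source trait is the extension point for additional data sources; sources() would simply push another boxed value. A hypothetical sketch of a second source (ExampleTracker and its URL are placeholders, not part of this commit):

// A stub source: it has a URL but never yields a date, so home::get just
// logs the error and skips it.
struct ExampleTracker;

impl Source for ExampleTracker {
    fn url(&self) -> String {
        "https://example.com/warthunder-leaks".to_string()
    }

    fn latest_leak(&self, html: String) -> Result<time::Date> {
        // A real implementation would scrape `html` here.
        let _ = html;
        anyhow::bail!("ExampleTracker is a placeholder and parses nothing")
    }
}

Registering it would amount to changing sources() to return vec![Box::new(wikipedia::Wikipedia), Box::new(ExampleTracker)].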
src/sources/wikipedia/mod.rs (Normal file, 115 lines)
@@ -0,0 +1,115 @@
use std::{str::FromStr, time::Instant};

use super::Source;
use anyhow::{bail, Context, Result};
use regex::Regex;
use soup::{NodeExt, QueryBuilderExt};

pub struct Wikipedia;

impl Source for Wikipedia {
    fn url(&self) -> String {
        "https://en.wikipedia.org/wiki/War_Thunder".to_string()
    }

    fn latest_leak(&self, html: String) -> Result<time::Date> {
        let soup = soup::Soup::new(&html);

        let tables = soup.tag("table").find_all();

        let tables_with_classified = tables
            .into_iter()
            .filter(|t| t.text().contains("Classified"))
            .collect::<Vec<_>>();

        let table = match &tables_with_classified[..] {
            [table] => table,
            _ => bail!("Cannot reliably find leaks table"),
        };

        let lines: Vec<String> = table
            .tag("tbody")
            .find()
            .context("Could not find table body")?
            .tag("tr")
            .find_all()
            .flat_map(|line| line.tag("td").find())
            .map(|td| td.text())
            .collect();

        lines
            .iter()
            .flat_map(|txt| parse_wikipedia_date(txt))
            .max()
            .context("Could not find any date?")
    }
}

fn parse_wikipedia_date(text: &str) -> Result<time::Date> {
    let full_regex = Regex::new(r"(\w+)\s+(\d+),?\s+(\d+)").unwrap();

    if let Some(cap) = full_regex.captures(text) {
        let (_, [month, day, year]) = cap.extract();

        let month = time::Month::from_str(month);

        return time::Date::from_calendar_date(
            year.parse().context("Failed to parse year")?,
            month.context("Failed to parse month")?,
            day.parse().context("Failed to parse day")?,
        )
        .context("Failed to create date from provided text");
    }

    let small_regex = Regex::new(r"(\w+) (\d+)").unwrap();
    if let Some(cap) = small_regex.captures(text) {
        let (_, [month, year]) = cap.extract();

        let month = time::Month::from_str(month);

        return time::Date::from_calendar_date(
            year.parse().context("Failed to parse year")?,
            month.context("Failed to parse month")?,
            1,
        )
        .context("Failed to create date from provided text");
    }

    bail!("Failed to parse wikipedia date")
}

#[test]
fn test_wikipedia_html_parse() {
    let html = std::fs::read_to_string("./data/wikipedia.html").unwrap();

    let real = Wikipedia.latest_leak(html).unwrap();
    let expected = time::Date::from_calendar_date(2023, time::Month::December, 12).unwrap();

    assert_eq!(expected, real);
}

#[test]
fn test_wikipedia_date_parse() {
    assert!(parse_wikipedia_date("testing 123, 1234").is_err());
    assert_eq!(
        parse_wikipedia_date("July 14, 2021").unwrap(),
        time::Date::from_calendar_date(2021, time::Month::July, 14).unwrap()
    );
    assert_eq!(
        parse_wikipedia_date(" July 14, 2021 ").unwrap(),
        time::Date::from_calendar_date(2021, time::Month::July, 14).unwrap()
    );
    assert_eq!(
        parse_wikipedia_date("July 14 2021").unwrap(),
        time::Date::from_calendar_date(2021, time::Month::July, 14).unwrap()
    );

    assert_eq!(
        parse_wikipedia_date("October 2021").unwrap(),
        time::Date::from_calendar_date(2021, time::Month::October, 1).unwrap()
    );
    assert_eq!(
        parse_wikipedia_date("october 2021").unwrap(),
        time::Date::from_calendar_date(2021, time::Month::October, 1).unwrap()
    );
}
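One detail worth noting in parse_wikipedia_date: the three-capture regex must be tried before the month-year fallback, because the looser pattern also matches full dates and would misread the day as the year. An illustrative check (not part of this commit):

#[test]
fn small_regex_would_misread_a_full_date() {
    // (\w+) (\d+) happily matches "July 14, 2021" as month = "July",
    // "year" = "14", which is why the full pattern is checked first.
    let small = regex::Regex::new(r"(\w+) (\d+)").unwrap();
    let (_, [month, year]) = small.captures("July 14, 2021").unwrap().extract();
    assert_eq!((month, year), ("July", "14"));
}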
static/fonts/amarurgt.ttf (Normal file, BIN)
Binary file not shown.
static/images/background.jpg (Normal file, BIN)
Binary file not shown (size: 1.7 MiB).
static/styles/main.css (Normal file, 54 lines)
@@ -0,0 +1,54 @@
* {
    margin: 0;
    padding: 0;
    font-family: "AmarilloUSAF";
}

@font-face {
    font-family: "AmarilloUSAF";
    src: url("/fonts/amarurgt.ttf") format("truetype");
}

.background-container {
    position: fixed;
    z-index: -1;
}
.background-image,
.background-filter {
    position: fixed;
    width: 100%;
    height: 100%;
}
.background-image {
    object-fit: cover;
    filter: blur(10px);
    scale: 1.1;
}
.background-filter {
    background-color: #1b1b1ccc;
}

body {
    color: #fff;
}

.main-title {
    text-align: center;
    display: flex;
    flex-direction: column;
    justify-content: center;
    min-height: 50vh;
}

.time {
    font-size: 4rem;
    color: #EF2843;
}

.time-label {
    font-size: 2rem;
}

.subtitle {
    font-family: "AmarilloUSAF";
}
templates/base.html (Normal file, 18 lines)
@@ -0,0 +1,18 @@
<!doctype html>
<html lang="en">
<head>
    <meta charset="UTF-8" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>{% block title %} Warthunder Leak Counter {% endblock %}</title>
    <link rel="stylesheet" href="/styles/main.css">
</head>
<body>
    <div class="background-container">
        <img src="/images/background.jpg" class="background-image">
        <div class="background-filter"></div>
    </div>
    {% block content %}
    <p>Placeholder content</p>
    {% endblock %}
</body>
</html>
templates/index.html (Normal file, 21 lines)
@@ -0,0 +1,21 @@
{% extends "base.html" %}

{% block title %}
Days since last Warthunder leak
{% endblock %}

{% block content %}
<div class="main-title">
    <h1>
        <span class="time">
            {{ time_since.days }}
        </span>
        <span class="time-label">
            DAYS
        </span>
    </h1>
    <h2 class="subtitle">
        SINCE THE WARTHUNDER COMMUNITY LAST LEAKED CONFIDENTIAL DOCUMENTS
    </h2>
</div>
{% endblock %}