download and parse games
This commit is contained in:
parent
b4dd1b7be5
commit
2e149fd20c
4 changed files with 1881 additions and 0 deletions
2
.gitignore
vendored
2
.gitignore
vendored
|
|
@ -19,3 +19,5 @@ target
|
|||
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||
#.idea/
|
||||
|
||||
data
|
||||
|
|
|
|||
1712
Cargo.lock
generated
Normal file
1712
Cargo.lock
generated
Normal file
File diff suppressed because it is too large
Load diff
11
Cargo.toml
Normal file
11
Cargo.toml
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
[package]
|
||||
name = "pips"
|
||||
version = "0.1.0"
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
chrono = { version = "0.4.42", features = ["serde"] }
|
||||
reqwest = { version = "0.12.24", features = ["json"] }
|
||||
serde = "1.0.228"
|
||||
serde_json = "1.0.145"
|
||||
tokio = { version = "1.48.0", features = ["full"] }
|
||||
156
src/main.rs
Normal file
156
src/main.rs
Normal file
|
|
@ -0,0 +1,156 @@
|
|||
use std::path::{Path, PathBuf};

use chrono::{NaiveDate, Utc};
use reqwest::{Client, Url};
use serde::{Deserialize, Deserializer, de};
use serde_json::Value;
use tokio::fs::{self, File};
use tokio::io::AsyncWriteExt;
use tokio::time::{Duration, sleep};
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
fetch_all_games().await?;
|
||||
|
||||
let pips = get_todays_game().await?;
|
||||
println!("{:?}", pips);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn get_pips(path: &PathBuf) -> Result<Pips, Box<dyn std::error::Error>> {
|
||||
let data = fs::read_to_string(&path).await?;
|
||||
let pips: Pips = serde_json::from_str(&data)?;
|
||||
Ok(pips)
|
||||
}
|
||||
|
||||
const FIRST_GAME_DATE: &str = "2025-08-18";
|
||||
|
||||
async fn fetch_all_games() -> Result<(), Box<dyn std::error::Error>> {
|
||||
let mut cursor = NaiveDate::parse_from_str(FIRST_GAME_DATE, "%Y-%m-%d")?;
|
||||
let today = Utc::now().date_naive();
|
||||
|
||||
while cursor <= today {
|
||||
println!("checking {}", cursor);
|
||||
let (url, output_path) = get_paths(&cursor.to_string())?;
|
||||
if !fs::try_exists(&output_path).await? {
|
||||
download_json(&url, &output_path).await?;
|
||||
sleep(Duration::from_millis(500)).await;
|
||||
}
|
||||
cursor = cursor
|
||||
.checked_add_signed(chrono::Duration::days(1))
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn get_todays_game() -> Result<Pips, Box<dyn std::error::Error>> {
|
||||
let today = Utc::now().date_naive();
|
||||
let (_, path) = get_paths(&today.to_string())?;
|
||||
let pips = get_pips(&path).await?;
|
||||
Ok(pips)
|
||||
}
|
||||
|
||||
fn get_paths(date_str: &str) -> Result<(Url, PathBuf), Box<dyn std::error::Error>> {
|
||||
let file_name = format!("{}.json", date_str);
|
||||
let base_url = Url::parse("https://www.nytimes.com/svc/pips/v1/")?;
|
||||
let url = base_url.join(&file_name)?;
|
||||
|
||||
let output_name = url
|
||||
.path_segments()
|
||||
.and_then(|mut seg| seg.next_back())
|
||||
.unwrap()
|
||||
.to_owned();
|
||||
let output_path = std::path::Path::new("data").join(output_name);
|
||||
|
||||
Ok((url, output_path))
|
||||
}
|
||||
|
||||
async fn download_json(url: &Url, output_path: &PathBuf) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let client = Client::new();
|
||||
let response = client.get(url.clone()).send().await?.error_for_status()?;
|
||||
let bytes = response.bytes().await?;
|
||||
|
||||
let json: Value = serde_json::from_slice(&bytes)?;
|
||||
let pretty = serde_json::to_string_pretty(&json)?;
|
||||
|
||||
if let Some(parent) = output_path.parent() {
|
||||
fs::create_dir_all(parent).await?;
|
||||
}
|
||||
let mut file = File::create(output_path).await?;
|
||||
file.write_all(pretty.as_bytes()).await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// One day's puzzle payload as fetched from the Pips JSON endpoint.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct Pips {
    /// Publication date string — presumably `YYYY-MM-DD`; TODO confirm.
    print_date: String,
    /// Editor credit as displayed.
    editor: String,
    /// Difficulty slots. The API sometimes sends placeholder values
    /// (null, "", 0, or empty/all-placeholder containers), which the
    /// custom deserializer maps to `None`.
    #[serde(deserialize_with = "placeholder_as_none")]
    easy: Option<Game>,
    #[serde(deserialize_with = "placeholder_as_none")]
    medium: Option<Game>,
    #[serde(deserialize_with = "placeholder_as_none")]
    hard: Option<Game>,
}
|
||||
|
||||
/// A single puzzle board at one difficulty level.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct Game {
    /// Numeric puzzle id.
    id: u32,
    /// Backend identifier — presumably an internal slug; TODO confirm.
    backend_id: String,
    /// Constructor credit as a single display string.
    constructors: String,
    /// Domino tiles available in this puzzle.
    dominoes: Vec<Domino>,
    /// Board regions and their constraints.
    regions: Vec<Region>,
    /// Presumably the solved placement, one entry per domino — verify
    /// against actual payloads.
    solution: Vec<Coord>,
}
|
||||
|
||||
/// A domino tile: its two pip values — TODO confirm ordering semantics.
#[derive(Debug, Deserialize)]
struct Domino([u8; 2]);

/// A board cell index pair — assumed `[row, col]`; TODO confirm axis order.
#[derive(Debug, Deserialize)]
struct Index([u8; 2]);

/// The pair of cells one domino occupies — presumably in the solution
/// layout; verify against the payload.
#[derive(Debug, Deserialize)]
struct Coord([Index; 2]);
|
||||
|
||||
/// A board region constraint, discriminated in JSON by its `type` field
/// (internally tagged enum, camelCase variant names).
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
#[serde(tag = "type")]
enum Region {
    /// Region with no numeric constraint.
    Empty { indices: Vec<Index> },
    /// NOTE(review): presumably all pip values equal — confirm semantics.
    Equals { indices: Vec<Index> },
    /// NOTE(review): presumably all pip values distinct — confirm semantics.
    Unequal { indices: Vec<Index> },
    /// NOTE(review): presumably pip values sum to `target` — confirm.
    Sum { indices: Vec<Index>, target: u16 },
    /// NOTE(review): less-than constraint against `target`; whether it
    /// applies per-cell or to the sum is not shown here — confirm.
    Less { indices: Vec<Index>, target: u16 },
    /// Greater-than counterpart of `Less` — same caveat.
    Greater { indices: Vec<Index>, target: u16 },
}
|
||||
|
||||
fn placeholder_as_none<'de, D>(deserializer: D) -> Result<Option<Game>, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
let value = Value::deserialize(deserializer)?;
|
||||
|
||||
if is_placeholder(&value) {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
serde_json::from_value(value)
|
||||
.map(Some)
|
||||
.map_err(de::Error::custom)
|
||||
}
|
||||
|
||||
fn is_placeholder(value: &Value) -> bool {
|
||||
match value {
|
||||
Value::Null => true,
|
||||
Value::String(s) => s.is_empty(),
|
||||
Value::Number(n) => n.as_i64() == Some(0),
|
||||
Value::Array(items) => items.is_empty() || items.iter().all(is_placeholder),
|
||||
Value::Object(map) => map.is_empty() || map.values().all(is_placeholder),
|
||||
Value::Bool(_) => false,
|
||||
}
|
||||
}
|
||||
Loading…
Reference in a new issue