diff --git a/src/datasets.rs b/src/datasets.rs
index cfdbac3..13ae359 100644
--- a/src/datasets.rs
+++ b/src/datasets.rs
@@ -72,7 +72,7 @@ pub fn new_dataset(connection: &Connection, dataset: &str) -> sqlite::Result<()>
     connection.execute(query)
 }
 
-pub fn get_last_sync(connection: &Connection, dataset: &str) -> sqlite::Result<Option<u64>> {
+pub fn get_last_sync(connection: &Connection, dataset: &str) -> sqlite::Result<Option<Timestamp>> {
     let query = "SELECT last_sync FROM datasets WHERE name = ?";
 
     Ok(connection
@@ -81,7 +81,8 @@ pub fn get_last_sync(connection: &Connection, dataset: &str) -> sqlite::Result<Option<u64>> {
         .bind((1, dataset))?
         .map(|r| r.map(|x| x.read::<i64, _>("last_sync").to_owned() as u64))
         .next()
-        .and_then(Result::ok))
+        .and_then(Result::ok)
+        .map(Timestamp))
 }
 
 pub fn update_last_sync(connection: &Connection, dataset: &str, sync: u64) -> sqlite::Result<()> {
diff --git a/src/main.rs b/src/main.rs
index da95457..e0e4d2b 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -3,6 +3,7 @@ use clap::{Parser, Subcommand};
 use std::io::{self, Write};
 use std::path::PathBuf;
+use std::process::exit;
 use std::time::SystemTime;
 
 mod queries;
@@ -12,6 +13,11 @@ use datasets::*;
 mod sync;
 use sync::*;
 
+pub fn error(msg: &str, code: i32) -> ! {
+    println!("\nERROR: {}", msg);
+    exit(code)
+}
+
 /// ## CLI Structs
 
 #[derive(Parser)]
@@ -104,65 +110,91 @@ fn main() {
 }
 
 fn dataset_list() {
-    let config_dir = dirs::config_dir().unwrap();
+    let config_dir = dirs::config_dir().expect("Could not determine config directory");
 
-    let connection = open_datasets(&config_dir).unwrap();
-    let datasets = list_datasets(&connection).unwrap();
+    let connection =
+        open_datasets(&config_dir).unwrap_or_else(|_| error("Could not open datasets file", 1));
+    let datasets = list_datasets(&connection).expect("Error communicating with SQLite");
 
     println!("{:?}", datasets);
 }
 
+fn read_string() -> String {
+    let mut line = String::new();
+    io::stdout().flush().expect("Could not access stdout");
+    io::stdin()
+        .read_line(&mut line)
+        .expect("Could not read from stdin");
+    line.trim().to_owned()
+}
+
 fn dataset_new(name: Option<String>) {
-    let config_dir = dirs::config_dir().unwrap();
+    let config_dir = dirs::config_dir().expect("Could not determine config directory");
 
     let name = name.unwrap_or_else(|| {
-        let mut line = String::new();
         print!("Name of new dataset: ");
-        io::stdout().flush().expect("Could not access stdout");
-        io::stdin()
-            .read_line(&mut line)
-            .expect("Could not read from stdin");
-        line.trim().to_owned()
+        read_string()
     });
 
-    let connection = open_datasets(&config_dir).unwrap();
-    new_dataset(&connection, &name).unwrap();
+    let connection =
+        open_datasets(&config_dir).unwrap_or_else(|_| error("Could not open datasets file", 1));
+    new_dataset(&connection, &name).expect("Error communicating with SQLite");
 }
 
 fn dataset_delete(name: Option<String>) {
-    let config_dir = dirs::config_dir().unwrap();
+    let config_dir = dirs::config_dir().expect("Could not determine config directory");
 
     let name = name.unwrap_or_else(|| {
-        let mut line = String::new();
         print!("Dataset to delete: ");
-        io::stdout().flush().expect("Could not access stdout");
-        io::stdin()
-            .read_line(&mut line)
-            .expect("Could not read from stdin");
-        line.trim().to_owned()
+        read_string()
     });
 
-    let connection = open_datasets(&config_dir).unwrap();
-    delete_dataset(&connection, &name).unwrap();
+    let connection =
+        open_datasets(&config_dir).unwrap_or_else(|_| error("Could not open datasets file", 1));
+    delete_dataset(&connection, &name).expect("Error communicating with SQLite");
 }
 
 fn sync(datasets: Vec<String>, all: bool, auth_token: Option<String>) {
     let config_dir = dirs::config_dir().unwrap();
 
-    let auth = auth_token.or_else(|| get_auth_token(&config_dir)).unwrap();
+    let auth = auth_token
+        .or_else(|| get_auth_token(&config_dir))
+        .unwrap_or_else(|| error("Access token not provided", 1));
 
-    let connection = open_datasets(&config_dir).unwrap();
+    let connection =
+        open_datasets(&config_dir).unwrap_or_else(|_| error("Could not open datasets file", 1));
 
+    #[allow(unused_must_use)]
     let datasets = if all {
         list_datasets(&connection).unwrap()
     } else if datasets.len() == 0 {
-        new_dataset(&connection, "default").unwrap();
+        new_dataset(&connection, "default");
         vec![String::from("default")]
     } else {
         datasets
     };
 
     for dataset in datasets {
-        let last_sync = get_last_sync(&connection, &dataset).unwrap().unwrap();
+        let last_sync = get_last_sync(&connection, &dataset)
+            .expect("Error communicating with SQLite")
+            .unwrap_or_else(|| error(&format!("Dataset {} does not exist!", dataset), 1));
+
+        sync_dataset(
+            &connection,
+            &dataset,
+            last_sync,
+            VideogameId(1386),
+            Some("GA"),
+            &auth,
+        )
+        .expect("Error communicating with SQLite");
+
+        let current_time = SystemTime::now()
+            .duration_since(SystemTime::UNIX_EPOCH)
+            .unwrap_or_else(|_| error("System time is before the Unix epoch!", 2))
+            .as_secs();
+
+        update_last_sync(&connection, &dataset, current_time)
+            .expect("Error communicating with SQLite");
     }
 }
diff --git a/src/queries.rs b/src/queries.rs
index 6f1d9de..407d3da 100644
--- a/src/queries.rs
+++ b/src/queries.rs
@@ -2,6 +2,8 @@ use cynic::{GraphQlResponse, QueryBuilder};
 use serde::{Deserialize, Serialize};
 use std::fmt::Debug;
 use std::path::Path;
+use std::thread::sleep;
+use std::time::Duration;
 
 pub mod search_games;
 pub use search_games::*;
@@ -61,7 +63,7 @@ pub struct EntrantId(pub u64);
 #[cynic(graphql_type = "ID")]
 pub struct PlayerId(pub u64);
 
-#[derive(cynic::Scalar, Debug, Clone)]
+#[derive(cynic::Scalar, Debug, Copy, Clone)]
 pub struct Timestamp(pub u64);
 
 // Query machinery
@@ -75,19 +77,28 @@ pub trait QueryUnwrap<Vars>: 'static + QueryBuilder<Vars> {
 
 // Generic function for running start.gg queries
 pub fn run_query<Builder, Vars>(vars: Vars, auth_token: &str) -> Option<Builder::Unwrapped>
 where
-    Builder: Debug,
+    Vars: Clone,
     Builder: QueryUnwrap<Vars>,
     Vars: Serialize,
     for<'de> Builder: Deserialize<'de>,
 {
     use cynic::http::ReqwestBlockingExt;
 
-    let query = Builder::build(vars);
-
-    let response = reqwest::blocking::Client::new()
+    let mut response = reqwest::blocking::Client::new()
         .post("https://api.start.gg/gql/alpha")
         .header("Authorization", String::from("Bearer ") + auth_token)
-        .run_graphql(query);
+        .run_graphql(Builder::build(vars.clone()));
 
-    Builder::unwrap_response(response.unwrap())
+    for _ in 1..10 {
+        if response.is_ok() {
+            break;
+        }
+        sleep(Duration::from_secs(2));
+        response = reqwest::blocking::Client::new()
+            .post("https://api.start.gg/gql/alpha")
+            .header("Authorization", String::from("Bearer ") + auth_token)
+            .run_graphql(Builder::build(vars.clone()));
+    }
+
+    Builder::unwrap_response(response.ok()?)
 }
diff --git a/src/queries/event_sets.rs b/src/queries/event_sets.rs
index 5f671a2..a7249e5 100644
--- a/src/queries/event_sets.rs
+++ b/src/queries/event_sets.rs
@@ -6,10 +6,10 @@ pub type Teams<T> = Vec<Vec<T>>;
 
 // Variables
 
-#[derive(cynic::QueryVariables, Debug)]
+#[derive(cynic::QueryVariables, Debug, Clone)]
 pub struct EventSetsVars {
     pub event: EventId,
-    pub sets_page: i32,
+    pub page: i32,
 }
 
 // Query
@@ -24,16 +24,22 @@ pub struct EventSets {
 #[derive(cynic::QueryFragment, Debug)]
 #[cynic(variables = "EventSetsVars")]
 struct Event {
-    #[arguments(page: $sets_page, perPage: 11)]
+    #[arguments(page: $page, perPage: 50)]
     sets: Option<SetConnection>,
 }
 
 #[derive(cynic::QueryFragment, Debug)]
 struct SetConnection {
+    page_info: Option<PageInfo>,
     #[cynic(flatten)]
     nodes: Vec<Set>,
 }
 
+#[derive(cynic::QueryFragment, Debug)]
+struct PageInfo {
+    total_pages: Option<i32>,
+}
+
 #[derive(cynic::QueryFragment, Debug)]
 struct Set {
     #[arguments(includeByes: true)]
@@ -68,53 +74,60 @@ struct Player {
 
 // Unwrap
 
+pub struct EventSetsResponse {
+    pub pages: u64,
+    pub sets: Vec<SetData>,
+}
+
 pub struct SetData {
-    teams: Teams<PlayerData>,
-    winner: usize,
+    pub teams: Teams<PlayerData>,
+    pub winner: usize,
 }
 
 impl QueryUnwrap<EventSetsVars> for EventSets {
-    type Unwrapped = Vec<SetData>;
+    type Unwrapped = EventSetsResponse;
 
     // This might be the most spaghetti code I've ever written
-    fn unwrap_response(response: GraphQlResponse<EventSets>) -> Option<Vec<SetData>> {
-        Some(
-            response
-                .data?
-                .event?
-                .sets?
-                .nodes
-                .into_iter()
-                .filter_map(|set| {
-                    let winner_id = set.winner_id?;
-                    let winner = set.slots.iter().position(|slot| {
-                        slot.entrant
-                            .as_ref()
-                            .and_then(|x| x.id)
-                            .map(|id| id.0 == winner_id as u64)
-                            .unwrap_or(false)
-                    })?;
-                    let teams = set
-                        .slots
-                        .into_iter()
-                        .map(|slot| {
-                            slot.entrant?
-                                .participants
-                                .into_iter()
-                                .map(|p| {
-                                    let p_ = p.player?;
-                                    Some(PlayerData {
-                                        id: p_.id?,
-                                        name: p_.gamer_tag,
-                                        prefix: p_.prefix,
-                                    })
-                                })
-                                .try_collect()
-                        })
-                        .try_collect()?;
-                    Some(SetData { teams, winner })
-                })
-                .collect::<Vec<_>>(),
-        )
+    fn unwrap_response(response: GraphQlResponse<EventSets>) -> Option<EventSetsResponse> {
+        let response_sets = response.data?.event?.sets?;
+
+        let sets = response_sets
+            .nodes
+            .into_iter()
+            .filter_map(|set| {
+                let winner_id = set.winner_id?;
+                let winner = set.slots.iter().position(|slot| {
+                    slot.entrant
+                        .as_ref()
+                        .and_then(|x| x.id)
+                        .map(|id| id.0 == winner_id as u64)
+                        .unwrap_or(false)
+                })?;
+                let teams = set
+                    .slots
+                    .into_iter()
+                    .map(|slot| {
+                        slot.entrant?
+                            .participants
+                            .into_iter()
+                            .map(|p| {
+                                let p_ = p.player?;
+                                Some(PlayerData {
+                                    id: p_.id?,
+                                    name: p_.gamer_tag,
+                                    prefix: p_.prefix,
+                                })
+                            })
+                            .try_collect()
+                    })
+                    .try_collect()?;
+                Some(SetData { teams, winner })
+            })
+            .collect::<Vec<_>>();
+
+        Some(EventSetsResponse {
+            pages: response_sets.page_info?.total_pages? as u64,
+            sets,
+        })
     }
 }
diff --git a/src/queries/player_info.rs b/src/queries/player_info.rs
index 2966f08..12880c1 100644
--- a/src/queries/player_info.rs
+++ b/src/queries/player_info.rs
@@ -4,7 +4,7 @@ use schema::schema;
 
 // Variables
 
-#[derive(cynic::QueryVariables, Debug)]
+#[derive(cynic::QueryVariables, Debug, Clone)]
 pub struct PlayerInfoVars {
     pub id: PlayerId,
 }
diff --git a/src/queries/search_games.rs b/src/queries/search_games.rs
index eec8e39..7505d32 100644
--- a/src/queries/search_games.rs
+++ b/src/queries/search_games.rs
@@ -4,7 +4,7 @@ use schema::schema;
 
 // Variables
 
-#[derive(cynic::QueryVariables)]
+#[derive(cynic::QueryVariables, Clone)]
 pub struct VideogameSearchVars<'a> {
     pub name: &'a str,
 }
diff --git a/src/queries/tournament_events.rs b/src/queries/tournament_events.rs
index 5a47751..939797c 100644
--- a/src/queries/tournament_events.rs
+++ b/src/queries/tournament_events.rs
@@ -5,14 +5,16 @@ use schema::schema;
 
 // Variables
 
-#[derive(cynic::QueryVariables, Debug)]
-pub struct TournamentEventsVars {
+#[derive(cynic::QueryVariables, Debug, Clone)]
+pub struct TournamentEventsVars<'a> {
     // HACK: This should really be an optional variable, but there seems to be a
     // server-side bug that completely breaks everything when this isn't passed.
     // We can use a dummy value of 1 when we don't want to filter by time.
-    pub last_query: Timestamp,
+    pub last_sync: Timestamp,
     pub game_id: VideogameId,
     pub page: i32,
+
+    pub state: Option<&'a str>,
 }
 
 // Query
@@ -22,12 +24,13 @@ pub struct TournamentEventsVars {
 pub struct TournamentEvents {
     #[arguments(query: {
         page: $page,
-        perPage: 300,
+        perPage: 250,
         sortBy: "endAt asc",
         filter: {
             past: true,
-            afterDate: $last_query,
+            afterDate: $last_sync,
             videogameIds: [$game_id],
+            addrState: $state
         }})]
     tournaments: Option<TournamentConnection>,
 }
@@ -35,10 +38,16 @@ pub struct TournamentEvents {
 #[derive(cynic::QueryFragment, Debug)]
 #[cynic(variables = "TournamentEventsVars")]
 struct TournamentConnection {
+    page_info: Option<PageInfo>,
     #[cynic(flatten)]
     nodes: Vec<Tournament>,
 }
 
+#[derive(cynic::QueryFragment, Debug)]
+struct PageInfo {
+    total_pages: Option<i32>,
+}
+
 #[derive(cynic::QueryFragment, Debug)]
 #[cynic(variables = "TournamentEventsVars")]
 struct Tournament {
@@ -56,33 +65,44 @@ struct Event {
 
 // Unwrap
 
+#[derive(Debug, Clone)]
+pub struct TournamentEventResponse {
+    pub pages: i32,
+    pub tournaments: Vec<TournamentData>,
+}
+
 #[derive(Debug, Clone)]
 pub struct TournamentData {
     pub name: String,
     pub events: Vec<EventId>,
 }
 
-impl QueryUnwrap<TournamentEventsVars> for TournamentEvents {
-    type Unwrapped = Vec<TournamentData>;
+impl<'a> QueryUnwrap<TournamentEventsVars<'a>> for TournamentEvents {
+    type Unwrapped = TournamentEventResponse;
 
-    fn unwrap_response(response: GraphQlResponse<TournamentEvents>) -> Option<Vec<TournamentData>> {
-        Some(
-            response
-                .data?
-                .tournaments?
-                .nodes
-                .into_iter()
-                .filter_map(|tour| {
-                    Some(TournamentData {
-                        name: tour.name?,
-                        events: tour
-                            .events
-                            .into_iter()
-                            .filter_map(|event| event.id)
-                            .collect(),
-                    })
+    fn unwrap_response(
+        response: GraphQlResponse<TournamentEvents>,
+    ) -> Option<TournamentEventResponse> {
+        let response_tournaments = response.data?.tournaments?;
+
+        let tournaments = response_tournaments
+            .nodes
+            .into_iter()
+            .filter_map(|tour| {
+                Some(TournamentData {
+                    name: tour.name?,
+                    events: tour
+                        .events
+                        .into_iter()
+                        .filter_map(|event| event.id)
+                        .collect(),
                 })
-                .collect(),
-        )
+            })
+            .collect::<Vec<_>>();
+
+        Some(TournamentEventResponse {
+            pages: response_tournaments.page_info?.total_pages?,
+            tournaments,
+        })
     }
 }
diff --git a/src/sync.rs b/src/sync.rs
index 4068696..ecc85c9 100644
--- a/src/sync.rs
+++ b/src/sync.rs
@@ -1,4 +1,8 @@
+use std::thread::sleep;
+use std::time::Duration;
+
 use crate::datasets::*;
+use crate::error;
 use crate::queries::*;
 use sqlite::*;
@@ -33,13 +37,105 @@ fn adjust_ratings(ratings: Teams<&mut f64>, winner: usize) {
 
 // Extract set data
 
 fn get_event_sets(event: EventId, auth: &str) -> Option<Vec<SetData>> {
-    let sets = run_query::<EventSets, _>(EventSetsVars {
-        event,
-        sets_page: 1,
-    });
+    sleep(Duration::from_millis(700));
+
+    let sets_response = run_query::<EventSets, _>(EventSetsVars { event, page: 1 }, auth)?;
+
+    let pages = sets_response.pages;
+    if pages == 0 {
+        Some(vec![])
+    } else if pages == 1 {
+        Some(sets_response.sets)
+    } else {
+        println!("  (Page 1)");
+
+        let mut sets = sets_response.sets;
+
+        for page in 2..=pages {
+            println!("  (Page {})", page);
+
+            let next_response = run_query::<EventSets, _>(
+                EventSetsVars {
+                    event,
+                    page: page as i32,
+                },
+                auth,
+            )?;
+
+            sleep(Duration::from_millis(700));
+
+            sets.extend(next_response.sets);
+        }
+
+        Some(sets)
+    }
 }
 
-/*
+fn get_tournament_events(
+    last_sync: Timestamp,
+    game_id: VideogameId,
+    state: Option<&str>,
+    auth: &str,
+) -> Option<Vec<EventId>> {
+    println!("Accessing tournaments...");
+
+    let tour_response = run_query::<TournamentEvents, _>(
+        TournamentEventsVars {
+            last_sync,
+            game_id,
+            state,
+            page: 1,
+        },
+        auth,
+    )?;
+
+    let pages = tour_response.pages;
+    if pages == 0 {
+        Some(vec![])
+    } else if pages == 1 {
+        Some(
+            tour_response
+                .tournaments
+                .into_iter()
+                .flat_map(|tour| tour.events)
+                .collect::<Vec<_>>(),
+        )
+    } else {
+        println!("  (Page 1)");
+
+        let mut tournaments = tour_response
+            .tournaments
+            .into_iter()
+            .flat_map(|tour| tour.events)
+            .collect::<Vec<_>>();
+
+        for page in 2..=pages {
+            println!("  (Page {})", page);
+
+            let next_response = run_query::<TournamentEvents, _>(
+                TournamentEventsVars {
+                    last_sync,
+                    game_id,
+                    state,
+                    page,
+                },
+                auth,
+            )?;
+
+            tournaments.extend(
+                next_response
+                    .tournaments
+                    .into_iter()
+                    .flat_map(|tour| tour.events),
+            );
+        }
+
+        Some(tournaments)
+    }
+}
+
+// Dataset syncing
+
 fn update_from_set(connection: &Connection, dataset: &str, results: SetData) -> sqlite::Result<()> {
     let players_data = results.teams;
     add_players(connection, dataset, &players_data)?;
@@ -54,14 +150,33 @@ fn update_from_set(connection: &Connection, dataset: &str, results: SetData) ->
     update_ratings(connection, dataset, elos)
 }
 
-pub fn update_from_tournament(
+pub fn sync_dataset(
     connection: &Connection,
     dataset: &str,
-    results: TournamentData,
+    last_sync: Timestamp,
+    game_id: VideogameId,
+    state: Option<&str>,
+    auth: &str,
 ) -> sqlite::Result<()> {
-    results
-        .sets
-        .into_iter()
-        .try_for_each(|set| update_from_set(connection, dataset, set))
+    let events = get_tournament_events(last_sync, game_id, state, auth)
+        .unwrap_or_else(|| error("Could not access start.gg", 1));
+
connection.execute("BEGIN;")?; + + let num_events = events.len(); + for (i, event) in events.into_iter().enumerate() { + println!( + "Accessing sets from event ID {}... ({}/{})", + event.0, i, num_events + ); + + let sets = + get_event_sets(event, auth).unwrap_or_else(|| error("Could not access start.gg", 1)); + + println!(" Updating ratings from event..."); + + sets.into_iter() + .try_for_each(|set| update_from_set(connection, dataset, set))?; + } + connection.execute("COMMIT;") } -*/