Various fixes

Kiana Sheibani 2023-09-30 04:37:10 -04:00
parent 0f71cfa3c5
commit 9feba1a136
Signed by: toki
GPG key ID: 6CB106C25E86A9F7
4 changed files with 74 additions and 84 deletions

View file

@@ -12,7 +12,7 @@ fn datasets_path(config_dir: &Path) -> io::Result<PathBuf> {
     // Create datasets path if it doesn't exist
     fs::create_dir_all(&path)?;
 
-    path.push("main.db");
+    path.push("datasets.sqlite");
 
     // Create datasets file if it doesn't exist
     OpenOptions::new().write(true).create(true).open(&path)?;
@@ -26,7 +26,7 @@ pub fn open_datasets(config_dir: &Path) -> sqlite::Result<Connection> {
     let query = "
         CREATE TABLE IF NOT EXISTS datasets (
            name TEXT UNIQUE NOT NULL,
-            last_sync INTEGER NOT NULL DEFAULT 1
+            last_sync INTEGER DEFAULT 1
        ) STRICT;";
 
     let connection = sqlite::open(path)?;
@@ -58,7 +58,7 @@ pub fn delete_dataset(connection: &Connection, dataset: &str) -> sqlite::Result<
 
 pub fn new_dataset(connection: &Connection, dataset: &str) -> sqlite::Result<()> {
     let query = format!(
-        r#"INSERT INTO datasets VALUES ('{0}');
+        r#"INSERT INTO datasets (name) VALUES ('{0}');
 
        CREATE TABLE IF NOT EXISTS "dataset_{0}" (
            id INTEGER PRIMARY KEY,
@@ -201,7 +201,7 @@ fn update_from_set(connection: &Connection, dataset: &str, results: SetData) ->
     update_ratings(connection, dataset, elos)
 }
 
-fn update_from_tournament(
+pub fn update_from_tournament(
     connection: &Connection,
     dataset: &str,
     results: TournamentData,
@@ -211,13 +211,3 @@ fn update_from_tournament(
         .into_iter()
         .try_for_each(|set| update_from_set(connection, dataset, set))
 }
-
-pub fn update_from_tournaments(
-    connection: &Connection,
-    dataset: &str,
-    results: Vec<TournamentData>,
-) -> sqlite::Result<()> {
-    results
-        .into_iter()
-        .try_for_each(|tour| update_from_tournament(connection, dataset, tour))
-}
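
With update_from_tournaments removed, batch updates are left to the caller. A minimal sketch of what that looks like, assuming the caller still holds a Vec<TournamentData>; the helper name update_all is hypothetical and not part of this commit:

fn update_all(
    connection: &Connection,
    dataset: &str,
    tournaments: Vec<TournamentData>,
) -> sqlite::Result<()> {
    // Same try_for_each the removed wrapper used, now written at the call site.
    tournaments
        .into_iter()
        .try_for_each(|tour| update_from_tournament(connection, dataset, tour))
}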

View file

@@ -119,6 +119,9 @@ fn sync(names: Vec<String>, all: bool, auth_token: Option<String>) {
 
     let names = if all {
         list_datasets(&connection).unwrap()
+    } else if names.len() == 0 {
+        new_dataset(&connection, "default").unwrap();
+        vec![String::from("default")]
     } else {
         names
     };
@@ -130,14 +133,16 @@ fn sync(names: Vec<String>, all: bool, auth_token: Option<String>) {
             TournamentSetsVars {
                 last_query: Timestamp(last_sync),
                 game_id: VideogameId(1),
-                country: None,
-                state: None,
+                tournament: 1,
+                set_page: 1,
+                set_pagesize: 50,
+                event_limit: 9999999,
             },
             &auth,
         )
         .unwrap();
 
-        update_from_tournaments(&connection, &name, results).unwrap();
+        update_from_tournament(&connection, &name, results).unwrap();
 
         let current_time = SystemTime::now()
            .duration_since(SystemTime::UNIX_EPOCH)
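
The new branch gives sync a fallback dataset: with the all flag it still syncs every known dataset, an explicit list is used as-is, and an empty list now creates and syncs "default". A rough sketch of that selection rule pulled into its own function (select_datasets is a hypothetical name; list_datasets is assumed to return the dataset names as a Vec<String>):

fn select_datasets(connection: &Connection, names: Vec<String>, all: bool) -> Vec<String> {
    if all {
        // Sync every dataset that already exists.
        list_datasets(connection).unwrap()
    } else if names.is_empty() {
        // No names given: make sure "default" exists, then sync just that one.
        new_dataset(connection, "default").unwrap();
        vec![String::from("default")]
    } else {
        names
    }
}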

View file

@@ -1,5 +1,6 @@
 use cynic::{GraphQlResponse, QueryBuilder};
 use serde::{Deserialize, Serialize};
+use std::fmt::Debug;
 use std::path::Path;
 
 pub mod search_games;
@@ -22,6 +23,7 @@ pub fn get_auth_token(config_dir: &Path) -> Option<String> {
        Err(VarError::NotUnicode(_)) => panic!("Invalid authorization key"),
        Err(VarError::NotPresent) => {
            let mut auth_file = config_dir.to_owned();
+            auth_file.push("ggelo");
            auth_file.push("auth.txt");
            read_to_string(auth_file).ok().and_then(|s| {
                let trimmed = s.trim();
@@ -67,6 +69,7 @@ pub trait QueryUnwrap<Vars>: 'static + QueryBuilder<Vars> {
 // Generic function for running start.gg queries
 pub fn run_query<Builder, Vars>(vars: Vars, auth_token: &str) -> Option<Builder::Unwrapped>
 where
+    Builder: Debug,
    Builder: QueryUnwrap<Vars>,
    Vars: Serialize,
    for<'de> Builder: Deserialize<'de>,
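
The extra push means the fallback token file now lives in a ggelo subdirectory of the config directory rather than at its top level. A small sketch of the resulting path construction (auth_file_path is a hypothetical helper; the environment-variable lookup handled above is unchanged):

use std::path::{Path, PathBuf};

fn auth_file_path(config_dir: &Path) -> PathBuf {
    let mut auth_file = config_dir.to_owned();
    auth_file.push("ggelo");    // new: per-app subdirectory
    auth_file.push("auth.txt"); // same file name as before
    auth_file
}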

View file

@@ -7,15 +7,17 @@ pub type Teams<T> = Vec<Vec<T>>;
 // Variables
 
 #[derive(cynic::QueryVariables, Debug)]
-pub struct TournamentSetsVars<'a> {
+pub struct TournamentSetsVars {
     // HACK: This should really be an optional variable, but there seems to be a
     // server-side bug that completely breaks everything when this isn't passed.
     // We can use a dummy value of 1 when we don't want to filter by time.
     pub last_query: Timestamp,
     pub game_id: VideogameId,
 
-    pub country: Option<&'a str>,
-    pub state: Option<&'a str>,
+    pub tournament: i32,
+    pub event_limit: i32,
+    pub set_page: i32,
+    pub set_pagesize: i32,
 }
 
 // Query
@@ -24,15 +26,13 @@ pub struct TournamentSetsVars<'a> {
 #[cynic(graphql_type = "Query", variables = "TournamentSetsVars")]
 pub struct TournamentSets {
     #[arguments(query: {
-        page: 1,
+        page: $tournament,
        perPage: 1,
-        sortBy: "endAt desc",
+        sortBy: "endAt asc",
        filter: {
            past: true,
            afterDate: $last_query,
            videogameIds: [$game_id],
-            countryCode: $country,
-            addrState: $state
        }})]
    tournaments: Option<TournamentConnection>,
 }
@@ -48,14 +48,15 @@ struct TournamentConnection {
 #[cynic(variables = "TournamentSetsVars")]
 struct Tournament {
     name: Option<String>,
-    #[arguments(limit: 99999, filter: { videogameId: [$game_id] })]
+    #[arguments(limit: $event_limit, filter: { videogameId: [$game_id] })]
     #[cynic(flatten)]
     events: Vec<Event>,
 }
 
 #[derive(cynic::QueryFragment, Debug)]
+#[cynic(variables = "TournamentSetsVars")]
 struct Event {
-    #[arguments(page: 1, perPage: 999)]
+    #[arguments(page: $set_page, perPage: $set_pagesize)]
    sets: Option<SetConnection>,
 }
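
With perPage pinned to 1 in the query, the new tournament variable is effectively a 1-based page index: each request returns a single tournament, and a caller can walk through tournaments by bumping it. A sketch of the variables as the CLI fills them in (last_sync is assumed to be in scope, as in the sync loop above; any looping logic is not part of this commit):

let vars = TournamentSetsVars {
    last_query: Timestamp(last_sync),
    game_id: VideogameId(1),
    tournament: 1,        // which tournament (page) to fetch
    set_page: 1,
    set_pagesize: 50,
    event_limit: 9999999, // effectively "all events"
};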
@@ -111,67 +112,58 @@ pub struct SetData {
     pub winner: usize,
 }
 
-impl<'a> QueryUnwrap<TournamentSetsVars<'a>> for TournamentSets {
-    type Unwrapped = Vec<TournamentData>;
+impl QueryUnwrap<TournamentSetsVars> for TournamentSets {
+    type Unwrapped = TournamentData;
 
     // This might be the most spaghetti code I've ever written
-    fn unwrap_response(response: GraphQlResponse<TournamentSets>) -> Option<Vec<TournamentData>> {
-        Some(
-            response
-                .data?
-                .tournaments?
-                .nodes
-                .into_iter()
-                .filter_map(|tour| {
-                    let sets = tour
-                        .events
-                        .into_iter()
-                        .filter_map(|event| {
-                            Some(
-                                event
-                                    .sets?
-                                    .nodes
-                                    .into_iter()
-                                    .filter_map(|set| {
-                                        let winner_id = set.winner_id?;
-                                        let winner = set.slots.iter().position(|slot| {
-                                            slot.entrant
-                                                .as_ref()
-                                                .and_then(|x| x.id)
-                                                .map(|id| id.0 == winner_id as u64)
-                                                .unwrap_or(false)
-                                        })?;
-                                        let teams = set
-                                            .slots
-                                            .into_iter()
-                                            .map(|slot| {
-                                                slot.entrant?
-                                                    .participants
-                                                    .into_iter()
-                                                    .map(|p| {
-                                                        let p_ = p.player?;
-                                                        Some(PlayerData {
-                                                            id: p_.id?,
-                                                            name: p_.gamer_tag,
-                                                            prefix: p_.prefix,
-                                                        })
-                                                    })
-                                                    .try_collect()
-                                            })
-                                            .try_collect()?;
-                                        Some(SetData { teams, winner })
-                                    })
-                                    .collect::<Vec<_>>(),
-                            )
-                        })
-                        .flatten()
-                        .collect();
-                    Some(TournamentData {
-                        name: tour.name?,
-                        sets,
-                    })
-                })
-                .collect(),
-        )
-    }
+    fn unwrap_response(response: GraphQlResponse<TournamentSets>) -> Option<TournamentData> {
+        let tour = response.data?.tournaments?.nodes.into_iter().next()?;
+        let sets = tour
+            .events
+            .into_iter()
+            .filter_map(|event| {
+                Some(
+                    event
+                        .sets?
+                        .nodes
+                        .into_iter()
+                        .filter_map(|set| {
+                            let winner_id = set.winner_id?;
+                            let winner = set.slots.iter().position(|slot| {
+                                slot.entrant
+                                    .as_ref()
+                                    .and_then(|x| x.id)
+                                    .map(|id| id.0 == winner_id as u64)
+                                    .unwrap_or(false)
+                            })?;
+                            let teams = set
+                                .slots
+                                .into_iter()
+                                .map(|slot| {
+                                    slot.entrant?
+                                        .participants
+                                        .into_iter()
+                                        .map(|p| {
+                                            let p_ = p.player?;
+                                            Some(PlayerData {
+                                                id: p_.id?,
+                                                name: p_.gamer_tag,
+                                                prefix: p_.prefix,
+                                            })
+                                        })
+                                        .try_collect()
+                                })
+                                .try_collect()?;
+                            Some(SetData { teams, winner })
+                        })
+                        .collect::<Vec<_>>(),
+                )
+            })
+            .flatten()
+            .collect();
+        Some(TournamentData {
+            name: tour.name?,
+            sets,
+        })
+    }
 }
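
Since unwrap_response now yields a single TournamentData (the one tournament node the query asked for) instead of a Vec, callers consume one tournament per query, which is what the sync change above relies on. A sketch of the resulting call shape (the turbofish and variable names are illustrative, assuming run_query as declared earlier and the vars from the previous sketch):

// One query -> one tournament's worth of sets, fed straight into the dataset.
let results: TournamentData = run_query::<TournamentSets, _>(vars, &auth).unwrap();
update_from_tournament(&connection, &name, results).unwrap();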