commit a4f130b36e
parent e16b0be447

    Add better error messages

@@ -72,7 +72,7 @@ pub fn new_dataset(connection: &Connection, dataset: &str) -> sqlite::Result<()> {
     connection.execute(query)
 }
 
-pub fn get_last_sync(connection: &Connection, dataset: &str) -> sqlite::Result<Option<u64>> {
+pub fn get_last_sync(connection: &Connection, dataset: &str) -> sqlite::Result<Option<Timestamp>> {
     let query = "SELECT last_sync FROM datasets WHERE name = ?";
 
     Ok(connection

@@ -81,7 +81,8 @@ pub fn get_last_sync(connection: &Connection, dataset: &str) -> sqlite::Result<Option<u64>> {
         .bind((1, dataset))?
         .map(|x| x.map(|r| r.read::<i64, _>("last_sync").to_owned() as u64))
         .next()
-        .and_then(Result::ok))
+        .and_then(Result::ok)
+        .map(Timestamp))
 }
 
 pub fn update_last_sync(connection: &Connection, dataset: &str, sync: u64) -> sqlite::Result<()> {

src/main.rs (82 changed lines)

@@ -3,6 +3,7 @@
 use clap::{Parser, Subcommand};
 use std::io::{self, Write};
 use std::path::PathBuf;
+use std::process::exit;
 use std::time::SystemTime;
 
 mod queries;

@@ -12,6 +13,11 @@ use datasets::*;
 mod sync;
 use sync::*;
 
+pub fn error(msg: &str, code: i32) -> ! {
+    println!("\nERROR: {}", msg);
+    exit(code)
+}
+
 /// ## CLI Structs
 
 #[derive(Parser)]

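The new `error` helper is a diverging function: its return type `!` (the never type) lets a call to it coerce to whatever type the surrounding expression expects, which is what allows it to sit inside the `unwrap_or_else` closures added throughout this commit. A minimal standalone sketch of the pattern (example values hypothetical):

    use std::process::exit;

    pub fn error(msg: &str, code: i32) -> ! {
        println!("\nERROR: {}", msg);
        exit(code)
    }

    fn main() {
        // Because `error` never returns, the closure coerces to the
        // success type (i32 here) and the whole expression type-checks.
        let n: i32 = "42".parse().unwrap_or_else(|_| error("not a number", 1));
        println!("{}", n);
    }
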
@@ -104,65 +110,91 @@ fn main() {
 }
 
 fn dataset_list() {
-    let config_dir = dirs::config_dir().unwrap();
+    let config_dir = dirs::config_dir().expect("Could not determine config directory");
 
-    let connection = open_datasets(&config_dir).unwrap();
-    let datasets = list_datasets(&connection).unwrap();
+    let connection =
+        open_datasets(&config_dir).unwrap_or_else(|_| error("Could not open datasets file", 1));
+    let datasets = list_datasets(&connection).expect("Error communicating with SQLite");
 
     println!("{:?}", datasets);
 }
 
+fn read_string() -> String {
+    let mut line = String::new();
+    io::stdout().flush().expect("Could not access stdout");
+    io::stdin()
+        .read_line(&mut line)
+        .expect("Could not read from stdin");
+    line.trim().to_owned()
+}
+
 fn dataset_new(name: Option<String>) {
-    let config_dir = dirs::config_dir().unwrap();
+    let config_dir = dirs::config_dir().expect("Could not determine config directory");
 
     let name = name.unwrap_or_else(|| {
-        let mut line = String::new();
         print!("Name of new dataset: ");
-        io::stdout().flush().expect("Could not access stdout");
-        io::stdin()
-            .read_line(&mut line)
-            .expect("Could not read from stdin");
-        line.trim().to_owned()
+        read_string()
     });
 
-    let connection = open_datasets(&config_dir).unwrap();
-    new_dataset(&connection, &name).unwrap();
+    let connection =
+        open_datasets(&config_dir).unwrap_or_else(|_| error("Could not open datasets file", 1));
+    new_dataset(&connection, &name).expect("Error communicating with SQLite");
 }
 
 fn dataset_delete(name: Option<String>) {
-    let config_dir = dirs::config_dir().unwrap();
+    let config_dir = dirs::config_dir().expect("Could not determine config directory");
 
     let name = name.unwrap_or_else(|| {
-        let mut line = String::new();
         print!("Dataset to delete: ");
-        io::stdout().flush().expect("Could not access stdout");
-        io::stdin()
-            .read_line(&mut line)
-            .expect("Could not read from stdin");
-        line.trim().to_owned()
+        read_string()
     });
 
-    let connection = open_datasets(&config_dir).unwrap();
-    delete_dataset(&connection, &name).unwrap();
+    let connection =
+        open_datasets(&config_dir).unwrap_or_else(|_| error("Could not open datasets file", 1));
+    delete_dataset(&connection, &name).expect("Error communicating with SQLite");
 }
 
 fn sync(datasets: Vec<String>, all: bool, auth_token: Option<String>) {
     let config_dir = dirs::config_dir().unwrap();
 
-    let auth = auth_token.or_else(|| get_auth_token(&config_dir)).unwrap();
+    let auth = auth_token
+        .or_else(|| get_auth_token(&config_dir))
+        .unwrap_or_else(|| error("Access token not provided", 1));
 
-    let connection = open_datasets(&config_dir).unwrap();
+    let connection =
+        open_datasets(&config_dir).unwrap_or_else(|_| error("Could not open datasets file", 1));
 
+    #[allow(unused_must_use)]
     let datasets = if all {
         list_datasets(&connection).unwrap()
     } else if datasets.len() == 0 {
-        new_dataset(&connection, "default").unwrap();
+        new_dataset(&connection, "default");
         vec![String::from("default")]
     } else {
         datasets
     };
 
     for dataset in datasets {
-        let last_sync = get_last_sync(&connection, &dataset).unwrap().unwrap();
+        let last_sync = get_last_sync(&connection, &dataset)
+            .expect("Error communicating with SQLite")
+            .unwrap_or_else(|| error(&format!("Dataset {} does not exist!", dataset), 1));
+
+        sync_dataset(
+            &connection,
+            &dataset,
+            last_sync,
+            VideogameId(1386),
+            Some("GA"),
+            &auth,
+        )
+        .expect("Error communicating with SQLite");
+
+        let current_time = SystemTime::now()
+            .duration_since(SystemTime::UNIX_EPOCH)
+            .unwrap_or_else(|_| error("System time is before the Unix epoch!", 2))
+            .as_secs();
+
+        update_last_sync(&connection, &dataset, current_time)
+            .expect("Error communicating with SQLite");
     }
 }

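Note the `#[allow(unused_must_use)]` added above the `let datasets` binding: the default-dataset branch now calls `new_dataset(&connection, "default");` and deliberately drops its `sqlite::Result`, so syncing no longer aborts when the default dataset already exists. A narrower way to express the same intent (a sketch, not what the commit does) is to discard that one result explicitly:

    // Discard the Result on just this call instead of
    // suppressing the lint for the whole statement.
    let _ = new_dataset(&connection, "default");
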
@@ -2,6 +2,8 @@ use cynic::{GraphQlResponse, QueryBuilder};
 use serde::{Deserialize, Serialize};
 use std::fmt::Debug;
 use std::path::Path;
+use std::thread::sleep;
+use std::time::Duration;
 
 pub mod search_games;
 pub use search_games::*;

@@ -61,7 +63,7 @@ pub struct EntrantId(pub u64);
 #[cynic(graphql_type = "ID")]
 pub struct PlayerId(pub u64);
 
-#[derive(cynic::Scalar, Debug, Clone)]
+#[derive(cynic::Scalar, Debug, Copy, Clone)]
 pub struct Timestamp(pub u64);
 
 // Query machinery

@@ -75,19 +77,28 @@ pub trait QueryUnwrap<Vars>: 'static + QueryBuilder<Vars> {
 // Generic function for running start.gg queries
 pub fn run_query<Builder, Vars>(vars: Vars, auth_token: &str) -> Option<Builder::Unwrapped>
 where
-    Builder: Debug,
+    Vars: Clone,
     Builder: QueryUnwrap<Vars>,
     Vars: Serialize,
     for<'de> Builder: Deserialize<'de>,
 {
     use cynic::http::ReqwestBlockingExt;
 
-    let query = Builder::build(vars);
-
-    let response = reqwest::blocking::Client::new()
+    let mut response = reqwest::blocking::Client::new()
         .post("https://api.start.gg/gql/alpha")
         .header("Authorization", String::from("Bearer ") + auth_token)
-        .run_graphql(query);
+        .run_graphql(Builder::build(vars.clone()));
 
-    Builder::unwrap_response(response.unwrap())
+    for _ in 1..10 {
+        sleep(Duration::from_secs(2));
+        response = reqwest::blocking::Client::new()
+            .post("https://api.start.gg/gql/alpha")
+            .header("Authorization", String::from("Bearer ") + auth_token)
+            .run_graphql(Builder::build(vars.clone()));
+        if response.is_ok() {
+            break;
+        }
+    }
+
+    Builder::unwrap_response(response.ok()?)
 }

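As rewritten, `run_query` retries on failure instead of panicking: it issues the request, then loops up to nine more times, sleeping two seconds before each attempt and stopping at the first success; if every attempt fails, `response.ok()?` turns the error into `None`. Note the loop body runs at least once even when the initial request already succeeded, since the success check comes after the retry. A generic helper that only re-runs on failure might look like this (a sketch, not the committed code):

    use std::thread::sleep;
    use std::time::Duration;

    // Run `attempt` up to `tries` times, sleeping between failures,
    // and return the first success (or the last error).
    fn retry<T, E>(tries: usize, mut attempt: impl FnMut() -> Result<T, E>) -> Result<T, E> {
        let mut result = attempt();
        for _ in 1..tries {
            if result.is_ok() {
                break;
            }
            sleep(Duration::from_secs(2));
            result = attempt();
        }
        result
    }
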
@@ -6,10 +6,10 @@ pub type Teams<T> = Vec<Vec<T>>;
 
 // Variables
 
-#[derive(cynic::QueryVariables, Debug)]
+#[derive(cynic::QueryVariables, Debug, Clone)]
 pub struct EventSetsVars {
     pub event: EventId,
-    pub sets_page: i32,
+    pub page: i32,
 }
 
 // Query

@@ -24,16 +24,22 @@ pub struct EventSets {
 #[derive(cynic::QueryFragment, Debug)]
 #[cynic(variables = "EventSetsVars")]
 struct Event {
-    #[arguments(page: $sets_page, perPage: 11)]
+    #[arguments(page: $page, perPage: 50)]
     sets: Option<SetConnection>,
 }
 
 #[derive(cynic::QueryFragment, Debug)]
 struct SetConnection {
+    page_info: Option<PageInfo>,
     #[cynic(flatten)]
     nodes: Vec<Set>,
 }
 
+#[derive(cynic::QueryFragment, Debug)]
+struct PageInfo {
+    total_pages: Option<i32>,
+}
+
 #[derive(cynic::QueryFragment, Debug)]
 struct Set {
     #[arguments(includeByes: true)]

@@ -68,53 +74,60 @@ struct Player {
 
 // Unwrap
 
+pub struct EventSetsResponse {
+    pub pages: u64,
+    pub sets: Vec<SetData>,
+}
+
 pub struct SetData {
-    teams: Teams<PlayerData>,
-    winner: usize,
+    pub teams: Teams<PlayerData>,
+    pub winner: usize,
 }
 
 impl QueryUnwrap<EventSetsVars> for EventSets {
-    type Unwrapped = Vec<SetData>;
+    type Unwrapped = EventSetsResponse;
 
     // This might be the most spaghetti code I've ever written
-    fn unwrap_response(response: GraphQlResponse<EventSets>) -> Option<Vec<SetData>> {
-        Some(
-            response
-                .data?
-                .event?
-                .sets?
-                .nodes
-                .into_iter()
-                .filter_map(|set| {
-                    let winner_id = set.winner_id?;
-                    let winner = set.slots.iter().position(|slot| {
-                        slot.entrant
-                            .as_ref()
-                            .and_then(|x| x.id)
-                            .map(|id| id.0 == winner_id as u64)
-                            .unwrap_or(false)
-                    })?;
-                    let teams = set
-                        .slots
-                        .into_iter()
-                        .map(|slot| {
-                            slot.entrant?
-                                .participants
-                                .into_iter()
-                                .map(|p| {
-                                    let p_ = p.player?;
-                                    Some(PlayerData {
-                                        id: p_.id?,
-                                        name: p_.gamer_tag,
-                                        prefix: p_.prefix,
-                                    })
-                                })
-                                .try_collect()
-                        })
-                        .try_collect()?;
-                    Some(SetData { teams, winner })
-                })
-                .collect::<Vec<_>>(),
-        )
+    fn unwrap_response(response: GraphQlResponse<EventSets>) -> Option<EventSetsResponse> {
+        let response_sets = response.data?.event?.sets?;
+
+        let sets = response_sets
+            .nodes
+            .into_iter()
+            .filter_map(|set| {
+                let winner_id = set.winner_id?;
+                let winner = set.slots.iter().position(|slot| {
+                    slot.entrant
+                        .as_ref()
+                        .and_then(|x| x.id)
+                        .map(|id| id.0 == winner_id as u64)
+                        .unwrap_or(false)
+                })?;
+                let teams = set
+                    .slots
+                    .into_iter()
+                    .map(|slot| {
+                        slot.entrant?
+                            .participants
+                            .into_iter()
+                            .map(|p| {
+                                let p_ = p.player?;
+                                Some(PlayerData {
+                                    id: p_.id?,
+                                    name: p_.gamer_tag,
+                                    prefix: p_.prefix,
+                                })
+                            })
+                            .try_collect()
+                    })
+                    .try_collect()?;
+                Some(SetData { teams, winner })
+            })
+            .collect::<Vec<_>>();
+
+        Some(EventSetsResponse {
+            pages: response_sets.page_info?.total_pages? as u64,
+            sets,
+        })
     }
 }

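The `.try_collect()` calls here collect an iterator of `Option`s into an `Option` of a collection, short-circuiting on the first `None`. `Iterator::try_collect` is a nightly-only API (the `iterator_try_collect` feature), so this presumably relies on a `#![feature]` attribute elsewhere in the crate; on stable Rust the same short-circuiting collect is available through plain `collect`:

    // Stable equivalent of `iter.try_collect::<Vec<_>>()` over Option items:
    // collecting into Option<Vec<_>> stops at the first None.
    fn main() {
        let all_some = vec![Some(1), Some(2), Some(3)];
        let collected: Option<Vec<i32>> = all_some.into_iter().collect();
        assert_eq!(collected, Some(vec![1, 2, 3]));

        let with_none = vec![Some(1), None, Some(3)];
        let collected: Option<Vec<i32>> = with_none.into_iter().collect();
        assert_eq!(collected, None);
    }
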
@@ -4,7 +4,7 @@ use schema::schema;
 
 // Variables
 
-#[derive(cynic::QueryVariables, Debug)]
+#[derive(cynic::QueryVariables, Debug, Clone)]
 pub struct PlayerInfoVars {
     pub id: PlayerId,
 }

@@ -4,7 +4,7 @@ use schema::schema;
 
 // Variables
 
-#[derive(cynic::QueryVariables)]
+#[derive(cynic::QueryVariables, Clone)]
 pub struct VideogameSearchVars<'a> {
     pub name: &'a str,
 }

@@ -5,14 +5,16 @@ use schema::schema;
 
 // Variables
 
-#[derive(cynic::QueryVariables, Debug)]
-pub struct TournamentEventsVars {
+#[derive(cynic::QueryVariables, Debug, Clone)]
+pub struct TournamentEventsVars<'a> {
     // HACK: This should really be an optional variable, but there seems to be a
     // server-side bug that completely breaks everything when this isn't passed.
     // We can use a dummy value of 1 when we don't want to filter by time.
-    pub last_query: Timestamp,
+    pub last_sync: Timestamp,
     pub game_id: VideogameId,
     pub page: i32,
+
+    pub state: Option<&'a str>,
 }
 
 // Query

@@ -22,12 +24,13 @@ pub struct TournamentEventsVars {
 pub struct TournamentEvents {
     #[arguments(query: {
         page: $page,
-        perPage: 300,
+        perPage: 250,
         sortBy: "endAt asc",
         filter: {
             past: true,
-            afterDate: $last_query,
+            afterDate: $last_sync,
             videogameIds: [$game_id],
+            addrState: $state
         }})]
     tournaments: Option<TournamentConnection>,
 }

@@ -35,10 +38,16 @@ pub struct TournamentEvents {
 #[derive(cynic::QueryFragment, Debug)]
 #[cynic(variables = "TournamentEventsVars")]
 struct TournamentConnection {
+    page_info: Option<PageInfo>,
     #[cynic(flatten)]
     nodes: Vec<Tournament>,
 }
 
+#[derive(cynic::QueryFragment, Debug)]
+struct PageInfo {
+    total_pages: Option<i32>,
+}
+
 #[derive(cynic::QueryFragment, Debug)]
 #[cynic(variables = "TournamentEventsVars")]
 struct Tournament {

|
||||||
|
|
||||||
// Unwrap
|
// Unwrap
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct TournamentEventResponse {
|
||||||
|
pub pages: i32,
|
||||||
|
pub tournaments: Vec<TournamentData>,
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct TournamentData {
|
pub struct TournamentData {
|
||||||
pub name: String,
|
pub name: String,
|
||||||
pub events: Vec<EventId>,
|
pub events: Vec<EventId>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl QueryUnwrap<TournamentEventsVars> for TournamentEvents {
|
impl<'a> QueryUnwrap<TournamentEventsVars<'a>> for TournamentEvents {
|
||||||
type Unwrapped = Vec<TournamentData>;
|
type Unwrapped = TournamentEventResponse;
|
||||||
|
|
||||||
fn unwrap_response(response: GraphQlResponse<TournamentEvents>) -> Option<Vec<TournamentData>> {
|
fn unwrap_response(
|
||||||
Some(
|
response: GraphQlResponse<TournamentEvents>,
|
||||||
response
|
) -> Option<TournamentEventResponse> {
|
||||||
.data?
|
let response_tournaments = response.data?.tournaments?;
|
||||||
.tournaments?
|
|
||||||
.nodes
|
let tournaments = response_tournaments
|
||||||
.into_iter()
|
.nodes
|
||||||
.filter_map(|tour| {
|
.into_iter()
|
||||||
Some(TournamentData {
|
.filter_map(|tour| {
|
||||||
name: tour.name?,
|
Some(TournamentData {
|
||||||
events: tour
|
name: tour.name?,
|
||||||
.events
|
events: tour
|
||||||
.into_iter()
|
.events
|
||||||
.filter_map(|event| event.id)
|
.into_iter()
|
||||||
.collect(),
|
.filter_map(|event| event.id)
|
||||||
})
|
.collect(),
|
||||||
})
|
})
|
||||||
.collect(),
|
})
|
||||||
)
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
|
Some(TournamentEventResponse {
|
||||||
|
pages: response_tournaments.page_info?.total_pages?,
|
||||||
|
tournaments,
|
||||||
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
src/sync.rs (139 changed lines)

@@ -1,4 +1,8 @@
+use std::thread::sleep;
+use std::time::Duration;
+
 use crate::datasets::*;
+use crate::error;
 use crate::queries::*;
 use sqlite::*;
 

@@ -33,13 +37,105 @@ fn adjust_ratings(ratings: Teams<&mut f64>, winner: usize) {
 // Extract set data
 
 fn get_event_sets(event: EventId, auth: &str) -> Option<Vec<SetData>> {
-    let sets = run_query::<EventSets, _>(EventSetsVars {
-        event,
-        sets_page: 1,
-    });
+    sleep(Duration::from_millis(700));
+
+    let sets_response = run_query::<EventSets, _>(EventSetsVars { event, page: 1 }, auth)?;
+
+    let pages = sets_response.pages;
+    if pages == 0 {
+        Some(vec![])
+    } else if pages == 1 {
+        Some(sets_response.sets)
+    } else {
+        println!("  (Page 1)");
+
+        let mut sets = sets_response.sets;
+
+        for page in 2..=pages {
+            println!("  (Page {})", page);
+
+            let next_response = run_query::<EventSets, _>(
+                EventSetsVars {
+                    event,
+                    page: page as i32,
+                },
+                auth,
+            )?;
+
+            sleep(Duration::from_millis(700));
+
+            sets.extend(next_response.sets);
+        }
+
+        Some(sets)
+    }
 }
 
-/*
+fn get_tournament_events(
+    last_sync: Timestamp,
+    game_id: VideogameId,
+    state: Option<&str>,
+    auth: &str,
+) -> Option<Vec<EventId>> {
+    println!("Accessing tournaments...");
+
+    let tour_response = run_query::<TournamentEvents, _>(
+        TournamentEventsVars {
+            last_sync,
+            game_id,
+            state,
+            page: 1,
+        },
+        auth,
+    )?;
+
+    let pages = tour_response.pages;
+    if pages == 0 {
+        Some(vec![])
+    } else if pages == 1 {
+        Some(
+            tour_response
+                .tournaments
+                .into_iter()
+                .flat_map(|tour| tour.events)
+                .collect::<Vec<_>>(),
+        )
+    } else {
+        println!("  (Page 1)");
+
+        let mut tournaments = tour_response
+            .tournaments
+            .into_iter()
+            .flat_map(|tour| tour.events)
+            .collect::<Vec<_>>();
+
+        for page in 2..=pages {
+            println!("  (Page {})", page);
+
+            let next_response = run_query::<TournamentEvents, _>(
+                TournamentEventsVars {
+                    last_sync,
+                    game_id,
+                    state,
+                    page,
+                },
+                auth,
+            )?;
+
+            tournaments.extend(
+                next_response
+                    .tournaments
+                    .into_iter()
+                    .flat_map(|tour| tour.events),
+            );
+        }
+
+        Some(tournaments)
+    }
+}
+
+// Dataset syncing
+
 fn update_from_set(connection: &Connection, dataset: &str, results: SetData) -> sqlite::Result<()> {
     let players_data = results.teams;
     add_players(connection, dataset, &players_data)?;
@@ -54,14 +150,33 @@ fn update_from_set(connection: &Connection, dataset: &str, results: SetData) ->
     update_ratings(connection, dataset, elos)
 }
 
-pub fn update_from_tournament(
+pub fn sync_dataset(
     connection: &Connection,
     dataset: &str,
-    results: TournamentData,
+    last_sync: Timestamp,
+    game_id: VideogameId,
+    state: Option<&str>,
+    auth: &str,
 ) -> sqlite::Result<()> {
-    results
-        .sets
-        .into_iter()
-        .try_for_each(|set| update_from_set(connection, dataset, set))
+    let events = get_tournament_events(last_sync, game_id, state, auth)
+        .unwrap_or_else(|| error("Could not access start.gg", 1));
+
+    connection.execute("BEGIN;")?;
+
+    let num_events = events.len();
+    for (i, event) in events.into_iter().enumerate() {
+        println!(
+            "Accessing sets from event ID {}... ({}/{})",
+            event.0, i, num_events
+        );
+
+        let sets =
+            get_event_sets(event, auth).unwrap_or_else(|| error("Could not access start.gg", 1));
+
+        println!("  Updating ratings from event...");
+
+        sets.into_iter()
+            .try_for_each(|set| update_from_set(connection, dataset, set))?;
+    }
+    connection.execute("COMMIT;")
 }
-*/

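The new `sync_dataset` wraps each dataset's whole update in a single SQLite transaction: `BEGIN;` runs before the event loop and `COMMIT;` only after every set has been applied, so a mid-sync failure does not leave the ratings half-updated (the program exits via `error` or `expect` before committing). A reduced sketch of that shape, using the same `sqlite` crate `Connection::execute` API (the `ratings_log` table is hypothetical):

    use sqlite::Connection;

    // Apply a batch of inserts atomically: either every row lands, or none do.
    fn apply_batch(connection: &Connection, values: &[i64]) -> sqlite::Result<()> {
        connection.execute("BEGIN;")?;
        for v in values {
            // An error here propagates before COMMIT is reached, so the
            // open transaction is rolled back when the connection closes.
            connection.execute(format!("INSERT INTO ratings_log VALUES ({});", v))?;
        }
        connection.execute("COMMIT;")
    }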