diff --git a/src/datasets.rs b/src/datasets.rs
index 13ae359..0cfd008 100644
--- a/src/datasets.rs
+++ b/src/datasets.rs
@@ -1,8 +1,16 @@
+use crate::error;
 use crate::queries::*;
 use sqlite::*;
 use std::fs::{self, OpenOptions};
 use std::io;
 use std::path::{Path, PathBuf};
+use std::time::SystemTime;
+
+pub struct DatasetConfig {
+    pub last_sync: Timestamp,
+    pub game_id: VideogameId,
+    pub state: Option<String>,
+}
 
 /// Return the path to the datasets file.
 fn datasets_path(config_dir: &Path) -> io::Result<PathBuf> {
@@ -26,7 +34,9 @@ pub fn open_datasets(config_dir: &Path) -> sqlite::Result<Connection> {
     let query = "
         CREATE TABLE IF NOT EXISTS datasets (
             name TEXT UNIQUE NOT NULL,
-            last_sync INTEGER DEFAULT 1
+            last_sync INTEGER DEFAULT 1,
+            game_id INTEGER NOT NULL,
+            state TEXT
         ) STRICT;";
 
     let connection = sqlite::open(path)?;
@@ -56,11 +66,14 @@ pub fn delete_dataset(connection: &Connection, dataset: &str) -> sqlite::Result<
     connection.execute(query)
 }
 
-pub fn new_dataset(connection: &Connection, dataset: &str) -> sqlite::Result<()> {
-    let query = format!(
-        r#"INSERT INTO datasets (name) VALUES ('{0}');
-
-        CREATE TABLE IF NOT EXISTS "dataset_{0}" (
+pub fn new_dataset(
+    connection: &Connection,
+    dataset: &str,
+    config: DatasetConfig,
+) -> sqlite::Result<()> {
+    let query1 = r#"INSERT INTO datasets (name, game_id, state) VALUES (?, ?, ?)"#;
+    let query2 = format!(
+        r#" CREATE TABLE "dataset_{0}" (
             id INTEGER PRIMARY KEY,
             name TEXT,
             prefix TEXT,
@@ -69,29 +82,51 @@ pub fn new_dataset(connection: &Connection, dataset: &str) -> sqlite::Result<()>
         dataset
     );
 
-    connection.execute(query)
+    connection
+        .prepare(query1)?
+        .into_iter()
+        .bind((1, dataset))?
+        .bind((2, config.game_id.0 as i64))?
+        .bind((3, config.state.as_deref()))?
+        .try_for_each(|x| x.map(|_| ()))?;
+
+    connection.execute(query2)
 }
 
-pub fn get_last_sync(connection: &Connection, dataset: &str) -> sqlite::Result<Option<Timestamp>> {
-    let query = "SELECT last_sync FROM datasets WHERE name = ?";
+pub fn get_dataset_config(
+    connection: &Connection,
+    dataset: &str,
+) -> sqlite::Result<Option<DatasetConfig>> {
+    let query = "SELECT last_sync, game_id, state FROM datasets WHERE name = ?";
 
     Ok(connection
         .prepare(query)?
         .into_iter()
         .bind((1, dataset))?
-        .map(|x| x.map(|r| r.read::<i64, _>("last_sync").to_owned() as u64))
         .next()
-        .and_then(Result::ok)
-        .map(Timestamp))
+        .map(|r| {
+            let r_ = r?;
+            Ok(DatasetConfig {
+                last_sync: Timestamp(r_.read::<i64, _>("last_sync") as u64),
+                game_id: VideogameId(r_.read::<i64, _>("game_id") as u64),
+                state: r_.read::<Option<&str>, _>("state").map(String::from),
+            })
+        })
+        .and_then(Result::ok))
 }
 
-pub fn update_last_sync(connection: &Connection, dataset: &str, sync: u64) -> sqlite::Result<()> {
+pub fn update_last_sync(connection: &Connection, dataset: &str) -> sqlite::Result<()> {
     let query = "UPDATE datasets SET last_sync = :sync WHERE name = :dataset";
 
+    let current_time = SystemTime::now()
+        .duration_since(SystemTime::UNIX_EPOCH)
+        .unwrap_or_else(|_| error("System time is before the Unix epoch (1970)!", 2))
+        .as_secs();
+
     connection
         .prepare(query)?
         .into_iter()
-        .bind((":sync", sync as i64))?
+        .bind((":sync", current_time as i64))?
         .bind((":dataset", dataset))?
         .try_for_each(|x| x.map(|_| ()))
 }
diff --git a/src/main.rs b/src/main.rs
index e0e4d2b..5a249e6 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -4,7 +4,6 @@ use clap::{Parser, Subcommand};
 use std::io::{self, Write};
 use std::path::PathBuf;
 use std::process::exit;
-use std::time::SystemTime;
 
 mod queries;
 use queries::*;
@@ -18,6 +17,11 @@ pub fn error(msg: &str, code: i32) -> ! {
     exit(code)
 }
 
+pub fn issue(msg: &str, code: i32) -> ! {
+    println!("\n{}", msg);
+    exit(code)
+}
+
 /// ## CLI Structs
 
 #[derive(Parser)]
@@ -69,14 +73,13 @@ dataset was synced."
     )]
     Sync {
         #[arg(
-            group = "datasets",
             help = "The datasets to sync",
             long_help = "A list of datasets to sync. If no datasets are given, then the dataset
 'default' is synced. This dataset is created if it does not already exist."
         )]
         datasets: Vec<String>,
 
-        #[arg(short, long, group = "datasets", help = "Sync all stored databases")]
+        #[arg(short, long, help = "Sync all stored databases")]
         all: bool,
     },
 }
@@ -100,7 +103,7 @@ fn main() {
         } => dataset_list(),
         Subcommands::Dataset {
             subcommand: DatasetSC::New { name },
-        } => dataset_new(name),
+        } => dataset_new(name, cli.auth_token),
         Subcommands::Dataset {
             subcommand: DatasetSC::Delete { name },
         } => dataset_delete(name),
@@ -121,24 +124,67 @@ fn dataset_list() {
 fn read_string() -> String {
     let mut line = String::new();
-    io::stdout().flush().expect("Could not access stdout");
+    io::stdout()
+        .flush()
+        .unwrap_or_else(|_| error("Could not access stdout", 2));
     io::stdin()
         .read_line(&mut line)
-        .expect("Could not read from stdin");
+        .unwrap_or_else(|_| error("Could not read from stdin", 2));
     line.trim().to_owned()
 }
 
-fn dataset_new(name: Option<String>) {
+fn dataset_new(name: Option<String>, auth_token: Option<String>) {
     let config_dir = dirs::config_dir().expect("Could not determine config directory");
 
+    let auth = auth_token
+        .or_else(|| get_auth_token(&config_dir))
+        .unwrap_or_else(|| error("Access token not provided", 1));
+
     let name = name.unwrap_or_else(|| {
         print!("Name of new dataset: ");
         read_string()
     });
 
+    print!("Search games: ");
+    let games = run_query::<VideogameSearch, _>(
+        VideogameSearchVars {
+            name: &read_string(),
+        },
+        &auth,
+    )
+    .unwrap_or_else(|| error("Could not access start.gg", 1));
+
+    if games.is_empty() {
+        issue("No games found!", 0);
+    }
+
+    println!("\nSearch results:");
+    for (i, game) in games.iter().enumerate() {
+        println!("{} - {}", i, game.name);
+    }
+
+    print!("\nGame to track ratings for (0-{}): ", games.len() - 1);
+    let index = read_string()
+        .parse::<usize>()
+        .unwrap_or_else(|_| error("Not an integer", 1));
+    if index >= games.len() {
+        error("Out of range!", 1);
+    }
+
+    let game_id = games[index].id;
+
     let connection =
         open_datasets(&config_dir).unwrap_or_else(|_| error("Could not open datasets file", 1));
 
-    new_dataset(&connection, &name).expect("Error communicating with SQLite");
+    new_dataset(
+        &connection,
+        &name,
+        DatasetConfig {
+            last_sync: Timestamp(1),
+            game_id,
+            state: None,
+        },
+    )
+    .expect("Error communicating with SQLite");
 }
 
 fn dataset_delete(name: Option<String>) {
@@ -168,33 +214,26 @@ fn sync(datasets: Vec<String>, all: bool, auth_token: Option<String>) {
     let datasets = if all {
         list_datasets(&connection).unwrap()
     } else if datasets.len() == 0 {
-        new_dataset(&connection, "default");
+        print!("No datasets provided; create a new one? (y/n) ");
(y/n) "); + if read_string() == "y" { + dataset_new(Some(String::from("default")), Some(auth.clone())); + } vec![String::from("default")] } else { datasets }; for dataset in datasets { - let last_sync = get_last_sync(&connection, &dataset) + let dataset_config = get_dataset_config(&connection, &dataset) .expect("Error communicating with SQLite") .unwrap_or_else(|| error(&format!("Dataset {} does not exist!", dataset), 1)); - sync_dataset( - &connection, - &dataset, - last_sync, - VideogameId(1386), - Some("GA"), - &auth, - ) - .expect("Error communicating with SQLite"); + sync_dataset(&connection, &dataset, dataset_config, &auth).unwrap_or_else(|err| { + connection.execute("ROLLBACK;").unwrap(); + panic!("{:?}", err); + // error("Error communicating with SQLite", 2) + }); - let current_time = SystemTime::now() - .duration_since(SystemTime::UNIX_EPOCH) - .unwrap_or_else(|_| error("System time is before the Unix epoch!", 2)) - .as_secs(); - - update_last_sync(&connection, &dataset, current_time) - .expect("Error communicating with SQLite"); + update_last_sync(&connection, &dataset).expect("Error communicating with SQLite"); } } diff --git a/src/queries/search_games.rs b/src/queries/search_games.rs index 7505d32..9944745 100644 --- a/src/queries/search_games.rs +++ b/src/queries/search_games.rs @@ -4,7 +4,7 @@ use schema::schema; // Variables -#[derive(cynic::QueryVariables, Clone)] +#[derive(cynic::QueryVariables, Debug, Clone)] pub struct VideogameSearchVars<'a> { pub name: &'a str, } @@ -14,7 +14,7 @@ pub struct VideogameSearchVars<'a> { #[derive(cynic::QueryFragment, Debug)] #[cynic(graphql_type = "Query", variables = "VideogameSearchVars")] pub struct VideogameSearch { - #[arguments(query: { filter: { name: $name }, page: 1, perPage: 10 })] + #[arguments(query: { filter: { name: $name }, page: 1, perPage: 8 })] videogames: Option, } diff --git a/src/sync.rs b/src/sync.rs index ecc85c9..d0bb539 100644 --- a/src/sync.rs +++ b/src/sync.rs @@ -71,19 +71,14 @@ fn get_event_sets(event: EventId, auth: &str) -> Option> { } } -fn get_tournament_events( - last_sync: Timestamp, - game_id: VideogameId, - state: Option<&str>, - auth: &str, -) -> Option> { +fn get_tournament_events(dataset_config: &DatasetConfig, auth: &str) -> Option> { println!("Accessing tournaments..."); let tour_response = run_query::( TournamentEventsVars { - last_sync, - game_id, - state, + last_sync: dataset_config.last_sync, + game_id: dataset_config.game_id, + state: dataset_config.state.as_deref(), page: 1, }, auth, @@ -114,9 +109,9 @@ fn get_tournament_events( let next_response = run_query::( TournamentEventsVars { - last_sync, - game_id, - state, + last_sync: dataset_config.last_sync, + game_id: dataset_config.game_id, + state: dataset_config.state.as_deref(), page, }, auth, @@ -153,12 +148,10 @@ fn update_from_set(connection: &Connection, dataset: &str, results: SetData) -> pub fn sync_dataset( connection: &Connection, dataset: &str, - last_sync: Timestamp, - game_id: VideogameId, - state: Option<&str>, + dataset_config: DatasetConfig, auth: &str, ) -> sqlite::Result<()> { - let events = get_tournament_events(last_sync, game_id, state, auth) + let events = get_tournament_events(&dataset_config, auth) .unwrap_or_else(|| error("Could not access start.gg", 1)); connection.execute("BEGIN;")?; @@ -170,8 +163,10 @@ pub fn sync_dataset( event.0, i, num_events ); - let sets = - get_event_sets(event, auth).unwrap_or_else(|| error("Could not access start.gg", 1)); + let sets = get_event_sets(event, auth).unwrap_or_else(|| { + 
connection.execute("ROLLBACK;").unwrap(); + error("Could not access start.gg", 1) + }); println!(" Updating ratings from event...");