Various improvements

Kiana Sheibani 2023-10-03 01:26:25 -04:00
parent 5dc8a6d766
commit 1421720b2b
Signed by: toki
GPG key ID: 6CB106C25E86A9F7
5 changed files with 50 additions and 22 deletions

View file

@@ -6,7 +6,7 @@ use std::io;
 use std::path::{Path, PathBuf};
 use std::time::SystemTime;
 
-pub struct DatasetConfig {
+pub struct DatasetMetadata {
     pub last_sync: Timestamp,
     pub game_id: VideogameId,
     pub game_name: String,
@@ -48,13 +48,34 @@ pub fn open_datasets(config_dir: &Path) -> sqlite::Result<Connection> {
 // TODO: Sanitize dataset names
-pub fn list_datasets(connection: &Connection) -> sqlite::Result<Vec<String>> {
+pub fn list_dataset_names(connection: &Connection) -> sqlite::Result<Vec<String>> {
+    let query = "SELECT name FROM datasets";
+
+    connection
+        .prepare(query)?
+        .into_iter()
+        .map(|r| r.map(|x| x.read::<&str, _>("name").to_owned()))
+        .try_collect()
+}
+
+pub fn list_datasets(connection: &Connection) -> sqlite::Result<Vec<(String, DatasetMetadata)>> {
     let query = "SELECT * FROM datasets";
 
     connection
         .prepare(query)?
         .into_iter()
-        .map(|x| x.map(|r| r.read::<&str, _>("name").to_owned()))
+        .map(|r| {
+            let r_ = r?;
+            Ok((
+                r_.read::<&str, _>("name").to_owned(),
+                DatasetMetadata {
+                    last_sync: Timestamp(r_.read::<i64, _>("last_sync") as u64),
+                    game_id: VideogameId(r_.read::<i64, _>("game_id") as u64),
+                    game_name: r_.read::<&str, _>("game_name").to_owned(),
+                    state: r_.read::<Option<&str>, _>("state").map(String::from),
+                },
+            ))
+        })
         .try_collect()
 }
@@ -71,7 +92,7 @@ pub fn delete_dataset(connection: &Connection, dataset: &str) -> sqlite::Result<
 pub fn new_dataset(
     connection: &Connection,
     dataset: &str,
-    config: DatasetConfig,
+    config: DatasetMetadata,
 ) -> sqlite::Result<()> {
     let query1 = r#"INSERT INTO datasets (name, game_id, game_name, state)
         VALUES (?, ?, ?, ?)"#;
@@ -97,11 +118,11 @@ pub fn new_dataset(
     connection.execute(query2)
 }
 
-pub fn get_dataset_config(
+pub fn get_metadata(
     connection: &Connection,
     dataset: &str,
-) -> sqlite::Result<Option<DatasetConfig>> {
-    let query = "SELECT last_sync, game_id, state FROM datasets WHERE name = ?";
+) -> sqlite::Result<Option<DatasetMetadata>> {
+    let query = "SELECT last_sync, game_id, game_name, state FROM datasets WHERE name = ?";
 
     Ok(connection
         .prepare(query)?
@@ -110,7 +131,7 @@ pub fn get_dataset_config(
         .next()
         .map(|r| {
             let r_ = r?;
-            Ok(DatasetConfig {
+            Ok(DatasetMetadata {
                 last_sync: Timestamp(r_.read::<i64, _>("last_sync") as u64),
                 game_id: VideogameId(r_.read::<i64, _>("game_id") as u64),
                 game_name: r_.read::<&str, _>("game_name").to_owned(),
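
Note: the net effect in this file is an API split. list_dataset_names keeps the cheap name-only query, while list_datasets now decodes full rows into DatasetMetadata. A minimal usage sketch; the caller code below is an assumption for illustration, not part of this commit, and would live inside a function returning sqlite::Result:

    // Names only: no metadata decoding.
    let names: Vec<String> = list_dataset_names(&connection)?;

    // Full listing: (name, DatasetMetadata) pairs.
    for (name, meta) in list_datasets(&connection)? {
        println!("{} -> {}", name, meta.game_name);
    }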

View file

@@ -3,7 +3,6 @@
 use clap::{Parser, Subcommand};
 use std::io::{self, Write};
 use std::path::PathBuf;
-use std::process::exit;
 
 mod queries;
 use queries::*;
@@ -13,11 +12,13 @@ mod sync;
 use sync::*;
 
 pub fn error(msg: &str, code: i32) -> ! {
+    use std::process::exit;
     println!("\nERROR: {}", msg);
     exit(code)
 }
 
 pub fn issue(msg: &str, code: i32) -> ! {
+    use std::process::exit;
     println!("\n{}", msg);
     exit(code)
 }
@@ -119,7 +120,14 @@ fn dataset_list() {
         open_datasets(&config_dir).unwrap_or_else(|_| error("Could not open datasets file", 1));
     let datasets = list_datasets(&connection).expect("Error communicating with SQLite");
 
-    println!("{:?}", datasets);
+    println!();
+    for (name, metadata) in datasets {
+        if let Some(state) = metadata.state {
+            println!("{} - {}, {}", name, metadata.game_name, state);
+        } else {
+            println!("{} - {}", name, metadata.game_name);
+        }
+    }
 }
 
 fn read_string() -> String {
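
With the new loop, dataset_list prints one line per dataset and appends the state only when it is set. For two hypothetical datasets (names and games made up for illustration), the output would look like:

    mainline - Super Smash Bros. Ultimate, GA
    melee-all - Super Smash Bros. Melee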
@@ -181,7 +189,7 @@ fn dataset_new(name: Option<String>, auth_token: Option<String>) {
     new_dataset(
         &connection,
         &name,
-        DatasetConfig {
+        DatasetMetadata {
             last_sync: Timestamp(1),
             game_id,
             game_name,
@@ -216,7 +224,7 @@ fn sync(datasets: Vec<String>, all: bool, auth_token: Option<String>) {
     #[allow(unused_must_use)]
     let datasets = if all {
-        list_datasets(&connection).unwrap()
+        list_dataset_names(&connection).unwrap()
     } else if datasets.len() == 0 {
         print!("No datasets provided; create a new one? (y/n) ");
         if read_string() == "y" {
@@ -228,14 +236,13 @@ fn sync(datasets: Vec<String>, all: bool, auth_token: Option<String>) {
     };
 
     for dataset in datasets {
-        let dataset_config = get_dataset_config(&connection, &dataset)
+        let dataset_config = get_metadata(&connection, &dataset)
             .expect("Error communicating with SQLite")
            .unwrap_or_else(|| error(&format!("Dataset {} does not exist!", dataset), 1));
 
         sync_dataset(&connection, &dataset, dataset_config, &auth).unwrap_or_else(|err| {
             connection.execute("ROLLBACK;").unwrap();
-            panic!("{:?}", err);
-            // error("Error communicating with SQLite", 2)
+            error("Error communicating with SQLite", 2)
         });
 
         update_last_sync(&connection, &dataset).expect("Error communicating with SQLite");
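
One detail worth noting about the panic! to error() swap above: error returns the never type (!), so the closure still type-checks anywhere a value is expected. A standalone sketch of the same pattern, not taken from the commit:

    fn fail(msg: &str) -> ! {
        println!("\nERROR: {}", msg);
        std::process::exit(2)
    }

    // `!` coerces to i32 here, just as error(...) does in the sync loop above.
    let n: i32 = None::<i32>.unwrap_or_else(|| fail("no value"));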

View file

@@ -14,6 +14,7 @@ pub use event_sets::*;
 pub mod player_info;
 pub use player_info::*;
 
+use crate::error;
 use schema::schema;
 
 // Auth key
@@ -24,7 +25,7 @@ pub fn get_auth_token(config_dir: &Path) -> Option<String> {
     match var("AUTH_TOKEN") {
         Ok(key) => Some(key),
-        Err(VarError::NotUnicode(_)) => panic!("Invalid authorization key"),
+        Err(VarError::NotUnicode(_)) => error("Invalid authorization key", 2),
         Err(VarError::NotPresent) => {
             let mut auth_file = config_dir.to_owned();
             auth_file.push("ggelo");
@@ -77,9 +78,8 @@ pub trait QueryUnwrap<Vars>: 'static + QueryBuilder<Vars> {
 // Generic function for running start.gg queries
 pub fn run_query<Builder, Vars>(vars: Vars, auth_token: &str) -> Option<Builder::Unwrapped>
 where
-    Vars: Copy,
+    Vars: Copy + Serialize,
     Builder: QueryUnwrap<Vars>,
-    Vars: Serialize,
     for<'de> Builder: Deserialize<'de>,
 {
     use cynic::http::ReqwestBlockingExt;
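
The where-clause edit above is a tidy-up, not a semantic change: the two separate bounds on Vars merge into one. For reference, these two declarations accept exactly the same types (a sketch, assuming serde's Serialize trait):

    use serde::Serialize;

    fn f<V>(_: V) where V: Copy, V: Serialize {}
    fn g<V>(_: V) where V: Copy + Serialize {}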

View file

@@ -11,10 +11,10 @@ pub struct TournamentEventsVars<'a> {
     // server-side bug that completely breaks everything when this isn't passed.
     // We can use a dummy value of 1 when we don't want to filter by time.
     pub last_sync: Timestamp,
     pub game_id: VideogameId,
-    pub page: i32,
     pub state: Option<&'a str>,
+    pub page: i32,
 }
 
 // Query
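
The field reorder leaves the HACK described in the comment unchanged: last_sync still takes a dummy value of 1 when no time filter is wanted. A hypothetical construction under the new field order; the videogame id and state values below are made up for illustration:

    let vars = TournamentEventsVars {
        last_sync: Timestamp(1),    // dummy value: don't filter by time
        game_id: VideogameId(1386), // hypothetical start.gg videogame id
        state: Some("GA"),          // hypothetical state filter
        page: 1,
    };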

View file

@@ -71,7 +71,7 @@ fn get_event_sets(event: EventId, auth: &str) -> Option<Vec<SetData>> {
     }
 }
 
-fn get_tournament_events(dataset_config: &DatasetConfig, auth: &str) -> Option<Vec<EventId>> {
+fn get_tournament_events(dataset_config: &DatasetMetadata, auth: &str) -> Option<Vec<EventId>> {
     println!("Accessing tournaments...");
 
     let tour_response = run_query::<TournamentEvents, _>(
@@ -148,7 +148,7 @@ fn update_from_set(connection: &Connection, dataset: &str, results: SetData) ->
 pub fn sync_dataset(
     connection: &Connection,
     dataset: &str,
-    dataset_config: DatasetConfig,
+    dataset_config: DatasetMetadata,
     auth: &str,
 ) -> sqlite::Result<()> {
     let events = get_tournament_events(&dataset_config, auth)
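
Taken together with the main.rs changes, the per-dataset flow after this commit is: get_metadata, then sync_dataset, then update_last_sync. A condensed sketch of that chain, with error handling elided and assuming a function returning sqlite::Result:

    let meta = get_metadata(&connection, &dataset)?
        .unwrap_or_else(|| error("Dataset does not exist!", 1));
    sync_dataset(&connection, &dataset, meta, &auth)?;
    update_last_sync(&connection, &dataset)?;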