2023-09-23 03:01:36 -04:00
|
|
|
#![feature(iterator_try_collect)]
|
2023-10-04 17:58:54 -04:00
|
|
|
#![feature(extend_one)]
|
2023-09-23 03:01:36 -04:00
|
|
|
|
2023-09-26 22:36:03 -04:00
|
|
|
use clap::{Parser, Subcommand};
|
2023-09-23 03:01:36 -04:00
|
|
|
use std::io::{self, Write};
|
2023-09-30 01:43:33 -04:00
|
|
|
use std::path::PathBuf;
|
2023-10-03 23:21:31 -04:00
|
|
|
use std::process::exit;
|
2023-09-23 03:01:36 -04:00
|
|
|
|
|
|
|
mod queries;
|
|
|
|
use queries::*;
|
|
|
|
mod datasets;
|
|
|
|
use datasets::*;
|
2023-09-30 18:16:00 -04:00
|
|
|
mod sync;
|
|
|
|
use sync::*;
|
2023-09-23 03:01:36 -04:00
|
|
|
|
2023-10-01 14:55:15 -04:00
|
|
|
/// Print an error message and terminate the process with the given exit code.
///
/// Used throughout the CLI for unrecoverable failures; never returns.
///
/// Fix: diagnostics are written to stderr (`eprintln!`) instead of stdout,
/// so error text is not mixed into pipeable program output.
pub fn error(msg: &str, code: i32) -> ! {
    eprintln!("\nERROR: {}", msg);
    exit(code)
}
|
|
|
|
|
2023-10-02 20:29:39 -04:00
|
|
|
/// Report a non-error outcome to the user on stdout and exit with `code`.
///
/// Unlike [`error`], the message carries no "ERROR:" prefix; never returns.
pub fn issue(msg: &str, code: i32) -> ! {
    // Leading blank line keeps the message readable after inline prompts.
    println!("\n{msg}");
    exit(code)
}
|
|
|
|
|
2023-09-26 22:36:03 -04:00
|
|
|
/// ## CLI Structs
|
|
|
|
|
|
|
|
#[derive(Parser)]
|
2023-10-03 23:37:51 -04:00
|
|
|
#[command(name = "StartRNR")]
|
2023-09-26 22:36:03 -04:00
|
|
|
#[command(author = "Kiana Sheibani <kiana.a.sheibani@gmail.com>")]
|
|
|
|
#[command(version = "0.1.0")]
|
2023-10-03 23:37:51 -04:00
|
|
|
#[command(about = "StartRNR - Elo rating calculator for start.gg tournaments", long_about = None)]
|
2023-09-26 22:36:03 -04:00
|
|
|
struct Cli {
|
|
|
|
#[command(subcommand)]
|
|
|
|
subcommand: Subcommands,
|
2023-09-30 00:22:48 -04:00
|
|
|
|
2023-09-30 05:13:52 -04:00
|
|
|
#[arg(
|
|
|
|
short = 'A',
|
|
|
|
long = "auth",
|
|
|
|
value_name = "TOKEN",
|
|
|
|
global = true,
|
|
|
|
help = "Authentication token",
|
|
|
|
long_help = "The authentication token for accessing start.gg.
|
|
|
|
A token can be specified using this argument, in the environment variable
|
|
|
|
AUTH_TOKEN, or in a text file '<CONFIG_DIR>/auth.txt'."
|
|
|
|
)]
|
2023-09-30 00:22:48 -04:00
|
|
|
auth_token: Option<String>,
|
|
|
|
|
2023-09-30 05:13:52 -04:00
|
|
|
#[arg(
|
|
|
|
short,
|
|
|
|
long = "config",
|
|
|
|
value_name = "DIR",
|
|
|
|
global = true,
|
|
|
|
help = "Config directory",
|
2023-10-04 17:58:54 -04:00
|
|
|
long_help = "This flag overrides the default config directory.
|
2023-09-30 05:13:52 -04:00
|
|
|
If this directory does not exist, it will be created and a database file will
|
|
|
|
be initialized within it."
|
|
|
|
)]
|
2023-09-30 00:22:48 -04:00
|
|
|
config_dir: Option<PathBuf>,
|
2023-09-26 22:36:03 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
// Top-level subcommands of the CLI.
//
// Help text is supplied via #[command]/#[arg] attributes rather than doc
// comments, so these `//` comments do not leak into --help output.
#[derive(Subcommand)]
enum Subcommands {
    // Container for the `dataset` subcommand family (see `DatasetSC`).
    #[command(about = "Manipulate stored datasets")]
    Dataset {
        #[command(subcommand)]
        subcommand: DatasetSC,
    },
    // Pull tournament results from start.gg and update ratings.
    #[command(
        about = "Sync player ratings",
        long_about = "Pull recent tournament data off of start.gg and use it to
update the network. This command will automatically keep track of the last time each
dataset was synced to ensure that each tournament is only accounted for once."
    )]
    Sync {
        // Positional list; empty means "use the 'default' dataset".
        #[arg(
            help = "The datasets to sync",
            long_help = "A list of datasets to sync.

If no datasets are given, then the dataset 'default' is synced. This dataset is
created if it does not already exist."
        )]
        datasets: Vec<String>,
        // -a / --all: sync every stored dataset instead of the list above.
        #[arg(short, long, help = "Sync all stored databases")]
        all: bool,
    },
}
|
|
|
|
|
|
|
|
// Subcommands of `dataset` (list / new / delete).
//
// For `New` and `Delete`, a missing `name` is prompted for interactively
// (see `dataset_new` / `dataset_delete`).
#[derive(Subcommand)]
enum DatasetSC {
    #[command(about = "List datasets")]
    List,
    #[command(about = "Create a new dataset")]
    New { name: Option<String> },
    #[command(about = "Delete a dataset")]
    Delete { name: Option<String> },
}
|
|
|
|
|
2023-09-23 03:01:36 -04:00
|
|
|
fn main() {
|
2023-09-26 22:36:03 -04:00
|
|
|
let cli = Cli::parse();
|
|
|
|
|
|
|
|
match cli.subcommand {
|
|
|
|
Subcommands::Dataset {
|
|
|
|
subcommand: DatasetSC::List,
|
2023-09-27 15:19:28 -04:00
|
|
|
} => dataset_list(),
|
|
|
|
Subcommands::Dataset {
|
|
|
|
subcommand: DatasetSC::New { name },
|
2023-10-02 20:29:39 -04:00
|
|
|
} => dataset_new(name, cli.auth_token),
|
2023-09-30 00:22:48 -04:00
|
|
|
Subcommands::Dataset {
|
|
|
|
subcommand: DatasetSC::Delete { name },
|
|
|
|
} => dataset_delete(name),
|
2023-09-30 01:43:33 -04:00
|
|
|
|
2023-09-30 05:13:52 -04:00
|
|
|
Subcommands::Sync { datasets, all } => sync(datasets, all, cli.auth_token),
|
2023-09-26 22:36:03 -04:00
|
|
|
}
|
2023-09-27 15:19:28 -04:00
|
|
|
}
|
2023-09-26 22:36:03 -04:00
|
|
|
|
2023-09-27 15:19:28 -04:00
|
|
|
fn dataset_list() {
|
2023-10-01 14:55:15 -04:00
|
|
|
let config_dir = dirs::config_dir().expect("Could not determine config directory");
|
2023-09-23 03:01:36 -04:00
|
|
|
|
2023-10-01 14:55:15 -04:00
|
|
|
let connection =
|
|
|
|
open_datasets(&config_dir).unwrap_or_else(|_| error("Could not open datasets file", 1));
|
|
|
|
let datasets = list_datasets(&connection).expect("Error communicating with SQLite");
|
2023-09-26 22:36:03 -04:00
|
|
|
|
2023-10-03 01:26:25 -04:00
|
|
|
println!();
|
|
|
|
for (name, metadata) in datasets {
|
|
|
|
if let Some(state) = metadata.state {
|
|
|
|
println!("{} - {}, {}", name, metadata.game_name, state);
|
|
|
|
} else {
|
|
|
|
println!("{} - {}", name, metadata.game_name);
|
|
|
|
}
|
|
|
|
}
|
2023-09-23 03:01:36 -04:00
|
|
|
}
|
2023-09-26 22:36:03 -04:00
|
|
|
|
2023-10-01 14:55:15 -04:00
|
|
|
fn read_string() -> String {
|
|
|
|
let mut line = String::new();
|
2023-10-02 20:29:39 -04:00
|
|
|
io::stdout()
|
|
|
|
.flush()
|
|
|
|
.unwrap_or_else(|_| error("Could not access stdout", 2));
|
2023-10-01 14:55:15 -04:00
|
|
|
io::stdin()
|
|
|
|
.read_line(&mut line)
|
2023-10-02 20:29:39 -04:00
|
|
|
.unwrap_or_else(|_| error("Could not read from stdin", 2));
|
2023-10-01 14:55:15 -04:00
|
|
|
line.trim().to_owned()
|
|
|
|
}
|
|
|
|
|
2023-10-02 20:29:39 -04:00
|
|
|
/// `dataset new`: interactively create a new dataset.
///
/// Prompts for any information not supplied on the command line (dataset
/// name, game to track), searches start.gg for matching games, then records
/// the chosen game's metadata in the datasets database.
///
/// Exits via `error`/`issue` on any failure or empty search result.
fn dataset_new(name: Option<String>, auth_token: Option<String>) {
    let config_dir = dirs::config_dir().expect("Could not determine config directory");

    // Token priority: explicit argument first, then whatever get_auth_token
    // finds (per the CLI help: AUTH_TOKEN env var or <CONFIG_DIR>/auth.txt).
    let auth = auth_token
        .or_else(|| get_auth_token(&config_dir))
        .unwrap_or_else(|| error("Access token not provided", 1));

    // Prompt for a dataset name if one wasn't given.
    let name = name.unwrap_or_else(|| {
        print!("Name of new dataset: ");
        read_string()
    });

    // Ask the user for a search string and query start.gg for games.
    print!("Search games: ");
    let games = run_query::<VideogameSearch, _>(
        VideogameSearchVars {
            name: &read_string(),
        },
        &auth,
    )
    .unwrap_or_else(|| error("Could not access start.gg", 1));

    if games.is_empty() {
        issue("No games found!", 0);
    }

    // Show an indexed list so the user can pick by number.
    println!("\nSearch results:");
    for (i, game) in games.iter().enumerate() {
        println!("{} - {}", i, game.name);
    }

    print!("\nGame to track ratings for (0-{}): ", games.len() - 1);
    let index = read_string()
        .parse::<usize>()
        .unwrap_or_else(|_| error("Not an integer", 1));
    if index >= games.len() {
        error("Out of range!", 1);
    }

    let VideogameData {
        id: game_id,
        name: game_name,
    } = games[index].clone();

    let connection =
        open_datasets(&config_dir).unwrap_or_else(|_| error("Could not open datasets file", 1));
    new_dataset(
        &connection,
        &name,
        DatasetMetadata {
            // Timestamp(1) — presumably a sentinel meaning "never synced";
            // confirm against the Timestamp/sync implementation.
            last_sync: Timestamp(1),
            game_id,
            game_name,
            state: None,
        },
    )
    .expect("Error communicating with SQLite");
}
|
2023-09-30 00:22:48 -04:00
|
|
|
|
|
|
|
fn dataset_delete(name: Option<String>) {
|
2023-10-01 14:55:15 -04:00
|
|
|
let config_dir = dirs::config_dir().expect("Could not determine config directory");
|
2023-09-30 00:22:48 -04:00
|
|
|
|
|
|
|
let name = name.unwrap_or_else(|| {
|
|
|
|
print!("Dataset to delete: ");
|
2023-10-01 14:55:15 -04:00
|
|
|
read_string()
|
2023-09-30 00:22:48 -04:00
|
|
|
});
|
|
|
|
|
2023-10-01 14:55:15 -04:00
|
|
|
let connection =
|
|
|
|
open_datasets(&config_dir).unwrap_or_else(|_| error("Could not open datasets file", 1));
|
|
|
|
delete_dataset(&connection, &name).expect("Error communicating with SQLite");
|
2023-09-30 00:22:48 -04:00
|
|
|
}
|
2023-09-30 01:43:33 -04:00
|
|
|
|
2023-09-30 05:13:52 -04:00
|
|
|
fn sync(datasets: Vec<String>, all: bool, auth_token: Option<String>) {
|
2023-09-30 01:43:33 -04:00
|
|
|
let config_dir = dirs::config_dir().unwrap();
|
|
|
|
|
2023-10-01 14:55:15 -04:00
|
|
|
let auth = auth_token
|
|
|
|
.or_else(|| get_auth_token(&config_dir))
|
|
|
|
.unwrap_or_else(|| error("Access token not provided", 1));
|
2023-09-30 01:43:33 -04:00
|
|
|
|
2023-10-01 14:55:15 -04:00
|
|
|
let connection =
|
|
|
|
open_datasets(&config_dir).unwrap_or_else(|_| error("Could not open datasets file", 1));
|
2023-09-30 01:43:33 -04:00
|
|
|
|
2023-10-03 23:21:31 -04:00
|
|
|
let all_datasets = list_dataset_names(&connection).unwrap();
|
|
|
|
|
2023-10-01 14:55:15 -04:00
|
|
|
#[allow(unused_must_use)]
|
2023-09-30 05:13:52 -04:00
|
|
|
let datasets = if all {
|
2023-10-03 23:21:31 -04:00
|
|
|
all_datasets
|
2023-09-30 05:13:52 -04:00
|
|
|
} else if datasets.len() == 0 {
|
2023-10-03 23:21:31 -04:00
|
|
|
if all_datasets.len() == 0 {
|
|
|
|
print!("No datasets exist; create one? (y/n) ");
|
|
|
|
if let Some('y') = read_string().chars().next() {
|
|
|
|
dataset_new(Some(String::from("default")), Some(auth.clone()));
|
|
|
|
vec![String::from("default")]
|
|
|
|
} else {
|
|
|
|
error("No datasets specified and no default dataset", 1)
|
|
|
|
}
|
|
|
|
} else if all_datasets.iter().any(|x| x == "default") {
|
|
|
|
vec![String::from("default")]
|
|
|
|
} else {
|
|
|
|
error("No datasets specified and no default dataset", 1);
|
2023-10-02 20:29:39 -04:00
|
|
|
}
|
2023-09-30 01:43:33 -04:00
|
|
|
} else {
|
2023-09-30 05:13:52 -04:00
|
|
|
datasets
|
2023-09-30 01:43:33 -04:00
|
|
|
};
|
|
|
|
|
2023-09-30 05:13:52 -04:00
|
|
|
for dataset in datasets {
|
2023-10-03 01:26:25 -04:00
|
|
|
let dataset_config = get_metadata(&connection, &dataset)
|
2023-10-01 14:55:15 -04:00
|
|
|
.expect("Error communicating with SQLite")
|
|
|
|
.unwrap_or_else(|| error(&format!("Dataset {} does not exist!", dataset), 1));
|
|
|
|
|
2023-10-03 23:21:31 -04:00
|
|
|
sync_dataset(&connection, &dataset, dataset_config, &auth).unwrap_or_else(|_| {
|
2023-10-02 20:29:39 -04:00
|
|
|
connection.execute("ROLLBACK;").unwrap();
|
2023-10-03 01:26:25 -04:00
|
|
|
error("Error communicating with SQLite", 2)
|
2023-10-02 20:29:39 -04:00
|
|
|
});
|
|
|
|
|
|
|
|
update_last_sync(&connection, &dataset).expect("Error communicating with SQLite");
|
2023-09-30 01:43:33 -04:00
|
|
|
}
|
|
|
|
}
|