Add more metadata to datasets
parent a4f130b36e
commit 23ecce06d5
@@ -1,8 +1,16 @@
+use crate::error;
 use crate::queries::*;
 use sqlite::*;
 use std::fs::{self, OpenOptions};
 use std::io;
 use std::path::{Path, PathBuf};
+use std::time::SystemTime;
 
+pub struct DatasetConfig {
+    pub last_sync: Timestamp,
+    pub game_id: VideogameId,
+    pub state: Option<String>,
+}
+
 /// Return the path to the datasets file.
 fn datasets_path(config_dir: &Path) -> io::Result<PathBuf> {
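The new DatasetConfig struct groups the per-dataset metadata (last sync time, tracked game, and an optional state/region filter) that was previously threaded through function arguments one by one. As a rough illustration, a config equivalent to the values that were hard-coded in sync() before this commit (VideogameId(1386), state "GA") would be built like this:

let config = DatasetConfig {
    last_sync: Timestamp(1),
    game_id: VideogameId(1386),
    state: Some(String::from("GA")),
};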
@@ -26,7 +34,9 @@ pub fn open_datasets(config_dir: &Path) -> sqlite::Result<Connection> {
     let query = "
         CREATE TABLE IF NOT EXISTS datasets (
             name TEXT UNIQUE NOT NULL,
-            last_sync INTEGER DEFAULT 1
+            last_sync INTEGER DEFAULT 1,
+            game_id INTEGER NOT NULL,
+            state TEXT
         ) STRICT;";
 
     let connection = sqlite::open(path)?;
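Note that CREATE TABLE IF NOT EXISTS is a no-op when the datasets table already exists, so a datasets file created before this change will not gain the new game_id and state columns on its own. A hypothetical manual migration, not part of this commit, might look like the following (1386 is just the game ID the old code hard-coded; SQLite requires a default when adding a NOT NULL column):

connection.execute(
    "ALTER TABLE datasets ADD COLUMN game_id INTEGER NOT NULL DEFAULT 1386;
     ALTER TABLE datasets ADD COLUMN state TEXT;",
)?;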
@@ -56,11 +66,14 @@ pub fn delete_dataset(connection: &Connection, dataset: &str) -> sqlite::Result<
     connection.execute(query)
 }
 
-pub fn new_dataset(connection: &Connection, dataset: &str) -> sqlite::Result<()> {
-    let query = format!(
-        r#"INSERT INTO datasets (name) VALUES ('{0}');
-
-        CREATE TABLE IF NOT EXISTS "dataset_{0}" (
+pub fn new_dataset(
+    connection: &Connection,
+    dataset: &str,
+    config: DatasetConfig,
+) -> sqlite::Result<()> {
+    let query1 = r#"INSERT INTO datasets (name, game_id, state) VALUES (?, ?, ?)"#;
+    let query2 = format!(
+        r#"CREATE TABLE "dataset_{0}" (
             id INTEGER PRIMARY KEY,
             name TEXT,
             prefix TEXT,
@@ -69,29 +82,51 @@ pub fn new_dataset(connection: &Connection, dataset: &str) -> sqlite::Result<()>
         dataset
     );
 
-    connection.execute(query)
+    connection
+        .prepare(query1)?
+        .into_iter()
+        .bind((1, dataset))?
+        .bind((2, config.game_id.0 as i64))?
+        .bind((3, config.state.as_deref()))?
+        .try_for_each(|x| x.map(|_| ()))?;
+
+    connection.execute(query2)
 }
 
-pub fn get_last_sync(connection: &Connection, dataset: &str) -> sqlite::Result<Option<Timestamp>> {
-    let query = "SELECT last_sync FROM datasets WHERE name = ?";
+pub fn get_dataset_config(
+    connection: &Connection,
+    dataset: &str,
+) -> sqlite::Result<Option<DatasetConfig>> {
+    let query = "SELECT last_sync, game_id, state FROM datasets WHERE name = ?";
 
     Ok(connection
         .prepare(query)?
         .into_iter()
         .bind((1, dataset))?
-        .map(|x| x.map(|r| r.read::<i64, _>("last_sync").to_owned() as u64))
         .next()
-        .and_then(Result::ok)
-        .map(Timestamp))
+        .map(|r| {
+            let r_ = r?;
+            Ok(DatasetConfig {
+                last_sync: Timestamp(r_.read::<i64, _>("last_sync") as u64),
+                game_id: VideogameId(r_.read::<i64, _>("game_id") as u64),
+                state: r_.read::<Option<&str>, _>("state").map(String::from),
+            })
+        })
+        .and_then(Result::ok))
 }
 
-pub fn update_last_sync(connection: &Connection, dataset: &str, sync: u64) -> sqlite::Result<()> {
+pub fn update_last_sync(connection: &Connection, dataset: &str) -> sqlite::Result<()> {
     let query = "UPDATE datasets SET last_sync = :sync WHERE name = :dataset";
 
+    let current_time = SystemTime::now()
+        .duration_since(SystemTime::UNIX_EPOCH)
+        .unwrap_or_else(|_| error("System time is before the Unix epoch (1970)!", 2))
+        .as_secs();
+
     connection
         .prepare(query)?
         .into_iter()
-        .bind((":sync", sync as i64))?
+        .bind((":sync", current_time as i64))?
         .bind((":dataset", dataset))?
         .try_for_each(|x| x.map(|_| ()))
 }
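In the sqlite crate a prepared statement only executes as its cursor is driven, which is why the write paths above end in .try_for_each(|x| x.map(|_| ())): the closure discards each (empty) result row purely for the side effect and surfaces the first error. A rough usage sketch for the new reader, assuming an open connection and a dataset named "default":

let config = get_dataset_config(&connection, "default")
    .expect("Error communicating with SQLite")
    .unwrap_or_else(|| error("Dataset default does not exist!", 1));
println!(
    "game {} last synced at {}",
    config.game_id.0, config.last_sync.0
);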
src/main.rs (91 changed lines)
@@ -4,7 +4,6 @@ use clap::{Parser, Subcommand};
 use std::io::{self, Write};
 use std::path::PathBuf;
 use std::process::exit;
-use std::time::SystemTime;
 
 mod queries;
 use queries::*;
@@ -18,6 +17,11 @@ pub fn error(msg: &str, code: i32) -> ! {
     exit(code)
 }
 
+pub fn issue(msg: &str, code: i32) -> ! {
+    println!("\n{}", msg);
+    exit(code)
+}
+
 /// ## CLI Structs
 
 #[derive(Parser)]
@@ -69,14 +73,13 @@ dataset was synced."
     )]
     Sync {
         #[arg(
-            group = "datasets",
             help = "The datasets to sync",
             long_help = "A list of datasets to sync.
 If no datasets are given, then the dataset 'default' is synced. This dataset is
 created if it does not already exist."
         )]
         datasets: Vec<String>,
-        #[arg(short, long, group = "datasets", help = "Sync all stored databases")]
+        #[arg(short, long, help = "Sync all stored databases")]
         all: bool,
     },
 }
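For context on the removed group = "datasets" attributes: clap treats arguments that share a group as mutually exclusive by default, so the old definition rejected invocations that combined --all with explicit dataset names. A minimal standalone sketch of the behavior the group used to enforce (assuming clap's derive feature, which this CLI already uses):

use clap::Parser;

#[derive(Parser)]
struct Cli {
    // Both members of the "datasets" group: clap rejects passing
    // positional names together with --all.
    #[arg(group = "datasets")]
    names: Vec<String>,
    #[arg(short, long, group = "datasets")]
    all: bool,
}

fn main() {
    let cli = Cli::parse();
    println!("all = {}, names = {:?}", cli.all, cli.names);
}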
@@ -100,7 +103,7 @@ fn main() {
         } => dataset_list(),
         Subcommands::Dataset {
             subcommand: DatasetSC::New { name },
-        } => dataset_new(name),
+        } => dataset_new(name, cli.auth_token),
         Subcommands::Dataset {
             subcommand: DatasetSC::Delete { name },
         } => dataset_delete(name),
@@ -121,24 +124,67 @@ fn dataset_list() {
 
 fn read_string() -> String {
     let mut line = String::new();
-    io::stdout().flush().expect("Could not access stdout");
+    io::stdout()
+        .flush()
+        .unwrap_or_else(|_| error("Could not access stdout", 2));
     io::stdin()
         .read_line(&mut line)
-        .expect("Could not read from stdin");
+        .unwrap_or_else(|_| error("Could not read from stdin", 2));
     line.trim().to_owned()
 }
 
-fn dataset_new(name: Option<String>) {
+fn dataset_new(name: Option<String>, auth_token: Option<String>) {
     let config_dir = dirs::config_dir().expect("Could not determine config directory");
 
+    let auth = auth_token
+        .or_else(|| get_auth_token(&config_dir))
+        .unwrap_or_else(|| error("Access token not provided", 1));
+
     let name = name.unwrap_or_else(|| {
         print!("Name of new dataset: ");
         read_string()
     });
 
+    print!("Search games: ");
+    let games = run_query::<VideogameSearch, _>(
+        VideogameSearchVars {
+            name: &read_string(),
+        },
+        &auth,
+    )
+    .unwrap_or_else(|| error("Could not access start.gg", 1));
+
+    if games.is_empty() {
+        issue("No games found!", 0);
+    }
+
+    println!("\nSearch results:");
+    for (i, game) in games.iter().enumerate() {
+        println!("{} - {}", i, game.name);
+    }
+
+    print!("\nGame to track ratings for (0-{}): ", games.len() - 1);
+    let index = read_string()
+        .parse::<usize>()
+        .unwrap_or_else(|_| error("Not an integer", 1));
+    if index >= games.len() {
+        error("Out of range!", 1);
+    }
+
+    let game_id = games[index].id;
+
     let connection =
         open_datasets(&config_dir).unwrap_or_else(|_| error("Could not open datasets file", 1));
-    new_dataset(&connection, &name).expect("Error communicating with SQLite");
+    new_dataset(
+        &connection,
+        &name,
+        DatasetConfig {
+            last_sync: Timestamp(1),
+            game_id,
+            state: None,
+        },
+    )
+    .expect("Error communicating with SQLite");
 }
 
 fn dataset_delete(name: Option<String>) {
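A detail worth flagging in the interactive flow above: print! does not flush stdout, so read_string flushes before blocking on stdin; otherwise prompts like "Search games: " could stay buffered and never appear. A self-contained sketch of the same pattern, using a hypothetical prompt helper that combines the two steps dataset_new performs separately:

use std::io::{self, Write};

// Hypothetical helper mirroring how dataset_new pairs print! with read_string.
fn prompt(msg: &str) -> String {
    print!("{}", msg);
    // Flush so the prompt is visible before read_line blocks.
    io::stdout().flush().expect("could not flush stdout");
    let mut line = String::new();
    io::stdin()
        .read_line(&mut line)
        .expect("could not read stdin");
    line.trim().to_owned()
}

fn main() {
    let name = prompt("Name of new dataset: ");
    println!("creating {name}");
}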
@@ -168,33 +214,26 @@ fn sync(datasets: Vec<String>, all: bool, auth_token: Option<String>) {
     let datasets = if all {
         list_datasets(&connection).unwrap()
     } else if datasets.len() == 0 {
-        new_dataset(&connection, "default");
+        print!("No datasets provided; create a new one? (y/n) ");
+        if read_string() == "y" {
+            dataset_new(Some(String::from("default")), Some(auth.clone()));
+        }
         vec![String::from("default")]
     } else {
         datasets
     };
 
     for dataset in datasets {
-        let last_sync = get_last_sync(&connection, &dataset)
+        let dataset_config = get_dataset_config(&connection, &dataset)
             .expect("Error communicating with SQLite")
             .unwrap_or_else(|| error(&format!("Dataset {} does not exist!", dataset), 1));
 
-        sync_dataset(
-            &connection,
-            &dataset,
-            last_sync,
-            VideogameId(1386),
-            Some("GA"),
-            &auth,
-        )
-        .expect("Error communicating with SQLite");
-
-        let current_time = SystemTime::now()
-            .duration_since(SystemTime::UNIX_EPOCH)
-            .unwrap_or_else(|_| error("System time is before the Unix epoch!", 2))
-            .as_secs();
-
-        update_last_sync(&connection, &dataset, current_time)
-            .expect("Error communicating with SQLite");
+        sync_dataset(&connection, &dataset, dataset_config, &auth).unwrap_or_else(|err| {
+            connection.execute("ROLLBACK;").unwrap();
+            panic!("{:?}", err);
+            // error("Error communicating with SQLite", 2)
+        });
+
+        update_last_sync(&connection, &dataset).expect("Error communicating with SQLite");
     }
 }
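sync_dataset opens a transaction with BEGIN; (see src/sync.rs below), so the failure path here now issues an explicit ROLLBACK; before bailing out; the panic! with the softer error call left commented out reads like a temporary debugging aid. The underlying pattern, sketched with a hypothetical do_work step and the same sqlite-crate types the code already uses:

fn with_transaction(
    connection: &Connection,
    do_work: impl Fn(&Connection) -> sqlite::Result<()>,
) -> sqlite::Result<()> {
    connection.execute("BEGIN;")?;
    match do_work(connection) {
        Ok(()) => connection.execute("COMMIT;"),
        Err(e) => {
            // Undo any partial writes before surfacing the error.
            connection.execute("ROLLBACK;")?;
            Err(e)
        }
    }
}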
src/queries.rs

@@ -4,7 +4,7 @@ use schema::schema;
 
 // Variables
 
-#[derive(cynic::QueryVariables, Clone)]
+#[derive(cynic::QueryVariables, Debug, Clone)]
 pub struct VideogameSearchVars<'a> {
     pub name: &'a str,
 }
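Adding Debug to the derive list makes the query variables printable with the {:?} formatter, handy for ad-hoc logging while debugging requests. A trivial illustration (the search string is arbitrary):

println!("{:?}", VideogameSearchVars { name: "melee" });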
@@ -14,7 +14,7 @@ pub struct VideogameSearchVars<'a> {
 #[derive(cynic::QueryFragment, Debug)]
 #[cynic(graphql_type = "Query", variables = "VideogameSearchVars")]
 pub struct VideogameSearch {
-    #[arguments(query: { filter: { name: $name }, page: 1, perPage: 10 })]
+    #[arguments(query: { filter: { name: $name }, page: 1, perPage: 8 })]
     videogames: Option<VideogameConnection>,
 }
src/sync.rs (31 changed lines)
@@ -71,19 +71,14 @@ fn get_event_sets(event: EventId, auth: &str) -> Option<Vec<SetData>> {
     }
 }
 
-fn get_tournament_events(
-    last_sync: Timestamp,
-    game_id: VideogameId,
-    state: Option<&str>,
-    auth: &str,
-) -> Option<Vec<EventId>> {
+fn get_tournament_events(dataset_config: &DatasetConfig, auth: &str) -> Option<Vec<EventId>> {
     println!("Accessing tournaments...");
 
     let tour_response = run_query::<TournamentEvents, _>(
         TournamentEventsVars {
-            last_sync,
-            game_id,
-            state,
+            last_sync: dataset_config.last_sync,
+            game_id: dataset_config.game_id,
+            state: dataset_config.state.as_deref(),
             page: 1,
         },
         auth,
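DatasetConfig stores state as an owned Option<String>, while TournamentEventsVars borrows an Option<&str>; .as_deref() bridges the two by borrowing the owned string in place. In isolation:

let state: Option<String> = Some(String::from("GA"));
let borrowed: Option<&str> = state.as_deref();
assert_eq!(borrowed, Some("GA"));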
@@ -114,9 +109,9 @@ fn get_tournament_events(
 
     let next_response = run_query::<TournamentEvents, _>(
         TournamentEventsVars {
-            last_sync,
-            game_id,
-            state,
+            last_sync: dataset_config.last_sync,
+            game_id: dataset_config.game_id,
+            state: dataset_config.state.as_deref(),
             page,
         },
         auth,
@@ -153,12 +148,10 @@ fn update_from_set(connection: &Connection, dataset: &str, results: SetData) ->
 pub fn sync_dataset(
     connection: &Connection,
     dataset: &str,
-    last_sync: Timestamp,
-    game_id: VideogameId,
-    state: Option<&str>,
+    dataset_config: DatasetConfig,
     auth: &str,
 ) -> sqlite::Result<()> {
-    let events = get_tournament_events(last_sync, game_id, state, auth)
+    let events = get_tournament_events(&dataset_config, auth)
         .unwrap_or_else(|| error("Could not access start.gg", 1));
 
     connection.execute("BEGIN;")?;
@@ -170,8 +163,10 @@ pub fn sync_dataset(
             event.0, i, num_events
         );
 
-        let sets =
-            get_event_sets(event, auth).unwrap_or_else(|| error("Could not access start.gg", 1));
+        let sets = get_event_sets(event, auth).unwrap_or_else(|| {
+            connection.execute("ROLLBACK;").unwrap();
+            error("Could not access start.gg", 1)
+        });
 
         println!("  Updating ratings from event...");