Restructure SQL tables
This commit is contained in:
parent c06c18c0ba
commit 72884497c1
@@ -56,7 +56,26 @@ CREATE TABLE IF NOT EXISTS datasets (
adj_decay_rate REAL NOT NULL,
period REAL NOT NULL,
tau REAL NOT NULL
) STRICT;";
) STRICT;

CREATE TABLE IF NOT EXISTS players (
id INTEGER PRIMARY KEY,
discrim TEXT NOT NULL,
name TEXT NOT NULL,
prefix TEXT
) STRICT;

CREATE TABLE IF NOT EXISTS events (
id INTEGER PRIMARY KEY,
slug TEXT NOT NULL
) STRICT;

CREATE TABLE IF NOT EXISTS sets (
id TEXT UNIQUE NOT NULL,
event INTEGER NOT NULL,
FOREIGN KEY(event) REFERENCES events
) STRICT;
";

let connection = sqlite::open(path)?;
connection.execute(query)?;
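As an editorial aside (not part of the commit), the new shared tables can be exercised with the same `sqlite` crate calls this file already uses; in the minimal sketch below, the function name, event id, and slug value are made up for illustration.

// Editorial sketch, not from the commit: create the shared tables and insert one event.
// FOREIGN KEY(event) REFERENCES events points at the referenced table's primary key (id).
fn demo_shared_tables() -> sqlite::Result<()> {
    let connection = sqlite::open(":memory:")?;
    connection.execute(
        "CREATE TABLE IF NOT EXISTS events (id INTEGER PRIMARY KEY, slug TEXT NOT NULL) STRICT;
         CREATE TABLE IF NOT EXISTS sets (id TEXT UNIQUE NOT NULL, event INTEGER NOT NULL,
             FOREIGN KEY(event) REFERENCES events) STRICT;",
    )?;
    let mut statement = connection.prepare("INSERT OR IGNORE INTO events (id, slug) VALUES (?, ?)")?;
    statement.bind((1, 1234_i64))?;
    statement.bind((2, "tournament/example/event/singles"))?;
    // Iterating the prepared statement steps it to completion; result rows (none here) are discarded.
    statement.into_iter().try_for_each(|x| x.map(|_| ()))
}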
@@ -124,9 +143,6 @@ pub fn new_dataset(
let query2 = format!(
r#"CREATE TABLE "{0}_players" (
id INTEGER PRIMARY KEY,
name TEXT,
prefix TEXT,
slug TEXT NOT NULL,
last_played INTEGER NOT NULL,
deviation REAL NOT NULL,
volatility REAL NOT NULL,
@@ -168,8 +184,8 @@ CREATE VIEW "{0}_view"
sets_A, sets_count_A, sets_B, sets_count_B, sets, sets_count) AS
SELECT players_A.id, players_B.id, players_A.name, players_B.name, advantage,
sets_A, sets_count_A, sets_B, sets_count_B, network.sets, network.sets_count FROM "{0}_network" network
INNER JOIN "{0}_players" players_A ON player_A = players_A.id
INNER JOIN "{0}_players" players_B ON player_B = players_B.id;"#,
INNER JOIN players players_A ON player_A = players_A.id
INNER JOIN players players_B ON player_B = players_B.id;"#,
dataset
);
@@ -241,16 +257,35 @@ pub fn update_last_sync(connection: &Connection, dataset: &str) -> sqlite::Resul

// Database Updating

pub fn add_event(connection: &Connection, event: EventId, slug: &str) -> sqlite::Result<()> {
let query = "INSERT OR IGNORE INTO events (id, slug) VALUES (?, ?)";

let mut statement = connection.prepare(&query)?;
statement.bind((1, event.0 as i64))?;
statement.bind((2, slug))?;
statement.into_iter().try_for_each(|x| x.map(|_| ()))
}

pub fn add_set(connection: &Connection, set: &SetId, event: EventId) -> sqlite::Result<()> {
let query = "INSERT OR IGNORE INTO sets (id, event) VALUES (?, ?)";

let mut statement = connection.prepare(&query)?;
statement.bind((1, &set.0.to_string()[..]))?;
statement.bind((2, event.0 as i64))?;
statement.into_iter().try_for_each(|x| x.map(|_| ()))
}

pub fn add_players(
connection: &Connection,
dataset: &str,
teams: &Teams<PlayerData>,
time: Timestamp,
) -> sqlite::Result<()> {
let query = format!(
let query1 = "INSERT OR IGNORE INTO players (id, discrim, name, prefix) VALUES (?, ?, ?, ?)";
let query2 = format!(
r#"INSERT OR IGNORE INTO "{}_players"
(id, name, prefix, slug, last_played, deviation, volatility, sets_won, sets_lost)
VALUES (?, ?, ?, ?, ?, 2.01, 0.06, '', '')"#,
(id, last_played, deviation, volatility, sets_won, sets_lost)
VALUES (?, ?, 2.01, 0.06, '', '')"#,
dataset
);
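One detail worth noting in add_players above: the prefix column is nullable, and binding an `Option<&str>` stores SQL NULL when the value is None. A small editorial sketch of that behaviour (not part of the commit; the id, discriminator, and tag are made-up values):

// Editorial sketch, not from the commit: None binds as NULL, Some("...") binds as text.
fn insert_player_with_optional_prefix(
    connection: &sqlite::Connection,
    prefix: Option<String>,
) -> sqlite::Result<()> {
    let mut statement = connection
        .prepare("INSERT OR IGNORE INTO players (id, discrim, name, prefix) VALUES (?, ?, ?, ?)")?;
    statement.bind((1, 1_i64))?;               // hypothetical player id
    statement.bind((2, "abc123"))?;            // hypothetical discriminator
    statement.bind((3, "Example Player"))?;    // hypothetical tag
    statement.bind((4, prefix.as_ref().map(|x| &x[..])))?; // None -> NULL
    statement.into_iter().try_for_each(|x| x.map(|_| ()))
}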
@@ -260,29 +295,26 @@ pub fn add_players(
id,
name,
prefix,
slug,
discrim,
}| {
let mut statement = connection.prepare(&query)?;
let mut statement = connection.prepare(&query1)?;
statement.bind((1, id.0 as i64))?;
statement.bind((2, &name[..]))?;
statement.bind((3, prefix.as_ref().map(|x| &x[..])))?;
statement.bind((4, &slug[..]))?;
statement.bind((5, time.0 as i64))?;
statement.bind((2, &discrim[..]))?;
statement.bind((3, &name[..]))?;
statement.bind((4, prefix.as_ref().map(|x| &x[..])))?;
statement.into_iter().try_for_each(|x| x.map(|_| ()))?;

statement = connection.prepare(&query2)?;
statement.bind((1, id.0 as i64))?;
statement.bind((2, time.0 as i64))?;
statement.into_iter().try_for_each(|x| x.map(|_| ()))
},
)
})
}

pub fn get_player(
connection: &Connection,
dataset: &str,
player: PlayerId,
) -> sqlite::Result<PlayerData> {
let query = format!(
r#"SELECT name, prefix, slug FROM "{}_players" WHERE id = ?"#,
dataset
);
pub fn get_player(connection: &Connection, player: PlayerId) -> sqlite::Result<PlayerData> {
let query = "SELECT name, prefix, discrim FROM players WHERE id = ?";

let mut statement = connection.prepare(&query)?;
statement.bind((1, player.0 as i64))?;
@@ -291,11 +323,11 @@ pub fn get_player(
id: player,
name: statement.read::<String, _>("name")?,
prefix: statement.read::<Option<String>, _>("prefix")?,
slug: statement.read::<String, _>("slug")?,
discrim: statement.read::<String, _>("discrim")?,
})
}

pub fn get_player_data(
pub fn get_player_rating_data(
connection: &Connection,
dataset: &str,
player: PlayerId,
@@ -323,7 +355,7 @@ pub fn set_player_data(
deviation: f64,
volatility: f64,
won: bool,
set: SetId,
set: &SetId,
) -> sqlite::Result<()> {
let query = format!(
r#"UPDATE "{}_players" SET deviation = :dev, volatility = :vol, last_played = :last,
@@ -631,7 +663,7 @@ CREATE TABLE IF NOT EXISTS datasets (
id: PlayerId(i),
name: format!("{}", i),
prefix: None,
slug: String::from("a"),
discrim: String::from("a"),
})
.collect()
}
src/main.rs (16 changed lines)
@@ -9,8 +9,8 @@ use time_format::strftime_utc;

mod queries;
use queries::*;
mod datasets;
use datasets::*;
mod database;
use database::*;
mod sync;
use sync::*;
mod util;
@@ -470,20 +470,20 @@ fn player_info(dataset: Option<String>, player: String) {
let player_id = PlayerId(player.parse::<u64>().unwrap());

let PlayerData {
id,
id: _,
name,
prefix,
slug,
} = get_player(&connection, &dataset, player_id).unwrap();
discrim,
} = get_player(&connection, player_id).unwrap();

let (deviation, volatility, last_played) =
get_player_data(&connection, &dataset, player_id).unwrap();
get_player_rating_data(&connection, &dataset, player_id).unwrap();

print!("\n\x1b]8;;https://www.start.gg/{}\x1b\\", slug);
print!("\n\x1b]8;;https://www.start.gg/user/{}\x1b\\", discrim);
if let Some(pre) = prefix {
print!("\x1b[2m{}\x1b[0m ", pre);
}
println!("\x1b[1m{}\x1b[0m\x1b]8;;\x1b\\ ({})", name, id.0);
println!("\x1b[1m{}\x1b[0m\x1b]8;;\x1b\\ ({})", name, discrim);
}

// Sync
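The escape sequences in the player_info hunk above are OSC 8 terminal hyperlinks: `\x1b]8;;<url>\x1b\\` opens a link, the printed text becomes its label, and `\x1b]8;;\x1b\\` closes it. A minimal editorial sketch of the same pattern (the function name is made up; the URL format matches the new code above):

// Editorial sketch, not from the commit: print a player's tag as a clickable start.gg link.
fn print_profile_link(discrim: &str, name: &str) {
    print!("\x1b]8;;https://www.start.gg/user/{}\x1b\\", discrim); // open hyperlink
    println!("\x1b[1m{}\x1b[0m\x1b]8;;\x1b\\", name);              // bold label, then close link
}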
@@ -79,7 +79,7 @@ struct Player {

#[derive(cynic::QueryFragment, Debug)]
struct User {
slug: Option<String>,
discriminator: Option<String>,
}

// Unwrap

@@ -129,7 +129,7 @@ impl QueryUnwrap<EventSetsVars> for EventSets {
id: p_.id?,
name: p_.gamer_tag?,
prefix: p_.prefix.filter(|pr| !pr.is_empty()),
slug: p_.user?.slug?,
discrim: p_.user?.discriminator?,
})
})
.try_collect()
@@ -28,7 +28,7 @@ struct Player {

#[derive(cynic::QueryFragment, Debug)]
struct User {
slug: Option<String>,
discriminator: Option<String>,
}

// Unwrapping

@@ -38,7 +38,7 @@ pub struct PlayerData {
pub id: PlayerId,
pub name: String,
pub prefix: Option<String>,
pub slug: String,
pub discrim: String,
}

impl QueryUnwrap<PlayerInfoVars> for PlayerInfo {

@@ -50,7 +50,7 @@ impl QueryUnwrap<PlayerInfoVars> for PlayerInfo {
id: player.id?,
name: player.gamer_tag?,
prefix: player.prefix.filter(|pr| !pr.is_empty()),
slug: player.user?.slug?,
discrim: player.user?.discriminator?,
})
}
}
@@ -25,7 +25,7 @@ pub struct TournamentEventsVars<'a> {
pub struct TournamentEvents {
#[arguments(query: {
page: $page,
perPage: 250,
perPage: 225,
sortBy: "endAt asc",
filter: {
past: true,

@@ -62,6 +62,7 @@ struct Tournament {
#[cynic(variables = "TournamentEventsVars")]
struct Event {
id: Option<EventId>,
slug: Option<String>,
start_at: Option<Timestamp>,
}

@@ -81,6 +82,7 @@ pub struct TournamentData {
#[derive(Debug, Clone)]
pub struct EventData {
pub id: EventId,
pub slug: String,
pub time: Timestamp,
}

@@ -103,6 +105,7 @@ impl<'a> QueryUnwrap<TournamentEventsVars<'a>> for TournamentEvents {
.filter_map(|event| {
Some(EventData {
id: event.id?,
slug: event.slug?,
time: event.start_at?,
})
})
src/sync.rs (27 changed lines)
@@ -2,7 +2,7 @@ use std::f64::consts::PI;
use std::thread::sleep;
use std::time::Duration;

use crate::datasets::*;
use crate::database::*;
use crate::error;
use crate::queries::*;
use sqlite::*;
@@ -200,10 +200,12 @@ fn update_from_set(
let player2 = it.next().unwrap()[0].id;
drop(it);

let (deviation1, volatility1, last_played1) = get_player_data(connection, dataset, player1)?;
let (deviation1, volatility1, last_played1) =
get_player_rating_data(connection, dataset, player1)?;
let time1 = time.0.checked_sub(last_played1.0).unwrap_or(0);

let (deviation2, volatility2, last_played2) = get_player_data(connection, dataset, player1)?;
let (deviation2, volatility2, last_played2) =
get_player_rating_data(connection, dataset, player1)?;
let time2 = time.0.checked_sub(last_played2.0).unwrap_or(0);

let advantage = match get_advantage(connection, dataset, player1, player2) {
@@ -250,7 +252,7 @@ fn update_from_set(
dev_new1,
vol_new1,
results.winner == 0,
results.id.clone(),
&results.id,
)?;
set_player_data(
connection,
@@ -260,7 +262,7 @@ fn update_from_set(
dev_new2,
vol_new2,
results.winner == 1,
results.id.clone(),
&results.id,
)?;

adjust_advantages(
@@ -298,6 +300,8 @@ pub fn sync_dataset(
num_events
);

add_event(connection, event.id, &event.slug)?;

let mut sets =
get_event_sets(event.id, auth).unwrap_or_else(|| error("Could not access start.gg", 1));
@@ -308,6 +312,7 @@ pub fn sync_dataset(

sets.sort_by_key(|set| set.time);
sets.into_iter().try_for_each(|set| {
add_set(connection, &set.id, event.id)?;
update_from_set(connection, dataset, &metadata, event.time, set)
})?;
}
@@ -318,7 +323,7 @@ pub fn sync_dataset(
#[cfg(test)]
mod tests {
use super::*;
use crate::datasets::tests::*;
use crate::database::tests::*;

#[test]
fn glicko_single() -> sqlite::Result<()> {
@@ -344,8 +349,14 @@ mod tests {
"{:?}",
get_advantage(&connection, "test", PlayerId(1), PlayerId(2))?.unwrap()
);
println!("{:?}", get_player_data(&connection, "test", PlayerId(1)));
println!("{:?}", get_player_data(&connection, "test", PlayerId(2)));
println!(
"{:?}",
get_player_rating_data(&connection, "test", PlayerId(1))
);
println!(
"{:?}",
get_player_rating_data(&connection, "test", PlayerId(2))
);

Ok(())
}