Track set counts for players

Kiana Sheibani 2023-11-02 15:14:28 -04:00
parent ce6692c4c1
commit 5d935ab059
Signed by: toki
GPG key ID: 6CB106C25E86A9F7
5 changed files with 66 additions and 50 deletions

View file

@@ -119,6 +119,8 @@ pub fn new_dataset(
     prefix TEXT,
     deviation REAL NOT NULL,
     volatility REAL NOT NULL,
+    sets_won INTEGER NOT NULL,
+    sets_lost INTEGER NOT NULL,
     last_played INTEGER NOT NULL
 );
@@ -128,8 +130,6 @@ CREATE TABLE "{0}_network" (
     advantage REAL NOT NULL,
     sets_A INTEGER NOT NULL DEFAULT 0,
     sets_B INTEGER NOT NULL DEFAULT 0,
-    games_A INTEGER NOT NULL DEFAULT 0,
-    games_B INTEGER NOT NULL DEFAULT 0,
     UNIQUE (player_A, player_B),
     CHECK (player_A < player_B),
@@ -144,10 +144,9 @@ CREATE INDEX "{0}_network_B"
     ON "{0}_network" (player_B);
 
 CREATE VIEW "{0}_view"
-    (player_A_id, player_B_id, player_A_name, player_B_name, advantage,
-     sets_A, sets_B, sets, games_A, games_B, games) AS
+    (player_A_id, player_B_id, player_A_name, player_B_name, advantage, sets_A, sets_B, sets) AS
 SELECT players_A.id, players_B.id, players_A.name, players_B.name, advantage,
-    sets_A, sets_B, sets_A + sets_B, games_A, games_B, games_A + games_B FROM "{0}_network"
+    sets_A, sets_B, sets_A + sets_B FROM "{0}_network"
 INNER JOIN "{0}_players" players_A ON player_A = players_A.id
 INNER JOIN "{0}_players" players_B ON player_B = players_B.id;"#,
     dataset
@@ -222,7 +221,8 @@ pub fn add_players(
     time: Timestamp,
 ) -> sqlite::Result<()> {
     let query = format!(
-        r#"INSERT OR IGNORE INTO "{}_players" VALUES (?, ?, ?, ?, ?, ?)"#,
+        r#"INSERT OR IGNORE INTO "{}_players"
+            VALUES (?, ?, ?, 2.01, 0.06, 0, 0, ?)"#,
         dataset
     );
@@ -232,9 +232,7 @@ pub fn add_players(
             statement.bind((1, id.0 as i64))?;
             statement.bind((2, name.as_ref().map(|x| &x[..])))?;
             statement.bind((3, prefix.as_ref().map(|x| &x[..])))?;
-            statement.bind((4, 2.01))?;
-            statement.bind((5, 0.06))?;
-            statement.bind((6, time.0 as i64))?;
+            statement.bind((4, time.0 as i64))?;
             statement.into_iter().try_for_each(|x| x.map(|_| ()))
         })
     })
@@ -267,17 +265,20 @@ pub fn set_player_data(
     last_played: Timestamp,
     deviation: f64,
     volatility: f64,
+    won: bool,
 ) -> sqlite::Result<()> {
     let query = format!(
-        r#"UPDATE "{}_players" SET deviation = ?, volatility = ?, last_played = ? WHERE id = ?"#,
+        r#"UPDATE "{}_players" SET deviation = :dev, volatility = :vol, last_played = :last,
+            sets_won = iif(:won, sets_won + 1, sets_won), sets_lost = iif(:won, sets_lost, sets_lost + 1) WHERE id = :id"#,
         dataset
     );
 
     let mut statement = connection.prepare(&query)?;
-    statement.bind((1, deviation))?;
-    statement.bind((2, volatility))?;
-    statement.bind((3, last_played.0 as i64))?;
-    statement.bind((4, player.0 as i64))?;
+    statement.bind((":dev", deviation))?;
+    statement.bind((":vol", volatility))?;
+    statement.bind((":last", last_played.0 as i64))?;
+    statement.bind((":id", player.0 as i64))?;
+    statement.bind((":won", if won { 1 } else { 0 }))?;
     statement.next()?;
     Ok(())
 }
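For context, a minimal sketch (not part of the commit) of what the iif-based UPDATE above does, assuming an in-memory database, a placeholder dataset name "test", and the players schema introduced in the first hunk:

// Illustrative only: shows that iif(:won, ...) bumps exactly one of the two
// new counters, using the same sqlite-crate calls as set_player_data.
fn demo_set_counters() -> sqlite::Result<()> {
    let connection = sqlite::open(":memory:")?;
    connection.execute(
        r#"CREATE TABLE "test_players" (
            id INTEGER PRIMARY KEY, name TEXT, prefix TEXT,
            deviation REAL NOT NULL, volatility REAL NOT NULL,
            sets_won INTEGER NOT NULL, sets_lost INTEGER NOT NULL,
            last_played INTEGER NOT NULL);
        INSERT INTO "test_players" VALUES (1, 'alice', NULL, 2.01, 0.06, 0, 0, 0);"#,
    )?;
    let mut statement = connection.prepare(
        r#"UPDATE "test_players"
            SET sets_won = iif(:won, sets_won + 1, sets_won),
                sets_lost = iif(:won, sets_lost, sets_lost + 1)
            WHERE id = :id"#,
    )?;
    statement.bind((":won", 1_i64))?; // this player won the set
    statement.bind((":id", 1_i64))?;
    statement.next()?;
    Ok(())
}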
@@ -313,8 +314,8 @@ pub fn insert_advantage(
     advantage: f64,
 ) -> sqlite::Result<()> {
     let query = format!(
-        r#"INSERT INTO "{}_network" (player_A, player_B, advantage)
-            VALUES (min(:a, :b), max(:a, :b), iif(:a > :b, -:v, :v))"#,
+        r#"INSERT INTO "{}_network"
+            VALUES (min(:a, :b), max(:a, :b), iif(:a > :b, -:v, :v), 0, 0)"#,
         dataset
     );
@@ -331,10 +332,13 @@ pub fn adjust_advantage(
     player1: PlayerId,
     player2: PlayerId,
     adjust: f64,
+    winner: usize,
 ) -> sqlite::Result<()> {
     let query = format!(
         r#"UPDATE "{}_network"
-            SET advantage = advantage + iif(:a > :b, -:v, :v)
+            SET advantage = advantage + iif(:a > :b, -:v, :v),
+                sets_A = iif(:w = (:a > :b), sets_A + 1, sets_A),
+                sets_B = iif(:w = (:b > :a), sets_B + 1, sets_B)
             WHERE player_A = min(:a, :b) AND player_B = max(:a, :b)"#,
         dataset
     );
@@ -343,6 +347,7 @@ pub fn adjust_advantage(
     statement.bind((":a", player1.0 as i64))?;
     statement.bind((":b", player2.0 as i64))?;
     statement.bind((":v", adjust))?;
+    statement.bind((":w", winner as i64))?;
 
     statement.into_iter().try_for_each(|x| x.map(|_| ()))
 }
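A minimal sketch (not from the commit) of the orientation logic that iif(:w = (:a > :b), ...) encodes: the edge row always stores player_A = min(:a, :b), so the winner index (0 for player1, 1 for player2) has to be restated as "did player_A win?".

// Illustrative only: mirrors the SQL comparison :w = (:a > :b) in Rust.
fn player_a_won(player1: u64, player2: u64, winner: usize) -> bool {
    let a_is_player2 = player1 > player2; // corresponds to :a > :b
    winner == a_is_player2 as usize
}

fn main() {
    // With ids 3 and 7, the edge row stores player_A = 3.
    assert!(player_a_won(3, 7, 0)); // player1 (id 3, stored as player_A) won
    assert!(!player_a_won(7, 3, 0)); // player1 (id 7, stored as player_B) won
}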
@@ -391,20 +396,23 @@ pub fn get_edges(
     .try_collect()
 }
 
-pub fn is_isolated(
+pub fn either_isolated(
     connection: &Connection,
     dataset: &str,
-    player: PlayerId,
+    player1: PlayerId,
+    player2: PlayerId,
 ) -> sqlite::Result<bool> {
     let query = format!(
-        r#"SELECT EXISTS(SELECT 1 FROM "{}_network" WHERE player_A = :pl OR player_B = :pl)"#,
+        r#"SELECT EXISTS(SELECT 1 FROM "{}_network"
+            WHERE player_A = :a OR player_B = :a OR player_A = :b OR player_B = :b)"#,
         dataset
     );
 
     match connection
         .prepare(&query)?
         .into_iter()
-        .bind((":pl", player.0 as i64))?
+        .bind((":a", player1.0 as i64))?
+        .bind((":b", player2.0 as i64))?
         .next()
     {
         None => Ok(true),
@@ -421,10 +429,7 @@ pub fn hypothetical_advantage(
 ) -> sqlite::Result<f64> {
     use std::collections::HashSet;
 
     // Check trivial cases
-    if player1 == player2
-        || is_isolated(connection, dataset, player1)?
-        || is_isolated(connection, dataset, player2)?
-    {
+    if player1 == player2 || either_isolated(connection, dataset, player1, player2)? {
         return Ok(0.0);
     }
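Since the "{dataset}_players" rows now carry the counters directly, a caller can read a player's set record straight off that table. A minimal sketch (not part of the commit), assuming the sqlite crate's Statement::read and a hypothetical helper name:

// Illustrative only: column names come from the new schema above.
fn set_record(
    connection: &sqlite::Connection,
    dataset: &str,
    player: i64,
) -> sqlite::Result<(i64, i64)> {
    let query = format!(
        r#"SELECT sets_won, sets_lost FROM "{}_players" WHERE id = :id"#,
        dataset
    );
    let mut statement = connection.prepare(&query)?;
    statement.bind((":id", player))?;
    statement.next()?; // advance to the single result row
    let won = statement.read::<i64, _>("sets_won")?;
    let lost = statement.read::<i64, _>("sets_lost")?;
    Ok((won, lost))
}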

View file

@@ -1,6 +1,5 @@
 use cynic::{GraphQlResponse, QueryBuilder};
 use serde::{Deserialize, Serialize};
-use std::fmt::Debug;
 use std::path::Path;
 use std::thread::sleep;
 use std::time::Duration;
@@ -15,7 +14,6 @@ pub mod player_info;
 pub use player_info::*;
 
 use crate::error;
-use schema::schema;
 
 // Auth key
@@ -42,30 +40,39 @@ pub fn get_auth_token(config_dir: &Path) -> Option<String> {
     }
 }
 
-// Types
-
-// HACK: Unfortunately, start.gg seems to use integers for its ID type, whereas
-// cynic always assumes that IDs are strings. To get around that, we define new
-// scalar types that deserialize to u64.
-
-#[derive(cynic::Scalar, Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
-#[cynic(graphql_type = "ID")]
-pub struct VideogameId(pub u64);
-
-#[derive(cynic::Scalar, Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
-#[cynic(graphql_type = "ID")]
-pub struct EventId(pub u64);
-
-#[derive(cynic::Scalar, Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
-#[cynic(graphql_type = "ID")]
-pub struct EntrantId(pub u64);
-
-#[derive(cynic::Scalar, Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
-#[cynic(graphql_type = "ID")]
-pub struct PlayerId(pub u64);
-
-#[derive(cynic::Scalar, Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub struct Timestamp(pub u64);
+pub mod scalars {
+    use schema::schema;
+
+    // Types
+
+    // HACK: Unfortunately, start.gg seems to use integers for its ID type, whereas
+    // cynic always assumes that IDs are strings. To get around that, we define new
+    // scalar types that deserialize to u64.
+
+    #[derive(cynic::Scalar, Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+    #[cynic(graphql_type = "ID")]
+    pub struct VideogameId(pub u64);
+
+    #[derive(cynic::Scalar, Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+    #[cynic(graphql_type = "ID")]
+    pub struct EventId(pub u64);
+
+    #[derive(cynic::Scalar, Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+    #[cynic(graphql_type = "ID")]
+    pub struct EntrantId(pub u64);
+
+    #[derive(cynic::Scalar, Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+    #[cynic(graphql_type = "ID")]
+    pub struct PlayerId(pub u64);
+
+    #[derive(cynic::Scalar, Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+    #[cynic(graphql_type = "ID")]
+    pub struct SetId(pub u64);
+
+    #[derive(cynic::Scalar, Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+    pub struct Timestamp(pub u64);
+}
+
+pub use scalars::*;
 
 // Query machinery

View file

@@ -1,4 +1,5 @@
-use super::{EntrantId, EventId, PlayerData, PlayerId, QueryUnwrap, Timestamp};
+use super::scalars::*;
+use super::{PlayerData, QueryUnwrap};
 use cynic::GraphQlResponse;
 use schema::schema;
@@ -24,7 +25,7 @@ pub struct EventSets {
 #[derive(cynic::QueryFragment, Debug)]
 #[cynic(variables = "EventSetsVars")]
 struct Event {
-    #[arguments(page: $page, perPage: 40, sortType: RECENT)]
+    #[arguments(page: $page, perPage: 30, sortType: RECENT)]
     sets: Option<SetConnection>,
 }

View file

@@ -1,5 +1,5 @@
+use super::scalars::*;
 use super::QueryUnwrap;
-use super::{EventId, Timestamp, VideogameId};
 use cynic::GraphQlResponse;
 use schema::schema;

View file

@@ -203,9 +203,9 @@ fn update_from_set(
     drop(it);
 
     let (deviation1, volatility1, last_played1) = get_player_data(connection, dataset, player1)?;
-    let (deviation2, volatility2, last_played2) = get_player_data(connection, dataset, player1)?;
     let time1 = results.time.0.checked_sub(last_played1.0).unwrap_or(0);
+    let (deviation2, volatility2, last_played2) = get_player_data(connection, dataset, player1)?;
     let time2 = results.time.0.checked_sub(last_played2.0).unwrap_or(0);
 
     let advantage = match get_advantage(connection, dataset, player1, player2) {
@@ -243,6 +243,7 @@ fn update_from_set(
         results.time,
         dev_new1,
         vol_new1,
+        results.winner == 0,
     )?;
     set_player_data(
         connection,
@@ -251,6 +252,7 @@ fn update_from_set(
         results.time,
         dev_new2,
         vol_new2,
+        results.winner == 1,
     )?;
 
     let decay_rate = metadata.decay_rate;
@@ -262,6 +264,7 @@ fn update_from_set(
         player1,
         player2,
         (1.0 - decay_rate) * (adjust2 - adjust1),
+        results.winner,
     )
 }