Implement RNR rating adjustment
parent 1b603bf727
commit 2e3bd017f3
@@ -7,7 +7,8 @@ network of relative advantages between players.
 Once the advantage network is generated, StartRNR can be used to predict the
 probability of a player winning a match, generate provably optimal seedings for
-tournaments, and create rankings of players automatically.
+tournaments, inspect the match history of two players, and create competitive
+rankings automatically.
 
 **All of these features work for any game, in any region, without restriction.**
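The win-probability claim above comes down to mapping a relative advantage onto a logistic curve. Below is a minimal sketch of that mapping, assuming the same base-10, 400-point Elo-style scale that the adjustment factor in src/sync.rs uses later in this diff; the function name and the sign convention are illustrative, not part of this commit.

    /// Illustrative only: turn a relative advantage (positive meaning player 1
    /// is favoured) into a win probability, assuming the base-10 / 400-point
    /// curve used by the adjustment factor in src/sync.rs.
    fn win_probability(advantage: f64) -> f64 {
        1.0 / (1.0 + 10_f64.powf(-advantage / 400.0))
    }

    fn main() {
        // An advantage of 0 is a coin flip; +400 is roughly a 10:1 favourite.
        assert!((win_probability(0.0) - 0.5).abs() < 1e-9);
        assert!((win_probability(400.0) - 10.0 / 11.0).abs() < 1e-9);
    }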
205 src/datasets.rs
@@ -8,6 +8,7 @@ use std::time::SystemTime;
 pub struct DatasetMetadata {
     pub last_sync: Timestamp,
+
     pub game_id: VideogameId,
     pub game_name: String,
     pub state: Option<String>,
@@ -33,9 +34,11 @@ pub fn open_datasets(config_dir: &Path) -> sqlite::Result<Connection> {
     let path = datasets_path(config_dir).unwrap();
 
     let query = "
         PRAGMA foreign_keys = ON;
 
         CREATE TABLE IF NOT EXISTS datasets (
             name TEXT UNIQUE NOT NULL,
-            last_sync INTEGER DEFAULT 1,
+            last_sync INTEGER NOT NULL,
+            game_id INTEGER NOT NULL,
+            game_name TEXT NOT NULL,
             state TEXT
@@ -82,7 +85,8 @@ pub fn list_datasets(connection: &Connection) -> sqlite::Result<Vec<(String, Dat
 pub fn delete_dataset(connection: &Connection, dataset: &str) -> sqlite::Result<()> {
     let query = format!(
         r#"DELETE FROM datasets WHERE name = '{0}';
-        DROP TABLE "dataset_{0}";"#,
+        DROP TABLE "dataset_{0}_players";
+        DROP TABLE "dataset_{0}_network";"#,
         dataset
     );
 
@@ -97,11 +101,25 @@ pub fn new_dataset(
     let query1 = r#"INSERT INTO datasets (name, game_id, game_name, state)
         VALUES (?, ?, ?, ?)"#;
     let query2 = format!(
-        r#" CREATE TABLE "dataset_{0}" (
+        r#"
+        CREATE TABLE "dataset_{0}_players" (
             id INTEGER PRIMARY KEY,
             name TEXT,
-            prefix TEXT,
-            elo REAL NOT NULL
+            prefix TEXT
+        );
+        CREATE TABLE "dataset_{0}_network" (
+            player_A INTEGER NOT NULL,
+            player_B INTEGER NOT NULL,
+            advantage REAL NOT NULL,
+            sets_A INTEGER NOT NULL,
+            sets_B INTEGER NOT NULL,
+            games_A INTEGER NOT NULL,
+            games_B INTEGER NOT NULL,
+
+            UNIQUE (player_A, player_B),
+            CHECK (player_A < player_B),
+            FOREIGN KEY(player_A, player_B) REFERENCES "dataset_{0}_players"
+                ON DELETE CASCADE
         ) STRICT;"#,
         dataset
     );
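A note on the design choice above: each pair of players is stored as a single row, and the CHECK (player_A < player_B) constraint fixes a canonical order, so an edge is never duplicated; reading it from the other direction just flips the sign. Lookups therefore normalise the argument order, as get_advantage does later in this diff. A hedged sketch of that convention follows; the dataset name, the AS alias, and the helper function are illustrative, not part of this commit.

    /// Sketch only: read the advantage of `a` over `b` from a canonically
    /// ordered edge table named "dataset_example_network" (hypothetical name).
    fn read_edge(connection: &sqlite::Connection, a: i64, b: i64) -> sqlite::Result<f64> {
        let mut statement = connection.prepare(
            r#"SELECT iif(:a > :b, -advantage, advantage) AS advantage
               FROM "dataset_example_network"
               WHERE player_A = min(:a, :b) AND player_B = max(:a, :b)"#,
        )?;
        statement.bind((":a", a))?;
        statement.bind((":b", b))?;
        statement.next()?;
        statement.read::<f64, _>("advantage")
    }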
@@ -165,7 +183,7 @@ pub fn add_players(
     teams: &Teams<PlayerData>,
 ) -> sqlite::Result<()> {
     let query = format!(
-        r#"INSERT OR IGNORE INTO "dataset_{}" VALUES (?, ?, ?, 1500)"#,
+        r#"INSERT OR IGNORE INTO "dataset_{}_players" VALUES (?, ?, ?)"#,
         dataset
     );
 
@@ -180,43 +198,160 @@ pub fn add_players(
     })
 }
 
-pub fn get_ratings(
+pub fn get_advantage(
     connection: &Connection,
     dataset: &str,
-    teams: &Teams<PlayerData>,
-) -> sqlite::Result<Teams<(PlayerId, f64)>> {
-    let query = format!(r#"SELECT id, elo FROM "dataset_{}" WHERE id = ?"#, dataset);
+    player1: PlayerId,
+    player2: PlayerId,
+) -> sqlite::Result<f64> {
+    if player1 == player2 {
+        return Ok(0.0);
+    }
 
-    teams
-        .iter()
-        .map(|team| {
-            team.iter()
-                .map(|data| {
-                    let mut statement = connection.prepare(&query)?;
-                    statement.bind((1, data.id.0 as i64))?;
-                    statement.next()?;
-                    Ok((data.id, statement.read::<f64, _>("elo")?))
-                })
-                .try_collect()
+    let query = format!(
+        r#"SELECT iif(:a > :b, -advantage, advantage) FROM "dataset_{}_network"
+            WHERE player_A = min(:a, :b) AND player_B = max(:a, :b)"#,
+        dataset
+    );
+
+    let mut statement = connection.prepare(&query)?;
+    statement.bind((":a", player1.0 as i64))?;
+    statement.bind((":b", player2.0 as i64))?;
+    statement.next()?;
+    statement.read::<f64, _>("advantage")
+}
+
+pub fn adjust_advantage(
+    connection: &Connection,
+    dataset: &str,
+    player1: PlayerId,
+    player2: PlayerId,
+    adjust: f64,
+) -> sqlite::Result<()> {
+    let query = format!(
+        r#"UPDATE "dataset_{}_network"
+            SET advantage = advantage + iif(:a > :b, -:v, :v)
+            WHERE player_A = min(:a, :b) AND player_B = max(:a, :b)"#,
+        dataset
+    );
+
+    let mut statement = connection.prepare(&query)?;
+    statement.bind((":a", player1.0 as i64))?;
+    statement.bind((":b", player2.0 as i64))?;
+    statement.bind((":v", adjust))?;
+    statement.into_iter().try_for_each(|x| x.map(|_| ()))
+}
+
+pub fn adjust_advantages(
+    connection: &Connection,
+    dataset: &str,
+    player: PlayerId,
+    adjust: f64,
+) -> sqlite::Result<()> {
+    let query = format!(
+        r#"UPDATE "dataset_{}_network"
+            SET advantage = advantage + iif(:pl = player_A, -:v, :v)
+            WHERE player_A = :pl OR player_B = :pl"#,
+        dataset
+    );
+
+    let mut statement = connection.prepare(&query)?;
+    statement.bind((":pl", player.0 as i64))?;
+    statement.bind((":v", adjust))?;
+    statement.into_iter().try_for_each(|x| x.map(|_| ()))
+}
+
+pub fn get_edges(
+    connection: &Connection,
+    dataset: &str,
+    player: PlayerId,
+) -> sqlite::Result<Vec<(PlayerId, f64)>> {
+    let query = format!(
+        r#"SELECT iif(:pl = player_B, player_A, player_B) AS id, iif(:pl = player_B, -advantage, advantage) AS advantage
+            FROM "dataset_{}_network"
+            WHERE player_A = :pl OR player_B = :pl"#,
+        dataset
+    );
+
+    connection
+        .prepare(&query)?
+        .into_iter()
+        .bind((":pl", player.0 as i64))?
+        .map(|r| {
+            let r_ = r?;
+            Ok((
+                PlayerId(r_.read::<i64, _>("id") as u64),
+                r_.read::<f64, _>("advantage"),
+            ))
+        })
+        .try_collect()
+}
 
-pub fn update_ratings(
+pub fn get_path_advantage(
     connection: &Connection,
     dataset: &str,
-    elos: Teams<(PlayerId, f64)>,
-) -> sqlite::Result<()> {
-    let query = format!(
-        r#"UPDATE "dataset_{}" SET elo = :elo WHERE id = :id"#,
-        dataset
-    );
-    elos.into_iter().try_for_each(|team| {
-        team.into_iter().try_for_each(|(id, elo)| {
-            let mut statement = connection.prepare(&query)?;
-            statement.bind((":elo", elo))?;
-            statement.bind((":id", id.0 as i64))?;
-            statement.into_iter().try_for_each(|x| x.map(|_| ()))
-        })
+    players: &[PlayerId],
+) -> sqlite::Result<f64> {
+    players.windows(2).try_fold(0.0, |acc, [a, b]| {
+        Ok(acc + get_advantage(connection, dataset, *a, *b)?)
     })
 }
+
+pub fn hypothetical_advantage(
+    connection: &Connection,
+    dataset: &str,
+    player1: PlayerId,
+    player2: PlayerId,
+) -> sqlite::Result<f64> {
+    if player1 == player2 {
+        return Ok(0.0);
+    }
+
+    let mut paths: Vec<Vec<(Vec<PlayerId>, f64)>> = vec![vec![(vec![player1], 0.0)]];
+
+    for _ in 2..=6 {
+        let new_paths = paths.last().unwrap().into_iter().cloned().try_fold(
+            Vec::new(),
+            |mut acc, (path, adv)| {
+                acc.extend(
+                    get_edges(connection, dataset, *path.last().unwrap())?
+                        .into_iter()
+                        .map(|(x, next_adv)| {
+                            let mut path = path.clone();
+                            path.extend_one(x);
+                            (path, adv + next_adv)
+                        }),
+                );
+                Ok(acc)
+            },
+        )?;
+        paths.extend_one(new_paths);
+    }
+
+    let mut shortest_len = 0;
+
+    Ok(paths[1..]
+        .into_iter()
+        .enumerate()
+        .map(|(i, ps)| {
+            let num_ps = ps.len();
+            if num_ps == 0 {
+                return 0.0;
+            }
+            if shortest_len == 0 {
+                shortest_len = i + 1;
+            }
+            ps.into_iter()
+                .filter_map(|(path, adv)| {
+                    if *path.last().unwrap() == player2 {
+                        Some(adv)
+                    } else {
+                        None
+                    }
+                })
+                .sum::<f64>()
+                / num_ps as f64
+                * (0.5_f64.powi((i - shortest_len) as i32))
+        })
+        .sum())
+}
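Taken together, the new functions give two ways to compare a pair of players: a direct edge lookup when they have already played (get_advantage), and a path-based estimate through mutual opponents when they have not (hypothetical_advantage, built on get_edges). A hedged usage sketch of that fallback pattern follows, mirroring the one update_from_set uses in src/sync.rs below; the dataset name and the wrapper function are placeholders, not part of this commit.

    /// Sketch only: prefer the direct edge, fall back to the path-based
    /// estimate when the lookup fails (for example, the players never met).
    fn relative_advantage(
        connection: &Connection,
        player1: PlayerId,
        player2: PlayerId,
    ) -> sqlite::Result<f64> {
        get_advantage(connection, "example", player1, player2)
            .or_else(|_| hypothetical_advantage(connection, "example", player1, player2))
    }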
@@ -1,4 +1,5 @@
 #![feature(iterator_try_collect)]
+#![feature(extend_one)]
 
 use clap::{Parser, Subcommand};
 use std::io::{self, Write};
@@ -51,7 +52,7 @@ AUTH_TOKEN, or in a text file '<CONFIG_DIR>/auth.txt'."
         value_name = "DIR",
         global = true,
         help = "Config directory",
-        long_help = "This option overrides the default config directory.
+        long_help = "This flag overrides the default config directory.
 If this directory does not exist, it will be created and a database file will
 be initialized within it."
     )]
@@ -67,9 +68,9 @@ enum Subcommands {
     },
     #[command(
         about = "Sync player ratings",
-        long_about = "Pull recent tournament data off of start.gg and use it to update each player's
-stored ratings. This command will automatically keep track of the last time each
-dataset was synced."
+        long_about = "Pull recent tournament data off of start.gg and use it to
+update the network. This command will automatically keep track of the last time each
+dataset was synced to ensure that each tournament is only accounted for once."
     )]
     Sync {
         #[arg(
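The long_about above leans on the per-dataset last_sync value stored in the datasets table: only tournaments newer than that timestamp should be pulled in again on the next run. A minimal sketch of that idea follows, assuming a simple timestamp comparison; the helper and the event_end parameter are hypothetical, only Timestamp and last_sync appear in this diff.

    /// Hypothetical sketch: an event is processed only if it ended after the
    /// dataset's stored last_sync timestamp, so reruns skip tournaments that
    /// were already counted.
    fn needs_processing(event_end: Timestamp, last_sync: Timestamp) -> bool {
        event_end.0 > last_sync.0
    }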
@@ -52,19 +52,19 @@ pub fn get_auth_token(config_dir: &Path) -> Option<String> {
 #[cynic(graphql_type = "ID")]
 pub struct VideogameId(pub u64);
 
-#[derive(cynic::Scalar, Debug, Copy, Clone)]
+#[derive(cynic::Scalar, Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
 #[cynic(graphql_type = "ID")]
 pub struct EventId(pub u64);
 
-#[derive(cynic::Scalar, Debug, Copy, Clone)]
+#[derive(cynic::Scalar, Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
 #[cynic(graphql_type = "ID")]
 pub struct EntrantId(pub u64);
 
-#[derive(cynic::Scalar, Debug, Copy, Clone)]
+#[derive(cynic::Scalar, Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
 #[cynic(graphql_type = "ID")]
 pub struct PlayerId(pub u64);
 
-#[derive(cynic::Scalar, Debug, Copy, Clone)]
+#[derive(cynic::Scalar, Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
 pub struct Timestamp(pub u64);
 
 // Query machinery
@@ -89,6 +89,7 @@ where
         .header("Authorization", String::from("Bearer ") + auth_token)
         .run_graphql(Builder::build(vars));
 
+    // If query fails to reach server, retry up to 10 times
     for _ in 1..10 {
         if response.is_ok() {
             break;
89 src/sync.rs
@@ -8,31 +8,31 @@ use sqlite::*;
 
 // Score calculation
 
-/// Calculate the collective expected score for each team.
-fn expected_scores(ratings: &Teams<&mut f64>) -> Vec<f64> {
-    let qs: Vec<f64> = ratings
-        .into_iter()
-        .map(|es| 10_f64.powf(es.iter().map(|x| **x).sum::<f64>() / es.len() as f64 / 400.0))
-        .collect();
-    let sumq: f64 = qs.iter().sum();
-    qs.into_iter().map(|q| q / sumq).collect()
-}
+// /// Calculate the collective expected score for each team.
+// fn expected_scores(ratings: &Teams<&mut f64>) -> Vec<f64> {
+//     let qs: Vec<f64> = ratings
+//         .into_iter()
+//         .map(|es| 10_f64.powf(es.iter().map(|x| **x).sum::<f64>() / es.len() as f64 / 400.0))
+//         .collect();
+//     let sumq: f64 = qs.iter().sum();
+//     qs.into_iter().map(|q| q / sumq).collect()
+// }
 
-/// Adjust the ratings of each player based on who won.
-fn adjust_ratings(ratings: Teams<&mut f64>, winner: usize) {
-    let exp_scores = expected_scores(&ratings);
+// /// Adjust the ratings of each player based on who won.
+// fn adjust_ratings(ratings: Teams<&mut f64>, winner: usize) {
+//     let exp_scores = expected_scores(&ratings);
 
-    ratings
-        .into_iter()
-        .zip(exp_scores.into_iter())
-        .enumerate()
-        .for_each(|(i, (es, exp_sc))| {
-            let len = es.len() as f64;
-            let score = f64::from(winner == i);
-            es.into_iter()
-                .for_each(|e| *e += 40.0 * (score - exp_sc) / len);
-        })
-}
+//     ratings
+//         .into_iter()
+//         .zip(exp_scores.into_iter())
+//         .enumerate()
+//         .for_each(|(i, (es, exp_sc))| {
+//             let len = es.len() as f64;
+//             let score = f64::from(winner == i);
+//             es.into_iter()
+//                 .for_each(|e| *e += 40.0 * (score - exp_sc) / len);
+//         })
+// }
 
 // Extract set data
 
@@ -135,14 +135,41 @@ fn update_from_set(connection: &Connection, dataset: &str, results: SetData) ->
     let players_data = results.teams;
     add_players(connection, dataset, &players_data)?;
 
-    let mut elos = get_ratings(connection, dataset, &players_data)?;
-    adjust_ratings(
-        elos.iter_mut()
-            .map(|v| v.iter_mut().map(|x| &mut x.1).collect())
-            .collect(),
-        results.winner,
-    );
-    update_ratings(connection, dataset, elos)
+    // Only singles matches are currently supported
+    if players_data.len() != 2 || players_data[0].len() != 1 || players_data[1].len() != 1 {
+        return Ok(());
+    }
+
+    let mut it = players_data.into_iter();
+    let player1 = it.next().unwrap()[0].id;
+    let player2 = it.next().unwrap()[0].id;
+
+    let advantage = get_advantage(connection, dataset, player1, player2)
+        .or_else(|_| hypothetical_advantage(connection, dataset, player1, player2))?;
+    let adjust = 40.0 * (1.0 - 1.0 / (1.0 + 10_f64.powf(advantage / 400.0)));
+
+    if results.winner == 0 {
+        adjust_advantages(connection, dataset, player1, 0.5 * adjust)?;
+        adjust_advantages(connection, dataset, player2, -0.5 * adjust)?;
+        adjust_advantage(
+            connection,
+            dataset,
+            player1,
+            player2,
+            -2.0 * (1.0 - 0.5) * adjust,
+        )?;
+    } else {
+        adjust_advantages(connection, dataset, player1, -0.5 * adjust)?;
+        adjust_advantages(connection, dataset, player2, 0.5 * adjust)?;
+        adjust_advantage(
+            connection,
+            dataset,
+            player1,
+            player2,
+            2.0 * (1.0 - 0.5) * adjust,
+        )?;
+    }
+    Ok(())
 }
 
 pub fn sync_dataset(
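The size of each update above comes from an Elo-style factor: adjust = 40 * (1 - 1 / (1 + 10^(advantage / 400))), where advantage is the pre-match value looked up (or estimated) between the two players. A small worked check of that expression follows, for illustration only; the helper name is not part of this commit, and nothing is implied here about which direction the network edges move.

    /// Same expression as in update_from_set above, pulled out for a quick check.
    fn adjust_factor(advantage: f64) -> f64 {
        40.0 * (1.0 - 1.0 / (1.0 + 10_f64.powf(advantage / 400.0)))
    }

    fn main() {
        // With no prior advantage the factor is exactly 20; at +400 it is
        // 400/11 (about 36.4) and at -400 it is 40/11 (about 3.6).
        assert!((adjust_factor(0.0) - 20.0).abs() < 1e-9);
        assert!((adjust_factor(400.0) - 400.0 / 11.0).abs() < 1e-9);
        assert!((adjust_factor(-400.0) - 40.0 / 11.0).abs() < 1e-9);
    }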