Compare commits

No commits in common. "4b44b8253111fe82513b08713263a2b6d91de7fa" and "aed935e1ac5b85f708cee1d827d729d2f9db2d2d" have entirely different histories.

9 changed files with 698 additions and 539 deletions

@ -10,13 +10,13 @@ name = "startrnr"
path = "src/main.rs" path = "src/main.rs"
[dependencies] [dependencies]
# GraphQL schema
schema.path = "schema"
# CLI # CLI
clap = { version = "4.4", features = ["derive"] } clap = { version = "4.4", features = ["derive"] }
chrono = "0.4" chrono = "0.4"
# GraphQL schema
schema.path = "schema"
# API access # API access
cynic = { version = "3.2", features = ["http-reqwest-blocking"] } cynic = { version = "3.2", features = ["http-reqwest-blocking"] }
reqwest = "0.11" reqwest = "0.11"

@@ -9,25 +9,23 @@ linked to your account. Instructions for generating one can be found in the
 
 Once you have an auth token, it must be provided to StartRNR. In order, the
 program checks for a token in:
 
-1. A command-line flag `--auth`.
-2. An environment variable `AUTH_TOKEN`,
-3. A file `auth.txt` within the config directory:
-   - Windows: `%APPDATA%\Roaming\startrnr/auth.txt`
-   - MacOS: `~/Library/Application Support/startrnr/auth.txt`
-   - Linux: `~/.config/startrnr/auth.txt`
+- A command-line flag `--auth`.
+- An environment variable `AUTH_TOKEN`,
+- A file `auth.txt` within the config directory (see the [README](README.md) for
+  a list of directories in each OS).
 
 The last method is recommended, as StartRNR can simply read from that file
 whenever it needs to.
 
 ## Step 2: Dependencies
 
-StartRNR requires these dependencies:
-
-- [Rust](https://www.rust-lang.org/tools/install)
-- [OpenSSL](https://github.com/openssl/openssl#build-and-install)
-- [SQLite](https://www.sqlite.org/download.html)
-
-Follow the instructions to download and install each.
+StartRNR is written in Rust, so install the [Rust
+toolchain](https://www.rust-lang.org/tools/install).
+
+In addition, StartRNR needs these run-time dependencies:
+
+- [OpenSSL](https://www.openssl.org/)
+- [SQLite](https://www.sqlite.org/)
 
 ## Step 3: Compiling
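For reference, the token lookup order described in Step 1 above corresponds to roughly the following logic. This is only a simplified sketch (the helper name is illustrative, and the real `get_auth_token` shown later in this diff distinguishes error cases rather than silently falling through):

```
use std::{env, fs, path::Path};

/// Resolve the start.gg token: --auth flag, then AUTH_TOKEN, then auth.txt.
/// Sketch only; not the project's exact code.
fn resolve_auth_token(cli_auth: Option<String>, config_dir: &Path) -> Option<String> {
    cli_auth
        .or_else(|| env::var("AUTH_TOKEN").ok())
        .or_else(|| {
            let mut path = config_dir.to_owned();
            path.push("startrnr");
            path.push("auth.txt");
            fs::read_to_string(path).ok().map(|s| s.trim().to_owned())
        })
}
```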

@@ -34,8 +34,6 @@ Alternatively, if you use Nix:
 nix profile install github:kiana-S/StartRNR
 ```
 
-You will need to provide a start.gg API token to access tournament data. Details can be found in [INSTALL.md](INSTALL.md).
-
 ## Usage
 
 Once StartRNR is installed, run:
@@ -60,6 +58,17 @@ startrnr player matchup <player1> <player2>
 A player can be specified by their tag or by their
 [discriminator](https://help.start.gg/en/articles/4855957-discriminators-on-start-gg).
 
+## Configuration
+
+StartRNR stores its rating databases in its config directory, which is located at:
+
+- Windows: `%APPDATA%\Roaming\startrnr`
+- MacOS: `~/Library/Application Support/startrnr`
+- Linux: `~/.config/startrnr`
+
+This directory can be used to store the authentication token, which is required
+for using StartRNR.
+
 ## Details - The RNR System
 
 *For more information on RNR, see the [details page](DETAILS.md).*
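The directories in the new Configuration section are the platform config directory plus a `startrnr` subfolder; with the `dirs` crate (already a dependency of this project), the resolution is roughly:

```
use std::path::PathBuf;

/// e.g. ~/.config/startrnr on Linux (sketch; the --config-dir flag overrides this).
fn startrnr_config_dir() -> PathBuf {
    let mut dir = dirs::config_dir().expect("Could not determine config directory");
    dir.push("startrnr");
    dir
}
```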

@ -207,10 +207,6 @@ enum StreamType {
""" """
MIXER MIXER
"""
"""
YOUTUBE
} }
""" """
@ -331,14 +327,6 @@ type Event {
""" """
deckSubmissionDeadline: Timestamp deckSubmissionDeadline: Timestamp
""" """
Maximum number of participants each Entrant can have
"""
entrantSizeMax: Int @deprecated(reason: "Migrate to teamRosterSize")
"""
Minimum number of participants each Entrant can have
"""
entrantSizeMin: Int @deprecated(reason: "Migrate to teamRosterSize")
"""
The entrants that belong to an event, paginated by filter criteria The entrants that belong to an event, paginated by filter criteria
""" """
entrants(query: EventEntrantPageQuery): EntrantConnection entrants(query: EventEntrantPageQuery): EntrantConnection
@ -399,10 +387,6 @@ type Event {
""" """
rulesetId: Int rulesetId: Int
""" """
Settings pulled from the event ruleset, if one exists
"""
rulesetSettings: JSON @deprecated(reason: "Use ruleset")
"""
Paginated sets for this Event Paginated sets for this Event
""" """
sets( sets(
@ -539,7 +523,6 @@ type Entrant {
Standing for this entrant given an event. All entrants queried must be in the same event (for now). Standing for this entrant given an event. All entrants queried must be in the same event (for now).
""" """
standing: Standing standing: Standing
stream: Streams @deprecated(reason: "DEPRECATED. Use streams instead, which supports multiple stream types and teams.")
streams: [Streams] streams: [Streams]
""" """
Team linked to this entrant, if one exists Team linked to this entrant, if one exists
@ -730,10 +713,6 @@ type Set {
""" """
phaseGroup: PhaseGroup phaseGroup: PhaseGroup
""" """
The sets that are affected from resetting this set
"""
resetAffectedData: ResetAffectedData
"""
The round number of the set. Negative numbers are losers bracket The round number of the set. Negative numbers are losers bracket
""" """
round: Int round: Int
@ -777,14 +756,6 @@ A game represents a single game within a set.
""" """
type Game { type Game {
id: ID id: ID
"""
Score of entrant 1. For smash, this is equivalent to stocks remaining.
"""
entrant1Score: Int
"""
Score of entrant 2. For smash, this is equivalent to stocks remaining.
"""
entrant2Score: Int
images(type: String): [Image] images(type: String): [Image]
orderNum: Int orderNum: Int
""" """
@ -815,10 +786,6 @@ type Image {
A selection for this game. i.e. character/stage selection, etc A selection for this game. i.e. character/stage selection, etc
""" """
type GameSelection { type GameSelection {
"""
If this is a character selection, returns the selected character.
"""
character: Character
id: ID id: ID
""" """
The entrant who this selection is for The entrant who this selection is for
@ -834,18 +801,6 @@ type GameSelection {
selectionValue: Int selectionValue: Int
} }
"""
A character in a videogame
"""
type Character {
id: ID
images(type: String): [Image]
"""
Name of Character
"""
name: String
}
""" """
A participant of a tournament; either a spectator or competitor A participant of a tournament; either a spectator or competitor
""" """
@ -958,15 +913,6 @@ type Player {
""" """
rankings(limit: Int, videogameId: ID): [PlayerRank] rankings(limit: Int, videogameId: ID): [PlayerRank]
""" """
Recent sets for this player.
"""
recentSets(
"""
Use this to get H2H history between two players
"""
opponentId: ID
): [Set] @deprecated(reason: "Use the sets field instead.")
"""
Recent standings Recent standings
""" """
recentStandings( recentStandings(
@ -1027,7 +973,6 @@ type Standing {
The player(s) tied to this standing's entity The player(s) tied to this standing's entity
""" """
player: Player player: Player
standing: Int @deprecated(reason: "The \"placement\" field is identical and will eventually replace \"standing\"")
stats: StandingStats stats: StandingStats
totalPoints: Float totalPoints: Float
} }
@ -1298,10 +1243,6 @@ enum StreamSource {
Stream is on a mixer.com channel Stream is on a mixer.com channel
""" """
MIXER MIXER
"""
Stream is on a youtube.com channel
"""
YOUTUBE
} }
input TeamPaginationQuery { input TeamPaginationQuery {
@ -1415,8 +1356,6 @@ interface Team {
Uniquely identifying token for team. Same as the hashed part of the slug Uniquely identifying token for team. Same as the hashed part of the slug
""" """
discriminator: String discriminator: String
entrant: Entrant @deprecated(reason: "Use the entrant field off the EventTeam type")
event: Event @deprecated(reason: "Use the event field off the EventTeam type")
images(type: String): [Image] images(type: String): [Image]
members(status: [TeamMemberStatus]): [TeamMember] members(status: [TeamMemberStatus]): [TeamMember]
name: String name: String
@ -1492,22 +1431,6 @@ type PhaseGroup {
""" """
firstRoundTime: Timestamp firstRoundTime: Timestamp
numRounds: Int numRounds: Int
paginatedSeeds(query: SeedPaginationQuery!, eventId: ID): SeedConnection @deprecated(reason: "Please use 'seeds', which is now paginated")
"""
Paginated sets on this phaseGroup
"""
paginatedSets(
page: Int
perPage: Int
"""
How to sort these sets
"""
sortType: SetSortType
"""
Supported filter options to filter down set results.
"""
filters: SetFilters
): SetConnection @deprecated(reason: "Please use 'sets', which is now paginated")
""" """
The phase associated with this phase group The phase associated with this phase group
""" """
@ -1689,7 +1612,6 @@ type Phase {
The number of seeds this phase contains. The number of seeds this phase contains.
""" """
numSeeds: Int numSeeds: Int
paginatedSeeds(query: SeedPaginationQuery!, eventId: ID): SeedConnection @deprecated(reason: "Please use 'seeds' instead")
""" """
Phase groups under this phase, paginated Phase groups under this phase, paginated
""" """
@ -1831,12 +1753,6 @@ type Stage {
name: String name: String
} }
type ResetAffectedData {
affectedSetCount: Int
affectedSets: [Set]
affectedPhaseGroupCount: Int
}
""" """
A slot in a set where a seed currently or will eventually exist in order to participate in the set. A slot in a set where a seed currently or will eventually exist in order to participate in the set.
""" """
@ -1894,10 +1810,6 @@ type League {
""" """
events(query: LeagueEventsQuery): EventConnection events(query: LeagueEventsQuery): EventConnection
""" """
Hacked "progression" into this final event
"""
finalEventId: Int @deprecated(reason: "No longer used")
"""
True if tournament has at least one offline event True if tournament has at least one offline event
""" """
hasOfflineEvents: Boolean hasOfflineEvents: Boolean
@ -1916,10 +1828,6 @@ type League {
The tournament name The tournament name
""" """
name: String name: String
"""
Top X number of people in the standings who progress to final event
"""
numProgressingToFinalEvent: Int @deprecated(reason: "No longer used")
numUniquePlayers: Int numUniquePlayers: Int
postalCode: String postalCode: String
primaryContact: String primaryContact: String
@ -2056,10 +1964,18 @@ type Videogame {
images(type: String): [Image] images(type: String): [Image]
name: String name: String
slug: String slug: String
}
"""
A character in a videogame
"""
type Character {
id: ID
images(type: String): [Image]
""" """
All stages for this videogame Name of Character
""" """
stages: [Stage] name: String
} }
input StandingPaginationQuery { input StandingPaginationQuery {
@ -2380,32 +2296,6 @@ type Mutation {
""" """
deleteWave(waveId: ID!): Boolean deleteWave(waveId: ID!): Boolean
""" """
Generate tournament registration Token on behalf of user
"""
generateRegistrationToken(registration: TournamentRegistrationInput!, userId: ID!): String
"""
Update a set to called state
"""
markSetCalled(setId: ID!): Set
"""
Update a set to called state
"""
markSetInProgress(setId: ID!): Set
"""
Register for tournament
"""
registerForTournament(registration: TournamentRegistrationInput, registrationToken: String): Participant
"""
Report set winner or game stats for a H2H bracket set. If winnerId is
supplied, mark set as complete. gameData parameter will overwrite any existing
reported game data.
"""
reportBracketSet(setId: ID!, winnerId: ID, isDQ: Boolean, gameData: [BracketSetGameDataInput]): [Set]
"""
Resets set to initial state, can affect other sets and phase groups
"""
resetSet(setId: ID!, resetDependentSets: Boolean): Set
"""
Automatically attempt to resolve all schedule conflicts. Returns a list of changed seeds Automatically attempt to resolve all schedule conflicts. Returns a list of changed seeds
""" """
resolveScheduleConflicts(tournamentId: ID!, options: ResolveConflictsOptions): [Seed] resolveScheduleConflicts(tournamentId: ID!, options: ResolveConflictsOptions): [Seed]
@ -2414,11 +2304,6 @@ type Mutation {
""" """
swapSeeds(phaseId: ID!, seed1Id: ID!, seed2Id: ID!): [Seed] swapSeeds(phaseId: ID!, seed1Id: ID!, seed2Id: ID!): [Seed]
""" """
Update game stats for a H2H bracket set. Set winner cannot be changed with
this function, use the resetSet mutation instead.
"""
updateBracketSet(setId: ID!, winnerId: ID, isDQ: Boolean, gameData: [BracketSetGameDataInput]): Set
"""
Update set of phase groups in a phase Update set of phase groups in a phase
""" """
updatePhaseGroups(groupConfigs: [PhaseGroupUpdateInput]!): [PhaseGroup] updatePhaseGroups(groupConfigs: [PhaseGroupUpdateInput]!): [PhaseGroup]
@ -2440,54 +2325,6 @@ type Mutation {
upsertWave(waveId: ID, tournamentId: ID, fields: WaveUpsertInput!): Wave upsertWave(waveId: ID, tournamentId: ID, fields: WaveUpsertInput!): Wave
} }
input TournamentRegistrationInput {
eventIds: [ID]
}
"""
Game specific H2H set data such as character, stage, and stock info
"""
input BracketSetGameDataInput {
"""
Entrant ID of game winner
"""
winnerId: ID
"""
Game number
"""
gameNum: Int!
"""
Score for entrant 1 (if applicable). For smash, this is stocks remaining.
"""
entrant1Score: Int
"""
Score for entrant 2 (if applicable). For smash, this is stocks remaining.
"""
entrant2Score: Int
"""
ID of the stage that was played for this game (if applicable)
"""
stageId: ID
"""
List of selections for the game, typically character selections.
"""
selections: [BracketSetGameSelectionInput]
}
"""
Game specific H2H selections made by the entrants, such as character info
"""
input BracketSetGameSelectionInput {
"""
Entrant ID that made selection
"""
entrantId: ID!
"""
Character selected by this entrant for this game.
"""
characterId: Int
}
input ResolveConflictsOptions { input ResolveConflictsOptions {
lockedSeeds: [ResolveConflictsLockedSeedConfig] lockedSeeds: [ResolveConflictsLockedSeedConfig]
} }
@ -2589,8 +2426,6 @@ type EventTeam implements Team {
Uniquely identifying token for team. Same as the hashed part of the slug Uniquely identifying token for team. Same as the hashed part of the slug
""" """
discriminator: String discriminator: String
entrant: Entrant @deprecated(reason: "Use the entrant field off the EventTeam type")
event: Event @deprecated(reason: "Use the event field off the EventTeam type")
globalTeam: GlobalTeam globalTeam: GlobalTeam
images(type: String): [Image] images(type: String): [Image]
members(status: [TeamMemberStatus]): [TeamMember] members(status: [TeamMemberStatus]): [TeamMember]
@ -2606,8 +2441,6 @@ type GlobalTeam implements Team {
Uniquely identifying token for team. Same as the hashed part of the slug Uniquely identifying token for team. Same as the hashed part of the slug
""" """
discriminator: String discriminator: String
entrant: Entrant @deprecated(reason: "Use the entrant field off the EventTeam type")
event: Event @deprecated(reason: "Use the event field off the EventTeam type")
eventTeams(query: TeamPaginationQuery): EventTeamConnection eventTeams(query: TeamPaginationQuery): EventTeamConnection
images(type: String): [Image] images(type: String): [Image]
""" """

@@ -1,5 +1,6 @@
 use crate::queries::*;
 use sqlite::*;
+use std::fs::{self, OpenOptions};
 use std::path::{Path, PathBuf};
 
 pub struct DatasetMetadata {
@@ -13,15 +14,17 @@ pub struct DatasetMetadata {
 pub country: Option<String>,
 pub state: Option<String>,
-pub decay_const: f64,
-pub var_const: f64,
+pub set_limit: u64,
+pub decay_rate: f64,
+pub adj_decay_rate: f64,
+pub period: f64,
+pub tau: f64,
 }
 
 /// Return the path to the datasets file.
-fn datasets_path(dir: &Path) -> std::io::Result<PathBuf> {
-use std::fs::{self, OpenOptions};
-
-let mut path = dir.to_owned();
+fn datasets_path(config_dir: &Path) -> std::io::Result<PathBuf> {
+let mut path = config_dir.to_owned();
+path.push("startrnr");
 
 // Create datasets path if it doesn't exist
 fs::create_dir_all(&path)?;
 
@@ -34,8 +37,8 @@ fn datasets_path(dir: &Path) -> std::io::Result<PathBuf> {
 Ok(path)
 }
 
-pub fn open_datasets(dir: &Path) -> sqlite::Result<Connection> {
-let path = datasets_path(dir).unwrap();
+pub fn open_datasets(config_dir: &Path) -> sqlite::Result<Connection> {
+let path = datasets_path(config_dir).unwrap();
 
 let query = "
 CREATE TABLE IF NOT EXISTS datasets (
@@ -48,8 +51,11 @@ CREATE TABLE IF NOT EXISTS datasets (
 game_slug TEXT NOT NULL,
 country TEXT,
 state TEXT,
+set_limit INTEGER NOT NULL,
 decay_rate REAL NOT NULL,
-var_const REAL NOT NULL
+adj_decay_rate REAL NOT NULL,
+period REAL NOT NULL,
+tau REAL NOT NULL
 ) STRICT;
 
 CREATE TABLE IF NOT EXISTS players (
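The five new tuning fields on `DatasetMetadata` are explained by the CLI prompts further down in this diff; gathered in one place, with those descriptions as comments, they read roughly as follows (the grouping struct name here is illustrative, not in the source):

```
/// The rating parameters added to DatasetMetadata in this change.
pub struct RatingParameters {
    /// Sets two players must have played before their head-to-head data is
    /// considered trustworthy (0 disables the feature).
    pub set_limit: u64,
    /// Network decay rate in [0, 1]: 1 means skill against one opponent fully
    /// carries over to others, 0 means match-ups are independent.
    pub decay_rate: f64,
    /// Used instead of `decay_rate` while a match-up has fewer than
    /// `set_limit` recorded sets; should be lower than `decay_rate`.
    pub adj_decay_rate: f64,
    /// Glicko-2 rating period, stored in seconds (entered in days in the CLI).
    pub period: f64,
    /// Glicko-2 tau (system) constant; lower for luck-heavy games.
    pub tau: f64,
}
```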
@ -108,8 +114,11 @@ pub fn list_datasets(connection: &Connection) -> sqlite::Result<Vec<(String, Dat
game_slug: r_.read::<&str, _>("game_slug").to_owned(), game_slug: r_.read::<&str, _>("game_slug").to_owned(),
country: r_.read::<Option<&str>, _>("country").map(String::from), country: r_.read::<Option<&str>, _>("country").map(String::from),
state: r_.read::<Option<&str>, _>("state").map(String::from), state: r_.read::<Option<&str>, _>("state").map(String::from),
decay_const: r_.read::<f64, _>("decay_rate"), set_limit: r_.read::<i64, _>("set_limit") as u64,
var_const: r_.read::<f64, _>("adj_decay_rate"), decay_rate: r_.read::<f64, _>("decay_rate"),
adj_decay_rate: r_.read::<f64, _>("adj_decay_rate"),
period: r_.read::<f64, _>("period"),
tau: r_.read::<f64, _>("tau"),
}, },
)) ))
}) })
@ -149,14 +158,17 @@ pub fn new_dataset(
dataset: &str, dataset: &str,
metadata: DatasetMetadata, metadata: DatasetMetadata,
) -> sqlite::Result<()> { ) -> sqlite::Result<()> {
let query1 = r#"INSERT INTO datasets VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"#; let query1 = r#"INSERT INTO datasets VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"#;
let query2 = format!( let query2 = format!(
r#"CREATE TABLE "{0}_players" ( r#"CREATE TABLE "{0}_players" (
id INTEGER PRIMARY KEY REFERENCES players, id INTEGER PRIMARY KEY REFERENCES players,
last_played INTEGER NOT NULL,
deviation REAL NOT NULL,
volatility REAL NOT NULL,
sets_won TEXT NOT NULL DEFAULT '', sets_won TEXT NOT NULL,
sets_count_won INTEGER AS (length(sets_won) - length(replace(sets_won, ';', ''))), sets_count_won INTEGER AS (length(sets_won) - length(replace(sets_won, ';', ''))),
sets_lost TEXT NOT NULL DEFAULT '', sets_lost TEXT NOT NULL,
sets_count_lost INTEGER AS (length(sets_lost) - length(replace(sets_lost, ';', ''))), sets_count_lost INTEGER AS (length(sets_lost) - length(replace(sets_lost, ';', ''))),
sets TEXT AS (sets_won || sets_lost), sets TEXT AS (sets_won || sets_lost),
sets_count INTEGER AS (sets_count_won + sets_count_lost) sets_count INTEGER AS (sets_count_won + sets_count_lost)
@ -166,12 +178,10 @@ CREATE TABLE "{0}_network" (
player_A INTEGER NOT NULL, player_A INTEGER NOT NULL,
player_B INTEGER NOT NULL, player_B INTEGER NOT NULL,
advantage REAL NOT NULL, advantage REAL NOT NULL,
variance REAL NOT NULL,
last_updated INTEGER NOT NULL,
sets_A TEXT NOT NULL DEFAULT '', sets_A TEXT NOT NULL,
sets_count_A INTEGER AS (length(sets_A) - length(replace(sets_A, ';', ''))), sets_count_A INTEGER AS (length(sets_A) - length(replace(sets_A, ';', ''))),
sets_B TEXT NOT NULL DEFAULT '', sets_B TEXT NOT NULL,
sets_count_B INTEGER AS (length(sets_B) - length(replace(sets_B, ';', ''))), sets_count_B INTEGER AS (length(sets_B) - length(replace(sets_B, ';', ''))),
sets TEXT AS (sets_A || sets_B), sets TEXT AS (sets_A || sets_B),
sets_count INTEGER AS (sets_count_A + sets_count_B), sets_count INTEGER AS (sets_count_A + sets_count_B),
@ -199,8 +209,11 @@ CREATE INDEX "{0}_network_B" ON "{0}_network" (player_B);"#,
.bind((7, &metadata.game_slug[..]))? .bind((7, &metadata.game_slug[..]))?
.bind((8, metadata.country.as_deref()))? .bind((8, metadata.country.as_deref()))?
.bind((9, metadata.state.as_deref()))? .bind((9, metadata.state.as_deref()))?
.bind((10, metadata.decay_const))? .bind((10, metadata.set_limit as i64))?
.bind((11, metadata.var_const))? .bind((11, metadata.decay_rate))?
.bind((12, metadata.adj_decay_rate))?
.bind((13, metadata.period))?
.bind((14, metadata.tau))?
.try_for_each(|x| x.map(|_| ()))?; .try_for_each(|x| x.map(|_| ()))?;
connection.execute(query2) connection.execute(query2)
@ -230,8 +243,11 @@ pub fn get_metadata(
game_slug: r_.read::<&str, _>("game_slug").to_owned(), game_slug: r_.read::<&str, _>("game_slug").to_owned(),
country: r_.read::<Option<&str>, _>("country").map(String::from), country: r_.read::<Option<&str>, _>("country").map(String::from),
state: r_.read::<Option<&str>, _>("state").map(String::from), state: r_.read::<Option<&str>, _>("state").map(String::from),
decay_const: r_.read::<f64, _>("decay_rate"), set_limit: r_.read::<i64, _>("set_limit") as u64,
var_const: r_.read::<f64, _>("var_const"), decay_rate: r_.read::<f64, _>("decay_rate"),
adj_decay_rate: r_.read::<f64, _>("adj_decay_rate"),
period: r_.read::<f64, _>("period"),
tau: r_.read::<f64, _>("tau"),
}) })
}) })
.and_then(Result::ok)) .and_then(Result::ok))
@ -275,46 +291,39 @@ pub fn add_set(connection: &Connection, set: &SetId, event: EventId) -> sqlite::
pub fn add_players( pub fn add_players(
connection: &Connection, connection: &Connection,
dataset: &str, dataset: &str,
players: &Vec<PlayerData>, teams: &Teams<PlayerData>,
time: Timestamp,
) -> sqlite::Result<()> { ) -> sqlite::Result<()> {
let query1 = "INSERT OR IGNORE INTO players (id, discrim, name, prefix) VALUES (?, ?, ?, ?)"; let query1 = "INSERT OR IGNORE INTO players (id, discrim, name, prefix) VALUES (?, ?, ?, ?)";
let query2 = format!( let query2 = format!(
r#"INSERT OR IGNORE INTO "{}_players" (id) VALUES (?)"#, r#"INSERT OR IGNORE INTO "{}_players"
(id, last_played, deviation, volatility, sets_won, sets_lost)
VALUES (?, ?, 2.01, 0.06, '', '')"#,
dataset dataset
); );
players.iter().try_for_each( teams.iter().try_for_each(|team| {
|PlayerData { team.iter().try_for_each(
id, |PlayerData {
name, id,
prefix, name,
discrim, prefix,
}| { discrim,
let mut statement = connection.prepare(&query1)?; }| {
statement.bind((1, id.0 as i64))?; let mut statement = connection.prepare(&query1)?;
statement.bind((2, &discrim[..]))?; statement.bind((1, id.0 as i64))?;
statement.bind((3, &name[..]))?; statement.bind((2, &discrim[..]))?;
statement.bind((4, prefix.as_ref().map(|x| &x[..])))?; statement.bind((3, &name[..]))?;
statement.into_iter().try_for_each(|x| x.map(|_| ()))?; statement.bind((4, prefix.as_ref().map(|x| &x[..])))?;
statement.into_iter().try_for_each(|x| x.map(|_| ()))?;
statement = connection.prepare(&query2)?; statement = connection.prepare(&query2)?;
statement.bind((1, id.0 as i64))?; statement.bind((1, id.0 as i64))?;
statement.into_iter().try_for_each(|x| x.map(|_| ())) statement.bind((2, time.0 as i64))?;
}, statement.into_iter().try_for_each(|x| x.map(|_| ()))
) },
} )
})
pub fn get_all_players(connection: &Connection, dataset: &str) -> sqlite::Result<Vec<PlayerId>> {
let query = format!(r#"SELECT id FROM "{}_players""#, dataset,);
connection
.prepare(&query)?
.into_iter()
.map(|r| {
let r_ = r?;
Ok(PlayerId(r_.read::<i64, _>("id") as u64))
})
.try_collect()
} }
pub fn get_player(connection: &Connection, player: PlayerId) -> sqlite::Result<PlayerData> { pub fn get_player(connection: &Connection, player: PlayerId) -> sqlite::Result<PlayerData> {
@ -367,6 +376,26 @@ pub fn match_player_name(connection: &Connection, name: &str) -> sqlite::Result<
.try_collect() .try_collect()
} }
pub fn get_player_rating_data(
connection: &Connection,
dataset: &str,
player: PlayerId,
) -> sqlite::Result<(f64, f64, Timestamp)> {
let query = format!(
r#"SELECT deviation, volatility, last_played FROM "{}_players" WHERE id = ?"#,
dataset
);
let mut statement = connection.prepare(&query)?;
statement.bind((1, player.0 as i64))?;
statement.next()?;
Ok((
statement.read::<f64, _>("deviation")?,
statement.read::<f64, _>("volatility")?,
Timestamp(statement.read::<i64, _>("last_played")? as u64),
))
}
pub fn get_player_set_counts( pub fn get_player_set_counts(
connection: &Connection, connection: &Connection,
dataset: &str, dataset: &str,
@ -408,21 +437,27 @@ pub fn get_matchup_set_counts(
)) ))
} }
pub fn set_player_set_counts( pub fn set_player_data(
connection: &Connection, connection: &Connection,
dataset: &str, dataset: &str,
player: PlayerId, player: PlayerId,
last_played: Timestamp,
deviation: f64,
volatility: f64,
won: bool, won: bool,
set: &SetId, set: &SetId,
) -> sqlite::Result<()> { ) -> sqlite::Result<()> {
let query = format!( let query = format!(
r#"UPDATE "{}_players" SET r#"UPDATE "{}_players" SET deviation = :dev, volatility = :vol, last_played = :last,
sets_won = iif(:won, sets_won || :set || ';', sets_won), sets_won = iif(:won, sets_won || :set || ';', sets_won),
sets_lost = iif(:won, sets_lost, sets_lost || :set || ';') WHERE id = :id"#, sets_lost = iif(:won, sets_lost, sets_lost || :set || ';') WHERE id = :id"#,
dataset dataset
); );
let mut statement = connection.prepare(&query)?; let mut statement = connection.prepare(&query)?;
statement.bind((":dev", deviation))?;
statement.bind((":vol", volatility))?;
statement.bind((":last", last_played.0 as i64))?;
statement.bind((":id", player.0 as i64))?; statement.bind((":id", player.0 as i64))?;
statement.bind((":won", if won { 1 } else { 0 }))?; statement.bind((":won", if won { 1 } else { 0 }))?;
statement.bind((":set", &set.0.to_string()[..]))?; statement.bind((":set", &set.0.to_string()[..]))?;
@ -430,18 +465,18 @@ sets_lost = iif(:won, sets_lost, sets_lost || :set || ';') WHERE id = :id"#,
Ok(()) Ok(())
} }
pub fn get_network_data( pub fn get_advantage(
connection: &Connection, connection: &Connection,
dataset: &str, dataset: &str,
player1: PlayerId, player1: PlayerId,
player2: PlayerId, player2: PlayerId,
) -> sqlite::Result<Option<(f64, f64)>> { ) -> sqlite::Result<Option<f64>> {
if player1 == player2 { if player1 == player2 {
return Ok(Some((0.0, 0.0))); return Ok(Some(0.0));
} }
let query = format!( let query = format!(
r#"SELECT iif(:a > :b, -advantage, advantage) AS advantage, variance FROM "{}_network" r#"SELECT iif(:a > :b, -advantage, advantage) AS advantage FROM "{}_network"
WHERE player_A = min(:a, :b) AND player_B = max(:a, :b)"#, WHERE player_A = min(:a, :b) AND player_B = max(:a, :b)"#,
dataset dataset
); );
@ -450,24 +485,20 @@ pub fn get_network_data(
statement.bind((":a", player1.0 as i64))?; statement.bind((":a", player1.0 as i64))?;
statement.bind((":b", player2.0 as i64))?; statement.bind((":b", player2.0 as i64))?;
statement.next()?; statement.next()?;
Ok(statement statement.read::<Option<f64>, _>("advantage")
.read::<Option<f64>, _>("advantage")?
.zip(statement.read::<Option<f64>, _>("variance")?))
} }
pub fn insert_network_data( pub fn insert_advantage(
connection: &Connection, connection: &Connection,
dataset: &str, dataset: &str,
player1: PlayerId, player1: PlayerId,
player2: PlayerId, player2: PlayerId,
advantage: f64, advantage: f64,
variance: f64,
time: Timestamp,
) -> sqlite::Result<()> { ) -> sqlite::Result<()> {
let query = format!( let query = format!(
r#"INSERT INTO "{}_network" r#"INSERT INTO "{}_network"
(player_A, player_B, advantage, variance, last_updated) (player_A, player_B, advantage, sets_A, sets_B)
VALUES (min(:a, :b), max(:a, :b), iif(:a > :b, -:v, :v), :d, :t)"#, VALUES (min(:a, :b), max(:a, :b), iif(:a > :b, -:v, :v), '', '')"#,
dataset dataset
); );
@ -475,67 +506,32 @@ pub fn insert_network_data(
statement.bind((":a", player1.0 as i64))?; statement.bind((":a", player1.0 as i64))?;
statement.bind((":b", player2.0 as i64))?; statement.bind((":b", player2.0 as i64))?;
statement.bind((":v", advantage))?; statement.bind((":v", advantage))?;
statement.bind((":d", variance))?;
statement.bind((":t", time.0 as i64))?;
statement.into_iter().try_for_each(|x| x.map(|_| ())) statement.into_iter().try_for_each(|x| x.map(|_| ()))
} }
pub fn adjust_for_time( pub fn adjust_advantages(
connection: &Connection, connection: &Connection,
dataset: &str, dataset: &str,
player: PlayerId, set: SetId,
var_const: f64,
time: Timestamp,
) -> sqlite::Result<()> {
let query = format!(
r#"UPDATE "{0}_network" SET
variance = min(variance + :c * (:t - last_updated), 5.0),
last_updated = :t
WHERE player_A = :i OR player_B = :i"#,
dataset
);
let mut statement = connection.prepare(query)?;
statement.bind((":i", player.0 as i64))?;
statement.bind((":c", var_const))?;
statement.bind((":t", time.0 as i64))?;
statement.into_iter().try_for_each(|x| x.map(|_| ()))
}
pub fn glicko_adjust(
connection: &Connection,
dataset: &str,
set: &SetId,
player1: PlayerId, player1: PlayerId,
player2: PlayerId, player2: PlayerId,
advantage: f64,
variance: f64,
winner: usize, winner: usize,
adjust1: f64,
adjust2: f64,
decay_rate: f64, decay_rate: f64,
) -> sqlite::Result<()> { ) -> sqlite::Result<()> {
let score = if winner != 0 { 1.0 } else { 0.0 };
let exp_val = 1.0 / (1.0 + (-advantage).exp());
let like_var = 1.0 / exp_val / (1.0 - exp_val);
let var_new = 1.0 / (1.0 / variance + 1.0 / like_var);
let adjust = score - exp_val;
let query1 = format!( let query1 = format!(
r#"UPDATE "{}_network" SET r#"UPDATE "{}_network"
variance = 1.0 / (1.0 / variance + :d / :lv), SET advantage = advantage + iif(:pl = player_A, -:v, :v) * :d
advantage = advantage + :d * iif(:pl = player_A, -:adj, :adj)
/ (1.0 / variance + :d / :lv)
WHERE (player_A = :pl AND player_B != :plo) WHERE (player_A = :pl AND player_B != :plo)
OR (player_B = :pl AND player_A != :plo)"#, OR (player_B = :pl AND player_A != :plo)"#,
dataset dataset
); );
let query2 = format!( let query2 = format!(
r#"UPDATE "{}_network" SET r#"UPDATE "{}_network"
variance = :var, SET advantage = advantage + iif(:a > :b, -:v, :v),
advantage = advantage + iif(:a > :b, -:adj, :adj) * :var, sets_A = iif(:w = (:a > :b), sets_A || :set || ';', sets_A),
sets_A = iif(:w = (:a > :b), sets_A || :set || ';', sets_A), sets_B = iif(:w = (:b > :a), sets_B || :set || ';', sets_B)
sets_B = iif(:w = (:b > :a), sets_B || :set || ';', sets_B)
WHERE player_A = min(:a, :b) AND player_B = max(:a, :b)"#, WHERE player_A = min(:a, :b) AND player_B = max(:a, :b)"#,
dataset dataset
); );
@ -543,24 +539,21 @@ WHERE player_A = min(:a, :b) AND player_B = max(:a, :b)"#,
let mut statement = connection.prepare(&query1)?; let mut statement = connection.prepare(&query1)?;
statement.bind((":pl", player1.0 as i64))?; statement.bind((":pl", player1.0 as i64))?;
statement.bind((":plo", player2.0 as i64))?; statement.bind((":plo", player2.0 as i64))?;
statement.bind((":adj", -0.5 * adjust))?; statement.bind((":v", adjust1))?;
statement.bind((":d", decay_rate))?; statement.bind((":d", decay_rate))?;
statement.bind((":lv", like_var))?;
statement.into_iter().try_for_each(|x| x.map(|_| ()))?; statement.into_iter().try_for_each(|x| x.map(|_| ()))?;
statement = connection.prepare(&query1)?; statement = connection.prepare(&query1)?;
statement.bind((":pl", player2.0 as i64))?; statement.bind((":pl", player2.0 as i64))?;
statement.bind((":plo", player1.0 as i64))?; statement.bind((":plo", player1.0 as i64))?;
statement.bind((":adj", 0.5 * adjust))?; statement.bind((":v", adjust2))?;
statement.bind((":d", decay_rate))?; statement.bind((":d", decay_rate))?;
statement.bind((":lv", like_var))?;
statement.into_iter().try_for_each(|x| x.map(|_| ()))?; statement.into_iter().try_for_each(|x| x.map(|_| ()))?;
statement = connection.prepare(&query2)?; statement = connection.prepare(&query2)?;
statement.bind((":a", player1.0 as i64))?; statement.bind((":a", player1.0 as i64))?;
statement.bind((":b", player2.0 as i64))?; statement.bind((":b", player2.0 as i64))?;
statement.bind((":adj", adjust))?; statement.bind((":v", adjust2 - adjust1))?;
statement.bind((":var", var_new))?;
statement.bind((":w", winner as i64))?; statement.bind((":w", winner as i64))?;
statement.bind((":set", &set.0.to_string()[..]))?; statement.bind((":set", &set.0.to_string()[..]))?;
statement.into_iter().try_for_each(|x| x.map(|_| ())) statement.into_iter().try_for_each(|x| x.map(|_| ()))
@ -570,11 +563,11 @@ pub fn get_edges(
connection: &Connection, connection: &Connection,
dataset: &str, dataset: &str,
player: PlayerId, player: PlayerId,
) -> sqlite::Result<Vec<(PlayerId, f64, f64)>> { ) -> sqlite::Result<Vec<(PlayerId, f64, u64)>> {
let query = format!( let query = format!(
r#"SELECT r#"SELECT
iif(:pl = player_B, player_A, player_B) AS id, iif(:pl = player_B, player_A, player_B) AS id,
iif(:pl = player_B, -advantage, advantage) AS advantage, variance iif(:pl = player_B, -advantage, advantage) AS advantage, sets_count
FROM "{}_network" FROM "{}_network"
WHERE player_A = :pl OR player_B = :pl"#, WHERE player_A = :pl OR player_B = :pl"#,
dataset dataset
@ -589,7 +582,7 @@ pub fn get_edges(
Ok(( Ok((
PlayerId(r_.read::<i64, _>("id") as u64), PlayerId(r_.read::<i64, _>("id") as u64),
r_.read::<f64, _>("advantage"), r_.read::<f64, _>("advantage"),
r_.read::<f64, _>("variance"), r_.read::<i64, _>("sets_count") as u64,
)) ))
}) })
.try_collect() .try_collect()
@ -624,20 +617,20 @@ pub fn hypothetical_advantage(
dataset: &str, dataset: &str,
player1: PlayerId, player1: PlayerId,
player2: PlayerId, player2: PlayerId,
set_limit: u64,
decay_rate: f64, decay_rate: f64,
) -> sqlite::Result<(f64, f64)> { adj_decay_rate: f64,
) -> sqlite::Result<f64> {
use std::collections::{HashSet, VecDeque}; use std::collections::{HashSet, VecDeque};
// Check trivial cases // Check trivial cases
if player1 == player2 { if player1 == player2 || either_isolated(connection, dataset, player1, player2)? {
return Ok((0.0, 0.0)); return Ok(0.0);
} else if decay_rate < 0.05 || either_isolated(connection, dataset, player1, player2)? {
return Ok((0.0, 5.0));
} }
let mut visited: HashSet<PlayerId> = HashSet::new(); let mut visited: HashSet<PlayerId> = HashSet::new();
let mut queue: VecDeque<(PlayerId, Vec<(f64, f64, f64)>)> = let mut queue: VecDeque<(PlayerId, Vec<(f64, f64)>)> =
VecDeque::from([(player1, Vec::from([(0.0, 0.0, 1.0 / decay_rate)]))]); VecDeque::from([(player1, Vec::from([(0.0, 1.0)]))]);
let mut final_paths = Vec::new(); let mut final_paths = Vec::new();
@ -646,7 +639,7 @@ pub fn hypothetical_advantage(
let connections = get_edges(connection, dataset, visiting)?; let connections = get_edges(connection, dataset, visiting)?;
for (id, adv, var) in connections for (id, adv, sets) in connections
.into_iter() .into_iter()
.filter(|(id, _, _)| !visited.contains(id)) .filter(|(id, _, _)| !visited.contains(id))
{ {
@ -660,9 +653,12 @@ pub fn hypothetical_advantage(
}; };
if rf.len() < 100 { if rf.len() < 100 {
let iter = paths let decay = if sets >= set_limit {
.iter() decay_rate
.map(|(av, vr, dec)| (av + adv, vr + var, dec * decay_rate)); } else {
adj_decay_rate
};
let iter = paths.iter().map(|(a, d)| (a + adv, d * decay));
rf.extend(iter); rf.extend(iter);
rf.truncate(100); rf.truncate(100);
@ -672,23 +668,22 @@ pub fn hypothetical_advantage(
visited.insert(visiting); visited.insert(visiting);
} }
if final_paths.len() == 0 { let max_decay = final_paths
// No paths found .iter()
Ok((0.0, 5.0)) .map(|x| x.1)
} else { .max_by(|d1, d2| d1.partial_cmp(d2).unwrap());
let sum_decay: f64 = final_paths.iter().map(|(_, _, dec)| dec).sum();
let (final_adv, final_var) = final_paths if let Some(mdec) = max_decay {
let sum_decay = final_paths.iter().map(|x| x.1).sum::<f64>();
Ok(final_paths
.into_iter() .into_iter()
.fold((0.0, 0.0), |(av, vr), (adv, var, dec)| { .map(|(adv, dec)| adv * dec)
(av + adv * dec, vr + (var + adv * adv) * dec) .sum::<f64>()
}); / sum_decay
let mut final_adv = final_adv / sum_decay; * mdec)
let mut final_var = final_var / sum_decay - final_adv * final_adv; } else {
if final_var > 5.0 { // No paths found
final_adv = final_adv * (5.0 / final_var).sqrt(); Ok(0.0)
final_var = 5.0;
}
Ok((final_adv, final_var))
} }
} }
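Stated as a formula, the rewritten `hypothetical_advantage` above blends up to 100 network paths k between the two players, where a_k is the sum of edge advantages along path k and d_k is the product of per-edge decay factors (decay_rate on edges with at least set_limit recorded sets, adj_decay_rate otherwise):

```
\hat{A}(p_1, p_2) \;=\; \Bigl(\max_k d_k\Bigr)\;
  \frac{\sum_k a_k\, d_k}{\sum_k d_k},
\qquad
a_k = \sum_{e \in k} \mathrm{adv}(e), \quad
d_k = \prod_{e \in k} \mathrm{decay}(e)
```

If no path exists, or either player is isolated in the network, the estimate falls back to 0.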
@ -697,12 +692,21 @@ pub fn initialize_edge(
dataset: &str, dataset: &str,
player1: PlayerId, player1: PlayerId,
player2: PlayerId, player2: PlayerId,
set_limit: u64,
decay_rate: f64, decay_rate: f64,
time: Timestamp, adj_decay_rate: f64,
) -> sqlite::Result<(f64, f64)> { ) -> sqlite::Result<f64> {
let (adv, var) = hypothetical_advantage(connection, dataset, player1, player2, decay_rate)?; let adv = hypothetical_advantage(
insert_network_data(connection, dataset, player1, player2, adv, var, time)?; connection,
Ok((adv, var)) dataset,
player1,
player2,
set_limit,
decay_rate,
adj_decay_rate,
)?;
insert_advantage(connection, dataset, player1, player2, adv)?;
Ok(adv)
} }
// Tests // Tests
@ -726,7 +730,8 @@ CREATE TABLE IF NOT EXISTS datasets (
set_limit INTEGER NOT NULL, set_limit INTEGER NOT NULL,
decay_rate REAL NOT NULL, decay_rate REAL NOT NULL,
adj_decay_rate REAL NOT NULL, adj_decay_rate REAL NOT NULL,
var_const period REAL NOT NULL,
tau REAL NOT NULL
) STRICT; ) STRICT;
CREATE TABLE IF NOT EXISTS players ( CREATE TABLE IF NOT EXISTS players (
@ -765,8 +770,11 @@ CREATE TABLE IF NOT EXISTS sets (
game_slug: String::from("test"), game_slug: String::from("test"),
country: None, country: None,
state: None, state: None,
decay_const: 0.5, set_limit: 0,
var_const: 0.00000001, decay_rate: 0.5,
adj_decay_rate: 0.5,
period: (3600 * 24 * 30) as f64,
tau: 0.2,
} }
} }
@ -780,4 +788,141 @@ CREATE TABLE IF NOT EXISTS sets (
}) })
.collect() .collect()
} }
#[test]
fn sqlite_sanity_check() -> sqlite::Result<()> {
let test_value: i64 = 2;
let connection = sqlite::open(":memory:")?;
connection.execute(
r#"CREATE TABLE test (a INTEGER);
INSERT INTO test VALUES (1);
INSERT INTO test VALUES (2)"#,
)?;
let mut statement = connection.prepare("SELECT * FROM test WHERE a = ?")?;
statement.bind((1, test_value))?;
statement.next()?;
assert_eq!(statement.read::<i64, _>("a")?, test_value);
Ok(())
}
#[test]
fn test_players() -> sqlite::Result<()> {
let connection = mock_datasets()?;
new_dataset(&connection, "test", metadata())?;
add_players(&connection, "test", &vec![players(2)], Timestamp(0))?;
let mut statement = connection.prepare("SELECT * FROM players WHERE id = 1")?;
statement.next()?;
assert_eq!(statement.read::<i64, _>("id")?, 1);
assert_eq!(statement.read::<String, _>("name")?, "1");
assert_eq!(statement.read::<Option<String>, _>("prefix")?, None);
Ok(())
}
#[test]
fn edge_insert_get() -> sqlite::Result<()> {
let connection = mock_datasets()?;
new_dataset(&connection, "test", metadata())?;
add_players(&connection, "test", &vec![players(2)], Timestamp(0))?;
insert_advantage(&connection, "test", PlayerId(2), PlayerId(1), 1.0)?;
assert_eq!(
get_advantage(&connection, "test", PlayerId(1), PlayerId(2))?,
Some(-1.0)
);
assert_eq!(
get_advantage(&connection, "test", PlayerId(2), PlayerId(1))?,
Some(1.0)
);
Ok(())
}
#[test]
fn player_all_edges() -> sqlite::Result<()> {
let connection = mock_datasets()?;
new_dataset(&connection, "test", metadata())?;
add_players(&connection, "test", &vec![players(3)], Timestamp(0))?;
insert_advantage(&connection, "test", PlayerId(2), PlayerId(1), 1.0)?;
insert_advantage(&connection, "test", PlayerId(1), PlayerId(3), 5.0)?;
assert_eq!(
get_edges(&connection, "test", PlayerId(1))?,
[(PlayerId(2), -1.0, 0), (PlayerId(3), 5.0, 0)]
);
assert_eq!(
get_edges(&connection, "test", PlayerId(2))?,
[(PlayerId(1), 1.0, 0)]
);
assert_eq!(
get_edges(&connection, "test", PlayerId(3))?,
[(PlayerId(1), -5.0, 0)]
);
Ok(())
}
#[test]
fn hypoth_adv_trivial() -> sqlite::Result<()> {
let num_players = 3;
let connection = mock_datasets()?;
new_dataset(&connection, "test", metadata())?;
add_players(
&connection,
"test",
&vec![players(num_players)],
Timestamp(0),
)?;
let metadata = metadata();
for i in 1..=num_players {
for j in 1..=num_players {
assert_eq!(
hypothetical_advantage(
&connection,
"test",
PlayerId(i),
PlayerId(j),
metadata.set_limit,
metadata.decay_rate,
metadata.adj_decay_rate
)?,
0.0
);
}
}
Ok(())
}
#[test]
fn hypoth_adv1() -> sqlite::Result<()> {
let connection = mock_datasets()?;
new_dataset(&connection, "test", metadata())?;
add_players(&connection, "test", &vec![players(2)], Timestamp(0))?;
insert_advantage(&connection, "test", PlayerId(1), PlayerId(2), 1.0)?;
let metadata = metadata();
assert_eq!(
hypothetical_advantage(
&connection,
"test",
PlayerId(1),
PlayerId(2),
metadata.set_limit,
metadata.decay_rate,
metadata.adj_decay_rate
)?,
1.0
);
Ok(())
}
} }

@@ -45,7 +45,9 @@ AUTH_TOKEN, or in a text file '<CONFIG_DIR>/auth.txt'."
 value_name = "DIR",
 global = true,
 help = "Config directory",
-long_help = "This flag overrides the default config directory."
+long_help = "This flag overrides the default config directory.
+If this directory does not exist, it will be created and a database file will
+be initialized within it."
 )]
 config_dir: Option<PathBuf>,
 }
@@ -81,12 +83,6 @@ created if it does not already exist."
 #[arg(short, long, global = true, help = "The dataset to access")]
 dataset: Option<String>,
 },
-Ranking {
-#[command(subcommand)]
-subcommand: RankingSC,
-#[arg(short, long, global = true, help = "The dataset to access")]
-dataset: Option<String>,
-},
 }
 
 #[derive(Subcommand)]
@@ -112,28 +108,15 @@ enum PlayerSC {
 Matchup { player1: String, player2: String },
 }
 
-#[derive(Subcommand)]
-enum RankingSC {
-#[command(about = "Create a new ranking")]
-Create,
-}
-
 fn main() {
 let cli = Cli::parse();
 
-let config_dir = cli.config_dir.unwrap_or_else(|| {
-let mut dir = dirs::config_dir().expect("Could not determine config directory");
-dir.push("startrnr");
-dir
-});
-
-let mut data_dir = dirs::data_dir().expect("Could not determine user data directory");
-data_dir.push("startrnr");
+let config_dir = cli
+.config_dir
+.unwrap_or_else(|| dirs::config_dir().expect("Could not determine config directory"));
 
 let connection =
-open_datasets(&data_dir).unwrap_or_else(|_| error("Could not open datasets file", 2));
+open_datasets(&config_dir).unwrap_or_else(|_| error("Could not open datasets file", 2));
 
-#[allow(unreachable_patterns)]
 match cli.subcommand {
 Subcommands::Dataset {
 subcommand: DatasetSC::List,
@ -237,10 +220,22 @@ fn dataset_list(connection: &Connection) {
); );
} }
} }
println!();
if metadata.set_limit != 0 && metadata.decay_rate != metadata.adj_decay_rate {
println!("\x1b[1mSet Limit:\x1b[0m {}", metadata.set_limit);
println!(
"\x1b[1mNetwork Decay Rate:\x1b[0m {} (adjusted {})",
metadata.decay_rate, metadata.adj_decay_rate
);
} else {
println!("\x1b[1mNetwork Decay Rate:\x1b[0m {}", metadata.decay_rate);
}
println!( println!(
"\n\x1b[1mNetwork Decay Constant:\x1b[0m {}", "\x1b[1mRating Period:\x1b[0m {} days",
metadata.decay_const metadata.period / SECS_IN_DAY as f64
); );
println!("\x1b[1mTau Constant:\x1b[0m {}\n", metadata.tau);
} }
} }
@ -400,11 +395,33 @@ End date (year, m/y, or m/d/y): "
} }
}; };
// Set Limit
let mut set_limit = 0;
print!(
"
\x1b[1mSet Limit\x1b[0m
The set limit is an optional feature of the rating system that defines how many
sets must be played between two players for their rating data to be considered
trustworthy.
This value should be set low, i.e. not more than 5 or 6.
Set limit (default 0): "
);
let set_limit_input = read_string();
if !set_limit_input.is_empty() {
set_limit = set_limit_input
.parse::<u64>()
.unwrap_or_else(|_| error("Input is not an integer", 1));
}
// Advanced Options // Advanced Options
// Defaults // Defaults
let mut decay_const = 0.9; let mut decay_rate = 0.8;
let mut var_const = (10.0 - 0.04) / SECS_IN_YEAR as f64 / 3.0; let mut adj_decay_rate = 0.6;
let mut period_days = 40.0;
let mut tau = 0.4;
print!("\nConfigure advanced options? (y/n) "); print!("\nConfigure advanced options? (y/n) ");
if let Some('y') = read_string().chars().next() { if let Some('y') = read_string().chars().next() {
@ -412,42 +429,87 @@ End date (year, m/y, or m/d/y): "
print!( print!(
" "
\x1b[1mNetwork Decay Constant\x1b[0m \x1b[1mNetwork Decay Rate\x1b[0m
The network decay constant is a number between 0 and 1 that controls how The network decay rate is a number between 0 and 1 that controls how the
player wins and losses propagate throughout the network. If the decay advantage network reacts to player wins and losses. If the decay rate is 1,
constant is 1, then it is assumed that a player's skill against one then it is assumed that a player's skill against one opponent always carries
opponent always carries over to all other opponents. If the decay over to all other opponents. If the decay rate is 0, then all player match-ups
constant is 0, then all player match-ups are assumed to be independent of are assumed to be independent of each other.
each other.
Network decay constant (default 0.9): " Network decay rate (default 0.8): "
); );
let decay_const_input = read_string(); let decay_rate_input = read_string();
if !decay_const_input.is_empty() { if !decay_rate_input.is_empty() {
decay_const = decay_const_input decay_rate = decay_rate_input
.parse::<f64>() .parse::<f64>()
.unwrap_or_else(|_| error("Input is not a number", 1)); .unwrap_or_else(|_| error("Input is not a number", 1));
if decay_const < 0.0 || decay_const > 1.0 { if decay_rate < 0.0 || decay_rate > 1.0 {
error("Input is not between 0 and 1", 1); error("Input is not between 0 and 1", 1);
} }
} }
// Variance Constant // Adjusted Decay Rate
if set_limit != 0 {
print!(
"
\x1b[1mAdjusted Network Decay Rate\x1b[0m
If the number of sets played between two players is less than the set limit,
then this value is used instead of the regular network decay rate.
This value should be \x1b[1mlower\x1b[0m than the network decay rate.
Adjusted network decay rate (default 0.6): "
);
let adj_decay_rate_input = read_string();
if !adj_decay_rate_input.is_empty() {
adj_decay_rate = adj_decay_rate_input
.parse::<f64>()
.unwrap_or_else(|_| error("Input is not a number", 1));
if decay_rate < 0.0 || decay_rate > 1.0 {
error("Input is not between 0 and 1", 1);
}
}
}
// Rating Period
print!( print!(
" "
\x1b[1mVariance Rate\x1b[0m \x1b[1mRating Period\x1b[0m
This constant determines how quickly a player's variance (the uncertainty The rating period is an interval of time that dictates how player ratings change
of their rating) increases over time. See the end of \x1b[4m\x1b]8;;http:\ during inactivity. Ideally the rating period should be somewhat long, long
//www.glicko.net/glicko/glicko.pdf\x1b\\this paper\x1b]8;;\x1b\\\x1b[0m for details enough to expect almost every player in the dataset to have played at least a
on how to compute a good value, or you can leave it blank and a reasonable few sets.
default will be chosen.
Variance rate: " Rating period (in days, default 40): "
); );
let var_const_input = read_string(); let period_input = read_string();
if !var_const_input.is_empty() { if !period_input.is_empty() {
var_const = var_const_input period_days = period_input
.parse::<f64>()
.unwrap_or_else(|_| error("Input is not a number", 1));
}
// Tau coefficient
print!(
"
\x1b[1mTau Constant\x1b[0m
The tau constant is an internal system constant that roughly represents how
much random chance and luck play a role in game outcomes. In games where match
results are highly predictable, and a player's skill is the sole factor for
whether they will win, the tau constant should be high (0.9 - 1.2). In games
where luck matters, and more improbable victories can occur, the tau constant
should be low (0.2 - 0.4).
The tau constant is set low by default, since skill-based competitive video
games tend to be on the more luck-heavy side.
Tau constant (default 0.4): "
);
let tau_input = read_string();
if !tau_input.is_empty() {
tau = tau_input
.parse::<f64>() .parse::<f64>()
.unwrap_or_else(|_| error("Input is not a number", 1)); .unwrap_or_else(|_| error("Input is not a number", 1));
} }
@ -467,8 +529,11 @@ Variance rate: "
game_slug, game_slug,
country, country,
state, state,
decay_const, set_limit,
var_const, decay_rate,
adj_decay_rate,
period: SECS_IN_DAY as f64 * period_days,
tau,
}, },
) )
.expect("Error communicating with SQLite"); .expect("Error communicating with SQLite");
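The set limit and the two decay rates collected above interact through a single pattern that appears in both `hypothetical_advantage` and `update_from_set` elsewhere in this diff. Written as a standalone helper it would look like this (illustrative only, not a function in the source):

```
/// Weight used when propagating a result across a match-up edge.
fn edge_decay(sets: u64, set_limit: u64, decay_rate: f64, adj_decay_rate: f64) -> f64 {
    if sets >= set_limit {
        decay_rate
    } else {
        adj_decay_rate
    }
}
```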
@ -522,6 +587,9 @@ fn player_info(connection: &Connection, dataset: Option<String>, player: String)
} = get_player_from_input(connection, player) } = get_player_from_input(connection, player)
.unwrap_or_else(|_| error("Could not find player", 1)); .unwrap_or_else(|_| error("Could not find player", 1));
let (deviation, volatility, _) = get_player_rating_data(connection, &dataset, id)
.unwrap_or_else(|_| error("Could not find player", 1));
let (won, lost) = get_player_set_counts(connection, &dataset, id) let (won, lost) = get_player_set_counts(connection, &dataset, id)
.unwrap_or_else(|_| error("Could not find player", 1)); .unwrap_or_else(|_| error("Could not find player", 1));
@ -541,6 +609,9 @@ fn player_info(connection: &Connection, dataset: Option<String>, player: String)
lost, lost,
(won as f64 / (won + lost) as f64) * 100.0 (won as f64 / (won + lost) as f64) * 100.0
); );
println!("\n\x1b[1mDeviation:\x1b[0m {}", deviation);
println!("\x1b[1mVolatility:\x1b[0m {}", volatility);
} }
fn player_matchup( fn player_matchup(
@ -559,6 +630,9 @@ fn player_matchup(
} = get_player_from_input(connection, player1) } = get_player_from_input(connection, player1)
.unwrap_or_else(|_| error("Could not find player", 1)); .unwrap_or_else(|_| error("Could not find player", 1));
let (deviation1, _, _) = get_player_rating_data(connection, &dataset, player1)
.unwrap_or_else(|_| error("Could not find player", 1));
let PlayerData { let PlayerData {
id: player2, id: player2,
name: name2, name: name2,
@ -567,34 +641,43 @@ fn player_matchup(
} = get_player_from_input(connection, player2) } = get_player_from_input(connection, player2)
.unwrap_or_else(|_| error("Could not find player", 1)); .unwrap_or_else(|_| error("Could not find player", 1));
let (hypothetical, advantage, variance) = let (deviation2, _, _) = get_player_rating_data(connection, &dataset, player2)
get_network_data(connection, &dataset, player1, player2) .unwrap_or_else(|_| error("Could not find player", 1));
.expect("Error communicating with SQLite")
.map(|(adv, var)| (false, adv, var)) let (hypothetical, advantage) = get_advantage(connection, &dataset, player1, player2)
.unwrap_or_else(|| { .expect("Error communicating with SQLite")
let metadata = get_metadata(connection, &dataset) .map(|x| (false, x))
.expect("Error communicating with SQLite") .unwrap_or_else(|| {
.unwrap_or_else(|| error("Dataset not found", 1)); let metadata = get_metadata(connection, &dataset)
let (adv, var) = hypothetical_advantage( .expect("Error communicating with SQLite")
.unwrap_or_else(|| error("Dataset not found", 1));
(
true,
hypothetical_advantage(
connection, connection,
&dataset, &dataset,
player1, player1,
player2, player2,
metadata.decay_const, metadata.set_limit,
metadata.decay_rate,
metadata.adj_decay_rate,
) )
.expect("Error communicating with SQLite"); .expect("Error communicating with SQLite"),
(true, adv, var) )
}); });
let probability = 1.0 / (1.0 + f64::exp(-advantage)); let probability = 1.0
/ (1.0
+ f64::exp(
g_func((deviation1 * deviation1 + deviation2 * deviation2).sqrt()) * advantage,
));
let (color, other_color) = ansi_num_color(advantage, 0.2, 2.0); let color = ansi_num_color(advantage, 0.2, 2.0);
let other_color = ansi_num_color(-advantage, 0.2, 2.0);
let len1 = prefix1.as_deref().map(|s| s.len() + 1).unwrap_or(0) + name1.len(); let len1 = prefix1.as_deref().map(|s| s.len() + 1).unwrap_or(0) + name1.len();
let len2 = prefix2.as_deref().map(|s| s.len() + 1).unwrap_or(0) + name2.len(); let len2 = prefix2.as_deref().map(|s| s.len() + 1).unwrap_or(0) + name2.len();
// Prefix + name for each player
if let Some(pre) = prefix1 { if let Some(pre) = prefix1 {
print!("\x1b[2m{}\x1b[22m ", pre); print!("\x1b[2m{}\x1b[22m ", pre);
} }
@ -612,30 +695,26 @@ fn player_matchup(
discrim2, name2 discrim2, name2
); );
// Probability breakdown
println!( println!(
"\x1b[1m\x1b[{4}m{0:>2$}\x1b[0m - \x1b[1m\x1b[{5}m{1:<3$}\x1b[0m", "\x1b[1m\x1b[{4}m{0:>2$}\x1b[0m - \x1b[1m\x1b[{5}m{1:<3$}\x1b[0m",
format!("{:.1}%", (1.0 - probability) * 100.0),
format!("{:.1}%", probability * 100.0), format!("{:.1}%", probability * 100.0),
format!("{:.1}%", (1.0 - probability) * 100.0),
len1, len1,
len2, len2,
other_color, other_color,
color color
); );
// Advantage + variance if hypothetical {
println!(
println!( "\n\x1b[1mHypothetical Advantage: \x1b[{1}m{0:+.4}\x1b[0m",
"\n\x1b[1m{0}Advantage: \x1b[{1}m{2:+.4}\x1b[39m\n{0}Variance: {3:.4}\x1b[0m", advantage, color
if hypothetical { "Hypothetical " } else { "" }, );
color, } else {
advantage, println!(
variance "\n\x1b[1mAdvantage: \x1b[{1}m{0:+.4}\x1b[0m",
); advantage, color
);
if !hypothetical {
// Set count
let (a, b) = get_matchup_set_counts(connection, &dataset, player1, player2) let (a, b) = get_matchup_set_counts(connection, &dataset, player1, player2)
.expect("Error communicating with SQLite"); .expect("Error communicating with SQLite");
@ -695,70 +774,5 @@ fn sync(connection: &Connection, auth: String, datasets: Vec<String>, all: bool)
} }
fn ranking_create(connection: &Connection, dataset: Option<String>) { fn ranking_create(connection: &Connection, dataset: Option<String>) {
use std::collections::HashMap;
let dataset = dataset.unwrap_or_else(|| String::from("default")); let dataset = dataset.unwrap_or_else(|| String::from("default"));
let metadata = get_metadata(connection, &dataset)
.expect("Error communicating with SQLite")
.unwrap_or_else(|| error("Dataset not found", 1));
let exp = read_string().parse::<f64>().unwrap();
let players = get_all_players(connection, &dataset).expect("Error communicating with SQLite");
let num_players = players.len();
let mut table = players
.into_iter()
.map(|id| (id, 1.0 / num_players as f64))
.collect::<HashMap<_, _>>();
table.shrink_to_fit();
let mut diff: f64 = 1.0;
let mut iter = 0;
while diff > 1e-8 {
let mut new_table = HashMap::with_capacity(table.capacity());
for (&id, &last) in table.iter() {
let mut points = get_edges(connection, &dataset, id)
.expect("Error communicating with SQLite")
.into_iter()
.map(|(other, adv, _sets)| (other, exp.powf(adv)))
.collect::<Vec<_>>();
points.push((id, 1.0));
let sum_points = points.iter().map(|(_, val)| val).sum::<f64>();
points.into_iter().for_each(|(other, pts)| {
let pts_ = last * pts / sum_points;
new_table
.entry(other)
.and_modify(|v| *v += pts_)
.or_insert(pts_);
})
}
if iter % 10 == 0 {
diff = (table
.iter()
.map(|(id, &last)| (new_table[id] - last) * (new_table[id] - last))
.sum::<f64>()
/ num_players as f64)
.sqrt();
println!("{}", diff);
}
table = new_table;
iter += 1;
}
let mut list = table.into_iter().collect::<Vec<_>>();
list.sort_by(|(_, a), (_, b)| b.partial_cmp(a).unwrap());
println!();
for (id, pts) in list.into_iter().take(20) {
let player = get_player(connection, id).unwrap();
println!("{} - {}", player.name, pts);
}
} }

@@ -28,6 +28,7 @@ pub fn get_auth_token(config_dir: &Path) -> String {
 Err(VarError::NotUnicode(_)) => error("Invalid authorization key", 2),
 Err(VarError::NotPresent) => {
 let mut auth_file = config_dir.to_owned();
+auth_file.push("startrnr");
 auth_file.push("auth.txt");
 read_to_string(auth_file)
 .ok()

@ -1,3 +1,4 @@
use std::f64::consts::PI;
use std::thread::sleep; use std::thread::sleep;
use std::time::Duration; use std::time::Duration;
@ -7,6 +8,86 @@ use crate::queries::*;
use itertools::Itertools; use itertools::Itertools;
use sqlite::*; use sqlite::*;
// Glicko-2 system calculation
pub fn g_func(dev: f64) -> f64 {
1.0 / (1.0 + 3.0 * dev * dev / PI / PI).sqrt()
}
fn time_adjust(periods: f64, old_dev_sq: f64, volatility: f64) -> f64 {
(old_dev_sq + periods * volatility * volatility).sqrt()
}
fn illinois_optimize(fun: impl Fn(f64) -> f64, mut a: f64, mut b: f64) -> f64 {
let mut f_a = fun(a);
let mut f_b = fun(b);
while (b - a).abs() > 1e-6 {
let c = a + (a - b) * f_a / (f_b - f_a);
let f_c = fun(c);
if f_c * f_b > 0.0 {
f_a = f_a / 2.0;
} else {
a = b;
f_a = f_b;
}
b = c;
f_b = f_c;
}
a
}
fn glicko_adjust(
advantage: f64,
deviation: f64,
volatility: f64,
other_deviation: f64,
won: bool,
time: u64,
metadata: &DatasetMetadata,
) -> (f64, f64, f64) {
let period = metadata.period;
let tau = metadata.tau;
let g_val = g_func(other_deviation);
let exp_val = 1.0 / (1.0 + f64::exp(-g_val * advantage));
let variance = 1.0 / (g_val * g_val * exp_val * (1.0 - exp_val));
let score = if won { 1.0 } else { 0.0 };
let delta = variance * g_val * (score - exp_val);
let delta_sq = delta * delta;
let dev_sq = deviation * deviation;
let a = (volatility * volatility).ln();
let vol_fn = |x| {
let ex = f64::exp(x);
let subf = dev_sq + variance + ex;
((ex * (delta_sq - dev_sq - variance - ex)) / 2.0 / subf / subf) - (x - a) / tau / tau
};
let initial_b = if delta_sq > dev_sq + variance {
(delta_sq - dev_sq - variance).ln()
} else {
(1..)
.map(|k| vol_fn(a - k as f64 * tau))
.inspect(|x| {
if x.is_nan() {
panic!();
}
})
.find(|x| x >= &0.0)
.unwrap()
};
let vol_new = f64::exp(illinois_optimize(vol_fn, a, initial_b) / 2.0);
let dev_time = time_adjust(time as f64 / period, dev_sq, vol_new);
let dev_new = 1.0 / (1.0 / dev_time / dev_time + 1.0 / variance).sqrt();
let adjust = dev_new * dev_new * g_val * (score - exp_val);
(adjust, dev_new, vol_new)
}
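For readers without the Glicko-2 paper at hand, the quantities computed by `glicko_adjust` above are, in order, the following (a transcription of the code, with g ≡ g(φ_opp); A is the advantage argument, s is 1 for a win and 0 for a loss, φ and σ are the player's deviation and volatility, φ_opp the opponent's deviation, t the seconds since the player's last set, and x* the root found by the Illinois method):

```
g(\phi) = \frac{1}{\sqrt{1 + 3\phi^2/\pi^2}}, \qquad
E = \frac{1}{1 + e^{-g(\phi_{\mathrm{opp}})A}}, \qquad
v = \frac{1}{g^2 E (1 - E)}, \qquad
\Delta = v\, g\, (s - E)

f(x) = \frac{e^x\left(\Delta^2 - \phi^2 - v - e^x\right)}{2\left(\phi^2 + v + e^x\right)^2}
       - \frac{x - \ln \sigma^2}{\tau^2},
\qquad \sigma' = e^{x^*/2}

\phi^\ast = \sqrt{\phi^2 + \tfrac{t}{\mathrm{period}}\,\sigma'^2}, \qquad
\phi' = \frac{1}{\sqrt{1/\phi^{\ast 2} + 1/v}}, \qquad
\mathrm{adjust} = \phi'^2\, g\, (s - E)
```

`update_from_set` below then floors the new deviation at 0.2 and passes the two `adjust` values on to `adjust_advantages`.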
// Extract set data // Extract set data
fn get_event_sets(event: EventId, auth: &str) -> Option<Vec<SetData>> { fn get_event_sets(event: EventId, auth: &str) -> Option<Vec<SetData>> {
@ -119,67 +200,104 @@ fn update_from_set(
event_time: Timestamp, event_time: Timestamp,
results: SetData, results: SetData,
) -> sqlite::Result<()> { ) -> sqlite::Result<()> {
let teams = results.teams; let players_data = results.teams;
// Fall back to event time if set time is not recorded
let time = results.time.unwrap_or(event_time);
add_players(connection, dataset, &players_data, time)?;
// Non-singles matches are currently not supported // Non-singles matches are currently not supported
if teams.len() != 2 || teams[0].len() != 1 || teams[1].len() != 1 { if players_data.len() != 2 || players_data[0].len() != 1 || players_data[1].len() != 1 {
return Ok(()); return Ok(());
} }
let players = teams.into_iter().flatten().collect::<Vec<_>>(); let mut it = players_data.into_iter();
add_players(connection, dataset, &players)?; let player1 = it.next().unwrap()[0].id;
let player2 = it.next().unwrap()[0].id;
drop(it);
let player1 = players[0].id; let (deviation1, volatility1, last_played1) =
let player2 = players[1].id; get_player_rating_data(connection, dataset, player1)?;
let time1 = time.0.checked_sub(last_played1.0).unwrap_or(0);
// Time-adjust all variances associated with each player let (deviation2, volatility2, last_played2) =
let time = results.time.unwrap_or(event_time); get_player_rating_data(connection, dataset, player1)?;
adjust_for_time(connection, dataset, player1, metadata.var_const, time)?; let time2 = time.0.checked_sub(last_played2.0).unwrap_or(0);
adjust_for_time(connection, dataset, player2, metadata.var_const, time)?;
let (advantage, variance) = match get_network_data(connection, dataset, player1, player2) { let advantage = match get_advantage(connection, dataset, player1, player2) {
Err(e) => Err(e)?, Err(e) => Err(e)?,
Ok(None) => initialize_edge( Ok(None) => initialize_edge(
connection, connection,
dataset, dataset,
player1, player1,
player2, player2,
metadata.decay_const, metadata.set_limit,
time, metadata.decay_rate,
metadata.adj_decay_rate,
)?, )?,
Ok(Some(adv)) => adv, Ok(Some(adv)) => adv,
}; };
let (adjust1, dev_new1, vol_new1) = glicko_adjust(
// println!("{}, {} - {}, {}", player1.0, player2.0, advantage, variance); -advantage,
deviation1,
glicko_adjust( volatility1,
connection, deviation2,
dataset, results.winner == 0,
&results.id, time1,
player1, metadata,
player2, );
let (adjust2, dev_new2, vol_new2) = glicko_adjust(
advantage, advantage,
variance, deviation2,
results.winner, volatility2,
metadata.decay_const, deviation1,
)?; results.winner == 1,
time2,
metadata,
);
set_player_set_counts( // Set minimum deviation level
let dev_new1 = f64::max(dev_new1, 0.2);
let dev_new2 = f64::max(dev_new2, 0.2);
set_player_data(
connection, connection,
dataset, dataset,
player1, player1,
time,
dev_new1,
vol_new1,
results.winner == 0, results.winner == 0,
&results.id, &results.id,
)?; )?;
set_player_set_counts( set_player_data(
connection, connection,
dataset, dataset,
player2, player2,
time,
dev_new2,
vol_new2,
results.winner == 1, results.winner == 1,
&results.id, &results.id,
)?; )?;
Ok(()) let (sets1, sets2) = get_matchup_set_counts(connection, dataset, player1, player2)?;
let decay_rate = if sets1 + sets2 >= metadata.set_limit {
metadata.decay_rate
} else {
metadata.adj_decay_rate
};
adjust_advantages(
connection,
dataset,
results.id,
player1,
player2,
results.winner,
adjust1,
adjust2,
decay_rate,
)
} }
pub fn sync_dataset( pub fn sync_dataset(
@ -222,3 +340,45 @@ pub fn sync_dataset(
} }
connection.execute("COMMIT;") connection.execute("COMMIT;")
} }
#[cfg(test)]
mod tests {
use super::*;
use crate::database::tests::*;
#[test]
fn glicko_single() -> sqlite::Result<()> {
let connection = mock_datasets()?;
new_dataset(&connection, "test", metadata())?;
let players = players(2).into_iter().map(|x| vec![x]).collect();
add_players(&connection, "test", &players, Timestamp(0))?;
update_from_set(
&connection,
"test",
&metadata(),
Timestamp(0),
SetData {
id: SetId(StringOrInt::Int(0)),
time: None,
teams: players,
winner: 0,
},
)?;
println!(
"{:?}",
get_advantage(&connection, "test", PlayerId(1), PlayerId(2))?.unwrap()
);
println!(
"{:?}",
get_player_rating_data(&connection, "test", PlayerId(1))
);
println!(
"{:?}",
get_player_rating_data(&connection, "test", PlayerId(2))
);
Ok(())
}
}

@@ -8,7 +8,6 @@ use crate::queries::{PlayerData, PlayerId, Timestamp};
 pub const SECS_IN_HR: u64 = 3600;
 pub const SECS_IN_DAY: u64 = SECS_IN_HR * 24;
 pub const SECS_IN_WEEK: u64 = SECS_IN_DAY * 7;
-pub const SECS_IN_YEAR: u64 = SECS_IN_DAY * 365 + SECS_IN_HR * 6;
 
 pub fn error(msg: &str, code: i32) -> ! {
 eprintln!("\nERROR: {}", msg);
@@ -42,7 +41,7 @@ pub fn read_string() -> String {
 line.trim().to_owned()
 }
 
-pub fn ansi_num_color(num: f64, threshold1: f64, threshold2: f64) -> (&'static str, &'static str) {
+pub fn ansi_num_color(num: f64, threshold1: f64, threshold2: f64) -> &'static str {
 let sign = num > 0.0;
 let num_abs = num.abs();
 let severity = if num_abs < threshold1 {
@@ -54,11 +53,11 @@ pub fn ansi_num_color(num: f64, threshold1: f64, threshold2: f64) -> (&'static s
 };
 
 match (sign, severity) {
-(false, 1) => ("31", "32"),
-(true, 1) => ("32", "31"),
-(false, 2) => ("91", "92"),
-(true, 2) => ("92", "91"),
-_ => ("39", "39"),
+(false, 1) => "31",
+(true, 1) => "32",
+(false, 2) => "91",
+(true, 2) => "92",
+_ => "39",
 }
 }