Compare commits

...

10 commits

Author SHA1 Message Date
Kiana Sheibani bc02daae5f
feat: paging support for player list 2024-08-18 05:02:48 -04:00
Kiana Sheibani c2b4c5a6bf
feat: add full player listing command 2024-08-18 05:02:26 -04:00
Kiana Sheibani 9b21944624
refactor: factor out player info display code 2024-08-18 05:00:41 -04:00
Kiana Sheibani 9cd8d77753
refactor: separate command subroutines into submodules 2024-08-18 03:22:30 -04:00
Kiana Sheibani e644f96648
fix: resolve inconsistent name of dataset option 2024-08-18 02:51:07 -04:00
Kiana Sheibani 4b44b82531
feat!: overhaul the entire rating algorithm
I am far, FAR too lazy to split this into multiple commits, so here it
is.
2024-08-17 21:54:57 -04:00
Kiana Sheibani c2158f85f7
refactor: have ansi_num_color return both sides' colors 2024-08-15 00:16:49 -04:00
Kiana Sheibani 126268d8e3
tweak!: move dataset file out of config dir
Move the SQLite file from `XDG_CONFIG_HOME` to `XDG_DATA_HOME`, which
seems like a better location for it.
2024-08-14 21:34:32 -04:00
Kiana Sheibani 221ba8268d
fix: interpret --config flag properly 2024-08-14 21:34:18 -04:00
Kiana Sheibani 6460947ace
docs: reorganize README and INSTALL files 2024-08-14 20:45:22 -04:00
13 changed files with 1062 additions and 1386 deletions

Cargo.lock (generated, 24 lines changed)

@ -383,6 +383,17 @@ dependencies = [
"cfg-if",
]
[[package]]
name = "errno"
version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f639046355ee4f37944e44f60642c6f3a7efa3cf6b78c78a0d989a8ce6c396a1"
dependencies = [
"errno-dragonfly",
"libc",
"winapi",
]
[[package]]
name = "errno"
version = "0.3.2"
@ -883,6 +894,16 @@ dependencies = [
"syn 1.0.109",
]
[[package]]
name = "pager"
version = "0.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2599211a5c97fbbb1061d3dc751fa15f404927e4846e07c643287d6d1f462880"
dependencies = [
"errno 0.2.8",
"libc",
]
[[package]]
name = "percent-encoding"
version = "2.3.0"
@ -1048,7 +1069,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9bfe0f2582b4931a45d1fa608f8a8722e8b3c7ac54dd6d5f3b3212791fedef49"
dependencies = [
"bitflags 2.4.0",
"errno",
"errno 0.3.2",
"libc",
"linux-raw-sys",
"windows-sys",
@ -1212,6 +1233,7 @@ dependencies = [
"cynic-codegen",
"dirs",
"itertools",
"pager",
"reqwest",
"schema",
"serde",

Cargo.toml

@ -16,6 +16,7 @@ schema.path = "schema"
# CLI
clap = { version = "4.4", features = ["derive"] }
chrono = "0.4"
pager = "0.16"
# API access
cynic = { version = "3.2", features = ["http-reqwest-blocking"] }

INSTALL.md

@ -9,23 +9,25 @@ linked to your account. Instructions for generating one can be found in the
Once you have an auth token, it must be provided to StartRNR. In order, the
program checks for a token in:
- A command-line flag `--auth`.
- An environment variable `AUTH_TOKEN`,
- A file `auth.txt` within the config directory (see the [README](README.md) for
a list of directories in each OS).
1. A command-line flag `--auth`.
2. An environment variable `AUTH_TOKEN`.
3. A file `auth.txt` within the config directory:
- Windows: `%APPDATA%\Roaming\startrnr\auth.txt`
- MacOS: `~/Library/Application Support/startrnr/auth.txt`
- Linux: `~/.config/startrnr/auth.txt`
The last method is recommended, as StartRNR can simply read from that file
whenever it needs to.
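As a rough illustration of the lookup order described above (a hypothetical sketch, not StartRNR's actual code; the function and paths here are made up):

```rust
use std::{env, fs, path::Path};

// Hypothetical sketch only: check the --auth flag, then the AUTH_TOKEN
// environment variable, then auth.txt inside the config directory.
fn find_auth_token(flag: Option<String>, config_dir: &Path) -> Option<String> {
    flag // 1. the command-line flag, if given
        .or_else(|| env::var("AUTH_TOKEN").ok()) // 2. the environment variable
        .or_else(|| {
            // 3. auth.txt inside the config directory
            fs::read_to_string(config_dir.join("auth.txt"))
                .ok()
                .map(|s| s.trim().to_owned())
        })
}

fn main() {
    // Purely illustrative call with no flag and a made-up directory.
    let token = find_auth_token(None, Path::new("startrnr-config"));
    println!("token found: {}", token.is_some());
}
```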
## Step 2: Dependencies
StartRNR is written in Rust, so install the [Rust
toolchain](https://www.rust-lang.org/tools/install).
StartRNR requires these dependencies:
In addition, StartRNR needs these run-time dependencies:
- [Rust](https://www.rust-lang.org/tools/install)
- [OpenSSL](https://github.com/openssl/openssl#build-and-install)
- [SQLite](https://www.sqlite.org/download.html)
- [OpenSSL](https://www.openssl.org/)
- [SQLite](https://www.sqlite.org/)
Follow the instructions to download and install each.
## Step 3: Compiling

README.md

@ -34,6 +34,8 @@ Alternatively, if you use Nix:
nix profile install github:kiana-S/StartRNR
```
You will need to provide a start.gg API token to access tournament data. Details can be found in [INSTALL.md](INSTALL.md).
## Usage
Once StartRNR is installed, run:
@ -58,17 +60,6 @@ startrnr player matchup <player1> <player2>
A player can be specified by their tag or by their
[discriminator](https://help.start.gg/en/articles/4855957-discriminators-on-start-gg).
## Configuration
StartRNR stores its rating databases in its config directory, which is located at:
- Windows: `%APPDATA%\Roaming\startrnr`
- MacOS: `~/Library/Application Support/startrnr`
- Linux: `~/.config/startrnr`
This directory can be used to store the authentication token, which is required
for using StartRNR.
## Details - The RNR System
*For more information on RNR, see the [details page](DETAILS.md).*

src/commands.rs (new file, 3 lines)

@ -0,0 +1,3 @@
pub mod dataset;
pub mod player;
pub mod sync;

src/commands/dataset.rs (new file, 342 lines)

@ -0,0 +1,342 @@
use crate::database::*;
use crate::queries::*;
use crate::util::*;
use chrono::{Local, TimeZone, Utc};
use sqlite::*;
pub fn list(connection: &Connection) {
let datasets = list_datasets(&connection).expect("Error communicating with SQLite");
for (name, metadata) in datasets {
print!(
"· \x1b[1m\x1b[34m{}\x1b[0m
\x1b[4m\x1b]8;;https://www.start.gg/{}\x1b\\{}\x1b]8;;\x1b\\\x1b[0m ",
name, metadata.game_slug, metadata.game_name
);
if let Some(country) = metadata.country {
if let Some(state) = metadata.state {
println!("(in {}, {})", country, state);
} else {
println!("(in {})", country);
}
} else {
println!("(Global)");
}
let start = if metadata.start.0 != 1 {
Some(
Utc.timestamp_opt(metadata.start.0 as i64, 0)
.unwrap()
.format("%m/%d/%Y"),
)
} else {
None
};
let end = metadata
.end
.map(|x| Utc.timestamp_opt(x.0 as i64, 0).unwrap().format("%m/%d/%Y"));
match (start, end) {
(None, None) => (),
(Some(s), None) => println!("after {}", s),
(None, Some(e)) => println!("until {}", e),
(Some(s), Some(e)) => println!("{} - {}", s, e),
}
if metadata.last_sync == metadata.start {
print!("\x1b[1m\x1b[91mUnsynced\x1b[0m");
} else if Some(metadata.last_sync) == metadata.end {
print!("\x1b[1m\x1b[92mComplete\x1b[0m");
} else {
print!(
"\x1b[1mLast synced:\x1b[0m {}",
Local
.timestamp_opt(metadata.last_sync.0 as i64, 0)
.unwrap()
.format("%b %e, %Y %r")
);
}
if current_time().0 - metadata.last_sync.0 > SECS_IN_WEEK
&& Some(metadata.last_sync) != metadata.end
{
if name == "default" {
print!(" - \x1b[33mRun 'startrnr sync' to update!\x1b[0m");
} else {
print!(
" - \x1b[33mRun 'startrnr sync {:?}' to update!\x1b[0m",
name
);
}
}
println!(
"\n\x1b[1mNetwork Decay Constant:\x1b[0m {}",
metadata.decay_const
);
}
}
pub fn new(connection: &Connection, auth: String, name: Option<String>) {
// Name
let name = name.unwrap_or_else(|| {
print!("Name of new dataset: ");
read_string()
});
// Game
print!("Search games: ");
let games = run_query::<VideogameSearch, _>(
VideogameSearchVars {
name: &read_string(),
},
&auth,
)
.unwrap_or_else(|| error("Could not access start.gg", 1));
if games.is_empty() {
issue("No games found!", 0);
}
println!("\nSearch results:");
for (i, game) in games.iter().enumerate() {
println!("{} - {}", i, game.name);
}
print!("\nGame to track ratings for (0-{}): ", games.len() - 1);
let index = read_string()
.parse::<usize>()
.unwrap_or_else(|_| error("Not an integer", 1));
if index >= games.len() {
error("Out of range!", 1);
}
let VideogameData {
id: game_id,
name: game_name,
slug: game_slug,
} = games[index].clone();
// Location
print!(
"
\x1b[1mCountry\x1b[0m
Enter the two-letter code for the country you want to track ratings in, e.g.
\"US\" for the United States. See \x1b[4m\x1b]8;;https://www.ups.com/worldshiphelp/\
WSA/ENU/AppHelp/mergedProjects/CORE/Codes/Country_Territory_and_Currency_Codes.htm\
\x1b\\this site\x1b]8;;\x1b\\\x1b[0m for a list of these codes.
If no code is entered, then the dataset will track all players globally.
Country to track ratings for (leave empty for none): "
);
let country = {
let mut string = read_string();
if string.is_empty() {
None
} else if string.len() == 2 && string.chars().all(|c| c.is_ascii_alphabetic()) {
string.make_ascii_uppercase();
Some(string)
} else {
error("Input is not a two-letter code", 1)
}
};
let state = if country.as_ref().is_some_and(|s| s == "US" || s == "CA") {
print!(
"
\x1b[1mState/Province\x1b[0m
Enter the two-letter code for the US state or Canadian province you want to track
ratings in, e.g. \"CA\" for California. See \x1b[4m\x1b]8;;https://www.ups.com/worldshiphelp/\
WSA/ENU/AppHelp/mergedProjects/CORE/Codes/State_Province_Codes.htm\x1b\\this site\
\x1b]8;;\x1b\\\x1b[0m for a list of these codes.
If no code is entered, then the dataset will track all players within the country.
State/province to track ratings for (leave empty for none): "
);
let mut string = read_string();
if string.is_empty() {
None
} else if string.len() == 2 && string.chars().all(|c| c.is_ascii_alphabetic()) {
string.make_ascii_uppercase();
Some(string)
} else {
error("Input is not a two-letter code", 1);
}
} else {
None
};
// Interval
print!(
"
\x1b[1mStart Date\x1b[0m
The rating system will process tournaments starting at this date. If only a year
is entered, the date will be the start of that year.
Start date (year, m/y, or m/d/y): "
);
let start = {
let string = read_string();
if string.is_empty() {
Timestamp(1)
} else if string.chars().all(|c| c.is_ascii_digit() || c == '/') {
if let Some((y, m, d)) = match string.split('/').collect::<Vec<_>>()[..] {
[] => None,
[y] => Some((y.parse().unwrap(), 1, 1)),
[m, y] => Some((y.parse().unwrap(), m.parse().unwrap(), 1)),
[m, d, y] => Some((y.parse().unwrap(), m.parse().unwrap(), d.parse().unwrap())),
_ => error("Input is not a date", 1),
} {
Timestamp(Utc.with_ymd_and_hms(y, m, d, 0, 1, 1).unwrap().timestamp() as u64)
} else {
Timestamp(1)
}
} else {
error("Input is not a date", 1);
}
};
print!(
"
\x1b[1mEnd Date\x1b[0m
The rating system will stop processing tournaments when it reaches this date. If
only a year is entered, the date will be the end of that year.
End date (year, m/y, or m/d/y): "
);
let end = {
let string = read_string();
if string.is_empty() {
None
} else if string.chars().all(|c| c.is_ascii_digit() || c == '/') {
if let Some((y, m, d)) = match string.split('/').collect::<Vec<_>>()[..] {
[] => None,
[y] => Some((y.parse().unwrap(), 12, 31)),
[m, y] => Some((y.parse().unwrap(), m.parse().unwrap(), 30)),
[m, d, y] => Some((y.parse().unwrap(), m.parse().unwrap(), d.parse().unwrap())),
_ => error("Input is not a date", 1),
} {
Some(Timestamp(
Utc.with_ymd_and_hms(y, m, d, 11, 59, 59)
.unwrap()
.timestamp() as u64,
))
} else {
None
}
} else {
error("Input is not a date", 1);
}
};
// Advanced Options
// Defaults
let mut decay_const = 0.9;
let mut var_rate = (10.0 - 0.04) / SECS_IN_YEAR as f64 / 3.0;
print!("\nConfigure advanced options? (y/n) ");
if let Some('y') = read_string().chars().next() {
// Decay Rate
print!(
"
\x1b[1mNetwork Decay Constant\x1b[0m
The network decay constant is a number between 0 and 1 that controls how
player wins and losses propagate throughout the network. If the decay
constant is 1, then it is assumed that a player's skill against one
opponent always carries over to all other opponents. If the decay
constant is 0, then all player match-ups are assumed to be independent of
each other.
Network decay constant (default 0.9): "
);
let decay_const_input = read_string();
if !decay_const_input.is_empty() {
decay_const = decay_const_input
.parse::<f64>()
.unwrap_or_else(|_| error("Input is not a number", 1));
if decay_const < 0.0 || decay_const > 1.0 {
error("Input is not between 0 and 1", 1);
}
}
// Variance Constant
print!(
"
\x1b[1mVariance Rate\x1b[0m
This constant determines how quickly a player's variance (the uncertainty
of their rating) increases over time. See the end of \x1b[4m\x1b]8;;http:\
//www.glicko.net/glicko/glicko.pdf\x1b\\this paper\x1b]8;;\x1b\\\x1b[0m for details
on how to compute a good value, or you can leave it blank and a reasonable
default will be chosen.
Variance rate: "
);
let var_rate_input = read_string();
if !var_rate_input.is_empty() {
var_rate = var_rate_input
.parse::<f64>()
.unwrap_or_else(|_| error("Input is not a number", 1));
}
}
// Done configuring
new_dataset(
connection,
&name,
DatasetMetadata {
start,
end,
last_sync: start,
game_id,
game_name,
game_slug,
country,
state,
decay_const,
var_rate,
},
)
.expect("Error communicating with SQLite");
println!("\nCreated dataset {}", name);
}
pub fn delete(connection: &Connection, name: Option<String>) {
let name = name.unwrap_or_else(|| {
print!("Dataset to delete: ");
read_string()
});
delete_dataset(connection, &name).unwrap_or_else(|_| error("That dataset does not exist!", 1));
}
pub fn rename(connection: &Connection, old: Option<String>, new: Option<String>) {
let old = old.unwrap_or_else(|| {
print!("Dataset to rename: ");
read_string()
});
let new = new.unwrap_or_else(|| {
print!("Rename to: ");
read_string()
});
match rename_dataset(connection, &old, &new) {
Ok(()) => (),
Err(sqlite::Error {
code: Some(1),
message: _,
}) => error(&format!("Dataset {:?} does not exist", &old), 1),
Err(sqlite::Error {
code: Some(19),
message: _,
}) => error(&format!("Dataset {:?} already exists", &new), 1),
Err(_) => error("Unknown error occurred", 2),
};
}
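A side note on the decay-constant prompt in `new()` above: as I read `hypothetical_advantage` in src/database.rs, an advantage inferred along a chain of opponents is weighted by roughly decay_const raised to (hops - 1), which is what "propagation" means here. A toy sketch of that weighting (not project code):

```rust
// Toy illustration (not project code): an inferred advantage along a chain
// of opponents is weighted by decay_const^(hops - 1), so a constant near 0
// makes match-ups effectively independent and 1.0 lets results carry over
// fully to other opponents.
fn path_weight(decay_const: f64, hops: u32) -> f64 {
    decay_const.powi(hops as i32 - 1)
}

fn main() {
    let d = 0.9; // the default suggested by the prompt above
    println!("direct result:      {:.2}", path_weight(d, 1)); // 1.00
    println!("one intermediary:   {:.2}", path_weight(d, 2)); // 0.90
    println!("two intermediaries: {:.2}", path_weight(d, 3)); // 0.81
}
```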

src/commands/player.rs (new file, 148 lines)

@ -0,0 +1,148 @@
use crate::database::*;
use crate::queries::*;
use crate::util::*;
use pager::Pager;
use sqlite::*;
fn player_display(
PlayerData {
id,
prefix,
name,
discrim,
}: PlayerData,
with_discrim: bool,
with_id: bool,
newline: bool,
) {
if let Some(prefix) = prefix {
print!("\x1b[2m{}\x1b[22m ", prefix);
}
print!(
"\x1b[4m\x1b]8;;https://www.start.gg/user/{1}\x1b\\\
\x1b[1m{0}\x1b[22m\x1b]8;;\x1b\\\x1b[0m",
name, discrim
);
if with_discrim {
print!(" ({})", discrim);
}
if with_id {
print!("\n\x1b[1mID:\x1b[0m {}", id.0);
}
if newline {
println!();
}
}
pub fn list(connection: &Connection, dataset: Option<String>, all: bool) {
let dataset = dataset.unwrap_or_else(|| String::from("default"));
let mut players = list_players(connection, if all { None } else { Some(&dataset) })
.expect("Error communicating with SQLite");
players.sort_by_cached_key(|data| data.name.clone());
Pager::new().setup();
for player in players {
player_display(player, true, false, true);
}
}
pub fn info(connection: &Connection, dataset: Option<String>, player: String) {
let dataset = dataset.unwrap_or_else(|| String::from("default"));
let player = get_player_from_input(connection, &dataset, player)
.unwrap_or_else(|_| error("Could not find player", 1));
let (won, lost) = get_player_set_counts(connection, &dataset, player.id)
.unwrap_or_else(|_| error("Could not find player", 1));
player_display(player, true, true, true);
println!(
"\n\x1b[1mSet Count:\x1b[0m {} - {} ({:.3}%)",
won,
lost,
(won as f64 / (won + lost) as f64) * 100.0
);
}
pub fn matchup(connection: &Connection, dataset: Option<String>, player1: String, player2: String) {
let dataset = dataset.unwrap_or_else(|| String::from("default"));
let player1 = get_player_from_input(connection, &dataset, player1)
.unwrap_or_else(|_| error("Could not find player", 1));
let id1 = player1.id;
let player2 = get_player_from_input(connection, &dataset, player2)
.unwrap_or_else(|_| error("Could not find player", 1));
let id2 = player2.id;
let (hypothetical, advantage, variance) = get_network_data(connection, &dataset, id1, id2)
.expect("Error communicating with SQLite")
.map(|(adv, var)| (false, adv, var))
.unwrap_or_else(|| {
let metadata = get_metadata(connection, &dataset)
.expect("Error communicating with SQLite")
.unwrap_or_else(|| error("Dataset not found", 1));
let (adv, var) = hypothetical_advantage(
connection,
&dataset,
player1.id,
player2.id,
metadata.decay_const,
)
.expect("Error communicating with SQLite");
(true, adv, var)
});
let probability = 1.0 / (1.0 + f64::exp(-advantage));
let (color, other_color) = ansi_num_color(advantage, 0.2, 2.0);
let len1 = player1.prefix.as_deref().map(|s| s.len() + 1).unwrap_or(0) + player1.name.len();
let len2 = player2.prefix.as_deref().map(|s| s.len() + 1).unwrap_or(0) + player2.name.len();
// Prefix + name for each player
player_display(player1, false, false, false);
print!(" - ");
player_display(player2, false, false, true);
// Probability breakdown
println!(
"\x1b[1m\x1b[{4}m{0:>2$}\x1b[0m - \x1b[1m\x1b[{5}m{1:<3$}\x1b[0m",
format!("{:.1}%", (1.0 - probability) * 100.0),
format!("{:.1}%", probability * 100.0),
len1,
len2,
other_color,
color
);
// Advantage + variance
println!(
"\n\x1b[1m{0}Advantage: \x1b[{1}m{2:+.4}\x1b[39m\n{0}Variance: {3:.4}\x1b[0m",
if hypothetical { "Hypothetical " } else { "" },
color,
advantage,
variance
);
if !hypothetical {
// Set count
let (a, b) = get_matchup_set_counts(connection, &dataset, id1, id2)
.expect("Error communicating with SQLite");
println!(
"\n\x1b[1mSet Count:\x1b[0m {} - {} ({:.3}% - {:.3}%)",
a,
b,
(a as f64 / (a + b) as f64) * 100.0,
(b as f64 / (a + b) as f64) * 100.0
);
}
}
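For reference, the percentages printed by `matchup` are the logistic transform of the advantage, `1 / (1 + e^(-advantage))`. A small self-contained check of that mapping (illustrative numbers only):

```rust
// Illustrative only: the matchup percentages above come from the logistic
// function of the stored (or hypothetical) advantage.
fn win_probability(advantage: f64) -> f64 {
    1.0 / (1.0 + (-advantage).exp())
}

fn main() {
    // An advantage of 0.0 is an even 50/50; an advantage of 1.0 splits the
    // matchup roughly 73.1% / 26.9%.
    assert!((win_probability(0.0) - 0.5).abs() < 1e-12);
    let p = win_probability(1.0);
    println!("{:.1}% / {:.1}%", p * 100.0, (1.0 - p) * 100.0);
}
```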

src/commands/sync.rs (new file, 267 lines)

@ -0,0 +1,267 @@
use crate::commands::*;
use crate::database::*;
use crate::queries::*;
use crate::util::*;
use itertools::Itertools;
use sqlite::*;
use std::cmp::min;
use std::thread::sleep;
use std::time::Duration;
// Extract set data
fn get_event_sets(event: EventId, auth: &str) -> Option<Vec<SetData>> {
sleep(Duration::from_millis(700));
let sets_response = run_query::<EventSets, _>(EventSetsVars { event, page: 1 }, auth)?;
let pages = sets_response.pages;
if pages == 0 {
Some(vec![])
} else if pages == 1 {
Some(sets_response.sets)
} else {
let mut sets = sets_response.sets;
for page in 2..=pages {
println!(" (Page {})", page);
let next_response = run_query::<EventSets, _>(
EventSetsVars {
event,
page: page as i32,
},
auth,
)?;
sleep(Duration::from_millis(700));
sets.extend(next_response.sets);
}
Some(sets)
}
}
fn get_tournament_events(
metadata: &DatasetMetadata,
before: Timestamp,
auth: &str,
) -> Option<Vec<EventData>> {
println!("Accessing tournaments...");
let mut after = metadata.last_sync;
let tour_response = run_query::<TournamentEvents, _>(
TournamentEventsVars {
after_date: after,
before_date: before,
game_id: metadata.game_id,
country: metadata.country.as_deref(),
state: metadata.state.as_deref(),
},
auth,
)?;
let mut cont = !tour_response.is_empty();
after = if tour_response.iter().any(|tour| tour.time != after) {
tour_response.last().unwrap().time
} else {
Timestamp(after.0 + 1)
};
let mut tournaments = tour_response;
let mut page: u64 = 1;
while cont {
page += 1;
println!(" (Page {})", page);
let next_response = run_query::<TournamentEvents, _>(
TournamentEventsVars {
after_date: after,
before_date: before,
game_id: metadata.game_id,
country: metadata.country.as_deref(),
state: metadata.state.as_deref(),
},
auth,
)?;
cont = !next_response.is_empty();
after = if next_response.iter().any(|tour| tour.time != after) {
next_response.last().unwrap().time
} else {
Timestamp(after.0 + 1)
};
tournaments.extend(next_response);
}
println!("Deduplicating...");
Some(
tournaments
.into_iter()
.group_by(|tour| tour.time)
.into_iter()
.flat_map(|(_, group)| group.into_iter().unique_by(|tour| tour.id))
.flat_map(|tour| tour.events)
.collect::<Vec<_>>(),
)
}
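One detail of `get_tournament_events` worth calling out: pagination advances a timestamp cursor rather than a page number. After each response the cursor jumps to the last tournament's timestamp, or is bumped by one second when the whole page sits at the current cursor time (so the loop cannot get stuck), and duplicates are removed afterwards. A toy model of that cursor logic (not project code):

```rust
// Toy model (not project code) of the timestamp-cursor advance used above.
fn advance_cursor(cursor: u64, page_times: &[u64]) -> u64 {
    if page_times.iter().any(|&t| t != cursor) {
        *page_times.last().unwrap() // jump to the last timestamp in the page
    } else {
        cursor + 1 // whole page sits at the cursor time: nudge forward a second
    }
}

fn main() {
    assert_eq!(advance_cursor(100, &[100, 120, 130]), 130);
    assert_eq!(advance_cursor(100, &[100, 100, 100]), 101);
}
```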
// Dataset syncing
fn update_from_set(
connection: &Connection,
dataset: &str,
metadata: &DatasetMetadata,
event_time: Timestamp,
results: SetData,
) -> sqlite::Result<()> {
let teams = results.teams;
// Non-singles matches are currently not supported
if teams.len() != 2 || teams[0].len() != 1 || teams[1].len() != 1 {
return Ok(());
}
let players = teams.into_iter().flatten().collect::<Vec<_>>();
add_players(connection, dataset, &players)?;
let player1 = players[0].id;
let player2 = players[1].id;
// Time-adjust all variances associated with each player
let time = results.time.unwrap_or(event_time);
adjust_for_time(connection, dataset, player1, metadata.var_rate, time)?;
adjust_for_time(connection, dataset, player2, metadata.var_rate, time)?;
let (advantage, variance) = match get_network_data(connection, dataset, player1, player2) {
Err(e) => Err(e)?,
Ok(None) => initialize_edge(
connection,
dataset,
player1,
player2,
metadata.decay_const,
time,
)?,
Ok(Some(adv)) => adv,
};
// println!("{}, {} - {}, {}", player1.0, player2.0, advantage, variance);
glicko_adjust(
connection,
dataset,
&results.id,
player1,
player2,
advantage,
variance,
results.winner,
metadata.decay_const,
)?;
set_player_set_counts(
connection,
dataset,
player1,
results.winner == 0,
&results.id,
)?;
set_player_set_counts(
connection,
dataset,
player2,
results.winner == 1,
&results.id,
)?;
Ok(())
}
pub fn sync_dataset(
connection: &Connection,
dataset: &str,
metadata: DatasetMetadata,
before: Timestamp,
auth: &str,
) -> sqlite::Result<()> {
let events = get_tournament_events(&metadata, before, auth)
.unwrap_or_else(|| error("Could not access start.gg", 1));
connection.execute("BEGIN;")?;
let num_events = events.len();
for (i, event) in events.into_iter().enumerate() {
println!(
"Accessing sets from event ID {}... ({}/{})",
event.id.0,
i + 1,
num_events
);
add_event(connection, event.id, &event.slug)?;
let mut sets =
get_event_sets(event.id, auth).unwrap_or_else(|| error("Could not access start.gg", 1));
if sets.is_empty() {
println!(" No valid sets");
} else {
println!(" Updating ratings from event...");
sets.sort_by_key(|set| set.time);
sets.into_iter().try_for_each(|set| {
add_set(connection, &set.id, event.id)?;
update_from_set(connection, dataset, &metadata, event.time, set)
})?;
}
}
connection.execute("COMMIT;")
}
pub fn sync(connection: &Connection, auth: String, datasets: Vec<String>, all: bool) {
let all_datasets = list_dataset_names(connection).unwrap();
let datasets = if all {
all_datasets
} else if datasets.is_empty() {
if all_datasets.is_empty() {
print!("No datasets exist; create one? (y/n) ");
if let Some('y') = read_string().chars().next() {
dataset::new(connection, auth.clone(), Some(String::from("default")));
vec![String::from("default")]
} else {
error("No datasets specified and no default dataset", 1)
}
} else if all_datasets.iter().any(|x| x == "default") {
vec![String::from("default")]
} else {
error("No datasets specified and no default dataset", 1);
}
} else {
datasets
};
let current_time = current_time();
for dataset in datasets {
let dataset_metadata = get_metadata(connection, &dataset)
.expect("Error communicating with SQLite")
.unwrap_or_else(|| error(&format!("Dataset {} does not exist!", dataset), 1));
let before = dataset_metadata
.end
.map(|end| min(end, current_time))
.unwrap_or(current_time);
sync_dataset(connection, &dataset, dataset_metadata, before, &auth)
.expect("Error communicating with SQLite");
update_last_sync(connection, &dataset, before).expect("Error communicating with SQLite");
}
}

src/database.rs

@ -1,6 +1,5 @@
use crate::queries::*;
use sqlite::*;
use std::fs::{self, OpenOptions};
use std::path::{Path, PathBuf};
pub struct DatasetMetadata {
@ -14,17 +13,15 @@ pub struct DatasetMetadata {
pub country: Option<String>,
pub state: Option<String>,
pub set_limit: u64,
pub decay_rate: f64,
pub adj_decay_rate: f64,
pub period: f64,
pub tau: f64,
pub decay_const: f64,
pub var_rate: f64,
}
/// Return the path to the datasets file.
fn datasets_path(config_dir: &Path) -> std::io::Result<PathBuf> {
let mut path = config_dir.to_owned();
path.push("startrnr");
fn datasets_path(dir: &Path) -> std::io::Result<PathBuf> {
use std::fs::{self, OpenOptions};
let mut path = dir.to_owned();
// Create datasets path if it doesn't exist
fs::create_dir_all(&path)?;
@ -37,8 +34,8 @@ fn datasets_path(config_dir: &Path) -> std::io::Result<PathBuf> {
Ok(path)
}
pub fn open_datasets(config_dir: &Path) -> sqlite::Result<Connection> {
let path = datasets_path(config_dir).unwrap();
pub fn open_datasets(dir: &Path) -> sqlite::Result<Connection> {
let path = datasets_path(dir).unwrap();
let query = "
CREATE TABLE IF NOT EXISTS datasets (
@ -51,11 +48,8 @@ CREATE TABLE IF NOT EXISTS datasets (
game_slug TEXT NOT NULL,
country TEXT,
state TEXT,
set_limit INTEGER NOT NULL,
decay_rate REAL NOT NULL,
adj_decay_rate REAL NOT NULL,
period REAL NOT NULL,
tau REAL NOT NULL
decay_const REAL NOT NULL,
var_rate REAL NOT NULL
) STRICT;
CREATE TABLE IF NOT EXISTS players (
@ -114,11 +108,8 @@ pub fn list_datasets(connection: &Connection) -> sqlite::Result<Vec<(String, Dat
game_slug: r_.read::<&str, _>("game_slug").to_owned(),
country: r_.read::<Option<&str>, _>("country").map(String::from),
state: r_.read::<Option<&str>, _>("state").map(String::from),
set_limit: r_.read::<i64, _>("set_limit") as u64,
decay_rate: r_.read::<f64, _>("decay_rate"),
adj_decay_rate: r_.read::<f64, _>("adj_decay_rate"),
period: r_.read::<f64, _>("period"),
tau: r_.read::<f64, _>("tau"),
decay_const: r_.read::<f64, _>("decay_const"),
var_rate: r_.read::<f64, _>("var_rate"),
},
))
})
@ -158,17 +149,14 @@ pub fn new_dataset(
dataset: &str,
metadata: DatasetMetadata,
) -> sqlite::Result<()> {
let query1 = r#"INSERT INTO datasets VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"#;
let query1 = r#"INSERT INTO datasets VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"#;
let query2 = format!(
r#"CREATE TABLE "{0}_players" (
id INTEGER PRIMARY KEY REFERENCES players,
last_played INTEGER NOT NULL,
deviation REAL NOT NULL,
volatility REAL NOT NULL,
sets_won TEXT NOT NULL,
sets_won TEXT NOT NULL DEFAULT '',
sets_count_won INTEGER AS (length(sets_won) - length(replace(sets_won, ';', ''))),
sets_lost TEXT NOT NULL,
sets_lost TEXT NOT NULL DEFAULT '',
sets_count_lost INTEGER AS (length(sets_lost) - length(replace(sets_lost, ';', ''))),
sets TEXT AS (sets_won || sets_lost),
sets_count INTEGER AS (sets_count_won + sets_count_lost)
@ -178,10 +166,12 @@ CREATE TABLE "{0}_network" (
player_A INTEGER NOT NULL,
player_B INTEGER NOT NULL,
advantage REAL NOT NULL,
variance REAL NOT NULL,
last_updated INTEGER NOT NULL,
sets_A TEXT NOT NULL,
sets_A TEXT NOT NULL DEFAULT '',
sets_count_A INTEGER AS (length(sets_A) - length(replace(sets_A, ';', ''))),
sets_B TEXT NOT NULL,
sets_B TEXT NOT NULL DEFAULT '',
sets_count_B INTEGER AS (length(sets_B) - length(replace(sets_B, ';', ''))),
sets TEXT AS (sets_A || sets_B),
sets_count INTEGER AS (sets_count_A + sets_count_B),
@ -209,11 +199,8 @@ CREATE INDEX "{0}_network_B" ON "{0}_network" (player_B);"#,
.bind((7, &metadata.game_slug[..]))?
.bind((8, metadata.country.as_deref()))?
.bind((9, metadata.state.as_deref()))?
.bind((10, metadata.set_limit as i64))?
.bind((11, metadata.decay_rate))?
.bind((12, metadata.adj_decay_rate))?
.bind((13, metadata.period))?
.bind((14, metadata.tau))?
.bind((10, metadata.decay_const))?
.bind((11, metadata.var_rate))?
.try_for_each(|x| x.map(|_| ()))?;
connection.execute(query2)
@ -243,11 +230,8 @@ pub fn get_metadata(
game_slug: r_.read::<&str, _>("game_slug").to_owned(),
country: r_.read::<Option<&str>, _>("country").map(String::from),
state: r_.read::<Option<&str>, _>("state").map(String::from),
set_limit: r_.read::<i64, _>("set_limit") as u64,
decay_rate: r_.read::<f64, _>("decay_rate"),
adj_decay_rate: r_.read::<f64, _>("adj_decay_rate"),
period: r_.read::<f64, _>("period"),
tau: r_.read::<f64, _>("tau"),
decay_const: r_.read::<f64, _>("decay_const"),
var_rate: r_.read::<f64, _>("var_rate"),
})
})
.and_then(Result::ok))
@ -291,43 +275,82 @@ pub fn add_set(connection: &Connection, set: &SetId, event: EventId) -> sqlite::
pub fn add_players(
connection: &Connection,
dataset: &str,
teams: &Teams<PlayerData>,
time: Timestamp,
players: &Vec<PlayerData>,
) -> sqlite::Result<()> {
let query1 = "INSERT OR IGNORE INTO players (id, discrim, name, prefix) VALUES (?, ?, ?, ?)";
let query2 = format!(
r#"INSERT OR IGNORE INTO "{}_players"
(id, last_played, deviation, volatility, sets_won, sets_lost)
VALUES (?, ?, 2.01, 0.06, '', '')"#,
r#"INSERT OR IGNORE INTO "{}_players" (id) VALUES (?)"#,
dataset
);
teams.iter().try_for_each(|team| {
team.iter().try_for_each(
|PlayerData {
id,
name,
prefix,
discrim,
}| {
let mut statement = connection.prepare(&query1)?;
statement.bind((1, id.0 as i64))?;
statement.bind((2, &discrim[..]))?;
statement.bind((3, &name[..]))?;
statement.bind((4, prefix.as_ref().map(|x| &x[..])))?;
statement.into_iter().try_for_each(|x| x.map(|_| ()))?;
players.iter().try_for_each(
|PlayerData {
id,
name,
prefix,
discrim,
}| {
let mut statement = connection.prepare(&query1)?;
statement.bind((1, id.0 as i64))?;
statement.bind((2, &discrim[..]))?;
statement.bind((3, &name[..]))?;
statement.bind((4, prefix.as_ref().map(|x| &x[..])))?;
statement.into_iter().try_for_each(|x| x.map(|_| ()))?;
statement = connection.prepare(&query2)?;
statement.bind((1, id.0 as i64))?;
statement.bind((2, time.0 as i64))?;
statement.into_iter().try_for_each(|x| x.map(|_| ()))
},
)
})
statement = connection.prepare(&query2)?;
statement.bind((1, id.0 as i64))?;
statement.into_iter().try_for_each(|x| x.map(|_| ()))
},
)
}
pub fn get_player(connection: &Connection, player: PlayerId) -> sqlite::Result<PlayerData> {
let query = "SELECT name, prefix, discrim FROM players WHERE id = ?";
pub fn get_all_players(connection: &Connection, dataset: &str) -> sqlite::Result<Vec<PlayerId>> {
let query = format!(r#"SELECT id FROM "{}_players""#, dataset,);
connection
.prepare(&query)?
.into_iter()
.map(|r| {
let r_ = r?;
Ok(PlayerId(r_.read::<i64, _>("id") as u64))
})
.try_collect()
}
pub fn list_players(
connection: &Connection,
dataset: Option<&str>,
) -> sqlite::Result<Vec<PlayerData>> {
let query = if let Some(dataset) = dataset {
format!(r#"SELECT * FROM "{}_players", players USING (id)"#, dataset,)
} else {
String::from(r#"SELECT * FROM players"#)
};
connection
.prepare(&query)?
.into_iter()
.map(|r| {
let r_ = r?;
Ok(PlayerData {
id: PlayerId(r_.read::<i64, _>("id") as u64),
name: String::from(r_.read::<&str, _>("name")),
prefix: r_.read::<Option<&str>, _>("prefix").map(String::from),
discrim: String::from(r_.read::<&str, _>("discrim")),
})
})
.try_collect()
}
pub fn get_player(
connection: &Connection,
dataset: &str,
player: PlayerId,
) -> sqlite::Result<PlayerData> {
let query = format!(
r#"SELECT * FROM "{}_players", players USING (id) WHERE id = ?"#,
dataset
);
let mut statement = connection.prepare(&query)?;
statement.bind((1, player.0 as i64))?;
@ -342,9 +365,13 @@ pub fn get_player(connection: &Connection, player: PlayerId) -> sqlite::Result<P
pub fn get_player_from_discrim(
connection: &Connection,
dataset: &str,
discrim: &str,
) -> sqlite::Result<PlayerData> {
let query = "SELECT id, name, prefix FROM players WHERE discrim = ?";
let query = format!(
r#"SELECT * FROM "{}_players", players USING (id) WHERE discrim = ?"#,
dataset
);
let mut statement = connection.prepare(&query)?;
statement.bind((1, discrim))?;
@ -357,8 +384,15 @@ pub fn get_player_from_discrim(
})
}
pub fn match_player_name(connection: &Connection, name: &str) -> sqlite::Result<Vec<PlayerData>> {
let query = "SELECT * FROM players WHERE name LIKE ?";
pub fn match_player_name(
connection: &Connection,
dataset: &str,
name: &str,
) -> sqlite::Result<Vec<PlayerData>> {
let query = format!(
r#"SELECT * FROM "{}_players", players USING (id) WHERE name LIKE ?"#,
dataset
);
connection
.prepare(&query)?
@ -376,26 +410,6 @@ pub fn match_player_name(connection: &Connection, name: &str) -> sqlite::Result<
.try_collect()
}
pub fn get_player_rating_data(
connection: &Connection,
dataset: &str,
player: PlayerId,
) -> sqlite::Result<(f64, f64, Timestamp)> {
let query = format!(
r#"SELECT deviation, volatility, last_played FROM "{}_players" WHERE id = ?"#,
dataset
);
let mut statement = connection.prepare(&query)?;
statement.bind((1, player.0 as i64))?;
statement.next()?;
Ok((
statement.read::<f64, _>("deviation")?,
statement.read::<f64, _>("volatility")?,
Timestamp(statement.read::<i64, _>("last_played")? as u64),
))
}
pub fn get_player_set_counts(
connection: &Connection,
dataset: &str,
@ -437,27 +451,21 @@ pub fn get_matchup_set_counts(
))
}
pub fn set_player_data(
pub fn set_player_set_counts(
connection: &Connection,
dataset: &str,
player: PlayerId,
last_played: Timestamp,
deviation: f64,
volatility: f64,
won: bool,
set: &SetId,
) -> sqlite::Result<()> {
let query = format!(
r#"UPDATE "{}_players" SET deviation = :dev, volatility = :vol, last_played = :last,
sets_won = iif(:won, sets_won || :set || ';', sets_won),
sets_lost = iif(:won, sets_lost, sets_lost || :set || ';') WHERE id = :id"#,
r#"UPDATE "{}_players" SET
sets_won = iif(:won, sets_won || :set || ';', sets_won),
sets_lost = iif(:won, sets_lost, sets_lost || :set || ';') WHERE id = :id"#,
dataset
);
let mut statement = connection.prepare(&query)?;
statement.bind((":dev", deviation))?;
statement.bind((":vol", volatility))?;
statement.bind((":last", last_played.0 as i64))?;
statement.bind((":id", player.0 as i64))?;
statement.bind((":won", if won { 1 } else { 0 }))?;
statement.bind((":set", &set.0.to_string()[..]))?;
@ -465,18 +473,18 @@ pub fn set_player_data(
Ok(())
}
pub fn get_advantage(
pub fn get_network_data(
connection: &Connection,
dataset: &str,
player1: PlayerId,
player2: PlayerId,
) -> sqlite::Result<Option<f64>> {
) -> sqlite::Result<Option<(f64, f64)>> {
if player1 == player2 {
return Ok(Some(0.0));
return Ok(Some((0.0, 0.0)));
}
let query = format!(
r#"SELECT iif(:a > :b, -advantage, advantage) AS advantage FROM "{}_network"
r#"SELECT iif(:a > :b, -advantage, advantage) AS advantage, variance FROM "{}_network"
WHERE player_A = min(:a, :b) AND player_B = max(:a, :b)"#,
dataset
);
@ -485,20 +493,24 @@ pub fn get_advantage(
statement.bind((":a", player1.0 as i64))?;
statement.bind((":b", player2.0 as i64))?;
statement.next()?;
statement.read::<Option<f64>, _>("advantage")
Ok(statement
.read::<Option<f64>, _>("advantage")?
.zip(statement.read::<Option<f64>, _>("variance")?))
}
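A note on the convention used by `get_network_data` and the other edge queries, as I read the SQL: each pair is stored once with `player_A = min(a, b)`, and the advantage's sign is flipped whenever the pair is queried in the opposite order. A toy model of that convention (not project code):

```rust
// Toy model (not project code): the edge is stored once for the (min, max)
// id ordering, and reading it in the opposite order negates the advantage.
fn read_edge(stored_advantage: f64, a: u64, b: u64) -> f64 {
    if a > b {
        -stored_advantage
    } else {
        stored_advantage
    }
}

fn main() {
    let stored = 1.0;
    assert_eq!(read_edge(stored, 1, 2), 1.0);
    assert_eq!(read_edge(stored, 2, 1), -1.0);
}
```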
pub fn insert_advantage(
pub fn insert_network_data(
connection: &Connection,
dataset: &str,
player1: PlayerId,
player2: PlayerId,
advantage: f64,
variance: f64,
time: Timestamp,
) -> sqlite::Result<()> {
let query = format!(
r#"INSERT INTO "{}_network"
(player_A, player_B, advantage, sets_A, sets_B)
VALUES (min(:a, :b), max(:a, :b), iif(:a > :b, -:v, :v), '', '')"#,
(player_A, player_B, advantage, variance, last_updated)
VALUES (min(:a, :b), max(:a, :b), iif(:a > :b, -:v, :v), :d, :t)"#,
dataset
);
@ -506,32 +518,67 @@ pub fn insert_advantage(
statement.bind((":a", player1.0 as i64))?;
statement.bind((":b", player2.0 as i64))?;
statement.bind((":v", advantage))?;
statement.bind((":d", variance))?;
statement.bind((":t", time.0 as i64))?;
statement.into_iter().try_for_each(|x| x.map(|_| ()))
}
pub fn adjust_advantages(
pub fn adjust_for_time(
connection: &Connection,
dataset: &str,
set: SetId,
player: PlayerId,
var_rate: f64,
time: Timestamp,
) -> sqlite::Result<()> {
let query = format!(
r#"UPDATE "{0}_network" SET
variance = min(variance + :c * (:t - last_updated), 5.0),
last_updated = :t
WHERE player_A = :i OR player_B = :i"#,
dataset
);
let mut statement = connection.prepare(query)?;
statement.bind((":i", player.0 as i64))?;
statement.bind((":c", var_rate))?;
statement.bind((":t", time.0 as i64))?;
statement.into_iter().try_for_each(|x| x.map(|_| ()))
}
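The variance-rate prompt in src/commands/dataset.rs and the `adjust_for_time` query above fit together as simple linear growth: an edge's variance increases by var_rate for every elapsed second since it was last updated, capped at 5.0. A toy check with made-up numbers (not project code):

```rust
// Toy check (made-up numbers, not project code): variance grows linearly
// with elapsed seconds and is capped at 5.0, mirroring the UPDATE above.
fn grow_variance(variance: f64, var_rate: f64, elapsed_secs: u64) -> f64 {
    (variance + var_rate * elapsed_secs as f64).min(5.0)
}

fn main() {
    const SECS_IN_YEAR: u64 = 365 * 24 * 60 * 60; // local stand-in constant
    // The default rate from commands/dataset.rs spreads (10.0 - 0.04) over
    // three years, so an untouched edge gains about 3.32 variance per year.
    let var_rate = (10.0 - 0.04) / SECS_IN_YEAR as f64 / 3.0;
    println!("{:.2}", grow_variance(0.5, var_rate, SECS_IN_YEAR));
}
```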
pub fn glicko_adjust(
connection: &Connection,
dataset: &str,
set: &SetId,
player1: PlayerId,
player2: PlayerId,
advantage: f64,
variance: f64,
winner: usize,
adjust1: f64,
adjust2: f64,
decay_rate: f64,
decay_const: f64,
) -> sqlite::Result<()> {
let score = if winner != 0 { 1.0 } else { 0.0 };
let exp_val = 1.0 / (1.0 + (-advantage).exp());
let like_var = 1.0 / exp_val / (1.0 - exp_val);
let var_new = 1.0 / (1.0 / variance + 1.0 / like_var);
let adjust = score - exp_val;
let query1 = format!(
r#"UPDATE "{}_network"
SET advantage = advantage + iif(:pl = player_A, -:v, :v) * :d
r#"UPDATE "{}_network" SET
variance = 1.0 / (1.0 / variance + :d / :lv),
advantage = advantage + :d * iif(:pl = player_A, -:adj, :adj)
/ (1.0 / variance + :d / :lv)
WHERE (player_A = :pl AND player_B != :plo)
OR (player_B = :pl AND player_A != :plo)"#,
dataset
);
let query2 = format!(
r#"UPDATE "{}_network"
SET advantage = advantage + iif(:a > :b, -:v, :v),
sets_A = iif(:w = (:a > :b), sets_A || :set || ';', sets_A),
sets_B = iif(:w = (:b > :a), sets_B || :set || ';', sets_B)
r#"UPDATE "{}_network" SET
variance = :var,
advantage = advantage + iif(:a > :b, -:adj, :adj) * :var,
sets_A = iif(:w = (:a > :b), sets_A || :set || ';', sets_A),
sets_B = iif(:w = (:b > :a), sets_B || :set || ';', sets_B)
WHERE player_A = min(:a, :b) AND player_B = max(:a, :b)"#,
dataset
);
@ -539,21 +586,24 @@ WHERE player_A = min(:a, :b) AND player_B = max(:a, :b)"#,
let mut statement = connection.prepare(&query1)?;
statement.bind((":pl", player1.0 as i64))?;
statement.bind((":plo", player2.0 as i64))?;
statement.bind((":v", adjust1))?;
statement.bind((":d", decay_rate))?;
statement.bind((":adj", -0.5 * adjust))?;
statement.bind((":d", decay_const))?;
statement.bind((":lv", like_var))?;
statement.into_iter().try_for_each(|x| x.map(|_| ()))?;
statement = connection.prepare(&query1)?;
statement.bind((":pl", player2.0 as i64))?;
statement.bind((":plo", player1.0 as i64))?;
statement.bind((":v", adjust2))?;
statement.bind((":d", decay_rate))?;
statement.bind((":adj", 0.5 * adjust))?;
statement.bind((":d", decay_const))?;
statement.bind((":lv", like_var))?;
statement.into_iter().try_for_each(|x| x.map(|_| ()))?;
statement = connection.prepare(&query2)?;
statement.bind((":a", player1.0 as i64))?;
statement.bind((":b", player2.0 as i64))?;
statement.bind((":v", adjust2 - adjust1))?;
statement.bind((":adj", adjust))?;
statement.bind((":var", var_new))?;
statement.bind((":w", winner as i64))?;
statement.bind((":set", &set.0.to_string()[..]))?;
statement.into_iter().try_for_each(|x| x.map(|_| ()))
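To make the new update rule above easier to follow: as far as I can tell, each direct edge is updated like a Gaussian approximation to one Bayesian logistic-regression step, where a single set contributes precision p(1 - p) (p being the expected win probability) and the mean shifts by (score - p) times the posterior variance. A hedged, self-contained sketch with toy numbers, ignoring the player_A/player_B sign handling (not project code):

```rust
// Hedged sketch (toy numbers, not project code) of one direct-edge update as
// I read the SQL above: prior (advantage, variance) -> posterior after a set.
fn direct_edge_update(advantage: f64, variance: f64, won: bool) -> (f64, f64) {
    let score = if won { 1.0 } else { 0.0 };
    let p = 1.0 / (1.0 + (-advantage).exp()); // expected win probability
    let like_var = 1.0 / (p * (1.0 - p)); // "variance" of the single observation
    let var_new = 1.0 / (1.0 / variance + 1.0 / like_var); // precisions add
    let adv_new = advantage + (score - p) * var_new; // mean shifts toward the result
    (adv_new, var_new)
}

fn main() {
    // Even matchup (advantage 0.0, variance 1.0): one win moves the advantage
    // by 0.5 * 0.8 = 0.4 and shrinks the variance to 0.8.
    let (adv, var) = direct_edge_update(0.0, 1.0, true);
    println!("advantage = {:.3}, variance = {:.3}", adv, var);
}
```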
@ -563,11 +613,11 @@ pub fn get_edges(
connection: &Connection,
dataset: &str,
player: PlayerId,
) -> sqlite::Result<Vec<(PlayerId, f64, u64)>> {
) -> sqlite::Result<Vec<(PlayerId, f64, f64)>> {
let query = format!(
r#"SELECT
iif(:pl = player_B, player_A, player_B) AS id,
iif(:pl = player_B, -advantage, advantage) AS advantage, sets_count
iif(:pl = player_B, -advantage, advantage) AS advantage, variance
FROM "{}_network"
WHERE player_A = :pl OR player_B = :pl"#,
dataset
@ -582,7 +632,7 @@ pub fn get_edges(
Ok((
PlayerId(r_.read::<i64, _>("id") as u64),
r_.read::<f64, _>("advantage"),
r_.read::<i64, _>("sets_count") as u64,
r_.read::<f64, _>("variance"),
))
})
.try_collect()
@ -617,20 +667,20 @@ pub fn hypothetical_advantage(
dataset: &str,
player1: PlayerId,
player2: PlayerId,
set_limit: u64,
decay_rate: f64,
adj_decay_rate: f64,
) -> sqlite::Result<f64> {
decay_const: f64,
) -> sqlite::Result<(f64, f64)> {
use std::collections::{HashSet, VecDeque};
// Check trivial cases
if player1 == player2 || either_isolated(connection, dataset, player1, player2)? {
return Ok(0.0);
if player1 == player2 {
return Ok((0.0, 0.0));
} else if decay_const < 0.05 || either_isolated(connection, dataset, player1, player2)? {
return Ok((0.0, 5.0));
}
let mut visited: HashSet<PlayerId> = HashSet::new();
let mut queue: VecDeque<(PlayerId, Vec<(f64, f64)>)> =
VecDeque::from([(player1, Vec::from([(0.0, 1.0)]))]);
let mut queue: VecDeque<(PlayerId, Vec<(f64, f64, f64)>)> =
VecDeque::from([(player1, Vec::from([(0.0, 0.0, 1.0 / decay_const)]))]);
let mut final_paths = Vec::new();
@ -639,7 +689,7 @@ pub fn hypothetical_advantage(
let connections = get_edges(connection, dataset, visiting)?;
for (id, adv, sets) in connections
for (id, adv, var) in connections
.into_iter()
.filter(|(id, _, _)| !visited.contains(id))
{
@ -653,12 +703,9 @@ pub fn hypothetical_advantage(
};
if rf.len() < 100 {
let decay = if sets >= set_limit {
decay_rate
} else {
adj_decay_rate
};
let iter = paths.iter().map(|(a, d)| (a + adv, d * decay));
let iter = paths
.iter()
.map(|(av, vr, dec)| (av + adv, vr + var, dec * decay_const));
rf.extend(iter);
rf.truncate(100);
@ -668,22 +715,23 @@ pub fn hypothetical_advantage(
visited.insert(visiting);
}
let max_decay = final_paths
.iter()
.map(|x| x.1)
.max_by(|d1, d2| d1.partial_cmp(d2).unwrap());
if let Some(mdec) = max_decay {
let sum_decay = final_paths.iter().map(|x| x.1).sum::<f64>();
Ok(final_paths
.into_iter()
.map(|(adv, dec)| adv * dec)
.sum::<f64>()
/ sum_decay
* mdec)
} else {
if final_paths.len() == 0 {
// No paths found
Ok(0.0)
Ok((0.0, 5.0))
} else {
let sum_decay: f64 = final_paths.iter().map(|(_, _, dec)| dec).sum();
let (final_adv, final_var) = final_paths
.into_iter()
.fold((0.0, 0.0), |(av, vr), (adv, var, dec)| {
(av + adv * dec, vr + (var + adv * adv) * dec)
});
let mut final_adv = final_adv / sum_decay;
let mut final_var = final_var / sum_decay - final_adv * final_adv;
if final_var > 5.0 {
final_adv = final_adv * (5.0 / final_var).sqrt();
final_var = 5.0;
}
Ok((final_adv, final_var))
}
}
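If I read the fold above correctly, the collected paths are combined as a decay-weighted mixture: the advantage is the weighted mean, and the variance follows the law of total variance (the weighted mean of var + adv*adv, minus the squared mean), with a rescale whenever the result exceeds the 5.0 cap. A hedged illustration with toy numbers (not project code):

```rust
// Hedged illustration (toy numbers, not project code) of the decay-weighted
// mixture above. Each path is (advantage, variance, decay weight).
fn combine_paths(paths: &[(f64, f64, f64)]) -> (f64, f64) {
    let sum_decay: f64 = paths.iter().map(|&(_, _, dec)| dec).sum();
    let (adv_acc, var_acc) = paths
        .iter()
        .fold((0.0, 0.0), |(a, v), &(adv, var, dec)| {
            (a + adv * dec, v + (var + adv * adv) * dec)
        });
    let mean = adv_acc / sum_decay;
    // Law of total variance for the mixture: E[var] + E[adv^2] - E[adv]^2.
    let variance = var_acc / sum_decay - mean * mean;
    (mean, variance)
}

fn main() {
    // Two equally weighted paths with advantages +1 and -1: the mean is 0,
    // and the variance is the average path variance (0.5) plus the spread
    // of the path means (1.0), i.e. 1.5.
    let (mean, variance) = combine_paths(&[(1.0, 0.5, 1.0), (-1.0, 0.5, 1.0)]);
    println!("advantage = {:.2}, variance = {:.2}", mean, variance);
}
```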
@ -692,21 +740,12 @@ pub fn initialize_edge(
dataset: &str,
player1: PlayerId,
player2: PlayerId,
set_limit: u64,
decay_rate: f64,
adj_decay_rate: f64,
) -> sqlite::Result<f64> {
let adv = hypothetical_advantage(
connection,
dataset,
player1,
player2,
set_limit,
decay_rate,
adj_decay_rate,
)?;
insert_advantage(connection, dataset, player1, player2, adv)?;
Ok(adv)
decay_const: f64,
time: Timestamp,
) -> sqlite::Result<(f64, f64)> {
let (adv, var) = hypothetical_advantage(connection, dataset, player1, player2, decay_const)?;
insert_network_data(connection, dataset, player1, player2, adv, var, time)?;
Ok((adv, var))
}
// Tests
@ -728,10 +767,8 @@ CREATE TABLE IF NOT EXISTS datasets (
country TEXT,
state TEXT,
set_limit INTEGER NOT NULL,
decay_rate REAL NOT NULL,
adj_decay_rate REAL NOT NULL,
period REAL NOT NULL,
tau REAL NOT NULL
decay_const REAL NOT NULL,
var_rate REAL NOT NULL
) STRICT;
CREATE TABLE IF NOT EXISTS players (
@ -770,11 +807,8 @@ CREATE TABLE IF NOT EXISTS sets (
game_slug: String::from("test"),
country: None,
state: None,
set_limit: 0,
decay_rate: 0.5,
adj_decay_rate: 0.5,
period: (3600 * 24 * 30) as f64,
tau: 0.2,
decay_const: 0.5,
var_rate: 0.00000001,
}
}
@ -788,141 +822,4 @@ CREATE TABLE IF NOT EXISTS sets (
})
.collect()
}
#[test]
fn sqlite_sanity_check() -> sqlite::Result<()> {
let test_value: i64 = 2;
let connection = sqlite::open(":memory:")?;
connection.execute(
r#"CREATE TABLE test (a INTEGER);
INSERT INTO test VALUES (1);
INSERT INTO test VALUES (2)"#,
)?;
let mut statement = connection.prepare("SELECT * FROM test WHERE a = ?")?;
statement.bind((1, test_value))?;
statement.next()?;
assert_eq!(statement.read::<i64, _>("a")?, test_value);
Ok(())
}
#[test]
fn test_players() -> sqlite::Result<()> {
let connection = mock_datasets()?;
new_dataset(&connection, "test", metadata())?;
add_players(&connection, "test", &vec![players(2)], Timestamp(0))?;
let mut statement = connection.prepare("SELECT * FROM players WHERE id = 1")?;
statement.next()?;
assert_eq!(statement.read::<i64, _>("id")?, 1);
assert_eq!(statement.read::<String, _>("name")?, "1");
assert_eq!(statement.read::<Option<String>, _>("prefix")?, None);
Ok(())
}
#[test]
fn edge_insert_get() -> sqlite::Result<()> {
let connection = mock_datasets()?;
new_dataset(&connection, "test", metadata())?;
add_players(&connection, "test", &vec![players(2)], Timestamp(0))?;
insert_advantage(&connection, "test", PlayerId(2), PlayerId(1), 1.0)?;
assert_eq!(
get_advantage(&connection, "test", PlayerId(1), PlayerId(2))?,
Some(-1.0)
);
assert_eq!(
get_advantage(&connection, "test", PlayerId(2), PlayerId(1))?,
Some(1.0)
);
Ok(())
}
#[test]
fn player_all_edges() -> sqlite::Result<()> {
let connection = mock_datasets()?;
new_dataset(&connection, "test", metadata())?;
add_players(&connection, "test", &vec![players(3)], Timestamp(0))?;
insert_advantage(&connection, "test", PlayerId(2), PlayerId(1), 1.0)?;
insert_advantage(&connection, "test", PlayerId(1), PlayerId(3), 5.0)?;
assert_eq!(
get_edges(&connection, "test", PlayerId(1))?,
[(PlayerId(2), -1.0, 0), (PlayerId(3), 5.0, 0)]
);
assert_eq!(
get_edges(&connection, "test", PlayerId(2))?,
[(PlayerId(1), 1.0, 0)]
);
assert_eq!(
get_edges(&connection, "test", PlayerId(3))?,
[(PlayerId(1), -5.0, 0)]
);
Ok(())
}
#[test]
fn hypoth_adv_trivial() -> sqlite::Result<()> {
let num_players = 3;
let connection = mock_datasets()?;
new_dataset(&connection, "test", metadata())?;
add_players(
&connection,
"test",
&vec![players(num_players)],
Timestamp(0),
)?;
let metadata = metadata();
for i in 1..=num_players {
for j in 1..=num_players {
assert_eq!(
hypothetical_advantage(
&connection,
"test",
PlayerId(i),
PlayerId(j),
metadata.set_limit,
metadata.decay_rate,
metadata.adj_decay_rate
)?,
0.0
);
}
}
Ok(())
}
#[test]
fn hypoth_adv1() -> sqlite::Result<()> {
let connection = mock_datasets()?;
new_dataset(&connection, "test", metadata())?;
add_players(&connection, "test", &vec![players(2)], Timestamp(0))?;
insert_advantage(&connection, "test", PlayerId(1), PlayerId(2), 1.0)?;
let metadata = metadata();
assert_eq!(
hypothetical_advantage(
&connection,
"test",
PlayerId(1),
PlayerId(2),
metadata.set_limit,
metadata.decay_rate,
metadata.adj_decay_rate
)?,
1.0
);
Ok(())
}
}

src/main.rs

@ -1,17 +1,15 @@
#![feature(iterator_try_collect)]
#![feature(extend_one)]
use chrono::{Local, TimeZone, Utc};
use clap::{Parser, Subcommand};
use sqlite::*;
use std::{cmp::min, path::PathBuf};
use std::path::PathBuf;
mod queries;
use queries::*;
mod commands;
use commands::*;
mod database;
use database::*;
mod sync;
use sync::*;
use database::open_datasets;
mod queries;
use queries::get_auth_token;
mod util;
use util::*;
@ -45,9 +43,7 @@ AUTH_TOKEN, or in a text file '<CONFIG_DIR>/auth.txt'."
value_name = "DIR",
global = true,
help = "Config directory",
long_help = "This flag overrides the default config directory.
If this directory does not exist, it will be created and a database file will
be initialized within it."
long_help = "This flag overrides the default config directory."
)]
config_dir: Option<PathBuf>,
}
@ -102,6 +98,11 @@ enum DatasetSC {
#[derive(Subcommand)]
enum PlayerSC {
#[command(about = "List all players in a dataset")]
List {
#[arg(short, long, help = "List all players ever recorded")]
all: bool,
},
#[command(about = "Get info about a player")]
Info { player: String },
#[command(about = "Matchup data between two players")]
@ -111,668 +112,50 @@ enum PlayerSC {
fn main() {
let cli = Cli::parse();
let config_dir = cli
.config_dir
.unwrap_or_else(|| dirs::config_dir().expect("Could not determine config directory"));
let connection =
open_datasets(&config_dir).unwrap_or_else(|_| error("Could not open datasets file", 2));
let config_dir = cli.config_dir.unwrap_or_else(|| {
let mut dir = dirs::config_dir().expect("Could not determine config directory");
dir.push("startrnr");
dir
});
let mut data_dir = dirs::data_dir().expect("Could not determine user data directory");
data_dir.push("startrnr");
let connection =
open_datasets(&data_dir).unwrap_or_else(|_| error("Could not open datasets file", 2));
#[allow(unreachable_patterns)]
match cli.subcommand {
Subcommands::Dataset {
subcommand: DatasetSC::List,
} => dataset_list(&connection),
} => dataset::list(&connection),
Subcommands::Dataset {
subcommand: DatasetSC::New { name },
} => dataset_new(&connection, get_auth_token(&config_dir), name),
} => dataset::new(&connection, get_auth_token(&config_dir), name),
Subcommands::Dataset {
subcommand: DatasetSC::Delete { name },
} => dataset_delete(&connection, name),
} => dataset::delete(&connection, name),
Subcommands::Dataset {
subcommand: DatasetSC::Rename { old, new },
} => dataset_rename(&connection, old, new),
} => dataset::rename(&connection, old, new),
Subcommands::Player {
subcommand: PlayerSC::List { all },
dataset,
} => player::list(&connection, dataset, all),
Subcommands::Player {
subcommand: PlayerSC::Info { player },
dataset,
} => player_info(&connection, dataset, player),
} => player::info(&connection, dataset, player),
Subcommands::Player {
subcommand: PlayerSC::Matchup { player1, player2 },
dataset,
} => player_matchup(&connection, dataset, player1, player2),
} => player::matchup(&connection, dataset, player1, player2),
Subcommands::Sync { datasets, all } => {
sync(&connection, get_auth_token(&config_dir), datasets, all)
sync::sync(&connection, get_auth_token(&config_dir), datasets, all)
}
Subcommands::Ranking {
subcommand: RankingSC::Create,
dataset,
} => ranking_create(&connection, dataset),
_ => eprintln!("This feature is currently unimplemented."),
}
}
// Datasets
fn dataset_list(connection: &Connection) {
let datasets = list_datasets(&connection).expect("Error communicating with SQLite");
for (name, metadata) in datasets {
print!(
"· \x1b[1m\x1b[34m{}\x1b[0m
\x1b[4m\x1b]8;;https://www.start.gg/{}\x1b\\{}\x1b]8;;\x1b\\\x1b[0m ",
name, metadata.game_slug, metadata.game_name
);
if let Some(country) = metadata.country {
if let Some(state) = metadata.state {
println!("(in {}, {})", country, state);
} else {
println!("(in {})", country);
}
} else {
println!("(Global)");
}
let start = if metadata.start.0 != 1 {
Some(
Utc.timestamp_opt(metadata.start.0 as i64, 0)
.unwrap()
.format("%m/%d/%Y"),
)
} else {
None
};
let end = metadata
.end
.map(|x| Utc.timestamp_opt(x.0 as i64, 0).unwrap().format("%m/%d/%Y"));
match (start, end) {
(None, None) => (),
(Some(s), None) => println!("after {}", s),
(None, Some(e)) => println!("until {}", e),
(Some(s), Some(e)) => println!("{} - {}", s, e),
}
if metadata.last_sync == metadata.start {
print!("\x1b[1m\x1b[91mUnsynced\x1b[0m");
} else if Some(metadata.last_sync) == metadata.end {
print!("\x1b[1m\x1b[92mComplete\x1b[0m");
} else {
print!(
"\x1b[1mLast synced:\x1b[0m {}",
Local
.timestamp_opt(metadata.last_sync.0 as i64, 0)
.unwrap()
.format("%b %e, %Y %r")
);
}
if current_time().0 - metadata.last_sync.0 > SECS_IN_WEEK
&& Some(metadata.last_sync) != metadata.end
{
if name == "default" {
print!(" - \x1b[33mRun 'startrnr sync' to update!\x1b[0m");
} else {
print!(
" - \x1b[33mRun 'startrnr sync {:?}' to update!\x1b[0m",
name
);
}
}
println!();
if metadata.set_limit != 0 && metadata.decay_rate != metadata.adj_decay_rate {
println!("\x1b[1mSet Limit:\x1b[0m {}", metadata.set_limit);
println!(
"\x1b[1mNetwork Decay Rate:\x1b[0m {} (adjusted {})",
metadata.decay_rate, metadata.adj_decay_rate
);
} else {
println!("\x1b[1mNetwork Decay Rate:\x1b[0m {}", metadata.decay_rate);
}
println!(
"\x1b[1mRating Period:\x1b[0m {} days",
metadata.period / SECS_IN_DAY as f64
);
println!("\x1b[1mTau Constant:\x1b[0m {}\n", metadata.tau);
}
}
fn dataset_new(connection: &Connection, auth: String, name: Option<String>) {
// Name
let name = name.unwrap_or_else(|| {
print!("Name of new dataset: ");
read_string()
});
// Game
print!("Search games: ");
let games = run_query::<VideogameSearch, _>(
VideogameSearchVars {
name: &read_string(),
},
&auth,
)
.unwrap_or_else(|| error("Could not access start.gg", 1));
if games.is_empty() {
issue("No games found!", 0);
}
println!("\nSearch results:");
for (i, game) in games.iter().enumerate() {
println!("{} - {}", i, game.name);
}
print!("\nGame to track ratings for (0-{}): ", games.len() - 1);
let index = read_string()
.parse::<usize>()
.unwrap_or_else(|_| error("Not an integer", 1));
if index >= games.len() {
error("Out of range!", 1);
}
let VideogameData {
id: game_id,
name: game_name,
slug: game_slug,
} = games[index].clone();
// Location
print!(
"
\x1b[1mCountry\x1b[0m
Enter the two-letter code for the country you want to track ratings in, e.g.
\"US\" for the United States. See \x1b[4m\x1b]8;;https://www.ups.com/worldshiphelp/\
WSA/ENU/AppHelp/mergedProjects/CORE/Codes/Country_Territory_and_Currency_Codes.htm\
\x1b\\this site\x1b]8;;\x1b\\\x1b[0m for a list of these codes.
If no code is entered, then the dataset will track all players globally.
Country to track ratings for (leave empty for none): "
);
let country = {
let mut string = read_string();
if string.is_empty() {
None
} else if string.len() == 2 && string.chars().all(|c| c.is_ascii_alphabetic()) {
string.make_ascii_uppercase();
Some(string)
} else {
error("Input is not a two-letter code", 1);
}
};
let state = if country.as_ref().is_some_and(|s| s == "US" || s == "CA") {
print!(
"
\x1b[1mState/Province\x1b[0m
Enter the two-letter code for the US state or Canadian province you want to track
ratings in, e.g. \"CA\" for California. See \x1b[4m\x1b]8;;https://www.ups.com/worldshiphelp/\
WSA/ENU/AppHelp/mergedProjects/CORE/Codes/State_Province_Codes.htm\x1b\\this site\
\x1b]8;;\x1b\\\x1b[0m for a list of these codes.
If no code is entered, then the dataset will track all players within the country.
State/province to track ratings for (leave empty for none): "
);
let mut string = read_string();
if string.is_empty() {
None
} else if string.len() == 2 && string.chars().all(|c| c.is_ascii_alphabetic()) {
string.make_ascii_uppercase();
Some(string)
} else {
error("Input is not a two-letter code", 1);
}
} else {
None
};
// Interval
print!(
"
\x1b[1mStart Date\x1b[0m
The rating system will process tournaments starting at this date. If only a year
is entered, the date will be the start of that year.
Start date (year, m/y, or m/d/y): "
);
let start = {
let string = read_string();
if string.is_empty() {
Timestamp(1)
} else if string.chars().all(|c| c.is_ascii_digit() || c == '/') {
if let Some((y, m, d)) = match string.split('/').collect::<Vec<_>>()[..] {
[] => None,
[y] => Some((y.parse().unwrap(), 1, 1)),
[m, y] => Some((y.parse().unwrap(), m.parse().unwrap(), 1)),
[m, d, y] => Some((y.parse().unwrap(), m.parse().unwrap(), d.parse().unwrap())),
_ => error("Input is not a date", 1),
} {
Timestamp(Utc.with_ymd_and_hms(y, m, d, 0, 1, 1).unwrap().timestamp() as u64)
} else {
Timestamp(1)
}
} else {
error("Input is not a date", 1);
}
};
print!(
"
\x1b[1mEnd Date\x1b[0m
The rating system will stop processing tournaments when it reaches this date. If
only a year is entered, the date will be the end of that year.
End date (year, m/y, or m/d/y): "
);
let end = {
let string = read_string();
if string.is_empty() {
None
} else if string.chars().all(|c| c.is_ascii_digit() || c == '/') {
if let Some((y, m, d)) = match string.split('/').collect::<Vec<_>>()[..] {
[] => None,
[y] => Some((y.parse().unwrap(), 12, 31)),
[m, y] => Some((y.parse().unwrap(), m.parse().unwrap(), 30)),
[m, d, y] => Some((y.parse().unwrap(), m.parse().unwrap(), d.parse().unwrap())),
_ => error("Input is not a date", 1),
} {
Some(Timestamp(
Utc.with_ymd_and_hms(y, m, d, 11, 59, 59)
.unwrap()
.timestamp() as u64,
))
} else {
None
}
} else {
error("Input is not a date", 1);
}
};
// Set Limit
let mut set_limit = 0;
print!(
"
\x1b[1mSet Limit\x1b[0m
The set limit is an optional feature of the rating system that defines how many
sets must be played between two players for their rating data to be considered
trustworthy.
This value should be set low, i.e. not more than 5 or 6.
Set limit (default 0): "
);
let set_limit_input = read_string();
if !set_limit_input.is_empty() {
set_limit = set_limit_input
.parse::<u64>()
.unwrap_or_else(|_| error("Input is not an integer", 1));
}
// Advanced Options
// Defaults
let mut decay_rate = 0.8;
let mut adj_decay_rate = 0.6;
let mut period_days = 40.0;
let mut tau = 0.4;
print!("\nConfigure advanced options? (y/n) ");
if let Some('y') = read_string().chars().next() {
// Decay Rate
print!(
"
\x1b[1mNetwork Decay Rate\x1b[0m
The network decay rate is a number between 0 and 1 that controls how the
advantage network reacts to player wins and losses. If the decay rate is 1,
then it is assumed that a player's skill against one opponent always carries
over to all other opponents. If the decay rate is 0, then all player match-ups
are assumed to be independent of each other.
Network decay rate (default 0.8): "
);
let decay_rate_input = read_string();
if !decay_rate_input.is_empty() {
decay_rate = decay_rate_input
.parse::<f64>()
.unwrap_or_else(|_| error("Input is not a number", 1));
if decay_rate < 0.0 || decay_rate > 1.0 {
error("Input is not between 0 and 1", 1);
}
}
// Adjusted Decay Rate
if set_limit != 0 {
print!(
"
\x1b[1mAdjusted Network Decay Rate\x1b[0m
If the number of sets played between two players is less than the set limit,
then this value is used instead of the regular network decay rate.
This value should be \x1b[1mlower\x1b[0m than the network decay rate.
Adjusted network decay rate (default 0.6): "
);
let adj_decay_rate_input = read_string();
if !adj_decay_rate_input.is_empty() {
adj_decay_rate = adj_decay_rate_input
.parse::<f64>()
.unwrap_or_else(|_| error("Input is not a number", 1));
if decay_rate < 0.0 || decay_rate > 1.0 {
error("Input is not between 0 and 1", 1);
}
}
}
// Rating Period
print!(
"
\x1b[1mRating Period\x1b[0m
The rating period is an interval of time that dictates how player ratings change
during inactivity. Ideally the rating period should be somewhat long, long
enough to expect almost every player in the dataset to have played at least a
few sets.
Rating period (in days, default 40): "
);
let period_input = read_string();
if !period_input.is_empty() {
period_days = period_input
.parse::<f64>()
.unwrap_or_else(|_| error("Input is not a number", 1));
}
// Tau coefficient
print!(
"
\x1b[1mTau Constant\x1b[0m
The tau constant is an internal system constant that roughly represents how
much random chance and luck play a role in game outcomes. In games where match
results are highly predictable, and a player's skill is the sole factor for
whether they will win, the tau constant should be high (0.9 - 1.2). In games
where luck matters, and more improbable victories can occur, the tau constant
should be low (0.2 - 0.4).
The tau constant is set low by default, since even skill-based competitive
video games tend to be on the luck-heavy side.
Tau constant (default 0.4): "
);
let tau_input = read_string();
if !tau_input.is_empty() {
tau = tau_input
.parse::<f64>()
.unwrap_or_else(|_| error("Input is not a number", 1));
}
}
// Done configuring
new_dataset(
connection,
&name,
DatasetMetadata {
start,
end,
last_sync: start,
game_id,
game_name,
game_slug,
country,
state,
set_limit,
decay_rate,
adj_decay_rate,
period: SECS_IN_DAY as f64 * period_days,
tau,
},
)
.expect("Error communicating with SQLite");
println!("\nCreated dataset {}", name);
}
fn dataset_delete(connection: &Connection, name: Option<String>) {
let name = name.unwrap_or_else(|| {
print!("Dataset to delete: ");
read_string()
});
delete_dataset(connection, &name).unwrap_or_else(|_| error("That dataset does not exist!", 1));
}
fn dataset_rename(connection: &Connection, old: Option<String>, new: Option<String>) {
let old = old.unwrap_or_else(|| {
print!("Dataset to rename: ");
read_string()
});
let new = new.unwrap_or_else(|| {
print!("Rename to: ");
read_string()
});
match rename_dataset(connection, &old, &new) {
Ok(()) => (),
Err(sqlite::Error {
code: Some(1),
message: _,
}) => error(&format!("Dataset {:?} does not exist", &old), 1),
Err(sqlite::Error {
code: Some(19),
message: _,
}) => error(&format!("Dataset {:?} already exists", &new), 1),
Err(_) => error("Unknown error occurred", 2),
};
}
// Players
fn player_info(connection: &Connection, dataset: Option<String>, player: String) {
let dataset = dataset.unwrap_or_else(|| String::from("default"));
let PlayerData {
id,
name,
prefix,
discrim,
} = get_player_from_input(connection, player)
.unwrap_or_else(|_| error("Could not find player", 1));
let (deviation, volatility, _) = get_player_rating_data(connection, &dataset, id)
.unwrap_or_else(|_| error("Could not find player", 1));
let (won, lost) = get_player_set_counts(connection, &dataset, id)
.unwrap_or_else(|_| error("Could not find player", 1));
if let Some(pre) = prefix {
print!("\x1b[2m{}\x1b[22m ", pre);
}
println!(
"\x1b[4m\x1b]8;;https://www.start.gg/user/{1}\x1b\\\
\x1b[1m{0}\x1b[22m\x1b]8;;\x1b\\\x1b[0m ({1})",
name, discrim
);
println!("\x1b[1mID:\x1b[0m {}", id.0);
println!(
"\n\x1b[1mSet Count:\x1b[0m {} - {} ({:.3}%)",
won,
lost,
(won as f64 / (won + lost) as f64) * 100.0
);
println!("\n\x1b[1mDeviation:\x1b[0m {}", deviation);
println!("\x1b[1mVolatility:\x1b[0m {}", volatility);
}
fn player_matchup(
connection: &Connection,
dataset: Option<String>,
player1: String,
player2: String,
) {
let dataset = dataset.unwrap_or_else(|| String::from("default"));
let PlayerData {
id: player1,
name: name1,
prefix: prefix1,
discrim: discrim1,
} = get_player_from_input(connection, player1)
.unwrap_or_else(|_| error("Could not find player", 1));
let (deviation1, _, _) = get_player_rating_data(connection, &dataset, player1)
.unwrap_or_else(|_| error("Could not find player", 1));
let PlayerData {
id: player2,
name: name2,
prefix: prefix2,
discrim: discrim2,
} = get_player_from_input(connection, player2)
.unwrap_or_else(|_| error("Could not find player", 1));
let (deviation2, _, _) = get_player_rating_data(connection, &dataset, player2)
.unwrap_or_else(|_| error("Could not find player", 1));
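// Use the stored advantage if these players already share an edge in the
// network; otherwise estimate a hypothetical one from their neighbors.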
let (hypothetical, advantage) = get_advantage(connection, &dataset, player1, player2)
.expect("Error communicating with SQLite")
.map(|x| (false, x))
.unwrap_or_else(|| {
let metadata = get_metadata(connection, &dataset)
.expect("Error communicating with SQLite")
.unwrap_or_else(|| error("Dataset not found", 1));
(
true,
hypothetical_advantage(
connection,
&dataset,
player1,
player2,
metadata.set_limit,
metadata.decay_rate,
metadata.adj_decay_rate,
)
.expect("Error communicating with SQLite"),
)
});
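// Convert the advantage into a win probability via the Glicko expectation
// formula, with g applied to the players' combined deviations.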
let probability = 1.0
/ (1.0
+ f64::exp(
g_func((deviation1 * deviation1 + deviation2 * deviation2).sqrt()) * advantage,
));
let color = ansi_num_color(advantage, 0.2, 2.0);
let other_color = ansi_num_color(-advantage, 0.2, 2.0);
let len1 = prefix1.as_deref().map(|s| s.len() + 1).unwrap_or(0) + name1.len();
let len2 = prefix2.as_deref().map(|s| s.len() + 1).unwrap_or(0) + name2.len();
if let Some(pre) = prefix1 {
print!("\x1b[2m{}\x1b[22m ", pre);
}
print!(
"\x1b[4m\x1b]8;;https://www.start.gg/user/{}\x1b\\\
\x1b[1m{}\x1b[22m\x1b]8;;\x1b\\\x1b[0m - ",
discrim1, name1
);
if let Some(pre) = prefix2 {
print!("\x1b[2m{}\x1b[22m ", pre);
}
println!(
"\x1b[4m\x1b]8;;https://www.start.gg/user/{}\x1b\\\
\x1b[1m{}\x1b[22m\x1b]8;;\x1b\\\x1b[0m",
discrim2, name2
);
println!(
"\x1b[1m\x1b[{4}m{0:>2$}\x1b[0m - \x1b[1m\x1b[{5}m{1:<3$}\x1b[0m",
format!("{:.1}%", probability * 100.0),
format!("{:.1}%", (1.0 - probability) * 100.0),
len1,
len2,
other_color,
color
);
if hypothetical {
println!(
"\n\x1b[1mHypothetical Advantage: \x1b[{1}m{0:+.4}\x1b[0m",
advantage, color
);
} else {
println!(
"\n\x1b[1mAdvantage: \x1b[{1}m{0:+.4}\x1b[0m",
advantage, color
);
let (a, b) = get_matchup_set_counts(connection, &dataset, player1, player2)
.expect("Error communicating with SQLite");
println!(
"\n\x1b[1mSet Count:\x1b[0m {} - {} ({:.3}% - {:.3}%)",
a,
b,
(a as f64 / (a + b) as f64) * 100.0,
(b as f64 / (a + b) as f64) * 100.0
);
}
}
// Sync
fn sync(connection: &Connection, auth: String, datasets: Vec<String>, all: bool) {
let all_datasets = list_dataset_names(connection).unwrap();
let datasets = if all {
all_datasets
} else if datasets.is_empty() {
if all_datasets.is_empty() {
print!("No datasets exist; create one? (y/n) ");
if let Some('y') = read_string().chars().next() {
dataset_new(connection, auth.clone(), Some(String::from("default")));
vec![String::from("default")]
} else {
error("No datasets specified and no default dataset", 1)
}
} else if all_datasets.iter().any(|x| x == "default") {
vec![String::from("default")]
} else {
error("No datasets specified and no default dataset", 1);
}
} else {
datasets
};
let current_time = current_time();
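// Sync each dataset up to its configured end date, or up to the present if none is set.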
for dataset in datasets {
let dataset_metadata = get_metadata(connection, &dataset)
.expect("Error communicating with SQLite")
.unwrap_or_else(|| error(&format!("Dataset {} does not exist!", dataset), 1));
let before = dataset_metadata
.end
.map(|end| min(end, current_time))
.unwrap_or(current_time);
sync_dataset(connection, &dataset, dataset_metadata, before, &auth)
.expect("Error communicating with SQLite");
update_last_sync(connection, &dataset, before).expect("Error communicating with SQLite");
}
}
fn ranking_create(connection: &Connection, dataset: Option<String>) {
let dataset = dataset.unwrap_or_else(|| String::from("default"));
}

View file

@ -28,7 +28,6 @@ pub fn get_auth_token(config_dir: &Path) -> String {
Err(VarError::NotUnicode(_)) => error("Invalid authorization key", 2),
Err(VarError::NotPresent) => {
let mut auth_file = config_dir.to_owned();
auth_file.push("startrnr");
auth_file.push("auth.txt");
read_to_string(auth_file)
.ok()

View file

@ -1,384 +0,0 @@
use std::f64::consts::PI;
use std::thread::sleep;
use std::time::Duration;
use crate::database::*;
use crate::error;
use crate::queries::*;
use itertools::Itertools;
use sqlite::*;
// Glicko-2 system calculation
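// The Glicko-2 g function: scales a result's impact down as the opponent's
// rating deviation grows.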
pub fn g_func(dev: f64) -> f64 {
1.0 / (1.0 + 3.0 * dev * dev / PI / PI).sqrt()
}
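// Grow a deviation with inactivity: sqrt(dev^2 + t * vol^2) after t elapsed rating periods.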
fn time_adjust(periods: f64, old_dev_sq: f64, volatility: f64) -> f64 {
(old_dev_sq + periods * volatility * volatility).sqrt()
}
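// Find a root of `fun` between a and b using the Illinois variant of regula
// falsi; used below to solve the Glicko-2 volatility equation.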
fn illinois_optimize(fun: impl Fn(f64) -> f64, mut a: f64, mut b: f64) -> f64 {
let mut f_a = fun(a);
let mut f_b = fun(b);
while (b - a).abs() > 1e-6 {
let c = a + (a - b) * f_a / (f_b - f_a);
let f_c = fun(c);
if f_c * f_b > 0.0 {
f_a = f_a / 2.0;
} else {
a = b;
f_a = f_b;
}
b = c;
f_b = f_c;
}
a
}
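// Compute one player's Glicko-2 update from a single set: the adjustment to
// apply to their advantage edges, plus their new deviation and volatility.
// The advantage stands in for the rating difference of standard Glicko-2.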
fn glicko_adjust(
advantage: f64,
deviation: f64,
volatility: f64,
other_deviation: f64,
won: bool,
time: u64,
metadata: &DatasetMetadata,
) -> (f64, f64, f64) {
let period = metadata.period;
let tau = metadata.tau;
let g_val = g_func(other_deviation);
let exp_val = 1.0 / (1.0 + f64::exp(-g_val * advantage));
let variance = 1.0 / (g_val * g_val * exp_val * (1.0 - exp_val));
let score = if won { 1.0 } else { 0.0 };
let delta = variance * g_val * (score - exp_val);
let delta_sq = delta * delta;
let dev_sq = deviation * deviation;
let a = (volatility * volatility).ln();
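// The volatility iteration function f(x) with x = ln(vol^2); its root gives
// the new volatility via exp(root / 2).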
let vol_fn = |x| {
let ex = f64::exp(x);
let subf = dev_sq + variance + ex;
((ex * (delta_sq - dev_sq - variance - ex)) / 2.0 / subf / subf) - (x - a) / tau / tau
};
let initial_b = if delta_sq > dev_sq + variance {
(delta_sq - dev_sq - variance).ln()
} else {
// Per Glicko-2, walk downward from a in steps of tau until f becomes
// non-negative, and use that argument as the lower bracket for the root.
(1..)
.map(|k| a - k as f64 * tau)
.find(|&x| vol_fn(x) >= 0.0)
.unwrap()
};
let vol_new = f64::exp(illinois_optimize(vol_fn, a, initial_b) / 2.0);
let dev_time = time_adjust(time as f64 / period, dev_sq, vol_new);
let dev_new = 1.0 / (1.0 / dev_time / dev_time + 1.0 / variance).sqrt();
let adjust = dev_new * dev_new * g_val * (score - exp_val);
(adjust, dev_new, vol_new)
}
// Extract set data
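// Fetch every set in an event, paging through the start.gg API and sleeping
// 700ms between requests (presumably to stay under the API rate limit).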
fn get_event_sets(event: EventId, auth: &str) -> Option<Vec<SetData>> {
sleep(Duration::from_millis(700));
let sets_response = run_query::<EventSets, _>(EventSetsVars { event, page: 1 }, auth)?;
let pages = sets_response.pages;
if pages == 0 {
Some(vec![])
} else if pages == 1 {
Some(sets_response.sets)
} else {
let mut sets = sets_response.sets;
for page in 2..=pages {
println!(" (Page {})", page);
let next_response = run_query::<EventSets, _>(
EventSetsVars {
event,
page: page as i32,
},
auth,
)?;
sleep(Duration::from_millis(700));
sets.extend(next_response.sets);
}
Some(sets)
}
}
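// Collect all events from tournaments between the dataset's last sync and
// `before`, paging by tournament time and then deduplicating.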
fn get_tournament_events(
metadata: &DatasetMetadata,
before: Timestamp,
auth: &str,
) -> Option<Vec<EventData>> {
println!("Accessing tournaments...");
let mut after = metadata.last_sync;
let tour_response = run_query::<TournamentEvents, _>(
TournamentEventsVars {
after_date: after,
before_date: before,
game_id: metadata.game_id,
country: metadata.country.as_deref(),
state: metadata.state.as_deref(),
},
auth,
)?;
let mut cont = !tour_response.is_empty();
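// Advance the paging cursor to the last tournament's time; if every tournament
// on the page shares the same time, bump it by one second so the next query makes progress.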
after = if tour_response.iter().any(|tour| tour.time != after) {
tour_response.last().unwrap().time
} else {
Timestamp(after.0 + 1)
};
let mut tournaments = tour_response;
let mut page: u64 = 1;
while cont {
page += 1;
println!(" (Page {})", page);
let next_response = run_query::<TournamentEvents, _>(
TournamentEventsVars {
after_date: after,
before_date: before,
game_id: metadata.game_id,
country: metadata.country.as_deref(),
state: metadata.state.as_deref(),
},
auth,
)?;
cont = !next_response.is_empty();
after = if next_response.iter().any(|tour| tour.time != after) {
next_response.last().unwrap().time
} else {
Timestamp(after.0 + 1)
};
tournaments.extend(next_response);
}
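// Adjacent pages can overlap at the cursor time, so drop duplicate tournaments
// before collecting their events.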
println!("Deduplicating...");
Some(
tournaments
.into_iter()
.group_by(|tour| tour.time)
.into_iter()
.flat_map(|(_, group)| group.into_iter().unique_by(|tour| tour.id))
.flat_map(|tour| tour.events)
.collect::<Vec<_>>(),
)
}
// Dataset syncing
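// Apply one set's result: update both players' deviations and volatilities,
// record the set, and adjust the advantage edge between the two players.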
fn update_from_set(
connection: &Connection,
dataset: &str,
metadata: &DatasetMetadata,
event_time: Timestamp,
results: SetData,
) -> sqlite::Result<()> {
let players_data = results.teams;
// Fall back to event time if set time is not recorded
let time = results.time.unwrap_or(event_time);
add_players(connection, dataset, &players_data, time)?;
// Non-singles matches are currently not supported
if players_data.len() != 2 || players_data[0].len() != 1 || players_data[1].len() != 1 {
return Ok(());
}
let mut it = players_data.into_iter();
let player1 = it.next().unwrap()[0].id;
let player2 = it.next().unwrap()[0].id;
drop(it);
let (deviation1, volatility1, last_played1) =
get_player_rating_data(connection, dataset, player1)?;
let time1 = time.0.checked_sub(last_played1.0).unwrap_or(0);
let (deviation2, volatility2, last_played2) =
get_player_rating_data(connection, dataset, player2)?;
let time2 = time.0.checked_sub(last_played2.0).unwrap_or(0);
let advantage = match get_advantage(connection, dataset, player1, player2) {
Err(e) => Err(e)?,
Ok(None) => initialize_edge(
connection,
dataset,
player1,
player2,
metadata.set_limit,
metadata.decay_rate,
metadata.adj_decay_rate,
)?,
Ok(Some(adv)) => adv,
};
let (adjust1, dev_new1, vol_new1) = glicko_adjust(
-advantage,
deviation1,
volatility1,
deviation2,
results.winner == 0,
time1,
metadata,
);
let (adjust2, dev_new2, vol_new2) = glicko_adjust(
advantage,
deviation2,
volatility2,
deviation1,
results.winner == 1,
time2,
metadata,
);
// Set minimum deviation level
let dev_new1 = f64::max(dev_new1, 0.2);
let dev_new2 = f64::max(dev_new2, 0.2);
set_player_data(
connection,
dataset,
player1,
time,
dev_new1,
vol_new1,
results.winner == 0,
&results.id,
)?;
set_player_data(
connection,
dataset,
player2,
time,
dev_new2,
vol_new2,
results.winner == 1,
&results.id,
)?;
let (sets1, sets2) = get_matchup_set_counts(connection, dataset, player1, player2)?;
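// Use the adjusted decay rate until these two players have played at least `set_limit` sets.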
let decay_rate = if sets1 + sets2 >= metadata.set_limit {
metadata.decay_rate
} else {
metadata.adj_decay_rate
};
adjust_advantages(
connection,
dataset,
results.id,
player1,
player2,
results.winner,
adjust1,
adjust2,
decay_rate,
)
}
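// Pull all events since the last sync and replay each event's sets in time
// order, inside a single SQLite transaction.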
pub fn sync_dataset(
connection: &Connection,
dataset: &str,
metadata: DatasetMetadata,
before: Timestamp,
auth: &str,
) -> sqlite::Result<()> {
let events = get_tournament_events(&metadata, before, auth)
.unwrap_or_else(|| error("Could not access start.gg", 1));
connection.execute("BEGIN;")?;
let num_events = events.len();
for (i, event) in events.into_iter().enumerate() {
println!(
"Accessing sets from event ID {}... ({}/{})",
event.id.0,
i + 1,
num_events
);
add_event(connection, event.id, &event.slug)?;
let mut sets =
get_event_sets(event.id, auth).unwrap_or_else(|| error("Could not access start.gg", 1));
if sets.is_empty() {
println!(" No valid sets");
} else {
println!(" Updating ratings from event...");
sets.sort_by_key(|set| set.time);
sets.into_iter().try_for_each(|set| {
add_set(connection, &set.id, event.id)?;
update_from_set(connection, dataset, &metadata, event.time, set)
})?;
}
}
connection.execute("COMMIT;")
}
#[cfg(test)]
mod tests {
use super::*;
use crate::database::tests::*;
#[test]
fn glicko_single() -> sqlite::Result<()> {
let connection = mock_datasets()?;
new_dataset(&connection, "test", metadata())?;
let players = players(2).into_iter().map(|x| vec![x]).collect();
add_players(&connection, "test", &players, Timestamp(0))?;
update_from_set(
&connection,
"test",
&metadata(),
Timestamp(0),
SetData {
id: SetId(StringOrInt::Int(0)),
time: None,
teams: players,
winner: 0,
},
)?;
println!(
"{:?}",
get_advantage(&connection, "test", PlayerId(1), PlayerId(2))?.unwrap()
);
println!(
"{:?}",
get_player_rating_data(&connection, "test", PlayerId(1))
);
println!(
"{:?}",
get_player_rating_data(&connection, "test", PlayerId(2))
);
Ok(())
}
}

View file

@ -8,6 +8,7 @@ use crate::queries::{PlayerData, PlayerId, Timestamp};
pub const SECS_IN_HR: u64 = 3600;
pub const SECS_IN_DAY: u64 = SECS_IN_HR * 24;
pub const SECS_IN_WEEK: u64 = SECS_IN_DAY * 7;
pub const SECS_IN_YEAR: u64 = SECS_IN_DAY * 365 + SECS_IN_HR * 6;
pub fn error(msg: &str, code: i32) -> ! {
eprintln!("\nERROR: {}", msg);
@ -41,7 +42,7 @@ pub fn read_string() -> String {
line.trim().to_owned()
}
pub fn ansi_num_color(num: f64, threshold1: f64, threshold2: f64) -> &'static str {
pub fn ansi_num_color(num: f64, threshold1: f64, threshold2: f64) -> (&'static str, &'static str) {
let sign = num > 0.0;
let num_abs = num.abs();
let severity = if num_abs < threshold1 {
@ -53,11 +54,11 @@ pub fn ansi_num_color(num: f64, threshold1: f64, threshold2: f64) -> &'static st
};
match (sign, severity) {
(false, 1) => "31",
(true, 1) => "32",
(false, 2) => "91",
(true, 2) => "92",
_ => "39",
(false, 1) => ("31", "32"),
(true, 1) => ("32", "31"),
(false, 2) => ("91", "92"),
(true, 2) => ("92", "91"),
_ => ("39", "39"),
}
}
@ -79,12 +80,16 @@ pub fn parse_player_input(input: String) -> PlayerInput {
}
}
pub fn get_player_from_input(connection: &Connection, input: String) -> sqlite::Result<PlayerData> {
pub fn get_player_from_input(
connection: &Connection,
dataset: &str,
input: String,
) -> sqlite::Result<PlayerData> {
match parse_player_input(input) {
PlayerInput::Id(id) => get_player(connection, id),
PlayerInput::Discrim(discrim) => get_player_from_discrim(connection, &discrim),
PlayerInput::Id(id) => get_player(connection, dataset, id),
PlayerInput::Discrim(discrim) => get_player_from_discrim(connection, dataset, &discrim),
PlayerInput::Name(name) => {
let players = match_player_name(connection, &name)?;
let players = match_player_name(connection, dataset, &name)?;
if players.is_empty() {
error(&format!("Player {:?} not found", name), 1);