2023-09-23 03:01:36 -04:00
|
|
|
#![feature(iterator_try_collect)]
|
2023-10-04 17:58:54 -04:00
|
|
|
#![feature(extend_one)]
|
2023-09-23 03:01:36 -04:00
|
|
|
|
2023-09-26 22:36:03 -04:00
|
|
|
use clap::{Parser, Subcommand};
|
2023-09-23 03:01:36 -04:00
|
|
|
use std::io::{self, Write};
|
2023-09-30 01:43:33 -04:00
|
|
|
use std::path::PathBuf;
|
2023-10-03 23:21:31 -04:00
|
|
|
use std::process::exit;
|
2023-09-23 03:01:36 -04:00
|
|
|
|
|
|
|
mod queries;
|
|
|
|
use queries::*;
|
|
|
|
mod datasets;
|
|
|
|
use datasets::*;
|
2023-09-30 18:16:00 -04:00
|
|
|
mod sync;
|
|
|
|
use sync::*;
|
2023-09-23 03:01:36 -04:00
|
|
|
|
2023-10-01 14:55:15 -04:00
|
|
|
/// Prints an error message and terminates the process with the given exit code.
///
/// Fix: diagnostics now go to stderr (`eprintln!`) instead of stdout, so error
/// text is not mixed into output that callers may be piping elsewhere.
/// (`issue` intentionally keeps stdout, as it reports non-error status.)
pub fn error(msg: &str, code: i32) -> ! {
    eprintln!("\nERROR: {}", msg);
    exit(code)
}
|
|
|
|
|
2023-10-02 20:29:39 -04:00
|
|
|
/// Prints a non-error status message to stdout and terminates the process
/// with the given exit code (e.g. code 0 for benign "nothing to do" cases).
pub fn issue(msg: &str, code: i32) -> ! {
    println!("\n{}", msg);
    exit(code)
}
|
|
|
|
|
2023-09-26 22:36:03 -04:00
|
|
|
/// ## CLI Structs
|
|
|
|
|
|
|
|
// Top-level CLI definition for the StartRNR binary, parsed via clap's derive
// API. Plain `//` comments are used deliberately: `///` doc comments would be
// picked up by clap and could change the generated help text.
#[derive(Parser)]
#[command(name = "StartRNR")]
#[command(author = "Kiana Sheibani <kiana.a.sheibani@gmail.com>")]
#[command(version = "0.2.0")]
#[command(about = "StartRNR - Elo rating calculator for start.gg tournaments", long_about = None)]
struct Cli {
    // The action to perform; see `Subcommands`.
    #[command(subcommand)]
    subcommand: Subcommands,

    // start.gg API token. Per the long_help, it may alternatively come from
    // the AUTH_TOKEN environment variable or '<CONFIG_DIR>/auth.txt'
    // (resolved by `get_auth_token` at the call sites).
    #[arg(
        short = 'A',
        long = "auth",
        value_name = "TOKEN",
        global = true,
        help = "Authentication token",
        long_help = "The authentication token for accessing start.gg.
A token can be specified using this argument, in the environment variable
AUTH_TOKEN, or in a text file '<CONFIG_DIR>/auth.txt'."
    )]
    auth_token: Option<String>,

    // NOTE(review): nothing in this file reads `config_dir` — `main` and the
    // subcommand handlers all call `dirs::config_dir()` directly. Confirm the
    // --config flag is honored somewhere else, or wire it through.
    #[arg(
        short,
        long = "config",
        value_name = "DIR",
        global = true,
        help = "Config directory",
        long_help = "This flag overrides the default config directory.
If this directory does not exist, it will be created and a database file will
be initialized within it."
    )]
    config_dir: Option<PathBuf>,
}
|
|
|
|
|
|
|
|
// Top-level actions. `//` comments are used so clap's derive does not absorb
// them into the generated help output.
#[derive(Subcommand)]
enum Subcommands {
    // Dataset management (list / new / delete); see `DatasetSC`.
    #[command(about = "Manipulate stored datasets")]
    Dataset {
        #[command(subcommand)]
        subcommand: DatasetSC,
    },
    #[command(
        about = "Sync player ratings",
        long_about = "Pull recent tournament data off of start.gg and use it to
update the network. This command will automatically keep track of the last time each
dataset was synced to ensure that each tournament is only accounted for once."
    )]
    Sync {
        // Names of datasets to sync; empty means "default" (see long_help).
        #[arg(
            help = "The datasets to sync",
            long_help = "A list of datasets to sync.
If no datasets are given, then the dataset 'default' is synced. This dataset is
created if it does not already exist."
        )]
        datasets: Vec<String>,
        // When set, every stored dataset is synced and the list is ignored.
        #[arg(short, long, help = "Sync all stored databases")]
        all: bool,
    },
}
|
|
|
|
|
|
|
|
// Subcommands of `dataset`. A `None` name means the handler prompts for the
// name interactively (see `dataset_new` / `dataset_delete`).
#[derive(Subcommand)]
enum DatasetSC {
    #[command(about = "List datasets")]
    List,
    #[command(about = "Create a new dataset")]
    New { name: Option<String> },
    #[command(about = "Delete a dataset")]
    Delete { name: Option<String> },
}
|
|
|
|
|
2023-09-23 03:01:36 -04:00
|
|
|
fn main() {
|
2023-09-26 22:36:03 -04:00
|
|
|
let cli = Cli::parse();
|
|
|
|
|
|
|
|
match cli.subcommand {
|
|
|
|
Subcommands::Dataset {
|
|
|
|
subcommand: DatasetSC::List,
|
2023-09-27 15:19:28 -04:00
|
|
|
} => dataset_list(),
|
|
|
|
Subcommands::Dataset {
|
|
|
|
subcommand: DatasetSC::New { name },
|
2023-10-02 20:29:39 -04:00
|
|
|
} => dataset_new(name, cli.auth_token),
|
2023-09-30 00:22:48 -04:00
|
|
|
Subcommands::Dataset {
|
|
|
|
subcommand: DatasetSC::Delete { name },
|
|
|
|
} => dataset_delete(name),
|
2023-09-30 01:43:33 -04:00
|
|
|
|
2023-09-30 05:13:52 -04:00
|
|
|
Subcommands::Sync { datasets, all } => sync(datasets, all, cli.auth_token),
|
2023-09-26 22:36:03 -04:00
|
|
|
}
|
2023-09-27 15:19:28 -04:00
|
|
|
}
|
2023-09-26 22:36:03 -04:00
|
|
|
|
2023-09-27 15:19:28 -04:00
|
|
|
fn dataset_list() {
|
2023-10-01 14:55:15 -04:00
|
|
|
let config_dir = dirs::config_dir().expect("Could not determine config directory");
|
2023-09-23 03:01:36 -04:00
|
|
|
|
2023-10-01 14:55:15 -04:00
|
|
|
let connection =
|
2023-10-14 00:13:00 -04:00
|
|
|
open_datasets(&config_dir).unwrap_or_else(|_| error("Could not open datasets file", 2));
|
2023-10-01 14:55:15 -04:00
|
|
|
let datasets = list_datasets(&connection).expect("Error communicating with SQLite");
|
2023-09-26 22:36:03 -04:00
|
|
|
|
2023-10-03 01:26:25 -04:00
|
|
|
println!();
|
|
|
|
for (name, metadata) in datasets {
|
2023-10-14 00:13:00 -04:00
|
|
|
if let Some(country) = metadata.country {
|
|
|
|
if let Some(state) = metadata.state {
|
|
|
|
println!(
|
|
|
|
"{} - {} (in {}, {})",
|
|
|
|
name, metadata.game_name, country, state
|
|
|
|
);
|
|
|
|
} else {
|
|
|
|
println!("{} - {} (in {})", name, metadata.game_name, country);
|
|
|
|
}
|
2023-10-03 01:26:25 -04:00
|
|
|
} else {
|
|
|
|
println!("{} - {}", name, metadata.game_name);
|
|
|
|
}
|
|
|
|
}
|
2023-09-23 03:01:36 -04:00
|
|
|
}
|
2023-09-26 22:36:03 -04:00
|
|
|
|
2023-10-01 14:55:15 -04:00
|
|
|
fn read_string() -> String {
|
|
|
|
let mut line = String::new();
|
2023-10-02 20:29:39 -04:00
|
|
|
io::stdout()
|
|
|
|
.flush()
|
|
|
|
.unwrap_or_else(|_| error("Could not access stdout", 2));
|
2023-10-01 14:55:15 -04:00
|
|
|
io::stdin()
|
|
|
|
.read_line(&mut line)
|
2023-10-02 20:29:39 -04:00
|
|
|
.unwrap_or_else(|_| error("Could not read from stdin", 2));
|
2023-10-01 14:55:15 -04:00
|
|
|
line.trim().to_owned()
|
|
|
|
}
|
|
|
|
|
2023-10-02 20:29:39 -04:00
|
|
|
fn dataset_new(name: Option<String>, auth_token: Option<String>) {
|
2023-10-01 14:55:15 -04:00
|
|
|
let config_dir = dirs::config_dir().expect("Could not determine config directory");
|
2023-09-27 15:19:28 -04:00
|
|
|
|
2023-10-02 20:29:39 -04:00
|
|
|
let auth = auth_token
|
|
|
|
.or_else(|| get_auth_token(&config_dir))
|
|
|
|
.unwrap_or_else(|| error("Access token not provided", 1));
|
|
|
|
|
2023-10-13 19:13:17 -04:00
|
|
|
// Name
|
|
|
|
|
2023-09-27 15:19:28 -04:00
|
|
|
let name = name.unwrap_or_else(|| {
|
|
|
|
print!("Name of new dataset: ");
|
2023-10-01 14:55:15 -04:00
|
|
|
read_string()
|
2023-09-27 15:19:28 -04:00
|
|
|
});
|
|
|
|
|
2023-10-13 19:13:17 -04:00
|
|
|
// Game
|
|
|
|
|
2023-10-02 20:29:39 -04:00
|
|
|
print!("Search games: ");
|
|
|
|
let games = run_query::<VideogameSearch, _>(
|
|
|
|
VideogameSearchVars {
|
|
|
|
name: &read_string(),
|
|
|
|
},
|
|
|
|
&auth,
|
|
|
|
)
|
|
|
|
.unwrap_or_else(|| error("Could not access start.gg", 1));
|
|
|
|
|
|
|
|
if games.is_empty() {
|
|
|
|
issue("No games found!", 0);
|
|
|
|
}
|
|
|
|
|
|
|
|
println!("\nSearch results:");
|
|
|
|
for (i, game) in games.iter().enumerate() {
|
|
|
|
println!("{} - {}", i, game.name);
|
|
|
|
}
|
|
|
|
|
|
|
|
print!("\nGame to track ratings for (0-{}): ", games.len() - 1);
|
|
|
|
let index = read_string()
|
|
|
|
.parse::<usize>()
|
|
|
|
.unwrap_or_else(|_| error("Not an integer", 1));
|
|
|
|
if index >= games.len() {
|
|
|
|
error("Out of range!", 1);
|
|
|
|
}
|
|
|
|
|
2023-10-03 01:25:35 -04:00
|
|
|
let VideogameData {
|
|
|
|
id: game_id,
|
|
|
|
name: game_name,
|
|
|
|
} = games[index].clone();
|
2023-10-02 20:29:39 -04:00
|
|
|
|
2023-10-13 19:13:17 -04:00
|
|
|
// Location
|
|
|
|
|
2023-10-14 00:13:00 -04:00
|
|
|
print!(
|
|
|
|
"
|
|
|
|
\x1b[4mCountry\x1b[0m
|
|
|
|
|
|
|
|
Enter the two-letter code for the country you want to track ratings in, e.g.
|
|
|
|
\"US\" for the United States. See \x1b[1m\x1b]8;;https://www.ups.com/worldshiphelp/\
|
|
|
|
WSA/ENU/AppHelp/mergedProjects/CORE/Codes/Country_Territory_and_Currency_Codes.htm\
|
|
|
|
\x1b\\this site\x1b]8;;\x1b\\\x1b[0m for a list of these codes.
|
|
|
|
If no code is entered, then the dataset will track all players globally.
|
|
|
|
|
|
|
|
Country to track ratings for (leave empty for none): "
|
|
|
|
);
|
2023-10-13 19:13:17 -04:00
|
|
|
let country = {
|
2023-10-14 00:13:00 -04:00
|
|
|
let mut string = read_string();
|
2023-10-13 19:13:17 -04:00
|
|
|
if string.is_empty() {
|
|
|
|
None
|
2023-10-14 00:13:00 -04:00
|
|
|
} else if string.len() == 2 && string.chars().all(|c| c.is_ascii_alphabetic()) {
|
|
|
|
string.make_ascii_uppercase();
|
2023-10-13 19:13:17 -04:00
|
|
|
Some(string)
|
2023-10-14 00:13:00 -04:00
|
|
|
} else {
|
|
|
|
error("Input is not a two-letter code", 1);
|
2023-10-13 19:13:17 -04:00
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
let state = if country.as_ref().is_some_and(|s| s == "US" || s == "CA") {
|
2023-10-14 00:13:00 -04:00
|
|
|
print!(
|
|
|
|
"
|
|
|
|
\x1b[4mState/Province\x1b[0m
|
|
|
|
|
|
|
|
Enter the two-letter code for the US state or Canadian province you want to track
|
|
|
|
ratings in, e.g. \"CA\" for California. See \x1b[1m\x1b]8;;https://www.ups.com/worldshiphelp/\
|
|
|
|
WSA/ENU/AppHelp/mergedProjects/CORE/Codes/State_Province_Codes.htm\x1b\\this site\
|
|
|
|
\x1b]8;;\x1b\\\x1b[0m for a list of these codes.
|
|
|
|
If no code is entered, then the dataset will track all players within the country.
|
|
|
|
|
|
|
|
State/province to track ratings for (leave empty for none): "
|
|
|
|
);
|
|
|
|
let mut string = read_string();
|
2023-10-13 19:13:17 -04:00
|
|
|
if string.is_empty() {
|
|
|
|
None
|
2023-10-14 00:13:00 -04:00
|
|
|
} else if string.len() == 2 && string.chars().all(|c| c.is_ascii_alphabetic()) {
|
|
|
|
string.make_ascii_uppercase();
|
2023-10-13 19:13:17 -04:00
|
|
|
Some(string)
|
2023-10-14 00:13:00 -04:00
|
|
|
} else {
|
|
|
|
error("Input is not a two-letter code", 1);
|
2023-10-13 19:13:17 -04:00
|
|
|
}
|
|
|
|
} else {
|
|
|
|
None
|
|
|
|
};
|
|
|
|
|
2023-10-14 00:13:00 -04:00
|
|
|
// Advanced Options
|
|
|
|
|
|
|
|
// Defaults
|
|
|
|
let mut decay_rate = 0.5;
|
|
|
|
let mut period_days = 30.0;
|
|
|
|
let mut tau = 0.2;
|
|
|
|
|
|
|
|
print!("\nConfigure advanced options? (y/n) ");
|
|
|
|
if let Some('y') = read_string().chars().next() {
|
|
|
|
// Decay Rate
|
|
|
|
|
|
|
|
print!(
|
|
|
|
"
|
|
|
|
\x1b[4mNetwork Decay Rate\x1b[0m
|
|
|
|
|
|
|
|
The network decay rate is a number between 0 and 1 that controls how the
|
|
|
|
advantage network reacts to player wins and losses. If the decay rate is 1,
|
|
|
|
then it is assumed that a player's skill against one opponent always carries
|
|
|
|
over to all other opponents. If the decay rate is 0, then all player match-ups
|
|
|
|
are assumed to be independent of each other.
|
|
|
|
|
|
|
|
Network decay rate (default 0.5): "
|
|
|
|
);
|
|
|
|
let decay_rate_input = read_string();
|
|
|
|
if !decay_rate_input.is_empty() {
|
|
|
|
decay_rate = decay_rate_input
|
|
|
|
.parse::<f64>()
|
|
|
|
.unwrap_or_else(|_| error("Not a number", 1));
|
|
|
|
}
|
|
|
|
|
|
|
|
// Rating Period
|
|
|
|
|
|
|
|
print!(
|
|
|
|
"
|
|
|
|
\x1b[4mRating Period\x1b[0m
|
|
|
|
|
|
|
|
The rating period is an interval of time that dictates how player ratings change
|
|
|
|
during inactivity. Ideally the rating period should be somewhat long, long
|
|
|
|
enough to expect almost every player in the dataset to have played at least a
|
|
|
|
few sets.
|
|
|
|
|
|
|
|
Rating period (in days, default 30): "
|
|
|
|
);
|
|
|
|
let period_input = read_string();
|
|
|
|
if !period_input.is_empty() {
|
|
|
|
period_days = period_input
|
|
|
|
.parse::<f64>()
|
|
|
|
.unwrap_or_else(|_| error("Not a number", 1));
|
|
|
|
}
|
|
|
|
|
|
|
|
// Tau coefficient
|
|
|
|
|
|
|
|
print!(
|
|
|
|
"
|
|
|
|
\x1b[4mTau Constant\x1b[0m
|
|
|
|
|
|
|
|
The tau constant is an internal system constant that roughly represents how
|
|
|
|
much random chance and luck play a role in game outcomes. In games where match
|
|
|
|
results are highly predictable, and a player's skill is the sole factor for
|
|
|
|
whether they will win, the tau constant should be high (0.9 - 1.2). In games
|
|
|
|
where luck matters, and more improbable victories can occur, the tau constant
|
|
|
|
should be low (0.2 - 0.4).
|
|
|
|
|
|
|
|
The tau constant is set low by default, since skill-based competitive video
|
|
|
|
games tend to be on the more luck-heavy side.
|
|
|
|
|
|
|
|
Tau constant (default 0.2): "
|
|
|
|
);
|
|
|
|
let tau_input = read_string();
|
|
|
|
if !tau_input.is_empty() {
|
|
|
|
tau = tau_input
|
|
|
|
.parse::<f64>()
|
|
|
|
.unwrap_or_else(|_| error("Not a number", 1));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-10-13 19:13:17 -04:00
|
|
|
// Done configuring
|
|
|
|
|
2023-10-01 14:55:15 -04:00
|
|
|
let connection =
|
2023-10-14 00:13:00 -04:00
|
|
|
open_datasets(&config_dir).unwrap_or_else(|_| error("Could not open datasets file", 2));
|
2023-10-02 20:29:39 -04:00
|
|
|
new_dataset(
|
|
|
|
&connection,
|
|
|
|
&name,
|
2023-10-03 01:26:25 -04:00
|
|
|
DatasetMetadata {
|
2023-10-02 20:29:39 -04:00
|
|
|
last_sync: Timestamp(1),
|
|
|
|
game_id,
|
2023-10-03 01:25:35 -04:00
|
|
|
game_name,
|
2023-10-13 19:13:17 -04:00
|
|
|
country,
|
|
|
|
state,
|
2023-10-14 00:13:00 -04:00
|
|
|
decay_rate,
|
|
|
|
period: (3600 * 24) as f64 * period_days,
|
|
|
|
tau,
|
2023-10-02 20:29:39 -04:00
|
|
|
},
|
|
|
|
)
|
|
|
|
.expect("Error communicating with SQLite");
|
2023-10-14 00:13:00 -04:00
|
|
|
|
|
|
|
println!("\nCreated dataset {}", name);
|
2023-09-27 15:19:28 -04:00
|
|
|
}
|
2023-09-30 00:22:48 -04:00
|
|
|
|
|
|
|
fn dataset_delete(name: Option<String>) {
|
2023-10-01 14:55:15 -04:00
|
|
|
let config_dir = dirs::config_dir().expect("Could not determine config directory");
|
2023-09-30 00:22:48 -04:00
|
|
|
|
|
|
|
let name = name.unwrap_or_else(|| {
|
|
|
|
print!("Dataset to delete: ");
|
2023-10-01 14:55:15 -04:00
|
|
|
read_string()
|
2023-09-30 00:22:48 -04:00
|
|
|
});
|
|
|
|
|
2023-10-01 14:55:15 -04:00
|
|
|
let connection =
|
2023-10-14 00:13:00 -04:00
|
|
|
open_datasets(&config_dir).unwrap_or_else(|_| error("Could not open datasets file", 2));
|
|
|
|
delete_dataset(&connection, &name).unwrap_or_else(|_| error("That dataset does not exist!", 1));
|
2023-09-30 00:22:48 -04:00
|
|
|
}
|
2023-09-30 01:43:33 -04:00
|
|
|
|
2023-09-30 05:13:52 -04:00
|
|
|
fn sync(datasets: Vec<String>, all: bool, auth_token: Option<String>) {
|
2023-09-30 01:43:33 -04:00
|
|
|
let config_dir = dirs::config_dir().unwrap();
|
|
|
|
|
2023-10-01 14:55:15 -04:00
|
|
|
let auth = auth_token
|
|
|
|
.or_else(|| get_auth_token(&config_dir))
|
|
|
|
.unwrap_or_else(|| error("Access token not provided", 1));
|
2023-09-30 01:43:33 -04:00
|
|
|
|
2023-10-01 14:55:15 -04:00
|
|
|
let connection =
|
2023-10-14 00:13:00 -04:00
|
|
|
open_datasets(&config_dir).unwrap_or_else(|_| error("Could not open datasets file", 2));
|
2023-09-30 01:43:33 -04:00
|
|
|
|
2023-10-03 23:21:31 -04:00
|
|
|
let all_datasets = list_dataset_names(&connection).unwrap();
|
|
|
|
|
2023-10-01 14:55:15 -04:00
|
|
|
#[allow(unused_must_use)]
|
2023-09-30 05:13:52 -04:00
|
|
|
let datasets = if all {
|
2023-10-03 23:21:31 -04:00
|
|
|
all_datasets
|
2023-10-05 01:47:09 -04:00
|
|
|
} else if datasets.is_empty() {
|
|
|
|
if all_datasets.is_empty() {
|
2023-10-03 23:21:31 -04:00
|
|
|
print!("No datasets exist; create one? (y/n) ");
|
|
|
|
if let Some('y') = read_string().chars().next() {
|
|
|
|
dataset_new(Some(String::from("default")), Some(auth.clone()));
|
|
|
|
vec![String::from("default")]
|
|
|
|
} else {
|
|
|
|
error("No datasets specified and no default dataset", 1)
|
|
|
|
}
|
|
|
|
} else if all_datasets.iter().any(|x| x == "default") {
|
|
|
|
vec![String::from("default")]
|
|
|
|
} else {
|
|
|
|
error("No datasets specified and no default dataset", 1);
|
2023-10-02 20:29:39 -04:00
|
|
|
}
|
2023-09-30 01:43:33 -04:00
|
|
|
} else {
|
2023-09-30 05:13:52 -04:00
|
|
|
datasets
|
2023-09-30 01:43:33 -04:00
|
|
|
};
|
|
|
|
|
2023-09-30 05:13:52 -04:00
|
|
|
for dataset in datasets {
|
2023-10-03 01:26:25 -04:00
|
|
|
let dataset_config = get_metadata(&connection, &dataset)
|
2023-10-01 14:55:15 -04:00
|
|
|
.expect("Error communicating with SQLite")
|
|
|
|
.unwrap_or_else(|| error(&format!("Dataset {} does not exist!", dataset), 1));
|
|
|
|
|
2023-10-05 01:47:09 -04:00
|
|
|
sync_dataset(&connection, &dataset, dataset_config, &auth)
|
|
|
|
.unwrap_or_else(|_| error("Error communicating with SQLite", 2));
|
2023-10-02 20:29:39 -04:00
|
|
|
|
|
|
|
update_last_sync(&connection, &dataset).expect("Error communicating with SQLite");
|
2023-09-30 01:43:33 -04:00
|
|
|
}
|
|
|
|
}
|