import json
import os
from typing import Dict, List, Optional

import platformdirs
import requests_cache
import psycopg2.extras

import constants
import database
from database import conn_manager
from log_setup import logger
from config import config_manager

# Cache HTTP responses so that we do not send too many requests to the website, especially during
# development, where we might frequently re-initialize the database.
session = requests_cache.CachedSession(
    os.path.join(platformdirs.user_cache_dir(constants.APP_NAME), 'hanab.live.requests-cache'),
    urls_expire_after={
        # Game exports will never change, so cache them forever
        'hanab.live/export/*': requests_cache.NEVER_EXPIRE
    }
)


class GameInfo:
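    """Holds the data of a single hanab.live game that we keep track of, as extracted from a history entry."""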
    def __init__(self, game_id: int, num_players: int, variant_id: int, seed: str, score: int,
                 num_turns: int, user_ids: List[int], normalized_usernames: List[str]):
        self.game_id = game_id
        self.num_players = num_players
        self.variant_id = variant_id
        self.seed = seed
        self.score = score
        self.num_turns = num_turns
        self.user_ids = user_ids
        self.normalized_usernames = normalized_usernames


def fetch_games_for_player(username: str, latest_game_id: int) -> List[Dict]:
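    """
    Fetch all games of the given player with an id greater than latest_game_id from hanab.live.

    Returns the parsed JSON response, a list with one entry (dict) per game.
    Raises ConnectionError if the request fails.
    """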
    logger.verbose("Fetching games for username {} more recent than id {}".format(username, latest_game_id))
    url = "https://hanab.live/api/v1/history-full/{}?start={}".format(username, latest_game_id + 1)
    response = session.get(url)
    if response.status_code != 200:
        err_msg = "Failed to fetch games for username {}, requested URL {}".format(username, url)
        logger.error(err_msg)
        raise ConnectionError(err_msg)
    return json.loads(response.text)


def process_game_entry(game_json: Dict, username_dict: Dict, variant_ids: List[int]) -> Optional[GameInfo]:
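    """
    Check a single game entry (as returned by the hanab.live history endpoint) against our requirements
    and convert it into a GameInfo object.

    Returns None if the game is rejected, i.e. if a forbidden option is set, the player count or variant
    is not allowed, or a participant is not registered.

    Illustrative shape of an entry, restricted to the fields accessed below (the values are made up):
        {
            "id": 123456,
            "playerNames": ["Alice", "Bob", "Cathy"],
            "seed": "p3v0s1",
            "score": 25,
            "numTurns": 61,
            "options": {"variantID": 0, ...}
        }
    """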
    logger.debug("Processing entry {}".format(game_json))
    config = config_manager.get_config()
    # Check if the game is one that we accept
    game_id = game_json["id"]
    players = game_json["playerNames"]
    num_players = len(players)
    seed = game_json["seed"]
    score = game_json["score"]
    num_turns = game_json["numTurns"]

    game_options = game_json["options"]
    var_id = game_options["variantID"]

    normalized_usernames = [database.normalize_username(username) for username in players]

    # Check that the game has no special options enabled
    for forbidden_option in constants.FORBIDDEN_GAME_OPTIONS:
        if game_options.get(forbidden_option, False):
            logger.debug("Rejected game {} due to option {} set".format(game_id, forbidden_option))
            return None

    # Check that the player count is within the configured bounds
    if not (config.min_player_count <= num_players <= config.max_player_count):
        logger.debug("Rejected game {} due to invalid number of players ({})".format(game_id, num_players))
        return None

    # Check that the variant is allowed
    if var_id not in variant_ids:
        logger.debug("Rejected game {} due to invalid variant id {}".format(game_id, var_id))
        return None

    # Everything matches, so we can parse the participants now
    user_ids = []  # This will be the list of the (league-specific) user ids of the players of this game.
    for normalized_username in normalized_usernames:
        user_id = username_dict.get(normalized_username, None)
        if user_id is None:
            logger.debug("Rejected game {} due to unregistered participant {}".format(game_id, normalized_username))
            return None
        user_ids.append(user_id)

    return GameInfo(game_id, num_players, var_id, seed, score, num_turns, user_ids, normalized_usernames)


def fetch_games_for_all_players() -> Dict[int, GameInfo]:
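    """
    Fetch all games more recent than the latest downloaded game of each registered player and run them
    through process_game_entry.

    Returns a dict mapping game id -> GameInfo for all accepted games.
    """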
    logger.info("Fetching new games.")
    cur = conn_manager.get_new_cursor()
    cur.execute("SELECT user_accounts.normalized_username, user_accounts.user_id, downloads.latest_game_id "
                "FROM user_accounts "
                "LEFT OUTER JOIN downloads "
                "  ON user_accounts.normalized_username = downloads.normalized_username"
                )
    # This will be a mapping of normalized username -> user id that we build from the DB data
    username_dict = {}

    # This will be a mapping of game id -> JSON data that we get from hanab.live, where we collect all the
    # possibly relevant games now
    games: Dict[int, Dict] = {}
    for username, user_id, latest_game_id in cur.fetchall():
        username_dict[username] = user_id
        # Use the starting id as a fallback if we have not downloaded any games for this player yet.
        if latest_game_id is None:
            latest_game_id = config_manager.get_config().starting_game_id

        player_games = fetch_games_for_player(username, latest_game_id)
        for game in player_games:
            games[game['id']] = game

    allowed_variants = database.get_variant_ids()

    # This will hold the processed games that we will add to the database.
    good_games: Dict[int, GameInfo] = {}

    for game_id, game in games.items():
        game_info = process_game_entry(game, username_dict, allowed_variants)
        if game_info is not None:
            good_games[game_id] = game_info

    return good_games


def store_new_games(games: Dict[int, GameInfo]):
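    """
    Insert the given games and their participants into the database and update the downloads table with
    the latest downloaded game id of each player, committing everything as a single transaction.
    """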
    conn = conn_manager.get_connection()
    cur = conn.cursor()
    games_vals = []
    game_participants_vals = []
    latest_game_ids: Dict[str, int] = {}

    # Now, iterate over all games and convert to tuples for insertion
    for game in sorted(games.values(), key=lambda game_info: game_info.game_id):
        tup = (game.game_id, game.num_players, game.variant_id, game.seed, game.score, game.num_turns)
        games_vals.append(tup)
        for player_id in game.user_ids:
            tup = (game.game_id, player_id)
            game_participants_vals.append(tup)

        # Note that this gives the maximum in the end since we process the games in order
        for normalized_username in game.normalized_usernames:
            latest_game_ids[normalized_username] = game.game_id

    # Do the insertions
    # Notice that on conflict we can just do nothing: in that case, we already have the game in the DB for some reason
    # (for example, because we forced a download refresh)
    psycopg2.extras.execute_values(
        cur,
        "INSERT INTO games (id, num_players, variant_id, seed, score, num_turns) "
        "VALUES %s "
        "ON CONFLICT (id) DO NOTHING",
        games_vals
    )
    psycopg2.extras.execute_values(
        cur,
        "INSERT INTO game_participants (game_id, user_id) "
        "VALUES %s "
        "ON CONFLICT (game_id, user_id) DO NOTHING",
        game_participants_vals
    )
    # Here, we want to update on insertion conflict
    psycopg2.extras.execute_values(
        cur,
        "INSERT INTO downloads (normalized_username, latest_game_id) "
        "VALUES %s "
        "ON CONFLICT (normalized_username) "
        "DO UPDATE SET (normalized_username, latest_game_id) = (EXCLUDED.normalized_username, EXCLUDED.latest_game_id)",
        latest_game_ids.items()
    )
    # We only commit after performing all insertions. This guarantees that the download table is always in sync
    # with the actual games stored in the database.
    conn.commit()
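

# A minimal usage sketch: fetching and storing are meant to run together so that the downloads table stays
# in sync with the stored games. This assumes that the database schema, the registered players in
# user_accounts and the allowed variant ids have already been set up by the surrounding application.
if __name__ == "__main__":
    new_games = fetch_games_for_all_players()
    logger.info("Fetched {} new accepted games.".format(len(new_games)))
    store_new_games(new_games)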