# hanabi-league/src/stats.py
import aenum
from typing import List, Tuple, Set
import psycopg2.extras
from hanabi import hanab_game
import utils
from database import conn_manager
import games_db_interface
from log_setup import logger
class GameOutcome(aenum.Enum):
    """Reasons why a game ended; a single game may have several outcomes."""
    # aenum extension: each member is initialized from a tuple, giving it both
    # an integer `value` (stored in the DB) and a human-readable `string`.
    _init_ = 'value string'
    win = 0, 'Win'
    discard_crit = 1, 'Discard Critical'
    bomb_crit = 2, 'Bomb Critical'
    strikeout = 3, 'Strikeout'
    bottom_deck = 4, 'Bottom Deck'
    vote_to_kill = 5, 'Vote to Kill'
    out_of_pace = 6, 'Out of Pace'
class GameAnalysisResult:
    """
    Plain value holder for the results of analyzing one game replay.

    outcomes holds the set of GameOutcome reasons determined for the game,
    bdrs the bottom-deck risks taken as (card, draw pile size) pairs, and
    lost_crits the critical cards that were discarded or bombed.
    """

    def __init__(self,
                 outcomes: Set[GameOutcome],
                 bdrs: List[Tuple[hanab_game.DeckCard, int]],
                 lost_crits: List[hanab_game.DeckCard]
                 ):
        # Store everything verbatim; no processing happens here.
        self.lost_crits = lost_crits
        self.bdrs = bdrs
        self.outcomes = outcomes
def analyze_replay(instance: hanab_game.HanabiInstance, actions: List[hanab_game.Action]) -> GameAnalysisResult:
    """
    Replays the given game and collects statistics about it.

    @param instance: Static game data (deck, player count, ...).
    @param actions: The actions taken during the game, in order.
    @return: GameAnalysisResult with the outcome set, the bottom-deck risks taken
        and the critical cards lost.
    """
    # List of bdrs: (card, draw pile size at the moment the risk was taken)
    bdrs = []
    # This is the default value if we find no other reason why the game was lost (or won)
    outcomes = set()
    # Critical cards that were discarded or bombed
    lost_crits = []
    game = hanab_game.GameState(instance)

    def handle_lost_card(card, game, play: bool):
        # Updates outcomes/lost_crits/bdrs for a card that left the game.
        # play is True for a misplay (bomb), False for a discard.
        if game.is_trash(card):
            return
        if game.is_critical(card):
            outcomes.add(GameOutcome.bomb_crit if play else GameOutcome.discard_crit)
            lost_crits.append(card)
        elif card.rank != 1:
            if card in game.deck[game.progress:]:
                # Another copy is still in the draw pile: bottom-deck risk taken.
                bdrs.append((card, game.draw_pile_size))
            elif game.deck[game.progress:].count(card) == 2:
                # NOTE(review): this branch looks unreachable (if the card is not in the
                # remaining deck, its count there is 0) — kept as in the original; verify intent.
                bdrs.append((card, game.draw_pile_size))

    for action in actions:
        if action.type == hanab_game.ActionType.Discard:
            handle_lost_card(instance.deck[action.target], game, False)
        if action.type == hanab_game.ActionType.Play:
            played_card = instance.deck[action.target]
            # An unplayable, non-trash play bombs and loses the card.
            # (The original re-read instance.deck[action.target] here; played_card
            # already holds that card.)
            if not game.is_playable(played_card) and not game.is_trash(played_card):
                handle_lost_card(played_card, game, True)
        game.make_action(action)

    if game.pace < 0:
        outcomes.add(GameOutcome.out_of_pace)
    if game.strikes == 3:
        outcomes.add(GameOutcome.strikeout)
    # Guard against an empty action list before inspecting the final action.
    elif actions and actions[-1].type in [hanab_game.ActionType.EndGame, hanab_game.ActionType.VoteTerminate]:
        outcomes.add(GameOutcome.vote_to_kill)
    if game.score == 5 * instance.num_suits:
        outcomes.add(GameOutcome.win)
    return GameAnalysisResult(outcomes, bdrs, lost_crits)
def analyze_game_and_store_stats(game_id: int):
    """
    Loads one game from the DB, analyzes its replay and upserts the resulting
    statistics (BDR count, lost-crit count, outcome rows) into the database.
    """
    logger.verbose("Analysing game {} for BDRs and lost crits".format(game_id))
    instance, actions = games_db_interface.load_game(game_id)
    analysis = analyze_replay(instance, actions)

    cursor = conn_manager.get_new_cursor()
    # Upsert the per-game counters.
    cursor.execute(
        "INSERT INTO game_statistics (game_id, num_bottom_deck_risks, num_crits_lost) "
        "VALUES (%s, %s, %s) "
        "ON CONFLICT (game_id) DO UPDATE "
        "SET (num_bottom_deck_risks, num_crits_lost) = (EXCLUDED.num_bottom_deck_risks, EXCLUDED.num_crits_lost)",
        (game_id, len(analysis.bdrs), len(analysis.lost_crits))
    )
    # One row per outcome determined for this game.
    outcome_rows = [(game_id, outcome.value) for outcome in analysis.outcomes]
    psycopg2.extras.execute_values(
        cursor,
        "INSERT INTO game_outcomes (game_id, outcome) VALUES %s",
        outcome_rows
    )
    conn_manager.get_connection().commit()
def analyze_all_games():
    """
    Runs replay analysis on every game that has no statistics entry yet.
    @warning: This assumes that detailed game data has been fetched from the server already
    """
    logger.info("Analysing replays of all games.")
    cursor = conn_manager.get_new_cursor()
    # The LEFT JOIN + IS NULL filter selects exactly the not-yet-analyzed games.
    cursor.execute(
        "SELECT id FROM games "
        "LEFT OUTER JOIN game_statistics "
        " ON games.id = game_statistics.game_id "
        "WHERE game_statistics.game_id IS NULL "
        "ORDER BY games.id"
    )
    for row in cursor.fetchall():
        analyze_game_and_store_stats(row[0])
def update_user_statistics():
    """
    Recomputes the cumulative per-user statistics for all users, assuming that the
    corresponding per-game statistics have been computed already.
    """
    # Note that some of these statistics could be computed by updating them on each new game insertion.
    # However, it would be tedious to ensure that *every* new game triggers an update of these statistics.
    # Also, this would be error-prone, since doing a mistake once means that values will be off forever
    # (unless the DB is reset).
    # Since it is cheap to accumulate some values over the whole DB, we therefore recreate the statistics as a whole,
    # reusing only the individual results (that never change and therefore can only be missing, but never wrong)
    # NOTE(review): the column name 'total_crits_lots' (rather than 'lost') is used consistently in
    # every query below, so it presumably matches the DB schema — verify before renaming anywhere.
    cur = conn_manager.get_new_cursor()
    # Update total number of moves
    for clue_starved in [True, False]:
        rating_type = utils.get_rating_type(clue_starved)
        # We insert 0 here to ensure that we have an entry for each player
        # Note that this will immediately be changed by the next query in case it is nonzero,
        # so the zero value never shows up in the database if it was nonzero before.
        cur.execute(
            "INSERT INTO user_statistics"
            " (user_id, variant_type, total_game_moves, games_played, games_won, current_streak, maximum_streak, total_bdr, total_crits_lots)"
            " ("
            " SELECT id, %s, 0, 0, 0, 0, 0, 0, 0 FROM users"
            " )"
            "ON CONFLICT (user_id, variant_type) DO UPDATE "
            "SET"
            " (total_game_moves, games_played, games_won, current_streak, maximum_streak, total_bdr, total_crits_lots)"
            " ="
            " (EXCLUDED.total_game_moves, EXCLUDED.games_played, EXCLUDED.games_won, EXCLUDED.current_streak, EXCLUDED.maximum_streak, EXCLUDED.total_bdr, EXCLUDED.total_crits_lots)",
            (rating_type,)
        )
    # Most of the statistics are very easy to compute: We just have to accumulate data from other tables
    # NOTE(review): two parameters are passed here, but no %s placeholder is visible in this query
    # ('rating_type' is read as a plain column instead). psycopg2 normally rejects parameters
    # without matching placeholders — confirm the placeholders were not lost from this copy.
    cur.execute(
        "INSERT INTO user_statistics (user_id, variant_type, total_game_moves, games_played, games_won, total_bdr, total_crits_lots)"
        " ("
        " SELECT"
        " users.id,"
        " rating_type,"
        " SUM(games.num_turns),"
        " COUNT(*),"  # This counts the number of rows (per user id), so the number of played games
        " COUNT(*) FILTER ( WHERE variants.num_suits * 5 = games.score ),"  # Same, but only count wins now
        " SUM (game_statistics.num_bottom_deck_risks),"  # Simple accumulation of the game stats
        " SUM (game_statistics.num_crits_lost)"
        "FROM users"
        " INNER JOIN game_participants "
        " ON game_participants.user_id = users.id "
        " INNER JOIN games "
        " ON game_participants.game_id = games.id "
        " INNER JOIN variants"
        " ON variants.id = games.variant_id "
        " LEFT OUTER JOIN game_statistics"
        " ON games.id = game_statistics.game_id"
        " GROUP BY users.id, rating_type "
        " ) "
        "ON CONFLICT (user_id, variant_type) DO UPDATE "
        "SET"
        " (total_game_moves, games_played, games_won, total_bdr, total_crits_lots)"
        " ="
        " (EXCLUDED.total_game_moves, EXCLUDED.games_played, EXCLUDED.games_won, EXCLUDED.total_bdr, EXCLUDED.total_crits_lots)",
        (utils.get_rating_type(True), utils.get_rating_type(False))
    )
    # This computes the maximum streak lengths, it's quite complicated.
    # First (in the innermost select clause), we build up an auxiliary table, which consists of some joined data that
    # we are interested in, but most importantly each row gets a 'group_id' entry in such a way that games belonging
    # to the same streak will have the same group_id:
    # In ascending league_id order, this entry counts the number of *losses* up until this point: Therefore, the number
    # does not increase during win streaks, but increases for each loss.
    # Additionally, we subtract 1 from this sum for lost games, so that losses always have the same group id as the last
    # win immediately before them. Therefore, each group (= entries with the same group id) now consists of
    # some consecutive wins, optionally followed by a loss
    # In the second query, we can now use these group ids to add a 'streak_length' to each row by numbering the rows
    # inside their corresponding group (this is what the OVER (PARTITION BY ..., group_id) does).
    # Now, in a third select statement, it is now easy to calculate the maximum streak by taking the maximum of this
    # row ranging over all games, where we grouped by user id and rating type (Clue Starved/Non-CS currently)
    # Finally, we just wrap the computed data into an insert statement to directly store it in the statistics table
    # NOTE(review): as in the previous query, two parameters are passed but no %s placeholder is
    # visible below — verify against the original source.
    cur.execute(
        "INSERT INTO user_statistics (user_id, variant_type, maximum_streak, current_streak, maximum_streak_last_game)"
        " ("
        " SELECT"
        " user_id,"
        " rating_type,"
        " COALESCE(MAX(streak_length), 0) AS maximum_streak,"
        " COALESCE((ARRAY_AGG(streak_length ORDER BY league_id DESC))[1], 0) AS current_streak,"
        " ("
        " ARRAY_AGG(league_id ORDER BY streak_length DESC, league_id ASC)"
        " FILTER ( WHERE streak_length IS NOT NULL)"
        " )[1] AS maximum_streak_last_game"
        " FROM"
        " ("
        " SELECT"
        " *,"
        # Note that here we have the extra distinction to only add a streak_length to wins, not losses.
        # Otherwise, a streak of n games would result in a loss that has 'streak' n + 1, which is not what we want.
        " CASE"
        " WHEN num_suits * 5 = score"
        " THEN"
        " COUNT(*)"
        " OVER (PARTITION BY user_id, rating_type, group_id ORDER BY league_id ASC)"
        " END"
        " AS streak_length "
        " FROM"
        " ("
        " SELECT"
        " users.id AS user_id,"
        " variants.num_suits,"
        " variants.rating_type,"
        " games.score,"
        " games.league_id,"
        # This count function is the tricky part that labels each game with the group_id of the streak it belongs to
        " COUNT(*) "
        " FILTER (WHERE variants.num_suits * 5 != games.score)"
        " OVER (PARTITION BY users.id, variants.clue_starved ORDER BY games.league_id)"
        " - CASE WHEN variants.num_suits * 5 != games.score THEN 1 ELSE 0 END"
        " AS group_id"
        " FROM users "
        " INNER JOIN game_participants "
        " ON game_participants.user_id = users.id "
        " INNER JOIN games "
        " ON game_participants.game_id = games.id "
        " INNER JOIN variants "
        " ON variants.id = games.variant_id "
        " ) AS games_grouped_by_streak "
        " ) AS games_with_streaks "
        " GROUP BY user_id, rating_type"
        " )"
        "ON CONFLICT (user_id, variant_type) DO UPDATE "
        "SET (maximum_streak, current_streak, maximum_streak_last_game) = (EXCLUDED.maximum_streak, EXCLUDED.current_streak, EXCLUDED.maximum_streak_last_game)",
        (utils.get_rating_type(True), utils.get_rating_type(False))
    )
    conn_manager.get_connection().commit()