Made statistics work with refactored changes

Yarne Coppens 2024-08-22 10:16:16 +02:00
parent 61232a13ea
commit da3af316fa
3 changed files with 19 additions and 15 deletions

View file

@@ -62,7 +62,7 @@ class BoardgameFilterParams(BaseModel):
     filter_expansions_out: bool = False
     only_expansions: bool = False
 
-    def do_filtering(self,boardgame_list):
+    def do_filtering(self,boardgame_list) -> list[boardgame_classes.BoardGame]:
         if self.filter_expansions_out:
             boardgame_list = boardgame_filters.filter_expansions_out(boardgame_list)
@@ -75,7 +75,7 @@ class PlayFilterParams(BaseModel):
     filter_expansions_out: bool = False
     only_expansions: bool = False
 
-    def do_filtering(self, play_list):
+    def do_filtering(self, play_list) -> list[play_classes.Play]:
         if self.filter_expansions_out:
             play_list = play_filters.filter_expansions_out(play_list)
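Note: the new list[...] return annotations make explicit that do_filtering hands back the same kind of list it receives (they need Python 3.9+ unless annotation evaluation is postponed). A minimal, self-contained sketch of the filter-params pattern; the BoardGame stand-in and the list-comprehension filters are assumptions, not the project's boardgame_classes/boardgame_filters modules:

    from pydantic import BaseModel

    class BoardGame(BaseModel):  # stand-in for boardgame_classes.BoardGame
        name: str
        is_expansion: bool = False

    class BoardgameFilterParams(BaseModel):
        filter_expansions_out: bool = False
        only_expansions: bool = False

        def do_filtering(self, boardgame_list: list[BoardGame]) -> list[BoardGame]:
            # keep or drop expansions depending on the two flags
            if self.filter_expansions_out:
                boardgame_list = [g for g in boardgame_list if not g.is_expansion]
            if self.only_expansions:
                boardgame_list = [g for g in boardgame_list if g.is_expansion]
            return boardgame_list

    games = [BoardGame(name="Root"), BoardGame(name="Root: Riverfolk", is_expansion=True)]
    print(BoardgameFilterParams(filter_expansions_out=True).do_filtering(games))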

View file

@@ -11,7 +11,7 @@
 sqlite_url = definitions.SQLITE_URL
 connect_args = {"check_same_thread": False}
-engine = create_engine(sqlite_url, echo=True, connect_args=connect_args)
+engine = create_engine(sqlite_url, echo=False, connect_args=connect_args)
 
 
 def get_engine():
     return engine
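Note: echo=True makes SQLAlchemy log every emitted SQL statement to the console, so flipping it to echo=False silences that output without changing behaviour. A small sketch of the same engine setup against a throwaway in-memory database; the URL is a placeholder for definitions.SQLITE_URL:

    from sqlmodel import SQLModel, Session, create_engine

    connect_args = {"check_same_thread": False}  # lets the SQLite connection be used from other threads
    engine = create_engine("sqlite://", echo=False, connect_args=connect_args)

    def get_engine():
        return engine

    SQLModel.metadata.create_all(get_engine())   # nothing is printed with echo=False
    with Session(get_engine()) as session:
        pass  # queries would go here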

View file

@@ -1,10 +1,14 @@
+from __future__ import annotations
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from src.main import BoardgameFilterParams, PlayFilterParams
 from src.classes import statistic_classes
 from src.modules import data_connection
-from pydantic import BaseModel
 from datetime import date, timedelta, datetime
 from sqlmodel import Session
 
 
-def get_total_owned_games(session: Session, filtering_query: BaseModel = None) -> statistic_classes.NumberStatistic:
+def get_total_owned_games(session: Session, filtering_query: BoardgameFilterParams = None) -> statistic_classes.NumberStatistic:
     owned_collection = data_connection.get_user_owned_collection(session)
     if filtering_query != None:
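Note: this hunk is the standard lazy-annotation pattern. from __future__ import annotations keeps the type hints as strings at runtime, and the if TYPE_CHECKING: block only executes under a static type checker, so the statistics module can annotate against BoardgameFilterParams/PlayFilterParams without importing src.main at runtime (typically done to avoid a circular import). A standalone sketch with hypothetical module names:

    # statistics_sketch.py (hypothetical)
    from __future__ import annotations      # annotations stay as strings, evaluated lazily
    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # only type checkers execute this import; at runtime it never runs,
        # so a circular import with the main app module is avoided
        from app_main import FilterParams   # hypothetical counterpart of src.main

    def get_total_owned_games(filtering_query: FilterParams = None) -> int:
        # the hint above is never evaluated at runtime, so FilterParams need not exist here
        return 0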
@@ -22,14 +26,14 @@ def get_total_owned_games(session: Session, filtering_query: BaseModel = None) -
     return statistic_to_return
 
 
-def get_total_owned_collection_cost(session: Session, filtering_query: BaseModel = None) -> statistic_classes.NumberStatistic:
+def get_total_owned_collection_cost(session: Session, filtering_query: BoardgameFilterParams = None) -> statistic_classes.NumberStatistic:
     owned_collection = data_connection.get_user_owned_collection(session)
     if filtering_query != None:
         owned_collection = filtering_query.do_filtering(owned_collection)
-    total_cost = sum([boardgame.price_paid for boardgame in owned_collection])
+    total_cost = sum([boardgame.owned_info.price_paid for boardgame in owned_collection])
     statistic_dict = {
         "name":"Total cost of the owned collection",
@@ -41,23 +45,23 @@ def get_total_owned_collection_cost(session: Session, filtering_query: BaseModel
     return statistic_to_return
 
 
-def get_amount_of_games_over_time(session: Session, filtering_query: BaseModel = None, day_step: int = 1) -> statistic_classes.TimeLineStatistic:
+def get_amount_of_games_over_time(session: Session, filtering_query: BoardgameFilterParams = None, day_step: int = 1) -> statistic_classes.TimeLineStatistic:
     def daterange(start_date: date, end_date: date, day_step):
         days = int((end_date - start_date).days)
         for n in range(0, days, day_step):
             yield start_date + timedelta(n)
 
     games_in_owned_collection = data_connection.get_user_owned_collection(session)
-    games_in_owned_collection.sort(key=lambda x: x.acquisition_date)
-    start_date = games_in_owned_collection[0].acquisition_date
+    games_in_owned_collection.sort(key=lambda x: x.owned_info.acquisition_date)
+    start_date = games_in_owned_collection[0].owned_info.acquisition_date
     games_in_owned_collection = filtering_query.do_filtering(games_in_owned_collection)
 
     timeline_dict = {}
     for current_date in daterange(start_date, date.today(), day_step):
-        games_in_collection_at_date = list(filter(lambda game: game.acquisition_date <= current_date, games_in_owned_collection))
+        games_in_collection_at_date = list(filter(lambda game: game.owned_info.acquisition_date <= current_date, games_in_owned_collection))
         timeline_dict[current_date] = len(games_in_collection_at_date)
 
     statistic_dict = {
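Note: the hunk above samples dates from the first acquisition to today in day_step increments and counts how many games were already acquired at each sample. The counting logic in isolation, with bare dates standing in for game.owned_info.acquisition_date:

    from datetime import date, timedelta

    def daterange(start_date: date, end_date: date, day_step: int):
        # yields start_date, start_date + day_step days, ... strictly before end_date
        days = (end_date - start_date).days
        for n in range(0, days, day_step):
            yield start_date + timedelta(n)

    acquisition_dates = sorted([date(2024, 1, 3), date(2024, 1, 10), date(2024, 2, 1)])

    timeline_dict = {}
    for current_date in daterange(acquisition_dates[0], date(2024, 3, 1), day_step=7):
        timeline_dict[current_date] = sum(d <= current_date for d in acquisition_dates)

    print(timeline_dict)  # e.g. {date(2024, 1, 3): 1, date(2024, 1, 10): 2, ...}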
@@ -70,7 +74,7 @@ def get_amount_of_games_over_time(session: Session, filtering_query: BaseModel =
     return statistic_to_return
 
 
-def get_amount_of_games_played_per_year(session: Session, filtering_query: BaseModel = None) -> statistic_classes.TimeLineStatistic:
+def get_amount_of_games_played_per_year(session: Session, filtering_query: PlayFilterParams = None) -> statistic_classes.TimeLineStatistic:
     all_plays = data_connection.get_plays(session)
     all_plays.sort(key= lambda x: x.play_date)
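Note: most of this function's body lies outside the hunk, but given the play_date sort it presumably buckets plays per calendar year. One way to do that kind of per-year count; the dates are made up and stand in for play.play_date from the project's play_classes:

    from collections import Counter
    from datetime import date

    # stand-in plays; the real code gets these from data_connection.get_plays(session)
    play_dates = [date(2022, 5, 1), date(2023, 7, 14), date(2023, 12, 30), date(2024, 2, 2)]

    plays_per_year = Counter(d.year for d in sorted(play_dates))
    print(dict(plays_per_year))  # {2022: 1, 2023: 2, 2024: 1}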
@@ -103,12 +107,12 @@ def get_amount_of_games_played_per_year(session: Session, filtering_query: BaseM
     return statistic_to_return
 
 
-def get_most_expensive_games(session: Session, filtering_query: BaseModel = None, top_amount: int = 10) -> statistic_classes.GamesStatistic:
+def get_most_expensive_games(session: Session, filtering_query: BoardgameFilterParams = None, top_amount: int = 10) -> statistic_classes.GamesStatistic:
     most_expensive_games = data_connection.get_user_owned_collection(session)
     most_expensive_games = filtering_query.do_filtering(most_expensive_games)
-    most_expensive_games.sort(key=lambda x: x.price_paid, reverse=True)
+    most_expensive_games.sort(key=lambda x: x.owned_info.price_paid, reverse=True)
     most_expensive_games = most_expensive_games[0:top_amount]
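Note: this hunk is the usual top-N pattern: filter, sort descending on the nested price, slice the first top_amount entries. In isolation, with a minimal stand-in record:

    from dataclasses import dataclass

    @dataclass
    class OwnedGame:             # stand-in with just the field the sort needs
        name: str
        price_paid: float

    games = [OwnedGame("Root", 55.0), OwnedGame("Cascadia", 40.0), OwnedGame("Gloomhaven", 120.0)]

    top_amount = 2
    games.sort(key=lambda g: g.price_paid, reverse=True)  # most expensive first
    most_expensive_games = games[0:top_amount]
    print([g.name for g in most_expensive_games])         # ['Gloomhaven', 'Root']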
@@ -122,7 +126,7 @@ def get_most_expensive_games(session: Session, filtering_query: BaseModel = None
     return statistic_to_return
 
 
-def get_shelf_of_shame(session: Session, filtering_query: BaseModel = None) -> statistic_classes.GamesStatistic:
+def get_shelf_of_shame(session: Session, filtering_query: BoardgameFilterParams = None) -> statistic_classes.GamesStatistic:
     boardgames_in_collection = data_connection.get_user_collection(session)
     owned_boardgames = data_connection.get_user_owned_collection(session)