Merge branch 'master' into flashpoint-metadata-handler

This commit is contained in:
Georges-Antoine Assi
2025-09-11 21:23:17 -04:00
536 changed files with 14574 additions and 5296 deletions

18
.devcontainer/dev.env Normal file
View File

@@ -0,0 +1,18 @@
AUTHENTIK_BOOTSTRAP_PASSWORD=password
# Authentik defaults
AUTHENTIK_SECRET_KEY=secret-key-default
# Database defaults for development
DB_HOST=romm-db-dev
DB_NAME=romm
DB_PASSWD=romm
DB_ROOT_PASSWD=rootpassword
DB_USER=romm
DEV_HTTPS=false
# Development environment overrides for Dev Containers
DEV_MODE=true
# Pick a host port that doesn't conflict with local services
DEV_PORT=5001
# Logging
LOGLEVEL=DEBUG
# Redis
REDIS_PORT=6379

View File

@@ -1,13 +1,13 @@
{
"name": "ROMM Development",
"dockerComposeFile": "docker-compose.yml",
"dockerComposeFile": ["../docker-compose.yml"],
"service": "romm-dev",
"workspaceFolder": "/app",
"shutdownAction": "stopCompose",
"forwardPorts": [5000, 3000, 3306, 6379],
"forwardPorts": [8443, 3000, 3306, 5000, 6379],
"portsAttributes": {
"5000": {
"label": "Backend API",
"8443": {
"label": "HTTPS Dev Server",
"onAutoForward": "notify"
},
"3000": {
@@ -18,6 +18,10 @@
"label": "MariaDB",
"onAutoForward": "silent"
},
"5000": {
"label": "Backend API",
"onAutoForward": "notify"
},
"6379": {
"label": "Valkey/Redis",
"onAutoForward": "silent"
@@ -29,7 +33,6 @@
"ms-python.python",
"ms-python.pylint",
"ms-python.black-formatter",
"bradlc.vscode-tailwindcss",
"vue.volar",
"ms-vscode.vscode-typescript-next"
],

View File

@@ -1,2 +1,13 @@
[settings]
profile=black
known_first_party =
adapters
config
decorators
endpoints
exceptions
handler
logger
models
tasks
utils

View File

@@ -0,0 +1,5 @@
plugins: ["@trivago/prettier-plugin-sort-imports"]
importOrder:
- "<THIRD_PARTY_MODULES>"
- "^@/(.*)$"
- "^[./]"

View File

@@ -2,12 +2,12 @@
# To learn more about the format of this file, see https://docs.trunk.io/reference/trunk-yaml
version: 0.1
cli:
version: 1.24.0
version: 1.25.0
# Trunk provides extensibility via plugins. (https://docs.trunk.io/plugins)
plugins:
sources:
- id: trunk
ref: v1.7.1
ref: v1.7.2
uri: https://github.com/trunk-io/plugins
# Many linters and tools depend on runtimes - configure them here. (https://docs.trunk.io/runtimes)
runtimes:
@@ -20,26 +20,28 @@ lint:
disabled:
- pyright
enabled:
- hadolint@2.12.1-beta
- dotenv-linter@3.3.0
- hadolint@2.13.1
- markdownlint@0.45.0
- eslint@9.31.0
- eslint@9.34.0
- actionlint@1.7.7
- bandit@1.8.6
- black@25.1.0
- checkov@3.2.451
- checkov@3.2.469
- git-diff-check
- isort@6.0.1
- mypy@1.17.0
- osv-scanner@2.0.3
- oxipng@9.1.5
- prettier@3.6.2
- ruff@0.12.4
- shellcheck@0.10.0
- mypy@1.17.1
- osv-scanner@2.2.2
- prettier@3.6.2:
packages:
- "@trivago/prettier-plugin-sort-imports@5.2.2"
- "@vue/compiler-sfc@3.5.21"
- ruff@0.12.11
- shellcheck@0.11.0
- shfmt@3.6.0
- svgo@4.0.0
- taplo@0.9.3
- trivy@0.64.1
- trufflehog@3.90.1
- taplo@0.10.0
- trivy@0.66.0
- trufflehog@3.90.5
- yamllint@1.37.1
ignore:
- linters: [ALL]

View File

@@ -6,7 +6,7 @@
<img src=".github/resources/logotipo.png" height="45px" width="auto" alt="romm logotype">
<h3 style="font-size: 25px;">
A beautiful, powerful, self-hosted rom manager.
A beautiful, powerful, self-hosted ROM manager.
</h3>
<br/>

View File

@@ -0,0 +1,203 @@
import asyncio
import http
import json
from collections.abc import Sequence
from functools import partial
from typing import TYPE_CHECKING
import aiohttp
import yarl
from aiohttp.client import ClientTimeout
from fastapi import HTTPException, status
from unidecode import unidecode
from adapters.services.igdb_types import Game
from config import IGDB_CLIENT_ID
from logger.logger import log
from utils.context import ctx_aiohttp_session
if TYPE_CHECKING:
from handler.metadata.igdb_handler import TwitchAuth
class IGDBInvalidCredentialsException(Exception):
    """Exception raised when IGDB credentials are invalid.

    Raised by ``auth_middleware`` when no OAuth token can be obtained from
    Twitch, which indicates a missing or invalid client ID / secret pair.
    """
async def auth_middleware(
    req: aiohttp.ClientRequest,
    handler: aiohttp.ClientHandlerType,
    *,
    twitch_auth: "TwitchAuth",
) -> aiohttp.ClientResponse:
    """Attach IGDB authentication headers to an outgoing request.

    Reference: https://api-docs.igdb.com/#authentication

    Raises:
        IGDBInvalidCredentialsException: If no OAuth token is available,
            meaning the configured Twitch credentials are invalid.
    """
    token = await twitch_auth.get_oauth_token()
    if not token:
        raise IGDBInvalidCredentialsException()

    # IGDB expects both the bearer token and the Twitch client ID on
    # every request.
    auth_headers = {
        "Accept": "application/json",
        "Authorization": f"Bearer {token}",
        "Client-ID": IGDB_CLIENT_ID,
    }
    req.headers.update(auth_headers)
    return await handler(req)
class IGDBService:
    """Service to interact with the IGDB API.

    Reference: https://api-docs.igdb.com/
    """

    def __init__(
        self,
        twitch_auth: "TwitchAuth",
        base_url: str | None = None,
    ) -> None:
        # base_url may be overridden (e.g. for tests or a proxy); defaults
        # to the public v4 API.
        self.url = yarl.URL(base_url or "https://api.igdb.com/v4")
        self.twitch_auth = twitch_auth
        # Bind the Twitch auth object into the request middleware so every
        # request carries fresh credentials.
        self.auth_middleware = partial(auth_middleware, twitch_auth=self.twitch_auth)

    async def _request(
        self,
        url: str,
        search_term: str | None = None,
        fields: Sequence[str] | None = None,
        where: str | None = None,
        limit: int | None = None,
        request_timeout: int = 120,
    ) -> list:
        """POST an Apicalypse query to IGDB and return the decoded JSON list.

        Builds the query body from the given clauses, performs the request,
        and retries it exactly once after a timeout, a 401 (after refreshing
        the Twitch token) or a 429 (after a 2-second pause). Returns an empty
        list on any other failure; raises HTTPException (503) when the
        credentials are invalid or IGDB is unreachable.
        """
        aiohttp_session = ctx_aiohttp_session.get()

        # Build the Apicalypse query body from the provided clauses.
        content = ""
        if search_term:
            # unidecode transliterates the term to ASCII before searching.
            content += f'search "{unidecode(search_term)}"; '
        if fields:
            content += f"fields {','.join(fields)}; "
        if where:
            content += f"where {where}; "
        if limit is not None:
            content += f"limit {limit}; "
        content = content.strip()

        log.debug(
            "API request: URL=%s, Content=%s, Timeout=%s",
            url,
            content,
            request_timeout,
        )
        try:
            res = await aiohttp_session.post(
                url,
                data=content,
                middlewares=(self.auth_middleware,),
                timeout=ClientTimeout(total=request_timeout),
            )
            res.raise_for_status()
            return await res.json()
        except aiohttp.ServerTimeoutError:
            # Retry the request once if it times out
            log.debug("Request to URL=%s timed out. Retrying...", url)
        except IGDBInvalidCredentialsException as exc:
            log.critical("IGDB Error: Invalid IGDB_CLIENT_ID or IGDB_CLIENT_SECRET")
            raise HTTPException(
                status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
                detail="Invalid IGDB credentials",
            ) from exc
        except aiohttp.ClientConnectionError as exc:
            log.critical("Connection error: can't connect to IGDB", exc_info=True)
            raise HTTPException(
                status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
                detail="Can't connect to IGDB, check your internet connection",
            ) from exc
        except aiohttp.ClientResponseError as exc:
            if exc.status == http.HTTPStatus.UNAUTHORIZED:
                # Refresh the token and retry if the auth token is invalid
                log.info("Twitch token invalid: fetching a new one...")
                await self.twitch_auth._update_twitch_token()
            elif exc.status == http.HTTPStatus.TOO_MANY_REQUESTS:
                # Retry after 2 seconds if rate limit hit
                await asyncio.sleep(2)
            else:
                # Log the error and return an empty list if the request fails with a different code
                log.error(exc)
                return []
        except json.JSONDecodeError as exc:
            log.error("Error decoding JSON response from IGDB: %s", exc)
            return []

        # Retry the request once if it times out
        # NOTE: control reaches here only after a timeout, a 401 (token was
        # refreshed above) or a 429 (already slept 2s). This second attempt is
        # best-effort: every failure path below returns [] instead of raising.
        try:
            log.debug(
                "API request: URL=%s, Content=%s, Timeout=%s",
                url,
                content,
                request_timeout,
            )
            res = await aiohttp_session.post(
                url,
                data=content,
                middlewares=(self.auth_middleware,),
                timeout=ClientTimeout(total=request_timeout),
            )
            res.raise_for_status()
            return await res.json()
        except (aiohttp.ClientResponseError, aiohttp.ServerTimeoutError) as exc:
            if (
                isinstance(exc, aiohttp.ClientResponseError)
                and exc.status == http.HTTPStatus.UNAUTHORIZED
            ):
                # Still unauthorized after a token refresh: give up quietly.
                return []
            log.error(exc)
            return []
        except json.JSONDecodeError as exc:
            log.error("Error decoding JSON response from IGDB: %s", exc)
            return []

    async def list_games(
        self,
        *,
        search_term: str | None = None,
        fields: Sequence[str] | None = None,
        where: str | None = None,
        limit: int | None = None,
    ) -> list[Game]:
        """Retrieve games.

        Reference: https://api-docs.igdb.com/#game
        """
        url = self.url.joinpath("games")
        return await self._request(
            str(url),
            search_term=search_term,
            fields=fields,
            where=where,
            limit=limit,
        )

    async def search(
        self,
        *,
        search_term: str | None = None,
        fields: Sequence[str] | None = None,
        where: str | None = None,
        limit: int | None = None,
    ) -> list[dict]:
        """Search for different entities.

        Reference: https://api-docs.igdb.com/#search
        """
        url = self.url.joinpath("search")
        return await self._request(
            str(url),
            search_term=search_term,
            fields=fields,
            where=where,
            limit=limit,
        )

View File

@@ -1,11 +1,22 @@
from __future__ import annotations
import enum
from typing import Literal, NewType, TypedDict
from typing import Literal, NewType, TypedDict, TypeGuard
# https://api-docs.igdb.com/#expander
type ExpandableField[T] = T | int
def mark_expanded[T](value: ExpandableField[T]) -> TypeGuard[T]:
"""Type guard to narrow an `ExpandableField` to its expanded type."""
return True
def mark_list_expanded[T](value: list[ExpandableField[T]]) -> TypeGuard[list[T]]:
"""Type guard to narrow an `ExpandableField` list to its expanded type."""
return True
# TODO: Add missing structures until all are implemented.
UnimplementedEntity = NewType("UnimplementedEntity", dict)
AgeRatingContentDescription = UnimplementedEntity
@@ -95,7 +106,7 @@ class AgeRating(IGDBEntity, total=False):
category: AgeRatingCategory
checksum: str # uuid
content_descriptions: list[ExpandableField[AgeRatingContentDescription]]
rating: AgeRatingRating
rating_category: AgeRatingRating
rating_cover_url: str
synopsis: str

View File

@@ -6,10 +6,11 @@ from typing import Literal, overload
import aiohttp
import yarl
from adapters.services.mobygames_types import MobyGame, MobyGameBrief, MobyOutputFormat
from aiohttp.client import ClientTimeout
from config import MOBYGAMES_API_KEY
from fastapi import HTTPException, status
from adapters.services.mobygames_types import MobyGame, MobyGameBrief, MobyOutputFormat
from config import MOBYGAMES_API_KEY
from logger.logger import log
from utils.context import ctx_aiohttp_session

View File

@@ -6,6 +6,9 @@ from typing import cast
import aiohttp
import yarl
from aiohttp.client import ClientTimeout
from fastapi import HTTPException, status
from adapters.services.retroachievements_types import (
RAGameExtendedDetails,
RAGameInfoAndUserProgress,
@@ -13,9 +16,7 @@ from adapters.services.retroachievements_types import (
RAUserCompletionProgress,
RAUserCompletionProgressResult,
)
from aiohttp.client import ClientTimeout
from config import RETROACHIEVEMENTS_API_KEY
from fastapi import HTTPException, status
from logger.logger import log
from utils.context import ctx_aiohttp_session

View File

@@ -6,10 +6,11 @@ from typing import Final, cast
import aiohttp
import yarl
from adapters.services.screenscraper_types import SSGame
from aiohttp.client import ClientTimeout
from config import SCREENSCRAPER_PASSWORD, SCREENSCRAPER_USER
from fastapi import HTTPException, status
from adapters.services.screenscraper_types import SSGame
from config import SCREENSCRAPER_PASSWORD, SCREENSCRAPER_USER
from logger.logger import log
from utils.context import ctx_aiohttp_session

View File

@@ -7,6 +7,8 @@ from typing import Literal, cast
import aiohttp
import aiohttp.client_exceptions
import yarl
from aiohttp.client import ClientTimeout
from adapters.services.steamgriddb_types import (
SGDBDimension,
SGDBGame,
@@ -17,7 +19,6 @@ from adapters.services.steamgriddb_types import (
SGDBTag,
SGDBType,
)
from aiohttp.client import ClientTimeout
from config import STEAMGRIDDB_API_KEY
from exceptions.endpoint_exceptions import SGDBInvalidAPIKeyException
from logger.logger import log
@@ -177,3 +178,14 @@ class SteamGridDBService:
url = self.url.joinpath("search/autocomplete", term)
response = await self._request(str(url))
return cast(list[SGDBGame], response.get("data", []))
async def get_game_by_id(self, game_id: int) -> SGDBGame | None:
    """Get game details by ID, or ``None`` when the lookup yields no data.

    Reference: https://www.steamgriddb.com/api/v2#tag/GAMES/operation/getGameById
    """
    url = self.url.joinpath("games", str(game_id))
    response = await self._request(str(url))
    # Only a response carrying a "data" key counts as a hit.
    if response and "data" in response:
        return cast(SGDBGame, response["data"])
    return None

View File

@@ -2,6 +2,8 @@ import sys
from pathlib import Path
from alembic import context
from sqlalchemy import create_engine
from config.config_manager import ConfigManager
from logger.logger import unify_logger
from models.assets import Save, Screenshot, State # noqa
@@ -11,7 +13,6 @@ from models.firmware import Firmware # noqa
from models.platform import Platform # noqa
from models.rom import Rom, RomMetadata, SiblingRom # noqa
from models.user import User # noqa
from sqlalchemy import create_engine
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.

View File

@@ -9,6 +9,7 @@ Create Date: 2023-09-12 18:18:27.158732
import sqlalchemy as sa
from alembic import op
from sqlalchemy.exc import OperationalError
from utils.database import CustomJSON, is_postgresql
# revision identifiers, used by Alembic.

View File

@@ -8,6 +8,7 @@ Create Date: 2023-12-03 10:54:46.859106
import sqlalchemy as sa
from alembic import op
from utils.database import CustomJSON, is_postgresql
# revision identifiers, used by Alembic.

View File

@@ -10,10 +10,11 @@ import os
import sqlalchemy as sa
from alembic import op
from config import ROMM_DB_DRIVER
from config.config_manager import SQLITE_DB_BASE_PATH, ConfigManager
from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker
from config import ROMM_DB_DRIVER
from config.config_manager import SQLITE_DB_BASE_PATH, ConfigManager
from utils.database import CustomJSON, is_postgresql
# revision identifiers, used by Alembic.

View File

@@ -8,6 +8,7 @@ Create Date: 2024-02-13 17:57:25.936825
import sqlalchemy as sa
from alembic import op
from utils.database import CustomJSON, is_postgresql
# revision identifiers, used by Alembic.

View File

@@ -13,6 +13,7 @@ from urllib.parse import quote
import sqlalchemy as sa
from alembic import op
from config import RESOURCES_BASE_PATH
# revision identifiers, used by Alembic.

View File

@@ -12,8 +12,9 @@ import shutil
import sqlalchemy as sa
from alembic import op
from config import RESOURCES_BASE_PATH
from sqlalchemy import inspect
from config import RESOURCES_BASE_PATH
from utils.database import CustomJSON
# revision identifiers, used by Alembic.

View File

@@ -8,6 +8,7 @@ Create Date: 2024-07-07 13:44:25.811184
import sqlalchemy as sa
from alembic import op
from utils.database import is_postgresql
# revision identifiers, used by Alembic.

View File

@@ -8,6 +8,7 @@ Create Date: 2024-08-08 12:00:00.000000
import sqlalchemy as sa
from alembic import op
from utils.database import is_postgresql
# revision identifiers, used by Alembic.

View File

@@ -8,6 +8,7 @@ Create Date: 2024-08-11 21:50:53.301352
import sqlalchemy as sa
from alembic import op
from config import IS_PYTEST_RUN, SCAN_TIMEOUT
from endpoints.sockets.scan import scan_platforms
from handler.redis_handler import high_prio_queue

View File

@@ -9,6 +9,7 @@ Create Date: 2024-08-29 15:52:56.031850
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects.postgresql import ENUM
from utils.database import is_postgresql
# revision identifiers, used by Alembic.

View File

@@ -8,6 +8,7 @@ Create Date: 2024-11-17 23:05:31.038917
import sqlalchemy as sa
from alembic import op
from models.platform import DEFAULT_COVER_ASPECT_RATIO
# revision identifiers, used by Alembic.

View File

@@ -8,11 +8,12 @@ Create Date: 2024-12-19 23:16:11.053536
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects.postgresql import ENUM
from config import IS_PYTEST_RUN, SCAN_TIMEOUT
from endpoints.sockets.scan import scan_platforms
from handler.redis_handler import high_prio_queue
from handler.scan_handler import ScanType
from sqlalchemy.dialects.postgresql import ENUM
from utils.database import CustomJSON, is_postgresql
# revision identifiers, used by Alembic.

View File

@@ -8,6 +8,7 @@ Create Date: 2024-08-08 12:00:00.000000
import sqlalchemy as sa
from alembic import op
from utils.database import CustomJSON, is_postgresql
# revision identifiers, used by Alembic.

View File

@@ -8,6 +8,7 @@ Create Date: 2025-01-02 18:58:55.557123
import sqlalchemy as sa
from alembic import op
from utils.database import CustomJSON
# revision identifiers, used by Alembic.

View File

@@ -8,6 +8,7 @@ Create Date: 2025-01-02 18:58:55.557123
import sqlalchemy as sa
from alembic import op
from handler.metadata.ss_handler import SCREENSAVER_PLATFORM_LIST
# revision identifiers, used by Alembic.

View File

@@ -8,6 +8,7 @@ Create Date: 2025-03-17 00:00:00.000000
import sqlalchemy as sa
from alembic import op
from utils.database import is_postgresql
# revision identifiers, used by Alembic.

View File

@@ -8,6 +8,7 @@ Create Date: 2025-04-23 00:00:00.000000
import sqlalchemy as sa
from alembic import op
from utils.database import is_postgresql
# revision identifiers, used by Alembic.

View File

@@ -8,6 +8,7 @@ Create Date: 2025-04-11 00:59:30.772416
import sqlalchemy as sa
from alembic import op
from utils.database import CustomJSON
# revision identifiers, used by Alembic.

View File

@@ -8,9 +8,10 @@ Create Date: 2025-05-14 18:10:23.522345
import os
from alembic import op
from sqlalchemy.sql import text
from config import ASSETS_BASE_PATH
from logger.logger import log
from sqlalchemy.sql import text
from utils.database import is_postgresql
# revision identifiers, used by Alembic.

View File

@@ -10,6 +10,7 @@ import json
from alembic import op
from sqlalchemy.sql import text
from utils.database import is_postgresql
# revision identifiers, used by Alembic.

View File

@@ -9,6 +9,7 @@ Create Date: 2025-05-20 22:39:16.993191
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
from utils.database import CustomJSON
revision = "0043_launchbox_id"

View File

@@ -8,6 +8,7 @@ Create Date: 2025-03-17 00:00:00.000000
import sqlalchemy as sa
from alembic import op
from utils.database import is_postgresql
# revision identifiers, used by Alembic.

View File

@@ -8,6 +8,7 @@ Create Date: 2025-07-24 15:24:04.331946
import sqlalchemy as sa
from alembic import op
from config.config_manager import config_manager as cm
from handler.metadata.base_handler import UniversalPlatformSlug as UPS

View File

@@ -8,6 +8,7 @@ Create Date: 2024-12-19 12:00:00.000000
import sqlalchemy as sa
from alembic import op
from utils.database import CustomJSON
# revision identifiers, used by Alembic.

View File

@@ -8,6 +8,7 @@ Create Date: 2025-01-27 00:00:00.000000
import sqlalchemy as sa
from alembic import op
from utils.database import is_postgresql
# revision identifiers, used by Alembic.

View File

@@ -8,6 +8,7 @@ Create Date: 2025-08-22 04:42:22.367888
import sqlalchemy as sa
from alembic import op
from models.firmware import Firmware
from utils.database import is_postgresql

View File

@@ -9,6 +9,7 @@ Create Date: 2023-04-10 23:02:37.472055
import sqlalchemy as sa
from alembic import op
from sqlalchemy.exc import OperationalError
from utils.database import CustomJSON
# revision identifiers, used by Alembic.

View File

@@ -8,6 +8,7 @@ Create Date: 2023-04-10 23:13:43.591414
import sqlalchemy as sa
from alembic import op
from utils.database import CustomJSON
# revision identifiers, used by Alembic.

View File

@@ -8,6 +8,7 @@ Create Date: 2023-04-17 12:03:19.163501
import sqlalchemy as sa
from alembic import op
from utils.database import CustomJSON, is_postgresql
# revision identifiers, used by Alembic.

View File

@@ -7,6 +7,7 @@ Create Date: 2023-05-17 12:59:44.344356
"""
from alembic import op
from utils.database import is_postgresql
# revision identifiers, used by Alembic.

View File

@@ -8,6 +8,7 @@ Create Date: 2023-04-17 12:03:19.163501
import sqlalchemy as sa
from alembic import op
from utils.database import CustomJSON
# revision identifiers, used by Alembic.

View File

@@ -9,6 +9,7 @@ Create Date: 2023-08-10 22:18:24.012779
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects.postgresql import ENUM
from utils.database import is_postgresql
# revision identifiers, used by Alembic.

View File

@@ -55,46 +55,46 @@ REDIS_URL: Final = yarl.URL.build(
)
# IGDB
IGDB_CLIENT_ID: Final = os.environ.get(
IGDB_CLIENT_ID: Final[str] = os.environ.get(
"IGDB_CLIENT_ID", os.environ.get("CLIENT_ID", "")
).strip()
IGDB_CLIENT_SECRET: Final = os.environ.get(
IGDB_CLIENT_SECRET: Final[str] = os.environ.get(
"IGDB_CLIENT_SECRET", os.environ.get("CLIENT_SECRET", "")
).strip()
# MOBYGAMES
MOBYGAMES_API_KEY: Final = os.environ.get("MOBYGAMES_API_KEY", "").strip()
MOBYGAMES_API_KEY: Final[str] = os.environ.get("MOBYGAMES_API_KEY", "").strip()
# SCREENSCRAPER
SCREENSCRAPER_USER: Final = os.environ.get("SCREENSCRAPER_USER", "")
SCREENSCRAPER_PASSWORD: Final = os.environ.get("SCREENSCRAPER_PASSWORD", "")
SCREENSCRAPER_USER: Final[str] = os.environ.get("SCREENSCRAPER_USER", "")
SCREENSCRAPER_PASSWORD: Final[str] = os.environ.get("SCREENSCRAPER_PASSWORD", "")
# STEAMGRIDDB
STEAMGRIDDB_API_KEY: Final = os.environ.get("STEAMGRIDDB_API_KEY", "").strip()
STEAMGRIDDB_API_KEY: Final[str] = os.environ.get("STEAMGRIDDB_API_KEY", "").strip()
# RETROACHIEVEMENTS
RETROACHIEVEMENTS_API_KEY: Final = os.environ.get("RETROACHIEVEMENTS_API_KEY", "")
REFRESH_RETROACHIEVEMENTS_CACHE_DAYS: Final = int(
RETROACHIEVEMENTS_API_KEY: Final[str] = os.environ.get("RETROACHIEVEMENTS_API_KEY", "")
REFRESH_RETROACHIEVEMENTS_CACHE_DAYS: Final[int] = int(
os.environ.get("REFRESH_RETROACHIEVEMENTS_CACHE_DAYS", 30)
)
# LAUNCHBOX
LAUNCHBOX_API_ENABLED: Final = str_to_bool(
LAUNCHBOX_API_ENABLED: Final[bool] = str_to_bool(
os.environ.get("LAUNCHBOX_API_ENABLED", "false")
)
# PLAYMATCH
PLAYMATCH_API_ENABLED: Final = str_to_bool(
PLAYMATCH_API_ENABLED: Final[bool] = str_to_bool(
os.environ.get("PLAYMATCH_API_ENABLED", "false")
)
# HASHEOUS
HASHEOUS_API_ENABLED: Final = str_to_bool(
HASHEOUS_API_ENABLED: Final[bool] = str_to_bool(
os.environ.get("HASHEOUS_API_ENABLED", "false")
)
# THEGAMESDB
TGDB_API_ENABLED: Final = str_to_bool(os.environ.get("TGDB_API_ENABLED", "false"))
TGDB_API_ENABLED: Final[bool] = str_to_bool(os.environ.get("TGDB_API_ENABLED", "false"))
# FLASHPOINT
FLASHPOINT_API_ENABLED: Final = str_to_bool(
@@ -102,7 +102,9 @@ FLASHPOINT_API_ENABLED: Final = str_to_bool(
)
# AUTH
ROMM_AUTH_SECRET_KEY: Final = os.environ.get("ROMM_AUTH_SECRET_KEY")
ROMM_AUTH_SECRET_KEY: Final[str] = os.environ.get("ROMM_AUTH_SECRET_KEY", "")
if not ROMM_AUTH_SECRET_KEY:
raise ValueError("ROMM_AUTH_SECRET_KEY environment variable is not set!")
SESSION_MAX_AGE_SECONDS: Final = int(
os.environ.get("SESSION_MAX_AGE_SECONDS", 14 * 24 * 60 * 60)
@@ -154,7 +156,21 @@ ENABLE_SCHEDULED_UPDATE_LAUNCHBOX_METADATA: Final = str_to_bool(
)
SCHEDULED_UPDATE_LAUNCHBOX_METADATA_CRON: Final = os.environ.get(
"SCHEDULED_UPDATE_LAUNCHBOX_METADATA_CRON",
"0 5 * * *", # At 5:00 AM every day
"0 4 * * *", # At 4:00 AM every day
)
ENABLE_SCHEDULED_CONVERT_IMAGES_TO_WEBP: Final = str_to_bool(
os.environ.get("ENABLE_SCHEDULED_CONVERT_IMAGES_TO_WEBP", "false")
)
SCHEDULED_CONVERT_IMAGES_TO_WEBP_CRON: Final = os.environ.get(
"SCHEDULED_CONVERT_IMAGES_TO_WEBP_CRON",
"0 4 * * *", # At 4:00 AM every day
)
ENABLE_SCHEDULED_RETROACHIEVEMENTS_PROGRESS_SYNC: Final[bool] = str_to_bool(
os.environ.get("ENABLE_SCHEDULED_RETROACHIEVEMENTS_PROGRESS_SYNC", "false")
)
SCHEDULED_RETROACHIEVEMENTS_PROGRESS_SYNC_CRON: Final[str] = os.environ.get(
"SCHEDULED_RETROACHIEVEMENTS_PROGRESS_SYNC_CRON",
"0 4 * * *", # At 4:00 AM every day
)
# EMULATION

View File

@@ -1,9 +1,12 @@
import json
import sys
from typing import Final
from typing import Final, NotRequired, TypedDict
import pydash
import yaml
from sqlalchemy import URL
from yaml.loader import SafeLoader
from config import (
DB_HOST,
DB_NAME,
@@ -22,14 +25,27 @@ from exceptions.config_exceptions import (
from logger.formatter import BLUE
from logger.formatter import highlight as hl
from logger.logger import log
from sqlalchemy import URL
from yaml.loader import SafeLoader
ROMM_USER_CONFIG_PATH: Final = f"{ROMM_BASE_PATH}/config"
ROMM_USER_CONFIG_FILE: Final = f"{ROMM_USER_CONFIG_PATH}/config.yml"
SQLITE_DB_BASE_PATH: Final = f"{ROMM_BASE_PATH}/database"
class EjsControlsButton(TypedDict):
    # Both fields are optional: a binding may define a keyboard key,
    # a controller button, or both.
    value: NotRequired[str]  # Keyboard key
    value2: NotRequired[str]  # Controller button


class EjsControls(TypedDict):
    # Per-player button maps; keys of each inner dict are button numbers.
    _0: dict[int, EjsControlsButton]  # button_number -> EjsControlsButton
    _1: dict[int, EjsControlsButton]
    _2: dict[int, EjsControlsButton]
    _3: dict[int, EjsControlsButton]


# Per-core EmulatorJS option mapping.
EjsOption = dict[str, str]  # option_name -> option_value
class Config:
EXCLUDED_PLATFORMS: list[str]
EXCLUDED_SINGLE_EXT: list[str]
@@ -42,6 +58,10 @@ class Config:
ROMS_FOLDER_NAME: str
FIRMWARE_FOLDER_NAME: str
HIGH_PRIO_STRUCTURE_PATH: str
EJS_DEBUG: bool
EJS_CACHE_LIMIT: int | None
EJS_SETTINGS: dict[str, EjsOption] # core_name -> EjsOption
EJS_CONTROLS: dict[str, EjsControls] # core_name -> EjsControls
def __init__(self, **entries):
    # Populate the config object straight from keyword arguments so each
    # entry becomes an instance attribute.
    self.__dict__.update(entries)
@@ -149,8 +169,30 @@ class ConfigManager:
FIRMWARE_FOLDER_NAME=pydash.get(
self._raw_config, "filesystem.firmware_folder", "bios"
),
EJS_DEBUG=pydash.get(self._raw_config, "emulatorjs.debug", False),
EJS_CACHE_LIMIT=pydash.get(
self._raw_config, "emulatorjs.cache_limit", None
),
EJS_SETTINGS=pydash.get(self._raw_config, "emulatorjs.settings", {}),
EJS_CONTROLS=self._get_ejs_controls(),
)
def _get_ejs_controls(self) -> dict[str, EjsControls]:
    """Get EJS controls with default player entries for each core"""
    raw_controls = pydash.get(self._raw_config, "emulatorjs.controls", {})
    # Normalize each core's mapping so all four player slots are present,
    # falling back to empty dicts when the config omits a player.
    return {
        core: EjsControls(
            _0=core_controls.get(0, {}),
            _1=core_controls.get(1, {}),
            _2=core_controls.get(2, {}),
            _3=core_controls.get(3, {}),
        )
        for core, core_controls in raw_controls.items()
    }
def _validate_config(self):
"""Validates the config.yml file"""
if not isinstance(self.config.EXCLUDED_PLATFORMS, list):
@@ -231,6 +273,54 @@ class ConfigManager:
)
sys.exit(3)
if not isinstance(self.config.EJS_DEBUG, bool):
log.critical("Invalid config.yml: emulatorjs.debug must be a boolean")
sys.exit(3)
if self.config.EJS_CACHE_LIMIT is not None and not isinstance(
self.config.EJS_CACHE_LIMIT, int
):
log.critical(
"Invalid config.yml: emulatorjs.cache_limit must be an integer"
)
sys.exit(3)
if not isinstance(self.config.EJS_SETTINGS, dict):
log.critical("Invalid config.yml: emulatorjs.settings must be a dictionary")
sys.exit(3)
else:
for core, options in self.config.EJS_SETTINGS.items():
if not isinstance(options, dict):
log.critical(
f"Invalid config.yml: emulatorjs.settings.{core} must be a dictionary"
)
sys.exit(3)
if not isinstance(self.config.EJS_CONTROLS, dict):
log.critical("Invalid config.yml: emulatorjs.controls must be a dictionary")
sys.exit(3)
else:
for core, controls in self.config.EJS_CONTROLS.items():
if not isinstance(controls, dict):
log.critical(
f"Invalid config.yml: emulatorjs.controls.{core} must be a dictionary"
)
sys.exit(3)
for player, buttons in controls.items():
if not isinstance(buttons, dict):
log.critical(
f"Invalid config.yml: emulatorjs.controls.{core}.{player} must be a dictionary"
)
sys.exit(3)
for button, value in buttons.items():
if not isinstance(value, dict):
log.critical(
f"Invalid config.yml: emulatorjs.controls.{core}.{player}.{button} must be a dictionary"
)
sys.exit(3)
def get_config(self) -> Config:
with open(self.config_file) as config_file:
self._raw_config = yaml.load(config_file, Loader=SafeLoader) or {}

View File

@@ -2,6 +2,13 @@ from typing import Any
from authlib.integrations.starlette_client import OAuth
from authlib.oidc.discovery import get_well_known_url
from fastapi import Security
from fastapi.security.http import HTTPBasic
from fastapi.security.oauth2 import OAuth2PasswordBearer
from fastapi.types import DecoratedCallable
from starlette.authentication import requires
from starlette.config import Config
from config import (
OIDC_CLIENT_ID,
OIDC_CLIENT_SECRET,
@@ -11,10 +18,6 @@ from config import (
OIDC_SERVER_APPLICATION_URL,
OIDC_TLS_CACERTFILE,
)
from fastapi import Security
from fastapi.security.http import HTTPBasic
from fastapi.security.oauth2 import OAuth2PasswordBearer
from fastapi.types import DecoratedCallable
from handler.auth.constants import (
EDIT_SCOPES_MAP,
FULL_SCOPES_MAP,
@@ -22,8 +25,6 @@ from handler.auth.constants import (
WRITE_SCOPES_MAP,
Scope,
)
from starlette.authentication import requires
from starlette.config import Config
# Using the internal password flow
oauth2_password_bearer = OAuth2PasswordBearer(

View File

@@ -1,9 +1,10 @@
import functools
from fastapi import HTTPException, status
from sqlalchemy.exc import ProgrammingError
from handler.database.base_handler import sync_session
from logger.logger import log
from sqlalchemy.exc import ProgrammingError
def begin_session(func):

View File

@@ -1,6 +1,10 @@
from datetime import datetime, timedelta, timezone
from typing import Annotated, Final
from fastapi import Body, Depends, HTTPException, Request, status
from fastapi.responses import RedirectResponse
from fastapi.security.http import HTTPBasic
from config import OIDC_ENABLED, OIDC_REDIRECT_URI
from decorators.auth import oauth
from endpoints.forms.identity import OAuth2RequestForm
@@ -11,9 +15,6 @@ from exceptions.auth_exceptions import (
OIDCNotConfiguredException,
UserDisabledException,
)
from fastapi import Body, Depends, HTTPException, Request, status
from fastapi.responses import RedirectResponse
from fastapi.security.http import HTTPBasic
from handler.auth import auth_handler, oauth_handler, oidc_handler
from handler.database import db_user_handler
from logger.formatter import CYAN

View File

@@ -1,5 +1,9 @@
import json
from io import BytesIO
from typing import Annotated
from fastapi import Path as PathVar
from fastapi import Request, UploadFile, status
from config import str_to_bool
from decorators.auth import protected_route
@@ -13,7 +17,6 @@ from exceptions.endpoint_exceptions import (
CollectionNotFoundInDatabaseException,
CollectionPermissionError,
)
from fastapi import Request, UploadFile
from handler.auth.constants import Scope
from handler.database import db_collection_handler
from handler.filesystem import fs_resource_handler
@@ -387,22 +390,18 @@ async def update_smart_collection(
return SmartCollectionSchema.model_validate(smart_collection)
@protected_route(router.delete, "/{id}", [Scope.COLLECTIONS_WRITE])
async def delete_collection(request: Request, id: int) -> None:
"""Delete collections endpoint
Args:
request (Request): Fastapi Request object
{
"collections": List of rom's ids to delete
}
Raises:
HTTPException: Collection not found
"""
@protected_route(
router.delete,
"/{id}",
[Scope.COLLECTIONS_WRITE],
responses={status.HTTP_404_NOT_FOUND: {}},
)
async def delete_collection(
request: Request,
id: Annotated[int, PathVar(description="Collection internal id.", ge=1)],
) -> None:
"""Delete a collection by ID."""
collection = db_collection_handler.get_collection(id)
if not collection:
raise CollectionNotFoundInDatabaseException(id)
@@ -417,17 +416,18 @@ async def delete_collection(request: Request, id: int) -> None:
)
@protected_route(router.delete, "/smart/{id}", [Scope.COLLECTIONS_WRITE])
async def delete_smart_collection(request: Request, id: int) -> None:
"""Delete smart collection endpoint
Args:
request (Request): Fastapi Request object
id (int): Smart collection id
"""
@protected_route(
router.delete,
"/smart/{id}",
[Scope.COLLECTIONS_WRITE],
responses={status.HTTP_404_NOT_FOUND: {}},
)
async def delete_smart_collection(
request: Request,
id: Annotated[int, PathVar(description="Smart collection internal id.", ge=1)],
) -> None:
"""Delete a smart collection by ID."""
smart_collection = db_collection_handler.get_smart_collection(id)
if not smart_collection:
raise CollectionNotFoundInDatabaseException(id)

View File

@@ -1,3 +1,5 @@
from fastapi import HTTPException, Request, status
from config.config_manager import config_manager as cm
from decorators.auth import protected_route
from endpoints.responses.config import ConfigResponse
@@ -5,7 +7,6 @@ from exceptions.config_exceptions import (
ConfigNotReadableException,
ConfigNotWritableException,
)
from fastapi import HTTPException, Request, status
from handler.auth.constants import Scope
from logger.logger import log
from utils.router import APIRouter
@@ -35,6 +36,10 @@ def get_config() -> ConfigResponse:
EXCLUDED_MULTI_PARTS_FILES=cfg.EXCLUDED_MULTI_PARTS_FILES,
PLATFORMS_BINDING=cfg.PLATFORMS_BINDING,
PLATFORMS_VERSIONS=cfg.PLATFORMS_VERSIONS,
EJS_DEBUG=cfg.EJS_DEBUG,
EJS_CACHE_LIMIT=cfg.EJS_CACHE_LIMIT,
EJS_CONTROLS=cfg.EJS_CONTROLS,
EJS_SETTINGS=cfg.EJS_SETTINGS,
)
except ConfigNotReadableException as exc:
log.critical(exc.message)

View File

@@ -1,5 +1,8 @@
from collections.abc import Sequence
from fastapi import Request
from starlette.datastructures import URLPath
from config import DISABLE_DOWNLOAD_ENDPOINT_AUTH, FRONTEND_RESOURCES_PATH
from decorators.auth import protected_route
from endpoints.responses.feeds import (
@@ -13,13 +16,11 @@ from endpoints.responses.feeds import (
WebrcadeFeedItemSchema,
WebrcadeFeedSchema,
)
from fastapi import Request
from handler.auth.constants import Scope
from handler.database import db_platform_handler, db_rom_handler
from handler.metadata import meta_igdb_handler
from handler.metadata.base_handler import SWITCH_PRODUCT_ID_REGEX, SWITCH_TITLEDB_REGEX
from models.rom import Rom
from starlette.datastructures import URLPath
from utils.router import APIRouter
router = APIRouter(

View File

@@ -1,9 +1,12 @@
from typing import Annotated
from fastapi import Body, File, HTTPException, Request, UploadFile, status
from fastapi.responses import FileResponse
from config import DISABLE_DOWNLOAD_ENDPOINT_AUTH
from decorators.auth import protected_route
from endpoints.responses import BulkOperationResponse
from endpoints.responses.firmware import AddFirmwareResponse, FirmwareSchema
from fastapi import File, HTTPException, Request, UploadFile, status
from fastapi.responses import FileResponse
from handler.auth.constants import Scope
from handler.database import db_firmware_handler, db_platform_handler
from handler.filesystem import fs_firmware_handler
@@ -215,46 +218,46 @@ def get_firmware_content(
@protected_route(router.post, "/delete", [Scope.FIRMWARE_WRITE])
async def delete_firmware(
request: Request,
firmware: Annotated[
list[int],
Body(
description="List of firmware ids to delete from database.",
embed=True,
),
],
delete_from_fs: Annotated[
list[int],
Body(
description="List of firmware ids to delete from filesystem.",
default_factory=list,
embed=True,
),
],
) -> BulkOperationResponse:
"""Delete firmware endpoint
Args:
request (Request): Fastapi Request object.
{
"firmware": List of firmware IDs to delete
}
delete_from_fs (bool, optional): Flag to delete rom from filesystem. Defaults to False.
Returns:
BulkOperationResponse: Bulk operation response with details
"""
data: dict = await request.json()
firmware_ids: list = data["firmware"]
delete_from_fs: list = data["delete_from_fs"]
"""Delete firmware."""
successful_items = 0
failed_items = 0
errors = []
for id in firmware_ids:
firmware = db_firmware_handler.get_firmware(id)
if not firmware:
for id in firmware:
fw = db_firmware_handler.get_firmware(id)
if not fw:
failed_items += 1
errors.append(f"Firmware with ID {id} not found")
continue
try:
log.info(f"Deleting {hl(firmware.file_name)} from database")
log.info(f"Deleting {hl(fw.file_name)} from database")
db_firmware_handler.delete_firmware(id)
if id in delete_from_fs:
log.info(f"Deleting {hl(firmware.file_name)} from filesystem")
log.info(f"Deleting {hl(fw.file_name)} from filesystem")
try:
file_path = f"{firmware.file_path}/{firmware.file_name}"
file_path = f"{fw.file_path}/{fw.file_name}"
await fs_firmware_handler.remove_file(file_path=file_path)
except FileNotFoundError:
error = f"Firmware file {hl(firmware.file_name)} not found for platform {hl(firmware.platform.slug)}"
error = f"Firmware file {hl(fw.file_name)} not found for platform {hl(fw.platform.slug)}"
log.error(error)
errors.append(error)
failed_items += 1

View File

@@ -2,24 +2,34 @@ from config import (
DISABLE_EMULATOR_JS,
DISABLE_RUFFLE_RS,
DISABLE_USERPASS_LOGIN,
FLASHPOINT_API_ENABLED,
HASHEOUS_API_ENABLED,
LAUNCHBOX_API_ENABLED,
ENABLE_SCHEDULED_CONVERT_IMAGES_TO_WEBP,
ENABLE_SCHEDULED_RESCAN,
ENABLE_SCHEDULED_UPDATE_LAUNCHBOX_METADATA,
ENABLE_SCHEDULED_UPDATE_SWITCH_TITLEDB,
OIDC_ENABLED,
OIDC_PROVIDER,
PLAYMATCH_API_ENABLED,
TGDB_API_ENABLED,
SCHEDULED_CONVERT_IMAGES_TO_WEBP_CRON,
SCHEDULED_RESCAN_CRON,
SCHEDULED_UPDATE_LAUNCHBOX_METADATA_CRON,
SCHEDULED_UPDATE_SWITCH_TITLEDB_CRON,
UPLOAD_TIMEOUT,
YOUTUBE_BASE_URL,
)
from endpoints.responses.heartbeat import HeartbeatResponse
from handler.database import db_user_handler
from handler.filesystem import fs_platform_handler
from handler.metadata.igdb_handler import IGDB_API_ENABLED
from handler.metadata.moby_handler import MOBY_API_ENABLED
from handler.metadata.ra_handler import RA_API_ENABLED
from handler.metadata.sgdb_handler import STEAMGRIDDB_API_ENABLED
from handler.metadata.ss_handler import SS_API_ENABLED
from handler.metadata import (
meta_hasheous_handler,
meta_igdb_handler,
meta_launchbox_handler,
meta_moby_handler,
meta_playmatch_handler,
meta_ra_handler,
meta_sgdb_handler,
meta_ss_handler,
meta_tgdb_handler,
meta_flashpoint_handler,
)
from utils import get_version
from utils.router import APIRouter
@@ -42,23 +52,26 @@ async def heartbeat() -> HeartbeatResponse:
"SHOW_SETUP_WIZARD": len(db_user_handler.get_admin_users()) == 0,
},
"METADATA_SOURCES": {
"ANY_SOURCE_ENABLED": IGDB_API_ENABLED
or SS_API_ENABLED
or MOBY_API_ENABLED
or RA_API_ENABLED
or LAUNCHBOX_API_ENABLED
or HASHEOUS_API_ENABLED
or TGDB_API_ENABLED,
"IGDB_API_ENABLED": IGDB_API_ENABLED,
"SS_API_ENABLED": SS_API_ENABLED,
"MOBY_API_ENABLED": MOBY_API_ENABLED,
"STEAMGRIDDB_API_ENABLED": STEAMGRIDDB_API_ENABLED,
"RA_API_ENABLED": RA_API_ENABLED,
"LAUNCHBOX_API_ENABLED": LAUNCHBOX_API_ENABLED,
"HASHEOUS_API_ENABLED": HASHEOUS_API_ENABLED,
"PLAYMATCH_API_ENABLED": PLAYMATCH_API_ENABLED,
"TGDB_API_ENABLED": TGDB_API_ENABLED,
"FLASHPOINT_API_ENABLED": FLASHPOINT_API_ENABLED,
"ANY_SOURCE_ENABLED": (
meta_igdb_handler.is_enabled()
or meta_ss_handler.is_enabled()
or meta_moby_handler.is_enabled()
or meta_ra_handler.is_enabled()
or meta_launchbox_handler.is_enabled()
or meta_hasheous_handler.is_enabled()
or meta_tgdb_handler.is_enabled()
or meta_flashpoint_handler.is_enabled()
),
"IGDB_API_ENABLED": meta_igdb_handler.is_enabled(),
"SS_API_ENABLED": meta_ss_handler.is_enabled(),
"MOBY_API_ENABLED": meta_moby_handler.is_enabled(),
"STEAMGRIDDB_API_ENABLED": meta_sgdb_handler.is_enabled(),
"RA_API_ENABLED": meta_ra_handler.is_enabled(),
"LAUNCHBOX_API_ENABLED": meta_launchbox_handler.is_enabled(),
"HASHEOUS_API_ENABLED": meta_hasheous_handler.is_enabled(),
"PLAYMATCH_API_ENABLED": meta_playmatch_handler.is_enabled(),
"TGDB_API_ENABLED": meta_tgdb_handler.is_enabled(),
"FLASHPOINT_API_ENABLED": meta_flashpoint_handler.is_enabled(),
},
"FILESYSTEM": {
"FS_PLATFORMS": await fs_platform_handler.get_platforms(),
@@ -76,4 +89,14 @@ async def heartbeat() -> HeartbeatResponse:
"ENABLED": OIDC_ENABLED,
"PROVIDER": OIDC_PROVIDER,
},
"TASKS": {
"ENABLE_SCHEDULED_RESCAN": ENABLE_SCHEDULED_RESCAN,
"SCHEDULED_RESCAN_CRON": SCHEDULED_RESCAN_CRON,
"ENABLE_SCHEDULED_UPDATE_SWITCH_TITLEDB": ENABLE_SCHEDULED_UPDATE_SWITCH_TITLEDB,
"SCHEDULED_UPDATE_SWITCH_TITLEDB_CRON": SCHEDULED_UPDATE_SWITCH_TITLEDB_CRON,
"ENABLE_SCHEDULED_UPDATE_LAUNCHBOX_METADATA": ENABLE_SCHEDULED_UPDATE_LAUNCHBOX_METADATA,
"SCHEDULED_UPDATE_LAUNCHBOX_METADATA_CRON": SCHEDULED_UPDATE_LAUNCHBOX_METADATA_CRON,
"ENABLE_SCHEDULED_CONVERT_IMAGES_TO_WEBP": ENABLE_SCHEDULED_CONVERT_IMAGES_TO_WEBP,
"SCHEDULED_CONVERT_IMAGES_TO_WEBP_CRON": SCHEDULED_CONVERT_IMAGES_TO_WEBP_CRON,
},
}

View File

@@ -1,13 +1,14 @@
from datetime import datetime, timezone
from typing import Annotated
from fastapi import Body
from fastapi import Path as PathVar
from fastapi import Request, status
from decorators.auth import protected_route
from endpoints.responses.platform import PlatformSchema
from exceptions.endpoint_exceptions import PlatformNotFoundInDatabaseException
from exceptions.fs_exceptions import PlatformAlreadyExistsException
from fastapi import Body
from fastapi import Path as PathVar
from fastapi import Request, status
from handler.auth.constants import Scope
from handler.database import db_platform_handler
from handler.filesystem import fs_platform_handler
@@ -207,7 +208,7 @@ async def delete_platform(
request: Request,
id: Annotated[int, PathVar(description="Platform id.", ge=1)],
) -> None:
"""Delete a platform."""
"""Delete a platform by ID."""
platform = db_platform_handler.get_platform(id)
if not platform:

View File

@@ -1,6 +1,7 @@
from decorators.auth import protected_route
from fastapi import HTTPException, Request
from fastapi.responses import FileResponse
from decorators.auth import protected_route
from handler.auth.constants import Scope
from handler.filesystem import fs_asset_handler
from utils.router import APIRouter

View File

@@ -1,5 +1,7 @@
from typing import TypedDict
from config.config_manager import EjsControls
class ConfigResponse(TypedDict):
EXCLUDED_PLATFORMS: list[str]
@@ -10,3 +12,7 @@ class ConfigResponse(TypedDict):
EXCLUDED_MULTI_PARTS_FILES: list[str]
PLATFORMS_BINDING: dict[str, str]
PLATFORMS_VERSIONS: dict[str, str]
EJS_DEBUG: bool
EJS_CACHE_LIMIT: int | None
EJS_SETTINGS: dict[str, dict[str, str]]
EJS_CONTROLS: dict[str, EjsControls]

View File

@@ -40,6 +40,17 @@ class OIDCDict(TypedDict):
PROVIDER: str
class TasksDict(TypedDict):
ENABLE_SCHEDULED_RESCAN: bool
SCHEDULED_RESCAN_CRON: str
ENABLE_SCHEDULED_UPDATE_SWITCH_TITLEDB: bool
SCHEDULED_UPDATE_SWITCH_TITLEDB_CRON: str
ENABLE_SCHEDULED_UPDATE_LAUNCHBOX_METADATA: bool
SCHEDULED_UPDATE_LAUNCHBOX_METADATA_CRON: str
ENABLE_SCHEDULED_CONVERT_IMAGES_TO_WEBP: bool
SCHEDULED_CONVERT_IMAGES_TO_WEBP_CRON: str
class HeartbeatResponse(TypedDict):
SYSTEM: SystemDict
METADATA_SOURCES: MetadataSourcesDict
@@ -47,3 +58,4 @@ class HeartbeatResponse(TypedDict):
EMULATION: EmulationDict
FRONTEND: FrontendDict
OIDC: OIDCDict
TASKS: TasksDict

View File

@@ -1,8 +1,9 @@
from datetime import datetime
from models.platform import DEFAULT_COVER_ASPECT_RATIO
from pydantic import Field, computed_field, field_validator
from models.platform import DEFAULT_COVER_ASPECT_RATIO
from .base import BaseModel
from .firmware import FirmwareSchema

View File

@@ -4,18 +4,19 @@ import re
from datetime import datetime, timezone
from typing import NotRequired, TypedDict, get_type_hints
from endpoints.responses.assets import SaveSchema, ScreenshotSchema, StateSchema
from fastapi import Request
from handler.metadata.flashpoint_handler import FlashpointMetadata
from pydantic import computed_field, field_validator
from endpoints.responses.assets import SaveSchema, ScreenshotSchema, StateSchema
from handler.metadata.hasheous_handler import HasheousMetadata
from handler.metadata.igdb_handler import IGDBMetadata
from handler.metadata.launchbox_handler import LaunchboxMetadata
from handler.metadata.moby_handler import MobyMetadata
from handler.metadata.ra_handler import RAMetadata
from handler.metadata.ss_handler import SSMetadata
from handler.metadata.flashpoint_handler import FlashpointMetadata
from models.collection import Collection
from models.rom import Rom, RomFileCategory, RomUserStatus
from pydantic import computed_field, field_validator
from .base import BaseModel

View File

@@ -1,3 +1,5 @@
from handler.metadata.sgdb_handler import SGDBResource
from .base import BaseModel
@@ -23,4 +25,4 @@ class SearchRomSchema(BaseModel):
class SearchCoverSchema(BaseModel):
name: str
resources: list
resources: list[SGDBResource]

View File

@@ -8,22 +8,6 @@ from urllib.parse import quote
from zipfile import ZIP_DEFLATED, ZIP_STORED, ZipFile, ZipInfo
from anyio import Path, open_file
from config import (
DEV_MODE,
DISABLE_DOWNLOAD_ENDPOINT_AUTH,
LIBRARY_BASE_PATH,
str_to_bool,
)
from decorators.auth import protected_route
from endpoints.responses import BulkOperationResponse
from endpoints.responses.rom import (
DetailedRomSchema,
RomFileSchema,
RomUserSchema,
SimpleRomSchema,
)
from exceptions.endpoint_exceptions import RomNotFoundInDatabaseException
from exceptions.fs_exceptions import RomAlreadyExistsException
from fastapi import (
Body,
File,
@@ -40,6 +24,28 @@ from fastapi import (
from fastapi.responses import Response
from fastapi_pagination.ext.sqlalchemy import paginate
from fastapi_pagination.limit_offset import LimitOffsetPage, LimitOffsetParams
from pydantic import BaseModel
from starlette.requests import ClientDisconnect
from starlette.responses import FileResponse
from streaming_form_data import StreamingFormDataParser
from streaming_form_data.targets import FileTarget, NullTarget
from config import (
DEV_MODE,
DISABLE_DOWNLOAD_ENDPOINT_AUTH,
LIBRARY_BASE_PATH,
str_to_bool,
)
from decorators.auth import protected_route
from endpoints.responses import BulkOperationResponse
from endpoints.responses.rom import (
DetailedRomSchema,
RomFileSchema,
RomUserSchema,
SimpleRomSchema,
)
from exceptions.endpoint_exceptions import RomNotFoundInDatabaseException
from exceptions.fs_exceptions import RomAlreadyExistsException
from handler.auth.constants import Scope
from handler.database import db_platform_handler, db_rom_handler
from handler.database.base_handler import sync_session
@@ -56,11 +62,6 @@ from logger.formatter import BLUE
from logger.formatter import highlight as hl
from logger.logger import log
from models.rom import RomFile
from pydantic import BaseModel
from starlette.requests import ClientDisconnect
from starlette.responses import FileResponse
from streaming_form_data import StreamingFormDataParser
from streaming_form_data.targets import FileTarget, NullTarget
from utils.filesystem import sanitize_filename
from utils.hashing import crc32_to_hex
from utils.nginx import FileRedirectResponse, ZipContentLine, ZipResponse
@@ -542,7 +543,7 @@ async def get_rom_content(
filename=f.file_name_for_download(rom, hidden_folder),
)
content_lines = [await create_zip_content(f, "/library-zip") for f in files]
content_lines = [await create_zip_content(f, "/library") for f in files]
if not rom.has_m3u_file():
m3u_encoded_content = "\n".join(
@@ -635,6 +636,29 @@ async def update_rom(
"flashpoint_id": data.get("flashpoint_id", rom.flashpoint_id),
}
if (
cleaned_data.get("flashpoint_id", "")
and cleaned_data.get("flashpoint_id", "") != rom.flashpoint_id
):
flashpoint_rom = await meta_flashpoint_handler.get_rom_by_id(
cleaned_data["flashpoint_id"]
)
cleaned_data.update(flashpoint_rom)
if (
cleaned_data.get("launchbox_id", "")
and int(cleaned_data.get("launchbox_id", "")) != rom.launchbox_id
):
launchbox_rom = await meta_launchbox_handler.get_rom_by_id(
cleaned_data["launchbox_id"]
)
cleaned_data.update(launchbox_rom)
path_screenshots = await fs_resource_handler.get_rom_screenshots(
rom=rom,
url_screenshots=cleaned_data.get("url_screenshots", []),
)
cleaned_data.update({"path_screenshots": path_screenshots})
if (
cleaned_data.get("moby_id", "")
and int(cleaned_data.get("moby_id", "")) != rom.moby_id
@@ -673,29 +697,6 @@ async def update_rom(
)
cleaned_data.update({"path_screenshots": path_screenshots})
if (
cleaned_data.get("launchbox_id", "")
and int(cleaned_data.get("launchbox_id", "")) != rom.launchbox_id
):
igdb_rom = await meta_launchbox_handler.get_rom_by_id(
cleaned_data["launchbox_id"]
)
cleaned_data.update(igdb_rom)
path_screenshots = await fs_resource_handler.get_rom_screenshots(
rom=rom,
url_screenshots=cleaned_data.get("url_screenshots", []),
)
cleaned_data.update({"path_screenshots": path_screenshots})
if (
cleaned_data.get("flashpoint_id", "")
and cleaned_data.get("flashpoint_id", "") != rom.flashpoint_id
):
flashpoint_rom = await meta_flashpoint_handler.get_rom_by_id(
cleaned_data["flashpoint_id"]
)
cleaned_data.update(flashpoint_rom)
cleaned_data.update(
{
"name": data.get("name", rom.name),
@@ -704,6 +705,7 @@ async def update_rom(
)
new_fs_name = str(data.get("fs_name") or rom.fs_name)
new_fs_name = sanitize_filename(new_fs_name)
cleaned_data.update(
{
"fs_name": new_fs_name,
@@ -777,7 +779,6 @@ async def update_rom(
should_update_fs = new_fs_name != rom.fs_name
if should_update_fs:
try:
new_fs_name = sanitize_filename(new_fs_name)
await fs_rom_handler.rename_fs_rom(
old_name=rom.fs_name,
new_name=new_fs_name,
@@ -884,13 +885,17 @@ async def delete_roms(
request: Request,
roms: Annotated[
list[int],
Body(description="List of rom ids to delete from database."),
Body(
description="List of rom ids to delete from database.",
embed=True,
),
],
delete_from_fs: Annotated[
list[int],
Body(
description="List of rom ids to delete from filesystem.",
default_factory=list,
embed=True,
),
],
) -> BulkOperationResponse:

View File

@@ -1,9 +1,11 @@
from datetime import datetime, timezone
from typing import Annotated
from fastapi import Body, HTTPException, Request, UploadFile, status
from decorators.auth import protected_route
from endpoints.responses.assets import SaveSchema
from exceptions.endpoint_exceptions import RomNotFoundInDatabaseException
from fastapi import HTTPException, Request, UploadFile, status
from handler.auth.constants import Scope
from handler.database import db_rom_handler, db_save_handler, db_screenshot_handler
from handler.filesystem import fs_asset_handler
@@ -222,17 +224,32 @@ async def update_save(request: Request, id: int) -> SaveSchema:
return SaveSchema.model_validate(db_save)
@protected_route(router.post, "/delete", [Scope.ASSETS_WRITE])
async def delete_saves(request: Request) -> list[int]:
data: dict = await request.json()
save_ids: list = data["saves"]
if not save_ids:
@protected_route(
router.post,
"/delete",
[Scope.ASSETS_WRITE],
responses={
status.HTTP_400_BAD_REQUEST: {},
status.HTTP_404_NOT_FOUND: {},
},
)
async def delete_saves(
request: Request,
saves: Annotated[
list[int],
Body(
description="List of save ids to delete from database.",
embed=True,
),
],
) -> list[int]:
"""Delete saves."""
if not saves:
error = "No saves were provided"
log.error(error)
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=error)
for save_id in save_ids:
for save_id in saves:
save = db_save_handler.get_save(user_id=request.user.id, id=save_id)
if not save:
error = f"Save with ID {save_id} not found"
@@ -261,4 +278,4 @@ async def delete_saves(request: Request) -> list[int]:
error = f"Screenshot file {hl(save.screenshot.file_name)} not found for save {hl(save.file_name)}[{hl(save.rom.platform_slug)}]"
log.error(error)
return save_ids
return saves

View File

@@ -1,7 +1,8 @@
from fastapi import HTTPException, Request, UploadFile, status
from decorators.auth import protected_route
from endpoints.responses.assets import ScreenshotSchema
from exceptions.endpoint_exceptions import RomNotFoundInDatabaseException
from fastapi import HTTPException, Request, UploadFile, status
from handler.auth.constants import Scope
from handler.database import db_rom_handler, db_screenshot_handler
from handler.filesystem import fs_asset_handler

View File

@@ -1,9 +1,10 @@
import asyncio
from fastapi import HTTPException, Request, status
from decorators.auth import protected_route
from endpoints.responses.search import SearchCoverSchema, SearchRomSchema
from exceptions.endpoint_exceptions import SGDBInvalidAPIKeyException
from fastapi import HTTPException, Request, status
from handler.auth.constants import Scope
from handler.database import db_rom_handler
from handler.metadata import (
@@ -13,14 +14,11 @@ from handler.metadata import (
meta_sgdb_handler,
meta_ss_handler,
)
from handler.metadata.flashpoint_handler import (
FLASHPOINT_API_ENABLED,
FlashpointRom,
)
from handler.metadata.igdb_handler import IGDB_API_ENABLED, IGDBRom
from handler.metadata.moby_handler import MOBY_API_ENABLED, MobyGamesRom
from handler.metadata.sgdb_handler import STEAMGRIDDB_API_ENABLED, SGDBRom
from handler.metadata.ss_handler import SS_API_ENABLED, SSRom
from handler.metadata.igdb_handler import IGDBRom
from handler.metadata.moby_handler import MobyGamesRom
from handler.metadata.sgdb_handler import SGDBRom
from handler.metadata.ss_handler import SSRom
from handler.metadata.flashpoint_handler import FlashpointRom
from handler.scan_handler import get_main_platform_igdb_id
from logger.formatter import BLUE, CYAN
from logger.formatter import highlight as hl
@@ -56,10 +54,10 @@ async def search_rom(
"""
if (
not IGDB_API_ENABLED
and not SS_API_ENABLED
and not MOBY_API_ENABLED
and not FLASHPOINT_API_ENABLED
not meta_igdb_handler.is_enabled()
and not meta_ss_handler.is_enabled()
and not meta_moby_handler.is_enabled()
and not meta_flashpoint_handler.is_enabled()
):
log.error("Search error: No metadata providers enabled")
raise HTTPException(
@@ -216,7 +214,7 @@ async def search_cover(
search_term: str = "",
) -> list[SearchCoverSchema]:
if not STEAMGRIDDB_API_ENABLED:
if not meta_sgdb_handler.is_enabled():
log.error("Search error: No SteamGridDB enabled")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,

View File

@@ -5,6 +5,9 @@ from itertools import batched
from typing import Any, Final
import socketio # type: ignore
from rq import Worker
from rq.job import Job
from config import DEV_MODE, REDIS_URL, SCAN_TIMEOUT
from endpoints.responses.platform import PlatformSchema
from endpoints.responses.rom import SimpleRomSchema
@@ -36,8 +39,6 @@ from logger.logger import log
from models.firmware import Firmware
from models.platform import Platform
from models.rom import Rom, RomFile
from rq import Worker
from rq.job import Job
from utils import emoji
from utils.context import initialize_context

View File

@@ -1,9 +1,11 @@
from datetime import datetime, timezone
from typing import Annotated
from fastapi import Body, HTTPException, Request, UploadFile, status
from decorators.auth import protected_route
from endpoints.responses.assets import StateSchema
from exceptions.endpoint_exceptions import RomNotFoundInDatabaseException
from fastapi import HTTPException, Request, UploadFile, status
from handler.auth.constants import Scope
from handler.database import db_rom_handler, db_screenshot_handler, db_state_handler
from handler.filesystem import fs_asset_handler
@@ -226,17 +228,32 @@ async def update_state(request: Request, id: int) -> StateSchema:
return StateSchema.model_validate(db_state)
@protected_route(router.post, "/delete", [Scope.ASSETS_WRITE])
async def delete_states(request: Request) -> list[int]:
data: dict = await request.json()
state_ids: list = data["states"]
if not state_ids:
@protected_route(
router.post,
"/delete",
[Scope.ASSETS_WRITE],
responses={
status.HTTP_400_BAD_REQUEST: {},
status.HTTP_404_NOT_FOUND: {},
},
)
async def delete_states(
request: Request,
states: Annotated[
list[int],
Body(
description="List of states ids to delete from database.",
embed=True,
),
],
) -> list[int]:
"""Delete states."""
if not states:
error = "No states were provided"
log.error(error)
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=error)
for state_id in state_ids:
for state_id in states:
state = db_state_handler.get_state(user_id=request.user.id, id=state_id)
if not state:
error = f"State with ID {state_id} not found"
@@ -265,4 +282,4 @@ async def delete_states(request: Request) -> list[int]:
error = f"Screenshot file {hl(state.screenshot.file_name)} not found for state {hl(state.file_name)}[{hl(state.rom.platform_slug)}]"
log.error(error)
return state_ids
return states

View File

@@ -1,5 +1,8 @@
from datetime import datetime, timezone
from fastapi import HTTPException, Request
from rq.job import Job
from config import (
ENABLE_RESCAN_ON_FILESYSTEM_CHANGE,
RESCAN_ON_FILESYSTEM_CHANGE_DELAY,
@@ -8,11 +11,10 @@ from config import (
from decorators.auth import protected_route
from endpoints.responses import TaskExecutionResponse, TaskStatusResponse
from endpoints.responses.tasks import GroupedTasksDict, TaskInfo
from fastapi import HTTPException, Request
from handler.auth.constants import Scope
from handler.redis_handler import low_prio_queue
from rq.job import Job
from tasks.manual.cleanup_orphaned_resources import cleanup_orphaned_resources_task
from tasks.scheduled.convert_images_to_webp import convert_images_to_webp_task
from tasks.scheduled.scan_library import scan_library_task
from tasks.scheduled.update_launchbox_metadata import update_launchbox_metadata_task
from tasks.scheduled.update_switch_titledb import update_switch_titledb_task
@@ -28,6 +30,7 @@ scheduled_tasks: dict[str, Task] = {
"scan_library": scan_library_task,
"update_launchbox_metadata": update_launchbox_metadata_task,
"update_switch_titledb": update_switch_titledb_task,
"convert_images_to_webp": convert_images_to_webp_task,
}
manual_tasks: dict[str, Task] = {
@@ -54,7 +57,7 @@ async def list_tasks(request: Request) -> GroupedTasksDict:
Args:
request (Request): FastAPI Request object
Returns:
Dictionary with tasks grouped by their type (scheduled, manual, watcher)
GroupedTasksDict: Dictionary with tasks grouped by their type (scheduled, manual, watcher)
"""
# Initialize the grouped tasks dictionary
grouped_tasks: GroupedTasksDict = {

View File

@@ -1,11 +1,12 @@
from typing import Annotated, Any, cast
from fastapi import Body, Form, HTTPException
from fastapi import Path as PathVar
from fastapi import Request, status
from decorators.auth import protected_route
from endpoints.forms.identity import UserForm
from endpoints.responses.identity import InviteLinkSchema, UserSchema
from fastapi import Body, Form, HTTPException
from fastapi import Path as PathVar
from fastapi import Request, status
from handler.auth import auth_handler
from handler.auth.constants import Scope
from handler.database import db_user_handler
@@ -335,13 +336,20 @@ async def update_user(
return UserSchema.model_validate(db_user)
@protected_route(router.delete, "/{id}", [Scope.USERS_WRITE])
async def delete_user(request: Request, id: int) -> None:
"""Delete user endpoint
Args:
request (Request): Fastapi Request object
user_id (int): User internal id
@protected_route(
router.delete,
"/{id}",
[Scope.USERS_WRITE],
responses={
status.HTTP_400_BAD_REQUEST: {},
status.HTTP_404_NOT_FOUND: {},
},
)
async def delete_user(
request: Request,
id: Annotated[int, PathVar(description="User internal id.", ge=1)],
) -> None:
"""Delete a user by ID.
Raises:
HTTPException: User is not found in database

View File

@@ -1,4 +1,5 @@
from fastapi import HTTPException, status
from logger.logger import log

View File

@@ -2,20 +2,21 @@ import uuid
from datetime import datetime, timedelta, timezone
from typing import Any
from config import OIDC_ENABLED, ROMM_AUTH_SECRET_KEY, ROMM_BASE_URL
from decorators.auth import oauth
from exceptions.auth_exceptions import OAuthCredentialsException, UserDisabledException
from fastapi import HTTPException, status
from handler.auth.constants import ALGORITHM, DEFAULT_OAUTH_TOKEN_EXPIRY, TokenPurpose
from handler.redis_handler import redis_client
from joserfc import jwt
from joserfc.errors import BadSignatureError, DecodeError
from joserfc.jwk import OctKey
from passlib.context import CryptContext
from starlette.requests import HTTPConnection
from config import OIDC_ENABLED, ROMM_AUTH_SECRET_KEY, ROMM_BASE_URL
from decorators.auth import oauth
from exceptions.auth_exceptions import OAuthCredentialsException, UserDisabledException
from handler.auth.constants import ALGORITHM, DEFAULT_OAUTH_TOKEN_EXPIRY, TokenPurpose
from handler.redis_handler import redis_client
from logger.formatter import CYAN
from logger.formatter import highlight as hl
from logger.logger import log
from passlib.context import CryptContext
from starlette.requests import HTTPConnection
class AuthHandler:

View File

@@ -1,10 +1,11 @@
from config import KIOSK_MODE
from fastapi.security.http import HTTPBasic
from handler.auth import auth_handler, oauth_handler
from models.user import User
from starlette.authentication import AuthCredentials, AuthenticationBackend
from starlette.requests import HTTPConnection
from config import KIOSK_MODE
from handler.auth import auth_handler, oauth_handler
from models.user import User
from .constants import READ_SCOPES

View File

@@ -1,7 +1,6 @@
import time
from collections import namedtuple
from config import SESSION_MAX_AGE_SECONDS
from joserfc import jwt
from joserfc.errors import BadSignatureError
from joserfc.jwk import OctKey
@@ -10,6 +9,8 @@ from starlette.requests import HTTPConnection, Request
from starlette.types import ASGIApp, Message, Receive, Scope, Send
from starlette_csrf.middleware import CSRFMiddleware
from config import SESSION_MAX_AGE_SECONDS
class CustomCSRFMiddleware(CSRFMiddleware):
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:

View File

@@ -1,11 +1,12 @@
import logging
import time
from config import DEV_SQL_ECHO
from config.config_manager import ConfigManager
from sqlalchemy import create_engine, event
from sqlalchemy.orm import sessionmaker
from config import DEV_SQL_ECHO
from config.config_manager import ConfigManager
sync_engine = create_engine(
ConfigManager.get_db_engine(), pool_pre_ping=True, echo=False
)

View File

@@ -2,6 +2,9 @@ import functools
from collections.abc import Sequence
from typing import Any
from sqlalchemy import delete, insert, literal, or_, select, update
from sqlalchemy.orm import Query, Session, noload, selectinload
from decorators.database import begin_session
from models.collection import (
Collection,
@@ -10,8 +13,6 @@ from models.collection import (
VirtualCollection,
)
from models.rom import Rom
from sqlalchemy import delete, insert, literal, or_, select, update
from sqlalchemy.orm import Query, Session, noload, selectinload
from .base_handler import DBBaseHandler

View File

@@ -1,9 +1,10 @@
from collections.abc import Sequence
from sqlalchemy import and_, delete, select, update
from sqlalchemy.orm import Session
from decorators.database import begin_session
from models.firmware import Firmware
from sqlalchemy import and_, delete, select, update
from sqlalchemy.orm import Session
from .base_handler import DBBaseHandler

View File

@@ -1,11 +1,12 @@
import functools
from collections.abc import Sequence
from sqlalchemy import delete, or_, select, update
from sqlalchemy.orm import Query, Session, selectinload
from decorators.database import begin_session
from models.platform import Platform
from models.rom import Rom
from sqlalchemy import delete, or_, select, update
from sqlalchemy.orm import Query, Session, selectinload
from .base_handler import DBBaseHandler

View File

@@ -2,18 +2,13 @@ import functools
from collections.abc import Iterable, Sequence
from typing import Any
from config import ROMM_DB_DRIVER
from decorators.database import begin_session
from handler.metadata.base_handler import UniversalPlatformSlug as UPS
from models.assets import Save, Screenshot, State
from models.platform import Platform
from models.rom import Rom, RomFile, RomMetadata, RomUser
from sqlalchemy import (
Integer,
Row,
String,
Text,
and_,
case,
cast,
delete,
false,
@@ -26,6 +21,15 @@ from sqlalchemy import (
update,
)
from sqlalchemy.orm import Query, Session, joinedload, noload, selectinload
from sqlalchemy.sql.elements import KeyedColumnElement
from config import ROMM_DB_DRIVER
from decorators.database import begin_session
from handler.metadata.base_handler import UniversalPlatformSlug as UPS
from models.assets import Save, Screenshot, State
from models.platform import Platform
from models.rom import Rom, RomFile, RomMetadata, RomUser
from utils.database import json_array_contains_value
from .base_handler import DBBaseHandler
@@ -80,6 +84,23 @@ EJS_SUPPORTED_PLATFORMS = [
STRIP_ARTICLES_REGEX = r"^(the|a|an)\s+"
def _create_metadata_id_case(
prefix: str, id_column: KeyedColumnElement, platform_id_column: KeyedColumnElement
):
return case(
(
id_column.isnot(None),
func.concat(
f"{prefix}-",
platform_id_column,
"-",
id_column,
),
),
else_=None,
)
def with_details(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
@@ -287,75 +308,30 @@ class DBRomsHandler(DBBaseHandler):
or_(*(Rom.hasheous_metadata[key].as_boolean() for key in keys_to_check))
)
def filter_by_genre(self, query: Query, selected_genre: str):
if ROMM_DB_DRIVER == "postgresql":
return query.filter(
text("genres @> (:genre)::jsonb").bindparams(
genre=f'["{selected_genre}"]'
)
)
else:
return query.filter(
text("JSON_OVERLAPS(genres, JSON_ARRAY(:genre))").bindparams(
genre=selected_genre
)
)
def filter_by_genre(self, query: Query, session: Session, value: str) -> Query:
return query.filter(
json_array_contains_value(RomMetadata.genres, value, session=session)
)
def filter_by_franchise(self, query: Query, selected_franchise: str):
if ROMM_DB_DRIVER == "postgresql":
return query.filter(
text("franchises @> (:franchise)::jsonb").bindparams(
franchise=f'["{selected_franchise}"]'
)
)
else:
return query.filter(
text("JSON_OVERLAPS(franchises, JSON_ARRAY(:franchise))").bindparams(
franchise=selected_franchise
)
)
def filter_by_franchise(self, query: Query, session: Session, value: str) -> Query:
return query.filter(
json_array_contains_value(RomMetadata.franchises, value, session=session)
)
def filter_by_collection(self, query: Query, selected_collection: str):
if ROMM_DB_DRIVER == "postgresql":
return query.filter(
text("collections @> (:collection)::jsonb").bindparams(
collection=f'["{selected_collection}"]'
)
)
else:
return query.filter(
text("JSON_OVERLAPS(collections, JSON_ARRAY(:collection))").bindparams(
collection=selected_collection
)
)
def filter_by_collection(self, query: Query, session: Session, value: str) -> Query:
return query.filter(
json_array_contains_value(RomMetadata.collections, value, session=session)
)
def filter_by_company(self, query: Query, selected_company: str):
if ROMM_DB_DRIVER == "postgresql":
return query.filter(
text("companies @> (:company)::jsonb").bindparams(
company=f'["{selected_company}"]'
)
)
else:
return query.filter(
text("JSON_OVERLAPS(companies, JSON_ARRAY(:company))").bindparams(
company=selected_company
)
)
def filter_by_company(self, query: Query, session: Session, value: str) -> Query:
return query.filter(
json_array_contains_value(RomMetadata.companies, value, session=session)
)
def filter_by_age_rating(self, query: Query, selected_age_rating: str):
if ROMM_DB_DRIVER == "postgresql":
return query.filter(
text("age_ratings @> (:age_rating)::jsonb").bindparams(
age_rating=f'["{selected_age_rating}"]'
)
)
else:
return query.filter(
text("JSON_OVERLAPS(age_ratings, JSON_ARRAY(:age_rating))").bindparams(
age_rating=selected_age_rating
)
)
def filter_by_age_rating(self, query: Query, session: Session, value: str) -> Query:
return query.filter(
json_array_contains_value(RomMetadata.age_ratings, value, session=session)
)
def filter_by_status(self, query: Query, selected_status: str):
status_filter = RomUser.status == selected_status
@@ -371,33 +347,15 @@ class DBRomsHandler(DBBaseHandler):
return query.filter(status_filter, RomUser.hidden.is_(False))
def filter_by_region(self, query: Query, selected_region: str):
if ROMM_DB_DRIVER == "postgresql":
return query.filter(
text("regions @> (:region)::jsonb").bindparams(
region=f'["{selected_region}"]'
)
)
else:
return query.filter(
text("JSON_OVERLAPS(regions, JSON_ARRAY(:region))").bindparams(
region=selected_region
)
)
def filter_by_region(self, query: Query, session: Session, value: str) -> Query:
return query.filter(
json_array_contains_value(Rom.regions, value, session=session)
)
def filter_by_language(self, query: Query, selected_language: str):
if ROMM_DB_DRIVER == "postgresql":
return query.filter(
text("languages @> (:language)::jsonb").bindparams(
language=f'["{selected_language}"]'
)
)
else:
return query.filter(
text("JSON_OVERLAPS(languages, JSON_ARRAY(:language))").bindparams(
language=selected_language
)
)
def filter_by_language(self, query: Query, session: Session, value: str) -> Query:
return query.filter(
json_array_contains_value(Rom.languages, value, session=session)
)
@begin_session
def filter_roms(
@@ -508,60 +466,47 @@ class DBRomsHandler(DBBaseHandler):
func.row_number()
.over(
partition_by=func.coalesce(
func.concat(
"igdb-",
base_subquery.c.platform_id,
"-",
_create_metadata_id_case(
"igdb",
base_subquery.c.igdb_id,
),
func.concat(
"ss-",
base_subquery.c.platform_id,
"-",
base_subquery.c.ss_id,
),
func.concat(
"moby-",
base_subquery.c.platform_id,
"-",
_create_metadata_id_case(
"ss", base_subquery.c.ss_id, base_subquery.c.platform_id
),
_create_metadata_id_case(
"moby",
base_subquery.c.moby_id,
),
func.concat(
"ra-",
base_subquery.c.platform_id,
"-",
base_subquery.c.ra_id,
),
func.concat(
"hasheous-",
base_subquery.c.platform_id,
"-",
_create_metadata_id_case(
"ra", base_subquery.c.ra_id, base_subquery.c.platform_id
),
_create_metadata_id_case(
"hasheous",
base_subquery.c.hasheous_id,
),
func.concat(
"launchbox-",
base_subquery.c.platform_id,
"-",
),
_create_metadata_id_case(
"launchbox",
base_subquery.c.launchbox_id,
),
func.concat(
"tgdb-",
base_subquery.c.platform_id,
"-",
),
_create_metadata_id_case(
"tgdb",
base_subquery.c.tgdb_id,
),
func.concat(
"flashpoint-",
base_subquery.c.platform_id,
"-",
base_subquery.c.flashpoint_id,
),
func.concat(
_create_metadata_id_case(
"romm-",
base_subquery.c.platform_id,
"-",
base_subquery.c.id,
),
_create_metadata_id_case(
"flashpoint",
base_subquery.c.flashpoint_id,
base_subquery.c.platform_id,
),
),
order_by=[
is_main_sibling_order,
@@ -592,25 +537,29 @@ class DBRomsHandler(DBBaseHandler):
query = query.outerjoin(RomMetadata)
if selected_genre:
query = self.filter_by_genre(query, selected_genre)
query = self.filter_by_genre(query, session=session, value=selected_genre)
if selected_franchise:
query = self.filter_by_franchise(query, selected_franchise)
query = self.filter_by_franchise(
query, session=session, value=selected_franchise
)
if selected_collection:
query = self.filter_by_collection(query, selected_collection)
query = self.filter_by_collection(
query, session=session, value=selected_collection
)
if selected_company:
query = self.filter_by_company(query, selected_company)
query = self.filter_by_company(
query, session=session, value=selected_company
)
if selected_age_rating:
query = self.filter_by_age_rating(query, selected_age_rating)
query = self.filter_by_age_rating(
query, session=session, value=selected_age_rating
)
if selected_region:
query = self.filter_by_region(query, selected_region)
query = self.filter_by_region(query, session=session, value=selected_region)
if selected_language:
query = self.filter_by_language(query, selected_language)
query = self.filter_by_language(
query, session=session, value=selected_language
)
# The RomUser table is already joined if user_id is set
if selected_status and user_id:

View File

@@ -1,9 +1,10 @@
from collections.abc import Sequence
from sqlalchemy import and_, delete, select, update
from sqlalchemy.orm import Session
from decorators.database import begin_session
from models.assets import Save
from sqlalchemy import and_, delete, select, update
from sqlalchemy.orm import Session
from .base_handler import DBBaseHandler

View File

@@ -1,12 +1,13 @@
from collections.abc import Sequence
from functools import partial
from decorators.database import begin_session
from models.assets import Screenshot
from sqlalchemy import delete, select, update
from sqlalchemy.orm import Session
from sqlalchemy.sql import Delete, Select, Update
from decorators.database import begin_session
from models.assets import Screenshot
from .base_handler import DBBaseHandler

View File

@@ -1,9 +1,10 @@
from collections.abc import Sequence
from sqlalchemy import and_, delete, select, update
from sqlalchemy.orm import Session
from decorators.database import begin_session
from models.assets import State
from sqlalchemy import and_, delete, select, update
from sqlalchemy.orm import Session
from .base_handler import DBBaseHandler

View File

@@ -1,8 +1,9 @@
from sqlalchemy import distinct, func, select
from sqlalchemy.orm import Session
from decorators.database import begin_session
from models.assets import Save, Screenshot, State
from models.rom import Rom, RomFile
from sqlalchemy import distinct, func, select
from sqlalchemy.orm import Session
from .base_handler import DBBaseHandler

View File

@@ -1,14 +1,42 @@
from collections.abc import Sequence
from sqlalchemy import and_, delete, func, not_, select, update
from sqlalchemy.orm import Session
from sqlalchemy.sql import Delete, Select, Update
from decorators.database import begin_session
from models.user import Role, User
from sqlalchemy import delete, func, select, update
from sqlalchemy.orm import Session
from .base_handler import DBBaseHandler
class DBUsersHandler(DBBaseHandler):
def filter[QueryT: Select[tuple[User]] | Update | Delete](
self,
query: QueryT,
*,
usernames: Sequence[str] = (),
emails: Sequence[str] = (),
roles: Sequence[Role] = (),
has_ra_username: bool | None = None,
) -> QueryT:
if usernames:
query = query.filter(
func.lower(User.username).in_([u.lower() for u in usernames])
)
if emails:
query = query.filter(
func.lower(User.email).in_([e.lower() for e in emails])
)
if roles:
query = query.filter(User.role.in_(roles))
if has_ra_username is not None:
predicate = and_(User.ra_username != "", User.ra_username.isnot(None))
if not has_ra_username:
predicate = not_(predicate)
query = query.filter(predicate)
return query
@begin_session
def add_user(self, user: User, session: Session = None) -> User:
return session.merge(user)
@@ -17,15 +45,13 @@ class DBUsersHandler(DBBaseHandler):
def get_user_by_username(
self, username: str, session: Session = None
) -> User | None:
return session.scalar(
select(User).filter(func.lower(User.username) == username.lower()).limit(1)
)
query = self.filter(select(User), usernames=[username])
return session.scalar(query.limit(1))
@begin_session
def get_user_by_email(self, email: str, session: Session = None) -> User | None:
return session.scalar(
select(User).filter(func.lower(User.email) == email.lower()).limit(1)
)
query = self.filter(select(User), emails=[email])
return session.scalar(query.limit(1))
@begin_session
def get_user(self, id: int, session: Session = None) -> User | None:
@@ -42,8 +68,23 @@ class DBUsersHandler(DBBaseHandler):
return session.query(User).filter_by(id=id).one()
@begin_session
def get_users(self, session: Session = None) -> Sequence[User]:
return session.scalars(select(User)).all()
def get_users(
self,
*,
usernames: Sequence[str] = (),
emails: Sequence[str] = (),
roles: Sequence[Role] = (),
has_ra_username: bool | None = None,
session: Session = None,
) -> Sequence[User]:
query = self.filter(
select(User),
usernames=usernames,
emails=emails,
roles=roles,
has_ra_username=has_ra_username,
)
return session.scalars(query).all()
@begin_session
def delete_user(self, id: int, session: Session = None):
@@ -55,4 +96,5 @@ class DBUsersHandler(DBBaseHandler):
@begin_session
def get_admin_users(self, session: Session = None) -> Sequence[User]:
return session.scalars(select(User).filter_by(role=Role.ADMIN)).all()
query = self.filter(select(User), roles=[Role.ADMIN])
return session.scalars(query).all()

View File

@@ -11,31 +11,63 @@ from tempfile import SpooledTemporaryFile
from typing import BinaryIO
from anyio import open_file
from starlette.datastructures import UploadFile
from config.config_manager import config_manager as cm
from models.base import FILE_NAME_MAX_LENGTH
from starlette.datastructures import UploadFile
from utils.filesystem import iter_directories, iter_files
TAG_REGEX = re.compile(r"\(([^)]+)\)|\[([^]]+)\]")
EXTENSION_REGEX = re.compile(r"\.(([a-z]+\.)*\w+)$")
LANGUAGES = (
("Af", "Afrikaans"),
("Ar", "Arabic"),
("Be", "Belarusian"),
("Bg", "Bulgarian"),
("Ca", "Catalan"),
("Cs", "Czech"),
("Da", "Danish"),
("De", "German"),
("El", "Greek"),
("En", "English"),
("Es", "Spanish"),
("Et", "Estonian"),
("Eu", "Basque"),
("Fi", "Finnish"),
("Fr", "French"),
("Gd", "Gaelic"),
("He", "Hebrew"),
("Hi", "Hindi"),
("Hr", "Croatian"),
("Hu", "Hungarian"),
("Hy", "Armenian"),
("Id", "Indonesian"),
("Is", "Icelandic"),
("It", "Italian"),
("Ja", "Japanese"),
("Ko", "Korean"),
("La", "Latin"),
("Lt", "Lithuanian"),
("Lv", "Latvian"),
("Mk", "Macedonian"),
("Nl", "Dutch"),
("No", "Norwegian"),
("Pa", "Punjabi"),
("Pl", "Polish"),
("Pt", "Portuguese"),
("Ro", "Romanian"),
("Ru", "Russian"),
("Sk", "Slovak"),
("Sl", "Slovenian"),
("Sq", "Albanian"),
("Sr", "Serbian"),
("Sv", "Swedish"),
("Ta", "Tamil"),
("Th", "Thai"),
("Tr", "Turkish"),
("Uk", "Ukrainian"),
("Vi", "Vietnamese"),
("Zh", "Chinese"),
("nolang", "No Language"),
)

View File

@@ -3,12 +3,14 @@ from io import BytesIO
from pathlib import Path
import httpx
from config import RESOURCES_BASE_PATH
from fastapi import status
from PIL import Image, ImageFile, UnidentifiedImageError
from config import ENABLE_SCHEDULED_CONVERT_IMAGES_TO_WEBP, RESOURCES_BASE_PATH
from logger.logger import log
from models.collection import Collection
from models.rom import Rom
from PIL import Image, ImageFile, UnidentifiedImageError
from tasks.scheduled.convert_images_to_webp import ImageConverter
from utils.context import ctx_httpx_client
from .base_handler import CoverSize, FSHandler
@@ -17,6 +19,7 @@ from .base_handler import CoverSize, FSHandler
class FSResourcesHandler(FSHandler):
def __init__(self) -> None:
super().__init__(base_path=RESOURCES_BASE_PATH)
self.image_converter = ImageConverter()
def get_platform_resources_path(self, platform_id: int) -> str:
return os.path.join("roms", str(platform_id))
@@ -73,6 +76,12 @@ class FSResourcesHandler(FSHandler):
) as f:
async for chunk in response.aiter_raw():
await f.write(chunk)
if ENABLE_SCHEDULED_CONVERT_IMAGES_TO_WEBP:
self.image_converter.convert_to_webp(
self.validate_path(f"{cover_file}/{size.value}.png"),
force=True,
)
except httpx.TransportError as exc:
log.error(f"Unable to fetch cover at {url_cover}: {str(exc)}")
return None
@@ -82,6 +91,11 @@ class FSResourcesHandler(FSHandler):
image_path = self.validate_path(f"{cover_file}/{size.value}.png")
with Image.open(image_path) as img:
self.resize_cover_to_small(img, save_path=str(image_path))
if ENABLE_SCHEDULED_CONVERT_IMAGES_TO_WEBP:
self.image_converter.convert_to_webp(
self.validate_path(f"{cover_file}/{size.value}.png"), force=True
)
except UnidentifiedImageError as exc:
log.error(f"Unable to identify image {cover_file}: {str(exc)}")
return None
@@ -160,6 +174,10 @@ class FSResourcesHandler(FSHandler):
with Image.open(artwork) as img:
img.save(path_cover_l)
self.resize_cover_to_small(img, save_path=str(path_cover_s))
if ENABLE_SCHEDULED_CONVERT_IMAGES_TO_WEBP:
self.image_converter.convert_to_webp(path_cover_l, force=True)
self.image_converter.convert_to_webp(path_cover_s, force=True)
except UnidentifiedImageError as exc:
log.error(
f"Unable to identify image for {entity.fs_resources_path}: {str(exc)}"

View File

@@ -13,6 +13,7 @@ from typing import IO, Any, Final, Literal, TypedDict
import magic
import zipfile_inflate64 # trunk-ignore(ruff/F401): Patches zipfile to support Enhanced Deflate
from config import LIBRARY_BASE_PATH
from config.config_manager import config_manager as cm
from exceptions.fs_exceptions import (

View File

@@ -1,3 +1,4 @@
import abc
import enum
import json
import re
@@ -6,9 +7,10 @@ from functools import lru_cache
from pathlib import Path
from typing import Final, NotRequired, TypedDict
from strsimpy.jaro_winkler import JaroWinkler
from handler.redis_handler import async_cache
from logger.logger import log
from strsimpy.jaro_winkler import JaroWinkler
from tasks.scheduled.update_switch_titledb import (
SWITCH_PRODUCT_ID_KEY,
SWITCH_TITLEDB_INDEX_KEY,
@@ -79,10 +81,15 @@ def _normalize_search_term(
return name.strip()
class MetadataHandler:
class MetadataHandler(abc.ABC):
SEARCH_TERM_SPLIT_PATTERN = re.compile(r"[\:\-\/]")
SEARCH_TERM_NORMALIZER = re.compile(r"\s*[:-]\s*")
@classmethod
@abc.abstractmethod
def is_enabled(cls) -> bool:
"""Return whether this metadata handler is enabled."""
def normalize_cover_url(self, url: str) -> str:
return url if not url else f"https:{url.replace('https:', '')}"

View File

@@ -97,6 +97,10 @@ class FlashpointHandler(MetadataHandler):
self.search_url = f"{self.base_url}/search"
self.min_similarity_score: Final = 0.75
@classmethod
def is_enabled(cls) -> bool:
return FLASHPOINT_API_ENABLED
async def _request(self, url: str, query: dict) -> dict:
"""
Sends a request to Flashpoint API.

View File

@@ -4,8 +4,9 @@ from typing import Any, NotRequired, TypedDict
import httpx
import pydash
from config import DEV_MODE, HASHEOUS_API_ENABLED
from fastapi import HTTPException, status
from config import DEV_MODE, HASHEOUS_API_ENABLED
from logger.logger import log
from models.rom import RomFile
from utils import get_version
@@ -123,6 +124,11 @@ class HasheousHandler(MetadataHandler):
else "JNoFBA-jEh4HbxuxEHM6MVzydKoAXs9eCcp2dvcg5LRCnpp312voiWmjuaIssSzS"
)
@classmethod
def is_enabled(cls) -> bool:
"""Return whether this metadata handler is enabled."""
return HASHEOUS_API_ENABLED
async def _request(
self,
url: str,
@@ -213,7 +219,7 @@ class HasheousHandler(MetadataHandler):
hasheous_id=None, igdb_id=None, tgdb_id=None, ra_id=None
)
if not HASHEOUS_API_ENABLED:
if not self.is_enabled():
return fallback_rom
filtered_files = [
@@ -314,7 +320,7 @@ class HasheousHandler(MetadataHandler):
)
async def get_igdb_game(self, hasheous_rom: HasheousRom) -> HasheousRom:
if not HASHEOUS_API_ENABLED:
if not self.is_enabled():
return hasheous_rom
igdb_id = hasheous_rom.get("igdb_id", None)
@@ -358,7 +364,7 @@ class HasheousHandler(MetadataHandler):
)
async def get_ra_game(self, hasheous_rom: HasheousRom) -> HasheousRom:
if not HASHEOUS_API_ENABLED:
if not self.is_enabled():
return hasheous_rom
ra_id = hasheous_rom.get("ra_id", None)

View File

@@ -1,16 +1,20 @@
import functools
import json
import re
from typing import Final, NotRequired, TypedDict
import httpx
import pydash
from adapters.services.igdb_types import GameType
from fastapi import status
from adapters.services.igdb import IGDBService
from adapters.services.igdb_types import (
Game,
GameType,
mark_expanded,
mark_list_expanded,
)
from config import IGDB_CLIENT_ID, IGDB_CLIENT_SECRET, IS_PYTEST_RUN
from fastapi import HTTPException, status
from handler.redis_handler import async_cache
from logger.logger import log
from unidecode import unidecode as uc
from utils.context import ctx_httpx_client
from .base_handler import (
@@ -23,15 +27,15 @@ from .base_handler import (
)
from .base_handler import UniversalPlatformSlug as UPS
# Used to display the IGDB API status in the frontend
IGDB_API_ENABLED: Final = bool(IGDB_CLIENT_ID) and bool(IGDB_CLIENT_SECRET)
PS1_IGDB_ID: Final = 7
PS2_IGDB_ID: Final = 8
PSP_IGDB_ID: Final = 38
SWITCH_IGDB_ID: Final = 130
ARCADE_IGDB_IDS: Final = [52, 79, 80]
# Regex to detect IGDB ID tags in filenames like (igdb-12345)
IGDB_TAG_REGEX = re.compile(r"\(igdb-(\d+)\)", re.IGNORECASE)
class IGDBPlatform(TypedDict):
slug: str
@@ -93,114 +97,113 @@ class IGDBRom(BaseRom):
igdb_metadata: NotRequired[IGDBMetadata]
def extract_metadata_from_igdb_rom(self: MetadataHandler, rom: dict) -> IGDBMetadata:
def build_related_game(
handler: MetadataHandler, rom: Game, game_type: str
) -> IGDBRelatedGame:
cover = rom.get("cover")
assert mark_expanded(cover)
cover_url = cover.get("url", "") if cover else ""
return IGDBRelatedGame(
id=rom["id"],
slug=rom.get("slug", ""),
name=rom.get("name", ""),
cover_url=handler.normalize_cover_url(cover_url.replace("t_thumb", "t_1080p")),
type=game_type,
)
def extract_metadata_from_igdb_rom(self: MetadataHandler, rom: Game) -> IGDBMetadata:
age_ratings = rom.get("age_ratings", [])
alternative_names = rom.get("alternative_names", [])
collections = rom.get("collections", [])
dlcs = rom.get("dlcs", [])
expanded_games = rom.get("expanded_games", [])
expansions = rom.get("expansions", [])
franchise = rom.get("franchise", None)
franchises = rom.get("franchises", [])
game_modes = rom.get("game_modes", [])
genres = rom.get("genres", [])
involved_companies = rom.get("involved_companies", [])
platforms = rom.get("platforms", [])
ports = rom.get("ports", [])
remakes = rom.get("remakes", [])
remasters = rom.get("remasters", [])
similar_games = rom.get("similar_games", [])
videos = rom.get("videos", [])
# Narrow types for expandable fields we requested IGDB to be expanded.
assert mark_expanded(franchise)
assert mark_list_expanded(age_ratings)
assert mark_list_expanded(alternative_names)
assert mark_list_expanded(collections)
assert mark_list_expanded(dlcs)
assert mark_list_expanded(expanded_games)
assert mark_list_expanded(expansions)
assert mark_list_expanded(franchises)
assert mark_list_expanded(game_modes)
assert mark_list_expanded(genres)
assert mark_list_expanded(involved_companies)
assert mark_list_expanded(platforms)
assert mark_list_expanded(ports)
assert mark_list_expanded(remakes)
assert mark_list_expanded(remasters)
assert mark_list_expanded(similar_games)
assert mark_list_expanded(videos)
return IGDBMetadata(
{
"youtube_video_id": pydash.get(rom, "videos[0].video_id", None),
"youtube_video_id": videos[0].get("video_id") if videos else None,
"total_rating": str(round(rom.get("total_rating", 0.0), 2)),
"aggregated_rating": str(round(rom.get("aggregated_rating", 0.0), 2)),
"first_release_date": rom.get("first_release_date", None),
"genres": pydash.map_(rom.get("genres", []), "name"),
"genres": [g.get("name", "") for g in genres if g.get("name")],
"franchises": pydash.compact(
[rom.get("franchise.name", None)]
+ pydash.map_(rom.get("franchises", []), "name")
[franchise.get("name") if franchise else None]
+ [f.get("name", "") for f in franchises if f.get("name")]
),
"alternative_names": pydash.map_(rom.get("alternative_names", []), "name"),
"collections": pydash.map_(rom.get("collections", []), "name"),
"game_modes": pydash.map_(rom.get("game_modes", []), "name"),
"companies": pydash.map_(rom.get("involved_companies", []), "company.name"),
"alternative_names": [
n.get("name", "") for n in alternative_names if n.get("name")
],
"collections": [c.get("name", "") for c in collections if c.get("name")],
"game_modes": [g.get("name", "") for g in game_modes if g.get("name")],
"companies": [
c["company"]["name"] for c in involved_companies if c.get("company")
],
"platforms": [
IGDBMetadataPlatform(igdb_id=p.get("id", ""), name=p.get("name", ""))
for p in rom.get("platforms", [])
IGDBMetadataPlatform(igdb_id=p["id"], name=p.get("name", ""))
for p in platforms
],
"age_ratings": [
IGDB_AGE_RATINGS[r["rating_category"]]
for r in rom.get("age_ratings", [])
if r["rating_category"] in IGDB_AGE_RATINGS
IGDB_AGE_RATINGS[rating_category]
for r in age_ratings
if (rating_category := r.get("rating_category")) in IGDB_AGE_RATINGS
],
"expansions": [
IGDBRelatedGame(
id=e["id"],
slug=e["slug"],
name=e["name"],
cover_url=self.normalize_cover_url(
pydash.get(e, "cover.url", "").replace("t_thumb", "t_1080p")
),
type="expansion",
)
for e in rom.get("expansions", [])
build_related_game(handler=self, rom=r, game_type="expansion")
for r in expansions
],
"dlcs": [
IGDBRelatedGame(
id=d["id"],
slug=d["slug"],
name=d["name"],
cover_url=self.normalize_cover_url(
pydash.get(d, "cover.url", "").replace("t_thumb", "t_1080p")
),
type="dlc",
)
for d in rom.get("dlcs", [])
build_related_game(handler=self, rom=r, game_type="dlc") for r in dlcs
],
"remasters": [
IGDBRelatedGame(
id=r["id"],
slug=r["slug"],
name=r["name"],
cover_url=self.normalize_cover_url(
pydash.get(r, "cover.url", "").replace("t_thumb", "t_1080p")
),
type="remaster",
)
for r in rom.get("remasters", [])
build_related_game(handler=self, rom=r, game_type="remaster")
for r in remasters
],
"remakes": [
IGDBRelatedGame(
id=r["id"],
slug=r["slug"],
name=r["name"],
cover_url=self.normalize_cover_url(
pydash.get(r, "cover.url", "").replace("t_thumb", "t_1080p")
),
type="remake",
)
for r in rom.get("remakes", [])
build_related_game(handler=self, rom=r, game_type="remake")
for r in remakes
],
"expanded_games": [
IGDBRelatedGame(
id=g["id"],
slug=g["slug"],
name=g["name"],
cover_url=self.normalize_cover_url(
pydash.get(g, "cover.url", "").replace("t_thumb", "t_1080p")
),
type="expanded",
)
for g in rom.get("expanded_games", [])
build_related_game(handler=self, rom=r, game_type="expanded")
for r in expanded_games
],
"ports": [
IGDBRelatedGame(
id=p["id"],
slug=p["slug"],
name=p["name"],
cover_url=self.normalize_cover_url(
pydash.get(p, "cover.url", "").replace("t_thumb", "t_1080p")
),
type="port",
)
for p in rom.get("ports", [])
build_related_game(handler=self, rom=r, game_type="port") for r in ports
],
"similar_games": [
IGDBRelatedGame(
id=s["id"],
slug=s["slug"],
name=s["name"],
cover_url=self.normalize_cover_url(
pydash.get(s, "cover.url", "").replace("t_thumb", "t_1080p")
),
type="similar",
)
for s in rom.get("similar_games", [])
build_related_game(handler=self, rom=r, game_type="similar")
for r in similar_games
],
}
)
@@ -208,115 +211,24 @@ def extract_metadata_from_igdb_rom(self: MetadataHandler, rom: dict) -> IGDBMeta
class IGDBHandler(MetadataHandler):
def __init__(self) -> None:
self.BASE_URL = "https://api.igdb.com/v4"
self.platform_endpoint = f"{self.BASE_URL}/platforms"
self.platforms_fields = PLATFORMS_FIELDS
self.platform_version_endpoint = f"{self.BASE_URL}/platform_versions"
self.platform_version_fields = PLATFORMS_VERSION_FIELDS
self.games_endpoint = f"{self.BASE_URL}/games"
self.games_fields = GAMES_FIELDS
self.search_endpoint = f"{self.BASE_URL}/search"
self.search_fields = SEARCH_FIELDS
self.igdb_service = IGDBService(twitch_auth=TwitchAuth())
self.pagination_limit = 200
self.twitch_auth = TwitchAuth()
self.headers = {
"Client-ID": IGDB_CLIENT_ID,
"Accept": "application/json",
}
@classmethod
def is_enabled(cls) -> bool:
return bool(IGDB_CLIENT_ID and IGDB_CLIENT_SECRET)
@staticmethod
def check_twitch_token(func):
@functools.wraps(func)
async def wrapper(*args):
token = await args[0].twitch_auth.get_oauth_token()
args[0].headers["Authorization"] = f"Bearer {token}"
return await func(*args)
return wrapper
async def _request(self, url: str, data: str) -> list:
httpx_client = ctx_httpx_client.get()
masked_headers = {}
try:
masked_headers = self._mask_sensitive_values(self.headers)
log.debug(
"API request: URL=%s, Headers=%s, Content=%s, Timeout=%s",
url,
masked_headers,
f"{data} limit {self.pagination_limit};",
120,
)
res = await httpx_client.post(
url,
content=f"{data} limit {self.pagination_limit};",
headers=self.headers,
timeout=120,
)
res.raise_for_status()
return res.json()
except httpx.LocalProtocolError as exc:
if str(exc) == "Illegal header value b'Bearer '":
log.critical("IGDB Error: Invalid IGDB_CLIENT_ID or IGDB_CLIENT_SECRET")
raise HTTPException(
status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
detail="Invalid IGDB credentials",
) from exc
else:
log.critical("Connection error: can't connect to IGDB")
raise HTTPException(
status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
detail="Can't connect to IGDB, check your internet connection",
) from exc
except httpx.NetworkError as exc:
log.critical("Connection error: can't connect to IGDB")
raise HTTPException(
status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
detail="Can't connect to IGDB, check your internet connection",
) from exc
except httpx.HTTPStatusError as exc:
# Retry once if the auth token is invalid
if exc.response.status_code != 401:
log.error(exc)
return [] # All requests to the IGDB API return a list
# Attempt to force a token refresh if the token is invalid
log.info("Twitch token invalid: fetching a new one...")
token = await self.twitch_auth._update_twitch_token()
self.headers["Authorization"] = f"Bearer {token}"
except json.decoder.JSONDecodeError as exc:
# Log the error and return an empty list if the response is not valid JSON
log.error(exc)
return []
except httpx.TimeoutException:
pass
# Retry once the request if it times out
try:
log.debug(
"Making a second attempt API request: URL=%s, Headers=%s, Content=%s, Timeout=%s",
url,
masked_headers,
f"{data} limit {self.pagination_limit};",
120,
)
res = await httpx_client.post(
url,
content=f"{data} limit {self.pagination_limit};",
headers=self.headers,
timeout=120,
)
res.raise_for_status()
return res.json()
except (httpx.HTTPError, json.decoder.JSONDecodeError) as exc:
# Log the error and return an empty list if the request fails again
log.error(exc)
return []
def extract_igdb_id_from_filename(fs_name: str) -> int | None:
"""Extract IGDB ID from filename tag like (igdb-12345)."""
match = IGDB_TAG_REGEX.search(fs_name)
if match:
return int(match.group(1))
return None
async def _search_rom(
self, search_term: str, platform_igdb_id: int, with_game_type: bool = False
) -> dict | None:
) -> Game | None:
if not platform_igdb_id:
return None
@@ -333,18 +245,21 @@ class IGDBHandler(MetadataHandler):
game_type_filter = ""
log.debug("Searching in games endpoint with game_type %s", game_type_filter)
roms = await self._request(
self.games_endpoint,
data=f'search "{uc(search_term)}"; fields {",".join(self.games_fields)}; where platforms=[{platform_igdb_id}] {game_type_filter};',
roms = await self.igdb_service.list_games(
search_term=search_term,
fields=GAMES_FIELDS,
where=f"platforms=[{platform_igdb_id}] {game_type_filter}",
limit=self.pagination_limit,
)
games_by_name: dict[str, dict] = {}
games_by_name: dict[str, Game] = {}
for game in roms:
game_name = game.get("name", "")
if (
game["name"] not in games_by_name
or game["id"] < games_by_name[game["name"]]["id"]
game_name not in games_by_name
or game["id"] < games_by_name[game_name]["id"]
):
games_by_name[game["name"]] = game
games_by_name[game_name] = game
best_match, best_score = self.find_best_match(
search_term,
@@ -357,9 +272,10 @@ class IGDBHandler(MetadataHandler):
return games_by_name[best_match]
log.debug("Searching expanded in search endpoint")
roms_expanded = await self._request(
self.search_endpoint,
data=f'fields {",".join(self.search_fields)}; where game.platforms=[{platform_igdb_id}] & (name ~ *"{search_term}"* | alternative_name ~ *"{search_term}"*);',
roms_expanded = await self.igdb_service.search(
fields=SEARCH_FIELDS,
where=f'game.platforms=[{platform_igdb_id}] & (name ~ *"{search_term}"* | alternative_name ~ *"{search_term}"*)',
limit=self.pagination_limit,
)
if roms_expanded:
@@ -367,15 +283,17 @@ class IGDBHandler(MetadataHandler):
"Searching expanded in games endpoint for expanded game %s",
roms_expanded[0]["game"],
)
extra_roms = await self._request(
self.games_endpoint,
f'fields {",".join(self.games_fields)}; where id={roms_expanded[0]["game"]["id"]};',
extra_roms = await self.igdb_service.list_games(
fields=GAMES_FIELDS,
where=f"id={roms_expanded[0]['game']['id']}",
limit=self.pagination_limit,
)
extra_games_by_name: dict[str, dict] = {}
extra_games_by_name: dict[str, Game] = {}
for game in extra_roms:
if game["name"] not in extra_games_by_name:
extra_games_by_name[game["name"]] = game
game_name = game.get("name", "")
if game_name not in extra_games_by_name:
extra_games_by_name[game_name] = game
best_match, best_score = self.find_best_match(
search_term,
@@ -391,13 +309,6 @@ class IGDBHandler(MetadataHandler):
return None
# @check_twitch_token
# async def get_platforms(self) -> None:
# platforms = await self._request(
# self.platform_endpoint,
# f'fields {",".join(self.platforms_fields)}; limit 500;',
# )
def get_platform(self, slug: str) -> IGDBPlatform:
if slug in IGDB_PLATFORM_LIST:
platform = IGDB_PLATFORM_LIST[UPS(slug)]
@@ -436,16 +347,30 @@ class IGDBHandler(MetadataHandler):
return IGDBPlatform(igdb_id=None, slug=slug)
@check_twitch_token
async def get_rom(self, fs_name: str, platform_igdb_id: int) -> IGDBRom:
from handler.filesystem import fs_rom_handler
if not IGDB_API_ENABLED:
if not self.is_enabled():
return IGDBRom(igdb_id=None)
if not platform_igdb_id:
return IGDBRom(igdb_id=None)
# Check for IGDB ID tag in filename first
igdb_id_from_tag = self.extract_igdb_id_from_filename(fs_name)
if igdb_id_from_tag:
log.debug(f"Found IGDB ID tag in filename: {igdb_id_from_tag}")
rom_by_id = await self.get_rom_by_id(igdb_id_from_tag)
if rom_by_id["igdb_id"]:
log.debug(
f"Successfully matched ROM by IGDB ID tag: {fs_name} -> {igdb_id_from_tag}"
)
return rom_by_id
else:
log.warning(
f"IGDB ID {igdb_id_from_tag} from filename tag not found in IGDB"
)
search_term = fs_rom_handler.get_file_name_with_no_tags(fs_name)
fallback_rom = IGDBRom(igdb_id=None)
@@ -516,86 +441,85 @@ class IGDBHandler(MetadataHandler):
if not rom:
return fallback_rom
rom_screenshots = rom.get("screenshots", [])
assert mark_list_expanded(rom_screenshots)
return IGDBRom(
igdb_id=rom["id"],
slug=rom["slug"],
name=rom["name"],
slug=rom.get("slug", ""),
name=rom.get("name", ""),
summary=rom.get("summary", ""),
url_cover=self.normalize_cover_url(
pydash.get(rom, "cover.url", "")
).replace("t_thumb", "t_1080p"),
url_screenshots=[
self.normalize_cover_url(s.get("url", "")).replace("t_thumb", "t_720p")
for s in rom.get("screenshots", [])
for s in rom_screenshots
],
igdb_metadata=extract_metadata_from_igdb_rom(self, rom),
)
@check_twitch_token
async def get_rom_by_id(self, igdb_id: int) -> IGDBRom:
if not IGDB_API_ENABLED:
if not self.is_enabled():
return IGDBRom(igdb_id=None)
roms = await self._request(
self.games_endpoint,
f'fields {",".join(self.games_fields)}; where id={igdb_id};',
roms = await self.igdb_service.list_games(
fields=GAMES_FIELDS,
where=f"id={igdb_id}",
limit=self.pagination_limit,
)
rom = pydash.get(roms, "[0]", None)
if not rom:
if not roms:
return IGDBRom(igdb_id=None)
rom = roms[0]
rom_screenshots = rom.get("screenshots", [])
assert mark_list_expanded(rom_screenshots)
return IGDBRom(
igdb_id=rom["id"],
slug=rom["slug"],
name=rom["name"],
slug=rom.get("slug", ""),
name=rom.get("name", ""),
summary=rom.get("summary", ""),
url_cover=self.normalize_cover_url(
pydash.get(rom, "cover.url", "")
).replace("t_thumb", "t_1080p"),
url_screenshots=[
self.normalize_cover_url(s.get("url", "")).replace("t_thumb", "t_720p")
for s in rom.get("screenshots", [])
for s in rom_screenshots
],
igdb_metadata=extract_metadata_from_igdb_rom(self, rom),
)
@check_twitch_token
async def get_matched_rom_by_id(self, igdb_id: int) -> IGDBRom | None:
if not IGDB_API_ENABLED:
if not self.is_enabled():
return None
rom = await self.get_rom_by_id(igdb_id)
return rom if rom["igdb_id"] else None
@check_twitch_token
async def get_matched_roms_by_name(
self, search_term: str, platform_igdb_id: int | None
) -> list[IGDBRom]:
if not IGDB_API_ENABLED:
if not self.is_enabled():
return []
if not platform_igdb_id:
return []
matched_roms = await self._request(
self.games_endpoint,
data=f'search "{uc(search_term)}"; fields {",".join(self.games_fields)}; where platforms=[{platform_igdb_id}];',
matched_roms = await self.igdb_service.list_games(
search_term=search_term,
fields=GAMES_FIELDS,
where=f"platforms=[{platform_igdb_id}]",
limit=self.pagination_limit,
)
alternative_matched_roms = await self._request(
self.search_endpoint,
data=f'fields {",".join(self.search_fields)}; where game.platforms=[{platform_igdb_id}] & (name ~ *"{search_term}"* | alternative_name ~ *"{search_term}"*);',
alternative_matched_roms = await self.igdb_service.search(
fields=SEARCH_FIELDS,
where=f'game.platforms=[{platform_igdb_id}] & (name ~ *"{search_term}"* | alternative_name ~ *"{search_term}"*)',
limit=self.pagination_limit,
)
if alternative_matched_roms:
alternative_roms_ids = []
for rom in alternative_matched_roms:
alternative_roms_ids.append(
pydash.get(rom, "game.id", "")
if "game" in rom.keys()
else rom.get("id", "")
)
id_filter = " | ".join(
list(
map(
@@ -608,14 +532,15 @@ class IGDBHandler(MetadataHandler):
)
)
)
alternative_matched_roms = await self._request(
self.games_endpoint,
f'fields {",".join(self.games_fields)}; where {id_filter};',
alternative_roms = await self.igdb_service.list_games(
fields=GAMES_FIELDS,
where=id_filter,
limit=self.pagination_limit,
)
matched_roms.extend(alternative_matched_roms)
matched_roms.extend(alternative_roms)
# Use a dictionary to keep track of unique ids
unique_ids: dict[str, dict[str, str]] = {}
unique_ids: dict[int, Game] = {}
# Use a list comprehension to filter duplicates based on the 'id' key
matched_roms = [
@@ -630,8 +555,8 @@ class IGDBHandler(MetadataHandler):
k: v
for k, v in {
"igdb_id": rom["id"],
"slug": rom["slug"],
"name": rom["name"],
"slug": rom.get("slug", ""),
"name": rom.get("name", ""),
"summary": rom.get("summary", ""),
"url_cover": self.normalize_cover_url(
pydash.get(rom, "cover.url", "").replace(
@@ -664,8 +589,12 @@ class TwitchAuth(MetadataHandler):
self.masked_params = self._mask_sensitive_values(self.params)
self.timeout = 10
@classmethod
def is_enabled(cls) -> bool:
    # Twitch OAuth is only used to authenticate against IGDB, so its enabled
    # state is exactly IGDB's (same client-id/secret configuration).
    return IGDBHandler.is_enabled()
async def _update_twitch_token(self) -> str:
if not IGDB_API_ENABLED:
if not self.is_enabled():
return ""
token = None
@@ -711,7 +640,7 @@ class TwitchAuth(MetadataHandler):
if IS_PYTEST_RUN:
return "test_token"
if not IGDB_API_ENABLED:
if not self.is_enabled():
return ""
# Fetch the token cache
@@ -723,26 +652,6 @@ class TwitchAuth(MetadataHandler):
return token
PLATFORMS_FIELDS = (
"id",
"slug",
"name",
"platform_type",
"generation",
"url",
"platform_family.name",
"platform_family.slug",
"platform_logo.url",
)
PLATFORMS_VERSION_FIELDS = (
"id",
"slug",
"name",
"url",
"platform_logo.url",
)
GAMES_FIELDS = (
"id",
"name",

View File

@@ -1,22 +1,30 @@
import json
import re
from datetime import datetime
from typing import NotRequired, TypedDict
from typing import Final, NotRequired, TypedDict
import pydash
from config import LAUNCHBOX_API_ENABLED, str_to_bool
from handler.redis_handler import async_cache
from logger.logger import log
from tasks.scheduled.update_launchbox_metadata import ( # LAUNCHBOX_MAME_KEY,
LAUNCHBOX_METADATA_ALTERNATE_NAME_KEY,
LAUNCHBOX_METADATA_DATABASE_ID_KEY,
LAUNCHBOX_METADATA_IMAGE_KEY,
LAUNCHBOX_METADATA_NAME_KEY,
update_launchbox_metadata_task,
)
from .base_handler import BaseRom, MetadataHandler
from .base_handler import UniversalPlatformSlug as UPS
LAUNCHBOX_PLATFORMS_KEY: Final[str] = "romm:launchbox_platforms"
LAUNCHBOX_METADATA_DATABASE_ID_KEY: Final[str] = "romm:launchbox_metadata_database_id"
LAUNCHBOX_METADATA_NAME_KEY: Final[str] = "romm:launchbox_metadata_name"
LAUNCHBOX_METADATA_ALTERNATE_NAME_KEY: Final[str] = (
"romm:launchbox_metadata_alternate_name"
)
LAUNCHBOX_METADATA_IMAGE_KEY: Final[str] = "romm:launchbox_metadata_image"
LAUNCHBOX_MAME_KEY: Final[str] = "romm:launchbox_mame"
LAUNCHBOX_FILES_KEY: Final[str] = "romm:launchbox_files"
# Regex to detect LaunchBox ID tags in filenames like (launchbox-12345)
LAUNCHBOX_TAG_REGEX = re.compile(r"\(launchbox-(\d+)\)", re.IGNORECASE)
class LaunchboxPlatform(TypedDict):
slug: str
@@ -115,11 +123,28 @@ def extract_metadata_from_launchbox_rom(
class LaunchboxHandler(MetadataHandler):
@classmethod
def is_enabled(cls) -> bool:
return LAUNCHBOX_API_ENABLED
@staticmethod
def extract_launchbox_id_from_filename(fs_name: str) -> int | None:
"""Extract LaunchBox ID from filename tag like (launchbox-12345)."""
match = LAUNCHBOX_TAG_REGEX.search(fs_name)
if match:
return int(match.group(1))
return None
async def _get_rom_from_metadata(
self, file_name: str, platform_slug: str
) -> dict | None:
if not (await async_cache.exists(LAUNCHBOX_METADATA_NAME_KEY)):
log.info("Fetching the Launchbox Metadata.xml file...")
from tasks.scheduled.update_launchbox_metadata import (
update_launchbox_metadata_task,
)
await update_launchbox_metadata_task.run(force=True)
if not (await async_cache.exists(LAUNCHBOX_METADATA_NAME_KEY)):
@@ -215,9 +240,24 @@ class LaunchboxHandler(MetadataHandler):
fallback_rom = LaunchboxRom(launchbox_id=None)
if not LAUNCHBOX_API_ENABLED:
if not self.is_enabled():
return fallback_rom
# Check for LaunchBox ID tag in filename first
launchbox_id_from_tag = self.extract_launchbox_id_from_filename(fs_name)
if launchbox_id_from_tag:
log.debug(f"Found LaunchBox ID tag in filename: {launchbox_id_from_tag}")
rom_by_id = await self.get_rom_by_id(launchbox_id_from_tag)
if rom_by_id["launchbox_id"]:
log.debug(
f"Successfully matched ROM by LaunchBox ID tag: {fs_name} -> {launchbox_id_from_tag}"
)
return rom_by_id
else:
log.warning(
f"LaunchBox ID {launchbox_id_from_tag} from filename tag not found in LaunchBox"
)
# We replace " - " with ": " to match Launchbox's naming convention
search_term = fs_rom_handler.get_file_name_with_no_tags(fs_name).replace(
" - ", ": "
@@ -254,7 +294,7 @@ class LaunchboxHandler(MetadataHandler):
return LaunchboxRom({k: v for k, v in rom.items() if v}) # type: ignore[misc]
async def get_rom_by_id(self, database_id: int) -> LaunchboxRom:
if not LAUNCHBOX_API_ENABLED:
if not self.is_enabled():
return LaunchboxRom(launchbox_id=None)
metadata_database_index_entry = await async_cache.hget(
@@ -264,6 +304,8 @@ class LaunchboxHandler(MetadataHandler):
if not metadata_database_index_entry:
return LaunchboxRom(launchbox_id=None)
# Parse the JSON string from cache
metadata_database_index_entry = json.loads(metadata_database_index_entry)
game_images = await self._get_game_images(
metadata_database_index_entry["DatabaseID"]
)
@@ -281,7 +323,7 @@ class LaunchboxHandler(MetadataHandler):
return LaunchboxRom({k: v for k, v in rom.items() if v}) # type: ignore[misc]
async def get_matched_rom_by_id(self, database_id: int) -> LaunchboxRom | None:
if not LAUNCHBOX_API_ENABLED:
if not self.is_enabled():
return None
return await self.get_rom_by_id(database_id)

View File

@@ -3,11 +3,12 @@ from typing import Final, NotRequired, TypedDict
from urllib.parse import quote
import pydash
from unidecode import unidecode as uc
from adapters.services.mobygames import MobyGamesService
from adapters.services.mobygames_types import MobyGame
from config import MOBYGAMES_API_KEY
from logger.logger import log
from unidecode import unidecode as uc
from .base_handler import (
PS2_OPL_REGEX,
@@ -19,15 +20,15 @@ from .base_handler import (
)
from .base_handler import UniversalPlatformSlug as UPS
# Used to display the Mobygames API status in the frontend
MOBY_API_ENABLED: Final = bool(MOBYGAMES_API_KEY)
PS1_MOBY_ID: Final = 6
PS2_MOBY_ID: Final = 7
PSP_MOBY_ID: Final = 46
SWITCH_MOBY_ID: Final = 203
ARCADE_MOBY_IDS: Final = [143, 36]
# Regex to detect MobyGames ID tags in filenames like (moby-12345)
MOBYGAMES_TAG_REGEX = re.compile(r"\(moby-(\d+)\)", re.IGNORECASE)
class MobyGamesPlatform(TypedDict):
slug: str
@@ -77,6 +78,18 @@ class MobyGamesHandler(MetadataHandler):
self.moby_service = MobyGamesService()
self.min_similarity_score = 0.6
@classmethod
def is_enabled(cls) -> bool:
return bool(MOBYGAMES_API_KEY)
@staticmethod
def extract_mobygames_id_from_filename(fs_name: str) -> int | None:
"""Extract MobyGames ID from filename tag like (moby-12345)."""
match = MOBYGAMES_TAG_REGEX.search(fs_name)
if match:
return int(match.group(1))
return None
async def _search_rom(
self, search_term: str, platform_moby_id: int, split_game_name: bool = False
) -> MobyGame | None:
@@ -128,12 +141,27 @@ class MobyGamesHandler(MetadataHandler):
async def get_rom(self, fs_name: str, platform_moby_id: int) -> MobyGamesRom:
from handler.filesystem import fs_rom_handler
if not MOBY_API_ENABLED:
if not self.is_enabled():
return MobyGamesRom(moby_id=None)
if not platform_moby_id:
return MobyGamesRom(moby_id=None)
# Check for MobyGames ID tag in filename first
mobygames_id_from_tag = self.extract_mobygames_id_from_filename(fs_name)
if mobygames_id_from_tag:
log.debug(f"Found MobyGames ID tag in filename: {mobygames_id_from_tag}")
rom_by_id = await self.get_rom_by_id(mobygames_id_from_tag)
if rom_by_id["moby_id"]:
log.debug(
f"Successfully matched ROM by MobyGames ID tag: {fs_name} -> {mobygames_id_from_tag}"
)
return rom_by_id
else:
log.warning(
f"MobyGames ID {mobygames_id_from_tag} from filename tag not found in MobyGames"
)
search_term = fs_rom_handler.get_file_name_with_no_tags(fs_name)
fallback_rom = MobyGamesRom(moby_id=None)
@@ -222,7 +250,7 @@ class MobyGamesHandler(MetadataHandler):
return MobyGamesRom({k: v for k, v in rom.items() if v}) # type: ignore[misc]
async def get_rom_by_id(self, moby_id: int) -> MobyGamesRom:
if not MOBY_API_ENABLED:
if not self.is_enabled():
return MobyGamesRom(moby_id=None)
roms = await self.moby_service.list_games(game_id=moby_id)
@@ -242,7 +270,7 @@ class MobyGamesHandler(MetadataHandler):
return MobyGamesRom({k: v for k, v in rom.items() if v}) # type: ignore[misc]
async def get_matched_rom_by_id(self, moby_id: int) -> MobyGamesRom | None:
if not MOBY_API_ENABLED:
if not self.is_enabled():
return None
rom = await self.get_rom_by_id(moby_id)
@@ -251,7 +279,7 @@ class MobyGamesHandler(MetadataHandler):
async def get_matched_roms_by_name(
self, search_term: str, platform_moby_id: int | None
) -> list[MobyGamesRom]:
if not MOBY_API_ENABLED:
if not self.is_enabled():
return []
if not platform_moby_id:

View File

@@ -4,8 +4,10 @@ from typing import NotRequired, TypedDict
import httpx
import yarl
from config import PLAYMATCH_API_ENABLED
from fastapi import HTTPException, status
from config import PLAYMATCH_API_ENABLED
from handler.metadata.base_handler import MetadataHandler
from logger.logger import log
from models.rom import RomFile
from utils import get_version
@@ -38,7 +40,7 @@ class PlaymatchRomMatch(TypedDict):
igdb_id: int | None
class PlaymatchHandler:
class PlaymatchHandler(MetadataHandler):
"""
Handler for [Playmatch](https://github.com/RetroRealm/playmatch), a service for matching Roms by Hashes.
"""
@@ -47,6 +49,10 @@ class PlaymatchHandler:
self.base_url = "https://playmatch.retrorealm.dev/api"
self.identify_url = f"{self.base_url}/identify/ids"
@classmethod
def is_enabled(cls) -> bool:
return PLAYMATCH_API_ENABLED
async def _request(self, url: str, query: dict) -> dict:
"""
Sends a Request to Playmatch API.
@@ -100,7 +106,7 @@ class PlaymatchHandler:
:return: A PlaymatchRomMatch objects containing the matched ROM information.
:raises HTTPException: If the request fails or the service is unavailable.
"""
if not PLAYMATCH_API_ENABLED:
if not self.is_enabled():
return PlaymatchRomMatch(igdb_id=None)
first_file = next(

View File

@@ -1,10 +1,12 @@
import json
import os
import re
import time
from datetime import datetime
from typing import Final, NotRequired, TypedDict
from typing import NotRequired, TypedDict
import pydash
from adapters.services.retroachievements import RetroAchievementsService
from adapters.services.retroachievements_types import (
RAGameExtendedDetails,
@@ -15,13 +17,14 @@ from config import (
RETROACHIEVEMENTS_API_KEY,
)
from handler.filesystem import fs_resource_handler
from logger.logger import log
from models.rom import Rom
from .base_handler import BaseRom, MetadataHandler
from .base_handler import UniversalPlatformSlug as UPS
# Used to display the Retroachievements API status in the frontend
RA_API_ENABLED: Final = bool(RETROACHIEVEMENTS_API_KEY)
# Regex to detect RetroAchievements ID tags in filenames like (ra-12345)
RA_TAG_REGEX = re.compile(r"\(ra-(\d+)\)", re.IGNORECASE)
class RAGamesPlatform(TypedDict):
@@ -125,6 +128,18 @@ class RAHandler(MetadataHandler):
self.ra_service = RetroAchievementsService()
self.HASHES_FILE_NAME = "ra_hashes.json"
@classmethod
def is_enabled(cls) -> bool:
return bool(RETROACHIEVEMENTS_API_KEY)
@staticmethod
def extract_ra_id_from_filename(fs_name: str) -> int | None:
"""Extract RetroAchievements ID from filename tag like (ra-12345)."""
match = RA_TAG_REGEX.search(fs_name)
if match:
return int(match.group(1))
return None
def _get_hashes_file_path(self, platform_id: int) -> str:
platform_resources_path = fs_resource_handler.get_platform_resources_path(
platform_id
@@ -199,7 +214,25 @@ class RAHandler(MetadataHandler):
)
async def get_rom(self, rom: Rom, ra_hash: str) -> RAGameRom:
if not rom.platform.ra_id or not ra_hash:
if not rom.platform.ra_id:
return RAGameRom(ra_id=None)
# Check for RetroAchievements ID tag in filename first
ra_id_from_tag = self.extract_ra_id_from_filename(rom.fs_name)
if ra_id_from_tag:
log.debug(f"Found RetroAchievements ID tag in filename: {ra_id_from_tag}")
rom_by_id = await self.get_rom_by_id(rom=rom, ra_id=ra_id_from_tag)
if rom_by_id["ra_id"]:
log.debug(
f"Successfully matched ROM by RetroAchievements ID tag: {rom.fs_name} -> {ra_id_from_tag}"
)
return rom_by_id
else:
log.warning(
f"RetroAchievements ID {ra_id_from_tag} from filename tag not found in RetroAchievements"
)
if not ra_hash:
return RAGameRom(ra_id=None)
ra_game_list_item = await self._search_rom(rom, ra_hash)
@@ -220,7 +253,6 @@ class RAHandler(MetadataHandler):
if rom_details.get("ImageTitle")
else ""
),
url_manual=rom_details.get("GuideURL") or "",
url_screenshots=pydash.compact(
[
(
@@ -249,7 +281,6 @@ class RAHandler(MetadataHandler):
if rom_details.get("ImageTitle")
else ""
),
url_manual=rom_details.get("GuideURL") or "",
url_screenshots=pydash.compact(
[
(

View File

@@ -8,9 +8,6 @@ from logger.logger import log
from .base_handler import MetadataHandler
# Used to display the SteamGridDB API status in the frontend
STEAMGRIDDB_API_ENABLED: Final = bool(STEAMGRIDDB_API_KEY)
class SGDBResource(TypedDict):
thumb: str
@@ -33,8 +30,44 @@ class SGDBBaseHandler(MetadataHandler):
self.sgdb_service = SteamGridDBService()
self.min_similarity_score: Final = 0.98
@classmethod
def is_enabled(cls) -> bool:
return bool(STEAMGRIDDB_API_KEY)
async def get_rom_by_id(self, sgdb_id: int) -> SGDBRom:
    """Get ROM details by SteamGridDB ID.

    Looks the game up by its SteamGridDB ID, fetches its static (non-animated,
    SFW) covers, and attaches the first cover resource with a non-empty URL.
    Returns ``SGDBRom(sgdb_id=None)`` when the service is disabled, the ID is
    unknown, or any lookup fails.
    """
    if not self.is_enabled():
        return SGDBRom(sgdb_id=None)

    try:
        game = await self.sgdb_service.get_game_by_id(sgdb_id)
        if not game:
            return SGDBRom(sgdb_id=None)

        # Get covers for the game — static type only, NSFW/humor/epilepsy
        # variants excluded.
        game_details = await self._get_game_covers(
            game_id=game["id"],
            game_name=game["name"],
            types=(SGDBType.STATIC,),
            is_nsfw=False,
            is_humor=False,
            is_epilepsy=False,
        )

        # First resource with a usable URL wins; covers may lack URLs.
        first_resource = next(
            (res for res in game_details["resources"] if res["url"]), None
        )

        result = SGDBRom(sgdb_id=game["id"])
        if first_resource:
            result["url_cover"] = first_resource["url"]
        return result
    except Exception as e:
        # Best-effort metadata lookup: log and fall back rather than letting a
        # provider failure propagate to the caller.
        log.warning(f"Failed to fetch ROM by SteamGridDB ID {sgdb_id}: {e}")
        return SGDBRom(sgdb_id=None)
async def get_details(self, search_term: str) -> list[SGDBResult]:
if not STEAMGRIDDB_API_ENABLED:
if not self.is_enabled():
return []
games = await self.sgdb_service.search_games(term=search_term)
@@ -51,7 +84,7 @@ class SGDBBaseHandler(MetadataHandler):
return list(filter(None, results))
async def get_details_by_names(self, game_names: list[str]) -> SGDBRom:
if not STEAMGRIDDB_API_ENABLED:
if not self.is_enabled():
return SGDBRom(sgdb_id=None)
for game_name in game_names:

View File

@@ -5,11 +5,12 @@ from typing import Final, NotRequired, TypedDict
from urllib.parse import quote
import pydash
from unidecode import unidecode as uc
from adapters.services.screenscraper import ScreenScraperService
from adapters.services.screenscraper_types import SSGame, SSGameDate
from config import SCREENSCRAPER_PASSWORD, SCREENSCRAPER_USER
from logger.logger import log
from unidecode import unidecode as uc
from .base_handler import (
PS2_OPL_REGEX,
@@ -21,8 +22,6 @@ from .base_handler import (
)
from .base_handler import UniversalPlatformSlug as UPS
# Used to display the Screenscraper API status in the frontend
SS_API_ENABLED: Final = bool(SCREENSCRAPER_USER) and bool(SCREENSCRAPER_PASSWORD)
SS_DEV_ID: Final = base64.b64decode("enVyZGkxNQ==").decode()
SS_DEV_PASSWORD: Final = base64.b64decode("eFRKd29PRmpPUUc=").decode()
@@ -103,6 +102,9 @@ ARCADE_SS_IDS: Final = [
269,
]
# Regex to detect ScreenScraper ID tags in filenames like (ssfr-12345)
SS_TAG_REGEX = re.compile(r"\(ssfr-(\d+)\)", re.IGNORECASE)
class SSPlatform(TypedDict):
slug: str
@@ -276,6 +278,18 @@ class SSHandler(MetadataHandler):
def __init__(self) -> None:
self.ss_service = ScreenScraperService()
@classmethod
def is_enabled(cls) -> bool:
return bool(SCREENSCRAPER_USER and SCREENSCRAPER_PASSWORD)
@staticmethod
def extract_ss_id_from_filename(fs_name: str) -> int | None:
    """Extract ScreenScraper ID from filename tag like (ssfr-12345).

    The tag prefix is ``ssfr`` (see SS_TAG_REGEX), not ``ss``; matching is
    case-insensitive and returns None when no tag is present.
    """
    match = SS_TAG_REGEX.search(fs_name)
    if match:
        return int(match.group(1))
    return None
async def _search_rom(
self, search_term: str, platform_ss_id: int, split_game_name: bool = False
) -> SSGame | None:
@@ -323,12 +337,27 @@ class SSHandler(MetadataHandler):
async def get_rom(self, file_name: str, platform_ss_id: int) -> SSRom:
from handler.filesystem import fs_rom_handler
if not SS_API_ENABLED:
if not self.is_enabled():
return SSRom(ss_id=None)
if not platform_ss_id:
return SSRom(ss_id=None)
# Check for ScreenScraper ID tag in filename first
ss_id_from_tag = self.extract_ss_id_from_filename(file_name)
if ss_id_from_tag:
log.debug(f"Found ScreenScraper ID tag in filename: {ss_id_from_tag}")
rom_by_id = await self.get_rom_by_id(ss_id_from_tag)
if rom_by_id["ss_id"]:
log.debug(
f"Successfully matched ROM by ScreenScraper ID tag: {file_name} -> {ss_id_from_tag}"
)
return rom_by_id
else:
log.warning(
f"ScreenScraper ID {ss_id_from_tag} from filename tag not found in ScreenScraper"
)
search_term = fs_rom_handler.get_file_name_with_no_tags(file_name)
fallback_rom = SSRom(ss_id=None)
@@ -411,7 +440,7 @@ class SSHandler(MetadataHandler):
return build_ss_rom(res)
async def get_rom_by_id(self, ss_id: int) -> SSRom:
if not SS_API_ENABLED:
if not self.is_enabled():
return SSRom(ss_id=None)
res = await self.ss_service.get_game_info(game_id=ss_id)
@@ -421,7 +450,7 @@ class SSHandler(MetadataHandler):
return build_ss_rom(res)
async def get_matched_rom_by_id(self, ss_id: int) -> SSRom | None:
if not SS_API_ENABLED:
if not self.is_enabled():
return None
rom = await self.get_rom_by_id(ss_id)
@@ -430,7 +459,7 @@ class SSHandler(MetadataHandler):
async def get_matched_roms_by_name(
self, search_term: str, platform_ss_id: int | None
) -> list[SSRom]:
if not SS_API_ENABLED:
if not self.is_enabled():
return []
if not platform_ss_id:

View File

@@ -1,5 +1,7 @@
from typing import NotRequired, TypedDict
from config import TGDB_API_ENABLED
from .base_handler import MetadataHandler
from .base_handler import UniversalPlatformSlug as UPS
@@ -22,6 +24,10 @@ class TGDBHandler(MetadataHandler):
self.platform_endpoint = f"{self.BASE_URL}/Lookup/Platforms"
self.games_endpoint = f"{self.BASE_URL}/Lookup/ByHash"
@classmethod
def is_enabled(cls) -> bool:
return TGDB_API_ENABLED
def get_platform(self, slug: str) -> TGDBPlatform:
if slug not in TGDB_PLATFORM_LIST:
return TGDBPlatform(tgdb_id=None, slug=slug)

View File

@@ -2,12 +2,13 @@ import os
import sys
from enum import Enum
from config import IS_PYTEST_RUN, REDIS_URL
from logger.logger import log
from redis import Redis
from redis.asyncio import Redis as AsyncRedis
from rq import Queue
from config import IS_PYTEST_RUN, REDIS_URL
from logger.logger import log
class QueuePrio(Enum):
HIGH = "high"

View File

@@ -571,6 +571,16 @@ async def scan_rom(
if igdb_handler_rom.get("igdb_id"):
rom_attrs.update({**igdb_handler_rom})
# Screenshots are a special case
rom_attrs["url_screenshots"] = (
igdb_handler_rom.get("url_screenshots", [])
or ss_handler_rom.get("url_screenshots", [])
or moby_handler_rom.get("url_screenshots", [])
or ra_handler_rom.get("url_screenshots", [])
or hasheous_handler_rom.get("url_screenshots", [])
or launchbox_handler_rom.get("url_screenshots", [])
)
# Stop IDs from getting overridden by empty values
rom_attrs.update(
{

Some files were not shown because too many files have changed in this diff Show More