|
|
-# Installation
+## Installation
To start using RomM, check out the [Quick Start Guide][docs-quick-start-guide] in the docs. If you are having issues with RomM, please review the page for [troubleshooting steps][docs-troubleshooting].
-# Contributing
+## Contributing
To contribute to RomM, please check [Contribution Guide](./CONTRIBUTING.md).
-# Community
+## Community
Here are a few projects maintained by members of our community. Please note that the RomM team does not regularly review their source code.
-- [romm-comm][romm-comm-discord-bot]: Discord Bot by @idio-sync
-- [DeckRommSync][deck-romm-sync]: SteamOS downloader and sync by @PeriBluGaming
-- [RommBrowser][romm-browser]: An electron client for RomM by @smurflabs
-- [RomM Android][romm-android]: An Android app for RomM by @mattsays
+### Mobile
+
+- [romm-mobile][romm-mobile]: Android (and soon iOS) app by @mattsays
+- [romm-android][romm-android]: Android app by @samwelnella
+
+### Desktop
+
+- [RommBrowser][romm-browser]: Electron client by @smurflabs
+- [RetroArch Sync][romm-retroarch-sync]: Sync RetroArch library with RomM by @Covin90
+- [RomMate][rommate]: Desktop app for browsing your collection by @brenoprata10
+- [romm-client][romm-client]: Desktop client by @chaun14
+
+### Other
+
+- [romm-comm][romm-comm-discord-bot]: Discord bot by @idio-sync
+- [DeckRommSync][deck-romm-sync]: SteamOS downloader and syncer by @PeriBluGaming
+- [GGRequestz][ggrequestz]: Game discovery and request tool by @XTREEMMAK
Join us on Discord, where you can ask questions, submit ideas, get help, showcase your collection, and discuss RomM with other users.
[![discord-invite-img]][discord-invite]
-# Technical Support
+## Technical Support
If you have any issues with RomM, please [open an issue](https://github.com/rommapp/romm/issues/new) in this repository.
-# Project Support
+## Project Support
Consider supporting the development of this project on Open Collective. All funds will be used to cover the costs of hosting, development, and maintenance of RomM.
[![oc-donate-img]][oc-donate]
-# Our Friends
+## Our Friends
Here are a few projects that we think you might like:
@@ -148,6 +148,11 @@ Here are a few projects that we think you might like:
[romm-comm-discord-bot]: https://github.com/idio-sync/romm-comm
[deck-romm-sync]: https://github.com/PeriBluGaming/DeckRommSync-Standalone
[romm-browser]: https://github.com/smurflabs/RommBrowser/
-[romm-android]: https://github.com/mattsays/romm-android
+[romm-mobile]: https://github.com/mattsays/romm-mobile
[playnite-app]: https://github.com/rommapp/playnite-plugin
[muos-app]: https://github.com/rommapp/muos-app
+[ggrequestz]: https://github.com/XTREEMMAK/ggrequestz
+[romm-client]: https://github.com/chaun14/romm-client
+[romm-retroarch-sync]: https://github.com/Covin90/romm-retroarch-sync
+[rommate]: https://github.com/brenoprata10/rommate
+[romm-android]: https://github.com/samwelnella/romm-android
diff --git a/backend/alembic/versions/0025_roms_hashes.py b/backend/alembic/versions/0025_roms_hashes.py
index 760957987..7ad90c19a 100644
--- a/backend/alembic/versions/0025_roms_hashes.py
+++ b/backend/alembic/versions/0025_roms_hashes.py
@@ -32,11 +32,19 @@ def upgrade() -> None:
# Run a no-scan in the background on migrate
if not IS_PYTEST_RUN:
high_prio_queue.enqueue(
- scan_platforms, [], ScanType.QUICK, [], [], job_timeout=SCAN_TIMEOUT
+ scan_platforms,
+ platform_ids=[],
+ metadata_sources=[],
+ scan_type=ScanType.QUICK,
+ job_timeout=SCAN_TIMEOUT,
)
high_prio_queue.enqueue(
- scan_platforms, [], ScanType.HASHES, [], [], job_timeout=SCAN_TIMEOUT
+ scan_platforms,
+ platform_ids=[],
+ metadata_sources=[],
+ scan_type=ScanType.HASHES,
+ job_timeout=SCAN_TIMEOUT,
)
diff --git a/backend/alembic/versions/0033_rom_file_and_hashes.py b/backend/alembic/versions/0033_rom_file_and_hashes.py
index 382a2c1b9..0cd89bfbf 100644
--- a/backend/alembic/versions/0033_rom_file_and_hashes.py
+++ b/backend/alembic/versions/0033_rom_file_and_hashes.py
@@ -176,11 +176,19 @@ def upgrade() -> None:
# Run a no-scan in the background on migrate
if not IS_PYTEST_RUN:
high_prio_queue.enqueue(
- scan_platforms, [], ScanType.QUICK, [], [], job_timeout=SCAN_TIMEOUT
+ scan_platforms,
+ platform_ids=[],
+ metadata_sources=[],
+ scan_type=ScanType.QUICK,
+ job_timeout=SCAN_TIMEOUT,
)
high_prio_queue.enqueue(
- scan_platforms, [], ScanType.HASHES, [], [], job_timeout=SCAN_TIMEOUT
+ scan_platforms,
+ platform_ids=[],
+ metadata_sources=[],
+ scan_type=ScanType.HASHES,
+ job_timeout=SCAN_TIMEOUT,
)
diff --git a/backend/alembic/versions/0055_collection_is_favorite.py b/backend/alembic/versions/0055_collection_is_favorite.py
new file mode 100644
index 000000000..7f709558e
--- /dev/null
+++ b/backend/alembic/versions/0055_collection_is_favorite.py
@@ -0,0 +1,42 @@
+"""Add is_favorite flag to collections and backfill existing favorites
+
+Revision ID: 0055_collection_is_favorite
+Revises: 0054_add_platform_metadata_slugs
+Create Date: 2025-10-18 13:24:15.119652
+
+"""
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "0055_collection_is_favorite"
+down_revision = "0054_add_platform_metadata_slugs"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ with op.batch_alter_table("collections", schema=None) as batch_op:
+ batch_op.add_column(sa.Column("is_favorite", sa.Boolean(), nullable=False))
+
+ # Find favorite collection and set is_favorite to True
+ from handler.database import db_collection_handler, db_user_handler
+
+ users = db_user_handler.get_users()
+ for user in users:
+ collection = db_collection_handler.get_collection_by_name("favourites", user.id)
+ if not collection:
+ collection = db_collection_handler.get_collection_by_name(
+ "favorites", user.id
+ )
+
+ if collection:
+ db_collection_handler.update_collection(
+ collection.id, {"is_favorite": True}
+ )
+
+
+def downgrade() -> None:
+ with op.batch_alter_table("collections", schema=None) as batch_op:
+ batch_op.drop_column("is_favorite")
diff --git a/backend/config/__init__.py b/backend/config/__init__.py
index 7a5ddf7b7..0a230d564 100644
--- a/backend/config/__init__.py
+++ b/backend/config/__init__.py
@@ -137,6 +137,7 @@ OIDC_TLS_CACERTFILE: Final = os.environ.get("OIDC_TLS_CACERTFILE", None)
# SCANS
SCAN_TIMEOUT: Final = int(os.environ.get("SCAN_TIMEOUT", 60 * 60 * 4)) # 4 hours
+SCAN_WORKERS: Final = max(1, int(os.environ.get("SCAN_WORKERS", "1")))
# TASKS
TASK_TIMEOUT: Final = int(os.environ.get("TASK_TIMEOUT", 60 * 5)) # 5 minutes
diff --git a/backend/config/config_manager.py b/backend/config/config_manager.py
index 6ccda2085..a5974fc3c 100644
--- a/backend/config/config_manager.py
+++ b/backend/config/config_manager.py
@@ -46,6 +46,7 @@ EjsOption = dict[str, str] # option_name -> option_value
class Config:
CONFIG_FILE_MOUNTED: bool
+ CONFIG_FILE_WRITABLE: bool
EXCLUDED_PLATFORMS: list[str]
EXCLUDED_SINGLE_EXT: list[str]
EXCLUDED_SINGLE_FILES: list[str]
@@ -82,6 +83,7 @@ class ConfigManager:
_self = None
_raw_config: dict = {}
_config_file_mounted: bool = False
+ _config_file_writable: bool = False
def __new__(cls, *args, **kwargs):
if cls._self is None:
@@ -94,17 +96,19 @@ class ConfigManager:
self.config_file = config_file
try:
- with open(self.config_file, "r+") as cf:
+ # Check if the config file is mounted
+ with open(self.config_file, "r") as cf:
self._config_file_mounted = True
self._raw_config = yaml.load(cf, Loader=SafeLoader) or {}
+
+ # Also check if the config file is writable
+ self._config_file_writable = os.access(self.config_file, os.W_OK)
except FileNotFoundError:
- self._config_file_mounted = False
log.critical(
"Config file not found! Any changes made to the configuration will not persist after the application restarts."
)
except PermissionError:
- self._config_file_mounted = False
- log.critical(
+ log.warning(
"Config file not writable! Any changes made to the configuration will not persist after the application restarts."
)
finally:
@@ -159,6 +163,7 @@ class ConfigManager:
self.config = Config(
CONFIG_FILE_MOUNTED=self._config_file_mounted,
+ CONFIG_FILE_WRITABLE=self._config_file_writable,
EXCLUDED_PLATFORMS=pydash.get(self._raw_config, "exclude.platforms", []),
EXCLUDED_SINGLE_EXT=[
e.lower()
@@ -417,11 +422,10 @@ class ConfigManager:
def get_config(self) -> Config:
try:
- with open(self.config_file, "r+") as config_file:
+ with open(self.config_file, "r") as config_file:
self._raw_config = yaml.load(config_file, Loader=SafeLoader) or {}
- except (FileNotFoundError, PermissionError):
- log.debug("Config file not found or not writable")
- pass
+ except FileNotFoundError:
+ log.debug("Config file not found!")
self._parse_config()
self._validate_config()
@@ -429,8 +433,8 @@ class ConfigManager:
return self.config
def _update_config_file(self) -> None:
- if not self._config_file_mounted:
- log.warning("Config file not mounted, skipping config file update")
+ if not self._config_file_writable:
+ log.warning("Config file not writable, skipping config file update")
raise ConfigNotWritableException
self._raw_config = {
diff --git a/backend/endpoints/collections.py b/backend/endpoints/collections.py
index 146262583..3b01f2552 100644
--- a/backend/endpoints/collections.py
+++ b/backend/endpoints/collections.py
@@ -53,6 +53,7 @@ async def add_collection(
"description": data.get("description", ""),
"url_cover": data.get("url_cover", ""),
"is_public": data.get("is_public", False),
+ "is_favorite": data.get("is_favorite", False),
"user_id": request.user.id,
}
db_collection = db_collection_handler.get_collection_by_name(
diff --git a/backend/endpoints/configs.py b/backend/endpoints/configs.py
index bec1350d2..8fc4f1bde 100644
--- a/backend/endpoints/configs.py
+++ b/backend/endpoints/configs.py
@@ -25,6 +25,7 @@ def get_config() -> ConfigResponse:
cfg = cm.get_config()
return ConfigResponse(
CONFIG_FILE_MOUNTED=cfg.CONFIG_FILE_MOUNTED,
+ CONFIG_FILE_WRITABLE=cfg.CONFIG_FILE_WRITABLE,
EXCLUDED_PLATFORMS=cfg.EXCLUDED_PLATFORMS,
EXCLUDED_SINGLE_EXT=cfg.EXCLUDED_SINGLE_EXT,
EXCLUDED_SINGLE_FILES=cfg.EXCLUDED_SINGLE_FILES,
diff --git a/backend/endpoints/responses/__init__.py b/backend/endpoints/responses/__init__.py
index 5708a3dea..7b25841af 100644
--- a/backend/endpoints/responses/__init__.py
+++ b/backend/endpoints/responses/__init__.py
@@ -12,10 +12,10 @@ class ScanStats(TypedDict):
new_platforms: int
identified_platforms: int
scanned_roms: int
- added_roms: int
+ new_roms: int
identified_roms: int
scanned_firmware: int
- added_firmware: int
+ new_firmware: int
class ScanTaskMeta(TypedDict):
diff --git a/backend/endpoints/responses/config.py b/backend/endpoints/responses/config.py
index 487b98b62..a972b9508 100644
--- a/backend/endpoints/responses/config.py
+++ b/backend/endpoints/responses/config.py
@@ -5,6 +5,7 @@ from config.config_manager import EjsControls
class ConfigResponse(TypedDict):
CONFIG_FILE_MOUNTED: bool
+ CONFIG_FILE_WRITABLE: bool
EXCLUDED_PLATFORMS: list[str]
EXCLUDED_SINGLE_EXT: list[str]
EXCLUDED_SINGLE_FILES: list[str]
diff --git a/backend/endpoints/responses/feeds.py b/backend/endpoints/responses/feeds.py
index dc5bfdcaf..02332fbcc 100644
--- a/backend/endpoints/responses/feeds.py
+++ b/backend/endpoints/responses/feeds.py
@@ -1,8 +1,10 @@
-from typing import Annotated, Any, NotRequired, TypedDict
+from typing import Annotated, Any, Final, NotRequired, TypedDict
-from pydantic import BaseModel, BeforeValidator, Field
+from pydantic import BaseModel, BeforeValidator, Field, field_validator
from handler.metadata.base_handler import UniversalPlatformSlug as UPS
+from tasks.scheduled.update_switch_titledb import TITLEDB_REGION_LIST
+from utils.database import safe_int
WEBRCADE_SUPPORTED_PLATFORM_SLUGS = frozenset(
(
@@ -64,29 +66,6 @@ WEBRCADE_SLUG_TO_TYPE_MAP = {
# Webrcade feed format
# Source: https://docs.webrcade.com/feeds/format/
-
-
-def coerce_to_string(value: Any) -> str:
- """Coerce value to string, returning empty string for None."""
- return "" if value is None else str(value)
-
-
-def coerce_to_int(value: Any) -> int:
- """Coerce value to int, returning 0 for None/empty values."""
- if value in (None, ""):
- return 0
-
- try:
- return int(value)
- except (ValueError, TypeError):
- return 0
-
-
-# Annotated types for cleaner field definitions
-StringField = Annotated[str, BeforeValidator(coerce_to_string)]
-IntField = Annotated[int, BeforeValidator(coerce_to_int)]
-
-
class WebrcadeFeedItemPropsSchema(TypedDict):
rom: str
@@ -122,6 +101,23 @@ class WebrcadeFeedSchema(TypedDict):
# Tinfoil feed format
# Source: https://blawar.github.io/tinfoil/custom_index/
+UNIX_EPOCH_START_DATE: Final = 19700101
+
+
+def coerce_to_string(value: Any) -> str:
+ """Coerce value to string, returning empty string for None."""
+ return "" if value is None else str(value)
+
+
+def coerce_to_int(value: Any) -> int:
+ """Coerce value to int, returning 0 for None/empty values."""
+ return safe_int(value, default=0)
+
+
+# Annotated types for cleaner field definitions
+StringField = Annotated[str, BeforeValidator(coerce_to_string)]
+IntField = Annotated[int, BeforeValidator(coerce_to_int)]
+
class TinfoilFeedFileSchema(TypedDict):
url: str
@@ -142,10 +138,24 @@ class TinfoilFeedTitleDBSchema(BaseModel):
publisher: StringField = Field(default="")
size: IntField = Field(default=0, ge=0)
version: IntField = Field(default=0, ge=0)
- releaseDate: IntField = Field(default=19700101, ge=19700101)
+ releaseDate: IntField = Field(
+ default=UNIX_EPOCH_START_DATE, ge=UNIX_EPOCH_START_DATE
+ )
rating: IntField = Field(default=0, ge=0, le=100)
rank: IntField = Field(default=0, ge=0)
+ @field_validator("region")
+ def validate_region(cls, v: str) -> str:
+ if v not in TITLEDB_REGION_LIST:
+ return "US"
+ return v
+
+ @field_validator("releaseDate")
+ def validate_release_date(cls, v: int) -> int:
+ if v < UNIX_EPOCH_START_DATE:
+ return UNIX_EPOCH_START_DATE
+ return v
+
class TinfoilFeedSchema(TypedDict):
files: list[TinfoilFeedFileSchema]
diff --git a/backend/endpoints/responses/rom.py b/backend/endpoints/responses/rom.py
index edcbf9701..169c46e24 100644
--- a/backend/endpoints/responses/rom.py
+++ b/backend/endpoints/responses/rom.py
@@ -214,7 +214,6 @@ class RomSchema(BaseModel):
platform_id: int
platform_slug: str
platform_fs_slug: str
- platform_name: str
platform_custom_name: str | None
platform_display_name: str
diff --git a/backend/endpoints/responses/search.py b/backend/endpoints/responses/search.py
index e13c720dd..ca4983cf8 100644
--- a/backend/endpoints/responses/search.py
+++ b/backend/endpoints/responses/search.py
@@ -11,7 +11,6 @@ class SearchRomSchema(BaseModel):
sgdb_id: int | None = None
flashpoint_id: str | None = None
launchbox_id: int | None = None
- hltb_id: int | None = None
platform_id: int
name: str
slug: str = ""
@@ -22,7 +21,6 @@ class SearchRomSchema(BaseModel):
sgdb_url_cover: str = ""
flashpoint_url_cover: str = ""
launchbox_url_cover: str = ""
- hltb_url_cover: str = ""
is_unidentified: bool
is_identified: bool
diff --git a/backend/endpoints/rom.py b/backend/endpoints/rom.py
index 355b28fc6..c13b09af6 100644
--- a/backend/endpoints/rom.py
+++ b/backend/endpoints/rom.py
@@ -1,4 +1,5 @@
import binascii
+import json
from base64 import b64encode
from datetime import datetime, timezone
from io import BytesIO
@@ -21,6 +22,7 @@ from fastapi import (
UploadFile,
status,
)
+from fastapi.datastructures import FormData
from fastapi.responses import Response
from fastapi_pagination.ext.sqlalchemy import paginate
from fastapi_pagination.limit_offset import LimitOffsetPage, LimitOffsetParams
@@ -53,16 +55,17 @@ from handler.filesystem import fs_resource_handler, fs_rom_handler
from handler.filesystem.base_handler import CoverSize
from handler.metadata import (
meta_flashpoint_handler,
- meta_hltb_handler,
meta_igdb_handler,
meta_launchbox_handler,
meta_moby_handler,
+ meta_ra_handler,
meta_ss_handler,
)
from logger.formatter import BLUE
from logger.formatter import highlight as hl
from logger.logger import log
from models.rom import Rom
+from utils.database import safe_int
from utils.filesystem import sanitize_filename
from utils.hashing import crc32_to_hex
from utils.nginx import FileRedirectResponse, ZipContentLine, ZipResponse
@@ -74,6 +77,18 @@ router = APIRouter(
)
+def parse_raw_metadata(data: FormData, form_key: str) -> dict | None:
+ raw_json = data.get(form_key, None)
+ if not raw_json or str(raw_json).strip() == "":
+ return None
+
+ try:
+ return json.loads(str(raw_json))
+ except json.JSONDecodeError as e:
+ log.warning(f"Invalid JSON for {form_key}: {e}")
+ return None
+
+
@protected_route(
router.post,
"",
@@ -197,9 +212,9 @@ def get_roms(
bool | None,
Query(description="Whether the rom matched a metadata source."),
] = None,
- favourite: Annotated[
+ favorite: Annotated[
bool | None,
- Query(description="Whether the rom is marked as favourite."),
+ Query(description="Whether the rom is marked as favorite."),
] = None,
duplicate: Annotated[
bool | None,
@@ -289,7 +304,7 @@ def get_roms(
smart_collection_id=smart_collection_id,
search_term=search_term,
matched=matched,
- favourite=favourite,
+ favorite=favorite,
duplicate=duplicate,
playable=playable,
has_ra=has_ra,
@@ -705,6 +720,8 @@ async def update_rom(
"ss_id": None,
"ra_id": None,
"launchbox_id": None,
+ "hasheous_id": None,
+ "tgdb_id": None,
"flashpoint_id": None,
"hltb_id": None,
"name": rom.fs_name,
@@ -721,6 +738,7 @@ async def update_rom(
"ss_metadata": {},
"ra_metadata": {},
"launchbox_metadata": {},
+ "hasheous_metadata": {},
"flashpoint_metadata": {},
"hltb_metadata": {},
"revision": "",
@@ -734,81 +752,99 @@ async def update_rom(
return DetailedRomSchema.from_orm_with_request(rom, request)
cleaned_data: dict[str, Any] = {
- "igdb_id": data.get("igdb_id", rom.igdb_id),
- "moby_id": data.get("moby_id", rom.moby_id),
- "ss_id": data.get("ss_id", rom.ss_id),
- "launchbox_id": data.get("launchbox_id", rom.launchbox_id),
- "flashpoint_id": data.get("flashpoint_id", rom.flashpoint_id),
- "hltb_id": data.get("hltb_id", rom.hltb_id),
+ "igdb_id": safe_int(data.get("igdb_id")) or rom.igdb_id,
+ "sgdb_id": safe_int(data.get("sgdb_id")) or rom.sgdb_id,
+ "moby_id": safe_int(data.get("moby_id")) or rom.moby_id,
+ "ss_id": safe_int(data.get("ss_id")) or rom.ss_id,
+ "ra_id": safe_int(data.get("ra_id")) or rom.ra_id,
+ "launchbox_id": safe_int(data.get("launchbox_id")) or rom.launchbox_id,
+ "hasheous_id": safe_int(data.get("hasheous_id")) or rom.hasheous_id,
+ "tgdb_id": safe_int(data.get("tgdb_id")) or rom.tgdb_id,
+ "flashpoint_id": safe_int(data.get("flashpoint_id")) or rom.flashpoint_id,
+ "hltb_id": safe_int(data.get("hltb_id")) or rom.hltb_id,
}
- if (
- cleaned_data.get("hltb_id", "")
- and int(cleaned_data.get("hltb_id", "")) != rom.hltb_id
- ):
- hltb_rom = await meta_hltb_handler.get_rom_by_id(cleaned_data["hltb_id"])
- cleaned_data.update(hltb_rom)
+ # Add raw metadata parsing
+ raw_igdb_metadata = parse_raw_metadata(data, "raw_igdb_metadata")
+ raw_moby_metadata = parse_raw_metadata(data, "raw_moby_metadata")
+ raw_ss_metadata = parse_raw_metadata(data, "raw_ss_metadata")
+ raw_launchbox_metadata = parse_raw_metadata(data, "raw_launchbox_metadata")
+ raw_hasheous_metadata = parse_raw_metadata(data, "raw_hasheous_metadata")
+ raw_flashpoint_metadata = parse_raw_metadata(data, "raw_flashpoint_metadata")
+ raw_hltb_metadata = parse_raw_metadata(data, "raw_hltb_metadata")
+ if cleaned_data["igdb_id"] and raw_igdb_metadata is not None:
+ cleaned_data["igdb_metadata"] = raw_igdb_metadata
+ if cleaned_data["moby_id"] and raw_moby_metadata is not None:
+ cleaned_data["moby_metadata"] = raw_moby_metadata
+ if cleaned_data["ss_id"] and raw_ss_metadata is not None:
+ cleaned_data["ss_metadata"] = raw_ss_metadata
+ if cleaned_data["launchbox_id"] and raw_launchbox_metadata is not None:
+ cleaned_data["launchbox_metadata"] = raw_launchbox_metadata
+ if cleaned_data["hasheous_id"] and raw_hasheous_metadata is not None:
+ cleaned_data["hasheous_metadata"] = raw_hasheous_metadata
+ if cleaned_data["flashpoint_id"] and raw_flashpoint_metadata is not None:
+ cleaned_data["flashpoint_metadata"] = raw_flashpoint_metadata
+ if cleaned_data["hltb_id"] and raw_hltb_metadata is not None:
+ cleaned_data["hltb_metadata"] = raw_hltb_metadata
+
+ # Fetch metadata from external sources
if (
- cleaned_data.get("flashpoint_id", "")
- and cleaned_data.get("flashpoint_id", "") != rom.flashpoint_id
+ cleaned_data["flashpoint_id"]
+ and cleaned_data["flashpoint_id"] != rom.flashpoint_id
):
flashpoint_rom = await meta_flashpoint_handler.get_rom_by_id(
cleaned_data["flashpoint_id"]
)
cleaned_data.update(flashpoint_rom)
+ elif rom.flashpoint_id and not cleaned_data["flashpoint_id"]:
+ cleaned_data.update({"flashpoint_id": None, "flashpoint_metadata": {}})
if (
- cleaned_data.get("launchbox_id", "")
- and int(cleaned_data.get("launchbox_id", "")) != rom.launchbox_id
+ cleaned_data["launchbox_id"]
+ and int(cleaned_data["launchbox_id"]) != rom.launchbox_id
):
launchbox_rom = await meta_launchbox_handler.get_rom_by_id(
cleaned_data["launchbox_id"]
)
cleaned_data.update(launchbox_rom)
- path_screenshots = await fs_resource_handler.get_rom_screenshots(
- rom=rom,
- url_screenshots=cleaned_data.get("url_screenshots", []),
- )
- cleaned_data.update({"path_screenshots": path_screenshots})
+ elif rom.launchbox_id and not cleaned_data["launchbox_id"]:
+ cleaned_data.update({"launchbox_id": None, "launchbox_metadata": {}})
- if (
- cleaned_data.get("moby_id", "")
- and int(cleaned_data.get("moby_id", "")) != rom.moby_id
- ):
+ if cleaned_data["ra_id"] and int(cleaned_data["ra_id"]) != rom.ra_id:
+ ra_rom = await meta_ra_handler.get_rom_by_id(rom, ra_id=cleaned_data["ra_id"])
+ cleaned_data.update(ra_rom)
+ elif rom.ra_id and not cleaned_data["ra_id"]:
+ cleaned_data.update({"ra_id": None, "ra_metadata": {}})
+
+ if cleaned_data["moby_id"] and int(cleaned_data["moby_id"]) != rom.moby_id:
moby_rom = await meta_moby_handler.get_rom_by_id(
int(cleaned_data.get("moby_id", ""))
)
cleaned_data.update(moby_rom)
- path_screenshots = await fs_resource_handler.get_rom_screenshots(
- rom=rom,
- url_screenshots=cleaned_data.get("url_screenshots", []),
- )
- cleaned_data.update({"path_screenshots": path_screenshots})
+ elif rom.moby_id and not cleaned_data["moby_id"]:
+ cleaned_data.update({"moby_id": None, "moby_metadata": {}})
- if (
- cleaned_data.get("ss_id", "")
- and int(cleaned_data.get("ss_id", "")) != rom.ss_id
- ):
+ if cleaned_data["ss_id"] and int(cleaned_data["ss_id"]) != rom.ss_id:
ss_rom = await meta_ss_handler.get_rom_by_id(cleaned_data["ss_id"])
cleaned_data.update(ss_rom)
- path_screenshots = await fs_resource_handler.get_rom_screenshots(
- rom=rom,
- url_screenshots=cleaned_data.get("url_screenshots", []),
- )
- cleaned_data.update({"path_screenshots": path_screenshots})
+ elif rom.ss_id and not cleaned_data["ss_id"]:
+ cleaned_data.update({"ss_id": None, "ss_metadata": {}})
- if (
- cleaned_data.get("igdb_id", "")
- and int(cleaned_data.get("igdb_id", "")) != rom.igdb_id
- ):
+ if cleaned_data["igdb_id"] and int(cleaned_data["igdb_id"]) != rom.igdb_id:
igdb_rom = await meta_igdb_handler.get_rom_by_id(cleaned_data["igdb_id"])
cleaned_data.update(igdb_rom)
+ elif rom.igdb_id and not cleaned_data["igdb_id"]:
+ cleaned_data.update({"igdb_id": None, "igdb_metadata": {}})
+
+ if cleaned_data.get("url_screenshots", []):
path_screenshots = await fs_resource_handler.get_rom_screenshots(
rom=rom,
url_screenshots=cleaned_data.get("url_screenshots", []),
)
- cleaned_data.update({"path_screenshots": path_screenshots})
+ cleaned_data.update(
+ {"path_screenshots": path_screenshots, "url_screenshots": []}
+ )
cleaned_data.update(
{
diff --git a/backend/endpoints/search.py b/backend/endpoints/search.py
index 41c65592e..0d958f51f 100644
--- a/backend/endpoints/search.py
+++ b/backend/endpoints/search.py
@@ -9,7 +9,6 @@ from handler.auth.constants import Scope
from handler.database import db_rom_handler
from handler.metadata import (
meta_flashpoint_handler,
- meta_hltb_handler,
meta_igdb_handler,
meta_launchbox_handler,
meta_moby_handler,
@@ -17,7 +16,6 @@ from handler.metadata import (
meta_ss_handler,
)
from handler.metadata.flashpoint_handler import FlashpointRom
-from handler.metadata.hltb_handler import HLTBRom
from handler.metadata.igdb_handler import IGDBRom
from handler.metadata.launchbox_handler import LaunchboxRom
from handler.metadata.moby_handler import MobyGamesRom
@@ -63,7 +61,6 @@ async def search_rom(
and not meta_moby_handler.is_enabled()
and not meta_flashpoint_handler.is_enabled()
and not meta_launchbox_handler.is_enabled()
- and not meta_hltb_handler.is_enabled()
):
log.error("Search error: No metadata providers enabled")
raise HTTPException(
@@ -94,7 +91,6 @@ async def search_rom(
ss_matched_roms: list[SSRom] = []
flashpoint_matched_roms: list[FlashpointRom] = []
launchbox_matched_roms: list[LaunchboxRom] = []
- hltb_matched_roms: list[HLTBRom] = []
if search_by.lower() == "id":
try:
@@ -120,7 +116,6 @@ async def search_rom(
ss_matched_roms,
flashpoint_matched_roms,
launchbox_matched_roms,
- hltb_matched_roms,
) = await asyncio.gather(
meta_igdb_handler.get_matched_roms_by_name(
search_term, get_main_platform_igdb_id(rom.platform)
@@ -135,7 +130,6 @@ async def search_rom(
meta_launchbox_handler.get_matched_roms_by_name(
search_term, rom.platform.slug
),
- meta_hltb_handler.get_matched_roms_by_name(search_term, rom.platform.slug),
)
merged_dict: dict[str, dict] = {}
@@ -215,21 +209,6 @@ async def search_rom(
**merged_dict.get(launchbox_name, {}),
}
- for hltb_rom in hltb_matched_roms:
- if hltb_rom["hltb_id"]:
- hltb_name = meta_hltb_handler.normalize_search_term(
- hltb_rom.get("name", ""),
- remove_articles=False,
- )
- merged_dict[hltb_name] = {
- **hltb_rom,
- "is_identified": True,
- "is_unidentified": False,
- "platform_id": rom.platform_id,
- "hltb_url_cover": hltb_rom.pop("url_cover", ""),
- **merged_dict.get(hltb_name, {}),
- }
-
async def get_sgdb_rom(name: str) -> tuple[str, SGDBRom]:
return name, await meta_sgdb_handler.get_details_by_names([name])
diff --git a/backend/endpoints/sockets/scan.py b/backend/endpoints/sockets/scan.py
index 16f443eaa..5f772a211 100644
--- a/backend/endpoints/sockets/scan.py
+++ b/backend/endpoints/sockets/scan.py
@@ -1,5 +1,6 @@
from __future__ import annotations
+import asyncio
from dataclasses import dataclass
from itertools import batched
from typing import Any, Final
@@ -8,7 +9,7 @@ import socketio # type: ignore
from rq import Worker
from rq.job import Job
-from config import DEV_MODE, REDIS_URL, SCAN_TIMEOUT, TASK_RESULT_TTL
+from config import DEV_MODE, REDIS_URL, SCAN_TIMEOUT, SCAN_WORKERS, TASK_RESULT_TTL
from endpoints.responses import TaskType
from endpoints.responses.platform import PlatformSchema
from endpoints.responses.rom import SimpleRomSchema
@@ -55,17 +56,33 @@ class ScanStats:
new_platforms: int = 0
identified_platforms: int = 0
scanned_roms: int = 0
- added_roms: int = 0
+ new_roms: int = 0
identified_roms: int = 0
scanned_firmware: int = 0
- added_firmware: int = 0
+ new_firmware: int = 0
- def update(self, **kwargs):
- for key, value in kwargs.items():
- if hasattr(self, key):
- setattr(self, key, value)
+ def __post_init__(self):
+ # Lock for thread-safe updates
+ self._lock = asyncio.Lock()
- update_job_meta({"scan_stats": self.to_dict()})
+ async def update(self, socket_manager: socketio.AsyncRedisManager, **kwargs):
+ async with self._lock:
+ for key, value in kwargs.items():
+ if hasattr(self, key):
+ setattr(self, key, value)
+
+ update_job_meta({"scan_stats": self.to_dict()})
+ await socket_manager.emit("scan:update_stats", self.to_dict())
+
+ async def increment(self, socket_manager: socketio.AsyncRedisManager, **kwargs):
+ async with self._lock:
+ for key, value in kwargs.items():
+ if hasattr(self, key):
+ current_value = getattr(self, key)
+ setattr(self, key, current_value + value)
+
+ update_job_meta({"scan_stats": self.to_dict()})
+ await socket_manager.emit("scan:update_stats", self.to_dict())
def to_dict(self) -> dict[str, Any]:
return {
@@ -75,10 +92,10 @@ class ScanStats:
"new_platforms": self.new_platforms,
"identified_platforms": self.identified_platforms,
"scanned_roms": self.scanned_roms,
- "added_roms": self.added_roms,
+ "new_roms": self.new_roms,
"identified_roms": self.identified_roms,
"scanned_firmware": self.scanned_firmware,
- "added_firmware": self.added_firmware,
+ "new_firmware": self.new_firmware,
}
@@ -91,10 +108,11 @@ async def _identify_firmware(
platform: Platform,
fs_fw: str,
scan_stats: ScanStats,
-) -> ScanStats:
+ socket_manager: socketio.AsyncRedisManager,
+) -> None:
# Break early if the flag is set
if redis_client.get(STOP_SCAN_FLAG):
- return scan_stats
+ return
firmware = db_firmware_handler.get_firmware_by_filename(platform.id, fs_fw)
@@ -113,25 +131,24 @@ async def _identify_firmware(
crc_hash=scanned_firmware.crc_hash,
)
- scan_stats.update(
- scanned_firmware=scan_stats.scanned_firmware + 1,
- added_firmware=scan_stats.added_firmware + (1 if not firmware else 0),
+ await scan_stats.increment(
+ socket_manager=socket_manager,
+ scanned_firmware=1,
+ new_firmware=1 if not firmware else 0,
)
scanned_firmware.missing_from_fs = False
scanned_firmware.is_verified = is_verified
db_firmware_handler.add_firmware(scanned_firmware)
- return scan_stats
-
def _should_scan_rom(scan_type: ScanType, rom: Rom | None, roms_ids: list[int]) -> bool:
"""Decide if a rom should be scanned or not
Args:
- scan_type (str): Type of scan to be performed.
- roms_ids (list[int], optional): List of selected roms to be scanned.
- metadata_sources (list[str], optional): List of metadata sources to be used
+ scan_type (ScanType): Type of scan to be performed.
+ rom (Rom | None): The rom to be scanned.
+ roms_ids (list[int]): List of selected roms to be scanned.
"""
# This logic is tricky so only touch it if you know what you're doing"""
@@ -165,10 +182,10 @@ async def _identify_rom(
metadata_sources: list[str],
socket_manager: socketio.AsyncRedisManager,
scan_stats: ScanStats,
-) -> ScanStats:
+) -> None:
# Break early if the flag is set
if redis_client.get(STOP_SCAN_FLAG):
- return scan_stats
+ return
if not _should_scan_rom(scan_type=scan_type, rom=rom, roms_ids=roms_ids):
if rom:
@@ -180,8 +197,7 @@ async def _identify_rom(
if rom.missing_from_fs:
db_rom_handler.update_rom(rom.id, {"missing_from_fs": False})
- scan_stats.update(scanned_roms=scan_stats.scanned_roms + 1)
- return scan_stats
+ return
# Update properties that don't require metadata
fs_regions, fs_revisions, fs_languages, fs_other_tags = fs_rom_handler.parse_tags(
@@ -217,7 +233,7 @@ async def _identify_rom(
# Silly checks to make the type checker happy
if not rom:
- return scan_stats
+ return
# Build rom files object before scanning
log.debug(f"Calculating file hashes for {rom.fs_name}...")
@@ -245,11 +261,11 @@ async def _identify_rom(
socket_manager=socket_manager,
)
- scan_stats.update(
- scanned_roms=scan_stats.scanned_roms + 1,
- added_roms=scan_stats.added_roms + (1 if not rom else 0),
- identified_roms=scan_stats.identified_roms
- + (1 if scanned_rom.is_identified else 0),
+ await scan_stats.increment(
+ socket_manager=socket_manager,
+ scanned_roms=1,
+ new_roms=1 if newly_added else 0,
+ identified_roms=1 if scanned_rom.is_identified else 0,
)
_added_rom = db_rom_handler.add_rom(scanned_rom)
@@ -340,9 +356,6 @@ async def _identify_rom(
exclude={"created_at", "updated_at", "rom_user"}
),
)
- await socket_manager.emit("", None)
-
- return scan_stats
async def _identify_platform(
@@ -366,11 +379,11 @@ async def _identify_platform(
if platform:
scanned_platform.id = platform.id
- scan_stats.update(
- scanned_platforms=scan_stats.scanned_platforms + 1,
- new_platforms=scan_stats.new_platforms + (1 if not platform else 0),
- identified_platforms=scan_stats.identified_platforms
- + (1 if scanned_platform.is_identified else 0),
+ await scan_stats.increment(
+ socket_manager=socket_manager,
+ scanned_platforms=1,
+ new_platforms=1 if not platform else 0,
+ identified_platforms=1 if scanned_platform.is_identified else 0,
)
platform = db_platform_handler.add_platform(scanned_platform)
@@ -378,10 +391,9 @@ async def _identify_platform(
await socket_manager.emit(
"scan:scanning_platform",
PlatformSchema.model_validate(platform).model_dump(
- include={"id", "name", "slug", "fs_slug", "is_identified"}
+ include={"id", "name", "display_name", "slug", "fs_slug", "is_identified"}
),
)
- await socket_manager.emit("", None)
# Scanning firmware
try:
@@ -397,7 +409,8 @@ async def _identify_platform(
log.info(f"{hl(str(len(fs_firmware)))} firmware files found")
for fs_fw in fs_firmware:
- scan_stats = await _identify_firmware(
+ await _identify_firmware(
+ socket_manager=socket_manager,
platform=platform,
fs_fw=fs_fw,
scan_stats=scan_stats,
@@ -417,17 +430,16 @@ async def _identify_platform(
else:
log.info(f"{hl(str(len(fs_roms)))} roms found in the file system")
- for fs_roms_batch in batched(fs_roms, 200, strict=False):
- rom_by_filename_map = db_rom_handler.get_roms_by_fs_name(
- platform_id=platform.id,
- fs_names={fs_rom["fs_name"] for fs_rom in fs_roms_batch},
- )
+ # Create semaphore to limit concurrent ROM scanning
+ scan_semaphore = asyncio.Semaphore(SCAN_WORKERS)
- for fs_rom in fs_roms_batch:
- scan_stats = await _identify_rom(
+ async def scan_rom_with_semaphore(fs_rom: FSRom, rom: Rom | None) -> None:
+ """Scan a single ROM with semaphore limiting"""
+ async with scan_semaphore:
+ await _identify_rom(
platform=platform,
fs_rom=fs_rom,
- rom=rom_by_filename_map.get(fs_rom["fs_name"]),
+ rom=rom,
scan_type=scan_type,
roms_ids=roms_ids,
metadata_sources=metadata_sources,
@@ -435,6 +447,26 @@ async def _identify_platform(
scan_stats=scan_stats,
)
+ for fs_roms_batch in batched(fs_roms, 200, strict=False):
+ roms_by_fs_name = db_rom_handler.get_roms_by_fs_name(
+ platform_id=platform.id,
+ fs_names={fs_rom["fs_name"] for fs_rom in fs_roms_batch},
+ )
+
+ # Process ROMs concurrently within the batch
+ scan_tasks = [
+ scan_rom_with_semaphore(
+ fs_rom=fs_rom, rom=roms_by_fs_name.get(fs_rom["fs_name"])
+ )
+ for fs_rom in fs_roms_batch
+ ]
+
+ # Wait for all ROMs in the batch to complete
+ batched_results = await asyncio.gather(*scan_tasks, return_exceptions=True)
+ for result, fs_rom in zip(batched_results, fs_roms_batch, strict=False):
+ if isinstance(result, Exception):
+ log.error(f"Error scanning ROM {fs_rom['fs_name']}: {result}")
+
missing_roms = db_rom_handler.mark_missing_roms(
platform.id, [rom["fs_name"] for rom in fs_roms]
)
@@ -457,17 +489,17 @@ async def _identify_platform(
@initialize_context()
async def scan_platforms(
platform_ids: list[int],
+ metadata_sources: list[str],
scan_type: ScanType = ScanType.QUICK,
roms_ids: list[int] | None = None,
- metadata_sources: list[str] | None = None,
) -> ScanStats:
"""Scan all the listed platforms and fetch metadata from different sources
Args:
- platform_slugs (list[str]): List of platform slugs to be scanned
- scan_type (str): Type of scan to be performed. Defaults to "quick".
- roms_ids (list[int], optional): List of selected roms to be scanned. Defaults to [].
- metadata_sources (list[str], optional): List of metadata sources to be used. Defaults to all sources.
+ platform_ids (list[int]): List of platform ids to be scanned
+ metadata_sources (list[str]): List of metadata sources to be used
+ scan_type (ScanType): Type of scan to be performed.
+ roms_ids (list[int], optional): List of selected roms to be scanned.
"""
if not roms_ids:
@@ -476,11 +508,6 @@ async def scan_platforms(
socket_manager = _get_socket_manager()
scan_stats = ScanStats()
- if not metadata_sources:
- log.error("No metadata sources provided")
- await socket_manager.emit("scan:done_ko", "No metadata sources provided")
- return scan_stats
-
try:
fs_platforms: list[str] = await fs_platform_handler.get_platforms()
except FolderStructureNotMatchException as e:
@@ -489,10 +516,15 @@ async def scan_platforms(
return scan_stats
# Precalculate total platforms and ROMs
- scan_stats.update(total_platforms=len(fs_platforms))
+ total_roms = 0
for platform_slug in fs_platforms:
fs_roms = await fs_rom_handler.get_roms(Platform(fs_slug=platform_slug))
- scan_stats.update(total_roms=scan_stats.total_roms + len(fs_roms))
+ total_roms += len(fs_roms)
+ await scan_stats.update(
+ socket_manager=socket_manager,
+ total_platforms=len(fs_platforms),
+ total_roms=total_roms,
+ )
async def stop_scan():
log.info(f"{emoji.EMOJI_STOP_SIGN} Scan stopped manually")
@@ -566,17 +598,17 @@ async def scan_handler(_sid: str, options: dict[str, Any]):
if DEV_MODE:
return await scan_platforms(
platform_ids=platform_ids,
+ metadata_sources=metadata_sources,
scan_type=scan_type,
roms_ids=roms_ids,
- metadata_sources=metadata_sources,
)
return high_prio_queue.enqueue(
scan_platforms,
- platform_ids,
- scan_type,
- roms_ids,
- metadata_sources,
+ platform_ids=platform_ids,
+ metadata_sources=metadata_sources,
+ scan_type=scan_type,
+ roms_ids=roms_ids,
job_timeout=SCAN_TIMEOUT, # Timeout (default of 4 hours)
result_ttl=TASK_RESULT_TTL,
meta={
diff --git a/backend/handler/auth/base_handler.py b/backend/handler/auth/base_handler.py
index 5562b7506..3bba2456b 100644
--- a/backend/handler/auth/base_handler.py
+++ b/backend/handler/auth/base_handler.py
@@ -332,7 +332,7 @@ class OpenIDHandler:
role = Role.VIEWER
if OIDC_CLAIM_ROLES and OIDC_CLAIM_ROLES in userinfo:
- roles = userinfo[OIDC_CLAIM_ROLES]
+ roles = userinfo[OIDC_CLAIM_ROLES] or []
if OIDC_ROLE_ADMIN and OIDC_ROLE_ADMIN in roles:
role = Role.ADMIN
elif OIDC_ROLE_EDITOR and OIDC_ROLE_EDITOR in roles:
diff --git a/backend/handler/database/collections_handler.py b/backend/handler/database/collections_handler.py
index 6c9264808..2b1f348be 100644
--- a/backend/handler/database/collections_handler.py
+++ b/backend/handler/database/collections_handler.py
@@ -59,6 +59,15 @@ class DBCollectionsHandler(DBBaseHandler):
) -> Collection | None:
return session.scalar(query.filter_by(name=name, user_id=user_id).limit(1))
+ @begin_session
+ @with_roms
+ def get_favorite_collection(
+ self, user_id: int, query: Query = None, session: Session = None
+ ) -> Collection | None:
+ return session.scalar(
+ query.filter_by(is_favorite=True, user_id=user_id).limit(1)
+ )
+
@begin_session
@with_roms
def get_collections(
@@ -211,7 +220,7 @@ class DBCollectionsHandler(DBBaseHandler):
virtual_collection_id=criteria.get("virtual_collection_id"),
search_term=criteria.get("search_term"),
matched=criteria.get("matched"),
- favourite=criteria.get("favourite"),
+ favorite=criteria.get("favorite"),
duplicate=criteria.get("duplicate"),
playable=criteria.get("playable"),
has_ra=criteria.get("has_ra"),
diff --git a/backend/handler/database/roms_handler.py b/backend/handler/database/roms_handler.py
index 9f703a954..ff7d98c43 100644
--- a/backend/handler/database/roms_handler.py
+++ b/backend/handler/database/roms_handler.py
@@ -245,27 +245,24 @@ class DBRomsHandler(DBBaseHandler):
predicate = not_(predicate)
return query.filter(predicate)
- def filter_by_favourite(
+ def filter_by_favorite(
self, query: Query, session: Session, value: bool, user_id: int | None
) -> Query:
- """Filter based on whether the rom is in the user's Favourites collection."""
+ """Filter based on whether the rom is in the user's favorites collection."""
if not user_id:
return query
from . import db_collection_handler
- favourites_collection = db_collection_handler.get_collection_by_name(
- "favourites", user_id
- )
-
- if favourites_collection:
- predicate = Rom.id.in_(favourites_collection.rom_ids)
+ favorites_collection = db_collection_handler.get_favorite_collection(user_id)
+ if favorites_collection:
+ predicate = Rom.id.in_(favorites_collection.rom_ids)
if not value:
predicate = not_(predicate)
return query.filter(predicate)
- # If no Favourites collection exists, return the original query if non-favourites
- # were requested, or an empty query if favourites were requested.
+ # If no favorites collection exists, return the original query if non-favorites
+ # were requested, or an empty query if favorites were requested.
if not value:
return query
return query.filter(false())
@@ -377,7 +374,7 @@ class DBRomsHandler(DBBaseHandler):
smart_collection_id: int | None = None,
search_term: str | None = None,
matched: bool | None = None,
- favourite: bool | None = None,
+ favorite: bool | None = None,
duplicate: bool | None = None,
playable: bool | None = None,
has_ra: bool | None = None,
@@ -419,9 +416,9 @@ class DBRomsHandler(DBBaseHandler):
if matched is not None:
query = self.filter_by_matched(query, value=matched)
- if favourite is not None:
- query = self.filter_by_favourite(
- query, session=session, value=favourite, user_id=user_id
+ if favorite is not None:
+ query = self.filter_by_favorite(
+ query, session=session, value=favorite, user_id=user_id
)
if duplicate is not None:
@@ -651,7 +648,7 @@ class DBRomsHandler(DBBaseHandler):
virtual_collection_id=kwargs.get("virtual_collection_id", None),
search_term=kwargs.get("search_term", None),
matched=kwargs.get("matched", None),
- favourite=kwargs.get("favourite", None),
+ favorite=kwargs.get("favorite", None),
duplicate=kwargs.get("duplicate", None),
playable=kwargs.get("playable", None),
has_ra=kwargs.get("has_ra", None),
diff --git a/backend/handler/filesystem/roms_handler.py b/backend/handler/filesystem/roms_handler.py
index 519d150f6..ae5c7621d 100644
--- a/backend/handler/filesystem/roms_handler.py
+++ b/backend/handler/filesystem/roms_handler.py
@@ -330,16 +330,29 @@ class FSRomsHandler(FSHandler):
):
continue
+ # Check if this is a top-level file (not in a subdirectory)
+ is_top_level = f_path.samefile(Path(abs_fs_path, rom.fs_name))
+
if hashable_platform:
try:
- crc_c, rom_crc_c, md5_h, rom_md5_h, sha1_h, rom_sha1_h = (
- self._calculate_rom_hashes(
- Path(f_path, file_name),
- rom_crc_c,
- rom_md5_h,
- rom_sha1_h,
+ if is_top_level:
+ # Include this file in the main ROM hash calculation
+ crc_c, rom_crc_c, md5_h, rom_md5_h, sha1_h, rom_sha1_h = (
+ self._calculate_rom_hashes(
+ Path(f_path, file_name),
+ rom_crc_c,
+ rom_md5_h,
+ rom_sha1_h,
+ )
+ )
+ else:
+ # Calculate individual file hash only
+ crc_c, _, md5_h, _, sha1_h, _ = self._calculate_rom_hashes(
+ Path(f_path, file_name),
+ 0,
+ hashlib.md5(usedforsecurity=False),
+ hashlib.sha1(usedforsecurity=False),
)
- )
except zlib.error:
crc_c = 0
md5_h = hashlib.md5(usedforsecurity=False)
diff --git a/backend/handler/metadata/__init__.py b/backend/handler/metadata/__init__.py
index be58e4712..f09657f25 100644
--- a/backend/handler/metadata/__init__.py
+++ b/backend/handler/metadata/__init__.py
@@ -1,7 +1,7 @@
from .flashpoint_handler import FlashpointHandler
from .gamelist_handler import GamelistHandler
from .hasheous_handler import HasheousHandler
-from .hltb_handler import HowLongToBeatHandler
+from .hltb_handler import HLTBHandler
from .igdb_handler import IGDBHandler
from .launchbox_handler import LaunchboxHandler
from .moby_handler import MobyGamesHandler
@@ -21,5 +21,5 @@ meta_launchbox_handler = LaunchboxHandler()
meta_hasheous_handler = HasheousHandler()
meta_tgdb_handler = TGDBHandler()
meta_flashpoint_handler = FlashpointHandler()
-meta_hltb_handler = HowLongToBeatHandler()
meta_gamelist_handler = GamelistHandler()
+meta_hltb_handler = HLTBHandler()
diff --git a/backend/handler/metadata/base_handler.py b/backend/handler/metadata/base_handler.py
index 19961a3bc..32d0433e3 100644
--- a/backend/handler/metadata/base_handler.py
+++ b/backend/handler/metadata/base_handler.py
@@ -14,7 +14,6 @@ from logger.logger import log
from tasks.scheduled.update_switch_titledb import (
SWITCH_PRODUCT_ID_KEY,
SWITCH_TITLEDB_INDEX_KEY,
- update_switch_titledb_task,
)
jarowinkler = JaroWinkler()
@@ -190,12 +189,8 @@ class MetadataHandler(abc.ABC):
title_id = match.group(1)
if not (await async_cache.exists(SWITCH_TITLEDB_INDEX_KEY)):
- log.warning("Fetching the Switch titleID index file...")
- await update_switch_titledb_task.run(force=True)
-
- if not (await async_cache.exists(SWITCH_TITLEDB_INDEX_KEY)):
- log.error("Could not fetch the Switch titleID index file")
- return search_term, None
+ log.error("Could not find the Switch titleID index file in cache")
+ return search_term, None
index_entry = await async_cache.hget(SWITCH_TITLEDB_INDEX_KEY, title_id)
if index_entry:
@@ -215,12 +210,8 @@ class MetadataHandler(abc.ABC):
product_id = "".join(product_id)
if not (await async_cache.exists(SWITCH_PRODUCT_ID_KEY)):
- log.warning("Fetching the Switch productID index file...")
- await update_switch_titledb_task.run(force=True)
-
- if not (await async_cache.exists(SWITCH_PRODUCT_ID_KEY)):
- log.error("Could not fetch the Switch productID index file")
- return search_term, None
+ log.error("Could not find the Switch productID index file in cache")
+ return search_term, None
index_entry = await async_cache.hget(SWITCH_PRODUCT_ID_KEY, product_id)
if index_entry:
diff --git a/backend/handler/metadata/flashpoint_handler.py b/backend/handler/metadata/flashpoint_handler.py
index 8f812a95c..ea0371645 100644
--- a/backend/handler/metadata/flashpoint_handler.py
+++ b/backend/handler/metadata/flashpoint_handler.py
@@ -9,7 +9,7 @@ from fastapi import HTTPException, status
from config import FLASHPOINT_API_ENABLED
from logger.logger import log
-from utils import get_version
+from utils import get_version, is_valid_uuid
from utils.context import ctx_httpx_client
from .base_handler import MetadataHandler
@@ -243,6 +243,11 @@ class FlashpointHandler(MetadataHandler):
if platform_slug not in FLASHPOINT_PLATFORM_LIST:
return FlashpointRom(flashpoint_id=None)
+ # Check if the filename is a UUID
+ fs_name_no_tags = fs_rom_handler.get_file_name_with_no_tags(fs_name)
+ if is_valid_uuid(fs_name_no_tags):
+ return await self.get_rom_by_id(flashpoint_id=fs_name_no_tags)
+
# Normalize the search term
search_term = fs_rom_handler.get_file_name_with_no_tags(fs_name)
search_term = self.normalize_search_term(search_term, remove_punctuation=False)
diff --git a/backend/handler/metadata/hltb_handler.py b/backend/handler/metadata/hltb_handler.py
index 210e7738d..6b04fc2d7 100644
--- a/backend/handler/metadata/hltb_handler.py
+++ b/backend/handler/metadata/hltb_handler.py
@@ -167,7 +167,7 @@ def extract_hltb_metadata(game: HLTBGame) -> HLTBMetadata:
GITHUB_FILE_URL = "https://raw.githubusercontent.com/rommapp/romm/refs/heads/master/backend/handler/metadata/fixtures/hltb_api_url"
-class HowLongToBeatHandler(MetadataHandler):
+class HLTBHandler(MetadataHandler):
"""
Handler for HowLongToBeat, a service that provides game completion times.
"""
@@ -212,14 +212,6 @@ class HowLongToBeatHandler(MetadataHandler):
return True
- @staticmethod
- def extract_hltb_id_from_filename(fs_name: str) -> int | None:
- """Extract HLTB ID from filename tag like (hltb-12345)."""
- match = HLTB_TAG_REGEX.search(fs_name)
- if match:
- return int(match.group(1))
- return None
-
async def _request(self, url: str, payload: dict) -> dict:
"""
Sends a POST request to HowLongToBeat API.
@@ -386,21 +378,6 @@ class HowLongToBeatHandler(MetadataHandler):
if not HLTB_API_ENABLED:
return HLTBRom(hltb_id=None)
- # Check for HLTB ID tag in filename first
- hltb_id_from_tag = self.extract_hltb_id_from_filename(fs_name)
- if hltb_id_from_tag:
- log.debug(f"Found HLTB ID tag in filename: {hltb_id_from_tag}")
- rom_by_id = await self.get_rom_by_id(hltb_id_from_tag)
- if rom_by_id["hltb_id"]:
- log.debug(
- f"Successfully matched ROM by HLTB ID tag: {fs_name} -> {hltb_id_from_tag}"
- )
- return rom_by_id
- else:
- log.warning(
- f"HLTB ID tag found but no match: {fs_name} -> {hltb_id_from_tag}"
- )
-
# We replace " - " with ": " to match HowLongToBeat's naming convention
search_term = fs_rom_handler.get_file_name_with_no_tags(fs_name).replace(
" - ", ": "
@@ -493,29 +470,6 @@ class HowLongToBeatHandler(MetadataHandler):
return roms
- async def get_rom_by_id(self, hltb_id: int) -> HLTBRom:
- """
- Get ROM information by HowLongToBeat ID.
- Note: HLTB doesn't have a direct "get by ID" endpoint,
- so this method searches by the game name if we can find it.
-
- :param hltb_id: The HowLongToBeat game ID.
- :return: A HLTBRom object.
- """
- if not HLTB_API_ENABLED:
- return HLTBRom(hltb_id=None)
-
- if not hltb_id:
- return HLTBRom(hltb_id=None)
-
- # Unfortunately, HLTB doesn't provide a direct "get by ID" endpoint
- # This is a limitation of their API - we would need to search and filter
- # In practice, this method might not be very useful for HLTB
- log.debug(
- f"get_rom_by_id not fully supported for HowLongToBeat (ID: {hltb_id})"
- )
- return HLTBRom(hltb_id=hltb_id)
-
async def price_check(
self, hltb_id: int, steam_id: int = 0, itch_id: int = 0
) -> HLTBPriceCheckResponse | None:
diff --git a/backend/handler/metadata/igdb_handler.py b/backend/handler/metadata/igdb_handler.py
index 1f5bf6337..670a247dc 100644
--- a/backend/handler/metadata/igdb_handler.py
+++ b/backend/handler/metadata/igdb_handler.py
@@ -245,10 +245,18 @@ class IGDBHandler(MetadataHandler):
game_type_filter = ""
log.debug("Searching in games endpoint with game_type %s", game_type_filter)
+ where_filter = f"platforms=[{platform_igdb_id}] {game_type_filter}"
+
+ # Special case for ScummVM games
+ # https://github.com/rommapp/romm/issues/2424
+ scummvm_platform = self.get_platform(UPS.SCUMMVM)
+ if scummvm_platform["igdb_id"] == platform_igdb_id:
+ where_filter = f"keywords=[{platform_igdb_id}] {game_type_filter}"
+
roms = await self.igdb_service.list_games(
search_term=search_term,
fields=GAMES_FIELDS,
- where=f"platforms=[{platform_igdb_id}] {game_type_filter}",
+ where=where_filter,
limit=self.pagination_limit,
)
@@ -727,7 +735,7 @@ IGDB_PLATFORM_CATEGORIES: dict[int, str] = {
1: "Console",
2: "Arcade",
3: "Platform",
- 4: "Operative System",
+ 4: "Operating System",
5: "Portable Console",
6: "Computer",
}
@@ -1110,10 +1118,10 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"url_logo": "https://images.igdb.com/igdb/image/upload/t_1080p/plf7.jpg",
},
UPS.ANALOGUEELECTRONICS: {
- "category": "Unknown",
+ "category": "Console",
"family_name": "",
"family_slug": "",
- "generation": -1,
+ "generation": 1,
"id": 100,
"name": "Analogue electronics",
"slug": "analogueelectronics",
@@ -1121,7 +1129,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"url_logo": "",
},
UPS.ANDROID: {
- "category": "Operative System",
+ "category": "Operating System",
"family_name": "",
"family_slug": "",
"generation": -1,
@@ -1179,7 +1187,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"category": "Console",
"family_name": "",
"family_slug": "",
- "generation": -1,
+ "generation": 2,
"id": 473,
"name": "Arcadia 2001",
"slug": "arcadia-2001",
@@ -1199,8 +1207,8 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
},
UPS.ASTROCADE: {
"category": "Console",
- "family_name": "",
- "family_slug": "",
+ "family_name": "Bally",
+ "family_slug": "bally",
"generation": 2,
"id": 91,
"name": "Bally Astrocade",
@@ -1289,7 +1297,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"category": "Console",
"family_name": "General Instruments",
"family_slug": "general-instruments",
- "generation": -1,
+ "generation": 1,
"id": 145,
"name": "AY-3-8603",
"slug": "ay-3-8603",
@@ -1300,7 +1308,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"category": "Console",
"family_name": "General Instruments",
"family_slug": "general-instruments",
- "generation": -1,
+ "generation": 1,
"id": 146,
"name": "AY-3-8605",
"slug": "ay-3-8605",
@@ -1311,7 +1319,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"category": "Console",
"family_name": "General Instruments",
"family_slug": "general-instruments",
- "generation": -1,
+ "generation": 1,
"id": 147,
"name": "AY-3-8606",
"slug": "ay-3-8606",
@@ -1322,7 +1330,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"category": "Console",
"family_name": "General Instruments",
"family_slug": "general-instruments",
- "generation": -1,
+ "generation": 1,
"id": 148,
"name": "AY-3-8607",
"slug": "ay-3-8607",
@@ -1333,7 +1341,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"category": "Computer",
"family_name": "General Instruments",
"family_slug": "general-instruments",
- "generation": -1,
+ "generation": 1,
"id": 141,
"name": "AY-3-8610",
"slug": "ay-3-8610",
@@ -1344,7 +1352,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"category": "Console",
"family_name": "General Instruments",
"family_slug": "general-instruments",
- "generation": -1,
+ "generation": 1,
"id": 144,
"name": "AY-3-8710",
"slug": "ay-3-8710",
@@ -1355,7 +1363,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"category": "Console",
"family_name": "General Instruments",
"family_slug": "general-instruments",
- "generation": -1,
+ "generation": 1,
"id": 143,
"name": "AY-3-8760",
"slug": "ay-3-8760",
@@ -1374,7 +1382,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"url_logo": "https://images.igdb.com/igdb/image/upload/t_1080p/pl86.jpg",
},
UPS.BLACKBERRY: {
- "category": "Operative System",
+ "category": "Operating System",
"family_name": "",
"family_slug": "",
"generation": -1,
@@ -1388,7 +1396,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"category": "Console",
"family_name": "",
"family_slug": "",
- "generation": -1,
+ "generation": 7,
"id": 239,
"name": "Blu-ray Player",
"slug": "blu-ray-player",
@@ -1454,7 +1462,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"category": "Console",
"family_name": "Casio",
"family_slug": "casio",
- "generation": -1,
+ "generation": 5,
"id": 380,
"name": "Casio Loopy",
"slug": "casio-loopy",
@@ -1506,10 +1514,10 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"url_logo": "https://images.igdb.com/igdb/image/upload/t_1080p/plf3.jpg",
},
UPS.DAYDREAM: {
- "category": "Unknown",
- "family_name": "",
- "family_slug": "",
- "generation": -1,
+ "category": "Console",
+ "family_name": "Google",
+ "family_slug": "google",
+ "generation": 8,
"id": 164,
"name": "Daydream",
"slug": "daydream",
@@ -1550,7 +1558,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"url_logo": "",
},
UPS.DOS: {
- "category": "Operative System",
+ "category": "Operating System",
"family_name": "Microsoft",
"family_slug": "microsoft",
"generation": -1,
@@ -1575,7 +1583,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"category": "Console",
"family_name": "",
"family_slug": "",
- "generation": -1,
+ "generation": 6,
"id": 238,
"name": "DVD Player",
"slug": "dvd-player",
@@ -1795,7 +1803,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"category": "Console",
"family_name": "Samsung",
"family_slug": "samsung",
- "generation": -1,
+ "generation": 8,
"id": 388,
"name": "Gear VR",
"slug": "gear-vr",
@@ -1825,7 +1833,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"url_logo": "https://images.igdb.com/igdb/image/upload/t_1080p/plnl.jpg",
},
UPS.GT40: {
- "category": "Unknown",
+ "category": "Computer",
"family_name": "DEC",
"family_slug": "dec",
"generation": -1,
@@ -1839,7 +1847,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"category": "Portable Console",
"family_name": "",
"family_slug": "",
- "generation": -1,
+ "generation": 1,
"id": 411,
"name": "Handheld Electronic LCD",
"slug": "handheld-electronic-lcd",
@@ -1872,7 +1880,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"category": "Arcade",
"family_name": "SNK",
"family_slug": "snk",
- "generation": -1,
+ "generation": 5,
"id": 135,
"name": "Hyper Neo Geo 64",
"slug": "hyper-neo-geo-64",
@@ -1891,7 +1899,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"url_logo": "https://images.igdb.com/igdb/image/upload/t_1080p/plj2.jpg",
},
UPS.IMLAC_PDS1: {
- "category": "Unknown",
+ "category": "Computer",
"family_name": "",
"family_slug": "",
"generation": -1,
@@ -1916,7 +1924,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"category": "Console",
"family_name": "",
"family_slug": "",
- "generation": -1,
+ "generation": 9,
"id": 382,
"name": "Intellivision Amico",
"slug": "intellivision-amico",
@@ -1924,7 +1932,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"url_logo": "https://images.igdb.com/igdb/image/upload/t_1080p/plkp.jpg",
},
UPS.IOS: {
- "category": "Operative System",
+ "category": "Operating System",
"family_name": "Apple",
"family_slug": "apple",
"generation": -1,
@@ -2001,7 +2009,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"url_logo": "",
},
UPS.LINUX: {
- "category": "Operative System",
+ "category": "Operating System",
"family_name": "Linux",
"family_slug": "linux",
"generation": -1,
@@ -2023,7 +2031,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"url_logo": "https://images.igdb.com/igdb/image/upload/t_1080p/pl82.jpg",
},
UPS.MAC: {
- "category": "Operative System",
+ "category": "Operating System",
"family_name": "Apple",
"family_slug": "apple",
"generation": -1,
@@ -2048,7 +2056,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"category": "Console",
"family_name": "Meta",
"family_slug": "meta",
- "generation": -1,
+ "generation": 9,
"id": 386,
"name": "Meta Quest 2",
"slug": "meta-quest-2",
@@ -2067,7 +2075,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"url_logo": "https://images.igdb.com/igdb/image/upload/t_1080p/plnb.jpg",
},
UPS.MICROCOMPUTER: {
- "category": "Unknown",
+ "category": "Computer",
"family_name": "",
"family_slug": "",
"generation": -1,
@@ -2092,7 +2100,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"category": "Portable Console",
"family_name": "",
"family_slug": "",
- "generation": -1,
+ "generation": 7,
"id": 55,
"name": "Legacy Mobile Device",
"slug": "mobile",
@@ -2202,7 +2210,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"category": "Arcade",
"family_name": "SNK",
"family_slug": "snk",
- "generation": -1,
+ "generation": 4,
"id": 79,
"name": "Neo Geo MVS",
"slug": "neogeomvs",
@@ -2279,7 +2287,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"category": "Console",
"family_name": "",
"family_slug": "",
- "generation": -1,
+ "generation": 6,
"id": 122,
"name": "Nuon",
"slug": "nuon",
@@ -2290,7 +2298,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"category": "Console",
"family_name": "Meta",
"family_slug": "meta",
- "generation": -1,
+ "generation": 8,
"id": 387,
"name": "Oculus Go",
"slug": "oculus-go",
@@ -2301,7 +2309,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"category": "Console",
"family_name": "Meta",
"family_slug": "meta",
- "generation": -1,
+ "generation": 8,
"id": 384,
"name": "Oculus Quest",
"slug": "oculus-quest",
@@ -2312,7 +2320,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"category": "Console",
"family_name": "Meta",
"family_slug": "meta",
- "generation": -1,
+ "generation": 7,
"id": 385,
"name": "Oculus Rift",
"slug": "oculus-rift",
@@ -2320,10 +2328,10 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"url_logo": "https://images.igdb.com/igdb/image/upload/t_1080p/pln8.jpg",
},
UPS.OCULUS_VR: {
- "category": "Unknown",
+ "category": "Console",
"family_name": "Meta",
"family_slug": "meta",
- "generation": -1,
+ "generation": 7,
"id": 162,
"name": "Oculus VR",
"slug": "oculus-vr",
@@ -2386,7 +2394,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"url_logo": "https://images.igdb.com/igdb/image/upload/t_1080p/pl6k.jpg",
},
UPS.PALM_OS: {
- "category": "Operative System",
+ "category": "Operating System",
"family_name": "",
"family_slug": "",
"generation": -1,
@@ -2463,7 +2471,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"url_logo": "https://images.igdb.com/igdb/image/upload/t_1080p/plf8.jpg",
},
UPS.PDP_7: {
- "category": "Unknown",
+ "category": "Computer",
"family_name": "DEC",
"family_slug": "dec",
"generation": -1,
@@ -2521,7 +2529,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"category": "Console",
"family_name": "Philips",
"family_slug": "philips",
- "generation": -1,
+ "generation": 4,
"id": 117,
"name": "Philips CD-i",
"slug": "philips-cd-i",
@@ -2574,8 +2582,8 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
},
UPS.POCKETSTATION: {
"category": "Portable Console",
- "family_name": "PlayStation",
- "family_slug": "playstation",
+ "family_name": "Sony",
+ "family_slug": "sony",
"generation": 5,
"id": 441,
"name": "PocketStation",
@@ -2587,7 +2595,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"category": "Portable Console",
"family_name": "Nintendo",
"family_slug": "nintendo",
- "generation": -1,
+ "generation": 6,
"id": 166,
"name": "Pokémon mini",
"slug": "pokemon-mini",
@@ -2598,7 +2606,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"category": "Console",
"family_name": "",
"family_slug": "",
- "generation": -1,
+ "generation": 9,
"id": 509,
"name": "Polymega",
"slug": "polymega",
@@ -2607,8 +2615,8 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
},
UPS.PSX: {
"category": "Console",
- "family_name": "PlayStation",
- "family_slug": "playstation",
+ "family_name": "Sony",
+ "family_slug": "sony",
"generation": 5,
"id": 7,
"name": "PlayStation",
@@ -2618,8 +2626,8 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
},
UPS.PS2: {
"category": "Console",
- "family_name": "PlayStation",
- "family_slug": "playstation",
+ "family_name": "Sony",
+ "family_slug": "sony",
"generation": 6,
"id": 8,
"name": "PlayStation 2",
@@ -2629,8 +2637,8 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
},
UPS.PS3: {
"category": "Console",
- "family_name": "PlayStation",
- "family_slug": "playstation",
+ "family_name": "Sony",
+ "family_slug": "sony",
"generation": 7,
"id": 9,
"name": "PlayStation 3",
@@ -2640,8 +2648,8 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
},
UPS.PS4: {
"category": "Console",
- "family_name": "PlayStation",
- "family_slug": "playstation",
+ "family_name": "Sony",
+ "family_slug": "sony",
"generation": 8,
"id": 48,
"name": "PlayStation 4",
@@ -2651,8 +2659,8 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
},
UPS.PS5: {
"category": "Console",
- "family_name": "PlayStation",
- "family_slug": "playstation",
+ "family_name": "Sony",
+ "family_slug": "sony",
"generation": 9,
"id": 167,
"name": "PlayStation 5",
@@ -2662,8 +2670,8 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
},
UPS.PSP: {
"category": "Portable Console",
- "family_name": "PlayStation",
- "family_slug": "playstation",
+ "family_name": "Sony",
+ "family_slug": "sony",
"generation": 7,
"id": 38,
"name": "PlayStation Portable",
@@ -2673,8 +2681,8 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
},
UPS.PSVITA: {
"category": "Portable Console",
- "family_name": "PlayStation",
- "family_slug": "playstation",
+ "family_name": "Sony",
+ "family_slug": "sony",
"generation": 8,
"id": 46,
"name": "PlayStation Vita",
@@ -2684,8 +2692,8 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
},
UPS.PSVR: {
"category": "Console",
- "family_name": "PlayStation",
- "family_slug": "playstation",
+ "family_name": "Sony",
+ "family_slug": "sony",
"generation": 8,
"id": 165,
"name": "PlayStation VR",
@@ -2695,8 +2703,8 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
},
UPS.PSVR2: {
"category": "Console",
- "family_name": "PlayStation",
- "family_slug": "playstation",
+ "family_name": "Sony",
+ "family_slug": "sony",
"generation": 9,
"id": 390,
"name": "PlayStation VR2",
@@ -2737,6 +2745,18 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"url": "https://www.igdb.com/platforms/saturn",
"url_logo": "https://images.igdb.com/igdb/image/upload/t_1080p/hrmqljpwunky1all3v78.jpg",
},
+ UPS.SCUMMVM: {
+ "category": "Computer",
+ "family_name": "",
+ "family_slug": "",
+ "generation": -1,
+ # Note: The ID 50501 is a keyword ID (not a platform ID) in IGDB's system
+ "id": 50501,
+ "name": "ScummVM",
+ "slug": "scummvm",
+ "url": "https://www.igdb.com/categories/scummvm-compatible",
+ "url_logo": "",
+ },
UPS.SDSSIGMA7: {
"category": "Computer",
"family_name": "",
@@ -2914,10 +2934,10 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"url_logo": "https://images.igdb.com/igdb/image/upload/t_1080p/pl94.jpg",
},
UPS.STEAM_VR: {
- "category": "Unknown",
- "family_name": "",
- "family_slug": "",
- "generation": -1,
+ "category": "Platform",
+ "family_name": "Valve",
+ "family_slug": "valve",
+ "generation": 8,
"id": 163,
"name": "SteamVR",
"slug": "steam-vr",
@@ -2961,7 +2981,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"category": "Portable Console",
"family_name": "Bandai",
"family_slug": "bandai",
- "generation": -1,
+ "generation": 5,
"id": 124,
"name": "SwanCrystal",
"slug": "swancrystal",
@@ -3093,7 +3113,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"category": "Console",
"family_name": "",
"family_slug": "",
- "generation": -1,
+ "generation": 9,
"id": 504,
"name": "Uzebox",
"slug": "uzebox",
@@ -3156,7 +3176,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"url_logo": "https://images.igdb.com/igdb/image/upload/t_1080p/pl7s.jpg",
},
UPS.VISIONOS: {
- "category": "Operative System",
+ "category": "Operating System",
"family_name": "Apple",
"family_slug": "apple",
"generation": -1,
@@ -3222,7 +3242,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"url_logo": "https://images.igdb.com/igdb/image/upload/t_1080p/pl6n.jpg",
},
UPS.WIN: {
- "category": "Operative System",
+ "category": "Operating System",
"family_name": "Microsoft",
"family_slug": "microsoft",
"generation": -1,
@@ -3233,10 +3253,10 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"url_logo": "https://images.igdb.com/igdb/image/upload/t_1080p/plim.jpg",
},
UPS.WINDOWS_MIXED_REALITY: {
- "category": "Unknown",
+ "category": "Platform",
"family_name": "Microsoft",
"family_slug": "microsoft",
- "generation": -1,
+ "generation": 8,
"id": 161,
"name": "Windows Mixed Reality",
"slug": "windows-mixed-reality",
@@ -3244,7 +3264,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"url_logo": "https://images.igdb.com/igdb/image/upload/t_1080p/plm4.jpg",
},
UPS.WINDOWS_MOBILE: {
- "category": "Operative System",
+ "category": "Operating System",
"family_name": "Microsoft",
"family_slug": "microsoft",
"generation": -1,
@@ -3255,7 +3275,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"url_logo": "https://images.igdb.com/igdb/image/upload/t_1080p/plkl.jpg",
},
UPS.WINPHONE: {
- "category": "Operative System",
+ "category": "Operating System",
"family_name": "Microsoft",
"family_slug": "microsoft",
"generation": -1,
@@ -3346,7 +3366,7 @@ IGDB_PLATFORM_LIST: dict[UPS, SlugToIGDB] = {
"category": "Portable Console",
"family_name": "",
"family_slug": "",
- "generation": -1,
+ "generation": 5,
"id": 44,
"name": "Tapwave Zodiac",
"slug": "zod",
diff --git a/backend/handler/metadata/launchbox_handler.py b/backend/handler/metadata/launchbox_handler.py
index 6a53c35e8..5da3de952 100644
--- a/backend/handler/metadata/launchbox_handler.py
+++ b/backend/handler/metadata/launchbox_handler.py
@@ -142,17 +142,8 @@ class LaunchboxHandler(MetadataHandler):
self, file_name: str, platform_slug: str
) -> dict | None:
if not (await async_cache.exists(LAUNCHBOX_METADATA_NAME_KEY)):
- log.info("Fetching the Launchbox Metadata.xml file...")
-
- from tasks.scheduled.update_launchbox_metadata import (
- update_launchbox_metadata_task,
- )
-
- await update_launchbox_metadata_task.run(force=True)
-
- if not (await async_cache.exists(LAUNCHBOX_METADATA_NAME_KEY)):
- log.error("Could not fetch the Launchbox Metadata.xml file")
- return None
+ log.error("Could not find the Launchbox Metadata.xml file in cache")
+ return None
lb_platform = self.get_platform(platform_slug)
platform_name = lb_platform.get("name", None)
diff --git a/backend/handler/metadata/tgdb_handler.py b/backend/handler/metadata/tgdb_handler.py
index b1c162f83..4e3abdaca 100644
--- a/backend/handler/metadata/tgdb_handler.py
+++ b/backend/handler/metadata/tgdb_handler.py
@@ -81,7 +81,7 @@ TGDB_PLATFORM_LIST: dict[UPS, SlugToTGDBId] = {
UPS.ACORN_ARCHIMEDES: {
"id": 4944,
"name": "Acorn Archimedes",
- "manufacturer": "Acorn Computers",
+ "manufacturer": "Acorn",
"developer": "Acorn Computers",
"media_medium": None,
"cpu": "Acorn RISC Machine",
@@ -96,7 +96,7 @@ TGDB_PLATFORM_LIST: dict[UPS, SlugToTGDBId] = {
UPS.ATOM: {
"id": 5014,
"name": "Acorn Atom",
- "manufacturer": "Acorn Computers",
+ "manufacturer": "Acorn",
"developer": "Acorn Computers",
"media_medium": "100KB 5¼ Floppy, Cassette",
"cpu": "MOS Technology 6502 clocked at 1MHz",
@@ -111,7 +111,7 @@ TGDB_PLATFORM_LIST: dict[UPS, SlugToTGDBId] = {
UPS.ACORN_ELECTRON: {
"id": 4954,
"name": "Acorn Electron",
- "manufacturer": "Acorn Computers",
+ "manufacturer": "Acorn",
"developer": "Acorn Computers",
"media_medium": "Cassette tape, floppy disk (optional), ROM cartridge (optional)",
"cpu": "MOS Technology 6502A with 2/1 MHz",
@@ -321,7 +321,7 @@ TGDB_PLATFORM_LIST: dict[UPS, SlugToTGDBId] = {
UPS.ATARI800: {
"id": 4943,
"name": "Atari 800",
- "manufacturer": "Atari Corporation",
+ "manufacturer": "Atari",
"developer": "Atari Corporation",
"media_medium": None,
"cpu": "MOS Technology 6502B",
@@ -366,7 +366,7 @@ TGDB_PLATFORM_LIST: dict[UPS, SlugToTGDBId] = {
UPS.LYNX: {
"id": 4924,
"name": "Atari Lynx",
- "manufacturer": "Atari Corporation",
+ "manufacturer": "Atari",
"developer": "Epyx / Atari",
"media_medium": None,
"cpu": "MOS Technology 6502",
@@ -381,7 +381,7 @@ TGDB_PLATFORM_LIST: dict[UPS, SlugToTGDBId] = {
UPS.ATARI_ST: {
"id": 4937,
"name": "Atari ST",
- "manufacturer": "Atari Corporation",
+ "manufacturer": "Atari",
"developer": "Atari Corporation",
"media_medium": "Floppy",
"cpu": "Motorola 680x0 @ 8 MHz & higher",
@@ -411,7 +411,7 @@ TGDB_PLATFORM_LIST: dict[UPS, SlugToTGDBId] = {
UPS.ASTROCADE: {
"id": 4968,
"name": "Bally Astrocade",
- "manufacturer": "Bally Manufacturing",
+ "manufacturer": "Bally",
"developer": "Bally Manufacturing",
"media_medium": None,
"cpu": None,
@@ -426,7 +426,7 @@ TGDB_PLATFORM_LIST: dict[UPS, SlugToTGDBId] = {
UPS.BBCMICRO: {
"id": 5013,
"name": "BBC Micro",
- "manufacturer": "Acorn Computers",
+ "manufacturer": "Acorn",
"developer": "BBC",
"media_medium": "Cassette, Floppy, Hard Disk, Laserdisc",
"cpu": "2 MHz MOS Technology 6502/6512",
@@ -501,7 +501,7 @@ TGDB_PLATFORM_LIST: dict[UPS, SlugToTGDBId] = {
UPS.C128: {
"id": 4946,
"name": "Commodore 128",
- "manufacturer": "Commodore Business Machines",
+ "manufacturer": "Commodore",
"developer": "Commodore International",
"media_medium": None,
"cpu": "Zilog Z80A @ 4 MHz",
@@ -516,7 +516,7 @@ TGDB_PLATFORM_LIST: dict[UPS, SlugToTGDBId] = {
UPS.C16: {
"id": 5006,
"name": "Commodore 16",
- "manufacturer": "Commodore Business Machines",
+ "manufacturer": "Commodore",
"developer": None,
"media_medium": "ROM cartridge, Compact Cassette",
"cpu": "MOS Technology 7501 @ 0.89 MHz / MOS Technology 8501 @ 1.76 MHz",
@@ -531,7 +531,7 @@ TGDB_PLATFORM_LIST: dict[UPS, SlugToTGDBId] = {
UPS.C64: {
"id": 40,
"name": "Commodore 64",
- "manufacturer": "Commodore International",
+ "manufacturer": "Commodore",
"developer": "Commodore International",
"media_medium": "Cartridge",
"cpu": "MOS Technology 6510",
@@ -546,7 +546,7 @@ TGDB_PLATFORM_LIST: dict[UPS, SlugToTGDBId] = {
UPS.CPET: {
"id": 5008,
"name": "Commodore PET",
- "manufacturer": "Commodore International",
+ "manufacturer": "Commodore",
"developer": None,
"media_medium": "Cassette tape, 5.25-inch floppy, 8-inch floppy, hard disk",
"cpu": "MOS Technology 6502 @ 1 MHz",
@@ -561,7 +561,7 @@ TGDB_PLATFORM_LIST: dict[UPS, SlugToTGDBId] = {
UPS.C_PLUS_4: {
"id": 5007,
"name": "Commodore Plus/4",
- "manufacturer": "Commodore Business Machines",
+ "manufacturer": "Commodore",
"developer": None,
"media_medium": None,
"cpu": "MOS Technology 7501 or 8501 @ 1.76 MHz",
@@ -576,7 +576,7 @@ TGDB_PLATFORM_LIST: dict[UPS, SlugToTGDBId] = {
UPS.VIC_20: {
"id": 4945,
"name": "Commodore VIC-20",
- "manufacturer": "Commodore Business Machines",
+ "manufacturer": "Commodore",
"developer": "Commodore International",
"media_medium": None,
"cpu": "MOS Technology 6502",
@@ -1416,7 +1416,7 @@ TGDB_PLATFORM_LIST: dict[UPS, SlugToTGDBId] = {
UPS.NUON: {
"id": 4935,
"name": "Nuon",
- "manufacturer": "Motorola, Samsung, Toshiba",
+ "manufacturer": "Samsung",
"developer": "VM Labs",
"media_medium": None,
"cpu": None,
@@ -1491,7 +1491,7 @@ TGDB_PLATFORM_LIST: dict[UPS, SlugToTGDBId] = {
UPS.WIN: {
"id": 1,
"name": "PC",
- "manufacturer": None,
+ "manufacturer": "Microsoft",
"developer": "IBM",
"media_medium": None,
"cpu": "x86 Based",
diff --git a/backend/handler/scan_handler.py b/backend/handler/scan_handler.py
index 6c78f8830..b0b46f39f 100644
--- a/backend/handler/scan_handler.py
+++ b/backend/handler/scan_handler.py
@@ -289,10 +289,6 @@ async def scan_rom(
newly_added: bool,
socket_manager: socketio.AsyncRedisManager | None = None,
) -> Rom:
- if not metadata_sources:
- log.error("No metadata sources provided")
- raise ValueError("No metadata sources provided")
-
filesize = sum([file.file_size_bytes for file in fs_rom["files"]])
rom_attrs = {
"platform_id": platform.id,
diff --git a/backend/models/collection.py b/backend/models/collection.py
index ddd7b0d3a..1bf4df450 100644
--- a/backend/models/collection.py
+++ b/backend/models/collection.py
@@ -24,6 +24,7 @@ class Collection(BaseModel):
name: Mapped[str] = mapped_column(String(length=400))
description: Mapped[str | None] = mapped_column(Text)
is_public: Mapped[bool] = mapped_column(default=False)
+ is_favorite: Mapped[bool] = mapped_column(default=False)
path_cover_l: Mapped[str | None] = mapped_column(Text, default="")
path_cover_s: Mapped[str | None] = mapped_column(Text, default="")
url_cover: Mapped[str | None] = mapped_column(
@@ -89,10 +90,6 @@ class Collection(BaseModel):
if r.path_cover_l
]
- @property
- def is_favorite(self) -> bool:
- return self.name.lower() == "favourites"
-
def __repr__(self) -> str:
return self.name
diff --git a/backend/models/rom.py b/backend/models/rom.py
index 13e97ddff..37e9368ef 100644
--- a/backend/models/rom.py
+++ b/backend/models/rom.py
@@ -268,10 +268,6 @@ class Rom(BaseModel):
def platform_fs_slug(self) -> str:
return self.platform.fs_slug
- @property
- def platform_name(self) -> str:
- return self.platform.name
-
@property
def platform_custom_name(self) -> str | None:
return self.platform.custom_name
diff --git a/backend/romm_test/library/n64/roms/Sonic (EU) [T]/Sonic (EU) [T].n64 b/backend/romm_test/library/n64/roms/Sonic (EU) [T]/Sonic (EU) [T].n64
new file mode 100644
index 000000000..5b290d000
--- /dev/null
+++ b/backend/romm_test/library/n64/roms/Sonic (EU) [T]/Sonic (EU) [T].n64
@@ -0,0 +1 @@
+00000000
diff --git a/backend/romm_test/library/n64/roms/Sonic (EU) [T]/translation/Sonic (EU) [T-En].z64 b/backend/romm_test/library/n64/roms/Sonic (EU) [T]/translation/Sonic (EU) [T-En].z64
new file mode 100644
index 000000000..ca028fbba
--- /dev/null
+++ b/backend/romm_test/library/n64/roms/Sonic (EU) [T]/translation/Sonic (EU) [T-En].z64
@@ -0,0 +1 @@
+11111111
diff --git a/backend/tasks/scheduled/scan_library.py b/backend/tasks/scheduled/scan_library.py
index 75408288e..90738c360 100644
--- a/backend/tasks/scheduled/scan_library.py
+++ b/backend/tasks/scheduled/scan_library.py
@@ -61,7 +61,9 @@ class ScanLibraryTask(PeriodicTask):
log.info("Scheduled library scan started...")
scan_stats = await scan_platforms(
- [], scan_type=ScanType.UNIDENTIFIED, metadata_sources=metadata_sources
+ platform_ids=[],
+ metadata_sources=metadata_sources,
+ scan_type=ScanType.UNIDENTIFIED,
)
log.info("Scheduled library scan done")
diff --git a/backend/tasks/scheduled/update_switch_titledb.py b/backend/tasks/scheduled/update_switch_titledb.py
index 387e5a4ee..bf48adb9e 100644
--- a/backend/tasks/scheduled/update_switch_titledb.py
+++ b/backend/tasks/scheduled/update_switch_titledb.py
@@ -71,3 +71,57 @@ class UpdateSwitchTitleDBTask(RemoteFilePullTask):
update_switch_titledb_task = UpdateSwitchTitleDBTask()
+
+TITLEDB_REGION_LANG_MAP: Final = {
+ "BG": ["en"],
+ "BR": ["en", "pt"],
+ "CH": ["fr", "de", "it"],
+ "CY": ["en"],
+ "EE": ["en"],
+ "HR": ["en"],
+ "IE": ["en"],
+ "LT": ["en"],
+ "LU": ["fr", "de"],
+ "LV": ["en"],
+ "MT": ["en"],
+ "RO": ["en"],
+ "SI": ["en"],
+ "SK": ["en"],
+ "CO": ["en", "es"],
+ "AR": ["en", "es"],
+ "CL": ["en", "es"],
+ "PE": ["en", "es"],
+ "KR": ["ko"],
+ "HK": ["zh"],
+ "CN": ["zh"],
+ "NZ": ["en"],
+ "AT": ["de"],
+ "BE": ["fr", "nl"],
+ "CZ": ["en"],
+ "DK": ["en"],
+ "ES": ["es"],
+ "FI": ["en"],
+ "GR": ["en"],
+ "HU": ["en"],
+ "NL": ["nl"],
+ "NO": ["en"],
+ "PL": ["en"],
+ "PT": ["pt"],
+ "RU": ["ru"],
+ "ZA": ["en"],
+ "SE": ["en"],
+ "MX": ["en", "es"],
+ "IT": ["it"],
+ "CA": ["en", "fr"],
+ "FR": ["fr"],
+ "DE": ["de"],
+ "JP": ["ja"],
+ "AU": ["en"],
+ "GB": ["en"],
+ "US": ["en", "es"],
+}
+
+TITLEDB_REGION_LIST: Final = list(TITLEDB_REGION_LANG_MAP.keys())
+TITLEDB_LANGUAGE_LIST: Final = list(
+ set(lang for sublist in TITLEDB_REGION_LANG_MAP.values() for lang in sublist)
+)
diff --git a/backend/tests/endpoints/sockets/test_scan.py b/backend/tests/endpoints/sockets/test_scan.py
index 6bbc3b4af..23e3b299f 100644
--- a/backend/tests/endpoints/sockets/test_scan.py
+++ b/backend/tests/endpoints/sockets/test_scan.py
@@ -1,6 +1,7 @@
from unittest.mock import Mock
import pytest
+import socketio
from endpoints.sockets.scan import ScanStats, _should_scan_rom
from handler.scan_handler import ScanType
@@ -13,61 +14,62 @@ def test_scan_stats():
assert stats.new_platforms == 0
assert stats.identified_platforms == 0
assert stats.scanned_roms == 0
- assert stats.added_roms == 0
+ assert stats.new_roms == 0
assert stats.identified_roms == 0
assert stats.scanned_firmware == 0
- assert stats.added_firmware == 0
+ assert stats.new_firmware == 0
stats.scanned_platforms += 1
stats.new_platforms += 1
stats.identified_platforms += 1
stats.scanned_roms += 1
- stats.added_roms += 1
+ stats.new_roms += 1
stats.identified_roms += 1
stats.scanned_firmware += 1
- stats.added_firmware += 1
+ stats.new_firmware += 1
assert stats.scanned_platforms == 1
assert stats.new_platforms == 1
assert stats.identified_platforms == 1
assert stats.scanned_roms == 1
- assert stats.added_roms == 1
+ assert stats.new_roms == 1
assert stats.identified_roms == 1
assert stats.scanned_firmware == 1
- assert stats.added_firmware == 1
+ assert stats.new_firmware == 1
-def test_merging_scan_stats():
+async def test_merging_scan_stats():
stats = ScanStats(
scanned_platforms=1,
new_platforms=2,
identified_platforms=3,
scanned_roms=4,
- added_roms=5,
+ new_roms=5,
identified_roms=6,
scanned_firmware=7,
- added_firmware=8,
+ new_firmware=8,
)
- stats.update(
+ await stats.update(
+ socket_manager=Mock(spec=socketio.AsyncRedisManager),
scanned_platforms=stats.scanned_platforms + 10,
new_platforms=stats.new_platforms + 11,
identified_platforms=stats.identified_platforms + 12,
scanned_roms=stats.scanned_roms + 13,
- added_roms=stats.added_roms + 14,
+ new_roms=stats.new_roms + 14,
identified_roms=stats.identified_roms + 15,
scanned_firmware=stats.scanned_firmware + 16,
- added_firmware=stats.added_firmware + 17,
+ new_firmware=stats.new_firmware + 17,
)
assert stats.scanned_platforms == 11
assert stats.new_platforms == 13
assert stats.identified_platforms == 15
assert stats.scanned_roms == 17
- assert stats.added_roms == 19
+ assert stats.new_roms == 19
assert stats.identified_roms == 21
assert stats.scanned_firmware == 23
- assert stats.added_firmware == 25
+ assert stats.new_firmware == 25
class TestShouldScanRom:
diff --git a/backend/tests/endpoints/test_rom.py b/backend/tests/endpoints/test_rom.py
index b224ff36e..dda57c8d9 100644
--- a/backend/tests/endpoints/test_rom.py
+++ b/backend/tests/endpoints/test_rom.py
@@ -1,4 +1,5 @@
-from unittest.mock import patch
+import json
+from unittest.mock import AsyncMock, patch
import pytest
from fastapi import status
@@ -6,7 +7,14 @@ from fastapi.testclient import TestClient
from main import app
from handler.filesystem.roms_handler import FSRomsHandler
+from handler.metadata.flashpoint_handler import FlashpointHandler, FlashpointRom
from handler.metadata.igdb_handler import IGDBHandler, IGDBRom
+from handler.metadata.launchbox_handler import LaunchboxHandler, LaunchboxRom
+from handler.metadata.moby_handler import MobyGamesHandler, MobyGamesRom
+from handler.metadata.ra_handler import RAGameRom, RAHandler
+from handler.metadata.ss_handler import SSHandler, SSRom
+from models.platform import Platform
+from models.rom import Rom
@pytest.fixture
@@ -15,7 +23,18 @@ def client():
yield client
-def test_get_rom(client, access_token, rom):
+MOCK_IGDB_ID = 11111
+MOCK_MOBY_ID = 22222
+MOCK_SS_ID = 33333
+MOCK_RA_ID = 44444
+MOCK_LAUNCHBOX_ID = 55555
+MOCK_FLASHPOINT_ID = 66666
+MOCK_HLTB_ID = 77777
+MOCK_SGDB_ID = 88888
+MOCK_HASHEOUS_ID = 99999
+
+
+def test_get_rom(client: TestClient, access_token: str, rom: Rom):
response = client.get(
f"/api/roms/{rom.id}",
headers={"Authorization": f"Bearer {access_token}"},
@@ -26,7 +45,9 @@ def test_get_rom(client, access_token, rom):
assert body["id"] == rom.id
-def test_get_all_roms(client, access_token, rom, platform):
+def test_get_all_roms(
+ client: TestClient, access_token: str, rom: Rom, platform: Platform
+):
response = client.get(
"/api/roms",
headers={"Authorization": f"Bearer {access_token}"},
@@ -47,12 +68,18 @@ def test_get_all_roms(client, access_token, rom, platform):
@patch.object(FSRomsHandler, "rename_fs_rom")
@patch.object(IGDBHandler, "get_rom_by_id", return_value=IGDBRom(igdb_id=None))
-def test_update_rom(rename_fs_rom_mock, get_rom_by_id_mock, client, access_token, rom):
+def test_update_rom(
+ rename_fs_rom_mock: AsyncMock,
+ get_rom_by_id_mock: AsyncMock,
+ client: TestClient,
+ access_token: str,
+ rom: Rom,
+):
response = client.put(
f"/api/roms/{rom.id}",
headers={"Authorization": f"Bearer {access_token}"},
data={
- "igdb_id": "236663",
+ "igdb_id": str(MOCK_IGDB_ID),
"name": "Metroid Prime Remastered",
"slug": "metroid-prime-remastered",
"fs_name": "Metroid Prime Remastered.zip",
@@ -64,7 +91,7 @@ def test_update_rom(rename_fs_rom_mock, get_rom_by_id_mock, client, access_token
"expansions": "[]",
"dlcs": "[]",
"companies": '[{"id": 203227, "company": {"id": 70, "name": "Nintendo"}}, {"id": 203307, "company": {"id": 766, "name": "Retro Studios"}}]',
- "first_release_date": 1675814400,
+ "first_release_date": "1675814400",
"youtube_video_id": "dQw4w9WgXcQ",
"remasters": "[]",
"remakes": "[]",
@@ -83,7 +110,7 @@ def test_update_rom(rename_fs_rom_mock, get_rom_by_id_mock, client, access_token
assert get_rom_by_id_mock.called
-def test_delete_roms(client, access_token, rom):
+def test_delete_roms(client: TestClient, access_token: str, rom: Rom):
response = client.post(
"/api/roms/delete",
headers={"Authorization": f"Bearer {access_token}"},
@@ -93,3 +120,652 @@ def test_delete_roms(client, access_token, rom):
body = response.json()
assert body["successful_items"] == 1
+
+
+class TestUpdateMetadataIDs:
+ @patch.object(
+ IGDBHandler, "get_rom_by_id", return_value=IGDBRom(igdb_id=MOCK_IGDB_ID)
+ )
+ def test_update_rom_igdb_id(
+ self,
+ get_rom_by_id_mock: AsyncMock,
+ client: TestClient,
+ access_token: str,
+ rom: Rom,
+ ):
+ """Test updating IGDB ID."""
+ response = client.put(
+ f"/api/roms/{rom.id}",
+ headers={"Authorization": f"Bearer {access_token}"},
+ data={"igdb_id": str(MOCK_IGDB_ID)},
+ )
+ assert response.status_code == status.HTTP_200_OK
+
+ body = response.json()
+ assert body["igdb_id"] == MOCK_IGDB_ID
+ assert get_rom_by_id_mock.called
+
+ @patch.object(
+ MobyGamesHandler,
+ "get_rom_by_id",
+ return_value=MobyGamesRom(moby_id=MOCK_MOBY_ID),
+ )
+ def test_update_rom_moby_id(
+ self,
+ get_rom_by_id_mock: AsyncMock,
+ client: TestClient,
+ access_token: str,
+ rom: Rom,
+ ):
+ """Test updating MobyGames ID."""
+ response = client.put(
+ f"/api/roms/{rom.id}",
+ headers={"Authorization": f"Bearer {access_token}"},
+ data={"moby_id": str(MOCK_MOBY_ID)},
+ )
+ assert response.status_code == status.HTTP_200_OK
+
+ body = response.json()
+ assert body["moby_id"] == MOCK_MOBY_ID
+ assert get_rom_by_id_mock.called
+
+ @patch.object(SSHandler, "get_rom_by_id", return_value=SSRom(ss_id=MOCK_SS_ID))
+ def test_update_rom_ss_id(
+ self,
+ get_rom_by_id_mock: AsyncMock,
+ client: TestClient,
+ access_token: str,
+ rom: Rom,
+ ):
+ """Test updating ScreenScraper ID."""
+ response = client.put(
+ f"/api/roms/{rom.id}",
+ headers={"Authorization": f"Bearer {access_token}"},
+ data={"ss_id": str(MOCK_SS_ID)},
+ )
+ assert response.status_code == status.HTTP_200_OK
+
+ body = response.json()
+ assert body["ss_id"] == MOCK_SS_ID
+ assert get_rom_by_id_mock.called
+
+ @patch.object(RAHandler, "get_rom_by_id", return_value=RAGameRom(ra_id=MOCK_RA_ID))
+ def test_update_rom_ra_id(
+ self,
+ get_rom_by_id_mock: AsyncMock,
+ client: TestClient,
+ access_token: str,
+ rom: Rom,
+ ):
+ """Test updating RetroAchievements ID."""
+ response = client.put(
+ f"/api/roms/{rom.id}",
+ headers={"Authorization": f"Bearer {access_token}"},
+ data={"ra_id": str(MOCK_RA_ID)},
+ )
+ assert response.status_code == status.HTTP_200_OK
+
+ body = response.json()
+ assert body["ra_id"] == MOCK_RA_ID
+ assert get_rom_by_id_mock.called
+
+ @patch.object(
+ LaunchboxHandler,
+ "get_rom_by_id",
+ return_value=LaunchboxRom(launchbox_id=MOCK_LAUNCHBOX_ID),
+ )
+ def test_update_rom_launchbox_id(
+ self,
+ get_rom_by_id_mock: AsyncMock,
+ client: TestClient,
+ access_token: str,
+ rom: Rom,
+ ):
+ """Test updating LaunchBox ID."""
+ response = client.put(
+ f"/api/roms/{rom.id}",
+ headers={"Authorization": f"Bearer {access_token}"},
+ data={"launchbox_id": str(MOCK_LAUNCHBOX_ID)},
+ )
+ assert response.status_code == status.HTTP_200_OK
+
+ body = response.json()
+ assert body["launchbox_id"] == MOCK_LAUNCHBOX_ID
+ assert get_rom_by_id_mock.called
+
+ @patch.object(
+ FlashpointHandler,
+ "get_rom_by_id",
+ return_value=FlashpointRom(flashpoint_id=str(MOCK_FLASHPOINT_ID)),
+ )
+ def test_update_rom_flashpoint_id(
+ self,
+ get_rom_by_id_mock: AsyncMock,
+ client: TestClient,
+ access_token: str,
+ rom: Rom,
+ ):
+ """Test updating Flashpoint ID."""
+ response = client.put(
+ f"/api/roms/{rom.id}",
+ headers={"Authorization": f"Bearer {access_token}"},
+ data={"flashpoint_id": str(MOCK_FLASHPOINT_ID)},
+ )
+ assert response.status_code == status.HTTP_200_OK
+
+ body = response.json()
+ assert body["flashpoint_id"] == str(MOCK_FLASHPOINT_ID)
+ assert get_rom_by_id_mock.called
+
+ # These metadata sources are not called when updating roms
+ def test_update_rom_sgdb_id(self, client: TestClient, access_token: str, rom: Rom):
+ """Test updating SteamGridDB ID."""
+ response = client.put(
+ f"/api/roms/{rom.id}",
+ headers={"Authorization": f"Bearer {access_token}"},
+ data={"sgdb_id": str(MOCK_SGDB_ID)},
+ )
+ assert response.status_code == status.HTTP_200_OK
+
+ body = response.json()
+ assert body["sgdb_id"] == MOCK_SGDB_ID
+
+ def test_update_rom_hasheous_id(
+ self, client: TestClient, access_token: str, rom: Rom
+ ):
+ """Test updating Hasheous ID."""
+ response = client.put(
+ f"/api/roms/{rom.id}",
+ headers={"Authorization": f"Bearer {access_token}"},
+ data={"hasheous_id": str(MOCK_HASHEOUS_ID)},
+ )
+ assert response.status_code == status.HTTP_200_OK
+
+ body = response.json()
+ assert body["hasheous_id"] == MOCK_HASHEOUS_ID
+
+ def test_update_rom_hltb_id(self, client: TestClient, access_token: str, rom: Rom):
+ """Test updating HowLongToBeat ID."""
+ response = client.put(
+ f"/api/roms/{rom.id}",
+ headers={"Authorization": f"Bearer {access_token}"},
+ data={"hltb_id": str(MOCK_HLTB_ID)},
+ )
+ assert response.status_code == status.HTTP_200_OK
+
+ body = response.json()
+ assert body["hltb_id"] == MOCK_HLTB_ID
+
+
+class TestUpdateRawMetadata:
+ @patch.object(
+ IGDBHandler,
+ "get_rom_by_id",
+ return_value=IGDBRom(igdb_id=MOCK_IGDB_ID),
+ )
+ def test_update_raw_igdb_metadata(
+ self,
+ get_rom_by_id_mock: AsyncMock,
+ client: TestClient,
+ access_token: str,
+ rom: Rom,
+ ):
+ """Test updating raw IGDB metadata."""
+ raw_metadata = {
+ "genres": ["Action"],
+ "franchises": ["Metroid"],
+ }
+
+ response = client.put(
+ f"/api/roms/{rom.id}",
+ headers={"Authorization": f"Bearer {access_token}"},
+ data={
+ "igdb_id": str(MOCK_IGDB_ID),
+ "raw_igdb_metadata": json.dumps(raw_metadata),
+ },
+ )
+ assert response.status_code == status.HTTP_200_OK
+
+ body = response.json()
+ assert body["igdb_metadata"] is not None
+ assert body["igdb_metadata"]["genres"] == ["Action"]
+ assert body["igdb_metadata"]["franchises"] == ["Metroid"]
+
+ @patch.object(
+ MobyGamesHandler,
+ "get_rom_by_id",
+ return_value=MobyGamesRom(moby_id=MOCK_MOBY_ID),
+ )
+ def test_update_raw_moby_metadata(
+ self,
+ get_rom_by_id_mock: AsyncMock,
+ client: TestClient,
+ access_token: str,
+ rom: Rom,
+ ):
+ """Test updating raw MobyGames metadata."""
+ raw_metadata = {
+ "genres": ["Action", "Adventure"],
+ "moby_score": "90",
+ }
+
+ response = client.put(
+ f"/api/roms/{rom.id}",
+ headers={"Authorization": f"Bearer {access_token}"},
+ data={
+ "moby_id": str(MOCK_MOBY_ID),
+ "raw_moby_metadata": json.dumps(raw_metadata),
+ },
+ )
+ assert response.status_code == status.HTTP_200_OK
+
+ body = response.json()
+ assert body["moby_metadata"] is not None
+ assert body["moby_metadata"]["moby_score"] == "90"
+ assert body["moby_metadata"]["genres"] == ["Action", "Adventure"]
+
+ @patch.object(
+ SSHandler,
+ "get_rom_by_id",
+ return_value=SSRom(ss_id=MOCK_SS_ID),
+ )
+ def test_update_raw_ss_metadata(
+ self,
+ get_rom_by_id_mock: AsyncMock,
+ client: TestClient,
+ access_token: str,
+ rom: Rom,
+ ):
+ """Test updating raw ScreenScraper metadata."""
+ raw_metadata = {
+ "ss_score": "85",
+ "alternative_names": ["Test SS Game"],
+ }
+
+ response = client.put(
+ f"/api/roms/{rom.id}",
+ headers={"Authorization": f"Bearer {access_token}"},
+ data={
+ "ss_id": str(MOCK_SS_ID),
+ "raw_ss_metadata": json.dumps(raw_metadata),
+ },
+ )
+ assert response.status_code == status.HTTP_200_OK
+
+ body = response.json()
+ assert body["ss_metadata"] is not None
+ assert body["ss_metadata"]["ss_score"] == "85"
+ assert body["ss_metadata"]["alternative_names"] == ["Test SS Game"]
+
+ @patch.object(
+ LaunchboxHandler,
+ "get_rom_by_id",
+ return_value=LaunchboxRom(launchbox_id=MOCK_LAUNCHBOX_ID),
+ )
+ def test_update_raw_launchbox_metadata(
+ self,
+ get_rom_by_id_mock: AsyncMock,
+ client: TestClient,
+ access_token: str,
+ rom: Rom,
+ ):
+ """Test updating raw LaunchBox metadata."""
+ raw_metadata = {
+ "first_release_date": "1675814400",
+ "max_players": 4,
+ "release_type": "Single",
+ }
+
+ response = client.put(
+ f"/api/roms/{rom.id}",
+ headers={"Authorization": f"Bearer {access_token}"},
+ data={
+ "launchbox_id": str(MOCK_LAUNCHBOX_ID),
+ "raw_launchbox_metadata": json.dumps(raw_metadata),
+ },
+ )
+ assert response.status_code == status.HTTP_200_OK
+
+ body = response.json()
+ assert body["launchbox_metadata"] is not None
+ assert body["launchbox_metadata"]["first_release_date"] == 1675814400
+ assert body["launchbox_metadata"]["max_players"] == 4
+ assert body["launchbox_metadata"]["release_type"] == "Single"
+
+ def test_update_raw_hasheous_metadata(
+ self, client: TestClient, access_token: str, rom: Rom
+ ):
+ """Test updating raw Hasheous metadata."""
+ raw_metadata = {
+ "tosec_match": True,
+ "mame_arcade_match": False,
+ "mame_mess_match": True,
+ }
+
+ response = client.put(
+ f"/api/roms/{rom.id}",
+ headers={"Authorization": f"Bearer {access_token}"},
+ data={
+ "hasheous_id": str(MOCK_HASHEOUS_ID),
+ "raw_hasheous_metadata": json.dumps(raw_metadata),
+ },
+ )
+ assert response.status_code == status.HTTP_200_OK
+
+ body = response.json()
+ assert body["hasheous_metadata"] is not None
+ assert body["hasheous_metadata"]["tosec_match"] is True
+ assert body["hasheous_metadata"]["mame_arcade_match"] is False
+ assert body["hasheous_metadata"]["mame_mess_match"] is True
+
+ @patch.object(
+ FlashpointHandler,
+ "get_rom_by_id",
+ return_value=FlashpointRom(flashpoint_id=str(MOCK_FLASHPOINT_ID)),
+ )
+ def test_update_raw_flashpoint_metadata(
+ self,
+ get_rom_by_id_mock: AsyncMock,
+ client: TestClient,
+ access_token: str,
+ rom: Rom,
+ ):
+ """Test updating raw Flashpoint metadata."""
+ raw_metadata = {
+ "franchises": ["Metroid"],
+ "companies": ["Nintendo"],
+ "source": "Flashpoint",
+ }
+
+ response = client.put(
+ f"/api/roms/{rom.id}",
+ headers={"Authorization": f"Bearer {access_token}"},
+ data={
+ "flashpoint_id": str(MOCK_FLASHPOINT_ID),
+ "raw_flashpoint_metadata": json.dumps(raw_metadata),
+ },
+ )
+ assert response.status_code == status.HTTP_200_OK
+
+ body = response.json()
+ assert body["flashpoint_metadata"] is not None
+ assert body["flashpoint_metadata"]["franchises"] == ["Metroid"]
+ assert body["flashpoint_metadata"]["companies"] == ["Nintendo"]
+ assert body["flashpoint_metadata"]["source"] == "Flashpoint"
+
+ def test_update_raw_hltb_metadata(
+ self,
+ client: TestClient,
+ access_token: str,
+ rom: Rom,
+ ):
+ """Test updating raw HowLongToBeat metadata."""
+ raw_metadata = {
+ "main_story": 10000,
+ "main_story_count": 1,
+ }
+
+ response = client.put(
+ f"/api/roms/{rom.id}",
+ headers={"Authorization": f"Bearer {access_token}"},
+ data={
+ "hltb_id": str(MOCK_HLTB_ID),
+ "raw_hltb_metadata": json.dumps(raw_metadata),
+ },
+ )
+ assert response.status_code == status.HTTP_200_OK
+
+ body = response.json()
+ assert body["hltb_metadata"] is not None
+ assert body["hltb_metadata"]["main_story"] == 10000
+ assert body["hltb_metadata"]["main_story_count"] == 1
+
+ # Tests for combined updates
+ @patch.object(
+ IGDBHandler, "get_rom_by_id", return_value=IGDBRom(igdb_id=MOCK_IGDB_ID)
+ )
+ def test_update_rom_metadata_id_and_raw_metadata(
+ self,
+ get_rom_by_id_mock: AsyncMock,
+ client: TestClient,
+ access_token: str,
+ rom: Rom,
+ ):
+ """Test updating both metadata ID and raw metadata in the same request."""
+ raw_igdb_metadata = {
+ "genres": ["Action"],
+ "franchises": ["Metroid"],
+ }
+
+ response = client.put(
+ f"/api/roms/{rom.id}",
+ headers={"Authorization": f"Bearer {access_token}"},
+ data={
+ "igdb_id": str(MOCK_IGDB_ID),
+ "raw_igdb_metadata": json.dumps(raw_igdb_metadata),
+ },
+ )
+ assert response.status_code == status.HTTP_200_OK
+ assert get_rom_by_id_mock.called
+
+ body = response.json()
+ assert body["igdb_id"] == MOCK_IGDB_ID
+ assert body["igdb_metadata"] is not None
+ assert body["igdb_metadata"]["genres"] == ["Action"]
+ assert body["igdb_metadata"]["franchises"] == ["Metroid"]
+
+ @patch.object(
+ IGDBHandler, "get_rom_by_id", return_value=IGDBRom(igdb_id=MOCK_IGDB_ID)
+ )
+ @patch.object(
+ MobyGamesHandler,
+ "get_rom_by_id",
+ return_value=MobyGamesRom(moby_id=MOCK_MOBY_ID),
+ )
+ @patch.object(SSHandler, "get_rom_by_id", return_value=SSRom(ss_id=MOCK_SS_ID))
+ def test_update_rom_multiple_metadata_ids(
+ self,
+ igdb_get_rom_by_id_mock: AsyncMock,
+ moby_get_rom_by_id_mock: AsyncMock,
+ ss_get_rom_by_id_mock: AsyncMock,
+ client: TestClient,
+ access_token: str,
+ rom: Rom,
+ ):
+ """Test updating multiple metadata IDs in the same request."""
+ response = client.put(
+ f"/api/roms/{rom.id}",
+ headers={"Authorization": f"Bearer {access_token}"},
+ data={
+ "igdb_id": str(MOCK_IGDB_ID),
+ "moby_id": str(MOCK_MOBY_ID),
+ "ss_id": str(MOCK_SS_ID),
+ },
+ )
+ assert response.status_code == status.HTTP_200_OK
+ assert igdb_get_rom_by_id_mock.called
+ assert moby_get_rom_by_id_mock.called
+ assert ss_get_rom_by_id_mock.called
+
+ body = response.json()
+ assert body["igdb_id"] == MOCK_IGDB_ID
+ assert body["moby_id"] == MOCK_MOBY_ID
+ assert body["ss_id"] == MOCK_SS_ID
+
+ @patch.object(
+ IGDBHandler, "get_rom_by_id", return_value=IGDBRom(igdb_id=MOCK_IGDB_ID)
+ )
+ @patch.object(
+ MobyGamesHandler,
+ "get_rom_by_id",
+ return_value=MobyGamesRom(moby_id=MOCK_MOBY_ID),
+ )
+ @patch.object(SSHandler, "get_rom_by_id", return_value=SSRom(ss_id=MOCK_SS_ID))
+ def test_update_rom_multiple_raw_metadata(
+ self,
+ igdb_get_rom_by_id_mock: AsyncMock,
+ moby_get_rom_by_id_mock: AsyncMock,
+ ss_get_rom_by_id_mock: AsyncMock,
+ client: TestClient,
+ access_token: str,
+ rom: Rom,
+ ):
+ """Test updating multiple raw metadata fields in the same request."""
+ raw_igdb = {
+ "genres": ["Action"],
+ "franchises": ["Metroid"],
+ }
+ raw_moby = {
+ "genres": ["Action", "Adventure"],
+ "moby_score": "90",
+ }
+ raw_ss = {
+ "ss_score": "85",
+ "alternative_names": ["Test SS Game"],
+ }
+
+ response = client.put(
+ f"/api/roms/{rom.id}",
+ headers={"Authorization": f"Bearer {access_token}"},
+ data={
+ "igdb_id": str(MOCK_IGDB_ID),
+ "moby_id": str(MOCK_MOBY_ID),
+ "ss_id": str(MOCK_SS_ID),
+ "raw_igdb_metadata": json.dumps(raw_igdb),
+ "raw_moby_metadata": json.dumps(raw_moby),
+ "raw_ss_metadata": json.dumps(raw_ss),
+ },
+ )
+ assert response.status_code == status.HTTP_200_OK
+ assert igdb_get_rom_by_id_mock.called
+ assert moby_get_rom_by_id_mock.called
+ assert ss_get_rom_by_id_mock.called
+
+ body = response.json()
+ assert body["igdb_metadata"] is not None
+ assert body["igdb_metadata"]["genres"] == ["Action"]
+ assert body["igdb_metadata"]["franchises"] == ["Metroid"]
+
+ assert body["moby_metadata"] is not None
+ assert body["moby_metadata"]["genres"] == ["Action", "Adventure"]
+ assert body["moby_metadata"]["moby_score"] == "90"
+
+ assert body["ss_metadata"] is not None
+ assert body["ss_metadata"]["ss_score"] == "85"
+ assert body["ss_metadata"]["alternative_names"] == ["Test SS Game"]
+
+ # Tests for invalid JSON handling
+ def test_update_rom_invalid_json_raw_metadata(
+ self,
+ client: TestClient,
+ access_token: str,
+ rom: Rom,
+ ):
+ """Test that invalid JSON in raw metadata is handled gracefully."""
+ response = client.put(
+ f"/api/roms/{rom.id}",
+ headers={"Authorization": f"Bearer {access_token}"},
+ data={"raw_igdb_metadata": "invalid json {["},
+ )
+ # Should still succeed, but raw metadata should not be updated
+ assert response.status_code == status.HTTP_200_OK
+ # The invalid JSON should be ignored, so igdb_metadata should remain unchanged
+ body = response.json()
+ assert body["igdb_metadata"] == {}
+
+ def test_update_rom_empty_raw_metadata(
+ self, client: TestClient, access_token: str, rom: Rom
+ ):
+ """Test that empty raw metadata is handled correctly."""
+ response = client.put(
+ f"/api/roms/{rom.id}",
+ headers={"Authorization": f"Bearer {access_token}"},
+ data={"raw_igdb_metadata": ""},
+ )
+ assert response.status_code == status.HTTP_200_OK
+ # Empty string should be ignored, so igdb_metadata should remain unchanged
+ body = response.json()
+ assert body["igdb_metadata"] == {}
+
+
+class TestUnmatchMetadata:
+ @patch.object(
+ IGDBHandler, "get_rom_by_id", return_value=IGDBRom(igdb_id=MOCK_IGDB_ID)
+ )
+ def test_update_rom_unmatch_metadata(
+ self,
+ get_rom_by_id_mock: AsyncMock,
+ client: TestClient,
+ access_token: str,
+ rom: Rom,
+ ):
+ """Test the unmatch_metadata functionality that clears all metadata."""
+ # Verify the ROM has existing metadata
+ initial_response = client.put(
+ f"/api/roms/{rom.id}",
+ headers={"Authorization": f"Bearer {access_token}"},
+ data={"igdb_id": str(MOCK_IGDB_ID)},
+ )
+ assert initial_response.status_code == status.HTTP_200_OK
+ assert get_rom_by_id_mock.called
+
+ initial_body = initial_response.json()
+ assert initial_body["igdb_id"] == MOCK_IGDB_ID
+ assert initial_body["igdb_metadata"] is not None
+
+ # Now unmatch all metadata
+ response = client.put(
+ f"/api/roms/{rom.id}",
+ headers={"Authorization": f"Bearer {access_token}"},
+ params={"unmatch_metadata": True},
+ )
+ assert response.status_code == status.HTTP_200_OK
+
+ body = response.json()
+
+ assert body["igdb_id"] is None
+ assert body["moby_id"] is None
+ assert body["ss_id"] is None
+ assert body["ra_id"] is None
+ assert body["launchbox_id"] is None
+ assert body["hasheous_id"] is None
+ assert body["tgdb_id"] is None
+ assert body["flashpoint_id"] is None
+ assert body["hltb_id"] is None
+
+ assert body["name"] == rom.fs_name
+ assert body["summary"] == ""
+ assert body["url_cover"] == ""
+ assert body["slug"] == ""
+
+ assert body["igdb_metadata"] == {}
+ assert body["moby_metadata"] == {}
+ assert body["ss_metadata"] == {}
+ assert body["merged_ra_metadata"] == {} # Special case
+ assert body["launchbox_metadata"] == {}
+ assert body["hasheous_metadata"] == {}
+ assert body["flashpoint_metadata"] == {}
+ assert body["hltb_metadata"] == {}
+
+ def test_update_rom_unmatch_metadata_with_other_data(
+ self, client: TestClient, access_token: str, rom: Rom
+ ):
+ """Test that unmatch_metadata works even when other data is provided."""
+ response = client.put(
+ f"/api/roms/{rom.id}",
+ headers={"Authorization": f"Bearer {access_token}"},
+ params={"unmatch_metadata": True},
+ data={
+ "igdb_id": str(MOCK_IGDB_ID), # This should be ignored
+ "name": "Should be ignored", # This should be ignored
+ "summary": "Should be ignored", # This should be ignored
+ },
+ )
+ assert response.status_code == status.HTTP_200_OK
+
+ body = response.json()
+
+ # All metadata should still be cleared despite other data being provided
+ assert body["igdb_id"] is None
+ assert body["name"] == rom.fs_name
+ assert body["summary"] == ""
diff --git a/backend/tests/handler/filesystem/test_roms_handler.py b/backend/tests/handler/filesystem/test_roms_handler.py
index 0e22d00b6..27060d81f 100644
--- a/backend/tests/handler/filesystem/test_roms_handler.py
+++ b/backend/tests/handler/filesystem/test_roms_handler.py
@@ -46,6 +46,28 @@ class TestFSRomsHandler:
full_path="n64/roms/Paper Mario (USA).z64",
)
+ @pytest.fixture
+ def rom_single_nested(self, platform: Platform):
+ return Rom(
+ id=3,
+ fs_name="Sonic (EU) [T]",
+ fs_path="n64/roms",
+ platform=platform,
+ full_path="n64/roms/Sonic (EU) [T]",
+ files=[
+ RomFile(
+ id=1,
+ file_name="Sonic (EU) [T].n64",
+ file_path="n64/roms/Sonic (EU) [T]",
+ ),
+ RomFile(
+ id=2,
+ file_name="Sonic (EU) [T-En].z64",
+ file_path="n64/roms/Sonic (EU) [T]/translation",
+ ),
+ ],
+ )
+
@pytest.fixture
def rom_multi(self, platform: Platform):
return Rom(
@@ -555,3 +577,47 @@ class TestFSRomsHandler:
async with await handler.stream_file("psx/roms/PaRappa the Rapper.zip") as f:
content = await f.read()
assert len(content) > 0
+
+ async def test_top_level_files_only_in_main_hash(
+ self, handler: FSRomsHandler, rom_single_nested
+ ):
+ """Test that only top-level files contribute to main ROM hash calculation"""
+ rom_files, rom_crc, rom_md5, rom_sha1, rom_ra = await handler.get_rom_files(
+ rom_single_nested
+ )
+
+ # Verify we have multiple files (base game + translation)
+ assert len(rom_files) == 2
+
+ base_game_rom_file = None
+ translation_rom_file = None
+
+ for rom_file in rom_files:
+ if rom_file.file_name == "Sonic (EU) [T].n64":
+ base_game_rom_file = rom_file
+ elif rom_file.file_name == "Sonic (EU) [T-En].z64":
+ translation_rom_file = rom_file
+
+ assert base_game_rom_file is not None, "Base game file not found"
+ assert translation_rom_file is not None, "Translation file not found"
+
+ # Verify file categories
+ assert base_game_rom_file.category is None
+ assert translation_rom_file.category == RomFileCategory.TRANSLATION
+
+ # The main ROM hash should be different from the translation file hash
+ # (this verifies that the translation is not included in the main hash)
+
+ assert (
+ rom_md5 == base_game_rom_file.md5_hash
+ ), "Main ROM hash should include base game file"
+ assert (
+ rom_md5 != translation_rom_file.md5_hash
+ ), "Main ROM hash should not include translation file"
+
+ assert (
+ rom_sha1 == base_game_rom_file.sha1_hash
+ ), "Main ROM hash should include base game file"
+ assert (
+ rom_sha1 != translation_rom_file.sha1_hash
+ ), "Main ROM hash should not include translation file"
diff --git a/backend/tests/handler/metadata/test_base_handler.py b/backend/tests/handler/metadata/test_base_handler.py
index 6a067a69b..d55aa2a8b 100644
--- a/backend/tests/handler/metadata/test_base_handler.py
+++ b/backend/tests/handler/metadata/test_base_handler.py
@@ -268,54 +268,6 @@ class TestMetadataHandlerMethods:
assert index_entry is not None
assert index_entry["publisher"] == "Nintendo"
- @pytest.mark.asyncio
- async def test_switch_titledb_format_cache_missing_fetch_success(
- self, handler: MetadataHandler
- ):
- """Test Switch TitleDB format when cache is missing but fetch succeeds."""
- with patch.object(
- async_cache, "exists", new_callable=AsyncMock
- ) as mock_exists, patch.object(
- async_cache, "hget", new_callable=AsyncMock
- ) as mock_hget, patch(
- "handler.metadata.base_handler.update_switch_titledb_task"
- ) as mock_task:
-
- # First call returns False (cache missing), second returns True (after fetch)
- mock_exists.side_effect = [False, True]
- mock_hget.return_value = json.dumps({"name": "Fetched Game"})
- mock_task.run = AsyncMock()
-
- match = re.match(SWITCH_TITLEDB_REGEX, "70123456789012")
- assert match is not None
- result = await handler._switch_titledb_format(match, "original")
-
- mock_task.run.assert_called_once_with(force=True)
- assert result[0] == "Fetched Game"
-
- @pytest.mark.asyncio
- async def test_switch_titledb_format_cache_missing_fetch_fails(
- self, handler: MetadataHandler
- ):
- """Test Switch TitleDB format when cache is missing and fetch fails."""
- with patch.object(
- async_cache, "exists", new_callable=AsyncMock
- ) as mock_exists, patch(
- "handler.metadata.base_handler.update_switch_titledb_task"
- ) as mock_task, patch(
- "handler.metadata.base_handler.log"
- ) as mock_log:
-
- mock_exists.return_value = False # Cache always missing
- mock_task.run = AsyncMock()
-
- match = re.match(SWITCH_TITLEDB_REGEX, "70123456789012")
- assert match is not None
- result = await handler._switch_titledb_format(match, "original")
-
- mock_log.error.assert_called()
- assert result == ("original", None)
-
@pytest.mark.asyncio
async def test_switch_titledb_format_not_found(self, handler: MetadataHandler):
"""Test Switch TitleDB format when title ID not found."""
diff --git a/backend/tests/tasks/test_scan_library.py b/backend/tests/tasks/test_scan_library.py
index 182b2c0d7..57251a8a9 100644
--- a/backend/tests/tasks/test_scan_library.py
+++ b/backend/tests/tasks/test_scan_library.py
@@ -43,9 +43,9 @@ class TestScanLibraryTask:
mock_log.info.assert_any_call("Scheduled library scan started...")
mock_scan_platforms.assert_called_once_with(
- [],
- scan_type=ScanType.UNIDENTIFIED,
+ platform_ids=[],
metadata_sources=[MetadataSource.RA, MetadataSource.LB],
+ scan_type=ScanType.UNIDENTIFIED,
)
mock_log.info.assert_any_call("Scheduled library scan done")
diff --git a/backend/utils/__init__.py b/backend/utils/__init__.py
index a0861fc26..bc1fdd441 100644
--- a/backend/utils/__init__.py
+++ b/backend/utils/__init__.py
@@ -1,3 +1,5 @@
+import uuid
+
from __version__ import __version__
@@ -7,3 +9,12 @@ def get_version() -> str:
return __version__
return "development"
+
+
+def is_valid_uuid(uuid_str: str) -> bool:
+ """Check if a string is a valid UUID."""
+ try:
+ uuid.UUID(uuid_str, version=4)
+ return True
+ except ValueError:
+ return False
diff --git a/backend/watcher.py b/backend/watcher.py
index b4081fe09..4a45bd037 100644
--- a/backend/watcher.py
+++ b/backend/watcher.py
@@ -7,7 +7,8 @@ from typing import cast
import sentry_sdk
from opentelemetry import trace
-from rq.job import Job
+from rq import Worker
+from rq.job import Job, JobStatus
from config import (
ENABLE_RESCAN_ON_FILESYSTEM_CHANGE,
@@ -32,6 +33,7 @@ from handler.metadata import (
meta_ss_handler,
meta_tgdb_handler,
)
+from handler.redis_handler import low_prio_queue, redis_client
from handler.scan_handler import MetadataSource, ScanType
from logger.formatter import CYAN
from logger.formatter import highlight as hl
@@ -67,17 +69,60 @@ VALID_EVENTS = frozenset(
Change = tuple[EventType, str]
+def get_pending_scan_jobs() -> list[Job]:
+ """Get all pending scan jobs (scheduled, queued, or running) for scan_platforms function.
+
+ Returns:
+ list[Job]: List of pending scan jobs that are not completed or failed
+ """
+ pending_jobs = []
+
+ # Get jobs from the scheduler (delayed/scheduled jobs)
+ scheduled_jobs = tasks_scheduler.get_jobs()
+ for job in scheduled_jobs:
+ if (
+ isinstance(job, Job)
+ and job.func_name == "endpoints.sockets.scan.scan_platforms"
+ and job.get_status()
+ in [JobStatus.SCHEDULED, JobStatus.QUEUED, JobStatus.STARTED]
+ ):
+ pending_jobs.append(job)
+
+ # Get jobs from the queue (immediate jobs)
+ queue_jobs = low_prio_queue.get_jobs()
+ for job in queue_jobs:
+ if (
+ isinstance(job, Job)
+ and job.func_name == "endpoints.sockets.scan.scan_platforms"
+ and job.get_status() in [JobStatus.QUEUED, JobStatus.STARTED]
+ ):
+ pending_jobs.append(job)
+
+ # Get currently running jobs from workers
+ workers = Worker.all(connection=redis_client)
+ for worker in workers:
+ current_job = worker.get_current_job()
+ if (
+ current_job
+ and current_job.func_name == "endpoints.sockets.scan.scan_platforms"
+ and current_job.get_status() == JobStatus.STARTED
+ ):
+ pending_jobs.append(current_job)
+
+ return pending_jobs
+
+
def process_changes(changes: Sequence[Change]) -> None:
if not ENABLE_RESCAN_ON_FILESYSTEM_CHANGE:
return
- # Filter for valid events.
+ # Filter for valid events
changes = [change for change in changes if change[0] in VALID_EVENTS]
if not changes:
return
with tracer.start_as_current_span("process_changes"):
- # Find affected platform slugs.
+ # Find affected platform slugs
fs_slugs: set[str] = set()
changes_platform_directory = False
for change in changes:
@@ -101,7 +146,7 @@ def process_changes(changes: Sequence[Change]) -> None:
log.info("No valid filesystem slugs found in changes, exiting...")
return
- # Check whether any metadata source is enabled.
+ # Check whether any metadata source is enabled
source_mapping: dict[str, bool] = {
MetadataSource.IGDB: meta_igdb_handler.is_enabled(),
MetadataSource.SS: meta_ss_handler.is_enabled(),
@@ -119,31 +164,29 @@ def process_changes(changes: Sequence[Change]) -> None:
log.warning("No metadata sources enabled, skipping rescan")
return
- # Get currently scheduled jobs for the scan_platforms function.
- already_scheduled_jobs = [
- job
- for job in tasks_scheduler.get_jobs()
- if isinstance(job, Job)
- and job.func_name == "endpoints.sockets.scan.scan_platforms"
- ]
+ # Get currently pending scan jobs (scheduled, queued, or running)
+ pending_jobs = get_pending_scan_jobs()
- # If a full rescan is already scheduled, skip further processing.
- if any(job.args[0] == [] for job in already_scheduled_jobs):
- log.info("Full rescan already scheduled")
+ # If a full rescan is already scheduled, skip further processing
+ full_rescan_jobs = [
+ job for job in pending_jobs if job.args and job.args[0] == []
+ ]
+ if full_rescan_jobs:
+ log.info(f"Full rescan already scheduled ({len(full_rescan_jobs)} job(s))")
return
time_delta = timedelta(minutes=RESCAN_ON_FILESYSTEM_CHANGE_DELAY)
rescan_in_msg = f"rescanning in {hl(str(RESCAN_ON_FILESYSTEM_CHANGE_DELAY), color=CYAN)} minutes."
- # Any change to a platform directory should trigger a full rescan.
+ # Any change to a platform directory should trigger a full rescan
if changes_platform_directory:
log.info(f"Platform directory changed, {rescan_in_msg}")
tasks_scheduler.enqueue_in(
time_delta,
scan_platforms,
- [],
- scan_type=ScanType.UNIDENTIFIED,
+ platform_ids=[],
metadata_sources=metadata_sources,
+ scan_type=ScanType.UNIDENTIFIED,
timeout=SCAN_TIMEOUT,
result_ttl=TASK_RESULT_TTL,
meta={
@@ -153,25 +196,32 @@ def process_changes(changes: Sequence[Change]) -> None:
)
return
- # Otherwise, process each platform slug.
+ # Otherwise, process each platform slug
for fs_slug in fs_slugs:
- # TODO: Query platforms from the database in bulk.
+ # TODO: Query platforms from the database in bulk
db_platform = db_platform_handler.get_platform_by_fs_slug(fs_slug)
if not db_platform:
continue
- # Skip if a scan is already scheduled for this platform.
- if any(db_platform.id in job.args[0] for job in already_scheduled_jobs):
- log.info(f"Scan already scheduled for {hl(fs_slug)}")
+ # Skip if a scan is already scheduled for this platform
+ platform_scan_jobs = [
+ job
+ for job in pending_jobs
+ if job.args and db_platform.id in job.args[0]
+ ]
+ if platform_scan_jobs:
+ log.info(
+ f"Scan already scheduled for {hl(fs_slug)} ({len(platform_scan_jobs)} job(s))"
+ )
continue
log.info(f"Change detected in {hl(fs_slug)} folder, {rescan_in_msg}")
tasks_scheduler.enqueue_in(
time_delta,
scan_platforms,
- [db_platform.id],
- scan_type=ScanType.QUICK,
+ platform_ids=[db_platform.id],
metadata_sources=metadata_sources,
+ scan_type=ScanType.QUICK,
timeout=SCAN_TIMEOUT,
result_ttl=TASK_RESULT_TTL,
meta={
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 86a109a26..f176798f3 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -14,15 +14,15 @@
# ARGUMENT DECLARATIONS
ARG ALPINE_VERSION=3.22
-ARG ALPINE_SHA256=4bcff63911fcb4448bd4fdacec207030997caf25e9bea4045fa6c8c44de311d1
+ARG ALPINE_SHA256=4b7ce07002c69e8f3d704a9c5d6fd3053be500b7f1c69fc0d80990c2ad8dd412
ARG PYTHON_VERSION=3.13
-ARG PYTHON_ALPINE_SHA256=9ba6d8cbebf0fb6546ae71f2a1c14f6ffd2fdab83af7fa5669734ef30ad48844
+ARG PYTHON_ALPINE_SHA256=e5fa639e49b85986c4481e28faa2564b45aa8021413f31026c3856e5911618b1
ARG NODE_VERSION=20.19
-ARG NODE_ALPINE_SHA256=eabac870db94f7342d6c33560d6613f188bbcf4bbe1f4eb47d5e2a08e1a37722
-ARG NGINX_VERSION=1.29.1
-ARG NGINX_SHA256=42a516af16b852e33b7682d5ef8acbd5d13fe08fecadc7ed98605ba5e3b26ab8
-ARG UV_VERSION=0.7.19
-ARG UV_SHA256=9ce16aa2fe33496c439996865dc121371bb33fd5fb37500007d48e2078686b0d
+ARG NODE_ALPINE_SHA256=96ee26670a085b1a61231a468db85ae7e493ddfbd8c35797bfa0b99b634665fe
+ARG NGINX_VERSION=1.29.2
+ARG NGINX_SHA256=61e01287e546aac28a3f56839c136b31f590273f3b41187a36f46f6a03bbfe22
+ARG UV_VERSION=0.8.24
+ARG UV_SHA256=779f3d612539b4696a1b228724cd79b6e8b8604075a9ac7d15378bccf4053373
FROM python:${PYTHON_VERSION}-alpine${ALPINE_VERSION}@sha256:${PYTHON_ALPINE_SHA256} AS python-alias
diff --git a/env.template b/env.template
index 2db8e2312..e81fec2ae 100644
--- a/env.template
+++ b/env.template
@@ -117,6 +117,10 @@ WEB_SERVER_MAX_REQUESTS_JITTER=100
WEB_SERVER_WORKER_CONNECTIONS=1000
IPV4_ONLY=false
+# Redis Workers
+SCAN_TIMEOUT=
+SCAN_WORKERS=
+
# Development only
DEV_MODE=true
DEV_HTTPS=false
diff --git a/frontend/package-lock.json b/frontend/package-lock.json
index a0777596d..e709aa20f 100644
--- a/frontend/package-lock.json
+++ b/frontend/package-lock.json
@@ -47,7 +47,7 @@
"openapi-typescript-codegen": "^0.29.0",
"typescript": "^5.7.3",
"typescript-eslint": "^8.42.0",
- "vite": "^6.3.6",
+ "vite": "^6.4.1",
"vite-plugin-mkcert": "^1.17.8",
"vite-plugin-pwa": "^0.21.1",
"vite-plugin-vuetify": "^2.0.4",
@@ -2978,6 +2978,7 @@
"node_modules/@rollup/rollup-darwin-x64": {
"version": "4.41.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.41.1.tgz",
+ "integrity": "sha512-egpJACny8QOdHNNMZKf8xY0Is6gIMz+tuqXlusxquWu3F833DcMwmGM7WlvCO9sB3OsPjdC4U0wHw5FabzCGZg==",
"cpu": [
"x64"
],
@@ -2985,8 +2986,7 @@
"optional": true,
"os": [
"darwin"
- ],
- "integrity": "sha512-egpJACny8QOdHNNMZKf8xY0Is6gIMz+tuqXlusxquWu3F833DcMwmGM7WlvCO9sB3OsPjdC4U0wHw5FabzCGZg=="
+ ]
},
"node_modules/@rollup/rollup-freebsd-arm64": {
"version": "4.41.1",
@@ -8858,10 +8858,11 @@
"license": "MIT"
},
"node_modules/vite": {
- "version": "6.3.6",
- "resolved": "https://registry.npmjs.org/vite/-/vite-6.3.6.tgz",
- "integrity": "sha512-0msEVHJEScQbhkbVTb/4iHZdJ6SXp/AvxL2sjwYQFfBqleHtnCqv1J3sa9zbWz/6kW1m9Tfzn92vW+kZ1WV6QA==",
+ "version": "6.4.1",
+ "resolved": "https://registry.npmjs.org/vite/-/vite-6.4.1.tgz",
+ "integrity": "sha512-+Oxm7q9hDoLMyJOYfUYBuHQo+dkAloi33apOPP56pzj+vsdJDzr+j1NISE5pyaAuKL4A3UD34qd0lx5+kfKp2g==",
"devOptional": true,
+ "license": "MIT",
"dependencies": {
"esbuild": "^0.25.0",
"fdir": "^6.4.4",
diff --git a/frontend/package.json b/frontend/package.json
index 43a986deb..a4d8a952e 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -66,7 +66,7 @@
"openapi-typescript-codegen": "^0.29.0",
"typescript": "^5.7.3",
"typescript-eslint": "^8.42.0",
- "vite": "^6.3.6",
+ "vite": "^6.4.1",
"vite-plugin-mkcert": "^1.17.8",
"vite-plugin-pwa": "^0.21.1",
"vite-plugin-vuetify": "^2.0.4",
diff --git a/frontend/src/__generated__/models/ConfigResponse.ts b/frontend/src/__generated__/models/ConfigResponse.ts
index bdcb25a10..92015583a 100644
--- a/frontend/src/__generated__/models/ConfigResponse.ts
+++ b/frontend/src/__generated__/models/ConfigResponse.ts
@@ -5,6 +5,7 @@
import type { EjsControls } from './EjsControls';
export type ConfigResponse = {
CONFIG_FILE_MOUNTED: boolean;
+ CONFIG_FILE_WRITABLE: boolean;
EXCLUDED_PLATFORMS: Array- Versions of the same platform. A common example is Capcom Play System - 1 is an arcade system. Platform versions will let you setup a custom - platform for RomM to import and tell RomM which platform it needs to - scrape against. + Platform versions allow you to create custom platform entries for + games that belong to the same system but have different versions.
@@ -52,7 +50,7 @@ const editable = ref(false); variant="text" icon="mdi-cog" @click="editable = !editable" - :disabled="!config.CONFIG_FILE_MOUNTED" + :disabled="!config.CONFIG_FILE_WRITABLE" /> diff --git a/frontend/src/components/Settings/ServerStats/PlatformsStats.vue b/frontend/src/components/Settings/ServerStats/PlatformsStats.vue index 1d54e18a8..42a3657bd 100644 --- a/frontend/src/components/Settings/ServerStats/PlatformsStats.vue +++ b/frontend/src/components/Settings/ServerStats/PlatformsStats.vue @@ -12,22 +12,22 @@ const props = defineProps<{ }>(); const { t } = useI18n(); const platformsStore = storePlatforms(); -const { filteredPlatforms } = storeToRefs(platformsStore); +const { allPlatforms } = storeToRefs(platformsStore); const orderBy = ref<"name" | "size" | "count">("name"); const sortedPlatforms = computed(() => { - const platforms = [...filteredPlatforms.value]; if (orderBy.value === "size") { - return platforms.sort( + return allPlatforms.value.sort( (a, b) => Number(b.fs_size_bytes) - Number(a.fs_size_bytes), ); } if (orderBy.value === "count") { - return platforms.sort((a, b) => b.rom_count - a.rom_count); + return allPlatforms.value.sort((a, b) => b.rom_count - a.rom_count); } - // Default to name - return platforms.sort((a, b) => - a.name.localeCompare(b.name, undefined, { sensitivity: "base" }), + return allPlatforms.value.sort((a, b) => + a.display_name.localeCompare(b.display_name, undefined, { + sensitivity: "base", + }), ); }); diff --git a/frontend/src/components/common/Collection/Card.vue b/frontend/src/components/common/Collection/Card.vue index 51cc8c0b2..a332c98a0 100644 --- a/frontend/src/components/common/Collection/Card.vue +++ b/frontend/src/components/common/Collection/Card.vue @@ -52,7 +52,7 @@ const memoizedCovers = ref({ }); const collectionCoverImage = computed(() => - props.collection.name?.toLowerCase() == "favourites" + props.collection.is_favorite ? 
getFavoriteCoverImage(props.collection.name) : getCollectionCoverImage(props.collection.name), ); diff --git a/frontend/src/components/common/Collection/Dialog/CreateCollection.vue b/frontend/src/components/common/Collection/Dialog/CreateCollection.vue index 2da836d08..f31cd9cb6 100644 --- a/frontend/src/components/common/Collection/Dialog/CreateCollection.vue +++ b/frontend/src/components/common/Collection/Dialog/CreateCollection.vue @@ -20,7 +20,11 @@ const { mdAndUp } = useDisplay(); const router = useRouter(); const show = ref(false); const heartbeat = storeHeartbeat(); -const collection = ref