diff --git a/.trunk/configs/.bandit b/.trunk/configs/.bandit
new file mode 100644
index 000000000..4a9f16d0b
--- /dev/null
+++ b/.trunk/configs/.bandit
@@ -0,0 +1,3 @@
+[bandit]
+exclude = tests
+skips=B101
diff --git a/SECURITY.md b/SECURITY.md
index 71508d11f..547fe07e1 100644
--- a/SECURITY.md
+++ b/SECURITY.md
@@ -1,7 +1,7 @@
-Thanks for helping make RomM safer for everyone.
-
 ## Reporting Security Issues
 
+Thanks for helping make RomM safer for everyone.
+
 If you believe you have found a security vulnerability in RomM, please report it to us through coordinated disclosure.
 
 **Do not report security vulnerabilities through public GitHub issues, discussions, pull requests, or on our public Discord server.**
diff --git a/backend/alembic/versions/0014_asset_files.py b/backend/alembic/versions/0014_asset_files.py
index 1ac2c58c8..c2690ed58 100644
--- a/backend/alembic/versions/0014_asset_files.py
+++ b/backend/alembic/versions/0014_asset_files.py
@@ -64,14 +64,16 @@ def migrate_to_mysql() -> None:
             continue
 
         table_data = sqlite_conn.execute(
-            text(f"SELECT * FROM {table_name}")
+            text(f"SELECT * FROM {table_name}")  # nosec B608
        ).fetchall()
 
        # Insert data into MariaDB table
        for row in table_data:
            mapped_row = {f"{i}": value for i, value in enumerate(row, start=1)}
            columns = ",".join([f":{i}" for i in range(1, len(row) + 1)])
-            insert_query = f"INSERT INTO {table_name} VALUES ({columns})"
+            insert_query = (
+                f"INSERT INTO {table_name} VALUES ({columns})"  # nosec B608
+            )
            maria_conn.execute(text(insert_query), mapped_row)
 
     maria_conn.execute(text("SET FOREIGN_KEY_CHECKS=1"))
diff --git a/backend/config/__init__.py b/backend/config/__init__.py
index a4e7296f3..fbdbf3339 100644
--- a/backend/config/__init__.py
+++ b/backend/config/__init__.py
@@ -8,7 +8,7 @@ load_dotenv()
 
 # UVICORN
 DEV_PORT: Final = int(os.environ.get("VITE_BACKEND_DEV_PORT", "5000"))
-DEV_HOST: Final = "0.0.0.0"
+DEV_HOST: Final = "127.0.0.1"
 ROMM_HOST: Final = os.environ.get("ROMM_HOST", DEV_HOST)
 
 # PATHS
diff --git a/backend/config/config_manager.py b/backend/config/config_manager.py
index 3928d4c5a..50339dedc 100644
--- a/backend/config/config_manager.py
+++ b/backend/config/config_manager.py
@@ -220,9 +220,9 @@ class ConfigManager:
                 self._raw_config = yaml.load(config_file, Loader=SafeLoader) or {}
         except FileNotFoundError:
             self._raw_config = {}
-        except PermissionError:
+        except PermissionError as exc:
             self._raw_config = {}
-            raise ConfigNotReadableException
+            raise ConfigNotReadableException from exc
 
         self._parse_config()
         self._validate_config()
@@ -259,9 +259,9 @@ class ConfigManager:
                 yaml.dump(self._raw_config, config_file)
         except FileNotFoundError:
             self._raw_config = {}
-        except PermissionError:
+        except PermissionError as exc:
             self._raw_config = {}
-            raise ConfigNotWritableException
+            raise ConfigNotWritableException from exc
 
     def add_platform_binding(self, fs_slug: str, slug: str) -> None:
         platform_bindings = self.config.PLATFORMS_BINDING
diff --git a/backend/config/tests/fixtures/config/config.yml b/backend/config/tests/fixtures/config/config.yml
index 72a368b23..553c0250e 100644
--- a/backend/config/tests/fixtures/config/config.yml
+++ b/backend/config/tests/fixtures/config/config.yml
@@ -3,29 +3,29 @@
 exclude:
   platforms:
-    - "romm"
+    - romm
 
   roms:
     single_file:
       names:
-        - "info.txt"
+        - info.txt
      extensions:
-        - "xml"
+        - xml
     multi_file:
       names:
-        - "my_multi_file_game"
-        - "DLC"
+        - my_multi_file_game
+        - DLC
       parts:
         names:
-          - "data.xml"
+          - data.xml
         extensions:
-          - "txt"
+          - txt
 
 system:
   platforms:
-    gc: "ngc"
+    gc: ngc
   versions:
-    naomi: "arcade"
+    naomi: arcade
 
 filesystem:
-  roms_folder: "ROMS"
-  firmware_folder: "BIOS"
+  roms_folder: ROMS
+  firmware_folder: BIOS
diff --git a/backend/decorators/auth.py b/backend/decorators/auth.py
index 6394037b1..37c174e2b 100644
--- a/backend/decorators/auth.py
+++ b/backend/decorators/auth.py
@@ -25,17 +25,17 @@ oauth2_password_bearer = OAuth2PasswordBearer(
 def protected_route(
     method: Any,
     path: str,
-    scopes: list[str] = [],
+    scopes: list[str] | None = None,
     **kwargs,
 ):
     def decorator(func: DecoratedCallable):
-        fn = requires(scopes)(func)
+        fn = requires(scopes or [])(func)
         return method(
             path,
             dependencies=[
                 Security(
                     dependency=oauth2_password_bearer,
-                    scopes=scopes,
+                    scopes=scopes or [],
                 ),
                 Security(dependency=HTTPBasic(auto_error=False)),
             ],
diff --git a/backend/decorators/database.py b/backend/decorators/database.py
index 6e4fa0a32..230e6ae13 100644
--- a/backend/decorators/database.py
+++ b/backend/decorators/database.py
@@ -15,10 +15,10 @@ def begin_session(func):
             with args[0].session.begin() as s:
                 kwargs["session"] = s
                 return func(*args, **kwargs)
-        except ProgrammingError as e:
-            log.critical(str(e))
+        except ProgrammingError as exc:
+            log.critical(str(exc))
             raise HTTPException(
-                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e)
-            )
+                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(exc)
+            ) from exc
 
     return wrapper
diff --git a/backend/endpoints/auth.py b/backend/endpoints/auth.py
index 6e5122fa8..03811f02e 100644
--- a/backend/endpoints/auth.py
+++ b/backend/endpoints/auth.py
@@ -133,7 +133,9 @@ async def token(form_data: Annotated[OAuth2RequestForm, Depends()]) -> TokenResp
 
 
 @router.post("/login")
-def login(request: Request, credentials=Depends(HTTPBasic())) -> MessageResponse:
+def login(
+    request: Request, credentials=Depends(HTTPBasic())
+) -> MessageResponse:  # nosec B008
     """Session login endpoint
 
     Args:
diff --git a/backend/endpoints/config.py b/backend/endpoints/config.py
index 07c6eb488..6a5f6bd92 100644
--- a/backend/endpoints/config.py
+++ b/backend/endpoints/config.py
@@ -22,11 +22,11 @@ def get_config() -> ConfigResponse:
 
     try:
         return cm.get_config().__dict__
-    except ConfigNotReadableException as e:
-        log.critical(e.message)
+    except ConfigNotReadableException as exc:
+        log.critical(exc.message)
         raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=e.message
-        )
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=exc.message
+        ) from exc
 
 
 @protected_route(router.post, "/config/system/platforms", ["platforms.write"])
@@ -39,11 +39,11 @@ async def add_platform_binding(request: Request) -> MessageResponse:
 
     try:
         cm.add_platform_binding(fs_slug, slug)
-    except ConfigNotWritableException as e:
-        log.critical(e.message)
+    except ConfigNotWritableException as exc:
+        log.critical(exc.message)
         raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=e.message
-        )
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=exc.message
+        ) from exc
 
     return {"msg": f"{fs_slug} binded to: {slug} successfully!"}
 
@@ -56,11 +56,11 @@ async def delete_platform_binding(request: Request, fs_slug: str) -> MessageResp
 
     try:
         cm.remove_platform_binding(fs_slug)
-    except ConfigNotWritableException as e:
-        log.critical(e.message)
+    except ConfigNotWritableException as exc:
+        log.critical(exc.message)
         raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=e.message
-        )
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=exc.message
+        ) from exc
return {"msg": f"{fs_slug} bind removed successfully!"} @@ -75,11 +75,11 @@ async def add_platform_version(request: Request) -> MessageResponse: try: cm.add_platform_version(fs_slug, slug) - except ConfigNotWritableException as e: - log.critical(e.message) + except ConfigNotWritableException as exc: + log.critical(exc.message) raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=e.message - ) + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=exc.message + ) from exc return {"msg": f"Added {fs_slug} as version of: {slug} successfully!"} @@ -92,11 +92,11 @@ async def delete_platform_version(request: Request, fs_slug: str) -> MessageResp try: cm.remove_platform_version(fs_slug) - except ConfigNotWritableException as e: - log.critical(e.message) + except ConfigNotWritableException as exc: + log.critical(exc.message) raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=e.message - ) + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=exc.message + ) from exc return {"msg": f"{fs_slug} version removed successfully!"} diff --git a/backend/endpoints/firmware.py b/backend/endpoints/firmware.py index 5ca13d20a..aafd959b1 100644 --- a/backend/endpoints/firmware.py +++ b/backend/endpoints/firmware.py @@ -2,7 +2,7 @@ from config import DISABLE_DOWNLOAD_ENDPOINT_AUTH, LIBRARY_BASE_PATH from decorators.auth import protected_route from endpoints.responses import MessageResponse from endpoints.responses.firmware import AddFirmwareResponse, FirmwareSchema -from fastapi import APIRouter, File, HTTPException, Request, UploadFile, status +from fastapi import APIRouter, HTTPException, Request, UploadFile, status from fastapi.responses import FileResponse from handler.database import db_firmware_handler, db_platform_handler from handler.filesystem import fs_firmware_handler @@ -14,7 +14,7 @@ router = APIRouter() @protected_route(router.post, "/firmware", ["firmware.write"]) def add_firmware( - request: Request, platform_id: int, files: list[UploadFile] = File(...) 
+    request: Request, platform_id: int, files: list[UploadFile] | None = None
 ) -> AddFirmwareResponse:
     """Upload firmware files endpoint
 
@@ -196,9 +196,11 @@ async def delete_firmware(
         fs_firmware_handler.remove_file(
             file_name=firmware.file_name, file_path=firmware.file_path
         )
-    except FileNotFoundError:
+    except FileNotFoundError as exc:
         error = f"Firmware file {firmware.file_name} not found for platform {firmware.platform_slug}"
         log.error(error)
-        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=error)
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND, detail=error
+        ) from exc
 
     return {"msg": f"{len(firmare_ids)} firmware files deleted successfully!"}
diff --git a/backend/endpoints/forms/identity.py b/backend/endpoints/forms/identity.py
index 9286558ee..2020219cf 100644
--- a/backend/endpoints/forms/identity.py
+++ b/backend/endpoints/forms/identity.py
@@ -1,6 +1,6 @@
 from typing import Optional
 
-from fastapi import File, UploadFile
+from fastapi import UploadFile
 from fastapi.param_functions import Form
 
 
@@ -11,7 +11,7 @@ class UserForm:
         password: Optional[str] = None,
         role: Optional[str] = None,
         enabled: Optional[bool] = None,
-        avatar: Optional[UploadFile] = File(None),
+        avatar: Optional[UploadFile] = None,
     ):
         self.username = username
         self.password = password
diff --git a/backend/endpoints/rom.py b/backend/endpoints/rom.py
index 5cfda8c80..b82ffa3a3 100644
--- a/backend/endpoints/rom.py
+++ b/backend/endpoints/rom.py
@@ -13,7 +13,7 @@ from endpoints.responses.rom import (
     RomSchema,
 )
 from exceptions.fs_exceptions import RomAlreadyExistsException
-from fastapi import APIRouter, File, HTTPException, Query, Request, UploadFile, status
+from fastapi import APIRouter, HTTPException, Query, Request, UploadFile, status
 from fastapi.responses import FileResponse
 from fastapi_pagination.cursor import CursorPage, CursorParams
 from fastapi_pagination.ext.sqlalchemy import paginate
@@ -29,7 +29,7 @@ router = APIRouter()
 
 @protected_route(router.post, "/roms", ["roms.write"])
 def add_roms(
-    request: Request, platform_id: int, roms: list[UploadFile] = File(...)
+    request: Request, platform_id: int, roms: list[UploadFile] | None = None
 ) -> AddRomsResponse:
     """Upload roms endpoint (one or more at the same time)
 
@@ -247,7 +247,7 @@ async def update_rom(
     id: int,
     rename_as_igdb: bool = False,
     remove_cover: bool = False,
-    artwork: Optional[UploadFile] = File(None),
+    artwork: Optional[UploadFile] = None,
 ) -> RomSchema:
     """Update rom endpoint
 
@@ -305,11 +305,11 @@ async def update_rom(
             new_name=fs_safe_file_name,
             file_path=db_rom.file_path,
         )
-    except RomAlreadyExistsException as e:
-        log.error(str(e))
+    except RomAlreadyExistsException as exc:
+        log.error(str(exc))
         raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e)
-        )
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(exc)
+        ) from exc
 
     cleaned_data["file_name"] = fs_safe_file_name
     cleaned_data["file_name_no_tags"] = fs_rom_handler.get_file_name_with_no_tags(
@@ -415,10 +415,12 @@ async def delete_roms(
                 fs_rom_handler.remove_file(
                     file_name=rom.file_name, file_path=rom.file_path
                 )
-            except FileNotFoundError:
+            except FileNotFoundError as exc:
                 error = f"Rom file {rom.file_name} not found for platform {rom.platform_slug}"
                 log.error(error)
-                raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=error)
+                raise HTTPException(
+                    status_code=status.HTTP_404_NOT_FOUND, detail=error
+                ) from exc
 
     return {"msg": f"{len(roms_ids)} roms deleted successfully!"}
diff --git a/backend/endpoints/saves.py b/backend/endpoints/saves.py
index cb42241dd..4a958c26e 100644
--- a/backend/endpoints/saves.py
+++ b/backend/endpoints/saves.py
@@ -1,7 +1,7 @@
 from decorators.auth import protected_route
 from endpoints.responses import MessageResponse
 from endpoints.responses.assets import SaveSchema, UploadedSavesResponse
-from fastapi import APIRouter, File, HTTPException, Request, UploadFile, status
+from fastapi import APIRouter, HTTPException, Request, UploadFile, status
 from handler.database import db_rom_handler, db_save_handler, db_screenshot_handler
 from handler.filesystem import fs_asset_handler
 from handler.scan_handler import scan_save
@@ -14,7 +14,7 @@ router = APIRouter()
 def add_saves(
     request: Request,
     rom_id: int,
-    saves: list[UploadFile] = File(...),
+    saves: list[UploadFile] | None = None,
     emulator: str = None,
 ) -> UploadedSavesResponse:
     rom = db_rom_handler.get_roms(rom_id)
@@ -129,10 +129,12 @@ async def delete_saves(request: Request) -> MessageResponse:
             fs_asset_handler.remove_file(
                 file_name=save.file_name, file_path=save.file_path
             )
-        except FileNotFoundError:
+        except FileNotFoundError as exc:
             error = f"Save file {save.file_name} not found for platform {save.rom.platform_slug}"
             log.error(error)
-            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=error)
+            raise HTTPException(
+                status_code=status.HTTP_404_NOT_FOUND, detail=error
+            ) from exc
 
         if save.screenshot:
             db_screenshot_handler.delete_screenshot(save.screenshot.id)
diff --git a/backend/endpoints/screenshots.py b/backend/endpoints/screenshots.py
index 48b8ec110..3dfd258fe 100644
--- a/backend/endpoints/screenshots.py
+++ b/backend/endpoints/screenshots.py
@@ -1,6 +1,6 @@
 from decorators.auth import protected_route
 from endpoints.responses.assets import UploadedScreenshotsResponse
-from fastapi import APIRouter, File, HTTPException, Request, UploadFile, status
+from fastapi import APIRouter, HTTPException, Request, UploadFile, status
 from handler.database import db_rom_handler, db_screenshot_handler
 from handler.filesystem import fs_asset_handler
 from handler.scan_handler import scan_screenshot
@@ -11,7 +11,7 @@ router = APIRouter()
 
 @protected_route(router.post, "/screenshots", ["assets.write"])
 def add_screenshots(
-    request: Request, rom_id: int, screenshots: list[UploadFile] = File(...)
+    request: Request, rom_id: int, screenshots: list[UploadFile] | None = None
 ) -> UploadedScreenshotsResponse:
     rom = db_rom_handler.get_roms(rom_id)
     current_user = request.user
diff --git a/backend/endpoints/search.py b/backend/endpoints/search.py
index 658f01750..6ba084bf2 100644
--- a/backend/endpoints/search.py
+++ b/backend/endpoints/search.py
@@ -64,12 +64,12 @@ async def search_rom(
             moby_matched_roms = meta_moby_handler.get_matched_roms_by_id(
                 int(search_term)
             )
-        except ValueError:
+        except ValueError as exc:
             log.error(f"Search error: invalid ID '{search_term}'")
             raise HTTPException(
                 status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                 detail=f"Tried searching by ID, but '{search_term}' is not a valid ID",
-            )
+            ) from exc
     elif search_by.lower() == "name":
         igdb_matched_roms = meta_igdb_handler.get_matched_roms_by_name(
             search_term, _get_main_platform_igdb_id(rom.platform), search_extended
diff --git a/backend/endpoints/sockets/scan.py b/backend/endpoints/sockets/scan.py
index 9928edc45..3db87e6d0 100644
--- a/backend/endpoints/sockets/scan.py
+++ b/backend/endpoints/sockets/scan.py
@@ -41,8 +41,8 @@ def _get_socket_manager():
 async def scan_platforms(
     platform_ids: list[int],
     scan_type: ScanType = ScanType.QUICK,
-    selected_roms: list[str] = (),
-    metadata_sources: list[str] = ["igdb", "moby"],
+    selected_roms: list[str] | None = None,
+    metadata_sources: list[str] | None = None,
 ):
     """Scan all the listed platforms and fetch metadata from different sources
 
@@ -53,6 +53,12 @@ async def scan_platforms(
         metadata_sources (list[str], optional): List of metadata sources to be used. Defaults to all sources.
""" + if not selected_roms: + selected_roms = [] + + if not metadata_sources: + metadata_sources = ["igdb", "moby"] + sm = _get_socket_manager() if not IGDB_API_ENABLED and not MOBY_API_ENABLED: diff --git a/backend/endpoints/states.py b/backend/endpoints/states.py index 2aeeb5fa0..2fc002e4e 100644 --- a/backend/endpoints/states.py +++ b/backend/endpoints/states.py @@ -1,7 +1,7 @@ from decorators.auth import protected_route from endpoints.responses import MessageResponse from endpoints.responses.assets import StateSchema, UploadedStatesResponse -from fastapi import APIRouter, File, HTTPException, Request, UploadFile, status +from fastapi import APIRouter, HTTPException, Request, UploadFile, status from handler.database import db_rom_handler, db_screenshot_handler, db_state_handler from handler.filesystem import fs_asset_handler from handler.scan_handler import scan_state @@ -14,7 +14,7 @@ router = APIRouter() def add_states( request: Request, rom_id: int, - states: list[UploadFile] = File(...), + states: list[UploadFile] | None = None, emulator: str = None, ) -> UploadedStatesResponse: rom = db_rom_handler.get_roms(rom_id) @@ -128,10 +128,12 @@ async def delete_states(request: Request) -> MessageResponse: fs_asset_handler.remove_file( file_name=state.file_name, file_path=state.file_path ) - except FileNotFoundError: + except FileNotFoundError as exc: error = f"Save file {state.file_name} not found for platform {state.rom.platform_slug}" log.error(error) - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=error) + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail=error + ) from exc if state.screenshot: db_screenshot_handler.delete_screenshot(state.screenshot.id) diff --git a/backend/handler/auth/base_handler.py b/backend/handler/auth/base_handler.py index 85a7bc646..f41470f40 100644 --- a/backend/handler/auth/base_handler.py +++ b/backend/handler/auth/base_handler.py @@ -131,8 +131,8 @@ class OAuthHandler: try: payload = jwt.decode(token, ROMM_AUTH_SECRET_KEY) - except (BadSignatureError, ValueError): - raise OAuthCredentialsException + except (BadSignatureError, ValueError) as exc: + raise OAuthCredentialsException from exc issuer = payload.claims.get("iss") if not issuer or issuer != "romm:oauth": diff --git a/backend/handler/filesystem/base_handler.py b/backend/handler/filesystem/base_handler.py index 801f4b2df..b7a1c56aa 100644 --- a/backend/handler/filesystem/base_handler.py +++ b/backend/handler/filesystem/base_handler.py @@ -1,7 +1,6 @@ import fnmatch import os import re -from abc import ABC from enum import Enum from typing import Final @@ -84,10 +83,7 @@ class Asset(Enum): SCREENSHOTS = "screenshots" -class FSHandler(ABC): - def __init__(self) -> None: - pass - +class FSHandler: def get_roms_fs_structure(self, fs_slug: str) -> str: cnfg = cm.get_config() return ( diff --git a/backend/handler/filesystem/firmware_handler.py b/backend/handler/filesystem/firmware_handler.py index e780ecd9b..eb8cb32e4 100644 --- a/backend/handler/filesystem/firmware_handler.py +++ b/backend/handler/filesystem/firmware_handler.py @@ -55,8 +55,8 @@ class FSFirmwareHandler(FSHandler): "crc_hash": (binascii.crc32(data) & 0xFFFFFFFF) .to_bytes(4, byteorder="big") .hex(), - "md5_hash": hashlib.md5(data).hexdigest(), - "sha1_hash": hashlib.sha1(data).hexdigest(), + "md5_hash": hashlib.md5(data, usedforsecurity=False).hexdigest(), + "sha1_hash": hashlib.sha1(data, usedforsecurity=False).hexdigest(), } def file_exists(self, path: str, file_name: str): diff --git 
a/backend/handler/filesystem/platforms_handler.py b/backend/handler/filesystem/platforms_handler.py index 78607d326..e2843b0e5 100644 --- a/backend/handler/filesystem/platforms_handler.py +++ b/backend/handler/filesystem/platforms_handler.py @@ -33,8 +33,8 @@ class FSPlatformsHandler(FSHandler): parents=True ) ) - except FileExistsError: - raise PlatformAlreadyExistsException(fs_slug) + except FileExistsError as exc: + raise PlatformAlreadyExistsException(fs_slug) from exc def get_platforms(self) -> list[str]: """Gets all filesystem platforms diff --git a/backend/handler/filesystem/resources_handler.py b/backend/handler/filesystem/resources_handler.py index 534e4b875..274632ba5 100644 --- a/backend/handler/filesystem/resources_handler.py +++ b/backend/handler/filesystem/resources_handler.py @@ -95,12 +95,12 @@ class FSResourcesHandler(FSHandler): stream=True, timeout=120, ) - except requests.exceptions.ConnectionError: + except requests.exceptions.ConnectionError as exc: log.critical("Connection error: can't connect to IGDB") raise HTTPException( status_code=status.HTTP_503_SERVICE_UNAVAILABLE, detail="Can't connect to IGDB, check your internet connection.", - ) + ) from exc if res.status_code == 200: Path(cover_path).mkdir(parents=True, exist_ok=True) @@ -191,12 +191,12 @@ class FSResourcesHandler(FSHandler): try: res = requests.get(url, stream=True, timeout=120) - except requests.exceptions.ConnectionError: + except requests.exceptions.ConnectionError as exc: log.critical("Connection error: can't connect to IGDB") raise HTTPException( status_code=status.HTTP_503_SERVICE_UNAVAILABLE, detail="Can't connect to IGDB, check your internet connection.", - ) + ) from exc if res.status_code == 200: Path(screenshot_path).mkdir(parents=True, exist_ok=True) diff --git a/backend/handler/filesystem/roms_handler.py b/backend/handler/filesystem/roms_handler.py index 606fc049f..63169c4e7 100644 --- a/backend/handler/filesystem/roms_handler.py +++ b/backend/handler/filesystem/roms_handler.py @@ -130,8 +130,15 @@ class FSRomsHandler(FSHandler): ] def get_rom_file_size( - self, roms_path: str, file_name: str, multi: bool, multi_files: list = [] + self, + roms_path: str, + file_name: str, + multi: bool, + multi_files: list[str] | None = None, ): + if multi_files is None: + multi_files = [] + files = ( [f"{LIBRARY_BASE_PATH}/{roms_path}/{file_name}"] if not multi diff --git a/backend/handler/github_handler.py b/backend/handler/github_handler.py index b6e0f3cb6..f3de78cd7 100644 --- a/backend/handler/github_handler.py +++ b/backend/handler/github_handler.py @@ -1,4 +1,4 @@ -import subprocess as sp +import subprocess as sp # nosec B404 import requests from __version__ import __version__ @@ -18,7 +18,9 @@ class GithubHandler: else: try: output = str( - sp.check_output(["git", "branch"], universal_newlines=True) + sp.check_output( + ["git", "branch"], universal_newlines=True + ) # nosec B603, B607 ) except (sp.CalledProcessError, FileNotFoundError): return "1.0.0" diff --git a/backend/handler/metadata/igdb_handler.py b/backend/handler/metadata/igdb_handler.py index 7654e1e85..0c9165a4f 100644 --- a/backend/handler/metadata/igdb_handler.py +++ b/backend/handler/metadata/igdb_handler.py @@ -168,12 +168,12 @@ class IGDBBaseHandler(MetadataHandler): res.raise_for_status() return res.json() - except requests.exceptions.ConnectionError: + except requests.exceptions.ConnectionError as exc: log.critical("Connection error: can't connect to IGDB", exc_info=True) raise HTTPException( 
                 status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
                 detail="Can't connect to IGDB, check your internet connection",
-            )
+            ) from exc
         except HTTPError as err:
             # Retry once if the auth token is invalid
             if err.response.status_code != 401:
@@ -509,11 +509,11 @@ class IGDBBaseHandler(MetadataHandler):
 
 class TwitchAuth:
     def _update_twitch_token(self) -> str:
-        token = ""
+        token = None
         expires_in = 0
 
         if not IGDB_API_ENABLED:
-            return token
+            return ""
 
         try:
             res = requests.post(
@@ -528,16 +528,16 @@ class TwitchAuth:
 
             if res.status_code == 400:
                 log.critical("IGDB Error: Invalid IGDB_CLIENT_ID or IGDB_CLIENT_SECRET")
-                return token
+                return ""
             else:
                 token = res.json().get("access_token", "")
                 expires_in = res.json().get("expires_in", 0)
         except requests.exceptions.ConnectionError:
             log.critical("Can't connect to IGDB, check your internet connection.")
-            return token
+            return ""
 
         if not token or expires_in == 0:
-            return token
+            return ""
 
         # Set token in redis to expire in seconds
         cache.set("romm:twitch_token", token, ex=expires_in - 10)  # type: ignore[attr-defined]
diff --git a/backend/handler/metadata/moby_handler.py b/backend/handler/metadata/moby_handler.py
index a6f71c362..b72fdf479 100644
--- a/backend/handler/metadata/moby_handler.py
+++ b/backend/handler/metadata/moby_handler.py
@@ -81,12 +81,12 @@ class MobyGamesHandler(MetadataHandler):
             res = requests.get(authorized_url, timeout=timeout)
             res.raise_for_status()
             return res.json()
-        except requests.exceptions.ConnectionError:
+        except requests.exceptions.ConnectionError as exc:
             log.critical("Connection error: can't connect to Mobygames", exc_info=True)
             raise HTTPException(
                 status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
                 detail="Can't connect to Mobygames, check your internet connection",
-            )
+            ) from exc
         except HTTPError as err:
             if err.response.status_code == 401:
                 # Sometimes Mobygames returns 401 even with a valid API key
diff --git a/backend/handler/metadata/sgdb_handler.py b/backend/handler/metadata/sgdb_handler.py
index 43d4f7dee..c75ff29fb 100644
--- a/backend/handler/metadata/sgdb_handler.py
+++ b/backend/handler/metadata/sgdb_handler.py
@@ -14,7 +14,9 @@ class SGDBBaseHandler:
 
     def get_details(self, term):
         search_response = requests.get(
-            f"{self.BASE_URL}/search/autocomplete/{term}", headers=self.headers
+            f"{self.BASE_URL}/search/autocomplete/{term}",
+            headers=self.headers,
+            timeout=120,
         ).json()
 
         if len(search_response["data"]) == 0:
@@ -25,7 +27,7 @@ class SGDBBaseHandler:
         game_name = search_response["data"][0]["name"]
 
         game_response = requests.get(
-            f"{self.BASE_URL}/grid/game/{game_id}", headers=self.headers
+            f"{self.BASE_URL}/grid/game/{game_id}", headers=self.headers, timeout=120
         ).json()
 
         if len(game_response["data"]) == 0:
diff --git a/backend/handler/scan_handler.py b/backend/handler/scan_handler.py
index c5acea78a..341ff8b8d 100644
--- a/backend/handler/scan_handler.py
+++ b/backend/handler/scan_handler.py
@@ -150,8 +150,11 @@ async def scan_rom(
     rom_attrs: dict,
     scan_type: ScanType,
     rom: Rom | None = None,
-    metadata_sources: list[str] = ["igdb", "moby"],
+    metadata_sources: list[str] | None = None,
 ) -> Rom:
+    if not metadata_sources:
+        metadata_sources = ["igdb", "moby"]
+
     roms_path = fs_rom_handler.get_roms_fs_structure(platform.fs_slug)
 
     log.info(f"\t · {rom_attrs['file_name']}")
diff --git a/backend/models/tests/test_user.py b/backend/models/tests/test_user.py
index 9f8899a38..2ffed171d 100644
--- a/backend/models/tests/test_user.py
+++ b/backend/models/tests/test_user.py
@@ -1,13 +1,14 @@
 from handler.auth.base_handler import DEFAULT_SCOPES, FULL_SCOPES, WRITE_SCOPES
+from models.user import User
 
 
-def test_admin(admin_user):
-    admin_user.oauth_scopes == FULL_SCOPES
+def test_admin(admin_user: User):
+    assert admin_user.oauth_scopes == FULL_SCOPES
 
 
-def test_editor(editor_user):
-    editor_user.oauth_scopes == WRITE_SCOPES
+def test_editor(editor_user: User):
+    assert editor_user.oauth_scopes == WRITE_SCOPES
 
 
-def test_user(viewer_user):
-    viewer_user.oauth_scopes == DEFAULT_SCOPES
+def test_user(viewer_user: User):
+    assert viewer_user.oauth_scopes == DEFAULT_SCOPES
diff --git a/backend/tasks/tasks.py b/backend/tasks/tasks.py
index 16d46a1e2..a20e1ba30 100644
--- a/backend/tasks/tasks.py
+++ b/backend/tasks/tasks.py
@@ -84,7 +84,7 @@ class RemoteFilePullTask(PeriodicTask):
         log.info(f"Scheduled {self.description} started...")
 
         try:
-            response = requests.get(self.url)
+            response = requests.get(self.url, timeout=120)
             response.raise_for_status()
             return response.content
         except requests.exceptions.RequestException as e:
diff --git a/docker/Dockerfile b/docker/Dockerfile
index e0e1a73ab..2036651c3 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -17,7 +17,7 @@ RUN mkdir -p ${WEBSERVER_FOLDER}/assets/romm && \
     ln -s /romm/assets ${WEBSERVER_FOLDER}/assets/romm/assets
 
 # install generall required packages
-RUN apk add --upgrade \
+RUN apk add --no-cache --upgrade \
     bash \
     curl \
     libffi \
@@ -27,7 +27,7 @@ RUN apk add --upgrade \
     tzdata
 
 # Install additional build dependencies
-RUN apk add --upgrade \
+RUN apk add --no-cache --upgrade \
     gcc \
     libffi-dev \
     mariadb-connector-c-dev \
@@ -78,7 +78,7 @@ ENV REDIS_VERSION 7.2.4
 ENV REDIS_DOWNLOAD_URL http://download.redis.io/releases/redis-7.2.4.tar.gz
 ENV REDIS_DOWNLOAD_SHA 8d104c26a154b29fd67d6568b4f375212212ad41e0c2caa3d66480e78dbd3b59
 
-RUN wget -O redis.tar.gz "$REDIS_DOWNLOAD_URL"; \
+RUN wget --no-cache -O redis.tar.gz "$REDIS_DOWNLOAD_URL"; \
     echo "$REDIS_DOWNLOAD_SHA *redis.tar.gz" | sha256sum -c -; \
     mkdir -p /usr/src/redis; \
     tar -xzf redis.tar.gz -C /usr/src/redis --strip-components=1; \
diff --git a/docker/build_local_image.sh b/docker/build_local_image.sh
index 5c4b7b061..21805abd8 100755
--- a/docker/build_local_image.sh
+++ b/docker/build_local_image.sh
@@ -2,4 +2,4 @@
 branch_name="$(git symbolic-ref HEAD 2>/dev/null)"
 branch_name=${branch_name##refs/heads/}
 
-docker build -t rommapp/romm:local-${branch_name} . --file ./docker/Dockerfile
+docker build -t "rommapp/romm:local-${branch_name}" . --file ./docker/Dockerfile
diff --git a/docker/init_scripts/init b/docker/init_scripts/init
index 957e25419..c22f68271 100755
--- a/docker/init_scripts/init
+++ b/docker/init_scripts/init
@@ -6,6 +6,7 @@ set -o pipefail # treat errors in pipes as fatal
 shopt -s inherit_errexit # inherit errexit
 
 # use virtualenvs
+# shellcheck disable=SC1091
 source /backend/bin/activate
 
 # make it possible to disable the inotify watcher process
@@ -17,19 +18,19 @@ INIT_DEBUG="${INIT_DEBUG:="false"}"
 
 # print debug log output if enabled
 debug_log() {
-    if [ "${INIT_DEBUG}" == "true" ]; then
-        echo "DEBUG: [init][$(date +"%Y-%m-%d %T")]" "${@}"
+    if [[ ${INIT_DEBUG} == "true" ]]; then
+        echo "DEBUG: [init][$(date +"%Y-%m-%d %T")]" "${@}" || true
     fi
 }
 
 # print debug log output if enabled
 info_log() {
-    echo "INFO: [init][$(date +"%Y-%m-%d %T")]" "${@}"
+    echo "INFO: [init][$(date +"%Y-%m-%d %T")]" "${@}" || true
 }
 
 # print error log output if enabled
 error_log() {
-    echo "ERROR: [init][$(date +"%Y-%m-%d %T")]" "${@}"
+    echo "ERROR: [init][$(date +"%Y-%m-%d %T")]" "${@}" || true
     exit 1
 }
 
@@ -53,7 +54,7 @@ start_bin_gunicorn() {
 # Commands to start nginx (handling PID creation internally)
 start_bin_nginx() {
     info_log "starting nginx"
-    if [ "$EUID" -ne 0 ]; then
+    if [[ ${EUID} -ne 0 ]]; then
         nginx
     else
         # if container runs as root, drop permissions
@@ -65,13 +66,13 @@ start_bin_redis-server() {
     info_log "starting redis-server"
 
     # Check if /usr/local/etc/redis/redis.conf exists and use it if so
-    if [ -f /usr/local/etc/redis/redis.conf ]; then
+    if [[ -f /usr/local/etc/redis/redis.conf ]]; then
         redis-server /usr/local/etc/redis/redis.conf &
     else
         redis-server --dir /redis-data &
     fi
     REDIS_PID=$!
-    echo $REDIS_PID >/tmp/redis-server.pid
+    echo "${REDIS_PID}" >/tmp/redis-server.pid
 }
 
 # function that runs our independent python scripts and creates corresponding PID files,
@@ -80,27 +81,28 @@ start_python() {
     info_log "starting ${SCRIPT}.py"
     python3 "${SCRIPT}.py" &
     WATCHER_PID=$!
-    echo $WATCHER_PID >"/tmp/${SCRIPT}.pid"
+    echo "${WATCHER_PID}" >"/tmp/${SCRIPT}.pid"
 }
 
 watchdog_process_pid() {
     TYPE=$1
     PROCESS=$2
 
-    if [ -f "/tmp/${PROCESS}.pid" ]; then
+    if [[ -f "/tmp/${PROCESS}.pid" ]]; then
         # check if the pid we last wrote to our state file is actually active
-        if [ -d "/proc/$(cat "/tmp/${PROCESS}.pid")" ]; then
+        PID=$(cat "/tmp/${PROCESS}.pid") || true
+        if [[ -d "/proc/${PID}" ]]; then
             debug_log "${PROCESS} still running, no need to start"
         else
-            if [ "${TYPE}" == "bin" ]; then
+            if [[ ${TYPE} == "bin" ]]; then
                 start_bin_"${PROCESS}"
-            elif [ "${TYPE}" == "python" ]; then
+            elif [[ ${TYPE} == "python" ]]; then
                 start_python "${PROCESS}"
             fi
         fi
     else
-        if [ "${TYPE}" == "bin" ]; then
+        if [[ ${TYPE} == "bin" ]]; then
             start_bin_"${PROCESS}"
-        elif [ "${TYPE}" == "python" ]; then
+        elif [[ ${TYPE} == "python" ]]; then
             start_python "${PROCESS}"
         fi
     fi
diff --git a/examples/config.example.yml b/examples/config.example.yml
index c7ae859ab..0c4f2fa19 100644
--- a/examples/config.example.yml
+++ b/examples/config.example.yml
@@ -1,7 +1,7 @@
 exclude:
   # Exclude platforms to be scanned
   platforms:
-    - "romm"
+    - romm
 
   # Exclude roms or parts of roms to be scanned
   roms:
@@ -10,11 +10,11 @@ exclude:
     single_file:
       # Exclude all files with certain extensions to be scanned
       extensions:
-        - "xml"
+        - xml
       # Exclude matched file names to be scanned
       names:
-        - "info.txt"
-        - "._*" # Supports unix filename pattern matching
+        - info.txt
+        - ._* # Supports unix filename pattern matching
         - "*.nfo" # Can also exclude files by extension
 
     ## Multi files games section
@@ -22,30 +22,30 @@ exclude:
     multi_file:
       # Exclude matched 'folder' names to be scanned (RomM identifies folders as multi file games)
       names:
-        - "my_multi_file_game"
-        - "DLC"
+        - my_multi_file_game
+        - DLC
 
       # Exclude files within sub-folders.
       parts:
        # Exclude matched file names to be scanned from multi file roms
        # Keep in mind that RomM doesn't scan folders inside multi files games,
        # so there is no need to exclude folders from inside of multi files games.
        names:
-          - "data.xml"
-          - "._*" # Supports unix filename pattern matching
+          - data.xml
+          - ._* # Supports unix filename pattern matching
        # Exclude all files with certain extensions to be scanned from multi file roms
        extensions:
-          - "txt"
+          - txt
 
 system: # Asociate different platform names to your current file system platform names
   platforms: # [your custom platform folder name]: [RomM platform name]
-    gc: "ngc" # In this example if you have a 'gc' folder, RomM will treat it like the 'ngc' folder
-    psx: "ps" # In this example if you have a 'psx' folder, RomM will treat it like the 'ps' folder
+    gc: ngc # In this example if you have a 'gc' folder, RomM will treat it like the 'ngc' folder
+    psx: ps
 
   # Asociate one platform to it's main version
   versions:
-    naomi: "arcade"
+    naomi: arcade
 
 filesystem:
   # The folder name where your roms are located
-  roms_folder: "roms" # For example if your folder structure is /home/user/library/roms_folder
+  roms_folder: roms # For example if your folder structure is /home/user/library/roms_folder
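
Note on the default-argument changes above (scopes, selected_roms, metadata_sources, multi_files): they all apply the standard Python idiom for avoiding a single mutable default object shared across calls. A minimal sketch of the pattern, with illustrative names only, not RomM code:

    def scan(metadata_sources: list[str] | None = None) -> list[str]:
        # A literal list in the signature is built once at definition time and
        # reused by every call; a None default plus a runtime fallback is not.
        if not metadata_sources:
            metadata_sources = ["igdb", "moby"]
        return metadata_sources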
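
The exception-handling changes all add "raise ... from exc" chaining so the original error stays attached as __cause__ instead of being masked by the re-raise. A small sketch of the shape used in the endpoint handlers; HTTPException and status are real FastAPI imports, while the function name and path parameter are illustrative:

    from fastapi import HTTPException, status

    def read_config(path: str) -> str:
        try:
            with open(path) as config_file:
                return config_file.read()
        except PermissionError as exc:
            # Chaining keeps the original PermissionError traceback attached
            # to the 500 response that the caller eventually sees in the logs.
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(exc)
            ) from exc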
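
Dropping File(...) from the upload parameters removes a function call from a default-argument position (the B008-style lint finding); FastAPI can infer file uploads from the UploadFile annotation alone, and the endpoints then treat a missing list as empty. A hypothetical endpoint with the same signature shape, assuming a recent FastAPI version:

    from fastapi import FastAPI, UploadFile

    app = FastAPI()

    @app.post("/upload")
    def upload(files: list[UploadFile] | None = None) -> dict[str, int]:
        # Fall back to an empty list instead of requiring File(...) as a default.
        return {"received": len(files or [])}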