mirror of
https://github.com/rommapp/romm.git
synced 2026-02-18 00:27:41 +01:00
Merge branch 'fix/reduce-ram-usage' into feature/retroachievements
This commit is contained in:
@@ -2,7 +2,7 @@
|
||||
# To learn more about the format of this file, see https://docs.trunk.io/reference/trunk-yaml
|
||||
version: 0.1
|
||||
cli:
|
||||
version: 1.22.12
|
||||
version: 1.22.15
|
||||
# Trunk provides extensibility via plugins. (https://docs.trunk.io/plugins)
|
||||
plugins:
|
||||
sources:
|
||||
@@ -21,25 +21,25 @@ lint:
|
||||
- pyright
|
||||
enabled:
|
||||
- markdownlint@0.44.0
|
||||
- eslint@9.25.1
|
||||
- eslint@9.26.0
|
||||
- actionlint@1.7.7
|
||||
- bandit@1.8.3
|
||||
- black@25.1.0
|
||||
- checkov@3.2.408
|
||||
- checkov@3.2.416
|
||||
- git-diff-check
|
||||
- isort@6.0.1
|
||||
- mypy@1.15.0
|
||||
- osv-scanner@2.0.1
|
||||
- oxipng@9.1.4
|
||||
- osv-scanner@2.0.2
|
||||
- oxipng@9.1.5
|
||||
- prettier@3.5.3
|
||||
- ruff@0.11.6
|
||||
- ruff@0.11.8
|
||||
- shellcheck@0.10.0
|
||||
- shfmt@3.6.0
|
||||
- svgo@3.3.2
|
||||
- taplo@0.9.3
|
||||
- trivy@0.61.1
|
||||
- trufflehog@3.88.25
|
||||
- yamllint@1.37.0
|
||||
- trivy@0.62.1
|
||||
- trufflehog@3.88.29
|
||||
- yamllint@1.37.1
|
||||
ignore:
|
||||
- linters: [ALL]
|
||||
paths:
|
||||
|
||||
@@ -63,7 +63,7 @@ version_path_separator = os # Use os.pathsep. Default configuration used for ne
|
||||
sqlalchemy.url = ''
|
||||
|
||||
|
||||
[post_write_hooks]
|
||||
# [post_write_hooks]
|
||||
# post_write_hooks defines scripts or Python functions that are run
|
||||
# on newly generated revision scripts. See the documentation for further
|
||||
# detail and examples
|
||||
@@ -73,38 +73,3 @@ sqlalchemy.url = ''
|
||||
# black.type = console_scripts
|
||||
# black.entrypoint = black
|
||||
# black.options = -l 79 REVISION_SCRIPT_FILENAME
|
||||
|
||||
# Logging configuration
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)s: [RomM][%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
import sys
|
||||
from logging.config import fileConfig
|
||||
from pathlib import Path
|
||||
|
||||
from alembic import context
|
||||
from config.config_manager import ConfigManager
|
||||
from logger.logger import unify_logger
|
||||
from models.assets import Save, Screenshot, State # noqa
|
||||
from models.base import BaseModel
|
||||
from models.collection import VirtualCollection
|
||||
@@ -17,10 +17,7 @@ from sqlalchemy import create_engine
|
||||
# access to the values within the .ini file in use.
|
||||
config = context.config
|
||||
|
||||
# Interpret the config file for Python logging.
|
||||
# This line sets up loggers basically.
|
||||
if config.config_file_name is not None:
|
||||
fileConfig(config.config_file_name, disable_existing_loggers=False)
|
||||
unify_logger("alembic")
|
||||
|
||||
# add your model's MetaData object here
|
||||
# for 'autogenerate' support
|
||||
|
||||
@@ -19,6 +19,8 @@ from exceptions.config_exceptions import (
|
||||
ConfigNotReadableException,
|
||||
ConfigNotWritableException,
|
||||
)
|
||||
from logger.formatter import BLUE
|
||||
from logger.formatter import highlight as hl
|
||||
from logger.logger import log
|
||||
from sqlalchemy import URL
|
||||
from yaml.loader import SafeLoader
|
||||
@@ -83,12 +85,6 @@ class ConfigManager:
|
||||
str: database connection string
|
||||
"""
|
||||
|
||||
# DEPRECATED
|
||||
if ROMM_DB_DRIVER == "sqlite":
|
||||
log.critical("Sqlite is not supported anymore, migrate to mariaDB")
|
||||
sys.exit(6)
|
||||
# DEPRECATED
|
||||
|
||||
if ROMM_DB_DRIVER == "mariadb":
|
||||
driver = "mariadb+mariadbconnector"
|
||||
elif ROMM_DB_DRIVER == "mysql":
|
||||
@@ -96,7 +92,7 @@ class ConfigManager:
|
||||
elif ROMM_DB_DRIVER == "postgresql":
|
||||
driver = "postgresql+psycopg"
|
||||
else:
|
||||
log.critical(f"{ROMM_DB_DRIVER} database not supported")
|
||||
log.critical(f"{hl(ROMM_DB_DRIVER)} database not supported")
|
||||
sys.exit(3)
|
||||
|
||||
if not DB_USER or not DB_PASSWD:
|
||||
@@ -279,7 +275,7 @@ class ConfigManager:
|
||||
def add_platform_binding(self, fs_slug: str, slug: str) -> None:
|
||||
platform_bindings = self.config.PLATFORMS_BINDING
|
||||
if fs_slug in platform_bindings:
|
||||
log.warning(f"Binding for {fs_slug} already exists")
|
||||
log.warning(f"Binding for {hl(fs_slug)} already exists")
|
||||
return
|
||||
|
||||
platform_bindings[fs_slug] = slug
|
||||
@@ -300,7 +296,7 @@ class ConfigManager:
|
||||
def add_platform_version(self, fs_slug: str, slug: str) -> None:
|
||||
platform_versions = self.config.PLATFORMS_VERSIONS
|
||||
if fs_slug in platform_versions:
|
||||
log.warning(f"Version for {fs_slug} already exists")
|
||||
log.warning(f"Version for {hl(fs_slug)} already exists")
|
||||
return
|
||||
|
||||
platform_versions[fs_slug] = slug
|
||||
@@ -321,7 +317,9 @@ class ConfigManager:
|
||||
def add_exclusion(self, exclusion_type: str, exclusion_value: str):
|
||||
config_item = self.config.__getattribute__(exclusion_type)
|
||||
if exclusion_value in config_item:
|
||||
log.warning(f"{exclusion_value} already excluded in {exclusion_type}")
|
||||
log.warning(
|
||||
f"{hl(exclusion_value)} already excluded in {hl(exclusion_type, color=BLUE)}"
|
||||
)
|
||||
return
|
||||
|
||||
config_item.append(exclusion_value)
|
||||
|
||||
@@ -17,6 +17,8 @@ from handler.auth.constants import Scope
|
||||
from handler.database import db_collection_handler
|
||||
from handler.filesystem import fs_resource_handler
|
||||
from handler.filesystem.base_handler import CoverSize
|
||||
from logger.formatter import BLUE
|
||||
from logger.formatter import highlight as hl
|
||||
from logger.logger import log
|
||||
from models.collection import Collection
|
||||
from PIL import Image
|
||||
@@ -284,12 +286,14 @@ async def delete_collections(request: Request, id: int) -> MessageResponse:
|
||||
if not collection:
|
||||
raise CollectionNotFoundInDatabaseException(id)
|
||||
|
||||
log.info(f"Deleting {collection.name} from database")
|
||||
log.info(f"Deleting {hl(collection.name, color=BLUE)} from database")
|
||||
db_collection_handler.delete_collection(id)
|
||||
|
||||
try:
|
||||
rmtree(f"{RESOURCES_BASE_PATH}/{collection.fs_resources_path}")
|
||||
except FileNotFoundError:
|
||||
log.error(f"Couldn't find resources to delete for {collection.name}")
|
||||
log.error(
|
||||
f"Couldn't find resources to delete for {hl(collection.name, color=BLUE)}"
|
||||
)
|
||||
|
||||
return {"msg": f"{collection.name} deleted successfully!"}
|
||||
return {"msg": f"{hl(collection.name)} deleted successfully!"}
|
||||
|
||||
@@ -8,6 +8,8 @@ from handler.auth.constants import Scope
|
||||
from handler.database import db_firmware_handler, db_platform_handler
|
||||
from handler.filesystem import fs_firmware_handler
|
||||
from handler.scan_handler import scan_firmware
|
||||
from logger.formatter import BLUE
|
||||
from logger.formatter import highlight as hl
|
||||
from logger.logger import log
|
||||
from utils.router import APIRouter
|
||||
|
||||
@@ -43,8 +45,6 @@ def add_firmware(
|
||||
log.error(error)
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=error)
|
||||
|
||||
log.info(f"Uploading firmware to {db_platform.fs_slug}")
|
||||
|
||||
uploaded_firmware = []
|
||||
firmware_path = fs_firmware_handler.build_upload_file_path(db_platform.fs_slug)
|
||||
|
||||
@@ -53,6 +53,10 @@ def add_firmware(
|
||||
log.warning("Empty filename, skipping")
|
||||
continue
|
||||
|
||||
log.info(
|
||||
f"Uploading firmware {hl(file.filename)} to {hl(db_platform.custom_name or db_platform.name, color=BLUE)}"
|
||||
)
|
||||
|
||||
fs_firmware_handler.write_file(file=file, path=firmware_path)
|
||||
|
||||
db_firmware = db_firmware_handler.get_firmware_by_filename(
|
||||
@@ -217,21 +221,21 @@ async def delete_firmware(
|
||||
for id in firmare_ids:
|
||||
firmware = db_firmware_handler.get_firmware(id)
|
||||
if not firmware:
|
||||
error = f"Firmware with ID {id} not found"
|
||||
error = f"Firmware with ID {hl(id)} not found"
|
||||
log.error(error)
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=error)
|
||||
|
||||
log.info(f"Deleting {firmware.file_name} from database")
|
||||
log.info(f"Deleting {hl(firmware.file_name)} from database")
|
||||
db_firmware_handler.delete_firmware(id)
|
||||
|
||||
if id in delete_from_fs:
|
||||
log.info(f"Deleting {firmware.file_name} from filesystem")
|
||||
log.info(f"Deleting {hl(firmware.file_name)} from filesystem")
|
||||
try:
|
||||
fs_firmware_handler.remove_file(
|
||||
file_name=firmware.file_name, file_path=firmware.file_path
|
||||
)
|
||||
except FileNotFoundError as exc:
|
||||
error = f"Firmware file {firmware.file_name} not found for platform {firmware.platform_slug}"
|
||||
error = f"Firmware file {hl(firmware.file_name)} not found for platform {hl(firmware.platform_slug)}"
|
||||
log.error(error)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND, detail=error
|
||||
|
||||
@@ -11,6 +11,8 @@ from handler.database import db_platform_handler
|
||||
from handler.filesystem import fs_platform_handler
|
||||
from handler.metadata.igdb_handler import IGDB_PLATFORM_LIST
|
||||
from handler.scan_handler import scan_platform
|
||||
from logger.formatter import BLUE
|
||||
from logger.formatter import highlight as hl
|
||||
from logger.logger import log
|
||||
from utils.router import APIRouter
|
||||
|
||||
@@ -36,7 +38,7 @@ async def add_platforms(request: Request) -> PlatformSchema:
|
||||
try:
|
||||
fs_platform_handler.add_platforms(fs_slug=fs_slug)
|
||||
except PlatformAlreadyExistsException:
|
||||
log.info(f"Detected platform: {fs_slug}")
|
||||
log.info(f"Detected platform: {hl(fs_slug)}")
|
||||
scanned_platform = await scan_platform(fs_slug, [fs_slug])
|
||||
return PlatformSchema.model_validate(
|
||||
db_platform_handler.add_platform(scanned_platform)
|
||||
@@ -164,7 +166,9 @@ async def delete_platforms(request: Request, id: int) -> MessageResponse:
|
||||
if not platform:
|
||||
raise PlatformNotFoundInDatabaseException(id)
|
||||
|
||||
log.info(f"Deleting {platform.name} [{platform.fs_slug}] from database")
|
||||
log.info(
|
||||
f"Deleting {hl(platform.name, color=BLUE)} [{hl(platform.fs_slug)}] from database"
|
||||
)
|
||||
db_platform_handler.delete_platform(id)
|
||||
|
||||
return {"msg": f"{platform.name} - [{platform.fs_slug}] deleted successfully!"}
|
||||
|
||||
@@ -37,6 +37,7 @@ from handler.database.base_handler import sync_session
|
||||
from handler.filesystem import fs_resource_handler, fs_rom_handler
|
||||
from handler.filesystem.base_handler import CoverSize
|
||||
from handler.metadata import meta_igdb_handler, meta_moby_handler, meta_ss_handler
|
||||
from logger.formatter import BLUE
|
||||
from logger.formatter import highlight as hl
|
||||
from logger.logger import log
|
||||
from models.rom import RomFile
|
||||
@@ -85,7 +86,9 @@ async def add_rom(request: Request):
|
||||
|
||||
platform_fs_slug = db_platform.fs_slug
|
||||
roms_path = fs_rom_handler.build_upload_fs_path(platform_fs_slug)
|
||||
log.info(f"Uploading file to {platform_fs_slug}")
|
||||
log.info(
|
||||
f"Uploading file to {hl(db_platform.custom_name or db_platform.name, color=BLUE)}[{hl(platform_fs_slug)}]"
|
||||
)
|
||||
|
||||
file_location = Path(f"{roms_path}/{filename}")
|
||||
parser = StreamingFormDataParser(headers=request.headers)
|
||||
@@ -93,7 +96,7 @@ async def add_rom(request: Request):
|
||||
parser.register(filename, FileTarget(str(file_location)))
|
||||
|
||||
if await file_location.exists():
|
||||
log.warning(f" - Skipping {filename} since the file already exists")
|
||||
log.warning(f" - Skipping {hl(filename)} since the file already exists")
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail=f"File {filename} already exists",
|
||||
@@ -361,7 +364,9 @@ async def get_rom_content(
|
||||
files = [f for f in rom.files if f.id in file_ids or not file_ids]
|
||||
files.sort(key=lambda x: x.file_name)
|
||||
|
||||
log.info(f"User {current_username} is downloading {rom.fs_name}")
|
||||
log.info(
|
||||
f"User {hl(current_username, color=BLUE)} is downloading {hl(rom.fs_name)}"
|
||||
)
|
||||
|
||||
# Serve the file directly in development mode for emulatorjs
|
||||
if DEV_MODE:
|
||||
@@ -406,7 +411,7 @@ async def get_rom_content(
|
||||
zip_file.writestr(zip_info, content)
|
||||
|
||||
except FileNotFoundError:
|
||||
log.error(f"File {file_path} not found!")
|
||||
log.error(f"File {hl(file_path)} not found!")
|
||||
raise
|
||||
|
||||
# Add M3U file if not already present
|
||||
@@ -648,7 +653,7 @@ async def update_rom(
|
||||
cleaned_data.update({"path_manual": path_manual})
|
||||
|
||||
log.debug(
|
||||
f"Updating {hl(cleaned_data.get('name', ''))} [{id}] with data {cleaned_data}"
|
||||
f"Updating {hl(cleaned_data.get('name', ''), color=BLUE)} [{hl(cleaned_data.get('fs_name', ''))}] with data {cleaned_data}"
|
||||
)
|
||||
|
||||
db_rom_handler.update_rom(id, cleaned_data)
|
||||
@@ -706,7 +711,7 @@ async def add_rom_manuals(request: Request, id: int):
|
||||
|
||||
manuals_path = f"{RESOURCES_BASE_PATH}/{rom.fs_resources_path}/manual"
|
||||
file_location = Path(f"{manuals_path}/{rom.id}.pdf")
|
||||
log.info(f"Uploading {file_location}")
|
||||
log.info(f"Uploading {hl(file_location)}")
|
||||
|
||||
if not os.path.exists(manuals_path):
|
||||
await Path(manuals_path).mkdir(parents=True, exist_ok=True)
|
||||
@@ -769,16 +774,20 @@ async def delete_roms(
|
||||
if not rom:
|
||||
raise RomNotFoundInDatabaseException(id)
|
||||
|
||||
log.info(f"Deleting {rom.fs_name} from database")
|
||||
log.info(
|
||||
f"Deleting {hl(rom.name, color=BLUE)} [{hl(rom.fs_name)}] from database"
|
||||
)
|
||||
db_rom_handler.delete_rom(id)
|
||||
|
||||
try:
|
||||
rmtree(f"{RESOURCES_BASE_PATH}/{rom.fs_resources_path}")
|
||||
except FileNotFoundError:
|
||||
log.error(f"Couldn't find resources to delete for {rom.name}")
|
||||
log.error(
|
||||
f"Couldn't find resources to delete for {hl(rom.name, color=BLUE)}"
|
||||
)
|
||||
|
||||
if id in delete_from_fs:
|
||||
log.info(f"Deleting {rom.fs_name} from filesystem")
|
||||
log.info(f"Deleting {hl(rom.fs_name)} from filesystem")
|
||||
try:
|
||||
fs_rom_handler.remove_from_fs(fs_path=rom.fs_path, fs_name=rom.fs_name)
|
||||
except FileNotFoundError as exc:
|
||||
@@ -887,7 +896,7 @@ async def get_romfile_content(
|
||||
detail="File not found",
|
||||
)
|
||||
|
||||
log.info(f"User {current_username} is downloading {file_name}")
|
||||
log.info(f"User {hl(current_username, color=BLUE)} is downloading {hl(file_name)}")
|
||||
|
||||
# Serve the file directly in development mode for emulatorjs
|
||||
if DEV_MODE:
|
||||
|
||||
@@ -8,6 +8,8 @@ from handler.auth.constants import Scope
|
||||
from handler.database import db_rom_handler, db_save_handler, db_screenshot_handler
|
||||
from handler.filesystem import fs_asset_handler
|
||||
from handler.scan_handler import scan_save, scan_screenshot
|
||||
from logger.formatter import BLUE
|
||||
from logger.formatter import highlight as hl
|
||||
from logger.logger import log
|
||||
from utils.router import APIRouter
|
||||
|
||||
@@ -25,16 +27,6 @@ async def add_save(
|
||||
) -> SaveSchema:
|
||||
data = await request.form()
|
||||
|
||||
rom = db_rom_handler.get_rom(rom_id)
|
||||
if not rom:
|
||||
raise RomNotFoundInDatabaseException(rom_id)
|
||||
|
||||
log.info(f"Uploading save of {rom.name}")
|
||||
|
||||
saves_path = fs_asset_handler.build_saves_file_path(
|
||||
user=request.user, platform_fs_slug=rom.platform.fs_slug, emulator=emulator
|
||||
)
|
||||
|
||||
if "saveFile" not in data:
|
||||
log.error("No save file provided")
|
||||
raise HTTPException(
|
||||
@@ -42,12 +34,23 @@ async def add_save(
|
||||
)
|
||||
|
||||
saveFile: UploadFile = data["saveFile"] # type: ignore
|
||||
|
||||
if not saveFile.filename:
|
||||
log.error("Save file has no filename")
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST, detail="Save file has no filename"
|
||||
)
|
||||
|
||||
rom = db_rom_handler.get_rom(rom_id)
|
||||
if not rom:
|
||||
raise RomNotFoundInDatabaseException(rom_id)
|
||||
|
||||
log.info(f"Uploading save {hl(saveFile.filename)} for {hl(rom.name, color=BLUE)}")
|
||||
|
||||
saves_path = fs_asset_handler.build_saves_file_path(
|
||||
user=request.user, platform_fs_slug=rom.platform.fs_slug, emulator=emulator
|
||||
)
|
||||
|
||||
fs_asset_handler.write_file(file=saveFile, path=saves_path)
|
||||
|
||||
# Scan or update save
|
||||
@@ -95,9 +98,7 @@ async def add_save(
|
||||
else:
|
||||
scanned_screenshot.rom_id = rom.id
|
||||
scanned_screenshot.user_id = request.user.id
|
||||
db_screenshot = db_screenshot_handler.add_screenshot(
|
||||
screenshot=scanned_screenshot
|
||||
)
|
||||
db_screenshot_handler.add_screenshot(screenshot=scanned_screenshot)
|
||||
|
||||
# Set the last played time for the current user
|
||||
rom_user = db_rom_handler.get_rom_user(rom_id=rom.id, user_id=request.user.id)
|
||||
@@ -155,6 +156,35 @@ async def update_save(request: Request, id: int) -> SaveSchema:
|
||||
db_save.id, {"file_size_bytes": saveFile.size}
|
||||
)
|
||||
|
||||
screenshotFile: UploadFile | None = data.get("screenshotFile", None) # type: ignore
|
||||
if screenshotFile and screenshotFile.filename:
|
||||
screenshots_path = fs_asset_handler.build_screenshots_file_path(
|
||||
user=request.user, platform_fs_slug=db_save.rom.platform_slug
|
||||
)
|
||||
|
||||
fs_asset_handler.write_file(file=screenshotFile, path=screenshots_path)
|
||||
|
||||
# Scan or update screenshot
|
||||
scanned_screenshot = scan_screenshot(
|
||||
file_name=screenshotFile.filename,
|
||||
user=request.user,
|
||||
platform_fs_slug=db_save.rom.platform_slug,
|
||||
)
|
||||
db_screenshot = db_screenshot_handler.get_screenshot_by_filename(
|
||||
rom_id=db_save.rom.id,
|
||||
user_id=request.user.id,
|
||||
file_name=screenshotFile.filename,
|
||||
)
|
||||
if db_screenshot:
|
||||
db_screenshot = db_screenshot_handler.update_screenshot(
|
||||
db_screenshot.id,
|
||||
{"file_size_bytes": scanned_screenshot.file_size_bytes},
|
||||
)
|
||||
else:
|
||||
scanned_screenshot.rom_id = db_save.rom.id
|
||||
scanned_screenshot.user_id = request.user.id
|
||||
db_screenshot_handler.add_screenshot(screenshot=scanned_screenshot)
|
||||
|
||||
# Set the last played time for the current user
|
||||
rom_user = db_rom_handler.get_rom_user(db_save.rom_id, request.user.id)
|
||||
if not rom_user:
|
||||
@@ -186,7 +216,9 @@ async def delete_saves(request: Request) -> list[int]:
|
||||
|
||||
db_save_handler.delete_save(save_id)
|
||||
|
||||
log.info(f"Deleting {save.file_name} from filesystem")
|
||||
log.info(
|
||||
f"Deleting save {hl(save.file_name)} [{save.rom.platform_slug}] from filesystem"
|
||||
)
|
||||
try:
|
||||
fs_asset_handler.remove_file(
|
||||
file_name=save.file_name, file_path=save.file_path
|
||||
|
||||
@@ -6,6 +6,8 @@ from handler.auth.constants import Scope
|
||||
from handler.database import db_rom_handler, db_screenshot_handler
|
||||
from handler.filesystem import fs_asset_handler
|
||||
from handler.scan_handler import scan_screenshot
|
||||
from logger.formatter import BLUE
|
||||
from logger.formatter import highlight as hl
|
||||
from logger.logger import log
|
||||
from utils.router import APIRouter
|
||||
|
||||
@@ -27,7 +29,7 @@ async def add_screenshot(
|
||||
raise RomNotFoundInDatabaseException(rom_id)
|
||||
|
||||
current_user = request.user
|
||||
log.info(f"Uploading screenshots to {rom.name}")
|
||||
log.info(f"Uploading screenshots to {hl(rom.name, color=BLUE)}")
|
||||
|
||||
screenshots_path = fs_asset_handler.build_screenshots_file_path(
|
||||
user=request.user, platform_fs_slug=rom.platform_slug
|
||||
|
||||
@@ -18,6 +18,8 @@ from handler.metadata.moby_handler import MOBY_API_ENABLED, MobyGamesRom
|
||||
from handler.metadata.sgdb_handler import STEAMGRIDDB_API_ENABLED
|
||||
from handler.metadata.ss_handler import SS_API_ENABLED, SSRom
|
||||
from handler.scan_handler import _get_main_platform_igdb_id
|
||||
from logger.formatter import BLUE, CYAN
|
||||
from logger.formatter import highlight as hl
|
||||
from logger.logger import log
|
||||
from utils.router import APIRouter
|
||||
|
||||
@@ -68,8 +70,12 @@ async def search_rom(
|
||||
)
|
||||
matched_roms: list = []
|
||||
|
||||
log.info(f"Searching by {search_by.lower()}: {search_term}")
|
||||
log.info(emoji.emojize(f":video_game: {rom.platform_slug}: {rom.fs_name}"))
|
||||
log.info(f"Searching by {hl(search_by.lower(), color=CYAN)}:")
|
||||
log.info(
|
||||
emoji.emojize(
|
||||
f":video_game: {hl(rom.platform_display_name, color=BLUE)} [{rom.platform_fs_slug}]: {hl(search_term)}[{rom.fs_name}]"
|
||||
)
|
||||
)
|
||||
|
||||
igdb_matched_roms: list[IGDBRom] = []
|
||||
moby_matched_roms: list[MobyGamesRom] = []
|
||||
|
||||
@@ -34,7 +34,7 @@ from handler.scan_handler import (
|
||||
scan_rom,
|
||||
)
|
||||
from handler.socket_handler import socket_handler
|
||||
from logger.formatter import LIGHTYELLOW, RED
|
||||
from logger.formatter import BLUE, LIGHTYELLOW, RED
|
||||
from logger.formatter import highlight as hl
|
||||
from logger.logger import log
|
||||
from models.platform import Platform
|
||||
@@ -187,12 +187,14 @@ async def scan_platforms(
|
||||
if len(platform_list) == 0:
|
||||
log.warning(
|
||||
emoji.emojize(
|
||||
f"{hl(':warning:', color=LIGHTYELLOW)} No platforms found, verify that the folder structure is right and the volume is mounted correctly. \
|
||||
f"{hl(':warning:', color=LIGHTYELLOW)} No platforms found, verify that the folder structure is right and the volume is mounted correctly. \
|
||||
Check https://github.com/rommapp/romm?tab=readme-ov-file#folder-structure for more details."
|
||||
)
|
||||
)
|
||||
else:
|
||||
log.info(f"Found {len(platform_list)} platforms in the file system")
|
||||
log.info(
|
||||
f"Found {hl(str(len(platform_list)))} platforms in the file system"
|
||||
)
|
||||
|
||||
for platform_slug in platform_list:
|
||||
scan_stats += await _identify_platform(
|
||||
@@ -210,11 +212,11 @@ async def scan_platforms(
|
||||
if len(fs_platforms) > 0:
|
||||
purged_platforms = db_platform_handler.purge_platforms(fs_platforms)
|
||||
if len(purged_platforms) > 0:
|
||||
log.info("Purging platforms not found in the filesystem:")
|
||||
log.warning("Purging platforms not found in the filesystem:")
|
||||
for p in purged_platforms:
|
||||
log.info(f" - {p.slug}")
|
||||
log.warning(f" - {p.slug}")
|
||||
|
||||
log.info(emoji.emojize(":check_mark: Scan completed "))
|
||||
log.info(emoji.emojize(":check_mark: Scan completed "))
|
||||
await sm.emit("scan:done", scan_stats.__dict__)
|
||||
except ScanStoppedException:
|
||||
await stop_scan()
|
||||
@@ -279,11 +281,11 @@ async def _identify_platform(
|
||||
if len(fs_firmware) == 0:
|
||||
log.warning(
|
||||
emoji.emojize(
|
||||
f" {hl(':warning:', color=LIGHTYELLOW)} No firmware found, skipping firmware scan for this platform"
|
||||
f"{hl(':warning:', color=LIGHTYELLOW)} No firmware found for {hl(platform.custom_name or platform.name, color=BLUE)}[{hl(platform.fs_slug)}]"
|
||||
)
|
||||
)
|
||||
else:
|
||||
log.info(f" {len(fs_firmware)} firmware files found")
|
||||
log.info(f"{hl(str(len(fs_firmware)))} firmware files found")
|
||||
|
||||
for fs_fw in fs_firmware:
|
||||
scan_stats += await _identify_firmware(
|
||||
@@ -301,11 +303,11 @@ async def _identify_platform(
|
||||
if len(fs_roms) == 0:
|
||||
log.warning(
|
||||
emoji.emojize(
|
||||
f" {hl(':warning:', color=LIGHTYELLOW)} No roms found, verify that the folder structure is correct"
|
||||
f"{hl(':warning:', color=LIGHTYELLOW)} No roms found, verify that the folder structure is correct"
|
||||
)
|
||||
)
|
||||
else:
|
||||
log.info(f" {len(fs_roms)} roms found in the file system")
|
||||
log.info(f"{hl(str(len(fs_roms)))} roms found in the file system")
|
||||
|
||||
for fs_roms_batch in batched(fs_roms, 200):
|
||||
rom_by_filename_map = db_rom_handler.get_roms_by_fs_name(
|
||||
@@ -332,9 +334,9 @@ async def _identify_platform(
|
||||
platform.id, [rom["fs_name"] for rom in fs_roms]
|
||||
)
|
||||
if len(purged_roms) > 0:
|
||||
log.info("Purging roms not found in the filesystem:")
|
||||
log.warning("Purging roms not found in the filesystem:")
|
||||
for r in purged_roms:
|
||||
log.info(f" - {r.fs_name}")
|
||||
log.warning(f" - {r.fs_name}")
|
||||
|
||||
# Same protection for firmware
|
||||
if len(fs_firmware) > 0:
|
||||
@@ -342,9 +344,9 @@ async def _identify_platform(
|
||||
platform.id, [fw for fw in fs_firmware]
|
||||
)
|
||||
if len(purged_firmware) > 0:
|
||||
log.info("Purging firmware not found in the filesystem:")
|
||||
log.warning("Purging firmware not found in the filesystem:")
|
||||
for f in purged_firmware:
|
||||
log.info(f" - {f}")
|
||||
log.warning(f" - {f}")
|
||||
|
||||
return scan_stats
|
||||
|
||||
@@ -391,7 +393,7 @@ def _set_rom_hashes(rom_id: int) -> str:
|
||||
except zlib.error as e:
|
||||
# Set empty hashes if calculating them fails for corrupted files
|
||||
log.error(
|
||||
f"Hashes of {rom.fs_name} couldn't be calculated: {hl(str(e), color=RED)}"
|
||||
f"Hashes of {hl(rom.fs_name)} couldn't be calculated: {hl(str(e), color=RED)}"
|
||||
)
|
||||
db_rom_handler.update_rom(
|
||||
rom_id,
|
||||
@@ -577,7 +579,9 @@ async def scan_handler(_sid: str, options: dict):
|
||||
Args:
|
||||
options (dict): Socket options
|
||||
"""
|
||||
log.info(emoji.emojize(":magnifying_glass_tilted_right: Scanning "))
|
||||
|
||||
log.info(emoji.emojize(":magnifying_glass_tilted_right: Scanning"))
|
||||
|
||||
platform_ids = options.get("platforms", [])
|
||||
scan_type = ScanType[options.get("type", "quick").upper()]
|
||||
roms_ids = options.get("roms_ids", [])
|
||||
|
||||
@@ -8,6 +8,8 @@ from handler.auth.constants import Scope
|
||||
from handler.database import db_rom_handler, db_screenshot_handler, db_state_handler
|
||||
from handler.filesystem import fs_asset_handler
|
||||
from handler.scan_handler import scan_screenshot, scan_state
|
||||
from logger.formatter import BLUE
|
||||
from logger.formatter import highlight as hl
|
||||
from logger.logger import log
|
||||
from utils.router import APIRouter
|
||||
|
||||
@@ -25,16 +27,6 @@ async def add_state(
|
||||
) -> StateSchema:
|
||||
data = await request.form()
|
||||
|
||||
rom = db_rom_handler.get_rom(rom_id)
|
||||
if not rom:
|
||||
raise RomNotFoundInDatabaseException(rom_id)
|
||||
|
||||
log.info(f"Uploading state of {rom.name}")
|
||||
|
||||
states_path = fs_asset_handler.build_states_file_path(
|
||||
user=request.user, platform_fs_slug=rom.platform.fs_slug, emulator=emulator
|
||||
)
|
||||
|
||||
if "stateFile" not in data:
|
||||
log.error("No state file provided")
|
||||
raise HTTPException(
|
||||
@@ -42,12 +34,23 @@ async def add_state(
|
||||
)
|
||||
|
||||
stateFile: UploadFile = data["stateFile"] # type: ignore
|
||||
|
||||
if not stateFile.filename:
|
||||
log.error("State file has no filename")
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST, detail="State file has no filename"
|
||||
)
|
||||
|
||||
rom = db_rom_handler.get_rom(rom_id)
|
||||
if not rom:
|
||||
raise RomNotFoundInDatabaseException(rom_id)
|
||||
|
||||
log.info(f"Uploading state {hl(stateFile.filename)} for {hl(rom.name, color=BLUE)}")
|
||||
|
||||
states_path = fs_asset_handler.build_states_file_path(
|
||||
user=request.user, platform_fs_slug=rom.platform.fs_slug, emulator=emulator
|
||||
)
|
||||
|
||||
fs_asset_handler.write_file(file=stateFile, path=states_path)
|
||||
|
||||
# Scan or update state
|
||||
@@ -155,6 +158,37 @@ async def update_state(request: Request, id: int) -> StateSchema:
|
||||
db_state.id, {"file_size_bytes": stateFile.size}
|
||||
)
|
||||
|
||||
screenshotFile: UploadFile | None = data.get("screenshotFile", None) # type: ignore
|
||||
if screenshotFile and screenshotFile.filename:
|
||||
screenshots_path = fs_asset_handler.build_screenshots_file_path(
|
||||
user=request.user, platform_fs_slug=db_state.rom.platform_slug
|
||||
)
|
||||
|
||||
fs_asset_handler.write_file(file=screenshotFile, path=screenshots_path)
|
||||
|
||||
# Scan or update screenshot
|
||||
scanned_screenshot = scan_screenshot(
|
||||
file_name=screenshotFile.filename,
|
||||
user=request.user,
|
||||
platform_fs_slug=db_state.rom.platform_slug,
|
||||
)
|
||||
db_screenshot = db_screenshot_handler.get_screenshot_by_filename(
|
||||
rom_id=db_state.rom.id,
|
||||
user_id=request.user.id,
|
||||
file_name=screenshotFile.filename,
|
||||
)
|
||||
if db_screenshot:
|
||||
db_screenshot = db_screenshot_handler.update_screenshot(
|
||||
db_screenshot.id,
|
||||
{"file_size_bytes": scanned_screenshot.file_size_bytes},
|
||||
)
|
||||
else:
|
||||
scanned_screenshot.rom_id = db_state.rom.id
|
||||
scanned_screenshot.user_id = request.user.id
|
||||
db_screenshot = db_screenshot_handler.add_screenshot(
|
||||
screenshot=scanned_screenshot
|
||||
)
|
||||
|
||||
# Set the last played time for the current user
|
||||
rom_user = db_rom_handler.get_rom_user(db_state.rom_id, request.user.id)
|
||||
if not rom_user:
|
||||
@@ -185,7 +219,9 @@ async def delete_states(request: Request) -> list[int]:
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=error)
|
||||
|
||||
db_state_handler.delete_state(state_id)
|
||||
log.info(f"Deleting {state.file_name} from filesystem")
|
||||
log.info(
|
||||
f"Deleting state {hl(state.file_name)} [{state.rom.platform_slug}] from filesystem"
|
||||
)
|
||||
|
||||
try:
|
||||
fs_asset_handler.remove_file(
|
||||
|
||||
@@ -10,6 +10,8 @@ from handler.auth.constants import ALGORITHM, DEFAULT_OAUTH_TOKEN_EXPIRY
|
||||
from joserfc import jwt
|
||||
from joserfc.errors import BadSignatureError
|
||||
from joserfc.jwk import OctKey
|
||||
from logger.formatter import CYAN
|
||||
from logger.formatter import highlight as hl
|
||||
from logger.logger import log
|
||||
from passlib.context import CryptContext
|
||||
from starlette.requests import HTTPConnection
|
||||
@@ -54,7 +56,7 @@ class AuthHandler:
|
||||
conn.session.clear()
|
||||
log.error(
|
||||
"User '%s' %s",
|
||||
username,
|
||||
hl(username, color=CYAN),
|
||||
"not found" if user is None else "not enabled",
|
||||
)
|
||||
return None
|
||||
@@ -148,7 +150,10 @@ class OpenIDHandler:
|
||||
|
||||
user = db_user_handler.get_user_by_email(email)
|
||||
if user is None:
|
||||
log.info("User with email '%s' not found, creating new user", email)
|
||||
log.info(
|
||||
"User with email '%s' not found, creating new user",
|
||||
hl(email, color=CYAN),
|
||||
)
|
||||
user = User(
|
||||
username=preferred_username,
|
||||
hashed_password=str(uuid.uuid4()),
|
||||
@@ -161,5 +166,5 @@ class OpenIDHandler:
|
||||
if not user.enabled:
|
||||
raise UserDisabledException
|
||||
|
||||
log.info("User successfully authenticated: %s", email)
|
||||
log.info("User successfully authenticated: %s", hl(email, color=CYAN))
|
||||
return user, userinfo
|
||||
|
||||
@@ -26,7 +26,6 @@ class FSAssetsHandler(FSHandler):
|
||||
return
|
||||
|
||||
Path(os.path.join(ASSETS_BASE_PATH, path)).mkdir(parents=True, exist_ok=True)
|
||||
log.info(f" - Uploading {file.filename}")
|
||||
file_location = os.path.join(ASSETS_BASE_PATH, path, file.filename)
|
||||
|
||||
with open(file_location, "wb") as f:
|
||||
|
||||
@@ -90,7 +90,6 @@ class FSFirmwareHandler(FSHandler):
|
||||
return
|
||||
|
||||
Path(path).mkdir(parents=True, exist_ok=True)
|
||||
log.info(f" - Uploading {file.filename}")
|
||||
file_location = os.path.join(path, file.filename)
|
||||
|
||||
with open(file_location, "wb") as f:
|
||||
|
||||
@@ -3,6 +3,8 @@ import shutil
|
||||
import httpx
|
||||
from anyio import Path, open_file
|
||||
from config import RESOURCES_BASE_PATH
|
||||
from logger.formatter import BLUE
|
||||
from logger.formatter import highlight as hl
|
||||
from logger.logger import log
|
||||
from models.collection import Collection
|
||||
from models.rom import Rom
|
||||
@@ -141,7 +143,7 @@ class FSResourcesHandler(FSHandler):
|
||||
shutil.rmtree(cover_path)
|
||||
except FileNotFoundError:
|
||||
log.warning(
|
||||
f"Couldn't remove cover from '{entity.name or entity.id}' since '{cover_path}' doesn't exists."
|
||||
f"Couldn't remove cover from '{hl(entity.name or entity.id, color=BLUE)}' since '{cover_path}' doesn't exists."
|
||||
)
|
||||
|
||||
return {"path_cover_s": "", "path_cover_l": ""}
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import os
|
||||
import sys
|
||||
from enum import Enum
|
||||
|
||||
@@ -28,10 +29,11 @@ def __get_sync_cache() -> Redis:
|
||||
|
||||
return FakeRedis(version=7)
|
||||
|
||||
log.info(f"Connecting to sync redis in {sys.argv[0]}...")
|
||||
# A separate client that auto-decodes responses is needed
|
||||
client = Redis.from_url(str(REDIS_URL), decode_responses=True)
|
||||
log.info(f"Redis sync connection established in {sys.argv[0]}!")
|
||||
log.debug(
|
||||
f"Sync redis/valkey connection established in {os.path.splitext(os.path.basename(sys.argv[0]))[0]}"
|
||||
)
|
||||
return client
|
||||
|
||||
|
||||
@@ -42,10 +44,11 @@ def __get_async_cache() -> AsyncRedis:
|
||||
|
||||
return FakeAsyncRedis(version=7)
|
||||
|
||||
log.info(f"Connecting to async redis in {sys.argv[0]}...")
|
||||
# A separate client that auto-decodes responses is needed
|
||||
client = AsyncRedis.from_url(str(REDIS_URL), decode_responses=True)
|
||||
log.info(f"Redis async connection established in {sys.argv[0]}!")
|
||||
log.debug(
|
||||
f"Async redis/valkey connection established in {os.path.splitext(os.path.basename(sys.argv[0]))[0]}"
|
||||
)
|
||||
return client
|
||||
|
||||
|
||||
|
||||
@@ -17,7 +17,7 @@ from handler.metadata.igdb_handler import IGDBPlatform, IGDBRom
|
||||
from handler.metadata.moby_handler import MobyGamesPlatform, MobyGamesRom
|
||||
from handler.metadata.ra_handler import RAGameRom, RAGamesPlatform
|
||||
from handler.metadata.ss_handler import SSPlatform, SSRom
|
||||
from logger.formatter import BLUE
|
||||
from logger.formatter import BLUE, LIGHTYELLOW
|
||||
from logger.formatter import highlight as hl
|
||||
from logger.logger import log
|
||||
from models.assets import Save, Screenshot, State
|
||||
@@ -26,6 +26,8 @@ from models.platform import Platform
|
||||
from models.rom import Rom
|
||||
from models.user import User
|
||||
|
||||
LOGGER_MODULE_NAME = {"module_name": "scan"}
|
||||
|
||||
|
||||
class ScanType(Enum):
|
||||
NEW_PLATFORMS = "new_platforms"
|
||||
@@ -88,8 +90,6 @@ async def scan_platform(
|
||||
Platform object
|
||||
"""
|
||||
|
||||
log.info(f"· {hl(fs_slug)}")
|
||||
|
||||
if metadata_sources is None:
|
||||
metadata_sources = [
|
||||
MetadataSource.IGDB,
|
||||
@@ -108,7 +108,8 @@ async def scan_platform(
|
||||
# Sometimes users change the name of the folder, so we try to match it with the config
|
||||
if fs_slug not in fs_platforms:
|
||||
log.warning(
|
||||
f" {fs_slug} not found in file system, trying to match via config..."
|
||||
f"{hl(fs_slug)} not found in file system, trying to match via config",
|
||||
extra=LOGGER_MODULE_NAME,
|
||||
)
|
||||
if fs_slug in swapped_platform_bindings.keys():
|
||||
platform = db_platform_handler.get_platform_by_fs_slug(fs_slug)
|
||||
@@ -162,14 +163,16 @@ async def scan_platform(
|
||||
):
|
||||
log.info(
|
||||
emoji.emojize(
|
||||
f" Identified as {hl(platform_attrs['name'], color=BLUE)} :video_game:"
|
||||
)
|
||||
f"Folder {hl(platform_attrs['fs_slug'])}[{hl(fs_slug)}] identified as {hl(platform_attrs['name'], color=BLUE)} :video_game:"
|
||||
),
|
||||
extra={"module_name": "scan"},
|
||||
)
|
||||
else:
|
||||
log.warning(
|
||||
emoji.emojize(
|
||||
f" Platform {platform_attrs['slug']} not identified :cross_mark:"
|
||||
)
|
||||
f"Platform {hl(platform_attrs['slug'])} not identified :cross_mark:"
|
||||
),
|
||||
extra=LOGGER_MODULE_NAME,
|
||||
)
|
||||
|
||||
return Platform(**platform_attrs)
|
||||
@@ -182,8 +185,6 @@ def scan_firmware(
|
||||
) -> Firmware:
|
||||
firmware_path = fs_firmware_handler.get_firmware_fs_structure(platform.fs_slug)
|
||||
|
||||
log.info(f"\t · {file_name}")
|
||||
|
||||
# Set default properties
|
||||
firmware_attrs = {
|
||||
"id": firmware.id if firmware else None,
|
||||
@@ -237,12 +238,6 @@ async def scan_rom(
|
||||
|
||||
roms_path = fs_rom_handler.get_roms_fs_structure(platform.fs_slug)
|
||||
|
||||
log.info(f"\t · {hl(fs_rom['fs_name'])}")
|
||||
|
||||
if fs_rom.get("multi", False):
|
||||
for file in fs_rom["files"]:
|
||||
log.info(f"\t\t · {file.file_name}")
|
||||
|
||||
# Set default properties
|
||||
rom_attrs = {
|
||||
"id": rom.id if rom else None,
|
||||
@@ -384,20 +379,28 @@ async def scan_rom(
|
||||
and not ss_handler_rom.get("ss_id")
|
||||
):
|
||||
log.warning(
|
||||
emoji.emojize(
|
||||
f"\t Rom {rom_attrs['fs_name']} not identified :cross_mark:"
|
||||
)
|
||||
emoji.emojize(f"{hl(rom_attrs['fs_name'])} not identified :cross_mark:"),
|
||||
extra=LOGGER_MODULE_NAME,
|
||||
)
|
||||
return Rom(**rom_attrs)
|
||||
|
||||
log.info(emoji.emojize(f"\t Identified as {rom_attrs['name']} :alien_monster:"))
|
||||
log.info(
|
||||
emoji.emojize(
|
||||
f"{hl(rom_attrs['fs_name'])} identified as {hl(rom_attrs['name'], color=BLUE)} :alien_monster:"
|
||||
),
|
||||
extra=LOGGER_MODULE_NAME,
|
||||
)
|
||||
if fs_rom.get("multi", False):
|
||||
for file in fs_rom["files"]:
|
||||
log.info(
|
||||
f"\t · {hl(file.file_name, color=LIGHTYELLOW)}",
|
||||
extra=LOGGER_MODULE_NAME,
|
||||
)
|
||||
|
||||
return Rom(**rom_attrs)
|
||||
|
||||
|
||||
def _scan_asset(file_name: str, path: str):
|
||||
log.info(f"\t\t · {file_name}")
|
||||
|
||||
file_size = fs_asset_handler.get_asset_size(file_name=file_name, asset_path=path)
|
||||
|
||||
return {
|
||||
|
||||
@@ -1,15 +1,19 @@
|
||||
import logging
|
||||
import os
|
||||
from pprint import pformat
|
||||
|
||||
from colorama import Fore, Style, init
|
||||
from config import FORCE_COLOR, NO_COLOR
|
||||
|
||||
RED = Fore.RED
|
||||
LIGHTRED = Fore.LIGHTRED_EX
|
||||
GREEN = Fore.GREEN
|
||||
LIGHTYELLOW = Fore.LIGHTYELLOW_EX
|
||||
YELLOW = Fore.YELLOW
|
||||
BLUE = Fore.BLUE
|
||||
CYAN = Fore.CYAN
|
||||
LIGHTMAGENTA = Fore.LIGHTMAGENTA_EX
|
||||
RESET = Fore.RESET
|
||||
RESET_ALL = Style.RESET_ALL
|
||||
|
||||
|
||||
def should_strip_ansi() -> bool:
|
||||
@@ -19,9 +23,8 @@ def should_strip_ansi() -> bool:
|
||||
return False
|
||||
if NO_COLOR:
|
||||
return True
|
||||
|
||||
# For other environments, strip colors if not a TTY
|
||||
return not os.isatty(1)
|
||||
# Default: do not strip (Docker will handle colors)
|
||||
return False
|
||||
|
||||
|
||||
# Initialize Colorama once, considering different environments
|
||||
@@ -44,40 +47,40 @@ class Formatter(logging.Formatter):
|
||||
The formatted log record as a string.
|
||||
"""
|
||||
level = "%(levelname)s"
|
||||
dots = f"{Fore.RESET}:"
|
||||
dots = f"{RESET}:"
|
||||
identifier = (
|
||||
f"\t {Fore.BLUE}[RomM]{Fore.LIGHTMAGENTA_EX}[{record.module.lower()}]"
|
||||
f"\t {BLUE}[RomM]{LIGHTMAGENTA}[{record.module_name.lower()}]"
|
||||
if hasattr(record, "module_name")
|
||||
else f"\t {Fore.BLUE}[RomM]{Fore.LIGHTMAGENTA_EX}[%(module)s]"
|
||||
else f"\t {BLUE}[RomM]{LIGHTMAGENTA}[%(module)s]"
|
||||
)
|
||||
identifier_warning = (
|
||||
f" {Fore.BLUE}[RomM]{Fore.LIGHTMAGENTA_EX}[{record.module.lower()}]"
|
||||
f" {BLUE}[RomM]{LIGHTMAGENTA}[{record.module_name.lower()}]"
|
||||
if hasattr(record, "module_name")
|
||||
else f" {Fore.BLUE}[RomM]{Fore.LIGHTMAGENTA_EX}[%(module)s]"
|
||||
else f" {BLUE}[RomM]{LIGHTMAGENTA}[%(module)s]"
|
||||
)
|
||||
identifier_critical = (
|
||||
f" {Fore.BLUE}[RomM]{Fore.LIGHTMAGENTA_EX}[{record.module.lower()}]"
|
||||
f" {BLUE}[RomM]{LIGHTMAGENTA}[{record.module_name.lower()}]"
|
||||
if hasattr(record, "module_name")
|
||||
else f" {Fore.BLUE}[RomM]{Fore.LIGHTMAGENTA_EX}[%(module)s]"
|
||||
else f" {BLUE}[RomM]{LIGHTMAGENTA}[%(module)s]"
|
||||
)
|
||||
msg = f"{Style.RESET_ALL}%(message)s"
|
||||
msg = f"{RESET_ALL}%(message)s"
|
||||
|
||||
message = pformat(record.msg) if hasattr(record, "pprint") else "%(message)s"
|
||||
msg = f"{Style.RESET_ALL}{message}"
|
||||
date = f"{Fore.CYAN}[%(asctime)s] "
|
||||
msg = f"{RESET_ALL}{message}"
|
||||
date = f"{CYAN}[%(asctime)s] "
|
||||
formats = {
|
||||
logging.DEBUG: f"{Fore.LIGHTMAGENTA_EX}{level}{dots}{identifier}{date}{msg}",
|
||||
logging.INFO: f"{Fore.GREEN}{level}{dots}{identifier}{date}{msg}",
|
||||
logging.WARNING: f"{Fore.YELLOW}{level}{dots}{identifier_warning}{date}{msg}",
|
||||
logging.ERROR: f"{Fore.LIGHTRED_EX}{level}{dots}{identifier}{date}{msg}",
|
||||
logging.CRITICAL: f"{Fore.RED}{level}{dots}{identifier_critical}{date}{msg}",
|
||||
logging.DEBUG: f"{LIGHTMAGENTA}{level}{dots}{identifier}{date}{msg}",
|
||||
logging.INFO: f"{GREEN}{level}{dots}{identifier}{date}{msg}",
|
||||
logging.WARNING: f"{YELLOW}{level}{dots}{identifier_warning}{date}{msg}",
|
||||
logging.ERROR: f"{LIGHTRED}{level}{dots}{identifier}{date}{msg}",
|
||||
logging.CRITICAL: f"{RED}{level}{dots}{identifier_critical}{date}{msg}",
|
||||
}
|
||||
log_fmt = formats.get(record.levelno)
|
||||
formatter = logging.Formatter(fmt=log_fmt, datefmt="%Y-%m-%d %H:%M:%S")
|
||||
return formatter.format(record)
|
||||
|
||||
|
||||
def highlight(msg: str = "", color=Fore.YELLOW) -> str:
|
||||
def highlight(msg: str = "", color=YELLOW) -> str:
|
||||
"""
|
||||
Highlights the message to send to the fancylog.
|
||||
|
||||
@@ -88,4 +91,4 @@ def highlight(msg: str = "", color=Fore.YELLOW) -> str:
|
||||
Returns:
|
||||
The highlighted message as a string.
|
||||
"""
|
||||
return f"{color}{msg}{Style.RESET_ALL}"
|
||||
return f"{color}{msg}{RESET_ALL}"
|
||||
|
||||
@@ -5,18 +5,32 @@ from config import LOGLEVEL
|
||||
from logger.formatter import Formatter
|
||||
|
||||
# Set up logger
|
||||
log = logging.getLogger("romm")
|
||||
log = logging.getLogger()
|
||||
log.setLevel(LOGLEVEL)
|
||||
|
||||
# Set up sqlachemy logger
|
||||
# sql_log = logging.getLogger("sqlalchemy.engine")
|
||||
# sql_log.setLevel(LOGLEVEL)
|
||||
|
||||
# Define stdout handler
|
||||
stdout_handler = logging.StreamHandler(sys.stdout)
|
||||
stdout_handler.setFormatter(Formatter())
|
||||
log.addHandler(stdout_handler)
|
||||
# sql_log.addHandler(stdout_handler)
|
||||
if not log.hasHandlers():
|
||||
stdout_handler = logging.StreamHandler(sys.stdout)
|
||||
stdout_handler.setFormatter(Formatter())
|
||||
log.addHandler(stdout_handler)
|
||||
|
||||
# Hush passlib warnings
|
||||
logging.getLogger("passlib").setLevel(logging.ERROR)
|
||||
|
||||
|
||||
def unify_logger(logger: str) -> None:
|
||||
"""
|
||||
Unify the logger to use the same format and level as the main logger.
|
||||
|
||||
Args:
|
||||
logger (str): The name of the logger to unify.
|
||||
"""
|
||||
alembic_logger = logging.getLogger(logger)
|
||||
alembic_logger.setLevel(log.level)
|
||||
|
||||
if not alembic_logger.hasHandlers():
|
||||
for handler in log.handlers:
|
||||
alembic_logger.addHandler(handler)
|
||||
else:
|
||||
for handler in alembic_logger.handlers:
|
||||
handler.setFormatter(log.handlers[0].formatter)
|
||||
|
||||
@@ -1,22 +1,24 @@
|
||||
import sentry_sdk
|
||||
from config import SENTRY_DSN
|
||||
from config import (
|
||||
ENABLE_SCHEDULED_RESCAN,
|
||||
ENABLE_SCHEDULED_UPDATE_SWITCH_TITLEDB,
|
||||
SENTRY_DSN,
|
||||
)
|
||||
from logger.logger import log
|
||||
from tasks.scan_library import scan_library_task
|
||||
from tasks.tasks import tasks_scheduler
|
||||
from tasks.update_switch_titledb import update_switch_titledb_task
|
||||
from utils import get_version
|
||||
|
||||
sentry_sdk.init(
|
||||
dsn=SENTRY_DSN,
|
||||
release="romm@" + get_version(),
|
||||
)
|
||||
sentry_sdk.init(dsn=SENTRY_DSN, release=f"romm@{get_version()}")
|
||||
|
||||
if __name__ == "__main__":
|
||||
# Initialize the tasks
|
||||
scan_library_task.init()
|
||||
update_switch_titledb_task.init()
|
||||
|
||||
log.info("Starting scheduler")
|
||||
|
||||
if ENABLE_SCHEDULED_RESCAN:
|
||||
log.info("Starting scheduled rescan")
|
||||
scan_library_task.init()
|
||||
if ENABLE_SCHEDULED_UPDATE_SWITCH_TITLEDB:
|
||||
log.info("Starting scheduled update switch titledb")
|
||||
update_switch_titledb_task.init()
|
||||
# Start the scheduler
|
||||
tasks_scheduler.run()
|
||||
|
||||
@@ -12,6 +12,8 @@ from config.config_manager import config_manager as cm
|
||||
from endpoints.sockets.scan import scan_platforms
|
||||
from handler.database import db_platform_handler
|
||||
from handler.scan_handler import ScanType
|
||||
from logger.formatter import CYAN
|
||||
from logger.formatter import highlight as hl
|
||||
from logger.logger import log
|
||||
from rq.job import Job
|
||||
from tasks.tasks import tasks_scheduler
|
||||
@@ -21,7 +23,7 @@ from watchdog.observers import Observer
|
||||
|
||||
sentry_sdk.init(
|
||||
dsn=SENTRY_DSN,
|
||||
release="romm@" + get_version(),
|
||||
release=f"romm@{get_version()}",
|
||||
)
|
||||
|
||||
path = (
|
||||
@@ -66,11 +68,11 @@ class EventHandler(FileSystemEventHandler):
|
||||
return
|
||||
|
||||
if db_platform and db_platform.id in job.args[0]:
|
||||
log.info(f"Scan already scheduled for {fs_slug}")
|
||||
log.info(f"Scan already scheduled for {hl(fs_slug)}")
|
||||
return
|
||||
|
||||
time_delta = timedelta(minutes=RESCAN_ON_FILESYSTEM_CHANGE_DELAY)
|
||||
rescan_in_msg = f"rescanning in {RESCAN_ON_FILESYSTEM_CHANGE_DELAY} minutes."
|
||||
rescan_in_msg = f"rescanning in {hl(str(RESCAN_ON_FILESYSTEM_CHANGE_DELAY), color=CYAN)} minutes."
|
||||
|
||||
# Any change to a platform directory should trigger a full rescan
|
||||
if event.is_directory and event_src.count("/") == 1:
|
||||
@@ -78,7 +80,7 @@ class EventHandler(FileSystemEventHandler):
|
||||
tasks_scheduler.enqueue_in(time_delta, scan_platforms, [])
|
||||
elif db_platform:
|
||||
# Otherwise trigger a rescan for the specific platform
|
||||
log.info(f"Change detected in {fs_slug} folder, {rescan_in_msg}")
|
||||
log.info(f"Change detected in {hl(fs_slug)} folder, {rescan_in_msg}")
|
||||
tasks_scheduler.enqueue_in(
|
||||
time_delta,
|
||||
scan_platforms,
|
||||
@@ -93,7 +95,7 @@ if __name__ == "__main__":
|
||||
observer.schedule(EventHandler(), path, recursive=True)
|
||||
observer.start()
|
||||
|
||||
log.info(f"Watching {path} for changes")
|
||||
log.info(f"Watching {hl(path)} for changes")
|
||||
|
||||
try:
|
||||
while observer.is_alive():
|
||||
|
||||
@@ -1,18 +1,19 @@
|
||||
import sentry_sdk
|
||||
from config import SENTRY_DSN
|
||||
from handler.redis_handler import redis_client
|
||||
from logger.logger import unify_logger
|
||||
from rq import Queue, Worker
|
||||
from utils import get_version
|
||||
|
||||
unify_logger("rq.worker")
|
||||
|
||||
listen = ("high", "default", "low")
|
||||
|
||||
sentry_sdk.init(
|
||||
dsn=SENTRY_DSN,
|
||||
release="romm@" + get_version(),
|
||||
release=f"romm@{get_version()}",
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
# Start the worker
|
||||
worker = Worker([Queue(name, connection=redis_client) for name in listen])
|
||||
worker.work()
|
||||
|
||||
@@ -146,8 +146,8 @@ COPY --from=frontend-build /front/dist ${WEBSERVER_FOLDER}
|
||||
|
||||
COPY ./frontend/assets ${WEBSERVER_FOLDER}/assets
|
||||
RUN mkdir -p ${WEBSERVER_FOLDER}/assets/romm && \
|
||||
ln -s /romm/resources ${WEBSERVER_FOLDER}/assets/romm/resources && \
|
||||
ln -s /romm/assets ${WEBSERVER_FOLDER}/assets/romm/assets
|
||||
ln -sf /romm/resources ${WEBSERVER_FOLDER}/assets/romm/resources && \
|
||||
ln -sf /romm/assets ${WEBSERVER_FOLDER}/assets/romm/assets
|
||||
|
||||
COPY ./backend /backend
|
||||
|
||||
@@ -156,6 +156,7 @@ COPY ./docker/init_scripts/* /
|
||||
COPY ./docker/nginx/js/ /etc/nginx/js/
|
||||
COPY ./docker/nginx/templates/ /etc/nginx/templates/
|
||||
COPY ./docker/nginx/default.conf /etc/nginx/nginx.conf
|
||||
COPY ./docker/gunicorn/logging.conf /etc/gunicorn/logging.conf
|
||||
|
||||
# User permissions
|
||||
# - Create default user `romm` (1000) and group `romm` (1000).
|
||||
|
||||
@@ -2,4 +2,5 @@
|
||||
|
||||
branch_name="$(git symbolic-ref HEAD 2>/dev/null)"
|
||||
branch_name=${branch_name##refs/heads/}
|
||||
docker build -t "rommapp/romm:local-${branch_name}" . --file ./docker/Dockerfile
|
||||
branch_name=${branch_name//\//-} # Replace slashes with dashes
|
||||
docker build -t "rommapp/romm-testing:local-${branch_name}" . --file ./docker/Dockerfile
|
||||
|
||||
39
docker/gunicorn/logging.conf
Normal file
39
docker/gunicorn/logging.conf
Normal file
@@ -0,0 +1,39 @@
|
||||
[loggers]
|
||||
keys=root,gunicorn,error
|
||||
|
||||
[handlers]
|
||||
keys=console_gunicorn
|
||||
|
||||
[formatters]
|
||||
keys=gunicorn_format
|
||||
|
||||
# Root logger — KEEP but minimal
|
||||
[logger_root]
|
||||
level=WARNING
|
||||
handlers=
|
||||
|
||||
# Gunicorn internal logger
|
||||
[logger_gunicorn]
|
||||
level=INFO
|
||||
handlers=console_gunicorn
|
||||
qualname=gunicorn
|
||||
propagate=0
|
||||
|
||||
# Gunicorn error logger (optional)
|
||||
[logger_error]
|
||||
level=ERROR
|
||||
handlers=console_gunicorn
|
||||
qualname=gunicorn.error
|
||||
propagate=0
|
||||
|
||||
# Handler for Gunicorn logs
|
||||
[handler_console_gunicorn]
|
||||
class=StreamHandler
|
||||
formatter=gunicorn_format
|
||||
args=(sys.stdout,)
|
||||
|
||||
# Formatter for Gunicorn logs
|
||||
[formatter_gunicorn_format]
|
||||
format=INFO: [RomM][gunicorn][%(asctime)s] %(message)s
|
||||
datefmt=%Y-%m-%d %H:%M:%S
|
||||
|
||||
@@ -5,39 +5,74 @@ set -o nounset # treat unset variables as an error
|
||||
set -o pipefail # treat errors in pipes as fatal
|
||||
shopt -s inherit_errexit # inherit errexit
|
||||
|
||||
LOGLEVEL="${LOGLEVEL:="info"}"
|
||||
|
||||
# make it possible to disable the inotify watcher process
|
||||
ENABLE_RESCAN_ON_FILESYSTEM_CHANGE="${ENABLE_RESCAN_ON_FILESYSTEM_CHANGE:="true"}"
|
||||
ENABLE_RESCAN_ON_FILESYSTEM_CHANGE="${ENABLE_RESCAN_ON_FILESYSTEM_CHANGE:="false"}"
|
||||
ENABLE_SCHEDULED_RESCAN="${ENABLE_SCHEDULED_RESCAN:="false"}"
|
||||
ENABLE_SCHEDULED_UPDATE_SWITCH_TITLEDB="${ENABLE_SCHEDULED_UPDATE_SWITCH_TITLEDB:="false"}"
|
||||
|
||||
# Set INIT_DEBUG to false if not set by docker env
|
||||
# (this env var is currently undocumented and usually just needed for development purposes)
|
||||
INIT_DEBUG="${INIT_DEBUG:="false"}"
|
||||
# if REDIS_HOST is set, we assume that an external redis is used
|
||||
REDIS_HOST="${REDIS_HOST:=""}"
|
||||
|
||||
# set DEFAULT_WEB_CONCURRENCY to 1 if not set by docker env to reduce resource usage
|
||||
# (since backend is almost 100% async this won't block anything)
|
||||
DEFAULT_WEB_CONCURRENCY=1
|
||||
|
||||
# logger colors
|
||||
RED='\033[0;31m'
|
||||
LIGHTMAGENTA='\033[0;95m'
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[0;33m'
|
||||
BLUE='\033[0;34m'
|
||||
CYAN='\033[0;36m'
|
||||
RESET='\033[0;00m'
|
||||
|
||||
print_banner() {
|
||||
info_log " _____ __ __ "
|
||||
info_log ' | __ \ | \/ |'
|
||||
info_log ' | |__) |___ _ __ ___ | \ / |'
|
||||
info_log " | _ // _ \\| '_ \` _ \\| |\\/| |"
|
||||
info_log ' | | \ \ (_) | | | | | | | | |'
|
||||
info_log ' |_| \_\___/|_| |_| |_|_| |_|'
|
||||
info_log ""
|
||||
info_log "The beautiful, powerful, self-hosted Rom Manager and player"
|
||||
info_log ""
|
||||
}
|
||||
|
||||
# print debug log output if enabled
|
||||
debug_log() {
|
||||
if [[ ${INIT_DEBUG} == "true" ]]; then
|
||||
echo "DEBUG: [init][$(date +"%Y-%m-%d %T")]" "${@}" || true
|
||||
# print debug log output if enabled
|
||||
if [[ ${LOGLEVEL} == "debug" ]]; then
|
||||
echo -e "${LIGHTMAGENTA}DEBUG: ${BLUE}[RomM]${LIGHTMAGENTA}[init]${CYAN}[$(date +"%Y-%m-%d %T")]${RESET}" "${@}" || true
|
||||
fi
|
||||
}
|
||||
|
||||
info_log() {
|
||||
echo "INFO: [init][$(date +"%Y-%m-%d %T")]" "${@}" || true
|
||||
echo -e "${GREEN}INFO: ${BLUE}[RomM]${LIGHTMAGENTA}[init]${CYAN}[$(date +"%Y-%m-%d %T")]${RESET}" "${@}" || true
|
||||
}
|
||||
|
||||
warn_log() {
|
||||
echo "WARNING: [init][$(date +"%Y-%m-%d %T")]" "${@}" || true
|
||||
echo -e "${YELLOW}WARNING: ${BLUE}[RomM]${LIGHTMAGENTA}[init]${CYAN}[$(date +"%Y-%m-%d %T")]${RESET}" "${@}" || true
|
||||
}
|
||||
|
||||
error_log() {
|
||||
echo "ERROR: [init][$(date +"%Y-%m-%d %T")]" "${@}" || true
|
||||
echo -e "${RED}ERROR: ${BLUE}[RomM]${LIGHTMAGENTA}[init]${CYAN}[$(date +"%Y-%m-%d %T")]${RESET}" "${@}" || true
|
||||
exit 1
|
||||
}
|
||||
|
||||
wait_for_gunicorn_socket() {
|
||||
info_log "waiting for gunicorn socket file..."
|
||||
while [[ ! -S /tmp/gunicorn.sock ]]; do
|
||||
sleep 1
|
||||
debug_log "Waiting for gunicorn socket file..."
|
||||
local retries=60
|
||||
while [[ ! -S /tmp/gunicorn.sock && retries -gt 0 ]]; do
|
||||
sleep 0.5
|
||||
((retries--))
|
||||
done
|
||||
info_log "gunicorn socket file found"
|
||||
|
||||
if [[ -S /tmp/gunicorn.sock ]]; then
|
||||
debug_log "Gunicorn socket file found"
|
||||
else
|
||||
error_log "Gunicorn socket file not found after waiting 30s"
|
||||
fi
|
||||
}
|
||||
|
||||
# function that runs or main process and creates a corresponding PID file,
|
||||
@@ -46,7 +81,7 @@ start_bin_gunicorn() {
|
||||
rm /tmp/gunicorn.sock -f
|
||||
|
||||
# commands to start our main application and store its PID to check for crashes
|
||||
info_log "starting gunicorn"
|
||||
info_log "Starting backend"
|
||||
|
||||
# TODO: Remove support for GUNICORN_WORKERS in future version.
|
||||
if [[ -n ${GUNICORN_WORKERS-} ]]; then
|
||||
@@ -55,14 +90,15 @@ start_bin_gunicorn() {
|
||||
fi
|
||||
|
||||
gunicorn \
|
||||
--access-logfile - \
|
||||
--error-logfile - \
|
||||
--worker-class uvicorn.workers.UvicornWorker \
|
||||
--bind=0.0.0.0:5000 \
|
||||
--bind=unix:/tmp/gunicorn.sock \
|
||||
--pid=/tmp/gunicorn.pid \
|
||||
--forwarded-allow-ips="*" \
|
||||
--workers "${WEB_CONCURRENCY:-2}" \
|
||||
--worker-class uvicorn.workers.UvicornWorker \
|
||||
--workers "${WEB_CONCURRENCY:-${DEFAULT_WEB_CONCURRENCY:-1}}" \
|
||||
--error-logfile - \
|
||||
--error-logfile - \
|
||||
--log-config /etc/gunicorn/logging.conf \
|
||||
main:app &
|
||||
}
|
||||
|
||||
@@ -70,31 +106,66 @@ start_bin_gunicorn() {
|
||||
start_bin_nginx() {
|
||||
wait_for_gunicorn_socket
|
||||
|
||||
info_log "starting nginx"
|
||||
info_log "Starting nginx"
|
||||
if [[ ${EUID} -ne 0 ]]; then
|
||||
nginx
|
||||
else
|
||||
# if container runs as root, drop permissions
|
||||
nginx -g 'user romm;'
|
||||
fi
|
||||
info_log "🚀 RomM is now available at http://0.0.0.0:8080"
|
||||
}
|
||||
|
||||
# Commands to start valkey-server (handling PID creation internally)
start_bin_valkey-server() {
info_log "starting valkey-server"
# Check if /usr/local/etc/valkey/valkey.conf exists and use it if so
info_log "Starting internal valkey"

if [[ -f /usr/local/etc/valkey/valkey.conf ]]; then
valkey-server /usr/local/etc/valkey/valkey.conf &
if [[ ${LOGLEVEL} == "debug" ]]; then
valkey-server /usr/local/etc/valkey/valkey.conf &
else
valkey-server /usr/local/etc/valkey/valkey.conf >/dev/null 2>&1 &
fi
else
valkey-server --dir /redis-data &
if [[ ${LOGLEVEL} == "debug" ]]; then
valkey-server --dir /redis-data &
else
valkey-server --dir /redis-data >/dev/null 2>&1 &
fi
fi

VALKEY_PID=$!
echo "${VALKEY_PID}" >/tmp/valkey-server.pid

local host="127.0.0.1"
local port="6379"
local max_retries=120
local retry=0

debug_log "Waiting for internal valkey to be ready..."

# Temporarily disable errexit for this part of the script
set +o errexit

while ((retry < max_retries)); do
# Attempt to check if valkey TCP port is open
if (echo >/dev/tcp/"${host}"/"${port}") 2>/dev/null; then
debug_log "Internal valkey is ready and accepting connections"
set -o errexit # Re-enable errexit after success
return 0
fi

sleep 0.5
((retry++))
done

error_log "Internal valkey did not become ready after $((max_retries * 500))ms"
}

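Worth noting for readers of the readiness loop above: the (echo >/dev/tcp/host/port) test is plain bash, with no netcat or redis-cli needed in the image, because bash treats redirections to /dev/tcp/HOST/PORT as TCP connections. A minimal standalone sketch of the same idiom follows; the host, port and retry budget are placeholders, not values taken from RomM.

# Sketch only: wait until a TCP port accepts connections, using bash's /dev/tcp.
# The defaults here (60 retries, 0.5s apart) are illustrative, not RomM's settings.
wait_for_tcp() {
    local host="${1}" port="${2}" retries="${3:-60}"
    while ((retries > 0)); do
        # the redirection fails (and is silenced) while the port is still closed
        if (echo >"/dev/tcp/${host}/${port}") 2>/dev/null; then
            return 0
        fi
        sleep 0.5
        ((retries--))
    done
    return 1
}
# usage: wait_for_tcp 127.0.0.1 6379 || echo "port never opened"
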
# function that runs our independent python scripts and creates corresponding PID files,
start_python() {
SCRIPT="${1}"
info_log "starting ${SCRIPT}.py"
info_log "Starting ${SCRIPT}"
python3 "${SCRIPT}.py" &
WATCHER_PID=$!
echo "${WATCHER_PID}" >"/tmp/${SCRIPT}.pid"
@@ -106,9 +177,7 @@ watchdog_process_pid() {
if [[ -f "/tmp/${PROCESS}.pid" ]]; then
# check if the pid we last wrote to our state file is actually active
PID=$(cat "/tmp/${PROCESS}.pid") || true
if [[ -d "/proc/${PID}" ]]; then
debug_log "${PROCESS} still running, no need to start"
else
if [[ ! -d "/proc/${PID}" ]]; then
if [[ ${TYPE} == "bin" ]]; then
start_bin_"${PROCESS}"
elif [[ ${TYPE} == "python" ]]; then
@@ -116,6 +185,7 @@ watchdog_process_pid() {
fi
fi
else
# start process if we dont have a corresponding PID file
if [[ ${TYPE} == "bin" ]]; then
start_bin_"${PROCESS}"
elif [[ ${TYPE} == "python" ]]; then
@@ -129,7 +199,7 @@ stop_process_pid() {
if [[ -f "/tmp/${PROCESS}.pid" ]]; then
PID=$(cat "/tmp/${PROCESS}.pid") || true
if [[ -d "/proc/${PID}" ]]; then
info_log "stopping ${PROCESS}"
info_log "Stopping ${PROCESS}"
kill "${PID}" || true
# wait for process exit
while [[ -e "/proc/${PID}" ]]; do sleep 0.1; done
@@ -147,11 +217,11 @@ shutdown() {
stop_process_pid valkey-server
}

print_banner

# switch to backend directory
cd /backend || { error_log "/backend directory doesn't seem to exist"; }

info_log "Starting up, please wait..."

# setup trap handler
exited=0
trap 'exited=1 && shutdown' SIGINT SIGTERM EXIT
@@ -159,45 +229,38 @@ trap 'exited=1 && shutdown' SIGINT SIGTERM EXIT
# clear any leftover PID files
rm /tmp/*.pid -f

# function definition done, lets start our main loop
# Start Valkey server if REDIS_HOST is not set (which would mean user is using an external Redis/Valkey)
if [[ -z ${REDIS_HOST} ]]; then
watchdog_process_pid bin valkey-server
else
warn_log "REDIS_HOST is set, not starting internal valkey-server"
fi

# Run needed database migrations once at startup
info_log "Running database migrations"
if alembic upgrade head; then
info_log "Database migrations succeeded"
else
error_log "Failed to run database migrations"
fi

# main loop
while ! ((exited)); do
# Start Valkey server if we dont have a corresponding PID file
# and REDIS_HOST is not set (which would mean we're using an external Redis/Valkey)
if [[ -z ${REDIS_HOST:=""} ]]; then
watchdog_process_pid bin valkey-server
fi

# Run needed database migrations on startup,
# but only if it was not successful since the last full docker container start
if [[ ${ALEMBIC_SUCCESS:="false"} == "false" ]]; then
if alembic upgrade head; then
debug_log "database schema migrations succeeded"
ALEMBIC_SUCCESS="true"
else
error_log "Something went horribly wrong with our database"
fi
else
debug_log "database schema already upgraded during current container lifecycle"
fi

# Start gunicorn if we dont have a corresponding PID file
watchdog_process_pid bin gunicorn

# Start nginx if we dont have a corresponding PID file
watchdog_process_pid bin nginx
watchdog_process_pid python worker

# only start the watcher.py if we actually want to use the rescan on fs change feature
# only start the scheduler if enabled
if [[ ${ENABLE_SCHEDULED_RESCAN} == "true" || ${ENABLE_SCHEDULED_UPDATE_SWITCH_TITLEDB} == "true" ]]; then
watchdog_process_pid python scheduler
fi

# only start the watcher if enabled
if [[ ${ENABLE_RESCAN_ON_FILESYSTEM_CHANGE} == "true" ]]; then
# Start watcher if we dont have a corresponding PID file
watchdog_process_pid python watcher
fi

# Start background worker processes
debug_log "Starting worker and scheduler"
# Start worker if we dont have a corresponding PID file
watchdog_process_pid python worker
# Start scheduler if we dont have a corresponding PID file
watchdog_process_pid python scheduler
watchdog_process_pid bin nginx

# check for died processes every 5 seconds
sleep 5

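The loop above is the whole supervision model: each service records its PID under /tmp/<name>.pid, and every pass the script checks /proc/<PID> and restarts anything that has died. A condensed sketch of that pattern is shown below, with a placeholder service standing in for the real gunicorn/nginx/valkey starters.

# Sketch only: the PID-file watchdog pattern used by the entrypoint.
# "myservice" and its start command are placeholders, not part of the real script.
watchdog_myservice() {
    local pidfile="/tmp/myservice.pid"
    if [[ -f "${pidfile}" && -d "/proc/$(cat "${pidfile}")" ]]; then
        return 0 # still alive, nothing to do
    fi
    sleep infinity &      # stand-in for the real start command
    echo $! >"${pidfile}" # record the new PID for the next pass
}

while true; do
    watchdog_myservice
    sleep 5 # re-check for dead processes every 5 seconds
done
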
@@ -42,13 +42,33 @@ http {
~T([0-9:]+)\+ $1;
}

#INFO: [nginx][2023-11-14 09:20:29] 127.0.0.1 - -"GET / HTTP/1.1" 500 177 "-" "Mozilla/5.0 (X11; Linux x86_64)"rt=0.000 uct="-" uht="-" urt="-"
log_format romm_log 'INFO: [nginx][$date $time] $remote_addr - $remote_user '
'"$request" $status $body_bytes_sent '
'"$http_referer" "$http_user_agent" '
'rt=$request_time uct="$upstream_connect_time" uht="$upstream_header_time" urt="$upstream_response_time"';
# Map to extract the browser name (e.g., Chrome, Firefox, etc.)
map $http_user_agent $browser {
default "Unknown";
"~Chrome/" "Chrome";
"~Firefox/" "Firefox";
"~Safari/" "Safari";
"~Edge/" "Edge";
"~Opera/" "Opera";
}

access_log /dev/stdout romm_log;
# Map to extract the OS (e.g., Windows, MacOS, Linux)
map $http_user_agent $os {
default "Unknown";
"~Windows NT" "Windows";
"~Macintosh" "macOS";
"~Linux" "Linux";
"~Android" "Android";
"~iPhone" "iOS";
}

#INFO: [nginx][2023-11-14 09:20:29] 127.0.0.1 - -"GET / HTTP/1.1" 500 177 "-" "Mozilla/5.0 (X11; Linux x86_64)"rt=0.000 uct="-" uht="-" urt="-"
log_format romm_logs 'INFO: [RomM][nginx][$date $time] '
'$remote_addr | $http_x_forwarded_for | '
'$request_method $request_uri $status | $body_bytes_sent | '
'$browser $os | $request_time';

access_log /dev/stdout romm_logs;
error_log /dev/stderr;

gzip on;

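For reference, a request logged through the new romm_logs format would come out roughly like the line below; all values are invented for illustration, and the $date and $time variables are presumably filled in by the map directives earlier in the file.

INFO: [RomM][nginx][2023-11-14 09:20:29] 172.17.0.1 | - | GET /api/roms 200 | 5123 | Firefox Linux | 0.042
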
frontend/package-lock.json (generated, 270 changes)
@@ -43,7 +43,7 @@
|
||||
"openapi-typescript-codegen": "^0.29.0",
|
||||
"typescript": "^5.7.3",
|
||||
"typescript-eslint": "^8.28.0",
|
||||
"vite": "^6.2.6",
|
||||
"vite": "^6.3.4",
|
||||
"vite-plugin-pwa": "^0.21.1",
|
||||
"vite-plugin-vuetify": "^2.0.4",
|
||||
"vue-tsc": "^2.2.8"
|
||||
@@ -2916,228 +2916,260 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@rollup/rollup-android-arm-eabi": {
|
||||
"version": "4.34.2",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.34.2.tgz",
|
||||
"integrity": "sha512-6Fyg9yQbwJR+ykVdT9sid1oc2ewejS6h4wzQltmJfSW53N60G/ah9pngXGANdy9/aaE/TcUFpWosdm7JXS1WTQ==",
|
||||
"version": "4.40.1",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.40.1.tgz",
|
||||
"integrity": "sha512-kxz0YeeCrRUHz3zyqvd7n+TVRlNyTifBsmnmNPtk3hQURUyG9eAB+usz6DAwagMusjx/zb3AjvDUvhFGDAexGw==",
|
||||
"cpu": [
|
||||
"arm"
|
||||
],
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"android"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-android-arm64": {
|
||||
"version": "4.34.2",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.34.2.tgz",
|
||||
"integrity": "sha512-K5GfWe+vtQ3kyEbihrimM38UgX57UqHp+oME7X/EX9Im6suwZfa7Hsr8AtzbJvukTpwMGs+4s29YMSO3rwWtsw==",
|
||||
"version": "4.40.1",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.40.1.tgz",
|
||||
"integrity": "sha512-PPkxTOisoNC6TpnDKatjKkjRMsdaWIhyuMkA4UsBXT9WEZY4uHezBTjs6Vl4PbqQQeu6oION1w2voYZv9yquCw==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"android"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-darwin-arm64": {
|
||||
"version": "4.34.2",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.34.2.tgz",
|
||||
"integrity": "sha512-PSN58XG/V/tzqDb9kDGutUruycgylMlUE59f40ny6QIRNsTEIZsrNQTJKUN2keMMSmlzgunMFqyaGLmly39sug==",
|
||||
"version": "4.40.1",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.40.1.tgz",
|
||||
"integrity": "sha512-VWXGISWFY18v/0JyNUy4A46KCFCb9NVsH+1100XP31lud+TzlezBbz24CYzbnA4x6w4hx+NYCXDfnvDVO6lcAA==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-darwin-x64": {
|
||||
"version": "4.34.2",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.34.2.tgz",
|
||||
"integrity": "sha512-gQhK788rQJm9pzmXyfBB84VHViDERhAhzGafw+E5mUpnGKuxZGkMVDa3wgDFKT6ukLC5V7QTifzsUKdNVxp5qQ==",
|
||||
"version": "4.40.1",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.40.1.tgz",
|
||||
"integrity": "sha512-nIwkXafAI1/QCS7pxSpv/ZtFW6TXcNUEHAIA9EIyw5OzxJZQ1YDrX+CL6JAIQgZ33CInl1R6mHet9Y/UZTg2Bw==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-freebsd-arm64": {
|
||||
"version": "4.34.2",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.34.2.tgz",
|
||||
"integrity": "sha512-eiaHgQwGPpxLC3+zTAcdKl4VsBl3r0AiJOd1Um/ArEzAjN/dbPK1nROHrVkdnoE6p7Svvn04w3f/jEZSTVHunA==",
|
||||
"version": "4.40.1",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.40.1.tgz",
|
||||
"integrity": "sha512-BdrLJ2mHTrIYdaS2I99mriyJfGGenSaP+UwGi1kB9BLOCu9SR8ZpbkmmalKIALnRw24kM7qCN0IOm6L0S44iWw==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"freebsd"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-freebsd-x64": {
|
||||
"version": "4.34.2",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.34.2.tgz",
|
||||
"integrity": "sha512-lhdiwQ+jf8pewYOTG4bag0Qd68Jn1v2gO1i0mTuiD+Qkt5vNfHVK/jrT7uVvycV8ZchlzXp5HDVmhpzjC6mh0g==",
|
||||
"version": "4.40.1",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.40.1.tgz",
|
||||
"integrity": "sha512-VXeo/puqvCG8JBPNZXZf5Dqq7BzElNJzHRRw3vjBE27WujdzuOPecDPc/+1DcdcTptNBep3861jNq0mYkT8Z6Q==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"freebsd"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-linux-arm-gnueabihf": {
|
||||
"version": "4.34.2",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.34.2.tgz",
|
||||
"integrity": "sha512-lfqTpWjSvbgQP1vqGTXdv+/kxIznKXZlI109WkIFPbud41bjigjNmOAAKoazmRGx+k9e3rtIdbq2pQZPV1pMig==",
|
||||
"version": "4.40.1",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.40.1.tgz",
|
||||
"integrity": "sha512-ehSKrewwsESPt1TgSE/na9nIhWCosfGSFqv7vwEtjyAqZcvbGIg4JAcV7ZEh2tfj/IlfBeZjgOXm35iOOjadcg==",
|
||||
"cpu": [
|
||||
"arm"
|
||||
],
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-linux-arm-musleabihf": {
|
||||
"version": "4.34.2",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.34.2.tgz",
|
||||
"integrity": "sha512-RGjqULqIurqqv+NJTyuPgdZhka8ImMLB32YwUle2BPTDqDoXNgwFjdjQC59FbSk08z0IqlRJjrJ0AvDQ5W5lpw==",
|
||||
"version": "4.40.1",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.40.1.tgz",
|
||||
"integrity": "sha512-m39iO/aaurh5FVIu/F4/Zsl8xppd76S4qoID8E+dSRQvTyZTOI2gVk3T4oqzfq1PtcvOfAVlwLMK3KRQMaR8lg==",
|
||||
"cpu": [
|
||||
"arm"
|
||||
],
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-linux-arm64-gnu": {
|
||||
"version": "4.34.2",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.34.2.tgz",
|
||||
"integrity": "sha512-ZvkPiheyXtXlFqHpsdgscx+tZ7hoR59vOettvArinEspq5fxSDSgfF+L5wqqJ9R4t+n53nyn0sKxeXlik7AY9Q==",
|
||||
"version": "4.40.1",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.40.1.tgz",
|
||||
"integrity": "sha512-Y+GHnGaku4aVLSgrT0uWe2o2Rq8te9hi+MwqGF9r9ORgXhmHK5Q71N757u0F8yU1OIwUIFy6YiJtKjtyktk5hg==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-linux-arm64-musl": {
|
||||
"version": "4.34.2",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.34.2.tgz",
|
||||
"integrity": "sha512-UlFk+E46TZEoxD9ufLKDBzfSG7Ki03fo6hsNRRRHF+KuvNZ5vd1RRVQm8YZlGsjcJG8R252XFK0xNPay+4WV7w==",
|
||||
"version": "4.40.1",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.40.1.tgz",
|
||||
"integrity": "sha512-jEwjn3jCA+tQGswK3aEWcD09/7M5wGwc6+flhva7dsQNRZZTe30vkalgIzV4tjkopsTS9Jd7Y1Bsj6a4lzz8gQ==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-linux-loongarch64-gnu": {
|
||||
"version": "4.34.2",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.34.2.tgz",
|
||||
"integrity": "sha512-hJhfsD9ykx59jZuuoQgYT1GEcNNi3RCoEmbo5OGfG8RlHOiVS7iVNev9rhLKh7UBYq409f4uEw0cclTXx8nh8Q==",
|
||||
"version": "4.40.1",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.40.1.tgz",
|
||||
"integrity": "sha512-ySyWikVhNzv+BV/IDCsrraOAZ3UaC8SZB67FZlqVwXwnFhPihOso9rPOxzZbjp81suB1O2Topw+6Ug3JNegejQ==",
|
||||
"cpu": [
|
||||
"loong64"
|
||||
],
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-linux-powerpc64le-gnu": {
|
||||
"version": "4.34.2",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.34.2.tgz",
|
||||
"integrity": "sha512-g/O5IpgtrQqPegvqopvmdCF9vneLE7eqYfdPWW8yjPS8f63DNam3U4ARL1PNNB64XHZDHKpvO2Giftf43puB8Q==",
|
||||
"version": "4.40.1",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.40.1.tgz",
|
||||
"integrity": "sha512-BvvA64QxZlh7WZWqDPPdt0GH4bznuL6uOO1pmgPnnv86rpUpc8ZxgZwcEgXvo02GRIZX1hQ0j0pAnhwkhwPqWg==",
|
||||
"cpu": [
|
||||
"ppc64"
|
||||
],
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-linux-riscv64-gnu": {
|
||||
"version": "4.34.2",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.34.2.tgz",
|
||||
"integrity": "sha512-bSQijDC96M6PuooOuXHpvXUYiIwsnDmqGU8+br2U7iPoykNi9JtMUpN7K6xml29e0evK0/g0D1qbAUzWZFHY5Q==",
|
||||
"version": "4.40.1",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.40.1.tgz",
|
||||
"integrity": "sha512-EQSP+8+1VuSulm9RKSMKitTav89fKbHymTf25n5+Yr6gAPZxYWpj3DzAsQqoaHAk9YX2lwEyAf9S4W8F4l3VBQ==",
|
||||
"cpu": [
|
||||
"riscv64"
|
||||
],
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-linux-riscv64-musl": {
|
||||
"version": "4.40.1",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.40.1.tgz",
|
||||
"integrity": "sha512-n/vQ4xRZXKuIpqukkMXZt9RWdl+2zgGNx7Uda8NtmLJ06NL8jiHxUawbwC+hdSq1rrw/9CghCpEONor+l1e2gA==",
|
||||
"cpu": [
|
||||
"riscv64"
|
||||
],
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-linux-s390x-gnu": {
|
||||
"version": "4.34.2",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.34.2.tgz",
|
||||
"integrity": "sha512-49TtdeVAsdRuiUHXPrFVucaP4SivazetGUVH8CIxVsNsaPHV4PFkpLmH9LeqU/R4Nbgky9lzX5Xe1NrzLyraVA==",
|
||||
"version": "4.40.1",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.40.1.tgz",
|
||||
"integrity": "sha512-h8d28xzYb98fMQKUz0w2fMc1XuGzLLjdyxVIbhbil4ELfk5/orZlSTpF/xdI9C8K0I8lCkq+1En2RJsawZekkg==",
|
||||
"cpu": [
|
||||
"s390x"
|
||||
],
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-linux-x64-gnu": {
|
||||
"version": "4.34.2",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.34.2.tgz",
|
||||
"integrity": "sha512-j+jFdfOycLIQ7FWKka9Zd3qvsIyugg5LeZuHF6kFlXo6MSOc6R1w37YUVy8VpAKd81LMWGi5g9J25P09M0SSIw==",
|
||||
"version": "4.40.1",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.40.1.tgz",
|
||||
"integrity": "sha512-XiK5z70PEFEFqcNj3/zRSz/qX4bp4QIraTy9QjwJAb/Z8GM7kVUsD0Uk8maIPeTyPCP03ChdI+VVmJriKYbRHQ==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-linux-x64-musl": {
|
||||
"version": "4.34.2",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.34.2.tgz",
|
||||
"integrity": "sha512-aDPHyM/D2SpXfSNCVWCxyHmOqN9qb7SWkY1+vaXqMNMXslZYnwh9V/UCudl6psyG0v6Ukj7pXanIpfZwCOEMUg==",
|
||||
"version": "4.40.1",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.40.1.tgz",
|
||||
"integrity": "sha512-2BRORitq5rQ4Da9blVovzNCMaUlyKrzMSvkVR0D4qPuOy/+pMCrh1d7o01RATwVy+6Fa1WBw+da7QPeLWU/1mQ==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-win32-arm64-msvc": {
|
||||
"version": "4.34.2",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.34.2.tgz",
|
||||
"integrity": "sha512-LQRkCyUBnAo7r8dbEdtNU08EKLCJMgAk2oP5H3R7BnUlKLqgR3dUjrLBVirmc1RK6U6qhtDw29Dimeer8d5hzQ==",
|
||||
"version": "4.40.1",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.40.1.tgz",
|
||||
"integrity": "sha512-b2bcNm9Kbde03H+q+Jjw9tSfhYkzrDUf2d5MAd1bOJuVplXvFhWz7tRtWvD8/ORZi7qSCy0idW6tf2HgxSXQSg==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-win32-ia32-msvc": {
|
||||
"version": "4.34.2",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.34.2.tgz",
|
||||
"integrity": "sha512-wt8OhpQUi6JuPFkm1wbVi1BByeag87LDFzeKSXzIdGcX4bMLqORTtKxLoCbV57BHYNSUSOKlSL4BYYUghainYA==",
|
||||
"version": "4.40.1",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.40.1.tgz",
|
||||
"integrity": "sha512-DfcogW8N7Zg7llVEfpqWMZcaErKfsj9VvmfSyRjCyo4BI3wPEfrzTtJkZG6gKP/Z92wFm6rz2aDO7/JfiR/whA==",
|
||||
"cpu": [
|
||||
"ia32"
|
||||
],
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-win32-x64-msvc": {
|
||||
"version": "4.34.2",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.34.2.tgz",
|
||||
"integrity": "sha512-rUrqINax0TvrPBXrFKg0YbQx18NpPN3NNrgmaao9xRNbTwek7lOXObhx8tQy8gelmQ/gLaGy1WptpU2eKJZImg==",
|
||||
"version": "4.40.1",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.40.1.tgz",
|
||||
"integrity": "sha512-ECyOuDeH3C1I8jH2MK1RtBJW+YPMvSfT0a5NN0nHfQYnDSJ6tUiZH3gzwVP5/Kfh/+Tt7tpWVF9LXNTnhTJ3kA==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
@@ -3181,10 +3213,11 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@types/estree": {
|
||||
"version": "1.0.6",
|
||||
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz",
|
||||
"integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==",
|
||||
"devOptional": true
|
||||
"version": "1.0.7",
|
||||
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.7.tgz",
|
||||
"integrity": "sha512-w28IoSUCJpidD/TGviZwwMJckNESJZXFu7NBZ5YJ4mEUnNraUn9Pm8HSZm/jDF1pDWYKspWE7oVphigUPRakIQ==",
|
||||
"devOptional": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@types/js-cookie": {
|
||||
"version": "3.0.6",
|
||||
@@ -7076,12 +7109,13 @@
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/rollup": {
|
||||
"version": "4.34.2",
|
||||
"resolved": "https://registry.npmjs.org/rollup/-/rollup-4.34.2.tgz",
|
||||
"integrity": "sha512-sBDUoxZEaqLu9QeNalL8v3jw6WjPku4wfZGyTU7l7m1oC+rpRihXc/n/H+4148ZkGz5Xli8CHMns//fFGKvpIQ==",
|
||||
"version": "4.40.1",
|
||||
"resolved": "https://registry.npmjs.org/rollup/-/rollup-4.40.1.tgz",
|
||||
"integrity": "sha512-C5VvvgCCyfyotVITIAv+4efVytl5F7wt+/I2i9q9GZcEXW9BP52YYOXC58igUi+LFZVHukErIIqQSWwv/M3WRw==",
|
||||
"devOptional": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/estree": "1.0.6"
|
||||
"@types/estree": "1.0.7"
|
||||
},
|
||||
"bin": {
|
||||
"rollup": "dist/bin/rollup"
|
||||
@@ -7091,25 +7125,26 @@
|
||||
"npm": ">=8.0.0"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@rollup/rollup-android-arm-eabi": "4.34.2",
|
||||
"@rollup/rollup-android-arm64": "4.34.2",
|
||||
"@rollup/rollup-darwin-arm64": "4.34.2",
|
||||
"@rollup/rollup-darwin-x64": "4.34.2",
|
||||
"@rollup/rollup-freebsd-arm64": "4.34.2",
|
||||
"@rollup/rollup-freebsd-x64": "4.34.2",
|
||||
"@rollup/rollup-linux-arm-gnueabihf": "4.34.2",
|
||||
"@rollup/rollup-linux-arm-musleabihf": "4.34.2",
|
||||
"@rollup/rollup-linux-arm64-gnu": "4.34.2",
|
||||
"@rollup/rollup-linux-arm64-musl": "4.34.2",
|
||||
"@rollup/rollup-linux-loongarch64-gnu": "4.34.2",
|
||||
"@rollup/rollup-linux-powerpc64le-gnu": "4.34.2",
|
||||
"@rollup/rollup-linux-riscv64-gnu": "4.34.2",
|
||||
"@rollup/rollup-linux-s390x-gnu": "4.34.2",
|
||||
"@rollup/rollup-linux-x64-gnu": "4.34.2",
|
||||
"@rollup/rollup-linux-x64-musl": "4.34.2",
|
||||
"@rollup/rollup-win32-arm64-msvc": "4.34.2",
|
||||
"@rollup/rollup-win32-ia32-msvc": "4.34.2",
|
||||
"@rollup/rollup-win32-x64-msvc": "4.34.2",
|
||||
"@rollup/rollup-android-arm-eabi": "4.40.1",
|
||||
"@rollup/rollup-android-arm64": "4.40.1",
|
||||
"@rollup/rollup-darwin-arm64": "4.40.1",
|
||||
"@rollup/rollup-darwin-x64": "4.40.1",
|
||||
"@rollup/rollup-freebsd-arm64": "4.40.1",
|
||||
"@rollup/rollup-freebsd-x64": "4.40.1",
|
||||
"@rollup/rollup-linux-arm-gnueabihf": "4.40.1",
|
||||
"@rollup/rollup-linux-arm-musleabihf": "4.40.1",
|
||||
"@rollup/rollup-linux-arm64-gnu": "4.40.1",
|
||||
"@rollup/rollup-linux-arm64-musl": "4.40.1",
|
||||
"@rollup/rollup-linux-loongarch64-gnu": "4.40.1",
|
||||
"@rollup/rollup-linux-powerpc64le-gnu": "4.40.1",
|
||||
"@rollup/rollup-linux-riscv64-gnu": "4.40.1",
|
||||
"@rollup/rollup-linux-riscv64-musl": "4.40.1",
|
||||
"@rollup/rollup-linux-s390x-gnu": "4.40.1",
|
||||
"@rollup/rollup-linux-x64-gnu": "4.40.1",
|
||||
"@rollup/rollup-linux-x64-musl": "4.40.1",
|
||||
"@rollup/rollup-win32-arm64-msvc": "4.40.1",
|
||||
"@rollup/rollup-win32-ia32-msvc": "4.40.1",
|
||||
"@rollup/rollup-win32-x64-msvc": "4.40.1",
|
||||
"fsevents": "~2.3.2"
|
||||
}
|
||||
},
|
||||
@@ -7716,23 +7751,28 @@
|
||||
"devOptional": true
|
||||
},
|
||||
"node_modules/tinyglobby": {
|
||||
"version": "0.2.10",
|
||||
"resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.10.tgz",
|
||||
"integrity": "sha512-Zc+8eJlFMvgatPZTl6A9L/yht8QqdmUNtURHaKZLmKBE12hNPSrqNkUp2cs3M/UKmNVVAMFQYSjYIVHDjW5zew==",
|
||||
"dev": true,
|
||||
"version": "0.2.13",
|
||||
"resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.13.tgz",
|
||||
"integrity": "sha512-mEwzpUgrLySlveBwEVDMKk5B57bhLPYovRfPAXD5gA/98Opn0rCDj3GtLwFvCvH5RK9uPCExUROW5NjDwvqkxw==",
|
||||
"devOptional": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"fdir": "^6.4.2",
|
||||
"fdir": "^6.4.4",
|
||||
"picomatch": "^4.0.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/SuperchupuDev"
|
||||
}
|
||||
},
|
||||
"node_modules/tinyglobby/node_modules/fdir": {
|
||||
"version": "6.4.3",
|
||||
"resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.3.tgz",
|
||||
"integrity": "sha512-PMXmW2y1hDDfTSRc9gaXIuCCRpuoz3Kaz8cUelp3smouvfT632ozg2vrT6lJsHKKOF59YLbOGfAWGUcKEfRMQw==",
|
||||
"dev": true,
|
||||
"version": "6.4.4",
|
||||
"resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.4.tgz",
|
||||
"integrity": "sha512-1NZP+GK4GfuAv3PqKvxQRDMjdSRZjnkq7KfhlNrCNNlZ0ygQFpebfrnfnq/W7fpUnAv9aGWmY1zKx7FYL3gwhg==",
|
||||
"devOptional": true,
|
||||
"license": "MIT",
|
||||
"peerDependencies": {
|
||||
"picomatch": "^3 || ^4"
|
||||
},
|
||||
@@ -7746,7 +7786,8 @@
|
||||
"version": "4.0.2",
|
||||
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz",
|
||||
"integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==",
|
||||
"dev": true,
|
||||
"devOptional": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
},
|
||||
@@ -8083,15 +8124,18 @@
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/vite": {
|
||||
"version": "6.2.6",
|
||||
"resolved": "https://registry.npmjs.org/vite/-/vite-6.2.6.tgz",
|
||||
"integrity": "sha512-9xpjNl3kR4rVDZgPNdTL0/c6ao4km69a/2ihNQbcANz8RuCOK3hQBmLSJf3bRKVQjVMda+YvizNE8AwvogcPbw==",
|
||||
"version": "6.3.4",
|
||||
"resolved": "https://registry.npmjs.org/vite/-/vite-6.3.4.tgz",
|
||||
"integrity": "sha512-BiReIiMS2fyFqbqNT/Qqt4CVITDU9M9vE+DKcVAsB+ZV0wvTKd+3hMbkpxz1b+NmEDMegpVbisKiAZOnvO92Sw==",
|
||||
"devOptional": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"esbuild": "^0.25.0",
|
||||
"fdir": "^6.4.4",
|
||||
"picomatch": "^4.0.2",
|
||||
"postcss": "^8.5.3",
|
||||
"rollup": "^4.30.1"
|
||||
"rollup": "^4.34.9",
|
||||
"tinyglobby": "^0.2.13"
|
||||
},
|
||||
"bin": {
|
||||
"vite": "bin/vite.js"
|
||||
@@ -8203,6 +8247,34 @@
|
||||
"vuetify": "^3.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/vite/node_modules/fdir": {
|
||||
"version": "6.4.4",
|
||||
"resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.4.tgz",
|
||||
"integrity": "sha512-1NZP+GK4GfuAv3PqKvxQRDMjdSRZjnkq7KfhlNrCNNlZ0ygQFpebfrnfnq/W7fpUnAv9aGWmY1zKx7FYL3gwhg==",
|
||||
"devOptional": true,
|
||||
"license": "MIT",
|
||||
"peerDependencies": {
|
||||
"picomatch": "^3 || ^4"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"picomatch": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/vite/node_modules/picomatch": {
|
||||
"version": "4.0.2",
|
||||
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz",
|
||||
"integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==",
|
||||
"devOptional": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/jonschlinkert"
|
||||
}
|
||||
},
|
||||
"node_modules/vscode-uri": {
|
||||
"version": "3.0.8",
|
||||
"resolved": "https://registry.npmjs.org/vscode-uri/-/vscode-uri-3.0.8.tgz",
|
||||
|
||||
@@ -61,7 +61,7 @@
"openapi-typescript-codegen": "^0.29.0",
"typescript": "^5.7.3",
"typescript-eslint": "^8.28.0",
"vite": "^6.2.6",
"vite": "^6.3.4",
"vite-plugin-pwa": "^0.21.1",
"vite-plugin-vuetify": "^2.0.4",
"vue-tsc": "^2.2.8"

@@ -44,12 +44,15 @@ async function uploadSaves({
async function updateSave({
save,
saveFile,
screenshotFile,
}: {
save: SaveSchema;
saveFile: File;
screenshotFile?: File;
}): Promise<{ data: SaveSchema }> {
const formData = new FormData();
formData.append("saveFile", saveFile);
if (screenshotFile) formData.append("screenshotFile", screenshotFile);

return api.put(`/saves/${save.id}`, formData);
}

@@ -44,12 +44,15 @@ async function uploadStates({
async function updateState({
state,
stateFile,
screenshotFile,
}: {
state: StateSchema;
stateFile: File;
screenshotFile?: File;
}): Promise<{ data: StateSchema }> {
const formData = new FormData();
formData.append("stateFile", stateFile);
if (screenshotFile) formData.append("screenshotFile", screenshotFile);

return api.put(`/states/${state.id}`, formData);
}

@@ -72,6 +72,12 @@ export async function saveSave({
saveFile: new File([saveFile], save.file_name, {
type: "application/octet-stream",
}),
screenshotFile:
screenshotFile && save.screenshot
? new File([screenshotFile], save.screenshot.file_name, {
type: "application/octet-stream",
})
: undefined,
});

// Update the save in the rom object

poetry.lock (generated, 45 changes)
@@ -3707,24 +3707,6 @@ files = [
|
||||
{file = "Unidecode-1.3.8.tar.gz", hash = "sha256:cfdb349d46ed3873ece4586b96aa75258726e2fa8ec21d6f00a591d98806c2f4"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "urllib3"
|
||||
version = "1.26.20"
|
||||
description = "HTTP library with thread-safe connection pooling, file post, and more."
|
||||
optional = false
|
||||
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
|
||||
groups = ["main"]
|
||||
markers = "platform_python_implementation == \"PyPy\""
|
||||
files = [
|
||||
{file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"},
|
||||
{file = "urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
brotli = ["brotli (==1.0.9) ; os_name != \"nt\" and python_version < \"3\" and platform_python_implementation == \"CPython\"", "brotli (>=1.0.9) ; python_version >= \"3\" and platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; (os_name != \"nt\" or python_version >= \"3\") and platform_python_implementation != \"CPython\"", "brotlipy (>=0.6.0) ; os_name == \"nt\" and python_version < \"3\""]
|
||||
secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress ; python_version == \"2.7\"", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
|
||||
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "urllib3"
|
||||
version = "2.3.0"
|
||||
@@ -3732,7 +3714,7 @@ description = "HTTP library with thread-safe connection pooling, file post, and
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
markers = "platform_python_implementation != \"PyPy\""
|
||||
markers = "platform_python_implementation == \"PyPy\""
|
||||
files = [
|
||||
{file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"},
|
||||
{file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"},
|
||||
@@ -3764,6 +3746,24 @@ h11 = ">=0.8"
|
||||
[package.extras]
|
||||
standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1) ; sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.13)", "websockets (>=10.4)"]
|
||||
|
||||
[[package]]
|
||||
name = "vcrpy"
|
||||
version = "5.1.0"
|
||||
description = "Automatically mock your HTTP interactions to simplify and speed up testing"
|
||||
optional = true
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
markers = "platform_python_implementation == \"PyPy\" and extra == \"test\""
|
||||
files = [
|
||||
{file = "vcrpy-5.1.0-py2.py3-none-any.whl", hash = "sha256:605e7b7a63dcd940db1df3ab2697ca7faf0e835c0852882142bafb19649d599e"},
|
||||
{file = "vcrpy-5.1.0.tar.gz", hash = "sha256:bbf1532f2618a04f11bce2a99af3a9647a32c880957293ff91e0a5f187b6b3d2"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
PyYAML = "*"
|
||||
wrapt = "*"
|
||||
yarl = "*"
|
||||
|
||||
[[package]]
|
||||
name = "vcrpy"
|
||||
version = "7.0.0"
|
||||
@@ -3771,7 +3771,7 @@ description = "Automatically mock your HTTP interactions to simplify and speed u
|
||||
optional = true
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
markers = "platform_python_implementation == \"PyPy\" and extra == \"test\""
|
||||
markers = "platform_python_implementation != \"PyPy\" and extra == \"test\""
|
||||
files = [
|
||||
{file = "vcrpy-7.0.0-py2.py3-none-any.whl", hash = "sha256:55791e26c18daa363435054d8b35bd41a4ac441b6676167635d1b37a71dbe124"},
|
||||
{file = "vcrpy-7.0.0.tar.gz", hash = "sha256:176391ad0425edde1680c5b20738ea3dc7fb942520a48d2993448050986b3a50"},
|
||||
@@ -3779,10 +3779,7 @@ files = [
|
||||
|
||||
[package.dependencies]
|
||||
PyYAML = "*"
|
||||
urllib3 = [
|
||||
{version = "<2", markers = "platform_python_implementation == \"PyPy\""},
|
||||
{version = "*", markers = "platform_python_implementation != \"PyPy\" and python_version >= \"3.10\""},
|
||||
]
|
||||
urllib3 = {version = "*", markers = "platform_python_implementation != \"PyPy\" and python_version >= \"3.10\""}
|
||||
wrapt = "*"
|
||||
yarl = "*"