diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 210c696ca..96f1eefc8 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -2,7 +2,6 @@ name: Bug report about: Report a bug, issue or problem title: "[Bug] Bug title" -labels: bug assignees: "" --- diff --git a/.github/ISSUE_TEMPLATE/custom.md b/.github/ISSUE_TEMPLATE/custom.md index 425fe62cf..c994e3653 100644 --- a/.github/ISSUE_TEMPLATE/custom.md +++ b/.github/ISSUE_TEMPLATE/custom.md @@ -2,6 +2,5 @@ name: Custom issue template about: Describe this issue template's purpose here. title: "[Other] Custom issue title" -labels: other assignees: "" --- diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md index 0db4178cd..f74cf4009 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -2,7 +2,6 @@ name: Feature request about: Suggest an idea for this project title: "[Feature] Feature title" -labels: feature assignees: "" --- diff --git a/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md b/.github/PULL_REQUEST_TEMPLATE.md similarity index 100% rename from .github/PULL_REQUEST_TEMPLATE/pull_request_template.md rename to .github/PULL_REQUEST_TEMPLATE.md diff --git a/.github/PULL_REQUEST_TEMPLATE/config.yml b/.github/PULL_REQUEST_TEMPLATE/config.yml deleted file mode 100644 index 4172e3df9..000000000 --- a/.github/PULL_REQUEST_TEMPLATE/config.yml +++ /dev/null @@ -1 +0,0 @@ -blank_pull_request_template_enabled: false diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 000000000..9b3ed2164 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,16 @@ +version: 2 +updates: + - package-ecosystem: "npm" + directory: "/frontend" + schedule: + interval: "weekly" + reviewers: + - "gantoine" + target-branch: "master" + - package-ecosystem: "pip" + directory: "/" + schedule: + interval: "weekly" + reviewers: + - 
"adamantike" + target-branch: "master" diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 30b80beab..f9deec872 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -50,7 +50,7 @@ jobs: - name: Install dependencies run: | - poetry install --sync + poetry sync --extras test - name: Initiate database run: | diff --git a/DEVELOPER_SETUP.md b/DEVELOPER_SETUP.md index fec76bc6d..9c4c900f1 100644 --- a/DEVELOPER_SETUP.md +++ b/DEVELOPER_SETUP.md @@ -45,14 +45,14 @@ Then create the virtual environment ```sh # Fix disable parallel installation stuck: $> poetry config experimental.new-installer false # Fix Loading macOS/linux stuck: $> export PYTHON_KEYRING_BACKEND=keyring.backends.null.Keyring -poetry install --sync +poetry sync ``` If you are on Arch Linux or another Arch-based distro, you need to run the command as follows: ```sh # https://bbs.archlinux.org/viewtopic.php?id=296542 -CFLAGS="-Wno-error=incompatible-pointer-types" poetry install --sync +CFLAGS="-Wno-error=incompatible-pointer-types" poetry sync ``` #### - Spin up mariadb in docker diff --git a/backend/alembic/versions/0009_models_refactor.py b/backend/alembic/versions/0009_models_refactor.py index 189462beb..9a48ea38e 100644 --- a/backend/alembic/versions/0009_models_refactor.py +++ b/backend/alembic/versions/0009_models_refactor.py @@ -9,7 +9,7 @@ import sqlalchemy as sa from alembic import op from sqlalchemy.exc import OperationalError -from utils.database import CustomJSON +from utils.database import CustomJSON, is_postgresql # revision identifiers, used by Alembic. 
revision = "0009_models_refactor" @@ -21,6 +21,10 @@ def upgrade() -> None: connection = op.get_bind() + json_array_build_func = ( + "jsonb_build_array()" if is_postgresql(connection) else "JSON_ARRAY()" + ) + try: with op.batch_alter_table("platforms", schema=None) as batch_op: batch_op.alter_column( @@ -87,13 +91,13 @@ def upgrade() -> None: "url_screenshots", existing_type=CustomJSON(), nullable=True, - existing_server_default=sa.text("(JSON_ARRAY())"), + existing_server_default=sa.text(f"({json_array_build_func})"), ) batch_op.alter_column( "path_screenshots", existing_type=CustomJSON(), nullable=True, - existing_server_default=sa.text("(JSON_ARRAY())"), + existing_server_default=sa.text(f"({json_array_build_func})"), ) try: @@ -108,6 +112,10 @@ def upgrade() -> None: def downgrade() -> None: connection = op.get_bind() + json_array_build_func = ( + "jsonb_build_array()" if is_postgresql(connection) else "JSON_ARRAY()" + ) + with op.batch_alter_table("roms", schema=None) as batch_op: batch_op.alter_column( "igdb_id", @@ -136,13 +144,13 @@ def downgrade() -> None: "path_screenshots", existing_type=CustomJSON(), nullable=False, - existing_server_default=sa.text("(JSON_ARRAY())"), + existing_server_default=sa.text(f"({json_array_build_func})"), ) batch_op.alter_column( "url_screenshots", existing_type=CustomJSON(), nullable=False, - existing_server_default=sa.text("(JSON_ARRAY())"), + existing_server_default=sa.text(f"({json_array_build_func})"), ) batch_op.alter_column( "file_size_units", existing_type=sa.VARCHAR(length=10), nullable=True diff --git a/backend/alembic/versions/0012_add_regions_languages.py b/backend/alembic/versions/0012_add_regions_languages.py index 3cb2029da..b60010abe 100644 --- a/backend/alembic/versions/0012_add_regions_languages.py +++ b/backend/alembic/versions/0012_add_regions_languages.py @@ -18,19 +18,22 @@ def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### + connection = op.get_bind() + with op.batch_alter_table("roms", schema=None) as batch_op: batch_op.add_column(sa.Column("regions", CustomJSON(), nullable=True)) batch_op.add_column(sa.Column("languages", CustomJSON(), nullable=True)) with op.batch_alter_table("roms", schema=None) as batch_op: # Set default values for languages and regions - batch_op.execute("UPDATE roms SET languages = JSON_ARRAY()") - batch_op.execute("UPDATE roms SET regions = JSON_ARRAY(region)") + if is_postgresql(connection): + batch_op.execute("UPDATE roms SET languages = jsonb_build_array()") + batch_op.execute("UPDATE roms SET regions = jsonb_build_array(region)") + else: + batch_op.execute("UPDATE roms SET languages = JSON_ARRAY()") + batch_op.execute("UPDATE roms SET regions = JSON_ARRAY(region)") batch_op.drop_column("region") - # ### end Alembic commands ### - def downgrade() -> None: connection = op.get_bind() diff --git a/backend/alembic/versions/0014_asset_files.py b/backend/alembic/versions/0014_asset_files.py index f1c994301..cc433c261 100644 --- a/backend/alembic/versions/0014_asset_files.py +++ b/backend/alembic/versions/0014_asset_files.py @@ -191,7 +191,10 @@ def upgrade() -> None: # Move data around with op.batch_alter_table("roms", schema=None) as batch_op: - batch_op.execute("update roms set igdb_metadata = JSON_OBJECT()") + if is_postgresql(connection): + batch_op.execute("update roms set igdb_metadata = jsonb_build_object()") + else: + batch_op.execute("update roms set igdb_metadata = JSON_OBJECT()") batch_op.execute( "update roms set path_cover_s = '', path_cover_l = '', url_cover = '' where url_cover = 'https://images.igdb.com/igdb/image/upload/t_cover_big/nocover.png'" ) diff --git a/backend/alembic/versions/0015_mobygames_data.py b/backend/alembic/versions/0015_mobygames_data.py index f8ca0e1e2..b402b7340 100644 --- a/backend/alembic/versions/0015_mobygames_data.py +++ b/backend/alembic/versions/0015_mobygames_data.py @@ -8,7 +8,7 @@ import sqlalchemy as sa from 
alembic import op -from utils.database import CustomJSON +from utils.database import CustomJSON, is_postgresql # revision identifiers, used by Alembic. revision = "0015_mobygames_data" @@ -18,7 +18,8 @@ def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### + connection = op.get_bind() + with op.batch_alter_table("platforms", schema=None) as batch_op: batch_op.add_column(sa.Column("moby_id", sa.Integer(), nullable=True)) @@ -27,9 +28,10 @@ def upgrade() -> None: batch_op.add_column(sa.Column("moby_metadata", CustomJSON(), nullable=True)) with op.batch_alter_table("roms", schema=None) as batch_op: - batch_op.execute("update roms set moby_metadata = JSON_OBJECT()") - - # ### end Alembic commands ### + if is_postgresql(connection): + batch_op.execute("update roms set moby_metadata = jsonb_build_object()") + else: + batch_op.execute("update roms set moby_metadata = JSON_OBJECT()") def downgrade() -> None: diff --git a/backend/alembic/versions/0030_user_email_null.py b/backend/alembic/versions/0030_user_email_null.py new file mode 100644 index 000000000..14d6ba41f --- /dev/null +++ b/backend/alembic/versions/0030_user_email_null.py @@ -0,0 +1,25 @@ +"""Change empty string in users.email to NULL. + +Revision ID: 0030_user_email_null +Revises: 0029_platforms_custom_name +Create Date: 2025-01-14 01:30:39.696257 + +""" + +from alembic import op + +# revision identifiers, used by Alembic. 
+revision = "0030_user_email_null" +down_revision = "0029_platforms_custom_name" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + with op.batch_alter_table("users", schema=None) as batch_op: + batch_op.execute("UPDATE users SET email = NULL WHERE email = ''") + + +def downgrade() -> None: + with op.batch_alter_table("users", schema=None) as batch_op: + batch_op.execute("UPDATE users SET email = '' WHERE email IS NULL") diff --git a/backend/alembic/versions/0031_datetime_to_timestamp.py b/backend/alembic/versions/0031_datetime_to_timestamp.py new file mode 100644 index 000000000..77074246d --- /dev/null +++ b/backend/alembic/versions/0031_datetime_to_timestamp.py @@ -0,0 +1,351 @@ +"""Convert DATETIME columns to timezone-aware TIMESTAMP. + +Revision ID: 0031_datetime_to_timestamp +Revises: 0030_user_email_null +Create Date: 2025-01-14 04:13:33.209508 + +""" + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import mysql + +# revision identifiers, used by Alembic. +revision = "0031_datetime_to_timestamp" +down_revision = "0030_user_email_null" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table("collections", schema=None) as batch_op: + batch_op.alter_column( + "created_at", + existing_type=mysql.DATETIME(), + type_=sa.TIMESTAMP(timezone=True), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + batch_op.alter_column( + "updated_at", + existing_type=mysql.DATETIME(), + type_=sa.TIMESTAMP(timezone=True), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + + with op.batch_alter_table("firmware", schema=None) as batch_op: + batch_op.alter_column( + "created_at", + existing_type=mysql.DATETIME(), + type_=sa.TIMESTAMP(timezone=True), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + batch_op.alter_column( + "updated_at", + existing_type=mysql.DATETIME(), + type_=sa.TIMESTAMP(timezone=True), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + + with op.batch_alter_table("platforms", schema=None) as batch_op: + batch_op.alter_column( + "created_at", + existing_type=mysql.DATETIME(), + type_=sa.TIMESTAMP(timezone=True), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + batch_op.alter_column( + "updated_at", + existing_type=mysql.DATETIME(), + type_=sa.TIMESTAMP(timezone=True), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + + with op.batch_alter_table("rom_user", schema=None) as batch_op: + batch_op.alter_column( + "last_played", + existing_type=mysql.DATETIME(), + type_=sa.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + batch_op.alter_column( + "created_at", + existing_type=mysql.DATETIME(), + type_=sa.TIMESTAMP(timezone=True), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + batch_op.alter_column( + "updated_at", + existing_type=mysql.DATETIME(), + type_=sa.TIMESTAMP(timezone=True), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + + with op.batch_alter_table("roms", schema=None) as batch_op: + 
batch_op.alter_column( + "created_at", + existing_type=mysql.DATETIME(), + type_=sa.TIMESTAMP(timezone=True), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + batch_op.alter_column( + "updated_at", + existing_type=mysql.DATETIME(), + type_=sa.TIMESTAMP(timezone=True), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + + with op.batch_alter_table("saves", schema=None) as batch_op: + batch_op.alter_column( + "created_at", + existing_type=mysql.DATETIME(), + type_=sa.TIMESTAMP(timezone=True), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + batch_op.alter_column( + "updated_at", + existing_type=mysql.DATETIME(), + type_=sa.TIMESTAMP(timezone=True), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + + with op.batch_alter_table("screenshots", schema=None) as batch_op: + batch_op.alter_column( + "created_at", + existing_type=mysql.DATETIME(), + type_=sa.TIMESTAMP(timezone=True), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + batch_op.alter_column( + "updated_at", + existing_type=mysql.DATETIME(), + type_=sa.TIMESTAMP(timezone=True), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + + with op.batch_alter_table("states", schema=None) as batch_op: + batch_op.alter_column( + "created_at", + existing_type=mysql.DATETIME(), + type_=sa.TIMESTAMP(timezone=True), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + batch_op.alter_column( + "updated_at", + existing_type=mysql.DATETIME(), + type_=sa.TIMESTAMP(timezone=True), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + + with op.batch_alter_table("users", schema=None) as batch_op: + batch_op.alter_column( + "last_login", + existing_type=mysql.DATETIME(), + type_=sa.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + batch_op.alter_column( + "last_active", + existing_type=mysql.DATETIME(), + 
type_=sa.TIMESTAMP(timezone=True), + existing_nullable=True, + ) + batch_op.alter_column( + "created_at", + existing_type=mysql.DATETIME(), + type_=sa.TIMESTAMP(timezone=True), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + batch_op.alter_column( + "updated_at", + existing_type=mysql.DATETIME(), + type_=sa.TIMESTAMP(timezone=True), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table("users", schema=None) as batch_op: + batch_op.alter_column( + "updated_at", + existing_type=sa.TIMESTAMP(timezone=True), + type_=mysql.DATETIME(), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + batch_op.alter_column( + "created_at", + existing_type=sa.TIMESTAMP(timezone=True), + type_=mysql.DATETIME(), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + batch_op.alter_column( + "last_active", + existing_type=sa.TIMESTAMP(timezone=True), + type_=mysql.DATETIME(), + existing_nullable=True, + ) + batch_op.alter_column( + "last_login", + existing_type=sa.TIMESTAMP(timezone=True), + type_=mysql.DATETIME(), + existing_nullable=True, + ) + + with op.batch_alter_table("states", schema=None) as batch_op: + batch_op.alter_column( + "updated_at", + existing_type=sa.TIMESTAMP(timezone=True), + type_=mysql.DATETIME(), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + batch_op.alter_column( + "created_at", + existing_type=sa.TIMESTAMP(timezone=True), + type_=mysql.DATETIME(), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + + with op.batch_alter_table("screenshots", schema=None) as batch_op: + batch_op.alter_column( + "updated_at", + existing_type=sa.TIMESTAMP(timezone=True), + type_=mysql.DATETIME(), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + 
batch_op.alter_column( + "created_at", + existing_type=sa.TIMESTAMP(timezone=True), + type_=mysql.DATETIME(), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + + with op.batch_alter_table("saves", schema=None) as batch_op: + batch_op.alter_column( + "updated_at", + existing_type=sa.TIMESTAMP(timezone=True), + type_=mysql.DATETIME(), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + batch_op.alter_column( + "created_at", + existing_type=sa.TIMESTAMP(timezone=True), + type_=mysql.DATETIME(), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + + with op.batch_alter_table("roms", schema=None) as batch_op: + batch_op.alter_column( + "updated_at", + existing_type=sa.TIMESTAMP(timezone=True), + type_=mysql.DATETIME(), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + batch_op.alter_column( + "created_at", + existing_type=sa.TIMESTAMP(timezone=True), + type_=mysql.DATETIME(), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + + with op.batch_alter_table("rom_user", schema=None) as batch_op: + batch_op.alter_column( + "updated_at", + existing_type=sa.TIMESTAMP(timezone=True), + type_=mysql.DATETIME(), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + batch_op.alter_column( + "created_at", + existing_type=sa.TIMESTAMP(timezone=True), + type_=mysql.DATETIME(), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + batch_op.alter_column( + "last_played", + existing_type=sa.TIMESTAMP(timezone=True), + type_=mysql.DATETIME(), + existing_nullable=True, + ) + + with op.batch_alter_table("platforms", schema=None) as batch_op: + batch_op.alter_column( + "updated_at", + existing_type=sa.TIMESTAMP(timezone=True), + type_=mysql.DATETIME(), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + batch_op.alter_column( + "created_at", + existing_type=sa.TIMESTAMP(timezone=True), + 
type_=mysql.DATETIME(), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + + with op.batch_alter_table("firmware", schema=None) as batch_op: + batch_op.alter_column( + "updated_at", + existing_type=sa.TIMESTAMP(timezone=True), + type_=mysql.DATETIME(), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + batch_op.alter_column( + "created_at", + existing_type=sa.TIMESTAMP(timezone=True), + type_=mysql.DATETIME(), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + + with op.batch_alter_table("collections", schema=None) as batch_op: + batch_op.alter_column( + "updated_at", + existing_type=sa.TIMESTAMP(timezone=True), + type_=mysql.DATETIME(), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + batch_op.alter_column( + "created_at", + existing_type=sa.TIMESTAMP(timezone=True), + type_=mysql.DATETIME(), + existing_nullable=False, + existing_server_default=sa.text("now()"), + ) + + # ### end Alembic commands ### diff --git a/backend/alembic/versions/0032_longer_fs_fields.py b/backend/alembic/versions/0032_longer_fs_fields.py new file mode 100644 index 000000000..ff99e426e --- /dev/null +++ b/backend/alembic/versions/0032_longer_fs_fields.py @@ -0,0 +1,66 @@ +"""Lengthen platform slug, fs_slug, and category columns to 100 chars. + +Revision ID: 0032_longer_fs_fields +Revises: 0031_datetime_to_timestamp +Create Date: 2025-01-24 02:18:30.069263 + +""" + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import mysql + +# revision identifiers, used by Alembic. +revision = "0032_longer_fs_fields" +down_revision = "0031_datetime_to_timestamp" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table("platforms", schema=None) as batch_op: + batch_op.alter_column( + "slug", + existing_type=mysql.VARCHAR(length=50), + type_=sa.String(length=100), + existing_nullable=False, + ) + batch_op.alter_column( + "fs_slug", + existing_type=mysql.VARCHAR(length=50), + type_=sa.String(length=100), + existing_nullable=False, + ) + batch_op.alter_column( + "category", + existing_type=mysql.VARCHAR(length=50), + type_=sa.String(length=100), + existing_nullable=True, + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table("platforms", schema=None) as batch_op: + batch_op.alter_column( + "category", + existing_type=sa.String(length=100), + type_=mysql.VARCHAR(length=50), + existing_nullable=True, + ) + batch_op.alter_column( + "fs_slug", + existing_type=sa.String(length=100), + type_=mysql.VARCHAR(length=50), + existing_nullable=False, + ) + batch_op.alter_column( + "slug", + existing_type=sa.String(length=100), + type_=mysql.VARCHAR(length=50), + existing_nullable=False, + ) + + # ### end Alembic commands ### diff --git a/backend/alembic/versions/1.8.1_.py b/backend/alembic/versions/1.8.1_.py index 1ea4b759b..258405f71 100644 --- a/backend/alembic/versions/1.8.1_.py +++ b/backend/alembic/versions/1.8.1_.py @@ -8,7 +8,7 @@ import sqlalchemy as sa from alembic import op -from utils.database import CustomJSON +from utils.database import CustomJSON, is_postgresql # revision identifiers, used by Alembic. revision = "1.8.1" @@ -18,14 +18,19 @@ def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### + connection = op.get_bind() + + json_array_build_func = ( + "jsonb_build_array()" if is_postgresql(connection) else "JSON_ARRAY()" + ) + with op.batch_alter_table("roms") as batch_op: batch_op.add_column( sa.Column( "url_screenshots", CustomJSON(), nullable=False, - server_default=sa.text("(JSON_ARRAY())"), + server_default=sa.text(f"({json_array_build_func})"), ) ) batch_op.add_column( @@ -33,10 +38,9 @@ def upgrade() -> None: "path_screenshots", CustomJSON(), nullable=False, - server_default=sa.text("(JSON_ARRAY())"), + server_default=sa.text(f"({json_array_build_func})"), ) ) - # ### end Alembic commands ### def downgrade() -> None: diff --git a/backend/config/__init__.py b/backend/config/__init__.py index 238ab0258..e4ba1e0cd 100644 --- a/backend/config/__init__.py +++ b/backend/config/__init__.py @@ -117,7 +117,7 @@ def str_to_bool(value: str) -> bool: UPLOAD_TIMEOUT = int(os.environ.get("UPLOAD_TIMEOUT", 600)) # LOGGING -LOGLEVEL: Final = os.environ.get("LOGLEVEL", "INFO") +LOGLEVEL: Final = os.environ.get("LOGLEVEL", "INFO").upper() FORCE_COLOR: Final = str_to_bool(os.environ.get("FORCE_COLOR", "false")) NO_COLOR: Final = str_to_bool(os.environ.get("NO_COLOR", "false")) diff --git a/backend/endpoints/rom.py b/backend/endpoints/rom.py index ce2e54403..64421671a 100644 --- a/backend/endpoints/rom.py +++ b/backend/endpoints/rom.py @@ -283,6 +283,18 @@ async def get_rom_content( log.info(f"User {current_username} is downloading {rom.file_name}") if not rom.multi: + # Serve the file directly in development mode for emulatorjs + if DEV_MODE: + return FileResponse( + path=rom_path, + filename=rom.file_name, + headers={ + "Content-Disposition": f'attachment; filename="{quote(rom.file_name)}"', + "Content-Type": "application/octet-stream", + "Content-Length": str(rom.file_size_bytes), + }, + ) + return FileRedirectResponse( download_path=Path(f"/library/{rom.full_path}"), filename=rom.file_name, @@ -377,8 +389,8 @@ async def update_rom( ) cleaned_data = { - 
"igdb_id": data.get("igdb_id", None), - "moby_id": data.get("moby_id", None), + "igdb_id": data.get("igdb_id", rom.igdb_id), + "moby_id": data.get("moby_id", rom.moby_id), } if ( @@ -558,7 +570,7 @@ async def delete_roms( @protected_route(router.put, "/roms/{id}/props", [Scope.ROMS_USER_WRITE]) async def update_rom_user(request: Request, id: int) -> RomUserSchema: data = await request.json() - data = data.get("data", {}) + rom_user_data = data.get("data", {}) rom = db_rom_handler.get_rom(id) @@ -580,10 +592,13 @@ async def update_rom_user(request: Request, id: int) -> RomUserSchema: "difficulty", "completion", "status", - "last_played", ] - cleaned_data = {field: data[field] for field in fields_to_update if field in data} + cleaned_data = { + field: rom_user_data[field] + for field in fields_to_update + if field in rom_user_data + } if data.get("update_last_played", False): cleaned_data.update({"last_played": datetime.now(timezone.utc)}) diff --git a/backend/endpoints/user.py b/backend/endpoints/user.py index dbe24538e..bb341b279 100644 --- a/backend/endpoints/user.py +++ b/backend/endpoints/user.py @@ -1,5 +1,5 @@ from pathlib import Path -from typing import Annotated +from typing import Annotated, Any from anyio import open_file from config import ASSETS_BASE_PATH @@ -51,8 +51,7 @@ def add_user( detail="Forbidden", ) - existing_user_by_username = db_user_handler.get_user_by_username(username.lower()) - if existing_user_by_username: + if db_user_handler.get_user_by_username(username.lower()): msg = f"Username {username.lower()} already exists" log.error(msg) raise HTTPException( @@ -60,9 +59,8 @@ def add_user( detail=msg, ) - existing_user_by_email = db_user_handler.get_user_by_email(email.lower()) - if existing_user_by_email: - msg = f"Uesr with email {email.lower()} already exists" + if email and db_user_handler.get_user_by_email(email.lower()): + msg = f"User with email {email.lower()} already exists" log.error(msg) raise HTTPException( 
status_code=status.HTTP_400_BAD_REQUEST, @@ -72,7 +70,7 @@ def add_user( user = User( username=username.lower(), hashed_password=auth_handler.get_password_hash(password), - email=email.lower(), + email=email.lower() or None, role=Role[role.upper()], ) @@ -154,7 +152,7 @@ async def update_user( if db_user.id != request.user.id and request.user.role != Role.ADMIN: raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Forbidden") - cleaned_data = {} + cleaned_data: dict[str, Any] = {} if form_data.username and form_data.username != db_user.username: existing_user = db_user_handler.get_user_by_username(form_data.username.lower()) @@ -173,9 +171,10 @@ async def update_user( form_data.password ) - if form_data.email and form_data.email != db_user.email: - existing_user = db_user_handler.get_user_by_email(form_data.email.lower()) - if existing_user: + if form_data.email is not None and form_data.email != db_user.email: + if form_data.email and db_user_handler.get_user_by_email( + form_data.email.lower() + ): msg = f"User with email {form_data.email} already exists" log.error(msg) raise HTTPException( @@ -183,7 +182,7 @@ async def update_user( detail=msg, ) - cleaned_data["email"] = form_data.email.lower() + cleaned_data["email"] = form_data.email.lower() or None # You can't change your own role if form_data.role and request.user.id != id: diff --git a/backend/handler/auth/middleware.py b/backend/handler/auth/middleware.py index 112c1b09d..62b70dcd3 100644 --- a/backend/handler/auth/middleware.py +++ b/backend/handler/auth/middleware.py @@ -5,17 +5,26 @@ from joserfc.errors import BadSignatureError from joserfc.jwk import OctKey from starlette.datastructures import MutableHeaders, Secret -from starlette.requests import HTTPConnection +from starlette.requests import HTTPConnection, Request from starlette.types import ASGIApp, Message, Receive, Scope, Send from starlette_csrf.middleware import CSRFMiddleware class CustomCSRFMiddleware(CSRFMiddleware): async def 
__call__(self, scope: Scope, receive: Receive, send: Send) -> None: + # Skip CSRF check if not an HTTP request, like websockets if scope["type"] != "http": await self.app(scope, receive, send) return + request = Request(scope, receive) + + # Skip CSRF check if Authorization header is present + auth_scheme = request.headers.get("Authorization", "").split(" ", 1)[0].lower() + if auth_scheme == "bearer" or auth_scheme == "basic": + await self.app(scope, receive, send) + return + await super().__call__(scope, receive, send) diff --git a/backend/handler/database/roms_handler.py b/backend/handler/database/roms_handler.py index e9cca91aa..161fae962 100644 --- a/backend/handler/database/roms_handler.py +++ b/backend/handler/database/roms_handler.py @@ -276,18 +276,19 @@ def update_rom_user(self, id: int, data: dict, session: Session = None) -> RomUs rom_user = self.get_rom_user_by_id(id) - if data.get("is_main_sibling", False): - rom = self.get_rom(rom_user.rom_id) + if not data.get("is_main_sibling", False): + return rom_user - session.execute( - update(RomUser) - .where( - and_( - RomUser.rom_id.in_(r.id for r in rom.sibling_roms), - RomUser.user_id == rom_user.user_id, - ) + rom = self.get_rom(rom_user.rom_id) + session.execute( + update(RomUser) + .where( + and_( + RomUser.rom_id.in_(r.id for r in rom.sibling_roms), + RomUser.user_id == rom_user.user_id, ) - .values(is_main_sibling=False) ) + .values(is_main_sibling=False) + ) return self.get_rom_user_by_id(id) diff --git a/backend/handler/scan_handler.py b/backend/handler/scan_handler.py index f78b8f3ee..e37d62de5 100644 --- a/backend/handler/scan_handler.py +++ b/backend/handler/scan_handler.py @@ -46,7 +46,7 @@ "switch", "wiiu", "win", - "xbox-360", + "xbox360", "xboxone", ) ) diff --git a/backend/handler/socket_handler.py b/backend/handler/socket_handler.py index a3392fb14..ac274c508 100644 --- a/backend/handler/socket_handler.py +++ b/backend/handler/socket_handler.py @@ -1,5 +1,6 @@ import socketio # type: ignore 
from config import REDIS_URL +from utils import json as json_module class SocketHandler: @@ -7,6 +8,7 @@ def __init__(self) -> None: self.socket_server = socketio.AsyncServer( cors_allowed_origins="*", async_mode="asgi", + json=json_module, logger=False, engineio_logger=False, client_manager=socketio.AsyncRedisManager(str(REDIS_URL)), diff --git a/backend/models/base.py b/backend/models/base.py index 286af85cf..d606a69f5 100644 --- a/backend/models/base.py +++ b/backend/models/base.py @@ -1,13 +1,13 @@ from datetime import datetime -from sqlalchemy import DateTime, func +from sqlalchemy import TIMESTAMP, func from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column class BaseModel(DeclarativeBase): created_at: Mapped[datetime] = mapped_column( - DateTime(timezone=True), server_default=func.now() + TIMESTAMP(timezone=True), server_default=func.now() ) updated_at: Mapped[datetime] = mapped_column( - DateTime(timezone=True), server_default=func.now(), onupdate=func.now() + TIMESTAMP(timezone=True), server_default=func.now(), onupdate=func.now() ) diff --git a/backend/models/platform.py b/backend/models/platform.py index 370affac6..c293e9831 100644 --- a/backend/models/platform.py +++ b/backend/models/platform.py @@ -21,11 +21,11 @@ class Platform(BaseModel): igdb_id: Mapped[int | None] sgdb_id: Mapped[int | None] moby_id: Mapped[int | None] - slug: Mapped[str] = mapped_column(String(length=50)) - fs_slug: Mapped[str] = mapped_column(String(length=50)) + slug: Mapped[str] = mapped_column(String(length=100)) + fs_slug: Mapped[str] = mapped_column(String(length=100)) name: Mapped[str] = mapped_column(String(length=400)) custom_name: Mapped[str | None] = mapped_column(String(length=400), default="") - category: Mapped[str | None] = mapped_column(String(length=50), default="") + category: Mapped[str | None] = mapped_column(String(length=100), default="") generation: Mapped[int | None] family_name: Mapped[str | None] = mapped_column(String(length=1000), default="") 
family_slug: Mapped[str | None] = mapped_column(String(length=1000), default="") diff --git a/backend/models/rom.py b/backend/models/rom.py index ddf45a801..a01ed0221 100644 --- a/backend/models/rom.py +++ b/backend/models/rom.py @@ -8,8 +8,8 @@ from config import FRONTEND_RESOURCES_PATH from models.base import BaseModel from sqlalchemy import ( + TIMESTAMP, BigInteger, - DateTime, Enum, ForeignKey, Index, @@ -72,7 +72,7 @@ class Rom(BaseModel): Text, default="", doc="URL to cover image stored in IGDB" ) - revision: Mapped[str | None] = mapped_column(String(100)) + revision: Mapped[str | None] = mapped_column(String(length=100)) regions: Mapped[list[str] | None] = mapped_column(CustomJSON(), default=[]) languages: Mapped[list[str] | None] = mapped_column(CustomJSON(), default=[]) tags: Mapped[list[str] | None] = mapped_column(CustomJSON(), default=[]) @@ -84,9 +84,9 @@ class Rom(BaseModel): multi: Mapped[bool] = mapped_column(default=False) files: Mapped[list[RomFile] | None] = mapped_column(CustomJSON(), default=[]) - crc_hash: Mapped[str | None] = mapped_column(String(100)) - md5_hash: Mapped[str | None] = mapped_column(String(100)) - sha1_hash: Mapped[str | None] = mapped_column(String(100)) + crc_hash: Mapped[str | None] = mapped_column(String(length=100)) + md5_hash: Mapped[str | None] = mapped_column(String(length=100)) + sha1_hash: Mapped[str | None] = mapped_column(String(length=100)) platform_id: Mapped[int] = mapped_column( ForeignKey("platforms.id", ondelete="CASCADE") @@ -254,7 +254,7 @@ class RomUser(BaseModel): note_is_public: Mapped[bool] = mapped_column(default=False) is_main_sibling: Mapped[bool] = mapped_column(default=False) - last_played: Mapped[datetime | None] = mapped_column(DateTime(timezone=True)) + last_played: Mapped[datetime | None] = mapped_column(TIMESTAMP(timezone=True)) backlogged: Mapped[bool] = mapped_column(default=False) now_playing: Mapped[bool] = mapped_column(default=False) diff --git a/backend/models/user.py 
b/backend/models/user.py index 34046e862..da1a1784b 100644 --- a/backend/models/user.py +++ b/backend/models/user.py @@ -6,7 +6,7 @@ from handler.auth.constants import DEFAULT_SCOPES, FULL_SCOPES, WRITE_SCOPES, Scope from models.base import BaseModel -from sqlalchemy import DateTime, Enum, String +from sqlalchemy import TIMESTAMP, Enum, String from sqlalchemy.orm import Mapped, mapped_column, relationship from starlette.authentication import SimpleUser @@ -36,8 +36,8 @@ class User(BaseModel, SimpleUser): enabled: Mapped[bool] = mapped_column(default=True) role: Mapped[Role] = mapped_column(Enum(Role), default=Role.VIEWER) avatar_path: Mapped[str] = mapped_column(String(length=255), default="") - last_login: Mapped[datetime | None] = mapped_column(DateTime(timezone=True)) - last_active: Mapped[datetime | None] = mapped_column(DateTime(timezone=True)) + last_login: Mapped[datetime | None] = mapped_column(TIMESTAMP(timezone=True)) + last_active: Mapped[datetime | None] = mapped_column(TIMESTAMP(timezone=True)) saves: Mapped[list[Save]] = relationship(back_populates="user") states: Mapped[list[State]] = relationship(back_populates="user") diff --git a/backend/utils/database.py b/backend/utils/database.py index e148fe39c..1bf87ae5f 100644 --- a/backend/utils/database.py +++ b/backend/utils/database.py @@ -31,7 +31,7 @@ def json_array_contains_value( return func.json_contains(column, value) -def safe_float(value, default=0.0): +def safe_float(value: Any, default: float = 0.0) -> float: """Safely convert a value to float, returning default if conversion fails.""" try: return float(value) @@ -39,7 +39,7 @@ def safe_float(value, default=0.0): return default -def safe_int(value, default=0): +def safe_int(value: Any, default: int = 0) -> int: """Safely convert a value to int, returning default if conversion fails.""" try: return int(value) diff --git a/backend/utils/json.py b/backend/utils/json.py new file mode 100644 index 000000000..5d537b22a --- /dev/null +++ 
b/backend/utils/json.py @@ -0,0 +1,29 @@ +"""JSON-compatible module with sane defaults. + +Inspiration taken from `python-engineio`. +https://github.com/miguelgrinberg/python-engineio/blob/main/src/engineio/json.py +""" + +import datetime +import decimal +import json +import uuid +from json import * # noqa: F401, F403 +from json import dumps as __original_dumps +from typing import Any + + +class DefaultJSONEncoder(json.JSONEncoder): + """Custom JSON encoder that supports encoding additional types.""" + + def default(self, o: Any) -> Any: + if isinstance(o, (datetime.date, datetime.datetime, datetime.time)): + return o.isoformat() + if isinstance(o, (decimal.Decimal, uuid.UUID)): + return str(o) + return super().default(o) + + +def dumps(*args: Any, **kwargs: Any) -> str: # type: ignore[no-redef] + kwargs.setdefault("cls", DefaultJSONEncoder) + return __original_dumps(*args, **kwargs) diff --git a/backend/worker.py b/backend/worker.py index cda32afd2..73d2c66ce 100644 --- a/backend/worker.py +++ b/backend/worker.py @@ -1,10 +1,10 @@ import sentry_sdk from config import SENTRY_DSN from handler.redis_handler import redis_client -from rq import Connection, Queue, Worker +from rq import Queue, Worker from utils import get_version -listen = ["high", "default", "low"] +listen = ("high", "default", "low") sentry_sdk.init( dsn=SENTRY_DSN, @@ -14,6 +14,5 @@ if __name__ == "__main__": # Start the worker - with Connection(redis_client): - worker = Worker(map(Queue, listen)) - worker.work() + worker = Worker([Queue(name, connection=redis_client) for name in listen]) + worker.work() diff --git a/docker/Dockerfile b/docker/Dockerfile index ad257a4dc..90282d554 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -55,7 +55,7 @@ RUN poetry install --no-ansi --no-cache --only main FROM backend-build AS backend-dev-build -RUN poetry install --no-ansi --no-cache +RUN poetry install --no-ansi --no-cache --all-extras # TODO: Upgrade Alpine to the same version as the other stages, 
when RAHasher is updated to work diff --git a/frontend/.eslintignore b/frontend/.eslintignore deleted file mode 100644 index 084c7158f..000000000 --- a/frontend/.eslintignore +++ /dev/null @@ -1,17 +0,0 @@ -# Logs -logs -*.log -npm-debug.log* -yarn-debug.log* -yarn-error.log* -pnpm-debug.log* -lerna-debug.log* - -node_modules -.DS_Store -dist -dist-ssr -coverage -*.local -__generated__ -*.config.js diff --git a/frontend/eslint.config.js b/frontend/eslint.config.js index 30f42430f..e084f36ff 100644 --- a/frontend/eslint.config.js +++ b/frontend/eslint.config.js @@ -1,16 +1,30 @@ -/* eslint-disable */ +import eslint from "@eslint/js"; +import tseslint from "typescript-eslint"; +import globals from "globals"; +import vue from "eslint-plugin-vue"; -let eslint = require("@eslint/js"); -let tseslint = require("typescript-eslint"); -let globals = require("globals"); -let vue = require("eslint-plugin-vue"); - -module.exports = tseslint.config( +export default tseslint.config( eslint.configs.recommended, ...tseslint.configs.recommended, ...vue.configs["flat/recommended"], { - ignores: ["node_modules", "dist", "__generated__", "*.config.js"], + ignores: [ + "logs", + "*.log", + "npm-debug.log*", + "yarn-debug.log*", + "yarn-error.log*", + "pnpm-debug.log*", + "lerna-debug.log*", + "node_modules", + ".DS_Store", + "dist", + "dist-ssr", + "coverage", + "*.local", + "__generated__", + "*.config.js", + ], languageOptions: { parserOptions: { parser: "@typescript-eslint/parser", diff --git a/frontend/package.json b/frontend/package.json index 0e77fb99f..c9ee0d648 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -13,6 +13,7 @@ "bugs": { "url": "https://github.com/rommapp/romm/issues" }, + "type": "module", "keywords": [ "rom", "manager", diff --git a/frontend/src/components/Details/ActionBar.vue b/frontend/src/components/Details/ActionBar.vue index b93a3ee75..77acd3a87 100644 --- a/frontend/src/components/Details/ActionBar.vue +++ 
b/frontend/src/components/Details/ActionBar.vue @@ -4,6 +4,7 @@ import CopyRomDownloadLinkDialog from "@/components/common/Game/Dialog/CopyDownl import romApi from "@/services/api/rom"; import storeDownload from "@/stores/download"; import storeHeartbeat from "@/stores/heartbeat"; +import storeConfig from "@/stores/config"; import type { DetailedRom } from "@/stores/roms"; import type { Events } from "@/types/emitter"; import { @@ -14,6 +15,7 @@ import { } from "@/utils"; import type { Emitter } from "mitt"; import { computed, inject, ref } from "vue"; +import { storeToRefs } from "pinia"; // Props const props = defineProps<{ rom: DetailedRom }>(); @@ -22,13 +24,23 @@ const heartbeatStore = storeHeartbeat(); const emitter = inject>("emitter"); const playInfoIcon = ref("mdi-play"); const qrCodeIcon = ref("mdi-qrcode"); +const configStore = storeConfig(); +const { config } = storeToRefs(configStore); -const ejsEmulationSupported = computed(() => - isEJSEmulationSupported(props.rom.platform_slug, heartbeatStore.value), -); -const ruffleEmulationSupported = computed(() => - isRuffleEmulationSupported(props.rom.platform_slug, heartbeatStore.value), +const platformSlug = computed(() => + props.rom.platform_slug in config.value.PLATFORMS_VERSIONS + ? 
config.value.PLATFORMS_VERSIONS[props.rom.platform_slug] + : props.rom.platform_slug, ); + +const ejsEmulationSupported = computed(() => { + return isEJSEmulationSupported(platformSlug.value, heartbeatStore.value); +}); + +const ruffleEmulationSupported = computed(() => { + return isRuffleEmulationSupported(platformSlug.value, heartbeatStore.value); +}); + const is3DSRom = computed(() => { return is3DSCIARom(props.rom); }); diff --git a/frontend/src/components/Details/Info/GameInfo.vue b/frontend/src/components/Details/Info/GameInfo.vue index 9e79c8220..6232608bf 100644 --- a/frontend/src/components/Details/Info/GameInfo.vue +++ b/frontend/src/components/Details/Info/GameInfo.vue @@ -6,13 +6,15 @@ import type { DetailedRom } from "@/stores/roms"; import { storeToRefs } from "pinia"; import { ref } from "vue"; import { useRouter } from "vue-router"; -import { useDisplay } from "vuetify"; +import { useDisplay, useTheme } from "vuetify"; import { useI18n } from "vue-i18n"; +import { MdPreview } from "md-editor-v3"; // Props const { t } = useI18n(); const props = defineProps<{ rom: DetailedRom }>(); const { xs } = useDisplay(); +const theme = useTheme(); const show = ref(false); const carousel = ref(0); const router = useRouter(); @@ -119,7 +121,13 @@ function onFilterClick(filter: FilterType, value: string) { - {{ rom.summary }} + diff --git a/frontend/src/components/Details/Personal.vue b/frontend/src/components/Details/Personal.vue index c811545fa..209f6efef 100644 --- a/frontend/src/components/Details/Personal.vue +++ b/frontend/src/components/Details/Personal.vue @@ -3,7 +3,7 @@ import romApi from "@/services/api/rom"; import storeAuth from "@/stores/auth"; import type { DetailedRom } from "@/stores/roms"; import type { RomUserStatus } from "@/__generated__"; -import { difficultyEmojis, getTextForStatus, getEmojiForStatus } from "@/utils"; +import { getTextForStatus, getEmojiForStatus } from "@/utils"; import { MdEditor, MdPreview } from "md-editor-v3"; import 
"md-editor-v3/lib/style.css"; import { ref, watch } from "vue"; @@ -133,39 +133,37 @@ watch( romUser.rating = typeof $event === 'number' ? $event : parseInt($event) " - active-color="romm-accent-1" + active-color="yellow" /> - + {{ t("rom.difficulty") }} - - + + @update:model-value=" + romUser.difficulty = + typeof $event === 'number' ? $event : parseInt($event) + " + active-color="red" + /> - + {{ t("rom.completion") }} % - + >("emitter"); @@ -61,47 +63,68 @@ function setFilters() { ]); } -function fetchRoms() { +async function fetchRoms() { if (searchText.value) { // Auto hide android keyboard const inputElement = document.getElementById("search-text-field"); inputElement?.blur(); gettingRoms.value = true; - romApi - .getRoms({ searchTerm: searchText.value }) - .then(({ data }) => { - data = data.sort((a, b) => { - return a.platform_name.localeCompare(b.platform_name); - }); - romsStore.set(data); - romsStore.setFiltered(data, galleryFilterStore); - }) - .catch((error) => { - emitter?.emit("snackbarShow", { - msg: `Couldn't fetch roms: ${error}`, - icon: "mdi-close-circle", - color: "red", - timeout: 4000, - }); - console.error(`Couldn't fetch roms: ${error}`); - }) - .finally(() => { - gettingRoms.value = false; + + // Update URL with search term + router.replace({ query: { search: searchText.value } }); + + try { + const { data } = await romApi.getRoms({ searchTerm: searchText.value }); + const sortedData = data.sort((a, b) => { + return a.platform_name.localeCompare(b.platform_name); }); - galleryFilterStore.setFilterPlatforms([ - ...new Map( - romsStore.filteredRoms.map((rom) => { - const platform = allPlatforms.value.find( - (p) => p.id === rom.platform_id, - ); - return [rom.platform_name, platform]; - }), - ).values(), - ] as Platform[]); - setFilters(); - galleryFilterStore.activeFilterDrawer = false; + romsStore.set(sortedData); + romsStore.setFiltered(sortedData, galleryFilterStore); + } catch (error) { + emitter?.emit("snackbarShow", { + msg: `Couldn't 
fetch roms: ${error}`, + icon: "mdi-close-circle", + color: "red", + timeout: 4000, + }); + console.error(`Couldn't fetch roms: ${error}`); + } finally { + gettingRoms.value = false; + + galleryFilterStore.setFilterPlatforms([ + ...new Map( + romsStore.filteredRoms.map((rom) => { + const platform = allPlatforms.value.find( + (p) => p.id === rom.platform_id, + ); + return [rom.platform_name, platform]; + }), + ).values(), + ] as Platform[]); + setFilters(); + galleryFilterStore.activeFilterDrawer = false; + } } } + +onMounted(() => { + const { search: searchTerm } = router.currentRoute.value.query; + if (searchTerm && searchTerm !== searchText.value) { + searchText.value = searchTerm as string; + fetchRoms(); + } +}); + +watch( + router.currentRoute.value.query, + (query) => { + if (query.search && query.search !== searchText.value) { + searchText.value = query.search as string; + fetchRoms(); + } + }, + { deep: true }, +);