Compare commits


1 commit

Author: Daniel Dietzler
SHA1: d1e62c3736
Message: refactor: web stores => managers (1)
Date: 2025-04-28 13:20:07 +02:00
313 changed files with 2689 additions and 6574 deletions
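For context on the commit message: "web stores => managers" refers to moving web app state out of module-level Svelte stores and into dedicated manager objects. The sketch below is illustrative only and is not taken from this changeset; the names (notificationStore, NotificationManager) are hypothetical, and the actual Immich managers may rely on Svelte's own reactivity rather than a plain class.

// Illustrative sketch only; hypothetical names, not part of this diff.
// Before: state lives in a module-level Svelte store plus free helper functions.
import { writable, get } from 'svelte/store';

export const notificationStore = writable<string[]>([]);

export function addNotification(message: string) {
  notificationStore.update((items) => [...items, message]);
}

// After: the state and the operations on it live together in a manager class
// that is exported as a single shared instance.
class NotificationManager {
  private items: string[] = [];

  get notifications(): readonly string[] {
    return this.items;
  }

  add(message: string) {
    this.items = [...this.items, message];
  }

  clear() {
    this.items = [];
  }
}

export const notificationManager = new NotificationManager();

// Usage of both forms:
addNotification('upload complete');
console.log(get(notificationStore)); // ['upload complete']

notificationManager.add('upload complete');
console.log(notificationManager.notifications); // ['upload complete']

The practical difference is that consumers import one object that owns both the state and the operations on it, instead of a loose store plus free functions.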

View File

@@ -96,7 +96,7 @@ jobs:
type=raw,value=latest,enable=${{ github.event_name == 'release' }}
- name: Build and push image
uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
with:
file: cli/Dockerfile
platforms: linux/amd64,linux/arm64

View File

@@ -50,7 +50,7 @@ jobs:
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@28deaeda66b76a05916b6923827895f2b14ab387 # v3
uses: github/codeql-action/init@45775bd8235c68ba998cffa5171334d58593da47 # v3
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@@ -63,7 +63,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@28deaeda66b76a05916b6923827895f2b14ab387 # v3
uses: github/codeql-action/autobuild@45775bd8235c68ba998cffa5171334d58593da47 # v3
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@@ -76,6 +76,6 @@ jobs:
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@28deaeda66b76a05916b6923827895f2b14ab387 # v3
uses: github/codeql-action/analyze@45775bd8235c68ba998cffa5171334d58593da47 # v3
with:
category: '/language:${{matrix.language}}'

View File

@@ -205,7 +205,7 @@ jobs:
- name: Build and push image
id: build
uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
with:
context: ${{ env.context }}
file: ${{ env.file }}
@@ -266,7 +266,7 @@ jobs:
- build_and_push_ml
steps:
- name: Download digests
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4
with:
path: ${{ runner.temp }}/digests
pattern: ml-digests-${{ matrix.device }}-*
@@ -407,7 +407,7 @@ jobs:
- name: Build and push image
id: build
uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
with:
context: ${{ env.context }}
file: ${{ env.file }}
@@ -454,7 +454,7 @@ jobs:
- build_and_push_server
steps:
- name: Download digests
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4
with:
path: ${{ runner.temp }}/digests
pattern: server-digests-*

View File

@@ -72,5 +72,4 @@ jobs:
with:
name: docs-build-output
path: docs/build/
include-hidden-files: true
retention-days: 1

View File

@@ -95,7 +95,7 @@ jobs:
persist-credentials: false
- name: Download APK
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4
with:
name: release-apk-signed

View File

@@ -105,12 +105,12 @@ jobs:
actions: read
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5
uses: astral-sh/setup-uv@v5
- name: Run zizmor 🌈
run: uvx zizmor --format=sarif . > results.sarif
@@ -118,7 +118,7 @@ jobs:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Upload SARIF file
uses: github/codeql-action/upload-sarif@28deaeda66b76a05916b6923827895f2b14ab387 # v3
uses: github/codeql-action/upload-sarif@v3
with:
sarif_file: results.sarif
category: zizmor

View File

@@ -461,7 +461,7 @@ jobs:
- name: Install uv
uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5
# TODO: add caching when supported (https://github.com/actions/setup-python/pull/818)
# with:
# python-version: 3.11

.vscode/settings.json (vendored, 80 changed lines)
View File

@@ -1,63 +1,45 @@
{
"editor.formatOnSave": true,
"[javascript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.tabSize": 2,
"editor.formatOnSave": true
},
"[typescript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.tabSize": 2,
"editor.formatOnSave": true
},
"[css]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true,
"editor.tabSize": 2,
"editor.formatOnSave": true
},
"[svelte]": {
"editor.defaultFormatter": "svelte.svelte-vscode",
"editor.tabSize": 2
},
"svelte.enable-ts-plugin": true,
"eslint.validate": [
"javascript",
"svelte"
],
"typescript.preferences.importModuleSpecifier": "non-relative",
"[dart]": {
"editor.defaultFormatter": "Dart-Code.dart-code",
"editor.formatOnSave": true,
"editor.selectionHighlight": false,
"editor.suggest.snippetsPreventQuickSuggestions": false,
"editor.suggestSelection": "first",
"editor.tabCompletion": "onlySnippets",
"editor.wordBasedSuggestions": "off"
"editor.wordBasedSuggestions": "off",
"editor.defaultFormatter": "Dart-Code.dart-code"
},
"[javascript]": {
"editor.codeActionsOnSave": {
"source.organizeImports": "explicit",
"source.removeUnusedImports": "explicit"
},
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true,
"editor.tabSize": 2
},
"[json]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true,
"editor.tabSize": 2
},
"[jsonc]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true,
"editor.tabSize": 2
},
"[svelte]": {
"editor.codeActionsOnSave": {
"source.organizeImports": "explicit",
"source.removeUnusedImports": "explicit"
},
"editor.defaultFormatter": "svelte.svelte-vscode",
"editor.formatOnSave": true,
"editor.tabSize": 2
},
"[typescript]": {
"editor.codeActionsOnSave": {
"source.organizeImports": "explicit",
"source.removeUnusedImports": "explicit"
},
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true,
"editor.tabSize": 2
},
"cSpell.words": ["immich"],
"editor.formatOnSave": true,
"eslint.validate": ["javascript", "svelte"],
"cSpell.words": [
"immich"
],
"explorer.fileNesting.enabled": true,
"explorer.fileNesting.patterns": {
"*.dart": "${capture}.g.dart,${capture}.gr.dart,${capture}.drift.dart",
"*.ts": "${capture}.spec.ts,${capture}.mock.ts"
},
"svelte.enable-ts-plugin": true,
"typescript.preferences.importModuleSpecifier": "non-relative"
}
"*.ts": "${capture}.spec.ts,${capture}.mock.ts",
"*.dart": "${capture}.g.dart,${capture}.gr.dart,${capture}.drift.dart"
}
}

View File

@@ -17,9 +17,6 @@ e2e:
prod:
docker compose -f ./docker/docker-compose.prod.yml up --build -V --remove-orphans
prod-down:
docker compose -f ./docker/docker-compose.prod.yml down --remove-orphans
prod-scale:
docker compose -f ./docker/docker-compose.prod.yml up --build -V --scale immich-server=3 --scale immich-microservices=3 --remove-orphans

View File

@@ -1,4 +1,4 @@
FROM node:22.15.0-alpine3.20@sha256:686b8892b69879ef5bfd6047589666933508f9a5451c67320df3070ba0e9807b AS core
FROM node:22.14.0-alpine3.20@sha256:40be979442621049f40b1d51a26b55e281246b5de4e5f51a18da7beb6e17e3f9 AS core
WORKDIR /usr/src/open-api/typescript-sdk
COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./

cli/package-lock.json (generated, 254 changed lines)
View File

@@ -647,9 +647,9 @@
}
},
"node_modules/@eslint/core": {
"version": "0.13.0",
"resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.13.0.tgz",
"integrity": "sha512-yfkgDw1KR66rkT5A8ci4irzDysN7FRpq3ttJolR88OqQikAWqwA8j5VZyas+vjyBNFIJ7MfybJ9plMILI2UrCw==",
"version": "0.12.0",
"resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.12.0.tgz",
"integrity": "sha512-cmrR6pytBuSMTaBweKoGMwu3EiHiEC+DoyupPmlZ0HxBJBtIxwe+j/E4XPIKNx+Q74c8lXKPwYawBf5glsTkHg==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
@@ -697,9 +697,9 @@
}
},
"node_modules/@eslint/js": {
"version": "9.25.1",
"resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.25.1.tgz",
"integrity": "sha512-dEIwmjntEx8u3Uvv+kr3PDeeArL8Hw07H9kyYxCjnM9pBjfEhk6uLXSchxxzgiwtRhhzVzqmUSDFBOi1TuZ7qg==",
"version": "9.24.0",
"resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.24.0.tgz",
"integrity": "sha512-uIY/y3z0uvOGX8cp1C2fiC4+ZmBhp6yZWkojtHL1YEMnRt1Y63HB9TM17proGEmeG7HeUY+UP36F0aknKYTpYA==",
"dev": true,
"license": "MIT",
"engines": {
@@ -730,6 +730,19 @@
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
}
},
"node_modules/@eslint/plugin-kit/node_modules/@eslint/core": {
"version": "0.13.0",
"resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.13.0.tgz",
"integrity": "sha512-yfkgDw1KR66rkT5A8ci4irzDysN7FRpq3ttJolR88OqQikAWqwA8j5VZyas+vjyBNFIJ7MfybJ9plMILI2UrCw==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
"@types/json-schema": "^7.0.15"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
}
},
"node_modules/@humanfs/core": {
"version": "0.19.1",
"resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz",
@@ -1367,17 +1380,17 @@
"license": "MIT"
},
"node_modules/@typescript-eslint/eslint-plugin": {
"version": "8.31.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.31.0.tgz",
"integrity": "sha512-evaQJZ/J/S4wisevDvC1KFZkPzRetH8kYZbkgcTRyql3mcKsf+ZFDV1BVWUGTCAW5pQHoqn5gK5b8kn7ou9aFQ==",
"version": "8.30.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.30.1.tgz",
"integrity": "sha512-v+VWphxMjn+1t48/jO4t950D6KR8JaJuNXzi33Ve6P8sEmPr5k6CEXjdGwT6+LodVnEa91EQCtwjWNUCPweo+Q==",
"dev": true,
"license": "MIT",
"dependencies": {
"@eslint-community/regexpp": "^4.10.0",
"@typescript-eslint/scope-manager": "8.31.0",
"@typescript-eslint/type-utils": "8.31.0",
"@typescript-eslint/utils": "8.31.0",
"@typescript-eslint/visitor-keys": "8.31.0",
"@typescript-eslint/scope-manager": "8.30.1",
"@typescript-eslint/type-utils": "8.30.1",
"@typescript-eslint/utils": "8.30.1",
"@typescript-eslint/visitor-keys": "8.30.1",
"graphemer": "^1.4.0",
"ignore": "^5.3.1",
"natural-compare": "^1.4.0",
@@ -1397,16 +1410,16 @@
}
},
"node_modules/@typescript-eslint/parser": {
"version": "8.31.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.31.0.tgz",
"integrity": "sha512-67kYYShjBR0jNI5vsf/c3WG4u+zDnCTHTPqVMQguffaWWFs7artgwKmfwdifl+r6XyM5LYLas/dInj2T0SgJyw==",
"version": "8.30.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.30.1.tgz",
"integrity": "sha512-H+vqmWwT5xoNrXqWs/fesmssOW70gxFlgcMlYcBaWNPIEWDgLa4W9nkSPmhuOgLnXq9QYgkZ31fhDyLhleCsAg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/scope-manager": "8.31.0",
"@typescript-eslint/types": "8.31.0",
"@typescript-eslint/typescript-estree": "8.31.0",
"@typescript-eslint/visitor-keys": "8.31.0",
"@typescript-eslint/scope-manager": "8.30.1",
"@typescript-eslint/types": "8.30.1",
"@typescript-eslint/typescript-estree": "8.30.1",
"@typescript-eslint/visitor-keys": "8.30.1",
"debug": "^4.3.4"
},
"engines": {
@@ -1422,14 +1435,14 @@
}
},
"node_modules/@typescript-eslint/scope-manager": {
"version": "8.31.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.31.0.tgz",
"integrity": "sha512-knO8UyF78Nt8O/B64i7TlGXod69ko7z6vJD9uhSlm0qkAbGeRUSudcm0+K/4CrRjrpiHfBCjMWlc08Vav1xwcw==",
"version": "8.30.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.30.1.tgz",
"integrity": "sha512-+C0B6ChFXZkuaNDl73FJxRYT0G7ufVPOSQkqkpM/U198wUwUFOtgo1k/QzFh1KjpBitaK7R1tgjVz6o9HmsRPg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/types": "8.31.0",
"@typescript-eslint/visitor-keys": "8.31.0"
"@typescript-eslint/types": "8.30.1",
"@typescript-eslint/visitor-keys": "8.30.1"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -1440,14 +1453,14 @@
}
},
"node_modules/@typescript-eslint/type-utils": {
"version": "8.31.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.31.0.tgz",
"integrity": "sha512-DJ1N1GdjI7IS7uRlzJuEDCgDQix3ZVYVtgeWEyhyn4iaoitpMBX6Ndd488mXSx0xah/cONAkEaYyylDyAeHMHg==",
"version": "8.30.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.30.1.tgz",
"integrity": "sha512-64uBF76bfQiJyHgZISC7vcNz3adqQKIccVoKubyQcOnNcdJBvYOILV1v22Qhsw3tw3VQu5ll8ND6hycgAR5fEA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/typescript-estree": "8.31.0",
"@typescript-eslint/utils": "8.31.0",
"@typescript-eslint/typescript-estree": "8.30.1",
"@typescript-eslint/utils": "8.30.1",
"debug": "^4.3.4",
"ts-api-utils": "^2.0.1"
},
@@ -1464,9 +1477,9 @@
}
},
"node_modules/@typescript-eslint/types": {
"version": "8.31.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.31.0.tgz",
"integrity": "sha512-Ch8oSjVyYyJxPQk8pMiP2FFGYatqXQfQIaMp+TpuuLlDachRWpUAeEu1u9B/v/8LToehUIWyiKcA/w5hUFRKuQ==",
"version": "8.30.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.30.1.tgz",
"integrity": "sha512-81KawPfkuulyWo5QdyG/LOKbspyyiW+p4vpn4bYO7DM/hZImlVnFwrpCTnmNMOt8CvLRr5ojI9nU1Ekpw4RcEw==",
"dev": true,
"license": "MIT",
"engines": {
@@ -1478,14 +1491,14 @@
}
},
"node_modules/@typescript-eslint/typescript-estree": {
"version": "8.31.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.31.0.tgz",
"integrity": "sha512-xLmgn4Yl46xi6aDSZ9KkyfhhtnYI15/CvHbpOy/eR5NWhK/BK8wc709KKwhAR0m4ZKRP7h07bm4BWUYOCuRpQQ==",
"version": "8.30.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.30.1.tgz",
"integrity": "sha512-kQQnxymiUy9tTb1F2uep9W6aBiYODgq5EMSk6Nxh4Z+BDUoYUSa029ISs5zTzKBFnexQEh71KqwjKnRz58lusQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/types": "8.31.0",
"@typescript-eslint/visitor-keys": "8.31.0",
"@typescript-eslint/types": "8.30.1",
"@typescript-eslint/visitor-keys": "8.30.1",
"debug": "^4.3.4",
"fast-glob": "^3.3.2",
"is-glob": "^4.0.3",
@@ -1531,16 +1544,16 @@
}
},
"node_modules/@typescript-eslint/utils": {
"version": "8.31.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.31.0.tgz",
"integrity": "sha512-qi6uPLt9cjTFxAb1zGNgTob4x9ur7xC6mHQJ8GwEzGMGE9tYniublmJaowOJ9V2jUzxrltTPfdG2nKlWsq0+Ww==",
"version": "8.30.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.30.1.tgz",
"integrity": "sha512-T/8q4R9En2tcEsWPQgB5BQ0XJVOtfARcUvOa8yJP3fh9M/mXraLxZrkCfGb6ChrO/V3W+Xbd04RacUEqk1CFEQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@eslint-community/eslint-utils": "^4.4.0",
"@typescript-eslint/scope-manager": "8.31.0",
"@typescript-eslint/types": "8.31.0",
"@typescript-eslint/typescript-estree": "8.31.0"
"@typescript-eslint/scope-manager": "8.30.1",
"@typescript-eslint/types": "8.30.1",
"@typescript-eslint/typescript-estree": "8.30.1"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -1555,13 +1568,13 @@
}
},
"node_modules/@typescript-eslint/visitor-keys": {
"version": "8.31.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.31.0.tgz",
"integrity": "sha512-QcGHmlRHWOl93o64ZUMNewCdwKGU6WItOU52H0djgNmn1EOrhVudrDzXz4OycCRSCPwFCDrE2iIt5vmuUdHxuQ==",
"version": "8.30.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.30.1.tgz",
"integrity": "sha512-aEhgas7aJ6vZnNFC7K4/vMGDGyOiqWcYZPpIWrTKuTAlsvDNKy2GFDqh9smL+iq069ZvR0YzEeq0B8NJlLzjFA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/types": "8.31.0",
"@typescript-eslint/types": "8.30.1",
"eslint-visitor-keys": "^4.2.0"
},
"engines": {
@@ -1573,9 +1586,9 @@
}
},
"node_modules/@vitest/coverage-v8": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-3.1.2.tgz",
"integrity": "sha512-XDdaDOeaTMAMYW7N63AqoK32sYUWbXnTkC6tEbVcu3RlU1bB9of32T+PGf8KZvxqLNqeXhafDFqCkwpf2+dyaQ==",
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-3.1.1.tgz",
"integrity": "sha512-MgV6D2dhpD6Hp/uroUoAIvFqA8AuvXEFBC2eepG3WFc1pxTfdk1LEqqkWoWhjz+rytoqrnUUCdf6Lzco3iHkLQ==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -1588,7 +1601,7 @@
"istanbul-reports": "^3.1.7",
"magic-string": "^0.30.17",
"magicast": "^0.3.5",
"std-env": "^3.9.0",
"std-env": "^3.8.1",
"test-exclude": "^7.0.1",
"tinyrainbow": "^2.0.0"
},
@@ -1596,8 +1609,8 @@
"url": "https://opencollective.com/vitest"
},
"peerDependencies": {
"@vitest/browser": "3.1.2",
"vitest": "3.1.2"
"@vitest/browser": "3.1.1",
"vitest": "3.1.1"
},
"peerDependenciesMeta": {
"@vitest/browser": {
@@ -1606,14 +1619,14 @@
}
},
"node_modules/@vitest/expect": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-3.1.2.tgz",
"integrity": "sha512-O8hJgr+zREopCAqWl3uCVaOdqJwZ9qaDwUP7vy3Xigad0phZe9APxKhPcDNqYYi0rX5oMvwJMSCAXY2afqeTSA==",
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-3.1.1.tgz",
"integrity": "sha512-q/zjrW9lgynctNbwvFtQkGK9+vvHA5UzVi2V8APrp1C6fG6/MuYYkmlx4FubuqLycCeSdHD5aadWfua/Vr0EUA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@vitest/spy": "3.1.2",
"@vitest/utils": "3.1.2",
"@vitest/spy": "3.1.1",
"@vitest/utils": "3.1.1",
"chai": "^5.2.0",
"tinyrainbow": "^2.0.0"
},
@@ -1622,13 +1635,13 @@
}
},
"node_modules/@vitest/mocker": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.1.2.tgz",
"integrity": "sha512-kOtd6K2lc7SQ0mBqYv/wdGedlqPdM/B38paPY+OwJ1XiNi44w3Fpog82UfOibmHaV9Wod18A09I9SCKLyDMqgw==",
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.1.1.tgz",
"integrity": "sha512-bmpJJm7Y7i9BBELlLuuM1J1Q6EQ6K5Ye4wcyOpOMXMcePYKSIYlpcrCm4l/O6ja4VJA5G2aMJiuZkZdnxlC3SA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@vitest/spy": "3.1.2",
"@vitest/spy": "3.1.1",
"estree-walker": "^3.0.3",
"magic-string": "^0.30.17"
},
@@ -1649,9 +1662,9 @@
}
},
"node_modules/@vitest/pretty-format": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.1.2.tgz",
"integrity": "sha512-R0xAiHuWeDjTSB3kQ3OQpT8Rx3yhdOAIm/JM4axXxnG7Q/fS8XUwggv/A4xzbQA+drYRjzkMnpYnOGAc4oeq8w==",
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.1.1.tgz",
"integrity": "sha512-dg0CIzNx+hMMYfNmSqJlLSXEmnNhMswcn3sXO7Tpldr0LiGmg3eXdLLhwkv2ZqgHb/d5xg5F7ezNFRA1fA13yA==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -1662,13 +1675,13 @@
}
},
"node_modules/@vitest/runner": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-3.1.2.tgz",
"integrity": "sha512-bhLib9l4xb4sUMPXnThbnhX2Yi8OutBMA8Yahxa7yavQsFDtwY/jrUZwpKp2XH9DhRFJIeytlyGpXCqZ65nR+g==",
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-3.1.1.tgz",
"integrity": "sha512-X/d46qzJuEDO8ueyjtKfxffiXraPRfmYasoC4i5+mlLEJ10UvPb0XH5M9C3gWuxd7BAQhpK42cJgJtq53YnWVA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@vitest/utils": "3.1.2",
"@vitest/utils": "3.1.1",
"pathe": "^2.0.3"
},
"funding": {
@@ -1676,13 +1689,13 @@
}
},
"node_modules/@vitest/snapshot": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-3.1.2.tgz",
"integrity": "sha512-Q1qkpazSF/p4ApZg1vfZSQ5Yw6OCQxVMVrLjslbLFA1hMDrT2uxtqMaw8Tc/jy5DLka1sNs1Y7rBcftMiaSH/Q==",
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-3.1.1.tgz",
"integrity": "sha512-bByMwaVWe/+1WDf9exFxWWgAixelSdiwo2p33tpqIlM14vW7PRV5ppayVXtfycqze4Qhtwag5sVhX400MLBOOw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@vitest/pretty-format": "3.1.2",
"@vitest/pretty-format": "3.1.1",
"magic-string": "^0.30.17",
"pathe": "^2.0.3"
},
@@ -1691,9 +1704,9 @@
}
},
"node_modules/@vitest/spy": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-3.1.2.tgz",
"integrity": "sha512-OEc5fSXMws6sHVe4kOFyDSj/+4MSwst0ib4un0DlcYgQvRuYQ0+M2HyqGaauUMnjq87tmUaMNDxKQx7wNfVqPA==",
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-3.1.1.tgz",
"integrity": "sha512-+EmrUOOXbKzLkTDwlsc/xrwOlPDXyVk3Z6P6K4oiCndxz7YLpp/0R0UsWVOKT0IXWjjBJuSMk6D27qipaupcvQ==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -1704,13 +1717,13 @@
}
},
"node_modules/@vitest/utils": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-3.1.2.tgz",
"integrity": "sha512-5GGd0ytZ7BH3H6JTj9Kw7Prn1Nbg0wZVrIvou+UWxm54d+WoXXgAgjFJ8wn3LdagWLFSEfpPeyYrByZaGEZHLg==",
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-3.1.1.tgz",
"integrity": "sha512-1XIjflyaU2k3HMArJ50bwSh3wKWPD6Q47wz/NUSmRV0zNywPc4w79ARjg/i/aNINHwA+mIALhUVqD9/aUvZNgg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@vitest/pretty-format": "3.1.2",
"@vitest/pretty-format": "3.1.1",
"loupe": "^3.1.3",
"tinyrainbow": "^2.0.0"
},
@@ -2170,9 +2183,9 @@
"license": "MIT"
},
"node_modules/es-module-lexer": {
"version": "1.7.0",
"resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.7.0.tgz",
"integrity": "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==",
"version": "1.6.0",
"resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.6.0.tgz",
"integrity": "sha512-qqnD1yMU6tk/jnaMosogGySTZP8YtUgAffA9nMN+E/rjxcfRQ6IEk7IiozUjgxKoFHBGjTLnrHB/YC45r/59EQ==",
"dev": true,
"license": "MIT"
},
@@ -2241,20 +2254,20 @@
}
},
"node_modules/eslint": {
"version": "9.25.1",
"resolved": "https://registry.npmjs.org/eslint/-/eslint-9.25.1.tgz",
"integrity": "sha512-E6Mtz9oGQWDCpV12319d59n4tx9zOTXSTmc8BLVxBx+G/0RdM5MvEEJLU9c0+aleoePYYgVTOsRblx433qmhWQ==",
"version": "9.24.0",
"resolved": "https://registry.npmjs.org/eslint/-/eslint-9.24.0.tgz",
"integrity": "sha512-eh/jxIEJyZrvbWRe4XuVclLPDYSYYYgLy5zXGGxD6j8zjSAxFEzI2fL/8xNq6O2yKqVt+eF2YhV+hxjV6UKXwQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@eslint-community/eslint-utils": "^4.2.0",
"@eslint-community/regexpp": "^4.12.1",
"@eslint/config-array": "^0.20.0",
"@eslint/config-helpers": "^0.2.1",
"@eslint/core": "^0.13.0",
"@eslint/config-helpers": "^0.2.0",
"@eslint/core": "^0.12.0",
"@eslint/eslintrc": "^3.3.1",
"@eslint/js": "9.25.1",
"@eslint/plugin-kit": "^0.2.8",
"@eslint/js": "9.24.0",
"@eslint/plugin-kit": "^0.2.7",
"@humanfs/node": "^0.16.6",
"@humanwhocodes/module-importer": "^1.0.1",
"@humanwhocodes/retry": "^0.4.2",
@@ -4184,15 +4197,15 @@
}
},
"node_modules/typescript-eslint": {
"version": "8.31.0",
"resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.31.0.tgz",
"integrity": "sha512-u+93F0sB0An8WEAPtwxVhFby573E8ckdjwUUQUj9QA4v8JAvgtoDdIyYR3XFwFHq2W1KJ1AurwJCO+w+Y1ixyQ==",
"version": "8.30.1",
"resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.30.1.tgz",
"integrity": "sha512-D7lC0kcehVH7Mb26MRQi64LMyRJsj3dToJxM1+JVTl53DQSV5/7oUGWQLcKl1C1KnoVHxMMU2FNQMffr7F3Row==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/eslint-plugin": "8.31.0",
"@typescript-eslint/parser": "8.31.0",
"@typescript-eslint/utils": "8.31.0"
"@typescript-eslint/eslint-plugin": "8.30.1",
"@typescript-eslint/parser": "8.30.1",
"@typescript-eslint/utils": "8.30.1"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -4279,18 +4292,18 @@
}
},
"node_modules/vite": {
"version": "6.3.3",
"resolved": "https://registry.npmjs.org/vite/-/vite-6.3.3.tgz",
"integrity": "sha512-5nXH+QsELbFKhsEfWLkHrvgRpTdGJzqOZ+utSdmPTvwHmvU6ITTm3xx+mRusihkcI8GeC7lCDyn3kDtiki9scw==",
"version": "6.3.2",
"resolved": "https://registry.npmjs.org/vite/-/vite-6.3.2.tgz",
"integrity": "sha512-ZSvGOXKGceizRQIZSz7TGJ0pS3QLlVY/9hwxVh17W3re67je1RKYzFHivZ/t0tubU78Vkyb9WnHPENSBCzbckg==",
"dev": true,
"license": "MIT",
"dependencies": {
"esbuild": "^0.25.0",
"fdir": "^6.4.4",
"fdir": "^6.4.3",
"picomatch": "^4.0.2",
"postcss": "^8.5.3",
"rollup": "^4.34.9",
"tinyglobby": "^0.2.13"
"tinyglobby": "^0.2.12"
},
"bin": {
"vite": "bin/vite.js"
@@ -4354,9 +4367,9 @@
}
},
"node_modules/vite-node": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.1.2.tgz",
"integrity": "sha512-/8iMryv46J3aK13iUXsei5G/A3CUlW4665THCPS+K8xAaqrVWiGB4RfXMQXCLjpK9P2eK//BczrVkn5JLAk6DA==",
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.1.1.tgz",
"integrity": "sha512-V+IxPAE2FvXpTCHXyNem0M+gWm6J7eRyWPR6vYoG/Gl+IscNOjXzztUhimQgTxaAoUoj40Qqimaa0NLIOOAH4w==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -4425,32 +4438,31 @@
}
},
"node_modules/vitest": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/vitest/-/vitest-3.1.2.tgz",
"integrity": "sha512-WaxpJe092ID1C0mr+LH9MmNrhfzi8I65EX/NRU/Ld016KqQNRgxSOlGNP1hHN+a/F8L15Mh8klwaF77zR3GeDQ==",
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/vitest/-/vitest-3.1.1.tgz",
"integrity": "sha512-kiZc/IYmKICeBAZr9DQ5rT7/6bD9G7uqQEki4fxazi1jdVl2mWGzedtBs5s6llz59yQhVb7FFY2MbHzHCnT79Q==",
"dev": true,
"license": "MIT",
"dependencies": {
"@vitest/expect": "3.1.2",
"@vitest/mocker": "3.1.2",
"@vitest/pretty-format": "^3.1.2",
"@vitest/runner": "3.1.2",
"@vitest/snapshot": "3.1.2",
"@vitest/spy": "3.1.2",
"@vitest/utils": "3.1.2",
"@vitest/expect": "3.1.1",
"@vitest/mocker": "3.1.1",
"@vitest/pretty-format": "^3.1.1",
"@vitest/runner": "3.1.1",
"@vitest/snapshot": "3.1.1",
"@vitest/spy": "3.1.1",
"@vitest/utils": "3.1.1",
"chai": "^5.2.0",
"debug": "^4.4.0",
"expect-type": "^1.2.1",
"expect-type": "^1.2.0",
"magic-string": "^0.30.17",
"pathe": "^2.0.3",
"std-env": "^3.9.0",
"std-env": "^3.8.1",
"tinybench": "^2.9.0",
"tinyexec": "^0.3.2",
"tinyglobby": "^0.2.13",
"tinypool": "^1.0.2",
"tinyrainbow": "^2.0.0",
"vite": "^5.0.0 || ^6.0.0",
"vite-node": "3.1.2",
"vite-node": "3.1.1",
"why-is-node-running": "^2.3.0"
},
"bin": {
@@ -4466,8 +4478,8 @@
"@edge-runtime/vm": "*",
"@types/debug": "^4.1.12",
"@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0",
"@vitest/browser": "3.1.2",
"@vitest/ui": "3.1.2",
"@vitest/browser": "3.1.1",
"@vitest/ui": "3.1.1",
"happy-dom": "*",
"jsdom": "*"
},

View File

@@ -116,7 +116,7 @@ services:
redis:
container_name: immich_redis
image: docker.io/valkey/valkey:8-bookworm@sha256:c855f98e09d558a0d7cc1a4e56473231206a4c54c0114ada9c485b47aeb92ec8
image: docker.io/valkey/valkey:8-bookworm@sha256:42cba146593a5ea9a622002c1b7cba5da7be248650cbb64ecb9c6c33d29794b1
healthcheck:
test: redis-cli ping || exit 1

View File

@@ -56,7 +56,7 @@ services:
redis:
container_name: immich_redis
image: docker.io/valkey/valkey:8-bookworm@sha256:c855f98e09d558a0d7cc1a4e56473231206a4c54c0114ada9c485b47aeb92ec8
image: docker.io/valkey/valkey:8-bookworm@sha256:42cba146593a5ea9a622002c1b7cba5da7be248650cbb64ecb9c6c33d29794b1
healthcheck:
test: redis-cli ping || exit 1
restart: always
@@ -102,7 +102,7 @@ services:
command: [ './run.sh', '-disable-reporting' ]
ports:
- 3000:3000
image: grafana/grafana:11.6.1-ubuntu@sha256:6fc273288470ef499dd3c6b36aeade093170d4f608f864c5dd3a7fabeae77b50
image: grafana/grafana:11.6.0-ubuntu@sha256:fd8fa48213c624e1a95122f1d93abbf1cf1cbe85fc73212c1e599dbd76c63ff8
volumes:
- grafana-data:/var/lib/grafana

View File

@@ -49,7 +49,7 @@ services:
redis:
container_name: immich_redis
image: docker.io/valkey/valkey:8-bookworm@sha256:c855f98e09d558a0d7cc1a4e56473231206a4c54c0114ada9c485b47aeb92ec8
image: docker.io/valkey/valkey:8-bookworm@sha256:42cba146593a5ea9a622002c1b7cba5da7be248650cbb64ecb9c6c33d29794b1
healthcheck:
test: redis-cli ping || exit 1
restart: always

View File

@@ -1,5 +0,0 @@
Policy: https://github.com/immich-app/immich/blob/main/SECURITY.md
Contact: mailto:security@immich.app
Preferred-Languages: en
Expires: 2026-05-01T23:59:00.000Z
Canonical: https://immich.app/.well-known/security.txt

View File

@@ -34,7 +34,7 @@ services:
- 2285:2285
redis:
image: redis:6.2-alpine@sha256:3211c33a618c457e5d241922c975dbc4f446d0bdb2dc75694f5573ef8e2d01fa
image: redis:6.2-alpine@sha256:148bb5411c184abd288d9aaed139c98123eeb8824c5d3fce03cf721db58066d8
database:
image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52

e2e/package-lock.json (generated, 744 changed lines)

File diff suppressed because it is too large.

View File

@@ -215,19 +215,6 @@ describe('/admin/users', () => {
const user = await getMyUser({ headers: asBearerAuth(token.accessToken) });
expect(user).toMatchObject({ email: nonAdmin.userEmail });
});
it('should update the avatar color', async () => {
const { status, body } = await request(app)
.put(`/admin/users/${admin.userId}`)
.send({ avatarColor: 'orange' })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toMatchObject({ avatarColor: 'orange' });
const after = await getUserAdmin({ id: admin.userId }, { headers: asBearerAuth(admin.accessToken) });
expect(after).toMatchObject({ avatarColor: 'orange' });
});
});
describe('PUT /admin/users/:id/preferences', () => {
@@ -253,6 +240,19 @@ describe('/admin/users', () => {
expect(after).toMatchObject({ memories: { enabled: false } });
});
it('should update the avatar color', async () => {
const { status, body } = await request(app)
.put(`/admin/users/${admin.userId}/preferences`)
.send({ avatar: { color: 'orange' } })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toMatchObject({ avatar: { color: 'orange' } });
const after = await getUserPreferencesAdmin({ id: admin.userId }, { headers: asBearerAuth(admin.accessToken) });
expect(after).toMatchObject({ avatar: { color: 'orange' } });
});
it('should update download archive size', async () => {
const { status, body } = await request(app)
.put(`/admin/users/${admin.userId}/preferences`)

View File

@@ -139,19 +139,6 @@ describe('/users', () => {
profileChangedAt: expect.anything(),
});
});
it('should update avatar color', async () => {
const { status, body } = await request(app)
.put(`/users/me`)
.send({ avatarColor: 'blue' })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toMatchObject({ avatarColor: 'blue' });
const after = await getMyUser({ headers: asBearerAuth(admin.accessToken) });
expect(after).toMatchObject({ avatarColor: 'blue' });
});
});
describe('PUT /users/me/preferences', () => {
@@ -171,6 +158,19 @@ describe('/users', () => {
expect(after).toMatchObject({ memories: { enabled: false } });
});
it('should update avatar color', async () => {
const { status, body } = await request(app)
.put(`/users/me/preferences`)
.send({ avatar: { color: 'blue' } })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toMatchObject({ avatar: { color: 'blue' } });
const after = await getMyPreferences({ headers: asBearerAuth(admin.accessToken) });
expect(after).toMatchObject({ avatar: { color: 'blue' } });
});
it('should require an integer for download archive size', async () => {
const { status, body } = await request(app)
.put(`/users/me/preferences`)

View File

@@ -21,9 +21,23 @@ test.describe('Photo Viewer', () => {
test.beforeEach(async ({ context, page }) => {
// before each test, login as user
await utils.setAuthCookies(context, admin.accessToken);
await page.goto('/photos');
await page.waitForLoadState('networkidle');
});
test('initially shows a loading spinner', async ({ page }) => {
await page.route(`/api/assets/${asset.id}/thumbnail**`, async (route) => {
// slow down the request for thumbnail, so spinner has chance to show up
await new Promise((f) => setTimeout(f, 2000));
await route.continue();
});
await page.goto(`/photos/${asset.id}`);
await page.waitForLoadState('load');
// this is the spinner
await page.waitForSelector('svg[role=status]');
await expect(page.getByTestId('loading-spinner')).toBeVisible();
});
test('loads original photo when zoomed', async ({ page }) => {
await page.goto(`/photos/${asset.id}`);
await expect.poll(async () => await imageLocator(page).getAttribute('src')).toContain('thumbnail');

View File

@@ -192,22 +192,26 @@
"oauth_auto_register": "Auto register",
"oauth_auto_register_description": "Automatically register new users after signing in with OAuth",
"oauth_button_text": "Button text",
"oauth_client_secret_description": "Required if PKCE (Proof Key for Code Exchange) is not supported by the OAuth provider",
"oauth_client_id": "Client ID",
"oauth_client_secret": "Client Secret",
"oauth_enable_description": "Login with OAuth",
"oauth_issuer_url": "Issuer URL",
"oauth_mobile_redirect_uri": "Mobile redirect URI",
"oauth_mobile_redirect_uri_override": "Mobile redirect URI override",
"oauth_mobile_redirect_uri_override_description": "Enable when OAuth provider does not allow a mobile URI, like '{callback}'",
"oauth_profile_signing_algorithm": "Profile signing algorithm",
"oauth_profile_signing_algorithm_description": "Algorithm used to sign the user profile.",
"oauth_scope": "Scope",
"oauth_settings": "OAuth",
"oauth_settings_description": "Manage OAuth login settings",
"oauth_settings_more_details": "For more details about this feature, refer to the <link>docs</link>.",
"oauth_signing_algorithm": "Signing algorithm",
"oauth_storage_label_claim": "Storage label claim",
"oauth_storage_label_claim_description": "Automatically set the user's storage label to the value of this claim.",
"oauth_storage_quota_claim": "Storage quota claim",
"oauth_storage_quota_claim_description": "Automatically set the user's storage quota to the value of this claim.",
"oauth_storage_quota_default": "Default storage quota (GiB)",
"oauth_storage_quota_default_description": "Quota in GiB to be used when no claim is provided (Enter 0 for unlimited quota).",
"oauth_timeout": "Request Timeout",
"oauth_timeout_description": "Timeout for requests in milliseconds",
"offline_paths": "Offline Paths",
"offline_paths_description": "These results may be due to manual deletion of files that are not part of an external library.",
"password_enable_description": "Login with email and password",
@@ -849,12 +853,10 @@
"failed_to_keep_this_delete_others": "Failed to keep this asset and delete the other assets",
"failed_to_load_asset": "Failed to load asset",
"failed_to_load_assets": "Failed to load assets",
"failed_to_load_notifications": "Failed to load notifications",
"failed_to_load_people": "Failed to load people",
"failed_to_remove_product_key": "Failed to remove product key",
"failed_to_stack_assets": "Failed to stack assets",
"failed_to_unstack_assets": "Failed to un-stack assets",
"failed_to_update_notification_status": "Failed to update notification status",
"import_path_already_exists": "This import path already exists.",
"incorrect_email_or_password": "Incorrect email or password",
"paths_validation_failed": "{paths, plural, one {# path} other {# paths}} failed validation",
@@ -1197,9 +1199,6 @@
"map_settings_only_show_favorites": "Show Favorite Only",
"map_settings_theme_settings": "Map Theme",
"map_zoom_to_see_photos": "Zoom out to see photos",
"mark_as_read": "Mark as read",
"mark_all_as_read": "Mark all as read",
"marked_all_as_read": "Marked all as read",
"matches": "Matches",
"media_type": "Media type",
"memories": "Memories",
@@ -1226,8 +1225,6 @@
"month": "Month",
"monthly_title_text_date_format": "MMMM y",
"more": "More",
"moved_to_archive": "Moved {count, plural, one {# asset} other {# assets}} to archive",
"moved_to_library": "Moved {count, plural, one {# asset} other {# assets}} to library",
"moved_to_trash": "Moved to trash",
"multiselect_grid_edit_date_time_err_read_only": "Cannot edit date of read only asset(s), skipping",
"multiselect_grid_edit_gps_err_read_only": "Cannot edit location of read only asset(s), skipping",
@@ -1263,7 +1260,6 @@
"no_places": "No places",
"no_results": "No results",
"no_results_description": "Try a synonym or more general keyword",
"no_notifications": "No notifications",
"no_shared_albums_message": "Create an album to share photos and videos with people in your network",
"not_in_any_album": "Not in any album",
"not_selected": "Not selected",

View File

@@ -1,6 +1,6 @@
ARG DEVICE=cpu
FROM python:3.11-bookworm@sha256:ab60e444e04215a62671149f24c59cc2893b49cb5dad26f9d139077a86be760e AS builder-cpu
FROM python:3.11-bookworm@sha256:a3e280261e448b95d49423532ccd6e5329c39d171c10df1457891ff7c5e2301b AS builder-cpu
FROM builder-cpu AS builder-openvino
@@ -54,7 +54,7 @@ ENV PYTHONDONTWRITEBYTECODE=1 \
RUN apt-get update && apt-get install -y --no-install-recommends g++
COPY --from=ghcr.io/astral-sh/uv:latest@sha256:4a6c9444b126bd325fba904bff796bf91fb777bf6148d60109c4cb1de2ffc497 /uv /uvx /bin/
COPY --from=ghcr.io/astral-sh/uv:latest@sha256:db305ce8edc1c2df4988b9d23471465d90d599cc55571e6501421c173a33bb0b /uv /uvx /bin/
RUN --mount=type=cache,target=/root/.cache/uv \
--mount=type=bind,source=uv.lock,target=uv.lock \
--mount=type=bind,source=pyproject.toml,target=pyproject.toml \
@@ -63,11 +63,11 @@ RUN if [ "$DEVICE" = "rocm" ]; then \
uv pip install /opt/onnxruntime_rocm-*.whl; \
fi
FROM python:3.11-slim-bookworm@sha256:97ef3198ec8c78690587167bb6a4905d00ffe053900687bdae93ad667e507cbb AS prod-cpu
FROM python:3.11-slim-bookworm@sha256:82c07f2f6e35255b92eb16f38dbd22679d5e8fb523064138d7c6468e7bf0c15b AS prod-cpu
ENV LD_PRELOAD=/usr/lib/libmimalloc.so.2
FROM python:3.11-slim-bookworm@sha256:97ef3198ec8c78690587167bb6a4905d00ffe053900687bdae93ad667e507cbb AS prod-openvino
FROM python:3.11-slim-bookworm@sha256:82c07f2f6e35255b92eb16f38dbd22679d5e8fb523064138d7c6468e7bf0c15b AS prod-openvino
RUN apt-get update && \
apt-get install --no-install-recommends -yqq ocl-icd-libopencl1 wget && \

machine-learning/uv.lock (generated, 142 changed lines)
View File

@@ -69,15 +69,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee", size = 85481, upload_time = "2023-12-16T17:06:55.989Z" },
]
[[package]]
name = "bidict"
version = "0.23.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/9a/6e/026678aa5a830e07cd9498a05d3e7e650a4f56a42f267a53d22bcda1bdc9/bidict-0.23.1.tar.gz", hash = "sha256:03069d763bc387bbd20e7d49914e75fc4132a41937fa3405417e1a5a2d006d71", size = 29093, upload_time = "2024-02-18T19:09:05.748Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/99/37/e8730c3587a65eb5645d4aba2d27aae48e8003614d6aaf15dda67f702f1f/bidict-0.23.1-py3-none-any.whl", hash = "sha256:5dae8d4d79b552a71cbabc7deb25dfe8ce710b17ff41711e13010ead2abfc3e5", size = 32764, upload_time = "2024-02-18T19:09:04.156Z" },
]
[[package]]
name = "black"
version = "25.1.0"
@@ -1209,16 +1200,15 @@ wheels = [
[[package]]
name = "locust"
version = "2.36.2"
version = "2.35.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "configargparse" },
{ name = "flask" },
{ name = "flask-cors" },
{ name = "flask-login" },
{ name = "gevent" },
{ name = "gevent", marker = "python_full_version != '3.13.*'" },
{ name = "geventhttpclient" },
{ name = "locust-cloud" },
{ name = "msgpack" },
{ name = "psutil" },
{ name = "pywin32", marker = "sys_platform == 'win32'" },
@@ -1229,25 +1219,9 @@ dependencies = [
{ name = "typing-extensions", marker = "python_full_version < '3.11'" },
{ name = "werkzeug" },
]
sdist = { url = "https://files.pythonhosted.org/packages/6d/90/55d4fbc8911e5e6ec4072caaca9e8b7b2b11279435c0d1330c9966b0c898/locust-2.36.2.tar.gz", hash = "sha256:604aff7535f5a83b7f666d32373b2dc74ad260c7c3d1dc274f4c82844be72eb6", size = 2251110, upload_time = "2025-04-25T14:03:35.919Z" }
sdist = { url = "https://files.pythonhosted.org/packages/79/21/d5aeeee74173d73d7d8d392e307ec24d8281fca69a2bf1f19199bd84c498/locust-2.35.0.tar.gz", hash = "sha256:97f83e591646ca3227644cfb6d4fa590e9a3e3d791ab18b216ca98be235b9b24", size = 2240690, upload_time = "2025-04-16T12:10:25.037Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ab/f5/99dab104be69122eee3513dcdc6e0b32d59ca1f4cfd8715470c5f3aa7643/locust-2.36.2-py3-none-any.whl", hash = "sha256:74239f493f44035b25a87a0665deadf41d213b3dcd45774398e511dec15e26eb", size = 2267937, upload_time = "2025-04-25T14:03:33.671Z" },
]
[[package]]
name = "locust-cloud"
version = "1.20.7"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "configargparse" },
{ name = "gevent" },
{ name = "platformdirs" },
{ name = "python-socketio", extra = ["client"] },
{ name = "tomli", marker = "python_full_version < '3.11'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/94/47/1ec2478f3d4e526fb8d667b01a75b22093b2e66aea665b5369dd656ceec9/locust_cloud-1.20.7.tar.gz", hash = "sha256:24c16b767adffab51b97f489bcf142e16e2439354fb4296ecbb3e87ad20e220a", size = 448622, upload_time = "2025-04-28T11:01:49.381Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/8c/07/62b5b174c77d4281235405f1ffd439f12a877e434e007a24a5299c461e39/locust_cloud-1.20.7-py3-none-any.whl", hash = "sha256:f38214e77993d0ee87114dafa857e1689789ed4bfe4ae57c2b9dc754674f08bc", size = 406619, upload_time = "2025-04-28T11:01:43.135Z" },
{ url = "https://files.pythonhosted.org/packages/3d/15/9e92757c08af3f0c0168ab6480315ed6374396d635f70055df5c42cfa672/locust-2.35.0-py3-none-any.whl", hash = "sha256:fb9e0ec25c5db3ed6a3c6d48e7236d7c2c370b0ddae102e9badcb2d3d101abde", size = 2258054, upload_time = "2025-04-16T12:10:22.608Z" },
]
[[package]]
@@ -1748,11 +1722,11 @@ wheels = [
[[package]]
name = "platformdirs"
version = "4.3.7"
version = "4.1.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b6/2d/7d512a3913d60623e7eb945c6d1b4f0bddf1d0b7ada5225274c87e5b53d1/platformdirs-4.3.7.tar.gz", hash = "sha256:eb437d586b6a0986388f0d6f74aa0cde27b48d0e3d66843640bfb6bdcdb6e351", size = 21291, upload_time = "2025-03-19T20:36:10.989Z" }
sdist = { url = "https://files.pythonhosted.org/packages/62/d1/7feaaacb1a3faeba96c06e6c5091f90695cc0f94b7e8e1a3a3fe2b33ff9a/platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420", size = 19760, upload_time = "2023-12-04T15:32:15.925Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/6d/45/59578566b3275b8fd9157885918fcd0c4d74162928a5310926887b856a51/platformdirs-4.3.7-py3-none-any.whl", hash = "sha256:a03875334331946f13c549dbd8f4bac7a13a50a895a0eb1e8c6a8ace80d40a94", size = 18499, upload_time = "2025-03-19T20:36:09.038Z" },
{ url = "https://files.pythonhosted.org/packages/be/53/42fe5eab4a09d251a76d0043e018172db324a23fcdac70f77a551c11f618/platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380", size = 17420, upload_time = "2023-12-04T15:32:13.795Z" },
]
[[package]]
@@ -2031,18 +2005,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/44/2f/62ea1c8b593f4e093cc1a7768f0d46112107e790c3e478532329e434f00b/python_dotenv-1.0.0-py3-none-any.whl", hash = "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a", size = 19482, upload_time = "2023-02-24T06:46:36.009Z" },
]
[[package]]
name = "python-engineio"
version = "4.12.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "simple-websocket" },
]
sdist = { url = "https://files.pythonhosted.org/packages/f7/e1/eee1129544b7f78fa2afa9fa0fce153cdcb21015b9b331d1b8adf90f45cb/python_engineio-4.12.0.tar.gz", hash = "sha256:f42a36a868d7063aa10ddccf6bd6117a169b6bd00d7ca53999772093b62014f9", size = 91503, upload_time = "2025-04-12T15:30:23.905Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2b/f7/0aeea75424c47633c1d98557a2323be23bed31fa950f00161b34a5150d06/python_engineio-4.12.0-py3-none-any.whl", hash = "sha256:a0c47c129c39777e8ebc6d18011efd50db2144e4e8f08983acae8a3614626535", size = 59319, upload_time = "2025-04-12T15:30:22.325Z" },
]
[[package]]
name = "python-multipart"
version = "0.0.20"
@@ -2052,25 +2014,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546, upload_time = "2024-12-16T19:45:44.423Z" },
]
[[package]]
name = "python-socketio"
version = "5.13.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "bidict" },
{ name = "python-engineio" },
]
sdist = { url = "https://files.pythonhosted.org/packages/21/1a/396d50ccf06ee539fa758ce5623b59a9cb27637fc4b2dc07ed08bf495e77/python_socketio-5.13.0.tar.gz", hash = "sha256:ac4e19a0302ae812e23b712ec8b6427ca0521f7c582d6abb096e36e24a263029", size = 121125, upload_time = "2025-04-12T15:46:59.933Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/3c/32/b4fb8585d1be0f68bde7e110dffbcf354915f77ad8c778563f0ad9655c02/python_socketio-5.13.0-py3-none-any.whl", hash = "sha256:51f68d6499f2df8524668c24bcec13ba1414117cfb3a90115c559b601ab10caf", size = 77800, upload_time = "2025-04-12T15:46:58.412Z" },
]
[package.optional-dependencies]
client = [
{ name = "requests" },
{ name = "websocket-client" },
]
[[package]]
name = "pywin32"
version = "306"
@@ -2283,27 +2226,27 @@ wheels = [
[[package]]
name = "ruff"
version = "0.11.7"
version = "0.11.6"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/5b/89/6f9c9674818ac2e9cc2f2b35b704b7768656e6b7c139064fc7ba8fbc99f1/ruff-0.11.7.tar.gz", hash = "sha256:655089ad3224070736dc32844fde783454f8558e71f501cb207485fe4eee23d4", size = 4054861, upload_time = "2025-04-24T18:49:37.007Z" }
sdist = { url = "https://files.pythonhosted.org/packages/d9/11/bcef6784c7e5d200b8a1f5c2ddf53e5da0efec37e6e5a44d163fb97e04ba/ruff-0.11.6.tar.gz", hash = "sha256:bec8bcc3ac228a45ccc811e45f7eb61b950dbf4cf31a67fa89352574b01c7d79", size = 4010053, upload_time = "2025-04-17T13:35:53.905Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b4/ec/21927cb906c5614b786d1621dba405e3d44f6e473872e6df5d1a6bca0455/ruff-0.11.7-py3-none-linux_armv6l.whl", hash = "sha256:d29e909d9a8d02f928d72ab7837b5cbc450a5bdf578ab9ebee3263d0a525091c", size = 10245403, upload_time = "2025-04-24T18:48:40.459Z" },
{ url = "https://files.pythonhosted.org/packages/e2/af/fec85b6c2c725bcb062a354dd7cbc1eed53c33ff3aa665165871c9c16ddf/ruff-0.11.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:dd1fb86b168ae349fb01dd497d83537b2c5541fe0626e70c786427dd8363aaee", size = 11007166, upload_time = "2025-04-24T18:48:44.742Z" },
{ url = "https://files.pythonhosted.org/packages/31/9a/2d0d260a58e81f388800343a45898fd8df73c608b8261c370058b675319a/ruff-0.11.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d3d7d2e140a6fbbc09033bce65bd7ea29d6a0adeb90b8430262fbacd58c38ada", size = 10378076, upload_time = "2025-04-24T18:48:47.918Z" },
{ url = "https://files.pythonhosted.org/packages/c2/c4/9b09b45051404d2e7dd6d9dbcbabaa5ab0093f9febcae664876a77b9ad53/ruff-0.11.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4809df77de390a1c2077d9b7945d82f44b95d19ceccf0c287c56e4dc9b91ca64", size = 10557138, upload_time = "2025-04-24T18:48:51.707Z" },
{ url = "https://files.pythonhosted.org/packages/5e/5e/f62a1b6669870a591ed7db771c332fabb30f83c967f376b05e7c91bccd14/ruff-0.11.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f3a0c2e169e6b545f8e2dba185eabbd9db4f08880032e75aa0e285a6d3f48201", size = 10095726, upload_time = "2025-04-24T18:48:54.243Z" },
{ url = "https://files.pythonhosted.org/packages/45/59/a7aa8e716f4cbe07c3500a391e58c52caf665bb242bf8be42c62adef649c/ruff-0.11.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49b888200a320dd96a68e86736cf531d6afba03e4f6cf098401406a257fcf3d6", size = 11672265, upload_time = "2025-04-24T18:48:57.639Z" },
{ url = "https://files.pythonhosted.org/packages/dd/e3/101a8b707481f37aca5f0fcc3e42932fa38b51add87bfbd8e41ab14adb24/ruff-0.11.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:2b19cdb9cf7dae00d5ee2e7c013540cdc3b31c4f281f1dacb5a799d610e90db4", size = 12331418, upload_time = "2025-04-24T18:49:00.697Z" },
{ url = "https://files.pythonhosted.org/packages/dd/71/037f76cbe712f5cbc7b852e4916cd3cf32301a30351818d32ab71580d1c0/ruff-0.11.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64e0ee994c9e326b43539d133a36a455dbaab477bc84fe7bfbd528abe2f05c1e", size = 11794506, upload_time = "2025-04-24T18:49:03.545Z" },
{ url = "https://files.pythonhosted.org/packages/ca/de/e450b6bab1fc60ef263ef8fcda077fb4977601184877dce1c59109356084/ruff-0.11.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bad82052311479a5865f52c76ecee5d468a58ba44fb23ee15079f17dd4c8fd63", size = 13939084, upload_time = "2025-04-24T18:49:07.159Z" },
{ url = "https://files.pythonhosted.org/packages/0e/2c/1e364cc92970075d7d04c69c928430b23e43a433f044474f57e425cbed37/ruff-0.11.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7940665e74e7b65d427b82bffc1e46710ec7f30d58b4b2d5016e3f0321436502", size = 11450441, upload_time = "2025-04-24T18:49:11.41Z" },
{ url = "https://files.pythonhosted.org/packages/9d/7d/1b048eb460517ff9accd78bca0fa6ae61df2b276010538e586f834f5e402/ruff-0.11.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:169027e31c52c0e36c44ae9a9c7db35e505fee0b39f8d9fca7274a6305295a92", size = 10441060, upload_time = "2025-04-24T18:49:14.184Z" },
{ url = "https://files.pythonhosted.org/packages/3a/57/8dc6ccfd8380e5ca3d13ff7591e8ba46a3b330323515a4996b991b10bd5d/ruff-0.11.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:305b93f9798aee582e91e34437810439acb28b5fc1fee6b8205c78c806845a94", size = 10058689, upload_time = "2025-04-24T18:49:17.559Z" },
{ url = "https://files.pythonhosted.org/packages/23/bf/20487561ed72654147817885559ba2aa705272d8b5dee7654d3ef2dbf912/ruff-0.11.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a681db041ef55550c371f9cd52a3cf17a0da4c75d6bd691092dfc38170ebc4b6", size = 11073703, upload_time = "2025-04-24T18:49:20.247Z" },
{ url = "https://files.pythonhosted.org/packages/9d/27/04f2db95f4ef73dccedd0c21daf9991cc3b7f29901a4362057b132075aa4/ruff-0.11.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:07f1496ad00a4a139f4de220b0c97da6d4c85e0e4aa9b2624167b7d4d44fd6b6", size = 11532822, upload_time = "2025-04-24T18:49:23.765Z" },
{ url = "https://files.pythonhosted.org/packages/e1/72/43b123e4db52144c8add336581de52185097545981ff6e9e58a21861c250/ruff-0.11.7-py3-none-win32.whl", hash = "sha256:f25dfb853ad217e6e5f1924ae8a5b3f6709051a13e9dad18690de6c8ff299e26", size = 10362436, upload_time = "2025-04-24T18:49:27.377Z" },
{ url = "https://files.pythonhosted.org/packages/c5/a0/3e58cd76fdee53d5c8ce7a56d84540833f924ccdf2c7d657cb009e604d82/ruff-0.11.7-py3-none-win_amd64.whl", hash = "sha256:0a931d85959ceb77e92aea4bbedfded0a31534ce191252721128f77e5ae1f98a", size = 11566676, upload_time = "2025-04-24T18:49:30.938Z" },
{ url = "https://files.pythonhosted.org/packages/68/ca/69d7c7752bce162d1516e5592b1cc6b6668e9328c0d270609ddbeeadd7cf/ruff-0.11.7-py3-none-win_arm64.whl", hash = "sha256:778c1e5d6f9e91034142dfd06110534ca13220bfaad5c3735f6cb844654f6177", size = 10677936, upload_time = "2025-04-24T18:49:34.392Z" },
{ url = "https://files.pythonhosted.org/packages/6e/1f/8848b625100ebcc8740c8bac5b5dd8ba97dd4ee210970e98832092c1635b/ruff-0.11.6-py3-none-linux_armv6l.whl", hash = "sha256:d84dcbe74cf9356d1bdb4a78cf74fd47c740bf7bdeb7529068f69b08272239a1", size = 10248105, upload_time = "2025-04-17T13:35:14.758Z" },
{ url = "https://files.pythonhosted.org/packages/e0/47/c44036e70c6cc11e6ee24399c2a1e1f1e99be5152bd7dff0190e4b325b76/ruff-0.11.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:9bc583628e1096148011a5d51ff3c836f51899e61112e03e5f2b1573a9b726de", size = 11001494, upload_time = "2025-04-17T13:35:18.444Z" },
{ url = "https://files.pythonhosted.org/packages/ed/5b/170444061650202d84d316e8f112de02d092bff71fafe060d3542f5bc5df/ruff-0.11.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f2959049faeb5ba5e3b378709e9d1bf0cab06528b306b9dd6ebd2a312127964a", size = 10352151, upload_time = "2025-04-17T13:35:20.563Z" },
{ url = "https://files.pythonhosted.org/packages/ff/91/f02839fb3787c678e112c8865f2c3e87cfe1744dcc96ff9fc56cfb97dda2/ruff-0.11.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63c5d4e30d9d0de7fedbfb3e9e20d134b73a30c1e74b596f40f0629d5c28a193", size = 10541951, upload_time = "2025-04-17T13:35:22.522Z" },
{ url = "https://files.pythonhosted.org/packages/9e/f3/c09933306096ff7a08abede3cc2534d6fcf5529ccd26504c16bf363989b5/ruff-0.11.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:26a4b9a4e1439f7d0a091c6763a100cef8fbdc10d68593df6f3cfa5abdd9246e", size = 10079195, upload_time = "2025-04-17T13:35:24.485Z" },
{ url = "https://files.pythonhosted.org/packages/e0/0d/a87f8933fccbc0d8c653cfbf44bedda69c9582ba09210a309c066794e2ee/ruff-0.11.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b5edf270223dd622218256569636dc3e708c2cb989242262fe378609eccf1308", size = 11698918, upload_time = "2025-04-17T13:35:26.504Z" },
{ url = "https://files.pythonhosted.org/packages/52/7d/8eac0bd083ea8a0b55b7e4628428203441ca68cd55e0b67c135a4bc6e309/ruff-0.11.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f55844e818206a9dd31ff27f91385afb538067e2dc0beb05f82c293ab84f7d55", size = 12319426, upload_time = "2025-04-17T13:35:28.452Z" },
{ url = "https://files.pythonhosted.org/packages/c2/dc/d0c17d875662d0c86fadcf4ca014ab2001f867621b793d5d7eef01b9dcce/ruff-0.11.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d8f782286c5ff562e4e00344f954b9320026d8e3fae2ba9e6948443fafd9ffc", size = 11791012, upload_time = "2025-04-17T13:35:30.455Z" },
{ url = "https://files.pythonhosted.org/packages/f9/f3/81a1aea17f1065449a72509fc7ccc3659cf93148b136ff2a8291c4bc3ef1/ruff-0.11.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:01c63ba219514271cee955cd0adc26a4083df1956d57847978383b0e50ffd7d2", size = 13949947, upload_time = "2025-04-17T13:35:33.133Z" },
{ url = "https://files.pythonhosted.org/packages/61/9f/a3e34de425a668284e7024ee6fd41f452f6fa9d817f1f3495b46e5e3a407/ruff-0.11.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15adac20ef2ca296dd3d8e2bedc6202ea6de81c091a74661c3666e5c4c223ff6", size = 11471753, upload_time = "2025-04-17T13:35:35.416Z" },
{ url = "https://files.pythonhosted.org/packages/df/c5/4a57a86d12542c0f6e2744f262257b2aa5a3783098ec14e40f3e4b3a354a/ruff-0.11.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4dd6b09e98144ad7aec026f5588e493c65057d1b387dd937d7787baa531d9bc2", size = 10417121, upload_time = "2025-04-17T13:35:38.224Z" },
{ url = "https://files.pythonhosted.org/packages/58/3f/a3b4346dff07ef5b862e2ba06d98fcbf71f66f04cf01d375e871382b5e4b/ruff-0.11.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:45b2e1d6c0eed89c248d024ea95074d0e09988d8e7b1dad8d3ab9a67017a5b03", size = 10073829, upload_time = "2025-04-17T13:35:40.255Z" },
{ url = "https://files.pythonhosted.org/packages/93/cc/7ed02e0b86a649216b845b3ac66ed55d8aa86f5898c5f1691797f408fcb9/ruff-0.11.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:bd40de4115b2ec4850302f1a1d8067f42e70b4990b68838ccb9ccd9f110c5e8b", size = 11076108, upload_time = "2025-04-17T13:35:42.559Z" },
{ url = "https://files.pythonhosted.org/packages/39/5e/5b09840fef0eff1a6fa1dea6296c07d09c17cb6fb94ed5593aa591b50460/ruff-0.11.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:77cda2dfbac1ab73aef5e514c4cbfc4ec1fbef4b84a44c736cc26f61b3814cd9", size = 11512366, upload_time = "2025-04-17T13:35:45.702Z" },
{ url = "https://files.pythonhosted.org/packages/6f/4c/1cd5a84a412d3626335ae69f5f9de2bb554eea0faf46deb1f0cb48534042/ruff-0.11.6-py3-none-win32.whl", hash = "sha256:5151a871554be3036cd6e51d0ec6eef56334d74dfe1702de717a995ee3d5b287", size = 10485900, upload_time = "2025-04-17T13:35:47.695Z" },
{ url = "https://files.pythonhosted.org/packages/42/46/8997872bc44d43df986491c18d4418f1caff03bc47b7f381261d62c23442/ruff-0.11.6-py3-none-win_amd64.whl", hash = "sha256:cce85721d09c51f3b782c331b0abd07e9d7d5f775840379c640606d3159cae0e", size = 11558592, upload_time = "2025-04-17T13:35:49.837Z" },
{ url = "https://files.pythonhosted.org/packages/d7/6a/65fecd51a9ca19e1477c3879a7fda24f8904174d1275b419422ac00f6eee/ruff-0.11.6-py3-none-win_arm64.whl", hash = "sha256:3567ba0d07fb170b1b48d944715e3294b77f5b7679e8ba258199a250383ccb79", size = 10682766, upload_time = "2025-04-17T13:35:52.014Z" },
]
[[package]]
@@ -2406,18 +2349,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/ef/15/88e46eb9387e905704b69849618e699dc2f54407d8953cc4ec4b8b46528d/setuptools-70.3.0-py3-none-any.whl", hash = "sha256:fe384da74336c398e0d956d1cae0669bc02eed936cdb1d49b57de1990dc11ffc", size = 931070, upload_time = "2024-07-09T16:07:58.829Z" },
]
[[package]]
name = "simple-websocket"
version = "1.1.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "wsproto" },
]
sdist = { url = "https://files.pythonhosted.org/packages/b0/d4/bfa032f961103eba93de583b161f0e6a5b63cebb8f2c7d0c6e6efe1e3d2e/simple_websocket-1.1.0.tar.gz", hash = "sha256:7939234e7aa067c534abdab3a9ed933ec9ce4691b0713c78acb195560aa52ae4", size = 17300, upload_time = "2024-10-10T22:39:31.412Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/52/59/0782e51887ac6b07ffd1570e0364cf901ebc36345fea669969d2084baebb/simple_websocket-1.1.0-py3-none-any.whl", hash = "sha256:4af6069630a38ed6c561010f0e11a5bc0d4ca569b36306eb257cd9a192497c8c", size = 13842, upload_time = "2024-10-10T22:39:29.645Z" },
]
[[package]]
name = "six"
version = "1.16.0"
@@ -2721,15 +2652,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166, upload_time = "2024-01-06T02:10:55.763Z" },
]
[[package]]
name = "websocket-client"
version = "1.8.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e6/30/fba0d96b4b5fbf5948ed3f4681f7da2f9f64512e1d303f94b4cc174c24a5/websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da", size = 54648, upload_time = "2024-04-23T22:16:16.976Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/5a/84/44687a29792a70e111c5c477230a72c4b957d88d16141199bf9acb7537a3/websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526", size = 58826, upload_time = "2024-04-23T22:16:14.422Z" },
]
[[package]]
name = "websockets"
version = "12.0"
@@ -2789,18 +2711,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/9d/6e/e792999e816d19d7fcbfa94c730936750036d65656a76a5a688b57a656c4/werkzeug-3.0.3-py3-none-any.whl", hash = "sha256:fc9645dc43e03e4d630d23143a04a7f947a9a3b5727cd535fdfe155a17cc48c8", size = 227274, upload_time = "2024-05-05T23:10:29.567Z" },
]
[[package]]
name = "wsproto"
version = "1.2.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "h11" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c9/4a/44d3c295350d776427904d73c189e10aeae66d7f555bb2feee16d1e4ba5a/wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065", size = 53425, upload_time = "2022-08-23T19:58:21.447Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/78/58/e860788190eba3bcce367f74d29c4675466ce8dddfba85f7827588416f01/wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736", size = 24226, upload_time = "2022-08-23T19:58:19.96Z" },
]
[[package]]
name = "zope-event"
version = "5.0"


@@ -8,7 +8,6 @@ import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/providers/asset.provider.dart';
import 'package:immich_mobile/services/asset.service.dart';
import 'package:immich_mobile/services/share.service.dart';
import 'package:immich_mobile/utils/translation.dart';
import 'package:immich_mobile/widgets/common/date_time_picker.dart';
import 'package:immich_mobile/widgets/common/immich_toast.dart';
import 'package:immich_mobile/widgets/common/location_picker.dart';
@@ -58,13 +57,12 @@ Future<void> handleArchiveAssets(
.read(assetProvider.notifier)
.toggleArchive(selection, shouldArchive);
final message = shouldArchive
? t('moved_to_archive', {'count': selection.length})
: t('moved_to_library', {'count': selection.length});
final assetOrAssets = selection.length > 1 ? 'assets' : 'asset';
final archiveOrLibrary = shouldArchive ? 'archive' : 'library';
if (context.mounted) {
ImmichToast.show(
context: context,
msg: message,
msg: 'Moved ${selection.length} $assetOrAssets to $archiveOrLibrary',
gravity: toastGravity,
);
}


@@ -1,14 +0,0 @@
import 'package:easy_localization/easy_localization.dart';
import 'package:intl/message_format.dart';
String t(String key, [Map<String, Object>? args]) {
try {
String message = key.tr();
if (args != null) {
return MessageFormat(message).format(args);
}
return message;
} catch (e) {
return key;
}
}


@@ -97,7 +97,6 @@ class ImmichAssetGrid extends HookConsumerWidget {
);
if (7 - scaleFactor.value.toInt() != perRow.value) {
perRow.value = 7 - scaleFactor.value.toInt();
settings.setSetting(AppSettingsEnum.tilesPerRow, perRow.value);
}
};
}),


@@ -755,7 +755,7 @@ class _MonthTitle extends StatelessWidget {
key: Key("month-$title"),
padding: const EdgeInsets.only(left: 12.0, top: 24.0),
child: Text(
toBeginningOfSentenceCase(title, context.locale.languageCode),
title,
style: const TextStyle(
fontSize: 26,
fontWeight: FontWeight.w500,
@@ -786,7 +786,7 @@ class _Title extends StatelessWidget {
@override
Widget build(BuildContext context) {
return GroupDividerTitle(
text: toBeginningOfSentenceCase(title, context.locale.languageCode),
text: title,
multiselectEnabled: selectionActive,
onSelect: () => selectAssets(assets),
onDeselect: () => deselectAssets(assets),


@@ -145,15 +145,8 @@ Class | Method | HTTP request | Description
*MemoriesApi* | [**removeMemoryAssets**](doc//MemoriesApi.md#removememoryassets) | **DELETE** /memories/{id}/assets |
*MemoriesApi* | [**searchMemories**](doc//MemoriesApi.md#searchmemories) | **GET** /memories |
*MemoriesApi* | [**updateMemory**](doc//MemoriesApi.md#updatememory) | **PUT** /memories/{id} |
*NotificationsApi* | [**deleteNotification**](doc//NotificationsApi.md#deletenotification) | **DELETE** /notifications/{id} |
*NotificationsApi* | [**deleteNotifications**](doc//NotificationsApi.md#deletenotifications) | **DELETE** /notifications |
*NotificationsApi* | [**getNotification**](doc//NotificationsApi.md#getnotification) | **GET** /notifications/{id} |
*NotificationsApi* | [**getNotifications**](doc//NotificationsApi.md#getnotifications) | **GET** /notifications |
*NotificationsApi* | [**updateNotification**](doc//NotificationsApi.md#updatenotification) | **PUT** /notifications/{id} |
*NotificationsApi* | [**updateNotifications**](doc//NotificationsApi.md#updatenotifications) | **PUT** /notifications |
*NotificationsAdminApi* | [**createNotification**](doc//NotificationsAdminApi.md#createnotification) | **POST** /admin/notifications |
*NotificationsAdminApi* | [**getNotificationTemplateAdmin**](doc//NotificationsAdminApi.md#getnotificationtemplateadmin) | **POST** /admin/notifications/templates/{name} |
*NotificationsAdminApi* | [**sendTestEmailAdmin**](doc//NotificationsAdminApi.md#sendtestemailadmin) | **POST** /admin/notifications/test-email |
*NotificationsAdminApi* | [**getNotificationTemplateAdmin**](doc//NotificationsAdminApi.md#getnotificationtemplateadmin) | **POST** /notifications/admin/templates/{name} |
*NotificationsAdminApi* | [**sendTestEmailAdmin**](doc//NotificationsAdminApi.md#sendtestemailadmin) | **POST** /notifications/admin/test-email |
*OAuthApi* | [**finishOAuth**](doc//OAuthApi.md#finishoauth) | **POST** /oauth/callback |
*OAuthApi* | [**linkOAuthAccount**](doc//OAuthApi.md#linkoauthaccount) | **POST** /oauth/link |
*OAuthApi* | [**redirectOAuthToMobile**](doc//OAuthApi.md#redirectoauthtomobile) | **GET** /oauth/mobile-redirect |
@@ -307,6 +300,7 @@ Class | Method | HTTP request | Description
- [AssetStatsResponseDto](doc//AssetStatsResponseDto.md)
- [AssetTypeEnum](doc//AssetTypeEnum.md)
- [AudioCodec](doc//AudioCodec.md)
- [AvatarResponse](doc//AvatarResponse.md)
- [AvatarUpdate](doc//AvatarUpdate.md)
- [BulkIdResponseDto](doc//BulkIdResponseDto.md)
- [BulkIdsDto](doc//BulkIdsDto.md)
@@ -367,17 +361,9 @@ Class | Method | HTTP request | Description
- [MemoryUpdateDto](doc//MemoryUpdateDto.md)
- [MergePersonDto](doc//MergePersonDto.md)
- [MetadataSearchDto](doc//MetadataSearchDto.md)
- [NotificationCreateDto](doc//NotificationCreateDto.md)
- [NotificationDeleteAllDto](doc//NotificationDeleteAllDto.md)
- [NotificationDto](doc//NotificationDto.md)
- [NotificationLevel](doc//NotificationLevel.md)
- [NotificationType](doc//NotificationType.md)
- [NotificationUpdateAllDto](doc//NotificationUpdateAllDto.md)
- [NotificationUpdateDto](doc//NotificationUpdateDto.md)
- [OAuthAuthorizeResponseDto](doc//OAuthAuthorizeResponseDto.md)
- [OAuthCallbackDto](doc//OAuthCallbackDto.md)
- [OAuthConfigDto](doc//OAuthConfigDto.md)
- [OAuthTokenEndpointAuthMethod](doc//OAuthTokenEndpointAuthMethod.md)
- [OnThisDayDto](doc//OnThisDayDto.md)
- [PartnerDirection](doc//PartnerDirection.md)
- [PartnerResponseDto](doc//PartnerResponseDto.md)


@@ -44,7 +44,6 @@ part 'api/jobs_api.dart';
part 'api/libraries_api.dart';
part 'api/map_api.dart';
part 'api/memories_api.dart';
part 'api/notifications_api.dart';
part 'api/notifications_admin_api.dart';
part 'api/o_auth_api.dart';
part 'api/partners_api.dart';
@@ -108,6 +107,7 @@ part 'model/asset_stack_response_dto.dart';
part 'model/asset_stats_response_dto.dart';
part 'model/asset_type_enum.dart';
part 'model/audio_codec.dart';
part 'model/avatar_response.dart';
part 'model/avatar_update.dart';
part 'model/bulk_id_response_dto.dart';
part 'model/bulk_ids_dto.dart';
@@ -168,17 +168,9 @@ part 'model/memory_type.dart';
part 'model/memory_update_dto.dart';
part 'model/merge_person_dto.dart';
part 'model/metadata_search_dto.dart';
part 'model/notification_create_dto.dart';
part 'model/notification_delete_all_dto.dart';
part 'model/notification_dto.dart';
part 'model/notification_level.dart';
part 'model/notification_type.dart';
part 'model/notification_update_all_dto.dart';
part 'model/notification_update_dto.dart';
part 'model/o_auth_authorize_response_dto.dart';
part 'model/o_auth_callback_dto.dart';
part 'model/o_auth_config_dto.dart';
part 'model/o_auth_token_endpoint_auth_method.dart';
part 'model/on_this_day_dto.dart';
part 'model/partner_direction.dart';
part 'model/partner_response_dto.dart';


@@ -16,54 +16,7 @@ class NotificationsAdminApi {
final ApiClient apiClient;
/// Performs an HTTP 'POST /admin/notifications' operation and returns the [Response].
/// Parameters:
///
/// * [NotificationCreateDto] notificationCreateDto (required):
Future<Response> createNotificationWithHttpInfo(NotificationCreateDto notificationCreateDto,) async {
// ignore: prefer_const_declarations
final apiPath = r'/admin/notifications';
// ignore: prefer_final_locals
Object? postBody = notificationCreateDto;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
const contentTypes = <String>['application/json'];
return apiClient.invokeAPI(
apiPath,
'POST',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
/// Parameters:
///
/// * [NotificationCreateDto] notificationCreateDto (required):
Future<NotificationDto?> createNotification(NotificationCreateDto notificationCreateDto,) async {
final response = await createNotificationWithHttpInfo(notificationCreateDto,);
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
// When a remote server returns no body with a status of 204, we shall not decode it.
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
// FormatException when trying to decode an empty string.
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'NotificationDto',) as NotificationDto;
}
return null;
}
/// Performs an HTTP 'POST /admin/notifications/templates/{name}' operation and returns the [Response].
/// Performs an HTTP 'POST /notifications/admin/templates/{name}' operation and returns the [Response].
/// Parameters:
///
/// * [String] name (required):
@@ -71,7 +24,7 @@ class NotificationsAdminApi {
/// * [TemplateDto] templateDto (required):
Future<Response> getNotificationTemplateAdminWithHttpInfo(String name, TemplateDto templateDto,) async {
// ignore: prefer_const_declarations
final apiPath = r'/admin/notifications/templates/{name}'
final apiPath = r'/notifications/admin/templates/{name}'
.replaceAll('{name}', name);
// ignore: prefer_final_locals
@@ -115,13 +68,13 @@ class NotificationsAdminApi {
return null;
}
/// Performs an HTTP 'POST /admin/notifications/test-email' operation and returns the [Response].
/// Performs an HTTP 'POST /notifications/admin/test-email' operation and returns the [Response].
/// Parameters:
///
/// * [SystemConfigSmtpDto] systemConfigSmtpDto (required):
Future<Response> sendTestEmailAdminWithHttpInfo(SystemConfigSmtpDto systemConfigSmtpDto,) async {
// ignore: prefer_const_declarations
final apiPath = r'/admin/notifications/test-email';
final apiPath = r'/notifications/admin/test-email';
// ignore: prefer_final_locals
Object? postBody = systemConfigSmtpDto;


@@ -1,311 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class NotificationsApi {
NotificationsApi([ApiClient? apiClient]) : apiClient = apiClient ?? defaultApiClient;
final ApiClient apiClient;
/// Performs an HTTP 'DELETE /notifications/{id}' operation and returns the [Response].
/// Parameters:
///
/// * [String] id (required):
Future<Response> deleteNotificationWithHttpInfo(String id,) async {
// ignore: prefer_const_declarations
final apiPath = r'/notifications/{id}'
.replaceAll('{id}', id);
// ignore: prefer_final_locals
Object? postBody;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
const contentTypes = <String>[];
return apiClient.invokeAPI(
apiPath,
'DELETE',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
/// Parameters:
///
/// * [String] id (required):
Future<void> deleteNotification(String id,) async {
final response = await deleteNotificationWithHttpInfo(id,);
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
}
/// Performs an HTTP 'DELETE /notifications' operation and returns the [Response].
/// Parameters:
///
/// * [NotificationDeleteAllDto] notificationDeleteAllDto (required):
Future<Response> deleteNotificationsWithHttpInfo(NotificationDeleteAllDto notificationDeleteAllDto,) async {
// ignore: prefer_const_declarations
final apiPath = r'/notifications';
// ignore: prefer_final_locals
Object? postBody = notificationDeleteAllDto;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
const contentTypes = <String>['application/json'];
return apiClient.invokeAPI(
apiPath,
'DELETE',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
/// Parameters:
///
/// * [NotificationDeleteAllDto] notificationDeleteAllDto (required):
Future<void> deleteNotifications(NotificationDeleteAllDto notificationDeleteAllDto,) async {
final response = await deleteNotificationsWithHttpInfo(notificationDeleteAllDto,);
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
}
/// Performs an HTTP 'GET /notifications/{id}' operation and returns the [Response].
/// Parameters:
///
/// * [String] id (required):
Future<Response> getNotificationWithHttpInfo(String id,) async {
// ignore: prefer_const_declarations
final apiPath = r'/notifications/{id}'
.replaceAll('{id}', id);
// ignore: prefer_final_locals
Object? postBody;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
const contentTypes = <String>[];
return apiClient.invokeAPI(
apiPath,
'GET',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
/// Parameters:
///
/// * [String] id (required):
Future<NotificationDto?> getNotification(String id,) async {
final response = await getNotificationWithHttpInfo(id,);
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
// When a remote server returns no body with a status of 204, we shall not decode it.
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
// FormatException when trying to decode an empty string.
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'NotificationDto',) as NotificationDto;
}
return null;
}
/// Performs an HTTP 'GET /notifications' operation and returns the [Response].
/// Parameters:
///
/// * [String] id:
///
/// * [NotificationLevel] level:
///
/// * [NotificationType] type:
///
/// * [bool] unread:
Future<Response> getNotificationsWithHttpInfo({ String? id, NotificationLevel? level, NotificationType? type, bool? unread, }) async {
// ignore: prefer_const_declarations
final apiPath = r'/notifications';
// ignore: prefer_final_locals
Object? postBody;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
if (id != null) {
queryParams.addAll(_queryParams('', 'id', id));
}
if (level != null) {
queryParams.addAll(_queryParams('', 'level', level));
}
if (type != null) {
queryParams.addAll(_queryParams('', 'type', type));
}
if (unread != null) {
queryParams.addAll(_queryParams('', 'unread', unread));
}
const contentTypes = <String>[];
return apiClient.invokeAPI(
apiPath,
'GET',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
/// Parameters:
///
/// * [String] id:
///
/// * [NotificationLevel] level:
///
/// * [NotificationType] type:
///
/// * [bool] unread:
Future<List<NotificationDto>?> getNotifications({ String? id, NotificationLevel? level, NotificationType? type, bool? unread, }) async {
final response = await getNotificationsWithHttpInfo( id: id, level: level, type: type, unread: unread, );
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
// When a remote server returns no body with a status of 204, we shall not decode it.
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
// FormatException when trying to decode an empty string.
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
final responseBody = await _decodeBodyBytes(response);
return (await apiClient.deserializeAsync(responseBody, 'List<NotificationDto>') as List)
.cast<NotificationDto>()
.toList(growable: false);
}
return null;
}
/// Performs an HTTP 'PUT /notifications/{id}' operation and returns the [Response].
/// Parameters:
///
/// * [String] id (required):
///
/// * [NotificationUpdateDto] notificationUpdateDto (required):
Future<Response> updateNotificationWithHttpInfo(String id, NotificationUpdateDto notificationUpdateDto,) async {
// ignore: prefer_const_declarations
final apiPath = r'/notifications/{id}'
.replaceAll('{id}', id);
// ignore: prefer_final_locals
Object? postBody = notificationUpdateDto;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
const contentTypes = <String>['application/json'];
return apiClient.invokeAPI(
apiPath,
'PUT',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
/// Parameters:
///
/// * [String] id (required):
///
/// * [NotificationUpdateDto] notificationUpdateDto (required):
Future<NotificationDto?> updateNotification(String id, NotificationUpdateDto notificationUpdateDto,) async {
final response = await updateNotificationWithHttpInfo(id, notificationUpdateDto,);
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
// When a remote server returns no body with a status of 204, we shall not decode it.
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
// FormatException when trying to decode an empty string.
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'NotificationDto',) as NotificationDto;
}
return null;
}
/// Performs an HTTP 'PUT /notifications' operation and returns the [Response].
/// Parameters:
///
/// * [NotificationUpdateAllDto] notificationUpdateAllDto (required):
Future<Response> updateNotificationsWithHttpInfo(NotificationUpdateAllDto notificationUpdateAllDto,) async {
// ignore: prefer_const_declarations
final apiPath = r'/notifications';
// ignore: prefer_final_locals
Object? postBody = notificationUpdateAllDto;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
const contentTypes = <String>['application/json'];
return apiClient.invokeAPI(
apiPath,
'PUT',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
/// Parameters:
///
/// * [NotificationUpdateAllDto] notificationUpdateAllDto (required):
Future<void> updateNotifications(NotificationUpdateAllDto notificationUpdateAllDto,) async {
final response = await updateNotificationsWithHttpInfo(notificationUpdateAllDto,);
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
}
}


@@ -270,6 +270,8 @@ class ApiClient {
return AssetTypeEnumTypeTransformer().decode(value);
case 'AudioCodec':
return AudioCodecTypeTransformer().decode(value);
case 'AvatarResponse':
return AvatarResponse.fromJson(value);
case 'AvatarUpdate':
return AvatarUpdate.fromJson(value);
case 'BulkIdResponseDto':
@@ -390,28 +392,12 @@ class ApiClient {
return MergePersonDto.fromJson(value);
case 'MetadataSearchDto':
return MetadataSearchDto.fromJson(value);
case 'NotificationCreateDto':
return NotificationCreateDto.fromJson(value);
case 'NotificationDeleteAllDto':
return NotificationDeleteAllDto.fromJson(value);
case 'NotificationDto':
return NotificationDto.fromJson(value);
case 'NotificationLevel':
return NotificationLevelTypeTransformer().decode(value);
case 'NotificationType':
return NotificationTypeTypeTransformer().decode(value);
case 'NotificationUpdateAllDto':
return NotificationUpdateAllDto.fromJson(value);
case 'NotificationUpdateDto':
return NotificationUpdateDto.fromJson(value);
case 'OAuthAuthorizeResponseDto':
return OAuthAuthorizeResponseDto.fromJson(value);
case 'OAuthCallbackDto':
return OAuthCallbackDto.fromJson(value);
case 'OAuthConfigDto':
return OAuthConfigDto.fromJson(value);
case 'OAuthTokenEndpointAuthMethod':
return OAuthTokenEndpointAuthMethodTypeTransformer().decode(value);
case 'OnThisDayDto':
return OnThisDayDto.fromJson(value);
case 'PartnerDirection':


@@ -100,15 +100,6 @@ String parameterToString(dynamic value) {
if (value is MemoryType) {
return MemoryTypeTypeTransformer().encode(value).toString();
}
if (value is NotificationLevel) {
return NotificationLevelTypeTransformer().encode(value).toString();
}
if (value is NotificationType) {
return NotificationTypeTypeTransformer().encode(value).toString();
}
if (value is OAuthTokenEndpointAuthMethod) {
return OAuthTokenEndpointAuthMethodTypeTransformer().encode(value).toString();
}
if (value is PartnerDirection) {
return PartnerDirectionTypeTransformer().encode(value).toString();
}


@@ -10,56 +10,52 @@
part of openapi.api;
class NotificationUpdateDto {
/// Returns a new [NotificationUpdateDto] instance.
NotificationUpdateDto({
this.readAt,
class AvatarResponse {
/// Returns a new [AvatarResponse] instance.
AvatarResponse({
required this.color,
});
DateTime? readAt;
UserAvatarColor color;
@override
bool operator ==(Object other) => identical(this, other) || other is NotificationUpdateDto &&
other.readAt == readAt;
bool operator ==(Object other) => identical(this, other) || other is AvatarResponse &&
other.color == color;
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(readAt == null ? 0 : readAt!.hashCode);
(color.hashCode);
@override
String toString() => 'NotificationUpdateDto[readAt=$readAt]';
String toString() => 'AvatarResponse[color=$color]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
if (this.readAt != null) {
json[r'readAt'] = this.readAt!.toUtc().toIso8601String();
} else {
// json[r'readAt'] = null;
}
json[r'color'] = this.color;
return json;
}
/// Returns a new [NotificationUpdateDto] instance and imports its values from
/// Returns a new [AvatarResponse] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static NotificationUpdateDto? fromJson(dynamic value) {
upgradeDto(value, "NotificationUpdateDto");
static AvatarResponse? fromJson(dynamic value) {
upgradeDto(value, "AvatarResponse");
if (value is Map) {
final json = value.cast<String, dynamic>();
return NotificationUpdateDto(
readAt: mapDateTime(json, r'readAt', r''),
return AvatarResponse(
color: UserAvatarColor.fromJson(json[r'color'])!,
);
}
return null;
}
static List<NotificationUpdateDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <NotificationUpdateDto>[];
static List<AvatarResponse> listFromJson(dynamic json, {bool growable = false,}) {
final result = <AvatarResponse>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = NotificationUpdateDto.fromJson(row);
final value = AvatarResponse.fromJson(row);
if (value != null) {
result.add(value);
}
@@ -68,12 +64,12 @@ class NotificationUpdateDto {
return result.toList(growable: growable);
}
static Map<String, NotificationUpdateDto> mapFromJson(dynamic json) {
final map = <String, NotificationUpdateDto>{};
static Map<String, AvatarResponse> mapFromJson(dynamic json) {
final map = <String, AvatarResponse>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = NotificationUpdateDto.fromJson(entry.value);
final value = AvatarResponse.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
@@ -82,14 +78,14 @@ class NotificationUpdateDto {
return map;
}
// maps a json object with a list of NotificationUpdateDto-objects as value to a dart map
static Map<String, List<NotificationUpdateDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<NotificationUpdateDto>>{};
// maps a json object with a list of AvatarResponse-objects as value to a dart map
static Map<String, List<AvatarResponse>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<AvatarResponse>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = NotificationUpdateDto.listFromJson(entry.value, growable: growable,);
map[entry.key] = AvatarResponse.listFromJson(entry.value, growable: growable,);
}
}
return map;
@@ -97,6 +93,7 @@ class NotificationUpdateDto {
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'color',
};
}


@@ -1,180 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class NotificationCreateDto {
/// Returns a new [NotificationCreateDto] instance.
NotificationCreateDto({
this.data,
this.description,
this.level,
this.readAt,
required this.title,
this.type,
required this.userId,
});
///
/// Please note: This property should have been non-nullable! Since the specification file
/// does not include a default value (using the "default:" property), however, the generated
/// source code must fall back to having a nullable type.
/// Consider adding a "default:" property in the specification file to hide this note.
///
Object? data;
String? description;
///
/// Please note: This property should have been non-nullable! Since the specification file
/// does not include a default value (using the "default:" property), however, the generated
/// source code must fall back to having a nullable type.
/// Consider adding a "default:" property in the specification file to hide this note.
///
NotificationLevel? level;
DateTime? readAt;
String title;
///
/// Please note: This property should have been non-nullable! Since the specification file
/// does not include a default value (using the "default:" property), however, the generated
/// source code must fall back to having a nullable type.
/// Consider adding a "default:" property in the specification file to hide this note.
///
NotificationType? type;
String userId;
@override
bool operator ==(Object other) => identical(this, other) || other is NotificationCreateDto &&
other.data == data &&
other.description == description &&
other.level == level &&
other.readAt == readAt &&
other.title == title &&
other.type == type &&
other.userId == userId;
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(data == null ? 0 : data!.hashCode) +
(description == null ? 0 : description!.hashCode) +
(level == null ? 0 : level!.hashCode) +
(readAt == null ? 0 : readAt!.hashCode) +
(title.hashCode) +
(type == null ? 0 : type!.hashCode) +
(userId.hashCode);
@override
String toString() => 'NotificationCreateDto[data=$data, description=$description, level=$level, readAt=$readAt, title=$title, type=$type, userId=$userId]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
if (this.data != null) {
json[r'data'] = this.data;
} else {
// json[r'data'] = null;
}
if (this.description != null) {
json[r'description'] = this.description;
} else {
// json[r'description'] = null;
}
if (this.level != null) {
json[r'level'] = this.level;
} else {
// json[r'level'] = null;
}
if (this.readAt != null) {
json[r'readAt'] = this.readAt!.toUtc().toIso8601String();
} else {
// json[r'readAt'] = null;
}
json[r'title'] = this.title;
if (this.type != null) {
json[r'type'] = this.type;
} else {
// json[r'type'] = null;
}
json[r'userId'] = this.userId;
return json;
}
/// Returns a new [NotificationCreateDto] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static NotificationCreateDto? fromJson(dynamic value) {
upgradeDto(value, "NotificationCreateDto");
if (value is Map) {
final json = value.cast<String, dynamic>();
return NotificationCreateDto(
data: mapValueOfType<Object>(json, r'data'),
description: mapValueOfType<String>(json, r'description'),
level: NotificationLevel.fromJson(json[r'level']),
readAt: mapDateTime(json, r'readAt', r''),
title: mapValueOfType<String>(json, r'title')!,
type: NotificationType.fromJson(json[r'type']),
userId: mapValueOfType<String>(json, r'userId')!,
);
}
return null;
}
static List<NotificationCreateDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <NotificationCreateDto>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = NotificationCreateDto.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, NotificationCreateDto> mapFromJson(dynamic json) {
final map = <String, NotificationCreateDto>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = NotificationCreateDto.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of NotificationCreateDto-objects as value to a dart map
static Map<String, List<NotificationCreateDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<NotificationCreateDto>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = NotificationCreateDto.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'title',
'userId',
};
}


@@ -1,101 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class NotificationDeleteAllDto {
/// Returns a new [NotificationDeleteAllDto] instance.
NotificationDeleteAllDto({
this.ids = const [],
});
List<String> ids;
@override
bool operator ==(Object other) => identical(this, other) || other is NotificationDeleteAllDto &&
_deepEquality.equals(other.ids, ids);
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(ids.hashCode);
@override
String toString() => 'NotificationDeleteAllDto[ids=$ids]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'ids'] = this.ids;
return json;
}
/// Returns a new [NotificationDeleteAllDto] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static NotificationDeleteAllDto? fromJson(dynamic value) {
upgradeDto(value, "NotificationDeleteAllDto");
if (value is Map) {
final json = value.cast<String, dynamic>();
return NotificationDeleteAllDto(
ids: json[r'ids'] is Iterable
? (json[r'ids'] as Iterable).cast<String>().toList(growable: false)
: const [],
);
}
return null;
}
static List<NotificationDeleteAllDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <NotificationDeleteAllDto>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = NotificationDeleteAllDto.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, NotificationDeleteAllDto> mapFromJson(dynamic json) {
final map = <String, NotificationDeleteAllDto>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = NotificationDeleteAllDto.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of NotificationDeleteAllDto-objects as value to a dart map
static Map<String, List<NotificationDeleteAllDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<NotificationDeleteAllDto>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = NotificationDeleteAllDto.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'ids',
};
}


@@ -1,182 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class NotificationDto {
/// Returns a new [NotificationDto] instance.
NotificationDto({
required this.createdAt,
this.data,
this.description,
required this.id,
required this.level,
this.readAt,
required this.title,
required this.type,
});
DateTime createdAt;
///
/// Please note: This property should have been non-nullable! Since the specification file
/// does not include a default value (using the "default:" property), however, the generated
/// source code must fall back to having a nullable type.
/// Consider adding a "default:" property in the specification file to hide this note.
///
Object? data;
///
/// Please note: This property should have been non-nullable! Since the specification file
/// does not include a default value (using the "default:" property), however, the generated
/// source code must fall back to having a nullable type.
/// Consider adding a "default:" property in the specification file to hide this note.
///
String? description;
String id;
NotificationLevel level;
///
/// Please note: This property should have been non-nullable! Since the specification file
/// does not include a default value (using the "default:" property), however, the generated
/// source code must fall back to having a nullable type.
/// Consider adding a "default:" property in the specification file to hide this note.
///
DateTime? readAt;
String title;
NotificationType type;
@override
bool operator ==(Object other) => identical(this, other) || other is NotificationDto &&
other.createdAt == createdAt &&
other.data == data &&
other.description == description &&
other.id == id &&
other.level == level &&
other.readAt == readAt &&
other.title == title &&
other.type == type;
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(createdAt.hashCode) +
(data == null ? 0 : data!.hashCode) +
(description == null ? 0 : description!.hashCode) +
(id.hashCode) +
(level.hashCode) +
(readAt == null ? 0 : readAt!.hashCode) +
(title.hashCode) +
(type.hashCode);
@override
String toString() => 'NotificationDto[createdAt=$createdAt, data=$data, description=$description, id=$id, level=$level, readAt=$readAt, title=$title, type=$type]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'createdAt'] = this.createdAt.toUtc().toIso8601String();
if (this.data != null) {
json[r'data'] = this.data;
} else {
// json[r'data'] = null;
}
if (this.description != null) {
json[r'description'] = this.description;
} else {
// json[r'description'] = null;
}
json[r'id'] = this.id;
json[r'level'] = this.level;
if (this.readAt != null) {
json[r'readAt'] = this.readAt!.toUtc().toIso8601String();
} else {
// json[r'readAt'] = null;
}
json[r'title'] = this.title;
json[r'type'] = this.type;
return json;
}
/// Returns a new [NotificationDto] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static NotificationDto? fromJson(dynamic value) {
upgradeDto(value, "NotificationDto");
if (value is Map) {
final json = value.cast<String, dynamic>();
return NotificationDto(
createdAt: mapDateTime(json, r'createdAt', r'')!,
data: mapValueOfType<Object>(json, r'data'),
description: mapValueOfType<String>(json, r'description'),
id: mapValueOfType<String>(json, r'id')!,
level: NotificationLevel.fromJson(json[r'level'])!,
readAt: mapDateTime(json, r'readAt', r''),
title: mapValueOfType<String>(json, r'title')!,
type: NotificationType.fromJson(json[r'type'])!,
);
}
return null;
}
static List<NotificationDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <NotificationDto>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = NotificationDto.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, NotificationDto> mapFromJson(dynamic json) {
final map = <String, NotificationDto>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = NotificationDto.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of NotificationDto-objects as value to a dart map
static Map<String, List<NotificationDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<NotificationDto>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = NotificationDto.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'createdAt',
'id',
'level',
'title',
'type',
};
}


@@ -1,91 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class NotificationLevel {
/// Instantiate a new enum with the provided [value].
const NotificationLevel._(this.value);
/// The underlying value of this enum member.
final String value;
@override
String toString() => value;
String toJson() => value;
static const success = NotificationLevel._(r'success');
static const error = NotificationLevel._(r'error');
static const warning = NotificationLevel._(r'warning');
static const info = NotificationLevel._(r'info');
/// List of all possible values in this [enum][NotificationLevel].
static const values = <NotificationLevel>[
success,
error,
warning,
info,
];
static NotificationLevel? fromJson(dynamic value) => NotificationLevelTypeTransformer().decode(value);
static List<NotificationLevel> listFromJson(dynamic json, {bool growable = false,}) {
final result = <NotificationLevel>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = NotificationLevel.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
}
/// Transformation class that can [encode] an instance of [NotificationLevel] to String,
/// and [decode] dynamic data back to [NotificationLevel].
class NotificationLevelTypeTransformer {
factory NotificationLevelTypeTransformer() => _instance ??= const NotificationLevelTypeTransformer._();
const NotificationLevelTypeTransformer._();
String encode(NotificationLevel data) => data.value;
/// Decodes a [dynamic value][data] to a NotificationLevel.
///
/// If [allowNull] is true and the [dynamic value][data] cannot be decoded successfully,
/// then null is returned. However, if [allowNull] is false and the [dynamic value][data]
/// cannot be decoded successfully, then an [UnimplementedError] is thrown.
///
/// The [allowNull] is very handy when an API changes and a new enum value is added or removed,
/// and users are still using an old app with the old code.
NotificationLevel? decode(dynamic data, {bool allowNull = true}) {
if (data != null) {
switch (data) {
case r'success': return NotificationLevel.success;
case r'error': return NotificationLevel.error;
case r'warning': return NotificationLevel.warning;
case r'info': return NotificationLevel.info;
default:
if (!allowNull) {
throw ArgumentError('Unknown enum value to decode: $data');
}
}
}
return null;
}
/// Singleton [NotificationLevelTypeTransformer] instance.
static NotificationLevelTypeTransformer? _instance;
}


@@ -1,91 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class NotificationType {
/// Instantiate a new enum with the provided [value].
const NotificationType._(this.value);
/// The underlying value of this enum member.
final String value;
@override
String toString() => value;
String toJson() => value;
static const jobFailed = NotificationType._(r'JobFailed');
static const backupFailed = NotificationType._(r'BackupFailed');
static const systemMessage = NotificationType._(r'SystemMessage');
static const custom = NotificationType._(r'Custom');
/// List of all possible values in this [enum][NotificationType].
static const values = <NotificationType>[
jobFailed,
backupFailed,
systemMessage,
custom,
];
static NotificationType? fromJson(dynamic value) => NotificationTypeTypeTransformer().decode(value);
static List<NotificationType> listFromJson(dynamic json, {bool growable = false,}) {
final result = <NotificationType>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = NotificationType.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
}
/// Transformation class that can [encode] an instance of [NotificationType] to String,
/// and [decode] dynamic data back to [NotificationType].
class NotificationTypeTypeTransformer {
factory NotificationTypeTypeTransformer() => _instance ??= const NotificationTypeTypeTransformer._();
const NotificationTypeTypeTransformer._();
String encode(NotificationType data) => data.value;
/// Decodes a [dynamic value][data] to a NotificationType.
///
/// If [allowNull] is true and the [dynamic value][data] cannot be decoded successfully,
/// then null is returned. However, if [allowNull] is false and the [dynamic value][data]
/// cannot be decoded successfully, then an [UnimplementedError] is thrown.
///
/// The [allowNull] is very handy when an API changes and a new enum value is added or removed,
/// and users are still using an old app with the old code.
NotificationType? decode(dynamic data, {bool allowNull = true}) {
if (data != null) {
switch (data) {
case r'JobFailed': return NotificationType.jobFailed;
case r'BackupFailed': return NotificationType.backupFailed;
case r'SystemMessage': return NotificationType.systemMessage;
case r'Custom': return NotificationType.custom;
default:
if (!allowNull) {
throw ArgumentError('Unknown enum value to decode: $data');
}
}
}
return null;
}
/// Singleton [NotificationTypeTypeTransformer] instance.
static NotificationTypeTypeTransformer? _instance;
}


@@ -1,112 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class NotificationUpdateAllDto {
/// Returns a new [NotificationUpdateAllDto] instance.
NotificationUpdateAllDto({
this.ids = const [],
this.readAt,
});
List<String> ids;
DateTime? readAt;
@override
bool operator ==(Object other) => identical(this, other) || other is NotificationUpdateAllDto &&
_deepEquality.equals(other.ids, ids) &&
other.readAt == readAt;
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(ids.hashCode) +
(readAt == null ? 0 : readAt!.hashCode);
@override
String toString() => 'NotificationUpdateAllDto[ids=$ids, readAt=$readAt]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'ids'] = this.ids;
if (this.readAt != null) {
json[r'readAt'] = this.readAt!.toUtc().toIso8601String();
} else {
// json[r'readAt'] = null;
}
return json;
}
/// Returns a new [NotificationUpdateAllDto] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static NotificationUpdateAllDto? fromJson(dynamic value) {
upgradeDto(value, "NotificationUpdateAllDto");
if (value is Map) {
final json = value.cast<String, dynamic>();
return NotificationUpdateAllDto(
ids: json[r'ids'] is Iterable
? (json[r'ids'] as Iterable).cast<String>().toList(growable: false)
: const [],
readAt: mapDateTime(json, r'readAt', r''),
);
}
return null;
}
static List<NotificationUpdateAllDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <NotificationUpdateAllDto>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = NotificationUpdateAllDto.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, NotificationUpdateAllDto> mapFromJson(dynamic json) {
final map = <String, NotificationUpdateAllDto>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = NotificationUpdateAllDto.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of NotificationUpdateAllDto-objects as value to a dart map
static Map<String, List<NotificationUpdateAllDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<NotificationUpdateAllDto>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = NotificationUpdateAllDto.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'ids',
};
}

View File

@@ -1,85 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class OAuthTokenEndpointAuthMethod {
/// Instantiate a new enum with the provided [value].
const OAuthTokenEndpointAuthMethod._(this.value);
/// The underlying value of this enum member.
final String value;
@override
String toString() => value;
String toJson() => value;
static const post = OAuthTokenEndpointAuthMethod._(r'client_secret_post');
static const basic = OAuthTokenEndpointAuthMethod._(r'client_secret_basic');
/// List of all possible values in this [enum][OAuthTokenEndpointAuthMethod].
static const values = <OAuthTokenEndpointAuthMethod>[
post,
basic,
];
static OAuthTokenEndpointAuthMethod? fromJson(dynamic value) => OAuthTokenEndpointAuthMethodTypeTransformer().decode(value);
static List<OAuthTokenEndpointAuthMethod> listFromJson(dynamic json, {bool growable = false,}) {
final result = <OAuthTokenEndpointAuthMethod>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = OAuthTokenEndpointAuthMethod.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
}
/// Transformation class that can [encode] an instance of [OAuthTokenEndpointAuthMethod] to String,
/// and [decode] dynamic data back to [OAuthTokenEndpointAuthMethod].
class OAuthTokenEndpointAuthMethodTypeTransformer {
factory OAuthTokenEndpointAuthMethodTypeTransformer() => _instance ??= const OAuthTokenEndpointAuthMethodTypeTransformer._();
const OAuthTokenEndpointAuthMethodTypeTransformer._();
String encode(OAuthTokenEndpointAuthMethod data) => data.value;
/// Decodes a [dynamic value][data] to a OAuthTokenEndpointAuthMethod.
///
/// If [allowNull] is true and the [dynamic value][data] cannot be decoded successfully,
/// then null is returned. However, if [allowNull] is false and the [dynamic value][data]
/// cannot be decoded successfully, then an [UnimplementedError] is thrown.
///
/// The [allowNull] is very handy when an API changes and a new enum value is added or removed,
/// and users are still using an old app with the old code.
OAuthTokenEndpointAuthMethod? decode(dynamic data, {bool allowNull = true}) {
if (data != null) {
switch (data) {
case r'client_secret_post': return OAuthTokenEndpointAuthMethod.post;
case r'client_secret_basic': return OAuthTokenEndpointAuthMethod.basic;
default:
if (!allowNull) {
throw ArgumentError('Unknown enum value to decode: $data');
}
}
}
return null;
}
/// Singleton [OAuthTokenEndpointAuthMethodTypeTransformer] instance.
static OAuthTokenEndpointAuthMethodTypeTransformer? _instance;
}


@@ -66,10 +66,6 @@ class Permission {
static const memoryPeriodRead = Permission._(r'memory.read');
static const memoryPeriodUpdate = Permission._(r'memory.update');
static const memoryPeriodDelete = Permission._(r'memory.delete');
static const notificationPeriodCreate = Permission._(r'notification.create');
static const notificationPeriodRead = Permission._(r'notification.read');
static const notificationPeriodUpdate = Permission._(r'notification.update');
static const notificationPeriodDelete = Permission._(r'notification.delete');
static const partnerPeriodCreate = Permission._(r'partner.create');
static const partnerPeriodRead = Permission._(r'partner.read');
static const partnerPeriodUpdate = Permission._(r'partner.update');
@@ -151,10 +147,6 @@ class Permission {
memoryPeriodRead,
memoryPeriodUpdate,
memoryPeriodDelete,
notificationPeriodCreate,
notificationPeriodRead,
notificationPeriodUpdate,
notificationPeriodDelete,
partnerPeriodCreate,
partnerPeriodRead,
partnerPeriodUpdate,
@@ -271,10 +263,6 @@ class PermissionTypeTransformer {
case r'memory.read': return Permission.memoryPeriodRead;
case r'memory.update': return Permission.memoryPeriodUpdate;
case r'memory.delete': return Permission.memoryPeriodDelete;
case r'notification.create': return Permission.notificationPeriodCreate;
case r'notification.read': return Permission.notificationPeriodRead;
case r'notification.update': return Permission.notificationPeriodUpdate;
case r'notification.delete': return Permission.notificationPeriodDelete;
case r'partner.create': return Permission.partnerPeriodCreate;
case r'partner.read': return Permission.partnerPeriodRead;
case r'partner.update': return Permission.partnerPeriodUpdate;


@@ -28,8 +28,6 @@ class SystemConfigOAuthDto {
required this.signingAlgorithm,
required this.storageLabelClaim,
required this.storageQuotaClaim,
required this.timeout,
required this.tokenEndpointAuthMethod,
});
bool autoLaunch;
@@ -63,11 +61,6 @@ class SystemConfigOAuthDto {
String storageQuotaClaim;
/// Minimum value: 1
int timeout;
OAuthTokenEndpointAuthMethod tokenEndpointAuthMethod;
@override
bool operator ==(Object other) => identical(this, other) || other is SystemConfigOAuthDto &&
other.autoLaunch == autoLaunch &&
@@ -84,9 +77,7 @@ class SystemConfigOAuthDto {
other.scope == scope &&
other.signingAlgorithm == signingAlgorithm &&
other.storageLabelClaim == storageLabelClaim &&
other.storageQuotaClaim == storageQuotaClaim &&
other.timeout == timeout &&
other.tokenEndpointAuthMethod == tokenEndpointAuthMethod;
other.storageQuotaClaim == storageQuotaClaim;
@override
int get hashCode =>
@@ -105,12 +96,10 @@ class SystemConfigOAuthDto {
(scope.hashCode) +
(signingAlgorithm.hashCode) +
(storageLabelClaim.hashCode) +
(storageQuotaClaim.hashCode) +
(timeout.hashCode) +
(tokenEndpointAuthMethod.hashCode);
(storageQuotaClaim.hashCode);
@override
String toString() => 'SystemConfigOAuthDto[autoLaunch=$autoLaunch, autoRegister=$autoRegister, buttonText=$buttonText, clientId=$clientId, clientSecret=$clientSecret, defaultStorageQuota=$defaultStorageQuota, enabled=$enabled, issuerUrl=$issuerUrl, mobileOverrideEnabled=$mobileOverrideEnabled, mobileRedirectUri=$mobileRedirectUri, profileSigningAlgorithm=$profileSigningAlgorithm, scope=$scope, signingAlgorithm=$signingAlgorithm, storageLabelClaim=$storageLabelClaim, storageQuotaClaim=$storageQuotaClaim, timeout=$timeout, tokenEndpointAuthMethod=$tokenEndpointAuthMethod]';
String toString() => 'SystemConfigOAuthDto[autoLaunch=$autoLaunch, autoRegister=$autoRegister, buttonText=$buttonText, clientId=$clientId, clientSecret=$clientSecret, defaultStorageQuota=$defaultStorageQuota, enabled=$enabled, issuerUrl=$issuerUrl, mobileOverrideEnabled=$mobileOverrideEnabled, mobileRedirectUri=$mobileRedirectUri, profileSigningAlgorithm=$profileSigningAlgorithm, scope=$scope, signingAlgorithm=$signingAlgorithm, storageLabelClaim=$storageLabelClaim, storageQuotaClaim=$storageQuotaClaim]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
@@ -129,8 +118,6 @@ class SystemConfigOAuthDto {
json[r'signingAlgorithm'] = this.signingAlgorithm;
json[r'storageLabelClaim'] = this.storageLabelClaim;
json[r'storageQuotaClaim'] = this.storageQuotaClaim;
json[r'timeout'] = this.timeout;
json[r'tokenEndpointAuthMethod'] = this.tokenEndpointAuthMethod;
return json;
}
@@ -158,8 +145,6 @@ class SystemConfigOAuthDto {
signingAlgorithm: mapValueOfType<String>(json, r'signingAlgorithm')!,
storageLabelClaim: mapValueOfType<String>(json, r'storageLabelClaim')!,
storageQuotaClaim: mapValueOfType<String>(json, r'storageQuotaClaim')!,
timeout: mapValueOfType<int>(json, r'timeout')!,
tokenEndpointAuthMethod: OAuthTokenEndpointAuthMethod.fromJson(json[r'tokenEndpointAuthMethod'])!,
);
}
return null;
@@ -222,8 +207,6 @@ class SystemConfigOAuthDto {
'signingAlgorithm',
'storageLabelClaim',
'storageQuotaClaim',
'timeout',
'tokenEndpointAuthMethod',
};
}

View File

@@ -13,7 +13,6 @@ part of openapi.api;
class UserAdminCreateDto {
/// Returns a new [UserAdminCreateDto] instance.
UserAdminCreateDto({
this.avatarColor,
required this.email,
required this.name,
this.notify,
@@ -23,8 +22,6 @@ class UserAdminCreateDto {
this.storageLabel,
});
UserAvatarColor? avatarColor;
String email;
String name;
@@ -54,7 +51,6 @@ class UserAdminCreateDto {
@override
bool operator ==(Object other) => identical(this, other) || other is UserAdminCreateDto &&
other.avatarColor == avatarColor &&
other.email == email &&
other.name == name &&
other.notify == notify &&
@@ -66,7 +62,6 @@ class UserAdminCreateDto {
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(avatarColor == null ? 0 : avatarColor!.hashCode) +
(email.hashCode) +
(name.hashCode) +
(notify == null ? 0 : notify!.hashCode) +
@@ -76,15 +71,10 @@ class UserAdminCreateDto {
(storageLabel == null ? 0 : storageLabel!.hashCode);
@override
String toString() => 'UserAdminCreateDto[avatarColor=$avatarColor, email=$email, name=$name, notify=$notify, password=$password, quotaSizeInBytes=$quotaSizeInBytes, shouldChangePassword=$shouldChangePassword, storageLabel=$storageLabel]';
String toString() => 'UserAdminCreateDto[email=$email, name=$name, notify=$notify, password=$password, quotaSizeInBytes=$quotaSizeInBytes, shouldChangePassword=$shouldChangePassword, storageLabel=$storageLabel]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
if (this.avatarColor != null) {
json[r'avatarColor'] = this.avatarColor;
} else {
// json[r'avatarColor'] = null;
}
json[r'email'] = this.email;
json[r'name'] = this.name;
if (this.notify != null) {
@@ -120,7 +110,6 @@ class UserAdminCreateDto {
final json = value.cast<String, dynamic>();
return UserAdminCreateDto(
avatarColor: UserAvatarColor.fromJson(json[r'avatarColor']),
email: mapValueOfType<String>(json, r'email')!,
name: mapValueOfType<String>(json, r'name')!,
notify: mapValueOfType<bool>(json, r'notify'),

View File

@@ -13,7 +13,6 @@ part of openapi.api;
class UserAdminUpdateDto {
/// Returns a new [UserAdminUpdateDto] instance.
UserAdminUpdateDto({
this.avatarColor,
this.email,
this.name,
this.password,
@@ -22,8 +21,6 @@ class UserAdminUpdateDto {
this.storageLabel,
});
UserAvatarColor? avatarColor;
///
/// Please note: This property should have been non-nullable! Since the specification file
/// does not include a default value (using the "default:" property), however, the generated
@@ -63,7 +60,6 @@ class UserAdminUpdateDto {
@override
bool operator ==(Object other) => identical(this, other) || other is UserAdminUpdateDto &&
other.avatarColor == avatarColor &&
other.email == email &&
other.name == name &&
other.password == password &&
@@ -74,7 +70,6 @@ class UserAdminUpdateDto {
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(avatarColor == null ? 0 : avatarColor!.hashCode) +
(email == null ? 0 : email!.hashCode) +
(name == null ? 0 : name!.hashCode) +
(password == null ? 0 : password!.hashCode) +
@@ -83,15 +78,10 @@ class UserAdminUpdateDto {
(storageLabel == null ? 0 : storageLabel!.hashCode);
@override
String toString() => 'UserAdminUpdateDto[avatarColor=$avatarColor, email=$email, name=$name, password=$password, quotaSizeInBytes=$quotaSizeInBytes, shouldChangePassword=$shouldChangePassword, storageLabel=$storageLabel]';
String toString() => 'UserAdminUpdateDto[email=$email, name=$name, password=$password, quotaSizeInBytes=$quotaSizeInBytes, shouldChangePassword=$shouldChangePassword, storageLabel=$storageLabel]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
if (this.avatarColor != null) {
json[r'avatarColor'] = this.avatarColor;
} else {
// json[r'avatarColor'] = null;
}
if (this.email != null) {
json[r'email'] = this.email;
} else {
@@ -134,7 +124,6 @@ class UserAdminUpdateDto {
final json = value.cast<String, dynamic>();
return UserAdminUpdateDto(
avatarColor: UserAvatarColor.fromJson(json[r'avatarColor']),
email: mapValueOfType<String>(json, r'email'),
name: mapValueOfType<String>(json, r'name'),
password: mapValueOfType<String>(json, r'password'),

View File

@@ -13,6 +13,7 @@ part of openapi.api;
class UserPreferencesResponseDto {
/// Returns a new [UserPreferencesResponseDto] instance.
UserPreferencesResponseDto({
required this.avatar,
required this.download,
required this.emailNotifications,
required this.folders,
@@ -24,6 +25,8 @@ class UserPreferencesResponseDto {
required this.tags,
});
AvatarResponse avatar;
DownloadResponse download;
EmailNotificationsResponse emailNotifications;
@@ -44,6 +47,7 @@ class UserPreferencesResponseDto {
@override
bool operator ==(Object other) => identical(this, other) || other is UserPreferencesResponseDto &&
other.avatar == avatar &&
other.download == download &&
other.emailNotifications == emailNotifications &&
other.folders == folders &&
@@ -57,6 +61,7 @@ class UserPreferencesResponseDto {
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(avatar.hashCode) +
(download.hashCode) +
(emailNotifications.hashCode) +
(folders.hashCode) +
@@ -68,10 +73,11 @@ class UserPreferencesResponseDto {
(tags.hashCode);
@override
String toString() => 'UserPreferencesResponseDto[download=$download, emailNotifications=$emailNotifications, folders=$folders, memories=$memories, people=$people, purchase=$purchase, ratings=$ratings, sharedLinks=$sharedLinks, tags=$tags]';
String toString() => 'UserPreferencesResponseDto[avatar=$avatar, download=$download, emailNotifications=$emailNotifications, folders=$folders, memories=$memories, people=$people, purchase=$purchase, ratings=$ratings, sharedLinks=$sharedLinks, tags=$tags]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'avatar'] = this.avatar;
json[r'download'] = this.download;
json[r'emailNotifications'] = this.emailNotifications;
json[r'folders'] = this.folders;
@@ -93,6 +99,7 @@ class UserPreferencesResponseDto {
final json = value.cast<String, dynamic>();
return UserPreferencesResponseDto(
avatar: AvatarResponse.fromJson(json[r'avatar'])!,
download: DownloadResponse.fromJson(json[r'download'])!,
emailNotifications: EmailNotificationsResponse.fromJson(json[r'emailNotifications'])!,
folders: FoldersResponse.fromJson(json[r'folders'])!,
@@ -149,6 +156,7 @@ class UserPreferencesResponseDto {
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'avatar',
'download',
'emailNotifications',
'folders',

View File

@@ -13,14 +13,11 @@ part of openapi.api;
class UserUpdateMeDto {
/// Returns a new [UserUpdateMeDto] instance.
UserUpdateMeDto({
this.avatarColor,
this.email,
this.name,
this.password,
});
UserAvatarColor? avatarColor;
///
/// Please note: This property should have been non-nullable! Since the specification file
/// does not include a default value (using the "default:" property), however, the generated
@@ -47,7 +44,6 @@ class UserUpdateMeDto {
@override
bool operator ==(Object other) => identical(this, other) || other is UserUpdateMeDto &&
other.avatarColor == avatarColor &&
other.email == email &&
other.name == name &&
other.password == password;
@@ -55,21 +51,15 @@ class UserUpdateMeDto {
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(avatarColor == null ? 0 : avatarColor!.hashCode) +
(email == null ? 0 : email!.hashCode) +
(name == null ? 0 : name!.hashCode) +
(password == null ? 0 : password!.hashCode);
@override
String toString() => 'UserUpdateMeDto[avatarColor=$avatarColor, email=$email, name=$name, password=$password]';
String toString() => 'UserUpdateMeDto[email=$email, name=$name, password=$password]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
if (this.avatarColor != null) {
json[r'avatarColor'] = this.avatarColor;
} else {
// json[r'avatarColor'] = null;
}
if (this.email != null) {
json[r'email'] = this.email;
} else {
@@ -97,7 +87,6 @@ class UserUpdateMeDto {
final json = value.cast<String, dynamic>();
return UserUpdateMeDto(
avatarColor: UserAvatarColor.fromJson(json[r'avatarColor']),
email: mapValueOfType<String>(json, r'email'),
name: mapValueOfType<String>(json, r'name'),
password: mapValueOfType<String>(json, r'password'),

View File

@@ -206,141 +206,6 @@
]
}
},
"/admin/notifications": {
"post": {
"operationId": "createNotification",
"parameters": [],
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/NotificationCreateDto"
}
}
},
"required": true
},
"responses": {
"201": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/NotificationDto"
}
}
},
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"tags": [
"Notifications (Admin)"
]
}
},
"/admin/notifications/templates/{name}": {
"post": {
"operationId": "getNotificationTemplateAdmin",
"parameters": [
{
"name": "name",
"required": true,
"in": "path",
"schema": {
"type": "string"
}
}
],
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/TemplateDto"
}
}
},
"required": true
},
"responses": {
"200": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/TemplateResponseDto"
}
}
},
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"tags": [
"Notifications (Admin)"
]
}
},
"/admin/notifications/test-email": {
"post": {
"operationId": "sendTestEmailAdmin",
"parameters": [],
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/SystemConfigSmtpDto"
}
}
},
"required": true
},
"responses": {
"200": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/TestEmailResponseDto"
}
}
},
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"tags": [
"Notifications (Admin)"
]
}
},
"/admin/users": {
"get": {
"operationId": "searchUsersAdmin",
@@ -3620,86 +3485,35 @@
]
}
},
"/notifications": {
"delete": {
"operationId": "deleteNotifications",
"parameters": [],
"/notifications/admin/templates/{name}": {
"post": {
"operationId": "getNotificationTemplateAdmin",
"parameters": [
{
"name": "name",
"required": true,
"in": "path",
"schema": {
"type": "string"
}
}
],
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/NotificationDeleteAllDto"
"$ref": "#/components/schemas/TemplateDto"
}
}
},
"required": true
},
"responses": {
"200": {
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"tags": [
"Notifications"
]
},
"get": {
"operationId": "getNotifications",
"parameters": [
{
"name": "id",
"required": false,
"in": "query",
"schema": {
"format": "uuid",
"type": "string"
}
},
{
"name": "level",
"required": false,
"in": "query",
"schema": {
"$ref": "#/components/schemas/NotificationLevel"
}
},
{
"name": "type",
"required": false,
"in": "query",
"schema": {
"$ref": "#/components/schemas/NotificationType"
}
},
{
"name": "unread",
"required": false,
"in": "query",
"schema": {
"type": "boolean"
}
}
],
"responses": {
"200": {
"content": {
"application/json": {
"schema": {
"items": {
"$ref": "#/components/schemas/NotificationDto"
},
"type": "array"
"$ref": "#/components/schemas/TemplateResponseDto"
}
}
},
@@ -3718,135 +3532,19 @@
}
],
"tags": [
"Notifications"
]
},
"put": {
"operationId": "updateNotifications",
"parameters": [],
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/NotificationUpdateAllDto"
}
}
},
"required": true
},
"responses": {
"200": {
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"tags": [
"Notifications"
"Notifications (Admin)"
]
}
},
"/notifications/{id}": {
"delete": {
"operationId": "deleteNotification",
"parameters": [
{
"name": "id",
"required": true,
"in": "path",
"schema": {
"format": "uuid",
"type": "string"
}
}
],
"responses": {
"200": {
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"tags": [
"Notifications"
]
},
"get": {
"operationId": "getNotification",
"parameters": [
{
"name": "id",
"required": true,
"in": "path",
"schema": {
"format": "uuid",
"type": "string"
}
}
],
"responses": {
"200": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/NotificationDto"
}
}
},
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"tags": [
"Notifications"
]
},
"put": {
"operationId": "updateNotification",
"parameters": [
{
"name": "id",
"required": true,
"in": "path",
"schema": {
"format": "uuid",
"type": "string"
}
}
],
"/notifications/admin/test-email": {
"post": {
"operationId": "sendTestEmailAdmin",
"parameters": [],
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/NotificationUpdateDto"
"$ref": "#/components/schemas/SystemConfigSmtpDto"
}
}
},
@@ -3857,7 +3555,7 @@
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/NotificationDto"
"$ref": "#/components/schemas/TestEmailResponseDto"
}
}
},
@@ -3876,7 +3574,7 @@
}
],
"tags": [
"Notifications"
"Notifications (Admin)"
]
}
},
@@ -9186,6 +8884,21 @@
],
"type": "string"
},
"AvatarResponse": {
"properties": {
"color": {
"allOf": [
{
"$ref": "#/components/schemas/UserAvatarColor"
}
]
}
},
"required": [
"color"
],
"type": "object"
},
"AvatarUpdate": {
"properties": {
"color": {
@@ -10628,157 +10341,6 @@
},
"type": "object"
},
"NotificationCreateDto": {
"properties": {
"data": {
"type": "object"
},
"description": {
"nullable": true,
"type": "string"
},
"level": {
"allOf": [
{
"$ref": "#/components/schemas/NotificationLevel"
}
]
},
"readAt": {
"format": "date-time",
"nullable": true,
"type": "string"
},
"title": {
"type": "string"
},
"type": {
"allOf": [
{
"$ref": "#/components/schemas/NotificationType"
}
]
},
"userId": {
"format": "uuid",
"type": "string"
}
},
"required": [
"title",
"userId"
],
"type": "object"
},
"NotificationDeleteAllDto": {
"properties": {
"ids": {
"items": {
"format": "uuid",
"type": "string"
},
"type": "array"
}
},
"required": [
"ids"
],
"type": "object"
},
"NotificationDto": {
"properties": {
"createdAt": {
"format": "date-time",
"type": "string"
},
"data": {
"type": "object"
},
"description": {
"type": "string"
},
"id": {
"type": "string"
},
"level": {
"allOf": [
{
"$ref": "#/components/schemas/NotificationLevel"
}
]
},
"readAt": {
"format": "date-time",
"type": "string"
},
"title": {
"type": "string"
},
"type": {
"allOf": [
{
"$ref": "#/components/schemas/NotificationType"
}
]
}
},
"required": [
"createdAt",
"id",
"level",
"title",
"type"
],
"type": "object"
},
"NotificationLevel": {
"enum": [
"success",
"error",
"warning",
"info"
],
"type": "string"
},
"NotificationType": {
"enum": [
"JobFailed",
"BackupFailed",
"SystemMessage",
"Custom"
],
"type": "string"
},
"NotificationUpdateAllDto": {
"properties": {
"ids": {
"items": {
"format": "uuid",
"type": "string"
},
"type": "array"
},
"readAt": {
"format": "date-time",
"nullable": true,
"type": "string"
}
},
"required": [
"ids"
],
"type": "object"
},
"NotificationUpdateDto": {
"properties": {
"readAt": {
"format": "date-time",
"nullable": true,
"type": "string"
}
},
"type": "object"
},
"OAuthAuthorizeResponseDto": {
"properties": {
"url": {
@@ -10824,13 +10386,6 @@
],
"type": "object"
},
"OAuthTokenEndpointAuthMethod": {
"enum": [
"client_secret_post",
"client_secret_basic"
],
"type": "string"
},
"OnThisDayDto": {
"properties": {
"year": {
@@ -11060,10 +10615,6 @@
"memory.read",
"memory.update",
"memory.delete",
"notification.create",
"notification.read",
"notification.update",
"notification.delete",
"partner.create",
"partner.read",
"partner.update",
@@ -13411,17 +12962,6 @@
},
"storageQuotaClaim": {
"type": "string"
},
"timeout": {
"minimum": 1,
"type": "integer"
},
"tokenEndpointAuthMethod": {
"allOf": [
{
"$ref": "#/components/schemas/OAuthTokenEndpointAuthMethod"
}
]
}
},
"required": [
@@ -13439,9 +12979,7 @@
"scope",
"signingAlgorithm",
"storageLabelClaim",
"storageQuotaClaim",
"timeout",
"tokenEndpointAuthMethod"
"storageQuotaClaim"
],
"type": "object"
},
@@ -14083,14 +13621,6 @@
},
"UserAdminCreateDto": {
"properties": {
"avatarColor": {
"allOf": [
{
"$ref": "#/components/schemas/UserAvatarColor"
}
],
"nullable": true
},
"email": {
"format": "email",
"type": "string"
@@ -14233,14 +13763,6 @@
},
"UserAdminUpdateDto": {
"properties": {
"avatarColor": {
"allOf": [
{
"$ref": "#/components/schemas/UserAvatarColor"
}
],
"nullable": true
},
"email": {
"format": "email",
"type": "string"
@@ -14304,6 +13826,9 @@
},
"UserPreferencesResponseDto": {
"properties": {
"avatar": {
"$ref": "#/components/schemas/AvatarResponse"
},
"download": {
"$ref": "#/components/schemas/DownloadResponse"
},
@@ -14333,6 +13858,7 @@
}
},
"required": [
"avatar",
"download",
"emailNotifications",
"folders",
@@ -14426,14 +13952,6 @@
},
"UserUpdateMeDto": {
"properties": {
"avatarColor": {
"allOf": [
{
"$ref": "#/components/schemas/UserAvatarColor"
}
],
"nullable": true
},
"email": {
"format": "email",
"type": "string"

View File

@@ -39,48 +39,6 @@ export type ActivityCreateDto = {
export type ActivityStatisticsResponseDto = {
comments: number;
};
export type NotificationCreateDto = {
data?: object;
description?: string | null;
level?: NotificationLevel;
readAt?: string | null;
title: string;
"type"?: NotificationType;
userId: string;
};
export type NotificationDto = {
createdAt: string;
data?: object;
description?: string;
id: string;
level: NotificationLevel;
readAt?: string;
title: string;
"type": NotificationType;
};
export type TemplateDto = {
template: string;
};
export type TemplateResponseDto = {
html: string;
name: string;
};
export type SystemConfigSmtpTransportDto = {
host: string;
ignoreCert: boolean;
password: string;
port: number;
username: string;
};
export type SystemConfigSmtpDto = {
enabled: boolean;
"from": string;
replyTo: string;
transport: SystemConfigSmtpTransportDto;
};
export type TestEmailResponseDto = {
messageId: string;
};
export type UserLicense = {
activatedAt: string;
activationKey: string;
@@ -106,7 +64,6 @@ export type UserAdminResponseDto = {
updatedAt: string;
};
export type UserAdminCreateDto = {
avatarColor?: (UserAvatarColor) | null;
email: string;
name: string;
notify?: boolean;
@@ -119,7 +76,6 @@ export type UserAdminDeleteDto = {
force?: boolean;
};
export type UserAdminUpdateDto = {
avatarColor?: (UserAvatarColor) | null;
email?: string;
name?: string;
password?: string;
@@ -127,6 +83,9 @@ export type UserAdminUpdateDto = {
shouldChangePassword?: boolean;
storageLabel?: string | null;
};
export type AvatarResponse = {
color: UserAvatarColor;
};
export type DownloadResponse = {
archiveSize: number;
includeEmbeddedVideos: boolean;
@@ -163,6 +122,7 @@ export type TagsResponse = {
sidebarWeb: boolean;
};
export type UserPreferencesResponseDto = {
avatar: AvatarResponse;
download: DownloadResponse;
emailNotifications: EmailNotificationsResponse;
folders: FoldersResponse;
@@ -703,15 +663,28 @@ export type MemoryUpdateDto = {
memoryAt?: string;
seenAt?: string;
};
export type NotificationDeleteAllDto = {
ids: string[];
export type TemplateDto = {
template: string;
};
export type NotificationUpdateAllDto = {
ids: string[];
readAt?: string | null;
export type TemplateResponseDto = {
html: string;
name: string;
};
export type NotificationUpdateDto = {
readAt?: string | null;
export type SystemConfigSmtpTransportDto = {
host: string;
ignoreCert: boolean;
password: string;
port: number;
username: string;
};
export type SystemConfigSmtpDto = {
enabled: boolean;
"from": string;
replyTo: string;
transport: SystemConfigSmtpTransportDto;
};
export type TestEmailResponseDto = {
messageId: string;
};
export type OAuthConfigDto = {
codeChallenge?: string;
@@ -1315,8 +1288,6 @@ export type SystemConfigOAuthDto = {
signingAlgorithm: string;
storageLabelClaim: string;
storageQuotaClaim: string;
timeout: number;
tokenEndpointAuthMethod: OAuthTokenEndpointAuthMethod;
};
export type SystemConfigPasswordLoginDto = {
enabled: boolean;
@@ -1417,7 +1388,6 @@ export type TrashResponseDto = {
count: number;
};
export type UserUpdateMeDto = {
avatarColor?: (UserAvatarColor) | null;
email?: string;
name?: string;
password?: string;
@@ -1484,43 +1454,6 @@ export function deleteActivity({ id }: {
method: "DELETE"
}));
}
export function createNotification({ notificationCreateDto }: {
notificationCreateDto: NotificationCreateDto;
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchJson<{
status: 201;
data: NotificationDto;
}>("/admin/notifications", oazapfts.json({
...opts,
method: "POST",
body: notificationCreateDto
})));
}
export function getNotificationTemplateAdmin({ name, templateDto }: {
name: string;
templateDto: TemplateDto;
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchJson<{
status: 200;
data: TemplateResponseDto;
}>(`/admin/notifications/templates/${encodeURIComponent(name)}`, oazapfts.json({
...opts,
method: "POST",
body: templateDto
})));
}
export function sendTestEmailAdmin({ systemConfigSmtpDto }: {
systemConfigSmtpDto: SystemConfigSmtpDto;
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchJson<{
status: 200;
data: TestEmailResponseDto;
}>("/admin/notifications/test-email", oazapfts.json({
...opts,
method: "POST",
body: systemConfigSmtpDto
})));
}
export function searchUsersAdmin({ withDeleted }: {
withDeleted?: boolean;
}, opts?: Oazapfts.RequestOpts) {
@@ -2389,71 +2322,29 @@ export function addMemoryAssets({ id, bulkIdsDto }: {
body: bulkIdsDto
})));
}
export function deleteNotifications({ notificationDeleteAllDto }: {
notificationDeleteAllDto: NotificationDeleteAllDto;
export function getNotificationTemplateAdmin({ name, templateDto }: {
name: string;
templateDto: TemplateDto;
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchText("/notifications", oazapfts.json({
return oazapfts.ok(oazapfts.fetchJson<{
status: 200;
data: TemplateResponseDto;
}>(`/notifications/admin/templates/${encodeURIComponent(name)}`, oazapfts.json({
...opts,
method: "DELETE",
body: notificationDeleteAllDto
method: "POST",
body: templateDto
})));
}
export function getNotifications({ id, level, $type, unread }: {
id?: string;
level?: NotificationLevel;
$type?: NotificationType;
unread?: boolean;
export function sendTestEmailAdmin({ systemConfigSmtpDto }: {
systemConfigSmtpDto: SystemConfigSmtpDto;
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchJson<{
status: 200;
data: NotificationDto[];
}>(`/notifications${QS.query(QS.explode({
id,
level,
"type": $type,
unread
}))}`, {
...opts
}));
}
export function updateNotifications({ notificationUpdateAllDto }: {
notificationUpdateAllDto: NotificationUpdateAllDto;
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchText("/notifications", oazapfts.json({
data: TestEmailResponseDto;
}>("/notifications/admin/test-email", oazapfts.json({
...opts,
method: "PUT",
body: notificationUpdateAllDto
})));
}
export function deleteNotification({ id }: {
id: string;
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchText(`/notifications/${encodeURIComponent(id)}`, {
...opts,
method: "DELETE"
}));
}
export function getNotification({ id }: {
id: string;
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchJson<{
status: 200;
data: NotificationDto;
}>(`/notifications/${encodeURIComponent(id)}`, {
...opts
}));
}
export function updateNotification({ id, notificationUpdateDto }: {
id: string;
notificationUpdateDto: NotificationUpdateDto;
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchJson<{
status: 200;
data: NotificationDto;
}>(`/notifications/${encodeURIComponent(id)}`, oazapfts.json({
...opts,
method: "PUT",
body: notificationUpdateDto
method: "POST",
body: systemConfigSmtpDto
})));
}
export function startOAuth({ oAuthConfigDto }: {
@@ -3562,18 +3453,6 @@ export enum UserAvatarColor {
Gray = "gray",
Amber = "amber"
}
export enum NotificationLevel {
Success = "success",
Error = "error",
Warning = "warning",
Info = "info"
}
export enum NotificationType {
JobFailed = "JobFailed",
BackupFailed = "BackupFailed",
SystemMessage = "SystemMessage",
Custom = "Custom"
}
export enum UserStatus {
Active = "active",
Removing = "removing",
@@ -3648,10 +3527,6 @@ export enum Permission {
MemoryRead = "memory.read",
MemoryUpdate = "memory.update",
MemoryDelete = "memory.delete",
NotificationCreate = "notification.create",
NotificationRead = "notification.read",
NotificationUpdate = "notification.update",
NotificationDelete = "notification.delete",
PartnerCreate = "partner.create",
PartnerRead = "partner.read",
PartnerUpdate = "partner.update",
@@ -3861,10 +3736,6 @@ export enum LogLevel {
Error = "error",
Fatal = "fatal"
}
export enum OAuthTokenEndpointAuthMethod {
ClientSecretPost = "client_secret_post",
ClientSecretBasic = "client_secret_basic"
}
export enum TimeBucketSize {
Day = "DAY",
Month = "MONTH"

View File

@@ -26,7 +26,7 @@ COPY --from=dev /usr/src/app/node_modules/@img ./node_modules/@img
COPY --from=dev /usr/src/app/node_modules/exiftool-vendored.pl ./node_modules/exiftool-vendored.pl
# web build
FROM node:22.15.0-alpine3.20@sha256:686b8892b69879ef5bfd6047589666933508f9a5451c67320df3070ba0e9807b AS web
FROM node:22.14.0-alpine3.20@sha256:40be979442621049f40b1d51a26b55e281246b5de4e5f51a18da7beb6e17e3f9 AS web
WORKDIR /usr/src/open-api/typescript-sdk
COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./

server/package-lock.json (generated)

File diff suppressed because it is too large

View File

@@ -57,7 +57,6 @@
"chokidar": "^3.5.3",
"class-transformer": "^0.5.1",
"class-validator": "^0.14.0",
"compression": "^1.8.0",
"cookie": "^1.0.2",
"cookie-parser": "^1.4.7",
"exiftool-vendored": "^28.3.1",
@@ -109,7 +108,6 @@
"@types/archiver": "^6.0.0",
"@types/async-lock": "^1.4.2",
"@types/bcrypt": "^5.0.0",
"@types/compression": "^1.7.5",
"@types/cookie-parser": "^1.4.8",
"@types/express": "^4.17.17",
"@types/fluent-ffmpeg": "^2.1.21",

View File

@@ -5,7 +5,6 @@ import {
CQMode,
ImageFormat,
LogLevel,
OAuthTokenEndpointAuthMethod,
QueueName,
ToneMapping,
TranscodeHWAccel,
@@ -97,8 +96,6 @@ export interface SystemConfig {
scope: string;
signingAlgorithm: string;
profileSigningAlgorithm: string;
tokenEndpointAuthMethod: OAuthTokenEndpointAuthMethod;
timeout: number;
storageLabelClaim: string;
storageQuotaClaim: string;
};
@@ -263,8 +260,6 @@ export const defaults = Object.freeze<SystemConfig>({
profileSigningAlgorithm: 'none',
storageLabelClaim: 'preferred_username',
storageQuotaClaim: 'immich_quota',
tokenEndpointAuthMethod: OAuthTokenEndpointAuthMethod.CLIENT_SECRET_POST,
timeout: 30_000,
},
passwordLogin: {
enabled: true,

View File

@@ -14,7 +14,6 @@ import { LibraryController } from 'src/controllers/library.controller';
import { MapController } from 'src/controllers/map.controller';
import { MemoryController } from 'src/controllers/memory.controller';
import { NotificationAdminController } from 'src/controllers/notification-admin.controller';
import { NotificationController } from 'src/controllers/notification.controller';
import { OAuthController } from 'src/controllers/oauth.controller';
import { PartnerController } from 'src/controllers/partner.controller';
import { PersonController } from 'src/controllers/person.controller';
@@ -48,7 +47,6 @@ export const controllers = [
LibraryController,
MapController,
MemoryController,
NotificationController,
NotificationAdminController,
OAuthController,
PartnerController,

View File

@@ -1,28 +1,16 @@
import { Body, Controller, HttpCode, HttpStatus, Param, Post } from '@nestjs/common';
import { ApiTags } from '@nestjs/swagger';
import { AuthDto } from 'src/dtos/auth.dto';
import {
NotificationCreateDto,
NotificationDto,
TemplateDto,
TemplateResponseDto,
TestEmailResponseDto,
} from 'src/dtos/notification.dto';
import { TemplateDto, TemplateResponseDto, TestEmailResponseDto } from 'src/dtos/notification.dto';
import { SystemConfigSmtpDto } from 'src/dtos/system-config.dto';
import { Auth, Authenticated } from 'src/middleware/auth.guard';
import { EmailTemplate } from 'src/repositories/email.repository';
import { NotificationAdminService } from 'src/services/notification-admin.service';
import { NotificationService } from 'src/services/notification.service';
@ApiTags('Notifications (Admin)')
@Controller('admin/notifications')
@Controller('notifications/admin')
export class NotificationAdminController {
constructor(private service: NotificationAdminService) {}
@Post()
@Authenticated({ admin: true })
createNotification(@Auth() auth: AuthDto, @Body() dto: NotificationCreateDto): Promise<NotificationDto> {
return this.service.create(auth, dto);
}
constructor(private service: NotificationService) {}
@Post('test-email')
@HttpCode(HttpStatus.OK)

View File

@@ -1,60 +0,0 @@
import { Body, Controller, Delete, Get, Param, Put, Query } from '@nestjs/common';
import { ApiTags } from '@nestjs/swagger';
import { AuthDto } from 'src/dtos/auth.dto';
import {
NotificationDeleteAllDto,
NotificationDto,
NotificationSearchDto,
NotificationUpdateAllDto,
NotificationUpdateDto,
} from 'src/dtos/notification.dto';
import { Permission } from 'src/enum';
import { Auth, Authenticated } from 'src/middleware/auth.guard';
import { NotificationService } from 'src/services/notification.service';
import { UUIDParamDto } from 'src/validation';
@ApiTags('Notifications')
@Controller('notifications')
export class NotificationController {
constructor(private service: NotificationService) {}
@Get()
@Authenticated({ permission: Permission.NOTIFICATION_READ })
getNotifications(@Auth() auth: AuthDto, @Query() dto: NotificationSearchDto): Promise<NotificationDto[]> {
return this.service.search(auth, dto);
}
@Put()
@Authenticated({ permission: Permission.NOTIFICATION_UPDATE })
updateNotifications(@Auth() auth: AuthDto, @Body() dto: NotificationUpdateAllDto): Promise<void> {
return this.service.updateAll(auth, dto);
}
@Delete()
@Authenticated({ permission: Permission.NOTIFICATION_DELETE })
deleteNotifications(@Auth() auth: AuthDto, @Body() dto: NotificationDeleteAllDto): Promise<void> {
return this.service.deleteAll(auth, dto);
}
@Get(':id')
@Authenticated({ permission: Permission.NOTIFICATION_READ })
getNotification(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise<NotificationDto> {
return this.service.get(auth, id);
}
@Put(':id')
@Authenticated({ permission: Permission.NOTIFICATION_UPDATE })
updateNotification(
@Auth() auth: AuthDto,
@Param() { id }: UUIDParamDto,
@Body() dto: NotificationUpdateDto,
): Promise<NotificationDto> {
return this.service.update(auth, id, dto);
}
@Delete(':id')
@Authenticated({ permission: Permission.NOTIFICATION_DELETE })
deleteNotification(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise<void> {
return this.service.delete(auth, id);
}
}

View File

@@ -90,7 +90,7 @@ export class StorageCore {
return StorageCore.getNestedPath(StorageFolder.THUMBNAILS, person.ownerId, `${person.id}.jpeg`);
}
static getImagePath(asset: ThumbnailPathEntity, type: GeneratedImageType, format: 'jpeg' | 'webp') {
static getImagePath(asset: ThumbnailPathEntity, type: GeneratedImageType, format: ImageFormat) {
return StorageCore.getNestedPath(StorageFolder.THUMBNAILS, asset.ownerId, `${asset.id}-${type}.${format}`);
}

View File

@@ -9,7 +9,6 @@ import {
Permission,
SharedLinkType,
SourceType,
UserAvatarColor,
UserStatus,
} from 'src/enum';
import { OnThisDayData, UserMetadataItem } from 'src/types';
@@ -123,7 +122,6 @@ export type User = {
id: string;
name: string;
email: string;
avatarColor: UserAvatarColor | null;
profileImagePath: string;
profileChangedAt: Date;
};
@@ -266,15 +264,7 @@ export type AssetFace = {
person?: Person | null;
};
const userColumns = ['id', 'name', 'email', 'avatarColor', 'profileImagePath', 'profileChangedAt'] as const;
const userWithPrefixColumns = [
'users.id',
'users.name',
'users.email',
'users.avatarColor',
'users.profileImagePath',
'users.profileChangedAt',
] as const;
const userColumns = ['id', 'name', 'email', 'profileImagePath', 'profileChangedAt'] as const;
export const columns = {
asset: [
@@ -316,7 +306,7 @@ export const columns = {
'shared_links.password',
],
user: userColumns,
userWithPrefix: userWithPrefixColumns,
userWithPrefix: ['users.id', 'users.name', 'users.email', 'users.profileImagePath', 'users.profileChangedAt'],
userAdmin: [
...userColumns,
'createdAt',
@@ -333,7 +323,6 @@ export const columns = {
],
tag: ['tags.id', 'tags.value', 'tags.createdAt', 'tags.updatedAt', 'tags.color', 'tags.parentId'],
apiKey: ['id', 'name', 'userId', 'createdAt', 'updatedAt', 'permissions'],
notification: ['id', 'createdAt', 'level', 'type', 'title', 'description', 'data', 'readAt'],
syncAsset: [
'id',
'ownerId',

server/src/db.d.ts (vendored)
View File

@@ -11,8 +11,6 @@ import {
AssetStatus,
AssetType,
MemoryType,
NotificationLevel,
NotificationType,
Permission,
SharedLinkType,
SourceType,
@@ -265,21 +263,6 @@ export interface Memories {
updateId: Generated<string>;
}
export interface Notifications {
id: Generated<string>;
createdAt: Generated<Timestamp>;
updatedAt: Generated<Timestamp>;
deletedAt: Timestamp | null;
updateId: Generated<string>;
userId: string;
level: Generated<NotificationLevel>;
type: NotificationType;
title: string;
description: string | null;
data: any | null;
readAt: Timestamp | null;
}
export interface MemoriesAssetsAssets {
assetsId: string;
memoriesId: string;
@@ -480,7 +463,6 @@ export interface DB {
memories: Memories;
memories_assets_assets: MemoriesAssetsAssets;
migrations: Migrations;
notifications: Notifications;
move_history: MoveHistory;
naturalearth_countries: NaturalearthCountries;
partners_audit: PartnersAudit;

View File

@@ -1,7 +1,4 @@
import { ApiProperty } from '@nestjs/swagger';
import { IsEnum, IsString } from 'class-validator';
import { NotificationLevel, NotificationType } from 'src/enum';
import { Optional, ValidateBoolean, ValidateDate, ValidateUUID } from 'src/validation';
import { IsString } from 'class-validator';
export class TestEmailResponseDto {
messageId!: string;
@@ -14,106 +11,3 @@ export class TemplateDto {
@IsString()
template!: string;
}
export class NotificationDto {
id!: string;
@ValidateDate()
createdAt!: Date;
@ApiProperty({ enum: NotificationLevel, enumName: 'NotificationLevel' })
level!: NotificationLevel;
@ApiProperty({ enum: NotificationType, enumName: 'NotificationType' })
type!: NotificationType;
title!: string;
description?: string;
data?: any;
readAt?: Date;
}
export class NotificationSearchDto {
@Optional()
@ValidateUUID({ optional: true })
id?: string;
@IsEnum(NotificationLevel)
@Optional()
@ApiProperty({ enum: NotificationLevel, enumName: 'NotificationLevel' })
level?: NotificationLevel;
@IsEnum(NotificationType)
@Optional()
@ApiProperty({ enum: NotificationType, enumName: 'NotificationType' })
type?: NotificationType;
@ValidateBoolean({ optional: true })
unread?: boolean;
}
export class NotificationCreateDto {
@Optional()
@IsEnum(NotificationLevel)
@ApiProperty({ enum: NotificationLevel, enumName: 'NotificationLevel' })
level?: NotificationLevel;
@IsEnum(NotificationType)
@Optional()
@ApiProperty({ enum: NotificationType, enumName: 'NotificationType' })
type?: NotificationType;
@IsString()
title!: string;
@IsString()
@Optional({ nullable: true })
description?: string | null;
@Optional({ nullable: true })
data?: any;
@ValidateDate({ optional: true, nullable: true })
readAt?: Date | null;
@ValidateUUID()
userId!: string;
}
export class NotificationUpdateDto {
@ValidateDate({ optional: true, nullable: true })
readAt?: Date | null;
}
export class NotificationUpdateAllDto {
@ValidateUUID({ each: true, optional: true })
ids!: string[];
@ValidateDate({ optional: true, nullable: true })
readAt?: Date | null;
}
export class NotificationDeleteAllDto {
@ValidateUUID({ each: true })
ids!: string[];
}
export type MapNotification = {
id: string;
createdAt: Date;
updateId?: string;
level: NotificationLevel;
type: NotificationType;
data: any | null;
title: string;
description: string | null;
readAt: Date | null;
};
export const mapNotification = (notification: MapNotification): NotificationDto => {
return {
id: notification.id,
createdAt: notification.createdAt,
level: notification.level,
type: notification.type,
title: notification.title,
description: notification.description ?? undefined,
data: notification.data ?? undefined,
readAt: notification.readAt ?? undefined,
};
};

View File

@@ -25,7 +25,6 @@ import {
Colorspace,
ImageFormat,
LogLevel,
OAuthTokenEndpointAuthMethod,
QueueName,
ToneMapping,
TranscodeHWAccel,
@@ -34,7 +33,7 @@ import {
VideoContainer,
} from 'src/enum';
import { ConcurrentQueueName } from 'src/types';
import { IsCronExpression, Optional, ValidateBoolean } from 'src/validation';
import { IsCronExpression, ValidateBoolean } from 'src/validation';
const isLibraryScanEnabled = (config: SystemConfigLibraryScanDto) => config.enabled;
const isOAuthEnabled = (config: SystemConfigOAuthDto) => config.enabled;
@@ -345,19 +344,10 @@ class SystemConfigOAuthDto {
clientId!: string;
@ValidateIf(isOAuthEnabled)
@IsNotEmpty()
@IsString()
clientSecret!: string;
@IsEnum(OAuthTokenEndpointAuthMethod)
@ApiProperty({ enum: OAuthTokenEndpointAuthMethod, enumName: 'OAuthTokenEndpointAuthMethod' })
tokenEndpointAuthMethod!: OAuthTokenEndpointAuthMethod;
@IsInt()
@IsPositive()
@Optional()
@ApiProperty({ type: 'integer' })
timeout!: number;
@IsNumber()
@Min(0)
defaultStorageQuota!: number;

View File

@@ -137,6 +137,11 @@ export class UserPreferencesUpdateDto {
purchase?: PurchaseUpdate;
}
class AvatarResponse {
@ApiProperty({ enumName: 'UserAvatarColor', enum: UserAvatarColor })
color!: UserAvatarColor;
}
class RatingsResponse {
enabled: boolean = false;
}
@@ -190,6 +195,7 @@ export class UserPreferencesResponseDto implements UserPreferences {
ratings!: RatingsResponse;
sharedLinks!: SharedLinksResponse;
tags!: TagsResponse;
avatar!: AvatarResponse;
emailNotifications!: EmailNotificationsResponse;
download!: DownloadResponse;
purchase!: PurchaseResponse;
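With avatarColor dropped from the user DTOs, clients read the color from preferences instead. A hedged sketch, assuming the generated SDK exposes getMyPreferences for GET /users/me/preferences (that function is not part of this diff):
import { getMyPreferences, type UserPreferencesResponseDto } from '@immich/sdk';
const printAvatarColor = async () => {
  const preferences: UserPreferencesResponseDto = await getMyPreferences();
  // avatar.color is a UserAvatarColor value, e.g. "blue" or "gray".
  console.log(preferences.avatar.color);
};
void printAvatarColor();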

View File

@@ -1,9 +1,10 @@
import { ApiProperty } from '@nestjs/swagger';
import { Transform } from 'class-transformer';
import { IsBoolean, IsEmail, IsEnum, IsNotEmpty, IsNumber, IsString, Min } from 'class-validator';
import { IsBoolean, IsEmail, IsNotEmpty, IsNumber, IsString, Min } from 'class-validator';
import { User, UserAdmin } from 'src/database';
import { UserAvatarColor, UserMetadataKey, UserStatus } from 'src/enum';
import { UserMetadataItem } from 'src/types';
import { getPreferences } from 'src/utils/preferences';
import { Optional, ValidateBoolean, toEmail, toSanitized } from 'src/validation';
export class UserUpdateMeDto {
@@ -22,11 +23,6 @@ export class UserUpdateMeDto {
@IsString()
@IsNotEmpty()
name?: string;
@Optional({ nullable: true })
@IsEnum(UserAvatarColor)
@ApiProperty({ enumName: 'UserAvatarColor', enum: UserAvatarColor })
avatarColor?: UserAvatarColor | null;
}
export class UserResponseDto {
@@ -45,21 +41,13 @@ export class UserLicense {
activatedAt!: Date;
}
const emailToAvatarColor = (email: string): UserAvatarColor => {
const values = Object.values(UserAvatarColor);
const randomIndex = Math.floor(
[...email].map((letter) => letter.codePointAt(0) ?? 0).reduce((a, b) => a + b, 0) % values.length,
);
return values[randomIndex];
};
export const mapUser = (entity: User | UserAdmin): UserResponseDto => {
return {
id: entity.id,
email: entity.email,
name: entity.name,
profileImagePath: entity.profileImagePath,
avatarColor: entity.avatarColor ?? emailToAvatarColor(entity.email),
avatarColor: getPreferences(entity.email, (entity as UserAdmin).metadata || []).avatar.color,
profileChangedAt: entity.profileChangedAt,
};
};
@@ -81,11 +69,6 @@ export class UserAdminCreateDto {
@IsString()
name!: string;
@Optional({ nullable: true })
@IsEnum(UserAvatarColor)
@ApiProperty({ enumName: 'UserAvatarColor', enum: UserAvatarColor })
avatarColor?: UserAvatarColor | null;
@Optional({ nullable: true })
@IsString()
@Transform(toSanitized)
@@ -121,11 +104,6 @@ export class UserAdminUpdateDto {
@IsNotEmpty()
name?: string;
@Optional({ nullable: true })
@IsEnum(UserAvatarColor)
@ApiProperty({ enumName: 'UserAvatarColor', enum: UserAvatarColor })
avatarColor?: UserAvatarColor | null;
@Optional({ nullable: true })
@IsString()
@Transform(toSanitized)
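Server-side, mapUser now resolves the color through preference metadata. A minimal sketch of that lookup; the fallback behaviour lives inside getPreferences and is not shown in this diff:
import { UserAdmin } from 'src/database';
import { getPreferences } from 'src/utils/preferences';
// Sketch only: the avatarColor column is gone, so the color is resolved from the user's
// preference metadata; getPreferences is expected to supply a default when none is set.
export const resolveAvatarColor = (user: UserAdmin) =>
  getPreferences(user.email, user.metadata || []).avatar.color;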

View File

@@ -126,11 +126,6 @@ export enum Permission {
MEMORY_UPDATE = 'memory.update',
MEMORY_DELETE = 'memory.delete',
NOTIFICATION_CREATE = 'notification.create',
NOTIFICATION_READ = 'notification.read',
NOTIFICATION_UPDATE = 'notification.update',
NOTIFICATION_DELETE = 'notification.delete',
PARTNER_CREATE = 'partner.create',
PARTNER_READ = 'partner.read',
PARTNER_UPDATE = 'partner.update',
@@ -337,11 +332,6 @@ export enum ImageFormat {
WEBP = 'webp',
}
export enum RawExtractedFormat {
JPEG = 'jpeg',
JXL = 'jxl',
}
export enum LogLevel {
VERBOSE = 'verbose',
DEBUG = 'debug',
@@ -525,7 +515,6 @@ export enum JobName {
NOTIFY_SIGNUP = 'notify-signup',
NOTIFY_ALBUM_INVITE = 'notify-album-invite',
NOTIFY_ALBUM_UPDATE = 'notify-album-update',
NOTIFICATIONS_CLEANUP = 'notifications-cleanup',
SEND_EMAIL = 'notification-send-email',
// Version check
@@ -591,22 +580,3 @@ export enum SyncEntityType {
PartnerAssetDeleteV1 = 'PartnerAssetDeleteV1',
PartnerAssetExifV1 = 'PartnerAssetExifV1',
}
export enum NotificationLevel {
Success = 'success',
Error = 'error',
Warning = 'warning',
Info = 'info',
}
export enum NotificationType {
JobFailed = 'JobFailed',
BackupFailed = 'BackupFailed',
SystemMessage = 'SystemMessage',
Custom = 'Custom',
}
export enum OAuthTokenEndpointAuthMethod {
CLIENT_SECRET_POST = 'client_secret_post',
CLIENT_SECRET_BASIC = 'client_secret_basic',
}

View File

@@ -1,5 +1,5 @@
import { DatabaseExtension } from 'src/enum';
import { ConfigRepository } from 'src/repositories/config.repository';
import { vectorIndexQuery } from 'src/utils/database';
import { MigrationInterface, QueryRunner } from 'typeorm';
const vectorExtension = new ConfigRepository().getEnv().database.vectorExtension;
@@ -8,9 +8,15 @@ export class AddCLIPEmbeddingIndex1700713994428 implements MigrationInterface {
name = 'AddCLIPEmbeddingIndex1700713994428';
public async up(queryRunner: QueryRunner): Promise<void> {
if (vectorExtension === DatabaseExtension.VECTORS) {
await queryRunner.query(`SET vectors.pgvector_compatibility=on`);
}
await queryRunner.query(`SET search_path TO "$user", public, vectors`);
await queryRunner.query(vectorIndexQuery({ vectorExtension, table: 'smart_search', indexName: 'clip_index' }));
await queryRunner.query(`
CREATE INDEX IF NOT EXISTS clip_index ON smart_search
USING hnsw (embedding vector_cosine_ops)
WITH (ef_construction = 300, m = 16)`);
}
public async down(queryRunner: QueryRunner): Promise<void> {

View File

@@ -1,5 +1,5 @@
import { DatabaseExtension } from 'src/enum';
import { ConfigRepository } from 'src/repositories/config.repository';
import { vectorIndexQuery } from 'src/utils/database';
import { MigrationInterface, QueryRunner } from 'typeorm';
const vectorExtension = new ConfigRepository().getEnv().database.vectorExtension;
@@ -8,9 +8,15 @@ export class AddFaceEmbeddingIndex1700714033632 implements MigrationInterface {
name = 'AddFaceEmbeddingIndex1700714033632';
public async up(queryRunner: QueryRunner): Promise<void> {
if (vectorExtension === DatabaseExtension.VECTORS) {
await queryRunner.query(`SET vectors.pgvector_compatibility=on`);
}
await queryRunner.query(`SET search_path TO "$user", public, vectors`);
await queryRunner.query(vectorIndexQuery({ vectorExtension, table: 'asset_faces', indexName: 'face_index' }));
await queryRunner.query(`
CREATE INDEX IF NOT EXISTS face_index ON asset_faces
USING hnsw (embedding vector_cosine_ops)
WITH (ef_construction = 300, m = 16)`);
}
public async down(queryRunner: QueryRunner): Promise<void> {

View File

@@ -1,6 +1,5 @@
import { DatabaseExtension } from 'src/enum';
import { ConfigRepository } from 'src/repositories/config.repository';
import { vectorIndexQuery } from 'src/utils/database';
import { MigrationInterface, QueryRunner } from 'typeorm';
const vectorExtension = new ConfigRepository().getEnv().database.vectorExtension;
@@ -9,6 +8,7 @@ export class AddFaceSearchRelation1718486162779 implements MigrationInterface {
public async up(queryRunner: QueryRunner): Promise<void> {
if (vectorExtension === DatabaseExtension.VECTORS) {
await queryRunner.query(`SET search_path TO "$user", public, vectors`);
await queryRunner.query(`SET vectors.pgvector_compatibility=on`);
}
const hasEmbeddings = async (tableName: string): Promise<boolean> => {
@@ -47,14 +47,21 @@ export class AddFaceSearchRelation1718486162779 implements MigrationInterface {
await queryRunner.query(`ALTER TABLE face_search ALTER COLUMN embedding SET DATA TYPE real[]`);
await queryRunner.query(`ALTER TABLE face_search ALTER COLUMN embedding SET DATA TYPE vector(512)`);
await queryRunner.query(vectorIndexQuery({ vectorExtension, table: 'smart_search', indexName: 'clip_index' }));
await queryRunner.query(`
CREATE INDEX IF NOT EXISTS clip_index ON smart_search
USING hnsw (embedding vector_cosine_ops)
WITH (ef_construction = 300, m = 16)`);
await queryRunner.query(vectorIndexQuery({ vectorExtension, table: 'face_search', indexName: 'face_index' }));
await queryRunner.query(`
CREATE INDEX face_index ON face_search
USING hnsw (embedding vector_cosine_ops)
WITH (ef_construction = 300, m = 16)`);
}
public async down(queryRunner: QueryRunner): Promise<void> {
if (vectorExtension === DatabaseExtension.VECTORS) {
await queryRunner.query(`SET search_path TO "$user", public, vectors`);
await queryRunner.query(`SET vectors.pgvector_compatibility=on`);
}
await queryRunner.query(`ALTER TABLE asset_faces ADD COLUMN "embedding" vector(512)`);
@@ -67,6 +74,9 @@ export class AddFaceSearchRelation1718486162779 implements MigrationInterface {
WHERE id = fs."faceId"`);
await queryRunner.query(`DROP TABLE face_search`);
await queryRunner.query(vectorIndexQuery({ vectorExtension, table: 'asset_faces', indexName: 'face_index' }));
await queryRunner.query(`
CREATE INDEX face_index ON asset_faces
USING hnsw (embedding vector_cosine_ops)
WITH (ef_construction = 300, m = 16)`);
}
}

View File

@@ -157,15 +157,6 @@ where
and "memories"."ownerId" = $2
and "memories"."deletedAt" is null
-- AccessRepository.notification.checkOwnerAccess
select
"notifications"."id"
from
"notifications"
where
"notifications"."id" in ($1)
and "notifications"."userId" = $2
-- AccessRepository.person.checkOwnerAccess
select
"person"."id"

View File

@@ -13,7 +13,6 @@ from
"users"."id",
"users"."name",
"users"."email",
"users"."avatarColor",
"users"."profileImagePath",
"users"."profileChangedAt"
from
@@ -45,7 +44,6 @@ returning
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from

View File

@@ -12,7 +12,6 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from
@@ -37,7 +36,6 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from
@@ -102,7 +100,6 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from
@@ -127,7 +124,6 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from
@@ -195,7 +191,6 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from
@@ -220,7 +215,6 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from
@@ -275,7 +269,6 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from
@@ -299,7 +292,6 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from
@@ -361,7 +353,6 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from

View File

@@ -194,24 +194,6 @@ where
"asset_files"."assetId" = $1
and "asset_files"."type" = $2
-- AssetJobRepository.streamForEncodeClip
select
"assets"."id"
from
"assets"
inner join "asset_job_status" as "job_status" on "assetId" = "assets"."id"
where
"job_status"."previewAt" is not null
and "assets"."isVisible" = $1
and not exists (
select
from
"smart_search"
where
"assetId" = "assets"."id"
)
and "assets"."deletedAt" is null
-- AssetJobRepository.getForClipEncoding
select
"assets"."id",
@@ -277,130 +259,6 @@ from
where
"assets"."id" = $2
-- AssetJobRepository.getForSyncAssets
select
"assets"."id",
"assets"."isOffline",
"assets"."libraryId",
"assets"."originalPath",
"assets"."status",
"assets"."fileModifiedAt"
from
"assets"
where
"assets"."id" = any ($1::uuid[])
-- AssetJobRepository.getForAssetDeletion
select
"assets"."id",
"assets"."isVisible",
"assets"."libraryId",
"assets"."ownerId",
"assets"."livePhotoVideoId",
"assets"."sidecarPath",
"assets"."encodedVideoPath",
"assets"."originalPath",
to_json("exif") as "exifInfo",
(
select
coalesce(json_agg(agg), '[]')
from
(
select
"asset_faces".*,
"person" as "person"
from
"asset_faces"
left join lateral (
select
"person".*
from
"person"
where
"asset_faces"."personId" = "person"."id"
) as "person" on true
where
"asset_faces"."assetId" = "assets"."id"
and "asset_faces"."deletedAt" is null
) as agg
) as "faces",
(
select
coalesce(json_agg(agg), '[]')
from
(
select
"asset_files"."id",
"asset_files"."path",
"asset_files"."type"
from
"asset_files"
where
"asset_files"."assetId" = "assets"."id"
) as agg
) as "files",
to_json("stacked_assets") as "stack"
from
"assets"
left join "exif" on "assets"."id" = "exif"."assetId"
left join "asset_stack" on "asset_stack"."id" = "assets"."stackId"
left join lateral (
select
"asset_stack"."id",
"asset_stack"."primaryAssetId",
array_agg("stacked") as "assets"
from
"assets" as "stacked"
where
"stacked"."deletedAt" is not null
and "stacked"."isArchived" = $1
and "stacked"."stackId" = "asset_stack"."id"
group by
"asset_stack"."id"
) as "stacked_assets" on "asset_stack"."id" is not null
where
"assets"."id" = $2
-- AssetJobRepository.streamForVideoConversion
select
"assets"."id"
from
"assets"
where
"assets"."type" = $1
and (
"assets"."encodedVideoPath" is null
or "assets"."encodedVideoPath" = $2
)
and "assets"."isVisible" = $3
and "assets"."deletedAt" is null
-- AssetJobRepository.getForVideoConversion
select
"assets"."id",
"assets"."ownerId",
"assets"."originalPath",
"assets"."encodedVideoPath"
from
"assets"
where
"assets"."id" = $1
and "assets"."type" = $2
-- AssetJobRepository.streamForMetadataExtraction
select
"assets"."id"
from
"assets"
left join "asset_job_status" on "asset_job_status"."assetId" = "assets"."id"
where
(
"asset_job_status"."metadataExtractedAt" is null
or "asset_job_status"."assetId" is null
)
and "assets"."isVisible" = $1
and "assets"."deletedAt" is null
-- AssetJobRepository.getForStorageTemplateJob
select
"assets"."id",

View File

@@ -1,58 +0,0 @@
-- NOTE: This file is auto generated by ./sql-generator
-- NotificationRepository.cleanup
delete from "notifications"
where
(
(
"deletedAt" is not null
and "deletedAt" < $1
)
or (
"readAt" > $2
and "createdAt" < $3
)
or (
"readAt" = $4
and "createdAt" < $5
)
)
-- NotificationRepository.search
select
"id",
"createdAt",
"level",
"type",
"title",
"description",
"data",
"readAt"
from
"notifications"
where
"userId" = $1
and "deletedAt" is null
order by
"createdAt" desc
-- NotificationRepository.search (unread)
select
"id",
"createdAt",
"level",
"type",
"title",
"description",
"data",
"readAt"
from
"notifications"
where
(
"userId" = $1
and "readAt" is null
)
and "deletedAt" is null
order by
"createdAt" desc

View File

@@ -12,7 +12,6 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from
@@ -30,7 +29,6 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from
@@ -63,7 +61,6 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from
@@ -81,7 +78,6 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from
@@ -116,7 +112,6 @@ returning
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from
@@ -134,7 +129,6 @@ returning
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from
@@ -162,7 +156,6 @@ returning
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from
@@ -180,7 +173,6 @@ returning
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from

View File

@@ -5,7 +5,6 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt",
"createdAt",
@@ -44,7 +43,6 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt",
"createdAt",
@@ -92,7 +90,6 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt",
"createdAt",
@@ -131,7 +128,6 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt",
"createdAt",
@@ -156,7 +152,6 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt",
"createdAt",
@@ -203,7 +198,6 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt",
"createdAt",
@@ -241,7 +235,6 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt",
"createdAt",

View File

@@ -279,26 +279,6 @@ class AuthDeviceAccess {
}
}
class NotificationAccess {
constructor(private db: Kysely<DB>) {}
@GenerateSql({ params: [DummyValue.UUID, DummyValue.UUID_SET] })
@ChunkedSet({ paramIndex: 1 })
async checkOwnerAccess(userId: string, notificationIds: Set<string>) {
if (notificationIds.size === 0) {
return new Set<string>();
}
return this.db
.selectFrom('notifications')
.select('notifications.id')
.where('notifications.id', 'in', [...notificationIds])
.where('notifications.userId', '=', userId)
.execute()
.then((stacks) => new Set(stacks.map((stack) => stack.id)));
}
}
class StackAccess {
constructor(private db: Kysely<DB>) {}
@@ -446,7 +426,6 @@ export class AccessRepository {
asset: AssetAccess;
authDevice: AuthDeviceAccess;
memory: MemoryAccess;
notification: NotificationAccess;
person: PersonAccess;
partner: PartnerAccess;
stack: StackAccess;
@@ -459,7 +438,6 @@ export class AccessRepository {
this.asset = new AssetAccess(db);
this.authDevice = new AuthDeviceAccess(db);
this.memory = new MemoryAccess(db);
this.notification = new NotificationAccess(db);
this.person = new PersonAccess(db);
this.partner = new PartnerAccess(db);
this.stack = new StackAccess(db);

View File

@@ -2,21 +2,12 @@ import { Injectable } from '@nestjs/common';
import { Kysely } from 'kysely';
import { jsonArrayFrom } from 'kysely/helpers/postgres';
import { InjectKysely } from 'nestjs-kysely';
import { Asset, columns } from 'src/database';
import { columns } from 'src/database';
import { DB } from 'src/db';
import { DummyValue, GenerateSql } from 'src/decorators';
import { AssetFileType, AssetType } from 'src/enum';
import { AssetFileType } from 'src/enum';
import { StorageAsset } from 'src/types';
import {
anyUuid,
asUuid,
toJson,
withExif,
withExifInner,
withFaces,
withFacesAndPeople,
withFiles,
} from 'src/utils/database';
import { anyUuid, asUuid, withExifInner, withFaces, withFiles } from 'src/utils/database';
@Injectable()
export class AssetJobRepository {
@@ -135,23 +126,6 @@ export class AssetJobRepository {
.execute();
}
@GenerateSql({ params: [], stream: true })
streamForEncodeClip(force?: boolean) {
return this.db
.selectFrom('assets')
.select(['assets.id'])
.innerJoin('asset_job_status as job_status', 'assetId', 'assets.id')
.where('job_status.previewAt', 'is not', null)
.where('assets.isVisible', '=', true)
.$if(!force, (qb) =>
qb.where((eb) =>
eb.not((eb) => eb.exists(eb.selectFrom('smart_search').whereRef('assetId', '=', 'assets.id'))),
),
)
.where('assets.deletedAt', 'is', null)
.stream();
}
@GenerateSql({ params: [DummyValue.UUID] })
getForClipEncoding(id: string) {
return this.db
@@ -174,7 +148,6 @@ export class AssetJobRepository {
.executeTakeFirst();
}
@GenerateSql({ params: [[DummyValue.UUID]] })
getForSyncAssets(ids: string[]) {
return this.db
.selectFrom('assets')
@@ -190,84 +163,6 @@ export class AssetJobRepository {
.execute();
}
@GenerateSql({ params: [DummyValue.UUID] })
getForAssetDeletion(id: string) {
return this.db
.selectFrom('assets')
.select([
'assets.id',
'assets.isVisible',
'assets.libraryId',
'assets.ownerId',
'assets.livePhotoVideoId',
'assets.sidecarPath',
'assets.encodedVideoPath',
'assets.originalPath',
])
.$call(withExif)
.select(withFacesAndPeople)
.select(withFiles)
.leftJoin('asset_stack', 'asset_stack.id', 'assets.stackId')
.leftJoinLateral(
(eb) =>
eb
.selectFrom('assets as stacked')
.select(['asset_stack.id', 'asset_stack.primaryAssetId'])
.select((eb) => eb.fn<Asset[]>('array_agg', [eb.table('stacked')]).as('assets'))
.where('stacked.deletedAt', 'is not', null)
.where('stacked.isArchived', '=', false)
.whereRef('stacked.stackId', '=', 'asset_stack.id')
.groupBy('asset_stack.id')
.as('stacked_assets'),
(join) => join.on('asset_stack.id', 'is not', null),
)
.select((eb) => toJson(eb, 'stacked_assets').as('stack'))
.where('assets.id', '=', id)
.executeTakeFirst();
}
@GenerateSql({ params: [], stream: true })
streamForVideoConversion(force?: boolean) {
return this.db
.selectFrom('assets')
.select(['assets.id'])
.where('assets.type', '=', AssetType.VIDEO)
.$if(!force, (qb) =>
qb
.where((eb) => eb.or([eb('assets.encodedVideoPath', 'is', null), eb('assets.encodedVideoPath', '=', '')]))
.where('assets.isVisible', '=', true),
)
.where('assets.deletedAt', 'is', null)
.stream();
}
@GenerateSql({ params: [DummyValue.UUID] })
getForVideoConversion(id: string) {
return this.db
.selectFrom('assets')
.select(['assets.id', 'assets.ownerId', 'assets.originalPath', 'assets.encodedVideoPath'])
.where('assets.id', '=', id)
.where('assets.type', '=', AssetType.VIDEO)
.executeTakeFirst();
}
@GenerateSql({ params: [], stream: true })
streamForMetadataExtraction(force?: boolean) {
return this.db
.selectFrom('assets')
.select(['assets.id'])
.$if(!force, (qb) =>
qb
.leftJoin('asset_job_status', 'asset_job_status.assetId', 'assets.id')
.where((eb) =>
eb.or([eb('asset_job_status.metadataExtractedAt', 'is', null), eb('asset_job_status.assetId', 'is', null)]),
)
.where('assets.isVisible', '=', true),
)
.where('assets.deletedAt', 'is', null)
.stream();
}
private storageTemplateAssetQuery() {
return this.db
.selectFrom('assets')
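The stream* methods removed from AssetJobRepository feed the queue-all handlers: they yield asset ids lazily and the calling service batches them into jobs. A rough sketch of that consumption pattern; the JobQueue interface, batch size, and job-name string are assumptions for illustration, not code from this diff:

```ts
// Shape of the rows the stream* queries above yield.
type AssetIdRow = { id: string };

// Hypothetical queue interface standing in for Immich's job repository.
interface JobQueue {
  queueAll(jobs: { name: string; data: { id: string } }[]): Promise<void>;
}

const JOBS_BATCH_SIZE = 1000;

// Drain an async stream of asset ids and enqueue one job per asset, in batches.
async function queueFromStream(
  stream: AsyncIterable<AssetIdRow>,
  queue: JobQueue,
  jobName: string,
): Promise<void> {
  let batch: { name: string; data: { id: string } }[] = [];
  for await (const { id } of stream) {
    batch.push({ name: jobName, data: { id } });
    if (batch.length >= JOBS_BATCH_SIZE) {
      await queue.queueAll(batch);
      batch = [];
    }
  }
  if (batch.length > 0) {
    await queue.queueAll(batch);
  }
}
```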

View File

@@ -49,6 +49,7 @@ export enum WithoutProperty {
THUMBNAIL = 'thumbnail',
ENCODED_VIDEO = 'encoded-video',
EXIF = 'exif',
SMART_SEARCH = 'smart-search',
DUPLICATE = 'duplicate',
FACES = 'faces',
SIDECAR = 'sidecar',
@@ -570,6 +571,15 @@ export class AssetRepository {
.where((eb) => eb.or([eb('assets.sidecarPath', '=', ''), eb('assets.sidecarPath', 'is', null)]))
.where('assets.isVisible', '=', true),
)
.$if(property === WithoutProperty.SMART_SEARCH, (qb) =>
qb
.innerJoin('asset_job_status as job_status', 'assetId', 'assets.id')
.where('job_status.previewAt', 'is not', null)
.where('assets.isVisible', '=', true)
.where((eb) =>
eb.not((eb) => eb.exists(eb.selectFrom('smart_search').whereRef('assetId', '=', 'assets.id'))),
),
)
.$if(property === WithoutProperty.THUMBNAIL, (qb) =>
qb
.innerJoin('asset_job_status as job_status', 'assetId', 'assets.id')
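The new SMART_SEARCH branch restores pagination-based selection of assets that still lack a CLIP embedding. A simplified sketch of how a queue handler could page through getWithout; the pagination shapes and page size are assumptions, only the 'smart-search' property value comes from the diff:

```ts
// Pagination shapes assumed for the sketch.
interface Pagination {
  skip: number;
  take: number;
}

interface Paginated<T> {
  items: T[];
  hasNextPage: boolean;
}

interface AssetRepositoryLike {
  getWithout(pagination: Pagination, property: 'smart-search'): Promise<Paginated<{ id: string }>>;
}

// Page through all assets missing a smart_search row and hand each page to a callback.
async function forEachAssetWithoutClip(
  repo: AssetRepositoryLike,
  onPage: (ids: string[]) => Promise<void>,
  pageSize = 1000,
): Promise<void> {
  let skip = 0;
  let hasNextPage = true;
  while (hasNextPage) {
    const page = await repo.getWithout({ skip, take: pageSize }, 'smart-search');
    await onPage(page.items.map((asset) => asset.id));
    hasNextPage = page.hasNextPage;
    skip += pageSize;
  }
}
```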

View File

@@ -12,7 +12,6 @@ import { DatabaseExtension, DatabaseLock, VectorIndex } from 'src/enum';
import { ConfigRepository } from 'src/repositories/config.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { ExtensionVersion, VectorExtension, VectorUpdateResult } from 'src/types';
import { vectorIndexQuery } from 'src/utils/database';
import { isValidInteger } from 'src/validation';
import { DataSource } from 'typeorm';
@@ -120,7 +119,12 @@ export class DatabaseRepository {
await sql`ALTER TABLE ${sql.raw(table)} ALTER COLUMN embedding SET DATA TYPE vector(${sql.raw(String(dimSize))})`.execute(
tx,
);
await sql.raw(vectorIndexQuery({ vectorExtension: this.vectorExtension, table, indexName: index })).execute(tx);
await sql`SET vectors.pgvector_compatibility=on`.execute(tx);
await sql`
CREATE INDEX IF NOT EXISTS ${sql.raw(index)} ON ${sql.raw(table)}
USING hnsw (embedding vector_cosine_ops)
WITH (ef_construction = 300, m = 16)
`.execute(tx);
});
}
}
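Both sides of this hunk build the same HNSW index; the change is whether the statement comes from the shared vectorIndexQuery helper or is inlined after enabling vectors.pgvector_compatibility. A minimal sketch of the inlined variant as a plain statement builder, with the ef_construction and m values copied from the hunk:

```ts
// Build the pgvecto.rs-compatible HNSW index statements used in this hunk.
// The ef_construction/m values are copied from the diff; table and index names are parameters.
function buildHnswIndexSql(table: string, indexName: string): string[] {
  return [
    `SET vectors.pgvector_compatibility=on`,
    `CREATE INDEX IF NOT EXISTS ${indexName} ON ${table}
       USING hnsw (embedding vector_cosine_ops)
       WITH (ef_construction = 300, m = 16)`,
  ];
}

// Example: the statements the reindex path would run for the CLIP index.
for (const statement of buildHnswIndexSql('smart_search', 'clip_index')) {
  console.log(statement.trim());
}
```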

View File

@@ -14,7 +14,6 @@ import { SystemConfig } from 'src/config';
import { EventConfig } from 'src/decorators';
import { AssetResponseDto } from 'src/dtos/asset-response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { NotificationDto } from 'src/dtos/notification.dto';
import { ReleaseNotification, ServerVersionResponseDto } from 'src/dtos/server.dto';
import { ImmichWorker, MetadataKey, QueueName } from 'src/enum';
import { ConfigRepository } from 'src/repositories/config.repository';
@@ -65,7 +64,6 @@ type EventMap = {
'assets.restore': [{ assetIds: string[]; userId: string }];
'job.start': [QueueName, JobItem];
'job.failed': [{ job: JobItem; error: Error | any }];
// session events
'session.delete': [{ sessionId: string }];
@@ -106,7 +104,6 @@ export interface ClientEventMap {
on_server_version: [ServerVersionResponseDto];
on_config_update: [];
on_new_release: [ReleaseNotification];
on_notification: [NotificationDto];
on_session_delete: [string];
}

View File

@@ -22,7 +22,6 @@ import { MediaRepository } from 'src/repositories/media.repository';
import { MemoryRepository } from 'src/repositories/memory.repository';
import { MetadataRepository } from 'src/repositories/metadata.repository';
import { MoveRepository } from 'src/repositories/move.repository';
import { NotificationRepository } from 'src/repositories/notification.repository';
import { OAuthRepository } from 'src/repositories/oauth.repository';
import { PartnerRepository } from 'src/repositories/partner.repository';
import { PersonRepository } from 'src/repositories/person.repository';
@@ -56,7 +55,6 @@ export const repositories = [
CryptoRepository,
DatabaseRepository,
DownloadRepository,
EmailRepository,
EventRepository,
JobRepository,
LibraryRepository,
@@ -67,7 +65,7 @@ export const repositories = [
MemoryRepository,
MetadataRepository,
MoveRepository,
NotificationRepository,
EmailRepository,
OAuthRepository,
PartnerRepository,
PersonRepository,

View File

@@ -5,7 +5,7 @@ import { Telemetry } from 'src/decorators';
import { LogLevel } from 'src/enum';
import { ConfigRepository } from 'src/repositories/config.repository';
type LogDetails = any;
type LogDetails = any[];
type LogFunction = () => string;
const LOG_LEVELS = [LogLevel.VERBOSE, LogLevel.DEBUG, LogLevel.LOG, LogLevel.WARN, LogLevel.ERROR, LogLevel.FATAL];

View File

@@ -7,7 +7,7 @@ import { Writable } from 'node:stream';
import sharp from 'sharp';
import { ORIENTATION_TO_SHARP_ROTATION } from 'src/constants';
import { Exif } from 'src/database';
import { Colorspace, LogLevel, RawExtractedFormat } from 'src/enum';
import { Colorspace, LogLevel } from 'src/enum';
import { LoggingRepository } from 'src/repositories/logging.repository';
import {
DecodeToBufferOptions,
@@ -36,51 +36,34 @@ type ProgressEvent = {
percent?: number;
};
export type ExtractResult = {
buffer: Buffer;
format: RawExtractedFormat;
};
@Injectable()
export class MediaRepository {
constructor(private logger: LoggingRepository) {
this.logger.setContext(MediaRepository.name);
}
/**
*
* @param input file path to the input image
* @returns ExtractResult if succeeded, or null if failed
*/
async extract(input: string): Promise<ExtractResult | null> {
async extract(input: string, output: string): Promise<boolean> {
try {
const buffer = await exiftool.extractBinaryTagToBuffer('JpgFromRaw2', input);
return { buffer, format: RawExtractedFormat.JPEG };
} catch (error: any) {
this.logger.debug('Could not extract JpgFromRaw2 buffer from image, trying JPEG from RAW next', error.message);
}
try {
const buffer = await exiftool.extractBinaryTagToBuffer('JpgFromRaw', input);
return { buffer, format: RawExtractedFormat.JPEG };
} catch (error: any) {
this.logger.debug('Could not extract JPEG buffer from image, trying PreviewJXL next', error.message);
}
try {
const buffer = await exiftool.extractBinaryTagToBuffer('PreviewJXL', input);
return { buffer, format: RawExtractedFormat.JXL };
} catch (error: any) {
this.logger.debug('Could not extract PreviewJXL buffer from image, trying PreviewImage next', error.message);
}
try {
const buffer = await exiftool.extractBinaryTagToBuffer('PreviewImage', input);
return { buffer, format: RawExtractedFormat.JPEG };
} catch (error: any) {
this.logger.debug('Could not extract preview buffer from image', error.message);
return null;
// remove existing output file if it exists
// as exiftool-vendored does not support overwriting via "-w!" flag
// and throws "1 files could not be read" error when the output file exists
await fs.unlink(output).catch(() => null);
await exiftool.extractBinaryTag('JpgFromRaw2', input, output);
} catch {
try {
this.logger.debug('Extracting JPEG from RAW image:', input);
await exiftool.extractJpgFromRaw(input, output);
} catch (error: any) {
this.logger.debug('Could not extract JPEG from image, trying preview', error.message);
try {
await exiftool.extractPreview(input, output);
} catch (error: any) {
this.logger.debug('Could not extract preview from image', error.message);
return false;
}
}
}
return true;
}
async writeExif(tags: Partial<Exif>, output: string): Promise<boolean> {
@@ -121,7 +104,7 @@ export class MediaRepository {
}
}
decodeImage(input: string | Buffer, options: DecodeToBufferOptions) {
decodeImage(input: string, options: DecodeToBufferOptions) {
return this.getImageDecodingPipeline(input, options).raw().toBuffer({ resolveWithObject: true });
}
@@ -252,7 +235,7 @@ export class MediaRepository {
});
}
async getImageDimensions(input: string | Buffer): Promise<ImageDimensions> {
async getImageDimensions(input: string): Promise<ImageDimensions> {
const { width = 0, height = 0 } = await sharp(input).metadata();
return { width, height };
}
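The buffer-based extract on the left-hand side of this hunk changes how callers feed decodeImage: they pass the extracted preview buffer when one is available and fall back to the original path otherwise. A condensed sketch of that caller flow, with the repository interface reduced to the two methods involved and a simplified option shape:

```ts
// Simplified shapes for the two repository methods involved in this hunk.
type ExtractResult = { buffer: Buffer; format: 'jpeg' | 'jxl' } | null;

interface MediaRepositoryLike {
  extract(inputPath: string): Promise<ExtractResult>;
  decodeImage(input: string | Buffer, options: { size?: number }): Promise<{ data: Buffer }>;
}

// Prefer the embedded preview if one could be extracted, otherwise decode the
// original RAW file directly.
async function decodePreferringEmbedded(
  media: MediaRepositoryLike,
  originalPath: string,
  previewSize: number,
): Promise<Buffer> {
  const extracted = await media.extract(originalPath);
  const input = extracted ? extracted.buffer : originalPath;
  const { data } = await media.decodeImage(input, { size: previewSize });
  return data;
}
```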

View File

@@ -1,103 +0,0 @@
import { Insertable, Kysely, Updateable } from 'kysely';
import { DateTime } from 'luxon';
import { InjectKysely } from 'nestjs-kysely';
import { columns } from 'src/database';
import { DB, Notifications } from 'src/db';
import { DummyValue, GenerateSql } from 'src/decorators';
import { NotificationSearchDto } from 'src/dtos/notification.dto';
export class NotificationRepository {
constructor(@InjectKysely() private db: Kysely<DB>) {}
@GenerateSql({ params: [DummyValue.UUID] })
cleanup() {
return this.db
.deleteFrom('notifications')
.where((eb) =>
eb.or([
// remove soft-deleted notifications
eb.and([eb('deletedAt', 'is not', null), eb('deletedAt', '<', DateTime.now().minus({ days: 3 }).toJSDate())]),
// remove old, read notifications
eb.and([
// keep recently read messages around for a few days
eb('readAt', '>', DateTime.now().minus({ days: 2 }).toJSDate()),
eb('createdAt', '<', DateTime.now().minus({ days: 15 }).toJSDate()),
]),
eb.and([
// remove super old, unread notifications
eb('readAt', '=', null),
eb('createdAt', '<', DateTime.now().minus({ days: 30 }).toJSDate()),
]),
]),
)
.execute();
}
@GenerateSql({ params: [DummyValue.UUID, {}] }, { name: 'unread', params: [DummyValue.UUID, { unread: true }] })
search(userId: string, dto: NotificationSearchDto) {
return this.db
.selectFrom('notifications')
.select(columns.notification)
.where((qb) =>
qb.and({
userId,
id: dto.id,
level: dto.level,
type: dto.type,
readAt: dto.unread ? null : undefined,
}),
)
.where('deletedAt', 'is', null)
.orderBy('createdAt', 'desc')
.execute();
}
create(notification: Insertable<Notifications>) {
return this.db
.insertInto('notifications')
.values(notification)
.returning(columns.notification)
.executeTakeFirstOrThrow();
}
get(id: string) {
return this.db
.selectFrom('notifications')
.select(columns.notification)
.where('id', '=', id)
.where('deletedAt', 'is not', null)
.executeTakeFirst();
}
update(id: string, notification: Updateable<Notifications>) {
return this.db
.updateTable('notifications')
.set(notification)
.where('deletedAt', 'is', null)
.where('id', '=', id)
.returning(columns.notification)
.executeTakeFirstOrThrow();
}
async updateAll(ids: string[], notification: Updateable<Notifications>) {
await this.db.updateTable('notifications').set(notification).where('id', 'in', ids).execute();
}
async delete(id: string) {
await this.db
.updateTable('notifications')
.set({ deletedAt: DateTime.now().toJSDate() })
.where('id', '=', id)
.execute();
}
async deleteAll(ids: string[]) {
await this.db
.updateTable('notifications')
.set({ deletedAt: DateTime.now().toJSDate() })
.where('id', 'in', ids)
.execute();
}
}
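For context on how the deleted repository was consumed, its search method maps an optional unread flag onto readAt IS NULL and always excludes soft-deleted rows. An in-memory restatement of that filter, with the row and DTO shapes trimmed to the fields used here:

```ts
// Trimmed-down notification row and search DTO, based on the removed repository.
interface NotificationRow {
  id: string;
  userId: string;
  readAt: Date | null;
  deletedAt: Date | null;
  createdAt: Date;
}

interface NotificationSearch {
  unread?: boolean;
}

// In-memory equivalent of the removed search(): owned by the user, not soft-deleted,
// optionally restricted to unread (readAt IS NULL), newest first.
function searchNotifications(
  rows: NotificationRow[],
  userId: string,
  dto: NotificationSearch,
): NotificationRow[] {
  return rows
    .filter((row) => row.userId === userId)
    .filter((row) => row.deletedAt === null)
    .filter((row) => (dto.unread ? row.readAt === null : true))
    .sort((a, b) => b.createdAt.getTime() - a.createdAt.getTime());
}
```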

View File

@@ -1,19 +1,16 @@
import { Injectable, InternalServerErrorException } from '@nestjs/common';
import type { UserInfoResponse } from 'openid-client' with { 'resolution-mode': 'import' };
import { OAuthTokenEndpointAuthMethod } from 'src/enum';
import { LoggingRepository } from 'src/repositories/logging.repository';
export type OAuthConfig = {
clientId: string;
clientSecret?: string;
clientSecret: string;
issuerUrl: string;
mobileOverrideEnabled: boolean;
mobileRedirectUri: string;
profileSigningAlgorithm: string;
scope: string;
signingAlgorithm: string;
tokenEndpointAuthMethod: OAuthTokenEndpointAuthMethod;
timeout: number;
};
export type OAuthProfile = UserInfoResponse;
@@ -79,10 +76,12 @@ export class OAuthRepository {
);
}
this.logger.error(`OAuth login failed: ${error.message}`);
this.logger.error(error);
if (error.code === 'OAUTH_INVALID_RESPONSE') {
this.logger.warn(`Invalid response from authorization server. Cause: ${error.cause?.message}`);
throw error.cause;
}
throw new Error('OAuth login failed', { cause: error });
throw error;
}
}
@@ -104,8 +103,6 @@ export class OAuthRepository {
clientSecret,
profileSigningAlgorithm,
signingAlgorithm,
tokenEndpointAuthMethod,
timeout,
}: OAuthConfig) {
try {
const { allowInsecureRequests, discovery } = await import('openid-client');
@@ -117,38 +114,14 @@ export class OAuthRepository {
response_types: ['code'],
userinfo_signed_response_alg: profileSigningAlgorithm === 'none' ? undefined : profileSigningAlgorithm,
id_token_signed_response_alg: signingAlgorithm,
timeout: 30_000,
},
await this.getTokenAuthMethod(tokenEndpointAuthMethod, clientSecret),
{
execute: [allowInsecureRequests],
timeout,
},
undefined,
{ execute: [allowInsecureRequests] },
);
} catch (error: any | AggregateError) {
this.logger.error(`Error in OAuth discovery: ${error}`, error?.stack, error?.errors);
throw new InternalServerErrorException(`Error in OAuth discovery: ${error}`, { cause: error });
}
}
private async getTokenAuthMethod(tokenEndpointAuthMethod: OAuthTokenEndpointAuthMethod, clientSecret?: string) {
const { None, ClientSecretPost, ClientSecretBasic } = await import('openid-client');
if (!clientSecret) {
return None();
}
switch (tokenEndpointAuthMethod) {
case OAuthTokenEndpointAuthMethod.CLIENT_SECRET_POST: {
return ClientSecretPost(clientSecret);
}
case OAuthTokenEndpointAuthMethod.CLIENT_SECRET_BASIC: {
return ClientSecretBasic(clientSecret);
}
default: {
return None();
}
}
}
}
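The removed getTokenAuthMethod is what made tokenEndpointAuthMethod configurable on the left-hand side of this hunk. A standalone restatement of that mapping, using plain string literals in place of Immich's OAuthTokenEndpointAuthMethod enum:

```ts
// Standalone restatement of the removed token-auth selection.
// The string values stand in for Immich's OAuthTokenEndpointAuthMethod enum members.
type TokenEndpointAuthMethod = 'client_secret_post' | 'client_secret_basic';

async function getTokenAuthMethod(method: TokenEndpointAuthMethod, clientSecret?: string) {
  const { None, ClientSecretPost, ClientSecretBasic } = await import('openid-client');

  // Without a secret, only the public-client "none" method is possible.
  if (!clientSecret) {
    return None();
  }

  switch (method) {
    case 'client_secret_post':
      return ClientSecretPost(clientSecret);
    case 'client_secret_basic':
      return ClientSecretBasic(clientSecret);
    default:
      return None();
  }
}
```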

View File

@@ -6,8 +6,7 @@ import { DB, Exif } from 'src/db';
import { DummyValue, GenerateSql } from 'src/decorators';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { AssetStatus, AssetType } from 'src/enum';
import { ConfigRepository } from 'src/repositories/config.repository';
import { anyUuid, asUuid, searchAssetBuilder, vectorIndexQuery } from 'src/utils/database';
import { anyUuid, asUuid, searchAssetBuilder } from 'src/utils/database';
import { isValidInteger } from 'src/validation';
export interface SearchResult<T> {
@@ -202,10 +201,7 @@ export interface GetCameraMakesOptions {
@Injectable()
export class SearchRepository {
constructor(
@InjectKysely() private db: Kysely<DB>,
private configRepository: ConfigRepository,
) {}
constructor(@InjectKysely() private db: Kysely<DB>) {}
@GenerateSql({
params: [
@@ -450,8 +446,8 @@ export class SearchRepository {
async upsert(assetId: string, embedding: string): Promise<void> {
await this.db
.insertInto('smart_search')
.values({ assetId, embedding })
.onConflict((oc) => oc.column('assetId').doUpdateSet((eb) => ({ embedding: eb.ref('excluded.embedding') })))
.values({ assetId: asUuid(assetId), embedding } as any)
.onConflict((oc) => oc.column('assetId').doUpdateSet({ embedding } as any))
.execute();
}
@@ -473,32 +469,19 @@ export class SearchRepository {
return dimSize;
}
async setDimensionSize(dimSize: number): Promise<void> {
setDimensionSize(dimSize: number): Promise<void> {
if (!isValidInteger(dimSize, { min: 1, max: 2 ** 16 })) {
throw new Error(`Invalid CLIP dimension size: ${dimSize}`);
}
// this is done in two transactions to handle concurrent writes
await this.db.transaction().execute(async (trx) => {
await sql`delete from ${sql.table('smart_search')}`.execute(trx);
await trx.schema.alterTable('smart_search').dropConstraint('dim_size_constraint').ifExists().execute();
await sql`alter table ${sql.table('smart_search')} add constraint dim_size_constraint check (array_length(embedding::real[], 1) = ${sql.lit(dimSize)})`.execute(
trx,
);
});
const vectorExtension = this.configRepository.getEnv().database.vectorExtension;
await this.db.transaction().execute(async (trx) => {
await sql`drop index if exists clip_index`.execute(trx);
return this.db.transaction().execute(async (trx) => {
await sql`truncate ${sql.table('smart_search')}`.execute(trx);
await trx.schema
.alterTable('smart_search')
.alterColumn('embedding', (col) => col.setDataType(sql.raw(`vector(${dimSize})`)))
.execute();
await sql.raw(vectorIndexQuery({ vectorExtension, table: 'smart_search', indexName: 'clip_index' })).execute(trx);
await trx.schema.alterTable('smart_search').dropConstraint('dim_size_constraint').ifExists().execute();
await sql`reindex index clip_index`.execute(trx);
});
await sql`vacuum analyze ${sql.table('smart_search')}`.execute(this.db);
}
async deleteAllSearchEmbeddings(): Promise<void> {
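The upsert change above is the Postgres ON CONFLICT ... DO UPDATE SET embedding = excluded.embedding pattern expressed in Kysely. A minimal standalone version with the table reduced to the two columns involved (the real smart_search table carries more than this):

```ts
import { Kysely } from 'kysely';

// Reduced table shape for the sketch.
interface DB {
  smart_search: { assetId: string; embedding: string };
}

// Insert or replace the CLIP embedding for an asset, mirroring the upsert in this hunk.
async function upsertEmbedding(db: Kysely<DB>, assetId: string, embedding: string): Promise<void> {
  await db
    .insertInto('smart_search')
    .values({ assetId, embedding })
    .onConflict((oc) => oc.column('assetId').doUpdateSet((eb) => ({ embedding: eb.ref('excluded.embedding') })))
    .execute();
}
```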

View File

@@ -28,7 +28,6 @@ import { MemoryTable } from 'src/schema/tables/memory.table';
import { MemoryAssetTable } from 'src/schema/tables/memory_asset.table';
import { MoveTable } from 'src/schema/tables/move.table';
import { NaturalEarthCountriesTable } from 'src/schema/tables/natural-earth-countries.table';
import { NotificationTable } from 'src/schema/tables/notification.table';
import { PartnerAuditTable } from 'src/schema/tables/partner-audit.table';
import { PartnerTable } from 'src/schema/tables/partner.table';
import { PersonTable } from 'src/schema/tables/person.table';
@@ -47,8 +46,14 @@ import { UserTable } from 'src/schema/tables/user.table';
import { VersionHistoryTable } from 'src/schema/tables/version-history.table';
import { ConfigurationParameter, Database, Extensions } from 'src/sql-tools';
@Extensions(['uuid-ossp', 'unaccent', 'cube', 'earthdistance', 'pg_trgm', 'plpgsql'])
@Extensions(['uuid-ossp', 'unaccent', 'cube', 'earthdistance', 'pg_trgm', 'vectors', 'plpgsql'])
@ConfigurationParameter({ name: 'search_path', value: () => '"$user", public, vectors', scope: 'database' })
@ConfigurationParameter({
name: 'vectors.pgvector_compatibility',
value: () => 'on',
scope: 'user',
synchronize: false,
})
@Database({ name: 'immich' })
export class ImmichDatabase {
tables = [
@@ -71,7 +76,6 @@ export class ImmichDatabase {
MemoryTable,
MoveTable,
NaturalEarthCountriesTable,
NotificationTable,
PartnerAuditTable,
PartnerTable,
PersonTable,

View File

@@ -2,7 +2,6 @@ import { Kysely, sql } from 'kysely';
import { DatabaseExtension } from 'src/enum';
import { ConfigRepository } from 'src/repositories/config.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { vectorIndexQuery } from 'src/utils/database';
const vectorExtension = new ConfigRepository().getEnv().database.vectorExtension;
const lastMigrationSql = sql<{ name: string }>`SELECT "name" FROM "migrations" ORDER BY "timestamp" DESC LIMIT 1;`;
@@ -30,7 +29,7 @@ export async function up(db: Kysely<any>): Promise<void> {
await sql`CREATE EXTENSION IF NOT EXISTS "cube";`.execute(db);
await sql`CREATE EXTENSION IF NOT EXISTS "earthdistance";`.execute(db);
await sql`CREATE EXTENSION IF NOT EXISTS "pg_trgm";`.execute(db);
await sql`CREATE EXTENSION IF NOT EXISTS ${sql.raw(vectorExtension)}`.execute(db);
await sql`CREATE EXTENSION IF NOT EXISTS "vectors";`.execute(db);
await sql`CREATE OR REPLACE FUNCTION immich_uuid_v7(p_timestamp timestamp with time zone default clock_timestamp())
RETURNS uuid
VOLATILE LANGUAGE SQL
@@ -109,6 +108,7 @@ export async function up(db: Kysely<any>): Promise<void> {
$$;`.execute(db);
if (vectorExtension === DatabaseExtension.VECTORS) {
await sql`SET search_path TO "$user", public, vectors`.execute(db);
await sql`SET vectors.pgvector_compatibility=on`.execute(db);
}
await sql`CREATE TYPE "assets_status_enum" AS ENUM ('active','trashed','deleted');`.execute(db);
await sql`CREATE TYPE "sourcetype" AS ENUM ('machine-learning','exif','manual');`.execute(db);
@@ -293,7 +293,7 @@ export async function up(db: Kysely<any>): Promise<void> {
await sql`CREATE INDEX "IDX_live_photo_cid" ON "exif" ("livePhotoCID")`.execute(db);
await sql`CREATE INDEX "IDX_auto_stack_id" ON "exif" ("autoStackId")`.execute(db);
await sql`CREATE INDEX "IDX_asset_exif_update_id" ON "exif" ("updateId")`.execute(db);
await sql.raw(vectorIndexQuery({ vectorExtension, table: 'face_search', indexName: 'face_index' })).execute(db);
await sql`CREATE INDEX "face_index" ON "face_search" USING hnsw (embedding vector_cosine_ops) WITH (ef_construction = 300, m = 16)`.execute(db);
await sql`CREATE INDEX "IDX_geodata_gist_earthcoord" ON "geodata_places" (ll_to_earth_public(latitude, longitude))`.execute(db);
await sql`CREATE INDEX "idx_geodata_places_name" ON "geodata_places" USING gin (f_unaccent("name") gin_trgm_ops)`.execute(db);
await sql`CREATE INDEX "idx_geodata_places_admin2_name" ON "geodata_places" USING gin (f_unaccent("admin2Name") gin_trgm_ops)`.execute(db);
@@ -316,7 +316,7 @@ export async function up(db: Kysely<any>): Promise<void> {
await sql`CREATE INDEX "IDX_sharedlink_albumId" ON "shared_links" ("albumId")`.execute(db);
await sql`CREATE INDEX "IDX_5b7decce6c8d3db9593d6111a6" ON "shared_link__asset" ("assetsId")`.execute(db);
await sql`CREATE INDEX "IDX_c9fab4aa97ffd1b034f3d6581a" ON "shared_link__asset" ("sharedLinksId")`.execute(db);
await sql.raw(vectorIndexQuery({ vectorExtension, table: 'smart_search', indexName: 'clip_index' })).execute(db);
await sql`CREATE INDEX "clip_index" ON "smart_search" USING hnsw (embedding vector_cosine_ops) WITH (ef_construction = 300, m = 16)`.execute(db);
await sql`CREATE INDEX "IDX_d8ddd9d687816cc490432b3d4b" ON "session_sync_checkpoints" ("sessionId")`.execute(db);
await sql`CREATE INDEX "IDX_session_sync_checkpoints_update_id" ON "session_sync_checkpoints" ("updateId")`.execute(db);
await sql`CREATE INDEX "IDX_92e67dc508c705dd66c9461557" ON "tags" ("userId")`.execute(db);

View File

@@ -1,22 +0,0 @@
import { Kysely, sql } from 'kysely';
export async function up(db: Kysely<any>): Promise<void> {
await sql`CREATE TABLE "notifications" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "deletedAt" timestamp with time zone, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7(), "userId" uuid, "level" character varying NOT NULL DEFAULT 'info', "type" character varying NOT NULL DEFAULT 'info', "data" jsonb, "title" character varying NOT NULL, "description" text, "readAt" timestamp with time zone);`.execute(db);
await sql`ALTER TABLE "notifications" ADD CONSTRAINT "PK_6a72c3c0f683f6462415e653c3a" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "notifications" ADD CONSTRAINT "FK_692a909ee0fa9383e7859f9b406" FOREIGN KEY ("userId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
await sql`CREATE INDEX "IDX_notifications_update_id" ON "notifications" ("updateId")`.execute(db);
await sql`CREATE INDEX "IDX_692a909ee0fa9383e7859f9b40" ON "notifications" ("userId")`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "notifications_updated_at"
BEFORE UPDATE ON "notifications"
FOR EACH ROW
EXECUTE FUNCTION updated_at();`.execute(db);
}
export async function down(db: Kysely<any>): Promise<void> {
await sql`DROP TRIGGER "notifications_updated_at" ON "notifications";`.execute(db);
await sql`DROP INDEX "IDX_notifications_update_id";`.execute(db);
await sql`DROP INDEX "IDX_692a909ee0fa9383e7859f9b40";`.execute(db);
await sql`ALTER TABLE "notifications" DROP CONSTRAINT "PK_6a72c3c0f683f6462415e653c3a";`.execute(db);
await sql`ALTER TABLE "notifications" DROP CONSTRAINT "FK_692a909ee0fa9383e7859f9b406";`.execute(db);
await sql`DROP TABLE "notifications";`.execute(db);
}

View File

@@ -1,14 +0,0 @@
import { Kysely, sql } from 'kysely';
export async function up(db: Kysely<any>): Promise<void> {
await sql`ALTER TABLE "users" ADD "avatarColor" character varying;`.execute(db);
await sql`
UPDATE "users"
SET "avatarColor" = "user_metadata"."value"->'avatar'->>'color'
FROM "user_metadata"
WHERE "users"."id" = "user_metadata"."userId" AND "user_metadata"."key" = 'preferences';`.execute(db);
}
export async function down(db: Kysely<any>): Promise<void> {
await sql`ALTER TABLE "users" DROP COLUMN "avatarColor";`.execute(db);
}

View File

@@ -1,52 +0,0 @@
import { UpdatedAtTrigger, UpdateIdColumn } from 'src/decorators';
import { NotificationLevel, NotificationType } from 'src/enum';
import { UserTable } from 'src/schema/tables/user.table';
import {
Column,
CreateDateColumn,
DeleteDateColumn,
ForeignKeyColumn,
PrimaryGeneratedColumn,
Table,
UpdateDateColumn,
} from 'src/sql-tools';
@Table('notifications')
@UpdatedAtTrigger('notifications_updated_at')
export class NotificationTable {
@PrimaryGeneratedColumn()
id!: string;
@CreateDateColumn()
createdAt!: Date;
@UpdateDateColumn()
updatedAt!: Date;
@DeleteDateColumn()
deletedAt?: Date;
@UpdateIdColumn({ indexName: 'IDX_notifications_update_id' })
updateId?: string;
@ForeignKeyColumn(() => UserTable, { onDelete: 'CASCADE', onUpdate: 'CASCADE', nullable: true })
userId!: string;
@Column({ default: NotificationLevel.Info })
level!: NotificationLevel;
@Column({ default: NotificationLevel.Info })
type!: NotificationType;
@Column({ type: 'jsonb', nullable: true })
data!: any | null;
@Column()
title!: string;
@Column({ type: 'text', nullable: true })
description!: string;
@Column({ type: 'timestamp with time zone', nullable: true })
readAt?: Date | null;
}

View File

@@ -1,6 +1,6 @@
import { ColumnType } from 'kysely';
import { UpdatedAtTrigger, UpdateIdColumn } from 'src/decorators';
import { UserAvatarColor, UserStatus } from 'src/enum';
import { UserStatus } from 'src/enum';
import { users_delete_audit } from 'src/schema/functions';
import {
AfterDeleteTrigger,
@@ -49,9 +49,6 @@ export class UserTable {
@Column({ type: 'boolean', default: true })
shouldChangePassword!: Generated<boolean>;
@Column({ default: null })
avatarColor!: UserAvatarColor | null;
@DeleteDateColumn()
deletedAt!: Timestamp | null;

View File

@@ -565,7 +565,7 @@ describe(AssetService.name, () => {
it('should remove faces', async () => {
const assetWithFace = { ...assetStub.image, faces: [faceStub.face1, faceStub.mergeFace1] };
mocks.assetJob.getForAssetDeletion.mockResolvedValue(assetWithFace);
mocks.asset.getById.mockResolvedValue(assetWithFace);
await sut.handleAssetDeletion({ id: assetWithFace.id, deleteOnDisk: true });
@@ -592,7 +592,7 @@ describe(AssetService.name, () => {
it('should update stack primary asset if deleted asset was primary asset in a stack', async () => {
mocks.stack.update.mockResolvedValue(factory.stack() as any);
mocks.assetJob.getForAssetDeletion.mockResolvedValue(assetStub.primaryImage);
mocks.asset.getById.mockResolvedValue(assetStub.primaryImage);
await sut.handleAssetDeletion({ id: assetStub.primaryImage.id, deleteOnDisk: true });
@@ -604,7 +604,7 @@ describe(AssetService.name, () => {
it('should delete the entire stack if deleted asset was the primary asset and the stack would only contain one asset afterwards', async () => {
mocks.stack.delete.mockResolvedValue();
mocks.assetJob.getForAssetDeletion.mockResolvedValue({
mocks.asset.getById.mockResolvedValue({
...assetStub.primaryImage,
stack: { ...assetStub.primaryImage.stack, assets: assetStub.primaryImage.stack!.assets.slice(0, 2) },
});
@@ -615,7 +615,7 @@ describe(AssetService.name, () => {
});
it('should delete a live photo', async () => {
mocks.assetJob.getForAssetDeletion.mockResolvedValue(assetStub.livePhotoStillAsset as any);
mocks.asset.getById.mockResolvedValue(assetStub.livePhotoStillAsset);
mocks.asset.getLivePhotoCount.mockResolvedValue(0);
await sut.handleAssetDeletion({
@@ -653,7 +653,7 @@ describe(AssetService.name, () => {
it('should not delete a live motion part if it is being used by another asset', async () => {
mocks.asset.getLivePhotoCount.mockResolvedValue(2);
mocks.assetJob.getForAssetDeletion.mockResolvedValue(assetStub.livePhotoStillAsset as any);
mocks.asset.getById.mockResolvedValue(assetStub.livePhotoStillAsset);
await sut.handleAssetDeletion({
id: assetStub.livePhotoStillAsset.id,
@@ -680,13 +680,12 @@ describe(AssetService.name, () => {
});
it('should update usage', async () => {
mocks.assetJob.getForAssetDeletion.mockResolvedValue(assetStub.image);
mocks.asset.getById.mockResolvedValue(assetStub.image);
await sut.handleAssetDeletion({ id: assetStub.image.id, deleteOnDisk: true });
expect(mocks.user.updateUsage).toHaveBeenCalledWith(assetStub.image.ownerId, -5000);
});
it('should fail if asset could not be found', async () => {
mocks.assetJob.getForAssetDeletion.mockResolvedValue(void 0);
await expect(sut.handleAssetDeletion({ id: assetStub.image.id, deleteOnDisk: true })).resolves.toBe(
JobStatus.FAILED,
);

View File

@@ -189,7 +189,13 @@ export class AssetService extends BaseService {
async handleAssetDeletion(job: JobOf<JobName.ASSET_DELETION>): Promise<JobStatus> {
const { id, deleteOnDisk } = job;
const asset = await this.assetJobRepository.getForAssetDeletion(id);
const asset = await this.assetRepository.getById(id, {
faces: { person: true },
library: true,
stack: { assets: true },
exifInfo: true,
files: true,
});
if (!asset) {
return JobStatus.FAILED;

View File

@@ -142,55 +142,52 @@ describe(BackupService.name, () => {
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
mocks.storage.createWriteStream.mockReturnValue(new PassThrough());
});
it('should run a database backup successfully', async () => {
const result = await sut.handleBackupDatabase();
expect(result).toBe(JobStatus.SUCCESS);
expect(mocks.storage.createWriteStream).toHaveBeenCalled();
});
it('should rename file on success', async () => {
const result = await sut.handleBackupDatabase();
expect(result).toBe(JobStatus.SUCCESS);
expect(mocks.storage.rename).toHaveBeenCalled();
});
it('should fail if pg_dumpall fails', async () => {
mocks.process.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
await expect(sut.handleBackupDatabase()).rejects.toThrow('Backup failed with code 1');
const result = await sut.handleBackupDatabase();
expect(result).toBe(JobStatus.FAILED);
});
it('should not rename file if pgdump fails and gzip succeeds', async () => {
mocks.process.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
await expect(sut.handleBackupDatabase()).rejects.toThrow('Backup failed with code 1');
const result = await sut.handleBackupDatabase();
expect(result).toBe(JobStatus.FAILED);
expect(mocks.storage.rename).not.toHaveBeenCalled();
});
it('should fail if gzip fails', async () => {
mocks.process.spawn.mockReturnValueOnce(mockSpawn(0, 'data', ''));
mocks.process.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
await expect(sut.handleBackupDatabase()).rejects.toThrow('Gzip failed with code 1');
const result = await sut.handleBackupDatabase();
expect(result).toBe(JobStatus.FAILED);
});
it('should fail if write stream fails', async () => {
mocks.storage.createWriteStream.mockImplementation(() => {
throw new Error('error');
});
await expect(sut.handleBackupDatabase()).rejects.toThrow('error');
const result = await sut.handleBackupDatabase();
expect(result).toBe(JobStatus.FAILED);
});
it('should fail if rename fails', async () => {
mocks.storage.rename.mockRejectedValue(new Error('error'));
await expect(sut.handleBackupDatabase()).rejects.toThrow('error');
const result = await sut.handleBackupDatabase();
expect(result).toBe(JobStatus.FAILED);
});
it('should ignore unlink failing and still return failed job status', async () => {
mocks.process.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
mocks.storage.unlink.mockRejectedValue(new Error('error'));
await expect(sut.handleBackupDatabase()).rejects.toThrow('Backup failed with code 1');
const result = await sut.handleBackupDatabase();
expect(mocks.storage.unlink).toHaveBeenCalled();
expect(result).toBe(JobStatus.FAILED);
});
it.each`
postgresVersion | expectedVersion
${'14.10'} | ${14}

View File

@@ -174,7 +174,7 @@ export class BackupService extends BaseService {
await this.storageRepository
.unlink(backupFilePath)
.catch((error) => this.logger.error('Failed to delete failed backup file', error));
throw error;
return JobStatus.FAILED;
}
this.logger.log(`Database Backup Success`);
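Both variants of the catch block in this hunk share one shape: best-effort cleanup of the partial dump, then surface the failure, either by rethrowing or by returning a failed job status. A condensed sketch of that pattern with simplified, illustrative parameter names:

```ts
type JobStatus = 'success' | 'failed';

// Condensed failure path from this hunk: always try to remove the partial backup
// file, never let that cleanup mask the original error. `rethrow` selects between
// the two variants shown above (throw on one side, return FAILED on the other).
async function handleBackupFailure(
  unlink: (path: string) => Promise<void>,
  logError: (message: string, error: unknown) => void,
  backupFilePath: string,
  error: unknown,
  rethrow: boolean,
): Promise<JobStatus> {
  await unlink(backupFilePath).catch((unlinkError) => logError('Failed to delete failed backup file', unlinkError));
  if (rethrow) {
    throw error;
  }
  return 'failed';
}
```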

View File

@@ -29,7 +29,6 @@ import { MediaRepository } from 'src/repositories/media.repository';
import { MemoryRepository } from 'src/repositories/memory.repository';
import { MetadataRepository } from 'src/repositories/metadata.repository';
import { MoveRepository } from 'src/repositories/move.repository';
import { NotificationRepository } from 'src/repositories/notification.repository';
import { OAuthRepository } from 'src/repositories/oauth.repository';
import { PartnerRepository } from 'src/repositories/partner.repository';
import { PersonRepository } from 'src/repositories/person.repository';
@@ -81,7 +80,6 @@ export class BaseService {
protected memoryRepository: MemoryRepository,
protected metadataRepository: MetadataRepository,
protected moveRepository: MoveRepository,
protected notificationRepository: NotificationRepository,
protected oauthRepository: OAuthRepository,
protected partnerRepository: PartnerRepository,
protected personRepository: PersonRepository,

View File

@@ -33,7 +33,7 @@ export class DownloadService extends BaseService {
const targetSize = dto.archiveSize || HumanReadableSize.GiB * 4;
const metadata = await this.userRepository.getMetadata(auth.user.id);
const preferences = getPreferences(metadata);
const preferences = getPreferences(auth.user.email, metadata);
const motionIds = new Set<string>();
const archives: DownloadArchiveInfo[] = [];
let archive: DownloadArchiveInfo = { size: 0, assetIds: [] };

View File

@@ -17,7 +17,6 @@ import { MapService } from 'src/services/map.service';
import { MediaService } from 'src/services/media.service';
import { MemoryService } from 'src/services/memory.service';
import { MetadataService } from 'src/services/metadata.service';
import { NotificationAdminService } from 'src/services/notification-admin.service';
import { NotificationService } from 'src/services/notification.service';
import { PartnerService } from 'src/services/partner.service';
import { PersonService } from 'src/services/person.service';
@@ -61,7 +60,6 @@ export const services = [
MemoryService,
MetadataService,
NotificationService,
NotificationAdminService,
PartnerService,
PersonService,
SearchService,

View File

@@ -215,7 +215,11 @@ export class JobService extends BaseService {
await this.onDone(job);
}
} catch (error: Error | any) {
await this.eventRepository.emit('job.failed', { job, error });
this.logger.error(
`Unable to run job handler (${queueName}/${job.name}): ${error}`,
error?.stack,
JSON.stringify(job.data),
);
} finally {
this.telemetryRepository.jobs.addToGauge(queueMetric, -1);
}
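The surrounding try/catch/finally is the per-job execution wrapper: run the handler, report the failure one way or the other, and always decrement the in-flight gauge. A schematic version; the JobLike and FailureReporter interfaces are assumptions standing in for Immich's real types:

```ts
// Schematic per-job wrapper matching the structure of this hunk.
interface JobLike {
  name: string;
  data: unknown;
}

interface FailureReporter {
  // Either emits a 'job.failed' event (left side) or logs the error (right side).
  report(job: JobLike, error: unknown): Promise<void> | void;
}

async function runJob(
  job: JobLike,
  handler: (job: JobLike) => Promise<void>,
  onFailure: FailureReporter,
  decrementGauge: () => void,
): Promise<void> {
  try {
    await handler(job);
  } catch (error) {
    await onFailure.report(job, error);
  } finally {
    // The gauge tracks in-flight jobs for this queue; always release the slot.
    decrementGauge();
  }
}
```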

View File

@@ -1,6 +1,7 @@
import { OutputInfo } from 'sharp';
import { SystemConfig } from 'src/config';
import { Exif } from 'src/database';
import { AssetMediaSize } from 'src/dtos/asset-media.dto';
import {
AssetFileType,
AssetPathType,
@@ -10,11 +11,11 @@ import {
ImageFormat,
JobName,
JobStatus,
RawExtractedFormat,
TranscodeHWAccel,
TranscodePolicy,
VideoCodec,
} from 'src/enum';
import { WithoutProperty } from 'src/repositories/asset.repository';
import { MediaService } from 'src/services/media.service';
import { JobCounts, RawImageInfo } from 'src/types';
import { assetStub } from 'test/fixtures/asset.stub';
@@ -231,19 +232,17 @@ describe(MediaService.name, () => {
describe('handleGenerateThumbnails', () => {
let rawBuffer: Buffer;
let fullsizeBuffer: Buffer;
let extractedBuffer: Buffer;
let rawInfo: RawImageInfo;
beforeEach(() => {
fullsizeBuffer = Buffer.from('embedded image data');
rawBuffer = Buffer.from('raw image data');
extractedBuffer = Buffer.from('embedded image file');
rawBuffer = Buffer.from('image data');
rawInfo = { width: 100, height: 100, channels: 3 };
mocks.media.decodeImage.mockImplementation((input) =>
mocks.media.decodeImage.mockImplementation((path) =>
Promise.resolve(
typeof input === 'string'
? { data: rawBuffer, info: rawInfo as OutputInfo } // string implies original file
: { data: fullsizeBuffer, info: rawInfo as OutputInfo }, // buffer implies embedded image extracted
path.includes(AssetMediaSize.FULLSIZE)
? { data: fullsizeBuffer, info: rawInfo as OutputInfo }
: { data: rawBuffer, info: rawInfo as OutputInfo },
),
);
});
@@ -586,15 +585,16 @@ describe(MediaService.name, () => {
});
it('should extract embedded image if enabled and available', async () => {
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.JPEG });
mocks.media.extract.mockResolvedValue(true);
mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
mocks.systemMetadata.get.mockResolvedValue({ image: { extractEmbedded: true } });
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageDng);
await sut.handleGenerateThumbnails({ id: assetStub.image.id });
const convertedPath = mocks.media.extract.mock.lastCall?.[1].toString();
expect(mocks.media.decodeImage).toHaveBeenCalledOnce();
expect(mocks.media.decodeImage).toHaveBeenCalledWith(extractedBuffer, {
expect(mocks.media.decodeImage).toHaveBeenCalledWith(convertedPath, {
colorspace: Colorspace.P3,
processInvalidImages: false,
size: 1440,
@@ -602,13 +602,16 @@ describe(MediaService.name, () => {
});
it('should resize original image if embedded image is too small', async () => {
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.JPEG });
mocks.media.extract.mockResolvedValue(true);
mocks.media.getImageDimensions.mockResolvedValue({ width: 1000, height: 1000 });
mocks.systemMetadata.get.mockResolvedValue({ image: { extractEmbedded: true } });
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageDng);
await sut.handleGenerateThumbnails({ id: assetStub.image.id });
const extractedPath = mocks.media.extract.mock.lastCall?.[1].toString();
expect(extractedPath).toMatch(/-fullsize\.jpeg$/);
expect(mocks.media.decodeImage).toHaveBeenCalledWith(assetStub.imageDng.originalPath, {
colorspace: Colorspace.P3,
processInvalidImages: false,
@@ -663,40 +666,38 @@ describe(MediaService.name, () => {
expect(mocks.media.generateThumbnail).toHaveBeenCalledTimes(2);
expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
rawBuffer,
expect.objectContaining({ processInvalidImages: false }),
expect.objectContaining({ processInvalidImages: true }),
'upload/thumbs/user-id/as/se/asset-id-preview.jpeg',
);
expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
rawBuffer,
expect.objectContaining({ processInvalidImages: false }),
expect.objectContaining({ processInvalidImages: true }),
'upload/thumbs/user-id/as/se/asset-id-thumbnail.webp',
);
expect(mocks.media.generateThumbhash).toHaveBeenCalledOnce();
expect(mocks.media.generateThumbhash).toHaveBeenCalledWith(
rawBuffer,
expect.objectContaining({ processInvalidImages: false }),
expect.objectContaining({ processInvalidImages: true }),
);
expect(mocks.media.getImageDimensions).not.toHaveBeenCalled();
vi.unstubAllEnvs();
});
it('should extract full-size JPEG preview from RAW', async () => {
mocks.systemMetadata.get.mockResolvedValue({
image: { fullsize: { enabled: true, format: ImageFormat.WEBP }, extractEmbedded: true },
});
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.JPEG });
it('should generate full-size preview using embedded JPEG from RAW images when extractEmbedded is true', async () => {
mocks.systemMetadata.get.mockResolvedValue({ image: { fullsize: { enabled: true }, extractEmbedded: true } });
mocks.media.extract.mockResolvedValue(true);
mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageDng);
await sut.handleGenerateThumbnails({ id: assetStub.image.id });
const extractedPath = mocks.media.extract.mock.lastCall?.[1].toString();
expect(mocks.media.decodeImage).toHaveBeenCalledOnce();
expect(mocks.media.decodeImage).toHaveBeenCalledWith(extractedBuffer, {
expect(mocks.media.decodeImage).toHaveBeenCalledWith(extractedPath, {
colorspace: Colorspace.P3,
processInvalidImages: false,
size: 1440, // capped to preview size as fullsize conversion is skipped
});
expect(mocks.media.generateThumbnail).toHaveBeenCalledTimes(2);
@@ -714,51 +715,9 @@ describe(MediaService.name, () => {
);
});
it('should convert full-size WEBP preview from JXL preview of RAW', async () => {
mocks.systemMetadata.get.mockResolvedValue({
image: { fullsize: { enabled: true, format: ImageFormat.WEBP }, extractEmbedded: true },
});
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.JXL });
mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageDng);
await sut.handleGenerateThumbnails({ id: assetStub.image.id });
expect(mocks.media.decodeImage).toHaveBeenCalledOnce();
expect(mocks.media.decodeImage).toHaveBeenCalledWith(extractedBuffer, {
colorspace: Colorspace.P3,
processInvalidImages: false,
});
expect(mocks.media.generateThumbnail).toHaveBeenCalledTimes(3);
expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
fullsizeBuffer,
{
colorspace: Colorspace.P3,
format: ImageFormat.WEBP,
quality: 80,
processInvalidImages: false,
raw: rawInfo,
},
'upload/thumbs/user-id/as/se/asset-id-fullsize.webp',
);
expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
fullsizeBuffer,
{
colorspace: Colorspace.P3,
format: ImageFormat.JPEG,
size: 1440,
quality: 80,
processInvalidImages: false,
raw: rawInfo,
},
'upload/thumbs/user-id/as/se/asset-id-preview.jpeg',
);
});
it('should generate full-size preview directly from RAW images when extractEmbedded is false', async () => {
mocks.systemMetadata.get.mockResolvedValue({ image: { fullsize: { enabled: true }, extractEmbedded: false } });
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.JPEG });
mocks.media.extract.mockResolvedValue(true);
mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageDng);
@@ -798,7 +757,7 @@ describe(MediaService.name, () => {
it('should generate full-size preview from non-web-friendly images', async () => {
mocks.systemMetadata.get.mockResolvedValue({ image: { fullsize: { enabled: true } } });
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.JPEG });
mocks.media.extract.mockResolvedValue(true);
mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
// HEIF/HIF images taken by cameras are not web-friendly and have only limited support in Safari.
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageHif);
@@ -827,7 +786,7 @@ describe(MediaService.name, () => {
it('should skip generating full-size preview for web-friendly images', async () => {
mocks.systemMetadata.get.mockResolvedValue({ image: { fullsize: { enabled: true } } });
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.JPEG });
mocks.media.extract.mockResolvedValue(true);
mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.image);
@@ -852,7 +811,7 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
image: { fullsize: { enabled: true, format: ImageFormat.WEBP, quality: 90 } },
});
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.JPEG });
mocks.media.extract.mockResolvedValue(true);
mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
// HEIF/HIF images taken by cameras are not web-friendly and have only limited support in Safari.
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageHif);
@@ -882,12 +841,16 @@ describe(MediaService.name, () => {
describe('handleQueueVideoConversion', () => {
it('should queue all video assets', async () => {
mocks.assetJob.streamForVideoConversion.mockReturnValue(makeStream([assetStub.video]));
mocks.asset.getAll.mockResolvedValue({
items: [assetStub.video],
hasNextPage: false,
});
mocks.person.getAll.mockReturnValue(makeStream());
await sut.handleQueueVideoConversion({ force: true });
expect(mocks.assetJob.streamForVideoConversion).toHaveBeenCalledWith(true);
expect(mocks.asset.getAll).toHaveBeenCalledWith({ skip: 0, take: 1000 }, { type: AssetType.VIDEO });
expect(mocks.asset.getWithout).not.toHaveBeenCalled();
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.VIDEO_CONVERSION,
@@ -897,11 +860,15 @@ describe(MediaService.name, () => {
});
it('should queue all video assets without encoded videos', async () => {
mocks.assetJob.streamForVideoConversion.mockReturnValue(makeStream([assetStub.video]));
mocks.asset.getWithout.mockResolvedValue({
items: [assetStub.video],
hasNextPage: false,
});
await sut.handleQueueVideoConversion({});
expect(mocks.assetJob.streamForVideoConversion).toHaveBeenCalledWith(void 0);
expect(mocks.asset.getAll).not.toHaveBeenCalled();
expect(mocks.asset.getWithout).toHaveBeenCalledWith({ skip: 0, take: 1000 }, WithoutProperty.ENCODED_VIDEO);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.VIDEO_CONVERSION,
@@ -913,18 +880,26 @@ describe(MediaService.name, () => {
describe('handleVideoConversion', () => {
beforeEach(() => {
mocks.assetJob.getForVideoConversion.mockResolvedValue(assetStub.video);
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
sut.videoInterfaces = { dri: ['renderD128'], mali: true };
});
it('should skip transcoding if asset not found', async () => {
mocks.assetJob.getForVideoConversion.mockResolvedValue(void 0);
mocks.asset.getByIds.mockResolvedValue([]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.probe).not.toHaveBeenCalled();
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
it('should skip transcoding if non-video asset', async () => {
mocks.asset.getByIds.mockResolvedValue([assetStub.image]);
await sut.handleVideoConversion({ id: assetStub.image.id });
expect(mocks.media.probe).not.toHaveBeenCalled();
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
it('should transcode the longest stream', async () => {
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
mocks.logger.isLevelEnabled.mockReturnValue(false);
mocks.media.probe.mockResolvedValue(probeStub.multipleVideoStreams);
@@ -946,12 +921,14 @@ describe(MediaService.name, () => {
it('should skip a video without any streams', async () => {
mocks.media.probe.mockResolvedValue(probeStub.noVideoStreams);
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
it('should skip a video without any height', async () => {
mocks.media.probe.mockResolvedValue(probeStub.noHeight);
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
@@ -959,6 +936,7 @@ describe(MediaService.name, () => {
it('should throw an error if an unknown transcode policy is configured', async () => {
mocks.media.probe.mockResolvedValue(probeStub.noAudioStreams);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: 'foo' } } as never as SystemConfig);
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await expect(sut.handleVideoConversion({ id: assetStub.video.id })).rejects.toThrowError();
expect(mocks.media.transcode).not.toHaveBeenCalled();
@@ -969,6 +947,7 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { transcode: TranscodePolicy.ALL, accel: TranscodeHWAccel.DISABLED },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
mocks.media.transcode.mockRejectedValue(new Error('Error transcoding video'));
await expect(sut.handleVideoConversion({ id: assetStub.video.id })).resolves.toBe(JobStatus.FAILED);
@@ -978,6 +957,7 @@ describe(MediaService.name, () => {
it('should transcode when set to all', async () => {
mocks.media.probe.mockResolvedValue(probeStub.multipleVideoStreams);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.ALL } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1055,6 +1035,7 @@ describe(MediaService.name, () => {
it('should scale horizontally when video is horizontal', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.OPTIMAL } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1070,6 +1051,7 @@ describe(MediaService.name, () => {
it('should scale vertically when video is vertical', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamVertical2160p);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.OPTIMAL } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1087,6 +1069,7 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { transcode: TranscodePolicy.ALL, targetResolution: 'original' },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1104,6 +1087,7 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { transcode: TranscodePolicy.ALL, targetResolution: 'original' },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1121,6 +1105,7 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { targetVideoCodec: VideoCodec.HEVC, acceptedAudioCodecs: [AudioCodec.AAC] },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1142,6 +1127,7 @@ describe(MediaService.name, () => {
acceptedAudioCodecs: [AudioCodec.AAC],
},
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1163,6 +1149,7 @@ describe(MediaService.name, () => {
acceptedAudioCodecs: [AudioCodec.AAC],
},
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1178,6 +1165,7 @@ describe(MediaService.name, () => {
it('should copy audio stream when audio matches target', async () => {
mocks.media.probe.mockResolvedValue(probeStub.audioStreamAac);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.OPTIMAL } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1192,6 +1180,7 @@ describe(MediaService.name, () => {
it('should remux when input is not an accepted container', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamAvi);
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1215,6 +1204,7 @@ describe(MediaService.name, () => {
it('should not transcode if transcoding is disabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.DISABLED } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
@@ -1222,6 +1212,7 @@ describe(MediaService.name, () => {
it('should not remux when input is not an accepted container and transcoding is disabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.DISABLED } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
@@ -1229,6 +1220,7 @@ describe(MediaService.name, () => {
it('should not transcode if target codec is invalid', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: 'invalid' as any } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
@@ -1237,7 +1229,7 @@ describe(MediaService.name, () => {
const asset = assetStub.hasEncodedVideo;
mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.DISABLED } });
mocks.assetJob.getForVideoConversion.mockResolvedValue(asset);
mocks.asset.getByIds.mockResolvedValue([asset]);
await sut.handleVideoConversion({ id: asset.id });
@@ -1251,6 +1243,7 @@ describe(MediaService.name, () => {
it('should set max bitrate if above 0', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { maxBitrate: '4500k' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1266,6 +1259,7 @@ describe(MediaService.name, () => {
it('should default max bitrate to kbps if no unit is provided', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { maxBitrate: '4500' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1281,6 +1275,7 @@ describe(MediaService.name, () => {
it('should transcode in two passes for h264/h265 when enabled and max bitrate is above 0', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { twoPass: true, maxBitrate: '4500k' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1296,6 +1291,7 @@ describe(MediaService.name, () => {
it('should fallback to one pass for h264/h265 if two-pass is enabled but no max bitrate is set', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { twoPass: true } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1317,6 +1313,7 @@ describe(MediaService.name, () => {
targetVideoCodec: VideoCodec.VP9,
},
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1338,6 +1335,7 @@ describe(MediaService.name, () => {
targetVideoCodec: VideoCodec.VP9,
},
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1353,6 +1351,7 @@ describe(MediaService.name, () => {
it('should configure preset for vp9', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.VP9, preset: 'slow' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1368,6 +1367,7 @@ describe(MediaService.name, () => {
it('should not configure preset for vp9 if invalid', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { preset: 'invalid', targetVideoCodec: VideoCodec.VP9 } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1383,6 +1383,7 @@ describe(MediaService.name, () => {
it('should configure threads if above 0', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.VP9, threads: 2 } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1398,6 +1399,7 @@ describe(MediaService.name, () => {
it('should disable thread pooling for h264 if thread limit is 1', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { threads: 1 } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1413,6 +1415,7 @@ describe(MediaService.name, () => {
it('should omit thread flags for h264 if thread limit is at or below 0', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { threads: 0 } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1428,6 +1431,7 @@ describe(MediaService.name, () => {
it('should disable thread pooling for hevc if thread limit is 1', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamVp9);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { threads: 1, targetVideoCodec: VideoCodec.HEVC } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1443,6 +1447,7 @@ describe(MediaService.name, () => {
it('should omit thread flags for hevc if thread limit is at or below 0', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamVp9);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { threads: 0, targetVideoCodec: VideoCodec.HEVC } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1458,6 +1463,7 @@ describe(MediaService.name, () => {
it('should use av1 if specified', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamVp9);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.AV1 } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1483,6 +1489,7 @@ describe(MediaService.name, () => {
it('should map `veryslow` preset to 4 for av1', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamVp9);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.AV1, preset: 'veryslow' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1498,6 +1505,7 @@ describe(MediaService.name, () => {
it('should set max bitrate for av1 if specified', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamVp9);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.AV1, maxBitrate: '2M' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1513,6 +1521,7 @@ describe(MediaService.name, () => {
it('should set threads for av1 if specified', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamVp9);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.AV1, threads: 4 } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1530,6 +1539,7 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { targetVideoCodec: VideoCodec.AV1, threads: 4, maxBitrate: '2M' },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1551,6 +1561,7 @@ describe(MediaService.name, () => {
targetResolution: '1080p',
},
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
@@ -1560,6 +1571,7 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.NVENC, targetVideoCodec: VideoCodec.VP9 },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await expect(sut.handleVideoConversion({ id: assetStub.video.id })).rejects.toThrowError();
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
@@ -1567,6 +1579,7 @@ describe(MediaService.name, () => {
it('should fail if hwaccel option is invalid', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: 'invalid' as any } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await expect(sut.handleVideoConversion({ id: assetStub.video.id })).rejects.toThrowError();
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
@@ -1574,6 +1587,7 @@ describe(MediaService.name, () => {
it('should set options for nvenc', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.NVENC } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1611,6 +1625,7 @@ describe(MediaService.name, () => {
twoPass: true,
},
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1626,6 +1641,7 @@ describe(MediaService.name, () => {
it('should set vbr options for nvenc when max bitrate is enabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.NVENC, maxBitrate: '10000k' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1641,6 +1657,7 @@ describe(MediaService.name, () => {
it('should set cq options for nvenc when max bitrate is disabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.NVENC, maxBitrate: '10000k' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1656,6 +1673,7 @@ describe(MediaService.name, () => {
it('should omit preset for nvenc if invalid', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.NVENC, preset: 'invalid' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1671,6 +1689,7 @@ describe(MediaService.name, () => {
it('should ignore two pass for nvenc if max bitrate is disabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.NVENC } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1688,6 +1707,7 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.NVENC, accelDecode: true },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1710,6 +1730,7 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.NVENC, accelDecode: true },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1731,6 +1752,7 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.NVENC, accelDecode: true },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1746,6 +1768,7 @@ describe(MediaService.name, () => {
it('should set options for qsv', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.QSV, maxBitrate: '10000k' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1786,6 +1809,7 @@ describe(MediaService.name, () => {
preferredHwDevice: '/dev/dri/renderD128',
},
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1804,6 +1828,7 @@ describe(MediaService.name, () => {
it('should omit preset for qsv if invalid', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.QSV, preset: 'invalid' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1824,6 +1849,7 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.QSV, targetVideoCodec: VideoCodec.VP9 },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1843,6 +1869,7 @@ describe(MediaService.name, () => {
sut.videoInterfaces = { dri: [], mali: false };
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.QSV } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await expect(sut.handleVideoConversion({ id: assetStub.video.id })).rejects.toThrowError();
@@ -1853,6 +1880,7 @@ describe(MediaService.name, () => {
sut.videoInterfaces = { dri: ['card1', 'renderD129', 'card0', 'renderD128'], mali: false };
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.QSV } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1873,6 +1901,7 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.QSV, accelDecode: true },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
@@ -1899,6 +1928,7 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.QSV, accelDecode: true },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
@@ -1928,6 +1958,7 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.QSV, accelDecode: true, preferredHwDevice: 'renderD129' },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@@ -1946,6 +1977,7 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.QSV, accelDecode: true },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
@@ -1968,6 +2000,7 @@ describe(MediaService.name, () => {
it('should set options for vaapi', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1998,6 +2031,7 @@ describe(MediaService.name, () => {
it('should set vbr options for vaapi when max bitrate is enabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI, maxBitrate: '10000k' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -2022,6 +2056,7 @@ describe(MediaService.name, () => {
it('should set cq options for vaapi when max bitrate is disabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -2046,6 +2081,7 @@ describe(MediaService.name, () => {
it('should omit preset for vaapi if invalid', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI, preset: 'invalid' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -2065,6 +2101,7 @@ describe(MediaService.name, () => {
sut.videoInterfaces = { dri: ['card1', 'renderD129', 'card0', 'renderD128'], mali: false };
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -2086,6 +2123,7 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.VAAPI, preferredHwDevice: '/dev/dri/renderD128' },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -2106,6 +2144,7 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
@@ -2131,6 +2170,7 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
@@ -2154,6 +2194,7 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
@@ -2174,6 +2215,7 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true, preferredHwDevice: 'renderD129' },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@@ -2190,6 +2232,7 @@ describe(MediaService.name, () => {
it('should fallback to hw encoding and sw decoding if hw transcoding fails and hw decoding is enabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
mocks.media.transcode.mockRejectedValueOnce(new Error('error'));
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledTimes(2);
@@ -2210,6 +2253,7 @@ describe(MediaService.name, () => {
it('should fallback to sw decoding if fallback to sw decoding + hw encoding fails', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
mocks.media.transcode.mockRejectedValueOnce(new Error('error'));
mocks.media.transcode.mockRejectedValueOnce(new Error('error'));
await sut.handleVideoConversion({ id: assetStub.video.id });
@@ -2228,6 +2272,7 @@ describe(MediaService.name, () => {
it('should fallback to sw transcoding if hw transcoding fails and hw decoding is disabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
mocks.media.transcode.mockRejectedValueOnce(new Error('error'));
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledTimes(2);
@@ -2246,6 +2291,7 @@ describe(MediaService.name, () => {
sut.videoInterfaces = { dri: [], mali: true };
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await expect(sut.handleVideoConversion({ id: assetStub.video.id })).rejects.toThrowError();
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
@@ -2253,6 +2299,7 @@ describe(MediaService.name, () => {
it('should set options for rkmpp', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: true } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -2293,6 +2340,7 @@ describe(MediaService.name, () => {
targetVideoCodec: VideoCodec.HEVC,
},
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -2310,6 +2358,7 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: true, crf: 30, maxBitrate: '0' },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -2327,6 +2376,7 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: true, crf: 30, maxBitrate: '0' },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -2349,6 +2399,7 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: true, crf: 30, maxBitrate: '0' },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -2368,6 +2419,7 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: false, crf: 30, maxBitrate: '0' },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -2390,6 +2442,7 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: true, crf: 30, maxBitrate: '0' },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -2409,6 +2462,7 @@ describe(MediaService.name, () => {
it('should tonemap when policy is required and video is hdr', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamHDR);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.REQUIRED } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -2428,6 +2482,7 @@ describe(MediaService.name, () => {
it('should tonemap when policy is optimal and video is hdr', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamHDR);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.OPTIMAL } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -2447,6 +2502,7 @@ describe(MediaService.name, () => {
it('should transcode when policy is required and video is not yuv420p', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStream10Bit);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.REQUIRED } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -2462,6 +2518,7 @@ describe(MediaService.name, () => {
it('should convert to yuv420p when scaling without tone-mapping', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStream4K10Bit);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.REQUIRED } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -2477,6 +2534,7 @@ describe(MediaService.name, () => {
it('should count frames for progress when log level is debug', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.logger.isLevelEnabled.mockReturnValue(true);
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
@@ -2499,6 +2557,7 @@ describe(MediaService.name, () => {
it('should not count frames for progress when log level is not debug', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p);
mocks.logger.isLevelEnabled.mockReturnValue(false);
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.probe).toHaveBeenCalledWith(assetStub.video.originalPath, { countFrames: false });
@@ -2523,39 +2582,48 @@ describe(MediaService.name, () => {
describe('isSRGB', () => {
it('should return true for srgb colorspace', () => {
expect(sut.isSRGB({ colorspace: 'sRGB' } as Exif)).toEqual(true);
const asset = { ...assetStub.image, exifInfo: { colorspace: 'sRGB' } as Exif };
expect(sut.isSRGB(asset)).toEqual(true);
});
it('should return true for srgb profile description', () => {
expect(sut.isSRGB({ profileDescription: 'sRGB v1.31' } as Exif)).toEqual(true);
const asset = { ...assetStub.image, exifInfo: { profileDescription: 'sRGB v1.31' } as Exif };
expect(sut.isSRGB(asset)).toEqual(true);
});
it('should return true for 8-bit image with no colorspace metadata', () => {
expect(sut.isSRGB({ bitsPerSample: 8 } as Exif)).toEqual(true);
const asset = { ...assetStub.image, exifInfo: { bitsPerSample: 8 } as Exif };
expect(sut.isSRGB(asset)).toEqual(true);
});
it('should return true for image with no colorspace or bit depth metadata', () => {
expect(sut.isSRGB({} as Exif)).toEqual(true);
const asset = { ...assetStub.image, exifInfo: {} as Exif };
expect(sut.isSRGB(asset)).toEqual(true);
});
it('should return false for non-srgb colorspace', () => {
expect(sut.isSRGB({ colorspace: 'Adobe RGB' } as Exif)).toEqual(false);
const asset = { ...assetStub.image, exifInfo: { colorspace: 'Adobe RGB' } as Exif };
expect(sut.isSRGB(asset)).toEqual(false);
});
it('should return false for non-srgb profile description', () => {
expect(sut.isSRGB({ profileDescription: 'sP3C' } as Exif)).toEqual(false);
const asset = { ...assetStub.image, exifInfo: { profileDescription: 'sP3C' } as Exif };
expect(sut.isSRGB(asset)).toEqual(false);
});
it('should return false for 16-bit image with no colorspace metadata', () => {
expect(sut.isSRGB({ bitsPerSample: 16 } as Exif)).toEqual(false);
const asset = { ...assetStub.image, exifInfo: { bitsPerSample: 16 } as Exif };
expect(sut.isSRGB(asset)).toEqual(false);
});
it('should return true for 16-bit image with sRGB colorspace', () => {
expect(sut.isSRGB({ colorspace: 'sRGB', bitsPerSample: 16 } as Exif)).toEqual(true);
const asset = { ...assetStub.image, exifInfo: { colorspace: 'sRGB', bitsPerSample: 16 } as Exif };
expect(sut.isSRGB(asset)).toEqual(true);
});
it('should return true for 16-bit image with sRGB profile', () => {
expect(sut.isSRGB({ profileDescription: 'sRGB', bitsPerSample: 16 } as Exif)).toEqual(true);
const asset = { ...assetStub.image, exifInfo: { profileDescription: 'sRGB', bitsPerSample: 16 } as Exif };
expect(sut.isSRGB(asset)).toEqual(true);
});
});
});

Some files were not shown because too many files have changed in this diff.