Compare commits
27 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | 31a1e64b58 |  |
|  | e17bd8efc6 |  |
|  | 2f9019c0e1 |  |
|  | dfa8a8a6e1 |  |
|  | b9a0c3c79f |  |
|  | bda97c4e0e |  |
|  | e3426c880f |  |
|  | d4ca7d0075 |  |
|  | f1c9b763cf |  |
|  | 5097c92494 |  |
|  | 7aacc92699 |  |
|  | 00d6cc86ad |  |
|  | 54d881e5c6 |  |
|  | edce096680 |  |
|  | 5c31acbcf0 |  |
|  | 6b49104d59 |  |
|  | 97dbe3236b |  |
|  | 586393f178 |  |
|  | f3e88ea2fa |  |
|  | c8b46802d6 |  |
|  | 7534098596 |  |
|  | ec5b7c266b |  |
|  | e84ad084d5 |  |
|  | dc2de47204 |  |
|  | 2fe6607aea |  |
|  | 64831e2328 |  |
|  | 6053214e75 |  |
@@ -1,4 +1,4 @@
-FROM node:22.11.0-alpine3.20@sha256:f265794478aa0b1a23d85a492c8311ed795bc527c3fe7e43453b3c872dcd71a3 AS core
+FROM node:22.11.0-alpine3.20@sha256:dc8ba2f61dd86c44e43eb25a7812ad03c5b1b224a19fc6f77e1eb9e5669f0b82 AS core
 WORKDIR /usr/src/open-api/typescript-sdk
 COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./

cli/package-lock.json (generated, 10 changes)
@@ -1,12 +1,12 @@
 {
 "name": "@immich/cli",
-"version": "2.2.30",
+"version": "2.2.31",
 "lockfileVersion": 3,
 "requires": true,
 "packages": {
 "": {
 "name": "@immich/cli",
-"version": "2.2.30",
+"version": "2.2.31",
 "license": "GNU Affero General Public License version 3",
 "dependencies": {
 "fast-glob": "^3.3.2",
@@ -24,7 +24,7 @@
 "@types/cli-progress": "^3.11.0",
 "@types/lodash-es": "^4.17.12",
 "@types/mock-fs": "^4.13.1",
-"@types/node": "^22.8.6",
+"@types/node": "^22.9.0",
 "@typescript-eslint/eslint-plugin": "^8.0.0",
 "@typescript-eslint/parser": "^8.0.0",
 "@vitest/coverage-v8": "^2.0.5",
@@ -52,14 +52,14 @@
 },
 "../open-api/typescript-sdk": {
 "name": "@immich/sdk",
-"version": "1.120.1",
+"version": "1.120.2",
 "dev": true,
 "license": "GNU Affero General Public License version 3",
 "dependencies": {
 "@oazapfts/runtime": "^1.0.2"
 },
 "devDependencies": {
-"@types/node": "^22.8.6",
+"@types/node": "^22.9.0",
 "typescript": "^5.3.3"
 }
 },
@@ -1,6 +1,6 @@
 {
 "name": "@immich/cli",
-"version": "2.2.30",
+"version": "2.2.31",
 "description": "Command Line Interface (CLI) for Immich",
 "type": "module",
 "exports": "./dist/index.js",
@@ -20,7 +20,7 @@
 "@types/cli-progress": "^3.11.0",
 "@types/lodash-es": "^4.17.12",
 "@types/mock-fs": "^4.13.1",
-"@types/node": "^22.8.6",
+"@types/node": "^22.9.0",
 "@typescript-eslint/eslint-plugin": "^8.0.0",
 "@typescript-eslint/parser": "^8.0.0",
 "@vitest/coverage-v8": "^2.0.5",
@@ -40,7 +40,9 @@ The above error messages show that the server has previously (successfully) writ
 ### Ignoring the checks

-The checks are designed to catch common problems that we have seen users have in the past, but if you want to disable them you can set the following environment variable:
+:::warning
+The checks are designed to catch common problems that we have seen users have in the past, and often indicate there's something wrong that you should solve. If you know what you're doing and you want to disable them you can set the following environment variable:
+:::

 ```
 IMMICH_IGNORE_MOUNT_CHECK_ERRORS=true
@@ -76,7 +76,7 @@ Setting these in the IDE give a better developer experience, auto-formatting cod
 ### Dart Code Metrics

-The mobile app uses DCM (Dart Code Metrics) for linting and metrics calculation. Please refer to the [Getting Started](https://dcm.dev/docs/getting-started/#installation) page for more information on setting up DCM
+The mobile app uses DCM (Dart Code Metrics) for linting and metrics calculation. Please refer to the [Getting Started](https://dcm.dev/docs/) page for more information on setting up DCM

 Note: Activating the license is not required.
@@ -1,7 +1,7 @@
 # Hardware Transcoding [Experimental]

 This feature allows you to use a GPU to accelerate transcoding and reduce CPU load.
-Note that hardware transcoding is much less efficient for file sizes.
+Note that hardware transcoding produces significantly larger videos than software transcoding with similar settings, typically with lower quality. Using slow presets and preferring more efficient codecs can narrow this gap.
 As this is a new feature, it is still experimental and may not work on all systems.

 :::info
@@ -17,6 +17,7 @@ In our `.env` file, we will define variables that will help us in the future whe
 + THUMB_LOCATION=/custom/path/immich/thumbs
 + ENCODED_VIDEO_LOCATION=/custom/path/immich/encoded-video
 + PROFILE_LOCATION=/custom/path/immich/profile
++ BACKUP_LOCATION=/custom/path/immich/backups
 ...
 ```
@@ -30,6 +31,7 @@ services:
 + - ${THUMB_LOCATION}:/usr/src/app/upload/thumbs
 + - ${ENCODED_VIDEO_LOCATION}:/usr/src/app/upload/encoded-video
 + - ${PROFILE_LOCATION}:/usr/src/app/upload/profile
++ - ${BACKUP_LOCATION}:/usr/src/app/upload/backups
 - /etc/localtime:/etc/localtime:ro
 ```
@@ -35,6 +35,13 @@ The default configuration looks like this:
 "accel": "disabled",
 "accelDecode": false
 },
+"backup": {
+"database": {
+"enabled": true,
+"cronExpression": "0 02 * * *",
+"keepLastAmount": 14
+}
+},
 "job": {
 "backgroundTask": {
 "concurrency": 5
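The new `backup` block above maps onto a small settings shape. As a reference sketch only (the authoritative definition is the `DatabaseBackupConfig` DTO touched later in this diff), the documented JSON corresponds to roughly this TypeScript shape:

```typescript
// Sketch derived from the documented default config above; field names match the
// JSON keys, everything else (interface names) is illustrative.
interface DatabaseBackupSettings {
  enabled: boolean;
  cronExpression: string; // e.g. '0 02 * * *' — run at 02:00 every day
  keepLastAmount: number; // how many database backups to retain
}

interface BackupSettings {
  database: DatabaseBackupSettings;
}
```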
@@ -74,7 +74,6 @@ import {
 mdiFaceRecognition,
 mdiVideo,
 mdiWeb,
 mdiDatabase,
 mdiDatabaseOutline,
 } from '@mdi/js';
 import Layout from '@theme/Layout';
@@ -154,6 +153,9 @@ const weirdTags = {
 'v1.2.0': 'v0.2-dev ',
 };

+const title = 'Roadmap';
+const description = 'A list of future plans and goals, as well as past achievements and milestones.';

 const withLanguage = (date: Date) => (language: string) => date.toLocaleDateString(language);

 type Base = { icon: string; iconColor?: React.CSSProperties['color']; title: string; description: string };
@@ -870,14 +872,12 @@ const milestones: Item[] = [
 export default function MilestonePage(): JSX.Element {
 return (
-<Layout title="Milestones" description="History of Immich">
+<Layout title={title} description={description}>
 <section className="my-8">
 <h1 className="md:text-6xl text-center mb-10 text-immich-primary dark:text-immich-dark-primary px-2">
-Roadmap
+{title}
 </h1>
-<p className="text-center text-xl px-2">
-A list of future plans and goals, as well as past achievements and milestones.
-</p>
+<p className="text-center text-xl px-2">{description}</p>
 <div className="flex justify-around mt-8 w-full max-w-full">
 <Timeline items={[...roadmap, ...milestones]} />
 </div>
docs/static/archived-versions.json (vendored, 4 changes)
@@ -1,4 +1,8 @@
 [
+{
+"label": "v1.120.2",
+"url": "https://v1.120.2.archive.immich.app"
+},
 {
 "label": "v1.120.1",
 "url": "https://v1.120.1.archive.immich.app"
e2e/package-lock.json (generated, 14 changes)
@@ -1,12 +1,12 @@
 {
 "name": "immich-e2e",
-"version": "1.120.1",
+"version": "1.120.2",
 "lockfileVersion": 3,
 "requires": true,
 "packages": {
 "": {
 "name": "immich-e2e",
-"version": "1.120.1",
+"version": "1.120.2",
 "license": "GNU Affero General Public License version 3",
 "devDependencies": {
 "@eslint/eslintrc": "^3.1.0",
@@ -15,7 +15,7 @@
 "@immich/sdk": "file:../open-api/typescript-sdk",
 "@playwright/test": "^1.44.1",
 "@types/luxon": "^3.4.2",
-"@types/node": "^22.8.6",
+"@types/node": "^22.9.0",
 "@types/oidc-provider": "^8.5.1",
 "@types/pg": "^8.11.0",
 "@types/pngjs": "^6.0.4",
@@ -45,7 +45,7 @@
 },
 "../cli": {
 "name": "@immich/cli",
-"version": "2.2.30",
+"version": "2.2.31",
 "dev": true,
 "license": "GNU Affero General Public License version 3",
 "dependencies": {
@@ -64,7 +64,7 @@
 "@types/cli-progress": "^3.11.0",
 "@types/lodash-es": "^4.17.12",
 "@types/mock-fs": "^4.13.1",
-"@types/node": "^22.8.6",
+"@types/node": "^22.9.0",
 "@typescript-eslint/eslint-plugin": "^8.0.0",
 "@typescript-eslint/parser": "^8.0.0",
 "@vitest/coverage-v8": "^2.0.5",
@@ -92,14 +92,14 @@
 },
 "../open-api/typescript-sdk": {
 "name": "@immich/sdk",
-"version": "1.120.1",
+"version": "1.120.2",
 "dev": true,
 "license": "GNU Affero General Public License version 3",
 "dependencies": {
 "@oazapfts/runtime": "^1.0.2"
 },
 "devDependencies": {
-"@types/node": "^22.8.6",
+"@types/node": "^22.9.0",
 "typescript": "^5.3.3"
 }
 },
@@ -1,6 +1,6 @@
 {
 "name": "immich-e2e",
-"version": "1.120.1",
+"version": "1.120.2",
 "description": "",
 "main": "index.js",
 "type": "module",
@@ -25,7 +25,7 @@
 "@immich/sdk": "file:../open-api/typescript-sdk",
 "@playwright/test": "^1.44.1",
 "@types/luxon": "^3.4.2",
-"@types/node": "^22.8.6",
+"@types/node": "^22.9.0",
 "@types/oidc-provider": "^8.5.1",
 "@types/pg": "^8.11.0",
 "@types/pngjs": "^6.0.4",
@@ -473,10 +473,7 @@ describe('/search', () => {
 .get('/search/explore')
 .set('Authorization', `Bearer ${admin.accessToken}`);
 expect(status).toBe(200);
-expect(body).toEqual([
-{ fieldName: 'exifInfo.city', items: [] },
-{ fieldName: 'smartInfo.tags', items: [] },
-]);
+expect(body).toEqual([{ fieldName: 'exifInfo.city', items: [] }]);
 });
 });
@@ -1283,7 +1283,7 @@
 "variables": "Variables",
 "version": "Version",
 "version_announcement_closing": "Your friend, Alex",
-"version_announcement_message": "Hi friend, there is a new version of the application please take your time to visit the <link>release notes</link> and ensure your <code>docker-compose.yml</code>, and <code>.env</code> setup is up-to-date to prevent any misconfigurations, especially if you use WatchTower or any mechanism that handles updating your application automatically.",
+"version_announcement_message": "Hi there! A new version of Immich is available. Please take some time to read the <link>release notes</link> to ensure your setup is up-to-date to prevent any misconfigurations, especially if you use WatchTower or any mechanism that handles updating your Immich instance automatically.",
 "version_history": "Version History",
 "version_history_item": "Installed {version} on {date}",
 "video": "Video",
i18n/fil.json (new file, 1 line)
@@ -0,0 +1 @@
+{}

i18n/nn.json (new file, 1 line)
@@ -0,0 +1 @@
+{}
machine-learning/poetry.lock (generated, 8 changes)
@@ -747,14 +747,14 @@ files = [
 test = ["pytest (>=6)"]

 [[package]]
-name = "fastapi-slim"
+name = "fastapi"
 version = "0.115.4"
 description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "fastapi_slim-0.115.4-py3-none-any.whl", hash = "sha256:8947515618c21665590a1673a0bfe4c721db4267999c149d5301c3c0f7b3d9ce"},
-    {file = "fastapi_slim-0.115.4.tar.gz", hash = "sha256:6d37987e4d1f6adefb8c7119c9b804e59c9b3f1a488be5425994d52308e2f958"},
+    {file = "fastapi-0.115.4-py3-none-any.whl", hash = "sha256:0b504a063ffb3cf96a5e27dc1bc32c80ca743a2528574f9cdc77daa2d31b4742"},
+    {file = "fastapi-0.115.4.tar.gz", hash = "sha256:db653475586b091cb8b2fec2ac54a680ac6a158e07406e1abae31679e8826349"},
 ]

 [package.dependencies]
@@ -3778,4 +3778,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.10,<4.0"
-content-hash = "f95dddfd343a4b2f4d19ffee71ce6b2f5137e5514a60765424164259c4dc1044"
+content-hash = "b690d5fbd141da3947f4f1dc029aba1b95e7faafd723166f2c4bdc47a66c095e"
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "machine-learning"
-version = "1.120.1"
+version = "1.120.2"
 description = ""
 authors = ["Hau Tran <alex.tran1502@gmail.com>"]
 readme = "README.md"
@@ -11,7 +11,7 @@ python = ">=3.10,<4.0"
 insightface = ">=0.7.3,<1.0"
 opencv-python-headless = ">=4.7.0.72,<5.0"
 pillow = ">=9.5.0,<11.0"
-fastapi-slim = ">=0.95.2,<1.0"
+fastapi = ">=0.95.2,<1.0"
 uvicorn = {extras = ["standard"], version = ">=0.22.0,<1.0"}
 pydantic = "^2.0.0"
 pydantic-settings = "^2.5.2"
@@ -35,8 +35,8 @@ platform :android do
 task: 'bundle',
 build_type: 'Release',
 properties: {
-"android.injected.version.code" => 166,
-"android.injected.version.name" => "1.120.1",
+"android.injected.version.code" => 167,
+"android.injected.version.name" => "1.120.2",
 }
 )
 upload_to_play_store(skip_upload_apk: true, skip_upload_images: true, skip_upload_screenshots: true, aab: '../build/app/outputs/bundle/release/app-release.aab')
@@ -401,7 +401,7 @@
 CODE_SIGN_ENTITLEMENTS = Runner/RunnerProfile.entitlements;
 CODE_SIGN_IDENTITY = "Apple Development";
 CODE_SIGN_STYLE = Automatic;
-CURRENT_PROJECT_VERSION = 182;
+CURRENT_PROJECT_VERSION = 183;
 DEVELOPMENT_TEAM = 2F67MQ8R79;
 ENABLE_BITCODE = NO;
 INFOPLIST_FILE = Runner/Info.plist;
@@ -543,7 +543,7 @@
 CLANG_ENABLE_MODULES = YES;
 CODE_SIGN_IDENTITY = "Apple Development";
 CODE_SIGN_STYLE = Automatic;
-CURRENT_PROJECT_VERSION = 182;
+CURRENT_PROJECT_VERSION = 183;
 DEVELOPMENT_TEAM = 2F67MQ8R79;
 ENABLE_BITCODE = NO;
 INFOPLIST_FILE = Runner/Info.plist;
@@ -571,7 +571,7 @@
 CLANG_ENABLE_MODULES = YES;
 CODE_SIGN_IDENTITY = "Apple Development";
 CODE_SIGN_STYLE = Automatic;
-CURRENT_PROJECT_VERSION = 182;
+CURRENT_PROJECT_VERSION = 183;
 DEVELOPMENT_TEAM = 2F67MQ8R79;
 ENABLE_BITCODE = NO;
 INFOPLIST_FILE = Runner/Info.plist;
@@ -58,11 +58,11 @@
 <key>CFBundlePackageType</key>
 <string>APPL</string>
 <key>CFBundleShortVersionString</key>
-<string>1.120.0</string>
+<string>1.120.1</string>
 <key>CFBundleSignature</key>
 <string>????</string>
 <key>CFBundleVersion</key>
-<string>182</string>
+<string>183</string>
 <key>FLTEnableImpeller</key>
 <true/>
 <key>ITSAppUsesNonExemptEncryption</key>
@@ -19,7 +19,7 @@ platform :ios do
 desc "iOS Release"
 lane :release do
 increment_version_number(
-version_number: "1.120.1"
+version_number: "1.120.2"
 )
 increment_build_number(
 build_number: latest_testflight_build_number + 1,
@@ -11,6 +11,7 @@ import 'package:flutter_displaymode/flutter_displaymode.dart';
 import 'package:hooks_riverpod/hooks_riverpod.dart';
 import 'package:immich_mobile/extensions/build_context_extensions.dart';
 import 'package:immich_mobile/utils/download.dart';
+import 'package:intl/date_symbol_data_local.dart';
 import 'package:timezone/data/latest.dart';
 import 'package:immich_mobile/constants/locales.dart';
 import 'package:immich_mobile/services/background.service.dart';
@@ -56,6 +57,7 @@ void main() async {
 Future<void> initApp() async {
 await EasyLocalization.ensureInitialized();
+await initializeDateFormatting();

 if (kReleaseMode && Platform.isAndroid) {
 try {
mobile/openapi/README.md (generated, 3 changes)
@@ -3,7 +3,7 @@ Immich API
 This Dart package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project:

-- API version: 1.120.1
+- API version: 1.120.2
 - Generator version: 7.8.0
 - Build package: org.openapitools.codegen.languages.DartClientCodegen
@@ -408,7 +408,6 @@ Class | Method | HTTP request | Description
 - [SharedLinkResponseDto](doc//SharedLinkResponseDto.md)
 - [SharedLinkType](doc//SharedLinkType.md)
 - [SignUpDto](doc//SignUpDto.md)
-- [SmartInfoResponseDto](doc//SmartInfoResponseDto.md)
 - [SmartSearchDto](doc//SmartSearchDto.md)
 - [SourceType](doc//SourceType.md)
 - [StackCreateDto](doc//StackCreateDto.md)
mobile/openapi/lib/api.dart (generated, 1 change)
@@ -222,7 +222,6 @@ part 'model/shared_link_edit_dto.dart';
 part 'model/shared_link_response_dto.dart';
 part 'model/shared_link_type.dart';
 part 'model/sign_up_dto.dart';
-part 'model/smart_info_response_dto.dart';
 part 'model/smart_search_dto.dart';
 part 'model/source_type.dart';
 part 'model/stack_create_dto.dart';
mobile/openapi/lib/api_client.dart (generated, 2 changes)
@@ -498,8 +498,6 @@ class ApiClient {
 return SharedLinkTypeTypeTransformer().decode(value);
 case 'SignUpDto':
 return SignUpDto.fromJson(value);
-case 'SmartInfoResponseDto':
-return SmartInfoResponseDto.fromJson(value);
 case 'SmartSearchDto':
 return SmartSearchDto.fromJson(value);
 case 'SourceType':
mobile/openapi/lib/model/asset_response_dto.dart (generated, 19 changes)
@@ -37,7 +37,6 @@ class AssetResponseDto {
 required this.ownerId,
 this.people = const [],
 this.resized,
-this.smartInfo,
 this.stack,
 this.tags = const [],
 required this.thumbhash,
@@ -121,14 +120,6 @@ class AssetResponseDto {
 ///
 bool? resized;

-///
-/// Please note: This property should have been non-nullable! Since the specification file
-/// does not include a default value (using the "default:" property), however, the generated
-/// source code must fall back to having a nullable type.
-/// Consider adding a "default:" property in the specification file to hide this note.
-///
-SmartInfoResponseDto? smartInfo;

 AssetStackResponseDto? stack;

 List<TagResponseDto> tags;
@@ -167,7 +158,6 @@ class AssetResponseDto {
 other.ownerId == ownerId &&
 _deepEquality.equals(other.people, people) &&
 other.resized == resized &&
-other.smartInfo == smartInfo &&
 other.stack == stack &&
 _deepEquality.equals(other.tags, tags) &&
 other.thumbhash == thumbhash &&
@@ -202,7 +192,6 @@ class AssetResponseDto {
 (ownerId.hashCode) +
 (people.hashCode) +
 (resized == null ? 0 : resized!.hashCode) +
-(smartInfo == null ? 0 : smartInfo!.hashCode) +
 (stack == null ? 0 : stack!.hashCode) +
 (tags.hashCode) +
 (thumbhash == null ? 0 : thumbhash!.hashCode) +
@@ -211,7 +200,7 @@ class AssetResponseDto {
 (updatedAt.hashCode);

 @override
-String toString() => 'AssetResponseDto[checksum=$checksum, deviceAssetId=$deviceAssetId, deviceId=$deviceId, duplicateId=$duplicateId, duration=$duration, exifInfo=$exifInfo, fileCreatedAt=$fileCreatedAt, fileModifiedAt=$fileModifiedAt, hasMetadata=$hasMetadata, id=$id, isArchived=$isArchived, isFavorite=$isFavorite, isOffline=$isOffline, isTrashed=$isTrashed, libraryId=$libraryId, livePhotoVideoId=$livePhotoVideoId, localDateTime=$localDateTime, originalFileName=$originalFileName, originalMimeType=$originalMimeType, originalPath=$originalPath, owner=$owner, ownerId=$ownerId, people=$people, resized=$resized, smartInfo=$smartInfo, stack=$stack, tags=$tags, thumbhash=$thumbhash, type=$type, unassignedFaces=$unassignedFaces, updatedAt=$updatedAt]';
+String toString() => 'AssetResponseDto[checksum=$checksum, deviceAssetId=$deviceAssetId, deviceId=$deviceId, duplicateId=$duplicateId, duration=$duration, exifInfo=$exifInfo, fileCreatedAt=$fileCreatedAt, fileModifiedAt=$fileModifiedAt, hasMetadata=$hasMetadata, id=$id, isArchived=$isArchived, isFavorite=$isFavorite, isOffline=$isOffline, isTrashed=$isTrashed, libraryId=$libraryId, livePhotoVideoId=$livePhotoVideoId, localDateTime=$localDateTime, originalFileName=$originalFileName, originalMimeType=$originalMimeType, originalPath=$originalPath, owner=$owner, ownerId=$ownerId, people=$people, resized=$resized, stack=$stack, tags=$tags, thumbhash=$thumbhash, type=$type, unassignedFaces=$unassignedFaces, updatedAt=$updatedAt]';

 Map<String, dynamic> toJson() {
 final json = <String, dynamic>{};
@@ -267,11 +256,6 @@ class AssetResponseDto {
 } else {
 // json[r'resized'] = null;
 }
-if (this.smartInfo != null) {
-json[r'smartInfo'] = this.smartInfo;
-} else {
-// json[r'smartInfo'] = null;
-}
 if (this.stack != null) {
 json[r'stack'] = this.stack;
 } else {
@@ -322,7 +306,6 @@ class AssetResponseDto {
 ownerId: mapValueOfType<String>(json, r'ownerId')!,
 people: PersonWithFacesResponseDto.listFromJson(json[r'people']),
 resized: mapValueOfType<bool>(json, r'resized'),
-smartInfo: SmartInfoResponseDto.fromJson(json[r'smartInfo']),
 stack: AssetStackResponseDto.fromJson(json[r'stack']),
 tags: TagResponseDto.listFromJson(json[r'tags']),
 thumbhash: mapValueOfType<String>(json, r'thumbhash'),
mobile/openapi/lib/model/smart_info_response_dto.dart (generated, deleted — 117 lines)
@@ -1,117 +0,0 @@
 //
 // AUTO-GENERATED FILE, DO NOT MODIFY!
 //
 // @dart=2.18

 // ignore_for_file: unused_element, unused_import
 // ignore_for_file: always_put_required_named_parameters_first
 // ignore_for_file: constant_identifier_names
 // ignore_for_file: lines_longer_than_80_chars

 part of openapi.api;

 class SmartInfoResponseDto {
 /// Returns a new [SmartInfoResponseDto] instance.
 SmartInfoResponseDto({
 this.objects = const [],
 this.tags = const [],
 });

 List<String>? objects;

 List<String>? tags;

 @override
 bool operator ==(Object other) => identical(this, other) || other is SmartInfoResponseDto &&
 _deepEquality.equals(other.objects, objects) &&
 _deepEquality.equals(other.tags, tags);

 @override
 int get hashCode =>
 // ignore: unnecessary_parenthesis
 (objects == null ? 0 : objects!.hashCode) +
 (tags == null ? 0 : tags!.hashCode);

 @override
 String toString() => 'SmartInfoResponseDto[objects=$objects, tags=$tags]';

 Map<String, dynamic> toJson() {
 final json = <String, dynamic>{};
 if (this.objects != null) {
 json[r'objects'] = this.objects;
 } else {
 // json[r'objects'] = null;
 }
 if (this.tags != null) {
 json[r'tags'] = this.tags;
 } else {
 // json[r'tags'] = null;
 }
 return json;
 }

 /// Returns a new [SmartInfoResponseDto] instance and imports its values from
 /// [value] if it's a [Map], null otherwise.
 // ignore: prefer_constructors_over_static_methods
 static SmartInfoResponseDto? fromJson(dynamic value) {
 upgradeDto(value, "SmartInfoResponseDto");
 if (value is Map) {
 final json = value.cast<String, dynamic>();

 return SmartInfoResponseDto(
 objects: json[r'objects'] is Iterable
 ? (json[r'objects'] as Iterable).cast<String>().toList(growable: false)
 : const [],
 tags: json[r'tags'] is Iterable
 ? (json[r'tags'] as Iterable).cast<String>().toList(growable: false)
 : const [],
 );
 }
 return null;
 }

 static List<SmartInfoResponseDto> listFromJson(dynamic json, {bool growable = false,}) {
 final result = <SmartInfoResponseDto>[];
 if (json is List && json.isNotEmpty) {
 for (final row in json) {
 final value = SmartInfoResponseDto.fromJson(row);
 if (value != null) {
 result.add(value);
 }
 }
 }
 return result.toList(growable: growable);
 }

 static Map<String, SmartInfoResponseDto> mapFromJson(dynamic json) {
 final map = <String, SmartInfoResponseDto>{};
 if (json is Map && json.isNotEmpty) {
 json = json.cast<String, dynamic>(); // ignore: parameter_assignments
 for (final entry in json.entries) {
 final value = SmartInfoResponseDto.fromJson(entry.value);
 if (value != null) {
 map[entry.key] = value;
 }
 }
 }
 return map;
 }

 // maps a json object with a list of SmartInfoResponseDto-objects as value to a dart map
 static Map<String, List<SmartInfoResponseDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
 final map = <String, List<SmartInfoResponseDto>>{};
 if (json is Map && json.isNotEmpty) {
 // ignore: parameter_assignments
 json = json.cast<String, dynamic>();
 for (final entry in json.entries) {
 map[entry.key] = SmartInfoResponseDto.listFromJson(entry.value, growable: growable,);
 }
 }
 return map;
 }

 /// The list of required keys that must be present in a JSON.
 static const requiredKeys = <String>{
 };
 }
@@ -2,7 +2,7 @@ name: immich_mobile
 description: Immich - selfhosted backup media file on mobile phone

 publish_to: 'none'
-version: 1.120.1+166
+version: 1.120.2+167

 environment:
 sdk: '>=3.3.0 <4.0.0'
@@ -8,11 +8,11 @@ bash tool/build_android.sh x64
 bash tool/build_android.sh armv7
 bash tool/build_android.sh arm64
 mv libisar_android_arm64.so libisar.so
-mv libisar.so ../.pub-cache/hosted/pub.dev/isar_flutter_libs-*/android/src/main/jniLibs/arm64-v8a/
+mv libisar.so ../.pub-cache/hosted/pub.isar-community.dev/isar_flutter_libs-*/android/src/main/jniLibs/arm64-v8a/
 mv libisar_android_armv7.so libisar.so
-mv libisar.so ../.pub-cache/hosted/pub.dev/isar_flutter_libs-*/android/src/main/jniLibs/armeabi-v7a/
+mv libisar.so ../.pub-cache/hosted/pub.isar-community.dev/isar_flutter_libs-*/android/src/main/jniLibs/armeabi-v7a/
 mv libisar_android_x64.so libisar.so
-mv libisar.so ../.pub-cache/hosted/pub.dev/isar_flutter_libs-*/android/src/main/jniLibs/x86_64/
+mv libisar.so ../.pub-cache/hosted/pub.isar-community.dev/isar_flutter_libs-*/android/src/main/jniLibs/x86_64/
 mv libisar_android_x86.so libisar.so
-mv libisar.so ../.pub-cache/hosted/pub.dev/isar_flutter_libs-*/android/src/main/jniLibs/x86/
+mv libisar.so ../.pub-cache/hosted/pub.isar-community.dev/isar_flutter_libs-*/android/src/main/jniLibs/x86/
 )
@@ -7385,7 +7385,7 @@
 "info": {
 "title": "Immich",
 "description": "Immich API",
-"version": "1.120.1",
+"version": "1.120.2",
 "contact": {}
 },
 "tags": [],
@@ -8402,9 +8402,6 @@
 "description": "This property was deprecated in v1.113.0",
 "type": "boolean"
 },
-"smartInfo": {
-"$ref": "#/components/schemas/SmartInfoResponseDto"
-},
 "stack": {
 "allOf": [
 {
@@ -11284,25 +11281,6 @@
 ],
 "type": "object"
 },
-"SmartInfoResponseDto": {
-"properties": {
-"objects": {
-"items": {
-"type": "string"
-},
-"nullable": true,
-"type": "array"
-},
-"tags": {
-"items": {
-"type": "string"
-},
-"nullable": true,
-"type": "array"
-}
-},
-"type": "object"
-},
 "SmartSearchDto": {
 "properties": {
 "city": {
open-api/typescript-sdk/package-lock.json (generated, 6 changes)
@@ -1,18 +1,18 @@
 {
 "name": "@immich/sdk",
-"version": "1.120.1",
+"version": "1.120.2",
 "lockfileVersion": 3,
 "requires": true,
 "packages": {
 "": {
 "name": "@immich/sdk",
-"version": "1.120.1",
+"version": "1.120.2",
 "license": "GNU Affero General Public License version 3",
 "dependencies": {
 "@oazapfts/runtime": "^1.0.2"
 },
 "devDependencies": {
-"@types/node": "^22.8.6",
+"@types/node": "^22.9.0",
 "typescript": "^5.3.3"
 }
 },
@@ -1,6 +1,6 @@
 {
 "name": "@immich/sdk",
-"version": "1.120.1",
+"version": "1.120.2",
 "description": "Auto-generated TypeScript SDK for the Immich API",
 "type": "module",
 "main": "./build/index.js",
@@ -19,7 +19,7 @@
 "@oazapfts/runtime": "^1.0.2"
 },
 "devDependencies": {
-"@types/node": "^22.8.6",
+"@types/node": "^22.9.0",
 "typescript": "^5.3.3"
 },
 "repository": {
@@ -1,6 +1,6 @@
 /**
  * Immich
- * 1.120.1
+ * 1.120.2
  * DO NOT MODIFY - This file has been generated using oazapfts.
  * See https://www.npmjs.com/package/oazapfts
  */
@@ -221,10 +221,6 @@ export type PersonWithFacesResponseDto = {
 /** This property was added in v1.107.0 */
 updatedAt?: string;
 };
-export type SmartInfoResponseDto = {
-objects?: string[] | null;
-tags?: string[] | null;
-};
 export type AssetStackResponseDto = {
 assetCount: number;
 id: string;
@@ -267,7 +263,7 @@ export type AssetResponseDto = {
 people?: PersonWithFacesResponseDto[];
 /** This property was deprecated in v1.113.0 */
 resized?: boolean;
-smartInfo?: SmartInfoResponseDto;
 stack?: (AssetStackResponseDto) | null;
 tags?: TagResponseDto[];
 thumbhash: string | null;
|
||||
# dev build
|
||||
FROM ghcr.io/immich-app/base-server-dev:20241105@sha256:99eec44db9e281e30eb9c50161cfb8e810f06e4338896b900fb5cafd09e82cd5 AS dev
|
||||
FROM ghcr.io/immich-app/base-server-dev:20241112@sha256:889647c747b3f999b05e387eff414bcec5e42477958b267930e58ac58dadcfc7 AS dev
|
||||
|
||||
RUN apt-get install --no-install-recommends -yqq tini
|
||||
WORKDIR /usr/src/app
|
||||
@@ -25,7 +25,7 @@ COPY --from=dev /usr/src/app/node_modules/@img ./node_modules/@img
|
||||
COPY --from=dev /usr/src/app/node_modules/exiftool-vendored.pl ./node_modules/exiftool-vendored.pl
|
||||
|
||||
# web build
|
||||
FROM node:22.11.0-alpine3.20@sha256:f265794478aa0b1a23d85a492c8311ed795bc527c3fe7e43453b3c872dcd71a3 AS web
|
||||
FROM node:22.11.0-alpine3.20@sha256:dc8ba2f61dd86c44e43eb25a7812ad03c5b1b224a19fc6f77e1eb9e5669f0b82 AS web
|
||||
|
||||
WORKDIR /usr/src/open-api/typescript-sdk
|
||||
COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./
|
||||
@@ -42,7 +42,7 @@ RUN npm run build
|
||||
|
||||
|
||||
# prod build
|
||||
FROM ghcr.io/immich-app/base-server-prod:20241105@sha256:dbe566f5c53f36640da910ca86a7c5575a26e9b9f6bc8d90ae0a53b8bc3a1f73
|
||||
FROM ghcr.io/immich-app/base-server-prod:20241112@sha256:26a209563689f52b9a63feeedde9a16a8e0e558483cd3feb5c936423e55c7eea
|
||||
|
||||
WORKDIR /usr/src/app
|
||||
ENV NODE_ENV=production \
|
||||
|
||||
server/package-lock.json (generated, 6 changes)
@@ -1,12 +1,12 @@
 {
 "name": "immich",
-"version": "1.120.1",
+"version": "1.120.2",
 "lockfileVersion": 2,
 "requires": true,
 "packages": {
 "": {
 "name": "immich",
-"version": "1.120.1",
+"version": "1.120.2",
 "license": "GNU Affero General Public License version 3",
 "dependencies": {
 "@nestjs/bullmq": "^10.0.1",
@@ -83,7 +83,7 @@
 "@types/lodash": "^4.14.197",
 "@types/mock-fs": "^4.13.1",
 "@types/multer": "^1.4.7",
-"@types/node": "^22.8.6",
+"@types/node": "^22.9.0",
 "@types/nodemailer": "^6.4.14",
 "@types/picomatch": "^3.0.0",
 "@types/pngjs": "^6.0.5",
@@ -1,6 +1,6 @@
 {
 "name": "immich",
-"version": "1.120.1",
+"version": "1.120.2",
 "description": "",
 "author": "",
 "private": true,
@@ -108,7 +108,7 @@
 "@types/lodash": "^4.14.197",
 "@types/mock-fs": "^4.13.1",
 "@types/multer": "^1.4.7",
-"@types/node": "^22.8.6",
+"@types/node": "^22.9.0",
 "@types/nodemailer": "^6.4.14",
 "@types/picomatch": "^3.0.0",
 "@types/pngjs": "^6.0.5",
@@ -1,6 +1,7 @@
 import { Duration } from 'luxon';
 import { readFileSync } from 'node:fs';
 import { SemVer } from 'semver';
+import { ExifOrientation } from 'src/enum';

 export const POSTGRES_VERSION_RANGE = '>=14.0.0';
 export const VECTORS_VERSION_RANGE = '>=0.2 <0.4';
@@ -81,3 +82,19 @@ export const CLIP_MODEL_INFO: Record<string, ModelInfo> = {
 'nllb-clip-large-siglip__mrl': { dimSize: 1152 },
 'nllb-clip-large-siglip__v1': { dimSize: 1152 },
 };

+type SharpRotationData = {
+  angle?: number;
+  flip?: boolean;
+  flop?: boolean;
+};
+export const ORIENTATION_TO_SHARP_ROTATION: Record<ExifOrientation, SharpRotationData> = {
+  [ExifOrientation.Horizontal]: { angle: 0 },
+  [ExifOrientation.MirrorHorizontal]: { angle: 0, flop: true },
+  [ExifOrientation.Rotate180]: { angle: 180 },
+  [ExifOrientation.MirrorVertical]: { angle: 180, flop: true },
+  [ExifOrientation.MirrorHorizontalRotate270CW]: { angle: 270, flip: true },
+  [ExifOrientation.Rotate90CW]: { angle: 90 },
+  [ExifOrientation.MirrorHorizontalRotate90CW]: { angle: 90, flip: true },
+  [ExifOrientation.Rotate270CW]: { angle: 270 },
+} as const;
|
||||
import { UserResponseDto, mapUser } from 'src/dtos/user.dto';
|
||||
import { AssetFaceEntity } from 'src/entities/asset-face.entity';
|
||||
import { AssetEntity } from 'src/entities/asset.entity';
|
||||
import { SmartInfoEntity } from 'src/entities/smart-info.entity';
|
||||
import { AssetType } from 'src/enum';
|
||||
import { mimeTypes } from 'src/utils/mime-types';
|
||||
|
||||
@@ -45,7 +44,6 @@ export class AssetResponseDto extends SanitizedAssetResponseDto {
|
||||
isTrashed!: boolean;
|
||||
isOffline!: boolean;
|
||||
exifInfo?: ExifResponseDto;
|
||||
smartInfo?: SmartInfoResponseDto;
|
||||
tags?: TagResponseDto[];
|
||||
people?: PersonWithFacesResponseDto[];
|
||||
unassignedFaces?: AssetFaceWithoutPersonResponseDto[];
|
||||
@@ -141,7 +139,6 @@ export function mapAsset(entity: AssetEntity, options: AssetMapOptions = {}): As
|
||||
isTrashed: !!entity.deletedAt,
|
||||
duration: entity.duration ?? '0:00:00.00000',
|
||||
exifInfo: entity.exifInfo ? mapExif(entity.exifInfo) : undefined,
|
||||
smartInfo: entity.smartInfo ? mapSmartInfo(entity.smartInfo) : undefined,
|
||||
livePhotoVideoId: entity.livePhotoVideoId,
|
||||
tags: entity.tags?.map((tag) => mapTag(tag)),
|
||||
people: peopleWithFaces(entity.faces),
|
||||
@@ -161,15 +158,3 @@ export class MemoryLaneResponseDto {
|
||||
|
||||
assets!: AssetResponseDto[];
|
||||
}
|
||||
|
||||
export class SmartInfoResponseDto {
|
||||
tags?: string[] | null;
|
||||
objects?: string[] | null;
|
||||
}
|
||||
|
||||
export function mapSmartInfo(entity: SmartInfoEntity): SmartInfoResponseDto {
|
||||
return {
|
||||
tags: entity.tags,
|
||||
objects: entity.objects,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -12,11 +12,8 @@ import {
 IsUrl,
 Max,
 Min,
-Validate,
 ValidateIf,
 ValidateNested,
-ValidatorConstraint,
-ValidatorConstraintInterface,
 } from 'class-validator';
 import { SystemConfig } from 'src/config';
 import { CLIPConfig, DuplicateDetectionConfig, FacialRecognitionConfig } from 'src/dtos/model-config.dto';
@@ -33,14 +30,7 @@ import {
 VideoContainer,
 } from 'src/enum';
 import { ConcurrentQueueName, QueueName } from 'src/interfaces/job.interface';
-import { ValidateBoolean, validateCronExpression } from 'src/validation';

-@ValidatorConstraint({ name: 'cronValidator' })
-class CronValidator implements ValidatorConstraintInterface {
-validate(expression: string): boolean {
-return validateCronExpression(expression);
-}
-}
+import { IsCronExpression, ValidateBoolean } from 'src/validation';

 const isLibraryScanEnabled = (config: SystemConfigLibraryScanDto) => config.enabled;
 const isOAuthEnabled = (config: SystemConfigOAuthDto) => config.enabled;
@@ -54,7 +44,7 @@ export class DatabaseBackupConfig {
 @ValidateIf(isDatabaseBackupEnabled)
 @IsNotEmpty()
-@Validate(CronValidator, { message: 'Invalid cron expression' })
+@IsCronExpression()
 @IsString()
 cronExpression!: string;
@@ -244,7 +234,7 @@ class SystemConfigLibraryScanDto {
 @ValidateIf(isLibraryScanEnabled)
 @IsNotEmpty()
-@Validate(CronValidator, { message: 'Invalid cron expression' })
+@IsCronExpression()
 @IsString()
 cronExpression!: string;
 }
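The hunk above swaps the inline `CronValidator` class for an `@IsCronExpression()` decorator imported from `src/validation`. That decorator's implementation is not part of this diff; a plausible sketch built on class-validator's `registerDecorator` and the existing `validateCronExpression` helper could look like this (the wiring details are assumptions):

```typescript
// Hypothetical sketch of what src/validation's IsCronExpression() might contain.
import { registerDecorator, ValidationOptions } from 'class-validator';
import { validateCronExpression } from 'src/validation';

export function IsCronExpression(validationOptions?: ValidationOptions) {
  return function (object: object, propertyName: string) {
    registerDecorator({
      name: 'isCronExpression',
      target: object.constructor,
      propertyName,
      options: { message: 'Invalid cron expression', ...validationOptions },
      validator: {
        // Delegates to the same helper the removed CronValidator class called.
        validate: (value: unknown) => typeof value === 'string' && validateCronExpression(value),
      },
    });
  };
}
```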
@@ -5,7 +5,6 @@ import { AssetJobStatusEntity } from 'src/entities/asset-job-status.entity';
 import { ExifEntity } from 'src/entities/exif.entity';
 import { LibraryEntity } from 'src/entities/library.entity';
 import { SharedLinkEntity } from 'src/entities/shared-link.entity';
-import { SmartInfoEntity } from 'src/entities/smart-info.entity';
 import { SmartSearchEntity } from 'src/entities/smart-search.entity';
 import { StackEntity } from 'src/entities/stack.entity';
 import { TagEntity } from 'src/entities/tag.entity';
@@ -143,9 +142,6 @@ export class AssetEntity {
 @OneToOne(() => ExifEntity, (exifEntity) => exifEntity.asset)
 exifInfo?: ExifEntity;

-@OneToOne(() => SmartInfoEntity, (smartInfoEntity) => smartInfoEntity.asset)
-smartInfo?: SmartInfoEntity;

 @OneToOne(() => SmartSearchEntity, (smartSearchEntity) => smartSearchEntity.asset)
 smartSearch?: SmartSearchEntity;
@@ -18,7 +18,6 @@ import { PartnerEntity } from 'src/entities/partner.entity';
 import { PersonEntity } from 'src/entities/person.entity';
 import { SessionEntity } from 'src/entities/session.entity';
 import { SharedLinkEntity } from 'src/entities/shared-link.entity';
-import { SmartInfoEntity } from 'src/entities/smart-info.entity';
 import { SmartSearchEntity } from 'src/entities/smart-search.entity';
 import { StackEntity } from 'src/entities/stack.entity';
 import { SystemMetadataEntity } from 'src/entities/system-metadata.entity';
@@ -46,7 +45,6 @@ export const entities = [
 PartnerEntity,
 PersonEntity,
 SharedLinkEntity,
-SmartInfoEntity,
 SmartSearchEntity,
 StackEntity,
 SystemMetadataEntity,
@@ -1,18 +0,0 @@
 import { AssetEntity } from 'src/entities/asset.entity';
 import { Column, Entity, JoinColumn, OneToOne, PrimaryColumn } from 'typeorm';

 @Entity('smart_info', { synchronize: false })
 export class SmartInfoEntity {
 @OneToOne(() => AssetEntity, { onDelete: 'CASCADE', nullable: true })
 @JoinColumn({ name: 'assetId', referencedColumnName: 'id' })
 asset?: AssetEntity;

 @PrimaryColumn()
 assetId!: string;

 @Column({ type: 'text', array: true, nullable: true })
 tags!: string[] | null;

 @Column({ type: 'text', array: true, nullable: true })
 objects!: string[] | null;
 }
@@ -373,3 +373,14 @@ export enum ImmichTelemetry {
 REPO = 'repo',
 JOB = 'job',
 }

+export enum ExifOrientation {
+  Horizontal = 1,
+  MirrorHorizontal = 2,
+  Rotate180 = 3,
+  MirrorVertical = 4,
+  MirrorHorizontalRotate270CW = 5,
+  Rotate90CW = 6,
+  MirrorHorizontalRotate90CW = 7,
+  Rotate270CW = 8,
+}
@@ -28,9 +28,7 @@ export enum WithoutProperty {
 EXIF = 'exif',
 SMART_SEARCH = 'smart-search',
 DUPLICATE = 'duplicate',
-OBJECT_TAGS = 'object-tags',
 FACES = 'faces',
 PERSON = 'person',
 SIDECAR = 'sidecar',
 }
@@ -94,7 +92,6 @@ export type AssetWithoutRelations = Omit<
 | 'library'
 | 'exifInfo'
 | 'sharedLinks'
-| 'smartInfo'
 | 'smartSearch'
 | 'tags'
 >;
@@ -190,7 +187,6 @@ export interface IAssetRepository {
 upsertExif(exif: Partial<ExifEntity>): Promise<void>;
 upsertJobStatus(...jobStatus: Partial<AssetJobStatusEntity>[]): Promise<void>;
 getAssetIdByCity(userId: string, options: AssetExploreFieldOptions): Promise<SearchExploreItem<string>>;
-getAssetIdByTag(userId: string, options: AssetExploreFieldOptions): Promise<SearchExploreItem<string>>;
 getDuplicates(options: AssetBuilderOptions): Promise<AssetEntity[]>;
 getAllForUserFullSync(options: AssetFullSyncOptions): Promise<AssetEntity[]>;
 getChangedDeltaSync(options: AssetDeltaSyncOptions): Promise<AssetEntity[]>;

server/src/interfaces/cron.interface.ts (new file, 20 lines)
@@ -0,0 +1,20 @@
 export const ICronRepository = 'ICronRepository';

 type CronBase = {
 name: string;
 start?: boolean;
 };

 export type CronCreate = CronBase & {
 expression: string;
 onTick: () => void;
 };

 export type CronUpdate = CronBase & {
 expression?: string;
 };

 export interface ICronRepository {
 create(cron: CronCreate): void;
 update(cron: CronUpdate): void;
 }
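A sketch of how a service might schedule work through the new `ICronRepository` instead of the removed `IJobRepository.addCronJob`/`updateCronJob` methods (see the next hunk); the service and job names here are hypothetical:

```typescript
// Illustrative consumer of ICronRepository; not taken from the diff itself.
import { Inject, Injectable } from '@nestjs/common';
import { CronCreate, ICronRepository } from 'src/interfaces/cron.interface';

@Injectable()
export class NightlyBackupScheduler {
  constructor(@Inject(ICronRepository) private cronRepository: ICronRepository) {}

  onConfigInit(cronExpression: string, enabled: boolean) {
    const cron: CronCreate = {
      name: 'nightly-backup',
      expression: cronExpression,
      onTick: () => this.runBackup(),
      start: enabled, // register stopped when the feature is disabled
    };
    this.cronRepository.create(cron);
  }

  onConfigUpdate(cronExpression: string, enabled: boolean) {
    // Re-uses the registered name; expression and start flag are both optional.
    this.cronRepository.update({ name: 'nightly-backup', expression: cronExpression, start: enabled });
  }

  private runBackup() {
    // placeholder for the scheduled work
  }
}
```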
@@ -315,8 +315,6 @@ export interface IJobRepository {
 setup(options: { services: ClassConstructor<unknown>[] }): void;
 startWorkers(): void;
 run(job: JobItem): Promise<JobStatus>;
-addCronJob(name: string, expression: string, onTick: () => void, start?: boolean): void;
-updateCronJob(name: string, expression?: string, start?: boolean): void;
 setConcurrency(queueName: QueueName, concurrency: number): void;
 queue(item: JobItem): Promise<void>;
 queueAll(items: JobItem[]): Promise<void>;
@@ -1,5 +1,5 @@
 import { Writable } from 'node:stream';
-import { ImageFormat, TranscodeTarget, VideoCodec } from 'src/enum';
+import { ExifOrientation, ImageFormat, TranscodeTarget, VideoCodec } from 'src/enum';

 export const IMediaRepository = 'IMediaRepository';

@@ -31,6 +31,7 @@ interface DecodeImageOptions {

 export interface DecodeToBufferOptions extends DecodeImageOptions {
 size: number;
+orientation?: ExifOrientation;
 }

 export type GenerateThumbnailOptions = ImageOptions & DecodeImageOptions;
@@ -68,7 +68,6 @@ export interface SearchStatusOptions {

 export interface SearchOneToOneRelationOptions {
 withExif?: boolean;
-withSmartInfo?: boolean;
 withStacked?: boolean;
 }
server/src/migrations/1730989238718-DropSmartInfoTable.ts (new file, 11 lines)
@@ -0,0 +1,11 @@
 import { MigrationInterface, QueryRunner } from 'typeorm';

 export class DropSmartInfoTable1730989238718 implements MigrationInterface {
 public async up(queryRunner: QueryRunner): Promise<void> {
 await queryRunner.query(`DROP TABLE smart_info`);
 }

 public async down(): Promise<void> {
 // not implemented
 }
 }
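The shipped migration is one-way: `down()` is intentionally a no-op. Purely as an illustration, a reversible variant could recreate the table from the deleted `SmartInfoEntity` definition shown earlier in this diff; the class name and column types below are assumptions derived from that entity, not from the real schema:

```typescript
// Hypothetical reversible variant, for illustration only.
import { MigrationInterface, QueryRunner } from 'typeorm';

export class DropSmartInfoTableReversible implements MigrationInterface {
  public async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(`DROP TABLE smart_info`);
  }

  public async down(queryRunner: QueryRunner): Promise<void> {
    // Recreate the table shape described by the removed SmartInfoEntity:
    // assetId primary key with a cascading FK to assets, plus two text[] columns.
    await queryRunner.query(`
      CREATE TABLE smart_info (
        "assetId" uuid PRIMARY KEY REFERENCES assets (id) ON DELETE CASCADE,
        tags text[],
        objects text[]
      )
    `);
  }
}
```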
@@ -183,9 +183,6 @@ SELECT
 "AssetEntity__AssetEntity_exifInfo"."bitsPerSample" AS "AssetEntity__AssetEntity_exifInfo_bitsPerSample",
 "AssetEntity__AssetEntity_exifInfo"."rating" AS "AssetEntity__AssetEntity_exifInfo_rating",
 "AssetEntity__AssetEntity_exifInfo"."fps" AS "AssetEntity__AssetEntity_exifInfo_fps",
-"AssetEntity__AssetEntity_smartInfo"."assetId" AS "AssetEntity__AssetEntity_smartInfo_assetId",
-"AssetEntity__AssetEntity_smartInfo"."tags" AS "AssetEntity__AssetEntity_smartInfo_tags",
-"AssetEntity__AssetEntity_smartInfo"."objects" AS "AssetEntity__AssetEntity_smartInfo_objects",
 "AssetEntity__AssetEntity_tags"."id" AS "AssetEntity__AssetEntity_tags_id",
 "AssetEntity__AssetEntity_tags"."value" AS "AssetEntity__AssetEntity_tags_value",
 "AssetEntity__AssetEntity_tags"."createdAt" AS "AssetEntity__AssetEntity_tags_createdAt",
@@ -252,7 +249,6 @@ SELECT
 FROM
 "assets" "AssetEntity"
 LEFT JOIN "exif" "AssetEntity__AssetEntity_exifInfo" ON "AssetEntity__AssetEntity_exifInfo"."assetId" = "AssetEntity"."id"
-LEFT JOIN "smart_info" "AssetEntity__AssetEntity_smartInfo" ON "AssetEntity__AssetEntity_smartInfo"."assetId" = "AssetEntity"."id"
 LEFT JOIN "tag_asset" "AssetEntity_AssetEntity__AssetEntity_tags" ON "AssetEntity_AssetEntity__AssetEntity_tags"."assetsId" = "AssetEntity"."id"
 LEFT JOIN "tags" "AssetEntity__AssetEntity_tags" ON "AssetEntity__AssetEntity_tags"."id" = "AssetEntity_AssetEntity__AssetEntity_tags"."tagsId"
 LEFT JOIN "asset_faces" "AssetEntity__AssetEntity_faces" ON "AssetEntity__AssetEntity_faces"."assetId" = "AssetEntity"."id"
@@ -932,36 +928,6 @@ WHERE
 LIMIT
 12

--- AssetRepository.getAssetIdByTag
-WITH
-"random_tags" AS (
-SELECT
-unnest(tags) AS "tag"
-FROM
-"smart_info" "si"
-GROUP BY
-tag
-HAVING
-count(*) >= $1
-)
-SELECT DISTINCT
-ON (unnest("si"."tags")) "asset"."id" AS "data",
-unnest("si"."tags") AS "value"
-FROM
-"assets" "asset"
-INNER JOIN "smart_info" "si" ON "asset"."id" = si."assetId"
-INNER JOIN "random_tags" "t" ON "si"."tags" @> ARRAY[t.tag]
-WHERE
-(
-"asset"."isVisible" = true
-AND "asset"."type" = $2
-AND "asset"."ownerId" IN ($3)
-AND "asset"."isArchived" = $4
-)
-AND ("asset"."deletedAt" IS NULL)
-LIMIT
-12

 -- AssetRepository.getAllForUserFullSync
 SELECT
 "asset"."id" AS "asset_id",
@@ -5,7 +5,6 @@ import { AssetFileEntity } from 'src/entities/asset-files.entity';
 import { AssetJobStatusEntity } from 'src/entities/asset-job-status.entity';
 import { AssetEntity } from 'src/entities/asset.entity';
 import { ExifEntity } from 'src/entities/exif.entity';
-import { SmartInfoEntity } from 'src/entities/smart-info.entity';
 import { AssetFileType, AssetOrder, AssetStatus, AssetType, PaginationMode } from 'src/enum';
 import {
 AssetBuilderOptions,
@@ -60,7 +59,6 @@ export class AssetRepository implements IAssetRepository {
 @InjectRepository(AssetFileEntity) private fileRepository: Repository<AssetFileEntity>,
 @InjectRepository(ExifEntity) private exifRepository: Repository<ExifEntity>,
 @InjectRepository(AssetJobStatusEntity) private jobStatusRepository: Repository<AssetJobStatusEntity>,
-@InjectRepository(SmartInfoEntity) private smartInfoRepository: Repository<SmartInfoEntity>,
 ) {}

 async upsertExif(exif: Partial<ExifEntity>): Promise<void> {
@@ -119,7 +117,6 @@ export class AssetRepository implements IAssetRepository {
 where: { id: In(ids) },
 relations: {
 exifInfo: true,
-smartInfo: true,
 tags: true,
 faces: {
 person: true,
@@ -422,22 +419,6 @@ export class AssetRepository implements IAssetRepository {
 break;
 }

-case WithoutProperty.OBJECT_TAGS: {
-relations = {
-smartInfo: true,
-};
-where = {
-jobStatus: {
-previewAt: Not(IsNull()),
-},
-isVisible: true,
-smartInfo: {
-tags: IsNull(),
-},
-};
-break;
-}

 case WithoutProperty.FACES: {
 relations = {
 faces: true,
@@ -457,23 +438,6 @@ export class AssetRepository implements IAssetRepository {
 break;
 }

-case WithoutProperty.PERSON: {
-relations = {
-faces: true,
-};
-where = {
-jobStatus: {
-previewAt: Not(IsNull()),
-},
-isVisible: true,
-faces: {
-assetId: Not(IsNull()),
-personId: IsNull(),
-},
-};
-break;
-}

 case WithoutProperty.SIDECAR: {
 where = [
 { sidecarPath: IsNull(), isVisible: true },
@@ -611,35 +575,6 @@ export class AssetRepository implements IAssetRepository {
 return { fieldName: 'exifInfo.city', items };
 }

-@GenerateSql({ params: [DummyValue.UUID, { minAssetsPerField: 5, maxFields: 12 }] })
-async getAssetIdByTag(
-ownerId: string,
-{ minAssetsPerField, maxFields }: AssetExploreFieldOptions,
-): Promise<SearchExploreItem<string>> {
-const cte = this.smartInfoRepository
-.createQueryBuilder('si')
-.select('unnest(tags)', 'tag')
-.groupBy('tag')
-.having('count(*) >= :minAssetsPerField', { minAssetsPerField });

-const items = await this.getBuilder({
-userIds: [ownerId],
-exifInfo: false,
-assetType: AssetType.IMAGE,
-isArchived: false,
-})
-.select('unnest(si.tags)', 'value')
-.addSelect('asset.id', 'data')
-.distinctOn(['unnest(si.tags)'])
-.innerJoin('smart_info', 'si', 'asset.id = si."assetId"')
-.addCommonTableExpression(cte, 'random_tags')
-.innerJoin('random_tags', 't', 'si.tags @> ARRAY[t.tag]')
-.limit(maxFields)
-.getRawMany();

-return { fieldName: 'smartInfo.tags', items };
-}

 private getBuilder(options: AssetBuilderOptions) {
 const builder = this.repository.createQueryBuilder('asset').where('asset.isVisible = true');
server/src/repositories/cron.repository.ts (new file, 52 lines)
@@ -0,0 +1,52 @@
 import { Inject, Injectable } from '@nestjs/common';
 import { SchedulerRegistry } from '@nestjs/schedule';
 import { CronJob, CronTime } from 'cron';
 import { CronCreate, CronUpdate, ICronRepository } from 'src/interfaces/cron.interface';
 import { ILoggerRepository } from 'src/interfaces/logger.interface';

 @Injectable()
 export class CronRepository implements ICronRepository {
 constructor(
 private schedulerRegistry: SchedulerRegistry,
 @Inject(ILoggerRepository) private logger: ILoggerRepository,
 ) {
 this.logger.setContext(CronRepository.name);
 }

 create({ name, expression, onTick, start = true }: CronCreate): void {
 const job = new CronJob<null, null>(
 expression,
 onTick,
 // function to run onComplete
 undefined,
 // whether it should start directly
 start,
 // timezone
 undefined,
 // context
 undefined,
 // runOnInit
 undefined,
 // utcOffset
 undefined,
 // prevents memory leaking by automatically stopping when the node process finishes
 true,
 );

 this.schedulerRegistry.addCronJob(name, job);
 }

 update({ name, expression, start }: CronUpdate): void {
 const job = this.schedulerRegistry.getCronJob(name);
 if (expression) {
 job.setTime(new CronTime(expression));
 }
 if (start !== undefined) {
 if (start) {
 job.start();
 } else {
 job.stop();
 }
 }
 }
 }
|
||||
@@ -6,6 +6,7 @@ import { IKeyRepository } from 'src/interfaces/api-key.interface';
 import { IAssetRepository } from 'src/interfaces/asset.interface';
 import { IAuditRepository } from 'src/interfaces/audit.interface';
 import { IConfigRepository } from 'src/interfaces/config.interface';
+import { ICronRepository } from 'src/interfaces/cron.interface';
 import { ICryptoRepository } from 'src/interfaces/crypto.interface';
 import { IDatabaseRepository } from 'src/interfaces/database.interface';
 import { IEventRepository } from 'src/interfaces/event.interface';
@@ -44,6 +45,7 @@ import { ApiKeyRepository } from 'src/repositories/api-key.repository';
 import { AssetRepository } from 'src/repositories/asset.repository';
 import { AuditRepository } from 'src/repositories/audit.repository';
 import { ConfigRepository } from 'src/repositories/config.repository';
+import { CronRepository } from 'src/repositories/cron.repository';
 import { CryptoRepository } from 'src/repositories/crypto.repository';
 import { DatabaseRepository } from 'src/repositories/database.repository';
 import { EventRepository } from 'src/repositories/event.repository';
@@ -83,6 +85,7 @@ export const repositories = [
 { provide: IAssetRepository, useClass: AssetRepository },
 { provide: IAuditRepository, useClass: AuditRepository },
 { provide: IConfigRepository, useClass: ConfigRepository },
+{ provide: ICronRepository, useClass: CronRepository },
 { provide: ICryptoRepository, useClass: CryptoRepository },
 { provide: IDatabaseRepository, useClass: DatabaseRepository },
 { provide: IEventRepository, useClass: EventRepository },
@@ -4,7 +4,6 @@ import { ModuleRef, Reflector } from '@nestjs/core';
 import { SchedulerRegistry } from '@nestjs/schedule';
 import { JobsOptions, Queue, Worker } from 'bullmq';
 import { ClassConstructor } from 'class-transformer';
-import { CronJob, CronTime } from 'cron';
 import { setTimeout } from 'node:timers/promises';
 import { JobConfig } from 'src/decorators';
 import { MetadataKey } from 'src/enum';
@@ -119,43 +118,6 @@ export class JobRepository implements IJobRepository {
 return item.handler(data);
 }

-addCronJob(name: string, expression: string, onTick: () => void, start = true): void {
-const job = new CronJob<null, null>(
-expression,
-onTick,
-// function to run onComplete
-undefined,
-// whether it should start directly
-start,
-// timezone
-undefined,
-// context
-undefined,
-// runOnInit
-undefined,
-// utcOffset
-undefined,
-// prevents memory leaking by automatically stopping when the node process finishes
-true,
-);

-this.schedulerRegistry.addCronJob(name, job);
-}

-updateCronJob(name: string, expression?: string, start?: boolean): void {
-const job = this.schedulerRegistry.getCronJob(name);
-if (expression) {
-job.setTime(new CronTime(expression));
-}
-if (start !== undefined) {
-if (start) {
-job.start();
-} else {
-job.stop();
-}
-}
-}

 setConcurrency(queueName: QueueName, concurrency: number) {
 const worker = this.workers[queueName];
 if (!worker) {
@@ -5,6 +5,7 @@ import { Duration } from 'luxon';
import fs from 'node:fs/promises';
import { Writable } from 'node:stream';
import sharp from 'sharp';
import { ORIENTATION_TO_SHARP_ROTATION } from 'src/constants';
import { Colorspace, LogLevel } from 'src/enum';
import { ILoggerRepository } from 'src/interfaces/logger.interface';
import {
@@ -82,7 +83,15 @@ export class MediaRepository implements IMediaRepository {
.withIccProfile(options.colorspace);

if (!options.raw) {
pipeline = pipeline.rotate();
const { angle, flip, flop } = options.orientation ? ORIENTATION_TO_SHARP_ROTATION[options.orientation] : {};
pipeline = pipeline.rotate(angle);
if (flip) {
pipeline = pipeline.flip();
}

if (flop) {
pipeline = pipeline.flop();
}
}

if (options.crop) {

@@ -6,7 +6,6 @@ import { AssetFaceEntity } from 'src/entities/asset-face.entity';
import { AssetEntity } from 'src/entities/asset.entity';
import { ExifEntity } from 'src/entities/exif.entity';
import { GeodataPlacesEntity } from 'src/entities/geodata-places.entity';
import { SmartInfoEntity } from 'src/entities/smart-info.entity';
import { SmartSearchEntity } from 'src/entities/smart-search.entity';
import { AssetType, PaginationMode } from 'src/enum';
import { IConfigRepository } from 'src/interfaces/config.interface';
@@ -34,7 +33,6 @@ export class SearchRepository implements ISearchRepository {
private assetsByCityQuery: string;

constructor(
@InjectRepository(SmartInfoEntity) private repository: Repository<SmartInfoEntity>,
@InjectRepository(AssetEntity) private assetRepository: Repository<AssetEntity>,
@InjectRepository(ExifEntity) private exifRepository: Repository<ExifEntity>,
@InjectRepository(AssetFaceEntity) private assetFaceRepository: Repository<AssetFaceEntity>,
@@ -278,7 +276,7 @@ export class SearchRepository implements ISearchRepository {
@GenerateSql({ params: [[DummyValue.UUID]] })
async getAssetsByCity(userIds: string[]): Promise<AssetEntity[]> {
const parameters = [userIds, true, false, AssetType.IMAGE];
const rawRes = await this.repository.query(this.assetsByCityQuery, parameters);
const rawRes = await this.assetRepository.query(this.assetsByCityQuery, parameters);

const items: AssetEntity[] = [];
for (const res of rawRes) {

@@ -94,7 +94,6 @@ export class AssetService extends BaseService {
{
exifInfo: true,
sharedLinks: true,
smartInfo: true,
tags: true,
owner: true,
faces: {
@@ -162,7 +161,6 @@ export class AssetService extends BaseService {
const asset = await this.assetRepository.getById(id, {
exifInfo: true,
owner: true,
smartInfo: true,
tags: true,
faces: {
person: true,

@@ -3,8 +3,9 @@ import { defaults, SystemConfig } from 'src/config';
import { StorageCore } from 'src/cores/storage.core';
import { ImmichWorker, StorageFolder } from 'src/enum';
import { IConfigRepository } from 'src/interfaces/config.interface';
import { ICronRepository } from 'src/interfaces/cron.interface';
import { IDatabaseRepository } from 'src/interfaces/database.interface';
import { IJobRepository, JobStatus } from 'src/interfaces/job.interface';
import { JobStatus } from 'src/interfaces/job.interface';
import { IProcessRepository } from 'src/interfaces/process.interface';
import { IStorageRepository } from 'src/interfaces/storage.interface';
import { ISystemMetadataRepository } from 'src/interfaces/system-metadata.interface';
@@ -18,13 +19,13 @@ describe(BackupService.name, () => {

let databaseMock: Mocked<IDatabaseRepository>;
let configMock: Mocked<IConfigRepository>;
let jobMock: Mocked<IJobRepository>;
let cronMock: Mocked<ICronRepository>;
let processMock: Mocked<IProcessRepository>;
let storageMock: Mocked<IStorageRepository>;
let systemMock: Mocked<ISystemMetadataRepository>;

beforeEach(() => {
({ sut, configMock, databaseMock, jobMock, processMock, storageMock, systemMock } = newTestService(BackupService));
({ sut, cronMock, configMock, databaseMock, processMock, storageMock, systemMock } = newTestService(BackupService));
});

it('should work', () => {
@@ -37,7 +38,7 @@ describe(BackupService.name, () => {

await sut.onConfigInit({ newConfig: systemConfigStub.backupEnabled as SystemConfig });

expect(jobMock.addCronJob).toHaveBeenCalled();
expect(cronMock.create).toHaveBeenCalled();
});

it('should not initialize backup database cron job when lock is taken', async () => {
@@ -45,14 +46,14 @@ describe(BackupService.name, () => {

await sut.onConfigInit({ newConfig: systemConfigStub.backupEnabled as SystemConfig });

expect(jobMock.addCronJob).not.toHaveBeenCalled();
expect(cronMock.create).not.toHaveBeenCalled();
});

it('should not initialise backup database job when running on microservices', async () => {
configMock.getWorker.mockReturnValue(ImmichWorker.MICROSERVICES);
await sut.onConfigInit({ newConfig: systemConfigStub.backupEnabled as SystemConfig });

expect(jobMock.addCronJob).not.toHaveBeenCalled();
expect(cronMock.create).not.toHaveBeenCalled();
});
});

@@ -75,35 +76,15 @@ describe(BackupService.name, () => {
} as SystemConfig,
});

expect(jobMock.updateCronJob).toHaveBeenCalledWith('backupDatabase', '0 1 * * *', true);
expect(jobMock.updateCronJob).toHaveBeenCalled();
expect(cronMock.update).toHaveBeenCalledWith({ name: 'backupDatabase', expression: '0 1 * * *', start: true });
expect(cronMock.update).toHaveBeenCalled();
});

it('should do nothing if instance does not have the backup database lock', async () => {
databaseMock.tryLock.mockResolvedValue(false);
await sut.onConfigInit({ newConfig: defaults });
sut.onConfigUpdate({ newConfig: systemConfigStub.backupEnabled as SystemConfig, oldConfig: defaults });
expect(jobMock.updateCronJob).not.toHaveBeenCalled();
});
});

describe('onConfigValidateEvent', () => {
it('should allow a valid cron expression', () => {
expect(() =>
sut.onConfigValidate({
newConfig: { backup: { database: { cronExpression: '0 0 * * *' } } } as SystemConfig,
oldConfig: {} as SystemConfig,
}),
).not.toThrow(expect.stringContaining('Invalid cron expression'));
});

it('should fail for an invalid cron expression', () => {
expect(() =>
sut.onConfigValidate({
newConfig: { backup: { database: { cronExpression: 'foo' } } } as SystemConfig,
oldConfig: {} as SystemConfig,
}),
).toThrow(/Invalid cron expression.*/);
expect(cronMock.update).not.toHaveBeenCalled();
});
});

@@ -165,6 +146,7 @@ describe(BackupService.name, () => {
storageMock.readdir.mockResolvedValue([]);
processMock.spawn.mockReturnValue(mockSpawn(0, 'data', ''));
storageMock.rename.mockResolvedValue();
storageMock.unlink.mockResolvedValue();
systemMock.get.mockResolvedValue(systemConfigStub.backupEnabled);
storageMock.createWriteStream.mockReturnValue(new PassThrough());
});
@@ -207,5 +189,42 @@ describe(BackupService.name, () => {
const result = await sut.handleBackupDatabase();
expect(result).toBe(JobStatus.FAILED);
});
it('should ignore unlink failing and still return failed job status', async () => {
processMock.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
storageMock.unlink.mockRejectedValue(new Error('error'));
const result = await sut.handleBackupDatabase();
expect(storageMock.unlink).toHaveBeenCalled();
expect(result).toBe(JobStatus.FAILED);
});
it.each`
postgresVersion | expectedVersion
${'14.10'} | ${14}
${'14.10.3'} | ${14}
${'14.10 (Debian 14.10-1.pgdg120+1)'} | ${14}
${'15.3.3'} | ${15}
${'16.4.2'} | ${16}
${'17.15.1'} | ${17}
`(
`should use pg_dumpall $expectedVersion with postgres version $postgresVersion`,
async ({ postgresVersion, expectedVersion }) => {
databaseMock.getPostgresVersion.mockResolvedValue(postgresVersion);
await sut.handleBackupDatabase();
expect(processMock.spawn).toHaveBeenCalledWith(
`/usr/lib/postgresql/${expectedVersion}/bin/pg_dumpall`,
expect.any(Array),
expect.any(Object),
);
},
);
it.each`
postgresVersion
${'13.99.99'}
${'18.0.0'}
`(`should fail if postgres version $postgresVersion is not supported`, async ({ postgresVersion }) => {
databaseMock.getPostgresVersion.mockResolvedValue(postgresVersion);
const result = await sut.handleBackupDatabase();
expect(processMock.spawn).not.toHaveBeenCalled();
expect(result).toBe(JobStatus.FAILED);
});
});
});

@@ -1,5 +1,6 @@
import { Injectable } from '@nestjs/common';
import { default as path } from 'node:path';
import semver from 'semver';
import { StorageCore } from 'src/cores/storage.core';
import { OnEvent, OnJob } from 'src/decorators';
import { ImmichWorker, StorageFolder } from 'src/enum';
@@ -8,7 +9,6 @@ import { ArgOf } from 'src/interfaces/event.interface';
import { JobName, JobStatus, QueueName } from 'src/interfaces/job.interface';
import { BaseService } from 'src/services/base.service';
import { handlePromiseError } from 'src/utils/misc';
import { validateCronExpression } from 'src/validation';

@Injectable()
export class BackupService extends BaseService {
@@ -27,12 +27,12 @@ export class BackupService extends BaseService {
this.backupLock = await this.databaseRepository.tryLock(DatabaseLock.BackupDatabase);

if (this.backupLock) {
this.jobRepository.addCronJob(
'backupDatabase',
database.cronExpression,
() => handlePromiseError(this.jobRepository.queue({ name: JobName.BACKUP_DATABASE }), this.logger),
database.enabled,
);
this.cronRepository.create({
name: 'backupDatabase',
expression: database.cronExpression,
onTick: () => handlePromiseError(this.jobRepository.queue({ name: JobName.BACKUP_DATABASE }), this.logger),
start: database.enabled,
});
}
}

@@ -42,15 +42,11 @@ export class BackupService extends BaseService {
return;
}

this.jobRepository.updateCronJob('backupDatabase', backup.database.cronExpression, backup.database.enabled);
}

@OnEvent({ name: 'config.validate' })
onConfigValidate({ newConfig }: ArgOf<'config.validate'>) {
const { database } = newConfig.backup;
if (!validateCronExpression(database.cronExpression)) {
throw new Error(`Invalid cron expression ${database.cronExpression}`);
}
this.cronRepository.update({
name: 'backupDatabase',
expression: backup.database.cronExpression,
start: backup.database.enabled,
});
}

async cleanupDatabaseBackups() {
@@ -85,17 +81,50 @@ export class BackupService extends BaseService {
} = this.configRepository.getEnv();

const isUrlConnection = config.connectionType === 'url';
const databaseParams = isUrlConnection ? ['-d', config.url] : ['-U', config.username, '-h', config.host];

const databaseParams = isUrlConnection
? ['--dbname', config.url]
: [
'--username',
config.username,
'--host',
config.host,
'--port',
`${config.port}`,
'--database',
config.database,
];

databaseParams.push('--clean', '--if-exists');

const backupFilePath = path.join(
StorageCore.getBaseFolder(StorageFolder.BACKUPS),
`immich-db-backup-${Date.now()}.sql.gz.tmp`,
);

const databaseVersion = await this.databaseRepository.getPostgresVersion();
const databaseSemver = semver.coerce(databaseVersion);
const databaseMajorVersion = databaseSemver?.major;

if (!databaseMajorVersion || !databaseSemver || !semver.satisfies(databaseSemver, '>=14.0.0 <18.0.0')) {
this.logger.error(`Database Backup Failure: Unsupported PostgreSQL version: ${databaseVersion}`);
return JobStatus.FAILED;
}

this.logger.log(`Database Backup Starting. Database Version: ${databaseMajorVersion}`);

try {
await new Promise<void>((resolve, reject) => {
const pgdump = this.processRepository.spawn(`pg_dumpall`, [...databaseParams, '--clean', '--if-exists'], {
env: { PATH: process.env.PATH, PGPASSWORD: isUrlConnection ? undefined : config.password },
});
const pgdump = this.processRepository.spawn(
`/usr/lib/postgresql/${databaseMajorVersion}/bin/pg_dumpall`,
databaseParams,
{
env: {
PATH: process.env.PATH,
PGPASSWORD: isUrlConnection ? undefined : config.password,
},
},
);

// NOTE: `--rsyncable` is only supported in GNU gzip
const gzip = this.processRepository.spawn(`gzip`, ['--rsyncable']);
@@ -150,10 +179,13 @@ export class BackupService extends BaseService {
await this.storageRepository.rename(backupFilePath, backupFilePath.replace('.tmp', ''));
} catch (error) {
this.logger.error('Database Backup Failure', error);
await this.storageRepository
.unlink(backupFilePath)
.catch((error) => this.logger.error('Failed to delete failed backup file', error));
return JobStatus.FAILED;
}

this.logger.debug(`Database Backup Success`);
this.logger.log(`Database Backup Success`);
await this.cleanupDatabaseBackups();
return JobStatus.SUCCESS;
}

@@ -12,6 +12,7 @@ import { IKeyRepository } from 'src/interfaces/api-key.interface';
import { IAssetRepository } from 'src/interfaces/asset.interface';
import { IAuditRepository } from 'src/interfaces/audit.interface';
import { IConfigRepository } from 'src/interfaces/config.interface';
import { ICronRepository } from 'src/interfaces/cron.interface';
import { ICryptoRepository } from 'src/interfaces/crypto.interface';
import { IDatabaseRepository } from 'src/interfaces/database.interface';
import { IEventRepository } from 'src/interfaces/event.interface';
@@ -57,6 +58,7 @@ export class BaseService {
@Inject(IAlbumUserRepository) protected albumUserRepository: IAlbumUserRepository,
@Inject(IAssetRepository) protected assetRepository: IAssetRepository,
@Inject(IConfigRepository) protected configRepository: IConfigRepository,
@Inject(ICronRepository) protected cronRepository: ICronRepository,
@Inject(ICryptoRepository) protected cryptoRepository: ICryptoRepository,
@Inject(IDatabaseRepository) protected databaseRepository: IDatabaseRepository,
@Inject(IEventRepository) protected eventRepository: IEventRepository,

@@ -1,5 +1,5 @@
import { BadRequestException } from '@nestjs/common';
import { defaults } from 'src/config';
import { defaults, SystemConfig } from 'src/config';
import { ImmichWorker } from 'src/enum';
import { IAssetRepository } from 'src/interfaces/asset.interface';
import { IConfigRepository } from 'src/interfaces/config.interface';
@@ -31,7 +31,7 @@ describe(JobService.name, () => {

describe('onConfigUpdate', () => {
it('should update concurrency', () => {
sut.onConfigInitOrUpdate({ newConfig: defaults });
sut.onConfigUpdate({ newConfig: defaults, oldConfig: {} as SystemConfig });

expect(jobMock.setConcurrency).toHaveBeenCalledTimes(15);
expect(jobMock.setConcurrency).toHaveBeenNthCalledWith(5, QueueName.FACIAL_RECOGNITION, 1);

@@ -39,8 +39,7 @@ const asJobItem = (dto: JobCreateDto): JobItem => {
@Injectable()
export class JobService extends BaseService {
@OnEvent({ name: 'config.init' })
@OnEvent({ name: 'config.update', server: true })
onConfigInitOrUpdate({ newConfig: config }: ArgOf<'config.init'>) {
onConfigInit({ newConfig: config }: ArgOf<'config.init'>) {
if (this.worker !== ImmichWorker.MICROSERVICES) {
return;
}
@@ -56,6 +55,11 @@ export class JobService extends BaseService {
}
}

@OnEvent({ name: 'config.update', server: true })
onConfigUpdate({ newConfig: config }: ArgOf<'config.update'>) {
this.onConfigInit({ newConfig: config });
}

async create(dto: JobCreateDto): Promise<void> {
await this.jobRepository.queue(asJobItem(dto));
}

@@ -6,6 +6,7 @@ import { UserEntity } from 'src/entities/user.entity';
import { AssetType, ImmichWorker } from 'src/enum';
import { IAssetRepository } from 'src/interfaces/asset.interface';
import { IConfigRepository } from 'src/interfaces/config.interface';
import { ICronRepository } from 'src/interfaces/cron.interface';
import { IDatabaseRepository } from 'src/interfaces/database.interface';
import {
IJobRepository,
@@ -36,13 +37,15 @@ describe(LibraryService.name, () => {

let assetMock: Mocked<IAssetRepository>;
let configMock: Mocked<IConfigRepository>;
let cronMock: Mocked<ICronRepository>;
let databaseMock: Mocked<IDatabaseRepository>;
let jobMock: Mocked<IJobRepository>;
let libraryMock: Mocked<ILibraryRepository>;
let storageMock: Mocked<IStorageRepository>;

beforeEach(() => {
({ sut, assetMock, configMock, databaseMock, jobMock, libraryMock, storageMock } = newTestService(LibraryService));
({ sut, assetMock, configMock, cronMock, databaseMock, jobMock, libraryMock, storageMock } =
newTestService(LibraryService));

databaseMock.tryLock.mockResolvedValue(true);
configMock.getWorker.mockReturnValue(ImmichWorker.MICROSERVICES);
@@ -56,7 +59,7 @@ describe(LibraryService.name, () => {
it('should init cron job and handle config changes', async () => {
await sut.onConfigInit({ newConfig: defaults });

expect(jobMock.addCronJob).toHaveBeenCalled();
expect(cronMock.create).toHaveBeenCalled();

await sut.onConfigUpdate({
oldConfig: defaults,
@@ -71,7 +74,7 @@ describe(LibraryService.name, () => {
} as SystemConfig,
});

expect(jobMock.updateCronJob).toHaveBeenCalledWith('libraryScan', '0 1 * * *', true);
expect(cronMock.update).toHaveBeenCalledWith({ name: 'libraryScan', expression: '0 1 * * *', start: true });
});

it('should initialize watcher for all external libraries', async () => {
@@ -117,14 +120,14 @@ describe(LibraryService.name, () => {

await sut.onConfigInit({ newConfig: systemConfigStub.libraryWatchEnabled as SystemConfig });

expect(jobMock.addCronJob).not.toHaveBeenCalled();
expect(cronMock.create).not.toHaveBeenCalled();
});

it('should not initialize watcher or library scan job when running on api', async () => {
configMock.getWorker.mockReturnValue(ImmichWorker.API);
await sut.onConfigInit({ newConfig: systemConfigStub.libraryScan as SystemConfig });

expect(jobMock.addCronJob).not.toHaveBeenCalled();
expect(cronMock.create).not.toHaveBeenCalled();
});
});

@@ -138,7 +141,7 @@ describe(LibraryService.name, () => {
databaseMock.tryLock.mockResolvedValue(false);
await sut.onConfigInit({ newConfig: defaults });
await sut.onConfigUpdate({ newConfig: systemConfigStub.libraryScan as SystemConfig, oldConfig: defaults });
expect(jobMock.updateCronJob).not.toHaveBeenCalled();
expect(cronMock.update).not.toHaveBeenCalled();
});

it('should update cron job and enable watching', async () => {
@@ -148,11 +151,11 @@ describe(LibraryService.name, () => {
oldConfig: defaults,
});

expect(jobMock.updateCronJob).toHaveBeenCalledWith(
'libraryScan',
systemConfigStub.libraryScan.library.scan.cronExpression,
systemConfigStub.libraryScan.library.scan.enabled,
);
expect(cronMock.update).toHaveBeenCalledWith({
name: 'libraryScan',
expression: systemConfigStub.libraryScan.library.scan.cronExpression,
start: systemConfigStub.libraryScan.library.scan.enabled,
});
});

it('should update cron job and disable watching', async () => {
@@ -166,31 +169,11 @@ describe(LibraryService.name, () => {
oldConfig: defaults,
});

expect(jobMock.updateCronJob).toHaveBeenCalledWith(
'libraryScan',
systemConfigStub.libraryScan.library.scan.cronExpression,
systemConfigStub.libraryScan.library.scan.enabled,
);
});
});

describe('onConfigValidateEvent', () => {
it('should allow a valid cron expression', () => {
expect(() =>
sut.onConfigValidate({
newConfig: { library: { scan: { cronExpression: '0 0 * * *' } } } as SystemConfig,
oldConfig: {} as SystemConfig,
}),
).not.toThrow(expect.stringContaining('Invalid cron expression'));
});

it('should fail for an invalid cron expression', () => {
expect(() =>
sut.onConfigValidate({
newConfig: { library: { scan: { cronExpression: 'foo' } } } as SystemConfig,
oldConfig: {} as SystemConfig,
}),
).toThrow(/Invalid cron expression.*/);
expect(cronMock.update).toHaveBeenCalledWith({
name: 'libraryScan',
expression: systemConfigStub.libraryScan.library.scan.cronExpression,
start: systemConfigStub.libraryScan.library.scan.enabled,
});
});
});


@@ -24,7 +24,6 @@ import { BaseService } from 'src/services/base.service';
import { mimeTypes } from 'src/utils/mime-types';
import { handlePromiseError } from 'src/utils/misc';
import { usePagination } from 'src/utils/pagination';
import { validateCronExpression } from 'src/validation';

@Injectable()
export class LibraryService extends BaseService {
@@ -48,12 +47,13 @@ export class LibraryService extends BaseService {
this.watchLibraries = this.lock && watch.enabled;

if (this.lock) {
this.jobRepository.addCronJob(
'libraryScan',
scan.cronExpression,
() => handlePromiseError(this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_SYNC_ALL }), this.logger),
scan.enabled,
);
this.cronRepository.create({
name: 'libraryScan',
expression: scan.cronExpression,
onTick: () =>
handlePromiseError(this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_SYNC_ALL }), this.logger),
start: scan.enabled,
});
}

if (this.watchLibraries) {
@@ -67,7 +67,11 @@ export class LibraryService extends BaseService {
return;
}

this.jobRepository.updateCronJob('libraryScan', library.scan.cronExpression, library.scan.enabled);
this.cronRepository.update({
name: 'libraryScan',
expression: library.scan.cronExpression,
start: library.scan.enabled,
});

if (library.watch.enabled !== this.watchLibraries) {
// Watch configuration changed, update accordingly
@@ -76,14 +80,6 @@ export class LibraryService extends BaseService {
}
}

@OnEvent({ name: 'config.validate' })
onConfigValidate({ newConfig }: ArgOf<'config.validate'>) {
const { scan } = newConfig.library;
if (!validateCronExpression(scan.cronExpression)) {
throw new Error(`Invalid cron expression ${scan.cronExpression}`);
}
}

private async watch(id: string): Promise<boolean> {
if (!this.watchLibraries) {
return false;

@@ -214,7 +214,8 @@ export class MediaService extends BaseService {
const colorspace = this.isSRGB(asset) ? Colorspace.SRGB : image.colorspace;
const processInvalidImages = process.env.IMMICH_PROCESS_INVALID_IMAGES === 'true';

const decodeOptions = { colorspace, processInvalidImages, size: image.preview.size };
const orientation = useExtracted && asset.exifInfo?.orientation ? Number(asset.exifInfo.orientation) : undefined;
const decodeOptions = { colorspace, processInvalidImages, size: image.preview.size, orientation };
const { data, info } = await this.mediaRepository.decodeImage(inputPath, decodeOptions);

const options = { colorspace, processInvalidImages, raw: info };

@@ -3,7 +3,7 @@ import { randomBytes } from 'node:crypto';
import { Stats } from 'node:fs';
import { constants } from 'node:fs/promises';
import { ExifEntity } from 'src/entities/exif.entity';
import { AssetType, ImmichWorker, SourceType } from 'src/enum';
import { AssetType, ExifOrientation, ImmichWorker, SourceType } from 'src/enum';
import { IAlbumRepository } from 'src/interfaces/album.interface';
import { IAssetRepository, WithoutProperty } from 'src/interfaces/asset.interface';
import { IConfigRepository } from 'src/interfaces/config.interface';
@@ -18,7 +18,7 @@ import { IStorageRepository } from 'src/interfaces/storage.interface';
import { ISystemMetadataRepository } from 'src/interfaces/system-metadata.interface';
import { ITagRepository } from 'src/interfaces/tag.interface';
import { IUserRepository } from 'src/interfaces/user.interface';
import { MetadataService, Orientation } from 'src/services/metadata.service';
import { MetadataService } from 'src/services/metadata.service';
import { assetStub } from 'test/fixtures/asset.stub';
import { fileStub } from 'test/fixtures/file.stub';
import { probeStub } from 'test/fixtures/media.stub';
@@ -539,7 +539,7 @@ describe(MetadataService.name, () => {

expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.video.id], { faces: { person: false } });
expect(assetMock.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({ orientation: Orientation.Rotate270CW.toString() }),
expect.objectContaining({ orientation: ExifOrientation.Rotate270CW.toString() }),
);
});


@@ -12,7 +12,7 @@ import { AssetFaceEntity } from 'src/entities/asset-face.entity';
import { AssetEntity } from 'src/entities/asset.entity';
import { ExifEntity } from 'src/entities/exif.entity';
import { PersonEntity } from 'src/entities/person.entity';
import { AssetType, ImmichWorker, SourceType } from 'src/enum';
import { AssetType, ExifOrientation, ImmichWorker, SourceType } from 'src/enum';
import { WithoutProperty } from 'src/interfaces/asset.interface';
import { DatabaseLock } from 'src/interfaces/database.interface';
import { ArgOf } from 'src/interfaces/event.interface';
@@ -36,17 +36,6 @@ const EXIF_DATE_TAGS: Array<keyof Tags> = [
'DateTimeCreated',
];

export enum Orientation {
Horizontal = 1,
MirrorHorizontal = 2,
Rotate180 = 3,
MirrorVertical = 4,
MirrorHorizontalRotate270CW = 5,
Rotate90CW = 6,
MirrorHorizontalRotate90CW = 7,
Rotate270CW = 8,
}

const validate = <T>(value: T): NonNullable<T> | null => {
// handle lists of numbers
if (Array.isArray(value)) {
@@ -676,19 +665,19 @@ export class MetadataService extends BaseService {
if (videoStreams[0]) {
switch (videoStreams[0].rotation) {
case -90: {
tags.Orientation = Orientation.Rotate90CW;
tags.Orientation = ExifOrientation.Rotate90CW;
break;
}
case 0: {
tags.Orientation = Orientation.Horizontal;
tags.Orientation = ExifOrientation.Horizontal;
break;
}
case 90: {
tags.Orientation = Orientation.Rotate270CW;
tags.Orientation = ExifOrientation.Rotate270CW;
break;
}
case 180: {
tags.Orientation = Orientation.Rotate180;
tags.Orientation = ExifOrientation.Rotate180;
break;
}
}

@@ -47,14 +47,9 @@ describe(SearchService.name, () => {
fieldName: 'exifInfo.city',
items: [{ value: 'Paris', data: assetStub.image.id }],
});
assetMock.getAssetIdByTag.mockResolvedValue({
fieldName: 'smartInfo.tags',
items: [{ value: 'train', data: assetStub.imageFrom2015.id }],
});
assetMock.getByIdsWithAllRelations.mockResolvedValue([assetStub.image, assetStub.imageFrom2015]);
const expectedResponse = [
{ fieldName: 'exifInfo.city', items: [{ value: 'Paris', data: mapAsset(assetStub.image) }] },
{ fieldName: 'smartInfo.tags', items: [{ value: 'train', data: mapAsset(assetStub.imageFrom2015) }] },
];

const result = await sut.getExploreData(authStub.user1);

@@ -34,10 +34,8 @@ export class SearchService extends BaseService {

async getExploreData(auth: AuthDto): Promise<SearchExploreItem<AssetResponseDto>[]> {
const options = { maxFields: 12, minAssetsPerField: 5 };
const results = await Promise.all([
this.assetRepository.getAssetIdByCity(auth.user.id, options),
this.assetRepository.getAssetIdByTag(auth.user.id, options),
]);
const result = await this.assetRepository.getAssetIdByCity(auth.user.id, options);
const results = [result];
const assetIds = new Set<string>(results.flatMap((field) => field.items.map((item) => item.data)));
const assets = await this.assetRepository.getByIdsWithAllRelations([...assetIds]);
const assetMap = new Map<string, AssetResponseDto>(assets.map((asset) => [asset.id, mapAsset(asset)]));

@@ -38,7 +38,7 @@ describe(StorageTemplateService.name, () => {

systemMock.get.mockResolvedValue({ storageTemplate: { enabled: true } });

sut.onConfigInitOrUpdate({ newConfig: defaults });
sut.onConfigInit({ newConfig: defaults });
});

describe('onConfigValidate', () => {
@@ -171,7 +171,7 @@ describe(StorageTemplateService.name, () => {
const config = structuredClone(defaults);
config.storageTemplate.template = '{{y}}/{{#if album}}{{album}}{{else}}other/{{MM}}{{/if}}/{{filename}}';

sut.onConfigInitOrUpdate({ newConfig: config });
sut.onConfigInit({ newConfig: config });

userMock.get.mockResolvedValue(user);
assetMock.getByIds.mockResolvedValueOnce([asset]);
@@ -192,7 +192,7 @@ describe(StorageTemplateService.name, () => {
const user = userStub.user1;
const config = structuredClone(defaults);
config.storageTemplate.template = '{{y}}/{{#if album}}{{album}}{{else}}other//{{MM}}{{/if}}/{{filename}}';
sut.onConfigInitOrUpdate({ newConfig: config });
sut.onConfigInit({ newConfig: config });

userMock.get.mockResolvedValue(user);
assetMock.getByIds.mockResolvedValueOnce([asset]);

@@ -75,8 +75,7 @@ export class StorageTemplateService extends BaseService {
}

@OnEvent({ name: 'config.init' })
@OnEvent({ name: 'config.update', server: true })
onConfigInitOrUpdate({ newConfig }: ArgOf<'config.init'>) {
onConfigInit({ newConfig }: ArgOf<'config.init'>) {
const template = newConfig.storageTemplate.template;
if (!this._template || template !== this.template.raw) {
this.logger.debug(`Compiling new storage template: ${template}`);
@@ -84,6 +83,11 @@ export class StorageTemplateService extends BaseService {
}
}

@OnEvent({ name: 'config.update', server: true })
onConfigUpdate({ newConfig }: ArgOf<'config.update'>) {
this.onConfigInit({ newConfig });
}

@OnEvent({ name: 'config.validate' })
onConfigValidate({ newConfig }: ArgOf<'config.validate'>) {
try {

@@ -261,6 +261,29 @@ describe(SystemConfigService.name, () => {
});
});

it('should accept valid cron expressions', async () => {
configMock.getEnv.mockReturnValue(mockEnvData({ configFile: 'immich-config.json' }));
systemMock.readFile.mockResolvedValue(JSON.stringify({ library: { scan: { cronExpression: '0 0 * * *' } } }));

await expect(sut.getSystemConfig()).resolves.toMatchObject({
library: {
scan: {
enabled: true,
cronExpression: '0 0 * * *',
},
},
});
});

it('should reject invalid cron expressions', async () => {
configMock.getEnv.mockReturnValue(mockEnvData({ configFile: 'immich-config.json' }));
systemMock.readFile.mockResolvedValue(JSON.stringify({ library: { scan: { cronExpression: 'foo' } } }));

await expect(sut.getSystemConfig()).rejects.toThrow(
'library.scan.cronExpression has failed the following constraints: cronValidator',
);
});

it('should log errors with the config file', async () => {
configMock.getEnv.mockReturnValue(mockEnvData({ configFile: 'immich-config.json' }));


@@ -90,7 +90,6 @@ export function searchAssetBuilder(
isNotInAlbum,
withFaces,
withPeople,
withSmartInfo,
personIds,
withStacked,
trashedAfter,
@@ -123,10 +122,6 @@ export function searchAssetBuilder(
builder.leftJoinAndSelect('faces.person', 'person');
}

if (withSmartInfo) {
builder.leftJoinAndSelect(`${builder.alias}.smartInfo`, 'smartInfo');
}

if (personIds && personIds.length > 0) {
const cte = builder
.createQueryBuilder()

@@ -16,9 +16,12 @@ import {
IsOptional,
IsString,
IsUUID,
Validate,
ValidateBy,
ValidateIf,
ValidationOptions,
ValidatorConstraint,
ValidatorConstraintInterface,
buildMessage,
isDateString,
} from 'class-validator';
@@ -156,16 +159,20 @@ export const ValidateBoolean = (options?: BooleanOptions) => {
return applyDecorators(...decorators);
};

export function validateCronExpression(expression: string) {
try {
new CronJob(expression, () => {});
} catch {
return false;
@ValidatorConstraint({ name: 'cronValidator' })
class CronValidator implements ValidatorConstraintInterface {
validate(expression: string): boolean {
try {
new CronJob(expression, () => {});
return true;
} catch {
return false;
}
}

return true;
}

export const IsCronExpression = () => Validate(CronValidator, { message: 'Invalid cron expression' });

type IValue = { value: unknown };

export const toEmail = ({ value }: IValue) => (typeof value === 'string' ? value.toLowerCase() : value);

10
server/test/fixtures/shared-link.stub.ts
vendored
@@ -62,10 +62,6 @@ const assetResponse: AssetResponseDto = {
updatedAt: today,
isFavorite: false,
isArchived: false,
smartInfo: {
tags: [],
objects: ['a', 'b', 'c'],
},
duration: '0:00:00.00000',
exifInfo: assetInfo,
livePhotoVideoId: null,
@@ -205,12 +201,6 @@ export const sharedLinkStub = {
isArchived: false,
isExternal: false,
isOffline: false,
smartInfo: {
assetId: 'id_1',
tags: [],
objects: ['a', 'b', 'c'],
asset: null as any,
},
files: [],
thumbhash: null,
encodedVideoPath: '',

@@ -33,7 +33,6 @@ export const newAssetRepositoryMock = (): Mocked<IAssetRepository> => {
getTimeBucket: vitest.fn(),
getTimeBuckets: vitest.fn(),
getAssetIdByCity: vitest.fn(),
getAssetIdByTag: vitest.fn(),
getAllForUserFullSync: vitest.fn(),
getChangedDeltaSync: vitest.fn(),
getDuplicates: vitest.fn(),

9
server/test/repositories/cron.repository.mock.ts
Normal file
@@ -0,0 +1,9 @@
import { ICronRepository } from 'src/interfaces/cron.interface';
import { Mocked, vitest } from 'vitest';

export const newCronRepositoryMock = (): Mocked<ICronRepository> => {
return {
create: vitest.fn(),
update: vitest.fn(),
};
};
@@ -6,8 +6,6 @@ export const newJobRepositoryMock = (): Mocked<IJobRepository> => {
setup: vitest.fn(),
startWorkers: vitest.fn(),
run: vitest.fn(),
addCronJob: vitest.fn(),
updateCronJob: vitest.fn(),
setConcurrency: vitest.fn(),
empty: vitest.fn(),
pause: vitest.fn(),

@@ -12,6 +12,7 @@ import { newKeyRepositoryMock } from 'test/repositories/api-key.repository.mock'
import { newAssetRepositoryMock } from 'test/repositories/asset.repository.mock';
import { newAuditRepositoryMock } from 'test/repositories/audit.repository.mock';
import { newConfigRepositoryMock } from 'test/repositories/config.repository.mock';
import { newCronRepositoryMock } from 'test/repositories/cron.repository.mock';
import { newCryptoRepositoryMock } from 'test/repositories/crypto.repository.mock';
import { newDatabaseRepositoryMock } from 'test/repositories/database.repository.mock';
import { newEventRepositoryMock } from 'test/repositories/event.repository.mock';
@@ -62,6 +63,7 @@ export const newTestService = <T extends BaseService>(

const accessMock = newAccessRepositoryMock();
const loggerMock = newLoggerRepositoryMock();
const cronMock = newCronRepositoryMock();
const cryptoMock = newCryptoRepositoryMock();
const activityMock = newActivityRepositoryMock();
const auditMock = newAuditRepositoryMock();
@@ -108,6 +110,7 @@ export const newTestService = <T extends BaseService>(
albumUserMock,
assetMock,
configMock,
cronMock,
cryptoMock,
databaseMock,
eventMock,
@@ -144,6 +147,7 @@ export const newTestService = <T extends BaseService>(
sut,
accessMock,
loggerMock,
cronMock,
cryptoMock,
activityMock,
auditMock,

@@ -1,4 +1,4 @@
FROM node:22.11.0-alpine3.20@sha256:f265794478aa0b1a23d85a492c8311ed795bc527c3fe7e43453b3c872dcd71a3
FROM node:22.11.0-alpine3.20@sha256:dc8ba2f61dd86c44e43eb25a7812ad03c5b1b224a19fc6f77e1eb9e5669f0b82

RUN apk add --no-cache tini
USER node

8
web/package-lock.json
generated
@@ -1,12 +1,12 @@
{
"name": "immich-web",
"version": "1.120.1",
"version": "1.120.2",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "immich-web",
"version": "1.120.1",
"version": "1.120.2",
"license": "GNU Affero General Public License version 3",
"dependencies": {
"@formatjs/icu-messageformat-parser": "^2.7.8",
@@ -74,13 +74,13 @@
},
"../open-api/typescript-sdk": {
"name": "@immich/sdk",
"version": "1.120.1",
"version": "1.120.2",
"license": "GNU Affero General Public License version 3",
"dependencies": {
"@oazapfts/runtime": "^1.0.2"
},
"devDependencies": {
"@types/node": "^22.8.6",
"@types/node": "^22.9.0",
"typescript": "^5.3.3"
}
},

@@ -1,6 +1,6 @@
{
"name": "immich-web",
"version": "1.120.1",
"version": "1.120.2",
"license": "GNU Affero General Public License version 3",
"scripts": {
"dev": "vite dev --host 0.0.0.0 --port 3000",

@@ -5,7 +5,6 @@
import { clamp } from 'lodash-es';
import { onMount } from 'svelte';
import { isTimelineScrolling } from '$lib/stores/timeline.store';
import { parseUtcDate } from '$lib/utils/date-time';
import { fade, fly } from 'svelte/transition';

export let timelineTopOffset = 0;
@@ -75,7 +74,6 @@
$: timelineFullHeight = $assetStore.timelineHeight + timelineTopOffset + timelineBottomOffset;
$: relativeTopOffset = toScrollY(timelineTopOffset / timelineFullHeight);
$: relativeBottomOffset = toScrollY(timelineBottomOffset / timelineFullHeight);
$: formatedDate = scrubBucket?.bucketDate ? parseUtcDate(scrubBucket?.bucketDate).toFormat('MMM yyyy') : '';

const listener: BucketListener = (event) => {
const { type } = event;
@@ -243,12 +241,12 @@
class="absolute right-0 h-[2px] w-10 bg-immich-primary dark:bg-immich-dark-primary"
style:top="{scrollY + HOVER_DATE_HEIGHT}px"
>
{#if $isTimelineScrolling && formatedDate}
{#if $isTimelineScrolling && scrubBucket?.bucketDate}
<p
transition:fade={{ duration: 200 }}
class="truncate pointer-events-none absolute right-0 bottom-0 z-[100] min-w-20 max-w-64 w-fit rounded-tl-md border-b-2 border-immich-primary bg-immich-bg/80 py-1 px-1 text-sm font-medium shadow-[0_0_8px_rgba(0,0,0,0.25)] dark:border-immich-dark-primary dark:bg-immich-dark-gray/80 dark:text-immich-dark-fg"
>
{formatedDate}
{assetStore.getBucketByDate(scrubBucket.bucketDate)?.bucketDateFormattted}
</p>
{/if}
</div>

@@ -270,6 +270,7 @@ export const langs = [
{ name: 'Estonian', code: 'et', loader: () => import('$i18n/et.json') },
{ name: 'Persian', code: 'fa', loader: () => import('$i18n/fa.json') },
{ name: 'Finnish', code: 'fi', loader: () => import('$i18n/fi.json') },
{ name: 'Filipino', code: 'fil', loader: () => import('$i18n/fil.json') },
{ name: 'French', code: 'fr', loader: () => import('$i18n/fr.json') },
{ name: 'Hebrew', code: 'he', loader: () => import('$i18n/he.json') },
{ name: 'Hindi', code: 'hi', loader: () => import('$i18n/hi.json') },
@@ -291,6 +292,7 @@ export const langs = [
{ name: 'Malay', code: 'ms', loader: () => import('$i18n/ms.json') },
{ name: 'Norwegian Bokmål', code: 'nb-NO', weblateCode: 'nb_NO', loader: () => import('$i18n/nb_NO.json') },
{ name: 'Dutch', code: 'nl', loader: () => import('$i18n/nl.json') },
{ name: 'Norwegian Nynorsk', code: 'nn', loader: () => import('$i18n/nn.json') },
{ name: 'Polish', code: 'pl', loader: () => import('$i18n/pl.json') },
{ name: 'Portuguese', code: 'pt', loader: () => import('$i18n/pt.json') },
{ name: 'Portuguese (Brazil) ', code: 'pt-BR', weblateCode: 'pt_BR', loader: () => import('$i18n/pt_BR.json') },

@@ -16,8 +16,6 @@

enum Field {
CITY = 'exifInfo.city',
TAGS = 'smartInfo.tags',
OBJECTS = 'smartInfo.objects',
}

const getFieldItems = (items: SearchExploreResponseDto[], field: Field) => {

@@ -64,8 +64,20 @@

type SettingsComponent = ComponentType<SvelteComponent<SettingsComponentProps>>;

// https://stackoverflow.com/questions/16167581/sort-object-properties-and-json-stringify/43636793#43636793
const jsonReplacer = (key: string, value: unknown) =>
value instanceof Object && !Array.isArray(value)
? Object.keys(value)
.sort()
// eslint-disable-next-line unicorn/no-array-reduce
.reduce((sorted: { [key: string]: unknown }, key) => {
sorted[key] = (value as { [key: string]: unknown })[key];
return sorted;
}, {})
: value;

const downloadConfig = () => {
const blob = new Blob([JSON.stringify(config, null, 2)], { type: 'application/json' });
const blob = new Blob([JSON.stringify(config, jsonReplacer, 2)], { type: 'application/json' });
const downloadKey = 'immich-config.json';
downloadManager.add(downloadKey, blob.size);
downloadManager.update(downloadKey, blob.size);
@@ -240,7 +252,7 @@
<div class="hidden lg:block">
<SearchBar placeholder={$t('search_settings')} bind:name={searchQuery} showLoadingSpinner={false} />
</div>
<LinkButton on:click={() => copyToClipboard(JSON.stringify(config, null, 2))}>
<LinkButton on:click={() => copyToClipboard(JSON.stringify(config, jsonReplacer, 2))}>
<div class="flex place-items-center gap-2 text-sm">
<Icon path={mdiContentCopy} size="18" />
{$t('copy_to_clipboard')}