Compare commits

...

41 Commits

Author SHA1 Message Date
github-actions
31a1e64b58 chore: version v1.120.2 2024-11-12 17:30:29 +00:00
Zack Pollard
e17bd8efc6 fix(server): backup version checks not handling database versions correctly (#14102) 2024-11-12 10:57:05 -06:00
Alex
2f9019c0e1 fix(server): correct rotation for common files (#14092)
* fix(server): correct rotation for common files

* fix: test:

* pr feedback
2024-11-12 15:07:56 +00:00
Zack Pollard
dfa8a8a6e1 feat(server): use pg_dumpall version that matches the database version (#14083) 2024-11-12 14:58:29 +00:00
renovate[bot]
b9a0c3c79f chore(deps): update base-image to v20241112 (major) (#14088)
chore(deps): update base-image to v20241112

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-11-12 12:49:31 +00:00
renovate[bot]
bda97c4e0e chore(deps): update node (#14090)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-11-12 07:32:52 -05:00
Robert Schütz
e3426c880f chore(ml): replace fastapi-slim with fastapi (#14091)
The two have been identical since version 0.112.0:
https://github.com/fastapi/fastapi/discussions/11525#discussioncomment-10219861
2024-11-11 23:08:29 -05:00
Zack Pollard
d4ca7d0075 fix: config updates not applying for job and storage template service (#14074) 2024-11-11 12:50:09 +00:00
Zack Pollard
f1c9b763cf docs: backup folder name is backups (#14073) 2024-11-11 12:28:53 +00:00
Zack Pollard
5097c92494 fix(server): attempt to delete failed backups immediately after failure (#13995) 2024-11-11 12:08:52 +00:00
gamescom15
7aacc92699 docs: clarify file size impact in hardware-transcoding.md (#14049) 2024-11-11 03:51:00 +00:00
Daniel Dietzler
00d6cc86ad chore: add weblate requests (#14051) 2024-11-10 15:49:23 -05:00
Joren Guillaume
54d881e5c6 docs: Fix DCM docs link (#14059)
Fix DCM docs link
2024-11-10 13:33:51 -05:00
Snowknight26
edce096680 chore(web): Update the new version announcement text (#14001)
* Update en.json

* Update en.json

* Update en.json
2024-11-09 12:15:25 -06:00
mcarbonne
5c31acbcf0 feat(web): stable json settings export (#14036)
* recursively sort json output (settings)

* fix format/lint/...g
2024-11-09 12:11:20 -06:00
Alex
6b49104d59 fix(mobile): make sure date locale is inititialized for some languages (#14035) 2024-11-09 10:40:13 -05:00
Ben
97dbe3236b chore(docs): roadmap SEO (#14024) 2024-11-08 19:48:23 -05:00
Michel Heusschen
586393f178 fix(web): use locale for scrubber label when scrolling (#14012) 2024-11-08 15:36:26 -05:00
bo0tzz
f3e88ea2fa docs: make IGNORE_MOUNT_CHECK warning stronger (#14011) 2024-11-08 13:29:10 +00:00
Terry Zhao
c8b46802d6 fix(server): thumbnail rotation when using embedded previews (#13948) 2024-11-08 06:30:59 +00:00
Jason Rasmussen
7534098596 fix(server): support non-default postgres port when taking a backup (#13992) 2024-11-07 20:06:16 +00:00
Zack Pollard
ec5b7c266b chore: backups custom location and config file docs (#13996) 2024-11-07 18:08:02 +00:00
Jason Rasmussen
e84ad084d5 refactor(server): cron validation (#13990) 2024-11-07 17:27:52 +00:00
Jason Rasmussen
dc2de47204 refactor(server): cron repository (#13988) 2024-11-07 12:15:54 -05:00
Alex
2fe6607aea chore(mobile): post release tasks (#13989) 2024-11-07 10:27:28 -06:00
Jason Rasmussen
64831e2328 refactor: remove smart info table (#13985) 2024-11-07 11:25:10 -05:00
Alex
6053214e75 chore(mobile): update isar build (#13987) 2024-11-07 09:54:19 -06:00
github-actions
599b489f81 chore: version v1.120.1 2024-11-07 15:31:19 +00:00
Jason Rasmussen
0b98c5e3c4 fix(web): time zone dependent test (#13859) 2024-11-07 10:05:55 -05:00
Alex
b238b69689 fix(mobile): video player not playing in full size on Android (#13986) 2024-11-07 15:04:20 +00:00
Jason Rasmussen
decbc741e2 docs: update roadmap (#13984) 2024-11-07 09:24:21 -05:00
Sefa Eyeoglu
564449a555 fix(server): database backups compatible with deduplication (#13965)
gzip --rsyncable has a slightly worse compression ratio, but allows for
efficient deduplication and, as the name implies, faster rsync
operations.

Signed-off-by: Sefa Eyeoglu <contact@scrumplex.net>
2024-11-07 12:36:17 +00:00
Robert Schütz
f4741c70f3 fix(server): allow starting backup through API and fix pg_dumpall args when using database URLs (#13970)
* fix(server): allow starting backup through API

* fix(server): fix pg_dumpall args when using database URLs

The database has to be specified using `-d`, unlike for pg_dump.
2024-11-07 11:57:36 +00:00
yodatak
be2b76be8c docs: add backups to startup folders list (#13967)
Add the check of backups that is done on immich microservice of backups folder presence
2024-11-07 04:18:14 +00:00
Alex
cff0b95f4c chore(mobile): post release task (#13954) 2024-11-06 17:57:45 -05:00
Daniel Dietzler
1321a393c1 docs: 50k stars (#13964) 2024-11-06 22:49:18 +01:00
bo0tzz
a9fc840d65 chore: tidy up backup-and-restore.md (#13961) 2024-11-06 22:18:55 +01:00
Alex
ebf06dc12e fix(server): cannot render email template (#13957) 2024-11-06 22:14:11 +01:00
Thariq Shanavas
8d8becd0f7 docs: Added a note about avoiding redundant database backups (#13958)
* Add note about built-in backups

* npm run format:fix
2024-11-06 15:09:53 -06:00
slamp
3b5f5ec57a docs: improve custom-locations wording to be easier to read (#13849)
* Improve wording to make it easier to read custom-locations.md

It's only grammatical change

* Update docs/docs/guides/custom-locations.md

Co-authored-by: bo0tzz <git@bo0tzz.me>

* Update custom-locations.md

Revert to 'because of' and remove 'hard drive'

---------

Co-authored-by: bo0tzz <git@bo0tzz.me>
2024-11-06 19:49:23 +00:00
Daniel Dietzler
b29e4ec39f fix: docker link (#13956) 2024-11-06 13:45:52 -06:00
95 changed files with 710 additions and 775 deletions

View File

@@ -1,4 +1,4 @@
FROM node:22.11.0-alpine3.20@sha256:f265794478aa0b1a23d85a492c8311ed795bc527c3fe7e43453b3c872dcd71a3 AS core
FROM node:22.11.0-alpine3.20@sha256:dc8ba2f61dd86c44e43eb25a7812ad03c5b1b224a19fc6f77e1eb9e5669f0b82 AS core
WORKDIR /usr/src/open-api/typescript-sdk
COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./

cli/package-lock.json (generated, 10 changed lines)
View File

@@ -1,12 +1,12 @@
{
"name": "@immich/cli",
"version": "2.2.29",
"version": "2.2.31",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@immich/cli",
"version": "2.2.29",
"version": "2.2.31",
"license": "GNU Affero General Public License version 3",
"dependencies": {
"fast-glob": "^3.3.2",
@@ -24,7 +24,7 @@
"@types/cli-progress": "^3.11.0",
"@types/lodash-es": "^4.17.12",
"@types/mock-fs": "^4.13.1",
"@types/node": "^22.8.6",
"@types/node": "^22.9.0",
"@typescript-eslint/eslint-plugin": "^8.0.0",
"@typescript-eslint/parser": "^8.0.0",
"@vitest/coverage-v8": "^2.0.5",
@@ -52,14 +52,14 @@
},
"../open-api/typescript-sdk": {
"name": "@immich/sdk",
"version": "1.120.0",
"version": "1.120.2",
"dev": true,
"license": "GNU Affero General Public License version 3",
"dependencies": {
"@oazapfts/runtime": "^1.0.2"
},
"devDependencies": {
"@types/node": "^22.8.6",
"@types/node": "^22.9.0",
"typescript": "^5.3.3"
}
},

View File

@@ -1,6 +1,6 @@
{
"name": "@immich/cli",
"version": "2.2.29",
"version": "2.2.31",
"description": "Command Line Interface (CLI) for Immich",
"type": "module",
"exports": "./dist/index.js",
@@ -20,7 +20,7 @@
"@types/cli-progress": "^3.11.0",
"@types/lodash-es": "^4.17.12",
"@types/mock-fs": "^4.13.1",
"@types/node": "^22.8.6",
"@types/node": "^22.9.0",
"@typescript-eslint/eslint-plugin": "^8.0.0",
"@typescript-eslint/parser": "^8.0.0",
"@vitest/coverage-v8": "^2.0.5",

View File

@@ -15,8 +15,6 @@ Immich saves [file paths in the database](https://github.com/immich-app/immich/d
Refer to the official [postgres documentation](https://www.postgresql.org/docs/current/backup.html) for details about backing up and restoring a postgres database.
:::
The recommended way to backup and restore the Immich database is to use the `pg_dumpall` command. When restoring, you need to delete the `DB_DATA_LOCATION` folder (if it exists) to reset the database.
:::caution
It is not recommended to directly backup the `DB_DATA_LOCATION` folder. Doing so while the database is running can lead to a corrupted backup that cannot be restored.
:::
@@ -79,53 +77,10 @@ docker compose up -d # Start remainder of Immich apps
</TabItem>
</Tabs>
Note that for the database restore to proceed properly, it requires a completely fresh install (i.e. the Immich server has never run since creating the Docker containers). If the Immich app has run, Postgres conflicts may be encountered upon database restoration (relation already exists, violated foreign key constraints, multiple primary keys, etc.).
Note that for the database restore to proceed properly, it requires a completely fresh install (i.e. the Immich server has never run since creating the Docker containers). If the Immich app has run, Postgres conflicts may be encountered upon database restoration (relation already exists, violated foreign key constraints, multiple primary keys, etc.), in which case you need to delete the `DB_DATA_LOCATION` folder to reset the database.
:::tip
Some deployment methods make it difficult to start the database without also starting the server or microservices. In these cases, you may set the environmental variable `DB_SKIP_MIGRATIONS=true` before starting the services. This will prevent the server from running migrations that interfere with the restore process. Note that both the server and microservices must have this variable set to prevent the migrations from running. Be sure to remove this variable and restart the services after the database is restored.
:::
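A minimal sketch of the restore-time flow described in the tip above, assuming the standard docker-compose setup where the containers read variables from `.env` via `env_file` (the service name `database` and that mechanism are assumptions, not part of this diff):

```bash
# Sketch only: skip migrations while restoring, then remove the override again.
echo "DB_SKIP_MIGRATIONS=true" >> .env
docker compose up -d database          # start only Postgres
# ...restore the database dump here...
sed -i '/^DB_SKIP_MIGRATIONS/d' .env   # drop the override
docker compose up -d                   # start the remaining Immich services
```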
### Automatic Database Backups
The database dumps can also be automated (using [this image](https://github.com/prodrigestivill/docker-postgres-backup-local)) by editing the docker compose file to match the following:
```yaml
services:
...
backup:
container_name: immich_db_dumper
image: prodrigestivill/postgres-backup-local:14
restart: always
env_file:
- .env
environment:
POSTGRES_HOST: database
POSTGRES_CLUSTER: 'TRUE'
POSTGRES_USER: ${DB_USERNAME}
POSTGRES_PASSWORD: ${DB_PASSWORD}
POSTGRES_DB: ${DB_DATABASE_NAME}
SCHEDULE: "@daily"
POSTGRES_EXTRA_OPTS: '--clean --if-exists'
BACKUP_DIR: /db_dumps
volumes:
- ./db_dumps:/db_dumps
depends_on:
- database
```
Then you can restore with the same command but pointed at the latest dump.
```bash title='Automated Restore'
# Be sure to check the username if you changed it from default
gunzip < db_dumps/last/immich-latest.sql.gz \
| sed "s/SELECT pg_catalog.set_config('search_path', '', false);/SELECT pg_catalog.set_config('search_path', 'public, pg_catalog', true);/g" \
| docker exec -i immich_postgres psql --username=postgres
```
:::note
If you see the error `ERROR: type "earth" does not exist`, or you have problems with Reverse Geocoding after a restore, add the following `sed` fragment to your restore command.
Example: `gunzip < "/path/to/backup/dump.sql.gz" | sed "s/SELECT pg_catalog.set_config('search_path', '', false);/SELECT pg_catalog.set_config('search_path', 'public, pg_catalog', true);/g" | docker exec -i immich_postgres psql --username=postgres`
Some deployment methods make it difficult to start the database without also starting the server. In these cases, you may set the environment variable `DB_SKIP_MIGRATIONS=true` before starting the services. This will prevent the server from running migrations that interfere with the restore process. Be sure to remove this variable and restart the services after the database is restored.
:::
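For reference alongside the restore command above, a one-off manual dump with `pg_dumpall` could look like the sketch below; the container name `immich_postgres` and the `postgres` username are assumptions taken from the default compose file rather than from this diff:

```bash
# Sketch: dump the whole cluster, compressed with gzip, into a dated file.
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres \
  | gzip > "immich-db-$(date +%Y%m%d).sql.gz"
```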
## Filesystem

View File

@@ -3,7 +3,7 @@
## Folder checks
:::info
The folders considered for these checks include: `upload/`, `library/`, `thumbs/`, `encoded-video/`, `profile/`
The folders considered for these checks include: `upload/`, `library/`, `thumbs/`, `encoded-video/`, `profile/`, `backups/`
:::
When Immich starts, it performs a series of checks in order to validate that it can read and write files to the volume mounts used by the storage system. If it cannot perform all the required operations, it will fail to start. The checks include:
@@ -40,7 +40,9 @@ The above error messages show that the server has previously (successfully) writ
### Ignoring the checks
The checks are designed to catch common problems that we have seen users have in the past, but if you want to disable them you can set the following environment variable:
:::warning
The checks are designed to catch common problems that we have seen users have in the past, and often indicate there's something wrong that you should solve. If you know what you're doing and you want to disable them you can set the following environment variable:
:::
```
IMMICH_IGNORE_MOUNT_CHECK_ERRORS=true
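# A minimal sketch of applying this override (assumption: the default compose file
# passes .env to the immich-server container via env_file):
#   echo "IMMICH_IGNORE_MOUNT_CHECK_ERRORS=true" >> .env
#   docker compose up -d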

View File

@@ -76,7 +76,7 @@ Setting these in the IDE give a better developer experience, auto-formatting cod
### Dart Code Metrics
The mobile app uses DCM (Dart Code Metrics) for linting and metrics calculation. Please refer to the [Getting Started](https://dcm.dev/docs/getting-started/#installation) page for more information on setting up DCM
The mobile app uses DCM (Dart Code Metrics) for linting and metrics calculation. Please refer to the [Getting Started](https://dcm.dev/docs/) page for more information on setting up DCM
Note: Activating the license is not required.

View File

@@ -1,7 +1,7 @@
# Hardware Transcoding [Experimental]
This feature allows you to use a GPU to accelerate transcoding and reduce CPU load.
Note that hardware transcoding is much less efficient for file sizes.
Note that hardware transcoding produces significantly larger videos than software transcoding with similar settings, typically with lower quality. Using slow presets and preferring more efficient codecs can narrow this gap.
As this is a new feature, it is still experimental and may not work on all systems.
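To see the size gap this note refers to, one way is to compare a hardware encode with a software encode of the same clip. The sketch below is generic ffmpeg usage, not Immich's internal command line, and the encoder, preset, and quality values are assumptions:

```bash
# Rough, generic comparison (NVENC vs. libx265); pick the encoder that matches your GPU.
ffmpeg -y -i input.mp4 -c:v hevc_nvenc -preset p7 -cq 28 -c:a copy hw.mp4
ffmpeg -y -i input.mp4 -c:v libx265 -preset slow -crf 28 -c:a copy sw.mp4
ls -lh hw.mp4 sw.mp4
```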
:::info

View File

@@ -1,15 +1,15 @@
# Files Custom Locations
This guide explains storing generated and raw files with docker's volume mount in different locations.
This guide explains how to store generated and raw files with docker's volume mount in different locations.
:::caution Backup
It is important to remember to update the backup settings after following the guide to back up the new backup paths if using automatic backup tools, especially `profile/`.
:::
In our `.env` file, we will define variables that will help us in the future when we want to move to a more advanced server in the future
In our `.env` file, we will define variables that will help us in the future when we want to move to a more advanced server
```diff title=".env"
# You can find documentation for all the supported env variables [here](/docs/install/environment-variables)
# You can find documentation for all the supported environment variables [here](/docs/install/environment-variables)
# Custom location where your uploaded, thumbnails, and transcoded video files are stored
- UPLOAD_LOCATION=./library
@@ -17,10 +17,11 @@ In our `.env` file, we will define variables that will help us in the future whe
+ THUMB_LOCATION=/custom/path/immich/thumbs
+ ENCODED_VIDEO_LOCATION=/custom/path/immich/encoded-video
+ PROFILE_LOCATION=/custom/path/immich/profile
+ BACKUP_LOCATION=/custom/path/immich/backups
...
```
After defining the locations for these files, we will edit the `docker-compose.yml` file accordingly and add the new variables to the `immich-server` container.
After defining the locations of these files, we will edit the `docker-compose.yml` file accordingly and add the new variables to the `immich-server` container.
```diff title="docker-compose.yml"
services:
@@ -30,6 +31,7 @@ services:
+ - ${THUMB_LOCATION}:/usr/src/app/upload/thumbs
+ - ${ENCODED_VIDEO_LOCATION}:/usr/src/app/upload/encoded-video
+ - ${PROFILE_LOCATION}:/usr/src/app/upload/profile
+ - ${BACKUP_LOCATION}:/usr/src/app/upload/backups
- /etc/localtime:/etc/localtime:ro
```
@@ -41,12 +43,11 @@ docker compose up -d
:::note
Because of the underlying properties of docker bind mounts, it is not recommended to mount the `upload/` and `library/` folders as separate bind mounts if they are on the same device.
For this reason, we mount the HDD or network storage to `/usr/src/app/upload` and then mount the folders we want quick access to below this folder.
For this reason, we mount the HDD or the network storage (NAS) to `/usr/src/app/upload` and then mount the folders we want to access under that folder.
The `thumbs/` folder contains both the small thumbnails shown in the timeline, and the larger previews shown when clicking into an image. These cannot be split up.
The `thumbs/` folder contains both the small thumbnails displayed in the timeline and the larger previews shown when clicking into an image. These cannot be separated.
The storage metrics of the Immich server will track the storage available at `UPLOAD_LOCATION`,
so the administrator should setup some kind of monitoring to make sure the SSD does not run out of space. The `profile/` folder is much smaller, typically less than 1 MB.
The storage metrics of the Immich server will track available storage at `UPLOAD_LOCATION`, so the administrator must set up some sort of monitoring to ensure the storage does not run out of space. The `profile/` folder is much smaller, usually less than 1 MB.
:::
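After recreating the stack with the updated volumes, a quick way to confirm the new bind mounts are visible inside the container is sketched below; the container name `immich_server` is an assumption based on the default compose file:

```bash
# Sketch: inspect the upload tree and check where each mount actually points.
docker exec -t immich_server ls -la /usr/src/app/upload
docker exec -t immich_server df -h /usr/src/app/upload /usr/src/app/upload/thumbs /usr/src/app/upload/backups
```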
Thanks to [Jrasm91](https://github.com/immich-app/immich/discussions/2110#discussioncomment-5477767) for writing the guide.

View File

@@ -6,6 +6,15 @@ This script assumes you have a second hard drive connected to your server for on
The database is saved to your Immich upload folder in the `database-backup` subdirectory. The database is then backed up and versioned with your assets by Borg. This ensures that the database backup is in sync with your assets in every snapshot.
:::info
This script makes backups of your database along with your photo/video library. This is redundant with the [automatic database backup tool](https://immich.app/docs/administration/backup-and-restore#automatic-database-backups) built into Immich. Using this script to backup your database has two advantages over the built-in backup tool:
- This script uses storage more efficiently by versioning your backups instead of making multiple copies.
- The database backups are performed at the same time as the library backup, ensuring that the backups of your database and the library are always in sync.
If you are using this script, it is therefore safe to turn off the built-in automatic database backups from your admin panel to save storage space.
:::
### Prerequisites
- Borg needs to be installed on your server as well as the remote machine. You can find instructions to install Borg [here](https://borgbackup.readthedocs.io/en/latest/installation.html).

View File

@@ -35,6 +35,13 @@ The default configuration looks like this:
"accel": "disabled",
"accelDecode": false
},
"backup": {
"database": {
"enabled": true,
"cronExpression": "0 02 * * *",
"keepLastAmount": 14
}
},
"job": {
"backgroundTask": {
"concurrency": 5

View File

@@ -8,7 +8,7 @@ Hardware and software requirements for Immich:
## Software
- [Docker](https://docs.docker.com/get-docker/)
- [Docker](https://docs.docker.com/engine/install/)
- [Docker Compose](https://docs.docker.com/compose/install/)
:::note

View File

@@ -49,7 +49,7 @@ export function Timeline({ items }: Props): JSX.Element {
<div className="flex flex-col flex-grow justify-between gap-2">
<div className="flex gap-2 items-center">
{cardIcon === 'immich' ? (
<img src="img/immich-logo.svg" height="30" className="rounded-none" />
<img src="/img/immich-logo.svg" height="30" className="rounded-none" />
) : (
<Icon path={cardIcon} size={1} color={item.iconColor} />
)}

View File

@@ -74,12 +74,14 @@ import {
mdiFaceRecognition,
mdiVideo,
mdiWeb,
mdiDatabaseOutline,
} from '@mdi/js';
import Layout from '@theme/Layout';
import React from 'react';
import { Item, Timeline } from '../components/timeline';
const releases = {
'v1.120.0': new Date(2024, 10, 6),
'v1.114.0': new Date(2024, 8, 6),
'v1.113.0': new Date(2024, 7, 30),
'v1.112.0': new Date(2024, 7, 14),
@@ -151,6 +153,9 @@ const weirdTags = {
'v1.2.0': 'v0.2-dev ',
};
const title = 'Roadmap';
const description = 'A list of future plans and goals, as well as past achievements and milestones.';
const withLanguage = (date: Date) => (language: string) => date.toLocaleDateString(language);
type Base = { icon: string; iconColor?: React.CSSProperties['color']; title: string; description: string };
@@ -175,6 +180,38 @@ const withRelease = ({
};
const roadmap: Item[] = [
{
done: false,
icon: mdiFlash,
iconColor: 'gold',
title: 'Workflows',
description: 'Automate tasks with workflows',
getDateLabel: () => 'Planned for 2025',
},
{
done: false,
icon: mdiTableKey,
iconColor: 'gray',
title: 'Fine grained access controls',
description: 'Granular access controls for users and api keys',
getDateLabel: () => 'Planned for 2025',
},
{
done: false,
icon: mdiImageEdit,
iconColor: 'rebeccapurple',
title: 'Basic editor',
description: 'Basic photo editing capabilities',
getDateLabel: () => 'Planned for 2025',
},
{
done: false,
icon: mdiRocketLaunch,
iconColor: 'indianred',
title: 'Stable release',
description: 'Immich goes stable',
getDateLabel: () => 'Planned for early 2025',
},
{
done: false,
icon: mdiLockOutline,
@@ -183,14 +220,6 @@ const roadmap: Item[] = [
description: 'Private assets with extra protections',
getDateLabel: () => 'Planned for 2024',
},
{
done: false,
icon: mdiRocketLaunch,
iconColor: 'indianred',
title: 'Stable release',
description: 'Immich goes stable',
getDateLabel: () => 'Planned for 2024',
},
{
done: false,
icon: mdiCloudUploadOutline,
@@ -199,30 +228,6 @@ const roadmap: Item[] = [
description: 'Rework background backups to be more reliable',
getDateLabel: () => 'Planned for 2024',
},
{
done: false,
icon: mdiImageEdit,
iconColor: 'rebeccapurple',
title: 'Basic editor',
description: 'Basic photo editing capabilities',
getDateLabel: () => 'Planned for 2024',
},
{
done: false,
icon: mdiFlash,
iconColor: 'gold',
title: 'Workflows',
description: 'Automate tasks with workflows',
getDateLabel: () => 'Planned for 2024',
},
{
done: false,
icon: mdiTableKey,
iconColor: 'gray',
title: 'Fine grained access controls',
description: 'Granular access controls for users and api keys',
getDateLabel: () => 'Planned for 2024',
},
{
done: false,
icon: mdiCameraBurst,
@@ -234,6 +239,20 @@ const roadmap: Item[] = [
];
const milestones: Item[] = [
withRelease({
icon: mdiDatabaseOutline,
iconColor: 'brown',
title: 'Automatic database backups',
description: 'Database backups are now integrated into the Immich server',
release: 'v1.120.0',
}),
{
icon: mdiStar,
iconColor: 'gold',
title: '50,000 Stars',
description: 'Reached 50K Stars on GitHub!',
getDateLabel: withLanguage(new Date(2024, 10, 1)),
},
withRelease({
icon: mdiFaceRecognition,
title: 'Metadata Face Import',
@@ -853,14 +872,12 @@ const milestones: Item[] = [
export default function MilestonePage(): JSX.Element {
return (
<Layout title="Milestones" description="History of Immich">
<Layout title={title} description={description}>
<section className="my-8">
<h1 className="md:text-6xl text-center mb-10 text-immich-primary dark:text-immich-dark-primary px-2">
Roadmap
{title}
</h1>
<p className="text-center text-xl px-2">
A list of future plans and goals, as well as past achievements and milestones.
</p>
<p className="text-center text-xl px-2">{description}</p>
<div className="flex justify-around mt-8 w-full max-w-full">
<Timeline items={[...roadmap, ...milestones]} />
</div>

View File

@@ -1,4 +1,12 @@
[
{
"label": "v1.120.2",
"url": "https://v1.120.2.archive.immich.app"
},
{
"label": "v1.120.1",
"url": "https://v1.120.1.archive.immich.app"
},
{
"label": "v1.120.0",
"url": "https://v1.120.0.archive.immich.app"

e2e/package-lock.json (generated, 14 changed lines)
View File

@@ -1,12 +1,12 @@
{
"name": "immich-e2e",
"version": "1.120.0",
"version": "1.120.2",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "immich-e2e",
"version": "1.120.0",
"version": "1.120.2",
"license": "GNU Affero General Public License version 3",
"devDependencies": {
"@eslint/eslintrc": "^3.1.0",
@@ -15,7 +15,7 @@
"@immich/sdk": "file:../open-api/typescript-sdk",
"@playwright/test": "^1.44.1",
"@types/luxon": "^3.4.2",
"@types/node": "^22.8.6",
"@types/node": "^22.9.0",
"@types/oidc-provider": "^8.5.1",
"@types/pg": "^8.11.0",
"@types/pngjs": "^6.0.4",
@@ -45,7 +45,7 @@
},
"../cli": {
"name": "@immich/cli",
"version": "2.2.29",
"version": "2.2.31",
"dev": true,
"license": "GNU Affero General Public License version 3",
"dependencies": {
@@ -64,7 +64,7 @@
"@types/cli-progress": "^3.11.0",
"@types/lodash-es": "^4.17.12",
"@types/mock-fs": "^4.13.1",
"@types/node": "^22.8.6",
"@types/node": "^22.9.0",
"@typescript-eslint/eslint-plugin": "^8.0.0",
"@typescript-eslint/parser": "^8.0.0",
"@vitest/coverage-v8": "^2.0.5",
@@ -92,14 +92,14 @@
},
"../open-api/typescript-sdk": {
"name": "@immich/sdk",
"version": "1.120.0",
"version": "1.120.2",
"dev": true,
"license": "GNU Affero General Public License version 3",
"dependencies": {
"@oazapfts/runtime": "^1.0.2"
},
"devDependencies": {
"@types/node": "^22.8.6",
"@types/node": "^22.9.0",
"typescript": "^5.3.3"
}
},

View File

@@ -1,6 +1,6 @@
{
"name": "immich-e2e",
"version": "1.120.0",
"version": "1.120.2",
"description": "",
"main": "index.js",
"type": "module",
@@ -25,7 +25,7 @@
"@immich/sdk": "file:../open-api/typescript-sdk",
"@playwright/test": "^1.44.1",
"@types/luxon": "^3.4.2",
"@types/node": "^22.8.6",
"@types/node": "^22.9.0",
"@types/oidc-provider": "^8.5.1",
"@types/pg": "^8.11.0",
"@types/pngjs": "^6.0.4",

View File

@@ -473,10 +473,7 @@ describe('/search', () => {
.get('/search/explore')
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual([
{ fieldName: 'exifInfo.city', items: [] },
{ fieldName: 'smartInfo.tags', items: [] },
]);
expect(body).toEqual([{ fieldName: 'exifInfo.city', items: [] }]);
});
});

View File

@@ -1283,7 +1283,7 @@
"variables": "Variables",
"version": "Version",
"version_announcement_closing": "Your friend, Alex",
"version_announcement_message": "Hi friend, there is a new version of the application please take your time to visit the <link>release notes</link> and ensure your <code>docker-compose.yml</code>, and <code>.env</code> setup is up-to-date to prevent any misconfigurations, especially if you use WatchTower or any mechanism that handles updating your application automatically.",
"version_announcement_message": "Hi there! A new version of Immich is available. Please take some time to read the <link>release notes</link> to ensure your setup is up-to-date to prevent any misconfigurations, especially if you use WatchTower or any mechanism that handles updating your Immich instance automatically.",
"version_history": "Version History",
"version_history_item": "Installed {version} on {date}",
"video": "Video",

i18n/fil.json (new file, 1 line)
View File

@@ -0,0 +1 @@
{}

i18n/nn.json (new file, 1 line)
View File

@@ -0,0 +1 @@
{}

View File

@@ -747,14 +747,14 @@ files = [
test = ["pytest (>=6)"]
[[package]]
name = "fastapi-slim"
name = "fastapi"
version = "0.115.4"
description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
optional = false
python-versions = ">=3.8"
files = [
{file = "fastapi_slim-0.115.4-py3-none-any.whl", hash = "sha256:8947515618c21665590a1673a0bfe4c721db4267999c149d5301c3c0f7b3d9ce"},
{file = "fastapi_slim-0.115.4.tar.gz", hash = "sha256:6d37987e4d1f6adefb8c7119c9b804e59c9b3f1a488be5425994d52308e2f958"},
{file = "fastapi-0.115.4-py3-none-any.whl", hash = "sha256:0b504a063ffb3cf96a5e27dc1bc32c80ca743a2528574f9cdc77daa2d31b4742"},
{file = "fastapi-0.115.4.tar.gz", hash = "sha256:db653475586b091cb8b2fec2ac54a680ac6a158e07406e1abae31679e8826349"},
]
[package.dependencies]
@@ -3778,4 +3778,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.10,<4.0"
content-hash = "f95dddfd343a4b2f4d19ffee71ce6b2f5137e5514a60765424164259c4dc1044"
content-hash = "b690d5fbd141da3947f4f1dc029aba1b95e7faafd723166f2c4bdc47a66c095e"

View File

@@ -1,6 +1,6 @@
[tool.poetry]
name = "machine-learning"
version = "1.120.0"
version = "1.120.2"
description = ""
authors = ["Hau Tran <alex.tran1502@gmail.com>"]
readme = "README.md"
@@ -11,7 +11,7 @@ python = ">=3.10,<4.0"
insightface = ">=0.7.3,<1.0"
opencv-python-headless = ">=4.7.0.72,<5.0"
pillow = ">=9.5.0,<11.0"
fastapi-slim = ">=0.95.2,<1.0"
fastapi = ">=0.95.2,<1.0"
uvicorn = {extras = ["standard"], version = ">=0.22.0,<1.0"}
pydantic = "^2.0.0"
pydantic-settings = "^2.5.2"

View File

@@ -35,8 +35,8 @@ platform :android do
task: 'bundle',
build_type: 'Release',
properties: {
"android.injected.version.code" => 165,
"android.injected.version.name" => "1.120.0",
"android.injected.version.code" => 167,
"android.injected.version.name" => "1.120.2",
}
)
upload_to_play_store(skip_upload_apk: true, skip_upload_images: true, skip_upload_screenshots: true, aab: '../build/app/outputs/bundle/release/app-release.aab')

View File

@@ -207,7 +207,7 @@ SPEC CHECKSUMS:
geolocator_apple: 6cbaf322953988e009e5ecb481f07efece75c450
image_picker_ios: c560581cceedb403a6ff17f2f816d7fea1421fc1
integration_test: 252f60fa39af5e17c3aa9899d35d908a0721b573
isar_flutter_libs: b69f437aeab9c521821c3f376198c4371fa21073
isar_flutter_libs: fdf730ca925d05687f36d7f1d355e482529ed097
MapLibre: 620fc933c1d6029b33738c905c1490d024e5d4ef
maplibre_gl: a2efec727dd340e4c65e26d2b03b584f14881fd9
package_info_plus: 58f0028419748fad15bf008b270aaa8e54380b1c

View File

@@ -401,7 +401,7 @@
CODE_SIGN_ENTITLEMENTS = Runner/RunnerProfile.entitlements;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 181;
CURRENT_PROJECT_VERSION = 183;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_BITCODE = NO;
INFOPLIST_FILE = Runner/Info.plist;
@@ -543,7 +543,7 @@
CLANG_ENABLE_MODULES = YES;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 181;
CURRENT_PROJECT_VERSION = 183;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_BITCODE = NO;
INFOPLIST_FILE = Runner/Info.plist;
@@ -571,7 +571,7 @@
CLANG_ENABLE_MODULES = YES;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 181;
CURRENT_PROJECT_VERSION = 183;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_BITCODE = NO;
INFOPLIST_FILE = Runner/Info.plist;

View File

@@ -58,11 +58,11 @@
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>1.119.0</string>
<string>1.120.1</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>181</string>
<string>183</string>
<key>FLTEnableImpeller</key>
<true/>
<key>ITSAppUsesNonExemptEncryption</key>

View File

@@ -19,7 +19,7 @@ platform :ios do
desc "iOS Release"
lane :release do
increment_version_number(
version_number: "1.120.0"
version_number: "1.120.2"
)
increment_build_number(
build_number: latest_testflight_build_number + 1,

View File

@@ -11,6 +11,7 @@ import 'package:flutter_displaymode/flutter_displaymode.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/utils/download.dart';
import 'package:intl/date_symbol_data_local.dart';
import 'package:timezone/data/latest.dart';
import 'package:immich_mobile/constants/locales.dart';
import 'package:immich_mobile/services/background.service.dart';
@@ -56,6 +57,7 @@ void main() async {
Future<void> initApp() async {
await EasyLocalization.ensureInitialized();
await initializeDateFormatting();
if (kReleaseMode && Platform.isAndroid) {
try {

View File

@@ -3,7 +3,7 @@ Immich API
This Dart package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project:
- API version: 1.120.0
- API version: 1.120.2
- Generator version: 7.8.0
- Build package: org.openapitools.codegen.languages.DartClientCodegen
@@ -408,7 +408,6 @@ Class | Method | HTTP request | Description
- [SharedLinkResponseDto](doc//SharedLinkResponseDto.md)
- [SharedLinkType](doc//SharedLinkType.md)
- [SignUpDto](doc//SignUpDto.md)
- [SmartInfoResponseDto](doc//SmartInfoResponseDto.md)
- [SmartSearchDto](doc//SmartSearchDto.md)
- [SourceType](doc//SourceType.md)
- [StackCreateDto](doc//StackCreateDto.md)

View File

@@ -222,7 +222,6 @@ part 'model/shared_link_edit_dto.dart';
part 'model/shared_link_response_dto.dart';
part 'model/shared_link_type.dart';
part 'model/sign_up_dto.dart';
part 'model/smart_info_response_dto.dart';
part 'model/smart_search_dto.dart';
part 'model/source_type.dart';
part 'model/stack_create_dto.dart';

View File

@@ -498,8 +498,6 @@ class ApiClient {
return SharedLinkTypeTypeTransformer().decode(value);
case 'SignUpDto':
return SignUpDto.fromJson(value);
case 'SmartInfoResponseDto':
return SmartInfoResponseDto.fromJson(value);
case 'SmartSearchDto':
return SmartSearchDto.fromJson(value);
case 'SourceType':

View File

@@ -37,7 +37,6 @@ class AssetResponseDto {
required this.ownerId,
this.people = const [],
this.resized,
this.smartInfo,
this.stack,
this.tags = const [],
required this.thumbhash,
@@ -121,14 +120,6 @@ class AssetResponseDto {
///
bool? resized;
///
/// Please note: This property should have been non-nullable! Since the specification file
/// does not include a default value (using the "default:" property), however, the generated
/// source code must fall back to having a nullable type.
/// Consider adding a "default:" property in the specification file to hide this note.
///
SmartInfoResponseDto? smartInfo;
AssetStackResponseDto? stack;
List<TagResponseDto> tags;
@@ -167,7 +158,6 @@ class AssetResponseDto {
other.ownerId == ownerId &&
_deepEquality.equals(other.people, people) &&
other.resized == resized &&
other.smartInfo == smartInfo &&
other.stack == stack &&
_deepEquality.equals(other.tags, tags) &&
other.thumbhash == thumbhash &&
@@ -202,7 +192,6 @@ class AssetResponseDto {
(ownerId.hashCode) +
(people.hashCode) +
(resized == null ? 0 : resized!.hashCode) +
(smartInfo == null ? 0 : smartInfo!.hashCode) +
(stack == null ? 0 : stack!.hashCode) +
(tags.hashCode) +
(thumbhash == null ? 0 : thumbhash!.hashCode) +
@@ -211,7 +200,7 @@ class AssetResponseDto {
(updatedAt.hashCode);
@override
String toString() => 'AssetResponseDto[checksum=$checksum, deviceAssetId=$deviceAssetId, deviceId=$deviceId, duplicateId=$duplicateId, duration=$duration, exifInfo=$exifInfo, fileCreatedAt=$fileCreatedAt, fileModifiedAt=$fileModifiedAt, hasMetadata=$hasMetadata, id=$id, isArchived=$isArchived, isFavorite=$isFavorite, isOffline=$isOffline, isTrashed=$isTrashed, libraryId=$libraryId, livePhotoVideoId=$livePhotoVideoId, localDateTime=$localDateTime, originalFileName=$originalFileName, originalMimeType=$originalMimeType, originalPath=$originalPath, owner=$owner, ownerId=$ownerId, people=$people, resized=$resized, smartInfo=$smartInfo, stack=$stack, tags=$tags, thumbhash=$thumbhash, type=$type, unassignedFaces=$unassignedFaces, updatedAt=$updatedAt]';
String toString() => 'AssetResponseDto[checksum=$checksum, deviceAssetId=$deviceAssetId, deviceId=$deviceId, duplicateId=$duplicateId, duration=$duration, exifInfo=$exifInfo, fileCreatedAt=$fileCreatedAt, fileModifiedAt=$fileModifiedAt, hasMetadata=$hasMetadata, id=$id, isArchived=$isArchived, isFavorite=$isFavorite, isOffline=$isOffline, isTrashed=$isTrashed, libraryId=$libraryId, livePhotoVideoId=$livePhotoVideoId, localDateTime=$localDateTime, originalFileName=$originalFileName, originalMimeType=$originalMimeType, originalPath=$originalPath, owner=$owner, ownerId=$ownerId, people=$people, resized=$resized, stack=$stack, tags=$tags, thumbhash=$thumbhash, type=$type, unassignedFaces=$unassignedFaces, updatedAt=$updatedAt]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
@@ -267,11 +256,6 @@ class AssetResponseDto {
} else {
// json[r'resized'] = null;
}
if (this.smartInfo != null) {
json[r'smartInfo'] = this.smartInfo;
} else {
// json[r'smartInfo'] = null;
}
if (this.stack != null) {
json[r'stack'] = this.stack;
} else {
@@ -322,7 +306,6 @@ class AssetResponseDto {
ownerId: mapValueOfType<String>(json, r'ownerId')!,
people: PersonWithFacesResponseDto.listFromJson(json[r'people']),
resized: mapValueOfType<bool>(json, r'resized'),
smartInfo: SmartInfoResponseDto.fromJson(json[r'smartInfo']),
stack: AssetStackResponseDto.fromJson(json[r'stack']),
tags: TagResponseDto.listFromJson(json[r'tags']),
thumbhash: mapValueOfType<String>(json, r'thumbhash'),

View File

@@ -1,117 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class SmartInfoResponseDto {
/// Returns a new [SmartInfoResponseDto] instance.
SmartInfoResponseDto({
this.objects = const [],
this.tags = const [],
});
List<String>? objects;
List<String>? tags;
@override
bool operator ==(Object other) => identical(this, other) || other is SmartInfoResponseDto &&
_deepEquality.equals(other.objects, objects) &&
_deepEquality.equals(other.tags, tags);
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(objects == null ? 0 : objects!.hashCode) +
(tags == null ? 0 : tags!.hashCode);
@override
String toString() => 'SmartInfoResponseDto[objects=$objects, tags=$tags]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
if (this.objects != null) {
json[r'objects'] = this.objects;
} else {
// json[r'objects'] = null;
}
if (this.tags != null) {
json[r'tags'] = this.tags;
} else {
// json[r'tags'] = null;
}
return json;
}
/// Returns a new [SmartInfoResponseDto] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static SmartInfoResponseDto? fromJson(dynamic value) {
upgradeDto(value, "SmartInfoResponseDto");
if (value is Map) {
final json = value.cast<String, dynamic>();
return SmartInfoResponseDto(
objects: json[r'objects'] is Iterable
? (json[r'objects'] as Iterable).cast<String>().toList(growable: false)
: const [],
tags: json[r'tags'] is Iterable
? (json[r'tags'] as Iterable).cast<String>().toList(growable: false)
: const [],
);
}
return null;
}
static List<SmartInfoResponseDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <SmartInfoResponseDto>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = SmartInfoResponseDto.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, SmartInfoResponseDto> mapFromJson(dynamic json) {
final map = <String, SmartInfoResponseDto>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = SmartInfoResponseDto.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of SmartInfoResponseDto-objects as value to a dart map
static Map<String, List<SmartInfoResponseDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<SmartInfoResponseDto>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = SmartInfoResponseDto.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
};
}

View File

@@ -1717,13 +1717,13 @@ packages:
source: hosted
version: "2.9.2"
video_player_android:
dependency: transitive
dependency: "direct main"
description:
name: video_player_android
sha256: "391e092ba4abe2f93b3e625bd6b6a6ec7d7414279462c1c0ee42b5ab8d0a0898"
sha256: "4de50df9ee786f5891d3281e1e633d7b142ef1acf47392592eb91cba5d355849"
url: "https://pub.dev"
source: hosted
version: "2.7.16"
version: "2.6.0"
video_player_avfoundation:
dependency: transitive
description:

View File

@@ -2,7 +2,7 @@ name: immich_mobile
description: Immich - selfhosted backup media file on mobile phone
publish_to: 'none'
version: 1.120.0+165
version: 1.120.2+167
environment:
sdk: '>=3.3.0 <4.0.0'
@@ -26,6 +26,7 @@ dependencies:
auto_route: ^9.2.0
fluttertoast: ^8.2.4
video_player: ^2.9.2
video_player_android: 2.6.0
chewie: ^1.7.4
socket_io_client: ^2.0.3+1
maplibre_gl: 0.19.0+2

View File

@@ -8,11 +8,11 @@ bash tool/build_android.sh x64
bash tool/build_android.sh armv7
bash tool/build_android.sh arm64
mv libisar_android_arm64.so libisar.so
mv libisar.so ../.pub-cache/hosted/pub.dev/isar_flutter_libs-*/android/src/main/jniLibs/arm64-v8a/
mv libisar.so ../.pub-cache/hosted/pub.isar-community.dev/isar_flutter_libs-*/android/src/main/jniLibs/arm64-v8a/
mv libisar_android_armv7.so libisar.so
mv libisar.so ../.pub-cache/hosted/pub.dev/isar_flutter_libs-*/android/src/main/jniLibs/armeabi-v7a/
mv libisar.so ../.pub-cache/hosted/pub.isar-community.dev/isar_flutter_libs-*/android/src/main/jniLibs/armeabi-v7a/
mv libisar_android_x64.so libisar.so
mv libisar.so ../.pub-cache/hosted/pub.dev/isar_flutter_libs-*/android/src/main/jniLibs/x86_64/
mv libisar.so ../.pub-cache/hosted/pub.isar-community.dev/isar_flutter_libs-*/android/src/main/jniLibs/x86_64/
mv libisar_android_x86.so libisar.so
mv libisar.so ../.pub-cache/hosted/pub.dev/isar_flutter_libs-*/android/src/main/jniLibs/x86/
mv libisar.so ../.pub-cache/hosted/pub.isar-community.dev/isar_flutter_libs-*/android/src/main/jniLibs/x86/
)

View File

@@ -7385,7 +7385,7 @@
"info": {
"title": "Immich",
"description": "Immich API",
"version": "1.120.0",
"version": "1.120.2",
"contact": {}
},
"tags": [],
@@ -8402,9 +8402,6 @@
"description": "This property was deprecated in v1.113.0",
"type": "boolean"
},
"smartInfo": {
"$ref": "#/components/schemas/SmartInfoResponseDto"
},
"stack": {
"allOf": [
{
@@ -11284,25 +11281,6 @@
],
"type": "object"
},
"SmartInfoResponseDto": {
"properties": {
"objects": {
"items": {
"type": "string"
},
"nullable": true,
"type": "array"
},
"tags": {
"items": {
"type": "string"
},
"nullable": true,
"type": "array"
}
},
"type": "object"
},
"SmartSearchDto": {
"properties": {
"city": {

View File

@@ -1,18 +1,18 @@
{
"name": "@immich/sdk",
"version": "1.120.0",
"version": "1.120.2",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@immich/sdk",
"version": "1.120.0",
"version": "1.120.2",
"license": "GNU Affero General Public License version 3",
"dependencies": {
"@oazapfts/runtime": "^1.0.2"
},
"devDependencies": {
"@types/node": "^22.8.6",
"@types/node": "^22.9.0",
"typescript": "^5.3.3"
}
},

View File

@@ -1,6 +1,6 @@
{
"name": "@immich/sdk",
"version": "1.120.0",
"version": "1.120.2",
"description": "Auto-generated TypeScript SDK for the Immich API",
"type": "module",
"main": "./build/index.js",
@@ -19,7 +19,7 @@
"@oazapfts/runtime": "^1.0.2"
},
"devDependencies": {
"@types/node": "^22.8.6",
"@types/node": "^22.9.0",
"typescript": "^5.3.3"
},
"repository": {

View File

@@ -1,6 +1,6 @@
/**
* Immich
* 1.120.0
* 1.120.2
* DO NOT MODIFY - This file has been generated using oazapfts.
* See https://www.npmjs.com/package/oazapfts
*/
@@ -221,10 +221,6 @@ export type PersonWithFacesResponseDto = {
/** This property was added in v1.107.0 */
updatedAt?: string;
};
export type SmartInfoResponseDto = {
objects?: string[] | null;
tags?: string[] | null;
};
export type AssetStackResponseDto = {
assetCount: number;
id: string;
@@ -267,7 +263,6 @@ export type AssetResponseDto = {
people?: PersonWithFacesResponseDto[];
/** This property was deprecated in v1.113.0 */
resized?: boolean;
smartInfo?: SmartInfoResponseDto;
stack?: (AssetStackResponseDto) | null;
tags?: TagResponseDto[];
thumbhash: string | null;

View File

@@ -1,5 +1,5 @@
# dev build
FROM ghcr.io/immich-app/base-server-dev:20241105@sha256:99eec44db9e281e30eb9c50161cfb8e810f06e4338896b900fb5cafd09e82cd5 AS dev
FROM ghcr.io/immich-app/base-server-dev:20241112@sha256:889647c747b3f999b05e387eff414bcec5e42477958b267930e58ac58dadcfc7 AS dev
RUN apt-get install --no-install-recommends -yqq tini
WORKDIR /usr/src/app
@@ -25,7 +25,7 @@ COPY --from=dev /usr/src/app/node_modules/@img ./node_modules/@img
COPY --from=dev /usr/src/app/node_modules/exiftool-vendored.pl ./node_modules/exiftool-vendored.pl
# web build
FROM node:22.11.0-alpine3.20@sha256:f265794478aa0b1a23d85a492c8311ed795bc527c3fe7e43453b3c872dcd71a3 AS web
FROM node:22.11.0-alpine3.20@sha256:dc8ba2f61dd86c44e43eb25a7812ad03c5b1b224a19fc6f77e1eb9e5669f0b82 AS web
WORKDIR /usr/src/open-api/typescript-sdk
COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./
@@ -42,7 +42,7 @@ RUN npm run build
# prod build
FROM ghcr.io/immich-app/base-server-prod:20241105@sha256:dbe566f5c53f36640da910ca86a7c5575a26e9b9f6bc8d90ae0a53b8bc3a1f73
FROM ghcr.io/immich-app/base-server-prod:20241112@sha256:26a209563689f52b9a63feeedde9a16a8e0e558483cd3feb5c936423e55c7eea
WORKDIR /usr/src/app
ENV NODE_ENV=production \

server/package-lock.json (generated, 171 changed lines)
View File

@@ -1,12 +1,12 @@
{
"name": "immich",
"version": "1.120.0",
"version": "1.120.2",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "immich",
"version": "1.120.0",
"version": "1.120.2",
"license": "GNU Affero General Public License version 3",
"dependencies": {
"@nestjs/bullmq": "^10.0.1",
@@ -23,7 +23,7 @@
"@opentelemetry/context-async-hooks": "^1.24.0",
"@opentelemetry/exporter-prometheus": "^0.54.0",
"@opentelemetry/sdk-node": "^0.54.0",
"@react-email/components": "^0.0.26",
"@react-email/components": "^0.0.25",
"@socket.io/redis-adapter": "^8.3.0",
"archiver": "^7.0.0",
"async-lock": "^1.4.0",
@@ -83,7 +83,7 @@
"@types/lodash": "^4.14.197",
"@types/mock-fs": "^4.13.1",
"@types/multer": "^1.4.7",
"@types/node": "^22.8.6",
"@types/node": "^22.9.0",
"@types/nodemailer": "^6.4.14",
"@types/picomatch": "^3.0.0",
"@types/pngjs": "^6.0.5",
@@ -2643,7 +2643,8 @@
"node_modules/@one-ini/wasm": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/@one-ini/wasm/-/wasm-0.1.1.tgz",
"integrity": "sha512-XuySG1E38YScSJoMlqovLru4KTUNSjgVTIjyh7qMX6aNN5HY5Ct5LhRJdxO79JtTzKfzV/bnWpz+zquYrISsvw=="
"integrity": "sha512-XuySG1E38YScSJoMlqovLru4KTUNSjgVTIjyh7qMX6aNN5HY5Ct5LhRJdxO79JtTzKfzV/bnWpz+zquYrISsvw==",
"license": "MIT"
},
"node_modules/@opentelemetry/api": {
"version": "1.8.0",
@@ -3966,9 +3967,10 @@
}
},
"node_modules/@react-email/button": {
"version": "0.0.18",
"resolved": "https://registry.npmjs.org/@react-email/button/-/button-0.0.18.tgz",
"integrity": "sha512-uNUnpeDzz1o9HAky47JSTsUN/Ih0A3Az165AAOgAy8XOVzQJPrltUBRzHkScSVJTwRqKLASkie1yZbtNGIcRdA==",
"version": "0.0.17",
"resolved": "https://registry.npmjs.org/@react-email/button/-/button-0.0.17.tgz",
"integrity": "sha512-ioHdsk+BpGS/PqjU6JS7tUrVy9yvbUx92Z+Cem2+MbYp55oEwQ9VHf7u4f5NoM0gdhfKSehBwRdYlHt/frEMcg==",
"license": "MIT",
"engines": {
"node": ">=18.0.0"
},
@@ -4013,12 +4015,13 @@
}
},
"node_modules/@react-email/components": {
"version": "0.0.26",
"resolved": "https://registry.npmjs.org/@react-email/components/-/components-0.0.26.tgz",
"integrity": "sha512-FqxCGnQiI4zztEBAXPfjovIQ9e1l7NJNMgE8hSaH7slWySFn/PpPRQFYpxyCFNr9DqPVHtKYtpo8xvUYx2LdTg==",
"version": "0.0.25",
"resolved": "https://registry.npmjs.org/@react-email/components/-/components-0.0.25.tgz",
"integrity": "sha512-lnfVVrThEcET5NPoeaXvrz9UxtWpGRcut2a07dLbyKgNbP7vj/cXTI5TuHtanCvhCddFpMDnElNRghDOfPzwUg==",
"license": "MIT",
"dependencies": {
"@react-email/body": "0.0.10",
"@react-email/button": "0.0.18",
"@react-email/button": "0.0.17",
"@react-email/code-block": "0.0.9",
"@react-email/code-inline": "0.0.4",
"@react-email/column": "0.0.12",
@@ -4029,13 +4032,13 @@
"@react-email/hr": "0.0.10",
"@react-email/html": "0.0.10",
"@react-email/img": "0.0.10",
"@react-email/link": "0.0.11",
"@react-email/link": "0.0.10",
"@react-email/markdown": "0.0.12",
"@react-email/preview": "0.0.11",
"@react-email/render": "1.0.2",
"@react-email/row": "0.0.11",
"@react-email/section": "0.0.15",
"@react-email/tailwind": "1.0.0",
"@react-email/render": "1.0.1",
"@react-email/row": "0.0.10",
"@react-email/section": "0.0.14",
"@react-email/tailwind": "0.1.0",
"@react-email/text": "0.0.10"
},
"engines": {
@@ -4120,9 +4123,10 @@
}
},
"node_modules/@react-email/link": {
"version": "0.0.11",
"resolved": "https://registry.npmjs.org/@react-email/link/-/link-0.0.11.tgz",
"integrity": "sha512-o1/BgPn2Fi+bN4Nh+P64t4tulaOyPhkBNSpNmiYL1Ar+ilw8q0BmUAqM+lvHy8Qr/4K7BjkgFoc4GoYkoEjOig==",
"version": "0.0.10",
"resolved": "https://registry.npmjs.org/@react-email/link/-/link-0.0.10.tgz",
"integrity": "sha512-tva3wvAWSR10lMJa9fVA09yRn7pbEki0ZZpHE6GD1jKbFhmzt38VgLO9B797/prqoDZdAr4rVK7LJFcdPx3GwA==",
"license": "MIT",
"engines": {
"node": ">=18.0.0"
},
@@ -4156,9 +4160,10 @@
}
},
"node_modules/@react-email/render": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/@react-email/render/-/render-1.0.2.tgz",
"integrity": "sha512-q82eBd39TepzA/xjlm8szqJlrQk/gh7mgtxXMGlJ4dcdx89go1m9YBDpZY98SFy+2r2KAOd5A1mxvUbsPwoATg==",
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@react-email/render/-/render-1.0.1.tgz",
"integrity": "sha512-W3gTrcmLOVYnG80QuUp22ReIT/xfLsVJ+n7ghSlG2BITB8evNABn1AO2rGQoXuK84zKtDAlxCdm3hRyIpZdGSA==",
"license": "MIT",
"dependencies": {
"html-to-text": "9.0.5",
"js-beautify": "^1.14.11",
@@ -4173,9 +4178,10 @@
}
},
"node_modules/@react-email/row": {
"version": "0.0.11",
"resolved": "https://registry.npmjs.org/@react-email/row/-/row-0.0.11.tgz",
"integrity": "sha512-ra09h7BMoGa14ds3vh7KVuj1N3astTstEC1YbMdCiHcx/nxylglNaT7qJXU74ZTzyHiGabyiNuyabTS+HLoMCA==",
"version": "0.0.10",
"resolved": "https://registry.npmjs.org/@react-email/row/-/row-0.0.10.tgz",
"integrity": "sha512-jPyEhG3gsLX+Eb9U+A30fh0gK6hXJwF4ghJ+ZtFQtlKAKqHX+eCpWlqB3Xschd/ARJLod8WAswg0FB+JD9d0/A==",
"license": "MIT",
"engines": {
"node": ">=18.0.0"
},
@@ -4184,9 +4190,10 @@
}
},
"node_modules/@react-email/section": {
"version": "0.0.15",
"resolved": "https://registry.npmjs.org/@react-email/section/-/section-0.0.15.tgz",
"integrity": "sha512-xfM3Qy5eU7fbkwvktlTeQgad7uo+1Z7YVh1aowSZaRBvKbkEXgoH/XssRYQmQL8ZrZGXbEJMujwtf4fsQL6vrg==",
"version": "0.0.14",
"resolved": "https://registry.npmjs.org/@react-email/section/-/section-0.0.14.tgz",
"integrity": "sha512-+fYWLb4tPU1A/+GE5J1+SEMA7/wR3V30lQ+OR9t2kAJqNrARDbMx0bLnYnR1QL5TiFRz0pCF05SQUobk6gHEDQ==",
"license": "MIT",
"engines": {
"node": ">=18.0.0"
},
@@ -4195,9 +4202,10 @@
}
},
"node_modules/@react-email/tailwind": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/@react-email/tailwind/-/tailwind-1.0.0.tgz",
"integrity": "sha512-LV0SflR0aI5Sjxyp8upyPL8Ctwj+7aqwTgCDO9yZuOI6KpXbBGaYz8bSofe8oaVc/BmymZ5O3+/7FjQexbW+Yg==",
"version": "0.1.0",
"resolved": "https://registry.npmjs.org/@react-email/tailwind/-/tailwind-0.1.0.tgz",
"integrity": "sha512-qysVUEY+M3SKUvu35XDpzn7yokhqFOT3tPU6Mj/pgc62TL5tQFj6msEbBtwoKs2qO3WZvai0DIHdLhaOxBQSow==",
"license": "MIT",
"engines": {
"node": ">=18.0.0"
},
@@ -4468,6 +4476,7 @@
"version": "0.11.0",
"resolved": "https://registry.npmjs.org/@selderee/plugin-htmlparser2/-/plugin-htmlparser2-0.11.0.tgz",
"integrity": "sha512-P33hHGdldxGabLFjPPpaTxVolMrzrcegejx+0GxjrIb9Zv48D8yAIA/QTDR2dFl7Uz7urX8aX6+5bCZslr+gWQ==",
"license": "MIT",
"dependencies": {
"domhandler": "^5.0.3",
"selderee": "^0.11.0"
@@ -7059,6 +7068,7 @@
"version": "1.1.13",
"resolved": "https://registry.npmjs.org/config-chain/-/config-chain-1.1.13.tgz",
"integrity": "sha512-qj+f8APARXHrM0hraqXYb2/bOVSV4PvJQlNZ/DVj0QrmNM2q2euizkeuVckQ57J+W0mRH6Hvi+k50M4Jul2VRQ==",
"license": "MIT",
"dependencies": {
"ini": "^1.3.4",
"proto-list": "~1.2.1"
@@ -7553,6 +7563,7 @@
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz",
"integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==",
"license": "MIT",
"dependencies": {
"domelementtype": "^2.3.0",
"domhandler": "^5.0.2",
@@ -7571,12 +7582,14 @@
"type": "github",
"url": "https://github.com/sponsors/fb55"
}
]
],
"license": "BSD-2-Clause"
},
"node_modules/domhandler": {
"version": "5.0.3",
"resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz",
"integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==",
"license": "BSD-2-Clause",
"dependencies": {
"domelementtype": "^2.3.0"
},
@@ -7591,6 +7604,7 @@
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/domutils/-/domutils-3.1.0.tgz",
"integrity": "sha512-H78uMmQtI2AhgDJjWeQmHwJJ2bLPD3GMmO7Zja/ZZh84wkm+4ut+IUnUdRa8uCGX88DiVx1j6FRe1XfxEgjEZA==",
"license": "BSD-2-Clause",
"dependencies": {
"dom-serializer": "^2.0.0",
"domelementtype": "^2.3.0",
@@ -7620,6 +7634,7 @@
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/editorconfig/-/editorconfig-1.0.4.tgz",
"integrity": "sha512-L9Qe08KWTlqYMVvMcTIvMAdl1cDUubzRNYL+WfA4bLDMHe4nemKkpmYzkznE1FwLKu0EEmy6obgQKzMJrg4x9Q==",
"license": "MIT",
"dependencies": {
"@one-ini/wasm": "0.1.1",
"commander": "^10.0.0",
@@ -7637,6 +7652,7 @@
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
"integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
"license": "MIT",
"dependencies": {
"balanced-match": "^1.0.0"
}
@@ -7645,6 +7661,7 @@
"version": "10.0.1",
"resolved": "https://registry.npmjs.org/commander/-/commander-10.0.1.tgz",
"integrity": "sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==",
"license": "MIT",
"engines": {
"node": ">=14"
}
@@ -7653,6 +7670,7 @@
"version": "9.0.1",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.1.tgz",
"integrity": "sha512-0jWhJpD/MdhPXwPuiRkCbfYfSKp2qnn2eOc279qI7f+osl/l+prKSrvhg157zSYvx/1nmgn2NqdT6k2Z7zSH9w==",
"license": "ISC",
"dependencies": {
"brace-expansion": "^2.0.1"
},
@@ -7740,6 +7758,7 @@
"version": "4.5.0",
"resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz",
"integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==",
"license": "BSD-2-Clause",
"engines": {
"node": ">=0.12"
},
@@ -9072,6 +9091,7 @@
"version": "9.0.5",
"resolved": "https://registry.npmjs.org/html-to-text/-/html-to-text-9.0.5.tgz",
"integrity": "sha512-qY60FjREgVZL03vJU6IfMV4GDjGBIoOyvuFdpBDIX9yTlDw0TjxVBQp+P8NvpdIXNJvfWBTNul7fsAQJq2FNpg==",
"license": "MIT",
"dependencies": {
"@selderee/plugin-htmlparser2": "^0.11.0",
"deepmerge": "^4.3.1",
@@ -9094,6 +9114,7 @@
"url": "https://github.com/sponsors/fb55"
}
],
"license": "MIT",
"dependencies": {
"domelementtype": "^2.3.0",
"domhandler": "^5.0.3",
@@ -9239,7 +9260,8 @@
"node_modules/ini": {
"version": "1.3.8",
"resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz",
"integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew=="
"integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==",
"license": "ISC"
},
"node_modules/inquirer": {
"version": "8.2.6",
@@ -9537,6 +9559,7 @@
"version": "1.15.1",
"resolved": "https://registry.npmjs.org/js-beautify/-/js-beautify-1.15.1.tgz",
"integrity": "sha512-ESjNzSlt/sWE8sciZH8kBF8BPlwXPwhR6pWKAw8bw4Bwj+iZcnKW6ONWUutJ7eObuBZQpiIb8S7OYspWrKt7rA==",
"license": "MIT",
"dependencies": {
"config-chain": "^1.1.13",
"editorconfig": "^1.0.4",
@@ -9557,14 +9580,16 @@
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/abbrev/-/abbrev-2.0.0.tgz",
"integrity": "sha512-6/mh1E2u2YgEsCHdY0Yx5oW+61gZU+1vXaoiHHrpKeuRNNgFvS+/jrwHiQhB5apAf5oB7UB7E19ol2R2LKH8hQ==",
"license": "ISC",
"engines": {
"node": "^14.17.0 || ^16.13.0 || >=18.0.0"
}
},
"node_modules/js-beautify/node_modules/nopt": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/nopt/-/nopt-7.2.0.tgz",
"integrity": "sha512-CVDtwCdhYIvnAzFoJ6NJ6dX3oga9/HyciQDnG1vQDjSLMeKLJ4A93ZqYKDrgYSr1FBY5/hMYC+2VCi24pgpkGA==",
"version": "7.2.1",
"resolved": "https://registry.npmjs.org/nopt/-/nopt-7.2.1.tgz",
"integrity": "sha512-taM24ViiimT/XntxbPyJQzCG+p4EKOpgD3mxFwW38mGjVUrfERQOeY4EDHjdnptttfHuHQXFx+lTP08Q+mLa/w==",
"license": "ISC",
"dependencies": {
"abbrev": "^2.0.0"
},
@@ -9579,6 +9604,7 @@
"version": "3.0.5",
"resolved": "https://registry.npmjs.org/js-cookie/-/js-cookie-3.0.5.tgz",
"integrity": "sha512-cEiJEAEoIbWfCZYKWhVwFuvPX1gETRYPw6LlaTKoxD3s2AkXzkCjnp6h0V77ozyqj0jakteJ4YqDJT830+lVGw==",
"license": "MIT",
"engines": {
"node": ">=14"
}
@@ -9722,6 +9748,7 @@
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/leac/-/leac-0.6.0.tgz",
"integrity": "sha512-y+SqErxb8h7nE/fiEX07jsbuhrpO9lL8eca7/Y1nuWV2moNlXhyd59iDGcRf6moVyDMbmTNzL40SUyrFU/yDpg==",
"license": "MIT",
"funding": {
"url": "https://ko-fi.com/killymxi"
}
@@ -10873,6 +10900,7 @@
"version": "0.12.1",
"resolved": "https://registry.npmjs.org/parseley/-/parseley-0.12.1.tgz",
"integrity": "sha512-e6qHKe3a9HWr0oMRVDTRhKce+bRO8VGQR3NyVwcjwrbhMmFCX9KszEV35+rn4AdilFAq9VPxP/Fe1wC9Qjd2lw==",
"license": "MIT",
"dependencies": {
"leac": "^0.6.0",
"peberminta": "^0.9.0"
@@ -10995,6 +11023,7 @@
"version": "0.9.0",
"resolved": "https://registry.npmjs.org/peberminta/-/peberminta-0.9.0.tgz",
"integrity": "sha512-XIxfHpEuSJbITd1H3EeQwpcZbTLHc+VVr8ANI9t5sit565tsI4/xK3KWTUFE2e6QiangUkh3B0jihzmGnNrRsQ==",
"license": "MIT",
"funding": {
"url": "https://ko-fi.com/killymxi"
}
@@ -11455,7 +11484,8 @@
"node_modules/proto-list": {
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/proto-list/-/proto-list-1.2.4.tgz",
"integrity": "sha512-vtK/94akxsTMhe0/cbfpR+syPuszcuwhqVjJq26CuNDgFGj682oRBXOP5MJpv2r7JtE8MsiepGIqvvOTBwn2vA=="
"integrity": "sha512-vtK/94akxsTMhe0/cbfpR+syPuszcuwhqVjJq26CuNDgFGj682oRBXOP5MJpv2r7JtE8MsiepGIqvvOTBwn2vA==",
"license": "ISC"
},
"node_modules/protobufjs": {
"version": "7.4.0",
@@ -12118,6 +12148,7 @@
"version": "0.3.4",
"resolved": "https://registry.npmjs.org/react-promise-suspense/-/react-promise-suspense-0.3.4.tgz",
"integrity": "sha512-I42jl7L3Ze6kZaq+7zXWSunBa3b1on5yfvUW6Eo/3fFOj6dZ5Bqmcd264nJbTK/gn1HjjILAjSwnZbV4RpSaNQ==",
"license": "MIT",
"dependencies": {
"fast-deep-equal": "^2.0.1"
}
@@ -12125,7 +12156,8 @@
"node_modules/react-promise-suspense/node_modules/fast-deep-equal": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz",
"integrity": "sha512-bCK/2Z4zLidyB4ReuIsvALH6w31YfAQDmXMqMx6FyfHqvBxtjC0eRumeSu4Bs3XtXwpyIywtSTrVT99BxY1f9w=="
"integrity": "sha512-bCK/2Z4zLidyB4ReuIsvALH6w31YfAQDmXMqMx6FyfHqvBxtjC0eRumeSu4Bs3XtXwpyIywtSTrVT99BxY1f9w==",
"license": "MIT"
},
"node_modules/read-cache": {
"version": "1.0.0",
@@ -12765,6 +12797,7 @@
"version": "0.11.0",
"resolved": "https://registry.npmjs.org/selderee/-/selderee-0.11.0.tgz",
"integrity": "sha512-5TF+l7p4+OsnP8BCCvSyZiSPc4x4//p5uPwK8TCnVPJYRmU2aYKMpOXvw8zM5a5JvuuCGN1jmsMwuU2W02ukfA==",
"license": "MIT",
"dependencies": {
"parseley": "^0.12.0"
},
@@ -17450,9 +17483,9 @@
"requires": {}
},
"@react-email/button": {
"version": "0.0.18",
"resolved": "https://registry.npmjs.org/@react-email/button/-/button-0.0.18.tgz",
"integrity": "sha512-uNUnpeDzz1o9HAky47JSTsUN/Ih0A3Az165AAOgAy8XOVzQJPrltUBRzHkScSVJTwRqKLASkie1yZbtNGIcRdA==",
"version": "0.0.17",
"resolved": "https://registry.npmjs.org/@react-email/button/-/button-0.0.17.tgz",
"integrity": "sha512-ioHdsk+BpGS/PqjU6JS7tUrVy9yvbUx92Z+Cem2+MbYp55oEwQ9VHf7u4f5NoM0gdhfKSehBwRdYlHt/frEMcg==",
"requires": {}
},
"@react-email/code-block": {
@@ -17476,12 +17509,12 @@
"requires": {}
},
"@react-email/components": {
"version": "0.0.26",
"resolved": "https://registry.npmjs.org/@react-email/components/-/components-0.0.26.tgz",
"integrity": "sha512-FqxCGnQiI4zztEBAXPfjovIQ9e1l7NJNMgE8hSaH7slWySFn/PpPRQFYpxyCFNr9DqPVHtKYtpo8xvUYx2LdTg==",
"version": "0.0.25",
"resolved": "https://registry.npmjs.org/@react-email/components/-/components-0.0.25.tgz",
"integrity": "sha512-lnfVVrThEcET5NPoeaXvrz9UxtWpGRcut2a07dLbyKgNbP7vj/cXTI5TuHtanCvhCddFpMDnElNRghDOfPzwUg==",
"requires": {
"@react-email/body": "0.0.10",
"@react-email/button": "0.0.18",
"@react-email/button": "0.0.17",
"@react-email/code-block": "0.0.9",
"@react-email/code-inline": "0.0.4",
"@react-email/column": "0.0.12",
@@ -17492,13 +17525,13 @@
"@react-email/hr": "0.0.10",
"@react-email/html": "0.0.10",
"@react-email/img": "0.0.10",
"@react-email/link": "0.0.11",
"@react-email/link": "0.0.10",
"@react-email/markdown": "0.0.12",
"@react-email/preview": "0.0.11",
"@react-email/render": "1.0.2",
"@react-email/row": "0.0.11",
"@react-email/section": "0.0.15",
"@react-email/tailwind": "1.0.0",
"@react-email/render": "1.0.1",
"@react-email/row": "0.0.10",
"@react-email/section": "0.0.14",
"@react-email/tailwind": "0.1.0",
"@react-email/text": "0.0.10"
}
},
@@ -17545,9 +17578,9 @@
"requires": {}
},
"@react-email/link": {
"version": "0.0.11",
"resolved": "https://registry.npmjs.org/@react-email/link/-/link-0.0.11.tgz",
"integrity": "sha512-o1/BgPn2Fi+bN4Nh+P64t4tulaOyPhkBNSpNmiYL1Ar+ilw8q0BmUAqM+lvHy8Qr/4K7BjkgFoc4GoYkoEjOig==",
"version": "0.0.10",
"resolved": "https://registry.npmjs.org/@react-email/link/-/link-0.0.10.tgz",
"integrity": "sha512-tva3wvAWSR10lMJa9fVA09yRn7pbEki0ZZpHE6GD1jKbFhmzt38VgLO9B797/prqoDZdAr4rVK7LJFcdPx3GwA==",
"requires": {}
},
"@react-email/markdown": {
@@ -17565,9 +17598,9 @@
"requires": {}
},
"@react-email/render": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/@react-email/render/-/render-1.0.2.tgz",
"integrity": "sha512-q82eBd39TepzA/xjlm8szqJlrQk/gh7mgtxXMGlJ4dcdx89go1m9YBDpZY98SFy+2r2KAOd5A1mxvUbsPwoATg==",
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@react-email/render/-/render-1.0.1.tgz",
"integrity": "sha512-W3gTrcmLOVYnG80QuUp22ReIT/xfLsVJ+n7ghSlG2BITB8evNABn1AO2rGQoXuK84zKtDAlxCdm3hRyIpZdGSA==",
"requires": {
"html-to-text": "9.0.5",
"js-beautify": "^1.14.11",
@@ -17575,21 +17608,21 @@
}
},
"@react-email/row": {
"version": "0.0.11",
"resolved": "https://registry.npmjs.org/@react-email/row/-/row-0.0.11.tgz",
"integrity": "sha512-ra09h7BMoGa14ds3vh7KVuj1N3astTstEC1YbMdCiHcx/nxylglNaT7qJXU74ZTzyHiGabyiNuyabTS+HLoMCA==",
"version": "0.0.10",
"resolved": "https://registry.npmjs.org/@react-email/row/-/row-0.0.10.tgz",
"integrity": "sha512-jPyEhG3gsLX+Eb9U+A30fh0gK6hXJwF4ghJ+ZtFQtlKAKqHX+eCpWlqB3Xschd/ARJLod8WAswg0FB+JD9d0/A==",
"requires": {}
},
"@react-email/section": {
"version": "0.0.15",
"resolved": "https://registry.npmjs.org/@react-email/section/-/section-0.0.15.tgz",
"integrity": "sha512-xfM3Qy5eU7fbkwvktlTeQgad7uo+1Z7YVh1aowSZaRBvKbkEXgoH/XssRYQmQL8ZrZGXbEJMujwtf4fsQL6vrg==",
"version": "0.0.14",
"resolved": "https://registry.npmjs.org/@react-email/section/-/section-0.0.14.tgz",
"integrity": "sha512-+fYWLb4tPU1A/+GE5J1+SEMA7/wR3V30lQ+OR9t2kAJqNrARDbMx0bLnYnR1QL5TiFRz0pCF05SQUobk6gHEDQ==",
"requires": {}
},
"@react-email/tailwind": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/@react-email/tailwind/-/tailwind-1.0.0.tgz",
"integrity": "sha512-LV0SflR0aI5Sjxyp8upyPL8Ctwj+7aqwTgCDO9yZuOI6KpXbBGaYz8bSofe8oaVc/BmymZ5O3+/7FjQexbW+Yg==",
"version": "0.1.0",
"resolved": "https://registry.npmjs.org/@react-email/tailwind/-/tailwind-0.1.0.tgz",
"integrity": "sha512-qysVUEY+M3SKUvu35XDpzn7yokhqFOT3tPU6Mj/pgc62TL5tQFj6msEbBtwoKs2qO3WZvai0DIHdLhaOxBQSow==",
"requires": {}
},
"@react-email/text": {
@@ -21551,9 +21584,9 @@
"integrity": "sha512-6/mh1E2u2YgEsCHdY0Yx5oW+61gZU+1vXaoiHHrpKeuRNNgFvS+/jrwHiQhB5apAf5oB7UB7E19ol2R2LKH8hQ=="
},
"nopt": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/nopt/-/nopt-7.2.0.tgz",
"integrity": "sha512-CVDtwCdhYIvnAzFoJ6NJ6dX3oga9/HyciQDnG1vQDjSLMeKLJ4A93ZqYKDrgYSr1FBY5/hMYC+2VCi24pgpkGA==",
"version": "7.2.1",
"resolved": "https://registry.npmjs.org/nopt/-/nopt-7.2.1.tgz",
"integrity": "sha512-taM24ViiimT/XntxbPyJQzCG+p4EKOpgD3mxFwW38mGjVUrfERQOeY4EDHjdnptttfHuHQXFx+lTP08Q+mLa/w==",
"requires": {
"abbrev": "^2.0.0"
}

View File

@@ -1,6 +1,6 @@
{
"name": "immich",
"version": "1.120.0",
"version": "1.120.2",
"description": "",
"author": "",
"private": true,
@@ -48,7 +48,7 @@
"@opentelemetry/context-async-hooks": "^1.24.0",
"@opentelemetry/exporter-prometheus": "^0.54.0",
"@opentelemetry/sdk-node": "^0.54.0",
"@react-email/components": "^0.0.26",
"@react-email/components": "^0.0.25",
"@socket.io/redis-adapter": "^8.3.0",
"archiver": "^7.0.0",
"async-lock": "^1.4.0",
@@ -108,7 +108,7 @@
"@types/lodash": "^4.14.197",
"@types/mock-fs": "^4.13.1",
"@types/multer": "^1.4.7",
"@types/node": "^22.8.6",
"@types/node": "^22.9.0",
"@types/nodemailer": "^6.4.14",
"@types/picomatch": "^3.0.0",
"@types/pngjs": "^6.0.5",

View File

@@ -1,6 +1,7 @@
import { Duration } from 'luxon';
import { readFileSync } from 'node:fs';
import { SemVer } from 'semver';
import { ExifOrientation } from 'src/enum';
export const POSTGRES_VERSION_RANGE = '>=14.0.0';
export const VECTORS_VERSION_RANGE = '>=0.2 <0.4';
@@ -81,3 +82,19 @@ export const CLIP_MODEL_INFO: Record<string, ModelInfo> = {
'nllb-clip-large-siglip__mrl': { dimSize: 1152 },
'nllb-clip-large-siglip__v1': { dimSize: 1152 },
};
type SharpRotationData = {
angle?: number;
flip?: boolean;
flop?: boolean;
};
export const ORIENTATION_TO_SHARP_ROTATION: Record<ExifOrientation, SharpRotationData> = {
[ExifOrientation.Horizontal]: { angle: 0 },
[ExifOrientation.MirrorHorizontal]: { angle: 0, flop: true },
[ExifOrientation.Rotate180]: { angle: 180 },
[ExifOrientation.MirrorVertical]: { angle: 180, flop: true },
[ExifOrientation.MirrorHorizontalRotate270CW]: { angle: 270, flip: true },
[ExifOrientation.Rotate90CW]: { angle: 90 },
[ExifOrientation.MirrorHorizontalRotate90CW]: { angle: 90, flip: true },
[ExifOrientation.Rotate270CW]: { angle: 270 },
} as const;
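
For context, this table is consumed by the MediaRepository change further down in this diff, which feeds sharp's rotate/flip/flop calls. A minimal sketch of how the lookup would be applied, assuming sharp plus the enum and constant as defined here (the helper name is hypothetical):

import sharp from 'sharp';
import { ORIENTATION_TO_SHARP_ROTATION } from 'src/constants';
import { ExifOrientation } from 'src/enum';

// Sketch only: apply the rotation data for a known EXIF orientation, or let sharp auto-orient.
function applyExifRotation(pipeline: sharp.Sharp, orientation?: ExifOrientation): sharp.Sharp {
  const rotation = orientation ? ORIENTATION_TO_SHARP_ROTATION[orientation] : undefined;
  pipeline = pipeline.rotate(rotation?.angle); // rotate() with no angle falls back to EXIF auto-rotation
  if (rotation?.flip) {
    pipeline = pipeline.flip();
  }
  if (rotation?.flop) {
    pipeline = pipeline.flop();
  }
  return pipeline;
}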

View File

@@ -12,7 +12,6 @@ import { TagResponseDto, mapTag } from 'src/dtos/tag.dto';
import { UserResponseDto, mapUser } from 'src/dtos/user.dto';
import { AssetFaceEntity } from 'src/entities/asset-face.entity';
import { AssetEntity } from 'src/entities/asset.entity';
import { SmartInfoEntity } from 'src/entities/smart-info.entity';
import { AssetType } from 'src/enum';
import { mimeTypes } from 'src/utils/mime-types';
@@ -45,7 +44,6 @@ export class AssetResponseDto extends SanitizedAssetResponseDto {
isTrashed!: boolean;
isOffline!: boolean;
exifInfo?: ExifResponseDto;
smartInfo?: SmartInfoResponseDto;
tags?: TagResponseDto[];
people?: PersonWithFacesResponseDto[];
unassignedFaces?: AssetFaceWithoutPersonResponseDto[];
@@ -141,7 +139,6 @@ export function mapAsset(entity: AssetEntity, options: AssetMapOptions = {}): As
isTrashed: !!entity.deletedAt,
duration: entity.duration ?? '0:00:00.00000',
exifInfo: entity.exifInfo ? mapExif(entity.exifInfo) : undefined,
smartInfo: entity.smartInfo ? mapSmartInfo(entity.smartInfo) : undefined,
livePhotoVideoId: entity.livePhotoVideoId,
tags: entity.tags?.map((tag) => mapTag(tag)),
people: peopleWithFaces(entity.faces),
@@ -161,15 +158,3 @@ export class MemoryLaneResponseDto {
assets!: AssetResponseDto[];
}
export class SmartInfoResponseDto {
tags?: string[] | null;
objects?: string[] | null;
}
export function mapSmartInfo(entity: SmartInfoEntity): SmartInfoResponseDto {
return {
tags: entity.tags,
objects: entity.objects,
};
}

View File

@@ -12,11 +12,8 @@ import {
IsUrl,
Max,
Min,
Validate,
ValidateIf,
ValidateNested,
ValidatorConstraint,
ValidatorConstraintInterface,
} from 'class-validator';
import { SystemConfig } from 'src/config';
import { CLIPConfig, DuplicateDetectionConfig, FacialRecognitionConfig } from 'src/dtos/model-config.dto';
@@ -33,14 +30,7 @@ import {
VideoContainer,
} from 'src/enum';
import { ConcurrentQueueName, QueueName } from 'src/interfaces/job.interface';
import { ValidateBoolean, validateCronExpression } from 'src/validation';
@ValidatorConstraint({ name: 'cronValidator' })
class CronValidator implements ValidatorConstraintInterface {
validate(expression: string): boolean {
return validateCronExpression(expression);
}
}
import { IsCronExpression, ValidateBoolean } from 'src/validation';
const isLibraryScanEnabled = (config: SystemConfigLibraryScanDto) => config.enabled;
const isOAuthEnabled = (config: SystemConfigOAuthDto) => config.enabled;
@@ -54,7 +44,7 @@ export class DatabaseBackupConfig {
@ValidateIf(isDatabaseBackupEnabled)
@IsNotEmpty()
@Validate(CronValidator, { message: 'Invalid cron expression' })
@IsCronExpression()
@IsString()
cronExpression!: string;
@@ -244,7 +234,7 @@ class SystemConfigLibraryScanDto {
@ValidateIf(isLibraryScanEnabled)
@IsNotEmpty()
@Validate(CronValidator, { message: 'Invalid cron expression' })
@IsCronExpression()
@IsString()
cronExpression!: string;
}

View File

@@ -5,7 +5,6 @@ import { AssetJobStatusEntity } from 'src/entities/asset-job-status.entity';
import { ExifEntity } from 'src/entities/exif.entity';
import { LibraryEntity } from 'src/entities/library.entity';
import { SharedLinkEntity } from 'src/entities/shared-link.entity';
import { SmartInfoEntity } from 'src/entities/smart-info.entity';
import { SmartSearchEntity } from 'src/entities/smart-search.entity';
import { StackEntity } from 'src/entities/stack.entity';
import { TagEntity } from 'src/entities/tag.entity';
@@ -143,9 +142,6 @@ export class AssetEntity {
@OneToOne(() => ExifEntity, (exifEntity) => exifEntity.asset)
exifInfo?: ExifEntity;
@OneToOne(() => SmartInfoEntity, (smartInfoEntity) => smartInfoEntity.asset)
smartInfo?: SmartInfoEntity;
@OneToOne(() => SmartSearchEntity, (smartSearchEntity) => smartSearchEntity.asset)
smartSearch?: SmartSearchEntity;

View File

@@ -18,7 +18,6 @@ import { PartnerEntity } from 'src/entities/partner.entity';
import { PersonEntity } from 'src/entities/person.entity';
import { SessionEntity } from 'src/entities/session.entity';
import { SharedLinkEntity } from 'src/entities/shared-link.entity';
import { SmartInfoEntity } from 'src/entities/smart-info.entity';
import { SmartSearchEntity } from 'src/entities/smart-search.entity';
import { StackEntity } from 'src/entities/stack.entity';
import { SystemMetadataEntity } from 'src/entities/system-metadata.entity';
@@ -46,7 +45,6 @@ export const entities = [
PartnerEntity,
PersonEntity,
SharedLinkEntity,
SmartInfoEntity,
SmartSearchEntity,
StackEntity,
SystemMetadataEntity,

View File

@@ -1,18 +0,0 @@
import { AssetEntity } from 'src/entities/asset.entity';
import { Column, Entity, JoinColumn, OneToOne, PrimaryColumn } from 'typeorm';
@Entity('smart_info', { synchronize: false })
export class SmartInfoEntity {
@OneToOne(() => AssetEntity, { onDelete: 'CASCADE', nullable: true })
@JoinColumn({ name: 'assetId', referencedColumnName: 'id' })
asset?: AssetEntity;
@PrimaryColumn()
assetId!: string;
@Column({ type: 'text', array: true, nullable: true })
tags!: string[] | null;
@Column({ type: 'text', array: true, nullable: true })
objects!: string[] | null;
}

View File

@@ -373,3 +373,14 @@ export enum ImmichTelemetry {
REPO = 'repo',
JOB = 'job',
}
export enum ExifOrientation {
Horizontal = 1,
MirrorHorizontal = 2,
Rotate180 = 3,
MirrorVertical = 4,
MirrorHorizontalRotate270CW = 5,
Rotate90CW = 6,
MirrorHorizontalRotate90CW = 7,
Rotate270CW = 8,
}

View File

@@ -28,9 +28,7 @@ export enum WithoutProperty {
EXIF = 'exif',
SMART_SEARCH = 'smart-search',
DUPLICATE = 'duplicate',
OBJECT_TAGS = 'object-tags',
FACES = 'faces',
PERSON = 'person',
SIDECAR = 'sidecar',
}
@@ -94,7 +92,6 @@ export type AssetWithoutRelations = Omit<
| 'library'
| 'exifInfo'
| 'sharedLinks'
| 'smartInfo'
| 'smartSearch'
| 'tags'
>;
@@ -190,7 +187,6 @@ export interface IAssetRepository {
upsertExif(exif: Partial<ExifEntity>): Promise<void>;
upsertJobStatus(...jobStatus: Partial<AssetJobStatusEntity>[]): Promise<void>;
getAssetIdByCity(userId: string, options: AssetExploreFieldOptions): Promise<SearchExploreItem<string>>;
getAssetIdByTag(userId: string, options: AssetExploreFieldOptions): Promise<SearchExploreItem<string>>;
getDuplicates(options: AssetBuilderOptions): Promise<AssetEntity[]>;
getAllForUserFullSync(options: AssetFullSyncOptions): Promise<AssetEntity[]>;
getChangedDeltaSync(options: AssetDeltaSyncOptions): Promise<AssetEntity[]>;

View File

@@ -0,0 +1,20 @@
export const ICronRepository = 'ICronRepository';
type CronBase = {
name: string;
start?: boolean;
};
export type CronCreate = CronBase & {
expression: string;
onTick: () => void;
};
export type CronUpdate = CronBase & {
expression?: string;
};
export interface ICronRepository {
create(cron: CronCreate): void;
update(cron: CronUpdate): void;
}
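
A hedged usage sketch of this new interface, mirroring how the backup and library services later in this diff move off jobRepository.addCronJob/updateCronJob (the helper names and the cron expression are examples, not from the changeset):

import { ICronRepository } from 'src/interfaces/cron.interface';

// Register a named cron job once, then adjust it when config changes.
function scheduleBackup(cron: ICronRepository, enabled: boolean): void {
  cron.create({
    name: 'backupDatabase',
    expression: '0 2 * * *', // example expression only
    onTick: () => console.log('queue the backup job here'),
    start: enabled,
  });
}

function rescheduleBackup(cron: ICronRepository, expression: string, enabled: boolean): void {
  cron.update({ name: 'backupDatabase', expression, start: enabled });
}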

View File

@@ -315,8 +315,6 @@ export interface IJobRepository {
setup(options: { services: ClassConstructor<unknown>[] }): void;
startWorkers(): void;
run(job: JobItem): Promise<JobStatus>;
addCronJob(name: string, expression: string, onTick: () => void, start?: boolean): void;
updateCronJob(name: string, expression?: string, start?: boolean): void;
setConcurrency(queueName: QueueName, concurrency: number): void;
queue(item: JobItem): Promise<void>;
queueAll(items: JobItem[]): Promise<void>;

View File

@@ -1,5 +1,5 @@
import { Writable } from 'node:stream';
import { ImageFormat, TranscodeTarget, VideoCodec } from 'src/enum';
import { ExifOrientation, ImageFormat, TranscodeTarget, VideoCodec } from 'src/enum';
export const IMediaRepository = 'IMediaRepository';
@@ -31,6 +31,7 @@ interface DecodeImageOptions {
export interface DecodeToBufferOptions extends DecodeImageOptions {
size: number;
orientation?: ExifOrientation;
}
export type GenerateThumbnailOptions = ImageOptions & DecodeImageOptions;

View File

@@ -68,7 +68,6 @@ export interface SearchStatusOptions {
export interface SearchOneToOneRelationOptions {
withExif?: boolean;
withSmartInfo?: boolean;
withStacked?: boolean;
}

View File

@@ -0,0 +1,11 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
export class DropSmartInfoTable1730989238718 implements MigrationInterface {
public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(`DROP TABLE smart_info`);
}
public async down(): Promise<void> {
// not implemented
}
}
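
The down step is intentionally left as a no-op here, and the dropped rows would not be recoverable in any case. For reference only, a reversible variant would have to recreate the table described by the removed SmartInfoEntity (assetId primary key plus two nullable text[] columns); a sketch, with the uuid type and constraint name as assumptions:

public async down(queryRunner: QueryRunner): Promise<void> {
  // Column list taken from the removed SmartInfoEntity; uuid type and constraint name are assumed.
  await queryRunner.query(`
    CREATE TABLE "smart_info" (
      "assetId" uuid NOT NULL,
      "tags" text[],
      "objects" text[],
      CONSTRAINT "PK_smart_info_assetId" PRIMARY KEY ("assetId")
    )
  `);
}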

View File

@@ -183,9 +183,6 @@ SELECT
"AssetEntity__AssetEntity_exifInfo"."bitsPerSample" AS "AssetEntity__AssetEntity_exifInfo_bitsPerSample",
"AssetEntity__AssetEntity_exifInfo"."rating" AS "AssetEntity__AssetEntity_exifInfo_rating",
"AssetEntity__AssetEntity_exifInfo"."fps" AS "AssetEntity__AssetEntity_exifInfo_fps",
"AssetEntity__AssetEntity_smartInfo"."assetId" AS "AssetEntity__AssetEntity_smartInfo_assetId",
"AssetEntity__AssetEntity_smartInfo"."tags" AS "AssetEntity__AssetEntity_smartInfo_tags",
"AssetEntity__AssetEntity_smartInfo"."objects" AS "AssetEntity__AssetEntity_smartInfo_objects",
"AssetEntity__AssetEntity_tags"."id" AS "AssetEntity__AssetEntity_tags_id",
"AssetEntity__AssetEntity_tags"."value" AS "AssetEntity__AssetEntity_tags_value",
"AssetEntity__AssetEntity_tags"."createdAt" AS "AssetEntity__AssetEntity_tags_createdAt",
@@ -252,7 +249,6 @@ SELECT
FROM
"assets" "AssetEntity"
LEFT JOIN "exif" "AssetEntity__AssetEntity_exifInfo" ON "AssetEntity__AssetEntity_exifInfo"."assetId" = "AssetEntity"."id"
LEFT JOIN "smart_info" "AssetEntity__AssetEntity_smartInfo" ON "AssetEntity__AssetEntity_smartInfo"."assetId" = "AssetEntity"."id"
LEFT JOIN "tag_asset" "AssetEntity_AssetEntity__AssetEntity_tags" ON "AssetEntity_AssetEntity__AssetEntity_tags"."assetsId" = "AssetEntity"."id"
LEFT JOIN "tags" "AssetEntity__AssetEntity_tags" ON "AssetEntity__AssetEntity_tags"."id" = "AssetEntity_AssetEntity__AssetEntity_tags"."tagsId"
LEFT JOIN "asset_faces" "AssetEntity__AssetEntity_faces" ON "AssetEntity__AssetEntity_faces"."assetId" = "AssetEntity"."id"
@@ -932,36 +928,6 @@ WHERE
LIMIT
12
-- AssetRepository.getAssetIdByTag
WITH
"random_tags" AS (
SELECT
unnest(tags) AS "tag"
FROM
"smart_info" "si"
GROUP BY
tag
HAVING
count(*) >= $1
)
SELECT DISTINCT
ON (unnest("si"."tags")) "asset"."id" AS "data",
unnest("si"."tags") AS "value"
FROM
"assets" "asset"
INNER JOIN "smart_info" "si" ON "asset"."id" = si."assetId"
INNER JOIN "random_tags" "t" ON "si"."tags" @> ARRAY[t.tag]
WHERE
(
"asset"."isVisible" = true
AND "asset"."type" = $2
AND "asset"."ownerId" IN ($3)
AND "asset"."isArchived" = $4
)
AND ("asset"."deletedAt" IS NULL)
LIMIT
12
-- AssetRepository.getAllForUserFullSync
SELECT
"asset"."id" AS "asset_id",

View File

@@ -5,7 +5,6 @@ import { AssetFileEntity } from 'src/entities/asset-files.entity';
import { AssetJobStatusEntity } from 'src/entities/asset-job-status.entity';
import { AssetEntity } from 'src/entities/asset.entity';
import { ExifEntity } from 'src/entities/exif.entity';
import { SmartInfoEntity } from 'src/entities/smart-info.entity';
import { AssetFileType, AssetOrder, AssetStatus, AssetType, PaginationMode } from 'src/enum';
import {
AssetBuilderOptions,
@@ -60,7 +59,6 @@ export class AssetRepository implements IAssetRepository {
@InjectRepository(AssetFileEntity) private fileRepository: Repository<AssetFileEntity>,
@InjectRepository(ExifEntity) private exifRepository: Repository<ExifEntity>,
@InjectRepository(AssetJobStatusEntity) private jobStatusRepository: Repository<AssetJobStatusEntity>,
@InjectRepository(SmartInfoEntity) private smartInfoRepository: Repository<SmartInfoEntity>,
) {}
async upsertExif(exif: Partial<ExifEntity>): Promise<void> {
@@ -119,7 +117,6 @@ export class AssetRepository implements IAssetRepository {
where: { id: In(ids) },
relations: {
exifInfo: true,
smartInfo: true,
tags: true,
faces: {
person: true,
@@ -422,22 +419,6 @@ export class AssetRepository implements IAssetRepository {
break;
}
case WithoutProperty.OBJECT_TAGS: {
relations = {
smartInfo: true,
};
where = {
jobStatus: {
previewAt: Not(IsNull()),
},
isVisible: true,
smartInfo: {
tags: IsNull(),
},
};
break;
}
case WithoutProperty.FACES: {
relations = {
faces: true,
@@ -457,23 +438,6 @@ export class AssetRepository implements IAssetRepository {
break;
}
case WithoutProperty.PERSON: {
relations = {
faces: true,
};
where = {
jobStatus: {
previewAt: Not(IsNull()),
},
isVisible: true,
faces: {
assetId: Not(IsNull()),
personId: IsNull(),
},
};
break;
}
case WithoutProperty.SIDECAR: {
where = [
{ sidecarPath: IsNull(), isVisible: true },
@@ -611,35 +575,6 @@ export class AssetRepository implements IAssetRepository {
return { fieldName: 'exifInfo.city', items };
}
@GenerateSql({ params: [DummyValue.UUID, { minAssetsPerField: 5, maxFields: 12 }] })
async getAssetIdByTag(
ownerId: string,
{ minAssetsPerField, maxFields }: AssetExploreFieldOptions,
): Promise<SearchExploreItem<string>> {
const cte = this.smartInfoRepository
.createQueryBuilder('si')
.select('unnest(tags)', 'tag')
.groupBy('tag')
.having('count(*) >= :minAssetsPerField', { minAssetsPerField });
const items = await this.getBuilder({
userIds: [ownerId],
exifInfo: false,
assetType: AssetType.IMAGE,
isArchived: false,
})
.select('unnest(si.tags)', 'value')
.addSelect('asset.id', 'data')
.distinctOn(['unnest(si.tags)'])
.innerJoin('smart_info', 'si', 'asset.id = si."assetId"')
.addCommonTableExpression(cte, 'random_tags')
.innerJoin('random_tags', 't', 'si.tags @> ARRAY[t.tag]')
.limit(maxFields)
.getRawMany();
return { fieldName: 'smartInfo.tags', items };
}
private getBuilder(options: AssetBuilderOptions) {
const builder = this.repository.createQueryBuilder('asset').where('asset.isVisible = true');

View File

@@ -0,0 +1,52 @@
import { Inject, Injectable } from '@nestjs/common';
import { SchedulerRegistry } from '@nestjs/schedule';
import { CronJob, CronTime } from 'cron';
import { CronCreate, CronUpdate, ICronRepository } from 'src/interfaces/cron.interface';
import { ILoggerRepository } from 'src/interfaces/logger.interface';
@Injectable()
export class CronRepository implements ICronRepository {
constructor(
private schedulerRegistry: SchedulerRegistry,
@Inject(ILoggerRepository) private logger: ILoggerRepository,
) {
this.logger.setContext(CronRepository.name);
}
create({ name, expression, onTick, start = true }: CronCreate): void {
const job = new CronJob<null, null>(
expression,
onTick,
// function to run onComplete
undefined,
// whether it should start directly
start,
// timezone
undefined,
// context
undefined,
// runOnInit
undefined,
// utcOffset
undefined,
// prevents memory leaking by automatically stopping when the node process finishes
true,
);
this.schedulerRegistry.addCronJob(name, job);
}
update({ name, expression, start }: CronUpdate): void {
const job = this.schedulerRegistry.getCronJob(name);
if (expression) {
job.setTime(new CronTime(expression));
}
if (start !== undefined) {
if (start) {
job.start();
} else {
job.stop();
}
}
}
}

View File

@@ -6,6 +6,7 @@ import { IKeyRepository } from 'src/interfaces/api-key.interface';
import { IAssetRepository } from 'src/interfaces/asset.interface';
import { IAuditRepository } from 'src/interfaces/audit.interface';
import { IConfigRepository } from 'src/interfaces/config.interface';
import { ICronRepository } from 'src/interfaces/cron.interface';
import { ICryptoRepository } from 'src/interfaces/crypto.interface';
import { IDatabaseRepository } from 'src/interfaces/database.interface';
import { IEventRepository } from 'src/interfaces/event.interface';
@@ -44,6 +45,7 @@ import { ApiKeyRepository } from 'src/repositories/api-key.repository';
import { AssetRepository } from 'src/repositories/asset.repository';
import { AuditRepository } from 'src/repositories/audit.repository';
import { ConfigRepository } from 'src/repositories/config.repository';
import { CronRepository } from 'src/repositories/cron.repository';
import { CryptoRepository } from 'src/repositories/crypto.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { EventRepository } from 'src/repositories/event.repository';
@@ -83,6 +85,7 @@ export const repositories = [
{ provide: IAssetRepository, useClass: AssetRepository },
{ provide: IAuditRepository, useClass: AuditRepository },
{ provide: IConfigRepository, useClass: ConfigRepository },
{ provide: ICronRepository, useClass: CronRepository },
{ provide: ICryptoRepository, useClass: CryptoRepository },
{ provide: IDatabaseRepository, useClass: DatabaseRepository },
{ provide: IEventRepository, useClass: EventRepository },

View File

@@ -4,7 +4,6 @@ import { ModuleRef, Reflector } from '@nestjs/core';
import { SchedulerRegistry } from '@nestjs/schedule';
import { JobsOptions, Queue, Worker } from 'bullmq';
import { ClassConstructor } from 'class-transformer';
import { CronJob, CronTime } from 'cron';
import { setTimeout } from 'node:timers/promises';
import { JobConfig } from 'src/decorators';
import { MetadataKey } from 'src/enum';
@@ -119,43 +118,6 @@ export class JobRepository implements IJobRepository {
return item.handler(data);
}
addCronJob(name: string, expression: string, onTick: () => void, start = true): void {
const job = new CronJob<null, null>(
expression,
onTick,
// function to run onComplete
undefined,
// whether it should start directly
start,
// timezone
undefined,
// context
undefined,
// runOnInit
undefined,
// utcOffset
undefined,
// prevents memory leaking by automatically stopping when the node process finishes
true,
);
this.schedulerRegistry.addCronJob(name, job);
}
updateCronJob(name: string, expression?: string, start?: boolean): void {
const job = this.schedulerRegistry.getCronJob(name);
if (expression) {
job.setTime(new CronTime(expression));
}
if (start !== undefined) {
if (start) {
job.start();
} else {
job.stop();
}
}
}
setConcurrency(queueName: QueueName, concurrency: number) {
const worker = this.workers[queueName];
if (!worker) {

View File

@@ -5,6 +5,7 @@ import { Duration } from 'luxon';
import fs from 'node:fs/promises';
import { Writable } from 'node:stream';
import sharp from 'sharp';
import { ORIENTATION_TO_SHARP_ROTATION } from 'src/constants';
import { Colorspace, LogLevel } from 'src/enum';
import { ILoggerRepository } from 'src/interfaces/logger.interface';
import {
@@ -82,7 +83,15 @@ export class MediaRepository implements IMediaRepository {
.withIccProfile(options.colorspace);
if (!options.raw) {
pipeline = pipeline.rotate();
const { angle, flip, flop } = options.orientation ? ORIENTATION_TO_SHARP_ROTATION[options.orientation] : {};
pipeline = pipeline.rotate(angle);
if (flip) {
pipeline = pipeline.flip();
}
if (flop) {
pipeline = pipeline.flop();
}
}
if (options.crop) {

View File

@@ -0,0 +1,74 @@
import { ILoggerRepository } from 'src/interfaces/logger.interface';
import { EmailRenderRequest, EmailTemplate } from 'src/interfaces/notification.interface';
import { NotificationRepository } from 'src/repositories/notification.repository';
import { Mocked } from 'vitest';
describe(NotificationRepository.name, () => {
let sut: NotificationRepository;
let loggerMock: Mocked<ILoggerRepository>;
beforeEach(() => {
loggerMock = {
setContext: vitest.fn(),
debug: vitest.fn(),
} as unknown as Mocked<ILoggerRepository>;
sut = new NotificationRepository(loggerMock);
});
describe('renderEmail', () => {
it('should render the email correctly for TEST_EMAIL template', async () => {
const request: EmailRenderRequest = {
template: EmailTemplate.TEST_EMAIL,
data: { displayName: 'Alen Turing', baseUrl: 'http://localhost' },
};
const result = await sut.renderEmail(request);
expect(result.html).toContain('<!DOCTYPE html PUBLIC');
expect(result.text).toContain('test email');
});
it('should render the email correctly for WELCOME template', async () => {
const request: EmailRenderRequest = {
template: EmailTemplate.WELCOME,
data: { displayName: 'Alen Turing', username: 'turing', baseUrl: 'http://localhost' },
};
const result = await sut.renderEmail(request);
expect(result.html).toContain('<!DOCTYPE html PUBLIC');
expect(result.text).toContain('A new account has been created for you');
});
it('should render the email correctly for ALBUM_INVITE template', async () => {
const request: EmailRenderRequest = {
template: EmailTemplate.ALBUM_INVITE,
data: {
albumName: 'Vacation',
albumId: '123',
senderName: 'John',
recipientName: 'Jane',
baseUrl: 'http://localhost',
},
};
const result = await sut.renderEmail(request);
expect(result.html).toContain('<!DOCTYPE html PUBLIC');
expect(result.text).toContain('Vacation');
});
it('should render the email correctly for ALBUM_UPDATE template', async () => {
const request: EmailRenderRequest = {
template: EmailTemplate.ALBUM_UPDATE,
data: { albumName: 'Holiday', albumId: '123', recipientName: 'Jane', baseUrl: 'http://localhost' },
};
const result = await sut.renderEmail(request);
expect(result.html).toContain('<!DOCTYPE html PUBLIC');
expect(result.text).toContain('Holiday');
});
});
});

View File

@@ -6,7 +6,6 @@ import { AssetFaceEntity } from 'src/entities/asset-face.entity';
import { AssetEntity } from 'src/entities/asset.entity';
import { ExifEntity } from 'src/entities/exif.entity';
import { GeodataPlacesEntity } from 'src/entities/geodata-places.entity';
import { SmartInfoEntity } from 'src/entities/smart-info.entity';
import { SmartSearchEntity } from 'src/entities/smart-search.entity';
import { AssetType, PaginationMode } from 'src/enum';
import { IConfigRepository } from 'src/interfaces/config.interface';
@@ -34,7 +33,6 @@ export class SearchRepository implements ISearchRepository {
private assetsByCityQuery: string;
constructor(
@InjectRepository(SmartInfoEntity) private repository: Repository<SmartInfoEntity>,
@InjectRepository(AssetEntity) private assetRepository: Repository<AssetEntity>,
@InjectRepository(ExifEntity) private exifRepository: Repository<ExifEntity>,
@InjectRepository(AssetFaceEntity) private assetFaceRepository: Repository<AssetFaceEntity>,
@@ -278,7 +276,7 @@ export class SearchRepository implements ISearchRepository {
@GenerateSql({ params: [[DummyValue.UUID]] })
async getAssetsByCity(userIds: string[]): Promise<AssetEntity[]> {
const parameters = [userIds, true, false, AssetType.IMAGE];
const rawRes = await this.repository.query(this.assetsByCityQuery, parameters);
const rawRes = await this.assetRepository.query(this.assetsByCityQuery, parameters);
const items: AssetEntity[] = [];
for (const res of rawRes) {

View File

@@ -94,7 +94,6 @@ export class AssetService extends BaseService {
{
exifInfo: true,
sharedLinks: true,
smartInfo: true,
tags: true,
owner: true,
faces: {
@@ -162,7 +161,6 @@ export class AssetService extends BaseService {
const asset = await this.assetRepository.getById(id, {
exifInfo: true,
owner: true,
smartInfo: true,
tags: true,
faces: {
person: true,

View File

@@ -3,8 +3,9 @@ import { defaults, SystemConfig } from 'src/config';
import { StorageCore } from 'src/cores/storage.core';
import { ImmichWorker, StorageFolder } from 'src/enum';
import { IConfigRepository } from 'src/interfaces/config.interface';
import { ICronRepository } from 'src/interfaces/cron.interface';
import { IDatabaseRepository } from 'src/interfaces/database.interface';
import { IJobRepository, JobStatus } from 'src/interfaces/job.interface';
import { JobStatus } from 'src/interfaces/job.interface';
import { IProcessRepository } from 'src/interfaces/process.interface';
import { IStorageRepository } from 'src/interfaces/storage.interface';
import { ISystemMetadataRepository } from 'src/interfaces/system-metadata.interface';
@@ -18,13 +19,13 @@ describe(BackupService.name, () => {
let databaseMock: Mocked<IDatabaseRepository>;
let configMock: Mocked<IConfigRepository>;
let jobMock: Mocked<IJobRepository>;
let cronMock: Mocked<ICronRepository>;
let processMock: Mocked<IProcessRepository>;
let storageMock: Mocked<IStorageRepository>;
let systemMock: Mocked<ISystemMetadataRepository>;
beforeEach(() => {
({ sut, configMock, databaseMock, jobMock, processMock, storageMock, systemMock } = newTestService(BackupService));
({ sut, cronMock, configMock, databaseMock, processMock, storageMock, systemMock } = newTestService(BackupService));
});
it('should work', () => {
@@ -37,7 +38,7 @@ describe(BackupService.name, () => {
await sut.onConfigInit({ newConfig: systemConfigStub.backupEnabled as SystemConfig });
expect(jobMock.addCronJob).toHaveBeenCalled();
expect(cronMock.create).toHaveBeenCalled();
});
it('should not initialize backup database cron job when lock is taken', async () => {
@@ -45,14 +46,14 @@ describe(BackupService.name, () => {
await sut.onConfigInit({ newConfig: systemConfigStub.backupEnabled as SystemConfig });
expect(jobMock.addCronJob).not.toHaveBeenCalled();
expect(cronMock.create).not.toHaveBeenCalled();
});
it('should not initialise backup database job when running on microservices', async () => {
configMock.getWorker.mockReturnValue(ImmichWorker.MICROSERVICES);
await sut.onConfigInit({ newConfig: systemConfigStub.backupEnabled as SystemConfig });
expect(jobMock.addCronJob).not.toHaveBeenCalled();
expect(cronMock.create).not.toHaveBeenCalled();
});
});
@@ -75,35 +76,15 @@ describe(BackupService.name, () => {
} as SystemConfig,
});
expect(jobMock.updateCronJob).toHaveBeenCalledWith('backupDatabase', '0 1 * * *', true);
expect(jobMock.updateCronJob).toHaveBeenCalled();
expect(cronMock.update).toHaveBeenCalledWith({ name: 'backupDatabase', expression: '0 1 * * *', start: true });
expect(cronMock.update).toHaveBeenCalled();
});
it('should do nothing if instance does not have the backup database lock', async () => {
databaseMock.tryLock.mockResolvedValue(false);
await sut.onConfigInit({ newConfig: defaults });
sut.onConfigUpdate({ newConfig: systemConfigStub.backupEnabled as SystemConfig, oldConfig: defaults });
expect(jobMock.updateCronJob).not.toHaveBeenCalled();
});
});
describe('onConfigValidateEvent', () => {
it('should allow a valid cron expression', () => {
expect(() =>
sut.onConfigValidate({
newConfig: { backup: { database: { cronExpression: '0 0 * * *' } } } as SystemConfig,
oldConfig: {} as SystemConfig,
}),
).not.toThrow(expect.stringContaining('Invalid cron expression'));
});
it('should fail for an invalid cron expression', () => {
expect(() =>
sut.onConfigValidate({
newConfig: { backup: { database: { cronExpression: 'foo' } } } as SystemConfig,
oldConfig: {} as SystemConfig,
}),
).toThrow(/Invalid cron expression.*/);
expect(cronMock.update).not.toHaveBeenCalled();
});
});
@@ -165,6 +146,7 @@ describe(BackupService.name, () => {
storageMock.readdir.mockResolvedValue([]);
processMock.spawn.mockReturnValue(mockSpawn(0, 'data', ''));
storageMock.rename.mockResolvedValue();
storageMock.unlink.mockResolvedValue();
systemMock.get.mockResolvedValue(systemConfigStub.backupEnabled);
storageMock.createWriteStream.mockReturnValue(new PassThrough());
});
@@ -207,5 +189,42 @@ describe(BackupService.name, () => {
const result = await sut.handleBackupDatabase();
expect(result).toBe(JobStatus.FAILED);
});
it('should ignore unlink failing and still return failed job status', async () => {
processMock.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
storageMock.unlink.mockRejectedValue(new Error('error'));
const result = await sut.handleBackupDatabase();
expect(storageMock.unlink).toHaveBeenCalled();
expect(result).toBe(JobStatus.FAILED);
});
it.each`
postgresVersion | expectedVersion
${'14.10'} | ${14}
${'14.10.3'} | ${14}
${'14.10 (Debian 14.10-1.pgdg120+1)'} | ${14}
${'15.3.3'} | ${15}
${'16.4.2'} | ${16}
${'17.15.1'} | ${17}
`(
`should use pg_dumpall $expectedVersion with postgres version $postgresVersion`,
async ({ postgresVersion, expectedVersion }) => {
databaseMock.getPostgresVersion.mockResolvedValue(postgresVersion);
await sut.handleBackupDatabase();
expect(processMock.spawn).toHaveBeenCalledWith(
`/usr/lib/postgresql/${expectedVersion}/bin/pg_dumpall`,
expect.any(Array),
expect.any(Object),
);
},
);
it.each`
postgresVersion
${'13.99.99'}
${'18.0.0'}
`(`should fail if postgres version $postgresVersion is not supported`, async ({ postgresVersion }) => {
databaseMock.getPostgresVersion.mockResolvedValue(postgresVersion);
const result = await sut.handleBackupDatabase();
expect(processMock.spawn).not.toHaveBeenCalled();
expect(result).toBe(JobStatus.FAILED);
});
});
});

View File

@@ -1,5 +1,6 @@
import { Injectable } from '@nestjs/common';
import { default as path } from 'node:path';
import semver from 'semver';
import { StorageCore } from 'src/cores/storage.core';
import { OnEvent, OnJob } from 'src/decorators';
import { ImmichWorker, StorageFolder } from 'src/enum';
@@ -8,7 +9,6 @@ import { ArgOf } from 'src/interfaces/event.interface';
import { JobName, JobStatus, QueueName } from 'src/interfaces/job.interface';
import { BaseService } from 'src/services/base.service';
import { handlePromiseError } from 'src/utils/misc';
import { validateCronExpression } from 'src/validation';
@Injectable()
export class BackupService extends BaseService {
@@ -27,12 +27,12 @@ export class BackupService extends BaseService {
this.backupLock = await this.databaseRepository.tryLock(DatabaseLock.BackupDatabase);
if (this.backupLock) {
this.jobRepository.addCronJob(
'backupDatabase',
database.cronExpression,
() => handlePromiseError(this.jobRepository.queue({ name: JobName.BACKUP_DATABASE }), this.logger),
database.enabled,
);
this.cronRepository.create({
name: 'backupDatabase',
expression: database.cronExpression,
onTick: () => handlePromiseError(this.jobRepository.queue({ name: JobName.BACKUP_DATABASE }), this.logger),
start: database.enabled,
});
}
}
@@ -42,15 +42,11 @@ export class BackupService extends BaseService {
return;
}
this.jobRepository.updateCronJob('backupDatabase', backup.database.cronExpression, backup.database.enabled);
}
@OnEvent({ name: 'config.validate' })
onConfigValidate({ newConfig }: ArgOf<'config.validate'>) {
const { database } = newConfig.backup;
if (!validateCronExpression(database.cronExpression)) {
throw new Error(`Invalid cron expression ${database.cronExpression}`);
}
this.cronRepository.update({
name: 'backupDatabase',
expression: backup.database.cronExpression,
start: backup.database.enabled,
});
}
async cleanupDatabaseBackups() {
@@ -85,19 +81,53 @@ export class BackupService extends BaseService {
} = this.configRepository.getEnv();
const isUrlConnection = config.connectionType === 'url';
const databaseParams = isUrlConnection ? [config.url] : ['-U', config.username, '-h', config.host];
const databaseParams = isUrlConnection
? ['--dbname', config.url]
: [
'--username',
config.username,
'--host',
config.host,
'--port',
`${config.port}`,
'--database',
config.database,
];
databaseParams.push('--clean', '--if-exists');
const backupFilePath = path.join(
StorageCore.getBaseFolder(StorageFolder.BACKUPS),
`immich-db-backup-${Date.now()}.sql.gz.tmp`,
);
const databaseVersion = await this.databaseRepository.getPostgresVersion();
const databaseSemver = semver.coerce(databaseVersion);
const databaseMajorVersion = databaseSemver?.major;
if (!databaseMajorVersion || !databaseSemver || !semver.satisfies(databaseSemver, '>=14.0.0 <18.0.0')) {
this.logger.error(`Database Backup Failure: Unsupported PostgreSQL version: ${databaseVersion}`);
return JobStatus.FAILED;
}
this.logger.log(`Database Backup Starting. Database Version: ${databaseMajorVersion}`);
try {
await new Promise<void>((resolve, reject) => {
const pgdump = this.processRepository.spawn(`pg_dumpall`, [...databaseParams, '--clean', '--if-exists'], {
env: { PATH: process.env.PATH, PGPASSWORD: isUrlConnection ? undefined : config.password },
});
const pgdump = this.processRepository.spawn(
`/usr/lib/postgresql/${databaseMajorVersion}/bin/pg_dumpall`,
databaseParams,
{
env: {
PATH: process.env.PATH,
PGPASSWORD: isUrlConnection ? undefined : config.password,
},
},
);
const gzip = this.processRepository.spawn(`gzip`, []);
// NOTE: `--rsyncable` is only supported in GNU gzip
const gzip = this.processRepository.spawn(`gzip`, ['--rsyncable']);
pgdump.stdout.pipe(gzip.stdin);
const fileStream = this.storageRepository.createWriteStream(backupFilePath);
@@ -149,10 +179,13 @@ export class BackupService extends BaseService {
await this.storageRepository.rename(backupFilePath, backupFilePath.replace('.tmp', ''));
} catch (error) {
this.logger.error('Database Backup Failure', error);
await this.storageRepository
.unlink(backupFilePath)
.catch((error) => this.logger.error('Failed to delete failed backup file', error));
return JobStatus.FAILED;
}
this.logger.debug(`Database Backup Success`);
this.logger.log(`Database Backup Success`);
await this.cleanupDatabaseBackups();
return JobStatus.SUCCESS;
}
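
The version gate above relies on semver.coerce to pull a clean version out of whatever the server reports; the test table earlier in this diff covers strings like '14.10 (Debian 14.10-1.pgdg120+1)'. A small standalone sketch of the same check, assuming only the semver package (the function name is illustrative):

import semver from 'semver';

// Returns the pg_dumpall major version to use, or null when the reported server version is unsupported.
function resolveDumpallMajor(reportedVersion: string): number | null {
  const coerced = semver.coerce(reportedVersion); // '14.10 (Debian 14.10-1.pgdg120+1)' -> 14.10.0
  if (!coerced || !semver.satisfies(coerced, '>=14.0.0 <18.0.0')) {
    return null;
  }
  return coerced.major;
}

// resolveDumpallMajor('16.4.2') === 16   -> /usr/lib/postgresql/16/bin/pg_dumpall
// resolveDumpallMajor('13.99.99') === null -> the job fails before spawning anything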

View File

@@ -12,6 +12,7 @@ import { IKeyRepository } from 'src/interfaces/api-key.interface';
import { IAssetRepository } from 'src/interfaces/asset.interface';
import { IAuditRepository } from 'src/interfaces/audit.interface';
import { IConfigRepository } from 'src/interfaces/config.interface';
import { ICronRepository } from 'src/interfaces/cron.interface';
import { ICryptoRepository } from 'src/interfaces/crypto.interface';
import { IDatabaseRepository } from 'src/interfaces/database.interface';
import { IEventRepository } from 'src/interfaces/event.interface';
@@ -57,6 +58,7 @@ export class BaseService {
@Inject(IAlbumUserRepository) protected albumUserRepository: IAlbumUserRepository,
@Inject(IAssetRepository) protected assetRepository: IAssetRepository,
@Inject(IConfigRepository) protected configRepository: IConfigRepository,
@Inject(ICronRepository) protected cronRepository: ICronRepository,
@Inject(ICryptoRepository) protected cryptoRepository: ICryptoRepository,
@Inject(IDatabaseRepository) protected databaseRepository: IDatabaseRepository,
@Inject(IEventRepository) protected eventRepository: IEventRepository,

View File

@@ -1,5 +1,5 @@
import { BadRequestException } from '@nestjs/common';
import { defaults } from 'src/config';
import { defaults, SystemConfig } from 'src/config';
import { ImmichWorker } from 'src/enum';
import { IAssetRepository } from 'src/interfaces/asset.interface';
import { IConfigRepository } from 'src/interfaces/config.interface';
@@ -31,7 +31,7 @@ describe(JobService.name, () => {
describe('onConfigUpdate', () => {
it('should update concurrency', () => {
sut.onConfigInitOrUpdate({ newConfig: defaults });
sut.onConfigUpdate({ newConfig: defaults, oldConfig: {} as SystemConfig });
expect(jobMock.setConcurrency).toHaveBeenCalledTimes(15);
expect(jobMock.setConcurrency).toHaveBeenNthCalledWith(5, QueueName.FACIAL_RECOGNITION, 1);

View File

@@ -39,8 +39,7 @@ const asJobItem = (dto: JobCreateDto): JobItem => {
@Injectable()
export class JobService extends BaseService {
@OnEvent({ name: 'config.init' })
@OnEvent({ name: 'config.update', server: true })
onConfigInitOrUpdate({ newConfig: config }: ArgOf<'config.init'>) {
onConfigInit({ newConfig: config }: ArgOf<'config.init'>) {
if (this.worker !== ImmichWorker.MICROSERVICES) {
return;
}
@@ -56,6 +55,11 @@ export class JobService extends BaseService {
}
}
@OnEvent({ name: 'config.update', server: true })
onConfigUpdate({ newConfig: config }: ArgOf<'config.update'>) {
this.onConfigInit({ newConfig: config });
}
async create(dto: JobCreateDto): Promise<void> {
await this.jobRepository.queue(asJobItem(dto));
}
@@ -164,6 +168,10 @@ export class JobService extends BaseService {
return this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_SYNC_ALL, data: { force } });
}
case QueueName.BACKUP_DATABASE: {
return this.jobRepository.queue({ name: JobName.BACKUP_DATABASE, data: { force } });
}
default: {
throw new BadRequestException(`Invalid job name: ${name}`);
}

View File

@@ -6,6 +6,7 @@ import { UserEntity } from 'src/entities/user.entity';
import { AssetType, ImmichWorker } from 'src/enum';
import { IAssetRepository } from 'src/interfaces/asset.interface';
import { IConfigRepository } from 'src/interfaces/config.interface';
import { ICronRepository } from 'src/interfaces/cron.interface';
import { IDatabaseRepository } from 'src/interfaces/database.interface';
import {
IJobRepository,
@@ -36,13 +37,15 @@ describe(LibraryService.name, () => {
let assetMock: Mocked<IAssetRepository>;
let configMock: Mocked<IConfigRepository>;
let cronMock: Mocked<ICronRepository>;
let databaseMock: Mocked<IDatabaseRepository>;
let jobMock: Mocked<IJobRepository>;
let libraryMock: Mocked<ILibraryRepository>;
let storageMock: Mocked<IStorageRepository>;
beforeEach(() => {
({ sut, assetMock, configMock, databaseMock, jobMock, libraryMock, storageMock } = newTestService(LibraryService));
({ sut, assetMock, configMock, cronMock, databaseMock, jobMock, libraryMock, storageMock } =
newTestService(LibraryService));
databaseMock.tryLock.mockResolvedValue(true);
configMock.getWorker.mockReturnValue(ImmichWorker.MICROSERVICES);
@@ -56,7 +59,7 @@ describe(LibraryService.name, () => {
it('should init cron job and handle config changes', async () => {
await sut.onConfigInit({ newConfig: defaults });
expect(jobMock.addCronJob).toHaveBeenCalled();
expect(cronMock.create).toHaveBeenCalled();
await sut.onConfigUpdate({
oldConfig: defaults,
@@ -71,7 +74,7 @@ describe(LibraryService.name, () => {
} as SystemConfig,
});
expect(jobMock.updateCronJob).toHaveBeenCalledWith('libraryScan', '0 1 * * *', true);
expect(cronMock.update).toHaveBeenCalledWith({ name: 'libraryScan', expression: '0 1 * * *', start: true });
});
it('should initialize watcher for all external libraries', async () => {
@@ -117,14 +120,14 @@ describe(LibraryService.name, () => {
await sut.onConfigInit({ newConfig: systemConfigStub.libraryWatchEnabled as SystemConfig });
expect(jobMock.addCronJob).not.toHaveBeenCalled();
expect(cronMock.create).not.toHaveBeenCalled();
});
it('should not initialize watcher or library scan job when running on api', async () => {
configMock.getWorker.mockReturnValue(ImmichWorker.API);
await sut.onConfigInit({ newConfig: systemConfigStub.libraryScan as SystemConfig });
expect(jobMock.addCronJob).not.toHaveBeenCalled();
expect(cronMock.create).not.toHaveBeenCalled();
});
});
@@ -138,7 +141,7 @@ describe(LibraryService.name, () => {
databaseMock.tryLock.mockResolvedValue(false);
await sut.onConfigInit({ newConfig: defaults });
await sut.onConfigUpdate({ newConfig: systemConfigStub.libraryScan as SystemConfig, oldConfig: defaults });
expect(jobMock.updateCronJob).not.toHaveBeenCalled();
expect(cronMock.update).not.toHaveBeenCalled();
});
it('should update cron job and enable watching', async () => {
@@ -148,11 +151,11 @@ describe(LibraryService.name, () => {
oldConfig: defaults,
});
expect(jobMock.updateCronJob).toHaveBeenCalledWith(
'libraryScan',
systemConfigStub.libraryScan.library.scan.cronExpression,
systemConfigStub.libraryScan.library.scan.enabled,
);
expect(cronMock.update).toHaveBeenCalledWith({
name: 'libraryScan',
expression: systemConfigStub.libraryScan.library.scan.cronExpression,
start: systemConfigStub.libraryScan.library.scan.enabled,
});
});
it('should update cron job and disable watching', async () => {
@@ -166,31 +169,11 @@ describe(LibraryService.name, () => {
oldConfig: defaults,
});
expect(jobMock.updateCronJob).toHaveBeenCalledWith(
'libraryScan',
systemConfigStub.libraryScan.library.scan.cronExpression,
systemConfigStub.libraryScan.library.scan.enabled,
);
});
});
describe('onConfigValidateEvent', () => {
it('should allow a valid cron expression', () => {
expect(() =>
sut.onConfigValidate({
newConfig: { library: { scan: { cronExpression: '0 0 * * *' } } } as SystemConfig,
oldConfig: {} as SystemConfig,
}),
).not.toThrow(expect.stringContaining('Invalid cron expression'));
});
it('should fail for an invalid cron expression', () => {
expect(() =>
sut.onConfigValidate({
newConfig: { library: { scan: { cronExpression: 'foo' } } } as SystemConfig,
oldConfig: {} as SystemConfig,
}),
).toThrow(/Invalid cron expression.*/);
expect(cronMock.update).toHaveBeenCalledWith({
name: 'libraryScan',
expression: systemConfigStub.libraryScan.library.scan.cronExpression,
start: systemConfigStub.libraryScan.library.scan.enabled,
});
});
});

View File

@@ -24,7 +24,6 @@ import { BaseService } from 'src/services/base.service';
import { mimeTypes } from 'src/utils/mime-types';
import { handlePromiseError } from 'src/utils/misc';
import { usePagination } from 'src/utils/pagination';
import { validateCronExpression } from 'src/validation';
@Injectable()
export class LibraryService extends BaseService {
@@ -48,12 +47,13 @@ export class LibraryService extends BaseService {
this.watchLibraries = this.lock && watch.enabled;
if (this.lock) {
this.jobRepository.addCronJob(
'libraryScan',
scan.cronExpression,
() => handlePromiseError(this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_SYNC_ALL }), this.logger),
scan.enabled,
);
this.cronRepository.create({
name: 'libraryScan',
expression: scan.cronExpression,
onTick: () =>
handlePromiseError(this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_SYNC_ALL }), this.logger),
start: scan.enabled,
});
}
if (this.watchLibraries) {
@@ -67,7 +67,11 @@ export class LibraryService extends BaseService {
return;
}
this.jobRepository.updateCronJob('libraryScan', library.scan.cronExpression, library.scan.enabled);
this.cronRepository.update({
name: 'libraryScan',
expression: library.scan.cronExpression,
start: library.scan.enabled,
});
if (library.watch.enabled !== this.watchLibraries) {
// Watch configuration changed, update accordingly
@@ -76,14 +80,6 @@ export class LibraryService extends BaseService {
}
}
@OnEvent({ name: 'config.validate' })
onConfigValidate({ newConfig }: ArgOf<'config.validate'>) {
const { scan } = newConfig.library;
if (!validateCronExpression(scan.cronExpression)) {
throw new Error(`Invalid cron expression ${scan.cronExpression}`);
}
}
private async watch(id: string): Promise<boolean> {
if (!this.watchLibraries) {
return false;

View File

@@ -214,7 +214,8 @@ export class MediaService extends BaseService {
const colorspace = this.isSRGB(asset) ? Colorspace.SRGB : image.colorspace;
const processInvalidImages = process.env.IMMICH_PROCESS_INVALID_IMAGES === 'true';
const decodeOptions = { colorspace, processInvalidImages, size: image.preview.size };
const orientation = useExtracted && asset.exifInfo?.orientation ? Number(asset.exifInfo.orientation) : undefined;
const decodeOptions = { colorspace, processInvalidImages, size: image.preview.size, orientation };
const { data, info } = await this.mediaRepository.decodeImage(inputPath, decodeOptions);
const options = { colorspace, processInvalidImages, raw: info };
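
The metadata service writes Orientation with .toString() (see the metadata spec hunk further down), so the Number() conversion above is what lines the stored string back up with the new ExifOrientation enum. A trivial illustration, with a hypothetical stored value:

import { ExifOrientation } from 'src/enum';

const stored = '6'; // hypothetical exifInfo.orientation value
const orientation = Number(stored) as ExifOrientation; // === ExifOrientation.Rotate90CW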

View File

@@ -3,7 +3,7 @@ import { randomBytes } from 'node:crypto';
import { Stats } from 'node:fs';
import { constants } from 'node:fs/promises';
import { ExifEntity } from 'src/entities/exif.entity';
import { AssetType, ImmichWorker, SourceType } from 'src/enum';
import { AssetType, ExifOrientation, ImmichWorker, SourceType } from 'src/enum';
import { IAlbumRepository } from 'src/interfaces/album.interface';
import { IAssetRepository, WithoutProperty } from 'src/interfaces/asset.interface';
import { IConfigRepository } from 'src/interfaces/config.interface';
@@ -18,7 +18,7 @@ import { IStorageRepository } from 'src/interfaces/storage.interface';
import { ISystemMetadataRepository } from 'src/interfaces/system-metadata.interface';
import { ITagRepository } from 'src/interfaces/tag.interface';
import { IUserRepository } from 'src/interfaces/user.interface';
import { MetadataService, Orientation } from 'src/services/metadata.service';
import { MetadataService } from 'src/services/metadata.service';
import { assetStub } from 'test/fixtures/asset.stub';
import { fileStub } from 'test/fixtures/file.stub';
import { probeStub } from 'test/fixtures/media.stub';
@@ -539,7 +539,7 @@ describe(MetadataService.name, () => {
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.video.id], { faces: { person: false } });
expect(assetMock.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({ orientation: Orientation.Rotate270CW.toString() }),
expect.objectContaining({ orientation: ExifOrientation.Rotate270CW.toString() }),
);
});

View File

@@ -12,7 +12,7 @@ import { AssetFaceEntity } from 'src/entities/asset-face.entity';
import { AssetEntity } from 'src/entities/asset.entity';
import { ExifEntity } from 'src/entities/exif.entity';
import { PersonEntity } from 'src/entities/person.entity';
import { AssetType, ImmichWorker, SourceType } from 'src/enum';
import { AssetType, ExifOrientation, ImmichWorker, SourceType } from 'src/enum';
import { WithoutProperty } from 'src/interfaces/asset.interface';
import { DatabaseLock } from 'src/interfaces/database.interface';
import { ArgOf } from 'src/interfaces/event.interface';
@@ -36,17 +36,6 @@ const EXIF_DATE_TAGS: Array<keyof Tags> = [
'DateTimeCreated',
];
export enum Orientation {
Horizontal = 1,
MirrorHorizontal = 2,
Rotate180 = 3,
MirrorVertical = 4,
MirrorHorizontalRotate270CW = 5,
Rotate90CW = 6,
MirrorHorizontalRotate90CW = 7,
Rotate270CW = 8,
}
const validate = <T>(value: T): NonNullable<T> | null => {
// handle lists of numbers
if (Array.isArray(value)) {
@@ -676,19 +665,19 @@ export class MetadataService extends BaseService {
if (videoStreams[0]) {
switch (videoStreams[0].rotation) {
case -90: {
tags.Orientation = Orientation.Rotate90CW;
tags.Orientation = ExifOrientation.Rotate90CW;
break;
}
case 0: {
tags.Orientation = Orientation.Horizontal;
tags.Orientation = ExifOrientation.Horizontal;
break;
}
case 90: {
tags.Orientation = Orientation.Rotate270CW;
tags.Orientation = ExifOrientation.Rotate270CW;
break;
}
case 180: {
tags.Orientation = Orientation.Rotate180;
tags.Orientation = ExifOrientation.Rotate180;
break;
}
}

View File

@@ -47,14 +47,9 @@ describe(SearchService.name, () => {
fieldName: 'exifInfo.city',
items: [{ value: 'Paris', data: assetStub.image.id }],
});
assetMock.getAssetIdByTag.mockResolvedValue({
fieldName: 'smartInfo.tags',
items: [{ value: 'train', data: assetStub.imageFrom2015.id }],
});
assetMock.getByIdsWithAllRelations.mockResolvedValue([assetStub.image, assetStub.imageFrom2015]);
const expectedResponse = [
{ fieldName: 'exifInfo.city', items: [{ value: 'Paris', data: mapAsset(assetStub.image) }] },
{ fieldName: 'smartInfo.tags', items: [{ value: 'train', data: mapAsset(assetStub.imageFrom2015) }] },
];
const result = await sut.getExploreData(authStub.user1);

View File

@@ -34,10 +34,8 @@ export class SearchService extends BaseService {
async getExploreData(auth: AuthDto): Promise<SearchExploreItem<AssetResponseDto>[]> {
const options = { maxFields: 12, minAssetsPerField: 5 };
const results = await Promise.all([
this.assetRepository.getAssetIdByCity(auth.user.id, options),
this.assetRepository.getAssetIdByTag(auth.user.id, options),
]);
const result = await this.assetRepository.getAssetIdByCity(auth.user.id, options);
const results = [result];
const assetIds = new Set<string>(results.flatMap((field) => field.items.map((item) => item.data)));
const assets = await this.assetRepository.getByIdsWithAllRelations([...assetIds]);
const assetMap = new Map<string, AssetResponseDto>(assets.map((asset) => [asset.id, mapAsset(asset)]));

View File

@@ -38,7 +38,7 @@ describe(StorageTemplateService.name, () => {
systemMock.get.mockResolvedValue({ storageTemplate: { enabled: true } });
sut.onConfigInitOrUpdate({ newConfig: defaults });
sut.onConfigInit({ newConfig: defaults });
});
describe('onConfigValidate', () => {
@@ -171,7 +171,7 @@ describe(StorageTemplateService.name, () => {
const config = structuredClone(defaults);
config.storageTemplate.template = '{{y}}/{{#if album}}{{album}}{{else}}other/{{MM}}{{/if}}/{{filename}}';
sut.onConfigInitOrUpdate({ newConfig: config });
sut.onConfigInit({ newConfig: config });
userMock.get.mockResolvedValue(user);
assetMock.getByIds.mockResolvedValueOnce([asset]);
@@ -192,7 +192,7 @@ describe(StorageTemplateService.name, () => {
const user = userStub.user1;
const config = structuredClone(defaults);
config.storageTemplate.template = '{{y}}/{{#if album}}{{album}}{{else}}other//{{MM}}{{/if}}/{{filename}}';
sut.onConfigInitOrUpdate({ newConfig: config });
sut.onConfigInit({ newConfig: config });
userMock.get.mockResolvedValue(user);
assetMock.getByIds.mockResolvedValueOnce([asset]);

View File

@@ -75,8 +75,7 @@ export class StorageTemplateService extends BaseService {
}
@OnEvent({ name: 'config.init' })
@OnEvent({ name: 'config.update', server: true })
onConfigInitOrUpdate({ newConfig }: ArgOf<'config.init'>) {
onConfigInit({ newConfig }: ArgOf<'config.init'>) {
const template = newConfig.storageTemplate.template;
if (!this._template || template !== this.template.raw) {
this.logger.debug(`Compiling new storage template: ${template}`);
@@ -84,6 +83,11 @@ export class StorageTemplateService extends BaseService {
}
}
@OnEvent({ name: 'config.update', server: true })
onConfigUpdate({ newConfig }: ArgOf<'config.update'>) {
this.onConfigInit({ newConfig });
}
@OnEvent({ name: 'config.validate' })
onConfigValidate({ newConfig }: ArgOf<'config.validate'>) {
try {
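Read together, the two hunks split the old combined handler: config.init keeps the recompile-only-when-changed logic, and config.update now delegates to it. A framework-free sketch of the resulting shape (the class and field names below are illustrative):

```typescript
type Config = { storageTemplate: { template: string } };

class TemplateCompiler {
  private compiledRaw?: string;

  // Handles 'config.init'.
  onConfigInit({ newConfig }: { newConfig: Config }) {
    const template = newConfig.storageTemplate.template;
    // Recompile only when the template text actually changed.
    if (this.compiledRaw !== template) {
      console.debug(`Compiling new storage template: ${template}`);
      this.compiledRaw = template;
    }
  }

  // Handles 'config.update' by reusing the same code path.
  onConfigUpdate({ newConfig }: { newConfig: Config }) {
    this.onConfigInit({ newConfig });
  }
}

const compiler = new TemplateCompiler();
compiler.onConfigInit({ newConfig: { storageTemplate: { template: '{{y}}/{{MM}}/{{filename}}' } } });
compiler.onConfigUpdate({ newConfig: { storageTemplate: { template: '{{y}}/{{MM}}/{{filename}}' } } }); // unchanged, no recompile
```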

View File

@@ -261,6 +261,29 @@ describe(SystemConfigService.name, () => {
});
});
it('should accept valid cron expressions', async () => {
configMock.getEnv.mockReturnValue(mockEnvData({ configFile: 'immich-config.json' }));
systemMock.readFile.mockResolvedValue(JSON.stringify({ library: { scan: { cronExpression: '0 0 * * *' } } }));
await expect(sut.getSystemConfig()).resolves.toMatchObject({
library: {
scan: {
enabled: true,
cronExpression: '0 0 * * *',
},
},
});
});
it('should reject invalid cron expressions', async () => {
configMock.getEnv.mockReturnValue(mockEnvData({ configFile: 'immich-config.json' }));
systemMock.readFile.mockResolvedValue(JSON.stringify({ library: { scan: { cronExpression: 'foo' } } }));
await expect(sut.getSystemConfig()).rejects.toThrow(
'library.scan.cronExpression has failed the following constraints: cronValidator',
);
});
it('should log errors with the config file', async () => {
configMock.getEnv.mockReturnValue(mockEnvData({ configFile: 'immich-config.json' }));

View File

@@ -90,7 +90,6 @@ export function searchAssetBuilder(
isNotInAlbum,
withFaces,
withPeople,
withSmartInfo,
personIds,
withStacked,
trashedAfter,
@@ -123,10 +122,6 @@ export function searchAssetBuilder(
builder.leftJoinAndSelect('faces.person', 'person');
}
if (withSmartInfo) {
builder.leftJoinAndSelect(`${builder.alias}.smartInfo`, 'smartInfo');
}
if (personIds && personIds.length > 0) {
const cte = builder
.createQueryBuilder()

View File

@@ -16,9 +16,12 @@ import {
IsOptional,
IsString,
IsUUID,
Validate,
ValidateBy,
ValidateIf,
ValidationOptions,
ValidatorConstraint,
ValidatorConstraintInterface,
buildMessage,
isDateString,
} from 'class-validator';
@@ -156,16 +159,20 @@ export const ValidateBoolean = (options?: BooleanOptions) => {
return applyDecorators(...decorators);
};
export function validateCronExpression(expression: string) {
try {
new CronJob(expression, () => {});
} catch {
return false;
@ValidatorConstraint({ name: 'cronValidator' })
class CronValidator implements ValidatorConstraintInterface {
validate(expression: string): boolean {
try {
new CronJob(expression, () => {});
return true;
} catch {
return false;
}
}
return true;
}
export const IsCronExpression = () => Validate(CronValidator, { message: 'Invalid cron expression' });
type IValue = { value: unknown };
export const toEmail = ({ value }: IValue) => (typeof value === 'string' ? value.toLowerCase() : value);
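The refactor means cron validation now surfaces through class-validator under the cronValidator constraint, which is what the new system-config tests assert. A hedged usage sketch; the import path and DTO name are assumptions:

```typescript
import { validateSync } from 'class-validator';
import { IsCronExpression } from 'src/validation'; // assumed export location

class ScanScheduleDto {
  @IsCronExpression()
  cronExpression!: string;
}

const dto = new ScanScheduleDto();

dto.cronExpression = '0 0 * * *'; // daily at midnight
console.log(validateSync(dto).length); // 0

dto.cronExpression = 'foo';
// One error whose constraints include cronValidator / 'Invalid cron expression'.
console.log(validateSync(dto)[0]?.constraints);
```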

View File

@@ -62,10 +62,6 @@ const assetResponse: AssetResponseDto = {
updatedAt: today,
isFavorite: false,
isArchived: false,
smartInfo: {
tags: [],
objects: ['a', 'b', 'c'],
},
duration: '0:00:00.00000',
exifInfo: assetInfo,
livePhotoVideoId: null,
@@ -205,12 +201,6 @@ export const sharedLinkStub = {
isArchived: false,
isExternal: false,
isOffline: false,
smartInfo: {
assetId: 'id_1',
tags: [],
objects: ['a', 'b', 'c'],
asset: null as any,
},
files: [],
thumbhash: null,
encodedVideoPath: '',

View File

@@ -33,7 +33,6 @@ export const newAssetRepositoryMock = (): Mocked<IAssetRepository> => {
getTimeBucket: vitest.fn(),
getTimeBuckets: vitest.fn(),
getAssetIdByCity: vitest.fn(),
getAssetIdByTag: vitest.fn(),
getAllForUserFullSync: vitest.fn(),
getChangedDeltaSync: vitest.fn(),
getDuplicates: vitest.fn(),

View File

@@ -0,0 +1,9 @@
import { ICronRepository } from 'src/interfaces/cron.interface';
import { Mocked, vitest } from 'vitest';
export const newCronRepositoryMock = (): Mocked<ICronRepository> => {
return {
create: vitest.fn(),
update: vitest.fn(),
};
};

View File

@@ -6,8 +6,6 @@ export const newJobRepositoryMock = (): Mocked<IJobRepository> => {
setup: vitest.fn(),
startWorkers: vitest.fn(),
run: vitest.fn(),
addCronJob: vitest.fn(),
updateCronJob: vitest.fn(),
setConcurrency: vitest.fn(),
empty: vitest.fn(),
pause: vitest.fn(),

View File

@@ -12,6 +12,7 @@ import { newKeyRepositoryMock } from 'test/repositories/api-key.repository.mock'
import { newAssetRepositoryMock } from 'test/repositories/asset.repository.mock';
import { newAuditRepositoryMock } from 'test/repositories/audit.repository.mock';
import { newConfigRepositoryMock } from 'test/repositories/config.repository.mock';
import { newCronRepositoryMock } from 'test/repositories/cron.repository.mock';
import { newCryptoRepositoryMock } from 'test/repositories/crypto.repository.mock';
import { newDatabaseRepositoryMock } from 'test/repositories/database.repository.mock';
import { newEventRepositoryMock } from 'test/repositories/event.repository.mock';
@@ -62,6 +63,7 @@ export const newTestService = <T extends BaseService>(
const accessMock = newAccessRepositoryMock();
const loggerMock = newLoggerRepositoryMock();
const cronMock = newCronRepositoryMock();
const cryptoMock = newCryptoRepositoryMock();
const activityMock = newActivityRepositoryMock();
const auditMock = newAuditRepositoryMock();
@@ -108,6 +110,7 @@ export const newTestService = <T extends BaseService>(
albumUserMock,
assetMock,
configMock,
cronMock,
cryptoMock,
databaseMock,
eventMock,
@@ -144,6 +147,7 @@ export const newTestService = <T extends BaseService>(
sut,
accessMock,
loggerMock,
cronMock,
cryptoMock,
activityMock,
auditMock,

View File

@@ -1,4 +1,4 @@
FROM node:22.11.0-alpine3.20@sha256:f265794478aa0b1a23d85a492c8311ed795bc527c3fe7e43453b3c872dcd71a3
FROM node:22.11.0-alpine3.20@sha256:dc8ba2f61dd86c44e43eb25a7812ad03c5b1b224a19fc6f77e1eb9e5669f0b82
RUN apk add --no-cache tini
USER node

web/package-lock.json (generated, 8 changes)
View File

@@ -1,12 +1,12 @@
{
"name": "immich-web",
"version": "1.120.0",
"version": "1.120.2",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "immich-web",
"version": "1.120.0",
"version": "1.120.2",
"license": "GNU Affero General Public License version 3",
"dependencies": {
"@formatjs/icu-messageformat-parser": "^2.7.8",
@@ -74,13 +74,13 @@
},
"../open-api/typescript-sdk": {
"name": "@immich/sdk",
"version": "1.120.0",
"version": "1.120.2",
"license": "GNU Affero General Public License version 3",
"dependencies": {
"@oazapfts/runtime": "^1.0.2"
},
"devDependencies": {
"@types/node": "^22.8.6",
"@types/node": "^22.9.0",
"typescript": "^5.3.3"
}
},

View File

@@ -1,6 +1,6 @@
{
"name": "immich-web",
"version": "1.120.0",
"version": "1.120.2",
"license": "GNU Affero General Public License version 3",
"scripts": {
"dev": "vite dev --host 0.0.0.0 --port 3000",

View File

@@ -5,7 +5,6 @@
import { clamp } from 'lodash-es';
import { onMount } from 'svelte';
import { isTimelineScrolling } from '$lib/stores/timeline.store';
import { parseUtcDate } from '$lib/utils/date-time';
import { fade, fly } from 'svelte/transition';
export let timelineTopOffset = 0;
@@ -75,7 +74,6 @@
$: timelineFullHeight = $assetStore.timelineHeight + timelineTopOffset + timelineBottomOffset;
$: relativeTopOffset = toScrollY(timelineTopOffset / timelineFullHeight);
$: relativeBottomOffset = toScrollY(timelineBottomOffset / timelineFullHeight);
$: formatedDate = scrubBucket?.bucketDate ? parseUtcDate(scrubBucket?.bucketDate).toFormat('MMM yyyy') : '';
const listener: BucketListener = (event) => {
const { type } = event;
@@ -243,12 +241,12 @@
class="absolute right-0 h-[2px] w-10 bg-immich-primary dark:bg-immich-dark-primary"
style:top="{scrollY + HOVER_DATE_HEIGHT}px"
>
{#if $isTimelineScrolling && formatedDate}
{#if $isTimelineScrolling && scrubBucket?.bucketDate}
<p
transition:fade={{ duration: 200 }}
class="truncate pointer-events-none absolute right-0 bottom-0 z-[100] min-w-20 max-w-64 w-fit rounded-tl-md border-b-2 border-immich-primary bg-immich-bg/80 py-1 px-1 text-sm font-medium shadow-[0_0_8px_rgba(0,0,0,0.25)] dark:border-immich-dark-primary dark:bg-immich-dark-gray/80 dark:text-immich-dark-fg"
>
{formatedDate}
{assetStore.getBucketByDate(scrubBucket.bucketDate)?.bucketDateFormattted}
</p>
{/if}
</div>
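The scrubber label now comes from the bucket itself (bucketDateFormattted), which the asset store formats with the user's locale, instead of a hard-coded luxon 'MMM yyyy' pattern. Roughly the difference, sketched here with Intl.DateTimeFormat purely for illustration (the store may format differently):

```typescript
// Locale-aware month/year label for a time bucket; the helper name is illustrative.
const formatBucketLabel = (bucketDate: string, locale?: string) =>
  new Intl.DateTimeFormat(locale, { month: 'short', year: 'numeric' }).format(new Date(bucketDate));

console.log(formatBucketLabel('2024-11-15', 'en-US')); // 'Nov 2024'
console.log(formatBucketLabel('2024-11-15', 'fr-FR')); // 'nov. 2024'
```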

View File

@@ -270,6 +270,7 @@ export const langs = [
{ name: 'Estonian', code: 'et', loader: () => import('$i18n/et.json') },
{ name: 'Persian', code: 'fa', loader: () => import('$i18n/fa.json') },
{ name: 'Finnish', code: 'fi', loader: () => import('$i18n/fi.json') },
{ name: 'Filipino', code: 'fil', loader: () => import('$i18n/fil.json') },
{ name: 'French', code: 'fr', loader: () => import('$i18n/fr.json') },
{ name: 'Hebrew', code: 'he', loader: () => import('$i18n/he.json') },
{ name: 'Hindi', code: 'hi', loader: () => import('$i18n/hi.json') },
@@ -291,6 +292,7 @@ export const langs = [
{ name: 'Malay', code: 'ms', loader: () => import('$i18n/ms.json') },
{ name: 'Norwegian Bokmål', code: 'nb-NO', weblateCode: 'nb_NO', loader: () => import('$i18n/nb_NO.json') },
{ name: 'Dutch', code: 'nl', loader: () => import('$i18n/nl.json') },
{ name: 'Norwegian Nynorsk', code: 'nn', loader: () => import('$i18n/nn.json') },
{ name: 'Polish', code: 'pl', loader: () => import('$i18n/pl.json') },
{ name: 'Portuguese', code: 'pt', loader: () => import('$i18n/pt.json') },
{ name: 'Portuguese (Brazil) ', code: 'pt-BR', weblateCode: 'pt_BR', loader: () => import('$i18n/pt_BR.json') },

View File

@@ -5,11 +5,13 @@ import { DateTime } from 'luxon';
describe('formatGroupTitle', () => {
beforeAll(() => {
vi.useFakeTimers();
process.env.TZ = 'UTC';
vi.setSystemTime(new Date('2024-07-27T12:00:00Z'));
});
afterAll(() => {
vi.useRealTimers();
delete process.env.TZ;
});
it('formats today', () => {
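Pinning TZ alongside the fake timers keeps the 'today' and 'yesterday' group titles deterministic, since the same instant can fall on different local calendar days depending on the machine's timezone. For example, with luxon (which this spec already imports):

```typescript
import { DateTime } from 'luxon';

const instant = DateTime.fromISO('2024-07-27T23:30:00Z');
console.log(instant.setZone('utc').toISODate());              // '2024-07-27'
console.log(instant.setZone('Pacific/Auckland').toISODate()); // '2024-07-28'
```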

View File

@@ -16,8 +16,6 @@
enum Field {
CITY = 'exifInfo.city',
TAGS = 'smartInfo.tags',
OBJECTS = 'smartInfo.objects',
}
const getFieldItems = (items: SearchExploreResponseDto[], field: Field) => {

View File

@@ -64,8 +64,20 @@
type SettingsComponent = ComponentType<SvelteComponent<SettingsComponentProps>>;
// https://stackoverflow.com/questions/16167581/sort-object-properties-and-json-stringify/43636793#43636793
const jsonReplacer = (key: string, value: unknown) =>
value instanceof Object && !Array.isArray(value)
? Object.keys(value)
.sort()
// eslint-disable-next-line unicorn/no-array-reduce
.reduce((sorted: { [key: string]: unknown }, key) => {
sorted[key] = (value as { [key: string]: unknown })[key];
return sorted;
}, {})
: value;
const downloadConfig = () => {
const blob = new Blob([JSON.stringify(config, null, 2)], { type: 'application/json' });
const blob = new Blob([JSON.stringify(config, jsonReplacer, 2)], { type: 'application/json' });
const downloadKey = 'immich-config.json';
downloadManager.add(downloadKey, blob.size);
downloadManager.update(downloadKey, blob.size);
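Because JSON.stringify calls the replacer for every nested object and then serializes the returned object's keys in insertion order, sorting at each level yields a stable export that diffs cleanly across versions. A quick standalone check of the replacer introduced above:

```typescript
const jsonReplacer = (_key: string, value: unknown) =>
  value instanceof Object && !Array.isArray(value)
    ? Object.keys(value)
        .sort()
        .reduce((sorted: { [key: string]: unknown }, key) => {
          sorted[key] = (value as { [key: string]: unknown })[key];
          return sorted;
        }, {})
    : value;

console.log(JSON.stringify({ b: 1, a: { d: 2, c: 3 } }, jsonReplacer, 2));
// {
//   "a": {
//     "c": 3,
//     "d": 2
//   },
//   "b": 1
// }
```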
@@ -240,7 +252,7 @@
<div class="hidden lg:block">
<SearchBar placeholder={$t('search_settings')} bind:name={searchQuery} showLoadingSpinner={false} />
</div>
<LinkButton on:click={() => copyToClipboard(JSON.stringify(config, null, 2))}>
<LinkButton on:click={() => copyToClipboard(JSON.stringify(config, jsonReplacer, 2))}>
<div class="flex place-items-center gap-2 text-sm">
<Icon path={mdiContentCopy} size="18" />
{$t('copy_to_clipboard')}