Compare commits

..

6 Commits

| Author | SHA1 | Message | Date |
| --- | --- | --- | --- |
| Alex Tran | 27fa817ea6 | Merge branch 'main' of github.com:apeman76/immich into apeman76/main | 2024-07-08 22:48:45 -05:00 |
| Alex | 4bf2ded729 | Merge branch 'main' into main | 2024-07-08 22:46:30 -05:00 |
| Alex Tran | fed7d0464a | format fix | 2024-07-08 22:44:29 -05:00 |
| apeman | 3ab67886b0 | Fix: Update tests to include autorotate | 2024-07-06 01:09:39 +02:00 |
| apeman | 2b06d4b284 | Update tests to include autorotate | 2024-07-06 01:08:14 +02:00 |
| apeman | 34bea0190e | Add noautorotate to all HW recorders | 2024-07-06 00:37:08 +02:00 |
223 changed files with 4340 additions and 8283 deletions

View File

@@ -29,4 +29,3 @@ web/node_modules/
web/coverage/
web/.svelte-kit
web/build/
web/.env

View File

@@ -1,13 +1,11 @@
title: "[Feature] feature-name-goes-here"
title: "[Feature] <feature-name-goes-here>"
labels: ["feature"]
body:
- type: markdown
attributes:
value: |
Please use this form to request a new feature for Immich.
Stick to only a single feature per request. If you list multiple different features at once,
your request will be closed.
Please use this form to request a new feature for Immich
- type: checkboxes
attributes:

View File

@@ -1 +1 @@
20.15.1
20.15

View File

@@ -1,4 +1,4 @@
FROM node:20.15.1-alpine3.20@sha256:34b7aa411056c85dbf71d240d26516949b3f72b318d796c26b57caaa1df5639a as core
FROM node:20.15.0-alpine3.20@sha256:df01469346db2bf1cfc1f7261aeab86b2960efa840fe2bd46d83ff339f463665 as core
WORKDIR /usr/src/open-api/typescript-sdk
COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./

107
cli/package-lock.json generated
View File

@@ -1,12 +1,12 @@
{
"name": "@immich/cli",
"version": "2.2.11",
"version": "2.2.7",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@immich/cli",
"version": "2.2.11",
"version": "2.2.7",
"license": "GNU Affero General Public License version 3",
"dependencies": {
"fast-glob": "^3.3.2",
@@ -22,7 +22,7 @@
"@types/cli-progress": "^3.11.0",
"@types/lodash-es": "^4.17.12",
"@types/mock-fs": "^4.13.1",
"@types/node": "^20.14.10",
"@types/node": "^20.14.9",
"@typescript-eslint/eslint-plugin": "^7.0.0",
"@typescript-eslint/parser": "^7.0.0",
"@vitest/coverage-v8": "^1.2.2",
@@ -49,14 +49,14 @@
},
"../open-api/typescript-sdk": {
"name": "@immich/sdk",
"version": "1.109.2",
"version": "1.107.2",
"dev": true,
"license": "GNU Affero General Public License version 3",
"dependencies": {
"@oazapfts/runtime": "^1.0.2"
},
"devDependencies": {
"@types/node": "^20.14.10",
"@types/node": "^20.14.9",
"typescript": "^5.3.3"
}
},
@@ -1124,11 +1124,10 @@
"dev": true
},
"node_modules/@types/cli-progress": {
"version": "3.11.6",
"resolved": "https://registry.npmjs.org/@types/cli-progress/-/cli-progress-3.11.6.tgz",
"integrity": "sha512-cE3+jb9WRlu+uOSAugewNpITJDt1VF8dHOopPO4IABFc3SXYL5WE/+PTz/FCdZRRfIujiWW3n3aMbv1eIGVRWA==",
"version": "3.11.5",
"resolved": "https://registry.npmjs.org/@types/cli-progress/-/cli-progress-3.11.5.tgz",
"integrity": "sha512-D4PbNRbviKyppS5ivBGyFO29POlySLmA2HyUFE4p5QGazAMM3CwkKWcvTl8gvElSuxRh6FPKL8XmidX873ou4g==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/node": "*"
}
@@ -1164,9 +1163,9 @@
}
},
"node_modules/@types/node": {
"version": "20.14.10",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.14.10.tgz",
"integrity": "sha512-MdiXf+nDuMvY0gJKxyfZ7/6UFsETO7mGKF54MVD/ekJS6HdFtpZFBgrh6Pseu64XTb2MLyFPlbW6hj8HYRQNOQ==",
"version": "20.14.9",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.14.9.tgz",
"integrity": "sha512-06OCtnTXtWOZBJlRApleWndH4JsRVs1pDCc8dLSQp+7PpUpX3ePdHyeNSFTeSe7FtKyQkrlPvHwJOW3SLd8Oyg==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -1180,17 +1179,17 @@
"dev": true
},
"node_modules/@typescript-eslint/eslint-plugin": {
"version": "7.16.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.16.0.tgz",
"integrity": "sha512-py1miT6iQpJcs1BiJjm54AMzeuMPBSPuKPlnT8HlfudbcS5rYeX5jajpLf3mrdRh9dA/Ec2FVUY0ifeVNDIhZw==",
"version": "7.15.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.15.0.tgz",
"integrity": "sha512-uiNHpyjZtFrLwLDpHnzaDlP3Tt6sGMqTCiqmxaN4n4RP0EfYZDODJyddiFDF44Hjwxr5xAcaYxVKm9QKQFJFLA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@eslint-community/regexpp": "^4.10.0",
"@typescript-eslint/scope-manager": "7.16.0",
"@typescript-eslint/type-utils": "7.16.0",
"@typescript-eslint/utils": "7.16.0",
"@typescript-eslint/visitor-keys": "7.16.0",
"@typescript-eslint/scope-manager": "7.15.0",
"@typescript-eslint/type-utils": "7.15.0",
"@typescript-eslint/utils": "7.15.0",
"@typescript-eslint/visitor-keys": "7.15.0",
"graphemer": "^1.4.0",
"ignore": "^5.3.1",
"natural-compare": "^1.4.0",
@@ -1214,16 +1213,16 @@
}
},
"node_modules/@typescript-eslint/parser": {
"version": "7.16.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.16.0.tgz",
"integrity": "sha512-ar9E+k7CU8rWi2e5ErzQiC93KKEFAXA2Kky0scAlPcxYblLt8+XZuHUZwlyfXILyQa95P6lQg+eZgh/dDs3+Vw==",
"version": "7.15.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.15.0.tgz",
"integrity": "sha512-k9fYuQNnypLFcqORNClRykkGOMOj+pV6V91R4GO/l1FDGwpqmSwoOQrOHo3cGaH63e+D3ZiCAOsuS/D2c99j/A==",
"dev": true,
"license": "BSD-2-Clause",
"dependencies": {
"@typescript-eslint/scope-manager": "7.16.0",
"@typescript-eslint/types": "7.16.0",
"@typescript-eslint/typescript-estree": "7.16.0",
"@typescript-eslint/visitor-keys": "7.16.0",
"@typescript-eslint/scope-manager": "7.15.0",
"@typescript-eslint/types": "7.15.0",
"@typescript-eslint/typescript-estree": "7.15.0",
"@typescript-eslint/visitor-keys": "7.15.0",
"debug": "^4.3.4"
},
"engines": {
@@ -1243,14 +1242,14 @@
}
},
"node_modules/@typescript-eslint/scope-manager": {
"version": "7.16.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.16.0.tgz",
"integrity": "sha512-8gVv3kW6n01Q6TrI1cmTZ9YMFi3ucDT7i7aI5lEikk2ebk1AEjrwX8MDTdaX5D7fPXMBLvnsaa0IFTAu+jcfOw==",
"version": "7.15.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.15.0.tgz",
"integrity": "sha512-Q/1yrF/XbxOTvttNVPihxh1b9fxamjEoz2Os/Pe38OHwxC24CyCqXxGTOdpb4lt6HYtqw9HetA/Rf6gDGaMPlw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/types": "7.16.0",
"@typescript-eslint/visitor-keys": "7.16.0"
"@typescript-eslint/types": "7.15.0",
"@typescript-eslint/visitor-keys": "7.15.0"
},
"engines": {
"node": "^18.18.0 || >=20.0.0"
@@ -1261,14 +1260,14 @@
}
},
"node_modules/@typescript-eslint/type-utils": {
"version": "7.16.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.16.0.tgz",
"integrity": "sha512-j0fuUswUjDHfqV/UdW6mLtOQQseORqfdmoBNDFOqs9rvNVR2e+cmu6zJu/Ku4SDuqiJko6YnhwcL8x45r8Oqxg==",
"version": "7.15.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.15.0.tgz",
"integrity": "sha512-SkgriaeV6PDvpA6253PDVep0qCqgbO1IOBiycjnXsszNTVQe5flN5wR5jiczoEoDEnAqYFSFFc9al9BSGVltkg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/typescript-estree": "7.16.0",
"@typescript-eslint/utils": "7.16.0",
"@typescript-eslint/typescript-estree": "7.15.0",
"@typescript-eslint/utils": "7.15.0",
"debug": "^4.3.4",
"ts-api-utils": "^1.3.0"
},
@@ -1289,9 +1288,9 @@
}
},
"node_modules/@typescript-eslint/types": {
"version": "7.16.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.16.0.tgz",
"integrity": "sha512-fecuH15Y+TzlUutvUl9Cc2XJxqdLr7+93SQIbcZfd4XRGGKoxyljK27b+kxKamjRkU7FYC6RrbSCg0ALcZn/xw==",
"version": "7.15.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.15.0.tgz",
"integrity": "sha512-aV1+B1+ySXbQH0pLK0rx66I3IkiZNidYobyfn0WFsdGhSXw+P3YOqeTq5GED458SfB24tg+ux3S+9g118hjlTw==",
"dev": true,
"license": "MIT",
"engines": {
@@ -1303,14 +1302,14 @@
}
},
"node_modules/@typescript-eslint/typescript-estree": {
"version": "7.16.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.16.0.tgz",
"integrity": "sha512-a5NTvk51ZndFuOLCh5OaJBELYc2O3Zqxfl3Js78VFE1zE46J2AaVuW+rEbVkQznjkmlzWsUI15BG5tQMixzZLw==",
"version": "7.15.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.15.0.tgz",
"integrity": "sha512-gjyB/rHAopL/XxfmYThQbXbzRMGhZzGw6KpcMbfe8Q3nNQKStpxnUKeXb0KiN/fFDR42Z43szs6rY7eHk0zdGQ==",
"dev": true,
"license": "BSD-2-Clause",
"dependencies": {
"@typescript-eslint/types": "7.16.0",
"@typescript-eslint/visitor-keys": "7.16.0",
"@typescript-eslint/types": "7.15.0",
"@typescript-eslint/visitor-keys": "7.15.0",
"debug": "^4.3.4",
"globby": "^11.1.0",
"is-glob": "^4.0.3",
@@ -1332,16 +1331,16 @@
}
},
"node_modules/@typescript-eslint/utils": {
"version": "7.16.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.16.0.tgz",
"integrity": "sha512-PqP4kP3hb4r7Jav+NiRCntlVzhxBNWq6ZQ+zQwII1y/G/1gdIPeYDCKr2+dH6049yJQsWZiHU6RlwvIFBXXGNA==",
"version": "7.15.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.15.0.tgz",
"integrity": "sha512-hfDMDqaqOqsUVGiEPSMLR/AjTSCsmJwjpKkYQRo1FNbmW4tBwBspYDwO9eh7sKSTwMQgBw9/T4DHudPaqshRWA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@eslint-community/eslint-utils": "^4.4.0",
"@typescript-eslint/scope-manager": "7.16.0",
"@typescript-eslint/types": "7.16.0",
"@typescript-eslint/typescript-estree": "7.16.0"
"@typescript-eslint/scope-manager": "7.15.0",
"@typescript-eslint/types": "7.15.0",
"@typescript-eslint/typescript-estree": "7.15.0"
},
"engines": {
"node": "^18.18.0 || >=20.0.0"
@@ -1355,13 +1354,13 @@
}
},
"node_modules/@typescript-eslint/visitor-keys": {
"version": "7.16.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.16.0.tgz",
"integrity": "sha512-rMo01uPy9C7XxG7AFsxa8zLnWXTF8N3PYclekWSrurvhwiw1eW88mrKiAYe6s53AUY57nTRz8dJsuuXdkAhzCg==",
"version": "7.15.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.15.0.tgz",
"integrity": "sha512-Hqgy/ETgpt2L5xueA/zHHIl4fJI2O4XUE9l4+OIfbJIRSnTJb/QscncdqqZzofQegIJugRIF57OJea1khw2SDw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/types": "7.16.0",
"@typescript-eslint/types": "7.15.0",
"eslint-visitor-keys": "^3.4.3"
},
"engines": {

View File

@@ -1,6 +1,6 @@
{
"name": "@immich/cli",
"version": "2.2.11",
"version": "2.2.7",
"description": "Command Line Interface (CLI) for Immich",
"type": "module",
"exports": "./dist/index.js",
@@ -18,7 +18,7 @@
"@types/cli-progress": "^3.11.0",
"@types/lodash-es": "^4.17.12",
"@types/mock-fs": "^4.13.1",
"@types/node": "^20.14.10",
"@types/node": "^20.14.9",
"@typescript-eslint/eslint-plugin": "^7.0.0",
"@typescript-eslint/parser": "^7.0.0",
"@vitest/coverage-v8": "^1.2.2",
@@ -64,6 +64,6 @@
"lodash-es": "^4.17.21"
},
"volta": {
"node": "20.15.1"
"node": "20.15.0"
}
}

View File

@@ -2,37 +2,37 @@
# Manual edits may be lost in future updates.
provider "registry.opentofu.org/cloudflare/cloudflare" {
version = "4.37.0"
constraints = "4.37.0"
version = "4.36.0"
constraints = "4.36.0"
hashes = [
"h1:0gOI8arnh2CTcHfGH8iwAe6qz2BRSytmbOiNXZjnrHc=",
"h1:0h0qRJYPHL92Dx3NYZO2WJ21cxyZGEoldzw9aYhPnew=",
"h1:6ri7vZ1MLtQbooicIO4catyIuRq4LHAsIcgd3vGq3AE=",
"h1:7BwVaqxSD9VsmLzs6jDJBJvHPq0dz4I8rCeJAK63Dc4=",
"h1:8tVm+BJvzI14pRbEyt00AvH6oIyqiLRZQ9KxcBeSDhE=",
"h1:FTll1M9rPA7RxEyLB6etQqaqynWWl3WkiwJtHMjPr3Y=",
"h1:L7ysGftn0fstXMjCt3/XEz2giRdEwBsGrdvi4Zw8uzM=",
"h1:PsbAKy7LdSpwZMJZ7bO3lI04hLDTlXke/LCkrKXYwwE=",
"h1:Sjkpr8CKs0rXGcdis5q4Kbqmo5mmosgirnQi65G4sM8=",
"h1:YxJRQdVSzMZR5Ce5M3Gs1SPutXpednxuRwtSSiReHDY=",
"h1:bJrJeBKWEwt4hGQ+3VJR69dsqHORovE8LzuQt9+NTug=",
"h1:hPC7Vk0ZGXCDJ1y5dOepVo1c0PoUulnJUarrMv4gQIQ=",
"h1:joMURZCLUJ2eSlj645xqHWKYbRBYqvajCkhaz7qzi8g=",
"h1:uqo0WgG5lCcG8+gf99VnsKKbJMM1urNZq1FbAT6u3S0=",
"zh:012a6c3e8bf4aca0ebe0884e15bd42fd018659193f2159d5d2bf9948a9be1bc4",
"zh:079666c0a079237af46ed19ffc4143655ee0e8920a274868e44fbc3db88f346d",
"zh:08e7ff86f6848f3109d59ad46f8c0987178eff2f70c8ef03f2d44ae68e42dfb3",
"zh:1ce8a499fdf8f484f7d18ec91566bc0759b07d0ca710990cd60d32b222e416b1",
"zh:348e72338095bffccf7c46c7e6b9d0e063a22d9ae761061b0b31dea1aad22cd9",
"zh:47d39343dea1ef469a2c8e51c8d5993687af427a132da5379796fec27acb5710",
"zh:4cdf8e9579f9af3c72270088fc6e22208f0f91fd4382bc4a860d16040c86917b",
"zh:4fbebb21ecebc7e5ac0ea9e341c5dbea3094fc0579e4dc5b40bfe693164e022e",
"zh:778578dda7dd98576a3fe228132c8b60f646f4cf113638c94f1c40e2b11c027c",
"h1:00/Y+l17VV4RquGSfwDnYsGYzyf2ZmdQwUgeIzXC7eg=",
"h1:489GpKItA/VRIUA5S4+F8MsnurGVciRvUFyIV81MJTU=",
"h1:7cnczyKGj3+gvaJ0r5JIVWLXPbQfkHYejac76MJx+I8=",
"h1:8rmr1PjJc14Xmor2eEvo5/WBojylt1eYdx6VbSU3Ulo=",
"h1:HjgphNjtgny5tkcUAQoGgBdcuQ+0IyhL8yLsiBqWAP0=",
"h1:LH3umxdBnJcAyeVoBLVn+PC0F0CzN6v9UN6lb6CqQPE=",
"h1:Xx6WUD/zB8fM9SjkFx06Fgx2K7aGJIVvsJS2pwqALEM=",
"h1:YizL5YN9zQ8YkSR6V/G201YrCVdnkF9EUIK4lpROWiA=",
"h1:aPcXVGjYcCJdqvWSzc/dEjwj05LnbWZje8IanygVjcI=",
"h1:eKCvfashdCqfDcFGXE2gq+XxAURD5SzuaQ9Brs3zLos=",
"h1:gpKcBYkBcfn/uF1A8W7MD/OysMZW7EU4QVYvPEEnxGc=",
"h1:kCkcxZZnkKAnMz9scUQHb19d9/l9FPOHovAyrvtA618=",
"h1:t8mXXnICTeKqoD29uvyLFHVWMfMzTUrJuHje8lpI0zU=",
"h1:zjzavjIdLDGRYsWd3v0HJz6ul12Cewj9RW/cqAQ4DxI=",
"zh:02665712b3893307596b3caab99cf1f2502d5caca18e22d4b37bb535e628e102",
"zh:1514b0d3ef62934484ac471113ee68cddec0c21e56b4f710922741fe9b6e6fdf",
"zh:1fab4dfcecbcea13267b42e5ff05ba0692aa2dcb247b8e633fea0daf49feb156",
"zh:24d8367295fe1f1b2be37802aecb96edf32f743364663ffe781d1bb92438395d",
"zh:34e84e7940c99dcf65663cfd25afac22bf5c8a5ff2cd21900c67180d3a072be9",
"zh:3d71d63204a329acf1d1de8638f2c725243cb94cf444d2d7acde54b3d1ac1696",
"zh:57831ba88e779a762bcfa224ba9eac8bc22ef9cd70cd541d848b351e0ba6a75c",
"zh:6407560f2e548afcb4852c91efc664627a9ee565c31a9c81fc9ea1806fca0567",
"zh:738ddbc664d75f4859aa09444a27809bc398795a8ea8f5be8531040690287712",
"zh:841ca2b2d78b6f8d33ec3435bc090c5e04a3a7d85c80df11227a7ea00d36f6b1",
"zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f",
"zh:894071f0f42571f820918d1a4316704923e29c5b2392704c1cbd063a04a641b8",
"zh:8d11dd73dd499c74d89f77a7e1b3d4a077ac88b0c9c3412e9a6a1b4efe17d107",
"zh:991e088be8381a73872cd33bb659e9dd69d7ab1f1f8d89b3cd17ffe59dffc65f",
"zh:9c0848b9c7e6799c9ffcf3afa70ad94a027f3e15a94679d56790714de0b072c5",
"zh:ad71ae800065ffc24b94d994250136ae8a9f6da704cf91b0dc9e14989e947369",
"zh:8b3d3d63354032ab9b2403c50728e9aa4e83c7367eaad2d18794221addeafc0f",
"zh:9e293443fe3127e488f540229983c1b9688268185f87567bb3d18e794697acd2",
"zh:b3a22439156e46461213db183e2e89569cd2e8d7cbcfc4b9f90469090e105807",
"zh:f430feb5d51891e84028459e57039045dea4f1f5fcf671161d8ac2d8f28763f3",
]
}

View File

@@ -5,7 +5,7 @@ terraform {
required_providers {
cloudflare = {
source = "cloudflare/cloudflare"
version = "4.37.0"
version = "4.36.0"
}
}
}

View File

@@ -2,37 +2,37 @@
# Manual edits may be lost in future updates.
provider "registry.opentofu.org/cloudflare/cloudflare" {
version = "4.37.0"
constraints = "4.37.0"
version = "4.36.0"
constraints = "4.36.0"
hashes = [
"h1:0gOI8arnh2CTcHfGH8iwAe6qz2BRSytmbOiNXZjnrHc=",
"h1:0h0qRJYPHL92Dx3NYZO2WJ21cxyZGEoldzw9aYhPnew=",
"h1:6ri7vZ1MLtQbooicIO4catyIuRq4LHAsIcgd3vGq3AE=",
"h1:7BwVaqxSD9VsmLzs6jDJBJvHPq0dz4I8rCeJAK63Dc4=",
"h1:8tVm+BJvzI14pRbEyt00AvH6oIyqiLRZQ9KxcBeSDhE=",
"h1:FTll1M9rPA7RxEyLB6etQqaqynWWl3WkiwJtHMjPr3Y=",
"h1:L7ysGftn0fstXMjCt3/XEz2giRdEwBsGrdvi4Zw8uzM=",
"h1:PsbAKy7LdSpwZMJZ7bO3lI04hLDTlXke/LCkrKXYwwE=",
"h1:Sjkpr8CKs0rXGcdis5q4Kbqmo5mmosgirnQi65G4sM8=",
"h1:YxJRQdVSzMZR5Ce5M3Gs1SPutXpednxuRwtSSiReHDY=",
"h1:bJrJeBKWEwt4hGQ+3VJR69dsqHORovE8LzuQt9+NTug=",
"h1:hPC7Vk0ZGXCDJ1y5dOepVo1c0PoUulnJUarrMv4gQIQ=",
"h1:joMURZCLUJ2eSlj645xqHWKYbRBYqvajCkhaz7qzi8g=",
"h1:uqo0WgG5lCcG8+gf99VnsKKbJMM1urNZq1FbAT6u3S0=",
"zh:012a6c3e8bf4aca0ebe0884e15bd42fd018659193f2159d5d2bf9948a9be1bc4",
"zh:079666c0a079237af46ed19ffc4143655ee0e8920a274868e44fbc3db88f346d",
"zh:08e7ff86f6848f3109d59ad46f8c0987178eff2f70c8ef03f2d44ae68e42dfb3",
"zh:1ce8a499fdf8f484f7d18ec91566bc0759b07d0ca710990cd60d32b222e416b1",
"zh:348e72338095bffccf7c46c7e6b9d0e063a22d9ae761061b0b31dea1aad22cd9",
"zh:47d39343dea1ef469a2c8e51c8d5993687af427a132da5379796fec27acb5710",
"zh:4cdf8e9579f9af3c72270088fc6e22208f0f91fd4382bc4a860d16040c86917b",
"zh:4fbebb21ecebc7e5ac0ea9e341c5dbea3094fc0579e4dc5b40bfe693164e022e",
"zh:778578dda7dd98576a3fe228132c8b60f646f4cf113638c94f1c40e2b11c027c",
"h1:00/Y+l17VV4RquGSfwDnYsGYzyf2ZmdQwUgeIzXC7eg=",
"h1:489GpKItA/VRIUA5S4+F8MsnurGVciRvUFyIV81MJTU=",
"h1:7cnczyKGj3+gvaJ0r5JIVWLXPbQfkHYejac76MJx+I8=",
"h1:8rmr1PjJc14Xmor2eEvo5/WBojylt1eYdx6VbSU3Ulo=",
"h1:HjgphNjtgny5tkcUAQoGgBdcuQ+0IyhL8yLsiBqWAP0=",
"h1:LH3umxdBnJcAyeVoBLVn+PC0F0CzN6v9UN6lb6CqQPE=",
"h1:Xx6WUD/zB8fM9SjkFx06Fgx2K7aGJIVvsJS2pwqALEM=",
"h1:YizL5YN9zQ8YkSR6V/G201YrCVdnkF9EUIK4lpROWiA=",
"h1:aPcXVGjYcCJdqvWSzc/dEjwj05LnbWZje8IanygVjcI=",
"h1:eKCvfashdCqfDcFGXE2gq+XxAURD5SzuaQ9Brs3zLos=",
"h1:gpKcBYkBcfn/uF1A8W7MD/OysMZW7EU4QVYvPEEnxGc=",
"h1:kCkcxZZnkKAnMz9scUQHb19d9/l9FPOHovAyrvtA618=",
"h1:t8mXXnICTeKqoD29uvyLFHVWMfMzTUrJuHje8lpI0zU=",
"h1:zjzavjIdLDGRYsWd3v0HJz6ul12Cewj9RW/cqAQ4DxI=",
"zh:02665712b3893307596b3caab99cf1f2502d5caca18e22d4b37bb535e628e102",
"zh:1514b0d3ef62934484ac471113ee68cddec0c21e56b4f710922741fe9b6e6fdf",
"zh:1fab4dfcecbcea13267b42e5ff05ba0692aa2dcb247b8e633fea0daf49feb156",
"zh:24d8367295fe1f1b2be37802aecb96edf32f743364663ffe781d1bb92438395d",
"zh:34e84e7940c99dcf65663cfd25afac22bf5c8a5ff2cd21900c67180d3a072be9",
"zh:3d71d63204a329acf1d1de8638f2c725243cb94cf444d2d7acde54b3d1ac1696",
"zh:57831ba88e779a762bcfa224ba9eac8bc22ef9cd70cd541d848b351e0ba6a75c",
"zh:6407560f2e548afcb4852c91efc664627a9ee565c31a9c81fc9ea1806fca0567",
"zh:738ddbc664d75f4859aa09444a27809bc398795a8ea8f5be8531040690287712",
"zh:841ca2b2d78b6f8d33ec3435bc090c5e04a3a7d85c80df11227a7ea00d36f6b1",
"zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f",
"zh:894071f0f42571f820918d1a4316704923e29c5b2392704c1cbd063a04a641b8",
"zh:8d11dd73dd499c74d89f77a7e1b3d4a077ac88b0c9c3412e9a6a1b4efe17d107",
"zh:991e088be8381a73872cd33bb659e9dd69d7ab1f1f8d89b3cd17ffe59dffc65f",
"zh:9c0848b9c7e6799c9ffcf3afa70ad94a027f3e15a94679d56790714de0b072c5",
"zh:ad71ae800065ffc24b94d994250136ae8a9f6da704cf91b0dc9e14989e947369",
"zh:8b3d3d63354032ab9b2403c50728e9aa4e83c7367eaad2d18794221addeafc0f",
"zh:9e293443fe3127e488f540229983c1b9688268185f87567bb3d18e794697acd2",
"zh:b3a22439156e46461213db183e2e89569cd2e8d7cbcfc4b9f90469090e105807",
"zh:f430feb5d51891e84028459e57039045dea4f1f5fcf671161d8ac2d8f28763f3",
]
}

View File

@@ -5,7 +5,7 @@ terraform {
required_providers {
cloudflare = {
source = "cloudflare/cloudflare"
version = "4.37.0"
version = "4.36.0"
}
}
}

View File

@@ -73,7 +73,7 @@ services:
container_name: immich_prometheus
ports:
- 9090:9090
image: prom/prometheus@sha256:f20d3127bf2876f4a1df76246fca576b41ddf1125ed1c546fbd8b16ea55117e6
image: prom/prometheus@sha256:075b1ba2c4ebb04bc3a6ab86c06ec8d8099f8fda1c96ef6d104d9bb1def1d8bc
volumes:
- ./prometheus.yml:/etc/prometheus/prometheus.yml
- prometheus-data:/prometheus

View File

@@ -1 +1 @@
20.15.1
20.15

View File

@@ -1,7 +1,7 @@
---
title: The Immich core team goes full-time
authors: [alextran]
tags: [update, announcement, FUTO]
tags: [update, announcement, futo]
date: 2024-05-01T00:00
---

View File

@@ -1,91 +0,0 @@
---
title: Licensing announcement - Purchase a license to support Immich
authors: [alextran]
tags: [update, announcement, FUTO]
date: 2024-07-18T00:00
---
Hello everybody,
Firstly, on behalf of the Immich team, I'd like to thank everybody for your continuous support of Immich since the very first day! Your contributions, encouragement, and community engagement have helped bring Immich to its current state. The team and I are forever grateful for that.
Since our [last announcement of the core team joining FUTO to work on Immich full-time](https://immich.app/blog/2024/immich-core-team-goes-fulltime), one of the goals of our new position is to foster a healthy relationship between the developers and the users. We believe that this enables us to create great software, establish transparent policies and build trust.
We want to build a great software application that brings value to you and your loved ones' lives. We are not using you as a product, i.e., selling or tracking your data. We are not putting annoying ads into our software. We respect your privacy. We want to be compensated for the hard work we put in to build Immich for you.
With those notes, we have enabled a way for you to financially support the continued development of Immich, ensuring the software can move forward and will be maintained, by offering a lifetime license of the software. We think if you like and use software, you should pay for it, but _we're never going to force anyone to pay or try to limit Immich for those who don't._
There are two types of license that you can choose to purchase: **Server License** and **Individual License**.
### Server License
This is a lifetime license costing **$99.99**. The license is applied to the whole server. You and all users that use your server are licensed.
### Individual License
This is a lifetime license costing **$24.99**. The license is applied to a single user, and can be used on any server they choose to connect to.
<img
width="837"
alt="license-social-gh"
src="https://github.com/user-attachments/assets/241932ed-ef3b-44ec-a9e2-ee80754e0cca"
/>
You can purchase the license on [our page - https://buy.immich.app](https://buy.immich.app).
Starting with release `v1.109.0` you can purchase and enter your purchased license key directly in the app.
<img
width="1414"
alt="license-page-gh"
src="https://github.com/user-attachments/assets/364fc32a-f6ef-4594-9fea-28d5a26ad77c"
/>
## Thank you
Thank you again for your support, this will help create a strong foundation and stability for the Immich team to continue developing and maintaining the project that you love to use.
<p align="center">
<img
src="https://media.giphy.com/media/v1.Y2lkPTc5MGI3NjExbjY2eWc5Y2F0ZW56MmR4aWE0dDhzZXlidXRmYWZyajl1bWZidXZpcyZlcD12MV9pbnRlcm5hbF9naWZfYnlfaWQmY3Q9Zw/87CKDqErVfMqY/giphy.gif"
width="550"
title="SUPPORT THE PROJECT!"
/>
</p>
<br />
<br />
Cheers! 🎉
Immich team
# FAQ
### 1. Where can I purchase a license?
There are several places where you can purchase the license:
- [https://buy.immich.app](https://buy.immich.app)
- [https://pay.futo.org](https://pay.futo.org/)
- or directly from the app.
### 2. Do I need both _Individual License_ and _Server License_?
No.
If you are the admin and the sole user, or your instance has fewer than 4 users in total, you can buy the **Individual License** for each user.
If your instance has more than 4 users, it is more cost-effective to buy the **Server License**, which will license all the users on your instance.
### 3. What do I do if I don't pay?
You can continue using Immich for an unlimited trial period.
### 4. Will there be any paywalled features?
No, there will never be any paywalled features.
### 5. Where can I get support regarding payment issues?
You can email us at `billing@futo.org` with your `orderId` and your email address, or reach us on our Discord server.

View File

@@ -167,7 +167,7 @@ Immich uses CLIP models. For more information about CLIP and its capabilities, r
### How does facial recognition work?
See [How Facial Recognition Works](/docs/features/facial-recognition#How-Facial-Recognition-Works) for details.
For face detection and recognition, Immich uses [InsightFace models](https://github.com/deepinsight/insightface/tree/master/model_zoo).
### How can I disable machine learning?
@@ -181,15 +181,19 @@ However, disabling all jobs will not disable the machine learning service itself
### I'm getting errors about models being corrupt or failing to download. What do I do?
You can delete the model cache volume, where models are downloaded. This will give the service a clean environment to download the model again. If models are failing to download entirely, you can manually download them from [Hugging Face][huggingface] and place them in the cache folder.
You can delete the model cache volume, where models are downloaded. This will give the service a clean environment to download the model again. If models are failing to download entirely, you can manually download them from [Huggingface][huggingface] and place them in the cache folder.
### Can I use a custom CLIP model?
No, this is not supported. Only models listed in the [Hugging Face][huggingface] page are compatible. Feel free to make a feature request if there's a model not listed here that you think should be added.
No, this is not supported. Only models listed in the [Huggingface][huggingface] page are compatible. Feel free to make a feature request if there's a model not listed here that you think should be added.
### I want to be able to search in other languages besides English. How can I do that?
You can change to a multilingual CLIP model. See [here](/docs/features/smart-search#CLIP-model) for instructions.
You can change to a multilingual model listed [here](https://huggingface.co/collections/immich-app/multilingual-clip-654eb08c2382f591eeb8c2a7) by going to Administration > Machine Learning Settings > Smart Search and replacing the name of the model. Be sure to re-run Smart Search on all assets after this change. You can then search in over 100 languages.
:::note
Feel free to make a feature request if there's a model you want to use that isn't in [Immich Huggingface list][huggingface].
:::
### Does Immich support Facial Recognition for videos?
@@ -230,7 +234,7 @@ ls clip/ facial-recognition/
### Why is Immich slow on low-memory systems like the Raspberry Pi?
Immich optionally uses transcoding and machine learning for several features. However, it can be too heavy to run on a Raspberry Pi. You can [mitigate](/docs/FAQ#can-i-lower-cpu-and-ram-usage) this or host Immich's machine-learning container on a [more powerful system](/docs/guides/remote-machine-learning), or [disable](/docs/FAQ#how-can-i-disable-machine-learning) machine learning entirely.
Immich optionally uses machine learning for several features. However, it can be too heavy to run on a Raspberry Pi. You can [mitigate](/docs/FAQ#can-i-lower-cpu-and-ram-usage) this or host Immich's machine-learning container on a [more powerful system](/docs/guides/remote-machine-learning), or [disable](/docs/FAQ#how-can-i-disable-machine-learning) machine learning entirely.
### Can I lower CPU and RAM usage?
@@ -239,12 +243,10 @@ The initial backup is the most intensive due to the number of jobs running. The
- Lower the job concurrency for these jobs to 1.
- Under Settings > Transcoding Settings > Threads, set the number of threads to a low number like 1 or 2.
- Under Settings > Machine Learning Settings > Facial Recognition > Model Name, you can change the facial recognition model to `buffalo_s` instead of `buffalo_l`. The former is a smaller and faster model, albeit not as good.
- For facial recognition on new images to work properly, you must re-run the Face Detection job for all images after this.
- At the container level, you can [set resource constraints](/docs/FAQ#can-i-limit-cpu-and-ram-usage) to lower usage further.
- It's recommended to only apply these constraints _after_ taking some of the measures here for best performance.
- For facial recognition on new images to work properly, you must re-run the Face Detection job for all images after this.
- If these changes are not enough, see [below](/docs/FAQ#how-can-i-disable-machine-learning) for instructions on how to disable machine learning.
### Can I limit CPU and RAM usage?
### Can I limit the amount of CPU and RAM usage?
By default, a container has no resource constraints and can use as much of a given resource as the host's kernel scheduler allows. To limit this, you can add the following to the `docker-compose.yml` block of any containers that you want to have limited resources.
@@ -264,8 +266,6 @@ deploy:
</details>
For more details, you can look at the [original docker docs](https://docs.docker.com/config/containers/resource_constraints/) or use this [guide](https://www.baeldung.com/ops/docker-memory-limit).
Note that memory constraints work by terminating the container, so this can introduce instability if set too low.
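For reference, a minimal sketch of such a block in `docker-compose.yml` (the service name and the limit values here are illustrative, not recommendations):

```yaml
services:
  immich-machine-learning:
    deploy:
      resources:
        limits:
          cpus: '2.0' # illustrative: cap the container at two CPU cores
          memory: 2G # illustrative: the container is terminated if it exceeds this
```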
### How can I boost machine learning speed?
:::note
@@ -275,16 +275,21 @@ This advice improves throughput, not latency. This is to say that it will make S
You can increase throughput by increasing the job concurrency for machine learning jobs (Smart Search, Face Detection). With higher concurrency, the host will work on more assets in parallel. You can do this by navigating to Administration > Settings > Job Settings and increasing concurrency as needed.
:::danger
On a normal machine, 2 or 3 concurrent jobs can probably max the CPU. Storage speed and latency can quickly become the limiting factor beyond this, particularly when using HDDs.
On a normal machine, 2 or 3 concurrent jobs can probably max the CPU. Beyond this, note that storage speed and latency may quickly become the limiting factor, particularly when using HDDs.
The concurrency can be increased more comfortably with a GPU, but should still not be above 16 in most cases.
Do not exaggerate with the amount of jobs because you're probably thoroughly overloading the server.
Do not exaggerate with the job concurrency because you're probably thoroughly overloading the server.
More details can be found [here](https://discord.com/channels/979116623879368755/994044917355663450/1174711719994605708)
:::
### My server shows Server Status Offline | Version Unknown. What can I do?
### Why is Immich using so much of my CPU?
You need to enable WebSockets on your reverse proxy.
When a large number of assets are uploaded to Immich, it makes sense that the CPU and RAM will be heavily used for machine learning work and creating image thumbnails.
Once this process is completed, CPU usage will drop to around 3-5%.
### My server shows Server Status Offline | Version Unknown what can I do?
You need to enable Websocket on your reverse proxy.
---

View File

@@ -2,7 +2,7 @@
## Overview
Immich recognizes faces in your photos and videos and groups them together into people. You can then assign names to these people and search for them.
Immich recognizes faces in your photos and videos and groups them together. You can then assign names to the faces and search for them.
The list of people is shown in the Explore page.
@@ -18,75 +18,13 @@ The asset detail view will also show the faces that are recognized in the asset.
## Actions
Additional actions you can do include:
Additional actions you can do with a detected person are:
- Changing the feature photo of the person
- Setting a person's date of birth
- Merging two or more detected faces into one person
- Hiding the faces of a person from the Explore page and detail view
- Assigning an unrecognized face to a person
- Change the feature face photo of the person
- Set date of birth
- Merge two or more detected faces into one person
- Hide face
It can be found in the app bar when you access the detail view of a person.
<img src={require('./img/facial-recognition-4.png').default} title='Facial Recognition 4' width="70%"/>
## How Face Detection Works
Face detection sends the generated preview image to the machine learning service for processing. The service checks if it has the relevant model downloaded and downloads it if not. The image is decoded, pre-processed and passed to the face detection model (with hardware acceleration if configured). The bounding boxes and scores outputted from this model are used to crop and preprocess the image once again to be passed to a facial recognition model (also accelerated if configured). The embeddings from the recognition model, together with the bounding boxes and scores from the face detection model, are then sent back to the server to be added to the database. The embeddings in particular are indexed so they can be searched quickly during facial recognition clustering.
## How Facial Recognition Works
The facial recognition algorithm we use is derived from DBSCAN, a popular clustering algorithm. It essentially treats each detected face as a point in a graph and aims to group points that are close to each other.
:::note
An important concept is whether something is a _core point_. A core point has a minimum number of points around it within a certain distance. A non-core point can only be assigned to a cluster if it can reach a core point; a non-core point can't be used to extend a cluster even if it's part of one. In Immich, the _Minimum Recognized Faces_ setting controls the threshold to be considered a core point.
:::
For each face, it looks around it to find other faces within a certain distance. Faces within this distance are considered similar, so it then checks if any of these faces are associated with a person.
If there is an existing person, it assigns the person of the most similar face to the face being processed.
If there is none, then it has to determine something from the DBSCAN algorithm: whether the face is a _core point_. If there are a certain number of similar faces (by default 3, including the face being considered), then this face is a core point. A new person is created for this face and the face is assigned to it. When other faces are processed, if they're similar to this face, they'll see that it has an associated person and can be assigned to that person.
However, if there aren't enough similar faces, no new person will be created. Instead, the face will wait for all the other faces to be processed to see if any matches that previously didn't have an associated person now do. If they do, then the face will be assigned to that person. If not, this face will be considered an outlier, such as a stranger in the background of an image.
The algorithm has some subtle differences compared to DBSCAN:
- DBSCAN doesn't have a concept of incremental clustering: it clusters all points at once. In contrast, facial recognition has to evolve as more assets are added without re-clustering everything each time.
- The algorithm described above works within a set of queued assets. Once these faces are processed and a new round of faces are detected, the behavior will not be the same as traditional DBSCAN since it preserves the clusters (people) generated from the previous round.
- Facial recognition tries to wait for face detection and thumbnail generation to complete before starting for this reason: the larger the set of faces in the queue, the better the results will be.
- Re-running facial recognition on all assets afterwards does behave like DBSCAN, however.
- DBSCAN is designed for range-based searches (i.e. points within a distance), but high-dimensional vector indices are generally optimized for getting the closest K results. The recognition algorithm doesn't try to get _all_ similar faces within a distance for performance reasons. Instead, it searches for a small number of matches for each face. The end result should be very similar if not identical, but with possibly different performance characteristics.
- Because of this, part of the recognition process is handled during a nightly job to ensure that unassigned faces with potential matches can be recognized.
:::tip
If you didn't import your assets at once or if the server was able to process jobs faster than you could upload them, it's possible that the clustering was suboptimal. If you haven't put effort into the current results, it may be worth re-running facial recognition on all assets for the best starting point. If it's too late for that, you can also manually assign a selection of unassigned faces and queue _Missing_ for Facial Recognition to help it learn and assign more faces automatically.
:::
## Configuration
Navigating to Administration > Settings > Machine Learning Settings > Facial Recognition will show the options available.
:::tip
It's better to only tweak the parameters here than to set them to something very different unless you're ready to test a variety of options. If you do need to set a parameter to a strict setting, relaxing other settings can be a good option to compensate, and vice versa.
:::
### Facial recognition model
There are a few different models available; the default is typically considered the best. On more constrained systems where the default is too intensive, you can choose a smaller model instead.
### Minimum detection score
This setting affects whether a result from the face detection model is filtered out as a false positive. It may seem tempting to set this low to detect more faces, but it can lead to false positives that are difficult to deal with and can harm facial recognition. It is strongly recommended not to go below 0.5 for this setting. Setting it to a very high number like 0.9 is also not recommended: the default is already biased toward precision, so a threshold that high leads to many undetected faces.
After changing this setting, it will only apply to new face detection jobs. To apply the new setting to all assets, you need to re-run face detection for all assets.
### Maximum recognition distance
The distance threshold described in How Facial Recognition Works. The default works well for most people, but it may be worth lowering it if the library has twins or otherwise very similar looking people. A threshold that's too low just means needing to merge duplicate people after facial recognition, whereas a threshold too high can produce unsalvageable results. It is strongly recommended not to go below 0.3 or above 0.7.
### Minimum recognized faces
The core point threshold described in How Facial Recognition Works. This setting has a few implications. First, it takes effect immediately in that people with fewer faces than this are hidden from view. Secondly, it makes clustering more robust as it prevents loosely-related faces from being linked to each other by requiring a certain level of density.
Increasing this setting is a good idea if you increase the recognition distance or reduce the minimum detection score. Setting it to 1 effectively disables the concept of core points, but can be an option if you prefer a more hands-on approach.

View File

@@ -123,7 +123,6 @@ Once this is done, you can continue to step 3 of "Basic Setup".
- You may want to choose a slower preset than for software transcoding to maintain quality and efficiency
- While you can use VAAPI with NVIDIA and Intel devices, prefer the more specific APIs since they're more optimized for their respective devices
- You can confirm the device is being recognized and used by checking its utilization (via `nvtop` for NVIDIA, `intel_gpu_top` for Intel, etc.) when transcoding. A lack of error logs when transcoding also indicates that it's being used.
[hw-file]: https://github.com/immich-app/immich/releases/latest/download/hwaccel.transcoding.yml
[nvct]: https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html
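As a minimal sketch of the wiring described above, assuming the downloaded `hwaccel.transcoding.yml` sits next to your `docker-compose.yml` (the `nvenc` service name is just one example; pick the backend matching your device):

```yaml
services:
  immich-server:
    extends:
      file: hwaccel.transcoding.yml
      service: nvenc # example backend; e.g. quicksync or vaapi for Intel devices
```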

View File

@@ -104,19 +104,18 @@ The `immich-server` container will need access to the gallery. Modify your docke
immich-server:
volumes:
- ${UPLOAD_LOCATION}:/usr/src/app/upload
+ - /mnt/nas/christmas-trip:/mnt/nas/christmas-trip:ro
+ - /home/user/old-pics:/home/user/old-pics:ro
+ - /mnt/nas/christmas-trip:/mnt/media/christmas-trip:ro
+ - /home/user/old-pics:/mnt/media/old-pics:ro
+ - /mnt/media/videos:/mnt/media/videos:ro
+ - /mnt/media/videos2:/mnt/media/videos2 # the files in this folder can be deleted, as the mount does not end with :ro
+ - "C:/Users/user_name/Desktop/my media:/mnt/media/my-media:ro" # import path in Windows system.
```
:::tip
The `ro` flag at the end only gives read-only access to the volumes. This will disallow the images from being deleted in the web UI.
The `ro` flag at the end only gives read-only access to the volumes. While Immich does not modify files, it's a good practice to mount read-only.
:::
:::info
_Remember to run `docker compose up -d` to register the changes. Make sure you can see the mounted path in the container._
_Remember to bring the container `docker compose down/up` to register the changes. Make sure you can see the mounted path in the container._
:::
### Create External Libraries

View File

@@ -7,30 +7,29 @@ Immich uses Postgres as its search database for both metadata and smart search.
Smart search is powered by the [pgvecto.rs](https://github.com/tensorchord/pgvecto.rs) extension, utilizing machine learning models like [CLIP](https://openai.com/research/clip) to provide relevant search results. This allows for freeform searches without requiring specific keywords in the image or video metadata.
Archived photos are not included in search results by default. To include them, mark the checkbox in [advanced search filters](/docs/features/smart-search#advanced-search-filters).
:::tip Alternative CLIP Models
More powerful models can be used for more accurate search results. For more information, see the related [FAQ](/docs/FAQ#can-i-use-a-custom-clip-model).
:::
:::info
Smart Search is currently limited to 5,000 results for a single search on the web.
:::
## Advanced Search Filters
In addition, Immich offers advanced search functionality, allowing you to find specific content using customizable search filters. These filters include location, one or more faces, specific albums, and more. You can try out the search filters on the [Demo site](https://demo.immich.app).
Filters you can apply to a smart search include:
Smart search features include:
- People
- Location
- Country
- State
- City
- Camera
- Make
- Model
- Date range
- File name or extension
- Media type
- Image (including live/motion photos)
- Video
- All
- Condition
- Not in any album
- Archived
- Favorited
- Search for one or more faces (with or without context search).
- Search by Country or State or City or by all three.
- Search by camera make and model.
- Search by date range.
- Search by file name.
- Search by media types: image, video or all (**Note:** Image includes live images).
- Search by condition: not in any album or archive or Favorite or all conditions.
<Tabs>
<TabItem value="Computer" label="Computer" default>
@@ -48,27 +47,3 @@ Some search examples:
</TabItem>
</Tabs>
## Configuration
Navigating to `Administration > Settings > Machine Learning Settings > Smart Search` will show the options available.
### CLIP model
More powerful models can be used for more accurate search results, but are slower and can require more server resources. Check out the models [here][huggingface-clip] for more options!
[Multilingual models][huggingface-multilingual-clip] are also available so users can search in their native language. These models support over 100 languages; the `nllb` models in particular support 200.
:::note
Multilingual models are much slower and larger and perform slightly worse for English than English-only models. For this reason, only use them if you actually intend to search in a language besides English.
As a special case, the `ViT-H-14-quickgelu__dfn5b` and `ViT-H-14-378-quickgelu__dfn5b` models are excellent at many European languages despite not specifically being multilingual. They're very intensive regardless, however - especially the latter.
:::
Once you've chosen a model, change this setting to the name of the model you chose. Be sure to re-run Smart Search on all assets after this change.
:::note
Feel free to make a feature request if there's a model you want to use that we don't currently support.
:::
[huggingface-clip]: https://huggingface.co/collections/immich-app/clip-654eaefb077425890874cd07
[huggingface-multilingual-clip]: https://huggingface.co/collections/immich-app/multilingual-clip-654eb08c2382f591eeb8c2a7

View File

@@ -6,18 +6,36 @@ in a directory on the same machine.
# Mount the directory into the containers.
Edit `docker-compose.yml` to add one or more new mount points in the section `immich-server:` under `volumes:`.
If you want Immich to be able to delete the images in the external library, remove `:ro` from the end of the mount point.
Edit `docker-compose.yml` to add two new mount points in the section `immich-server:` under `volumes:`
```diff
immich-server:
volumes:
- ${UPLOAD_LOCATION}:/usr/src/app/upload
+ - /home/user/photos1:/home/user/photos1:ro
+ - /mnt/photos2:/mnt/photos2:ro # you can delete this line if you only have one mount point, or you can add more lines if you have more than two
+ - ${EXTERNAL_PATH}:/usr/src/app/external
```
Restart Immich by running `docker compose up -d`.
Edit `.env` to define `EXTERNAL_PATH`, substituting in the correct path for your computer:
```
EXTERNAL_PATH=<your-path-here>
```
On my computer, for example, I use this path:
```
EXTERNAL_PATH=/home/tenino/photos
```
:::info EXTERNAL_PATH design
EXTERNAL_PATH is defined in `.env` rather than written twice in the yml file to make everything easier: if two copies of the same path had to be kept in sync, then someday later when you move the data and update only one of them, everything would break mysteriously.
:::
Restart Immich.
```
docker compose down
docker compose up -d
```
# Create the library

View File

@@ -4,7 +4,7 @@ This page gives a few pointers on how to access your Immich instance from outsid
You can read the [full discussion in Discord](https://discord.com/channels/979116623879368755/1122615710846308484)
:::danger
Never forward port 2283 directly to the internet without additional configuration. This will expose the web interface via http to the internet, making you susceptible to [man in the middle](https://en.wikipedia.org/wiki/Man-in-the-middle_attack) attacks.
Never forward port 2283 directly to the internet without additional configuration. This will expose the web interface via http to the internet, making you succeptible to [man in the middle](https://en.wikipedia.org/wiki/Man-in-the-middle_attack) attacks.
:::
## Option 1: VPN to home network

View File

@@ -4,11 +4,14 @@ To alleviate [performance issues on low-memory systems](/docs/FAQ.mdx#why-is-imm
- Set the URL in Machine Learning Settings on the Admin Settings page to point to the designated ML system, e.g. `http://workstation:3003`.
- Copy the following `docker-compose.yml` to your ML system.
- If using [hardware acceleration](/docs/features/ml-hardware-acceleration), the [hwaccel.ml.yml](https://github.com/immich-app/immich/releases/latest/download/hwaccel.ml.yml) file also needs to be added
- Start the container by running `docker compose up -d`.
:::info
Smart Search and Face Detection will use this feature, but Facial Recognition is handled in the server.
Starting with version v1.93.0, face detection and facial recognition were split. From now on, face detection is done in the immich_machine_learning container, but facial recognition is done in the `microservices` worker.
:::
:::note
The [hwaccel.ml.yml](https://github.com/immich-app/immich/releases/latest/download/hwaccel.ml.yml) file also needs to be in the same folder if trying to use [hardware acceleration](/docs/features/ml-hardware-acceleration).
:::
```yaml
@@ -34,7 +37,3 @@ volumes:
```
Please note that version mismatches between both hosts may cause instabilities and bugs, so make sure to always perform updates together.
:::caution
As an internal service, the machine learning container has no security measures whatsoever. Please be mindful of where it's deployed and who can access it.
:::

View File

@@ -38,18 +38,17 @@ Regardless of filesystem, it is not recommended to use a network share for your
## General
| Variable | Description | Default | Containers | Workers |
| :---------------------------------- | :-------------------------------------------------- | :--------------------------: | :----------------------- | :----------------- |
| `TZ` | Timezone | | server | microservices |
| `IMMICH_ENV` | Environment (production, development) | `production` | server, machine learning | api, microservices |
| `IMMICH_LOG_LEVEL` | Log Level (verbose, debug, log, warn, error) | `log` | server, machine learning | api, microservices |
| `IMMICH_MEDIA_LOCATION` | Media Location | `./upload`<sup>\*1</sup> | server | api, microservices |
| `IMMICH_CONFIG_FILE` | Path to config file | | server | api, microservices |
| `NO_COLOR` | Set to `true` to disable color-coded log output | `false` | server, machine learning | |
| `CPU_CORES` | Amount of cores available to the immich server | auto-detected cpu core count | server | |
| `IMMICH_API_METRICS_PORT` | Port for the OTEL metrics | `8081` | server | api |
| `IMMICH_MICROSERVICES_METRICS_PORT` | Port for the OTEL metrics | `8082` | server | microservices |
| `IMMICH_PROCESS_INVALID_IMAGES` | When `true`, generate thumbnails for invalid images | | server | microservices |
| Variable | Description | Default | Containers | Workers |
| :---------------------------------- | :---------------------------------------------- | :--------------------------: | :----------------------- | :----------------- |
| `TZ` | Timezone | | server | microservices |
| `IMMICH_ENV` | Environment (production, development) | `production` | server, machine learning | api, microservices |
| `IMMICH_LOG_LEVEL` | Log Level (verbose, debug, log, warn, error) | `log` | server, machine learning | api, microservices |
| `IMMICH_MEDIA_LOCATION` | Media Location | `./upload`<sup>\*1</sup> | server | api, microservices |
| `IMMICH_CONFIG_FILE` | Path to config file | | server | api, microservices |
| `NO_COLOR` | Set to `true` to disable color-coded log output | `false` | server, machine learning | |
| `CPU_CORES` | Amount of cores available to the immich server | auto-detected cpu core count | server | |
| `IMMICH_API_METRICS_PORT` | Port for the OTEL metrics | `8081` | server | api |
| `IMMICH_MICROSERVICES_METRICS_PORT` | Port for the OTEL metrics | `8082` | server | microservices |
\*1: With the default `WORKDIR` of `/usr/src/app`, this path will resolve to `/usr/src/app/upload`.
It only needs to be set if the Immich deployment method is changed.
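For example, a hedged compose sketch of overriding it (the path is illustrative; only set this if your deployment actually moves the media location):

```yaml
services:
  immich-server:
    environment:
      IMMICH_MEDIA_LOCATION: /data/immich/upload # illustrative path, not a default
```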
@@ -156,18 +155,18 @@ Redis (Sentinel) URL example JSON before encoding:
## Machine Learning
| Variable | Description | Default | Containers |
| :----------------------------------------------- | :------------------------------------------------------------------- | :-----------------------------------: | :--------------- |
| `MACHINE_LEARNING_MODEL_TTL` | Inactivity time (s) before a model is unloaded (disabled if \<= 0) | `300` | machine learning |
| `MACHINE_LEARNING_MODEL_TTL_POLL_S` | Interval (s) between checks for the model TTL (disabled if \<= 0) | `10` | machine learning |
| `MACHINE_LEARNING_CACHE_FOLDER` | Directory where models are downloaded | `/cache` | machine learning |
| `MACHINE_LEARNING_REQUEST_THREADS`<sup>\*1</sup> | Thread count of the request thread pool (disabled if \<= 0) | number of CPU cores | machine learning |
| `MACHINE_LEARNING_MODEL_INTER_OP_THREADS` | Number of parallel model operations | `1` | machine learning |
| `MACHINE_LEARNING_MODEL_INTRA_OP_THREADS` | Number of threads for each model operation | `2` | machine learning |
| `MACHINE_LEARNING_WORKERS`<sup>\*2</sup> | Number of worker processes to spawn | `1` | machine learning |
| `MACHINE_LEARNING_WORKER_TIMEOUT` | Maximum time (s) of unresponsiveness before a worker is killed | `120` (`300` if using OpenVINO image) | machine learning |
| `MACHINE_LEARNING_PRELOAD__CLIP` | Name of a CLIP model to be preloaded and kept in cache | | machine learning |
| `MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION` | Name of a facial recognition model to be preloaded and kept in cache | | machine learning |
| Variable | Description | Default | Containers |
| :----------------------------------------------- | :------------------------------------------------------------------- | :-----------------: | :--------------- |
| `MACHINE_LEARNING_MODEL_TTL` | Inactivity time (s) before a model is unloaded (disabled if \<= 0) | `300` | machine learning |
| `MACHINE_LEARNING_MODEL_TTL_POLL_S` | Interval (s) between checks for the model TTL (disabled if \<= 0) | `10` | machine learning |
| `MACHINE_LEARNING_CACHE_FOLDER` | Directory where models are downloaded | `/cache` | machine learning |
| `MACHINE_LEARNING_REQUEST_THREADS`<sup>\*1</sup> | Thread count of the request thread pool (disabled if \<= 0) | number of CPU cores | machine learning |
| `MACHINE_LEARNING_MODEL_INTER_OP_THREADS` | Number of parallel model operations | `1` | machine learning |
| `MACHINE_LEARNING_MODEL_INTRA_OP_THREADS` | Number of threads for each model operation | `2` | machine learning |
| `MACHINE_LEARNING_WORKERS`<sup>\*2</sup> | Number of worker processes to spawn | `1` | machine learning |
| `MACHINE_LEARNING_WORKER_TIMEOUT` | Maximum time (s) of unresponsiveness before a worker is killed | `120` | machine learning |
| `MACHINE_LEARNING_PRELOAD__CLIP` | Name of a CLIP model to be preloaded and kept in cache | | machine learning |
| `MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION` | Name of a facial recognition model to be preloaded and kept in cache | | machine learning |
\*1: It is recommended to begin with this parameter when changing the concurrency levels of the machine learning service and then tune the other ones.
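As an illustration of the two preload variables (the model names are examples: `buffalo_l` appears earlier in these docs, and the CLIP name follows Immich's published model list; substitute whichever models your instance uses):

```yaml
services:
  immich-machine-learning:
    environment:
      MACHINE_LEARNING_PRELOAD__CLIP: ViT-B-32__openai # example CLIP model name
      MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION: buffalo_l # example recognition model
```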

View File

@@ -8,10 +8,6 @@ sidebar_position: 20
This method is experimental and not currently recommended for production use. For production, please refer to installing with [Docker Compose](/docs/install/docker-compose.mdx).
:::
:::note
The install script only supports Linux operating systems and requires Docker to be already installed on the system.
:::
In the shell, from a directory of your choice, run the following command:
```bash

View File

@@ -92,7 +92,6 @@ const config = {
alt: 'Immich Logo',
src: 'img/immich-logo-inline-light.png',
srcDark: 'img/immich-logo-inline-dark.png',
className: 'rounded-none',
},
items: [
{

View File

@@ -56,6 +56,6 @@
"node": ">=20"
},
"volta": {
"node": "20.15.1"
"node": "20.15.0"
}
}

View File

@@ -49,7 +49,7 @@ export function Timeline({ items }: Props): JSX.Element {
<div className="flex flex-col flex-grow justify-between gap-2">
<div className="flex gap-2 items-center">
{cardIcon === 'immich' ? (
<img src="img/immich-logo.svg" height="30" className="rounded-none" />
<img src="img/immich-logo.svg" height="30" />
) : (
<Icon path={cardIcon} size={1} color={item.iconColor} />
)}

View File

@@ -49,7 +49,7 @@ const items: Item[] = [
iconColor: 'greenyellow',
title: 'JavaScript Date objects are cursed',
description: 'JavaScript date objects are 1 indexed for years and days, but 0 indexed for months.',
link: { url: 'https://github.com/immich-app/immich/pull/6787', text: '#6787' },
link: { url: 'https://github.com/immich-app/immich/pulls/6787', text: '#6787' },
date: new Date(2024, 0, 31),
},
];

View File

@@ -10,7 +10,7 @@ function HomepageHeader() {
<section className="text-center m-6 p-12 border border-red-400 rounded-[50px] bg-slate-200 dark:bg-immich-dark-gray">
<img
src={isDarkTheme ? 'img/immich-logo-stacked-dark.svg' : 'img/immich-logo-stacked-light.svg'}
className="md:h-60 h-44 mb-2 antialiased rounded-none"
className="md:h-60 h-44 mb-2 antialiased"
alt="Immich logo"
/>
<div className="sm:text-2xl text-lg md:text-4xl mb-12 sm:leading-tight">

View File

@@ -1,20 +1,4 @@
[
{
"label": "v1.109.2",
"url": "https://v1.109.2.archive.immich.app"
},
{
"label": "v1.109.1",
"url": "https://v1.109.1.archive.immich.app"
},
{
"label": "v1.109.0",
"url": "https://v1.109.0.archive.immich.app"
},
{
"label": "v1.108.0",
"url": "https://v1.108.0.archive.immich.app"
},
{
"label": "v1.107.2",
"url": "https://v1.107.2.archive.immich.app"

View File

@@ -1 +1 @@
20.15.1
20.15

View File

@@ -26,8 +26,6 @@ services:
volumes:
- upload:/usr/src/app/upload
- ./test-assets:/test-assets
extra_hosts:
- 'auth-server:host-gateway'
depends_on:
- redis
- database

1129
e2e/package-lock.json generated

File diff suppressed because it is too large.

View File

@@ -1,6 +1,6 @@
{
"name": "immich-e2e",
"version": "1.109.2",
"version": "1.107.2",
"description": "",
"main": "index.js",
"type": "module",
@@ -23,8 +23,7 @@
"@immich/sdk": "file:../open-api/typescript-sdk",
"@playwright/test": "^1.44.1",
"@types/luxon": "^3.4.2",
"@types/node": "^20.14.10",
"@types/oidc-provider": "^8.5.1",
"@types/node": "^20.14.9",
"@types/pg": "^8.11.0",
"@types/pngjs": "^6.0.4",
"@types/supertest": "^6.0.2",
@@ -36,9 +35,7 @@
"eslint-plugin-prettier": "^5.1.3",
"eslint-plugin-unicorn": "^54.0.0",
"exiftool-vendored": "^27.0.0",
"jose": "^5.6.3",
"luxon": "^3.4.4",
"oidc-provider": "^8.5.1",
"pg": "^8.11.3",
"pngjs": "^7.0.0",
"prettier": "^3.2.5",
@@ -50,6 +47,6 @@
"vitest": "^1.3.0"
},
"volta": {
"node": "20.15.1"
"node": "20.15.0"
}
}

View File

@@ -507,22 +507,6 @@ describe('/asset', () => {
expect(status).toEqual(200);
});
it.skip('should geocode country from gps data in the middle of nowhere', async () => {
const { status } = await request(app)
.put(`/assets/${user1Assets[0].id}`)
.set('Authorization', `Bearer ${user1.accessToken}`)
.send({ latitude: 42, longitude: 69 });
expect(status).toEqual(200);
await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction');
const asset = await getAssetInfo({ id: user1Assets[0].id }, { headers: asBearerAuth(user1.accessToken) });
expect(asset).toMatchObject({
id: user1Assets[0].id,
exifInfo: expect.objectContaining({ city: null, country: 'Kazakhstan' }),
});
});
it('should set the description', async () => {
const { status, body } = await request(app)
.put(`/assets/${user1Assets[0].id}`)
@@ -1186,25 +1170,17 @@ describe('/asset', () => {
// into the test here.
it.each([
{
filepath: 'formats/motionphoto/samsung-one-ui-5.jpg',
filepath: 'formats/motionphoto/Samsung One UI 5.jpg',
checksum: 'fr14niqCq6N20HB8rJYEvpsUVtI=',
},
{
filepath: 'formats/motionphoto/samsung-one-ui-6.jpg',
filepath: 'formats/motionphoto/Samsung One UI 6.jpg',
checksum: 'lT9Uviw/FFJYCjfIxAGPTjzAmmw=',
},
{
filepath: 'formats/motionphoto/samsung-one-ui-6.heic',
filepath: 'formats/motionphoto/Samsung One UI 6.heic',
checksum: '/ejgzywvgvzvVhUYVfvkLzFBAF0=',
},
{
filepath: 'formats/motionphoto/pixel-6-pro.jpg',
checksum: 'bFhLGbdK058PSk4FTfrSnoKWykc=',
},
{
filepath: 'formats/motionphoto/pixel-8a.jpg',
checksum: '7YdY+WF0h+CXHbiXpi0HiCMTTjs=',
},
])(`should extract motionphoto video from $filepath`, async ({ filepath, checksum }) => {
const response = await utils.createAsset(admin.accessToken, {
assetData: {

View File

@@ -100,12 +100,6 @@ describe('/auth/*', () => {
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest());
});
it('should reject an invalid email', async () => {
const { status, body } = await request(app).post('/auth/login').send({ email: [], password });
expect(status).toBe(400);
expect(body).toEqual(errorDto.invalidEmail);
});
}
it('should accept a correct password', async () => {

View File

@@ -1,85 +1,12 @@
import {
LoginResponseDto,
SystemConfigOAuthDto,
getConfigDefaults,
getMyUser,
startOAuth,
updateConfig,
} from '@immich/sdk';
import { errorDto } from 'src/responses';
import { OAuthClient, OAuthUser } from 'src/setup/auth-server';
import { app, asBearerAuth, baseUrl, utils } from 'src/utils';
import { app, utils } from 'src/utils';
import request from 'supertest';
import { beforeAll, describe, expect, it } from 'vitest';
const authServer = {
internal: 'http://auth-server:3000',
external: 'http://127.0.0.1:3000',
};
const redirect = async (url: string, cookies?: string[]) => {
const { headers } = await request(url)
.get('/')
.set('Cookie', cookies || []);
return { cookies: (headers['set-cookie'] as unknown as string[]) || [], location: headers.location };
};
const loginWithOAuth = async (sub: OAuthUser | string) => {
const { url } = await startOAuth({ oAuthConfigDto: { redirectUri: `${baseUrl}/auth/login` } });
// login
const response1 = await redirect(url.replace(authServer.internal, authServer.external));
const response2 = await request(authServer.external + response1.location)
.post('/')
.set('Cookie', response1.cookies)
.type('form')
.send({ prompt: 'login', login: sub, password: 'password' });
// approve
const response3 = await redirect(response2.header.location, response1.cookies);
const response4 = await request(authServer.external + response3.location)
.post('/')
.type('form')
.set('Cookie', response3.cookies)
.send({ prompt: 'consent' });
const response5 = await redirect(response4.header.location, response3.cookies.slice(1));
const redirectUrl = response5.location;
expect(redirectUrl).toBeDefined();
const params = new URL(redirectUrl).searchParams;
expect(params.get('code')).toBeDefined();
expect(params.get('state')).toBeDefined();
return redirectUrl;
};
const setupOAuth = async (token: string, dto: Partial<SystemConfigOAuthDto>) => {
const options = { headers: asBearerAuth(token) };
const defaults = await getConfigDefaults(options);
const merged = {
...defaults.oauth,
buttonText: 'Login with Immich',
issuerUrl: `${authServer.internal}/.well-known/openid-configuration`,
...dto,
};
await updateConfig({ systemConfigDto: { ...defaults, oauth: merged } }, options);
};
describe(`/oauth`, () => {
let admin: LoginResponseDto;
beforeAll(async () => {
await utils.resetDatabase();
admin = await utils.adminSetup();
await setupOAuth(admin.accessToken, {
enabled: true,
clientId: OAuthClient.DEFAULT,
clientSecret: OAuthClient.DEFAULT,
buttonText: 'Login with Immich',
storageLabelClaim: 'immich_username',
});
await utils.adminSetup();
});
describe('POST /oauth/authorize', () => {
@@ -88,171 +15,5 @@ describe(`/oauth`, () => {
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['redirectUri must be a string', 'redirectUri should not be empty']));
});
it('should return a redirect uri', async () => {
const { status, body } = await request(app)
.post('/oauth/authorize')
.send({ redirectUri: 'http://127.0.0.1:2283/auth/login' });
expect(status).toBe(201);
expect(body).toEqual({ url: expect.stringContaining(`${authServer.internal}/auth?`) });
const params = new URL(body.url).searchParams;
expect(params.get('client_id')).toBe('client-default');
expect(params.get('response_type')).toBe('code');
expect(params.get('redirect_uri')).toBe('http://127.0.0.1:2283/auth/login');
expect(params.get('state')).toBeDefined();
});
});
describe('POST /oauth/callback', () => {
it(`should throw an error if a url is not provided`, async () => {
const { status, body } = await request(app).post('/oauth/callback').send({});
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['url must be a string', 'url should not be empty']));
});
it(`should throw an error if the url is empty`, async () => {
const { status, body } = await request(app).post('/oauth/callback').send({ url: '' });
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['url should not be empty']));
});
it('should auto register the user by default', async () => {
const url = await loginWithOAuth('oauth-auto-register');
const { status, body } = await request(app).post('/oauth/callback').send({ url });
expect(status).toBe(201);
expect(body).toMatchObject({
accessToken: expect.any(String),
isAdmin: false,
name: 'OAuth User',
userEmail: 'oauth-auto-register@immich.app',
userId: expect.any(String),
});
});
it('should handle a user without an email', async () => {
const url = await loginWithOAuth(OAuthUser.NO_EMAIL);
const { status, body } = await request(app).post('/oauth/callback').send({ url });
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest('OAuth profile does not have an email address'));
});
it('should set the quota from a claim', async () => {
const url = await loginWithOAuth(OAuthUser.WITH_QUOTA);
const { status, body } = await request(app).post('/oauth/callback').send({ url });
expect(status).toBe(201);
expect(body).toMatchObject({
accessToken: expect.any(String),
userId: expect.any(String),
userEmail: 'oauth-with-quota@immich.app',
});
const user = await getMyUser({ headers: asBearerAuth(body.accessToken) });
expect(user.quotaSizeInBytes).toBe(25 * 2 ** 30); // 25 GiB;
});
it('should set the storage label from a claim', async () => {
const url = await loginWithOAuth(OAuthUser.WITH_USERNAME);
const { status, body } = await request(app).post('/oauth/callback').send({ url });
expect(status).toBe(201);
expect(body).toMatchObject({
accessToken: expect.any(String),
userId: expect.any(String),
userEmail: 'oauth-with-username@immich.app',
});
const user = await getMyUser({ headers: asBearerAuth(body.accessToken) });
expect(user.storageLabel).toBe('user-username');
});
it('should work with RS256 signed tokens', async () => {
await setupOAuth(admin.accessToken, {
enabled: true,
clientId: OAuthClient.RS256_TOKENS,
clientSecret: OAuthClient.RS256_TOKENS,
autoRegister: true,
buttonText: 'Login with Immich',
signingAlgorithm: 'RS256',
});
const url = await loginWithOAuth('oauth-RS256-token');
const { status, body } = await request(app).post('/oauth/callback').send({ url });
expect(status).toBe(201);
expect(body).toMatchObject({
accessToken: expect.any(String),
isAdmin: false,
name: 'OAuth User',
userEmail: 'oauth-RS256-token@immich.app',
userId: expect.any(String),
});
});
it('should work with RS256 signed user profiles', async () => {
await setupOAuth(admin.accessToken, {
enabled: true,
clientId: OAuthClient.RS256_PROFILE,
clientSecret: OAuthClient.RS256_PROFILE,
buttonText: 'Login with Immich',
profileSigningAlgorithm: 'RS256',
});
const url = await loginWithOAuth('oauth-signed-profile');
const { status, body } = await request(app).post('/oauth/callback').send({ url });
expect(status).toBe(201);
expect(body).toMatchObject({
userId: expect.any(String),
userEmail: 'oauth-signed-profile@immich.app',
});
});
it('should throw an error for an invalid token algorithm', async () => {
await setupOAuth(admin.accessToken, {
enabled: true,
clientId: OAuthClient.DEFAULT,
clientSecret: OAuthClient.DEFAULT,
buttonText: 'Login with Immich',
signingAlgorithm: 'something-that-does-not-work',
});
const url = await loginWithOAuth('oauth-signed-bad');
const { status, body } = await request(app).post('/oauth/callback').send({ url });
expect(status).toBe(500);
expect(body).toMatchObject({
error: 'Internal Server Error',
message: 'Failed to finish oauth',
statusCode: 500,
});
});
describe('autoRegister: false', () => {
beforeAll(async () => {
await setupOAuth(admin.accessToken, {
enabled: true,
clientId: OAuthClient.DEFAULT,
clientSecret: OAuthClient.DEFAULT,
autoRegister: false,
buttonText: 'Login with Immich',
});
});
it('should not auto register the user', async () => {
const url = await loginWithOAuth('oauth-no-auto-register');
const { status, body } = await request(app).post('/oauth/callback').send({ url });
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest('User does not exist and auto registering is disabled.'));
});
it('should link to an existing user by email', async () => {
const { userId } = await utils.userSetup(admin.accessToken, {
name: 'OAuth User 3',
email: 'oauth-user3@immich.app',
password: 'password',
});
const url = await loginWithOAuth('oauth-user3');
const { status, body } = await request(app).post('/oauth/callback').send({ url });
expect(status).toBe(201);
expect(body).toMatchObject({
userId,
userEmail: 'oauth-user3@immich.app',
});
});
});
});
});

View File

@@ -49,9 +49,9 @@ describe('/search', () => {
{ filename: '/albums/nature/silver_fir.jpg' },
{ filename: '/formats/heic/IMG_2682.heic' },
{ filename: '/formats/jpg/el_torcal_rocks.jpg' },
{ filename: '/formats/motionphoto/samsung-one-ui-6.jpg' },
{ filename: '/formats/motionphoto/samsung-one-ui-6.heic' },
{ filename: '/formats/motionphoto/samsung-one-ui-5.jpg' },
{ filename: '/formats/motionphoto/Samsung One UI 6.jpg' },
{ filename: '/formats/motionphoto/Samsung One UI 6.heic' },
{ filename: '/formats/motionphoto/Samsung One UI 5.jpg' },
{ filename: '/metadata/gps-position/thompson-springs.jpg', dto: { isArchived: true } },
@@ -315,7 +315,7 @@ describe('/search', () => {
{
should: 'should search by originalFilename with spaces',
deferred: () => ({
dto: { originalFileName: 'samsung-one', type: 'IMAGE' },
dto: { originalFileName: 'Samsung One', type: 'IMAGE' },
assets: [assetOneJpg5, assetOneJpg6, assetOneHeic6],
}),
},

View File

@@ -61,12 +61,6 @@ export const errorDto = {
message: 'The server already has an admin',
correlationId: expect.any(String),
},
invalidEmail: {
error: 'Bad Request',
statusCode: 400,
message: ['email must be an email'],
correlationId: expect.any(String),
},
};
export const signupResponseDto = {

View File

@@ -1,117 +0,0 @@
import { exportJWK, generateKeyPair } from 'jose';
import Provider from 'oidc-provider';
export enum OAuthClient {
DEFAULT = 'client-default',
RS256_TOKENS = 'client-RS256-tokens',
RS256_PROFILE = 'client-RS256-profile',
}
export enum OAuthUser {
NO_EMAIL = 'no-email',
NO_NAME = 'no-name',
WITH_QUOTA = 'with-quota',
WITH_USERNAME = 'with-username',
}
const claims = [
{ sub: OAuthUser.NO_EMAIL },
{
sub: OAuthUser.NO_NAME,
email: 'oauth-no-name@immich.app',
email_verified: true,
},
{
sub: OAuthUser.WITH_USERNAME,
email: 'oauth-with-username@immich.app',
email_verified: true,
immich_username: 'user-username',
},
{
sub: OAuthUser.WITH_QUOTA,
email: 'oauth-with-quota@immich.app',
email_verified: true,
preferred_username: 'user-quota',
immich_quota: 25,
},
];
const withDefaultClaims = (sub: string) => ({
sub,
email: `${sub}@immich.app`,
name: 'OAuth User',
given_name: `OAuth`,
family_name: 'User',
email_verified: true,
});
const getClaims = (sub: string) => claims.find((user) => user.sub === sub) || withDefaultClaims(sub);
const setup = async () => {
const { privateKey, publicKey } = await generateKeyPair('RS256');
const port = 3000;
const host = '0.0.0.0';
const oidc = new Provider(`http://${host}:${port}`, {
renderError: async (ctx, out, error) => {
console.error(out);
console.error(error);
ctx.body = 'Internal Server Error';
},
findAccount: (ctx, sub) => ({ accountId: sub, claims: () => getClaims(sub) }),
scopes: ['openid', 'email', 'profile'],
claims: {
openid: ['sub'],
email: ['email', 'email_verified'],
profile: ['name', 'given_name', 'family_name', 'preferred_username', 'immich_quota', 'immich_username'],
},
features: {
jwtUserinfo: {
enabled: true,
},
},
cookies: {
names: {
session: 'oidc.session',
interaction: 'oidc.interaction',
resume: 'oidc.resume',
state: 'oidc.state',
},
},
pkce: {
required: () => false,
},
jwks: { keys: [await exportJWK(privateKey)] },
clients: [
{
client_id: OAuthClient.DEFAULT,
client_secret: OAuthClient.DEFAULT,
redirect_uris: ['http://127.0.0.1:2283/auth/login'],
grant_types: ['authorization_code'],
response_types: ['code'],
},
{
client_id: OAuthClient.RS256_TOKENS,
client_secret: OAuthClient.RS256_TOKENS,
redirect_uris: ['http://127.0.0.1:2283/auth/login'],
grant_types: ['authorization_code'],
id_token_signed_response_alg: 'RS256',
jwks: { keys: [await exportJWK(publicKey)] },
},
{
client_id: OAuthClient.RS256_PROFILE,
client_secret: OAuthClient.RS256_PROFILE,
redirect_uris: ['http://127.0.0.1:2283/auth/login'],
grant_types: ['authorization_code'],
userinfo_signed_response_alg: 'RS256',
jwks: { keys: [await exportJWK(publicKey)] },
},
],
});
const onStart = () => console.log(`[auth-server] http://${host}:${port}/.well-known/openid-configuration`);
const app = oidc.listen(port, host, onStart);
return () => app.close();
};
export default setup;
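For context on the `export default setup` / `return () => app.close()` shape: vitest runs each `globalSetup` module's default export once before the whole suite and treats a returned function as teardown. A minimal sketch of that contract (the `startServer` helper is a stand-in, not from this repo):

```typescript
// vitest globalSetup contract: the default export runs once before the
// test run; the function it returns is called once afterwards as teardown.
import { createServer, Server } from 'node:http';

const startServer = (port: number): Promise<Server> =>
  new Promise((resolve) => {
    const server = createServer((_req, res) => res.end('ok'));
    server.listen(port, () => resolve(server));
  });

export default async function setup() {
  const server = await startServer(3000);
  return () => new Promise<void>((resolve) => server.close(() => resolve()));
}
```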

View File

@@ -53,7 +53,8 @@ type AdminSetupOptions = { onboarding?: boolean };
type AssetData = { bytes?: Buffer; filename: string };
const dbUrl = 'postgres://postgres:postgres@127.0.0.1:5433/immich';
export const baseUrl = 'http://127.0.0.1:2283';
const baseUrl = 'http://127.0.0.1:2283';
export const shareUrl = `${baseUrl}/share`;
export const app = `${baseUrl}/api`;
// TODO move test assets into e2e/assets

View File

@@ -1,11 +1,11 @@
import { defineConfig } from 'vitest/config';
// skip `docker compose up` if `make e2e` was already run
const globalSetup: string[] = ['src/setup/auth-server.ts'];
const globalSetup: string[] = [];
try {
await fetch('http://127.0.0.1:2283/api/server-info/ping');
} catch {
globalSetup.push('src/setup/docker-compose.ts');
globalSetup.push('src/setup.ts');
}
export default defineConfig({

View File

@@ -77,7 +77,6 @@ RUN apt-get update && \
rm -rf /var/lib/apt/lists/*
WORKDIR /usr/src/app
ARG DEVICE
ENV TRANSFORMERS_CACHE=/cache \
PYTHONDONTWRITEBYTECODE=1 \
PYTHONUNBUFFERED=1 \

View File

@@ -168,12 +168,6 @@ def warning() -> Iterator[mock.Mock]:
yield mocked
@pytest.fixture(scope="function")
def exception() -> Iterator[mock.Mock]:
with mock.patch.object(log, "exception") as mocked:
yield mocked
@pytest.fixture(scope="function")
def snapshot_download() -> Iterator[mock.Mock]:
with mock.patch("app.models.base.snapshot_download") as mocked:

View File

@@ -29,7 +29,6 @@ from .schemas import (
InferenceEntry,
InferenceResponse,
MessageResponse,
ModelFormat,
ModelIdentity,
ModelTask,
ModelType,
@@ -196,17 +195,7 @@ async def load(model: InferenceModel) -> InferenceModel:
if model.load_attempts > 1:
raise HTTPException(500, f"Failed to load model '{model.model_name}'")
with lock:
try:
model.load()
except FileNotFoundError as e:
if model.model_format == ModelFormat.ONNX:
raise e
log.exception(e)
log.warning(
f"{model.model_format.upper()} is available, but model '{model.model_name}' does not support it."
)
model.model_format = ModelFormat.ONNX
model.load()
model.load()
return model
try:

View File

@@ -23,7 +23,7 @@ class InferenceModel(ABC):
self,
model_name: str,
cache_dir: Path | str | None = None,
model_format: ModelFormat | None = None,
preferred_format: ModelFormat | None = None,
session: ModelSession | None = None,
**model_kwargs: Any,
) -> None:
@@ -31,7 +31,7 @@ class InferenceModel(ABC):
self.load_attempts = 0
self.model_name = clean_name(model_name)
self.cache_dir = Path(cache_dir) if cache_dir is not None else self._cache_dir_default
self.model_format = model_format if model_format is not None else self._model_format_default
self.model_format = preferred_format if preferred_format is not None else self._model_format_default
if session is not None:
self.session = session
@@ -48,7 +48,7 @@ class InferenceModel(ABC):
self.load_attempts += 1
self.download()
attempt = f"Attempt #{self.load_attempts} to load" if self.load_attempts > 1 else "Loading"
attempt = f"Attempt #{self.load_attempts + 1} to load" if self.load_attempts else "Loading"
log.info(f"{attempt} {self.model_type.replace('-', ' ')} model '{self.model_name}' to memory")
self.session = self._load()
self.loaded = True
@@ -101,9 +101,6 @@ class InferenceModel(ABC):
self.cache_dir.mkdir(parents=True, exist_ok=True)
def _make_session(self, model_path: Path) -> ModelSession:
if not model_path.is_file():
raise FileNotFoundError(f"Model file not found: {model_path}")
match model_path.suffix:
case ".armnn":
session: ModelSession = AnnSession(model_path)
@@ -147,13 +144,17 @@ class InferenceModel(ABC):
@property
def model_format(self) -> ModelFormat:
return self._model_format
return self._preferred_format
@model_format.setter
def model_format(self, model_format: ModelFormat) -> None:
log.debug(f"Setting model format to {model_format}")
self._model_format = model_format
def model_format(self, preferred_format: ModelFormat) -> None:
log.debug(f"Setting preferred format to {preferred_format}")
self._preferred_format = preferred_format
@property
def _model_format_default(self) -> ModelFormat:
return ModelFormat.ARMNN if ann.ann.is_available and settings.ann else ModelFormat.ONNX
prefer_ann = ann.ann.is_available and settings.ann
ann_exists = (self.model_dir / "model.armnn").is_file()
if prefer_ann and not ann_exists:
log.warning(f"ARM NN is available, but '{self.model_name}' does not support ARM NN. Falling back to ONNX.")
return ModelFormat.ARMNN if prefer_ann and ann_exists else ModelFormat.ONNX

View File

@@ -22,12 +22,11 @@ class BaseCLIPTextualEncoder(InferenceModel):
return res
def _load(self) -> ModelSession:
session = super()._load()
log.debug(f"Loading tokenizer for CLIP model '{self.model_name}'")
self.tokenizer = self._load_tokenizer()
log.debug(f"Loaded tokenizer for CLIP model '{self.model_name}'")
return session
return super()._load()
@abstractmethod
def _load_tokenizer(self) -> Tokenizer:

View File

@@ -1,3 +1,4 @@
from pathlib import Path
from typing import Any
import numpy as np
@@ -13,9 +14,15 @@ class FaceDetector(InferenceModel):
depends = []
identity = (ModelType.DETECTION, ModelTask.FACIAL_RECOGNITION)
def __init__(self, model_name: str, min_score: float = 0.7, **model_kwargs: Any) -> None:
def __init__(
self,
model_name: str,
min_score: float = 0.7,
cache_dir: Path | str | None = None,
**model_kwargs: Any,
) -> None:
self.min_score = model_kwargs.pop("minScore", min_score)
super().__init__(model_name, **model_kwargs)
super().__init__(model_name, cache_dir, **model_kwargs)
def _load(self) -> ModelSession:
session = self._make_session(self.model_path)

View File

@@ -9,7 +9,7 @@ from numpy.typing import NDArray
from onnx.tools.update_model_dims import update_inputs_outputs_dims
from PIL import Image
from app.config import log
from app.config import clean_name, log
from app.models.base import InferenceModel
from app.models.transforms import decode_cv2
from app.schemas import FaceDetectionOutput, FacialRecognitionOutput, ModelFormat, ModelSession, ModelTask, ModelType
@@ -20,14 +20,20 @@ class FaceRecognizer(InferenceModel):
depends = [(ModelType.DETECTION, ModelTask.FACIAL_RECOGNITION)]
identity = (ModelType.RECOGNITION, ModelTask.FACIAL_RECOGNITION)
def __init__(self, model_name: str, min_score: float = 0.7, **model_kwargs: Any) -> None:
super().__init__(model_name, **model_kwargs)
def __init__(
self,
model_name: str,
min_score: float = 0.7,
cache_dir: Path | str | None = None,
**model_kwargs: Any,
) -> None:
super().__init__(clean_name(model_name), cache_dir, **model_kwargs)
self.min_score = model_kwargs.pop("minScore", min_score)
self.batch = self.model_format == ModelFormat.ONNX
def _load(self) -> ModelSession:
session = self._make_session(self.model_path)
if self.batch and not has_batch_axis(session):
if self.model_format == ModelFormat.ONNX and not has_batch_axis(session):
self._add_batch_axis(self.model_path)
session = self._make_session(self.model_path)
self.model = ArcFaceONNX(

View File

@@ -43,7 +43,7 @@ class TestBase:
assert encoder.cache_dir == cache_dir
def test_sets_default_model_format(self, mocker: MockerFixture) -> None:
def test_sets_default_preferred_format(self, mocker: MockerFixture) -> None:
mocker.patch.object(settings, "ann", True)
mocker.patch("ann.ann.is_available", False)
@@ -51,7 +51,7 @@ class TestBase:
assert encoder.model_format == ModelFormat.ONNX
def test_sets_default_model_format_to_armnn_if_available(self, path: mock.Mock, mocker: MockerFixture) -> None:
def test_sets_default_preferred_format_to_armnn_if_available(self, path: mock.Mock, mocker: MockerFixture) -> None:
mocker.patch.object(settings, "ann", True)
mocker.patch("ann.ann.is_available", True)
path.suffix = ".armnn"
@@ -60,11 +60,11 @@ class TestBase:
assert encoder.model_format == ModelFormat.ARMNN
def test_sets_model_format_kwarg(self, mocker: MockerFixture) -> None:
def test_sets_preferred_format_kwarg(self, mocker: MockerFixture) -> None:
mocker.patch.object(settings, "ann", False)
mocker.patch("ann.ann.is_available", False)
encoder = OpenClipTextualEncoder("ViT-B-32__openai", model_format=ModelFormat.ARMNN)
encoder = OpenClipTextualEncoder("ViT-B-32__openai", preferred_format=ModelFormat.ARMNN)
assert encoder.model_format == ModelFormat.ARMNN
@@ -129,7 +129,7 @@ class TestBase:
)
def test_download_downloads_armnn_if_preferred_format(self, snapshot_download: mock.Mock) -> None:
encoder = OpenClipTextualEncoder("ViT-B-32__openai", model_format=ModelFormat.ARMNN)
encoder = OpenClipTextualEncoder("ViT-B-32__openai", preferred_format=ModelFormat.ARMNN)
encoder.download()
snapshot_download.assert_called_once_with(
@@ -140,19 +140,6 @@ class TestBase:
ignore_patterns=[],
)
def test_throws_exception_if_model_path_does_not_exist(
self, snapshot_download: mock.Mock, ort_session: mock.Mock, path: mock.Mock
) -> None:
path.return_value.__truediv__.return_value.__truediv__.return_value.is_file.return_value = False
encoder = OpenClipTextualEncoder("ViT-B-32__openai", cache_dir=path)
with pytest.raises(FileNotFoundError):
encoder.load()
snapshot_download.assert_called_once()
ort_session.assert_not_called()
@pytest.mark.usefixtures("ort_session")
class TestOrtSession:
@@ -480,18 +467,16 @@ class TestFaceRecognition:
assert isinstance(call_args[0][0], np.ndarray)
assert call_args[0][0].shape == (112, 112, 3)
def test_recognition_adds_batch_axis_for_ort(
self, ort_session: mock.Mock, path: mock.Mock, mocker: MockerFixture
) -> None:
def test_recognition_adds_batch_axis_for_ort(self, ort_session: mock.Mock, mocker: MockerFixture) -> None:
onnx = mocker.patch("app.models.facial_recognition.recognition.onnx", autospec=True)
update_dims = mocker.patch(
"app.models.facial_recognition.recognition.update_inputs_outputs_dims", autospec=True
)
mocker.patch("app.models.base.InferenceModel.download")
mocker.patch("app.models.facial_recognition.recognition.ArcFaceONNX")
ort_session.return_value.get_inputs.return_value = [SimpleNamespace(name="input.1", shape=(1, 3, 224, 224))]
ort_session.return_value.get_outputs.return_value = [SimpleNamespace(name="output.1", shape=(1, 800))]
path.return_value.__truediv__.return_value.__truediv__.return_value.suffix = ".onnx"
proto = mock.Mock()
@@ -507,30 +492,27 @@ class TestFaceRecognition:
onnx.load.return_value = proto
face_recognizer = FaceRecognizer("buffalo_s", cache_dir=path)
face_recognizer = FaceRecognizer("buffalo_s")
face_recognizer.load()
assert face_recognizer.batch is True
update_dims.assert_called_once_with(proto, {"input.1": ["batch", 3, 224, 224]}, {"output.1": ["batch", 800]})
onnx.save.assert_called_once_with(update_dims.return_value, face_recognizer.model_path)
def test_recognition_does_not_add_batch_axis_if_exists(
self, ort_session: mock.Mock, path: mock.Mock, mocker: MockerFixture
) -> None:
def test_recognition_does_not_add_batch_axis_if_exists(self, ort_session: mock.Mock, mocker: MockerFixture) -> None:
onnx = mocker.patch("app.models.facial_recognition.recognition.onnx", autospec=True)
update_dims = mocker.patch(
"app.models.facial_recognition.recognition.update_inputs_outputs_dims", autospec=True
)
mocker.patch("app.models.base.InferenceModel.download")
mocker.patch("app.models.facial_recognition.recognition.ArcFaceONNX")
path.return_value.__truediv__.return_value.__truediv__.return_value.suffix = ".onnx"
inputs = [SimpleNamespace(name="input.1", shape=("batch", 3, 224, 224))]
outputs = [SimpleNamespace(name="output.1", shape=("batch", 800))]
ort_session.return_value.get_inputs.return_value = inputs
ort_session.return_value.get_outputs.return_value = outputs
face_recognizer = FaceRecognizer("buffalo_s", cache_dir=path)
face_recognizer = FaceRecognizer("buffalo_s")
face_recognizer.load()
assert face_recognizer.batch is True
@@ -538,30 +520,6 @@ class TestFaceRecognition:
onnx.load.assert_not_called()
onnx.save.assert_not_called()
def test_recognition_does_not_add_batch_axis_for_armnn(
self, ann_session: mock.Mock, path: mock.Mock, mocker: MockerFixture
) -> None:
onnx = mocker.patch("app.models.facial_recognition.recognition.onnx", autospec=True)
update_dims = mocker.patch(
"app.models.facial_recognition.recognition.update_inputs_outputs_dims", autospec=True
)
mocker.patch("app.models.base.InferenceModel.download")
mocker.patch("app.models.facial_recognition.recognition.ArcFaceONNX")
path.return_value.__truediv__.return_value.__truediv__.return_value.suffix = ".armnn"
inputs = [SimpleNamespace(name="input.1", shape=("batch", 3, 224, 224))]
outputs = [SimpleNamespace(name="output.1", shape=("batch", 800))]
ann_session.return_value.get_inputs.return_value = inputs
ann_session.return_value.get_outputs.return_value = outputs
face_recognizer = FaceRecognizer("buffalo_s", model_format=ModelFormat.ARMNN, cache_dir=path)
face_recognizer.load()
assert face_recognizer.batch is False
update_dims.assert_not_called()
onnx.load.assert_not_called()
onnx.save.assert_not_called()
@pytest.mark.asyncio
class TestCache:
@@ -735,7 +693,7 @@ class TestLoad:
mock_model.clear_cache.assert_called_once()
assert mock_model.load.call_count == 2
async def test_load_raises_if_os_error_and_already_retried(self) -> None:
async def test_load_clears_cache_and_raises_if_os_error_and_already_retried(self) -> None:
mock_model = mock.Mock(spec=InferenceModel)
mock_model.model_name = "test_model_name"
mock_model.model_type = ModelType.VISUAL
@@ -749,27 +707,6 @@ class TestLoad:
mock_model.clear_cache.assert_not_called()
mock_model.load.assert_not_called()
async def test_falls_back_to_onnx_if_other_format_does_not_exist(
self, exception: mock.Mock, warning: mock.Mock
) -> None:
mock_model = mock.Mock(spec=InferenceModel)
mock_model.model_name = "test_model_name"
mock_model.model_type = ModelType.VISUAL
mock_model.model_task = ModelTask.SEARCH
mock_model.model_format = ModelFormat.ARMNN
mock_model.loaded = False
mock_model.load_attempts = 0
error = FileNotFoundError()
mock_model.load.side_effect = [error, None]
await load(mock_model)
mock_model.clear_cache.assert_not_called()
assert mock_model.load.call_count == 2
exception.assert_called_once_with(error)
warning.assert_called_once_with("ARMNN is available, but model 'test_model_name' does not support it.")
mock_model.model_format = ModelFormat.ONNX
@pytest.mark.skipif(
not settings.test_full,

View File

@@ -1,6 +1,6 @@
[tool.poetry]
name = "machine-learning"
version = "1.109.2"
version = "1.107.2"
description = ""
authors = ["Hau Tran <alex.tran1502@gmail.com>"]
readme = "README.md"

View File

@@ -5,14 +5,12 @@ lib_path="/usr/lib/$(arch)-linux-gnu/libmimalloc.so.2"
if ! [ "$DEVICE" = "openvino" ]; then
export LD_PRELOAD="$lib_path"
export LD_BIND_NOW=1
: "${MACHINE_LEARNING_WORKER_TIMEOUT:=120}"
else
: "${MACHINE_LEARNING_WORKER_TIMEOUT:=300}"
fi
: "${IMMICH_HOST:=[::]}"
: "${IMMICH_PORT:=3003}"
: "${MACHINE_LEARNING_WORKERS:=1}"
: "${MACHINE_LEARNING_WORKER_TIMEOUT:=120}"
gunicorn app.main:app \
-k app.config.CustomUvicornWorker \

View File

@@ -1,7 +1,7 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android" package="app.alextran.immich"
xmlns:tools="http://schemas.android.com/tools">
<application android:label="Immich" android:name=".ImmichApp" android:usesCleartextTraffic="true"
android:icon="@mipmap/ic_launcher" android:requestLegacyExternalStorage="true" android:largeHeap="true">
android:icon="@mipmap/ic_launcher" android:requestLegacyExternalStorage="true">
<meta-data
android:name="io.flutter.embedding.android.EnableImpeller"

View File

@@ -35,8 +35,8 @@ platform :android do
task: 'bundle',
build_type: 'Release',
properties: {
"android.injected.version.code" => 150,
"android.injected.version.name" => "1.109.2",
"android.injected.version.code" => 147,
"android.injected.version.name" => "1.107.2",
}
)
upload_to_play_store(skip_upload_apk: true, skip_upload_images: true, skip_upload_screenshots: true, aab: '../build/app/outputs/bundle/release/app-release.aab')

View File

@@ -167,7 +167,7 @@ SPEC CHECKSUMS:
path_provider_foundation: 29f094ae23ebbca9d3d0cec13889cd9060c0e943
path_provider_ios: 14f3d2fd28c4fdb42f44e0f751d12861c43cee02
permission_handler_apple: e76247795d700c14ea09e3a2d8855d41ee80a2e6
photo_manager: ff695c7a1dd5bc379974953a2b5c0a293f7c4c8a
photo_manager: 4f6810b7dfc4feb03b461ac1a70dacf91fba7604
ReachabilitySwift: 985039c6f7b23a1da463388634119492ff86c825
SAMKeychain: 483e1c9f32984d50ca961e26818a534283b4cd5c
share_plus: c3fef564749587fc939ef86ffb283ceac0baf9f5

View File

@@ -383,7 +383,7 @@
CODE_SIGN_ENTITLEMENTS = Runner/RunnerProfile.entitlements;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 163;
CURRENT_PROJECT_VERSION = 162;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_BITCODE = NO;
INFOPLIST_FILE = Runner/Info.plist;
@@ -525,7 +525,7 @@
CLANG_ENABLE_MODULES = YES;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 163;
CURRENT_PROJECT_VERSION = 162;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_BITCODE = NO;
INFOPLIST_FILE = Runner/Info.plist;
@@ -553,7 +553,7 @@
CLANG_ENABLE_MODULES = YES;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 163;
CURRENT_PROJECT_VERSION = 162;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_BITCODE = NO;
INFOPLIST_FILE = Runner/Info.plist;

View File

@@ -58,11 +58,11 @@
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>1.108.0</string>
<string>1.107.1</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>163</string>
<string>162</string>
<key>FLTEnableImpeller</key>
<true/>
<key>ITSAppUsesNonExemptEncryption</key>

View File

@@ -16,10 +16,10 @@
default_platform(:ios)
platform :ios do
desc "iOS Release"
lane :release do
desc "iOS Beta"
lane :beta do
increment_version_number(
version_number: "1.109.2"
version_number: "1.107.2"
)
increment_build_number(
build_number: latest_testflight_build_number + 1,

View File

@@ -104,7 +104,7 @@ Future<Isar> loadDb() async {
if (Platform.isIOS) IOSDeviceAssetSchema,
],
directory: dir.path,
maxSizeMiB: 1024,
maxSizeMiB: 256,
);
Store.init(db);
return db;

View File

@@ -3,7 +3,7 @@ Immich API
This Dart package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project:
- API version: 1.109.2
- API version: 1.107.2
- Generator version: 7.5.0
- Build package: org.openapitools.codegen.languages.DartClientCodegen

View File

@@ -23,7 +23,6 @@ class SystemConfigOAuthDto {
required this.issuerUrl,
required this.mobileOverrideEnabled,
required this.mobileRedirectUri,
required this.profileSigningAlgorithm,
required this.scope,
required this.signingAlgorithm,
required this.storageLabelClaim,
@@ -51,8 +50,6 @@ class SystemConfigOAuthDto {
String mobileRedirectUri;
String profileSigningAlgorithm;
String scope;
String signingAlgorithm;
@@ -73,7 +70,6 @@ class SystemConfigOAuthDto {
other.issuerUrl == issuerUrl &&
other.mobileOverrideEnabled == mobileOverrideEnabled &&
other.mobileRedirectUri == mobileRedirectUri &&
other.profileSigningAlgorithm == profileSigningAlgorithm &&
other.scope == scope &&
other.signingAlgorithm == signingAlgorithm &&
other.storageLabelClaim == storageLabelClaim &&
@@ -92,14 +88,13 @@ class SystemConfigOAuthDto {
(issuerUrl.hashCode) +
(mobileOverrideEnabled.hashCode) +
(mobileRedirectUri.hashCode) +
(profileSigningAlgorithm.hashCode) +
(scope.hashCode) +
(signingAlgorithm.hashCode) +
(storageLabelClaim.hashCode) +
(storageQuotaClaim.hashCode);
@override
String toString() => 'SystemConfigOAuthDto[autoLaunch=$autoLaunch, autoRegister=$autoRegister, buttonText=$buttonText, clientId=$clientId, clientSecret=$clientSecret, defaultStorageQuota=$defaultStorageQuota, enabled=$enabled, issuerUrl=$issuerUrl, mobileOverrideEnabled=$mobileOverrideEnabled, mobileRedirectUri=$mobileRedirectUri, profileSigningAlgorithm=$profileSigningAlgorithm, scope=$scope, signingAlgorithm=$signingAlgorithm, storageLabelClaim=$storageLabelClaim, storageQuotaClaim=$storageQuotaClaim]';
String toString() => 'SystemConfigOAuthDto[autoLaunch=$autoLaunch, autoRegister=$autoRegister, buttonText=$buttonText, clientId=$clientId, clientSecret=$clientSecret, defaultStorageQuota=$defaultStorageQuota, enabled=$enabled, issuerUrl=$issuerUrl, mobileOverrideEnabled=$mobileOverrideEnabled, mobileRedirectUri=$mobileRedirectUri, scope=$scope, signingAlgorithm=$signingAlgorithm, storageLabelClaim=$storageLabelClaim, storageQuotaClaim=$storageQuotaClaim]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
@@ -113,7 +108,6 @@ class SystemConfigOAuthDto {
json[r'issuerUrl'] = this.issuerUrl;
json[r'mobileOverrideEnabled'] = this.mobileOverrideEnabled;
json[r'mobileRedirectUri'] = this.mobileRedirectUri;
json[r'profileSigningAlgorithm'] = this.profileSigningAlgorithm;
json[r'scope'] = this.scope;
json[r'signingAlgorithm'] = this.signingAlgorithm;
json[r'storageLabelClaim'] = this.storageLabelClaim;
@@ -139,7 +133,6 @@ class SystemConfigOAuthDto {
issuerUrl: mapValueOfType<String>(json, r'issuerUrl')!,
mobileOverrideEnabled: mapValueOfType<bool>(json, r'mobileOverrideEnabled')!,
mobileRedirectUri: mapValueOfType<String>(json, r'mobileRedirectUri')!,
profileSigningAlgorithm: mapValueOfType<String>(json, r'profileSigningAlgorithm')!,
scope: mapValueOfType<String>(json, r'scope')!,
signingAlgorithm: mapValueOfType<String>(json, r'signingAlgorithm')!,
storageLabelClaim: mapValueOfType<String>(json, r'storageLabelClaim')!,
@@ -201,7 +194,6 @@ class SystemConfigOAuthDto {
'issuerUrl',
'mobileOverrideEnabled',
'mobileRedirectUri',
'profileSigningAlgorithm',
'scope',
'signingAlgorithm',
'storageLabelClaim',

View File

@@ -1162,10 +1162,10 @@ packages:
dependency: "direct main"
description:
name: photo_manager
sha256: "68d6099d07ce5033170f8368af8128a4555cf1d590a97242f83669552de989b1"
sha256: "8cf79918f6de9843b394a1670fe1aec54ebcac852b4b4c9ef88211894547dc61"
url: "https://pub.dev"
source: hosted
version: "3.2.0"
version: "3.0.0-dev.5"
photo_manager_image_provider:
dependency: "direct main"
description:

View File

@@ -2,7 +2,7 @@ name: immich_mobile
description: Immich - selfhosted backup media file on mobile phone
publish_to: 'none'
version: 1.109.2+150
version: 1.107.2+147
environment:
sdk: '>=3.3.0 <4.0.0'
@@ -15,7 +15,7 @@ dependencies:
path_provider_ios:
# TODO: upgrade to stable after 3.0.1 is released. 3.0.0 is broken
# https://github.com/fluttercandies/flutter_photo_manager/pull/990#issuecomment-2058066427
photo_manager: ^3.2.0
photo_manager: ^3.0.0-dev.5
photo_manager_image_provider: ^2.1.0
flutter_hooks: ^0.20.4
hooks_riverpod: ^2.4.9

View File

@@ -50,7 +50,7 @@ final class TestUtils {
AndroidDeviceAssetSchema,
IOSDeviceAssetSchema,
],
maxSizeMiB: 1024,
maxSizeMiB: 256,
directory: "test/",
);

View File

@@ -7007,7 +7007,7 @@
"info": {
"title": "Immich",
"description": "Immich API",
"version": "1.109.2",
"version": "1.107.2",
"contact": {}
},
"tags": [],
@@ -11030,9 +11030,6 @@
"mobileRedirectUri": {
"type": "string"
},
"profileSigningAlgorithm": {
"type": "string"
},
"scope": {
"type": "string"
},
@@ -11057,7 +11054,6 @@
"issuerUrl",
"mobileOverrideEnabled",
"mobileRedirectUri",
"profileSigningAlgorithm",
"scope",
"signingAlgorithm",
"storageLabelClaim",

View File

@@ -1 +1 @@
20.15.1
20.15.0

View File

@@ -1,18 +1,18 @@
{
"name": "@immich/sdk",
"version": "1.109.2",
"version": "1.107.2",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@immich/sdk",
"version": "1.109.2",
"version": "1.107.2",
"license": "GNU Affero General Public License version 3",
"dependencies": {
"@oazapfts/runtime": "^1.0.2"
},
"devDependencies": {
"@types/node": "^20.14.10",
"@types/node": "^20.14.9",
"typescript": "^5.3.3"
}
},
@@ -22,9 +22,9 @@
"integrity": "sha512-8tKiYffhwTGHSHYGnZ3oneLGCjX0po/XAXQ5Ng9fqKkvIdl/xz8+Vh8i+6xjzZqvZ2pLVpUcuSfnvNI/x67L0g=="
},
"node_modules/@types/node": {
"version": "20.14.10",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.14.10.tgz",
"integrity": "sha512-MdiXf+nDuMvY0gJKxyfZ7/6UFsETO7mGKF54MVD/ekJS6HdFtpZFBgrh6Pseu64XTb2MLyFPlbW6hj8HYRQNOQ==",
"version": "20.14.9",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.14.9.tgz",
"integrity": "sha512-06OCtnTXtWOZBJlRApleWndH4JsRVs1pDCc8dLSQp+7PpUpX3ePdHyeNSFTeSe7FtKyQkrlPvHwJOW3SLd8Oyg==",
"dev": true,
"license": "MIT",
"dependencies": {

View File

@@ -1,6 +1,6 @@
{
"name": "@immich/sdk",
"version": "1.109.2",
"version": "1.107.2",
"description": "Auto-generated TypeScript SDK for the Immich API",
"type": "module",
"main": "./build/index.js",
@@ -19,7 +19,7 @@
"@oazapfts/runtime": "^1.0.2"
},
"devDependencies": {
"@types/node": "^20.14.10",
"@types/node": "^20.14.9",
"typescript": "^5.3.3"
},
"repository": {
@@ -28,6 +28,6 @@
"directory": "open-api/typescript-sdk"
},
"volta": {
"node": "20.15.1"
"node": "20.15.0"
}
}

View File

@@ -1,6 +1,6 @@
/**
* Immich
* 1.109.2
* 1.107.2
* DO NOT MODIFY - This file has been generated using oazapfts.
* See https://www.npmjs.com/package/oazapfts
*/
@@ -1063,7 +1063,6 @@ export type SystemConfigOAuthDto = {
issuerUrl: string;
mobileOverrideEnabled: boolean;
mobileRedirectUri: string;
profileSigningAlgorithm: string;
scope: string;
signingAlgorithm: string;
storageLabelClaim: string;

View File

@@ -37,96 +37,72 @@
## Warnung
- ⚠️ Das Projekt befindet sich in **sehr aktiver** Entwicklung.
- ⚠️ Gehe von möglichen Fehlern und von Änderungen mit Breaking-Changes aus.
- ⚠️ Erwarte Fehler und Änderungen mit Breaking-Changes.
- ⚠️ **Nutze die App auf keinen Fall als einziges Speichermedium für deine Fotos und Videos.**
- ⚠️ Befolge immer die [3-2-1](https://www.backblaze.com/blog/the-3-2-1-backup-strategy/) Backup-Regel für deine wertvollen Fotos und Videos!
> [!NOTE]
> Die Hauptdokumentation, einschließlich der Installationsanleitungen, befinden sich unter https://immich.app/.
## Inhalt
- [Offizielle Dokumentation](https://immich.app/docs)
- [Über Immich](https://immich.app/docs/overview/introduction)
- [Installation](https://immich.app/docs/install/requirements)
- [Roadmap](https://github.com/orgs/immich-app/projects/1)
- [Demo](#demo)
- [Funktionen](#funktionen)
- [Übersetzungen](https://immich.app/docs/developer/translations)
- [Einführung](https://immich.app/docs/overview/introduction)
- [Installation](https://immich.app/docs/install/requirements)
- [Beitragsrichtlinien](https://immich.app/docs/overview/support-the-project)
## Dokumentation
Die Hauptdokumentation, inklusive Installationsanleitungen, ist unter https://immich.app zu finden.
## Demo
Die Web-Demo kannst Du unter https://demo.immich.app finden.
Die Demo läuft auf einer Free Tier Oracle VM in Amsterdam mit einer 2.4Ghz Quad-Core ARM64 CPU und 24GB RAM.
Für die Handy-App kannst Du `https://demo.immich.app/api` als `Server Endpoint URL` angeben.
### Login Daten
```bash title="Demo Credential"
Die Anmeldedaten
email: demo@immich.app
passwort: demo
```
| Email | Password |
| --------------- | -------- |
| demo@immich.app | demo |
```
Spec: Free-tier Oracle VM - Amsterdam - 2.4Ghz quad-core ARM64 CPU, 24GB RAM
```
## Funktionen
| Funktionen | Mobil | Web |
| ---------------------------------------------------- | ------ | ----- |
| Fotos & Videos hochladen und ansehen | Ja | Ja |
| Automatische Sicherung beim öffnen der App | Ja | k. A |
| Selektive Auswahl von Alben zum Sichern | Ja | k. A |
| Vermeidung von Duplikaten | Ja | Ja |
| Automatisches Backup wenn die App geöffnet ist | Ja | n. a. |
| Selektive Auswahl von Alben zum Sichern | Ja | n. a. |
| Fotos und Videos auf das Gerät herunterladen | Ja | Ja |
| Unterstützung mehrerer Benutzer | Ja | Ja |
| Alben und geteilte Alben | Ja | Ja |
| Scrollbar mit Scrubbing-/Drag-Funktion | Ja | Ja |
| Unterstützung für RAW Formate | Ja | Ja |
| Unterstützt mehrere Benutzer | Ja | Ja |
| Album und geteilte Alben | Ja | Ja |
| Scrollleiste | Ja | Ja |
| Unterstützt RAW Formate | Ja | Ja |
| Metadaten anzeigen (EXIF, Karte) | Ja | Ja |
| Suchen nach Metadaten, Objekten, Gesichtern und CLIP | Ja | Ja |
| Administrative Funktionen (Benutzerverwaltung) | Nein | Ja |
| Hintergrundsicherung | Ja | k. A |
| Backup im Hintergrund | Ja | n. a. |
| Virtuelles Scrollen | Ja | Ja |
| OAuth Unterstützung | Ja | Ja |
| API-Schlüssel | k. A | Ja |
| LivePhoto/MotionPhoto Sicherung und Wiedergabe | Ja | Ja |
| Unterstützung für 360-Grad-Bilder | Nein | Ja |
| API-Schlüssel | n. a. | Ja |
| LivePhoto/MotionPhoto Backup und Wiedergabe | Ja | Ja |
| Benutzerdefinierte Speicherstruktur | Ja | Ja |
| Öffentliches Teilen | Nein | Ja |
| Archiv und Favoriten | Ja | Ja |
| Archive und Favoriten | Ja | Ja |
| Globale Karte | Ja | Ja |
| Partnerfreigabe (Teilen) | Ja | Ja |
| Gesichtserkennung und -gruppierung | Ja | Ja |
| Teilen mit Partner | Ja | Ja |
| Gesichtserkennung und Gruppierung | Ja | Ja |
| Rückblicke (heute vor x Jahren) | Ja | Ja |
| Offline Unterstützung | Ja | Nein |
| Schreibgeschützte Gallerie | Ja | Ja |
| Gestapelte Bilder | Ja | Ja |
## Übersetzungen
Mehr zum Thema Übersetzungen kannst du [hier](https://immich.app/docs/developer/translations) erfahren.
<a href="https://hosted.weblate.org/engage/immich/">
<img src="https://hosted.weblate.org/widget/immich/immich/multi-auto.svg" alt="Translation status" />
</a>
## Repository-Aktivität
![Activities](https://repobeats.axiom.co/api/embed/9e86d9dc3ddd137161f2f6d2e758d7863b1789cb.svg "Repobeats analytics image")
## Github Sterne
<a href="https://star-history.com/#immich-app/immich&Date">
<picture>
<source media="(prefers-color-scheme: dark)" srcset="https://api.star-history.com/svg?repos=immich-app/immich&type=Date&theme=dark" />
<source media="(prefers-color-scheme: light)" srcset="https://api.star-history.com/svg?repos=immich-app/immich&type=Date" />
<img alt="Star History Chart" src="https://api.star-history.com/svg?repos=immich-app/immich&type=Date" width="100%" />
</picture>
</a>
## Mitwirkende
<a href="https://github.com/alextran1502/immich/graphs/contributors">
<img src="https://contrib.rocks/image?repo=immich-app/immich" width="100%"/>
</a>

View File

@@ -1 +1 @@
20.15.1
20.15

View File

@@ -1,5 +1,5 @@
# dev build
FROM ghcr.io/immich-app/base-server-dev:20240718@sha256:55e009a70528cbea353c42e899ca5fbc3194cf75dd6fea39a4c01f3f40ccc6bd as dev
FROM ghcr.io/immich-app/base-server-dev:20240708@sha256:2a9e3231c34493cb861299d475c84c031e7f04519dbc895bbebb5017d479a3cb as dev
RUN apt-get install --no-install-recommends -yqq tini
WORKDIR /usr/src/app
@@ -25,7 +25,7 @@ COPY --from=dev /usr/src/app/node_modules/@img ./node_modules/@img
COPY --from=dev /usr/src/app/node_modules/exiftool-vendored.pl ./node_modules/exiftool-vendored.pl
# web build
FROM node:20.15.1-alpine3.20@sha256:34b7aa411056c85dbf71d240d26516949b3f72b318d796c26b57caaa1df5639a as web
FROM node:20.15.0-alpine3.20@sha256:df01469346db2bf1cfc1f7261aeab86b2960efa840fe2bd46d83ff339f463665 as web
WORKDIR /usr/src/open-api/typescript-sdk
COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./
@@ -41,7 +41,7 @@ RUN npm run build
# prod build
FROM ghcr.io/immich-app/base-server-prod:20240718@sha256:f386227411c908ebfa983f04d1981e7df07d1314fcdf271ab987e7236379b606
FROM ghcr.io/immich-app/base-server-prod:20240708@sha256:0af3a5bb036c9a4b6a5a51becaa6e94fe182f6bc97480d57e8f2e6f994bfa453
WORKDIR /usr/src/app
ENV NODE_ENV=production \

server/package-lock.json (generated, 1581 lines changed)

File diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
{
"name": "immich",
"version": "1.109.2",
"version": "1.107.2",
"description": "",
"author": "",
"private": true,
@@ -46,7 +46,7 @@
"@nestjs/swagger": "^7.1.8",
"@nestjs/typeorm": "^10.0.0",
"@nestjs/websockets": "^10.2.2",
"@opentelemetry/auto-instrumentations-node": "^0.48.0",
"@opentelemetry/auto-instrumentations-node": "^0.47.0",
"@opentelemetry/context-async-hooks": "^1.24.0",
"@opentelemetry/exporter-prometheus": "^0.52.0",
"@opentelemetry/sdk-node": "^0.52.0",
@@ -60,7 +60,7 @@
"class-transformer": "^0.5.1",
"class-validator": "^0.14.0",
"cookie-parser": "^1.4.6",
"exiftool-vendored": "26.0.0",
"exiftool-vendored": "~27.0.0",
"fast-glob": "^3.3.2",
"fluent-ffmpeg": "^2.1.2",
"geo-tz": "^8.0.0",
@@ -106,7 +106,7 @@
"@types/lodash": "^4.14.197",
"@types/mock-fs": "^4.13.1",
"@types/multer": "^1.4.7",
"@types/node": "^20.14.10",
"@types/node": "^20.14.9",
"@types/nodemailer": "^6.4.14",
"@types/picomatch": "^2.3.3",
"@types/semver": "^7.5.8",
@@ -122,7 +122,7 @@
"mock-fs": "^5.2.0",
"prettier": "^3.0.2",
"prettier-plugin-organize-imports": "^4.0.0",
"rimraf": "^6.0.0",
"rimraf": "^5.0.1",
"source-map-support": "^0.5.21",
"sql-formatter": "^15.0.0",
"tsconfig-paths": "^4.2.0",
@@ -132,6 +132,6 @@
"vitest": "^1.5.0"
},
"volta": {
"node": "20.15.1"
"node": "20.15.0"
}
}

View File

@@ -146,7 +146,6 @@ export interface SystemConfig {
mobileRedirectUri: string;
scope: string;
signingAlgorithm: string;
profileSigningAlgorithm: string;
storageLabelClaim: string;
storageQuotaClaim: string;
};
@@ -290,7 +289,6 @@ export const defaults = Object.freeze<SystemConfig>({
mobileRedirectUri: '',
scope: 'openid email profile',
signingAlgorithm: 'RS256',
profileSigningAlgorithm: 'none',
storageLabelClaim: 'preferred_username',
storageQuotaClaim: 'immich_quota',
},
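One side of this hunk drops `profileSigningAlgorithm` from the OAuth config (the other keeps it with a `'none'` default). For orientation, the two algorithm settings correspond to the two signed-response fields on an OIDC client registration, as the test auth-server earlier in this diff illustrates. A hedged sketch of that mapping (the field names come from the `oidc-provider` client metadata above; the mapping itself is an inference from this diff, not confirmed Immich code):

```typescript
// How the two config values would line up with OIDC client metadata:
const clientMetadata = {
  client_id: 'client-RS256-profile',
  redirect_uris: ['http://127.0.0.1:2283/auth/login'],
  grant_types: ['authorization_code'],
  id_token_signed_response_alg: 'RS256', // <- oauth.signingAlgorithm
  userinfo_signed_response_alg: 'RS256', // <- oauth.profileSigningAlgorithm
};
```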

View File

@@ -43,7 +43,6 @@ export const resourcePaths = {
admin1: join(folders.geodata, 'admin1CodesASCII.txt'),
admin2: join(folders.geodata, 'admin2Codes.txt'),
cities500: join(folders.geodata, citiesFile),
naturalEarthCountriesPath: join(folders.geodata, 'ne_10m_admin_0_countries.geojson'),
},
web: {
root: folders.web,

View File

@@ -95,7 +95,7 @@ export class ServerController {
@Get('license')
@Authenticated({ admin: true })
getServerLicense(): Promise<LicenseResponseDto | null> {
getServerLicense(): Promise<LicenseKeyDto | null> {
return this.service.getLicense();
}
}

View File

@@ -49,26 +49,23 @@ function chunks<T>(collection: Array<T> | Set<T>, size: number): Array<Array<T>>
* @param options.paramIndex The index of the function parameter to chunk. Defaults to 0.
* @param options.flatten Whether to flatten the results. Defaults to false.
*/
export function Chunked(
options: { paramIndex?: number; chunkSize?: number; mergeFn?: (results: any) => any } = {},
): MethodDecorator {
export function Chunked(options: { paramIndex?: number; mergeFn?: (results: any) => any } = {}): MethodDecorator {
return (target: any, propertyKey: string | symbol, descriptor: PropertyDescriptor) => {
const originalMethod = descriptor.value;
const parameterIndex = options.paramIndex ?? 0;
const chunkSize = options.chunkSize || DATABASE_PARAMETER_CHUNK_SIZE;
descriptor.value = async function (...arguments_: any[]) {
const argument = arguments_[parameterIndex];
// Early return if argument length is less than or equal to the chunk size.
if (
(Array.isArray(argument) && argument.length <= chunkSize) ||
(argument instanceof Set && argument.size <= chunkSize)
(Array.isArray(argument) && argument.length <= DATABASE_PARAMETER_CHUNK_SIZE) ||
(argument instanceof Set && argument.size <= DATABASE_PARAMETER_CHUNK_SIZE)
) {
return await originalMethod.apply(this, arguments_);
}
return Promise.all(
chunks(argument, chunkSize).map(async (chunk) => {
chunks(argument, DATABASE_PARAMETER_CHUNK_SIZE).map(async (chunk) => {
await Reflect.apply(originalMethod, this, [
...arguments_.slice(0, parameterIndex),
chunk,
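One side of this hunk makes the decorator's batch size configurable per call site instead of always using `DATABASE_PARAMETER_CHUNK_SIZE`. A self-contained sketch of the pattern (simplified from the code above; not the exact Immich source, and the default value is a stand-in):

```typescript
// Compile with experimentalDecorators enabled.
const DEFAULT_CHUNK_SIZE = 65_500; // stand-in for DATABASE_PARAMETER_CHUNK_SIZE

function chunks<T>(items: T[], size: number): T[][] {
  const result: T[][] = [];
  for (let i = 0; i < items.length; i += size) {
    result.push(items.slice(i, i + size));
  }
  return result;
}

function Chunked(options: { paramIndex?: number; chunkSize?: number } = {}): MethodDecorator {
  return (_target, _key, descriptor: PropertyDescriptor) => {
    const originalMethod = descriptor.value;
    const paramIndex = options.paramIndex ?? 0;
    const chunkSize = options.chunkSize ?? DEFAULT_CHUNK_SIZE;
    descriptor.value = async function (...args: any[]) {
      const argument = args[paramIndex] as unknown[];
      // Early return if the argument fits in a single chunk.
      if (argument.length <= chunkSize) {
        return originalMethod.apply(this, args);
      }
      // Otherwise fan the call out, one invocation per chunk.
      return Promise.all(
        chunks(argument, chunkSize).map((chunk) =>
          originalMethod.apply(this, [...args.slice(0, paramIndex), chunk, ...args.slice(paramIndex + 1)]),
        ),
      );
    };
  };
}

class ExampleRepository {
  // A call site can now pick a smaller batch than the database default:
  @Chunked({ paramIndex: 1, chunkSize: 30_000 })
  async addAssetIds(albumId: string, assetIds: string[]): Promise<void> {
    // invoked once per chunk of at most 30_000 ids
  }
}
```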

View File

@@ -5,7 +5,6 @@ import { APIKeyEntity } from 'src/entities/api-key.entity';
import { SessionEntity } from 'src/entities/session.entity';
import { SharedLinkEntity } from 'src/entities/shared-link.entity';
import { UserEntity } from 'src/entities/user.entity';
import { toEmail } from 'src/validation';
export enum ImmichCookie {
ACCESS_TOKEN = 'immich_access_token',
@@ -42,7 +41,7 @@ export class AuthDto {
export class LoginCredentialDto {
@IsEmail({ require_tld: false })
@Transform(toEmail)
@Transform(({ value }) => value?.toLowerCase())
@IsNotEmpty()
@ApiProperty({ example: 'testuser@email.com' })
email!: string;
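One side of this hunk swaps the inline `({ value }) => value?.toLowerCase()` transform for a shared `toEmail` helper imported from `src/validation`. The helper itself is not shown in this diff; a plausible sketch (hypothetical, the real implementation may differ) would guard on the value's type so that non-string payloads like `[]` still fall through to `@IsEmail` and fail validation, as the auth e2e test above expects:

```typescript
import { TransformFnParams } from 'class-transformer';

// Hypothetical sketch of src/validation's toEmail: normalize case for
// strings only, and leave anything else untouched for @IsEmail to reject.
export const toEmail = ({ value }: TransformFnParams): unknown =>
  typeof value === 'string' ? value.toLowerCase() : value;
```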

View File

@@ -349,10 +349,6 @@ class SystemConfigOAuthDto {
@IsNotEmpty()
signingAlgorithm!: string;
@IsString()
@IsNotEmpty()
profileSigningAlgorithm!: string;
@IsString()
storageLabelClaim!: string;

View File

@@ -38,22 +38,6 @@ describe('create user DTO', () => {
expect(errors).toHaveLength(0);
});
it('validates invalid email type', async () => {
let dto = plainToInstance(UserAdminCreateDto, {
email: [],
password: 'some password',
name: 'some name',
});
expect(await validate(dto)).toHaveLength(1);
dto = plainToInstance(UserAdminCreateDto, {
email: {},
password: 'some password',
name: 'some name',
});
expect(await validate(dto)).toHaveLength(1);
});
it('should allow emails without a tld', async () => {
const someEmail = 'test@test';

View File

@@ -1,186 +0,0 @@
import {
Body,
Button,
Column,
Container,
Head,
Hr,
Html,
Img,
Link,
Preview,
Row,
Section,
Text,
} from '@react-email/components';
import * as CSS from 'csstype';
import * as React from 'react';
/**
* Template to be used for FUTOPay project
* Variable is {{LICENSEKEY}}
* */
export const LicenseEmail = () => (
<Html>
<Head />
<Preview>Your Immich Server License</Preview>
<Body
style={{
margin: 0,
padding: 0,
backgroundColor: '#f4f4f4',
color: 'rgb(28,28,28)',
fontFamily: 'Overpass, sans-serif',
fontSize: '18px',
lineHeight: '24px',
}}
>
<Container
style={{
width: '540px',
maxWidth: '100%',
padding: '10px',
margin: '0 auto',
}}
>
<Section
style={{
padding: '36px',
tableLayout: 'fixed',
backgroundColor: '#fefefe',
borderRadius: '16px',
textAlign: 'center' as const,
}}
>
<Img
src="https://immich.app/img/immich-logo-inline-light.png"
alt="Immich"
style={{
height: 'auto',
margin: '0 auto 48px auto',
width: '50%',
alignSelf: 'center',
color: 'white',
}}
/>
<Text style={text}>Thank you for supporting Immich and open-source software</Text>
<Text style={text}>
Your <strong>Immich</strong> license key is
</Text>
<Section
style={{
textAlign: 'center',
background: 'rgb(225, 225, 225)',
borderRadius: '16px',
marginBottom: '25px',
}}
>
<Text style={{ fontFamily: 'monospace', fontWeight: 600, color: 'rgb(66, 80, 175)' }}>
{'{{LICENSEKEY}}'}
</Text>
</Section>
{/* <Text style={text}>
To activate your instance, you can click the following button or copy and paste the link below to your
browser
</Text>
<Row>
<Column align="center">
<Button
style={button}
href={`https://my.immich.app/link?target=activate_license&licenseKey={{LICENSEKEY}}&activationKey={{ACTIVATIONKEY}}`}
>
Activate
</Button>
</Column>
</Row>
<Row>
<Column align="center">
<a
style={{ marginTop: '50px', color: 'rgb(66, 80, 175)', fontSize: '0.9rem' }}
href={`https://my.immich.app/link?target=activate_license&licenseKey={{LICENSEKEY}}&activationKey={{ACTIVATIONKEY}}`}
>
https://my.immich.app/link?target=activate_license&licenseKey={'{{LICENSEKEY}}'}&activationKey=
{'{{ACTIVATIONKEY}}'}
</a>
</Column>
</Row> */}
</Section>
<Section style={{ textAlign: 'center' }}>
<Row>
<Column align="center">
<Link href="https://futo.org">
<Img
src="https://futo.org/images/FutoMainLogo.svg"
alt="FUTO"
style={{
height: '24px',
marginTop: '25px',
marginBottom: '25px',
}}
/>
</Link>
</Column>
</Row>
</Section>
<Hr style={{ color: 'rgb(66, 80, 175)', marginTop: '0' }} />
<Section style={{ textAlign: 'center' }}>
<Column align="center">
<Link href="https://apps.apple.com/sg/app/immich/id1613945652">
<Img
src={`https://immich.app/img/ios-app-store-badge.png`}
alt="Immich"
style={{ height: '72px', padding: '14px' }}
/>
</Link>
<Link href="https://play.google.com/store/apps/details?id=app.alextran.immich">
<Img src={`https://immich.app/img/google-play-badge.png`} height="96px" alt="Immich" />
</Link>
</Column>
</Section>
<Text
style={{
color: '#6a737d',
fontSize: '0.8rem',
textAlign: 'center' as const,
marginTop: '14px',
}}
>
<Link href="https://immich.app">Immich</Link> project is available under GNU AGPL v3 license.
</Text>
</Container>
</Body>
</Html>
);
LicenseEmail.PreviewProps = {};
export default LicenseEmail;
const text = {
margin: '0 0 24px 0',
textAlign: 'left' as const,
fontSize: '16px',
lineHeight: '24px',
};
const button: CSS.Properties = {
backgroundColor: 'rgb(66, 80, 175)',
margin: '1em 0',
padding: '0.75em 3em',
color: '#fff',
fontSize: '1em',
fontWeight: 600,
lineHeight: 1.5,
textTransform: 'uppercase',
borderRadius: '9999px',
};

View File

@@ -12,7 +12,6 @@ import { GeodataPlacesEntity } from 'src/entities/geodata-places.entity';
import { LibraryEntity } from 'src/entities/library.entity';
import { MemoryEntity } from 'src/entities/memory.entity';
import { MoveEntity } from 'src/entities/move.entity';
import { NaturalEarthCountriesEntity } from 'src/entities/natural-earth-countries.entity';
import { PartnerEntity } from 'src/entities/partner.entity';
import { PersonEntity } from 'src/entities/person.entity';
import { SessionEntity } from 'src/entities/session.entity';
@@ -37,7 +36,6 @@ export const entities = [
ExifEntity,
FaceSearchEntity,
GeodataPlacesEntity,
NaturalEarthCountriesEntity,
MemoryEntity,
MoveEntity,
PartnerEntity,

View File

@@ -1,19 +0,0 @@
import { Column, Entity, PrimaryGeneratedColumn } from 'typeorm';
@Entity('naturalearth_countries', { synchronize: false })
export class NaturalEarthCountriesEntity {
@PrimaryGeneratedColumn()
id!: number;
@Column({ type: 'varchar', length: 50 })
admin!: string;
@Column({ type: 'varchar', length: 3 })
admin_a3!: string;
@Column({ type: 'varchar', length: 50 })
type!: string;
@Column({ type: 'polygon' })
coordinates!: string;
}

View File

@@ -12,7 +12,6 @@ export class SystemMetadataEntity<T extends keyof SystemMetadata = SystemMetadat
export enum SystemMetadataKey {
REVERSE_GEOCODING_STATE = 'reverse-geocoding-state',
FACIAL_RECOGNITION_STATE = 'facial-recognition-state',
ADMIN_ONBOARDING = 'admin-onboarding',
SYSTEM_CONFIG = 'system-config',
VERSION_CHECK_STATE = 'version-check-state',
@@ -23,7 +22,6 @@ export type VersionCheckMetadata = { checkedAt: string; releaseVersion: string }
export interface SystemMetadata extends Record<SystemMetadataKey, Record<string, any>> {
[SystemMetadataKey.REVERSE_GEOCODING_STATE]: { lastUpdate?: string; lastImportFileName?: string };
[SystemMetadataKey.FACIAL_RECOGNITION_STATE]: { lastRun?: string };
[SystemMetadataKey.ADMIN_ONBOARDING]: { isOnboarded: boolean };
[SystemMetadataKey.SYSTEM_CONFIG]: DeepPartial<SystemConfig>;
[SystemMetadataKey.VERSION_CHECK_STATE]: VersionCheckMetadata;
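A short sketch of the typed access this interface enables; `set` appears verbatim in the MapRepository hunk below, while `get` is an assumption about the repository's read side:

// The key narrows the value type via the SystemMetadata interface.
await metadataRepository.set(SystemMetadataKey.REVERSE_GEOCODING_STATE, {
  lastUpdate: new Date().toISOString(),
});
const state = await metadataRepository.get(SystemMetadataKey.REVERSE_GEOCODING_STATE);
// state is typed as { lastUpdate?: string; lastImportFileName?: string }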

View File

@@ -30,6 +30,6 @@ export interface IAlbumRepository extends IBulkAsset {
getAll(): Promise<AlbumEntity[]>;
create(album: Partial<AlbumEntity>): Promise<AlbumEntity>;
update(album: Partial<AlbumEntity>): Promise<AlbumEntity>;
delete(id: string): Promise<void>;
delete(album: AlbumEntity): Promise<void>;
updateThumbnails(): Promise<number | undefined>;
}
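With delete(album: AlbumEntity), callers load the entity before deleting instead of passing a bare id. A hedged sketch of the service side (the AlbumService itself is not part of this hunk; getById and its options shape are assumed from the test hunk near the end of this diff):

const album = await albumRepository.getById(id, { withAssets: false }); // options shape assumed
if (!album) {
  throw new BadRequestException('Album not found');
}
await albumRepository.delete(album);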

View File

@@ -153,10 +153,6 @@ export interface IDeferrableJob extends IEntityJob {
deferred?: boolean;
}
export interface INightlyJob extends IBaseJob {
nightly?: boolean;
}
export interface IEmailJob {
to: string;
subject: string;
@@ -233,7 +229,7 @@ export type JobItem =
// Facial Recognition
| { name: JobName.QUEUE_FACE_DETECTION; data: IBaseJob }
| { name: JobName.FACE_DETECTION; data: IEntityJob }
| { name: JobName.QUEUE_FACIAL_RECOGNITION; data: INightlyJob }
| { name: JobName.QUEUE_FACIAL_RECOGNITION; data: IBaseJob }
| { name: JobName.FACIAL_RECOGNITION; data: IDeferrableJob }
| { name: JobName.GENERATE_PERSON_THUMBNAIL; data: IEntityJob }
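The removed INightlyJob variant let the scheduler tag the nightly facial-recognition queue run. A hedged sketch of enqueueing one; `jobRepository.queue` and the optional `force` flag are assumptions, not shown in this diff:

await jobRepository.queue({
  name: JobName.QUEUE_FACIAL_RECOGNITION,
  data: { force: false, nightly: true }, // `nightly` comes from INightlyJob
});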

View File

@@ -16,7 +16,6 @@ export interface ThumbnailOptions {
colorspace: string;
quality: number;
crop?: CropOptions;
processInvalidImages: boolean;
}
export interface VideoStreamInfo {

View File

@@ -64,5 +64,4 @@ export interface IPersonRepository {
getNumberOfPeople(userId: string): Promise<PeopleStatistics>;
reassignFaces(data: UpdateFacesData): Promise<number>;
update(entity: Partial<PersonEntity>): Promise<PersonEntity>;
getLatestFaceDate(): Promise<string | undefined>;
}

View File

@@ -1,14 +0,0 @@
import { MigrationInterface, QueryRunner } from "typeorm";
export class NaturalEarthCountries1720375641148 implements MigrationInterface {
name = 'NaturalEarthCountries1720375641148'
public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(`CREATE TABLE "naturalearth_countries" ("id" SERIAL NOT NULL, "admin" character varying(50) NOT NULL, "admin_a3" character varying(3) NOT NULL, "type" character varying(50) NOT NULL, "coordinates" polygon NOT NULL, CONSTRAINT "PK_21a6d86d1ab5d841648212e5353" PRIMARY KEY ("id"))`);
}
public async down(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(`DROP TABLE "naturalearth_countries"`);
}
}
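A short sketch of exercising this migration through a TypeORM DataSource, assuming `dataSource` is the app's initialized instance:

await dataSource.runMigrations();     // runs up(), creating "naturalearth_countries"
await dataSource.undoLastMigration(); // runs down(), dropping it again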

View File

@@ -434,9 +434,3 @@ WHERE
(("AssetFaceEntity"."personId" = $1))
LIMIT
1
-- PersonRepository.getLatestFaceDate
SELECT
MAX("jobStatus"."facesRecognizedAt")::text AS "latestDate"
FROM
"asset_job_status" "jobStatus"

View File

@@ -5,16 +5,7 @@ import { AlbumEntity } from 'src/entities/album.entity';
import { AssetEntity } from 'src/entities/asset.entity';
import { AlbumAssetCount, AlbumInfoOptions, IAlbumRepository } from 'src/interfaces/album.interface';
import { Instrumentation } from 'src/utils/instrumentation';
import {
DataSource,
EntityManager,
FindOptionsOrder,
FindOptionsRelations,
In,
IsNull,
Not,
Repository,
} from 'typeorm';
import { DataSource, FindOptionsOrder, FindOptionsRelations, In, IsNull, Not, Repository } from 'typeorm';
const withoutDeletedUsers = <T extends AlbumEntity | null>(album: T) => {
if (album) {
@@ -264,46 +255,24 @@ export class AlbumRepository implements IAlbumRepository {
@GenerateSql({ params: [DummyValue.UUID, [DummyValue.UUID]] })
async addAssetIds(albumId: string, assetIds: string[]): Promise<void> {
await this.addAssets(this.dataSource.manager, albumId, assetIds);
await this.dataSource
.createQueryBuilder()
.insert()
.into('albums_assets_assets', ['albumsId', 'assetsId'])
.values(assetIds.map((assetId) => ({ albumsId: albumId, assetsId: assetId })))
.execute();
}
create(album: Partial<AlbumEntity>): Promise<AlbumEntity> {
return this.dataSource.transaction<AlbumEntity>(async (manager) => {
const { id } = await manager.save(AlbumEntity, { ...album, assets: [] });
const assetIds = (album.assets || []).map((asset) => asset.id);
await this.addAssets(manager, id, assetIds);
return manager.findOneOrFail(AlbumEntity, {
where: { id },
relations: {
owner: true,
albumUsers: { user: true },
sharedLinks: true,
assets: true,
},
});
});
return this.save(album);
}
update(album: Partial<AlbumEntity>): Promise<AlbumEntity> {
return this.save(album);
}
async delete(id: string): Promise<void> {
await this.repository.delete({ id });
}
@Chunked({ paramIndex: 2, chunkSize: 30_000 })
private async addAssets(manager: EntityManager, albumId: string, assetIds: string[]): Promise<void> {
if (assetIds.length === 0) {
return;
}
await manager
.createQueryBuilder()
.insert()
.into('albums_assets_assets', ['albumsId', 'assetsId'])
.values(assetIds.map((assetId) => ({ albumsId: albumId, assetsId: assetId })))
.execute();
async delete(album: AlbumEntity): Promise<void> {
await this.repository.remove(album);
}
private async save(album: Partial<AlbumEntity>) {
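The @Chunked({ paramIndex: 2, chunkSize: 30_000 }) decorator on the removed addAssets splits the assetIds argument into batches, keeping each insert under PostgreSQL's bind-parameter limit. A minimal sketch of the equivalent behavior (the decorator itself lives in src/decorators and is not shown here):

// Split the array argument into fixed-size batches and run the body per batch.
async function inChunks<T>(items: T[], chunkSize: number, run: (batch: T[]) => Promise<void>): Promise<void> {
  for (let i = 0; i < items.length; i += chunkSize) {
    await run(items.slice(i, i + chunkSize));
  }
}

// Roughly what the decorated call expands to:
await inChunks(assetIds, 30_000, (batch) => addAssets(manager, albumId, batch));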

View File

@@ -1,10 +1,11 @@
import { Injectable } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { Chunked, ChunkedArray, DummyValue, GenerateSql } from 'src/decorators';
import { AssetOrder } from 'src/entities/album.entity';
import { AlbumEntity, AssetOrder } from 'src/entities/album.entity';
import { AssetJobStatusEntity } from 'src/entities/asset-job-status.entity';
import { AssetEntity, AssetType } from 'src/entities/asset.entity';
import { ExifEntity } from 'src/entities/exif.entity';
import { PartnerEntity } from 'src/entities/partner.entity';
import { SmartInfoEntity } from 'src/entities/smart-info.entity';
import {
AssetBuilderOptions,
@@ -62,6 +63,8 @@ export class AssetRepository implements IAssetRepository {
@InjectRepository(ExifEntity) private exifRepository: Repository<ExifEntity>,
@InjectRepository(AssetJobStatusEntity) private jobStatusRepository: Repository<AssetJobStatusEntity>,
@InjectRepository(SmartInfoEntity) private smartInfoRepository: Repository<SmartInfoEntity>,
@InjectRepository(PartnerEntity) private partnerRepository: Repository<PartnerEntity>,
@InjectRepository(AlbumEntity) private albumRepository: Repository<AlbumEntity>,
) {}
async upsertExif(exif: Partial<ExifEntity>): Promise<void> {

View File

@@ -7,7 +7,6 @@ import readLine from 'node:readline';
import { citiesFile, resourcePaths } from 'src/constants';
import { AssetEntity } from 'src/entities/asset.entity';
import { GeodataPlacesEntity } from 'src/entities/geodata-places.entity';
import { NaturalEarthCountriesEntity } from 'src/entities/natural-earth-countries.entity';
import { SystemMetadataKey } from 'src/entities/system-metadata.entity';
import { ILoggerRepository } from 'src/interfaces/logger.interface';
import {
@@ -29,8 +28,6 @@ export class MapRepository implements IMapRepository {
constructor(
@InjectRepository(AssetEntity) private assetRepository: Repository<AssetEntity>,
@InjectRepository(GeodataPlacesEntity) private geodataPlacesRepository: Repository<GeodataPlacesEntity>,
@InjectRepository(NaturalEarthCountriesEntity)
private naturalEarthCountriesRepository: Repository<NaturalEarthCountriesEntity>,
@InjectDataSource() private dataSource: DataSource,
@Inject(ISystemMetadataRepository) private metadataRepository: ISystemMetadataRepository,
@Inject(ILoggerRepository) private logger: ILoggerRepository,
@@ -49,7 +46,6 @@ export class MapRepository implements IMapRepository {
}
await this.importGeodata();
await this.importNaturalEarthCountries();
await this.metadataRepository.set(SystemMetadataKey.REVERSE_GEOCODING_STATE, {
lastUpdate: geodataDate,
@@ -134,93 +130,22 @@ export class MapRepository implements IMapRepository {
.limit(1)
.getOne();
if (response) {
this.logger.verbose(`Raw: ${JSON.stringify(response, null, 2)}`);
const { countryCode, name: city, admin1Name } = response;
const country = getName(countryCode, 'en') ?? null;
const state = admin1Name;
return { country, state, city };
}
this.logger.warn(
`Response from database for reverse geocoding latitude: ${point.latitude}, longitude: ${point.longitude} was null`,
);
const ne_response = await this.naturalEarthCountriesRepository
.createQueryBuilder('naturalearth_countries')
.where('coordinates @> point (:longitude, :latitude)', point)
.limit(1)
.getOne();
if (!ne_response) {
if (!response) {
this.logger.warn(
`Response from database for natural earth reverse geocoding latitude: ${point.latitude}, longitude: ${point.longitude} was null`,
`Response from database for reverse geocoding latitude: ${point.latitude}, longitude: ${point.longitude} was null`,
);
return null;
}
this.logger.verbose(`Raw: ${JSON.stringify(ne_response, ['id', 'admin', 'admin_a3', 'type'], 2)}`);
this.logger.verbose(`Raw: ${JSON.stringify(response, null, 2)}`);
const { admin_a3 } = ne_response;
const country = getName(admin_a3, 'en') ?? null;
const state = null;
const city = null;
const { countryCode, name: city, admin1Name } = response;
const country = getName(countryCode, 'en') ?? null;
const state = admin1Name;
return { country, state, city };
}
private transformCoordinatesToPolygon(coordinates: number[][]): string {
const pointsString = coordinates.map((point) => `(${point[0]},${point[1]})`).join(', ');
return `(${pointsString})`;
}
private async importNaturalEarthCountries() {
const queryRunner = this.dataSource.createQueryRunner();
await queryRunner.connect();
try {
await queryRunner.startTransaction();
await queryRunner.manager.clear(NaturalEarthCountriesEntity);
const fileContent = await readFile(resourcePaths.geodata.naturalEarthCountriesPath, 'utf8');
const geoJSONData = JSON.parse(fileContent);
if (geoJSONData.type !== 'FeatureCollection' || !Array.isArray(geoJSONData.features)) {
this.logger.fatal('Invalid GeoJSON FeatureCollection');
return;
}
for await (const feature of geoJSONData.features) {
for (const polygon of feature.geometry.coordinates) {
const featureRecord = new NaturalEarthCountriesEntity();
featureRecord.admin = feature.properties.ADMIN;
featureRecord.admin_a3 = feature.properties.ADM0_A3;
featureRecord.type = feature.properties.TYPE;
if (feature.geometry.type === 'MultiPolygon') {
featureRecord.coordinates = this.transformCoordinatesToPolygon(polygon[0]);
await queryRunner.manager.save(featureRecord);
} else if (feature.geometry.type === 'Polygon') {
featureRecord.coordinates = this.transformCoordinatesToPolygon(polygon);
await queryRunner.manager.save(featureRecord);
break;
}
}
}
await queryRunner.commitTransaction();
} catch (error) {
this.logger.fatal('Error importing natural earth country data', error);
await queryRunner.rollbackTransaction();
throw error;
} finally {
await queryRunner.release();
}
}
private async importGeodata() {
const queryRunner = this.dataSource.createQueryRunner();
await queryRunner.connect();
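To make the removed transform concrete: it turns one GeoJSON ring into a PostgreSQL polygon literal. A worked example with invented coordinates:

const ring = [
  [103.6, 1.16],
  [104.0, 1.16],
  [104.0, 1.47],
  [103.6, 1.47],
];
const polygon = `(${ring.map((point) => `(${point[0]},${point[1]})`).join(', ')})`;
// -> "((103.6,1.16), (104,1.16), (104,1.47), (103.6,1.47))"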

View File

@@ -45,8 +45,7 @@ export class MediaRepository implements IMediaRepository {
}
async generateThumbnail(input: string | Buffer, output: string, options: ThumbnailOptions): Promise<void> {
// some invalid images can still be processed by sharp, but we want to fail on them by default to avoid crashes
const pipeline = sharp(input, { failOn: options.processInvalidImages ? 'none' : 'error', limitInputPixels: false })
const pipeline = sharp(input, { failOn: 'error', limitInputPixels: false })
.pipelineColorspace(options.colorspace === Colorspace.SRGB ? 'srgb' : 'rgb16')
.rotate();
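The removed branch let operators opt into decoding slightly corrupt files instead of failing fast. A hedged sketch of the two modes, using sharp's documented failOn values:

import sharp from 'sharp';

const input = '/photos/maybe-truncated.jpg'; // hypothetical path

// 'error' (the retained default here) aborts on invalid input;
// 'none' (the removed opt-in) decodes whatever sharp can salvage.
const strict = sharp(input, { failOn: 'error', limitInputPixels: false });
const lenient = sharp(input, { failOn: 'none', limitInputPixels: false });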

View File

@@ -1,56 +1,58 @@
import { Inject, Injectable } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { DefaultReadTaskOptions, Tags, exiftool } from 'exiftool-vendored';
import { InjectDataSource, InjectRepository } from '@nestjs/typeorm';
import { DefaultReadTaskOptions, ExifTool, Tags } from 'exiftool-vendored';
import geotz from 'geo-tz';
import { DummyValue, GenerateSql } from 'src/decorators';
import { ExifEntity } from 'src/entities/exif.entity';
import { GeodataPlacesEntity } from 'src/entities/geodata-places.entity';
import { ILoggerRepository } from 'src/interfaces/logger.interface';
import { IMetadataRepository, ImmichTags } from 'src/interfaces/metadata.interface';
import { Instrumentation } from 'src/utils/instrumentation';
import { Repository } from 'typeorm';
import { DataSource, Repository } from 'typeorm';
@Instrumentation()
@Injectable()
export class MetadataRepository implements IMetadataRepository {
constructor(
@InjectRepository(ExifEntity) private exifRepository: Repository<ExifEntity>,
@InjectRepository(GeodataPlacesEntity) private geodataPlacesRepository: Repository<GeodataPlacesEntity>,
@InjectDataSource() private dataSource: DataSource,
@Inject(ILoggerRepository) private logger: ILoggerRepository,
) {
this.logger.setContext(MetadataRepository.name);
this.exiftool = new ExifTool({
defaultVideosToUTC: true,
backfillTimezones: true,
inferTimezoneFromDatestamps: true,
useMWG: true,
numericTags: [...DefaultReadTaskOptions.numericTags, 'FocalLength'],
/* eslint unicorn/no-array-callback-reference: off, unicorn/no-array-method-this-argument: off */
geoTz: (lat, lon) => geotz.find(lat, lon)[0],
// Enable exiftool LFS to parse metadata for files larger than 2GB.
readArgs: ['-api', 'largefilesupport=1'],
writeArgs: ['-api', 'largefilesupport=1', '-overwrite_original'],
});
}
private exiftool: ExifTool;
async teardown() {
await exiftool.end();
await this.exiftool.end();
}
readTags(path: string): Promise<ImmichTags | null> {
return exiftool
.read(path, undefined, {
...DefaultReadTaskOptions,
// Enable exiftool LFS to parse metadata for files larger than 2GB.
optionalArgs: ['-api', 'largefilesupport=1'],
defaultVideosToUTC: true,
backfillTimezones: true,
inferTimezoneFromDatestamps: true,
useMWG: true,
numericTags: [...DefaultReadTaskOptions.numericTags, 'FocalLength'],
/* eslint unicorn/no-array-callback-reference: off, unicorn/no-array-method-this-argument: off */
geoTz: (lat, lon) => geotz.find(lat, lon)[0],
})
.catch((error) => {
this.logger.warn(`Error reading exif data (${path}): ${error}`, error?.stack);
return null;
}) as Promise<ImmichTags | null>;
return this.exiftool.read(path).catch((error) => {
this.logger.warn(`Error reading exif data (${path}): ${error}`, error?.stack);
return null;
}) as Promise<ImmichTags | null>;
}
extractBinaryTag(path: string, tagName: string): Promise<Buffer> {
return exiftool.extractBinaryTagToBuffer(tagName, path);
return this.exiftool.extractBinaryTagToBuffer(tagName, path);
}
async writeTags(path: string, tags: Partial<Tags>): Promise<void> {
try {
await exiftool.write(path, tags, ['-overwrite_original']);
await this.exiftool.write(path, tags);
} catch (error) {
this.logger.warn(`Error writing exif data (${path}): ${error}`);
}
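One side of this hunk owns a repository-scoped ExifTool instance configured once at construction; the other uses the module-level exiftool singleton with per-call options. A condensed sketch of the instance-owned lifecycle, with options copied from the constructor above (the file path is hypothetical):

import { ExifTool } from 'exiftool-vendored';

const exiftool = new ExifTool({
  defaultVideosToUTC: true,
  readArgs: ['-api', 'largefilesupport=1'],
  writeArgs: ['-api', 'largefilesupport=1', '-overwrite_original'],
});

const tags = await exiftool.read('/photos/example.jpg');
await exiftool.write('/photos/example.jpg', { Description: 'hello' });
await exiftool.end(); // release the pooled exiftool child processes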

View File

@@ -3,7 +3,6 @@ import { InjectRepository } from '@nestjs/typeorm';
import _ from 'lodash';
import { ChunkedArray, DummyValue, GenerateSql } from 'src/decorators';
import { AssetFaceEntity } from 'src/entities/asset-face.entity';
import { AssetJobStatusEntity } from 'src/entities/asset-job-status.entity';
import { AssetEntity } from 'src/entities/asset.entity';
import { PersonEntity } from 'src/entities/person.entity';
import {
@@ -26,7 +25,6 @@ export class PersonRepository implements IPersonRepository {
@InjectRepository(AssetEntity) private assetRepository: Repository<AssetEntity>,
@InjectRepository(PersonEntity) private personRepository: Repository<PersonEntity>,
@InjectRepository(AssetFaceEntity) private assetFaceRepository: Repository<AssetFaceEntity>,
@InjectRepository(AssetJobStatusEntity) private jobStatusRepository: Repository<AssetJobStatusEntity>,
) {}
@GenerateSql({ params: [{ oldPersonId: DummyValue.UUID, newPersonId: DummyValue.UUID }] })
@@ -269,13 +267,4 @@ export class PersonRepository implements IPersonRepository {
async getRandomFace(personId: string): Promise<AssetFaceEntity | null> {
return this.assetFaceRepository.findOneBy({ personId });
}
@GenerateSql()
async getLatestFaceDate(): Promise<string | undefined> {
const result: { latestDate?: string } | undefined = await this.jobStatusRepository
.createQueryBuilder('jobStatus')
.select('MAX(jobStatus.facesRecognizedAt)::text', 'latestDate')
.getRawOne();
return result?.latestDate;
}
}

View File

@@ -302,7 +302,8 @@ describe(AlbumService.name, () => {
describe('delete', () => {
it('should throw an error for an album not found', async () => {
accessMock.album.checkOwnerAccess.mockResolvedValue(new Set());
accessMock.album.checkOwnerAccess.mockResolvedValue(new Set([albumStub.sharedWithAdmin.id]));
albumMock.getById.mockResolvedValue(null);
await expect(sut.delete(authStub.admin, albumStub.sharedWithAdmin.id)).rejects.toBeInstanceOf(
BadRequestException,
@@ -328,7 +329,7 @@ describe(AlbumService.name, () => {
await sut.delete(authStub.admin, albumStub.empty.id);
expect(albumMock.delete).toHaveBeenCalledTimes(1);
expect(albumMock.delete).toHaveBeenCalledWith(albumStub.empty.id);
expect(albumMock.delete).toHaveBeenCalledWith(albumStub.empty);
});
});

Some files were not shown because too many files have changed in this diff.