Compare commits

..

5 Commits

Author    SHA1        Message               Date
mertalev  8d2a849edc  optimized scrfd code  2024-06-09 23:03:34 -04:00
mertalev  fb4fe5d40b  change log message    2024-06-08 21:24:23 -04:00
mertalev  717961ce7b  add tests             2024-06-08 21:24:23 -04:00
mertalev  259386cf13  refactor              2024-06-08 21:24:23 -04:00
mertalev  7e587c2703  squeeze output dims   2024-06-08 21:24:23 -04:00
731 changed files with 27037 additions and 54507 deletions

View File

@@ -1,11 +1,11 @@
blank_issues_enabled: false
contact_links:
- name: I have a question or need support
url: https://discord.immich.app
url: https://discord.gg/D8JsnBEuKb
about: We use GitHub for tracking bugs, please check out our Discord channel for freaky fast support.
- name: Feature Request
url: https://github.com/immich-app/immich/discussions/new?category=feature-request
about: Please use our GitHub Discussion for making feature requests.
- name: I'm unsure where to go
url: https://discord.immich.app
url: https://discord.gg/D8JsnBEuKb
about: If you are unsure where to go, then joining our Discord is recommended; Just ask!

35 .github/labeler.yml vendored
View File

@@ -1,35 +0,0 @@
cli:
- changed-files:
- any-glob-to-any-file:
- cli/src/**
documentation:
- changed-files:
- any-glob-to-any-file:
- docs/blob/**
- docs/docs/**
- docs/src/**
- docs/static/**
🖥web:
- changed-files:
- any-glob-to-any-file:
- web/src/**
- web/static/**
📱mobile:
- changed-files:
- any-glob-to-any-file:
- mobile/lib/**
- mobile/test/**
🗄server:
- changed-files:
- any-glob-to-any-file:
- server/src/**
- server/test/**
🧠machine-learning:
- changed-files:
- any-glob-to-any-file:
- machine-learning/app/**

View File

@@ -1,17 +1,16 @@
name: CLI Build
on:
workflow_dispatch:
push:
branches: [main]
paths:
- 'cli/**'
- '.github/workflows/cli.yml'
- "cli/**"
- ".github/workflows/cli.yml"
pull_request:
branches: [main]
paths:
- 'cli/**'
- '.github/workflows/cli.yml'
release:
types: [published]
- "cli/**"
- ".github/workflows/cli.yml"
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
@@ -33,8 +32,8 @@ jobs:
# Setup .npmrc file to publish to npm
- uses: actions/setup-node@v4
with:
node-version-file: './cli/.nvmrc'
registry-url: 'https://registry.npmjs.org'
node-version: "20.x"
registry-url: "https://registry.npmjs.org"
- name: Prepare SDK
run: npm ci --prefix ../open-api/typescript-sdk/
- name: Build SDK
@@ -42,7 +41,7 @@ jobs:
- run: npm ci
- run: npm run build
- run: npm publish
if: ${{ github.event_name == 'release' }}
if: ${{ github.event_name == 'workflow_dispatch' }}
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
@@ -56,10 +55,10 @@ jobs:
uses: actions/checkout@v4
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.1.0
uses: docker/setup-qemu-action@v3.0.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3.4.0
uses: docker/setup-buildx-action@v3.3.0
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
@@ -84,15 +83,15 @@ jobs:
images: |
name=ghcr.io/${{ github.repository_owner }}/immich-cli
tags: |
type=raw,value=${{ steps.package-version.outputs.version }},enable=${{ github.event_name == 'release' }}
type=raw,value=latest,enable=${{ github.event_name == 'release' }}
type=raw,value=${{ steps.package-version.outputs.version }},enable=${{ github.event_name == 'workflow_dispatch' }}
type=raw,value=latest,enable=${{ github.event_name == 'workflow_dispatch' }}
- name: Build and push image
uses: docker/build-push-action@v6.3.0
uses: docker/build-push-action@v5.3.0
with:
file: cli/Dockerfile
platforms: linux/amd64,linux/arm64
push: ${{ github.event_name == 'release' }}
push: ${{ github.event_name == 'workflow_dispatch' }}
cache-from: type=gha
cache-to: type=gha,mode=max
tags: ${{ steps.metadata.outputs.tags }}

View File

@@ -63,10 +63,10 @@ jobs:
uses: actions/checkout@v4
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.1.0
uses: docker/setup-qemu-action@v3.0.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3.4.0
uses: docker/setup-buildx-action@v3.3.0
- name: Login to Docker Hub
# Only push to Docker Hub when making a release
@@ -115,7 +115,7 @@ jobs:
fi
- name: Build and push image
uses: docker/build-push-action@v6.3.0
uses: docker/build-push-action@v5.3.0
with:
context: ${{ matrix.context }}
file: ${{ matrix.file }}
@@ -124,11 +124,7 @@ jobs:
push: ${{ !github.event.pull_request.head.repo.fork }}
cache-from: type=registry,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{matrix.image}}
cache-to: ${{ steps.cache-target.outputs.cache-to }}
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}
build-args: |
DEVICE=${{ matrix.device }}
BUILD_ID=${{ github.run_id }}
BUILD_IMAGE=${{ github.event_name == 'release' && github.ref_name || steps.metadata.outputs.tags }}
BUILD_SOURCE_REF=${{ github.ref_name }}
BUILD_SOURCE_COMMIT=${{ github.sha }}
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}

View File

@@ -26,11 +26,6 @@ jobs:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup Node
uses: actions/setup-node@v4
with:
node-version-file: './docs/.nvmrc'
- name: Run npm install
run: npm ci

View File

@@ -1,12 +0,0 @@
name: "Pull Request Labeler"
on:
- pull_request_target
jobs:
labeler:
permissions:
contents: read
pull-requests: write
runs-on: ubuntu-latest
steps:
- uses: actions/labeler@v5

13 .github/workflows/pr-require-label.yml vendored Normal file
View File

@@ -0,0 +1,13 @@
name: Enforce PR labels
on:
pull_request:
types: [labeled, unlabeled, opened, edited, synchronize]
jobs:
enforce-label:
name: Enforce label
runs-on: ubuntu-latest
steps:
- if: toJson(github.event.pull_request.labels) == '[]'
run: exit 1

View File

@@ -19,7 +19,7 @@ jobs:
# Setup .npmrc file to publish to npm
- uses: actions/setup-node@v4
with:
node-version-file: './open-api/typescript-sdk/.nvmrc'
node-version: '20.x'
registry-url: 'https://registry.npmjs.org'
- name: Install deps
run: npm ci

View File

@@ -21,11 +21,6 @@ jobs:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup Node
uses: actions/setup-node@v4
with:
node-version-file: './server/.nvmrc'
- name: Run npm install
run: npm ci
@@ -59,7 +54,7 @@ jobs:
- name: Setup Node
uses: actions/setup-node@v4
with:
node-version-file: './cli/.nvmrc'
node-version: 20
- name: Setup typescript-sdk
run: npm ci && npm run build
@@ -84,38 +79,6 @@ jobs:
run: npm run test:cov
if: ${{ !cancelled() }}
cli-unit-tests-win:
name: CLI (Windows)
runs-on: windows-latest
defaults:
run:
working-directory: ./cli
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup Node
uses: actions/setup-node@v4
with:
node-version-file: './cli/.nvmrc'
- name: Setup typescript-sdk
run: npm ci && npm run build
working-directory: ./open-api/typescript-sdk
- name: Install deps
run: npm ci
# Skip linter & formatter in Windows test.
- name: Run tsc
run: npm run check
if: ${{ !cancelled() }}
- name: Run unit tests & coverage
run: npm run test:cov
if: ${{ !cancelled() }}
web-unit-tests:
name: Web
runs-on: ubuntu-latest
@@ -127,11 +90,6 @@ jobs:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup Node
uses: actions/setup-node@v4
with:
node-version-file: './web/.nvmrc'
- name: Run setup typescript-sdk
run: npm ci && npm run build
working-directory: ./open-api/typescript-sdk
@@ -175,7 +133,7 @@ jobs:
- name: Setup Node
uses: actions/setup-node@v4
with:
node-version-file: './e2e/.nvmrc'
node-version: 20
- name: Run setup typescript-sdk
run: npm ci && npm run build
@@ -283,11 +241,6 @@ jobs:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup Node
uses: actions/setup-node@v4
with:
node-version-file: './server/.nvmrc'
- name: Install server dependencies
run: npm --prefix=server ci
@@ -338,11 +291,6 @@ jobs:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup Node
uses: actions/setup-node@v4
with:
node-version-file: './server/.nvmrc'
- name: Install server dependencies
run: npm ci

View File

@@ -131,4 +131,4 @@ conduct enforcement ladder](https://github.com/mozilla/diversity).
For answers to common questions about this code of conduct, see the
FAQ at https://www.contributor-covenant.org/faq. Translations are
available at https://www.contributor-covenant.org/translations.
available at https://www.contributor-covenant.org/translations.

View File

@@ -10,6 +10,12 @@ dev-update:
dev-scale:
docker compose -f ./docker/docker-compose.dev.yml up --build -V --scale immich-server=3 --remove-orphans
stage:
docker compose -f ./docker/docker-compose.staging.yml up --build -V --remove-orphans
pull-stage:
docker compose -f ./docker/docker-compose.staging.yml pull
.PHONY: e2e
e2e:
docker compose -f ./e2e/docker-compose.yml up --build -V --remove-orphans
@@ -35,51 +41,3 @@ sql:
attach-server:
docker exec -it docker_immich-server_1 sh
renovate:
LOG_LEVEL=debug npx renovate --platform=local --repository-cache=reset
MODULES = e2e server web cli sdk
audit-%:
npm --prefix $(subst sdk,open-api/typescript-sdk,$*) audit fix
install-%:
npm --prefix $(subst sdk,open-api/typescript-sdk,$*) i
build-cli: build-sdk
build-web: build-sdk
build-%: install-%
npm --prefix $(subst sdk,open-api/typescript-sdk,$*) run | grep 'build' >/dev/null \
&& npm --prefix $(subst sdk,open-api/typescript-sdk,$*) run build || true
format-%:
npm --prefix $(subst sdk,open-api/typescript-sdk,$*) run | grep 'format:fix' >/dev/null \
&& npm --prefix $(subst sdk,open-api/typescript-sdk,$*) run format:fix || true
lint-%:
npm --prefix $* run lint:fix
check-%:
npm --prefix $* run check
check-web:
npm --prefix web run check:typescript
npm --prefix web run check:svelte
test-%:
npm --prefix $* run test
test-e2e:
docker compose -f ./e2e/docker-compose.yml build
npm --prefix e2e run test
npm --prefix e2e run test:web
build-all: $(foreach M,$(MODULES),build-$M) ;
install-all: $(foreach M,$(MODULES),install-$M) ;
check-all: $(foreach M,$(MODULES),check-$M) ;
lint-all: $(foreach M,$(MODULES),lint-$M) ;
format-all: $(foreach M,$(MODULES),format-$M) ;
audit-all: $(foreach M,$(MODULES),audit-$M) ;
hygiene-all: lint-all format-all check-all sql audit-all;
test-all: $(foreach M,$(MODULES),test-$M) ;
clean:
find . -name "node_modules" -type d -prune -exec rm -rf '{}' +
find . -name "dist" -type d -prune -exec rm -rf '{}' +
find . -name "build" -type d -prune -exec rm -rf '{}' +
find . -name "svelte-kit" -type d -prune -exec rm -rf '{}' +
docker compose -f ./docker/docker-compose.dev.yml rm -v -f || true
docker compose -f ./e2e/docker-compose.yml rm -v -f || true

View File

@@ -1,7 +1,7 @@
<p align="center">
<br/>
<a href="https://opensource.org/license/agpl-v3"><img src="https://img.shields.io/badge/License-AGPL_v3-blue.svg?color=3F51B5&style=for-the-badge&label=License&logoColor=000000&labelColor=ececec" alt="License: AGPLv3"></a>
<a href="https://discord.immich.app">
<a href="https://discord.gg/D8JsnBEuKb">
<img src="https://img.shields.io/discord/979116623879368755.svg?label=Discord&logo=Discord&style=for-the-badge&logoColor=000000&labelColor=ececec" alt="Discord"/>
</a>
<br/>
@@ -19,21 +19,20 @@
<br/>
<p align="center">
<a href="readme_i18n/README_ca_ES.md">Català</a>
<a href="readme_i18n/README_es_ES.md">Español</a>
<a href="readme_i18n/README_fr_FR.md">Français</a>
<a href="readme_i18n/README_it_IT.md">Italiano</a>
<a href="readme_i18n/README_ja_JP.md">日本語</a>
<a href="readme_i18n/README_ko_KR.md">한국어</a>
<a href="readme_i18n/README_de_DE.md">Deutsch</a>
<a href="readme_i18n/README_nl_NL.md">Nederlands</a>
<a href="readme_i18n/README_tr_TR.md">Türkçe</a>
<a href="readme_i18n/README_zh_CN.md">中文</a>
<a href="readme_i18n/README_ru_RU.md">Русский</a>
<a href="readme_i18n/README_pt_BR.md">Português Brasileiro</a>
<a href="readme_i18n/README_sv_SE.md">Svenska</a>
<a href="readme_i18n/README_ar_JO.md">العربية</a>
<a href="readme_i18n/README_ca_ES.md">Català</a>
<a href="readme_i18n/README_es_ES.md">Español</a>
<a href="readme_i18n/README_fr_FR.md">Français</a>
<a href="readme_i18n/README_it_IT.md">Italiano</a>
<a href="readme_i18n/README_ja_JP.md">日本語</a>
<a href="readme_i18n/README_ko_KR.md">한국어</a>
<a href="readme_i18n/README_de_DE.md">Deutsch</a>
<a href="readme_i18n/README_nl_NL.md">Nederlands</a>
<a href="readme_i18n/README_tr_TR.md">Türkçe</a>
<a href="readme_i18n/README_zh_CN.md">中文</a>
<a href="readme_i18n/README_ru_RU.md">Русский</a>
<a href="readme_i18n/README_pt_BR.md">Português Brasileiro</a>
<a href="readme_i18n/README_sv_SE.md">Svenska</a>
<a href="readme_i18n/README_ar_JO.md">العربية</a>
</p>
## Disclaimer
@@ -43,36 +42,45 @@
- ⚠️ **Do not use the app as the only way to store your photos and videos.**
- ⚠️ Always follow [3-2-1](https://www.backblaze.com/blog/the-3-2-1-backup-strategy/) backup plan for your precious photos and videos!
> [!NOTE]
> You can find the main documentation, including installation guides, at https://immich.app/.
## Content
## Links
- [Documentation](https://immich.app/docs)
- [About](https://immich.app/docs/overview/introduction)
- [Installation](https://immich.app/docs/install/requirements)
- [Roadmap](https://immich.app/roadmap)
- [Official Documentation](https://immich.app/docs)
- [Roadmap](https://github.com/orgs/immich-app/projects/1)
- [Demo](#demo)
- [Features](#features)
- [Translations](https://immich.app/docs/developer/translations)
- [Contributing](https://immich.app/docs/overview/support-the-project)
- [Introduction](https://immich.app/docs/overview/introduction)
- [Installation](https://immich.app/docs/install/requirements)
- [Contribution Guidelines](https://immich.app/docs/overview/support-the-project)
## Documentation
You can find the main documentation, including installation guides, at https://immich.app/.
## Demo
Access the demo [here](https://demo.immich.app). The demo is running on a Free-tier Oracle VM in Amsterdam with a 2.4Ghz quad-core ARM64 CPU and 24GB RAM.
You can access the web demo at https://demo.immich.app
For the mobile app, you can use `https://demo.immich.app/api` for the `Server Endpoint URL`
### Login credentials
```bash title="Demo Credential"
The credential
email: demo@immich.app
password: demo
```
| Email | Password |
| --------------- | -------- |
| demo@immich.app | demo |
```
Spec: Free-tier Oracle VM - Amsterdam - 2.4Ghz quad-core ARM64 CPU, 24GB RAM
```
## Activities
![Activities](https://repobeats.axiom.co/api/embed/9e86d9dc3ddd137161f2f6d2e758d7863b1789cb.svg "Repobeats analytics image")
## Features
| Features | Mobile | Web |
| :------------------------------------------- | ------ | --- |
| :--------------------------------------------- | -------- | ----- |
| Upload and view videos and photos | Yes | Yes |
| Auto backup when the app is opened | Yes | N/A |
| Prevent duplication of assets | Yes | Yes |
@@ -102,19 +110,13 @@ For the mobile app, you can use `https://demo.immich.app/api` for the `Server En
| Read-only gallery | Yes | Yes |
| Stacked Photos | Yes | Yes |
## Translations
## Contributors
Read more about translations [here](https://immich.app/docs/developer/translations).
<a href="https://hosted.weblate.org/engage/immich/">
<img src="https://hosted.weblate.org/widget/immich/immich/multi-auto.svg" alt="Translation status" />
<a href="https://github.com/alextran1502/immich/graphs/contributors">
<img src="https://contrib.rocks/image?repo=immich-app/immich" width="100%"/>
</a>
## Repository activity
![Activities](https://repobeats.axiom.co/api/embed/9e86d9dc3ddd137161f2f6d2e758d7863b1789cb.svg "Repobeats analytics image")
## Star history
## Star History
<a href="https://star-history.com/#immich-app/immich&Date">
<picture>
@@ -123,9 +125,3 @@ Read more about translations [here](https://immich.app/docs/developer/translatio
<img alt="Star History Chart" src="https://api.star-history.com/svg?repos=immich-app/immich&type=Date" width="100%" />
</picture>
</a>
## Contributors
<a href="https://github.com/alextran1502/immich/graphs/contributors">
<img src="https://contrib.rocks/image?repo=immich-app/immich" width="100%"/>
</a>

View File

@@ -2,4 +2,4 @@
## Reporting a Vulnerability
Please report security issues to `security@immich.app`
Please report security issues to `alex.tran1502@gmail.com`

View File

@@ -1 +1 @@
20.15
20.14

View File

@@ -1,4 +1,4 @@
FROM node:20.15.0-alpine3.20@sha256:df01469346db2bf1cfc1f7261aeab86b2960efa840fe2bd46d83ff339f463665 as core
FROM node:20-alpine3.19@sha256:696ae41fb5880949a15ade7879a2deae93b3f0723f757bdb5b8a9e4a744ce27f as core
WORKDIR /usr/src/open-api/typescript-sdk
COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./
@@ -16,4 +16,4 @@ RUN npm run build
WORKDIR /import
ENTRYPOINT ["node", "/usr/src/app/dist"]
ENTRYPOINT ["node", "/usr/src/app/dist"]

492 cli/package-lock.json generated

File diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
{
"name": "@immich/cli",
"version": "2.2.7",
"version": "2.2.0",
"description": "Command Line Interface (CLI) for Immich",
"type": "module",
"exports": "./dist/index.js",
@@ -18,7 +18,7 @@
"@types/cli-progress": "^3.11.0",
"@types/lodash-es": "^4.17.12",
"@types/mock-fs": "^4.13.1",
"@types/node": "^20.14.9",
"@types/node": "^20.3.1",
"@typescript-eslint/eslint-plugin": "^7.0.0",
"@typescript-eslint/parser": "^7.0.0",
"@vitest/coverage-v8": "^1.2.2",
@@ -28,15 +28,14 @@
"eslint": "^8.56.0",
"eslint-config-prettier": "^9.1.0",
"eslint-plugin-prettier": "^5.1.3",
"eslint-plugin-unicorn": "^54.0.0",
"eslint-plugin-unicorn": "^53.0.0",
"mock-fs": "^5.2.0",
"prettier": "^3.2.5",
"prettier-plugin-organize-imports": "^4.0.0",
"prettier-plugin-organize-imports": "^3.2.4",
"typescript": "^5.3.3",
"vite": "^5.0.12",
"vite-tsconfig-paths": "^4.3.2",
"vitest": "^1.2.2",
"vitest-fetch-mock": "^0.2.2",
"yaml": "^2.3.1"
},
"scripts": {
@@ -60,10 +59,9 @@
},
"dependencies": {
"fast-glob": "^3.3.2",
"fastq": "^1.17.1",
"lodash-es": "^4.17.21"
},
"volta": {
"node": "20.15.0"
"node": "20.14.0"
}
}

View File

@@ -1,201 +0,0 @@
import * as fs from 'node:fs';
import * as os from 'node:os';
import * as path from 'node:path';
import { describe, expect, it, vi } from 'vitest';
import { Action, checkBulkUpload, defaults, Reason } from '@immich/sdk';
import createFetchMock from 'vitest-fetch-mock';
import { checkForDuplicates, getAlbumName, uploadFiles, UploadOptionsDto } from './asset';
vi.mock('@immich/sdk');
describe('getAlbumName', () => {
it('should return a non-undefined value', () => {
if (os.platform() === 'win32') {
// This is meaningless for Unix systems.
expect(getAlbumName(String.raw`D:\test\Filename.txt`, {} as UploadOptionsDto)).toBe('test');
}
expect(getAlbumName('D:/parentfolder/test/Filename.txt', {} as UploadOptionsDto)).toBe('test');
});
it('has higher priority to return `albumName` in `options`', () => {
expect(getAlbumName('/parentfolder/test/Filename.txt', { albumName: 'example' } as UploadOptionsDto)).toBe(
'example',
);
});
});
describe('uploadFiles', () => {
const testDir = fs.mkdtempSync(path.join(os.tmpdir(), 'test-'));
const testFilePath = path.join(testDir, 'test.png');
const testFileData = 'test';
const baseUrl = 'http://example.com';
const apiKey = 'key';
const retry = 3;
const fetchMocker = createFetchMock(vi);
beforeEach(() => {
// Create a test file
fs.writeFileSync(testFilePath, testFileData);
// Defaults
vi.mocked(defaults).baseUrl = baseUrl;
vi.mocked(defaults).headers = { 'x-api-key': apiKey };
fetchMocker.enableMocks();
fetchMocker.resetMocks();
});
it('returns new assets when upload file is successful', async () => {
fetchMocker.doMockIf(new RegExp(`${baseUrl}/assets$`), () => {
return {
status: 200,
body: JSON.stringify({ id: 'fc5621b1-86f6-44a1-9905-403e607df9f5', status: 'created' }),
};
});
await expect(uploadFiles([testFilePath], { concurrency: 1 })).resolves.toEqual([
{
filepath: testFilePath,
id: 'fc5621b1-86f6-44a1-9905-403e607df9f5',
},
]);
});
it('returns new assets when upload file retry is successful', async () => {
let counter = 0;
fetchMocker.doMockIf(new RegExp(`${baseUrl}/assets$`), () => {
counter++;
if (counter < retry) {
throw new Error('Network error');
}
return {
status: 200,
body: JSON.stringify({ id: 'fc5621b1-86f6-44a1-9905-403e607df9f5', status: 'created' }),
};
});
await expect(uploadFiles([testFilePath], { concurrency: 1 })).resolves.toEqual([
{
filepath: testFilePath,
id: 'fc5621b1-86f6-44a1-9905-403e607df9f5',
},
]);
});
it('returns new assets when upload file retry is failed', async () => {
fetchMocker.doMockIf(new RegExp(`${baseUrl}/assets$`), () => {
throw new Error('Network error');
});
await expect(uploadFiles([testFilePath], { concurrency: 1 })).resolves.toEqual([]);
});
});
describe('checkForDuplicates', () => {
const testDir = fs.mkdtempSync(path.join(os.tmpdir(), 'test-'));
const testFilePath = path.join(testDir, 'test.png');
const testFileData = 'test';
const testFileChecksum = 'a94a8fe5ccb19ba61c4c0873d391e987982fbbd3'; // SHA1
const retry = 3;
beforeEach(() => {
// Create a test file
fs.writeFileSync(testFilePath, testFileData);
});
it('checks duplicates', async () => {
vi.mocked(checkBulkUpload).mockResolvedValue({
results: [
{
action: Action.Accept,
id: testFilePath,
},
],
});
await checkForDuplicates([testFilePath], { concurrency: 1 });
expect(checkBulkUpload).toHaveBeenCalledWith({
assetBulkUploadCheckDto: {
assets: [
{
checksum: testFileChecksum,
id: testFilePath,
},
],
},
});
});
it('returns duplicates when check duplicates is rejected', async () => {
vi.mocked(checkBulkUpload).mockResolvedValue({
results: [
{
action: Action.Reject,
id: testFilePath,
assetId: 'fc5621b1-86f6-44a1-9905-403e607df9f5',
reason: Reason.Duplicate,
},
],
});
await expect(checkForDuplicates([testFilePath], { concurrency: 1 })).resolves.toEqual({
duplicates: [
{
filepath: testFilePath,
id: 'fc5621b1-86f6-44a1-9905-403e607df9f5',
},
],
newFiles: [],
});
});
it('returns new assets when check duplicates is accepted', async () => {
vi.mocked(checkBulkUpload).mockResolvedValue({
results: [
{
action: Action.Accept,
id: testFilePath,
},
],
});
await expect(checkForDuplicates([testFilePath], { concurrency: 1 })).resolves.toEqual({
duplicates: [],
newFiles: [testFilePath],
});
});
it('returns results when check duplicates retry is successful', async () => {
let mocked = vi.mocked(checkBulkUpload);
for (let i = 1; i < retry; i++) {
mocked = mocked.mockRejectedValueOnce(new Error('Network error'));
}
mocked.mockResolvedValue({
results: [
{
action: Action.Accept,
id: testFilePath,
},
],
});
await expect(checkForDuplicates([testFilePath], { concurrency: 1 })).resolves.toEqual({
duplicates: [],
newFiles: [testFilePath],
});
});
it('returns results when check duplicates retry is failed', async () => {
vi.mocked(checkBulkUpload).mockRejectedValue(new Error('Network error'));
await expect(checkForDuplicates([testFilePath], { concurrency: 1 })).resolves.toEqual({
duplicates: [],
newFiles: [],
});
});
});

View File

@@ -15,8 +15,8 @@ import { Presets, SingleBar } from 'cli-progress';
import { chunk } from 'lodash-es';
import { Stats, createReadStream } from 'node:fs';
import { stat, unlink } from 'node:fs/promises';
import os from 'node:os';
import path, { basename } from 'node:path';
import { Queue } from 'src/queue';
import { BaseOptions, authenticate, crawl, sha1 } from 'src/utils';
const s = (count: number) => (count === 1 ? '' : 's');
@@ -25,7 +25,7 @@ const s = (count: number) => (count === 1 ? '' : 's');
type AssetBulkUploadCheckResults = Array<AssetBulkUploadCheckResult & { id: string }>;
type Asset = { id: string; filepath: string };
export interface UploadOptionsDto {
interface UploadOptionsDto {
recursive?: boolean;
ignore?: string;
dryRun?: boolean;
@@ -84,7 +84,7 @@ const scan = async (pathsToCrawl: string[], options: UploadOptionsDto) => {
return files;
};
export const checkForDuplicates = async (files: string[], { concurrency, skipHash }: UploadOptionsDto) => {
const checkForDuplicates = async (files: string[], { concurrency, skipHash }: UploadOptionsDto) => {
if (skipHash) {
console.log('Skipping hash check, assuming all files are new');
return { newFiles: files, duplicates: [] };
@@ -100,50 +100,32 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
const newFiles: string[] = [];
const duplicates: Asset[] = [];
const queue = new Queue<string[], AssetBulkUploadCheckResults>(
async (filepaths: string[]) => {
const dto = await Promise.all(
filepaths.map(async (filepath) => ({ id: filepath, checksum: await sha1(filepath) })),
);
const response = await checkBulkUpload({ assetBulkUploadCheckDto: { assets: dto } });
const results = response.results as AssetBulkUploadCheckResults;
for (const { id: filepath, assetId, action } of results) {
try {
// TODO refactor into a queue
for (const items of chunk(files, concurrency)) {
const dto = await Promise.all(items.map(async (filepath) => ({ id: filepath, checksum: await sha1(filepath) })));
const { results } = await checkBulkUpload({ assetBulkUploadCheckDto: { assets: dto } });
for (const { id: filepath, assetId, action } of results as AssetBulkUploadCheckResults) {
if (action === Action.Accept) {
newFiles.push(filepath);
} else {
// rejects are always duplicates
duplicates.push({ id: assetId as string, filepath });
}
progressBar.increment();
}
progressBar.increment(filepaths.length);
return results;
},
{ concurrency, retry: 3 },
);
for (const items of chunk(files, concurrency)) {
await queue.push(items);
}
} finally {
progressBar.stop();
}
await queue.drained();
progressBar.stop();
console.log(`Found ${newFiles.length} new files and ${duplicates.length} duplicate${s(duplicates.length)}`);
// Report failures
const failedTasks = queue.tasks.filter((task) => task.status === 'failed');
if (failedTasks.length > 0) {
console.log(`Failed to verify ${failedTasks.length} file${s(failedTasks.length)}:`);
for (const task of failedTasks) {
console.log(`- ${task.data} - ${task.error}`);
}
}
return { newFiles, duplicates };
};
export const uploadFiles = async (files: string[], { dryRun, concurrency }: UploadOptionsDto): Promise<Asset[]> => {
const uploadFiles = async (files: string[], { dryRun, concurrency }: UploadOptionsDto): Promise<Asset[]> => {
if (files.length === 0) {
console.log('All assets were already uploaded, nothing to do.');
return [];
@@ -177,52 +159,37 @@ export const uploadFiles = async (files: string[], { dryRun, concurrency }: Uplo
const newAssets: Asset[] = [];
const queue = new Queue<string, AssetMediaResponseDto>(
async (filepath: string) => {
const stats = statsMap.get(filepath);
if (!stats) {
throw new Error(`Stats not found for ${filepath}`);
}
try {
for (const items of chunk(files, concurrency)) {
await Promise.all(
items.map(async (filepath) => {
const stats = statsMap.get(filepath) as Stats;
const response = await uploadFile(filepath, stats);
const response = await uploadFile(filepath, stats);
newAssets.push({ id: response.id, filepath });
if (response.status === AssetMediaStatus.Duplicate) {
duplicateCount++;
duplicateSize += stats.size ?? 0;
} else {
successCount++;
successSize += stats.size ?? 0;
}
newAssets.push({ id: response.id, filepath });
uploadProgress.update(successSize, { value_formatted: byteSize(successSize + duplicateSize) });
if (response.status === AssetMediaStatus.Duplicate) {
duplicateCount++;
duplicateSize += stats.size ?? 0;
} else {
successCount++;
successSize += stats.size ?? 0;
}
return response;
},
{ concurrency, retry: 3 },
);
uploadProgress.update(successSize, { value_formatted: byteSize(successSize + duplicateSize) });
for (const filepath of files) {
await queue.push(filepath);
return response;
}),
);
}
} finally {
uploadProgress.stop();
}
await queue.drained();
uploadProgress.stop();
console.log(`Successfully uploaded ${successCount} new asset${s(successCount)} (${byteSize(successSize)})`);
if (duplicateCount > 0) {
console.log(`Skipped ${duplicateCount} duplicate asset${s(duplicateCount)} (${byteSize(duplicateSize)})`);
}
// Report failures
const failedTasks = queue.tasks.filter((task) => task.status === 'failed');
if (failedTasks.length > 0) {
console.log(`Failed to upload ${failedTasks.length} asset${s(failedTasks.length)}:`);
for (const task of failedTasks) {
console.log(`- ${task.data} - ${task.error}`);
}
}
return newAssets;
};
@@ -379,9 +346,7 @@ const updateAlbums = async (assets: Asset[], options: UploadOptionsDto) => {
}
};
// `filepath` valid format:
// - Windows: `D:\\test\\Filename.txt` or `D:/test/Filename.txt`
// - Unix: `/test/Filename.txt`
export const getAlbumName = (filepath: string, options: UploadOptionsDto) => {
return options.albumName ?? path.basename(path.dirname(filepath));
const getAlbumName = (filepath: string, options: UploadOptionsDto) => {
const folderName = os.platform() === 'win32' ? filepath.split('\\').at(-2) : filepath.split('/').at(-2);
return options.albumName ?? folderName;
};
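
A minimal sketch of how the `getAlbumName` fallback shown above behaves, assuming the `path.basename(path.dirname(...))` variant; `albumNameOf` is a hypothetical stand-in for the exported helper:

```typescript
import * as path from 'node:path';

// Hypothetical stand-in for getAlbumName(filepath, options): an explicit
// albumName wins, otherwise the album is named after the parent folder.
const albumNameOf = (filepath: string, albumName?: string) =>
  albumName ?? path.basename(path.dirname(filepath));

// Forward-slash paths resolve the same way on every platform.
console.log(albumNameOf('D:/parentfolder/test/Filename.txt')); // "test"
console.log(albumNameOf('/parentfolder/test/Filename.txt')); // "test"

// An explicit album name always takes priority over the derived folder name.
console.log(albumNameOf('/parentfolder/test/Filename.txt', 'example')); // "example"
```

Backslash-separated paths such as `D:\test\Filename.txt` only resolve this way on Windows, where `node:path` treats `\` as a separator, which is why the spec above guards that case with `os.platform() === 'win32'`.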

View File

@@ -1,131 +0,0 @@
import * as fastq from 'fastq';
import { uniqueId } from 'lodash-es';
export type Task<T, R> = {
readonly id: string;
status: 'idle' | 'processing' | 'succeeded' | 'failed';
data: T;
error: unknown | undefined;
count: number;
// TODO: Could be useful to adding progress property.
// TODO: Could be useful to adding start_at/end_at/duration properties.
result: undefined | R;
};
export type QueueOptions = {
verbose?: boolean;
concurrency?: number;
retry?: number;
// TODO: Could be useful to adding timeout property for retry.
};
export type ComputedQueueOptions = Required<QueueOptions>;
export const defaultQueueOptions = {
concurrency: 1,
retry: 0,
verbose: false,
};
/**
* An in-memory queue that processes tasks in parallel with a given concurrency.
* @see {@link https://www.npmjs.com/package/fastq}
* @template T - The type of the worker task data.
* @template R - The type of the worker output data.
*/
export class Queue<T, R> {
private readonly queue: fastq.queueAsPromised<string, Task<T, R>>;
private readonly store = new Map<string, Task<T, R>>();
readonly options: ComputedQueueOptions;
readonly worker: (data: T) => Promise<R>;
/**
* Create a new queue.
* @param worker - The worker function that processes the task.
* @param options - The queue options.
*/
constructor(worker: (data: T) => Promise<R>, options?: QueueOptions) {
this.options = { ...defaultQueueOptions, ...options };
this.worker = worker;
this.store = new Map<string, Task<T, R>>();
this.queue = this.buildQueue();
}
get tasks(): Task<T, R>[] {
const tasks: Task<T, R>[] = [];
for (const task of this.store.values()) {
tasks.push(task);
}
return tasks;
}
getTask(id: string): Task<T, R> {
const task = this.store.get(id);
if (!task) {
throw new Error(`Task with id ${id} not found`);
}
return task;
}
/**
* Wait for the queue to be empty.
* @returns Promise<void> - The returned Promise will be resolved when all tasks in the queue have been processed by a worker.
* This promise could be ignored as it will not lead to a `unhandledRejection`.
*/
async drained(): Promise<void> {
await this.queue.drain();
}
/**
* Add a task at the end of the queue.
* @see {@link https://www.npmjs.com/package/fastq}
* @param data
* @returns Promise<void> - A Promise that will be fulfilled (rejected) when the task is completed successfully (unsuccessfully).
* This promise could be ignored as it will not lead to a `unhandledRejection`.
*/
async push(data: T): Promise<Task<T, R>> {
const id = uniqueId();
const task: Task<T, R> = { id, status: 'idle', error: undefined, count: 0, data, result: undefined };
this.store.set(id, task);
return this.queue.push(id);
}
// TODO: Support more function delegation to fastq.
private buildQueue(): fastq.queueAsPromised<string, Task<T, R>> {
return fastq.promise((id: string) => {
const task = this.getTask(id);
return this.work(task);
}, this.options.concurrency);
}
private async work(task: Task<T, R>): Promise<Task<T, R>> {
task.count += 1;
task.error = undefined;
task.status = 'processing';
if (this.options.verbose) {
console.log('[task] processing:', task);
}
try {
task.result = await this.worker(task.data);
task.status = 'succeeded';
if (this.options.verbose) {
console.log('[task] succeeded:', task);
}
return task;
} catch (error) {
task.error = error;
task.status = 'failed';
if (this.options.verbose) {
console.log('[task] failed:', task);
}
if (this.options.retry > 0 && task.count < this.options.retry) {
if (this.options.verbose) {
console.log('[task] retry:', task);
}
return this.work(task);
}
return task;
}
}
}
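
A minimal usage sketch of the `Queue` class above, mirroring how the CLI code in this comparison drives it (the file names and the numeric result type are illustrative assumptions):

```typescript
import { Queue } from 'src/queue';

// The worker returns a result per item; a thrown error marks the task as
// 'failed' once the configured retries are exhausted.
const queue = new Queue<string, number>(
  async (filepath: string) => {
    return filepath.length;
  },
  { concurrency: 5, retry: 3 },
);

for (const filepath of ['a.jpg', 'b.jpg', 'c.jpg']) {
  await queue.push(filepath);
}
await queue.drained();

// Tasks stay in the in-memory store, so failures can be reported afterwards,
// as checkForDuplicates and uploadFiles do above.
for (const task of queue.tasks.filter((t) => t.status === 'failed')) {
  console.log(`- ${task.data} - ${task.error}`);
}
```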

View File

@@ -1,5 +1,4 @@
import mockfs from 'mock-fs';
import { readFileSync } from 'node:fs';
import { CrawlOptions, crawl } from 'src/utils';
interface Test {
@@ -10,10 +9,6 @@ interface Test {
const cwd = process.cwd();
const readContent = (path: string) => {
return readFileSync(path).toString();
};
const extensions = [
'.jpg',
'.jpeg',
@@ -261,8 +256,7 @@ const tests: Test[] = [
{
test: 'should support ignoring absolute paths',
options: {
// Currently, fast-glob has some caveat when dealing with `/`.
pathsToCrawl: ['/*s'],
pathsToCrawl: ['/'],
recursive: true,
exclusionPattern: '/images/**',
},
@@ -282,16 +276,14 @@ describe('crawl', () => {
describe('crawl', () => {
for (const { test, options, files } of tests) {
it(test, async () => {
// The file contents is the same as the path.
mockfs(Object.fromEntries(Object.keys(files).map((file) => [file, file])));
mockfs(Object.fromEntries(Object.keys(files).map((file) => [file, ''])));
const actual = await crawl({ ...options, extensions });
const expected = Object.entries(files)
.filter((entry) => entry[1])
.map(([file]) => file);
// Compare file's content instead of path since a file can be represent in multiple ways.
expect(actual.map((path) => readContent(path)).sort()).toEqual(expected.sort());
expect(actual.sort()).toEqual(expected.sort());
});
}
});

View File

@@ -1,9 +1,8 @@
import { getMyUser, init, isHttpError } from '@immich/sdk';
import { convertPathToPattern, glob } from 'fast-glob';
import { glob } from 'fast-glob';
import { createHash } from 'node:crypto';
import { createReadStream } from 'node:fs';
import { readFile, stat, writeFile } from 'node:fs/promises';
import { platform } from 'node:os';
import { join, resolve } from 'node:path';
import yaml from 'yaml';
@@ -107,11 +106,6 @@ export interface CrawlOptions {
exclusionPattern?: string;
extensions: string[];
}
const convertPathToPatternOnWin = (path: string) => {
return platform() === 'win32' ? convertPathToPattern(path) : path;
};
export const crawl = async (options: CrawlOptions): Promise<string[]> => {
const { extensions: extensionsWithPeriod, recursive, pathsToCrawl, exclusionPattern, includeHidden } = options;
const extensions = extensionsWithPeriod.map((extension) => extension.replace('.', ''));
@@ -130,11 +124,11 @@ export const crawl = async (options: CrawlOptions): Promise<string[]> => {
if (stats.isFile() || stats.isSymbolicLink()) {
crawledFiles.push(absolutePath);
} else {
patterns.push(convertPathToPatternOnWin(absolutePath));
patterns.push(absolutePath);
}
} catch (error: any) {
if (error.code === 'ENOENT') {
patterns.push(convertPathToPatternOnWin(currentPath));
patterns.push(currentPath);
} else {
throw error;
}

View File

@@ -2,7 +2,6 @@ import { defineConfig } from 'vite';
import tsconfigPaths from 'vite-tsconfig-paths';
export default defineConfig({
resolve: { alias: { src: '/src' } },
build: {
rollupOptions: {
input: 'src/index.ts',

View File

@@ -2,37 +2,37 @@
# Manual edits may be lost in future updates.
provider "registry.opentofu.org/cloudflare/cloudflare" {
version = "4.36.0"
constraints = "4.36.0"
version = "4.34.0"
constraints = "4.34.0"
hashes = [
"h1:00/Y+l17VV4RquGSfwDnYsGYzyf2ZmdQwUgeIzXC7eg=",
"h1:489GpKItA/VRIUA5S4+F8MsnurGVciRvUFyIV81MJTU=",
"h1:7cnczyKGj3+gvaJ0r5JIVWLXPbQfkHYejac76MJx+I8=",
"h1:8rmr1PjJc14Xmor2eEvo5/WBojylt1eYdx6VbSU3Ulo=",
"h1:HjgphNjtgny5tkcUAQoGgBdcuQ+0IyhL8yLsiBqWAP0=",
"h1:LH3umxdBnJcAyeVoBLVn+PC0F0CzN6v9UN6lb6CqQPE=",
"h1:Xx6WUD/zB8fM9SjkFx06Fgx2K7aGJIVvsJS2pwqALEM=",
"h1:YizL5YN9zQ8YkSR6V/G201YrCVdnkF9EUIK4lpROWiA=",
"h1:aPcXVGjYcCJdqvWSzc/dEjwj05LnbWZje8IanygVjcI=",
"h1:eKCvfashdCqfDcFGXE2gq+XxAURD5SzuaQ9Brs3zLos=",
"h1:gpKcBYkBcfn/uF1A8W7MD/OysMZW7EU4QVYvPEEnxGc=",
"h1:kCkcxZZnkKAnMz9scUQHb19d9/l9FPOHovAyrvtA618=",
"h1:t8mXXnICTeKqoD29uvyLFHVWMfMzTUrJuHje8lpI0zU=",
"h1:zjzavjIdLDGRYsWd3v0HJz6ul12Cewj9RW/cqAQ4DxI=",
"zh:02665712b3893307596b3caab99cf1f2502d5caca18e22d4b37bb535e628e102",
"zh:1514b0d3ef62934484ac471113ee68cddec0c21e56b4f710922741fe9b6e6fdf",
"zh:1fab4dfcecbcea13267b42e5ff05ba0692aa2dcb247b8e633fea0daf49feb156",
"zh:24d8367295fe1f1b2be37802aecb96edf32f743364663ffe781d1bb92438395d",
"zh:34e84e7940c99dcf65663cfd25afac22bf5c8a5ff2cd21900c67180d3a072be9",
"zh:3d71d63204a329acf1d1de8638f2c725243cb94cf444d2d7acde54b3d1ac1696",
"zh:57831ba88e779a762bcfa224ba9eac8bc22ef9cd70cd541d848b351e0ba6a75c",
"zh:6407560f2e548afcb4852c91efc664627a9ee565c31a9c81fc9ea1806fca0567",
"zh:738ddbc664d75f4859aa09444a27809bc398795a8ea8f5be8531040690287712",
"zh:841ca2b2d78b6f8d33ec3435bc090c5e04a3a7d85c80df11227a7ea00d36f6b1",
"h1:+W0+Xe1AUh7yvHjDbgR9T7CY1UbBC3Y6U7Eo+ucLnJM=",
"h1:2+1lKObDDdFZRluvROF3RKtXD66CFT3PfnHOvR6CmfA=",
"h1:7vluN2wmw8D9nI11YwTgoGv3hGDXlkt8xqQ4L/JABeQ=",
"h1:B0Urm8ZKTJ8cXzSCtEpJ+o+LsD8MXaD6LU59qVbh50Q=",
"h1:FpGLCm5oF12FaRti3E4iQJlkVbdCC7toyGVuH8og7KY=",
"h1:FunTmrCMDy+rom7YskY0WiL5/Y164zFrrD9xnBxU5NY=",
"h1:GrxZhEb+5HzmHF/BvZBdGKBJy6Wyjme0+ABVDz/63to=",
"h1:J36dda2K42/oTfHuZ4jKkW5+nI6BTWFRUvo60P17NJg=",
"h1:Kq0Wyn+j6zoQeghMYixbnfnyP9ZSIEJbOCzMbaCiAQQ=",
"h1:TKxunXCiS/z105sN/kBNFwU6tIKD67JKJ3ZKjwzoCuI=",
"h1:TR0URKFQxsRO5/v7bKm5hkD/CTTjsG7aVGllL/Mf25c=",
"h1:V+3Qs0Reb6r+8p4XjE5ZFDWYrOIN0x5SwORz4wvHOJ4=",
"h1:mZB3Ui7V/lPQMQK53eBOjIHcrul74252dT06Kgn3J+s=",
"h1:wJwZrIXxoki8omXLJ7XA7B1KaSrtcLMJp090fRtFRAc=",
"zh:02aa46743c1585ada8faa7db23af68ea614053a506f88f05d1090ff5e0e68076",
"zh:1e1a545e83e6457a0e15357b23139bc288fb4fbd5e9a5ddfedc95a6a0216b08c",
"zh:29eef2621e0b1501f620e615bf73b1b90d5417d745e38af63634bc03250faf87",
"zh:3c20989d7e1e141882e6091384bf85fdc83f70f3d29e3e047c493a07de992095",
"zh:3d39619379ba29c7ffb15196f0ea72a04c84cfcdf4b39ac42ac4cf4c19f3eae2",
"zh:805f4a2774e9279c590b8214aabe6df9dcc22bb995df2530513f2f78c647ce75",
"zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f",
"zh:8b3d3d63354032ab9b2403c50728e9aa4e83c7367eaad2d18794221addeafc0f",
"zh:9e293443fe3127e488f540229983c1b9688268185f87567bb3d18e794697acd2",
"zh:b3a22439156e46461213db183e2e89569cd2e8d7cbcfc4b9f90469090e105807",
"zh:f430feb5d51891e84028459e57039045dea4f1f5fcf671161d8ac2d8f28763f3",
"zh:8af716f8655a57aa986861a8a7fa1d724594a284bd77c870eaea4db5f8b9732d",
"zh:a3d13c93b4e6ee6004782debaa9a17f990f2fe8ec8ba545c232818bb6064aba9",
"zh:bfa136acf82d3719473c0064446cc16d1b0303d98b06f55f503b7abeebceadb1",
"zh:ca6cf9254ae5436f2efbc01a0e3f7e4aa3c08b45182037b3eb3eb9539b2f7aec",
"zh:cba32d5de02674004e0a5955bd5222016d9991ca0553d4bd3bea517cd9def6ab",
"zh:d22c8cd527c6d0e84567f57be5911792e2fcd5969e3bba3747489f18bb16705b",
"zh:e4eeede9b3e72cdadd6cc252d4cbcf41baee6ecfd12bacd927e2dcbe733ab210",
"zh:facdaa787a69f86203cd3cc6922baea0b4a18bd9c36b0a8162e2e88ef6c90655",
]
}

View File

@@ -5,7 +5,7 @@ terraform {
required_providers {
cloudflare = {
source = "cloudflare/cloudflare"
version = "4.36.0"
version = "4.34.0"
}
}
}

View File

@@ -2,37 +2,37 @@
# Manual edits may be lost in future updates.
provider "registry.opentofu.org/cloudflare/cloudflare" {
version = "4.36.0"
constraints = "4.36.0"
version = "4.34.0"
constraints = "4.34.0"
hashes = [
"h1:00/Y+l17VV4RquGSfwDnYsGYzyf2ZmdQwUgeIzXC7eg=",
"h1:489GpKItA/VRIUA5S4+F8MsnurGVciRvUFyIV81MJTU=",
"h1:7cnczyKGj3+gvaJ0r5JIVWLXPbQfkHYejac76MJx+I8=",
"h1:8rmr1PjJc14Xmor2eEvo5/WBojylt1eYdx6VbSU3Ulo=",
"h1:HjgphNjtgny5tkcUAQoGgBdcuQ+0IyhL8yLsiBqWAP0=",
"h1:LH3umxdBnJcAyeVoBLVn+PC0F0CzN6v9UN6lb6CqQPE=",
"h1:Xx6WUD/zB8fM9SjkFx06Fgx2K7aGJIVvsJS2pwqALEM=",
"h1:YizL5YN9zQ8YkSR6V/G201YrCVdnkF9EUIK4lpROWiA=",
"h1:aPcXVGjYcCJdqvWSzc/dEjwj05LnbWZje8IanygVjcI=",
"h1:eKCvfashdCqfDcFGXE2gq+XxAURD5SzuaQ9Brs3zLos=",
"h1:gpKcBYkBcfn/uF1A8W7MD/OysMZW7EU4QVYvPEEnxGc=",
"h1:kCkcxZZnkKAnMz9scUQHb19d9/l9FPOHovAyrvtA618=",
"h1:t8mXXnICTeKqoD29uvyLFHVWMfMzTUrJuHje8lpI0zU=",
"h1:zjzavjIdLDGRYsWd3v0HJz6ul12Cewj9RW/cqAQ4DxI=",
"zh:02665712b3893307596b3caab99cf1f2502d5caca18e22d4b37bb535e628e102",
"zh:1514b0d3ef62934484ac471113ee68cddec0c21e56b4f710922741fe9b6e6fdf",
"zh:1fab4dfcecbcea13267b42e5ff05ba0692aa2dcb247b8e633fea0daf49feb156",
"zh:24d8367295fe1f1b2be37802aecb96edf32f743364663ffe781d1bb92438395d",
"zh:34e84e7940c99dcf65663cfd25afac22bf5c8a5ff2cd21900c67180d3a072be9",
"zh:3d71d63204a329acf1d1de8638f2c725243cb94cf444d2d7acde54b3d1ac1696",
"zh:57831ba88e779a762bcfa224ba9eac8bc22ef9cd70cd541d848b351e0ba6a75c",
"zh:6407560f2e548afcb4852c91efc664627a9ee565c31a9c81fc9ea1806fca0567",
"zh:738ddbc664d75f4859aa09444a27809bc398795a8ea8f5be8531040690287712",
"zh:841ca2b2d78b6f8d33ec3435bc090c5e04a3a7d85c80df11227a7ea00d36f6b1",
"h1:+W0+Xe1AUh7yvHjDbgR9T7CY1UbBC3Y6U7Eo+ucLnJM=",
"h1:2+1lKObDDdFZRluvROF3RKtXD66CFT3PfnHOvR6CmfA=",
"h1:7vluN2wmw8D9nI11YwTgoGv3hGDXlkt8xqQ4L/JABeQ=",
"h1:B0Urm8ZKTJ8cXzSCtEpJ+o+LsD8MXaD6LU59qVbh50Q=",
"h1:FpGLCm5oF12FaRti3E4iQJlkVbdCC7toyGVuH8og7KY=",
"h1:FunTmrCMDy+rom7YskY0WiL5/Y164zFrrD9xnBxU5NY=",
"h1:GrxZhEb+5HzmHF/BvZBdGKBJy6Wyjme0+ABVDz/63to=",
"h1:J36dda2K42/oTfHuZ4jKkW5+nI6BTWFRUvo60P17NJg=",
"h1:Kq0Wyn+j6zoQeghMYixbnfnyP9ZSIEJbOCzMbaCiAQQ=",
"h1:TKxunXCiS/z105sN/kBNFwU6tIKD67JKJ3ZKjwzoCuI=",
"h1:TR0URKFQxsRO5/v7bKm5hkD/CTTjsG7aVGllL/Mf25c=",
"h1:V+3Qs0Reb6r+8p4XjE5ZFDWYrOIN0x5SwORz4wvHOJ4=",
"h1:mZB3Ui7V/lPQMQK53eBOjIHcrul74252dT06Kgn3J+s=",
"h1:wJwZrIXxoki8omXLJ7XA7B1KaSrtcLMJp090fRtFRAc=",
"zh:02aa46743c1585ada8faa7db23af68ea614053a506f88f05d1090ff5e0e68076",
"zh:1e1a545e83e6457a0e15357b23139bc288fb4fbd5e9a5ddfedc95a6a0216b08c",
"zh:29eef2621e0b1501f620e615bf73b1b90d5417d745e38af63634bc03250faf87",
"zh:3c20989d7e1e141882e6091384bf85fdc83f70f3d29e3e047c493a07de992095",
"zh:3d39619379ba29c7ffb15196f0ea72a04c84cfcdf4b39ac42ac4cf4c19f3eae2",
"zh:805f4a2774e9279c590b8214aabe6df9dcc22bb995df2530513f2f78c647ce75",
"zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f",
"zh:8b3d3d63354032ab9b2403c50728e9aa4e83c7367eaad2d18794221addeafc0f",
"zh:9e293443fe3127e488f540229983c1b9688268185f87567bb3d18e794697acd2",
"zh:b3a22439156e46461213db183e2e89569cd2e8d7cbcfc4b9f90469090e105807",
"zh:f430feb5d51891e84028459e57039045dea4f1f5fcf671161d8ac2d8f28763f3",
"zh:8af716f8655a57aa986861a8a7fa1d724594a284bd77c870eaea4db5f8b9732d",
"zh:a3d13c93b4e6ee6004782debaa9a17f990f2fe8ec8ba545c232818bb6064aba9",
"zh:bfa136acf82d3719473c0064446cc16d1b0303d98b06f55f503b7abeebceadb1",
"zh:ca6cf9254ae5436f2efbc01a0e3f7e4aa3c08b45182037b3eb3eb9539b2f7aec",
"zh:cba32d5de02674004e0a5955bd5222016d9991ca0553d4bd3bea517cd9def6ab",
"zh:d22c8cd527c6d0e84567f57be5911792e2fcd5969e3bba3747489f18bb16705b",
"zh:e4eeede9b3e72cdadd6cc252d4cbcf41baee6ecfd12bacd927e2dcbe733ab210",
"zh:facdaa787a69f86203cd3cc6922baea0b4a18bd9c36b0a8162e2e88ef6c90655",
]
}

View File

@@ -5,7 +5,7 @@ terraform {
required_providers {
cloudflare = {
source = "cloudflare/cloudflare"
version = "4.36.0"
version = "4.34.0"
}
}
}

View File

@@ -26,16 +26,6 @@ services:
- /etc/localtime:/etc/localtime:ro
env_file:
- .env
environment:
IMMICH_REPOSITORY: immich-app/immich
IMMICH_REPOSITORY_URL: https://github.com/immich-app/immich
IMMICH_SOURCE_REF: local
IMMICH_SOURCE_COMMIT: af2efbdbbddc27cd06142f22253ccbbbbeec1f55
IMMICH_SOURCE_URL: https://github.com/immich-app/immich/commit/af2efbdbbddc27cd06142f22253ccbbbbeec1f55
IMMICH_BUILD: '9654404849'
IMMICH_BUILD_URL: https://github.com/immich-app/immich/actions/runs/9654404849
IMMICH_BUILD_IMAGE: development
IMMICH_BUILD_IMAGE_URL: https://github.com/immich-app/immich/pkgs/container/immich-server
ulimits:
nofile:
soft: 1048576
@@ -94,7 +84,7 @@ services:
redis:
container_name: immich_redis
image: redis:6.2-alpine@sha256:328fe6a5822256d065debb36617a8169dbfbd77b797c525288e465f56c1d392b
image: redis:6.2-alpine@sha256:d6c2911ac51b289db208767581a5d154544f2b2fe4914ea5056443f62dc6e900
healthcheck:
test: redis-cli ping || exit 1
@@ -113,26 +103,11 @@ services:
ports:
- 5432:5432
healthcheck:
test: pg_isready --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' || exit 1; Chksum="$$(psql --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' --tuples-only --no-align --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
test: pg_isready --dbname='${DB_DATABASE_NAME}' || exit 1; Chksum="$$(psql --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' --tuples-only --no-align --command='SELECT SUM(checksum_failures) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
interval: 5m
start_interval: 30s
start_period: 5m
command:
[
'postgres',
'-c',
'shared_preload_libraries=vectors.so',
'-c',
'search_path="$$user", public, vectors',
'-c',
'logging_collector=on',
'-c',
'max_wal_size=2GB',
'-c',
'shared_buffers=512MB',
'-c',
'wal_compression=on',
]
command: ["postgres", "-c" ,"shared_preload_libraries=vectors.so", "-c", 'search_path="$$user", public, vectors', "-c", "logging_collector=on", "-c", "max_wal_size=2GB", "-c", "shared_buffers=512MB", "-c", "wal_compression=on"]
# set IMMICH_METRICS=true in .env to enable metrics
# immich-prometheus:

View File

@@ -41,7 +41,7 @@ services:
redis:
container_name: immich_redis
image: redis:6.2-alpine@sha256:328fe6a5822256d065debb36617a8169dbfbd77b797c525288e465f56c1d392b
image: redis:6.2-alpine@sha256:d6c2911ac51b289db208767581a5d154544f2b2fe4914ea5056443f62dc6e900
healthcheck:
test: redis-cli ping || exit 1
restart: always
@@ -61,7 +61,7 @@ services:
ports:
- 5432:5432
healthcheck:
test: pg_isready --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' || exit 1; Chksum="$$(psql --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' --tuples-only --no-align --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
test: pg_isready --dbname='${DB_DATABASE_NAME}' || exit 1; Chksum="$$(psql --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' --tuples-only --no-align --command='SELECT SUM(checksum_failures) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
interval: 5m
start_interval: 30s
start_period: 5m
@@ -73,7 +73,7 @@ services:
container_name: immich_prometheus
ports:
- 9090:9090
image: prom/prometheus@sha256:075b1ba2c4ebb04bc3a6ab86c06ec8d8099f8fda1c96ef6d104d9bb1def1d8bc
image: prom/prometheus@sha256:5c435642ca4d8427ca26f4901c11114023004709037880cd7860d5b7176aa731
volumes:
- ./prometheus.yml:/etc/prometheus/prometheus.yml
- prometheus-data:/prometheus
@@ -85,7 +85,7 @@ services:
command: ['./run.sh', '-disable-reporting']
ports:
- 3000:3000
image: grafana/grafana:11.1.0-ubuntu@sha256:c7fc29ec783d5e7fc1bdfaad6f92345a345cffbc5d21c388ca228175006fc107
image: grafana/grafana:11.0.0-ubuntu@sha256:dcd3ae78713958a862732c3608d32c03f0c279c35a2032d74b80b12c5cdc47b8
volumes:
- grafana-data:/var/lib/grafana

View File

@@ -43,7 +43,7 @@ services:
redis:
container_name: immich_redis
image: docker.io/redis:6.2-alpine@sha256:328fe6a5822256d065debb36617a8169dbfbd77b797c525288e465f56c1d392b
image: docker.io/redis:6.2-alpine@sha256:d6c2911ac51b289db208767581a5d154544f2b2fe4914ea5056443f62dc6e900
healthcheck:
test: redis-cli ping || exit 1
restart: always
@@ -59,7 +59,7 @@ services:
volumes:
- ${DB_DATA_LOCATION}:/var/lib/postgresql/data
healthcheck:
test: pg_isready --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' || exit 1; Chksum="$$(psql --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' --tuples-only --no-align --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
test: pg_isready --dbname='${DB_DATABASE_NAME}' || exit 1; Chksum="$$(psql --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' --tuples-only --no-align --command='SELECT SUM(checksum_failures) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
interval: 5m
start_interval: 30s
start_period: 5m

View File

@@ -3,10 +3,10 @@ global:
evaluation_interval: 15s
scrape_configs:
- job_name: immich_api
- job_name: immich_server
static_configs:
- targets: ['immich-server:8081']
- job_name: immich_microservices
static_configs:
- targets: ['immich-server:8082']
- targets: ['immich-microservices:8081']

View File

@@ -43,7 +43,7 @@ if [ -n "${quota:-}" ] && [ -n "${period:-}" ]; then
cpus=1
fi
else
cpus=$(grep -c ^processor /proc/cpuinfo)
cpus=$(grep -c processor /proc/cpuinfo)
fi
echo "$cpus"

View File

@@ -1 +1 @@
20.15
20.14

View File

@@ -94,7 +94,7 @@ Thank you, and I am asking for your support for the project. I hope to be a full
- Bitcoin: 3QVAb9dCHutquVejeNXitPqZX26Yg5kxb7
- Give a project a star - the contributors love gazing at the stars and seeing their creations shining in the sky.
Join our friendly [Discord](https://discord.immich.app) to talk and discuss Immich, tech, or anything
Join our friendly [Discord](https://discord.gg/D8JsnBEuKb) to talk and discuss Immich, tech, or anything
Cheer!

View File

@@ -142,7 +142,7 @@ Thank you, and I am asking for your support for the project. I hope to be a full
- Bitcoin: 3QVAb9dCHutquVejeNXitPqZX26Yg5kxb7
- Give a project a star - the contributors love gazing at the stars and seeing their creations shining in the sky.
Join our friendly [Discord](https://discord.immich.app) to talk and discuss Immich, tech, or anything
Join our friendly [Discord](https://discord.gg/D8JsnBEuKb) to talk and discuss Immich, tech, or anything
Cheer!

View File

@@ -1,77 +0,0 @@
---
title: Immich Update - July 2024
authors: [alextran]
tags: [update, v1.106.0]
---
Hello everybody! Alex from Immich here and I am back with another development progress update for the project.
Summer has returned once again, and the night sky is filled with stars, thank you for **38_000 shining stars** you have sent to our [GitHub repo](https://github.com/immich-app/immich)! Since the last announcement several core contributors have started full time. Everything is going great with development, PRs get merged with _brrrrrrr_ rate, conversation exchange between team members is on a new high, we met and are working with the great engineers at FUTO. The spirit is high and we have a lot of things brewing that we think you will like.
Let's go over some of the updates we had since the last post.
### Container consolidation
Reduced the number of total containers from 5 to 4 by making the microservices thread get spawned directly in the server container. Woohoo, remember when Immich had 7 containers?
### Email notifications
![smtp](https://github.com/immich-app/immich/assets/27055614/949cba85-d3f1-4cd3-b246-a6f5fb5d3ae8)
We added email notifications to the app with SMTP settings that you can configure for the following events
- A new account is created for you.
- You are added to a shared album.
- New media is added to an album.
### Versioned docs
You can now jump back into the past or take a peek at the unreleased version of the documentation by selecting the version on the website.
![version-doc](https://github.com/immich-app/immich/assets/27055614/6d22898a-5093-41ad-b416-4573d7ce6e03)
### Similarity deduplication
With more machine learning and CLIP magic, we now have similarity deduplication built into the application where it will search for closely similar images and let you decide what to do with them; i.e keep or trash.
![similarity-deduplication](https://github.com/immich-app/immich/assets/27055614/3cac8478-fbf7-47ea-acb6-0146901dc67e)
### Permanent URL for asset on the web
The detail view for an asset now has a permanent URL so you can easily share them with your loved ones.
### Web app translations
We now have a public Weblate project which the community can use to translate the webapp to their native languages. We are planning to port the mobile app translation to this platform as well. If you would like to contribute, you can take a look [here](https://hosted.weblate.org/projects/immich/immich/). We're already close to 50% translations -- we really appreciate everyone contributing to that!
![web-translation](https://github.com/immich-app/immich/assets/27055614/363df2ed-656c-4584-bd82-0708a693c5bc)
### Read-only/Editor mode on shared album
As the owner of the album, you can choose if the shared user can edit the album or to only view the content of the album without any modification.
![read-only-album](https://github.com/immich-app/immich/assets/27055614/c6f66375-b869-495a-9a86-3e87b316d109)
### Better video thumbnails
Immich now tries to find a descriptive video thumbnail instead of simply using the first frame. No more black images for thumbnails!
### Public Roadmap
We now have a [public roadmap](https://immich.app/roadmap), giving you a high-level overview of things the team is working on. The first goal of this roadmap is to bring Immich to a stable release, which is expected sometime later this year. Some of the highlights include
- Auto stacking - Auto stacking of burst photos
- Basic editor - Basic photo editing capabilities
- Workflows - Automate tasks with workflows
- Fine grained access controls - Granular access controls for users and api keys
- Better background backups - Rework background backups to be more reliable
- Private/locked photos - Private assets with extra protections
Beyond the items in the roadmap, we have _many many_ more ideas for Immich. The team and I hope that you are enjoying the application, find it helpful in your life and we have nothing but the intention of building out great software for you all!
Have an amazing Summer or Winter for those in the southern hemisphere! :D
Until next time,
Cheers!
Alex

View File

@@ -133,6 +133,40 @@ For example, say you have existing transcodes with the policy "Videos higher tha
No. Our design principle is that the original assets should always be untouched.
### How can I move all data (photos, persons, albums, libraries) from one user to another?
This is not officially supported but can be accomplished with some database updates. You can do this on the command line (in the PostgreSQL container using the `psql` command), or you can add, for example, an [Adminer](https://www.adminer.org/) container to the `docker-compose.yml` file so that you can use a web interface.
<details>
<summary>Steps</summary>
1. **MAKE A BACKUP** - See [backup and restore](/docs/administration/backup-and-restore.md).
2. Find the ID of both the 'source' and the 'destination' user (it's the id column in the `users` table)
3. Four tables need to be updated:
```sql
BEGIN;
-- reassign albums
UPDATE albums SET "ownerId" = '<destinationId>' WHERE "ownerId" = '<sourceId>';
-- reassign people
UPDATE person SET "ownerId" = '<destinationId>' WHERE "ownerId" = '<sourceId>';
-- reassign assets
UPDATE assets SET "ownerId" = '<destinationId>' WHERE "ownerId" = '<sourceId>'
AND CHECKSUM NOT IN (SELECT CHECKSUM FROM assets WHERE "ownerId" = '<destinationId>');
-- reassign external libraries
UPDATE libraries SET "ownerId" = '<destinationId>' WHERE "ownerId" = '<sourceId>';
COMMIT;
```
4. There might be left-over assets in the 'source' user's library if they are skipped by the last query because of duplicate checksums. These are probably duplicates anyway, and can probably be removed.
</details>
---
## Albums
@@ -408,11 +442,4 @@ docker exec -it immich_postgres psql --dbname=immich --username=<DB_USERNAME> --
</details>
If corruption is detected, you should immediately make a backup before performing any other work in the database.
To do so, you may need to set the `zero_damaged_pages=on` flag for the database server to allow `pg_dumpall` to succeed.
After taking a backup, the recommended next step is to restore the database from a healthy backup before corruption was detected.
The damaged database dump can be used to manually recover any changes made since the last backup, if needed.
The causes of possible corruption are many, but can include unexpected poweroffs or unmounts, use of a network share for Postgres data, or a poor storage medium such an SD card or failing HDD/SSD.
[huggingface]: https://huggingface.co/immich-app

View File

@@ -76,7 +76,6 @@ services:
backup:
container_name: immich_db_dumper
image: prodrigestivill/postgres-backup-local:14
restart: always
env_file:
- .env
environment:
@@ -192,6 +191,6 @@ When you turn off the storage template engine, it will leave the assets in `UPLO
</Tabs>
:::danger
Do not touch the files inside these folders under any circumstances except taking a backup. Changing or removing an asset can cause untracked and missing files.
Do not touch the files inside these folders under any circumstances except taking a backup, changing or removing an asset can cause untracked and missing files.
You can think of it as App-Which-Must-Not-Be-Named, the only access to viewing, changing and deleting assets is only through the mobile or browser interface.
:::

View File

@@ -1,23 +0,0 @@
# Email Notifications
Immich supports the option to send notifications via Email for the following events:
- Creating a new user
- Notifying a user when they get added to a shared album
- Informing other users about the addition of new assets to a shared album
## SMTP settings
You can access the settings panel from the web at `Administration -> Settings -> Notification settings`
Under Email, enter the following details to connect with SMTP servers.
You can use the following [guide](/docs/guides/smtp-gmail) to use Gmail's SMTP server.
<img src={require('./img/email-settings.png').default} width="80%" title="SMTP settings" />
## User's notifications settings
Users can manage their email notification settings from their account settings page on the web. They can choose to turn email notifications on or off for the following events:
<img src={require('./img/user-notifications-settings.png').default} width="80%" title="User notification settings" />

Binary file not shown (before: 218 KiB)
Binary file not shown (before: 18 KiB)
Binary file not shown (before: 2.7 KiB)
Binary file not shown (before: 109 KiB)

View File

@@ -27,7 +27,7 @@ Copy the entire `immich-server` block as a new service and make the following ch
+ container_name: immich_microservices
```
Once you have two copies of the immich-server service, make the following changes to each one. This will allow one container to only serve the web UI and API, and the other one to handle all other tasks.
Once you have two copies of the immich-server service, make the following chnages to each one. This will allow one container to only serve the web UI and API, and the other one to handle all other tasks.
```diff
services:

View File

@@ -10,59 +10,6 @@ Viewing and modifying the system settings is restricted to the Administrator.
You can always return to the default settings by clicking the `Reset to default` button.
:::
## Authentication Settings
Manage password, OAuth, and other authentication settings
### OAuth Authentication
Immich supports OAuth Authentication. Read more about this feature and its configuration [here](/docs/administration/oauth).
### Password Authentication
The administrator can choose to disable login with username and password for the entire instance. This means that **no one**, including the system administrator, will be able to log in using this method. If [OAuth Authentication](/docs/administration/oauth) is also disabled, no users will be able to log in using **any** method. Changing this setting does not affect existing sessions, just new login attempts.
:::tip
You can always use the [Server CLI](/docs/administration/server-commands) to re-enable password login.
:::
## Image Settings (thumbnails and previews)
- Thumbnails - Used in the main timeline.
- Previews - Used in the asset viewer.
By default Immich creates 3 images for each asset:
Blurred (thumbhash), Small - thumbnail (webp), and Large - preview (jpeg/webp). Using these settings, you can change the quality of the thumbnail and preview files that are created.
**Thumbnail format**
Allows you to choose the format you want for the thumbnail images. WebP produces smaller files than JPEG, but is slower to encode.
:::tip
You can read in detail about the advantages and disadvantages of using webp over jpeg on [Adobe's website](https://www.adobe.com/creativecloud/file-types/image/raster/webp-file.html)
:::
**Thumbnail resolution**
Used when viewing groups of photos (main timeline, album view, etc.). Higher resolutions can preserve more detail but take longer to encode, have larger file sizes, and can reduce app responsiveness.
**Preview format**
Allows you to choose the format you want for the preview images. WebP produces smaller files than JPEG, but is slower to encode.
**Preview resolution**
Used when viewing a single photo and for machine learning. Higher resolutions can preserve more detail but take longer to encode, have larger file sizes, and can reduce app responsiveness.
**Quality**
Image quality from 1-100. Higher values give better quality but produce larger files; this option affects both the preview and thumbnail images.
**Prefer wide gamut**
Use Display P3 for thumbnails. This better preserves the vibrance of images with wide colorspaces, but images may appear differently on old devices with an old browser version. sRGB images are kept as sRGB to avoid color shifts.
**Prefer embedded preview**
Use embedded previews in RAW photos as the input to image processing when available. This can produce more accurate colors for some images, but the quality of the preview is camera-dependent and the image may have more compression artifacts.
:::tip
The default resolution for Large thumbnails can be lowered from 1440p (default) to 1080p or 720p to save storage space.
:::
## Job Settings
Using these settings, you can determine the amount of work that will run concurrently for each task in microservices. Some tasks can be set to higher values on computers with powerful hardware and storage with good I/O capabilities.
@@ -145,9 +92,17 @@ The map can be adjusted via [OpenMapTiles](https://openmaptiles.org/styles/) for
Immich supports [Reverse Geocoding](/docs/features/reverse-geocoding) using data from the [GeoNames](https://www.geonames.org/) geographical database.
## Notification Settings
## OAuth Authentication
SMTP server setup, used for notifications such as user creation and shared album activity. More information can be found [here](/docs/administration/email-notification).
Immich supports OAuth Authentication. Read more about this feature and its configuration [here](/docs/administration/oauth).
## Password Authentication
The administrator can choose to disable login with username and password for the entire instance. This means that **no one**, including the system administrator, will be able to log in using this method. If [OAuth Authentication](/docs/administration/oauth) is also disabled, no users will be able to log in using **any** method. Changing this setting does not affect existing sessions, just new login attempts.
:::tip
You can always use the [Server CLI](/docs/administration/server-commands) to re-enable password login.
:::
## Server Settings
@@ -175,6 +130,27 @@ p {
}
```
## Thumbnail Settings
By default Immich creates 3 thumbnails for each asset:
Blurred (thumbhash), Small (webp), and Large (jpeg). Using these settings, you can change the quality of the thumbnail files that are created.
**Small thumbnail resolution**
Used when viewing groups of photos (main timeline, album view, etc.). Higher resolutions can preserve more detail but take longer to encode, have larger file sizes, and can reduce app responsiveness.
**Large thumbnail resolution**
Used when viewing a single photo and for machine learning. Higher resolutions can preserve more detail but take longer to encode, have larger file sizes, and can reduce app responsiveness.
**Quality**
Thumbnail quality from 1-100. Higher is better for quality but produces larger files.
**Prefer wide gamut**
Use Display P3 for thumbnails. This better preserves the vibrance of images with wide color spaces, but images may appear different on old devices with an old browser version. sRGB images are kept as sRGB to avoid color shifts.
:::tip
The default resolution for Large thumbnails can be lowered from 1440p (default) to 1080p or 720p to save storage space.
:::
## Trash Settings
The system administrator can enable a trash for deleted files; deleted files remain in the trash until their deletion date, which is 30 days by default or as configured by the system administrator.

View File

@@ -13,20 +13,6 @@ Immich supports multiple users, each with their own library.
<UserCreate />
## Send new user email notification
:::note
This option is only available if an SMTP server has been configured in the administrator settings.
:::
The admin can send a welcome email if the Email option is set; see the email notification documentation to learn how to set up the SMTP server in Immich.
<img
src={require('./img/send-user-email-notification.webp').default}
width="40%"
title="Send user email notification"
/>
## Set Storage Quota For User
As the instance's admin, you can specify a storage quota for each user; once the limit is reached, the user won't be able to upload to the instance anymore.

View File

@@ -1,21 +0,0 @@
# Translations
:::tip
You can request a new language [here](https://hosted.weblate.org/new-lang/immich/immich/).
:::
## Weblate
[Weblate](https://weblate.org/) is a "libre software web-based continuous localization system". Immich localization efforts are managed on their [hosted platform](https://hosted.weblate.org/projects/immich/immich/).
## International message format
Plurals, numbers, dates and other locale specific message formats can be handled by using the [ICU message format](https://unicode-org.github.io/icu/userguide/format_parse/messages/). Internally, this is handled by the [intl-messageformat](https://www.npmjs.com/package/intl-messageformat) library. Their [documentation](https://formatjs.io/docs/intl-messageformat/) includes common, editable examples via a "live editor" feature, which can be useful to test and debug message formats.
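As a rough illustration (not code from the Immich repositories; the message string is invented for this example), a pluralized ICU message can be formatted with `intl-messageformat` like this:

```ts
import { IntlMessageFormat } from 'intl-messageformat';

// Hypothetical message; real strings live in the translation files managed on Weblate.
const message = new IntlMessageFormat(
  'You have {count, plural, =0 {no new photos} one {# new photo} other {# new photos}}',
  'en',
);

console.log(message.format({ count: 0 })); // You have no new photos
console.log(message.format({ count: 1 })); // You have 1 new photo
console.log(message.format({ count: 5 })); // You have 5 new photos
```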
## Progress
Immich currently supports the following languages:
<a href="https://hosted.weblate.org/engage/immich/">
<img src="https://hosted.weblate.org/widget/immich/immich/multi-auto.svg" alt="Translation status" />
</a>

View File

@@ -1,7 +1,7 @@
# Troubleshooting
:::tip
A great option to get assistance with troubleshooting is to join our [Discord](https://discord.immich.app) server, where we have a dedicated channel for `#contributing`.
A great option to get assistance with troubleshooting is to join our [Discord](https://discord.gg/D8JsnBEuKb) server, where we have a dedicated channel for `#contributing`.
:::
## Known Issues

View File

@@ -60,17 +60,17 @@ For RKMPP to work:
#### Basic Setup
1. If you do not already have it, download the latest [`hwaccel.transcoding.yml`][hw-file] file and ensure it's in the same folder as the `docker-compose.yml`.
2. In the `docker-compose.yml` under `immich-server`, uncomment the `extends` section and change `cpu` to the appropriate backend.
2. In the `docker-compose.yml` under `immich-microservices`, uncomment the `extends` section and change `cpu` to the appropriate backend.
- For VAAPI on WSL2, be sure to use `vaapi-wsl` rather than `vaapi`
3. Redeploy the `immich-server` container with these updated settings.
3. Redeploy the `immich-microservices` container with these updated settings.
4. In the Admin page under `Video transcoding settings`, change the hardware acceleration setting to the appropriate option and save.
5. (Optional) If using a compatible backend, you may enable hardware decoding for optimal performance.
#### Single Compose File
Some platforms, including Unraid and Portainer, do not support multiple Compose files as of writing. As an alternative, you can "inline" the relevant contents of the [`hwaccel.transcoding.yml`][hw-file] file into the `immich-server` service directly.
Some platforms, including Unraid and Portainer, do not support multiple Compose files as of writing. As an alternative, you can "inline" the relevant contents of the [`hwaccel.transcoding.yml`][hw-file] file into the `immich-microservices` service directly.
For example, the `qsv` section in this file is:
@@ -79,22 +79,21 @@ devices:
- /dev/dri:/dev/dri
```
You can add this to the `immich-server` service instead of extending from `hwaccel.transcoding.yml`:
You can add this to the `immich-microservices` service instead of extending from `hwaccel.transcoding.yml`:
```yaml
immich-server:
container_name: immich_server
immich-microservices:
container_name: immich_microservices
image: ghcr.io/immich-app/immich-server:${IMMICH_VERSION:-release}
# Note the lack of an `extends` section
devices:
- /dev/dri:/dev/dri
command: ['start.sh', 'microservices']
volumes:
- ${UPLOAD_LOCATION}:/usr/src/app/upload
- /etc/localtime:/etc/localtime:ro
env_file:
- .env
ports:
- 2283:3001
depends_on:
- redis
- database

Binary file not shown (before: 218 KiB)
Binary file not shown (before: 17 KiB)

View File

@@ -1,20 +0,0 @@
# SMTP settings using Gmail
This guide walks you through how to get the information you need to set up your Immich instance to send emails using Gmail's SMTP server.
## Create an app password
From your Google account settings
- Add [2-Step Verification](https://support.google.com/accounts/answer/185839) to your Google account (Required)
- [Create an app password](https://myaccount.google.com/apppasswords).
At the end of creating your app password, a password will be displayed; save it, as it will be used for the password field when setting up the SMTP server in Immich.
<img src={require('./img/google-app-password.webp').default} title="Authorised redirect URIs" />
## Entering the SMTP credential in Immich
Enter your credentials in Immich's email notification settings at `Administration -> Settings -> Notification Settings`.
<img src={require('./img/email-settings.png').default} width="80%" title="SMTP settings" />

View File

@@ -38,17 +38,17 @@ Regardless of filesystem, it is not recommended to use a network share for your
## General
| Variable | Description | Default | Containers | Workers |
| :---------------------------------- | :---------------------------------------------- | :--------------------------: | :----------------------- | :----------------- |
| `TZ` | Timezone | | server | microservices |
| `IMMICH_ENV` | Environment (production, development) | `production` | server, machine learning | api, microservices |
| `IMMICH_LOG_LEVEL` | Log Level (verbose, debug, log, warn, error) | `log` | server, machine learning | api, microservices |
| `IMMICH_MEDIA_LOCATION` | Media Location | `./upload`<sup>\*1</sup> | server | api, microservices |
| `IMMICH_CONFIG_FILE` | Path to config file | | server | api, microservices |
| `NO_COLOR` | Set to `true` to disable color-coded log output | `false` | server, machine learning | |
| `CPU_CORES` | Amount of cores available to the immich server | auto-detected cpu core count | server | |
| `IMMICH_API_METRICS_PORT` | Port for the OTEL metrics | `8081` | server | api |
| `IMMICH_MICROSERVICES_METRICS_PORT` | Port for the OTEL metrics | `8082` | server | microservices |
| Variable | Description | Default | Containers | Workers |
| :------------------------------ | :---------------------------------------------- | :--------------------------: | :----------------------- | :----------------- |
| `TZ` | Timezone | | server | microservices |
| `IMMICH_ENV` | Environment (production, development) | `production` | server, machine learning | api, microservices |
| `IMMICH_LOG_LEVEL` | Log Level (verbose, debug, log, warn, error) | `log` | server, machine learning | api, microservices |
| `IMMICH_MEDIA_LOCATION` | Media Location | `./upload`<sup>\*1</sup> | server | api, microservices |
| `IMMICH_CONFIG_FILE` | Path to config file | | server | api, microservices |
| `IMMICH_WEB_ROOT` | Path of root index.html | `/usr/src/app/www` | server | api |
| `IMMICH_REVERSE_GEOCODING_ROOT` | Path of reverse geocoding dump directory | `/usr/src/resources` | server | microservices |
| `NO_COLOR` | Set to `true` to disable color-coded log output | `false` | server, machine learning | |
| `CPU_CORES` | Amount of cores available to the immich server | auto-detected cpu core count | server | |
\*1: With the default `WORKDIR` of `/usr/src/app`, this path will resolve to `/usr/src/app/upload`.
It only needs to be set if the Immich deployment method changes.
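As a sketch (the values below are placeholders, not recommendations), these variables are typically set either in the `.env` file or in the `environment:` block of the `immich-server` service in `docker-compose.yml`:

```yaml
# Hypothetical excerpt of docker-compose.yml; adapt it to your own deployment.
services:
  immich-server:
    environment:
      TZ: Europe/Berlin # timezone used by the microservices worker
      IMMICH_LOG_LEVEL: verbose # verbose, debug, log, warn, error
      IMMICH_MEDIA_LOCATION: /usr/src/app/upload
```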

View File

@@ -27,7 +27,7 @@ For more information about setting up the community image see [here](https://git
:::info
- Guide was written using Unraid v6.12.10.
- Guide was written using Unraid v6.12.10
- Requires you to have installed the plugin: [Docker Compose Manager](https://forums.unraid.net/topic/114415-plugin-docker-compose-manager/)
- An Unraid share created for your images
- There has been a [report](https://forums.unraid.net/topic/130006-errortraps-traps-node27707-trap-invalid-opcode-ip14fcfc8d03c0-sp7fff32889dd8-more/#comment-1189395) of this not working if your Unraid server doesn't support AVX _(e.g. using a T610)_
@@ -46,8 +46,7 @@ alt="Select Plugins > Compose.Manager > Add New Stack > Label it Immich"
/>
3. Select the cog ⚙️ next to Immich then click "**Edit Stack**"
4. Click "**Compose File**" and then paste the entire contents of the [Immich Docker Compose](https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml) file into the Unraid editor. Remove any text that may be in the text area by default. Note that Unraid v6.12.10 uses version 24.0.9 of the Docker Engine, which does not support healthcheck `start_interval` as defined in the `database` service of the Docker compose file (version 25 or higher is needed). This parameter defines an initial waiting period before starting health checks, to give the container time to start up. Commenting out the `start_interval` and `start_period` parameters will allow the containers to start up normally. The only downside to this is that the database container will not receive an initial health check until `interval` time has passed.
4. Click "**Compose File**" and then paste the entire contents of the [Immich Docker Compose](https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml) file into the Unraid editor. Remove any text that may be in the text area by default.
<details >
<summary>Using an existing Postgres container? Click me! Otherwise proceed to step 5.</summary>
<ul>
@@ -71,7 +70,6 @@ alt="Select Plugins > Compose.Manager > Add New Stack > Label it Immich"
/>
</ul>
</details>
5. Click "**Save Changes**". You will be prompted to edit stack UI labels; just leave this blank and click "**Ok**"
6. Select the cog ⚙️ next to Immich, click "**Edit Stack**", then click "**Env File**"
7. Paste the entire contents of the [Immich example.env](https://github.com/immich-app/immich/releases/latest/download/example.env) file into the Unraid editor, then **before saving** edit the following:

View File

@@ -13,4 +13,4 @@ Running into an issue or have a question? Try the following:
[github-issues]: https://github.com/immich-app/immich/issues
[github-releases]: https://github.com/immich-app/immich/releases
[discord-link]: https://discord.immich.app
[discord-link]: https://discord.com/invite/D8JsnBEuKb

View File

@@ -5,21 +5,21 @@ sidebar_position: 3
# Quick Start
Here is a quick, no-choices path to install Immich and take it for a test drive.
Once you've tried it, you might use one of the many other ways
Once you've tried it, perhaps you'll use one of the many other ways
to install and use it.
## Requirements
Check the [requirements page](/docs/install/requirements) to get started.
## Install and Launch via Docker Compose
## Install and launch via Docker Compose
Follow the [Docker Compose (Recommended)](/docs/install/docker-compose) instructions
to install the server.
- Where random passwords are required, `pwgen` is a handy utility.
- `UPLOAD_LOCATION` should be set to some new directory on the server
with enough free space.
with free space.
- You may ignore "Step 4 - Upgrading".
## Try the Web UI
@@ -48,26 +48,26 @@ import MobileAppLogin from '/docs/partials/_mobile-app-login.md';
In the mobile app, you should see the photo you uploaded from the web UI.
### Transfer Photos from Your Mobile Device
### Transfer Photos from your Mobile Device
import MobileAppBackup from '/docs/partials/_mobile-app-backup.md';
<MobileAppBackup />
The backup time differs depending on how many photos are on your mobile device. Large uploads may
Depending on how many photos are on your mobile device, this backup may
take quite a while.
You can select the **Jobs** tab to see Immich processing your photos.
You can select the Jobs tab to see Immich processing your photos.
<img src={require('/docs/guides/img/jobs-tab.png').default} title="Jobs tab" />
## Set up Your Backups
## Set up your backups
You may want to back up the content of your Immich instance
along with other parts of your server; be sure to read about
[database backup](/docs/administration/backup-and-restore).
## Where to Go From Here
## Where to go from here?
You may decide you'd like to install the server a different way;
the Install category on the left menu provides many options.

View File

@@ -4,17 +4,11 @@ sidebar_position: 5
# Support The Project
## Report issues
## Contributing
By far the easiest way to help make Immich better is to use it and report issues and bugs. Found a bug? [Open an issue on GitHub][github-issue].
## Translations
Support the project by localizing on [Weblate](https://hosted.weblate.org/projects/immich/immich/). For more information, see the [Translations](/docs/developer/translations) section.
## Development
If you are a programmer or developer, take a look at Immich's [technology stack](/docs/developer/architecture.mdx) and consider fixing bugs or building new features. The team and I are always looking for new contributors. For information about how to contribute as a developer, see the [Developer](/docs/developer/architecture.mdx) section.
1. Testing - Using Immich and reporting bugs is a great way to help support the project. Found a bug? [Open an issue on GitHub][github-issue].
1. Translations - The Immich mobile app has been translated into [17 languages][github-langs] so far! To contribute with translations, email me at alex.tran1502@gmail.com or send me a message on discord.
1. Development - If you are a programmer or developer, take a look at Immich's [technology stack](/docs/developer/architecture.mdx) and consider fixing bugs or building new features. The team and I are always looking for new contributors. For information about how to contribute as a developer, see the [Developer](/docs/developer/architecture.mdx) section.
[github-issue]: https://github.com/immich-app/immich/issues/new/choose
[github-langs]: https://github.com/immich-app/immich/tree/main/mobile/assets/i18n

View File

@@ -94,10 +94,6 @@ const config = {
srcDark: 'img/immich-logo-inline-dark.png',
},
items: [
{
type: 'custom-versionSwitcher',
position: 'right',
},
{
to: '/docs/overview/introduction',
position: 'right',
@@ -124,7 +120,7 @@ const config = {
position: 'right',
},
{
href: 'https://discord.immich.app',
href: 'https://discord.gg/D8JsnBEuKb',
label: 'Discord',
position: 'right',
},
@@ -151,7 +147,7 @@ const config = {
items: [
{
label: 'Discord',
href: 'https://discord.immich.app',
href: 'https://discord.com/invite/D8JsnBEuKb',
},
{
label: 'Reddit',

379
docs/package-lock.json generated
View File

@@ -2155,10 +2155,9 @@
}
},
"node_modules/@docusaurus/core": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/core/-/core-3.4.0.tgz",
"integrity": "sha512-g+0wwmN2UJsBqy2fQRQ6fhXruoEa62JDeEa5d8IdTJlMoaDaEDfHh7WjwGRn4opuTQWpjAwP/fbcgyHKlE+64w==",
"license": "MIT",
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/core/-/core-3.3.2.tgz",
"integrity": "sha512-PzKMydKI3IU1LmeZQDi+ut5RSuilbXnA8QdowGeJEgU8EJjmx3rBHNT1LxQxOVqNEwpWi/csLwd9bn7rUjggPA==",
"dependencies": {
"@babel/core": "^7.23.3",
"@babel/generator": "^7.23.3",
@@ -2170,12 +2169,12 @@
"@babel/runtime": "^7.22.6",
"@babel/runtime-corejs3": "^7.22.6",
"@babel/traverse": "^7.22.8",
"@docusaurus/cssnano-preset": "3.4.0",
"@docusaurus/logger": "3.4.0",
"@docusaurus/mdx-loader": "3.4.0",
"@docusaurus/utils": "3.4.0",
"@docusaurus/utils-common": "3.4.0",
"@docusaurus/utils-validation": "3.4.0",
"@docusaurus/cssnano-preset": "3.3.2",
"@docusaurus/logger": "3.3.2",
"@docusaurus/mdx-loader": "3.3.2",
"@docusaurus/utils": "3.3.2",
"@docusaurus/utils-common": "3.3.2",
"@docusaurus/utils-validation": "3.3.2",
"autoprefixer": "^10.4.14",
"babel-loader": "^9.1.3",
"babel-plugin-dynamic-import-node": "^2.3.3",
@@ -2241,10 +2240,9 @@
}
},
"node_modules/@docusaurus/cssnano-preset": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/cssnano-preset/-/cssnano-preset-3.4.0.tgz",
"integrity": "sha512-qwLFSz6v/pZHy/UP32IrprmH5ORce86BGtN0eBtG75PpzQJAzp9gefspox+s8IEOr0oZKuQ/nhzZ3xwyc3jYJQ==",
"license": "MIT",
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/cssnano-preset/-/cssnano-preset-3.3.2.tgz",
"integrity": "sha512-+5+epLk/Rp4vFML4zmyTATNc3Is+buMAL6dNjrMWahdJCJlMWMPd/8YfU+2PA57t8mlSbhLJ7vAZVy54cd1vRQ==",
"dependencies": {
"cssnano-preset-advanced": "^6.1.2",
"postcss": "^8.4.38",
@@ -2256,10 +2254,9 @@
}
},
"node_modules/@docusaurus/logger": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/logger/-/logger-3.4.0.tgz",
"integrity": "sha512-bZwkX+9SJ8lB9kVRkXw+xvHYSMGG4bpYHKGXeXFvyVc79NMeeBSGgzd4TQLHH+DYeOJoCdl8flrFJVxlZ0wo/Q==",
"license": "MIT",
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/logger/-/logger-3.3.2.tgz",
"integrity": "sha512-Ldu38GJ4P8g4guN7d7pyCOJ7qQugG7RVyaxrK8OnxuTlaImvQw33aDRwaX2eNmX8YK6v+//Z502F4sOZbHHCHQ==",
"dependencies": {
"chalk": "^4.1.2",
"tslib": "^2.6.0"
@@ -2269,14 +2266,13 @@
}
},
"node_modules/@docusaurus/mdx-loader": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/mdx-loader/-/mdx-loader-3.4.0.tgz",
"integrity": "sha512-kSSbrrk4nTjf4d+wtBA9H+FGauf2gCax89kV8SUSJu3qaTdSIKdWERlngsiHaCFgZ7laTJ8a67UFf+xlFPtuTw==",
"license": "MIT",
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/mdx-loader/-/mdx-loader-3.3.2.tgz",
"integrity": "sha512-AFRxj/aOk3/mfYDPxE3wTbrjeayVRvNSZP7mgMuUlrb2UlPRbSVAFX1k2RbgAJrnTSwMgb92m2BhJgYRfptN3g==",
"dependencies": {
"@docusaurus/logger": "3.4.0",
"@docusaurus/utils": "3.4.0",
"@docusaurus/utils-validation": "3.4.0",
"@docusaurus/logger": "3.3.2",
"@docusaurus/utils": "3.3.2",
"@docusaurus/utils-validation": "3.3.2",
"@mdx-js/mdx": "^3.0.0",
"@slorber/remark-comment": "^1.0.0",
"escape-html": "^1.0.3",
@@ -2308,12 +2304,11 @@
}
},
"node_modules/@docusaurus/module-type-aliases": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/module-type-aliases/-/module-type-aliases-3.4.0.tgz",
"integrity": "sha512-A1AyS8WF5Bkjnb8s+guTDuYmUiwJzNrtchebBHpc0gz0PyHJNMaybUlSrmJjHVcGrya0LKI4YcR3lBDQfXRYLw==",
"license": "MIT",
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/module-type-aliases/-/module-type-aliases-3.3.2.tgz",
"integrity": "sha512-b/XB0TBJah5yKb4LYuJT4buFvL0MGAb0+vJDrJtlYMguRtsEBkf2nWl5xP7h4Dlw6ol0hsHrCYzJ50kNIOEclw==",
"dependencies": {
"@docusaurus/types": "3.4.0",
"@docusaurus/types": "3.3.2",
"@types/history": "^4.7.11",
"@types/react": "*",
"@types/react-router-config": "*",
@@ -2327,18 +2322,17 @@
}
},
"node_modules/@docusaurus/plugin-content-blog": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-blog/-/plugin-content-blog-3.4.0.tgz",
"integrity": "sha512-vv6ZAj78ibR5Jh7XBUT4ndIjmlAxkijM3Sx5MAAzC1gyv0vupDQNhzuFg1USQmQVj3P5I6bquk12etPV3LJ+Xw==",
"license": "MIT",
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-blog/-/plugin-content-blog-3.3.2.tgz",
"integrity": "sha512-fJU+dmqp231LnwDJv+BHVWft8pcUS2xVPZdeYH6/ibH1s2wQ/sLcmUrGWyIv/Gq9Ptj8XWjRPMghlxghuPPoxg==",
"dependencies": {
"@docusaurus/core": "3.4.0",
"@docusaurus/logger": "3.4.0",
"@docusaurus/mdx-loader": "3.4.0",
"@docusaurus/types": "3.4.0",
"@docusaurus/utils": "3.4.0",
"@docusaurus/utils-common": "3.4.0",
"@docusaurus/utils-validation": "3.4.0",
"@docusaurus/core": "3.3.2",
"@docusaurus/logger": "3.3.2",
"@docusaurus/mdx-loader": "3.3.2",
"@docusaurus/types": "3.3.2",
"@docusaurus/utils": "3.3.2",
"@docusaurus/utils-common": "3.3.2",
"@docusaurus/utils-validation": "3.3.2",
"cheerio": "^1.0.0-rc.12",
"feed": "^4.2.2",
"fs-extra": "^11.1.1",
@@ -2359,19 +2353,18 @@
}
},
"node_modules/@docusaurus/plugin-content-docs": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-docs/-/plugin-content-docs-3.4.0.tgz",
"integrity": "sha512-HkUCZffhBo7ocYheD9oZvMcDloRnGhBMOZRyVcAQRFmZPmNqSyISlXA1tQCIxW+r478fty97XXAGjNYzBjpCsg==",
"license": "MIT",
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-docs/-/plugin-content-docs-3.3.2.tgz",
"integrity": "sha512-Dm1ri2VlGATTN3VGk1ZRqdRXWa1UlFubjaEL6JaxaK7IIFqN/Esjpl+Xw10R33loHcRww/H76VdEeYayaL76eg==",
"dependencies": {
"@docusaurus/core": "3.4.0",
"@docusaurus/logger": "3.4.0",
"@docusaurus/mdx-loader": "3.4.0",
"@docusaurus/module-type-aliases": "3.4.0",
"@docusaurus/types": "3.4.0",
"@docusaurus/utils": "3.4.0",
"@docusaurus/utils-common": "3.4.0",
"@docusaurus/utils-validation": "3.4.0",
"@docusaurus/core": "3.3.2",
"@docusaurus/logger": "3.3.2",
"@docusaurus/mdx-loader": "3.3.2",
"@docusaurus/module-type-aliases": "3.3.2",
"@docusaurus/types": "3.3.2",
"@docusaurus/utils": "3.3.2",
"@docusaurus/utils-common": "3.3.2",
"@docusaurus/utils-validation": "3.3.2",
"@types/react-router-config": "^5.0.7",
"combine-promises": "^1.1.0",
"fs-extra": "^11.1.1",
@@ -2390,16 +2383,15 @@
}
},
"node_modules/@docusaurus/plugin-content-pages": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-pages/-/plugin-content-pages-3.4.0.tgz",
"integrity": "sha512-h2+VN/0JjpR8fIkDEAoadNjfR3oLzB+v1qSXbIAKjQ46JAHx3X22n9nqS+BWSQnTnp1AjkjSvZyJMekmcwxzxg==",
"license": "MIT",
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-pages/-/plugin-content-pages-3.3.2.tgz",
"integrity": "sha512-EKc9fQn5H2+OcGER8x1aR+7URtAGWySUgULfqE/M14+rIisdrBstuEZ4lUPDRrSIexOVClML82h2fDS+GSb8Ew==",
"dependencies": {
"@docusaurus/core": "3.4.0",
"@docusaurus/mdx-loader": "3.4.0",
"@docusaurus/types": "3.4.0",
"@docusaurus/utils": "3.4.0",
"@docusaurus/utils-validation": "3.4.0",
"@docusaurus/core": "3.3.2",
"@docusaurus/mdx-loader": "3.3.2",
"@docusaurus/types": "3.3.2",
"@docusaurus/utils": "3.3.2",
"@docusaurus/utils-validation": "3.3.2",
"fs-extra": "^11.1.1",
"tslib": "^2.6.0",
"webpack": "^5.88.1"
@@ -2413,14 +2405,13 @@
}
},
"node_modules/@docusaurus/plugin-debug": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-debug/-/plugin-debug-3.4.0.tgz",
"integrity": "sha512-uV7FDUNXGyDSD3PwUaf5YijX91T5/H9SX4ErEcshzwgzWwBtK37nUWPU3ZLJfeTavX3fycTOqk9TglpOLaWkCg==",
"license": "MIT",
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-debug/-/plugin-debug-3.3.2.tgz",
"integrity": "sha512-oBIBmwtaB+YS0XlmZ3gCO+cMbsGvIYuAKkAopoCh0arVjtlyPbejzPrHuCoRHB9G7abjNZw7zoONOR8+8LM5+Q==",
"dependencies": {
"@docusaurus/core": "3.4.0",
"@docusaurus/types": "3.4.0",
"@docusaurus/utils": "3.4.0",
"@docusaurus/core": "3.3.2",
"@docusaurus/types": "3.3.2",
"@docusaurus/utils": "3.3.2",
"fs-extra": "^11.1.1",
"react-json-view-lite": "^1.2.0",
"tslib": "^2.6.0"
@@ -2434,14 +2425,13 @@
}
},
"node_modules/@docusaurus/plugin-google-analytics": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-3.4.0.tgz",
"integrity": "sha512-mCArluxEGi3cmYHqsgpGGt3IyLCrFBxPsxNZ56Mpur0xSlInnIHoeLDH7FvVVcPJRPSQ9/MfRqLsainRw+BojA==",
"license": "MIT",
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-3.3.2.tgz",
"integrity": "sha512-jXhrEIhYPSClMBK6/IA8qf1/FBoxqGXZvg7EuBax9HaK9+kL3L0TJIlatd8jQJOMtds8mKw806TOCc3rtEad1A==",
"dependencies": {
"@docusaurus/core": "3.4.0",
"@docusaurus/types": "3.4.0",
"@docusaurus/utils-validation": "3.4.0",
"@docusaurus/core": "3.3.2",
"@docusaurus/types": "3.3.2",
"@docusaurus/utils-validation": "3.3.2",
"tslib": "^2.6.0"
},
"engines": {
@@ -2453,14 +2443,13 @@
}
},
"node_modules/@docusaurus/plugin-google-gtag": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-3.4.0.tgz",
"integrity": "sha512-Dsgg6PLAqzZw5wZ4QjUYc8Z2KqJqXxHxq3vIoyoBWiLEEfigIs7wHR+oiWUQy3Zk9MIk6JTYj7tMoQU0Jm3nqA==",
"license": "MIT",
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-3.3.2.tgz",
"integrity": "sha512-vcrKOHGbIDjVnNMrfbNpRQR1x6Jvcrb48kVzpBAOsKbj9rXZm/idjVAXRaewwobHdOrJkfWS/UJoxzK8wyLRBQ==",
"dependencies": {
"@docusaurus/core": "3.4.0",
"@docusaurus/types": "3.4.0",
"@docusaurus/utils-validation": "3.4.0",
"@docusaurus/core": "3.3.2",
"@docusaurus/types": "3.3.2",
"@docusaurus/utils-validation": "3.3.2",
"@types/gtag.js": "^0.0.12",
"tslib": "^2.6.0"
},
@@ -2473,14 +2462,13 @@
}
},
"node_modules/@docusaurus/plugin-google-tag-manager": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-tag-manager/-/plugin-google-tag-manager-3.4.0.tgz",
"integrity": "sha512-O9tX1BTwxIhgXpOLpFDueYA9DWk69WCbDRrjYoMQtFHSkTyE7RhNgyjSPREUWJb9i+YUg3OrsvrBYRl64FCPCQ==",
"license": "MIT",
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-tag-manager/-/plugin-google-tag-manager-3.3.2.tgz",
"integrity": "sha512-ldkR58Fdeks0vC+HQ+L+bGFSJsotQsipXD+iKXQFvkOfmPIV6QbHRd7IIcm5b6UtwOiK33PylNS++gjyLUmaGw==",
"dependencies": {
"@docusaurus/core": "3.4.0",
"@docusaurus/types": "3.4.0",
"@docusaurus/utils-validation": "3.4.0",
"@docusaurus/core": "3.3.2",
"@docusaurus/types": "3.3.2",
"@docusaurus/utils-validation": "3.3.2",
"tslib": "^2.6.0"
},
"engines": {
@@ -2492,17 +2480,16 @@
}
},
"node_modules/@docusaurus/plugin-sitemap": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-sitemap/-/plugin-sitemap-3.4.0.tgz",
"integrity": "sha512-+0VDvx9SmNrFNgwPoeoCha+tRoAjopwT0+pYO1xAbyLcewXSemq+eLxEa46Q1/aoOaJQ0qqHELuQM7iS2gp33Q==",
"license": "MIT",
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-sitemap/-/plugin-sitemap-3.3.2.tgz",
"integrity": "sha512-/ZI1+bwZBhAgC30inBsHe3qY9LOZS+79fRGkNdTcGHRMcdAp6Vw2pCd1gzlxd/xU+HXsNP6cLmTOrggmRp3Ujg==",
"dependencies": {
"@docusaurus/core": "3.4.0",
"@docusaurus/logger": "3.4.0",
"@docusaurus/types": "3.4.0",
"@docusaurus/utils": "3.4.0",
"@docusaurus/utils-common": "3.4.0",
"@docusaurus/utils-validation": "3.4.0",
"@docusaurus/core": "3.3.2",
"@docusaurus/logger": "3.3.2",
"@docusaurus/types": "3.3.2",
"@docusaurus/utils": "3.3.2",
"@docusaurus/utils-common": "3.3.2",
"@docusaurus/utils-validation": "3.3.2",
"fs-extra": "^11.1.1",
"sitemap": "^7.1.1",
"tslib": "^2.6.0"
@@ -2516,24 +2503,23 @@
}
},
"node_modules/@docusaurus/preset-classic": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/preset-classic/-/preset-classic-3.4.0.tgz",
"integrity": "sha512-Ohj6KB7siKqZaQhNJVMBBUzT3Nnp6eTKqO+FXO3qu/n1hJl3YLwVKTWBg28LF7MWrKu46UuYavwMRxud0VyqHg==",
"license": "MIT",
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/preset-classic/-/preset-classic-3.3.2.tgz",
"integrity": "sha512-1SDS7YIUN1Pg3BmD6TOTjhB7RSBHJRpgIRKx9TpxqyDrJ92sqtZhomDc6UYoMMLQNF2wHFZZVGFjxJhw2VpL+Q==",
"dependencies": {
"@docusaurus/core": "3.4.0",
"@docusaurus/plugin-content-blog": "3.4.0",
"@docusaurus/plugin-content-docs": "3.4.0",
"@docusaurus/plugin-content-pages": "3.4.0",
"@docusaurus/plugin-debug": "3.4.0",
"@docusaurus/plugin-google-analytics": "3.4.0",
"@docusaurus/plugin-google-gtag": "3.4.0",
"@docusaurus/plugin-google-tag-manager": "3.4.0",
"@docusaurus/plugin-sitemap": "3.4.0",
"@docusaurus/theme-classic": "3.4.0",
"@docusaurus/theme-common": "3.4.0",
"@docusaurus/theme-search-algolia": "3.4.0",
"@docusaurus/types": "3.4.0"
"@docusaurus/core": "3.3.2",
"@docusaurus/plugin-content-blog": "3.3.2",
"@docusaurus/plugin-content-docs": "3.3.2",
"@docusaurus/plugin-content-pages": "3.3.2",
"@docusaurus/plugin-debug": "3.3.2",
"@docusaurus/plugin-google-analytics": "3.3.2",
"@docusaurus/plugin-google-gtag": "3.3.2",
"@docusaurus/plugin-google-tag-manager": "3.3.2",
"@docusaurus/plugin-sitemap": "3.3.2",
"@docusaurus/theme-classic": "3.3.2",
"@docusaurus/theme-common": "3.3.2",
"@docusaurus/theme-search-algolia": "3.3.2",
"@docusaurus/types": "3.3.2"
},
"engines": {
"node": ">=18.0"
@@ -2544,23 +2530,22 @@
}
},
"node_modules/@docusaurus/theme-classic": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/theme-classic/-/theme-classic-3.4.0.tgz",
"integrity": "sha512-0IPtmxsBYv2adr1GnZRdMkEQt1YW6tpzrUPj02YxNpvJ5+ju4E13J5tB4nfdaen/tfR1hmpSPlTFPvTf4kwy8Q==",
"license": "MIT",
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/theme-classic/-/theme-classic-3.3.2.tgz",
"integrity": "sha512-gepHFcsluIkPb4Im9ukkiO4lXrai671wzS3cKQkY9BXQgdVwsdPf/KS0Vs4Xlb0F10fTz+T3gNjkxNEgSN9M0A==",
"dependencies": {
"@docusaurus/core": "3.4.0",
"@docusaurus/mdx-loader": "3.4.0",
"@docusaurus/module-type-aliases": "3.4.0",
"@docusaurus/plugin-content-blog": "3.4.0",
"@docusaurus/plugin-content-docs": "3.4.0",
"@docusaurus/plugin-content-pages": "3.4.0",
"@docusaurus/theme-common": "3.4.0",
"@docusaurus/theme-translations": "3.4.0",
"@docusaurus/types": "3.4.0",
"@docusaurus/utils": "3.4.0",
"@docusaurus/utils-common": "3.4.0",
"@docusaurus/utils-validation": "3.4.0",
"@docusaurus/core": "3.3.2",
"@docusaurus/mdx-loader": "3.3.2",
"@docusaurus/module-type-aliases": "3.3.2",
"@docusaurus/plugin-content-blog": "3.3.2",
"@docusaurus/plugin-content-docs": "3.3.2",
"@docusaurus/plugin-content-pages": "3.3.2",
"@docusaurus/theme-common": "3.3.2",
"@docusaurus/theme-translations": "3.3.2",
"@docusaurus/types": "3.3.2",
"@docusaurus/utils": "3.3.2",
"@docusaurus/utils-common": "3.3.2",
"@docusaurus/utils-validation": "3.3.2",
"@mdx-js/react": "^3.0.0",
"clsx": "^2.0.0",
"copy-text-to-clipboard": "^3.2.0",
@@ -2584,18 +2569,17 @@
}
},
"node_modules/@docusaurus/theme-common": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/theme-common/-/theme-common-3.4.0.tgz",
"integrity": "sha512-0A27alXuv7ZdCg28oPE8nH/Iz73/IUejVaCazqu9elS4ypjiLhK3KfzdSQBnL/g7YfHSlymZKdiOHEo8fJ0qMA==",
"license": "MIT",
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/theme-common/-/theme-common-3.3.2.tgz",
"integrity": "sha512-kXqSaL/sQqo4uAMQ4fHnvRZrH45Xz2OdJ3ABXDS7YVGPSDTBC8cLebFrRR4YF9EowUHto1UC/EIklJZQMG/usA==",
"dependencies": {
"@docusaurus/mdx-loader": "3.4.0",
"@docusaurus/module-type-aliases": "3.4.0",
"@docusaurus/plugin-content-blog": "3.4.0",
"@docusaurus/plugin-content-docs": "3.4.0",
"@docusaurus/plugin-content-pages": "3.4.0",
"@docusaurus/utils": "3.4.0",
"@docusaurus/utils-common": "3.4.0",
"@docusaurus/mdx-loader": "3.3.2",
"@docusaurus/module-type-aliases": "3.3.2",
"@docusaurus/plugin-content-blog": "3.3.2",
"@docusaurus/plugin-content-docs": "3.3.2",
"@docusaurus/plugin-content-pages": "3.3.2",
"@docusaurus/utils": "3.3.2",
"@docusaurus/utils-common": "3.3.2",
"@types/history": "^4.7.11",
"@types/react": "*",
"@types/react-router-config": "*",
@@ -2614,19 +2598,18 @@
}
},
"node_modules/@docusaurus/theme-search-algolia": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/theme-search-algolia/-/theme-search-algolia-3.4.0.tgz",
"integrity": "sha512-aiHFx7OCw4Wck1z6IoShVdUWIjntC8FHCw9c5dR8r3q4Ynh+zkS8y2eFFunN/DL6RXPzpnvKCg3vhLQYJDmT9Q==",
"license": "MIT",
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/theme-search-algolia/-/theme-search-algolia-3.3.2.tgz",
"integrity": "sha512-qLkfCl29VNBnF1MWiL9IyOQaHxUvicZp69hISyq/xMsNvFKHFOaOfk9xezYod2Q9xx3xxUh9t/QPigIei2tX4w==",
"dependencies": {
"@docsearch/react": "^3.5.2",
"@docusaurus/core": "3.4.0",
"@docusaurus/logger": "3.4.0",
"@docusaurus/plugin-content-docs": "3.4.0",
"@docusaurus/theme-common": "3.4.0",
"@docusaurus/theme-translations": "3.4.0",
"@docusaurus/utils": "3.4.0",
"@docusaurus/utils-validation": "3.4.0",
"@docusaurus/core": "3.3.2",
"@docusaurus/logger": "3.3.2",
"@docusaurus/plugin-content-docs": "3.3.2",
"@docusaurus/theme-common": "3.3.2",
"@docusaurus/theme-translations": "3.3.2",
"@docusaurus/utils": "3.3.2",
"@docusaurus/utils-validation": "3.3.2",
"algoliasearch": "^4.18.0",
"algoliasearch-helper": "^3.13.3",
"clsx": "^2.0.0",
@@ -2645,10 +2628,9 @@
}
},
"node_modules/@docusaurus/theme-translations": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/theme-translations/-/theme-translations-3.4.0.tgz",
"integrity": "sha512-zSxCSpmQCCdQU5Q4CnX/ID8CSUUI3fvmq4hU/GNP/XoAWtXo9SAVnM3TzpU8Gb//H3WCsT8mJcTfyOk3d9ftNg==",
"license": "MIT",
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/theme-translations/-/theme-translations-3.3.2.tgz",
"integrity": "sha512-bPuiUG7Z8sNpGuTdGnmKl/oIPeTwKr0AXLGu9KaP6+UFfRZiyWbWE87ti97RrevB2ffojEdvchNujparR3jEZQ==",
"dependencies": {
"fs-extra": "^11.1.1",
"tslib": "^2.6.0"
@@ -2658,10 +2640,9 @@
}
},
"node_modules/@docusaurus/types": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/types/-/types-3.4.0.tgz",
"integrity": "sha512-4jcDO8kXi5Cf9TcyikB/yKmz14f2RZ2qTRerbHAsS+5InE9ZgSLBNLsewtFTcTOXSVcbU3FoGOzcNWAmU1TR0A==",
"license": "MIT",
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/types/-/types-3.3.2.tgz",
"integrity": "sha512-5p201S7AZhliRxTU7uMKtSsoC8mgPA9bs9b5NQg1IRdRxJfflursXNVsgc3PcMqiUTul/v1s3k3rXXFlRE890w==",
"dependencies": {
"@mdx-js/mdx": "^3.0.0",
"@types/history": "^4.7.11",
@@ -2679,13 +2660,12 @@
}
},
"node_modules/@docusaurus/utils": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/utils/-/utils-3.4.0.tgz",
"integrity": "sha512-fRwnu3L3nnWaXOgs88BVBmG1yGjcQqZNHG+vInhEa2Sz2oQB+ZjbEMO5Rh9ePFpZ0YDiDUhpaVjwmS+AU2F14g==",
"license": "MIT",
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/utils/-/utils-3.3.2.tgz",
"integrity": "sha512-f4YMnBVymtkSxONv4Y8js3Gez9IgHX+Lcg6YRMOjVbq8sgCcdYK1lf6SObAuz5qB/mxiSK7tW0M9aaiIaUSUJg==",
"dependencies": {
"@docusaurus/logger": "3.4.0",
"@docusaurus/utils-common": "3.4.0",
"@docusaurus/logger": "3.3.2",
"@docusaurus/utils-common": "3.3.2",
"@svgr/webpack": "^8.1.0",
"escape-string-regexp": "^4.0.0",
"file-loader": "^6.2.0",
@@ -2702,7 +2682,6 @@
"shelljs": "^0.8.5",
"tslib": "^2.6.0",
"url-loader": "^4.1.1",
"utility-types": "^3.10.0",
"webpack": "^5.88.1"
},
"engines": {
@@ -2718,10 +2697,9 @@
}
},
"node_modules/@docusaurus/utils-common": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/utils-common/-/utils-common-3.4.0.tgz",
"integrity": "sha512-NVx54Wr4rCEKsjOH5QEVvxIqVvm+9kh7q8aYTU5WzUU9/Hctd6aTrcZ3G0Id4zYJ+AeaG5K5qHA4CY5Kcm2iyQ==",
"license": "MIT",
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/utils-common/-/utils-common-3.3.2.tgz",
"integrity": "sha512-QWFTLEkPYsejJsLStgtmetMFIA3pM8EPexcZ4WZ7b++gO5jGVH7zsipREnCHzk6+eDgeaXfkR6UPaTt86bp8Og==",
"dependencies": {
"tslib": "^2.6.0"
},
@@ -2738,18 +2716,15 @@
}
},
"node_modules/@docusaurus/utils-validation": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/utils-validation/-/utils-validation-3.4.0.tgz",
"integrity": "sha512-hYQ9fM+AXYVTWxJOT1EuNaRnrR2WGpRdLDQG07O8UOpsvCPWUVOeo26Rbm0JWY2sGLfzAb+tvJ62yF+8F+TV0g==",
"license": "MIT",
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/utils-validation/-/utils-validation-3.3.2.tgz",
"integrity": "sha512-itDgFs5+cbW9REuC7NdXals4V6++KifgVMzoGOOOSIifBQw+8ULhy86u5e1lnptVL0sv8oAjq2alO7I40GR7pA==",
"dependencies": {
"@docusaurus/logger": "3.4.0",
"@docusaurus/utils": "3.4.0",
"@docusaurus/utils-common": "3.4.0",
"fs-extra": "^11.2.0",
"@docusaurus/logger": "3.3.2",
"@docusaurus/utils": "3.3.2",
"@docusaurus/utils-common": "3.3.2",
"joi": "^17.9.2",
"js-yaml": "^4.1.0",
"lodash": "^4.17.21",
"tslib": "^2.6.0"
},
"engines": {
@@ -12640,10 +12615,9 @@
}
},
"node_modules/picocolors": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz",
"integrity": "sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==",
"license": "ISC"
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz",
"integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ=="
},
"node_modules/picomatch": {
"version": "2.3.1",
@@ -12754,9 +12728,9 @@
}
},
"node_modules/postcss": {
"version": "8.4.39",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.39.tgz",
"integrity": "sha512-0vzE+lAiG7hZl1/9I8yzKLx3aR9Xbof3fBHKunvMfOCYAtMhrsnccJY2iTURb9EZd5+pLuiNV9/c/GZJOHsgIw==",
"version": "8.4.38",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.38.tgz",
"integrity": "sha512-Wglpdk03BSfXkHoQa3b/oulrotAkwrlLDRSOb9D0bN86FdRyE9lppSp33aHNPgBa0JKCoB+drFLZkQoRRYae5A==",
"funding": [
{
"type": "opencollective",
@@ -12771,10 +12745,9 @@
"url": "https://github.com/sponsors/ai"
}
],
"license": "MIT",
"dependencies": {
"nanoid": "^3.3.7",
"picocolors": "^1.0.1",
"picocolors": "^1.0.0",
"source-map-js": "^1.2.0"
},
"engines": {
@@ -13600,11 +13573,10 @@
}
},
"node_modules/prettier": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.3.2.tgz",
"integrity": "sha512-rAVeHYMcv8ATV5d508CFdn+8/pHPpXeIid1DdrPwXnaAdH7cqjVbpJaT5eq4yRAFU/lsbwYwSF/n5iNrdJHPQA==",
"version": "3.2.5",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.2.5.tgz",
"integrity": "sha512-3/GWa9aOC0YeD7LUfvOG2NiDyhOWRvt1k+rcKhOuYnMY24iiCphgneUfJDyFXd6rZCAnuLBv6UeAULtrhT/F4A==",
"dev": true,
"license": "MIT",
"bin": {
"prettier": "bin/prettier.cjs"
},
@@ -16014,10 +15986,9 @@
}
},
"node_modules/tailwindcss": {
"version": "3.4.4",
"resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.4.tgz",
"integrity": "sha512-ZoyXOdJjISB7/BcLTR6SEsLgKtDStYyYZVLsUtWChO4Ps20CBad7lfJKVDiejocV4ME1hLmyY0WJE3hSDcmQ2A==",
"license": "MIT",
"version": "3.4.3",
"resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.3.tgz",
"integrity": "sha512-U7sxQk/n397Bmx4JHbJx/iSOOv5G+II3f1kpLpY2QeUv5DcPdcTsYLlusZfq1NthHS1c1cZoyFmmkex1rzke0A==",
"dependencies": {
"@alloc/quick-lru": "^5.2.0",
"arg": "^5.0.2",
@@ -16054,7 +16025,6 @@
"version": "6.0.2",
"resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz",
"integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==",
"license": "ISC",
"dependencies": {
"is-glob": "^4.0.3"
},
@@ -16376,10 +16346,9 @@
}
},
"node_modules/typescript": {
"version": "5.5.3",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.3.tgz",
"integrity": "sha512-/hreyEujaB0w76zKo6717l3L0o/qEUtRgdvUBvlkhoWeOVMjMuHNHk0BRBzikzuGDqNmPQbg5ifMEqsHLiIUcQ==",
"license": "Apache-2.0",
"version": "5.4.5",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.5.tgz",
"integrity": "sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==",
"bin": {
"tsc": "bin/tsc",
"tsserver": "bin/tsserver"

View File

@@ -56,6 +56,6 @@
"node": ">=20"
},
"volta": {
"node": "20.15.0"
"node": "20.14.0"
}
}

View File

@@ -38,11 +38,6 @@ const guides: CommunityGuidesProps[] = [
description: 'Import your Google Photos files into Immich and add your albums',
url: 'https://github.com/immich-app/immich/discussions/1340',
},
{
title: 'Access Immich with custom domain',
description: 'Access your local Immich installation over the internet using your own domain',
url: 'https://github.com/ppr88/immich-guides/blob/main/open-immich-custom-domain.md',
},
];
function CommunityGuide({ title, description, url }: CommunityGuidesProps): JSX.Element {

View File

@@ -1,59 +0,0 @@
import '@docusaurus/theme-classic/lib/theme/Unlisted/index';
import { useWindowSize } from '@docusaurus/theme-common';
import DropdownNavbarItem from '@theme/NavbarItem/DropdownNavbarItem';
import React, { useEffect, useState } from 'react';
export default function VersionSwitcher(): JSX.Element {
const [versions, setVersions] = useState([]);
const [label, setLabel] = useState('Versions');
const windowSize = useWindowSize();
useEffect(() => {
async function getVersions() {
try {
let baseUrl = 'https://immich.app';
if (window.location.origin === 'http://localhost:3005') {
baseUrl = window.location.origin;
}
const response = await fetch(`${baseUrl}/archived-versions.json`);
const archiveVersions = await response.json();
const allVersions = [
{ label: 'Next', url: 'https://main.preview.immich.app' },
{ label: 'Latest', url: 'https://immich.app' },
...archiveVersions,
];
setVersions(allVersions);
const activeVersion = allVersions.find((version) => new URL(version.url).origin === window.location.origin);
if (activeVersion) {
setLabel(activeVersion.label);
}
} catch (error) {
console.error('Failed to fetch versions', error);
}
}
if (versions.length === 0) {
getVersions();
}
}, []);
return (
versions.length > 0 && (
<DropdownNavbarItem
className="navbar__item"
label={label}
mobile={windowSize === 'mobile'}
items={versions.map(({ label, url }) => ({
label,
to: url,
target: '_self',
}))}
/>
)
);
}

View File

@@ -8,6 +8,7 @@
@tailwind utilities;
@import url('https://fonts.googleapis.com/css2?family=Overpass:ital,wght@0,300;0,400;0,500;0,600;0,700;1,300;1,400;1,500;1,600;1,700&display=swap');
@import url('https://fonts.googleapis.com/css2?family=Snowburst+One&display=swap');
html,
button {
@@ -47,3 +48,7 @@ img {
div[class^='announcementBar_'] {
min-height: 2rem;
}
.navbar__brand .navbar__title {
@apply font-immich-title text-2xl font-normal text-immich-primary dark:text-immich-dark-primary;
}

View File

@@ -1,77 +0,0 @@
import { mdiCalendarToday, mdiLeadPencil, mdiLockOutline, mdiSpeedometerSlow, mdiWeb } from '@mdi/js';
import Layout from '@theme/Layout';
import React from 'react';
import { Item as TimelineItem, Timeline } from '../components/timeline';
const withLanguage = (date: Date) => (language: string) => date.toLocaleDateString(language);
type Item = Omit<TimelineItem, 'done' | 'getDateLabel'> & { date: Date };
const items: Item[] = [
{
icon: mdiLeadPencil,
iconColor: 'gold',
title: 'PostgreSQL NOTIFY is cursed',
description:
'PostgreSQL does everything in a transaction, including NOTIFY. This means using the socket.io postgres-adapter writes to WAL every 5 seconds.',
link: { url: 'https://github.com/immich-app/immich/pull/10801', text: '#10801' },
date: new Date(2024, 6, 3),
},
{
icon: mdiWeb,
iconColor: 'lightskyblue',
title: 'npm scripts are cursed',
description:
'npm scripts make a http call to the npm registry each time they run, which means they are a terrible way to execute a health check.',
link: { url: 'https://github.com/immich-app/immich/issues/10796', text: '#10796' },
date: new Date(2024, 6, 3),
},
{
icon: mdiSpeedometerSlow,
iconColor: 'brown',
title: '50 extra packages are cursed',
description:
'There is a user in the JavaScript community who goes around adding "backwards compatibility" to projects. They do this by adding 50 extra package dependencies to your project, which are maintained by them.',
link: { url: 'https://github.com/immich-app/immich/pull/10690', text: '#10690' },
date: new Date(2024, 5, 28),
},
{
icon: mdiLockOutline,
iconColor: 'gold',
title: 'Long passwords are cursed',
description:
'The bcrypt implementation only uses the first 72 bytes of a string. Any characters after that are ignored.',
// link: GHSA-4p64-9f7h-3432
date: new Date(2024, 5, 25),
},
{
icon: mdiCalendarToday,
iconColor: 'greenyellow',
title: 'JavaScript Date objects are cursed',
description: 'JavaScript date objects are 1 indexed for years and days, but 0 indexed for months.',
link: { url: 'https://github.com/immich-app/immich/pulls/6787', text: '#6787' },
date: new Date(2024, 0, 31),
},
];
export default function CursedKnowledgePage(): JSX.Element {
return (
<Layout title="Cursed Knowledge" description="Things we wish we didn't know">
<section className="my-8">
<h1 className="md:text-6xl text-center mb-10 text-immich-primary dark:text-immich-dark-primary px-2">
Cursed Knowledge
</h1>
<p className="text-center text-xl px-2">
Cursed knowledge we have learned as a result of building Immich that we wish we never knew.
</p>
<div className="flex justify-around mt-8 w-full max-w-full">
<Timeline
items={items
.sort((a, b) => b.date.getTime() - a.date.getTime())
.map((item) => ({ ...item, getDateLabel: withLanguage(item.date) }))}
/>
</div>
</section>
</Layout>
);
}

View File

@@ -36,7 +36,7 @@ function HomepageHeader() {
<Link
className="flex place-items-center place-content-center py-3 px-8 border bg-immich-dark-primary dark:bg-immich-primary rounded-full hover:no-underline text-immich-primary dark:text-immich-dark-bg font-bold uppercase"
to="https://discord.immich.app"
to="https://discord.gg/D8JsnBEuKb"
>
Discord
</Link>

View File

@@ -14,7 +14,6 @@ import {
mdiCheckboxMarked,
mdiCloudUploadOutline,
mdiCollage,
mdiContentDuplicate,
mdiDevices,
mdiEmailOutline,
mdiExpansionCard,
@@ -29,14 +28,12 @@ import {
mdiForum,
mdiHandshakeOutline,
mdiHeart,
mdiHistory,
mdiImage,
mdiImageAlbum,
mdiImageEdit,
mdiImageMultipleOutline,
mdiImageSearch,
mdiKeyboardSettingsOutline,
mdiLockOutline,
mdiMagnify,
mdiMagnifyScan,
mdiMap,
@@ -72,7 +69,6 @@ import React from 'react';
import { Item, Timeline } from '../components/timeline';
const releases = {
'v1.106.1': new Date(2024, 5, 11),
'v1.104.0': new Date(2024, 4, 13),
'v1.103.0': new Date(2024, 3, 29),
'v1.102.0': new Date(2024, 3, 15),
@@ -161,14 +157,6 @@ const withRelease = ({
};
const roadmap: Item[] = [
{
done: false,
icon: mdiLockOutline,
iconColor: 'sandybrown',
title: 'Private/locked photos',
description: 'Private assets with extra protections',
getDateLabel: () => 'Planned for 2024',
},
{
done: false,
icon: mdiRocketLaunch,
@@ -209,6 +197,14 @@ const roadmap: Item[] = [
description: 'Granular access controls for users and api keys',
getDateLabel: () => 'Planned for 2024',
},
{
done: false,
icon: mdiWeb,
iconColor: 'royalblue',
title: 'Web translations',
description: 'Translate the web application to multiple languages',
getDateLabel: () => 'Planned for 2024',
},
{
done: false,
icon: mdiCameraBurst,
@@ -220,32 +216,13 @@ const roadmap: Item[] = [
];
const milestones: Item[] = [
withRelease({
icon: mdiHistory,
title: 'Versioned documentation',
description: 'View documentation as it was at the time of past releases',
release: 'v1.106.1',
}),
withRelease({
icon: mdiWeb,
iconColor: 'royalblue',
title: 'Web translations',
description: 'Translate the web application to multiple languages',
release: 'v1.106.1',
}),
withRelease({
icon: mdiContentDuplicate,
title: 'Similar image detection',
description: 'Detect duplicate assets that arent exactly identical',
release: 'v1.106.1',
}),
withRelease({
icon: mdiVectorCombine,
title: 'Container consolidation',
description:
'The microservices container can be run as a worker within the server image, allowing us to remove it from the default stack.',
release: 'v1.106.1',
}),
// withRelease({
// icon: mdiVectorCombine,
// title: 'Container consolidation',
// description:
// 'The microservices container can be run as a worker within the server image, allowing us to remove it from the default stack.',
// release: 'v1.106.0',
// }),
withRelease({
icon: mdiPencil,
iconColor: 'saddlebrown',

View File

@@ -1,7 +0,0 @@
import ComponentTypes from '@theme-original/NavbarItem/ComponentTypes';
import VersionSwitcher from '@site/src/components/version-switcher';
export default {
...ComponentTypes,
'custom-versionSwitcher': VersionSwitcher,
};

View File

@@ -1,74 +0,0 @@
[
{
"label": "v1.107.2",
"url": "https://v1.107.2.archive.immich.app"
},
{
"label": "v1.107.1",
"url": "https://v1.107.1.archive.immich.app"
},
{
"label": "v1.107.0",
"url": "https://v1.107.0.archive.immich.app"
},
{
"label": "v1.106.4",
"url": "https://v1.106.4.archive.immich.app"
},
{
"label": "v1.106.3",
"url": "https://v1.106.3.archive.immich.app"
},
{
"label": "v1.106.2",
"url": "https://v1.106.2.archive.immich.app"
},
{
"label": "v1.106.1",
"url": "https://v1.106.1.archive.immich.app"
},
{
"label": "v1.105.1",
"url": "https://v1.105.1.archive.immich.app/"
},
{
"label": "v1.105.0",
"url": "https://v1.105.0.archive.immich.app/"
},
{
"label": "v1.104.0",
"url": "https://v1.104.0.archive.immich.app/"
},
{
"label": "v1.103.1",
"url": "https://v1.103.1.archive.immich.app/"
},
{
"label": "v1.103.0",
"url": "https://v1.103.0.archive.immich.app/"
},
{
"label": "v1.102.3",
"url": "https://v1.102.3.archive.immich.app/"
},
{
"label": "v1.102.2",
"url": "https://v1.102.2.archive.immich.app/"
},
{
"label": "v1.102.1",
"url": "https://v1.102.1.archive.immich.app/"
},
{
"label": "v1.102.0",
"url": "https://v1.102.0.archive.immich.app/"
},
{
"label": "v1.101.0",
"url": "https://v1.101.0.archive.immich.app/"
},
{
"label": "v1.100.0",
"url": "https://v1.100.0.archive.immich.app/"
}
]

View File

@@ -21,6 +21,9 @@ module.exports = {
'immich-dark-fg': '#e5e7eb',
'immich-dark-gray': '#212121',
},
fontFamily: {
'immich-title': ['Snowburst One', 'cursive'],
},
},
},
plugins: [],


@@ -1 +1 @@
20.15
20.14


@@ -10,11 +10,6 @@ services:
build:
context: ../
dockerfile: server/Dockerfile
args:
- BUILD_ID=1234567890
- BUILD_IMAGE=e2e
- BUILD_SOURCE_REF=e2e
- BUILD_SOURCE_COMMIT=e2eeeeeeeeeeeeeeeeee
environment:
- DB_HOSTNAME=database
- DB_USERNAME=postgres
@@ -22,7 +17,6 @@ services:
- DB_DATABASE_NAME=immich
- IMMICH_MACHINE_LEARNING_ENABLED=false
- IMMICH_METRICS=true
- IMMICH_ENV=testing
volumes:
- upload:/usr/src/app/upload
- ./test-assets:/test-assets
@@ -33,7 +27,7 @@ services:
- 2283:3001
redis:
image: redis:6.2-alpine@sha256:328fe6a5822256d065debb36617a8169dbfbd77b797c525288e465f56c1d392b
image: redis:6.2-alpine@sha256:d6c2911ac51b289db208767581a5d154544f2b2fe4914ea5056443f62dc6e900
database:
image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0

243 e2e/package-lock.json generated

@@ -1,19 +1,19 @@
{
"name": "immich-e2e",
"version": "1.107.2",
"version": "1.105.1",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "immich-e2e",
"version": "1.107.2",
"version": "1.105.1",
"license": "GNU Affero General Public License version 3",
"devDependencies": {
"@immich/cli": "file:../cli",
"@immich/sdk": "file:../open-api/typescript-sdk",
"@playwright/test": "^1.44.1",
"@types/luxon": "^3.4.2",
"@types/node": "^20.14.9",
"@types/node": "^20.11.17",
"@types/pg": "^8.11.0",
"@types/pngjs": "^6.0.4",
"@types/supertest": "^6.0.2",
@@ -23,13 +23,13 @@
"eslint": "^8.57.0",
"eslint-config-prettier": "^9.1.0",
"eslint-plugin-prettier": "^5.1.3",
"eslint-plugin-unicorn": "^54.0.0",
"exiftool-vendored": "^27.0.0",
"eslint-plugin-unicorn": "^53.0.0",
"exiftool-vendored": "^26.0.0",
"luxon": "^3.4.4",
"pg": "^8.11.3",
"pngjs": "^7.0.0",
"prettier": "^3.2.5",
"prettier-plugin-organize-imports": "^4.0.0",
"prettier-plugin-organize-imports": "^3.2.4",
"socket.io-client": "^4.7.4",
"supertest": "^7.0.0",
"typescript": "^5.3.3",
@@ -39,7 +39,7 @@
},
"../cli": {
"name": "@immich/cli",
"version": "2.2.7",
"version": "2.2.0",
"dev": true,
"license": "GNU Affero General Public License version 3",
"dependencies": {
@@ -55,7 +55,7 @@
"@types/cli-progress": "^3.11.0",
"@types/lodash-es": "^4.17.12",
"@types/mock-fs": "^4.13.1",
"@types/node": "^20.14.9",
"@types/node": "^20.3.1",
"@typescript-eslint/eslint-plugin": "^7.0.0",
"@typescript-eslint/parser": "^7.0.0",
"@vitest/coverage-v8": "^1.2.2",
@@ -65,10 +65,10 @@
"eslint": "^8.56.0",
"eslint-config-prettier": "^9.1.0",
"eslint-plugin-prettier": "^5.1.3",
"eslint-plugin-unicorn": "^54.0.0",
"eslint-plugin-unicorn": "^53.0.0",
"mock-fs": "^5.2.0",
"prettier": "^3.2.5",
"prettier-plugin-organize-imports": "^4.0.0",
"prettier-plugin-organize-imports": "^3.2.4",
"typescript": "^5.3.3",
"vite": "^5.0.12",
"vite-tsconfig-paths": "^4.3.2",
@@ -81,14 +81,14 @@
},
"../open-api/typescript-sdk": {
"name": "@immich/sdk",
"version": "1.107.2",
"version": "1.105.1",
"dev": true,
"license": "GNU Affero General Public License version 3",
"dependencies": {
"@oazapfts/runtime": "^1.0.2"
},
"devDependencies": {
"@types/node": "^20.14.9",
"@types/node": "^20.12.13",
"typescript": "^5.3.3"
}
},
@@ -971,19 +971,18 @@
}
},
"node_modules/@playwright/test": {
"version": "1.45.1",
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.45.1.tgz",
"integrity": "sha512-Wo1bWTzQvGA7LyKGIZc8nFSTFf2TkthGIFBR+QVNilvwouGzFd4PYukZe3rvf5PSqjHi1+1NyKSDZKcQWETzaA==",
"version": "1.44.1",
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.44.1.tgz",
"integrity": "sha512-1hZ4TNvD5z9VuhNJ/walIjvMVvYkZKf71axoF/uiAqpntQJXpG64dlXhoDXE3OczPuTuvjf/M5KWFg5VAVUS3Q==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
"playwright": "1.45.1"
"playwright": "1.44.1"
},
"bin": {
"playwright": "cli.js"
},
"engines": {
"node": ">=18"
"node": ">=16"
}
},
"node_modules/@rollup/rollup-android-arm-eabi": {
@@ -1231,9 +1230,9 @@
"dev": true
},
"node_modules/@types/node": {
"version": "20.14.9",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.14.9.tgz",
"integrity": "sha512-06OCtnTXtWOZBJlRApleWndH4JsRVs1pDCc8dLSQp+7PpUpX3ePdHyeNSFTeSe7FtKyQkrlPvHwJOW3SLd8Oyg==",
"version": "20.12.13",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.12.13.tgz",
"integrity": "sha512-gBGeanV41c1L171rR7wjbMiEpEI/l5XFQdLLfhr/REwpgDy/4U8y89+i8kRiLzDyZdOkXh+cRaTetUnCYutoXA==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -1345,17 +1344,17 @@
}
},
"node_modules/@typescript-eslint/eslint-plugin": {
"version": "7.15.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.15.0.tgz",
"integrity": "sha512-uiNHpyjZtFrLwLDpHnzaDlP3Tt6sGMqTCiqmxaN4n4RP0EfYZDODJyddiFDF44Hjwxr5xAcaYxVKm9QKQFJFLA==",
"version": "7.11.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.11.0.tgz",
"integrity": "sha512-P+qEahbgeHW4JQ/87FuItjBj8O3MYv5gELDzr8QaQ7fsll1gSMTYb6j87MYyxwf3DtD7uGFB9ShwgmCJB5KmaQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@eslint-community/regexpp": "^4.10.0",
"@typescript-eslint/scope-manager": "7.15.0",
"@typescript-eslint/type-utils": "7.15.0",
"@typescript-eslint/utils": "7.15.0",
"@typescript-eslint/visitor-keys": "7.15.0",
"@typescript-eslint/scope-manager": "7.11.0",
"@typescript-eslint/type-utils": "7.11.0",
"@typescript-eslint/utils": "7.11.0",
"@typescript-eslint/visitor-keys": "7.11.0",
"graphemer": "^1.4.0",
"ignore": "^5.3.1",
"natural-compare": "^1.4.0",
@@ -1379,16 +1378,16 @@
}
},
"node_modules/@typescript-eslint/parser": {
"version": "7.15.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.15.0.tgz",
"integrity": "sha512-k9fYuQNnypLFcqORNClRykkGOMOj+pV6V91R4GO/l1FDGwpqmSwoOQrOHo3cGaH63e+D3ZiCAOsuS/D2c99j/A==",
"version": "7.11.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.11.0.tgz",
"integrity": "sha512-yimw99teuaXVWsBcPO1Ais02kwJ1jmNA1KxE7ng0aT7ndr1pT1wqj0OJnsYVGKKlc4QJai86l/025L6z8CljOg==",
"dev": true,
"license": "BSD-2-Clause",
"dependencies": {
"@typescript-eslint/scope-manager": "7.15.0",
"@typescript-eslint/types": "7.15.0",
"@typescript-eslint/typescript-estree": "7.15.0",
"@typescript-eslint/visitor-keys": "7.15.0",
"@typescript-eslint/scope-manager": "7.11.0",
"@typescript-eslint/types": "7.11.0",
"@typescript-eslint/typescript-estree": "7.11.0",
"@typescript-eslint/visitor-keys": "7.11.0",
"debug": "^4.3.4"
},
"engines": {
@@ -1408,14 +1407,14 @@
}
},
"node_modules/@typescript-eslint/scope-manager": {
"version": "7.15.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.15.0.tgz",
"integrity": "sha512-Q/1yrF/XbxOTvttNVPihxh1b9fxamjEoz2Os/Pe38OHwxC24CyCqXxGTOdpb4lt6HYtqw9HetA/Rf6gDGaMPlw==",
"version": "7.11.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.11.0.tgz",
"integrity": "sha512-27tGdVEiutD4POirLZX4YzT180vevUURJl4wJGmm6TrQoiYwuxTIY98PBp6L2oN+JQxzE0URvYlzJaBHIekXAw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/types": "7.15.0",
"@typescript-eslint/visitor-keys": "7.15.0"
"@typescript-eslint/types": "7.11.0",
"@typescript-eslint/visitor-keys": "7.11.0"
},
"engines": {
"node": "^18.18.0 || >=20.0.0"
@@ -1426,14 +1425,14 @@
}
},
"node_modules/@typescript-eslint/type-utils": {
"version": "7.15.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.15.0.tgz",
"integrity": "sha512-SkgriaeV6PDvpA6253PDVep0qCqgbO1IOBiycjnXsszNTVQe5flN5wR5jiczoEoDEnAqYFSFFc9al9BSGVltkg==",
"version": "7.11.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.11.0.tgz",
"integrity": "sha512-WmppUEgYy+y1NTseNMJ6mCFxt03/7jTOy08bcg7bxJJdsM4nuhnchyBbE8vryveaJUf62noH7LodPSo5Z0WUCg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/typescript-estree": "7.15.0",
"@typescript-eslint/utils": "7.15.0",
"@typescript-eslint/typescript-estree": "7.11.0",
"@typescript-eslint/utils": "7.11.0",
"debug": "^4.3.4",
"ts-api-utils": "^1.3.0"
},
@@ -1454,9 +1453,9 @@
}
},
"node_modules/@typescript-eslint/types": {
"version": "7.15.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.15.0.tgz",
"integrity": "sha512-aV1+B1+ySXbQH0pLK0rx66I3IkiZNidYobyfn0WFsdGhSXw+P3YOqeTq5GED458SfB24tg+ux3S+9g118hjlTw==",
"version": "7.11.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.11.0.tgz",
"integrity": "sha512-MPEsDRZTyCiXkD4vd3zywDCifi7tatc4K37KqTprCvaXptP7Xlpdw0NR2hRJTetG5TxbWDB79Ys4kLmHliEo/w==",
"dev": true,
"license": "MIT",
"engines": {
@@ -1468,14 +1467,14 @@
}
},
"node_modules/@typescript-eslint/typescript-estree": {
"version": "7.15.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.15.0.tgz",
"integrity": "sha512-gjyB/rHAopL/XxfmYThQbXbzRMGhZzGw6KpcMbfe8Q3nNQKStpxnUKeXb0KiN/fFDR42Z43szs6rY7eHk0zdGQ==",
"version": "7.11.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.11.0.tgz",
"integrity": "sha512-cxkhZ2C/iyi3/6U9EPc5y+a6csqHItndvN/CzbNXTNrsC3/ASoYQZEt9uMaEp+xFNjasqQyszp5TumAVKKvJeQ==",
"dev": true,
"license": "BSD-2-Clause",
"dependencies": {
"@typescript-eslint/types": "7.15.0",
"@typescript-eslint/visitor-keys": "7.15.0",
"@typescript-eslint/types": "7.11.0",
"@typescript-eslint/visitor-keys": "7.11.0",
"debug": "^4.3.4",
"globby": "^11.1.0",
"is-glob": "^4.0.3",
@@ -1507,9 +1506,9 @@
}
},
"node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": {
"version": "9.0.5",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz",
"integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==",
"version": "9.0.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz",
"integrity": "sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==",
"dev": true,
"license": "ISC",
"dependencies": {
@@ -1523,16 +1522,16 @@
}
},
"node_modules/@typescript-eslint/utils": {
"version": "7.15.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.15.0.tgz",
"integrity": "sha512-hfDMDqaqOqsUVGiEPSMLR/AjTSCsmJwjpKkYQRo1FNbmW4tBwBspYDwO9eh7sKSTwMQgBw9/T4DHudPaqshRWA==",
"version": "7.11.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.11.0.tgz",
"integrity": "sha512-xlAWwPleNRHwF37AhrZurOxA1wyXowW4PqVXZVUNCLjB48CqdPJoJWkrpH2nij9Q3Lb7rtWindtoXwxjxlKKCA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@eslint-community/eslint-utils": "^4.4.0",
"@typescript-eslint/scope-manager": "7.15.0",
"@typescript-eslint/types": "7.15.0",
"@typescript-eslint/typescript-estree": "7.15.0"
"@typescript-eslint/scope-manager": "7.11.0",
"@typescript-eslint/types": "7.11.0",
"@typescript-eslint/typescript-estree": "7.11.0"
},
"engines": {
"node": "^18.18.0 || >=20.0.0"
@@ -1546,13 +1545,13 @@
}
},
"node_modules/@typescript-eslint/visitor-keys": {
"version": "7.15.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.15.0.tgz",
"integrity": "sha512-Hqgy/ETgpt2L5xueA/zHHIl4fJI2O4XUE9l4+OIfbJIRSnTJb/QscncdqqZzofQegIJugRIF57OJea1khw2SDw==",
"version": "7.11.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.11.0.tgz",
"integrity": "sha512-7syYk4MzjxTEk0g/w3iqtgxnFQspDJfn6QKD36xMuuhTzjcxY7F8EmBLnALjVyaOF1/bVocu3bS/2/F7rXrveQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/types": "7.15.0",
"@typescript-eslint/types": "7.11.0",
"eslint-visitor-keys": "^3.4.3"
},
"engines": {
@@ -1672,11 +1671,10 @@
"dev": true
},
"node_modules/acorn": {
"version": "8.12.0",
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.12.0.tgz",
"integrity": "sha512-RTvkC4w+KNXrM39/lWCUaG0IbRkWdCv7W/IOW9oU6SawyxulvkQy5HQPVTKxEjczcUvapcrw3cFx/60VN/NRNw==",
"version": "8.11.3",
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.11.3.tgz",
"integrity": "sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg==",
"dev": true,
"license": "MIT",
"bin": {
"acorn": "bin/acorn"
},
@@ -2278,15 +2276,15 @@
"dev": true
},
"node_modules/engine.io-client": {
"version": "6.5.4",
"resolved": "https://registry.npmjs.org/engine.io-client/-/engine.io-client-6.5.4.tgz",
"integrity": "sha512-GeZeeRjpD2qf49cZQ0Wvh/8NJNfeXkXXcoGh+F77oEAgo9gUHwT1fCRxSNU+YEEaysOJTnsFHmM5oAcPy4ntvQ==",
"version": "6.5.3",
"resolved": "https://registry.npmjs.org/engine.io-client/-/engine.io-client-6.5.3.tgz",
"integrity": "sha512-9Z0qLB0NIisTRt1DZ/8U2k12RJn8yls/nXMZLn+/N8hANT3TcYjKFKcwbw5zFQiN4NTde3TSY9zb79e1ij6j9Q==",
"dev": true,
"dependencies": {
"@socket.io/component-emitter": "~3.1.0",
"debug": "~4.3.1",
"engine.io-parser": "~5.2.1",
"ws": "~8.17.1",
"ws": "~8.11.0",
"xmlhttprequest-ssl": "~2.0.0"
}
},
@@ -2486,11 +2484,10 @@
}
},
"node_modules/eslint-plugin-unicorn": {
"version": "54.0.0",
"resolved": "https://registry.npmjs.org/eslint-plugin-unicorn/-/eslint-plugin-unicorn-54.0.0.tgz",
"integrity": "sha512-XxYLRiYtAWiAjPv6z4JREby1TAE2byBC7wlh0V4vWDCpccOSU1KovWV//jqPXF6bq3WKxqX9rdjoRQ1EhdmNdQ==",
"version": "53.0.0",
"resolved": "https://registry.npmjs.org/eslint-plugin-unicorn/-/eslint-plugin-unicorn-53.0.0.tgz",
"integrity": "sha512-kuTcNo9IwwUCfyHGwQFOK/HjJAYzbODHN3wP0PgqbW+jbXqpNWxNVpVhj2tO9SixBwuAdmal8rVcWKBxwFnGuw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/helper-validator-identifier": "^7.24.5",
"@eslint-community/eslint-utils": "^4.4.0",
@@ -2520,11 +2517,10 @@
}
},
"node_modules/eslint-plugin-unicorn/node_modules/@eslint/eslintrc": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.1.0.tgz",
"integrity": "sha512-4Bfj15dVJdoy3RfZmmo86RK1Fwzn6SstsvK9JS+BaVKqC6QQQQyXekNaC+g+LKNgkQ+2VhGAzm6hO40AhMR3zQ==",
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.0.2.tgz",
"integrity": "sha512-wV19ZEGEMAC1eHgrS7UQPqsdEiCIbTKTasEfcXAigzoXICcqZSjBZEHlZwNVvKg6UBCjSlos84XiLqsRJnIcIg==",
"dev": true,
"license": "MIT",
"dependencies": {
"ajv": "^6.12.4",
"debug": "^4.3.2",
@@ -2548,7 +2544,6 @@
"resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.0.0.tgz",
"integrity": "sha512-OtIRv/2GyiF6o/d8K7MYKKbXrOUBIK6SfkIRM4Z0dY3w+LiQ0vy3F57m0Z71bjbyeiWFiHJ8brqnmE6H6/jEuw==",
"dev": true,
"license": "Apache-2.0",
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
},
@@ -2557,13 +2552,12 @@
}
},
"node_modules/eslint-plugin-unicorn/node_modules/espree": {
"version": "10.1.0",
"resolved": "https://registry.npmjs.org/espree/-/espree-10.1.0.tgz",
"integrity": "sha512-M1M6CpiE6ffoigIOWYO9UDP8TMUw9kqb21tf+08IgDYjCsOvCuDt4jQcZmoYxx+w7zlKw9/N0KXfto+I8/FrXA==",
"version": "10.0.1",
"resolved": "https://registry.npmjs.org/espree/-/espree-10.0.1.tgz",
"integrity": "sha512-MWkrWZbJsL2UwnjxTX3gG8FneachS/Mwg7tdGXce011sJd5b0JG54vat5KHnfSBODZ3Wvzd2WnjxyzsRoVv+ww==",
"dev": true,
"license": "BSD-2-Clause",
"dependencies": {
"acorn": "^8.12.0",
"acorn": "^8.11.3",
"acorn-jsx": "^5.3.2",
"eslint-visitor-keys": "^4.0.0"
},
@@ -2579,7 +2573,6 @@
"resolved": "https://registry.npmjs.org/globals/-/globals-14.0.0.tgz",
"integrity": "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=18"
},
@@ -2707,9 +2700,9 @@
}
},
"node_modules/exiftool-vendored": {
"version": "27.0.0",
"resolved": "https://registry.npmjs.org/exiftool-vendored/-/exiftool-vendored-27.0.0.tgz",
"integrity": "sha512-/jHX8Jjadj0YJzpqnuBo1Yy2ln2hnRbBIc+3jcVOLQ6qhHEKsLRlfJ145Ghn7k/EcnfpDzVX3V8AUCTC8juTow==",
"version": "26.1.0",
"resolved": "https://registry.npmjs.org/exiftool-vendored/-/exiftool-vendored-26.1.0.tgz",
"integrity": "sha512-Bhy2Ia86Agt3+PbJJhWeVMqJNXl74XJ0Oygef5F5uCL13fTxlmF8dECHiChyx8bBc3sxIw+2Q3ehWunJh3bs6w==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -4134,11 +4127,10 @@
}
},
"node_modules/pg": {
"version": "8.12.0",
"resolved": "https://registry.npmjs.org/pg/-/pg-8.12.0.tgz",
"integrity": "sha512-A+LHUSnwnxrnL/tZ+OLfqR1SxLN3c/pgDztZ47Rpbsd4jUytsTtwQo/TLPRzPJMp/1pbhYVhH9cuSZLAajNfjQ==",
"version": "8.11.5",
"resolved": "https://registry.npmjs.org/pg/-/pg-8.11.5.tgz",
"integrity": "sha512-jqgNHSKL5cbDjFlHyYsCXmQDrfIX/3RsNwYqpd4N0Kt8niLuNoRNH+aazv6cOd43gPh9Y4DjQCtb+X0MH0Hvnw==",
"dev": true,
"license": "MIT",
"dependencies": {
"pg-connection-string": "^2.6.4",
"pg-pool": "^3.6.2",
@@ -4263,35 +4255,33 @@
}
},
"node_modules/playwright": {
"version": "1.45.1",
"resolved": "https://registry.npmjs.org/playwright/-/playwright-1.45.1.tgz",
"integrity": "sha512-Hjrgae4kpSQBr98nhCj3IScxVeVUixqj+5oyif8TdIn2opTCPEzqAqNMeK42i3cWDCVu9MI+ZsGWw+gVR4ISBg==",
"version": "1.44.1",
"resolved": "https://registry.npmjs.org/playwright/-/playwright-1.44.1.tgz",
"integrity": "sha512-qr/0UJ5CFAtloI3avF95Y0L1xQo6r3LQArLIg/z/PoGJ6xa+EwzrwO5lpNr/09STxdHuUoP2mvuELJS+hLdtgg==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
"playwright-core": "1.45.1"
"playwright-core": "1.44.1"
},
"bin": {
"playwright": "cli.js"
},
"engines": {
"node": ">=18"
"node": ">=16"
},
"optionalDependencies": {
"fsevents": "2.3.2"
}
},
"node_modules/playwright-core": {
"version": "1.45.1",
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.45.1.tgz",
"integrity": "sha512-LF4CUUtrUu2TCpDw4mcrAIuYrEjVDfT1cHbJMfwnE2+1b8PZcFzPNgvZCvq2JfQ4aTjRCCHw5EJ2tmr2NSzdPg==",
"version": "1.44.1",
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.44.1.tgz",
"integrity": "sha512-wh0JWtYTrhv1+OSsLPgFzGzt67Y7BE/ZS3jEqgGBlp2ppp1ZDj8c+9IARNW4dwf1poq5MgHreEM2KV/GuR4cFA==",
"dev": true,
"license": "Apache-2.0",
"bin": {
"playwright-core": "cli.js"
},
"engines": {
"node": ">=18"
"node": ">=16"
}
},
"node_modules/pluralize": {
@@ -4395,11 +4385,10 @@
}
},
"node_modules/prettier": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.3.2.tgz",
"integrity": "sha512-rAVeHYMcv8ATV5d508CFdn+8/pHPpXeIid1DdrPwXnaAdH7cqjVbpJaT5eq4yRAFU/lsbwYwSF/n5iNrdJHPQA==",
"version": "3.2.5",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.2.5.tgz",
"integrity": "sha512-3/GWa9aOC0YeD7LUfvOG2NiDyhOWRvt1k+rcKhOuYnMY24iiCphgneUfJDyFXd6rZCAnuLBv6UeAULtrhT/F4A==",
"dev": true,
"license": "MIT",
"bin": {
"prettier": "bin/prettier.cjs"
},
@@ -4423,22 +4412,21 @@
}
},
"node_modules/prettier-plugin-organize-imports": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/prettier-plugin-organize-imports/-/prettier-plugin-organize-imports-4.0.0.tgz",
"integrity": "sha512-vnKSdgv9aOlqKeEFGhf9SCBsTyzDSyScy1k7E0R1Uo4L0cTcOV7c1XQaT7jfXIOc/p08WLBfN2QUQA9zDSZMxA==",
"version": "3.2.4",
"resolved": "https://registry.npmjs.org/prettier-plugin-organize-imports/-/prettier-plugin-organize-imports-3.2.4.tgz",
"integrity": "sha512-6m8WBhIp0dfwu0SkgfOxJqh+HpdyfqSSLfKKRZSFbDuEQXDDndb8fTpRWkUrX/uBenkex3MgnVk0J3b3Y5byog==",
"dev": true,
"license": "MIT",
"peerDependencies": {
"@vue/language-plugin-pug": "^2.0.24",
"@volar/vue-language-plugin-pug": "^1.0.4",
"@volar/vue-typescript": "^1.0.4",
"prettier": ">=2.0",
"typescript": ">=2.9",
"vue-tsc": "^2.0.24"
"typescript": ">=2.9"
},
"peerDependenciesMeta": {
"@vue/language-plugin-pug": {
"@volar/vue-language-plugin-pug": {
"optional": true
},
"vue-tsc": {
"@volar/vue-typescript": {
"optional": true
}
}
@@ -5272,11 +5260,10 @@
}
},
"node_modules/typescript": {
"version": "5.5.3",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.3.tgz",
"integrity": "sha512-/hreyEujaB0w76zKo6717l3L0o/qEUtRgdvUBvlkhoWeOVMjMuHNHk0BRBzikzuGDqNmPQbg5ifMEqsHLiIUcQ==",
"version": "5.4.5",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.5.tgz",
"integrity": "sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==",
"dev": true,
"license": "Apache-2.0",
"bin": {
"tsc": "bin/tsc",
"tsserver": "bin/tsserver"
@@ -5585,16 +5572,16 @@
"dev": true
},
"node_modules/ws": {
"version": "8.17.1",
"resolved": "https://registry.npmjs.org/ws/-/ws-8.17.1.tgz",
"integrity": "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==",
"version": "8.11.0",
"resolved": "https://registry.npmjs.org/ws/-/ws-8.11.0.tgz",
"integrity": "sha512-HPG3wQd9sNQoT9xHyNCXoDUa+Xw/VevmY9FoHyQ+g+rrMn4j6FB4np7Z0OhdTgjx6MgQLK7jwSy1YecU1+4Asg==",
"dev": true,
"engines": {
"node": ">=10.0.0"
},
"peerDependencies": {
"bufferutil": "^4.0.1",
"utf-8-validate": ">=5.0.2"
"utf-8-validate": "^5.0.2"
},
"peerDependenciesMeta": {
"bufferutil": {


@@ -1,6 +1,6 @@
{
"name": "immich-e2e",
"version": "1.107.2",
"version": "1.105.1",
"description": "",
"main": "index.js",
"type": "module",
@@ -23,7 +23,7 @@
"@immich/sdk": "file:../open-api/typescript-sdk",
"@playwright/test": "^1.44.1",
"@types/luxon": "^3.4.2",
"@types/node": "^20.14.9",
"@types/node": "^20.11.17",
"@types/pg": "^8.11.0",
"@types/pngjs": "^6.0.4",
"@types/supertest": "^6.0.2",
@@ -33,13 +33,13 @@
"eslint": "^8.57.0",
"eslint-config-prettier": "^9.1.0",
"eslint-plugin-prettier": "^5.1.3",
"eslint-plugin-unicorn": "^54.0.0",
"exiftool-vendored": "^27.0.0",
"eslint-plugin-unicorn": "^53.0.0",
"exiftool-vendored": "^26.0.0",
"luxon": "^3.4.4",
"pg": "^8.11.3",
"pngjs": "^7.0.0",
"prettier": "^3.2.5",
"prettier-plugin-organize-imports": "^4.0.0",
"prettier-plugin-organize-imports": "^3.2.4",
"socket.io-client": "^4.7.4",
"supertest": "^7.0.0",
"typescript": "^5.3.3",
@@ -47,6 +47,6 @@
"vitest": "^1.3.0"
},
"volta": {
"node": "20.15.0"
"node": "20.14.0"
}
}


@@ -88,7 +88,7 @@ describe('/albums', () => {
});
await addAssetsToAlbum(
{ id: user2Albums[0].id, bulkIdsDto: { ids: [user1Asset1.id, user1Asset2.id] } },
{ id: user2Albums[0].id, bulkIdsDto: { ids: [user1Asset1.id] } },
{ headers: asBearerAuth(user1.accessToken) },
);
@@ -261,7 +261,7 @@ describe('/albums', () => {
.get(`/albums?assetId=${user1Asset2.id}`)
.set('Authorization', `Bearer ${user1.accessToken}`);
expect(status).toBe(200);
expect(body).toHaveLength(2);
expect(body).toHaveLength(1);
});
it('should return the album collection filtered by assetId and ignores shared=true', async () => {
@@ -509,17 +509,7 @@ describe('/albums', () => {
expect(body).toEqual(errorDto.unauthorized);
});
it('should require authorization', async () => {
const { status, body } = await request(app)
.delete(`/albums/${user1Albums[1].id}/assets`)
.set('Authorization', `Bearer ${user2.accessToken}`)
.send({ ids: [user1Asset1.id] });
expect(status).toBe(400);
expect(body).toEqual(errorDto.noPermission);
});
it('should be able to remove foreign asset from owned album', async () => {
it('should not be able to remove foreign asset from own album', async () => {
const { status, body } = await request(app)
.delete(`/albums/${user2Albums[0].id}/assets`)
.set('Authorization', `Bearer ${user2.accessToken}`)
@@ -529,7 +519,8 @@ describe('/albums', () => {
expect(body).toEqual([
expect.objectContaining({
id: user1Asset1.id,
success: true,
success: false,
error: 'no_permission',
}),
]);
});
@@ -564,10 +555,10 @@ describe('/albums', () => {
const { status, body } = await request(app)
.delete(`/albums/${user2Albums[0].id}/assets`)
.set('Authorization', `Bearer ${user1.accessToken}`)
.send({ ids: [user1Asset2.id] });
.send({ ids: [user1Asset1.id] });
expect(status).toBe(200);
expect(body).toEqual([expect.objectContaining({ id: user1Asset2.id, success: true })]);
expect(body).toEqual([expect.objectContaining({ id: user1Asset1.id, success: true })]);
});
it('should not be able to remove assets from album as a viewer', async () => {


@@ -588,58 +588,6 @@ describe('/asset', () => {
const after = await utils.getAssetInfo(admin.accessToken, assetId);
expect(after.isTrashed).toBe(true);
});
it('should clean up live photos', async () => {
const { id: motionId } = await utils.createAsset(admin.accessToken, {
assetData: { filename: 'test.mp4', bytes: makeRandomImage() },
});
const { id: photoId } = await utils.createAsset(admin.accessToken, { livePhotoVideoId: motionId });
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: photoId });
await utils.waitForWebsocketEvent({ event: 'assetHidden', id: motionId });
const asset = await utils.getAssetInfo(admin.accessToken, photoId);
expect(asset.livePhotoVideoId).toBe(motionId);
const { status } = await request(app)
.delete('/assets')
.send({ ids: [photoId], force: true })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(204);
await utils.waitForWebsocketEvent({ event: 'assetDelete', id: photoId });
await utils.waitForWebsocketEvent({ event: 'assetDelete', id: motionId });
});
it('should not delete a shared motion asset', async () => {
const { id: motionId } = await utils.createAsset(admin.accessToken, {
assetData: { filename: 'test.mp4', bytes: makeRandomImage() },
});
const { id: asset1 } = await utils.createAsset(admin.accessToken, { livePhotoVideoId: motionId });
const { id: asset2 } = await utils.createAsset(admin.accessToken, { livePhotoVideoId: motionId });
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset1 });
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset2 });
await utils.waitForWebsocketEvent({ event: 'assetHidden', id: motionId });
const asset = await utils.getAssetInfo(admin.accessToken, asset1);
expect(asset.livePhotoVideoId).toBe(motionId);
const { status } = await request(app)
.delete('/assets')
.send({ ids: [asset1], force: true })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(204);
await utils.waitForWebsocketEvent({ event: 'assetDelete', id: asset1 });
await utils.waitForQueueFinish(admin.accessToken, 'backgroundTask');
await expect(utils.getAssetInfo(admin.accessToken, motionId)).resolves.toMatchObject({ id: motionId });
await expect(utils.getAssetInfo(admin.accessToken, asset2)).resolves.toMatchObject({
id: asset2,
livePhotoVideoId: motionId,
});
});
});
describe('GET /assets/:id/thumbnail', () => {
@@ -1200,29 +1148,4 @@ describe('/asset', () => {
expect(video.checksum).toStrictEqual(checksum);
});
});
describe('POST /assets/exist', () => {
it('ignores invalid deviceAssetIds', async () => {
const response = await utils.checkExistingAssets(user1.accessToken, {
deviceId: 'test-assets-exist',
deviceAssetIds: ['invalid', 'INVALID'],
});
expect(response.existingIds).toHaveLength(0);
});
it('returns the IDs of existing assets', async () => {
await utils.createAsset(user1.accessToken, {
deviceId: 'test-assets-exist',
deviceAssetId: 'test-asset-0',
});
const response = await utils.checkExistingAssets(user1.accessToken, {
deviceId: 'test-assets-exist',
deviceAssetIds: ['test-asset-0'],
});
expect(response.existingIds).toEqual(['test-asset-0']);
});
});
});


@@ -1,11 +1,4 @@
import {
LibraryResponseDto,
LoginResponseDto,
ScanLibraryDto,
getAllLibraries,
removeOfflineFiles,
scanLibrary,
} from '@immich/sdk';
import { LibraryResponseDto, LoginResponseDto, ScanLibraryDto, getAllLibraries, scanLibrary } from '@immich/sdk';
import { cpSync, existsSync } from 'node:fs';
import { Socket } from 'socket.io-client';
import { userDto, uuidDto } from 'src/fixtures';
@@ -391,51 +384,6 @@ describe('/libraries', () => {
);
});
it('should not try to delete offline files', async () => {
utils.createImageFile(`${testAssetDir}/temp/offline1/assetA.png`);
const library = await utils.createLibrary(admin.accessToken, {
ownerId: admin.userId,
importPaths: [`${testAssetDirInternal}/temp/offline1`],
});
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
const { assets: initialAssets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
expect(initialAssets).toEqual({
count: 1,
total: 1,
facets: [],
items: [expect.objectContaining({ originalFileName: 'assetA.png' })],
nextPage: null,
});
utils.removeImageFile(`${testAssetDir}/temp/offline1/assetA.png`);
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
const { assets: offlineAssets } = await utils.metadataSearch(admin.accessToken, {
libraryId: library.id,
isOffline: true,
});
expect(offlineAssets).toEqual({
count: 1,
total: 1,
facets: [],
items: [expect.objectContaining({ originalFileName: 'assetA.png' })],
nextPage: null,
});
utils.createImageFile(`${testAssetDir}/temp/offline1/assetA.png`);
await removeOfflineFiles({ id: library.id }, { headers: asBearerAuth(admin.accessToken) });
await utils.waitForQueueFinish(admin.accessToken, 'library');
await utils.waitForWebsocketEvent({ event: 'assetDelete', total: 1 });
expect(existsSync(`${testAssetDir}/temp/offline1/assetA.png`)).toBe(true);
});
it('should scan new files', async () => {
const library = await utils.createLibrary(admin.accessToken, {
ownerId: admin.userId,
@@ -559,10 +507,10 @@ describe('/libraries', () => {
it('should remove offline files', async () => {
const library = await utils.createLibrary(admin.accessToken, {
ownerId: admin.userId,
importPaths: [`${testAssetDirInternal}/temp/offline2`],
importPaths: [`${testAssetDirInternal}/temp`],
});
utils.createImageFile(`${testAssetDir}/temp/offline2/assetA.png`);
utils.createImageFile(`${testAssetDir}/temp/directoryA/assetB.png`);
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
@@ -570,9 +518,9 @@ describe('/libraries', () => {
const { assets: initialAssets } = await utils.metadataSearch(admin.accessToken, {
libraryId: library.id,
});
expect(initialAssets.count).toBe(1);
expect(initialAssets.count).toBe(3);
utils.removeImageFile(`${testAssetDir}/temp/offline2/assetA.png`);
utils.removeImageFile(`${testAssetDir}/temp/directoryA/assetB.png`);
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
@@ -593,7 +541,7 @@ describe('/libraries', () => {
const { assets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
expect(assets.count).toBe(0);
expect(assets.count).toBe(2);
});
it('should not remove online files', async () => {


@@ -230,21 +230,4 @@ describe('/people', () => {
expect(body).toMatchObject({ birthDate: null });
});
});
describe('POST /people/:id/merge', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).post(`/people/${uuidDto.notFound}/merge`);
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should not support merging a person into themselves', async () => {
const { status, body } = await request(app)
.post(`/people/${visiblePerson.id}/merge`)
.set('Authorization', `Bearer ${admin.accessToken}`)
.send({ ids: [visiblePerson.id] });
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest('Cannot merge a person into themselves'));
});
});
});


@@ -339,13 +339,6 @@ describe('/search', () => {
should: 'should search by model',
deferred: () => ({ dto: { model: 'Canon EOS 7D' }, assets: [assetDenali] }),
},
{
should: 'should allow searching the upload library (libraryId: null)',
deferred: () => ({
dto: { libraryId: null, size: 1 },
assets: [assetLast],
}),
},
];
for (const { should, deferred } of searchTests) {


@@ -15,40 +15,6 @@ describe('/server-info', () => {
nonAdmin = await utils.userSetup(admin.accessToken, createUserDto.user1);
});
describe('GET /server-info/about', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/server-info/about');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should return about information', async () => {
const { status, body } = await request(app)
.get('/server-info/about')
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual({
version: expect.any(String),
versionUrl: expect.any(String),
repository: 'immich-app/immich',
repositoryUrl: 'https://github.com/immich-app/immich',
build: '1234567890',
buildUrl: 'https://github.com/immich-app/immich/actions/runs/1234567890',
buildImage: 'e2e',
buildImageUrl: 'https://github.com/immich-app/immich/pkgs/container/immich-server',
sourceRef: 'e2e',
sourceCommit: 'e2eeeeeeeeeeeeeeeeee',
sourceUrl: 'https://github.com/immich-app/immich/commit/e2eeeeeeeeeeeeeeeeee',
nodejs: expect.any(String),
ffmpeg: expect.any(String),
imagemagick: expect.any(String),
libvips: expect.any(String),
exiftool: expect.any(String),
licensed: false,
});
});
});
describe('GET /server-info/storage', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/server-info/storage');


@@ -1,307 +0,0 @@
import { LoginResponseDto } from '@immich/sdk';
import { createUserDto } from 'src/fixtures';
import { errorDto } from 'src/responses';
import { app, utils } from 'src/utils';
import request from 'supertest';
import { beforeAll, describe, expect, it } from 'vitest';
const serverLicense = {
licenseKey: 'IMSV-6ECZ-91TE-WZRM-Q7AQ-MBN4-UW48-2CPT-71X9',
activationKey:
'4kJUNUWMq13J14zqPFm1NodRcI6MV6DeOGvQNIgrM8Sc9nv669wyEVvFw1Nz4Kb1W7zLWblOtXEQzpRRqC4r4fKjewJxfbpeo9sEsqAVIfl4Ero-Vp1Dg21-sVdDGZEAy2oeTCXAyCT5d1JqrqR6N1qTAm4xOx9ujXQRFYhjRG8uwudw7_Q49pF18Tj5OEv9qCqElxztoNck4i6O_azsmsoOQrLIENIWPh3EynBN3ESpYERdCgXO8MlWeuG14_V1HbNjnJPZDuvYg__YfMzoOEtfm1sCqEaJ2Ww-BaX7yGfuCL4XsuZlCQQNHjfscy_WywVfIZPKCiW8QR74i0cSzQ',
};
describe('/server', () => {
let admin: LoginResponseDto;
let nonAdmin: LoginResponseDto;
beforeAll(async () => {
await utils.resetDatabase();
admin = await utils.adminSetup({ onboarding: false });
nonAdmin = await utils.userSetup(admin.accessToken, createUserDto.user1);
});
describe('GET /server/about', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/server/about');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should return about information', async () => {
const { status, body } = await request(app)
.get('/server/about')
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual({
version: expect.any(String),
versionUrl: expect.any(String),
repository: 'immich-app/immich',
repositoryUrl: 'https://github.com/immich-app/immich',
build: '1234567890',
buildUrl: 'https://github.com/immich-app/immich/actions/runs/1234567890',
buildImage: 'e2e',
buildImageUrl: 'https://github.com/immich-app/immich/pkgs/container/immich-server',
sourceRef: 'e2e',
sourceCommit: 'e2eeeeeeeeeeeeeeeeee',
sourceUrl: 'https://github.com/immich-app/immich/commit/e2eeeeeeeeeeeeeeeeee',
nodejs: expect.any(String),
ffmpeg: expect.any(String),
imagemagick: expect.any(String),
libvips: expect.any(String),
exiftool: expect.any(String),
licensed: false,
});
});
});
describe('GET /server/storage', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/server/storage');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should return the disk information', async () => {
const { status, body } = await request(app)
.get('/server/storage')
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual({
diskAvailable: expect.any(String),
diskAvailableRaw: expect.any(Number),
diskSize: expect.any(String),
diskSizeRaw: expect.any(Number),
diskUsagePercentage: expect.any(Number),
diskUse: expect.any(String),
diskUseRaw: expect.any(Number),
});
});
});
describe('GET /server/ping', () => {
it('should respond with pong', async () => {
const { status, body } = await request(app).get('/server/ping');
expect(status).toBe(200);
expect(body).toEqual({ res: 'pong' });
});
});
describe('GET /server/version', () => {
it('should respond with the server version', async () => {
const { status, body } = await request(app).get('/server/version');
expect(status).toBe(200);
expect(body).toEqual({
major: expect.any(Number),
minor: expect.any(Number),
patch: expect.any(Number),
});
});
});
describe('GET /server/features', () => {
it('should respond with the server features', async () => {
const { status, body } = await request(app).get('/server/features');
expect(status).toBe(200);
expect(body).toEqual({
smartSearch: false,
configFile: false,
duplicateDetection: false,
facialRecognition: false,
map: true,
reverseGeocoding: true,
oauth: false,
oauthAutoLaunch: false,
passwordLogin: true,
search: true,
sidecar: true,
trash: true,
email: false,
});
});
});
describe('GET /server/config', () => {
it('should respond with the server configuration', async () => {
const { status, body } = await request(app).get('/server/config');
expect(status).toBe(200);
expect(body).toEqual({
loginPageMessage: '',
oauthButtonText: 'Login with OAuth',
trashDays: 30,
userDeleteDelay: 7,
isInitialized: true,
externalDomain: '',
isOnboarded: false,
});
});
});
describe('GET /server/statistics', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/server/statistics');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should only work for admins', async () => {
const { status, body } = await request(app)
.get('/server/statistics')
.set('Authorization', `Bearer ${nonAdmin.accessToken}`);
expect(status).toBe(403);
expect(body).toEqual(errorDto.forbidden);
});
it('should return the server stats', async () => {
const { status, body } = await request(app)
.get('/server/statistics')
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual({
photos: 0,
usage: 0,
usageByUser: [
{
quotaSizeInBytes: null,
photos: 0,
usage: 0,
userName: 'Immich Admin',
userId: admin.userId,
videos: 0,
},
{
quotaSizeInBytes: null,
photos: 0,
usage: 0,
userName: 'User 1',
userId: nonAdmin.userId,
videos: 0,
},
],
videos: 0,
});
});
});
describe('GET /server/media-types', () => {
it('should return accepted media types', async () => {
const { status, body } = await request(app).get('/server/media-types');
expect(status).toBe(200);
expect(body).toEqual({
sidecar: ['.xmp'],
image: expect.any(Array),
video: expect.any(Array),
});
});
});
describe('GET /server/theme', () => {
it('should respond with the server theme', async () => {
const { status, body } = await request(app).get('/server/theme');
expect(status).toBe(200);
expect(body).toEqual({
customCss: '',
});
});
});
describe('GET /server/license', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/server/license');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should only work for admins', async () => {
const { status, body } = await request(app)
.get('/server/license')
.set('Authorization', `Bearer ${nonAdmin.accessToken}`);
expect(status).toBe(403);
expect(body).toEqual(errorDto.forbidden);
});
it('should return the server license', async () => {
await request(app).put('/server/license').set('Authorization', `Bearer ${admin.accessToken}`).send(serverLicense);
const { status, body } = await request(app)
.get('/server/license')
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual({
...serverLicense,
activatedAt: expect.any(String),
});
});
});
describe('DELETE /server/license', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).delete('/server/license');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should only work for admins', async () => {
const { status, body } = await request(app)
.delete('/server/license')
.set('Authorization', `Bearer ${nonAdmin.accessToken}`);
expect(status).toBe(403);
expect(body).toEqual(errorDto.forbidden);
});
it('should delete the server license', async () => {
await request(app)
.delete('/server/license')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send(serverLicense);
const { status } = await request(app).get('/server/license').set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
});
});
describe('PUT /server/license', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).put('/server/license');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should only work for admins', async () => {
const { status, body } = await request(app)
.put('/server/license')
.set('Authorization', `Bearer ${nonAdmin.accessToken}`);
expect(status).toBe(403);
expect(body).toEqual(errorDto.forbidden);
});
it('should set the server license', async () => {
const { status, body } = await request(app)
.put('/server/license')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send(serverLicense);
expect(status).toBe(200);
expect(body).toEqual({ ...serverLicense, activatedAt: expect.any(String) });
const { body: licenseBody } = await request(app)
.get('/server/license')
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(licenseBody).toEqual({ ...serverLicense, activatedAt: expect.any(String) });
});
it('should reject license not starting with IMSV-', async () => {
const { status, body } = await request(app)
.put('/server/license')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send({ licenseKey: 'IMCL-ABCD-ABCD-ABCD-ABCD-ABCD-ABCD-ABCD-ABCD', activationKey: 'activationKey' });
expect(status).toBe(400);
expect(body.message).toBe('Invalid license key');
});
it('should reject license with invalid activation key', async () => {
const { status, body } = await request(app)
.put('/server/license')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send({ licenseKey: serverLicense.licenseKey, activationKey: `invalid${serverLicense.activationKey}` });
expect(status).toBe(400);
expect(body.message).toBe('Invalid license key');
});
});
});


@@ -5,7 +5,6 @@ import {
getUserAdmin,
getUserPreferencesAdmin,
login,
updateAssets,
} from '@immich/sdk';
import { Socket } from 'socket.io-client';
import { createUserDto, uuidDto } from 'src/fixtures';
@@ -21,16 +20,18 @@ describe('/admin/users', () => {
let nonAdmin: LoginResponseDto;
let deletedUser: LoginResponseDto;
let userToDelete: LoginResponseDto;
let userToHardDelete: LoginResponseDto;
beforeAll(async () => {
await utils.resetDatabase();
admin = await utils.adminSetup({ onboarding: false });
[websocket, nonAdmin, deletedUser, userToDelete] = await Promise.all([
[websocket, nonAdmin, deletedUser, userToDelete, userToHardDelete] = await Promise.all([
utils.connectWebsocket(admin.accessToken),
utils.userSetup(admin.accessToken, createUserDto.user1),
utils.userSetup(admin.accessToken, createUserDto.user2),
utils.userSetup(admin.accessToken, createUserDto.user3),
utils.userSetup(admin.accessToken, createUserDto.user4),
]);
await deleteUserAdmin(
@@ -63,12 +64,13 @@ describe('/admin/users', () => {
.get(`/admin/users`)
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toHaveLength(3);
expect(body).toHaveLength(4);
expect(body).toEqual(
expect.arrayContaining([
expect.objectContaining({ email: admin.userEmail }),
expect.objectContaining({ email: nonAdmin.userEmail }),
expect.objectContaining({ email: userToDelete.userEmail }),
expect.objectContaining({ email: userToHardDelete.userEmail }),
]),
);
});
@@ -79,12 +81,13 @@ describe('/admin/users', () => {
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toHaveLength(4);
expect(body).toHaveLength(5);
expect(body).toEqual(
expect.arrayContaining([
expect.objectContaining({ email: admin.userEmail }),
expect.objectContaining({ email: nonAdmin.userEmail }),
expect.objectContaining({ email: userToDelete.userEmail }),
expect.objectContaining({ email: userToHardDelete.userEmail }),
expect.objectContaining({ email: deletedUser.userEmail }),
]),
);
@@ -247,23 +250,18 @@ describe('/admin/users', () => {
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toMatchObject({ avatar: { color: 'orange' } });
expect(body).toEqual({
avatar: { color: 'orange' },
memories: { enabled: false },
emailNotifications: { enabled: true, albumInvite: true, albumUpdate: true },
});
const after = await getUserPreferencesAdmin({ id: admin.userId }, { headers: asBearerAuth(admin.accessToken) });
expect(after).toMatchObject({ avatar: { color: 'orange' } });
});
it('should update download archive size', async () => {
const { status, body } = await request(app)
.put(`/admin/users/${admin.userId}/preferences`)
.send({ download: { archiveSize: 1_234_567 } })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toMatchObject({ download: { archiveSize: 1_234_567 } });
const after = await getUserPreferencesAdmin({ id: admin.userId }, { headers: asBearerAuth(admin.accessToken) });
expect(after).toMatchObject({ download: { archiveSize: 1_234_567 } });
expect(after).toEqual({
avatar: { color: 'orange' },
memories: { enabled: false },
emailNotifications: { enabled: true, albumInvite: true, albumUpdate: true },
});
});
});
@@ -296,49 +294,19 @@ describe('/admin/users', () => {
});
it('should hard delete a user', async () => {
const user = await utils.userSetup(admin.accessToken, createUserDto.create('hard-delete-1'));
const { status, body } = await request(app)
.delete(`/admin/users/${user.userId}`)
.delete(`/admin/users/${userToHardDelete.userId}`)
.send({ force: true })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toMatchObject({
id: user.userId,
id: userToHardDelete.userId,
updatedAt: expect.any(String),
deletedAt: expect.any(String),
});
await utils.waitForWebsocketEvent({ event: 'userDelete', id: user.userId, timeout: 5000 });
});
it('should hard delete a user with stacked assets', async () => {
const user = await utils.userSetup(admin.accessToken, createUserDto.create('hard-delete-1'));
const [asset1, asset2] = await Promise.all([
utils.createAsset(user.accessToken),
utils.createAsset(user.accessToken),
]);
await updateAssets(
{ assetBulkUpdateDto: { stackParentId: asset1.id, ids: [asset2.id] } },
{ headers: asBearerAuth(user.accessToken) },
);
const { status, body } = await request(app)
.delete(`/admin/users/${user.userId}`)
.send({ force: true })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toMatchObject({
id: user.userId,
updatedAt: expect.any(String),
deletedAt: expect.any(String),
});
await utils.waitForWebsocketEvent({ event: 'userDelete', id: user.userId, timeout: 5000 });
await utils.waitForWebsocketEvent({ event: 'userDelete', id: userToHardDelete.userId, timeout: 5000 });
});
});


@@ -5,12 +5,6 @@ import { app, asBearerAuth, utils } from 'src/utils';
import request from 'supertest';
import { beforeAll, describe, expect, it } from 'vitest';
const userLicense = {
licenseKey: 'IMCL-FF69-TUK1-RWZU-V9Q8-QGQS-S5GC-X4R2-UFK4',
activationKey:
'KuX8KsktrBSiXpQMAH0zLgA5SpijXVr_PDkzLdWUlAogCTMBZ0I3KCHXK0eE9EEd7harxup8_EHMeqAWeHo5VQzol6LGECpFv585U9asXD4Zc-UXt3mhJr2uhazqipBIBwJA2YhmUCDy8hiyiGsukDQNu9Rg9C77UeoKuZBWVjWUBWG0mc1iRqfvF0faVM20w53czAzlhaMxzVGc3Oimbd7xi_CAMSujF_2y8QpA3X2fOVkQkzdcH9lV0COejl7IyH27zQQ9HrlrXv3Lai5Hw67kNkaSjmunVBxC5PS0TpKoc9SfBJMaAGWnaDbjhjYUrm-8nIDQnoeEAidDXVAdPw',
};
describe('/users', () => {
let admin: LoginResponseDto;
let deletedUser: LoginResponseDto;
@@ -78,24 +72,6 @@ describe('/users', () => {
quotaUsageInBytes: 0,
});
});
it('should get my user with license info', async () => {
const { status: licenseStatus } = await request(app)
.put(`/users/me/license`)
.send(userLicense)
.set('Authorization', `Bearer ${nonAdmin.accessToken}`);
expect(licenseStatus).toBe(200);
const { status, body } = await request(app)
.get(`/users/me`)
.set('Authorization', `Bearer ${nonAdmin.accessToken}`);
expect(status).toBe(200);
expect(body).toMatchObject({
id: nonAdmin.userId,
email: nonAdmin.userEmail,
quotaUsageInBytes: 0,
license: userLicense,
});
});
});
describe('PUT /users/me', () => {
@@ -197,45 +173,6 @@ describe('/users', () => {
const after = await getMyPreferences({ headers: asBearerAuth(admin.accessToken) });
expect(after).toMatchObject({ memories: { enabled: false } });
});
it('should update avatar color', async () => {
const { status, body } = await request(app)
.put(`/users/me/preferences`)
.send({ avatar: { color: 'blue' } })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toMatchObject({ avatar: { color: 'blue' } });
const after = await getMyPreferences({ headers: asBearerAuth(admin.accessToken) });
expect(after).toMatchObject({ avatar: { color: 'blue' } });
});
it('should require an integer for download archive size', async () => {
const { status, body } = await request(app)
.put(`/users/me/preferences`)
.send({ download: { archiveSize: 1_234_567.89 } })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['download.archiveSize must be an integer number']));
});
it('should update download archive size', async () => {
const before = await getMyPreferences({ headers: asBearerAuth(admin.accessToken) });
expect(before).toMatchObject({ download: { archiveSize: 4 * 2 ** 30 } });
const { status, body } = await request(app)
.put(`/users/me/preferences`)
.send({ download: { archiveSize: 1_234_567 } })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toMatchObject({ download: { archiveSize: 1_234_567 } });
const after = await getMyPreferences({ headers: asBearerAuth(admin.accessToken) });
expect(after).toMatchObject({ download: { archiveSize: 1_234_567 } });
});
});
describe('GET /users/:id', () => {
@@ -260,81 +197,4 @@ describe('/users', () => {
});
});
});
describe('GET /server/license', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/users/me/license');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should return the user license', async () => {
await request(app)
.put('/users/me/license')
.set('Authorization', `Bearer ${nonAdmin.accessToken}`)
.send(userLicense);
const { status, body } = await request(app)
.get('/users/me/license')
.set('Authorization', `Bearer ${nonAdmin.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual({
...userLicense,
activatedAt: expect.any(String),
});
});
});
describe('PUT /users/me/license', () => {
it('should require authentication', async () => {
const { status } = await request(app).put(`/users/me/license`);
expect(status).toEqual(401);
});
it('should set the user license', async () => {
const { status, body } = await request(app)
.put(`/users/me/license`)
.send(userLicense)
.set('Authorization', `Bearer ${nonAdmin.accessToken}`);
expect(status).toBe(200);
expect(body).toMatchObject({ ...userLicense, activatedAt: expect.any(String) });
expect(status).toBe(200);
expect(body).toEqual({ ...userLicense, activatedAt: expect.any(String) });
const { body: licenseBody } = await request(app)
.get('/users/me/license')
.set('Authorization', `Bearer ${nonAdmin.accessToken}`);
expect(licenseBody).toEqual({ ...userLicense, activatedAt: expect.any(String) });
});
it('should reject license not starting with IMCL-', async () => {
const { status, body } = await request(app)
.put('/users/me/license')
.set('Authorization', `Bearer ${nonAdmin.accessToken}`)
.send({ licenseKey: 'IMSV-ABCD-ABCD-ABCD-ABCD-ABCD-ABCD-ABCD-ABCD', activationKey: 'activationKey' });
expect(status).toBe(400);
expect(body.message).toBe('Invalid license key');
});
it('should reject license with invalid activation key', async () => {
const { status, body } = await request(app)
.put('/users/me/license')
.set('Authorization', `Bearer ${nonAdmin.accessToken}`)
.send({ licenseKey: userLicense.licenseKey, activationKey: `invalid${userLicense.activationKey}` });
expect(status).toBe(400);
expect(body.message).toBe('Invalid license key');
});
});
describe('DELETE /users/me/license', () => {
it('should require authentication', async () => {
const { status } = await request(app).put(`/users/me/license`);
expect(status).toEqual(401);
});
it('should delete the user license', async () => {
const { status } = await request(app)
.delete(`/users/me/license`)
.set('Authorization', `Bearer ${nonAdmin.accessToken}`);
expect(status).toBe(200);
});
});
});


@@ -9,30 +9,11 @@ describe(`immich-admin`, () => {
describe('list-users', () => {
it('should list the admin user', async () => {
const { stdout, stderr, exitCode } = await immichAdmin(['list-users']).promise;
const { stdout, stderr, exitCode } = await immichAdmin(['list-users']);
expect(exitCode).toBe(0);
expect(stderr).toBe('');
expect(stdout).toContain("email: 'admin@immich.cloud'");
expect(stdout).toContain("name: 'Immich Admin'");
});
});
describe('reset-admin-password', () => {
it('should reset admin password', async () => {
const { child, promise } = immichAdmin(['reset-admin-password']);
let data = '';
child.stdout.on('data', (chunk) => {
data += chunk;
if (data.includes('Please choose a new password (optional)')) {
child.stdin.end('\n');
}
});
const { stderr, stdout, exitCode } = await promise;
expect(exitCode).toBe(0);
expect(stderr).toBe('');
expect(stdout).toContain('The admin password has been updated to:');
});
});
});


@@ -81,7 +81,6 @@ export const signupResponseDto = {
quotaUsageInBytes: 0,
quotaSizeInBytes: null,
status: 'active',
license: null,
},
};


@@ -3,7 +3,6 @@ import {
AssetMediaCreateDto,
AssetMediaResponseDto,
AssetResponseDto,
CheckExistingAssetsDto,
CreateAlbumDto,
CreateLibraryDto,
MetadataSearchDto,
@@ -11,7 +10,6 @@ import {
SharedLinkCreateDto,
UserAdminCreateDto,
ValidateLibraryDto,
checkExistingAssets,
createAlbum,
createApiKey,
createLibrary,
@@ -47,7 +45,7 @@ import { makeRandomImage } from 'src/generators';
import request from 'supertest';
type CommandResponse = { stdout: string; stderr: string; exitCode: number | null };
type EventType = 'assetUpload' | 'assetUpdate' | 'assetDelete' | 'userDelete' | 'assetHidden';
type EventType = 'assetUpload' | 'assetUpdate' | 'assetDelete' | 'userDelete';
type WaitOptions = { event: EventType; id?: string; total?: number; timeout?: number };
type AdminSetupOptions = { onboarding?: boolean };
type AssetData = { bytes?: Buffer; filename: string };
@@ -64,13 +62,13 @@ export const tempDir = tmpdir();
export const asBearerAuth = (accessToken: string) => ({ Authorization: `Bearer ${accessToken}` });
export const asKeyAuth = (key: string) => ({ 'x-api-key': key });
export const immichCli = (args: string[]) =>
executeCommand('node', ['node_modules/.bin/immich', '-d', `/${tempDir}/immich/`, ...args]).promise;
executeCommand('node', ['node_modules/.bin/immich', '-d', `/${tempDir}/immich/`, ...args]);
export const immichAdmin = (args: string[]) =>
executeCommand('docker', ['exec', '-i', 'immich-e2e-server', '/bin/bash', '-c', `immich-admin ${args.join(' ')}`]);
const executeCommand = (command: string, args: string[]) => {
let _resolve: (value: CommandResponse) => void;
const promise = new Promise<CommandResponse>((resolve) => (_resolve = resolve));
const deferred = new Promise<CommandResponse>((resolve) => (_resolve = resolve));
const child = spawn(command, args, { stdio: 'pipe' });
let stdout = '';
@@ -86,13 +84,12 @@ const executeCommand = (command: string, args: string[]) => {
});
});
return { promise, child };
return deferred;
};
let client: pg.Client | null = null;
const events: Record<EventType, Set<string>> = {
assetHidden: new Set<string>(),
assetUpload: new Set<string>(),
assetUpdate: new Set<string>(),
assetDelete: new Set<string>(),
@@ -152,6 +149,10 @@ export const utils = {
const sql: string[] = [];
if (tables.includes('asset_stack')) {
sql.push('UPDATE "assets" SET "stackId" = NULL;');
}
for (const table of tables) {
if (table === 'system_metadata') {
// prevent reverse geocoder from being re-initialized
@@ -200,7 +201,6 @@ export const utils = {
.on('connect', () => resolve(websocket))
.on('on_upload_success', (data: AssetResponseDto) => onEvent({ event: 'assetUpload', id: data.id }))
.on('on_asset_update', (data: AssetResponseDto) => onEvent({ event: 'assetUpdate', id: data.id }))
.on('on_asset_hidden', (assetId: string) => onEvent({ event: 'assetHidden', id: assetId }))
.on('on_asset_delete', (assetId: string) => onEvent({ event: 'assetDelete', id: assetId }))
.on('on_user_delete', (userId: string) => onEvent({ event: 'userDelete', id: userId }))
.connect();
@@ -374,9 +374,6 @@ export const utils = {
getAssetInfo: (accessToken: string, id: string) => getAssetInfo({ id }, { headers: asBearerAuth(accessToken) }),
checkExistingAssets: (accessToken: string, checkExistingAssetsDto: CheckExistingAssetsDto) =>
checkExistingAssets({ checkExistingAssetsDto }, { headers: asBearerAuth(accessToken) }),
metadataSearch: async (accessToken: string, dto: MetadataSearchDto) => {
return searchMetadata({ metadataSearchDto: dto }, { headers: asBearerAuth(accessToken) });
},
@@ -396,7 +393,14 @@ export const utils = {
return;
}
await client.query('INSERT INTO asset_faces ("assetId", "personId") VALUES ($1, $2)', [assetId, personId]);
const vector = Array.from({ length: 512 }, Math.random);
const embedding = `[${vector.join(',')}]`;
await client.query('INSERT INTO asset_faces ("assetId", "personId", "embedding") VALUES ($1, $2, $3)', [
assetId,
personId,
embedding,
]);
},
setPersonThumbnail: async (personId: string) => {

View File

@@ -51,13 +51,6 @@ test.describe('Shared Links', () => {
await page.getByText('DOWNLOADING', { exact: true }).waitFor();
});
test('download all from shared link', async ({ page }) => {
await page.goto(`/share/${sharedLink.key}`);
await page.getByRole('heading', { name: 'Test Album' }).waitFor();
await page.getByRole('button', { name: 'Download' }).click();
await page.getByText('DOWNLOADING', { exact: true }).waitFor();
});
test('enter password for a shared link', async ({ page }) => {
await page.goto(`/share/${sharedLinkPassword.key}`);
await page.getByPlaceholder('Password').fill('test-password');

View File

@@ -1,6 +1,6 @@
ARG DEVICE=cpu
FROM python:3.11-bookworm@sha256:7bec1574675e7fd9e3a540a03cd7d6811c59ca261bd300cd665369d8f435298a as builder-cpu
FROM python:3.11-bookworm@sha256:96de1ea4821d73fd2c1853d1fdc3cf794ccfe2fae4c3f08579e846de51760a61 as builder-cpu
FROM openvino/ubuntu22_runtime:2023.3.0@sha256:176646df619032ea6c10faf842867119c393e7497b7f88b5e307e932a0fd5aa8 as builder-openvino
USER root
@@ -36,7 +36,7 @@ RUN python3 -m venv /opt/venv
COPY poetry.lock pyproject.toml ./
RUN poetry install --sync --no-interaction --no-ansi --no-root --with ${DEVICE} --without dev
FROM python:3.11-slim-bookworm@sha256:17ec9dc2367aa748559d0212f34665ec4df801129de32db705ea34654b5bc77a as prod-cpu
FROM python:3.11-slim-bookworm@sha256:fc39d2e68b554c3f0a5cb8a776280c0b3d73b4c04b83dbade835e2a171ca27ef as prod-cpu
FROM openvino/ubuntu22_runtime:2023.3.0@sha256:176646df619032ea6c10faf842867119c393e7497b7f88b5e307e932a0fd5aa8 as prod-openvino
USER root

View File

@@ -52,6 +52,8 @@ class Ann(metaclass=_Singleton):
def __init__(self, log_level: int = 3, tuning_level: int = 1, tuning_file: str | None = None) -> None:
if not is_available:
raise RuntimeError("libann is not available!")
if tuning_file and not exists(tuning_file):
raise ValueError("tuning_file must point to an existing (possibly empty) file!")
if tuning_level == 0 and tuning_file is None:
raise ValueError("tuning_level == 0 reads existing tuning information and requires a tuning_file")
if tuning_level < 0 or tuning_level > 3:
@@ -65,12 +67,6 @@ class Ann(metaclass=_Singleton):
self.input_shapes: dict[int, tuple[tuple[int], ...]] = {}
self.ann: int | None = None
self.new()
if self.tuning_file is not None:
# make sure tuning file exists (without clearing contents)
# once filled, the tuning file reduces the cost/time of the first
# inference after model load by 10s of seconds
open(self.tuning_file, "a").close()
def new(self) -> None:
if self.ann is None:
@@ -99,19 +95,17 @@ class Ann(metaclass=_Singleton):
model_path: str,
fast_math: bool = True,
fp16: bool = False,
save_cached_network: bool = False,
cached_network_path: str | None = None,
) -> int:
if not model_path.endswith((".armnn", ".tflite", ".onnx")):
raise ValueError("model_path must be a file with extension .armnn, .tflite or .onnx")
if not exists(model_path):
raise ValueError("model_path must point to an existing file!")
save_cached_network = False
if cached_network_path is not None and not exists(cached_network_path):
save_cached_network = True
# create empty model cache file
open(cached_network_path, "a").close()
raise ValueError("cached_network_path must point to an existing (possibly empty) file!")
if save_cached_network and cached_network_path is None:
raise ValueError("save_cached_network is True, cached_network_path must be specified!")
net_id: int = libann.load(
self.ann,
model_path.encode(),

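The refactored Ann wrapper above no longer creates its tuning or network-cache files itself; the constructor and load() now require the caller to hand in paths to existing (possibly empty) files and to set save_cached_network explicitly. A minimal caller-side sketch of that contract, standard library only; the Ann class itself is assumed importable from the module shown above, and the paths are illustrative.

from pathlib import Path

def prepare_ann_files(cache_dir: str) -> tuple[str, str]:
    # Create the (possibly empty) files the refactored API expects to already exist,
    # without clearing any contents they may have from earlier runs.
    cache = Path(cache_dir)
    cache.mkdir(parents=True, exist_ok=True)
    tuning_file = cache / "gpu-tuning.ann"
    network_cache = cache / "model.anncache"
    tuning_file.touch(exist_ok=True)
    network_cache.touch(exist_ok=True)
    return tuning_file.as_posix(), network_cache.as_posix()

# Usage sketch (assumes libann and the wrapper above are available):
# tuning, net_cache = prepare_ann_files("/cache/facial-recognition")
# ann = Ann(tuning_level=3, tuning_file=tuning)
# model_id = ann.load("/cache/model.armnn", save_cached_network=True, cached_network_path=net_cache)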
View File

@@ -8,8 +8,6 @@ from fastapi.testclient import TestClient
from numpy.typing import NDArray
from PIL import Image
from app.config import log
from .main import app
@@ -98,77 +96,12 @@ def clip_tokenizer_cfg() -> dict[str, Any]:
@pytest.fixture(scope="function")
def providers(request: pytest.FixtureRequest) -> Iterator[mock.Mock]:
def providers(request: pytest.FixtureRequest) -> Iterator[dict[str, Any]]:
marker = request.node.get_closest_marker("providers")
if marker is None:
raise ValueError("Missing marker 'providers'")
providers = marker.args[0]
with mock.patch("app.sessions.ort.ort.get_available_providers") as mocked:
with mock.patch("app.models.base.ort.get_available_providers") as mocked:
mocked.return_value = providers
yield providers
@pytest.fixture(scope="function")
def ort_pybind() -> Iterator[mock.Mock]:
with mock.patch("app.sessions.ort.ort.capi._pybind_state") as mocked:
yield mocked
@pytest.fixture(scope="function")
def ov_device_ids(request: pytest.FixtureRequest, ort_pybind: mock.Mock) -> Iterator[mock.Mock]:
marker = request.node.get_closest_marker("ov_device_ids")
if marker is None:
raise ValueError("Missing marker 'ov_device_ids'")
ort_pybind.get_available_openvino_device_ids.return_value = marker.args[0]
return ort_pybind
@pytest.fixture(scope="function")
def ort_session() -> Iterator[mock.Mock]:
with mock.patch("app.sessions.ort.ort.InferenceSession") as mocked:
yield mocked
@pytest.fixture(scope="function")
def ann_session() -> Iterator[mock.Mock]:
with mock.patch("app.sessions.ann.Ann") as mocked:
yield mocked
@pytest.fixture(scope="function")
def rmtree() -> Iterator[mock.Mock]:
with mock.patch("app.models.base.rmtree", autospec=True) as mocked:
mocked.avoids_symlink_attacks = True
yield mocked
@pytest.fixture(scope="function")
def path() -> Iterator[mock.Mock]:
path = mock.MagicMock()
path.exists.return_value = True
path.is_dir.return_value = True
path.is_file.return_value = True
path.with_suffix.return_value = path
path.return_value = path
with mock.patch("app.models.base.Path", return_value=path) as mocked:
yield mocked
@pytest.fixture(scope="function")
def info() -> Iterator[mock.Mock]:
with mock.patch.object(log, "info") as mocked:
yield mocked
@pytest.fixture(scope="function")
def warning() -> Iterator[mock.Mock]:
with mock.patch.object(log, "warning") as mocked:
yield mocked
@pytest.fixture(scope="function")
def snapshot_download() -> Iterator[mock.Mock]:
with mock.patch("app.models.base.snapshot_download") as mocked:
yield mocked

View File

@@ -192,18 +192,23 @@ async def load(model: InferenceModel) -> InferenceModel:
return model
def _load(model: InferenceModel) -> InferenceModel:
if model.load_attempts > 1:
raise HTTPException(500, f"Failed to load model '{model.model_name}'")
with lock:
model.load()
return model
try:
return await run(_load, model)
await run(_load, model)
return model
except (OSError, InvalidProtobuf, BadZipFile, NoSuchFile):
log.warning(f"Failed to load {model.model_type.replace('_', ' ')} model '{model.model_name}'. Clearing cache.")
log.warning(
(
f"Failed to load {model.model_type.replace('_', ' ')} model '{model.model_name}'."
"Clearing cache and retrying."
)
)
model.clear_cache()
return await run(_load, model)
await run(_load, model)
return model
async def idle_shutdown_task() -> None:

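One side of this hunk caps model loading at two attempts: a load failure from a corrupted download clears the cache and retries once, and a second failure surfaces as an HTTP 500. A standalone sketch of that retry shape with a toy model standing in for InferenceModel (all names here are illustrative, not the service's API):

class ToyModel:
    def __init__(self) -> None:
        self.load_attempts = 0
        self.loaded = False

    def load(self) -> None:
        self.load_attempts += 1
        if self.load_attempts < 2:
            raise OSError("corrupt model file")  # simulate a bad cached download on the first try
        self.loaded = True

    def clear_cache(self) -> None:
        pass  # the real clear_cache removes the model's cache directory


def load_with_retry(model: ToyModel) -> ToyModel:
    def _load(m: ToyModel) -> None:
        if m.load_attempts > 1:
            raise RuntimeError("failed to load model")  # the service maps this to HTTP 500
        m.load()

    try:
        _load(model)
    except OSError:
        model.clear_cache()  # drop the bad files, then retry exactly once
        _load(model)
    return model


assert load_with_retry(ToyModel()).loaded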
View File

@@ -7,7 +7,6 @@ import numpy as np
from numpy.typing import NDArray
from ann.ann import Ann
from app.schemas import SessionNode
from ..config import log, settings
@@ -17,15 +16,27 @@ class AnnSession:
Wrapper for ANN to be drop-in replacement for ONNX session.
"""
def __init__(self, model_path: Path, cache_dir: Path = settings.cache_folder) -> None:
self.model_path = model_path
self.cache_dir = cache_dir
self.ann = Ann(tuning_level=3, tuning_file=(cache_dir / "gpu-tuning.ann").as_posix())
def __init__(self, model_path: Path):
tuning_file = Path(settings.cache_folder) / "gpu-tuning.ann"
with tuning_file.open(mode="a"):
# make sure tuning file exists (without clearing contents)
# once filled, the tuning file reduces the cost/time of the first
# inference after model load by 10s of seconds
pass
self.ann = Ann(tuning_level=3, tuning_file=tuning_file.as_posix())
log.info("Loading ANN model %s ...", model_path)
cache_file = model_path.with_suffix(".anncache")
save = False
if not cache_file.is_file():
save = True
with cache_file.open(mode="a"):
# create empty model cache file
pass
self.model = self.ann.load(
model_path.as_posix(),
cached_network_path=model_path.with_suffix(".anncache").as_posix(),
save_cached_network=save,
cached_network_path=cache_file.as_posix(),
)
log.info("Loaded ANN model with ID %d", self.model)
@@ -34,11 +45,11 @@ class AnnSession:
log.info("Unloaded ANN model %d", self.model)
self.ann.destroy()
def get_inputs(self) -> list[SessionNode]:
def get_inputs(self) -> list[AnnNode]:
shapes = self.ann.input_shapes[self.model]
return [AnnNode(None, s) for s in shapes]
def get_outputs(self) -> list[SessionNode]:
def get_outputs(self) -> list[AnnNode]:
shapes = self.ann.output_shapes[self.model]
return [AnnNode(None, s) for s in shapes]

View File

@@ -5,14 +5,15 @@ from pathlib import Path
from shutil import rmtree
from typing import Any, ClassVar
import onnxruntime as ort
from huggingface_hub import snapshot_download
import ann.ann
from app.sessions.ort import OrtSession
from app.models.constants import SUPPORTED_PROVIDERS
from ..config import clean_name, log, settings
from ..schemas import ModelFormat, ModelIdentity, ModelSession, ModelTask, ModelType
from ..sessions.ann import AnnSession
from .ann import AnnSession
class InferenceModel(ABC):
@@ -23,17 +24,19 @@ class InferenceModel(ABC):
self,
model_name: str,
cache_dir: Path | str | None = None,
providers: list[str] | None = None,
provider_options: list[dict[str, Any]] | None = None,
sess_options: ort.SessionOptions | None = None,
preferred_format: ModelFormat | None = None,
session: ModelSession | None = None,
**model_kwargs: Any,
) -> None:
self.loaded = session is not None
self.load_attempts = 0
self.loaded = False
self.model_name = clean_name(model_name)
self.cache_dir = Path(cache_dir) if cache_dir is not None else self._cache_dir_default
self.model_format = preferred_format if preferred_format is not None else self._model_format_default
if session is not None:
self.session = session
self.cache_dir = Path(cache_dir) if cache_dir is not None else self.cache_dir_default
self.providers = providers if providers is not None else self.providers_default
self.provider_options = provider_options if provider_options is not None else self.provider_options_default
self.sess_options = sess_options if sess_options is not None else self.sess_options_default
self.preferred_format = preferred_format if preferred_format is not None else self.preferred_format_default
def download(self) -> None:
if not self.cached:
@@ -45,11 +48,9 @@ class InferenceModel(ABC):
def load(self) -> None:
if self.loaded:
return
self.load_attempts += 1
self.download()
attempt = f"Attempt #{self.load_attempts + 1} to load" if self.load_attempts else "Loading"
log.info(f"{attempt} {self.model_type.replace('-', ' ')} model '{self.model_name}' to memory")
log.info(f"Loading {self.model_type.replace('-', ' ')} model '{self.model_name}' to memory")
self.session = self._load()
self.loaded = True
@@ -66,7 +67,7 @@ class InferenceModel(ABC):
pass
def _download(self) -> None:
ignore_patterns = [] if self.model_format == ModelFormat.ARMNN else ["*.armnn"]
ignore_patterns = [] if self.preferred_format == ModelFormat.ARMNN else ["*.armnn"]
snapshot_download(
f"immich-app/{clean_name(self.model_name)}",
cache_dir=self.cache_dir,
@@ -101,11 +102,26 @@ class InferenceModel(ABC):
self.cache_dir.mkdir(parents=True, exist_ok=True)
def _make_session(self, model_path: Path) -> ModelSession:
if not model_path.is_file():
onnx_path = model_path.with_suffix(".onnx")
if not onnx_path.is_file():
raise ValueError(f"Model path '{model_path}' does not exist")
log.warning(
f"Could not find model path '{model_path}'. " f"Falling back to ONNX model path '{onnx_path}' instead.",
)
model_path = onnx_path
match model_path.suffix:
case ".armnn":
session: ModelSession = AnnSession(model_path)
session = AnnSession(model_path)
case ".onnx":
session = OrtSession(model_path)
session = ort.InferenceSession(
model_path.as_posix(),
sess_options=self.sess_options,
providers=self.providers,
provider_options=self.provider_options,
)
case _:
raise ValueError(f"Unsupported model file type: {model_path.suffix}")
return session
@@ -116,7 +132,7 @@ class InferenceModel(ABC):
@property
def model_path(self) -> Path:
return self.model_dir / f"model.{self.model_format}"
return self.model_dir / f"model.{self.preferred_format}"
@property
def model_task(self) -> ModelTask:
@@ -135,7 +151,7 @@ class InferenceModel(ABC):
self._cache_dir = cache_dir
@property
def _cache_dir_default(self) -> Path:
def cache_dir_default(self) -> Path:
return settings.cache_folder / self.model_task.value / self.model_name
@property
@@ -143,18 +159,95 @@ class InferenceModel(ABC):
return self.model_path.is_file()
@property
def model_format(self) -> ModelFormat:
def providers(self) -> list[str]:
return self._providers
@providers.setter
def providers(self, providers: list[str]) -> None:
log.info(
(f"Setting '{self.model_name}' execution providers to {providers}, " "in descending order of preference"),
)
self._providers = providers
@property
def providers_default(self) -> list[str]:
available_providers = set(ort.get_available_providers())
log.debug(f"Available ORT providers: {available_providers}")
if (openvino := "OpenVINOExecutionProvider") in available_providers:
device_ids: list[str] = ort.capi._pybind_state.get_available_openvino_device_ids()
log.debug(f"Available OpenVINO devices: {device_ids}")
gpu_devices = [device_id for device_id in device_ids if device_id.startswith("GPU")]
if not gpu_devices:
log.warning("No GPU device found in OpenVINO. Falling back to CPU.")
available_providers.remove(openvino)
return [provider for provider in SUPPORTED_PROVIDERS if provider in available_providers]
@property
def provider_options(self) -> list[dict[str, Any]]:
return self._provider_options
@provider_options.setter
def provider_options(self, provider_options: list[dict[str, Any]]) -> None:
log.debug(f"Setting execution provider options to {provider_options}")
self._provider_options = provider_options
@property
def provider_options_default(self) -> list[dict[str, Any]]:
options = []
for provider in self.providers:
match provider:
case "CPUExecutionProvider" | "CUDAExecutionProvider":
option = {"arena_extend_strategy": "kSameAsRequested"}
case "OpenVINOExecutionProvider":
option = {"device_type": "GPU_FP32", "cache_dir": (self.cache_dir / "openvino").as_posix()}
case _:
option = {}
options.append(option)
return options
@property
def sess_options(self) -> ort.SessionOptions:
return self._sess_options
@sess_options.setter
def sess_options(self, sess_options: ort.SessionOptions) -> None:
log.debug(f"Setting execution_mode to {sess_options.execution_mode.name}")
log.debug(f"Setting inter_op_num_threads to {sess_options.inter_op_num_threads}")
log.debug(f"Setting intra_op_num_threads to {sess_options.intra_op_num_threads}")
self._sess_options = sess_options
@property
def sess_options_default(self) -> ort.SessionOptions:
sess_options = ort.SessionOptions()
sess_options.enable_cpu_mem_arena = False
# avoid thread contention between models
if settings.model_inter_op_threads > 0:
sess_options.inter_op_num_threads = settings.model_inter_op_threads
# these defaults work well for CPU, but bottleneck GPU
elif settings.model_inter_op_threads == 0 and self.providers == ["CPUExecutionProvider"]:
sess_options.inter_op_num_threads = 1
if settings.model_intra_op_threads > 0:
sess_options.intra_op_num_threads = settings.model_intra_op_threads
elif settings.model_intra_op_threads == 0 and self.providers == ["CPUExecutionProvider"]:
sess_options.intra_op_num_threads = 2
if sess_options.inter_op_num_threads > 1:
sess_options.execution_mode = ort.ExecutionMode.ORT_PARALLEL
return sess_options
@property
def preferred_format(self) -> ModelFormat:
return self._preferred_format
@model_format.setter
def model_format(self, preferred_format: ModelFormat) -> None:
@preferred_format.setter
def preferred_format(self, preferred_format: ModelFormat) -> None:
log.debug(f"Setting preferred format to {preferred_format}")
self._preferred_format = preferred_format
@property
def _model_format_default(self) -> ModelFormat:
prefer_ann = ann.ann.is_available and settings.ann
ann_exists = (self.model_dir / "model.armnn").is_file()
if prefer_ann and not ann_exists:
log.warning(f"ARM NN is available, but '{self.model_name}' does not support ARM NN. Falling back to ONNX.")
return ModelFormat.ARMNN if prefer_ann and ann_exists else ModelFormat.ONNX
def preferred_format_default(self) -> ModelFormat:
return ModelFormat.ARMNN if ann.ann.is_available and settings.ann else ModelFormat.ONNX

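The provider plumbing shown above reduces to two steps: intersect a fixed preference list with whatever this onnxruntime build reports as available, then derive per-provider options. A condensed standalone sketch of that selection; the preference list mirrors the SUPPORTED_PROVIDERS constant referenced above and is an assumption here, as is the cache path.

from typing import Any

import onnxruntime as ort

PREFERRED = ["CUDAExecutionProvider", "OpenVINOExecutionProvider", "CPUExecutionProvider"]  # assumed order


def pick_providers() -> list[str]:
    available = set(ort.get_available_providers())
    # keep the preference order, drop anything this build of onnxruntime lacks
    return [p for p in PREFERRED if p in available]


def provider_options(providers: list[str], cache_dir: str) -> list[dict[str, Any]]:
    options: list[dict[str, Any]] = []
    for provider in providers:
        if provider in ("CPUExecutionProvider", "CUDAExecutionProvider"):
            options.append({"arena_extend_strategy": "kSameAsRequested"})
        elif provider == "OpenVINOExecutionProvider":
            options.append({"device_type": "GPU_FP32", "cache_dir": cache_dir})
        else:
            options.append({})
    return options


providers = pick_providers()
print(providers, provider_options(providers, "/cache/openvino"))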
View File

@@ -1,40 +1,33 @@
from pathlib import Path
from typing import Any
import numpy as np
from insightface.model_zoo import RetinaFace
import onnxruntime as ort
from numpy.typing import NDArray
from app.models.base import InferenceModel
from app.models.transforms import decode_cv2
from app.models.session import ort_has_batch_dim, ort_expand_outputs
from app.models.transforms import decode_pil
from app.schemas import FaceDetectionOutput, ModelSession, ModelTask, ModelType
from .scrfd import SCRFD
from PIL import Image
from PIL.ImageOps import pad
class FaceDetector(InferenceModel):
depends = []
identity = (ModelType.DETECTION, ModelTask.FACIAL_RECOGNITION)
def __init__(
self,
model_name: str,
min_score: float = 0.7,
cache_dir: Path | str | None = None,
**model_kwargs: Any,
) -> None:
self.min_score = model_kwargs.pop("minScore", min_score)
super().__init__(model_name, cache_dir, **model_kwargs)
def _load(self) -> ModelSession:
session = self._make_session(self.model_path)
self.model = RetinaFace(session=session)
self.model.prepare(ctx_id=0, det_thresh=self.min_score, input_size=(640, 640))
if isinstance(session, ort.InferenceSession) and not ort_has_batch_dim(session):
ort_expand_outputs(session)
self.model = SCRFD(session=session)
return session
def _predict(self, inputs: NDArray[np.uint8] | bytes, **kwargs: Any) -> FaceDetectionOutput:
inputs = decode_cv2(inputs)
def _predict(self, inputs: NDArray[np.uint8] | bytes | Image.Image, **kwargs: Any) -> FaceDetectionOutput:
inputs = self._transform(inputs)
bboxes, landmarks = self._detect(inputs)
[bboxes], [landmarks] = self.model.detect(inputs, threshold=kwargs.pop("minScore", 0.7))
return {
"boxes": bboxes[:, :4].round(),
"scores": bboxes[:, 4],
@@ -44,5 +37,7 @@ class FaceDetector(InferenceModel):
def _detect(self, inputs: NDArray[np.uint8] | bytes) -> tuple[NDArray[np.float32], NDArray[np.float32]]:
return self.model.detect(inputs) # type: ignore
def configure(self, **kwargs: Any) -> None:
self.model.det_thresh = kwargs.pop("minScore", self.model.det_thresh)
def _transform(self, inputs: NDArray[np.uint8] | bytes | Image.Image) -> NDArray[np.uint8]:
image = decode_pil(inputs)
padded = pad(image, (640, 640), method=Image.Resampling.BICUBIC)
return np.array(padded, dtype=np.uint8)[None, ...]

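The _transform shown above letterboxes every image to the detector's fixed 640x640 input and prepends a batch axis. A self-contained sketch of the same preprocessing on a synthetic image, using only Pillow and numpy:

import numpy as np
from PIL import Image
from PIL.ImageOps import pad


def to_detector_input(image: Image.Image) -> np.ndarray:
    # letterbox to 640x640 without distorting the aspect ratio,
    # then add a batch axis so the result has NHWC shape (1, 640, 640, 3)
    padded = pad(image.convert("RGB"), (640, 640), method=Image.Resampling.BICUBIC)
    return np.array(padded, dtype=np.uint8)[None, ...]


batch = to_detector_input(Image.new("RGB", (1280, 720), color="gray"))
print(batch.shape)  # (1, 640, 640, 3)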
View File

@@ -2,18 +2,17 @@ from pathlib import Path
from typing import Any
import numpy as np
import onnx
import onnxruntime as ort
from insightface.model_zoo import ArcFaceONNX
from insightface.utils.face_align import norm_crop
from numpy.typing import NDArray
from onnx.tools.update_model_dims import update_inputs_outputs_dims
from PIL import Image
from app.config import clean_name, log
from app.models.base import InferenceModel
from app.models.session import ort_add_batch_dim, ort_has_batch_dim
from app.models.transforms import decode_cv2
from app.schemas import FaceDetectionOutput, FacialRecognitionOutput, ModelFormat, ModelSession, ModelTask, ModelType
from app.sessions import has_batch_axis
from app.schemas import FaceDetectionOutput, FacialRecognitionOutput, ModelSession, ModelTask, ModelType
class FaceRecognizer(InferenceModel):
@@ -27,14 +26,14 @@ class FaceRecognizer(InferenceModel):
cache_dir: Path | str | None = None,
**model_kwargs: Any,
) -> None:
super().__init__(clean_name(model_name), cache_dir, **model_kwargs)
self.min_score = model_kwargs.pop("minScore", min_score)
self.batch = self.model_format == ModelFormat.ONNX
super().__init__(clean_name(model_name), cache_dir, **model_kwargs)
def _load(self) -> ModelSession:
session = self._make_session(self.model_path)
if self.model_format == ModelFormat.ONNX and not has_batch_axis(session):
self._add_batch_axis(self.model_path)
if isinstance(session, ort.InferenceSession) and not ort_has_batch_dim(session):
log.info(f"Adding batch dimension to recognition model {self.model_name}")
ort_add_batch_dim(self.model_path, self.model_path)
session = self._make_session(self.model_path)
self.model = ArcFaceONNX(
self.model_path.with_suffix(".onnx").as_posix(),
@@ -48,20 +47,9 @@ class FaceRecognizer(InferenceModel):
if faces["boxes"].shape[0] == 0:
return []
inputs = decode_cv2(inputs)
cropped_faces = self._crop(inputs, faces)
embeddings = self._predict_batch(cropped_faces) if self.batch else self._predict_single(cropped_faces)
embeddings: NDArray[np.float32] = self.model.get_feat(self._crop(inputs, faces))
return self.postprocess(faces, embeddings)
def _predict_batch(self, cropped_faces: list[NDArray[np.uint8]]) -> NDArray[np.float32]:
embeddings: NDArray[np.float32] = self.model.get_feat(cropped_faces)
return embeddings
def _predict_single(self, cropped_faces: list[NDArray[np.uint8]]) -> NDArray[np.float32]:
embeddings: list[NDArray[np.float32]] = []
for face in cropped_faces:
embeddings.append(self.model.get_feat(face))
return np.concatenate(embeddings, axis=0)
def postprocess(self, faces: FaceDetectionOutput, embeddings: NDArray[np.float32]) -> FacialRecognitionOutput:
return [
{
@@ -74,13 +62,3 @@ class FaceRecognizer(InferenceModel):
def _crop(self, image: NDArray[np.uint8], faces: FaceDetectionOutput) -> list[NDArray[np.uint8]]:
return [norm_crop(image, landmark) for landmark in faces["landmarks"]]
def _add_batch_axis(self, model_path: Path) -> None:
log.debug(f"Adding batch axis to model {model_path}")
proto = onnx.load(model_path)
static_input_dims = [shape.dim_value for shape in proto.graph.input[0].type.tensor_type.shape.dim[1:]]
static_output_dims = [shape.dim_value for shape in proto.graph.output[0].type.tensor_type.shape.dim[1:]]
input_dims = {proto.graph.input[0].name: ["batch"] + static_input_dims}
output_dims = {proto.graph.output[0].name: ["batch"] + static_output_dims}
updated_proto = update_inputs_outputs_dims(proto, input_dims, output_dims)
onnx.save(updated_proto, model_path)

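One version of the recognizer above dispatches between a single batched get_feat call and a per-face loop, depending on whether the loaded model format supports a batch axis (the ONNX path does, the ARM NN path does not). A toy sketch of that dispatch with a fake embedding function standing in for ArcFace:

import numpy as np
from numpy.typing import NDArray


def fake_get_feat(faces: list[NDArray[np.uint8]] | NDArray[np.uint8]) -> NDArray[np.float32]:
    # stand-in for ArcFaceONNX.get_feat: one 512-d embedding per input face
    n = len(faces) if isinstance(faces, list) else 1
    return np.zeros((n, 512), dtype=np.float32)


def embed(cropped_faces: list[NDArray[np.uint8]], batch: bool) -> NDArray[np.float32]:
    if batch:
        return fake_get_feat(cropped_faces)  # one call, shape (N, 512)
    # fixed-shape models: embed one face at a time and stack the results
    return np.concatenate([fake_get_feat(face) for face in cropped_faces], axis=0)


faces = [np.zeros((112, 112, 3), dtype=np.uint8) for _ in range(3)]
assert embed(faces, batch=True).shape == embed(faces, batch=False).shape == (3, 512)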
View File

@@ -0,0 +1,325 @@
# Based on InsightFace-REST by SthPhoenix https://github.com/SthPhoenix/InsightFace-REST/blob/master/src/api_trt/modules/model_zoo/detectors/scrfd.py
# Primary changes made:
# 1. Removed CuPy-related code
# 2. Adapted proposal generation to be thread-safe
# 3. Added typing
# 4. Assume RGB input
# 5. Removed unused variables
# Copyright 2021 SthPhoenix
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -*- coding: utf-8 -*-
# Based on Jia Guo reference implementation at
# https://github.com/deepinsight/insightface/blob/master/detection/scrfd/tools/scrfd.py
from __future__ import division
import cv2
import numpy as np
from numba import njit
from app.schemas import ModelSession
from numpy.typing import NDArray
@njit(cache=True, nogil=True)
def nms(dets, threshold: float = 0.4) -> NDArray[np.float32]:
x1 = dets[:, 0]
y1 = dets[:, 1]
x2 = dets[:, 2]
y2 = dets[:, 3]
scores = dets[:, 4]
areas = (x2 - x1 + 1) * (y2 - y1 + 1)
order = scores.argsort()[::-1]
keep = []
while order.size > 0:
i = order[0]
keep.append(i)
xx1 = np.maximum(x1[i], x1[order[1:]])
yy1 = np.maximum(y1[i], y1[order[1:]])
xx2 = np.minimum(x2[i], x2[order[1:]])
yy2 = np.minimum(y2[i], y2[order[1:]])
w = np.maximum(0.0, xx2 - xx1 + 1)
h = np.maximum(0.0, yy2 - yy1 + 1)
inter = w * h
ovr = inter / (areas[i] + areas[order[1:]] - inter)
inds = np.where(ovr <= threshold)[0]
order = order[inds + 1]
return np.asarray(keep)
@njit(fastmath=True, cache=True, nogil=True)
def single_distance2bbox(point: NDArray[np.float32], distance: NDArray[np.float32], stride: int) -> NDArray[np.float32]:
"""
Fast conversion of single bbox distances to coordinates
:param point: Anchor point
:param distance: Bbox distances from anchor point
:param stride: Current stride scale
:return: bbox
"""
distance[0] = point[0] - distance[0] * stride
distance[1] = point[1] - distance[1] * stride
distance[2] = point[0] + distance[2] * stride
distance[3] = point[1] + distance[3] * stride
return distance
@njit(fastmath=True, cache=True, nogil=True)
def single_distance2kps(point: NDArray[np.float32], distance: NDArray[np.float32], stride: int) -> NDArray[np.float32]:
"""
Fast conversion of single keypoint distances to coordinates
:param point: Anchor point
:param distance: Keypoint distances from anchor point
:param stride: Current stride scale
:return: keypoint
"""
for ix in range(0, distance.shape[0], 2):
distance[ix] = distance[ix] * stride + point[0]
distance[ix + 1] = distance[ix + 1] * stride + point[1]
return distance
@njit(fastmath=True, cache=True, nogil=True)
def generate_proposals(
score_blob: NDArray[np.float32],
bbox_blob: NDArray[np.float32],
kpss_blob: NDArray[np.float32],
stride: int,
anchors: NDArray[np.float32],
threshold: float,
) -> tuple[NDArray[np.float32], NDArray[np.float32], NDArray[np.float32]]:
"""
Convert distances from anchors to actual coordinates on source image
and filter proposals by confidence threshold.
:param score_blob: Raw scores for stride
:param bbox_blob: Raw bbox distances for stride
:param kpss_blob: Raw keypoints distances for stride
:param stride: Stride scale
:param anchors: Precomputed anchors for stride
:param threshold: Confidence threshold
:return: Filtered scores, bboxes and keypoints
"""
idxs = []
for ix in range(score_blob.shape[0]):
if score_blob[ix][0] > threshold:
idxs.append(ix)
score_out = np.empty((len(idxs), 1), dtype="float32")
bbox_out = np.empty((len(idxs), 4), dtype="float32")
kpss_out = np.empty((len(idxs), 10), dtype="float32")
for i in range(len(idxs)):
ix = idxs[i]
score_out[i] = score_blob[ix]
bbox_out[i] = single_distance2bbox(anchors[ix], bbox_blob[ix], stride)
kpss_out[i] = single_distance2kps(anchors[ix], kpss_blob[ix], stride)
return score_out, bbox_out, kpss_out
@njit(fastmath=True, cache=True, nogil=True)
def filter(
bboxes_list: NDArray[np.float32],
kpss_list: NDArray[np.float32],
scores_list: NDArray[np.float32],
nms_threshold: float = 0.4,
) -> tuple[NDArray[np.float32], NDArray[np.float32]]:
"""
Filter postprocessed network outputs with NMS
:param bboxes_list: List of bboxes (np.ndarray)
:param kpss_list: List of keypoints (np.ndarray)
:param scores_list: List of scores (np.ndarray)
:return: Face bboxes with scores [t,l,b,r,score], and key points
"""
pre_det = np.hstack((bboxes_list, scores_list))
keep = nms(pre_det, threshold=nms_threshold)
det = pre_det[keep, :]
kpss = kpss_list[keep, :]
kpss = kpss.reshape((kpss.shape[0], -1, 2))
return det, kpss
class SCRFD:
def __init__(self, session: ModelSession):
self.session = session
self.center_cache: dict[tuple[int, int], list[NDArray[np.float32]]] = {}
self.nms_threshold = 0.4
self.fmc = 3
self._feat_stride_fpn = [8, 16, 32]
self._num_anchors = 2
def prepare(self, nms_threshold: float = 0.4) -> None:
"""
Populate class parameters
:param nms_threshold: Threshold for NMS IoU
"""
self.nms_threshold = nms_threshold
def detect(
self, imgs: NDArray[np.uint8], threshold: float = 0.5
) -> tuple[list[NDArray[np.float32]], list[NDArray[np.float32]]]:
"""
Run detection pipeline for provided images
:param imgs: Batch of raw RGB images as np.ndarray with NHWC shape
:param threshold: Confidence threshold
:return: Per-image face bboxes with scores [t,l,b,r,score], and per-image key points
"""
height, width = imgs.shape[1:3]
blob = self._preprocess(imgs)
net_outs = self._forward(blob)
batch_bboxes, batch_kpss, batch_scores = self._postprocess(net_outs, height, width, threshold)
dets_list = []
kpss_list = []
for e in range(imgs.shape[0]):
if len(batch_bboxes[e]) == 0:
det, kpss = np.zeros((0, 5), dtype="float32"), np.zeros((0, 10), dtype="float32")
else:
det, kpss = filter(batch_bboxes[e], batch_kpss[e], batch_scores[e], self.nms_threshold)
dets_list.append(det)
kpss_list.append(kpss)
return dets_list, kpss_list
@staticmethod
def _build_anchors(
input_height: int, input_width: int, strides: list[int], num_anchors: int
) -> list[NDArray[np.float32]]:
"""
Precompute anchor points for provided image size
:param input_height: Input image height
:param input_width: Input image width
:param strides: Model strides
:param num_anchors: Model num anchors
:return: box centers
"""
centers = []
for stride in strides:
height = input_height // stride
width = input_width // stride
anchor_centers = np.stack(np.mgrid[:height, :width][::-1], axis=-1).astype(np.float32)
anchor_centers = (anchor_centers * stride).reshape((-1, 2))
if num_anchors > 1:
anchor_centers = np.stack([anchor_centers] * num_anchors, axis=1).reshape((-1, 2))
centers.append(anchor_centers)
return centers
def _preprocess(self, images: NDArray[np.uint8]) -> NDArray[np.float32]:
"""
Normalize a batch of images on CPU and convert it to an NCHW blob
:param images: Batch of raw RGB images as np.ndarray with NHWC shape
:return: Preprocessed blob as np.ndarray with NCHW shape
"""
input_size = tuple(images[0].shape[0:2][::-1])
return cv2.dnn.blobFromImages(images, 1.0 / 128, input_size, (127.5, 127.5, 127.5), swapRB=False)
def _forward(self, blob: NDArray[np.float32]) -> list[NDArray[np.float32]]:
"""
Send input data to inference backend.
:param blob: Preprocessed image blob of shape NCHW
:return: network outputs
"""
return self.session.run(None, {"input.1": blob})
def _postprocess(
self, net_outs: list[NDArray[np.float32]], height: int, width: int, threshold: float
) -> tuple[list[NDArray[np.float32]], list[NDArray[np.float32]], list[NDArray[np.float32]]]:
"""
Precompute anchor points for provided image size and process network outputs
:param net_outs: Network outputs
:param height: Input image height
:param width: Input image width
:param threshold: Confidence threshold
:return: filtered bboxes, keypoints and scores
"""
key = (height, width)
if not self.center_cache.get(key):
self.center_cache[key] = self._build_anchors(height, width, self._feat_stride_fpn, self._num_anchors)
anchor_centers = self.center_cache[key]
bboxes, kpss, scores = self._process_strides(net_outs, threshold, anchor_centers)
return bboxes, kpss, scores
def _process_strides(
self, net_outs: list[NDArray[np.float32]], threshold: float, anchors: list[NDArray[np.float32]]
) -> tuple[list[NDArray[np.float32]], list[NDArray[np.float32]], list[NDArray[np.float32]]]:
"""
Process network outputs by strides and return results proposals filtered by threshold
:param net_outs: Network outputs
:param threshold: Confidence threshold
:param anchors: Precomputed anchor centers for all strides
:return: filtered bboxes, keypoints and scores
"""
batch_size = net_outs[0].shape[0]
bboxes_by_img = []
kpss_by_img = []
scores_by_img = []
for batch in range(batch_size):
scores_strided = []
bboxes_strided = []
kpss_strided = []
for idx, stride in enumerate(self._feat_stride_fpn):
score_blob = net_outs[idx][batch]
bbox_blob = net_outs[idx + self.fmc][batch]
kpss_blob = net_outs[idx + self.fmc * 2][batch]
stride_anchors = anchors[idx]
score_list, bbox_list, kpss_list = generate_proposals(
score_blob,
bbox_blob,
kpss_blob,
stride,
stride_anchors,
threshold,
)
scores_strided.append(score_list)
bboxes_strided.append(bbox_list)
kpss_strided.append(kpss_list)
bboxes_by_img.append(np.concatenate(bboxes_strided, axis=0))
kpss_by_img.append(np.concatenate(kpss_strided, axis=0))
scores_by_img.append(np.concatenate(scores_strided, axis=0))
return bboxes_by_img, kpss_by_img, scores_by_img

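The _build_anchors helper above precomputes, for each stride, the pixel coordinates of every feature-map cell, duplicated num_anchors times per location; detection then only has to add the predicted distances. A standalone numpy sketch of that computation for a single stride, small enough to print and inspect:

import numpy as np


def anchor_centers_for_stride(input_height: int, input_width: int, stride: int, num_anchors: int) -> np.ndarray:
    height, width = input_height // stride, input_width // stride
    # (x, y) pixel coordinates of each feature-map cell, scaled back to the input image
    centers = np.stack(np.mgrid[:height, :width][::-1], axis=-1).astype(np.float32)
    centers = (centers * stride).reshape((-1, 2))
    if num_anchors > 1:
        centers = np.stack([centers] * num_anchors, axis=1).reshape((-1, 2))
    return centers


centers = anchor_centers_for_stride(640, 640, stride=32, num_anchors=2)
print(centers.shape)  # (800, 2): a 20x20 grid with two anchors per cell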
View File

@@ -0,0 +1,34 @@
from pathlib import Path
import numpy as np
import onnx
import onnxruntime as ort
from numpy.typing import NDArray
from onnx.shape_inference import infer_shapes
from onnx.tools.update_model_dims import update_inputs_outputs_dims
def ort_has_batch_dim(session: ort.InferenceSession) -> bool:
return session.get_inputs()[0].shape[0] == "batch"
def ort_expand_outputs(session: ort.InferenceSession) -> None:
original_run = session.run
def run(output_names: list[str], input_feed: dict[str, NDArray[np.float32]]) -> list[NDArray[np.float32]]:
out: list[NDArray[np.float32]] = original_run(output_names, input_feed)
out = [np.expand_dims(o, axis=0) for o in out]
return out
session.run = run
def ort_add_batch_dim(input_path: Path, output_path: Path) -> None:
proto = onnx.load(input_path)
static_input_dims = [shape.dim_value for shape in proto.graph.input[0].type.tensor_type.shape.dim[1:]]
static_output_dims = [shape.dim_value for shape in proto.graph.output[0].type.tensor_type.shape.dim[1:]]
input_dims = {proto.graph.input[0].name: ["batch"] + static_input_dims}
output_dims = {proto.graph.output[0].name: ["batch"] + static_output_dims}
updated_proto = update_inputs_outputs_dims(proto, input_dims, output_dims)
inferred = infer_shapes(updated_proto)
onnx.save(inferred, output_path)

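ort_expand_outputs above wraps a session's run method so every output gains a leading batch axis, which lets callers treat a fixed-shape model like a batched one. A dependency-light sketch of that wrapping against a fake session object (the fake class is illustrative, not part of the repository):

import numpy as np


class FakeSession:
    # stands in for an ort.InferenceSession whose outputs lack a batch axis
    def run(self, output_names, input_feed):
        return [np.zeros((5,), dtype=np.float32), np.zeros((4, 2), dtype=np.float32)]


def expand_outputs(session) -> None:
    original_run = session.run
    def run(output_names, input_feed):
        # prepend a batch axis to every output so shapes become (1, ...)
        return [np.expand_dims(o, axis=0) for o in original_run(output_names, input_feed)]
    session.run = run


session = FakeSession()
expand_outputs(session)
print([o.shape for o in session.run(None, {})])  # [(1, 5), (1, 4, 2)]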
View File

@@ -3,6 +3,7 @@ from typing import IO
import cv2
import numpy as np
from numba import njit
from numpy.typing import NDArray
from PIL import Image
@@ -30,10 +31,11 @@ def to_numpy(img: Image.Image) -> NDArray[np.float32]:
return np.asarray(img if img.mode == "RGB" else img.convert("RGB"), dtype=np.float32) / 255.0
@njit(cache=True, fastmath=True, nogil=True)
def normalize(
img: NDArray[np.float32], mean: float | NDArray[np.float32], std: float | NDArray[np.float32]
) -> NDArray[np.float32]:
return np.divide(img - mean, std, dtype=np.float32)
return (img - mean) / std
def get_pil_resampling(resample: str) -> Image.Resampling:

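The jitted normalize above is element-wise (img - mean) / std in float32; with per-channel statistics the mean and std broadcast over height and width. A small sketch of that behavior without numba, using illustrative ImageNet-style statistics (the JIT only changes speed, not semantics):

import numpy as np


def normalize(img: np.ndarray, mean: np.ndarray, std: np.ndarray) -> np.ndarray:
    return ((img - mean) / std).astype(np.float32)


img = np.random.rand(224, 224, 3).astype(np.float32)
mean = np.array([0.485, 0.456, 0.406], dtype=np.float32)  # broadcasts over H and W
std = np.array([0.229, 0.224, 0.225], dtype=np.float32)
print(normalize(img, mean, std).shape)  # (224, 224, 3)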
View File

@@ -54,14 +54,6 @@ class ModelSource(StrEnum):
ModelIdentity = tuple[ModelType, ModelTask]
class SessionNode(Protocol):
@property
def name(self) -> str | None: ...
@property
def shape(self) -> tuple[int, ...]: ...
class ModelSession(Protocol):
def run(
self,
@@ -70,10 +62,6 @@ class ModelSession(Protocol):
run_options: Any = None,
) -> list[npt.NDArray[np.float32]]: ...
def get_inputs(self) -> list[SessionNode]: ...
def get_outputs(self) -> list[SessionNode]: ...
class HasProfiling(Protocol):
profiling: dict[str, float]

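SessionNode and ModelSession are typing Protocols, so any object exposing the right attributes satisfies them structurally; nothing has to inherit from onnxruntime types. A minimal sketch of a fake session matching the protocol shape shown above (class names are illustrative):

from typing import Any

import numpy as np
import numpy.typing as npt


class FakeNode:
    def __init__(self, name: str | None, shape: tuple[int, ...]) -> None:
        self.name = name
        self.shape = shape


class FakeSession:
    # structurally compatible with ModelSession: run, get_inputs, get_outputs
    def run(
        self,
        output_names: list[str] | None,
        input_feed: dict[str, npt.NDArray[np.float32]],
        run_options: Any = None,
    ) -> list[npt.NDArray[np.float32]]:
        return [np.zeros((1, 512), dtype=np.float32)]

    def get_inputs(self) -> list[FakeNode]:
        return [FakeNode("input.1", (1, 3, 640, 640))]

    def get_outputs(self) -> list[FakeNode]:
        return [FakeNode(None, (1, 512))]


print(FakeSession().run(None, {})[0].shape)  # (1, 512)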
View File

@@ -1,5 +0,0 @@
from app.schemas import ModelSession
def has_batch_axis(session: ModelSession) -> bool:
return not isinstance(session.get_inputs()[0].shape[0], int) or session.get_inputs()[0].shape[0] < 0

View File

@@ -1,129 +0,0 @@
from __future__ import annotations
from pathlib import Path
from typing import Any
import numpy as np
import onnxruntime as ort
from numpy.typing import NDArray
from app.models.constants import SUPPORTED_PROVIDERS
from app.schemas import SessionNode
from ..config import log, settings
class OrtSession:
def __init__(
self,
model_path: Path | str,
providers: list[str] | None = None,
provider_options: list[dict[str, Any]] | None = None,
sess_options: ort.SessionOptions | None = None,
):
self.model_path = Path(model_path)
self.providers = providers if providers is not None else self._providers_default
self.provider_options = provider_options if provider_options is not None else self._provider_options_default
self.sess_options = sess_options if sess_options is not None else self._sess_options_default
self.session = ort.InferenceSession(
self.model_path.as_posix(),
providers=self.providers,
provider_options=self.provider_options,
sess_options=self.sess_options,
)
def get_inputs(self) -> list[SessionNode]:
inputs: list[SessionNode] = self.session.get_inputs()
return inputs
def get_outputs(self) -> list[SessionNode]:
outputs: list[SessionNode] = self.session.get_outputs()
return outputs
def run(
self,
output_names: list[str] | None,
input_feed: dict[str, NDArray[np.float32]] | dict[str, NDArray[np.int32]],
run_options: Any = None,
) -> list[NDArray[np.float32]]:
outputs: list[NDArray[np.float32]] = self.session.run(output_names, input_feed, run_options)
return outputs
@property
def providers(self) -> list[str]:
return self._providers
@providers.setter
def providers(self, providers: list[str]) -> None:
log.info(f"Setting execution providers to {providers}, in descending order of preference")
self._providers = providers
@property
def _providers_default(self) -> list[str]:
available_providers = set(ort.get_available_providers())
log.debug(f"Available ORT providers: {available_providers}")
if (openvino := "OpenVINOExecutionProvider") in available_providers:
device_ids: list[str] = ort.capi._pybind_state.get_available_openvino_device_ids()
log.debug(f"Available OpenVINO devices: {device_ids}")
gpu_devices = [device_id for device_id in device_ids if device_id.startswith("GPU")]
if not gpu_devices:
log.warning("No GPU device found in OpenVINO. Falling back to CPU.")
available_providers.remove(openvino)
return [provider for provider in SUPPORTED_PROVIDERS if provider in available_providers]
@property
def provider_options(self) -> list[dict[str, Any]]:
return self._provider_options
@provider_options.setter
def provider_options(self, provider_options: list[dict[str, Any]]) -> None:
log.debug(f"Setting execution provider options to {provider_options}")
self._provider_options = provider_options
@property
def _provider_options_default(self) -> list[dict[str, Any]]:
options = []
for provider in self.providers:
match provider:
case "CPUExecutionProvider" | "CUDAExecutionProvider":
option = {"arena_extend_strategy": "kSameAsRequested"}
case "OpenVINOExecutionProvider":
option = {"device_type": "GPU_FP32", "cache_dir": (self.model_path.parent / "openvino").as_posix()}
case _:
option = {}
options.append(option)
return options
@property
def sess_options(self) -> ort.SessionOptions:
return self._sess_options
@sess_options.setter
def sess_options(self, sess_options: ort.SessionOptions) -> None:
log.debug(f"Setting execution_mode to {sess_options.execution_mode.name}")
log.debug(f"Setting inter_op_num_threads to {sess_options.inter_op_num_threads}")
log.debug(f"Setting intra_op_num_threads to {sess_options.intra_op_num_threads}")
self._sess_options = sess_options
@property
def _sess_options_default(self) -> ort.SessionOptions:
sess_options = ort.SessionOptions()
sess_options.enable_cpu_mem_arena = False
# avoid thread contention between models
if settings.model_inter_op_threads > 0:
sess_options.inter_op_num_threads = settings.model_inter_op_threads
# these defaults work well for CPU, but bottleneck GPU
elif settings.model_inter_op_threads == 0 and self.providers == ["CPUExecutionProvider"]:
sess_options.inter_op_num_threads = 1
if settings.model_intra_op_threads > 0:
sess_options.intra_op_num_threads = settings.model_intra_op_threads
elif settings.model_intra_op_threads == 0 and self.providers == ["CPUExecutionProvider"]:
sess_options.intra_op_num_threads = 2
if sess_options.inter_op_num_threads > 1:
sess_options.execution_mode = ort.ExecutionMode.ORT_PARALLEL
return sess_options

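The session options assembled above disable the CPU memory arena and pin thread counts so several models can share one process without contending for threads. A standalone sketch building equivalent ort.SessionOptions, with the thread counts hard-coded for illustration rather than read from settings:

import onnxruntime as ort


def cpu_friendly_session_options(inter_op: int = 1, intra_op: int = 2) -> ort.SessionOptions:
    opts = ort.SessionOptions()
    opts.enable_cpu_mem_arena = False  # keep memory growth predictable across models
    opts.inter_op_num_threads = inter_op  # avoid thread contention between models
    opts.intra_op_num_threads = intra_op
    if inter_op > 1:
        opts.execution_mode = ort.ExecutionMode.ORT_PARALLEL
    return opts


opts = cpu_friendly_session_options()
print(opts.inter_op_num_threads, opts.intra_op_num_threads)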
Some files were not shown because too many files have changed in this diff.