Compare commits


8 Commits

Author    SHA1        Message                                     Date
mertalev  72269ab58c  add cli                                     2024-07-12 16:50:48 -04:00
mertalev  3db69b94ed  support resnet models, test failed models   2024-07-12 16:50:48 -04:00
mertalev  b5acb71b05  prevent multidimensional bias               2024-07-12 16:50:48 -04:00
mertalev  b39cca1b43  fixes                                       2024-07-12 16:50:47 -04:00
mertalev  3d62011ae3  handle gather at the end                    2024-07-12 16:50:47 -04:00
mertalev  1ad348c407  gather -> slice                             2024-07-12 16:50:47 -04:00
mertalev  5dae920ac6  onnx2tf, 4d transpose                       2024-07-12 16:50:47 -04:00
mertalev  956480ab2c  enhance armnn conversion                    2024-07-12 16:50:46 -04:00
1023 changed files with 39313 additions and 67737 deletions


@@ -22,7 +22,6 @@ open-api/typescript-sdk/node_modules/
server/coverage/
server/node_modules/
server/upload/
server/src/queries
server/dist/
server/www/
@@ -30,4 +29,3 @@ web/node_modules/
web/coverage/
web/.svelte-kit
web/build/
web/.env


@@ -1,13 +1,11 @@
title: "[Feature] feature-name-goes-here"
title: "[Feature] <feature-name-goes-here>"
labels: ["feature"]
body:
- type: markdown
attributes:
value: |
Please use this form to request new feature for Immich.
Stick to only a single feature per request. If you list multiple different features at once,
your request will be closed.
Please use this form to request new feature for Immich
- type: checkboxes
attributes:

.github/FUNDING.yml (vendored): 1 changed line

@@ -1 +0,0 @@
custom: ['https://buy.immich.app']


@@ -83,6 +83,7 @@ body:
2.
3.
...
render: bash
validations:
required: true

.github/release.yml (vendored): 40 changed lines

@@ -1,29 +1,41 @@
changelog:
categories:
- title: 🚨 Breaking Changes
- title: ⚠️ Breaking Changes
labels:
- changelog:breaking-change
- breaking-change
- title: 🔒 Security
- title: 🗄️ Server
labels:
- changelog:security
- 🗄server
- title: 🚀 Features
- title: 📱 Mobile
labels:
- changelog:feature
- 📱mobile
- title: 🌟 Enhancements
- title: 🖥️ Web
labels:
- changelog:enhancement
- 🖥web
- title: 🐛 Bug fixes
- title: 🧠 Machine Learning
labels:
- changelog:bugfix
- 🧠machine-learning
- title: 📚 Documentation
- title: ⚡ CLI
labels:
- changelog:documentation
- cli
- title: 🌐 Translations
- title: 📓 Documentation
labels:
- changelog:translation
- documentation
- title: 🔨 Maintenance
labels:
- deployment
- dependencies
- renovate
- maintenance
- tech-debt
- title: Other changes
labels:
- "*"


@@ -16,28 +16,10 @@ concurrency:
cancel-in-progress: true
jobs:
pre-job:
runs-on: ubuntu-latest
outputs:
should_run: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
steps:
- name: Checkout code
uses: actions/checkout@v4
- id: found_paths
uses: dorny/paths-filter@v3
with:
filters: |
mobile:
- 'mobile/**'
- name: Check if we should force jobs to run
id: should_force
run: echo "should_force=${{ github.event_name == 'workflow_call' || github.event_name == 'workflow_dispatch' }}" >> "$GITHUB_OUTPUT"
build-sign-android:
name: Build and sign Android
needs: pre-job
# Skip when PR from a fork
if: ${{ !github.event.pull_request.head.repo.fork && github.actor != 'dependabot[bot]' && needs.pre-job.outputs.should_run == 'true' }}
if: ${{ !github.event.pull_request.head.repo.fork && github.actor != 'dependabot[bot]' }}
runs-on: macos-14
steps:


@@ -56,10 +56,10 @@ jobs:
uses: actions/checkout@v4
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.2.0
uses: docker/setup-qemu-action@v3.1.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3.6.1
uses: docker/setup-buildx-action@v3.4.0
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
@@ -88,7 +88,7 @@ jobs:
type=raw,value=latest,enable=${{ github.event_name == 'release' }}
- name: Build and push image
uses: docker/build-push-action@v6.7.0
uses: docker/build-push-action@v6.3.0
with:
file: cli/Dockerfile
platforms: linux/amd64,linux/arm64


@@ -35,7 +35,7 @@ jobs:
steps:
- name: Clean temporary images
if: "${{ env.TOKEN != '' }}"
uses: stumpylog/image-cleaner-action/ephemeral@v0.8.0
uses: stumpylog/image-cleaner-action/ephemeral@v0.7.0
with:
token: "${{ env.TOKEN }}"
owner: "immich-app"
@@ -64,7 +64,7 @@ jobs:
steps:
- name: Clean untagged images
if: "${{ env.TOKEN != '' }}"
uses: stumpylog/image-cleaner-action/untagged@v0.8.0
uses: stumpylog/image-cleaner-action/untagged@v0.7.0
with:
token: "${{ env.TOKEN }}"
owner: "immich-app"


@@ -17,158 +17,56 @@ permissions:
packages: write
jobs:
pre-job:
build_and_push:
name: Build and Push
runs-on: ubuntu-latest
outputs:
should_run_server: ${{ steps.found_paths.outputs.server == 'true' || steps.should_force.outputs.should_force == 'true' }}
should_run_ml: ${{ steps.found_paths.outputs.machine-learning == 'true' || steps.should_force.outputs.should_force == 'true' }}
steps:
- name: Checkout code
uses: actions/checkout@v4
- id: found_paths
uses: dorny/paths-filter@v3
with:
filters: |
server:
- 'server/**'
- 'openapi/**'
- 'web/**'
machine-learning:
- 'machine-learning/**'
- name: Check if we should force jobs to run
id: should_force
run: echo "should_force=${{ github.event_name == 'workflow_dispatch' || github.event_name == 'release' }}" >> "$GITHUB_OUTPUT"
build_and_push_ml:
name: Build and Push ML
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_ml == 'true' }}
runs-on: ubuntu-latest
env:
image: immich-machine-learning
context: machine-learning
file: machine-learning/Dockerfile
strategy:
# Prevent a failure in one image from stopping the other builds
fail-fast: false
matrix:
include:
- platforms: linux/amd64,linux/arm64
- image: immich-machine-learning
context: machine-learning
file: machine-learning/Dockerfile
platforms: linux/amd64,linux/arm64
device: cpu
- platforms: linux/amd64
- image: immich-machine-learning
context: machine-learning
file: machine-learning/Dockerfile
platforms: linux/amd64
device: cuda
suffix: -cuda
- platforms: linux/amd64
- image: immich-machine-learning
context: machine-learning
file: machine-learning/Dockerfile
platforms: linux/amd64
device: openvino
suffix: -openvino
- platforms: linux/arm64
- image: immich-machine-learning
context: machine-learning
file: machine-learning/Dockerfile
platforms: linux/arm64
device: armnn
suffix: -armnn
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3.6.1
- name: Login to Docker Hub
# Only push to Docker Hub when making a release
if: ${{ github.event_name == 'release' }}
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
# Skip when PR from a fork
if: ${{ !github.event.pull_request.head.repo.fork }}
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Generate docker image tags
id: metadata
uses: docker/metadata-action@v5
with:
flavor: |
# Disable latest tag
latest=false
images: |
name=ghcr.io/${{ github.repository_owner }}/${{env.image}}
name=altran1502/${{env.image}},enable=${{ github.event_name == 'release' }}
tags: |
# Tag with branch name
type=ref,event=branch,suffix=${{ matrix.suffix }}
# Tag with pr-number
type=ref,event=pr,suffix=${{ matrix.suffix }}
# Tag with git tag on release
type=ref,event=tag,suffix=${{ matrix.suffix }}
type=raw,value=release,enable=${{ github.event_name == 'release' }},suffix=${{ matrix.suffix }}
- name: Determine build cache output
id: cache-target
run: |
if [[ "${{ github.event_name }}" == "pull_request" ]]; then
# Essentially just ignore the cache output (PR can't write to registry cache)
echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
else
echo "cache-to=type=registry,mode=max,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{ env.image }}" >> $GITHUB_OUTPUT
fi
- name: Build and push image
uses: docker/build-push-action@v6.7.0
with:
context: ${{ env.context }}
file: ${{ env.file }}
platforms: ${{ matrix.platforms }}
# Skip pushing when PR from a fork
push: ${{ !github.event.pull_request.head.repo.fork }}
cache-from: type=registry,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{env.image}}
cache-to: ${{ steps.cache-target.outputs.cache-to }}
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}
build-args: |
DEVICE=${{ matrix.device }}
BUILD_ID=${{ github.run_id }}
BUILD_IMAGE=${{ github.event_name == 'release' && github.ref_name || steps.metadata.outputs.tags }}
BUILD_SOURCE_REF=${{ github.ref_name }}
BUILD_SOURCE_COMMIT=${{ github.sha }}
build_and_push_server:
name: Build and Push Server
runs-on: ubuntu-latest
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
env:
image: immich-server
context: .
file: server/Dockerfile
strategy:
fail-fast: false
matrix:
include:
- platforms: linux/amd64,linux/arm64
- image: immich-server
context: .
file: server/Dockerfile
platforms: linux/amd64,linux/arm64
device: cpu
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.2.0
uses: docker/setup-qemu-action@v3.1.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3.6.1
uses: docker/setup-buildx-action@v3.4.0
- name: Login to Docker Hub
# Only push to Docker Hub when making a release
@@ -195,8 +93,8 @@ jobs:
# Disable latest tag
latest=false
images: |
name=ghcr.io/${{ github.repository_owner }}/${{env.image}}
name=altran1502/${{env.image}},enable=${{ github.event_name == 'release' }}
name=ghcr.io/${{ github.repository_owner }}/${{matrix.image}}
name=altran1502/${{matrix.image}},enable=${{ github.event_name == 'release' }}
tags: |
# Tag with branch name
type=ref,event=branch,suffix=${{ matrix.suffix }}
@@ -213,18 +111,18 @@ jobs:
# Essentially just ignore the cache output (PR can't write to registry cache)
echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
else
echo "cache-to=type=registry,mode=max,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{ env.image }}" >> $GITHUB_OUTPUT
echo "cache-to=type=registry,mode=max,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{ matrix.image }}" >> $GITHUB_OUTPUT
fi
- name: Build and push image
uses: docker/build-push-action@v6.7.0
uses: docker/build-push-action@v6.3.0
with:
context: ${{ env.context }}
file: ${{ env.file }}
context: ${{ matrix.context }}
file: ${{ matrix.file }}
platforms: ${{ matrix.platforms }}
# Skip pushing when PR from a fork
push: ${{ !github.event.pull_request.head.repo.fork }}
cache-from: type=registry,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{env.image}}
cache-from: type=registry,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{matrix.image}}
cache-to: ${{ steps.cache-target.outputs.cache-to }}
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}
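For reference, the metadata and cache steps in this workflow boil down to a single buildx invocation along these lines. This is a sketch only: the owner and tag values are placeholders standing in for what docker/metadata-action and the cache-target step actually produce.

```bash
# Sketch of the effective build command for the CPU machine-learning image.
# "OWNER" and the "pr-1234" tag are placeholders, not values taken from the workflow.
docker buildx build machine-learning \
  --file machine-learning/Dockerfile \
  --platform linux/amd64,linux/arm64 \
  --build-arg DEVICE=cpu \
  --cache-from type=registry,ref=ghcr.io/OWNER/immich-build-cache:immich-machine-learning \
  --cache-to type=registry,mode=max,ref=ghcr.io/OWNER/immich-build-cache:immich-machine-learning \
  --tag ghcr.io/OWNER/immich-machine-learning:pr-1234 \
  --push
```

On pull requests from forks the workflow skips the push and discards the cache output instead of writing it back to the registry.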


@@ -2,8 +2,12 @@ name: Docs build
on:
push:
branches: [main]
paths:
- "docs/**"
pull_request:
branches: [main]
paths:
- "docs/**"
release:
types: [published]
@@ -12,26 +16,7 @@ concurrency:
cancel-in-progress: true
jobs:
pre-job:
runs-on: ubuntu-latest
outputs:
should_run: ${{ steps.found_paths.outputs.docs == 'true' || steps.should_force.outputs.should_force == 'true' }}
steps:
- name: Checkout code
uses: actions/checkout@v4
- id: found_paths
uses: dorny/paths-filter@v3
with:
filters: |
docs:
- 'docs/**'
- name: Check if we should force jobs to run
id: should_force
run: echo "should_force=${{ github.event_name == 'release' }}" >> "$GITHUB_OUTPUT"
build:
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run == 'true' }}
runs-on: ubuntu-latest
defaults:
run:


@@ -10,28 +10,10 @@ jobs:
runs-on: ubuntu-latest
outputs:
parameters: ${{ steps.parameters.outputs.result }}
artifact: ${{ steps.get-artifact.outputs.result }}
steps:
- if: ${{ github.event.workflow_run.conclusion != 'success' }}
run: echo 'The triggering workflow did not succeed' && exit 1
- name: Get artifact
id: get-artifact
uses: actions/github-script@v7
with:
script: |
let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({
owner: context.repo.owner,
repo: context.repo.repo,
run_id: context.payload.workflow_run.id,
});
let matchArtifact = allArtifacts.data.artifacts.filter((artifact) => {
return artifact.name == "docs-build-output"
})[0];
if (!matchArtifact) {
console.log("No artifact found with the name docs-build-output, build job was skipped")
return { found: false };
}
return { found: true, id: matchArtifact.id };
- if: ${{ github.event.workflow_run.conclusion == 'failure' }}
run: echo 'The triggering workflow failed' && exit 1
- name: Determine deploy parameters
id: parameters
uses: actions/github-script@v7
@@ -93,7 +75,7 @@ jobs:
deploy:
runs-on: ubuntu-latest
needs: checks
if: ${{ fromJson(needs.checks.outputs.artifact).found && fromJson(needs.checks.outputs.parameters).shouldDeploy }}
if: ${{ fromJson(needs.checks.outputs.parameters).shouldDeploy }}
steps:
- name: Checkout code
uses: actions/checkout@v4
@@ -116,11 +98,18 @@ jobs:
uses: actions/github-script@v7
with:
script: |
let artifact = ${{ needs.checks.outputs.artifact }};
let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({
owner: context.repo.owner,
repo: context.repo.repo,
run_id: context.payload.workflow_run.id,
});
let matchArtifact = allArtifacts.data.artifacts.filter((artifact) => {
return artifact.name == "docs-build-output"
})[0];
let download = await github.rest.actions.downloadArtifact({
owner: context.repo.owner,
repo: context.repo.repo,
artifact_id: artifact.id,
artifact_id: matchArtifact.id,
archive_format: 'zip',
});
let fs = require('fs');
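The inline github-script above only locates and downloads the `docs-build-output` artifact from the triggering run. Outside of Actions, the same fetch could be done with the GitHub CLI; a sketch assuming an authenticated `gh` and a known run id:

```bash
# RUN_ID is assumed to be the id of the docs build run that produced the artifact.
gh run download "$RUN_ID" --repo immich-app/immich --name docs-build-output --dir docs-build-output
```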


@@ -1,21 +0,0 @@
name: PR Label Validation
on:
pull_request_target:
types: [opened, labeled, unlabeled, synchronize]
jobs:
validate-release-label:
runs-on: ubuntu-latest
permissions:
issues: write
pull-requests: read
steps:
- name: Require PR to have a changelog label
uses: mheap/github-action-required-labels@v5
with:
mode: exactly
count: 1
use_regex: true
labels: "changelog:.*"
add_comment: true


@@ -29,17 +29,10 @@ jobs:
ref: ${{ steps.push-tag.outputs.commit_long_sha }}
steps:
- name: Generate a token
id: generate-token
uses: actions/create-github-app-token@v1
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Checkout
uses: actions/checkout@v4
with:
token: ${{ steps.generate-token.outputs.token }}
token: ${{ secrets.ORG_RELEASE_TOKEN }}
- name: Install Poetry
run: pipx install poetry
@@ -51,8 +44,10 @@ jobs:
id: push-tag
uses: EndBug/add-and-commit@v9
with:
default_author: github_actions
message: 'chore: version ${{ env.IMMICH_VERSION }}'
author_name: Alex The Bot
author_email: alex.tran1502@gmail.com
default_author: user_info
message: 'Version ${{ env.IMMICH_VERSION }}'
tag: ${{ env.IMMICH_VERSION }}
push: true


@@ -10,27 +10,8 @@ concurrency:
cancel-in-progress: true
jobs:
pre-job:
runs-on: ubuntu-latest
outputs:
should_run: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
steps:
- name: Checkout code
uses: actions/checkout@v4
- id: found_paths
uses: dorny/paths-filter@v3
with:
filters: |
mobile:
- 'mobile/**'
- name: Check if we should force jobs to run
id: should_force
run: echo "should_force=${{ github.event_name == 'release' }}" >> "$GITHUB_OUTPUT"
mobile-dart-analyze:
name: Run Dart Code Analysis
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run == 'true' }}
runs-on: ubuntu-latest


@@ -10,47 +10,8 @@ concurrency:
cancel-in-progress: true
jobs:
pre-job:
runs-on: ubuntu-latest
outputs:
should_run_web: ${{ steps.found_paths.outputs.web == 'true' || steps.should_force.outputs.should_force == 'true' }}
should_run_server: ${{ steps.found_paths.outputs.server == 'true' || steps.should_force.outputs.should_force == 'true' }}
should_run_cli: ${{ steps.found_paths.outputs.cli == 'true' || steps.should_force.outputs.should_force == 'true' }}
should_run_e2e: ${{ steps.found_paths.outputs.e2e == 'true' || steps.should_force.outputs.should_force == 'true' }}
should_run_mobile: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
should_run_ml: ${{ steps.found_paths.outputs.machine-learning == 'true' || steps.should_force.outputs.should_force == 'true' }}
should_run_e2e_web: ${{ steps.found_paths.outputs.e2e == 'true' || steps.found_paths.outputs.web == 'true' || steps.should_force.outputs.should_force == 'true' }}
should_run_e2e_server_cli: ${{ steps.found_paths.outputs.e2e == 'true' || steps.found_paths.outputs.server == 'true' || steps.found_paths.outputs.cli == 'true' || steps.should_force.outputs.should_force == 'true' }}
steps:
- name: Checkout code
uses: actions/checkout@v4
- id: found_paths
uses: dorny/paths-filter@v3
with:
filters: |
web:
- 'web/**'
- 'open-api/typescript-sdk/**'
server:
- 'server/**'
cli:
- 'cli/**'
- 'open-api/typescript-sdk/**'
e2e:
- 'e2e/**'
mobile:
- 'mobile/**'
machine-learning:
- 'machine-learning/**'
- name: Check if we should force jobs to run
id: should_force
run: echo "should_force=${{ github.event_name == 'workflow_dispatch' }}" >> "$GITHUB_OUTPUT"
server-unit-tests:
name: Server
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
runs-on: ubuntu-latest
defaults:
run:
@@ -86,8 +47,6 @@ jobs:
cli-unit-tests:
name: CLI
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_cli == 'true' }}
runs-on: ubuntu-latest
defaults:
run:
@@ -127,8 +86,6 @@ jobs:
cli-unit-tests-win:
name: CLI (Windows)
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_cli == 'true' }}
runs-on: windows-latest
defaults:
run:
@@ -161,8 +118,6 @@ jobs:
web-unit-tests:
name: Web
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_web == 'true' }}
runs-on: ubuntu-latest
defaults:
run:
@@ -204,54 +159,13 @@ jobs:
run: npm run test:cov
if: ${{ !cancelled() }}
e2e-tests-lint:
name: End-to-End Lint
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_e2e == 'true' }}
e2e-tests:
name: End-to-End Tests
runs-on: ubuntu-latest
defaults:
run:
working-directory: ./e2e
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup Node
uses: actions/setup-node@v4
with:
node-version-file: './e2e/.nvmrc'
- name: Run setup typescript-sdk
run: npm ci && npm run build
working-directory: ./open-api/typescript-sdk
if: ${{ !cancelled() }}
- name: Install dependencies
run: npm ci
if: ${{ !cancelled() }}
- name: Run linter
run: npm run lint
if: ${{ !cancelled() }}
- name: Run formatter
run: npm run format
if: ${{ !cancelled() }}
- name: Run tsc
run: npm run check
if: ${{ !cancelled() }}
e2e-tests-server-cli:
name: End-to-End Tests (Server & CLI)
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_e2e_server_cli == 'true' }}
runs-on: mich
defaults:
run:
working-directory: ./e2e
steps:
- name: Checkout code
uses: actions/checkout@v4
@@ -277,41 +191,16 @@ jobs:
run: npm ci
if: ${{ !cancelled() }}
- name: Docker build
run: docker compose build
- name: Run linter
run: npm run lint
if: ${{ !cancelled() }}
- name: Run e2e tests (api & cli)
run: npm run test
- name: Run formatter
run: npm run format
if: ${{ !cancelled() }}
e2e-tests-web:
name: End-to-End Tests (Web)
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_e2e_web == 'true' }}
runs-on: mich
defaults:
run:
working-directory: ./e2e
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
submodules: 'recursive'
- name: Setup Node
uses: actions/setup-node@v4
with:
node-version-file: './e2e/.nvmrc'
- name: Run setup typescript-sdk
run: npm ci && npm run build
working-directory: ./open-api/typescript-sdk
if: ${{ !cancelled() }}
- name: Install dependencies
run: npm ci
- name: Run tsc
run: npm run check
if: ${{ !cancelled() }}
- name: Install Playwright Browsers
@@ -322,14 +211,16 @@ jobs:
run: docker compose build
if: ${{ !cancelled() }}
- name: Run e2e tests (api & cli)
run: npm run test
if: ${{ !cancelled() }}
- name: Run e2e tests (web)
run: npx playwright test
if: ${{ !cancelled() }}
mobile-unit-tests:
name: Mobile
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_mobile == 'true' }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
@@ -344,8 +235,6 @@ jobs:
ml-unit-tests:
name: Machine Learning
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_ml == 'true' }}
runs-on: ubuntu-latest
defaults:
run:
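Taken together, the e2e job steps in this workflow correspond roughly to the following local sequence. This is a sketch only, assuming Docker and Node are available and the paths match the working-directory settings above.

```bash
# Build the typescript-sdk first, as the workflow does, then run the e2e suites.
(cd open-api/typescript-sdk && npm ci && npm run build)
cd e2e
npm ci
docker compose build
npm run test                        # server & CLI e2e tests
npx playwright install --with-deps  # browsers for the web e2e tests
npx playwright test                 # web e2e tests
```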

.gitmodules (vendored): 2 changed lines

@@ -1,6 +1,6 @@
[submodule "mobile/.isar"]
path = mobile/.isar
url = https://github.com/isar/isar
[submodule "e2e/test-assets"]
[submodule "server/test/assets"]
path = e2e/test-assets
url = https://github.com/immich-app/test-assets
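When the submodule entry is renamed like this while the `e2e/test-assets` path stays the same, an existing checkout usually needs its submodule configuration refreshed. A sketch of the standard commands:

```bash
# Re-read .gitmodules, then (re)fetch the submodule contents.
git submodule sync --recursive
git submodule update --init --recursive
```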

cli/.eslintignore (new file): 1 changed line

@@ -0,0 +1 @@
/dist

cli/.eslintrc.cjs (new file): 28 changed lines

@@ -0,0 +1,28 @@
module.exports = {
parser: '@typescript-eslint/parser',
parserOptions: {
project: 'tsconfig.json',
sourceType: 'module',
tsconfigRootDir: __dirname,
},
plugins: ['@typescript-eslint/eslint-plugin'],
extends: ['plugin:@typescript-eslint/recommended', 'plugin:prettier/recommended', 'plugin:unicorn/recommended'],
root: true,
env: {
node: true,
},
ignorePatterns: ['.eslintrc.js'],
rules: {
'@typescript-eslint/interface-name-prefix': 'off',
'@typescript-eslint/explicit-function-return-type': 'off',
'@typescript-eslint/explicit-module-boundary-types': 'off',
'@typescript-eslint/no-explicit-any': 'off',
'@typescript-eslint/no-floating-promises': 'error',
'unicorn/prefer-module': 'off',
'unicorn/prevent-abbreviations': 'off',
'unicorn/no-process-exit': 'off',
'unicorn/import-style': 'off',
curly: 2,
'prettier/prettier': 0,
},
};


@@ -1 +1 @@
20.17.0
20.15.1


@@ -1,4 +1,4 @@
FROM node:20.17.0-alpine3.20@sha256:1a526b97cace6b4006256570efa1a29cd1fe4b96a5301f8d48e87c5139438a45 AS core
FROM node:20.15.1-alpine3.20@sha256:34b7aa411056c85dbf71d240d26516949b3f72b318d796c26b57caaa1df5639a as core
WORKDIR /usr/src/open-api/typescript-sdk
COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./


@@ -4,18 +4,8 @@ Please see the [Immich CLI documentation](https://immich.app/docs/features/comma
# For developers
Before building the CLI, you must build the immich server and the open-api client. To build the server run the following in the server folder:
$ npm install
$ npm run build
Then, to build the open-api client run the following in the open-api folder:
$ ./bin/generate-open-api.sh
To run the Immich CLI from source, run the following in the cli folder:
$ npm install
$ npm run build
$ ts-node .
@@ -27,4 +17,3 @@ You can also build and install the CLI using
$ npm run build
$ npm install -g .
****
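For context, once built and installed globally, the CLI is driven from the shell. A hypothetical session, assuming the `login` and `upload` subcommands described in the linked CLI documentation:

```bash
# Placeholder server URL and API key; --recursive uploads a whole directory tree.
immich login https://immich.example.com YOUR_API_KEY
immich upload --recursive ~/Pictures
```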


@@ -1,61 +0,0 @@
import { FlatCompat } from '@eslint/eslintrc';
import js from '@eslint/js';
import typescriptEslint from '@typescript-eslint/eslint-plugin';
import tsParser from '@typescript-eslint/parser';
import globals from 'globals';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const compat = new FlatCompat({
baseDirectory: __dirname,
recommendedConfig: js.configs.recommended,
allConfig: js.configs.all,
});
export default [
{
ignores: ['eslint.config.mjs', 'dist'],
},
...compat.extends(
'plugin:@typescript-eslint/recommended',
'plugin:prettier/recommended',
'plugin:unicorn/recommended',
),
{
plugins: {
'@typescript-eslint': typescriptEslint,
},
languageOptions: {
globals: {
...globals.node,
},
parser: tsParser,
ecmaVersion: 5,
sourceType: 'module',
parserOptions: {
project: 'tsconfig.json',
tsconfigRootDir: __dirname,
},
},
rules: {
'@typescript-eslint/interface-name-prefix': 'off',
'@typescript-eslint/explicit-function-return-type': 'off',
'@typescript-eslint/explicit-module-boundary-types': 'off',
'@typescript-eslint/no-explicit-any': 'off',
'@typescript-eslint/no-floating-promises': 'error',
'unicorn/prefer-module': 'off',
'unicorn/prevent-abbreviations': 'off',
'unicorn/no-process-exit': 'off',
'unicorn/import-style': 'off',
curly: 2,
'prettier/prettier': 0,
'object-shorthand': ['error', 'always'],
},
},
];

cli/package-lock.json (generated): 1,822 changed lines

File diff suppressed because it is too large.


@@ -1,6 +1,6 @@
{
"name": "@immich/cli",
"version": "2.2.17",
"version": "2.2.8",
"description": "Command Line Interface (CLI) for Immich",
"type": "module",
"exports": "./dist/index.js",
@@ -13,33 +13,30 @@
"cli"
],
"devDependencies": {
"@eslint/eslintrc": "^3.1.0",
"@eslint/js": "^9.8.0",
"@immich/sdk": "file:../open-api/typescript-sdk",
"@types/byte-size": "^8.1.0",
"@types/cli-progress": "^3.11.0",
"@types/lodash-es": "^4.17.12",
"@types/mock-fs": "^4.13.1",
"@types/node": "^20.16.2",
"@typescript-eslint/eslint-plugin": "^8.0.0",
"@typescript-eslint/parser": "^8.0.0",
"@vitest/coverage-v8": "^2.0.5",
"byte-size": "^9.0.0",
"@types/node": "^20.14.10",
"@typescript-eslint/eslint-plugin": "^7.0.0",
"@typescript-eslint/parser": "^7.0.0",
"@vitest/coverage-v8": "^1.2.2",
"byte-size": "^8.1.1",
"cli-progress": "^3.12.0",
"commander": "^12.0.0",
"eslint": "^9.0.0",
"eslint": "^8.56.0",
"eslint-config-prettier": "^9.1.0",
"eslint-plugin-prettier": "^5.1.3",
"eslint-plugin-unicorn": "^55.0.0",
"globals": "^15.9.0",
"eslint-plugin-unicorn": "^54.0.0",
"mock-fs": "^5.2.0",
"prettier": "^3.2.5",
"prettier-plugin-organize-imports": "^4.0.0",
"typescript": "^5.3.3",
"vite": "^5.0.12",
"vite-tsconfig-paths": "^5.0.0",
"vitest": "^2.0.5",
"vitest-fetch-mock": "^0.3.0",
"vite-tsconfig-paths": "^4.3.2",
"vitest": "^1.2.2",
"vitest-fetch-mock": "^0.2.2",
"yaml": "^2.3.1"
},
"scripts": {
@@ -67,6 +64,6 @@
"lodash-es": "^4.17.21"
},
"volta": {
"node": "20.17.0"
"node": "20.15.1"
}
}


@@ -2,37 +2,37 @@
# Manual edits may be lost in future updates.
provider "registry.opentofu.org/cloudflare/cloudflare" {
version = "4.40.0"
constraints = "4.40.0"
version = "4.36.0"
constraints = "4.36.0"
hashes = [
"h1:GP2N1tXrmpxu+qEDvFAmkfv9aeZNhag3bchyJpGpYbU=",
"h1:HDJKZBQkVU0kQl4gViQ5L7EcFLn9hB0iuvO+ORJiDS4=",
"h1:KrbeEsZoCJOnnX68yNI5h3QhMjc5bBCQW4yvYaEFq3s=",
"h1:LelwnzU0OVn6g2+T9Ub9XdpC+vbheraIL/qgXhWBs/k=",
"h1:TIq9CynfWrKgCxKL97Akj89cYlvJKn/AL4UXogd8/FM=",
"h1:Uoy5oPdm1ipDG7yIMCUN1IXMpsTGXahPw3I0rVA/6wA=",
"h1:Wunfpm+IZhENdoimrh4iXiakVnCsfKOHo80yJUjMQXM=",
"h1:cRdCuahMOFrNyldnCInqGQRBT1DTkRPSfPnaf5r05iw=",
"h1:k+zpXg8BO7gdbTIfSGyQisHhs5aVWQVbPLa5uUdr2UA=",
"h1:kWNrzZ8Rh0OpHikexkmwJIIucD6SMZPi4oGyDsKJitw=",
"h1:lomfTTjK78BdSEVTFcJUBQRy7IQHuGQImMaPWaYpfgQ=",
"h1:oWcWlZe52ZRyLQciNe94RaWzhHifSTu03nlK0uL7rlM=",
"h1:p3JJrhGEPlPQP7Uwy9FNMdvqCyD8tuT4lnXuJ+pSF/M=",
"h1:wtB0sKxG2K/H41hWJI4uJdImWquuaP34Sip5LmfE410=",
"zh:01742e5946f936548f8e42120287ffc757abf97e7cbbe34e25c266a438fb54fd",
"zh:08d81f5a5aab4cc269f983b8c6b5be0e278105136aca9681740802619577371f",
"zh:0d75131ba70902cfc94a7a5900369bdde56528b2aad6e10b164449cc97d57396",
"zh:3890a715a012e197541daacdacb8cceec6d364814daa4640ddfe98a8ba9036cb",
"zh:58254ce5ebe1faed4664df86210c39d660bcdc60280f17b25fe4d4dbea21ea8c",
"zh:6b0abc1adbc2edee79368ce9f7338ebcb5d0bf941e8d7d9ac505b750f20f80a2",
"zh:81cc415d1477174a1ca288d25fdb57e5ee488c2d7f61f265ef995b255a53b0ce",
"zh:8680140c7fe5beaefe61c5cfa471bf88422dc0c0f05dad6d3cb482d4ffd22be4",
"h1:00/Y+l17VV4RquGSfwDnYsGYzyf2ZmdQwUgeIzXC7eg=",
"h1:489GpKItA/VRIUA5S4+F8MsnurGVciRvUFyIV81MJTU=",
"h1:7cnczyKGj3+gvaJ0r5JIVWLXPbQfkHYejac76MJx+I8=",
"h1:8rmr1PjJc14Xmor2eEvo5/WBojylt1eYdx6VbSU3Ulo=",
"h1:HjgphNjtgny5tkcUAQoGgBdcuQ+0IyhL8yLsiBqWAP0=",
"h1:LH3umxdBnJcAyeVoBLVn+PC0F0CzN6v9UN6lb6CqQPE=",
"h1:Xx6WUD/zB8fM9SjkFx06Fgx2K7aGJIVvsJS2pwqALEM=",
"h1:YizL5YN9zQ8YkSR6V/G201YrCVdnkF9EUIK4lpROWiA=",
"h1:aPcXVGjYcCJdqvWSzc/dEjwj05LnbWZje8IanygVjcI=",
"h1:eKCvfashdCqfDcFGXE2gq+XxAURD5SzuaQ9Brs3zLos=",
"h1:gpKcBYkBcfn/uF1A8W7MD/OysMZW7EU4QVYvPEEnxGc=",
"h1:kCkcxZZnkKAnMz9scUQHb19d9/l9FPOHovAyrvtA618=",
"h1:t8mXXnICTeKqoD29uvyLFHVWMfMzTUrJuHje8lpI0zU=",
"h1:zjzavjIdLDGRYsWd3v0HJz6ul12Cewj9RW/cqAQ4DxI=",
"zh:02665712b3893307596b3caab99cf1f2502d5caca18e22d4b37bb535e628e102",
"zh:1514b0d3ef62934484ac471113ee68cddec0c21e56b4f710922741fe9b6e6fdf",
"zh:1fab4dfcecbcea13267b42e5ff05ba0692aa2dcb247b8e633fea0daf49feb156",
"zh:24d8367295fe1f1b2be37802aecb96edf32f743364663ffe781d1bb92438395d",
"zh:34e84e7940c99dcf65663cfd25afac22bf5c8a5ff2cd21900c67180d3a072be9",
"zh:3d71d63204a329acf1d1de8638f2c725243cb94cf444d2d7acde54b3d1ac1696",
"zh:57831ba88e779a762bcfa224ba9eac8bc22ef9cd70cd541d848b351e0ba6a75c",
"zh:6407560f2e548afcb4852c91efc664627a9ee565c31a9c81fc9ea1806fca0567",
"zh:738ddbc664d75f4859aa09444a27809bc398795a8ea8f5be8531040690287712",
"zh:841ca2b2d78b6f8d33ec3435bc090c5e04a3a7d85c80df11227a7ea00d36f6b1",
"zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f",
"zh:a491d26236122ccb83dac8cb490d2c0aa1f4d3a0b4abe99300fd49b1a624f42f",
"zh:a70d9c469dc8d55715ba77c9d1a4ede1fdebf79e60ee18438a0844868db54e0d",
"zh:a7fcb7d5c4222e14ec6d9a15adf8b9a083d84b102c3d0e4a0d102df5a1360b62",
"zh:b4f9677174fabd199c8ebd2e9e5eb3528cf887e700569a4fb61eef4e070cec5e",
"zh:c27f0f7519221d75dae4a3787a59e05acd5cc9a0d30a390eff349a77d20d52e6",
"zh:db00d8605dbf43ca42fe1481a6c67fdcaa73debb7d2a0f613cb95ae5c5e7150e",
"zh:8b3d3d63354032ab9b2403c50728e9aa4e83c7367eaad2d18794221addeafc0f",
"zh:9e293443fe3127e488f540229983c1b9688268185f87567bb3d18e794697acd2",
"zh:b3a22439156e46461213db183e2e89569cd2e8d7cbcfc4b9f90469090e105807",
"zh:f430feb5d51891e84028459e57039045dea4f1f5fcf671161d8ac2d8f28763f3",
]
}


@@ -5,7 +5,7 @@ terraform {
required_providers {
cloudflare = {
source = "cloudflare/cloudflare"
version = "4.40.0"
version = "4.36.0"
}
}
}


@@ -9,6 +9,6 @@ resource "cloudflare_record" "immich_app_release_domain" {
proxied = true
ttl = 1
type = "CNAME"
content = data.terraform_remote_state.cloudflare_immich_app_docs.outputs.immich_app_branch_pages_hostname
value = data.terraform_remote_state.cloudflare_immich_app_docs.outputs.immich_app_branch_pages_hostname
zone_id = data.terraform_remote_state.cloudflare_account.outputs.immich_app_zone_id
}


@@ -2,37 +2,37 @@
# Manual edits may be lost in future updates.
provider "registry.opentofu.org/cloudflare/cloudflare" {
version = "4.40.0"
constraints = "4.40.0"
version = "4.36.0"
constraints = "4.36.0"
hashes = [
"h1:GP2N1tXrmpxu+qEDvFAmkfv9aeZNhag3bchyJpGpYbU=",
"h1:HDJKZBQkVU0kQl4gViQ5L7EcFLn9hB0iuvO+ORJiDS4=",
"h1:KrbeEsZoCJOnnX68yNI5h3QhMjc5bBCQW4yvYaEFq3s=",
"h1:LelwnzU0OVn6g2+T9Ub9XdpC+vbheraIL/qgXhWBs/k=",
"h1:TIq9CynfWrKgCxKL97Akj89cYlvJKn/AL4UXogd8/FM=",
"h1:Uoy5oPdm1ipDG7yIMCUN1IXMpsTGXahPw3I0rVA/6wA=",
"h1:Wunfpm+IZhENdoimrh4iXiakVnCsfKOHo80yJUjMQXM=",
"h1:cRdCuahMOFrNyldnCInqGQRBT1DTkRPSfPnaf5r05iw=",
"h1:k+zpXg8BO7gdbTIfSGyQisHhs5aVWQVbPLa5uUdr2UA=",
"h1:kWNrzZ8Rh0OpHikexkmwJIIucD6SMZPi4oGyDsKJitw=",
"h1:lomfTTjK78BdSEVTFcJUBQRy7IQHuGQImMaPWaYpfgQ=",
"h1:oWcWlZe52ZRyLQciNe94RaWzhHifSTu03nlK0uL7rlM=",
"h1:p3JJrhGEPlPQP7Uwy9FNMdvqCyD8tuT4lnXuJ+pSF/M=",
"h1:wtB0sKxG2K/H41hWJI4uJdImWquuaP34Sip5LmfE410=",
"zh:01742e5946f936548f8e42120287ffc757abf97e7cbbe34e25c266a438fb54fd",
"zh:08d81f5a5aab4cc269f983b8c6b5be0e278105136aca9681740802619577371f",
"zh:0d75131ba70902cfc94a7a5900369bdde56528b2aad6e10b164449cc97d57396",
"zh:3890a715a012e197541daacdacb8cceec6d364814daa4640ddfe98a8ba9036cb",
"zh:58254ce5ebe1faed4664df86210c39d660bcdc60280f17b25fe4d4dbea21ea8c",
"zh:6b0abc1adbc2edee79368ce9f7338ebcb5d0bf941e8d7d9ac505b750f20f80a2",
"zh:81cc415d1477174a1ca288d25fdb57e5ee488c2d7f61f265ef995b255a53b0ce",
"zh:8680140c7fe5beaefe61c5cfa471bf88422dc0c0f05dad6d3cb482d4ffd22be4",
"h1:00/Y+l17VV4RquGSfwDnYsGYzyf2ZmdQwUgeIzXC7eg=",
"h1:489GpKItA/VRIUA5S4+F8MsnurGVciRvUFyIV81MJTU=",
"h1:7cnczyKGj3+gvaJ0r5JIVWLXPbQfkHYejac76MJx+I8=",
"h1:8rmr1PjJc14Xmor2eEvo5/WBojylt1eYdx6VbSU3Ulo=",
"h1:HjgphNjtgny5tkcUAQoGgBdcuQ+0IyhL8yLsiBqWAP0=",
"h1:LH3umxdBnJcAyeVoBLVn+PC0F0CzN6v9UN6lb6CqQPE=",
"h1:Xx6WUD/zB8fM9SjkFx06Fgx2K7aGJIVvsJS2pwqALEM=",
"h1:YizL5YN9zQ8YkSR6V/G201YrCVdnkF9EUIK4lpROWiA=",
"h1:aPcXVGjYcCJdqvWSzc/dEjwj05LnbWZje8IanygVjcI=",
"h1:eKCvfashdCqfDcFGXE2gq+XxAURD5SzuaQ9Brs3zLos=",
"h1:gpKcBYkBcfn/uF1A8W7MD/OysMZW7EU4QVYvPEEnxGc=",
"h1:kCkcxZZnkKAnMz9scUQHb19d9/l9FPOHovAyrvtA618=",
"h1:t8mXXnICTeKqoD29uvyLFHVWMfMzTUrJuHje8lpI0zU=",
"h1:zjzavjIdLDGRYsWd3v0HJz6ul12Cewj9RW/cqAQ4DxI=",
"zh:02665712b3893307596b3caab99cf1f2502d5caca18e22d4b37bb535e628e102",
"zh:1514b0d3ef62934484ac471113ee68cddec0c21e56b4f710922741fe9b6e6fdf",
"zh:1fab4dfcecbcea13267b42e5ff05ba0692aa2dcb247b8e633fea0daf49feb156",
"zh:24d8367295fe1f1b2be37802aecb96edf32f743364663ffe781d1bb92438395d",
"zh:34e84e7940c99dcf65663cfd25afac22bf5c8a5ff2cd21900c67180d3a072be9",
"zh:3d71d63204a329acf1d1de8638f2c725243cb94cf444d2d7acde54b3d1ac1696",
"zh:57831ba88e779a762bcfa224ba9eac8bc22ef9cd70cd541d848b351e0ba6a75c",
"zh:6407560f2e548afcb4852c91efc664627a9ee565c31a9c81fc9ea1806fca0567",
"zh:738ddbc664d75f4859aa09444a27809bc398795a8ea8f5be8531040690287712",
"zh:841ca2b2d78b6f8d33ec3435bc090c5e04a3a7d85c80df11227a7ea00d36f6b1",
"zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f",
"zh:a491d26236122ccb83dac8cb490d2c0aa1f4d3a0b4abe99300fd49b1a624f42f",
"zh:a70d9c469dc8d55715ba77c9d1a4ede1fdebf79e60ee18438a0844868db54e0d",
"zh:a7fcb7d5c4222e14ec6d9a15adf8b9a083d84b102c3d0e4a0d102df5a1360b62",
"zh:b4f9677174fabd199c8ebd2e9e5eb3528cf887e700569a4fb61eef4e070cec5e",
"zh:c27f0f7519221d75dae4a3787a59e05acd5cc9a0d30a390eff349a77d20d52e6",
"zh:db00d8605dbf43ca42fe1481a6c67fdcaa73debb7d2a0f613cb95ae5c5e7150e",
"zh:8b3d3d63354032ab9b2403c50728e9aa4e83c7367eaad2d18794221addeafc0f",
"zh:9e293443fe3127e488f540229983c1b9688268185f87567bb3d18e794697acd2",
"zh:b3a22439156e46461213db183e2e89569cd2e8d7cbcfc4b9f90469090e105807",
"zh:f430feb5d51891e84028459e57039045dea4f1f5fcf671161d8ac2d8f28763f3",
]
}


@@ -5,7 +5,7 @@ terraform {
required_providers {
cloudflare = {
source = "cloudflare/cloudflare"
version = "4.40.0"
version = "4.36.0"
}
}
}


@@ -9,7 +9,7 @@ resource "cloudflare_record" "immich_app_branch_subdomain" {
proxied = true
ttl = 1
type = "CNAME"
content = "${replace(var.prefix_name, "/\\/|\\./", "-")}.${local.is_release ? data.terraform_remote_state.cloudflare_account.outputs.immich_app_archive_pages_project_subdomain : data.terraform_remote_state.cloudflare_account.outputs.immich_app_preview_pages_project_subdomain}"
value = "${replace(var.prefix_name, "/\\/|\\./", "-")}.${local.is_release ? data.terraform_remote_state.cloudflare_account.outputs.immich_app_archive_pages_project_subdomain : data.terraform_remote_state.cloudflare_account.outputs.immich_app_preview_pages_project_subdomain}"
zone_id = data.terraform_remote_state.cloudflare_account.outputs.immich_app_zone_id
}


@@ -46,8 +46,6 @@ services:
depends_on:
- redis
- database
healthcheck:
disable: false
immich-web:
container_name: immich_web
@@ -93,12 +91,10 @@ services:
depends_on:
- database
restart: unless-stopped
healthcheck:
disable: false
redis:
container_name: immich_redis
image: redis:6.2-alpine@sha256:e3b17ba9479deec4b7d1eeec1548a253acc5374d68d3b27937fcfe4df8d18c7e
image: redis:6.2-alpine@sha256:328fe6a5822256d065debb36617a8169dbfbd77b797c525288e465f56c1d392b
healthcheck:
test: redis-cli ping || exit 1


@@ -21,8 +21,6 @@ services:
- redis
- database
restart: always
healthcheck:
disable: false
immich-machine-learning:
container_name: immich_machine_learning
@@ -35,19 +33,15 @@ services:
dockerfile: Dockerfile
args:
- DEVICE=cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
ports:
- 3003:3003
volumes:
- model-cache:/cache
env_file:
- .env
restart: always
healthcheck:
disable: false
redis:
container_name: immich_redis
image: redis:6.2-alpine@sha256:e3b17ba9479deec4b7d1eeec1548a253acc5374d68d3b27937fcfe4df8d18c7e
image: redis:6.2-alpine@sha256:328fe6a5822256d065debb36617a8169dbfbd77b797c525288e465f56c1d392b
healthcheck:
test: redis-cli ping || exit 1
restart: always
@@ -71,7 +65,7 @@ services:
interval: 5m
start_interval: 30s
start_period: 5m
command: ["postgres", "-c", "shared_preload_libraries=vectors.so", "-c", 'search_path="$$user", public, vectors', "-c", "logging_collector=on", "-c", "max_wal_size=2GB", "-c", "shared_buffers=512MB", "-c", "wal_compression=on"]
command: ["postgres", "-c" ,"shared_preload_libraries=vectors.so", "-c", 'search_path="$$user", public, vectors', "-c", "logging_collector=on", "-c", "max_wal_size=2GB", "-c", "shared_buffers=512MB", "-c", "wal_compression=on"]
restart: always
# set IMMICH_METRICS=true in .env to enable metrics
@@ -79,7 +73,7 @@ services:
container_name: immich_prometheus
ports:
- 9090:9090
image: prom/prometheus@sha256:f6639335d34a77d9d9db382b92eeb7fc00934be8eae81dbc03b31cfe90411a94
image: prom/prometheus@sha256:f20d3127bf2876f4a1df76246fca576b41ddf1125ed1c546fbd8b16ea55117e6
volumes:
- ./prometheus.yml:/etc/prometheus/prometheus.yml
- prometheus-data:/prometheus
@@ -91,7 +85,7 @@ services:
command: ['./run.sh', '-disable-reporting']
ports:
- 3000:3000
image: grafana/grafana:11.2.0-ubuntu@sha256:8e2c13739563c3da9d45de96c6bcb63ba617cac8c571c060112c7fc8ad6914e9
image: grafana/grafana:11.1.0-ubuntu@sha256:c7fc29ec783d5e7fc1bdfaad6f92345a345cffbc5d21c388ca228175006fc107
volumes:
- grafana-data:/var/lib/grafana


@@ -16,7 +16,6 @@ services:
# file: hwaccel.transcoding.yml
# service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
volumes:
# Do not edit the next line. If you want to change the media storage location on your system, edit the value of UPLOAD_LOCATION in the .env file
- ${UPLOAD_LOCATION}:/usr/src/app/upload
- /etc/localtime:/etc/localtime:ro
env_file:
@@ -27,8 +26,6 @@ services:
- redis
- database
restart: always
healthcheck:
disable: false
immich-machine-learning:
container_name: immich_machine_learning
@@ -43,12 +40,10 @@ services:
env_file:
- .env
restart: always
healthcheck:
disable: false
redis:
container_name: immich_redis
image: docker.io/redis:6.2-alpine@sha256:e3b17ba9479deec4b7d1eeec1548a253acc5374d68d3b27937fcfe4df8d18c7e
image: docker.io/redis:6.2-alpine@sha256:328fe6a5822256d065debb36617a8169dbfbd77b797c525288e465f56c1d392b
healthcheck:
test: redis-cli ping || exit 1
restart: always
@@ -62,14 +57,13 @@ services:
POSTGRES_DB: ${DB_DATABASE_NAME}
POSTGRES_INITDB_ARGS: '--data-checksums'
volumes:
# Do not edit the next line. If you want to change the database storage location on your system, edit the value of DB_DATA_LOCATION in the .env file
- ${DB_DATA_LOCATION}:/var/lib/postgresql/data
healthcheck:
test: pg_isready --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' || exit 1; Chksum="$$(psql --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' --tuples-only --no-align --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
interval: 5m
start_interval: 30s
start_period: 5m
command: ["postgres", "-c", "shared_preload_libraries=vectors.so", "-c", 'search_path="$$user", public, vectors', "-c", "logging_collector=on", "-c", "max_wal_size=2GB", "-c", "shared_buffers=512MB", "-c", "wal_compression=on"]
command: ["postgres", "-c" ,"shared_preload_libraries=vectors.so", "-c", 'search_path="$$user", public, vectors', "-c", "logging_collector=on", "-c", "max_wal_size=2GB", "-c", "shared_buffers=512MB", "-c", "wal_compression=on"]
restart: always
volumes:


@@ -12,7 +12,6 @@ DB_DATA_LOCATION=./postgres
IMMICH_VERSION=release
# Connection secret for postgres. You should change it to a random password
# Please use only the characters `A-Za-z0-9`, without special characters or spaces
DB_PASSWORD=postgres
# The values below this line do not need to be changed


@@ -1 +1 @@
20.17.0
20.15.1


@@ -1,7 +1,7 @@
---
title: The Immich core team goes full-time
authors: [alextran]
tags: [update, announcement, FUTO]
tags: [update, announcement, futo]
date: 2024-05-01T00:00
---


@@ -1,91 +0,0 @@
---
title: Licensing announcement - Purchase a license to support Immich
authors: [alextran]
tags: [update, announcement, FUTO]
date: 2024-07-18T00:00
---
Hello everybody,
Firstly, on behalf of the Immich team, I'd like to thank everybody for your continuous support of Immich since the very first day! Your contributions, encouragement, and community engagement have helped bring Immich to its current state. The team and I are forever grateful for that.
Since our [last announcement of the core team joining FUTO to work on Immich full-time](https://immich.app/blog/2024/immich-core-team-goes-fulltime), one of the goals of our new position is to foster a healthy relationship between the developers and the users. We believe that this enables us to create great software, establish transparent policies and build trust.
We want to build a great software application that brings value to you and your loved ones' lives. We are not using you as a product, i.e., selling or tracking your data. We are not putting annoying ads into our software. We respect your privacy. We want to be compensated for the hard work we put in to build Immich for you.
With those notes, we have enabled a way for you to financially support the continued development of Immich, ensuring the software can move forward and will be maintained, by offering a lifetime license of the software. We think if you like and use software, you should pay for it, but _we're never going to force anyone to pay or try to limit Immich for those who don't._
There are two types of license that you can choose to purchase: **Server License** and **Individual License**.
### Server License
This is a lifetime license costing **$99.99**. The license is applied to the whole server. You and all users that use your server are licensed.
### Individual License
This is a lifetime license costing **$24.99**. The license is applied to a single user, and can be used on any server they choose to connect to.
<img
width="837"
alt="license-social-gh"
src="https://github.com/user-attachments/assets/241932ed-ef3b-44ec-a9e2-ee80754e0cca"
/>
You can purchase the license on [our page - https://buy.immich.app](https://buy.immich.app).
Starting with release `v1.109.0` you can purchase and enter your purchased license key directly in the app.
<img
width="1414"
alt="license-page-gh"
src="https://github.com/user-attachments/assets/364fc32a-f6ef-4594-9fea-28d5a26ad77c"
/>
## Thank you
Thank you again for your support, this will help create a strong foundation and stability for the Immich team to continue developing and maintaining the project that you love to use.
<p align="center">
<img
src="https://media.giphy.com/media/v1.Y2lkPTc5MGI3NjExbjY2eWc5Y2F0ZW56MmR4aWE0dDhzZXlidXRmYWZyajl1bWZidXZpcyZlcD12MV9pbnRlcm5hbF9naWZfYnlfaWQmY3Q9Zw/87CKDqErVfMqY/giphy.gif"
width="550"
title="SUPPORT THE PROJECT!"
/>
</p>
<br />
<br />
Cheers! 🎉
Immich team
# FAQ
### 1. Where can I purchase a license?
There are several places where you can purchase the license from
- [https://buy.immich.app](https://buy.immich.app)
- [https://pay.futo.org](https://pay.futo.org/)
- or directly from the app.
### 2. Do I need both _Individual License_ and _Server License_?
No,
If you are the admin and the sole user, or your instance has less than a total of 4 users, you can buy the **Individual License** for each user.
If your instance has more than 4 users, it is more cost-effective to buy the **Server License**, which will license all the users on your instance.
### 3. What do I do if I don't pay?
You can continue using Immich without any restriction.
### 4. Will there be any paywalled features?
No, there will never be any paywalled features.
### 5. Where can I get support regarding payment issues?
You can email us with your `orderId` and your email address `billing@futo.org` or on our Discord server.


@@ -1,7 +1,6 @@
---
title: Immich Update - July 2024
authors: [alextran]
date: 2024-07-01T00:00
tags: [update, v1.106.0]
---


@@ -52,25 +52,14 @@ On iOS (iPhone and iPad), the operating system determines if a particular app ca
- Disable Background App Refresh for apps that don't need background tasks to run. This will reduce the competition for background task invocation for Immich.
- Use the Immich app more often.
### Why are features not working with a self-signed cert or mTLS?
Due to limitations in the upstream app/video library, using a self-signed TLS certificate or mutual TLS may break video playback or asset upload (both foreground and/or background).
We recommend using a real SSL certificate from a free provider, for example [Let's Encrypt](https://letsencrypt.org/).
---
## Assets
### Does Immich change the file?
No, Immich does not modify the original files.
All edited metadata is saved in companion `.xmp` sidecar files and the database.
However, Immich will delete original files that have been trashed when the trash is emptied in the Immich UI.
### Why do my file names appear as a random string in the file manager?
When Storage Template is off (default) Immich saves the file names in a random string (also known as random UUIDs) to prevent duplicate file names. To retrieve the original file names, you must enable the Storage Template and then run the STORAGE TEMPLATE MIGRATION job.
It is recommended to read about [Storage Template](https://immich.app/docs/administration/storage-template) before activation.
No, Immich does not touch the original file under any circumstances,
all edited metadata are saved in the companion sidecar file and the database.
### Can I add my existing photo library?
@@ -168,19 +157,6 @@ We haven't implemented an official mechanism for creating albums from external l
Duplicate checking only exists for upload libraries, using the file hash. Furthermore, duplicate checking is not global, but _per library_. Therefore, a situation where the same file appears twice in the timeline is possible, especially for external libraries.
### Why are my edits to files not being saved in read-only external libraries?
Images in read-write external libraries (the default) can be edited as normal.
In read-only libraries (`:ro` in the `docker-compose.yml`), Immich is unable to create the `.xmp` sidecar files to store edited file metadata.
For this reason, the metadata (timestamp, location, description, star rating, etc.) cannot be edited for files in read-only external libraries.
### How are deletions of files handled in external libraries?
Immich will attempt to delete original files that have been trashed when the trash is emptied.
In read-write external libraries (the default), Immich will delete the original file.
In read-only libraries (`:ro` in the `docker-compose.yml`), files can still be trashed in the UI.
However, when the trash is emptied, the files will re-appear in the main timeline since Immich is unable to delete the original file.
---
## Machine Learning
@@ -191,7 +167,7 @@ Immich uses CLIP models. For more information about CLIP and its capabilities, r
### How does facial recognition work?
See [How Facial Recognition Works](/docs/features/facial-recognition#How-Facial-Recognition-Works) for details.
For face detection and recognition, Immich uses [InsightFace models](https://github.com/deepinsight/insightface/tree/master/model_zoo).
### How can I disable machine learning?
@@ -205,15 +181,19 @@ However, disabling all jobs will not disable the machine learning service itself
### I'm getting errors about models being corrupt or failing to download. What do I do?
You can delete the model cache volume, where models are downloaded. This will give the service a clean environment to download the model again. If models are failing to download entirely, you can manually download them from [Hugging Face][huggingface] and place them in the cache folder.
You can delete the model cache volume, where models are downloaded. This will give the service a clean environment to download the model again. If models are failing to download entirely, you can manually download them from [Huggingface][huggingface] and place them in the cache folder.
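A sketch of what clearing the model cache can look like with the default compose setup; the exact volume name depends on your compose project prefix (often `immich_model-cache`):

```bash
docker compose stop immich-machine-learning
docker volume rm immich_model-cache   # adjust the prefix to match your compose project
docker compose up -d immich-machine-learning
```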
### Can I use a custom CLIP model?
No, this is not supported. Only models listed in the [Hugging Face][huggingface] page are compatible. Feel free to make a feature request if there's a model not listed here that you think should be added.
No, this is not supported. Only models listed in the [Huggingface][huggingface] page are compatible. Feel free to make a feature request if there's a model not listed here that you think should be added.
### I want to be able to search in other languages besides English. How can I do that?
You can change to a multilingual CLIP model. See [here](/docs/features/smart-search#CLIP-model) for instructions.
You can change to a multilingual model listed [here](https://huggingface.co/collections/immich-app/multilingual-clip-654eb08c2382f591eeb8c2a7) by going to Administration > Machine Learning Settings > Smart Search and replacing the name of the model. Be sure to re-run Smart Search on all assets after this change. You can then search in over 100 languages.
:::note
Feel free to make a feature request if there's a model you want to use that isn't in [Immich Huggingface list][huggingface].
:::
### Does Immich support Facial Recognition for videos?
@@ -254,7 +234,7 @@ ls clip/ facial-recognition/
### Why is Immich slow on low-memory systems like the Raspberry Pi?
Immich optionally uses transcoding and machine learning for several features. However, it can be too heavy to run on a Raspberry Pi. You can [mitigate](/docs/FAQ#can-i-lower-cpu-and-ram-usage) this or host Immich's machine-learning container on a [more powerful system](/docs/guides/remote-machine-learning), or [disable](/docs/FAQ#how-can-i-disable-machine-learning) machine learning entirely.
Immich optionally uses machine learning for several features. However, it can be too heavy to run on a Raspberry Pi. You can [mitigate](/docs/FAQ#can-i-lower-cpu-and-ram-usage) this or host Immich's machine-learning container on a [more powerful system](/docs/guides/remote-machine-learning), or [disable](/docs/FAQ#how-can-i-disable-machine-learning) machine learning entirely.
### Can I lower CPU and RAM usage?
@@ -263,12 +243,10 @@ The initial backup is the most intensive due to the number of jobs running. The
- Lower the job concurrency for these jobs to 1.
- Under Settings > Transcoding Settings > Threads, set the number of threads to a low number like 1 or 2.
- Under Settings > Machine Learning Settings > Facial Recognition > Model Name, you can change the facial recognition model to `buffalo_s` instead of `buffalo_l`. The former is a smaller and faster model, albeit not as good.
- For facial recognition on new images to work properly, You must re-run the Face Detection job for all images after this.
- At the container level, you can [set resource constraints](/docs/FAQ#can-i-limit-cpu-and-ram-usage) to lower usage further.
- It's recommended to only apply these constraints _after_ taking some of the measures here for best performance.
- For facial recognition on new images to work properly, You must re-run the Face Detection job for all images after this.
- If these changes are not enough, see [below](/docs/FAQ#how-can-i-disable-machine-learning) for instructions on how to disable machine learning.
### Can I limit CPU and RAM usage?
### Can I limit the amount of CPU and RAM usage?
By default, a container has no resource constraints and can use as much of a given resource as the host's kernel scheduler allows. To limit this, you can add the following to the `docker-compose.yml` block of any containers that you want to have limited resources.
@@ -288,8 +266,6 @@ deploy:
</details>
For more details, you can look at the [original docker docs](https://docs.docker.com/config/containers/resource_constraints/) or use this [guide](https://www.baeldung.com/ops/docker-memory-limit).
Note that memory constraints work by terminating the container, so this can introduce instability if set too low.
### How can I boost machine learning speed?
:::note
@@ -299,16 +275,21 @@ This advice improves throughput, not latency. This is to say that it will make S
You can increase throughput by increasing the job concurrency for machine learning jobs (Smart Search, Face Detection). With higher concurrency, the host will work on more assets in parallel. You can do this by navigating to Administration > Settings > Job Settings and increasing concurrency as needed.
:::danger
On a normal machine, 2 or 3 concurrent jobs can probably max the CPU. Storage speed and latency can quickly become the limiting factor beyond this, particularly when using HDDs.
On a normal machine, 2 or 3 concurrent jobs can probably max the CPU. Beyond this, note that storage speed and latency may quickly become the limiting factor; particularly when using HDDs.
The concurrency can be increased more comfortably with a GPU, but should still not be above 16 in most cases.
Do not exaggerate with the amount of jobs because you're probably thoroughly overloading the server.
Do not exaggerate with the job concurrency because you're probably thoroughly overloading the server.
More details can be found [here](https://discord.com/channels/979116623879368755/994044917355663450/1174711719994605708)
:::
### My server shows Server Status Offline | Version Unknown. What can I do?
### Why is Immich using so much of my CPU?
You need to enable WebSockets on your reverse proxy.
When a large number of assets are uploaded to Immich, it makes sense that the CPU and RAM will be heavily used for machine learning work and creating image thumbnails.
Once this process is completed, the percentage of CPU usage will drop to around 3-5% usage
### My server shows Server Status Offline | Version Unknown what can I do?
You need to enable Websocket on your reverse proxy.
---
@@ -318,12 +299,6 @@ You need to enable WebSockets on your reverse proxy.
Immich components are typically deployed using docker. To see logs for deployed docker containers, you can use the [Docker CLI](https://docs.docker.com/engine/reference/commandline/cli/), specifically the `docker logs` command. For examples, see [Docker Help](/docs/guides/docker-help.md).
### How can I reduce the log verbosity of Redis?
To decrease Redis logs, you can add the following line to the `redis:` section of the `docker-compose.yml`:
` command: redis-server --loglevel warning`
### How can I run Immich as a non-root user?
You can change the user in the container by setting the `user` argument in `docker-compose.yml` for each service.


@@ -45,7 +45,7 @@ docker compose up -d # Start remainder of Immich apps
<TabItem value="Windows system (PowerShell)" label="Windows system (PowerShell)">
```powershell title='Backup'
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres | Set-Content -Encoding utf8 "C:\path\to\backup\dump.sql"
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres > "\path\to\backup\dump.sql"
```
```powershell title='Restore'
@@ -149,21 +149,9 @@ for more info read the [release notes](https://github.com/immich-app/immich/rele
- Preview images (small thumbnails and large previews) for each asset and thumbnails for recognized faces.
- Stored in `UPLOAD_LOCATION/thumbs/<userID>`.
- **Encoded Assets:**
- Videos that have been re-encoded from the original for wider compatibility. The original is not removed.
- Stored in `UPLOAD_LOCATION/encoded-video/<userID>`.
- **Postgres**
- The Immich database containing all the information to allow the system to function properly.
**Note:** This folder will only appear to users who have made the changes mentioned in [v1.102.0](https://github.com/immich-app/immich/discussions/8930) (an optional, non-mandatory change) or who started with this version.
- Stored in `UPLOAD_LOCATION/postgres`.
:::danger
A backup of this folder does not constitute a backup of your database!
Follow the instructions listed [here](/docs/administration/backup-and-restore#database) to learn how to perform a proper backup.
:::
</TabItem>
<TabItem value="Storage Template On" label="Storage Template On">
@@ -199,19 +187,8 @@ When you turn off the storage template engine, it will leave the assets in `UPLO
- Files uploaded through mobile apps.
- Temporarily located in `UPLOAD_LOCATION/upload/<userID>`.
- Transferred to `UPLOAD_LOCATION/library/<userID>` upon successful upload.
- **Postgres**
- The Immich database containing all the information to allow the system to function properly.
**Note:** This folder will only appear to users who have made the changes mentioned in [v1.102.0](https://github.com/immich-app/immich/discussions/8930) (an optional, non-mandatory change) or who started with this version.
- Stored in `UPLOAD_LOCATION/postgres`.
:::danger
A backup of this folder does not constitute a backup of your database!
Follow the instructions listed [here](/docs/administration/backup-and-restore#database) to learn how to perform a proper backup.
:::
</TabItem>
</Tabs>
:::danger

[Binary image file added, not shown (1.1 MiB)]
[Binary image file removed, not shown (79 KiB)]
@@ -52,4 +52,4 @@ Additionally, some jobs run on a schedule, which is every night at midnight. Thi
Storage Migration job can be run after changing the [Storage Template](/docs/administration/storage-template.mdx), in order to apply the change to the existing library.
:::
<img src={require('./img/admin-jobs.webp').default} width="60%" title="Admin jobs" />
<img src={require('./img/admin-jobs.png').default} width="80%" title="Admin jobs" />

@@ -3,7 +3,7 @@
This page contains details about using OAuth in Immich.
:::tip
Unable to set `app.immich:///oauth-callback` as a valid redirect URI? See [Mobile Redirect URI](#mobile-redirect-uri) for an alternative solution.
Unable to set `app.immich:/` as a valid redirect URI? See [Mobile Redirect URI](#mobile-redirect-uri) for an alternative solution.
:::
## Overview
@@ -30,7 +30,7 @@ Before enabling OAuth in Immich, a new client application needs to be configured
The **Sign-in redirect URIs** should include:
- `app.immich:///oauth-callback` - for logging in with OAuth from the [Mobile App](/docs/features/mobile-app.mdx)
- `app.immich:/` - for logging in with OAuth from the [Mobile App](/docs/features/mobile-app.mdx)
- `http://DOMAIN:PORT/auth/login` - for logging in with OAuth from the Web Client
- `http://DOMAIN:PORT/user-settings` - for manually linking OAuth in the Web Client
@@ -38,7 +38,7 @@ Before enabling OAuth in Immich, a new client application needs to be configured
Mobile
- `app.immich:///oauth-callback` (You **MUST** include this for iOS and Android mobile apps to work properly)
- `app.immich:/` (You **MUST** include this for iOS and Android mobile apps to work properly)
Localhost
@@ -96,16 +96,16 @@ When Auto Launch is enabled, the login page will automatically redirect the user
## Mobile Redirect URI
The redirect URI for the mobile app is `app.immich:///oauth-callback`, which is a [Custom Scheme](https://developer.apple.com/documentation/xcode/defining-a-custom-url-scheme-for-your-app). If this custom scheme is an invalid redirect URI for your OAuth Provider, you can work around this by doing the following:
The redirect URI for the mobile app is `app.immich:/`, which is a [Custom Scheme](https://developer.apple.com/documentation/xcode/defining-a-custom-url-scheme-for-your-app). If this custom scheme is an invalid redirect URI for your OAuth Provider, you can work around this by doing the following:
1. Configure an http(s) endpoint to forwards requests to `app.immich:///oauth-callback`
1. Configure an http(s) endpoint to forwards requests to `app.immich:/`
2. Whitelist the new endpoint as a valid redirect URI with your provider.
3. Specify the new endpoint as the `Mobile Redirect URI Override`, in the OAuth settings.
With these steps in place, you should be able to use OAuth from the [Mobile App](/docs/features/mobile-app.mdx) without a custom scheme redirect URI.
:::info
Immich has a route (`/api/oauth/mobile-redirect`) that is already configured to forward requests to `app.immich:///oauth-callback`, and can be used for step 1.
Immich has a route (`/api/oauth/mobile-redirect`) that is already configured to forward requests to `app.immich:/`, and can be used for step 1.
:::
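As a quick sanity check, a hedged sketch like the following can confirm the forwarding route answers with a redirect to the custom scheme (`https://immich.example.com` is a placeholder, and the query parameters a real provider appends will differ):
```bash
# Sketch: the mobile-redirect route should answer with a redirect whose Location header
# points back at the app's custom scheme. Domain and query string are placeholders.
curl -sI "https://immich.example.com/api/oauth/mobile-redirect?code=test" | grep -i '^location:'
# Expected: a Location header beginning with app.immich:///oauth-callback
```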
## Example Configuration
@@ -154,21 +154,21 @@ Configuration of Authorised redirect URIs (Google Console)
Configuration of OAuth in Immich System Settings
| Setting | Value |
| ---------------------------- | ---------------------------------------------------------------------------- |
| Issuer URL | `https://accounts.google.com` |
| Client ID | 7\***\*\*\*\*\*\*\***\*\*\***\*\*\*\*\*\*\***vuls.apps.googleusercontent.com |
| Client Secret | G\***\*\*\*\*\*\*\***\*\*\***\*\*\*\*\*\*\***OO |
| Scope | openid email profile |
| Signing Algorithm | RS256 |
| Storage Label Claim | preferred_username |
| Storage Quota Claim | immich_quota |
| Default Storage Quota (GiB) | 0 (0 for unlimited quota) |
| Button Text | Sign in with Google (optional) |
| Auto Register | Enabled (optional) |
| Auto Launch | Enabled |
| Mobile Redirect URI Override | Enabled (required) |
| Mobile Redirect URI | `https://example.immich.app/api/oauth/mobile-redirect` |
| Setting | Value |
| ---------------------------- | ------------------------------------------------------------------------------------------------------ |
| Issuer URL | [https://accounts.google.com](https://accounts.google.com) |
| Client ID | 7\***\*\*\*\*\*\*\***\*\*\***\*\*\*\*\*\*\***vuls.apps.googleusercontent.com |
| Client Secret | G\***\*\*\*\*\*\*\***\*\*\***\*\*\*\*\*\*\***OO |
| Scope | openid email profile |
| Signing Algorithm | RS256 |
| Storage Label Claim | preferred_username |
| Storage Quota Claim | immich_quota |
| Default Storage Quota (GiB) | 0 (0 for unlimited quota) |
| Button Text | Sign in with Google (optional) |
| Auto Register | Enabled (optional) |
| Auto Launch | Enabled |
| Mobile Redirect URI Override | Enabled (required) |
| Mobile Redirect URI | [https://demo.immich.app/api/oauth/mobile-redirect](https://demo.immich.app/api/oauth/mobile-redirect) |
</details>

@@ -104,7 +104,7 @@ You can choose to disable a certain type of machine learning, for example smart
### Smart Search
The [smart search](/docs/features/smart-search) settings are designed to allow the search tool to be used using [CLIP](https://openai.com/research/clip) models that [can be changed](/docs/FAQ#can-i-use-a-custom-clip-model), different models will necessarily give better results but may consume more processing power, when changing a model it is mandatory to re-run the
The smart search settings are designed to allow the search tool to be used using [CLIP](https://openai.com/research/clip) models that [can be changed](/docs/FAQ#can-i-use-a-custom-clip-model), different models will necessarily give better results but may consume more processing power, when changing a model it is mandatory to re-run the
Smart Search job on all images to fully apply the change.
:::info Internet connection
@@ -113,23 +113,15 @@ After downloading, there is no need for Immich to connect to the network
Unless version checking has been enabled in the settings.
:::
### Duplicate Detection
Use CLIP embeddings to find likely duplicates. The maximum detection distance can be configured in order to improve / reduce the level of accuracy.
- **Maximum detection distance -** Maximum distance between two images to consider them duplicates, ranging from 0.001-0.1. Higher values will detect more duplicates, but may result in false positives.
### Facial Recognition
Under these settings, you can change the facial recognition settings
Editable settings:
- **Facial Recognition Model**
- **Min Detection Score**
- **Max Recognition Distance**
- **Min Recognized Faces**
You can learn more about these options on the [Facial Recognition page](/docs/features/facial-recognition#how-face-detection-works)
- **Facial Recognition Model -** Models are listed in descending order of size. Larger models are slower and use more memory, but produce better results. Note that you must re-run the Face Detection job for all images upon changing a model.
- **Min Detection Score -** Minimum confidence score for a face to be detected from 0-1. Lower values will detect more faces but may result in false positives.
- **Max Recognition Distance -** Maximum distance between two faces to be considered the same person, ranging from 0-2. Lowering this can prevent labeling two people as the same person, while raising it can prevent labeling the same person as two different people. Note that it is easier to merge two people than to split one person in two, so err on the side of a lower threshold when possible.
- **Min Recognized Faces -** The minimum number of recognized faces for a person to be created (AKA: Core face). Increasing this makes Facial Recognition more precise at the cost of increasing the chance that a face is not assigned to a person.
:::info
When changing the values in Min Detection Score, Max Recognition Distance, and Min Recognized Faces.
@@ -161,7 +153,7 @@ SMTP server setup, for user creation notifications, new albums, etc. More inform
### External Domain
Overrides the domain name in shared links and email notifications. The URL should not include a trailing slash.
When set, will override the domain name used when viewing and copying a shared link.
### Welcome Message

@@ -24,7 +24,7 @@ This environment includes the services below. Additional details are available i
- Web app - [`/web`](https://github.com/immich-app/immich/tree/main/web)
- Machine learning - [`/machine-learning`](https://github.com/immich-app/immich/tree/main/machine-learning)
- Redis
- PostgreSQL development database with exposed port `5432` so you can use any database client to access it
- PostgreSQL development database with exposed port `5432` so you can use any database client to acess it
All the services are packaged to run as with single Docker Compose command.

@@ -4,8 +4,7 @@
### Unit tests
Unit tests are run by calling `npm run test` from the `server/` directory.
You need to run `npm install` (in `server/`) _once_ beforehand.
Unit are run by calling `npm run test` from the `server` directory.
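A minimal sketch of the unit-test workflow described above:
```bash
# Sketch: install dependencies once, then run the server unit tests
cd server
npm install      # only needed the first time
npm run test
```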
### End to end tests
@@ -15,11 +14,6 @@ The e2e tests can be run by first starting up a test production environment via:
make e2e
```
Before you can run the tests, you need to run the following commands _once_:
- `npm install` (in `e2e/`)
- `make open-api` (in the project root `/`)
Once the test environment is running, the e2e tests can be run via:
```bash

@@ -2,7 +2,7 @@
## Overview
Immich recognizes faces in your photos and videos and groups them together into people. You can then assign names to these people and search for them.
Immich recognizes faces in your photos and videos and groups them together. You can then assign names to the faces and search for them.
The list of people is shown in the Explore page.
@@ -18,75 +18,13 @@ The asset detail view will also show the faces that are recognized in the asset.
## Actions
Additional actions you can do include:
Additional actions you can do with a detected person are:
- Changing the feature photo of the person
- Setting a person's date of birth
- Merging two or more detected faces into one person
- Hiding the faces of a person from the Explore page and detail view
- Assigning an unrecognized face to a person
- Change the feature face photo of the person
- Set date of birth
- Merge two or more detected faces into one person
- Hide face
It can be found from the app bar when you access the detail view of a person.
<img src={require('./img/facial-recognition-4.png').default} title='Facial Recognition 4' width="70%"/>
## How Face Detection Works
Face detection sends the generated preview image to the machine learning service for processing. The service checks if it has the relevant model downloaded and downloads it if not. The image is decoded, pre-processed and passed to the face detection model (with hardware acceleration if configured). The bounding boxes and scores outputted from this model are used to crop and preprocess the image once again to be passed to a facial recognition model (also accelerated if configured). The embeddings from the recognition model, together with the bounding boxes and scores from the face detection model, are then sent back to the server to be added to the database. The embeddings in particular are indexed so they can be searched quickly during facial recognition clustering.
## How Facial Recognition Works
The facial recognition algorithm we use is derived from [DBSCAN](https://www.youtube.com/watch?v=RDZUdRSDOok), a popular clustering algorithm. It essentially treats each detected face as a point in a graph and aims to group points that are close to each other.
:::note
An important concept is whether something is a _core point_. A core point has a minimum number of points around it within a certain distance. A non-core point can only be assigned to a cluster if it can reach a core point; a non-core point can't be used to extend a cluster even if it's part of one. In Immich, the _Minimum Recognized Faces_ setting controls the threshold to be considered a core point.
:::
For each face, it looks around it to find other faces within a certain distance. Faces within this distance are considered similar, so it then checks if any of these faces are associated with a person.
If there is an existing person, it assigns the person of the most similar face to the face being processed.
If there is none, then it has to determine something from the DBSCAN algorithm: whether the face is a _core point_. If there are a certain number of similar faces (by default 3, including the face being considered), then this face is a core point. A new person is created for this face and the face is assigned to it. When other faces are processed, if they're similar to this face, they'll see that it has an associated person and can be assigned to that person.
However, if there aren't enough similar faces, no new person will be created. Instead, the face will wait for all the other faces to be processed to see if any matches that previously didn't have an associated person now do. If they do, then the face will be assigned to that person. If not, this face will be considered an outlier, such as a stranger in the background of an image.
The algorithm has some subtle differences compared to DBSCAN:
- DBSCAN doesn't have a concept of incremental clustering: it clusters all points at once. In contrast, facial recognition has to evolve as more assets are added without re-clustering everything each time.
- The algorithm described above works within a set of queued assets. Once these faces are processed and a new round of faces are detected, the behavior will not be the same as traditional DBSCAN since it preserves the clusters (people) generated from the previous round.
- Facial recognition tries to wait for face detection and thumbnail generation to complete before starting for this reason: the larger the set of faces in the queue, the better the results will be.
- Re-running facial recognition on all assets afterwards does behave like DBSCAN, however.
- DBSCAN is designed for range-based searches (i.e. points within a distance), but high-dimensional vector indices are generally optimized for getting the closest K results. The recognition algorithm doesn't try to get _all_ similar faces within a distance for performance reasons. Instead, it searches for a small number of matches for each face. The end result should be very similar if not identical, but with possibly different performance characteristics.
- Because of this, part of the recognition process is handled during a nightly job to ensure that unassigned faces with potential matches can be recognized.
:::tip
If you didn't import your assets at once or if the server was able to process jobs faster than you could upload them, it's possible that the clustering was suboptimal. If you haven't put effort into the current results, it may be worth re-running facial recognition on all assets for the best starting point. If it's too late for that, you can also manually assign a selection of unassigned faces and queue _Missing_ for Facial Recognition to help it learn and assign more faces automatically.
:::
## Configuration
Navigating to Administration > Settings > Machine Learning Settings > Facial Recognition will show the options available.
:::tip
It's better to only tweak the parameters here than to set them to something very different unless you're ready to test a variety of options. If you do need to set a parameter to a strict setting, relaxing other settings can be a good option to compensate, and vice versa.
:::
### Facial recognition model
There are a few different models available; the default is typically considered the best. On more constrained systems where the default is too intensive, you can choose a smaller model instead.
### Minimum detection score
This setting affects whether a result from the face detection model is filtered out as a false positive. It may seem tempting to set this low to detect more faces, but it can lead to false positives that are difficult to deal with and can harm facial recognition. It is strongly recommended not to go below 0.5 for this setting. Setting it to a very high number like 0.9 is also not recommended: the default is already biased toward precision, so a threshold that high leads to many undetected faces.
After changing this setting, it will only apply to new face detection jobs. To apply the new setting to all assets, you need to re-run face detection for all assets.
### Maximum recognition distance
The distance threshold described in How Facial Recognition Works. The default works well for most people, but it may be worth lowering it if the library has twins or otherwise very similar looking people. A threshold that's too low just means needing to merge duplicate people after facial recognition, whereas a threshold too high can produce unsalvageable results. It is strongly recommended not to go below 0.3 or above 0.7.
### Minimum recognized faces
The core point threshold described in How Facial Recognition Works. This setting has a few implications. First, it takes effect immediately in that people with fewer faces than this are hidden from view. Secondly, it makes clustering more robust as it prevents loosely-related faces from being linked to each other by requiring a certain level of density.
Increasing this setting is a good idea if you increase the recognition distance or reduce the minimum detection score. Setting it to 1 effectively disables the concept of core points, but can be an option if you prefer a more hands-on approach.

@@ -123,7 +123,6 @@ Once this is done, you can continue to step 3 of "Basic Setup".
- You may want to choose a slower preset than for software transcoding to maintain quality and efficiency
- While you can use VAAPI with NVIDIA and Intel devices, prefer the more specific APIs since they're more optimized for their respective devices
- You can confirm the device is being recognized and used by checking its utilization (via `nvtop` for NVIDIA, `intel_gpu_top` for Intel, etc.) when transcoding. A lack of error logs when transcoding also indicates that it's being used.
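A couple of hedged one-liners for that check (the tools are the ones mentioned above; the `immich-server` service name comes from the stock Compose file):
```bash
# Sketch: watch GPU utilization while a transcode runs (pick the tool for your vendor)
nvtop            # NVIDIA
intel_gpu_top    # Intel

# Sketch: follow the server logs and watch for transcoding errors while a video is processed
docker compose logs -f immich-server | grep -iE 'ffmpeg|transcod|error'
```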
[hw-file]: https://github.com/immich-app/immich/releases/latest/download/hwaccel.transcoding.yml
[nvct]: https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html

@@ -107,17 +107,15 @@ The `immich-server` container will need access to the gallery. Modify your docke
+ - /mnt/nas/christmas-trip:/mnt/media/christmas-trip:ro
+ - /home/user/old-pics:/mnt/media/old-pics:ro
+ - /mnt/media/videos:/mnt/media/videos:ro
+ - /mnt/media/videos2:/mnt/media/videos2 # the files in this folder can be deleted, as it does not end with :ro
+ - "C:/Users/user_name/Desktop/my media:/mnt/media/my-media:ro" # import path in Windows system.
```
:::tip
The `ro` flag at the end only gives read-only access to the volumes.
This will disallow the images from being deleted in the web UI, or adding metadata to the library ([XMP sidecars](/docs/features/xmp-sidecars)).
The `ro` flag at the end only gives read-only access to the volumes. While Immich does not modify files, it's a good practice to mount read-only.
:::
:::info
_Remember to run `docker compose up -d` to register the changes. Make sure you can see the mounted path in the container._
_Remember to bring the container `docker compose down/up` to register the changes. Make sure you can see the mounted path in the container._
:::
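One way to do that last check is a quick `docker exec` (a sketch; `immich_server` is the default container name from the stock Compose file, and the path is one of the examples above):
```bash
# Sketch: confirm that a mounted external path is visible inside the server container
docker exec -it immich_server ls /mnt/media/christmas-trip
```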
### Create External Libraries

@@ -32,13 +32,12 @@ You do not need to redo any machine learning jobs after enabling hardware accele
- Where and how you can get this file depends on device and vendor, but typically, the device vendor also supplies these
- The `hwaccel.ml.yml` file assumes the path to it is `/usr/lib/libmali.so`, so update accordingly if it is elsewhere
- The `hwaccel.ml.yml` file assumes an additional file `/lib/firmware/mali_csffw.bin`, so update accordingly if your device's driver does not require this file
- Optional: Configure your `.env` file, see [environment variables](/docs/install/environment-variables) for ARM NN specific settings
#### CUDA
- The GPU must have compute capability 5.2 or greater.
- The server must have the official NVIDIA driver installed.
- The installed driver must be >= 545 (it must support CUDA 12.3.2).
- The installed driver must be >= 535 (it must support CUDA 12.2).
- On Linux (except for WSL2), you also need to have [NVIDIA Container Toolkit][nvct] installed.
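A hedged way to check the first two requirements from the host (assumes the NVIDIA driver's `nvidia-smi` is installed; `compute_cap` is available as a query field on recent drivers):
```bash
# Sketch: print the driver version and compute capability to verify the CUDA prerequisites
nvidia-smi --query-gpu=driver_version,compute_cap --format=csv
```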
#### OpenVINO

@@ -66,7 +66,7 @@ The provided file is just a starting point. There are a ton of ways to configure
After bringing down the containers with `docker compose down` and back up with `docker compose up -d`, a Prometheus instance will now collect metrics from the immich server and microservices containers. Note that we didn't need to expose any new ports for these containers - the communication is handled in the internal Docker network.
:::note
To see exactly what metrics are made available, you can additionally add `8081:8081` to the server container's ports and `8082:8082` to the microservices container's ports. Visiting the `/metrics` endpoint for these services will show the same raw data that Prometheus collects.
To see exactly what metrics are made available, you can additionally add `8081:8081` to the server container's ports and `8082:8081` to the microservices container's ports. Visiting the `/metrics` endpoint for these services will show the same raw data that Prometheus collects.
:::
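For example (a sketch, assuming you exposed the ports as described and run it on the Docker host):
```bash
# Sketch: peek at the raw metrics the API and microservices workers expose
curl -s http://localhost:8081/metrics | head -n 20   # api worker
curl -s http://localhost:8082/metrics | head -n 20   # microservices worker
```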
### Usage

@@ -16,7 +16,7 @@ When sharing shared albums, whats shared is:
- Download all assets as zip file (Web only).
:::info Archive size limited.
If the size of the album exceeds 4GB, the archive files will by default be divided into 4GB each. This can be changed on the user settings page.
If the size of the album exceeds 4GB, the archive files will be divided into 4GB each.
:::
- Add a description to the album (Web only).
- Slideshow view (Web only).
@@ -73,14 +73,14 @@ You can edit the link properties, options include;
- **Allow public user to download -** whether to allow whoever has the link to download all the images or a certain image (optional).
- **Allow public user to upload -** whether to allow whoever has the link to upload assets to the album (optional).
:::info
Whoever has the link and have uploaded files cannot delete them.
whoever has the link and have uploaded files cannot delete them.
:::
- **Expire after -** adding an expiration date to the link (optional).
## Share Specific Assets
A user can share specific assets without linking them to a specific album.
In order to do this:
in order to do so;
1. Go to the timeline
2. Select the assets (Shift can be used for multiple selection)
@@ -152,7 +152,7 @@ Some of the features are not available on mobile, to understand what the full fe
## Sharing Between Users
#### Add or remove users from the album
#### Add or remove users from the album.
:::info remove user(s)
When a user is removed from the album, the photos he uploaded will still appear in the album.

@@ -7,30 +7,29 @@ Immich uses Postgres as its search database for both metadata and smart search.
Smart search is powered by the [pgvecto.rs](https://github.com/tensorchord/pgvecto.rs) extension, utilizing machine learning models like [CLIP](https://openai.com/research/clip) to provide relevant search results. This allows for freeform searches without requiring specific keywords in the image or video metadata.
Archived photos are not included in search results by default. To include them, mark the checkbox in [advanced search filters](/docs/features/smart-search#advanced-search-filters).
:::tip Alternative CLIP Models
More powerful models can be used for more accurate search results. For more information, see the related [FAQ](/docs/FAQ#can-i-use-a-custom-clip-model).
:::
:::info
Smart Search is currently limited to 5,000 results for a single search on the web.
:::
## Advanced Search Filters
In addition, Immich offers advanced search functionality, allowing you to find specific content using customizable search filters. These filters include location, one or more faces, specific albums, and more. You can try out the search filters on the [Demo site](https://demo.immich.app).
The filters that smart search allows you to search by include:
Smart search features include:
- People
- Location
- Country
- State
- City
- Camera
- Make
- Model
- Date range
- File name or extension
- Media type
- Image (including live/motion photos)
- Video
- All
- Condition
- Not in any album
- Archived
- Favorited
- Search for one or more faces (with or without context search).
- Search by Country or State or City or by all three.
- Search by camera make and model.
- Search by date range.
- Search by file name.
- Search by media types: image, video or all (**Note:** Image includes live images).
- Search by condition: not in any album or archive or Favorite or all conditions.
<Tabs>
<TabItem value="Computer" label="Computer" default>
@@ -48,27 +47,3 @@ Some search examples:
</TabItem>
</Tabs>
## Configuration
Navigating to `Administration > Settings > Machine Learning Settings > Smart Search` will show the options available.
### CLIP model
More powerful models can be used for more accurate search results, but are slower and can require more server resources. Check out the models [here][huggingface-clip] for more options!
[Multilingual models][huggingface-multilingual-clip] are also available so users can search in their native language. These models support over 100 languages; the `nllb` models in particular support 200.
:::note
Multilingual models are much slower and larger and perform slightly worse for English than English-only models. For this reason, only use them if you actually intend to search in a language besides English.
As a special case, the `ViT-H-14-quickgelu__dfn5b` and `ViT-H-14-378-quickgelu__dfn5b` models are excellent at many European languages despite not specifically being multilingual. They're very intensive regardless, however - especially the latter.
:::
Once you've chosen a model, change this setting to the name of the model you chose. Be sure to re-run Smart Search on all assets after this change.
:::note
Feel free to make a feature request if there's a model you want to use that we don't currently support.
:::
[huggingface-clip]: https://huggingface.co/collections/immich-app/clip-654eaefb077425890874cd07
[huggingface-multilingual-clip]: https://huggingface.co/collections/immich-app/multilingual-clip-654eb08c2382f591eeb8c2a7

@@ -13,14 +13,14 @@ In our `.env` file, we will define variables that will help us in the future whe
# Custom location where your uploaded files, thumbnails, and transcoded videos are stored
- UPLOAD_LOCATION=./library
+ UPLOAD_LOCATION=/custom/path/immich/immich_files
+ THUMB_LOCATION=/custom/path/immich/thumbs
+ ENCODED_VIDEO_LOCATION=/custom/path/immich/encoded-video
+ PROFILE_LOCATION=/custom/path/immich/profile
+ UPLOAD_LOCATION=/custom/location/on/your/system/immich/immich_files
+ THUMB_LOCATION=/custom/location/on/your/system/immich/thumbs
+ ENCODED_VIDEO_LOCATION=/custom/location/on/your/system/immich/encoded-video
+ PROFILE_LOCATION=/custom/location/on/your/system/immich/profile
...
```
After defining the locations for these files, we will edit the `docker-compose.yml` file accordingly and add the new variables to the `immich-server` container.
After defining the locations for these files, we will edit the `docker-compose.yml` file accordingly and add the new variables to the `immich-server` and `immich-microservices` containers.
```diff title="docker-compose.yml"
services:
@@ -29,6 +29,16 @@ services:
- ${UPLOAD_LOCATION}:/usr/src/app/upload
+ - ${THUMB_LOCATION}:/usr/src/app/upload/thumbs
+ - ${ENCODED_VIDEO_LOCATION}:/usr/src/app/upload/encoded-video
+ - ${PROFILE_LOCATION}:/usr/src/app/upload/profile
- /etc/localtime:/etc/localtime:ro
...
immich-microservices:
volumes:
- ${UPLOAD_LOCATION}:/usr/src/app/upload
+ - ${THUMB_LOCATION}:/usr/src/app/upload/thumbs
+ - ${ENCODED_VIDEO_LOCATION}:/usr/src/app/upload/encoded-video
+ - ${PROFILE_LOCATION}:/usr/src/app/upload/profile
- /etc/localtime:/etc/localtime:ro
```
@@ -36,6 +46,7 @@ services:
Restart Immich to register the changes.
```
docker compose down
docker compose up -d
```
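To double-check that the new bind mounts ended up where you expect, a sketch like this lists them (assuming the default `immich_server` container name):
```bash
# Sketch: list the bind mounts of the running server container
docker inspect -f '{{ range .Mounts }}{{ .Source }} -> {{ .Destination }}{{ "\n" }}{{ end }}' immich_server
```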

@@ -1,22 +1,8 @@
# Custom Map Styles
# Create Custom Map Styles for Immich Using Maptiler
You may decide that you'd like to modify the style document which is used to
draw the maps in Immich. In addition to visual customization, this also allows
you to pick your own map tile provider instead of the default one. The default
`style.json` for [light theme](https://github.com/immich-app/immich/tree/main/server/resources/style-light.json)
and [dark theme](https://github.com/immich-app/immich/blob/main/server/resources/style-dark.json)
can be used as a basis for creating your own style.
You may decide that you'd like to modify the style document which is used to draw the maps in Immich. This can be done easily using Maptiler, if you do not want to write an entire JSON document by hand.
There are several sources for already-made `style.json` map themes, as well as
online generators you can use.
1. In **Immich**, navigate to **Administration --> Settings --> Map & GPS Settings** and expand the **Map Settings** subsection.
2. Paste the link to your JSON style in either the **Light Style** or **Dark Style**. (You can add different styles which will help make the map style more appropriate depending on whether you set **Immich** to Light or Dark mode.)
3. Save your selections. Reload the map, and enjoy your custom map style!
## Use Maptiler to build a custom style
Customizing the map style can be done easily using Maptiler, if you do not want to write an entire JSON document by hand.
## Steps
1. Create a free account at https://cloud.maptiler.com
2. Once logged in, you can either create a brand new map by clicking on **New Map**, selecting a starter map, and then clicking **Customize**, OR by selecting a **Standard Map** and customizing it from there.
@@ -25,3 +11,6 @@ Customizing the map style can be done easily using Maptiler, if you do not want
5. Next, **Publish** your style using the **Publish** button at the top right. This will deploy it to production, which means it is able to be exposed over the Internet. Maptiler will present an interactive side-by-side map with the original and your changes prior to publication.<br/>![Maptiler Publication Settings](img/immich_map_styles_publish.png)
6. Maptiler will warn you that changing the map will change it across all apps using the map. Since no apps are using the map yet, this is okay.
7. Clicking on the name of your new map at the top left will bring you to the item's **details** page. From here, copy the link to the JSON style under **Use vector style**. This link will automatically contain your personal API key to Maptiler.
8. In **Immich**, navigate to **Administration --> Settings --> Map & GPS Settings** and expand the **Map Settings** subsection.
9. Paste the link to your JSON style in either the **Light Style** or **Dark Style**. (You can add different styles which will help make the map style more appropriate depending on whether you set **Immich** to Light or Dark mode.
10. Save your selections. Reload the map, and enjoy your custom map style!

@@ -5,7 +5,7 @@ Keep in mind that mucking around in the database might set the moon on fire. Avo
:::
:::tip
Run `docker exec -it immich_postgres psql --dbname=immich --username=<DB_USERNAME>` to connect to the database via the container directly.
Run `docker exec -it immich_postgres psql immich <DB_USERNAME>` to connect to the database via the container directly.
(Replace `<DB_USERNAME>` with the value from your [`.env` file](/docs/install/environment-variables#database)).
:::
@@ -23,7 +23,7 @@ SELECT * FROM "assets" WHERE "originalFileName" LIKE '%_2023_%'; -- all files wi
```
```sql title="Find by path"
SELECT * FROM "assets" WHERE "originalPath" = 'upload/library/admin/2023/2023-09-03/PXL_2023.jpg';
SELECT * FROM "assets" WHERE "originalPath" = 'upload/library/admin/2023/2023-09-03/PXL_20230903_232542848.jpg';
SELECT * FROM "assets" WHERE "originalPath" LIKE 'upload/library/admin/2023/%';
```
@@ -37,12 +37,6 @@ SELECT * FROM "assets" WHERE "checksum" = decode('69de19c87658c4c15d9cacb9967b8e
SELECT * FROM "assets" WHERE "checksum" = '\x69de19c87658c4c15d9cacb9967b8e033bf74dd1'; -- alternate notation
```
```sql title="Find duplicate assets with identical checksum (SHA-1) (excluding trashed files)"
SELECT T1."checksum", array_agg(T2."id") ids FROM "assets" T1
INNER JOIN "assets" T2 ON T1."checksum" = T2."checksum" AND T1."id" != T2."id" AND T2."deletedAt" IS NULL
WHERE T1."deletedAt" IS NULL GROUP BY T1."checksum";
```
```sql title="Live photos"
SELECT * FROM "assets" WHERE "livePhotoVideoId" IS NOT NULL;
```
@@ -85,7 +79,8 @@ SELECT "assets"."type", COUNT(*) FROM "assets" GROUP BY "assets"."type";
```sql title="Count by type (per user)"
SELECT "users"."email", "assets"."type", COUNT(*) FROM "assets"
JOIN "users" ON "assets"."ownerId" = "users"."id"
GROUP BY "assets"."type", "users"."email" ORDER BY "users"."email";
GROUP BY "assets"."type", "users"."email"
ORDER BY "users"."email";
```
```sql title="Failed file movements"
@@ -111,9 +106,3 @@ SELECT "key", "value" FROM "system_metadata" WHERE "key" = 'system-config';
```sql title="Delete person and unset it for the faces it was associated with"
DELETE FROM "person" WHERE "name" = 'PersonNameHere';
```
## Postgres internal
```sql title="Change DB_PASSWORD"
ALTER USER <DB_USERNAME> WITH ENCRYPTED PASSWORD 'newpasswordhere';
```
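If you prefer one-off queries to an interactive session, the same statements can also be run non-interactively (a sketch, reusing the container name and the `<DB_USERNAME>` placeholder from the tip above):
```bash
# Sketch: run a single query against the Immich database from the host
docker exec -t immich_postgres psql --dbname=immich --username=<DB_USERNAME> \
  -c 'SELECT "assets"."type", COUNT(*) FROM "assets" GROUP BY "assets"."type";'
```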

@@ -6,18 +6,36 @@ in a directory on the same machine.
# Mount the directory into the containers.
Edit `docker-compose.yml` to add one or more new mount points in the section `immich-server:` under `volumes:`.
If you want Immich to be able to delete the images in the external library or add metadata ([XMP sidecars](/docs/features/xmp-sidecars)), remove `:ro` from the end of the mount point.
Edit `docker-compose.yml` to add two new mount points in the section `immich-server:` under `volumes:`
```diff
immich-server:
volumes:
- ${UPLOAD_LOCATION}:/usr/src/app/upload
+ - /home/user/photos1:/home/user/photos1:ro
+ - /mnt/photos2:/mnt/photos2:ro # you can delete this line if you only have one mount point, or you can add more lines if you have more than two
+ - ${EXTERNAL_PATH}:/usr/src/app/external
```
Restart Immich by running `docker compose up -d`.
Edit `.env` to define `EXTERNAL_PATH`, substituting in the correct path for your computer:
```
EXTERNAL_PATH=<your-path-here>
```
On my computer, for example, I use this path:
```
EXTERNAL_PATH=/home/tenino/photos
```
:::info EXTERNAL_PATH design
The design choice to put EXTERNAL_PATH into `.env`, rather than writing the absolute path twice in the yml file, is to make maintenance easier: you avoid having two copies of the same path that must be kept in sync, so if you later move the data you only need to update one place instead of having everything break mysteriously.
:::
Restart Immich.
```
docker compose down
docker compose up -d
```
# Create the library

@@ -11,13 +11,13 @@ Never forward port 2283 directly to the internet without additional configuratio
You may use a VPN service to open an encrypted connection to your Immich instance. OpenVPN and Wireguard are two popular VPN solutions. Here is a guide on setting up VPN access to your server - [Pihole documentation](https://docs.pi-hole.net/guides/vpn/wireguard/overview/)
### Pros
### Pros:
- Simple to set up and very secure.
- Single point of potential failure, i.e., the VPN software itself. Even if there is a zero-day vulnerability on Immich, you will not be at risk.
- Both Wireguard and OpenVPN are independently security-audited, so the risk of serious zero-day exploits are minimal.
### Cons
### Cons:
- If you don't have a static IP address, you would need to set up a [Dynamic DNS](https://www.cloudflare.com/learning/dns/glossary/dynamic-dns/). [DuckDNS](https://www.duckdns.org/) is a free DDNS provider.
- VPN software needs to be installed and active on both server-side and client-side.
@@ -27,10 +27,6 @@ You may use a VPN service to open an encrypted connection to your Immich instanc
If you are unable to open a port on your router for Wireguard or OpenVPN to your server, [Tailscale](https://tailscale.com/) is a good option. Tailscale mediates a peer-to-peer wireguard tunnel between your server and remote device, even if one or both of them are behind a [NAT firewall](https://en.wikipedia.org/wiki/Network_address_translation).
:::tip Video tutorial
You can learn how to set up Tailscale together with Immich with the [tutorial video](https://www.youtube.com/watch?v=Vt4PDUXB_fg) they created.
:::
### Pros
- Minimal configuration needed on server and client sides.
@@ -48,7 +44,7 @@ A reverse proxy is a service that sits between web servers and clients. A revers
If you're hosting your own reverse proxy, [Nginx](https://docs.nginx.com/nginx/admin-guide/web-server/reverse-proxy/) is a great option. An example configuration for Nginx is provided [here](/docs/administration/reverse-proxy.md).
You'll also need your own certificate to authenticate https connections. If you're making Immich publicly accessible, [Let's Encrypt](https://letsencrypt.org/) can provide a free certificate for your domain and is the recommended option. Alternatively, a [self-signed certificate](https://en.wikipedia.org/wiki/Self-signed_certificate) allows you to encrypt your connection to Immich, but it raises a security warning on the client's browser.
You'll also need your own certificate to authenticate https connections. If you're making Immich publicly accesible, [Let's Encrypt](https://letsencrypt.org/) can provide a free certificate for your domain and is the recommended option. Alternatively, a [self-signed certificate](https://en.wikipedia.org/wiki/Self-signed_certificate) allows you to encrypt your connection to Immich, but it raises a security warning on the client's browser.
A remote reverse proxy like [Cloudflare](https://www.cloudflare.com/learning/cdn/glossary/reverse-proxy/) increases security by hiding the server IP address, which makes targeted attacks like [DDoS](https://www.cloudflare.com/learning/ddos/what-is-a-ddos-attack/) harder.

@@ -4,15 +4,14 @@ To alleviate [performance issues on low-memory systems](/docs/FAQ.mdx#why-is-imm
- Set the URL in Machine Learning Settings on the Admin Settings page to point to the designated ML system, e.g. `http://workstation:3003`.
- Copy the following `docker-compose.yml` to your ML system.
- If using [hardware acceleration](/docs/features/ml-hardware-acceleration), the [hwaccel.ml.yml](https://github.com/immich-app/immich/releases/latest/download/hwaccel.ml.yml) file also needs to be added
- Start the container by running `docker compose up -d`.
:::info
Smart Search and Face Detection will use this feature, but Facial Recognition is handled in the server.
Starting with version v1.93.0 face detection work and face recognize were split. From now on face detection is done in the immich_machine_learning container, but facial recognition is done in the `microservices` worker.
:::
:::danger
When using remote machine learning, the thumbnails are sent to the remote machine learning container. Use this option carefully when running this on a public computer or a paid processing cloud.
:::note
The [hwaccel.ml.yml](https://github.com/immich-app/immich/releases/latest/download/hwaccel.ml.yml) file also needs to be in the same folder if trying to use [hardware acceleration](/docs/features/ml-hardware-acceleration).
:::
```yaml
@@ -38,7 +37,3 @@ volumes:
```
Please note that version mismatches between both hosts may cause instabilities and bugs, so make sure to always perform updates together.
:::caution
As an internal service, the machine learning container has no security measures whatsoever. Please be mindful of where it's deployed and who can access it.
:::
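Before pointing the Machine Learning URL at the new host, a quick reachability check from the Immich server machine can save debugging time (a sketch; `workstation:3003` is the example URL from above, and any HTTP status code at all means the service is answering):
```bash
# Sketch: confirm the remote machine-learning container is reachable from the server host
curl -s -o /dev/null -w 'HTTP %{http_code}\n' http://workstation:3003/
```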

@@ -56,8 +56,7 @@ Optionally, you can enable hardware acceleration for machine learning and transc
- Populate custom database information if necessary.
- Populate `UPLOAD_LOCATION` with your preferred location for storing backup assets.
- Consider changing `DB_PASSWORD` to a custom value. Postgres is not publicly exposed, so this password is only used for local authentication.
To avoid issues with Docker parsing this value, it is best to use only the characters `A-Za-z0-9`.
- Consider changing `DB_PASSWORD` to something randomly generated
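One way to generate such a value (a sketch; any method that produces only `A-Za-z0-9` characters works):
```bash
# Sketch: generate a 32-character alphanumeric password suitable for DB_PASSWORD
openssl rand -base64 48 | tr -dc 'A-Za-z0-9' | head -c 32; echo
```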
### Step 3 - Start the containers
@@ -109,7 +108,7 @@ Immich is currently under heavy development, which means you can expect [breakin
[compose-file]: https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
[env-file]: https://github.com/immich-app/immich/releases/latest/download/example.env
[watchtower]: https://containrrr.dev/watchtower/
[breaking]: https://github.com/immich-app/immich/discussions?discussions_q=label%3Achangelog%3Abreaking-change+sort%3Adate_created
[breaking]: https://github.com/immich-app/immich/discussions?discussions_q=label%3Abreaking-change+sort%3Adate_created
[container-auth]: https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry#authenticating-to-the-container-registry
[releases]: https://github.com/immich-app/immich/releases
[docker-repo]: https://docs.docker.com/engine/install/ubuntu/#install-using-the-repository

@@ -38,23 +38,19 @@ Regardless of filesystem, it is not recommended to use a network share for your
## General
| Variable | Description | Default | Containers | Workers |
| :---------------------------------- | :---------------------------------------------------------------------------------------- | :--------------------------: | :----------------------- | :----------------- |
| `TZ` | Timezone | | server | microservices |
| `IMMICH_ENV` | Environment (production, development) | `production` | server, machine learning | api, microservices |
| `IMMICH_LOG_LEVEL` | Log Level (verbose, debug, log, warn, error) | `log` | server, machine learning | api, microservices |
| `IMMICH_MEDIA_LOCATION` | Media Location inside the container ⚠️**You probably shouldn't set this**<sup>\*1</sup>⚠️ | `./upload`<sup>\*2</sup> | server | api, microservices |
| `IMMICH_CONFIG_FILE` | Path to config file | | server | api, microservices |
| `NO_COLOR` | Set to `true` to disable color-coded log output | `false` | server, machine learning | |
| `CPU_CORES` | Amount of cores available to the immich server | auto-detected cpu core count | server | |
| `IMMICH_API_METRICS_PORT` | Port for the OTEL metrics | `8081` | server | api |
| `IMMICH_MICROSERVICES_METRICS_PORT` | Port for the OTEL metrics | `8082` | server | microservices |
| `IMMICH_PROCESS_INVALID_IMAGES` | When `true`, generate thumbnails for invalid images | | server | microservices |
| `IMMICH_TRUSTED_PROXIES` | List of comma separated IPs set as trusted proxies | | server | api |
| Variable | Description | Default | Containers | Workers |
| :---------------------------------- | :---------------------------------------------- | :--------------------------: | :----------------------- | :----------------- |
| `TZ` | Timezone | | server | microservices |
| `IMMICH_ENV` | Environment (production, development) | `production` | server, machine learning | api, microservices |
| `IMMICH_LOG_LEVEL` | Log Level (verbose, debug, log, warn, error) | `log` | server, machine learning | api, microservices |
| `IMMICH_MEDIA_LOCATION` | Media Location | `./upload`<sup>\*1</sup> | server | api, microservices |
| `IMMICH_CONFIG_FILE` | Path to config file | | server | api, microservices |
| `NO_COLOR` | Set to `true` to disable color-coded log output | `false` | server, machine learning | |
| `CPU_CORES` | Amount of cores available to the immich server | auto-detected cpu core count | server | |
| `IMMICH_API_METRICS_PORT` | Port for the OTEL metrics | `8081` | server | api |
| `IMMICH_MICROSERVICES_METRICS_PORT` | Port for the OTEL metrics | `8082` | server | microservices |
\*1: This path is where the Immich code looks for the files, which is internal to the docker container. Setting it to a path on your host will certainly break things; you should use the `UPLOAD_LOCATION` variable instead.
\*2: With the default `WORKDIR` of `/usr/src/app`, this path will resolve to `/usr/src/app/upload`.
\*1: With the default `WORKDIR` of `/usr/src/app`, this path will resolve to `/usr/src/app/upload`.
It only needs to be set if the Immich deployment method changes.
:::tip
@@ -159,29 +155,23 @@ Redis (Sentinel) URL example JSON before encoding:
## Machine Learning
| Variable | Description | Default | Containers |
| :-------------------------------------------------------- | :-------------------------------------------------------------------------------------------------- | :-----------------------------------: | :--------------- |
| `MACHINE_LEARNING_MODEL_TTL` | Inactivity time (s) before a model is unloaded (disabled if \<= 0) | `300` | machine learning |
| `MACHINE_LEARNING_MODEL_TTL_POLL_S` | Interval (s) between checks for the model TTL (disabled if \<= 0) | `10` | machine learning |
| `MACHINE_LEARNING_CACHE_FOLDER` | Directory where models are downloaded | `/cache` | machine learning |
| `MACHINE_LEARNING_REQUEST_THREADS`<sup>\*1</sup> | Thread count of the request thread pool (disabled if \<= 0) | number of CPU cores | machine learning |
| `MACHINE_LEARNING_MODEL_INTER_OP_THREADS` | Number of parallel model operations | `1` | machine learning |
| `MACHINE_LEARNING_MODEL_INTRA_OP_THREADS` | Number of threads for each model operation | `2` | machine learning |
| `MACHINE_LEARNING_WORKERS`<sup>\*2</sup> | Number of worker processes to spawn | `1` | machine learning |
| `MACHINE_LEARNING_HTTP_KEEPALIVE_TIMEOUT_S`<sup>\*3</sup> | HTTP Keep-alive time in seconds | `2` | machine learning |
| `MACHINE_LEARNING_WORKER_TIMEOUT` | Maximum time (s) of unresponsiveness before a worker is killed | `120` (`300` if using OpenVINO image) | machine learning |
| `MACHINE_LEARNING_PRELOAD__CLIP` | Name of a CLIP model to be preloaded and kept in cache | | machine learning |
| `MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION` | Name of a facial recognition model to be preloaded and kept in cache | | machine learning |
| `MACHINE_LEARNING_ANN` | Enable ARM-NN hardware acceleration if supported | `True` | machine learning |
| `MACHINE_LEARNING_ANN_FP16_TURBO` | Execute operations in FP16 precision: increasing speed, reducing precision (applies only to ARM-NN) | `False` | machine learning |
| `MACHINE_LEARNING_ANN_TUNING_LEVEL` | ARM-NN GPU tuning level (1: rapid, 2: normal, 3: exhaustive) | `2` | machine learning |
| Variable | Description | Default | Containers |
| :----------------------------------------------- | :------------------------------------------------------------------- | :-----------------: | :--------------- |
| `MACHINE_LEARNING_MODEL_TTL` | Inactivity time (s) before a model is unloaded (disabled if \<= 0) | `300` | machine learning |
| `MACHINE_LEARNING_MODEL_TTL_POLL_S` | Interval (s) between checks for the model TTL (disabled if \<= 0) | `10` | machine learning |
| `MACHINE_LEARNING_CACHE_FOLDER` | Directory where models are downloaded | `/cache` | machine learning |
| `MACHINE_LEARNING_REQUEST_THREADS`<sup>\*1</sup> | Thread count of the request thread pool (disabled if \<= 0) | number of CPU cores | machine learning |
| `MACHINE_LEARNING_MODEL_INTER_OP_THREADS` | Number of parallel model operations | `1` | machine learning |
| `MACHINE_LEARNING_MODEL_INTRA_OP_THREADS` | Number of threads for each model operation | `2` | machine learning |
| `MACHINE_LEARNING_WORKERS`<sup>\*2</sup> | Number of worker processes to spawn | `1` | machine learning |
| `MACHINE_LEARNING_WORKER_TIMEOUT` | Maximum time (s) of unresponsiveness before a worker is killed | `120` | machine learning |
| `MACHINE_LEARNING_PRELOAD__CLIP` | Name of a CLIP model to be preloaded and kept in cache | | machine learning |
| `MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION` | Name of a facial recognition model to be preloaded and kept in cache | | machine learning |
\*1: It is recommended to begin with this parameter when changing the concurrency levels of the machine learning service and then tune the other ones.
\*2: Since each process duplicates models in memory, changing this is not recommended unless you have abundant memory to go around.
\*3: For scenarios like HPA in K8S. https://github.com/immich-app/immich/discussions/12064
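As a usage sketch (assuming the stock `docker-compose.yml`, which passes `.env` to the containers via `env_file`; the variable names come from the tables above and the values are examples):
```bash
# Sketch: set a couple of the variables above in .env and recreate the containers
cat >> .env <<'EOF'
IMMICH_LOG_LEVEL=debug
MACHINE_LEARNING_MODEL_TTL=600
EOF

docker compose up -d
```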
:::info
Other machine learning parameters can be tuned from the admin UI.

@@ -4,7 +4,7 @@ sidebar_position: 10
# Requirements
Hardware and software requirements for Immich:
Hardware and software requirements for Immich
## Software

@@ -8,10 +8,6 @@ sidebar_position: 20
This method is experimental and not currently recommended for production use. For production, please refer to installing with [Docker Compose](/docs/install/docker-compose.mdx).
:::
:::note
The install script only supports Linux operating systems and requires Docker to be already installed on the system.
:::
In the shell, from a directory of your choice, run the following command:
```bash

@@ -45,7 +45,7 @@ width="70%"
alt="Select Plugins > Compose.Manager > Add New Stack > Label it Immich"
/>
3. Select the cogwheel ⚙️ next to Immich and click "**Edit Stack**"
3. Select the cog ⚙️ next to Immich then click "**Edit Stack**"
4. Click "**Compose File**" and then paste the entire contents of the [Immich Docker Compose](https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml) file into the Unraid editor. Remove any text that may be in the text area by default. Note that Unraid v6.12.10 uses version 24.0.9 of the Docker Engine, which does not support healthcheck `start_interval` as defined in the `database` service of the Docker compose file (version 25 or higher is needed). This parameter defines an initial waiting period before starting health checks, to give the container time to start up. Commenting out the `start_interval` and `start_period` parameters will allow the containers to start up normally. The only downside to this is that the database container will not receive an initial health check until `interval` time has passed.
<details >
@@ -130,7 +130,7 @@ For more information on how to use the application once installed, please refer
## Updating Steps
Updating is extremely easy however it's important to be aware that containers managed via the Docker Compose Manager plugin do not integrate with Unraid's native dockerman UI, the label "_update ready_" will always be present on containers installed via the Docker Compose Manager.
Updating is extremely easy however it's important to be aware that containers managed via the Docker Compose Manager plugin do not integrate with Unraid's native dockerman ui, the label "_update ready_" will always be present on containers installed via the Docker Compose Manager.
<img
src={require('./img/unraid09.png').default}

[Binary image file changed, not shown (before: 1.5 MiB, after: 404 KiB)]
@@ -145,44 +145,11 @@ const config = {
label: 'Installation',
to: '/docs/install/requirements',
},
{
label: 'Contributing',
to: '/docs/overview/support-the-project',
},
{
label: 'Privacy Policy',
to: '/privacy-policy',
},
],
},
{
title: 'Documentation',
title: 'Community',
items: [
{
label: 'Roadmap',
to: '/roadmap',
},
{
label: 'API',
to: '/docs/api',
},
{
label: 'Cursed Knowledge',
to: '/cursed-knowledge',
},
],
},
{
title: 'Links',
items: [
{
label: 'GitHub',
href: 'https://github.com/immich-app/immich',
},
{
label: 'YouTube',
href: 'https://www.youtube.com/@immich-app',
},
{
label: 'Discord',
href: 'https://discord.immich.app',
@@ -193,6 +160,23 @@ const config = {
},
],
},
{
title: 'Links',
items: [
// {
// label: "Blog",
// to: "/blog",
// },
{
label: 'GitHub',
href: 'https://github.com/immich-app/immich',
},
{
label: 'YouTube',
href: 'https://www.youtube.com/@immich-app',
},
],
},
],
copyright: `Immich is available as open source under the terms of the GNU AGPL v3 License.`,
},

docs/package-lock.json (generated, 570 changed lines)
@@ -2155,9 +2155,9 @@
}
},
"node_modules/@docusaurus/core": {
"version": "3.5.2",
"resolved": "https://registry.npmjs.org/@docusaurus/core/-/core-3.5.2.tgz",
"integrity": "sha512-4Z1WkhCSkX4KO0Fw5m/Vuc7Q3NxBG53NE5u59Rs96fWkMPZVSrzEPP16/Nk6cWb/shK7xXPndTmalJtw7twL/w==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/core/-/core-3.4.0.tgz",
"integrity": "sha512-g+0wwmN2UJsBqy2fQRQ6fhXruoEa62JDeEa5d8IdTJlMoaDaEDfHh7WjwGRn4opuTQWpjAwP/fbcgyHKlE+64w==",
"license": "MIT",
"dependencies": {
"@babel/core": "^7.23.3",
@@ -2170,12 +2170,12 @@
"@babel/runtime": "^7.22.6",
"@babel/runtime-corejs3": "^7.22.6",
"@babel/traverse": "^7.22.8",
"@docusaurus/cssnano-preset": "3.5.2",
"@docusaurus/logger": "3.5.2",
"@docusaurus/mdx-loader": "3.5.2",
"@docusaurus/utils": "3.5.2",
"@docusaurus/utils-common": "3.5.2",
"@docusaurus/utils-validation": "3.5.2",
"@docusaurus/cssnano-preset": "3.4.0",
"@docusaurus/logger": "3.4.0",
"@docusaurus/mdx-loader": "3.4.0",
"@docusaurus/utils": "3.4.0",
"@docusaurus/utils-common": "3.4.0",
"@docusaurus/utils-validation": "3.4.0",
"autoprefixer": "^10.4.14",
"babel-loader": "^9.1.3",
"babel-plugin-dynamic-import-node": "^2.3.3",
@@ -2236,15 +2236,14 @@
"node": ">=18.0"
},
"peerDependencies": {
"@mdx-js/react": "^3.0.0",
"react": "^18.0.0",
"react-dom": "^18.0.0"
}
},
"node_modules/@docusaurus/cssnano-preset": {
"version": "3.5.2",
"resolved": "https://registry.npmjs.org/@docusaurus/cssnano-preset/-/cssnano-preset-3.5.2.tgz",
"integrity": "sha512-D3KiQXOMA8+O0tqORBrTOEQyQxNIfPm9jEaJoALjjSjc2M/ZAWcUfPQEnwr2JB2TadHw2gqWgpZckQmrVWkytA==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/cssnano-preset/-/cssnano-preset-3.4.0.tgz",
"integrity": "sha512-qwLFSz6v/pZHy/UP32IrprmH5ORce86BGtN0eBtG75PpzQJAzp9gefspox+s8IEOr0oZKuQ/nhzZ3xwyc3jYJQ==",
"license": "MIT",
"dependencies": {
"cssnano-preset-advanced": "^6.1.2",
@@ -2257,9 +2256,9 @@
}
},
"node_modules/@docusaurus/logger": {
"version": "3.5.2",
"resolved": "https://registry.npmjs.org/@docusaurus/logger/-/logger-3.5.2.tgz",
"integrity": "sha512-LHC540SGkeLfyT3RHK3gAMK6aS5TRqOD4R72BEU/DE2M/TY8WwEUAMY576UUc/oNJXv8pGhBmQB6N9p3pt8LQw==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/logger/-/logger-3.4.0.tgz",
"integrity": "sha512-bZwkX+9SJ8lB9kVRkXw+xvHYSMGG4bpYHKGXeXFvyVc79NMeeBSGgzd4TQLHH+DYeOJoCdl8flrFJVxlZ0wo/Q==",
"license": "MIT",
"dependencies": {
"chalk": "^4.1.2",
@@ -2270,14 +2269,14 @@
}
},
"node_modules/@docusaurus/mdx-loader": {
"version": "3.5.2",
"resolved": "https://registry.npmjs.org/@docusaurus/mdx-loader/-/mdx-loader-3.5.2.tgz",
"integrity": "sha512-ku3xO9vZdwpiMIVd8BzWV0DCqGEbCP5zs1iHfKX50vw6jX8vQo0ylYo1YJMZyz6e+JFJ17HYHT5FzVidz2IflA==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/mdx-loader/-/mdx-loader-3.4.0.tgz",
"integrity": "sha512-kSSbrrk4nTjf4d+wtBA9H+FGauf2gCax89kV8SUSJu3qaTdSIKdWERlngsiHaCFgZ7laTJ8a67UFf+xlFPtuTw==",
"license": "MIT",
"dependencies": {
"@docusaurus/logger": "3.5.2",
"@docusaurus/utils": "3.5.2",
"@docusaurus/utils-validation": "3.5.2",
"@docusaurus/logger": "3.4.0",
"@docusaurus/utils": "3.4.0",
"@docusaurus/utils-validation": "3.4.0",
"@mdx-js/mdx": "^3.0.0",
"@slorber/remark-comment": "^1.0.0",
"escape-html": "^1.0.3",
@@ -2309,12 +2308,12 @@
}
},
"node_modules/@docusaurus/module-type-aliases": {
"version": "3.5.2",
"resolved": "https://registry.npmjs.org/@docusaurus/module-type-aliases/-/module-type-aliases-3.5.2.tgz",
"integrity": "sha512-Z+Xu3+2rvKef/YKTMxZHsEXp1y92ac0ngjDiExRdqGTmEKtCUpkbNYH8v5eXo5Ls+dnW88n6WTa+Q54kLOkwPg==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/module-type-aliases/-/module-type-aliases-3.4.0.tgz",
"integrity": "sha512-A1AyS8WF5Bkjnb8s+guTDuYmUiwJzNrtchebBHpc0gz0PyHJNMaybUlSrmJjHVcGrya0LKI4YcR3lBDQfXRYLw==",
"license": "MIT",
"dependencies": {
"@docusaurus/types": "3.5.2",
"@docusaurus/types": "3.4.0",
"@types/history": "^4.7.11",
"@types/react": "*",
"@types/react-router-config": "*",
@@ -2327,21 +2326,52 @@
"react-dom": "*"
}
},
"node_modules/@docusaurus/plugin-content-docs": {
"version": "3.5.2",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-docs/-/plugin-content-docs-3.5.2.tgz",
"integrity": "sha512-Bt+OXn/CPtVqM3Di44vHjE7rPCEsRCB/DMo2qoOuozB9f7+lsdrHvD0QCHdBs0uhz6deYJDppAr2VgqybKPlVQ==",
"node_modules/@docusaurus/plugin-content-blog": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-blog/-/plugin-content-blog-3.4.0.tgz",
"integrity": "sha512-vv6ZAj78ibR5Jh7XBUT4ndIjmlAxkijM3Sx5MAAzC1gyv0vupDQNhzuFg1USQmQVj3P5I6bquk12etPV3LJ+Xw==",
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.5.2",
"@docusaurus/logger": "3.5.2",
"@docusaurus/mdx-loader": "3.5.2",
"@docusaurus/module-type-aliases": "3.5.2",
"@docusaurus/theme-common": "3.5.2",
"@docusaurus/types": "3.5.2",
"@docusaurus/utils": "3.5.2",
"@docusaurus/utils-common": "3.5.2",
"@docusaurus/utils-validation": "3.5.2",
"@docusaurus/core": "3.4.0",
"@docusaurus/logger": "3.4.0",
"@docusaurus/mdx-loader": "3.4.0",
"@docusaurus/types": "3.4.0",
"@docusaurus/utils": "3.4.0",
"@docusaurus/utils-common": "3.4.0",
"@docusaurus/utils-validation": "3.4.0",
"cheerio": "^1.0.0-rc.12",
"feed": "^4.2.2",
"fs-extra": "^11.1.1",
"lodash": "^4.17.21",
"reading-time": "^1.5.0",
"srcset": "^4.0.0",
"tslib": "^2.6.0",
"unist-util-visit": "^5.0.0",
"utility-types": "^3.10.0",
"webpack": "^5.88.1"
},
"engines": {
"node": ">=18.0"
},
"peerDependencies": {
"react": "^18.0.0",
"react-dom": "^18.0.0"
}
},
"node_modules/@docusaurus/plugin-content-docs": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-docs/-/plugin-content-docs-3.4.0.tgz",
"integrity": "sha512-HkUCZffhBo7ocYheD9oZvMcDloRnGhBMOZRyVcAQRFmZPmNqSyISlXA1tQCIxW+r478fty97XXAGjNYzBjpCsg==",
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.4.0",
"@docusaurus/logger": "3.4.0",
"@docusaurus/mdx-loader": "3.4.0",
"@docusaurus/module-type-aliases": "3.4.0",
"@docusaurus/types": "3.4.0",
"@docusaurus/utils": "3.4.0",
"@docusaurus/utils-common": "3.4.0",
"@docusaurus/utils-validation": "3.4.0",
"@types/react-router-config": "^5.0.7",
"combine-promises": "^1.1.0",
"fs-extra": "^11.1.1",
@@ -2359,15 +2389,38 @@
"react-dom": "^18.0.0"
}
},
"node_modules/@docusaurus/plugin-debug": {
"version": "3.5.2",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-debug/-/plugin-debug-3.5.2.tgz",
"integrity": "sha512-kBK6GlN0itCkrmHuCS6aX1wmoWc5wpd5KJlqQ1FyrF0cLDnvsYSnh7+ftdwzt7G6lGBho8lrVwkkL9/iQvaSOA==",
"node_modules/@docusaurus/plugin-content-pages": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-pages/-/plugin-content-pages-3.4.0.tgz",
"integrity": "sha512-h2+VN/0JjpR8fIkDEAoadNjfR3oLzB+v1qSXbIAKjQ46JAHx3X22n9nqS+BWSQnTnp1AjkjSvZyJMekmcwxzxg==",
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.5.2",
"@docusaurus/types": "3.5.2",
"@docusaurus/utils": "3.5.2",
"@docusaurus/core": "3.4.0",
"@docusaurus/mdx-loader": "3.4.0",
"@docusaurus/types": "3.4.0",
"@docusaurus/utils": "3.4.0",
"@docusaurus/utils-validation": "3.4.0",
"fs-extra": "^11.1.1",
"tslib": "^2.6.0",
"webpack": "^5.88.1"
},
"engines": {
"node": ">=18.0"
},
"peerDependencies": {
"react": "^18.0.0",
"react-dom": "^18.0.0"
}
},
"node_modules/@docusaurus/plugin-debug": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-debug/-/plugin-debug-3.4.0.tgz",
"integrity": "sha512-uV7FDUNXGyDSD3PwUaf5YijX91T5/H9SX4ErEcshzwgzWwBtK37nUWPU3ZLJfeTavX3fycTOqk9TglpOLaWkCg==",
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.4.0",
"@docusaurus/types": "3.4.0",
"@docusaurus/utils": "3.4.0",
"fs-extra": "^11.1.1",
"react-json-view-lite": "^1.2.0",
"tslib": "^2.6.0"
@@ -2381,14 +2434,14 @@
}
},
"node_modules/@docusaurus/plugin-google-analytics": {
"version": "3.5.2",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-3.5.2.tgz",
"integrity": "sha512-rjEkJH/tJ8OXRE9bwhV2mb/WP93V441rD6XnM6MIluu7rk8qg38iSxS43ga2V2Q/2ib53PcqbDEJDG/yWQRJhQ==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-3.4.0.tgz",
"integrity": "sha512-mCArluxEGi3cmYHqsgpGGt3IyLCrFBxPsxNZ56Mpur0xSlInnIHoeLDH7FvVVcPJRPSQ9/MfRqLsainRw+BojA==",
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.5.2",
"@docusaurus/types": "3.5.2",
"@docusaurus/utils-validation": "3.5.2",
"@docusaurus/core": "3.4.0",
"@docusaurus/types": "3.4.0",
"@docusaurus/utils-validation": "3.4.0",
"tslib": "^2.6.0"
},
"engines": {
@@ -2400,14 +2453,14 @@
}
},
"node_modules/@docusaurus/plugin-google-gtag": {
"version": "3.5.2",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-3.5.2.tgz",
"integrity": "sha512-lm8XL3xLkTPHFKKjLjEEAHUrW0SZBSHBE1I+i/tmYMBsjCcUB5UJ52geS5PSiOCFVR74tbPGcPHEV/gaaxFeSA==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-3.4.0.tgz",
"integrity": "sha512-Dsgg6PLAqzZw5wZ4QjUYc8Z2KqJqXxHxq3vIoyoBWiLEEfigIs7wHR+oiWUQy3Zk9MIk6JTYj7tMoQU0Jm3nqA==",
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.5.2",
"@docusaurus/types": "3.5.2",
"@docusaurus/utils-validation": "3.5.2",
"@docusaurus/core": "3.4.0",
"@docusaurus/types": "3.4.0",
"@docusaurus/utils-validation": "3.4.0",
"@types/gtag.js": "^0.0.12",
"tslib": "^2.6.0"
},
@@ -2420,14 +2473,14 @@
}
},
"node_modules/@docusaurus/plugin-google-tag-manager": {
"version": "3.5.2",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-tag-manager/-/plugin-google-tag-manager-3.5.2.tgz",
"integrity": "sha512-QkpX68PMOMu10Mvgvr5CfZAzZQFx8WLlOiUQ/Qmmcl6mjGK6H21WLT5x7xDmcpCoKA/3CegsqIqBR+nA137lQg==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-tag-manager/-/plugin-google-tag-manager-3.4.0.tgz",
"integrity": "sha512-O9tX1BTwxIhgXpOLpFDueYA9DWk69WCbDRrjYoMQtFHSkTyE7RhNgyjSPREUWJb9i+YUg3OrsvrBYRl64FCPCQ==",
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.5.2",
"@docusaurus/types": "3.5.2",
"@docusaurus/utils-validation": "3.5.2",
"@docusaurus/core": "3.4.0",
"@docusaurus/types": "3.4.0",
"@docusaurus/utils-validation": "3.4.0",
"tslib": "^2.6.0"
},
"engines": {
@@ -2439,17 +2492,17 @@
}
},
"node_modules/@docusaurus/plugin-sitemap": {
"version": "3.5.2",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-sitemap/-/plugin-sitemap-3.5.2.tgz",
"integrity": "sha512-DnlqYyRAdQ4NHY28TfHuVk414ft2uruP4QWCH//jzpHjqvKyXjj2fmDtI8RPUBh9K8iZKFMHRnLtzJKySPWvFA==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-sitemap/-/plugin-sitemap-3.4.0.tgz",
"integrity": "sha512-+0VDvx9SmNrFNgwPoeoCha+tRoAjopwT0+pYO1xAbyLcewXSemq+eLxEa46Q1/aoOaJQ0qqHELuQM7iS2gp33Q==",
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.5.2",
"@docusaurus/logger": "3.5.2",
"@docusaurus/types": "3.5.2",
"@docusaurus/utils": "3.5.2",
"@docusaurus/utils-common": "3.5.2",
"@docusaurus/utils-validation": "3.5.2",
"@docusaurus/core": "3.4.0",
"@docusaurus/logger": "3.4.0",
"@docusaurus/types": "3.4.0",
"@docusaurus/utils": "3.4.0",
"@docusaurus/utils-common": "3.4.0",
"@docusaurus/utils-validation": "3.4.0",
"fs-extra": "^11.1.1",
"sitemap": "^7.1.1",
"tslib": "^2.6.0"
@@ -2463,81 +2516,24 @@
}
},
"node_modules/@docusaurus/preset-classic": {
"version": "3.5.2",
"resolved": "https://registry.npmjs.org/@docusaurus/preset-classic/-/preset-classic-3.5.2.tgz",
"integrity": "sha512-3ihfXQ95aOHiLB5uCu+9PRy2gZCeSZoDcqpnDvf3B+sTrMvMTr8qRUzBvWkoIqc82yG5prCboRjk1SVILKx6sg==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/preset-classic/-/preset-classic-3.4.0.tgz",
"integrity": "sha512-Ohj6KB7siKqZaQhNJVMBBUzT3Nnp6eTKqO+FXO3qu/n1hJl3YLwVKTWBg28LF7MWrKu46UuYavwMRxud0VyqHg==",
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.5.2",
"@docusaurus/plugin-content-blog": "3.5.2",
"@docusaurus/plugin-content-docs": "3.5.2",
"@docusaurus/plugin-content-pages": "3.5.2",
"@docusaurus/plugin-debug": "3.5.2",
"@docusaurus/plugin-google-analytics": "3.5.2",
"@docusaurus/plugin-google-gtag": "3.5.2",
"@docusaurus/plugin-google-tag-manager": "3.5.2",
"@docusaurus/plugin-sitemap": "3.5.2",
"@docusaurus/theme-classic": "3.5.2",
"@docusaurus/theme-common": "3.5.2",
"@docusaurus/theme-search-algolia": "3.5.2",
"@docusaurus/types": "3.5.2"
},
"engines": {
"node": ">=18.0"
},
"peerDependencies": {
"react": "^18.0.0",
"react-dom": "^18.0.0"
}
},
"node_modules/@docusaurus/preset-classic/node_modules/@docusaurus/plugin-content-blog": {
"version": "3.5.2",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-blog/-/plugin-content-blog-3.5.2.tgz",
"integrity": "sha512-R7ghWnMvjSf+aeNDH0K4fjyQnt5L0KzUEnUhmf1e3jZrv3wogeytZNN6n7X8yHcMsuZHPOrctQhXWnmxu+IRRg==",
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.5.2",
"@docusaurus/logger": "3.5.2",
"@docusaurus/mdx-loader": "3.5.2",
"@docusaurus/theme-common": "3.5.2",
"@docusaurus/types": "3.5.2",
"@docusaurus/utils": "3.5.2",
"@docusaurus/utils-common": "3.5.2",
"@docusaurus/utils-validation": "3.5.2",
"cheerio": "1.0.0-rc.12",
"feed": "^4.2.2",
"fs-extra": "^11.1.1",
"lodash": "^4.17.21",
"reading-time": "^1.5.0",
"srcset": "^4.0.0",
"tslib": "^2.6.0",
"unist-util-visit": "^5.0.0",
"utility-types": "^3.10.0",
"webpack": "^5.88.1"
},
"engines": {
"node": ">=18.0"
},
"peerDependencies": {
"@docusaurus/plugin-content-docs": "*",
"react": "^18.0.0",
"react-dom": "^18.0.0"
}
},
"node_modules/@docusaurus/preset-classic/node_modules/@docusaurus/plugin-content-pages": {
"version": "3.5.2",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-pages/-/plugin-content-pages-3.5.2.tgz",
"integrity": "sha512-WzhHjNpoQAUz/ueO10cnundRz+VUtkjFhhaQ9jApyv1a46FPURO4cef89pyNIOMny1fjDz/NUN2z6Yi+5WUrCw==",
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.5.2",
"@docusaurus/mdx-loader": "3.5.2",
"@docusaurus/types": "3.5.2",
"@docusaurus/utils": "3.5.2",
"@docusaurus/utils-validation": "3.5.2",
"fs-extra": "^11.1.1",
"tslib": "^2.6.0",
"webpack": "^5.88.1"
"@docusaurus/core": "3.4.0",
"@docusaurus/plugin-content-blog": "3.4.0",
"@docusaurus/plugin-content-docs": "3.4.0",
"@docusaurus/plugin-content-pages": "3.4.0",
"@docusaurus/plugin-debug": "3.4.0",
"@docusaurus/plugin-google-analytics": "3.4.0",
"@docusaurus/plugin-google-gtag": "3.4.0",
"@docusaurus/plugin-google-tag-manager": "3.4.0",
"@docusaurus/plugin-sitemap": "3.4.0",
"@docusaurus/theme-classic": "3.4.0",
"@docusaurus/theme-common": "3.4.0",
"@docusaurus/theme-search-algolia": "3.4.0",
"@docusaurus/types": "3.4.0"
},
"engines": {
"node": ">=18.0"
@@ -2548,27 +2544,27 @@
}
},
"node_modules/@docusaurus/theme-classic": {
"version": "3.5.2",
"resolved": "https://registry.npmjs.org/@docusaurus/theme-classic/-/theme-classic-3.5.2.tgz",
"integrity": "sha512-XRpinSix3NBv95Rk7xeMF9k4safMkwnpSgThn0UNQNumKvmcIYjfkwfh2BhwYh/BxMXQHJ/PdmNh22TQFpIaYg==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/theme-classic/-/theme-classic-3.4.0.tgz",
"integrity": "sha512-0IPtmxsBYv2adr1GnZRdMkEQt1YW6tpzrUPj02YxNpvJ5+ju4E13J5tB4nfdaen/tfR1hmpSPlTFPvTf4kwy8Q==",
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.5.2",
"@docusaurus/mdx-loader": "3.5.2",
"@docusaurus/module-type-aliases": "3.5.2",
"@docusaurus/plugin-content-blog": "3.5.2",
"@docusaurus/plugin-content-docs": "3.5.2",
"@docusaurus/plugin-content-pages": "3.5.2",
"@docusaurus/theme-common": "3.5.2",
"@docusaurus/theme-translations": "3.5.2",
"@docusaurus/types": "3.5.2",
"@docusaurus/utils": "3.5.2",
"@docusaurus/utils-common": "3.5.2",
"@docusaurus/utils-validation": "3.5.2",
"@docusaurus/core": "3.4.0",
"@docusaurus/mdx-loader": "3.4.0",
"@docusaurus/module-type-aliases": "3.4.0",
"@docusaurus/plugin-content-blog": "3.4.0",
"@docusaurus/plugin-content-docs": "3.4.0",
"@docusaurus/plugin-content-pages": "3.4.0",
"@docusaurus/theme-common": "3.4.0",
"@docusaurus/theme-translations": "3.4.0",
"@docusaurus/types": "3.4.0",
"@docusaurus/utils": "3.4.0",
"@docusaurus/utils-common": "3.4.0",
"@docusaurus/utils-validation": "3.4.0",
"@mdx-js/react": "^3.0.0",
"clsx": "^2.0.0",
"copy-text-to-clipboard": "^3.2.0",
"infima": "0.2.0-alpha.44",
"infima": "0.2.0-alpha.43",
"lodash": "^4.17.21",
"nprogress": "^0.2.0",
"postcss": "^8.4.26",
@@ -2587,73 +2583,19 @@
"react-dom": "^18.0.0"
}
},
"node_modules/@docusaurus/theme-classic/node_modules/@docusaurus/plugin-content-blog": {
"version": "3.5.2",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-blog/-/plugin-content-blog-3.5.2.tgz",
"integrity": "sha512-R7ghWnMvjSf+aeNDH0K4fjyQnt5L0KzUEnUhmf1e3jZrv3wogeytZNN6n7X8yHcMsuZHPOrctQhXWnmxu+IRRg==",
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.5.2",
"@docusaurus/logger": "3.5.2",
"@docusaurus/mdx-loader": "3.5.2",
"@docusaurus/theme-common": "3.5.2",
"@docusaurus/types": "3.5.2",
"@docusaurus/utils": "3.5.2",
"@docusaurus/utils-common": "3.5.2",
"@docusaurus/utils-validation": "3.5.2",
"cheerio": "1.0.0-rc.12",
"feed": "^4.2.2",
"fs-extra": "^11.1.1",
"lodash": "^4.17.21",
"reading-time": "^1.5.0",
"srcset": "^4.0.0",
"tslib": "^2.6.0",
"unist-util-visit": "^5.0.0",
"utility-types": "^3.10.0",
"webpack": "^5.88.1"
},
"engines": {
"node": ">=18.0"
},
"peerDependencies": {
"@docusaurus/plugin-content-docs": "*",
"react": "^18.0.0",
"react-dom": "^18.0.0"
}
},
"node_modules/@docusaurus/theme-classic/node_modules/@docusaurus/plugin-content-pages": {
"version": "3.5.2",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-pages/-/plugin-content-pages-3.5.2.tgz",
"integrity": "sha512-WzhHjNpoQAUz/ueO10cnundRz+VUtkjFhhaQ9jApyv1a46FPURO4cef89pyNIOMny1fjDz/NUN2z6Yi+5WUrCw==",
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.5.2",
"@docusaurus/mdx-loader": "3.5.2",
"@docusaurus/types": "3.5.2",
"@docusaurus/utils": "3.5.2",
"@docusaurus/utils-validation": "3.5.2",
"fs-extra": "^11.1.1",
"tslib": "^2.6.0",
"webpack": "^5.88.1"
},
"engines": {
"node": ">=18.0"
},
"peerDependencies": {
"react": "^18.0.0",
"react-dom": "^18.0.0"
}
},
"node_modules/@docusaurus/theme-common": {
"version": "3.5.2",
"resolved": "https://registry.npmjs.org/@docusaurus/theme-common/-/theme-common-3.5.2.tgz",
"integrity": "sha512-QXqlm9S6x9Ibwjs7I2yEDgsCocp708DrCrgHgKwg2n2AY0YQ6IjU0gAK35lHRLOvAoJUfCKpQAwUykB0R7+Eew==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/theme-common/-/theme-common-3.4.0.tgz",
"integrity": "sha512-0A27alXuv7ZdCg28oPE8nH/Iz73/IUejVaCazqu9elS4ypjiLhK3KfzdSQBnL/g7YfHSlymZKdiOHEo8fJ0qMA==",
"license": "MIT",
"dependencies": {
"@docusaurus/mdx-loader": "3.5.2",
"@docusaurus/module-type-aliases": "3.5.2",
"@docusaurus/utils": "3.5.2",
"@docusaurus/utils-common": "3.5.2",
"@docusaurus/mdx-loader": "3.4.0",
"@docusaurus/module-type-aliases": "3.4.0",
"@docusaurus/plugin-content-blog": "3.4.0",
"@docusaurus/plugin-content-docs": "3.4.0",
"@docusaurus/plugin-content-pages": "3.4.0",
"@docusaurus/utils": "3.4.0",
"@docusaurus/utils-common": "3.4.0",
"@types/history": "^4.7.11",
"@types/react": "*",
"@types/react-router-config": "*",
@@ -2667,25 +2609,24 @@
"node": ">=18.0"
},
"peerDependencies": {
"@docusaurus/plugin-content-docs": "*",
"react": "^18.0.0",
"react-dom": "^18.0.0"
}
},
"node_modules/@docusaurus/theme-search-algolia": {
"version": "3.5.2",
"resolved": "https://registry.npmjs.org/@docusaurus/theme-search-algolia/-/theme-search-algolia-3.5.2.tgz",
"integrity": "sha512-qW53kp3VzMnEqZGjakaV90sst3iN1o32PH+nawv1uepROO8aEGxptcq2R5rsv7aBShSRbZwIobdvSYKsZ5pqvA==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/theme-search-algolia/-/theme-search-algolia-3.4.0.tgz",
"integrity": "sha512-aiHFx7OCw4Wck1z6IoShVdUWIjntC8FHCw9c5dR8r3q4Ynh+zkS8y2eFFunN/DL6RXPzpnvKCg3vhLQYJDmT9Q==",
"license": "MIT",
"dependencies": {
"@docsearch/react": "^3.5.2",
"@docusaurus/core": "3.5.2",
"@docusaurus/logger": "3.5.2",
"@docusaurus/plugin-content-docs": "3.5.2",
"@docusaurus/theme-common": "3.5.2",
"@docusaurus/theme-translations": "3.5.2",
"@docusaurus/utils": "3.5.2",
"@docusaurus/utils-validation": "3.5.2",
"@docusaurus/core": "3.4.0",
"@docusaurus/logger": "3.4.0",
"@docusaurus/plugin-content-docs": "3.4.0",
"@docusaurus/theme-common": "3.4.0",
"@docusaurus/theme-translations": "3.4.0",
"@docusaurus/utils": "3.4.0",
"@docusaurus/utils-validation": "3.4.0",
"algoliasearch": "^4.18.0",
"algoliasearch-helper": "^3.13.3",
"clsx": "^2.0.0",
@@ -2704,9 +2645,9 @@
}
},
"node_modules/@docusaurus/theme-translations": {
"version": "3.5.2",
"resolved": "https://registry.npmjs.org/@docusaurus/theme-translations/-/theme-translations-3.5.2.tgz",
"integrity": "sha512-GPZLcu4aT1EmqSTmbdpVrDENGR2yObFEX8ssEFYTCiAIVc0EihNSdOIBTazUvgNqwvnoU1A8vIs1xyzc3LITTw==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/theme-translations/-/theme-translations-3.4.0.tgz",
"integrity": "sha512-zSxCSpmQCCdQU5Q4CnX/ID8CSUUI3fvmq4hU/GNP/XoAWtXo9SAVnM3TzpU8Gb//H3WCsT8mJcTfyOk3d9ftNg==",
"license": "MIT",
"dependencies": {
"fs-extra": "^11.1.1",
@@ -2717,9 +2658,9 @@
}
},
"node_modules/@docusaurus/types": {
"version": "3.5.2",
"resolved": "https://registry.npmjs.org/@docusaurus/types/-/types-3.5.2.tgz",
"integrity": "sha512-N6GntLXoLVUwkZw7zCxwy9QiuEXIcTVzA9AkmNw16oc0AP3SXLrMmDMMBIfgqwuKWa6Ox6epHol9kMtJqekACw==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/types/-/types-3.4.0.tgz",
"integrity": "sha512-4jcDO8kXi5Cf9TcyikB/yKmz14f2RZ2qTRerbHAsS+5InE9ZgSLBNLsewtFTcTOXSVcbU3FoGOzcNWAmU1TR0A==",
"license": "MIT",
"dependencies": {
"@mdx-js/mdx": "^3.0.0",
@@ -2738,13 +2679,13 @@
}
},
"node_modules/@docusaurus/utils": {
"version": "3.5.2",
"resolved": "https://registry.npmjs.org/@docusaurus/utils/-/utils-3.5.2.tgz",
"integrity": "sha512-33QvcNFh+Gv+C2dP9Y9xWEzMgf3JzrpL2nW9PopidiohS1nDcyknKRx2DWaFvyVTTYIkkABVSr073VTj/NITNA==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/utils/-/utils-3.4.0.tgz",
"integrity": "sha512-fRwnu3L3nnWaXOgs88BVBmG1yGjcQqZNHG+vInhEa2Sz2oQB+ZjbEMO5Rh9ePFpZ0YDiDUhpaVjwmS+AU2F14g==",
"license": "MIT",
"dependencies": {
"@docusaurus/logger": "3.5.2",
"@docusaurus/utils-common": "3.5.2",
"@docusaurus/logger": "3.4.0",
"@docusaurus/utils-common": "3.4.0",
"@svgr/webpack": "^8.1.0",
"escape-string-regexp": "^4.0.0",
"file-loader": "^6.2.0",
@@ -2777,9 +2718,9 @@
}
},
"node_modules/@docusaurus/utils-common": {
"version": "3.5.2",
"resolved": "https://registry.npmjs.org/@docusaurus/utils-common/-/utils-common-3.5.2.tgz",
"integrity": "sha512-i0AZjHiRgJU6d7faQngIhuHKNrszpL/SHQPgF1zH4H+Ij6E9NBYGy6pkcGWToIv7IVPbs+pQLh1P3whn0gWXVg==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/utils-common/-/utils-common-3.4.0.tgz",
"integrity": "sha512-NVx54Wr4rCEKsjOH5QEVvxIqVvm+9kh7q8aYTU5WzUU9/Hctd6aTrcZ3G0Id4zYJ+AeaG5K5qHA4CY5Kcm2iyQ==",
"license": "MIT",
"dependencies": {
"tslib": "^2.6.0"
@@ -2797,14 +2738,14 @@
}
},
"node_modules/@docusaurus/utils-validation": {
"version": "3.5.2",
"resolved": "https://registry.npmjs.org/@docusaurus/utils-validation/-/utils-validation-3.5.2.tgz",
"integrity": "sha512-m+Foq7augzXqB6HufdS139PFxDC5d5q2QKZy8q0qYYvGdI6nnlNsGH4cIGsgBnV7smz+mopl3g4asbSDvMV0jA==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/utils-validation/-/utils-validation-3.4.0.tgz",
"integrity": "sha512-hYQ9fM+AXYVTWxJOT1EuNaRnrR2WGpRdLDQG07O8UOpsvCPWUVOeo26Rbm0JWY2sGLfzAb+tvJ62yF+8F+TV0g==",
"license": "MIT",
"dependencies": {
"@docusaurus/logger": "3.5.2",
"@docusaurus/utils": "3.5.2",
"@docusaurus/utils-common": "3.5.2",
"@docusaurus/logger": "3.4.0",
"@docusaurus/utils": "3.4.0",
"@docusaurus/utils-common": "3.4.0",
"fs-extra": "^11.2.0",
"joi": "^17.9.2",
"js-yaml": "^4.1.0",
@@ -4296,9 +4237,9 @@
}
},
"node_modules/autoprefixer": {
"version": "10.4.20",
"resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.20.tgz",
"integrity": "sha512-XY25y5xSv/wEoqzDyXXME4AFfkZI0P23z6Fs3YgymDnKJkCGOnkL0iTxCa85UTqaSgfcqyf3UA6+c7wUvx/16g==",
"version": "10.4.19",
"resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.19.tgz",
"integrity": "sha512-BaENR2+zBZ8xXhM4pUaKUxlVdxZ0EZhjvbopwnXmxRUfqDmwSpC2lAi/QXvx7NRdPCo1WKEcEF6mV64si1z4Ew==",
"funding": [
{
"type": "opencollective",
@@ -4313,13 +4254,12 @@
"url": "https://github.com/sponsors/ai"
}
],
"license": "MIT",
"dependencies": {
"browserslist": "^4.23.3",
"caniuse-lite": "^1.0.30001646",
"browserslist": "^4.23.0",
"caniuse-lite": "^1.0.30001599",
"fraction.js": "^4.3.7",
"normalize-range": "^0.1.2",
"picocolors": "^1.0.1",
"picocolors": "^1.0.0",
"postcss-value-parser": "^4.2.0"
},
"bin": {
@@ -4591,9 +4531,9 @@
}
},
"node_modules/browserslist": {
"version": "4.23.3",
"resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.23.3.tgz",
"integrity": "sha512-btwCFJVjI4YWDNfau8RhZ+B1Q/VLoUITrm3RlP6y1tYGWIOa+InuYiRGXUBXo8nA1qKmHMyLB/iVQg5TT4eFoA==",
"version": "4.23.0",
"resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.23.0.tgz",
"integrity": "sha512-QW8HiM1shhT2GuzkvklfjcKDiWFXHOeFCIA/huJPwHsslwcydgk7X+z2zXpEijP98UCY7HbubZt5J2Zgvf0CaQ==",
"funding": [
{
"type": "opencollective",
@@ -4608,12 +4548,11 @@
"url": "https://github.com/sponsors/ai"
}
],
"license": "MIT",
"dependencies": {
"caniuse-lite": "^1.0.30001646",
"electron-to-chromium": "^1.5.4",
"node-releases": "^2.0.18",
"update-browserslist-db": "^1.1.0"
"caniuse-lite": "^1.0.30001587",
"electron-to-chromium": "^1.4.668",
"node-releases": "^2.0.14",
"update-browserslist-db": "^1.0.13"
},
"bin": {
"browserslist": "cli.js"
@@ -4760,9 +4699,9 @@
}
},
"node_modules/caniuse-lite": {
"version": "1.0.30001651",
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001651.tgz",
"integrity": "sha512-9Cf+Xv1jJNe1xPZLGuUXLNkE1BoDkqRqYyFJ9TDYSqhduqA4hu4oR9HluGoWYQC/aj8WHjsGVV+bwkh0+tegRg==",
"version": "1.0.30001614",
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001614.tgz",
"integrity": "sha512-jmZQ1VpmlRwHgdP1/uiKzgiAuGOfLEJsYFP4+GBou/QQ4U6IOJCB4NP1c+1p9RGLpwObcT94jA5/uO+F1vBbog==",
"funding": [
{
"type": "opencollective",
@@ -4776,8 +4715,7 @@
"type": "github",
"url": "https://github.com/sponsors/ai"
}
],
"license": "CC-BY-4.0"
]
},
"node_modules/ccount": {
"version": "2.0.1",
@@ -6404,10 +6342,9 @@
"integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow=="
},
"node_modules/electron-to-chromium": {
"version": "1.5.6",
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.6.tgz",
"integrity": "sha512-jwXWsM5RPf6j9dPYzaorcBSUg6AiqocPEyMpkchkvntaH9HGfOOMZwxMJjDY/XEs3T5dM7uyH1VhRMkqUU9qVw==",
"license": "ISC"
"version": "1.4.751",
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.751.tgz",
"integrity": "sha512-2DEPi++qa89SMGRhufWTiLmzqyuGmNF3SK4+PQetW1JKiZdEpF4XQonJXJCzyuYSA6mauiMhbyVhqYAP45Hvfw=="
},
"node_modules/emoji-regex": {
"version": "9.2.2",
@@ -8826,10 +8763,9 @@
}
},
"node_modules/infima": {
"version": "0.2.0-alpha.44",
"resolved": "https://registry.npmjs.org/infima/-/infima-0.2.0-alpha.44.tgz",
"integrity": "sha512-tuRkUSO/lB3rEhLJk25atwAjgLuzq070+pOW8XcvpHky/YbENnRRdPd85IBkyeTgttmOy5ah+yHYsK1HhUd4lQ==",
"license": "MIT",
"version": "0.2.0-alpha.43",
"resolved": "https://registry.npmjs.org/infima/-/infima-0.2.0-alpha.43.tgz",
"integrity": "sha512-2uw57LvUqW0rK/SWYnd/2rRfxNA5DDNOh33jxF7fy46VWoNhGxiUQyVZHbBMjQ33mQem0cjdDVwgWVAmlRfgyQ==",
"engines": {
"node": ">=12"
}
@@ -12022,10 +11958,9 @@
}
},
"node_modules/node-releases": {
"version": "2.0.18",
"resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.18.tgz",
"integrity": "sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g==",
"license": "MIT"
"version": "2.0.14",
"resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.14.tgz",
"integrity": "sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw=="
},
"node_modules/nopt": {
"version": "1.0.10",
@@ -12819,9 +12754,9 @@
}
},
"node_modules/postcss": {
"version": "8.4.40",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.40.tgz",
"integrity": "sha512-YF2kKIUzAofPMpfH6hOi2cGnv/HrUlfucspc7pDyvv7kGdqXrfj8SCl/t8owkEgKEuu8ZcRjSOxFxVLqwChZ2Q==",
"version": "8.4.39",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.39.tgz",
"integrity": "sha512-0vzE+lAiG7hZl1/9I8yzKLx3aR9Xbof3fBHKunvMfOCYAtMhrsnccJY2iTURb9EZd5+pLuiNV9/c/GZJOHsgIw==",
"funding": [
{
"type": "opencollective",
@@ -13665,9 +13600,9 @@
}
},
"node_modules/prettier": {
"version": "3.3.3",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.3.3.tgz",
"integrity": "sha512-i2tDNA0O5IrMO757lfrdQZCc2jPNDVntV0m/+4whiDfWaTKfMNgR7Qz0NAeGz/nRqF4m5/6CLzbP4/liHt12Ew==",
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.3.2.tgz",
"integrity": "sha512-rAVeHYMcv8ATV5d508CFdn+8/pHPpXeIid1DdrPwXnaAdH7cqjVbpJaT5eq4yRAFU/lsbwYwSF/n5iNrdJHPQA==",
"dev": true,
"license": "MIT",
"bin": {
@@ -13812,10 +13747,9 @@
}
},
"node_modules/qs": {
"version": "6.13.0",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz",
"integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==",
"license": "BSD-3-Clause",
"version": "6.12.1",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.12.1.tgz",
"integrity": "sha512-zWmv4RSuB9r2mYQw3zxQuHWeU+42aKi1wWig/j4ele4ygELZ7PEO6MM7rim9oAQH2A5MWfsAVf/jPvTPgCbvUQ==",
"dependencies": {
"side-channel": "^1.0.6"
},
@@ -16080,9 +16014,9 @@
}
},
"node_modules/tailwindcss": {
"version": "3.4.10",
"resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.10.tgz",
"integrity": "sha512-KWZkVPm7yJRhdu4SRSl9d4AK2wM3a50UsvgHZO7xY77NQr2V+fIrEuoDGQcbvswWvFGbS2f6e+jC/6WJm1Dl0w==",
"version": "3.4.4",
"resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.4.tgz",
"integrity": "sha512-ZoyXOdJjISB7/BcLTR6SEsLgKtDStYyYZVLsUtWChO4Ps20CBad7lfJKVDiejocV4ME1hLmyY0WJE3hSDcmQ2A==",
"license": "MIT",
"dependencies": {
"@alloc/quick-lru": "^5.2.0",
@@ -16442,9 +16376,9 @@
}
},
"node_modules/typescript": {
"version": "5.5.4",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.4.tgz",
"integrity": "sha512-Mtq29sKDAEYP7aljRgtPOpTvOfbwRWlS6dPRzwjdE+C0R4brX/GUyhHSecbHMFLNBLcJIPt9nl9yG5TZ1weH+Q==",
"version": "5.5.3",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.3.tgz",
"integrity": "sha512-/hreyEujaB0w76zKo6717l3L0o/qEUtRgdvUBvlkhoWeOVMjMuHNHk0BRBzikzuGDqNmPQbg5ifMEqsHLiIUcQ==",
"license": "Apache-2.0",
"bin": {
"tsc": "bin/tsc",
@@ -16673,9 +16607,9 @@
}
},
"node_modules/update-browserslist-db": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.0.tgz",
"integrity": "sha512-EdRAaAyk2cUE1wOf2DkEhzxqOQvFOoRJFNS6NeyJ01Gp2beMRpBAINjM2iDXE3KCuKhwnvHIQCJm6ThL2Z+HzQ==",
"version": "1.0.13",
"resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.13.tgz",
"integrity": "sha512-xebP81SNcPuNpPP3uzeW1NYXxI3rxyJzF3pD6sH4jE7o/IX+WtSpwnVU+qIsDPyk0d3hmFQ7mjqc6AtV604hbg==",
"funding": [
{
"type": "opencollective",
@@ -16690,10 +16624,9 @@
"url": "https://github.com/sponsors/ai"
}
],
"license": "MIT",
"dependencies": {
"escalade": "^3.1.2",
"picocolors": "^1.0.1"
"escalade": "^3.1.1",
"picocolors": "^1.0.0"
},
"bin": {
"update-browserslist-db": "cli.js"
@@ -16781,16 +16714,12 @@
}
},
"node_modules/url": {
"version": "0.11.4",
"resolved": "https://registry.npmjs.org/url/-/url-0.11.4.tgz",
"integrity": "sha512-oCwdVC7mTuWiPyjLUz/COz5TLk6wgp0RCsN+wHZ2Ekneac9w8uuV0njcbbie2ME+Vs+d6duwmYuR3HgQXs1fOg==",
"license": "MIT",
"version": "0.11.3",
"resolved": "https://registry.npmjs.org/url/-/url-0.11.3.tgz",
"integrity": "sha512-6hxOLGfZASQK/cijlZnZJTq8OXAkt/3YGfQX45vvMYXpZoo8NdWZcY73K108Jf759lS1Bv/8wXnHDTSz17dSRw==",
"dependencies": {
"punycode": "^1.4.1",
"qs": "^6.12.3"
},
"engines": {
"node": ">= 0.4"
"qs": "^6.11.2"
}
},
"node_modules/url-loader": {
@@ -16854,8 +16783,7 @@
"node_modules/url/node_modules/punycode": {
"version": "1.4.1",
"resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz",
"integrity": "sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ==",
"license": "MIT"
"integrity": "sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ=="
},
"node_modules/util": {
"version": "0.10.4",

View File

@@ -56,6 +56,6 @@
"node": ">=20"
},
"volta": {
"node": "20.17.0"
"node": "20.15.1"
}
}

View File

@@ -43,11 +43,6 @@ const guides: CommunityGuidesProps[] = [
description: 'Access your local Immich installation over the internet using your own domain',
url: 'https://github.com/ppr88/immich-guides/blob/main/open-immich-custom-domain.md',
},
{
title: 'Nginx caching map server',
description: 'Increase privacy by using nginx as a caching proxy in front of a map tile server',
url: 'https://github.com/pcouy/pcouy.github.io/blob/main/_posts/2024-08-30-proxying-a-map-tile-server-for-increased-privacy.md',
},
];
function CommunityGuide({ title, description, url }: CommunityGuidesProps): JSX.Element {

View File

@@ -28,6 +28,11 @@ const projects: CommunityProjectProps[] = [
description: 'A simple way to remove orphaned offline assets from the Immich database',
url: 'https://github.com/Thoroslives/immich_remove_offline_files',
},
{
title: 'Create albums from folders',
description: 'A Python script to create albums based on the folder structure of an external library.',
url: 'https://github.com/Salvoxia/immich-folder-album-creator',
},
{
title: 'Immich-Tools',
description: 'Provides scripts for handling problems on the repair page.',
@@ -38,11 +43,6 @@ const projects: CommunityProjectProps[] = [
description: 'Lightroom plugin to publish photos from Lightroom collections to Immich albums.',
url: 'https://github.com/midzelis/mi.Immich.Publisher',
},
{
title: 'Lightroom Immich Plugin: lrc-immich-plugin',
description: 'Another Lightroom plugin to publish or export photos from Lightroom to Immich.',
url: 'https://github.com/bmachek/lrc-immich-plugin',
},
{
title: 'Immich Duplicate Finder',
description: 'Webapp that uses machine learning to identify near-duplicate images.',
@@ -58,31 +58,11 @@ const projects: CommunityProjectProps[] = [
description: 'Unofficial Immich Android TV app.',
url: 'https://github.com/giejay/Immich-Android-TV',
},
{
title: 'Create albums from folders',
description: 'A Python script to create albums based on the folder structure of an external library.',
url: 'https://github.com/Salvoxia/immich-folder-album-creator',
},
{
title: 'Powershell Module PSImmich',
description: 'Powershell Module for the Immich API',
url: 'https://github.com/hanpq/PSImmich',
},
{
title: 'Immich Distribution',
description: 'Snap package for easy install and zero-care auto updates of Immich. Self-hosted photo management.',
url: 'https://immich-distribution.nsg.cc',
},
{
title: 'Immich Kiosk',
description: 'Lightweight slideshow to run on kiosk devices and browsers.',
url: 'https://github.com/damongolding/immich-kiosk',
},
{
title: 'Immich Power Tools',
description: 'Power tools for organizing your immich library.',
url: 'https://github.com/varun-raj/immich-power-tools',
},
];
function CommunityProject({ title, description, url }: CommunityProjectProps): JSX.Element {

View File

@@ -1,3 +1,4 @@
import '@docusaurus/theme-classic/lib/theme/Unlisted/index';
import { useWindowSize } from '@docusaurus/theme-common';
import DropdownNavbarItem from '@theme/NavbarItem/DropdownNavbarItem';
import React, { useEffect, useState } from 'react';

View File

@@ -1,13 +1,4 @@
import {
mdiCalendarToday,
mdiCrosshairsOff,
mdiLeadPencil,
mdiLockOff,
mdiLockOutline,
mdiSpeedometerSlow,
mdiWeb,
mdiWrap,
} from '@mdi/js';
import { mdiCalendarToday, mdiLeadPencil, mdiLockOutline, mdiSpeedometerSlow, mdiWeb } from '@mdi/js';
import Layout from '@theme/Layout';
import React from 'react';
import { Item as TimelineItem, Timeline } from '../components/timeline';
@@ -17,41 +8,6 @@ const withLanguage = (date: Date) => (language: string) => date.toLocaleDateStri
type Item = Omit<TimelineItem, 'done' | 'getDateLabel'> & { date: Date };
const items: Item[] = [
{
icon: mdiWrap,
iconColor: 'gray',
title: 'Carriage returns in bash scripts are cursed',
description: 'Git can be configured to automatically convert LF to CRLF on checkout and CRLF breaks bash scripts.',
link: {
url: 'https://github.com/immich-app/immich/pull/11613',
text: '#11613',
},
date: new Date(2024, 7, 7),
},
{
icon: mdiLockOff,
iconColor: 'red',
title: 'Fetch inside Cloudflare Workers is cursed',
description:
'Fetch requests in Cloudflare Workers use http by default, even if you explicitly specify https, which can often cause redirect loops.',
link: {
url: 'https://community.cloudflare.com/t/does-cloudflare-worker-allow-secure-https-connection-to-fetch-even-on-flexible-ssl/68051/5',
text: 'Cloudflare',
},
date: new Date(2024, 7, 7),
},
{
icon: mdiCrosshairsOff,
iconColor: 'gray',
title: 'GPS sharing on mobile is cursed',
description:
'Some phones will silently strip GPS data from images when apps without location permission try to access them.',
link: {
url: 'https://github.com/immich-app/immich/discussions/11268',
text: '#11268',
},
date: new Date(2024, 6, 21),
},
{
icon: mdiLeadPencil,
iconColor: 'gold',

View File

@@ -41,7 +41,7 @@ function HomepageHeader() {
Discord
</Link>
</div>
<img src="/img/immich-screenshots.webp" alt="screenshots" width={'70%'} />
<img src="/img/immich-screenshots.png" alt="screenshots" width={'70%'} />
<div className="flex flex-col sm:flex-row place-items-center place-content-center mt-4 gap-1">
<div className="h-24">
<a href="https://play.google.com/store/apps/details?id=app.alextran.immich">

View File

@@ -1,114 +0,0 @@
import React from 'react';
import Link from '@docusaurus/Link';
import Layout from '@theme/Layout';
import { useColorMode } from '@docusaurus/theme-common';
function HomepageHeader() {
const { isDarkTheme } = useColorMode();
return (
<header>
<section className="max-w-[900px] m-4 p-4 md:p-6 md:m-auto md:my-12 border border-red-400 rounded-2xl bg-slate-200 dark:bg-immich-dark-gray">
<section>
<h1>Privacy Policy</h1>
<p>Last updated: July 31st 2024</p>
<p>
Welcome to Immich. We are committed to respecting your privacy. This Privacy Policy sets out how we collect,
use, and share information when you use our Immich app.
</p>
</section>
{/* 1. Scope of This Policy */}
<section>
<h2>1. Scope of This Policy</h2>
<p>
This Privacy Policy applies to the Immich app ("we", "our", or "us") and covers our collection, use, and
disclosure of your information. This Policy does not cover any third-party websites, services, or
applications that can be accessed through our app, or third-party services you may access through Immich.
</p>
</section>
{/* 2. Information We Collect */}
<section>
<h2>2. Information We Collect</h2>
<div>
<p>
<strong>Locally Stored Data</strong>: Immich stores all your photos, albums, settings, and locally on your
device. We do not have access to this data, nor do we transmit or store it on any of our servers.
</p>
</div>
<div>
<p>
<strong>Purchase Information:</strong> When you make a purchase within the{' '}
<a href="https://buy.immich.app">https://buy.immich.app</a>, we collect the following information for tax
calculation purposes:
</p>
<ul>
<li>Country of origin</li>
<li>Postal code (if the user is from Canada or the United States)</li>
</ul>
</div>
</section>
{/* 3. Use of Your Information */}
<section>
<h2>3. Use of Your Information</h2>
<p>
<strong>Tax Calculation:</strong> The country of origin and postal code (for users from Canada or the United
States) are collected solely for determining the applicable tax rates on your purchase.
</p>
</section>
{/* 4. Sharing of Your Information */}
<section>
<h2>4. Sharing of Your Information</h2>
<ul>
<li>
<strong>Tax Authorities:</strong> The purchase information may be shared with tax authorities as required
by law.
</li>
<li>
<strong>Payment Providers:</strong> The purchase information may be shared with payment providers where
required.
</li>
</ul>
</section>
{/* 5. Changes to This Policy */}
<section>
<h2>5. Changes to This Policy</h2>
<p>
We may update our Privacy Policy from time to time. If we make any changes, we will notify you by revising
the "Last updated" date at the top of this policy. It's encouraged that users frequently check this page for
any changes to stay informed about how we are helping to protect the personal information we collect.
</p>
</section>
{/* 6. Contact Us */}
<section>
<h2>6. Contact Us</h2>
<p>
If you have any questions about this Privacy Policy, please contact us at{' '}
<a href="mailto:immich@futo.org">immich@futo.org</a>
</p>
</section>
</section>
</header>
);
}
export default function Home(): JSX.Element {
return (
<Layout
title="Home"
description="immich Self-hosted photo and video backup solution directly from your mobile phone "
noFooter={true}
>
<HomepageHeader />
<div className="flex flex-col place-items-center place-content-center">
<p>This project is available under GNU AGPL v3 license.</p>
<p className="text-xs">Privacy should not be a luxury</p>
</div>
</Layout>
);
}

View File

@@ -15,7 +15,6 @@ import {
mdiCloudUploadOutline,
mdiCollage,
mdiContentDuplicate,
mdiCrop,
mdiDevices,
mdiEmailOutline,
mdiExpansionCard,
@@ -27,7 +26,6 @@ import {
mdiFileSearch,
mdiFlash,
mdiFolder,
mdiFolderMultiple,
mdiForum,
mdiHandshakeOutline,
mdiHeart,
@@ -38,7 +36,6 @@ import {
mdiImageMultipleOutline,
mdiImageSearch,
mdiKeyboardSettingsOutline,
mdiLicense,
mdiLockOutline,
mdiMagnify,
mdiMagnifyScan,
@@ -58,14 +55,11 @@ import {
mdiScaleBalance,
mdiSecurity,
mdiServer,
mdiShare,
mdiShareAll,
mdiShareCircle,
mdiStar,
mdiStarOutline,
mdiTableKey,
mdiTag,
mdiTagMultiple,
mdiText,
mdiThemeLightDark,
mdiTrashCanOutline,
@@ -78,11 +72,6 @@ import React from 'react';
import { Item, Timeline } from '../components/timeline';
const releases = {
'v1.113.0': new Date(2024, 7, 30),
'v1.112.0': new Date(2024, 7, 14),
'v1.111.0': new Date(2024, 6, 26),
'v1.110.0': new Date(2024, 5, 11),
'v1.109.0': new Date(2024, 6, 18),
'v1.106.1': new Date(2024, 5, 11),
'v1.104.0': new Date(2024, 4, 13),
'v1.103.0': new Date(2024, 3, 29),
@@ -231,67 +220,6 @@ const roadmap: Item[] = [
];
const milestones: Item[] = [
withRelease({
icon: mdiTagMultiple,
iconColor: 'orange',
title: 'Tags',
description: 'Tag your photos and videos',
release: 'v1.113.0',
}),
withRelease({
icon: mdiFolderMultiple,
iconColor: 'brown',
title: 'Folders',
description: 'View your photos and videos in folders',
release: 'v1.113.0',
}),
withRelease({
icon: mdiPalette,
title: 'Theming (mobile)',
description: 'Pick a primary color for the mobile app',
release: 'v1.112.0',
}),
withRelease({
icon: mdiStarOutline,
iconColor: 'gold',
title: 'Star rating',
description: 'Rate your photos and videos',
release: 'v1.112.0',
}),
withRelease({
icon: mdiCrop,
iconColor: 'royalblue',
title: 'Editor (mobile)',
description: 'Crop and rotate on mobile',
release: 'v1.111.0',
}),
withRelease({
icon: mdiMap,
iconColor: 'green',
title: 'Deploy tiles.immich.cloud',
description: 'Dedicated tile server for Immich',
release: 'v1.111.0',
}),
{
icon: mdiStar,
iconColor: 'gold',
title: '40,000 Stars',
description: 'Reached 40K Stars on GitHub!',
getDateLabel: withLanguage(new Date(2024, 6, 21)),
},
withRelease({
icon: mdiShare,
title: 'Deploy my.immich.app',
description: 'Url router for immich links',
release: 'v1.109.0',
}),
withRelease({
icon: mdiLicense,
iconColor: 'gold',
title: 'Supporter Badge',
description: 'The option to buy Immich to support its development!',
release: 'v1.109.0',
}),
withRelease({
icon: mdiHistory,
title: 'Versioned documentation',
@@ -308,7 +236,7 @@ const milestones: Item[] = [
withRelease({
icon: mdiContentDuplicate,
title: 'Similar image detection',
description: "Detect duplicate assets that aren't exactly identical",
description: 'Detect duplicate assets that arent exactly identical',
release: 'v1.106.1',
}),
withRelease({

View File

@@ -1,40 +1,4 @@
[
{
"label": "v1.113.1",
"url": "https://v1.113.1.archive.immich.app"
},
{
"label": "v1.113.0",
"url": "https://v1.113.0.archive.immich.app"
},
{
"label": "v1.112.1",
"url": "https://v1.112.1.archive.immich.app"
},
{
"label": "v1.112.0",
"url": "https://v1.112.0.archive.immich.app"
},
{
"label": "v1.111.0",
"url": "https://v1.111.0.archive.immich.app"
},
{
"label": "v1.110.0",
"url": "https://v1.110.0.archive.immich.app"
},
{
"label": "v1.109.2",
"url": "https://v1.109.2.archive.immich.app"
},
{
"label": "v1.109.1",
"url": "https://v1.109.1.archive.immich.app"
},
{
"label": "v1.109.0",
"url": "https://v1.109.0.archive.immich.app"
},
{
"label": "v1.108.0",
"url": "https://v1.108.0.archive.immich.app"

Binary file not shown (before: 196 KiB).

View File

@@ -4,7 +4,7 @@ module.exports = {
corePlugins: {
preflight: false, // disable Tailwind's reset
},
content: ['./src/**/*.{js,jsx,ts,tsx}', './{docs,blog}/**/*.{md,mdx}'], // my markdown stuff is in ../docs, not /src
content: ['./src/**/*.{js,jsx,ts,tsx}', '../docs/**/*.mdx'], // my markdown stuff is in ../docs, not /src
darkMode: ['class', '[data-theme="dark"]'], // hooks into docusaurus' dark mode settigns
theme: {
extend: {

e2e/.eslintrc.cjs (new file, 32 lines)
View File

@@ -0,0 +1,32 @@
module.exports = {
parser: '@typescript-eslint/parser',
parserOptions: {
project: 'tsconfig.json',
sourceType: 'module',
tsconfigRootDir: __dirname,
},
plugins: ['@typescript-eslint/eslint-plugin'],
extends: ['plugin:@typescript-eslint/recommended', 'plugin:prettier/recommended', 'plugin:unicorn/recommended'],
root: true,
env: {
node: true,
},
ignorePatterns: ['.eslintrc.js'],
rules: {
'@typescript-eslint/interface-name-prefix': 'off',
'@typescript-eslint/explicit-function-return-type': 'off',
'@typescript-eslint/explicit-module-boundary-types': 'off',
'@typescript-eslint/no-explicit-any': 'off',
'@typescript-eslint/no-floating-promises': 'error',
'unicorn/prefer-module': 'off',
'unicorn/import-style': 'off',
curly: 2,
'prettier/prettier': 0,
'unicorn/prevent-abbreviations': 'off',
'unicorn/filename-case': 'off',
'unicorn/no-null': 'off',
'unicorn/prefer-top-level-await': 'off',
'unicorn/prefer-event-target': 'off',
'unicorn/no-thenable': 'off',
},
};

View File

@@ -1 +1 @@
20.17.0
20.15.1

View File

@@ -1,3 +1,5 @@
version: '3.8'
name: immich-e2e
services:
@@ -30,10 +32,10 @@ services:
- redis
- database
ports:
- 2285:3001
- 2283:3001
redis:
image: redis:6.2-alpine@sha256:e3b17ba9479deec4b7d1eeec1548a253acc5374d68d3b27937fcfe4df8d18c7e
image: redis:6.2-alpine@sha256:328fe6a5822256d065debb36617a8169dbfbd77b797c525288e465f56c1d392b
database:
image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
@@ -43,7 +45,7 @@ services:
POSTGRES_USER: postgres
POSTGRES_DB: immich
ports:
- 5435:5432
- 5433:5432
volumes:
model-cache:

View File

@@ -1,65 +0,0 @@
import { FlatCompat } from '@eslint/eslintrc';
import js from '@eslint/js';
import typescriptEslint from '@typescript-eslint/eslint-plugin';
import tsParser from '@typescript-eslint/parser';
import globals from 'globals';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const compat = new FlatCompat({
baseDirectory: __dirname,
recommendedConfig: js.configs.recommended,
allConfig: js.configs.all,
});
export default [
{
ignores: ['eslint.config.mjs'],
},
...compat.extends(
'plugin:@typescript-eslint/recommended',
'plugin:prettier/recommended',
'plugin:unicorn/recommended',
),
{
plugins: {
'@typescript-eslint': typescriptEslint,
},
languageOptions: {
globals: {
...globals.node,
},
parser: tsParser,
ecmaVersion: 5,
sourceType: 'module',
parserOptions: {
project: 'tsconfig.json',
tsconfigRootDir: __dirname,
},
},
rules: {
'@typescript-eslint/interface-name-prefix': 'off',
'@typescript-eslint/explicit-function-return-type': 'off',
'@typescript-eslint/explicit-module-boundary-types': 'off',
'@typescript-eslint/no-explicit-any': 'off',
'@typescript-eslint/no-floating-promises': 'error',
'unicorn/prefer-module': 'off',
'unicorn/import-style': 'off',
curly: 2,
'prettier/prettier': 0,
'unicorn/prevent-abbreviations': 'off',
'unicorn/filename-case': 'off',
'unicorn/no-null': 'off',
'unicorn/prefer-top-level-await': 'off',
'unicorn/prefer-event-target': 'off',
'unicorn/no-thenable': 'off',
'object-shorthand': ['error', 'always'],
},
},
];

e2e/package-lock.json (generated, 1871 changed lines)

File diff suppressed because it is too large.

View File

@@ -1,6 +1,6 @@
{
"name": "immich-e2e",
"version": "1.113.1",
"version": "1.108.0",
"description": "",
"main": "index.js",
"type": "module",
@@ -19,26 +19,23 @@
"author": "",
"license": "GNU Affero General Public License version 3",
"devDependencies": {
"@eslint/eslintrc": "^3.1.0",
"@eslint/js": "^9.8.0",
"@immich/cli": "file:../cli",
"@immich/sdk": "file:../open-api/typescript-sdk",
"@playwright/test": "^1.44.1",
"@types/luxon": "^3.4.2",
"@types/node": "^20.16.2",
"@types/node": "^20.14.10",
"@types/oidc-provider": "^8.5.1",
"@types/pg": "^8.11.0",
"@types/pngjs": "^6.0.4",
"@types/supertest": "^6.0.2",
"@typescript-eslint/eslint-plugin": "^8.0.0",
"@typescript-eslint/parser": "^8.0.0",
"@vitest/coverage-v8": "^2.0.5",
"eslint": "^9.0.0",
"@typescript-eslint/eslint-plugin": "^7.1.0",
"@typescript-eslint/parser": "^7.1.0",
"@vitest/coverage-v8": "^1.3.0",
"eslint": "^8.57.0",
"eslint-config-prettier": "^9.1.0",
"eslint-plugin-prettier": "^5.1.3",
"eslint-plugin-unicorn": "^55.0.0",
"exiftool-vendored": "^28.0.0",
"globals": "^15.9.0",
"eslint-plugin-unicorn": "^54.0.0",
"exiftool-vendored": "^27.0.0",
"jose": "^5.6.3",
"luxon": "^3.4.4",
"oidc-provider": "^8.5.1",
@@ -50,9 +47,9 @@
"supertest": "^7.0.0",
"typescript": "^5.3.3",
"utimes": "^5.2.1",
"vitest": "^2.0.5"
"vitest": "^1.3.0"
},
"volta": {
"node": "20.17.0"
"node": "20.15.1"
}
}

View File

@@ -8,7 +8,7 @@ export default defineConfig({
workers: 1,
reporter: 'html',
use: {
baseURL: 'http://127.0.0.1:2285',
baseURL: 'http://127.0.0.1:2283',
trace: 'on-first-retry',
},
@@ -54,7 +54,7 @@ export default defineConfig({
/* Run your local dev server before starting the tests */
webServer: {
command: 'docker compose up --build -V --remove-orphans',
url: 'http://127.0.0.1:2285',
url: 'http://127.0.0.1:2283',
reuseExistingServer: true,
},
});

View File

@@ -344,16 +344,16 @@ describe('/albums', () => {
});
});
describe('GET /albums/statistics', () => {
describe('GET /albums/count', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/albums/statistics');
const { status, body } = await request(app).get('/albums/count');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should return total count of albums the user has access to', async () => {
const { status, body } = await request(app)
.get('/albums/statistics')
.get('/albums/count')
.set('Authorization', `Bearer ${user1.accessToken}`);
expect(status).toBe(200);

View File

@@ -1,258 +0,0 @@
import { LoginResponseDto, Permission, createApiKey } from '@immich/sdk';
import { createUserDto, uuidDto } from 'src/fixtures';
import { errorDto } from 'src/responses';
import { app, asBearerAuth, utils } from 'src/utils';
import request from 'supertest';
import { beforeAll, beforeEach, describe, expect, it } from 'vitest';
const create = (accessToken: string, permissions: Permission[]) =>
createApiKey({ apiKeyCreateDto: { name: 'api key', permissions } }, { headers: asBearerAuth(accessToken) });
describe('/api-keys', () => {
let admin: LoginResponseDto;
let user: LoginResponseDto;
beforeAll(async () => {
await utils.resetDatabase();
admin = await utils.adminSetup();
user = await utils.userSetup(admin.accessToken, createUserDto.user1);
});
beforeEach(async () => {
await utils.resetDatabase(['api_keys']);
});
describe('POST /api-keys', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).post('/api-keys').send({ name: 'API Key' });
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should not work without permission', async () => {
const { secret } = await create(user.accessToken, [Permission.ApiKeyRead]);
const { status, body } = await request(app).post('/api-keys').set('x-api-key', secret).send({ name: 'API Key' });
expect(status).toBe(403);
expect(body).toEqual(errorDto.missingPermission('apiKey.create'));
});
it('should work with apiKey.create', async () => {
const { secret } = await create(user.accessToken, [Permission.ApiKeyCreate, Permission.ApiKeyRead]);
const { status, body } = await request(app)
.post('/api-keys')
.set('x-api-key', secret)
.send({
name: 'API Key',
permissions: [Permission.ApiKeyRead],
});
expect(body).toEqual({
secret: expect.any(String),
apiKey: {
id: expect.any(String),
name: 'API Key',
permissions: [Permission.ApiKeyRead],
createdAt: expect.any(String),
updatedAt: expect.any(String),
},
});
expect(status).toBe(201);
});
it('should not create an api key with all permissions', async () => {
const { secret } = await create(user.accessToken, [Permission.ApiKeyCreate]);
const { status, body } = await request(app)
.post('/api-keys')
.set('x-api-key', secret)
.send({ name: 'API Key', permissions: [Permission.All] });
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest('Cannot grant permissions you do not have'));
});
it('should not create an api key with more permissions', async () => {
const { secret } = await create(user.accessToken, [Permission.ApiKeyCreate]);
const { status, body } = await request(app)
.post('/api-keys')
.set('x-api-key', secret)
.send({ name: 'API Key', permissions: [Permission.ApiKeyRead] });
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest('Cannot grant permissions you do not have'));
});
it('should create an api key', async () => {
const { status, body } = await request(app)
.post('/api-keys')
.send({ name: 'API Key', permissions: [Permission.All] })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(body).toEqual({
apiKey: {
id: expect.any(String),
name: 'API Key',
permissions: [Permission.All],
createdAt: expect.any(String),
updatedAt: expect.any(String),
},
secret: expect.any(String),
});
expect(status).toEqual(201);
});
});
describe('GET /api-keys', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/api-keys');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should start off empty', async () => {
const { status, body } = await request(app).get('/api-keys').set('Authorization', `Bearer ${admin.accessToken}`);
expect(body).toEqual([]);
expect(status).toEqual(200);
});
it('should return a list of api keys', async () => {
const [{ apiKey: apiKey1 }, { apiKey: apiKey2 }, { apiKey: apiKey3 }] = await Promise.all([
create(admin.accessToken, [Permission.All]),
create(admin.accessToken, [Permission.All]),
create(admin.accessToken, [Permission.All]),
]);
const { status, body } = await request(app).get('/api-keys').set('Authorization', `Bearer ${admin.accessToken}`);
expect(body).toHaveLength(3);
expect(body).toEqual(expect.arrayContaining([apiKey1, apiKey2, apiKey3]));
expect(status).toEqual(200);
});
});
describe('GET /api-keys/:id', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get(`/api-keys/${uuidDto.notFound}`);
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should require authorization', async () => {
const { apiKey } = await create(user.accessToken, [Permission.All]);
const { status, body } = await request(app)
.get(`/api-keys/${apiKey.id}`)
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest('API Key not found'));
});
it('should require a valid uuid', async () => {
const { status, body } = await request(app)
.get(`/api-keys/${uuidDto.invalid}`)
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
});
it('should get api key details', async () => {
const { apiKey } = await create(user.accessToken, [Permission.All]);
const { status, body } = await request(app)
.get(`/api-keys/${apiKey.id}`)
.set('Authorization', `Bearer ${user.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual({
id: expect.any(String),
name: 'api key',
permissions: [Permission.All],
createdAt: expect.any(String),
updatedAt: expect.any(String),
});
});
});
describe('PUT /api-keys/:id', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).put(`/api-keys/${uuidDto.notFound}`).send({ name: 'new name' });
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should require authorization', async () => {
const { apiKey } = await create(user.accessToken, [Permission.All]);
const { status, body } = await request(app)
.put(`/api-keys/${apiKey.id}`)
.send({ name: 'new name' })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest('API Key not found'));
});
it('should require a valid uuid', async () => {
const { status, body } = await request(app)
.put(`/api-keys/${uuidDto.invalid}`)
.send({ name: 'new name' })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
});
it('should update api key details', async () => {
const { apiKey } = await create(user.accessToken, [Permission.All]);
const { status, body } = await request(app)
.put(`/api-keys/${apiKey.id}`)
.send({ name: 'new name' })
.set('Authorization', `Bearer ${user.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual({
id: expect.any(String),
name: 'new name',
permissions: [Permission.All],
createdAt: expect.any(String),
updatedAt: expect.any(String),
});
});
});
describe('DELETE /api-keys/:id', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).delete(`/api-keys/${uuidDto.notFound}`);
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should require authorization', async () => {
const { apiKey } = await create(user.accessToken, [Permission.All]);
const { status, body } = await request(app)
.delete(`/api-keys/${apiKey.id}`)
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest('API Key not found'));
});
it('should require a valid uuid', async () => {
const { status, body } = await request(app)
.delete(`/api-keys/${uuidDto.invalid}`)
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
});
it('should delete an api key', async () => {
const { apiKey } = await create(user.accessToken, [Permission.All]);
const { status } = await request(app)
.delete(`/api-keys/${apiKey.id}`)
.set('Authorization', `Bearer ${user.accessToken}`);
expect(status).toBe(204);
});
});
describe('authentication', () => {
it('should work as a header', async () => {
const { secret } = await create(user.accessToken, [Permission.All]);
const { status, body } = await request(app).get('/api-keys').set('x-api-key', secret);
expect(body).toHaveLength(1);
expect(status).toBe(200);
});
it('should work as a query param', async () => {
const { secret } = await create(user.accessToken, [Permission.All]);
const { status, body } = await request(app).get(`/api-keys?apiKey=${secret}`);
expect(body).toHaveLength(1);
expect(status).toBe(200);
});
});
});

View File

@@ -7,6 +7,7 @@ import {
SharedLinkType,
getAssetInfo,
getMyUser,
updateAssets,
} from '@immich/sdk';
import { exiftool } from 'exiftool-vendored';
import { DateTime } from 'luxon';
@@ -42,7 +43,6 @@ const makeUploadDto = (options?: { omit: string }): Record<string, any> => {
const TEN_TIMES = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
const locationAssetFilepath = `${testAssetDir}/metadata/gps-position/thompson-springs.jpg`;
const ratingAssetFilepath = `${testAssetDir}/metadata/rating/mongolels.jpg`;
const readTags = async (bytes: Buffer, filename: string) => {
const filepath = join(tempDir, filename);
@@ -66,23 +66,25 @@ describe('/asset', () => {
let timeBucketUser: LoginResponseDto;
let quotaUser: LoginResponseDto;
let statsUser: LoginResponseDto;
let stackUser: LoginResponseDto;
let user1Assets: AssetMediaResponseDto[];
let user2Assets: AssetMediaResponseDto[];
let stackAssets: AssetMediaResponseDto[];
let locationAsset: AssetMediaResponseDto;
let ratingAsset: AssetMediaResponseDto;
const setupTests = async () => {
await utils.resetDatabase();
admin = await utils.adminSetup({ onboarding: false });
[websocket, user1, user2, statsUser, quotaUser, timeBucketUser] = await Promise.all([
[websocket, user1, user2, statsUser, quotaUser, timeBucketUser, stackUser] = await Promise.all([
utils.connectWebsocket(admin.accessToken),
utils.userSetup(admin.accessToken, createUserDto.create('1')),
utils.userSetup(admin.accessToken, createUserDto.create('2')),
utils.userSetup(admin.accessToken, createUserDto.create('stats')),
utils.userSetup(admin.accessToken, createUserDto.userQuota),
utils.userSetup(admin.accessToken, createUserDto.create('time-bucket')),
utils.userSetup(admin.accessToken, createUserDto.create('stack')),
]);
await utils.createPartner(user1.accessToken, user2.userId);
@@ -97,16 +99,6 @@ describe('/asset', () => {
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: locationAsset.id });
// asset rating
ratingAsset = await utils.createAsset(admin.accessToken, {
assetData: {
filename: 'mongolels.jpg',
bytes: await readFile(ratingAssetFilepath),
},
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: ratingAsset.id });
user1Assets = await Promise.all([
utils.createAsset(user1.accessToken),
utils.createAsset(user1.accessToken),
@@ -145,6 +137,20 @@ describe('/asset', () => {
}),
]);
// stacks
stackAssets = await Promise.all([
utils.createAsset(stackUser.accessToken),
utils.createAsset(stackUser.accessToken),
utils.createAsset(stackUser.accessToken),
utils.createAsset(stackUser.accessToken),
utils.createAsset(stackUser.accessToken),
]);
await updateAssets(
{ assetBulkUpdateDto: { stackParentId: stackAssets[0].id, ids: [stackAssets[1].id, stackAssets[2].id] } },
{ headers: asBearerAuth(stackUser.accessToken) },
);
const person1 = await utils.createPerson(user1.accessToken, {
name: 'Test Person',
});
@@ -208,22 +214,6 @@ describe('/asset', () => {
expect(body).toMatchObject({ id: user1Assets[0].id });
});
it('should get the asset rating', async () => {
await utils.waitForWebsocketEvent({
event: 'assetUpload',
id: ratingAsset.id,
});
const { status, body } = await request(app)
.get(`/assets/${ratingAsset.id}`)
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toMatchObject({
id: ratingAsset.id,
exifInfo: expect.objectContaining({ rating: 3 }),
});
});
it('should work with a shared link', async () => {
const sharedLink = await utils.createSharedLink(user1.accessToken, {
type: SharedLinkType.Individual,
@@ -363,8 +353,6 @@ describe('/asset', () => {
utils.createAsset(user1.accessToken),
utils.createAsset(user1.accessToken),
]);
await utils.waitForQueueFinish(admin.accessToken, 'thumbnailGeneration');
});
it('should require authentication', async () => {
@@ -401,14 +389,17 @@ describe('/asset', () => {
}
});
it.skip('should return 1 asset if there are 10 assets in the database but user 2 only has 1', async () => {
const { status, body } = await request(app)
.get('/assets/random')
.set('Authorization', `Bearer ${user2.accessToken}`);
it.each(TEN_TIMES)(
'should return 1 asset if there are 10 assets in the database but user 2 only has 1',
async () => {
const { status, body } = await request(app)
.get('/assets/random')
.set('Authorization', `Bearer ${user2.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual([expect.objectContaining({ id: user2Assets[0].id })]);
});
expect(status).toBe(200);
expect(body).toEqual([expect.objectContaining({ id: user2Assets[0].id })]);
},
);
it('should return error', async () => {
const { status } = await request(app)
@@ -481,44 +472,6 @@ describe('/asset', () => {
expect(status).toEqual(200);
});
it('should update date time original when sidecar file contains DateTimeOriginal', async () => {
const sidecarData = `<?xpacket begin='?' id='W5M0MpCehiHzreSzNTczkc9d'?>
<x:xmpmeta xmlns:x='adobe:ns:meta/' x:xmptk='Image::ExifTool 12.40'>
<rdf:RDF xmlns:rdf='http://www.w3.org/1999/02/22-rdf-syntax-ns#'>
<rdf:Description rdf:about=''
xmlns:exif='http://ns.adobe.com/exif/1.0/'>
      <exif:ExifVersion>0220</exif:ExifVersion>
      <exif:DateTimeOriginal>2024-07-11T10:32:52Z</exif:DateTimeOriginal>
<exif:GPSVersionID>2.3.0.0</exif:GPSVersionID>
</rdf:Description>
</rdf:RDF>
</x:xmpmeta>
<?xpacket end='w'?>`;
const { id } = await utils.createAsset(user1.accessToken, {
sidecarData: {
bytes: Buffer.from(sidecarData),
filename: 'example.xmp',
},
});
await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction');
const assetInfo = await utils.getAssetInfo(user1.accessToken, id);
expect(assetInfo.exifInfo?.dateTimeOriginal).toBe('2024-07-11T10:32:52.000Z');
const { status, body } = await request(app)
.put(`/assets/${id}`)
.set('Authorization', `Bearer ${user1.accessToken}`)
.send({ dateTimeOriginal: '2023-11-19T18:11:00.000-07:00' });
expect(body).toMatchObject({
id,
exifInfo: expect.objectContaining({
dateTimeOriginal: '2023-11-20T01:11:00.000Z',
}),
});
expect(status).toEqual(200);
});
it('should reject invalid gps coordinates', async () => {
for (const test of [
{ latitude: 12 },
@@ -554,22 +507,6 @@ describe('/asset', () => {
expect(status).toEqual(200);
});
it.skip('should geocode country from gps data in the middle of nowhere', async () => {
const { status } = await request(app)
.put(`/assets/${user1Assets[0].id}`)
.set('Authorization', `Bearer ${user1.accessToken}`)
.send({ latitude: 42, longitude: 69 });
expect(status).toEqual(200);
await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction');
const asset = await getAssetInfo({ id: user1Assets[0].id }, { headers: asBearerAuth(user1.accessToken) });
expect(asset).toMatchObject({
id: user1Assets[0].id,
exifInfo: expect.objectContaining({ city: null, country: 'Kazakhstan' }),
});
});
it('should set the description', async () => {
const { status, body } = await request(app)
.put(`/assets/${user1Assets[0].id}`)
@@ -584,31 +521,6 @@ describe('/asset', () => {
expect(status).toEqual(200);
});
it('should set the rating', async () => {
const { status, body } = await request(app)
.put(`/assets/${user1Assets[0].id}`)
.set('Authorization', `Bearer ${user1.accessToken}`)
.send({ rating: 2 });
expect(body).toMatchObject({
id: user1Assets[0].id,
exifInfo: expect.objectContaining({
rating: 2,
}),
});
expect(status).toEqual(200);
});
it('should reject invalid rating', async () => {
for (const test of [{ rating: 7 }, { rating: 3.5 }, { rating: null }]) {
const { status, body } = await request(app)
.put(`/assets/${user1Assets[0].id}`)
.send(test)
.set('Authorization', `Bearer ${user1.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest());
}
});
it('should return tagged people', async () => {
const { status, body } = await request(app)
.put(`/assets/${user1Assets[0].id}`)
@@ -807,8 +719,145 @@ describe('/asset', () => {
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should require a valid parent id', async () => {
const { status, body } = await request(app)
.put('/assets')
.set('Authorization', `Bearer ${user1.accessToken}`)
.send({ stackParentId: uuidDto.invalid, ids: [stackAssets[0].id] });
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['stackParentId must be a UUID']));
});
it('should require access to the parent', async () => {
const { status, body } = await request(app)
.put('/assets')
.set('Authorization', `Bearer ${user1.accessToken}`)
.send({ stackParentId: stackAssets[3].id, ids: [user1Assets[0].id] });
expect(status).toBe(400);
expect(body).toEqual(errorDto.noPermission);
});
it('should add stack children', async () => {
const { status } = await request(app)
.put('/assets')
.set('Authorization', `Bearer ${stackUser.accessToken}`)
.send({ stackParentId: stackAssets[0].id, ids: [stackAssets[3].id] });
expect(status).toBe(204);
const asset = await getAssetInfo({ id: stackAssets[0].id }, { headers: asBearerAuth(stackUser.accessToken) });
expect(asset.stack).not.toBeUndefined();
expect(asset.stack).toEqual(expect.arrayContaining([expect.objectContaining({ id: stackAssets[3].id })]));
});
it('should remove stack children', async () => {
const { status } = await request(app)
.put('/assets')
.set('Authorization', `Bearer ${stackUser.accessToken}`)
.send({ removeParent: true, ids: [stackAssets[1].id] });
expect(status).toBe(204);
const asset = await getAssetInfo({ id: stackAssets[0].id }, { headers: asBearerAuth(stackUser.accessToken) });
expect(asset.stack).not.toBeUndefined();
expect(asset.stack).toEqual(
expect.arrayContaining([
expect.objectContaining({ id: stackAssets[2].id }),
expect.objectContaining({ id: stackAssets[3].id }),
]),
);
});
it('should remove all stack children', async () => {
const { status } = await request(app)
.put('/assets')
.set('Authorization', `Bearer ${stackUser.accessToken}`)
.send({ removeParent: true, ids: [stackAssets[2].id, stackAssets[3].id] });
expect(status).toBe(204);
const asset = await getAssetInfo({ id: stackAssets[0].id }, { headers: asBearerAuth(stackUser.accessToken) });
expect(asset.stack).toBeUndefined();
});
it('should merge stack children', async () => {
// create stack after previous test removed stack children
await updateAssets(
{ assetBulkUpdateDto: { stackParentId: stackAssets[0].id, ids: [stackAssets[1].id, stackAssets[2].id] } },
{ headers: asBearerAuth(stackUser.accessToken) },
);
const { status } = await request(app)
.put('/assets')
.set('Authorization', `Bearer ${stackUser.accessToken}`)
.send({ stackParentId: stackAssets[3].id, ids: [stackAssets[0].id] });
expect(status).toBe(204);
const asset = await getAssetInfo({ id: stackAssets[3].id }, { headers: asBearerAuth(stackUser.accessToken) });
expect(asset.stack).not.toBeUndefined();
expect(asset.stack).toEqual(
expect.arrayContaining([
expect.objectContaining({ id: stackAssets[0].id }),
expect.objectContaining({ id: stackAssets[1].id }),
expect.objectContaining({ id: stackAssets[2].id }),
]),
);
});
});
describe('PUT /assets/stack/parent', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).put('/assets/stack/parent');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should require a valid id', async () => {
const { status, body } = await request(app)
.put('/assets/stack/parent')
.set('Authorization', `Bearer ${user1.accessToken}`)
.send({ oldParentId: uuidDto.invalid, newParentId: uuidDto.invalid });
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest());
});
it('should require access', async () => {
const { status, body } = await request(app)
.put('/assets/stack/parent')
.set('Authorization', `Bearer ${user1.accessToken}`)
.send({ oldParentId: stackAssets[3].id, newParentId: stackAssets[0].id });
expect(status).toBe(400);
expect(body).toEqual(errorDto.noPermission);
});
it('should make old parent child of new parent', async () => {
const { status } = await request(app)
.put('/assets/stack/parent')
.set('Authorization', `Bearer ${stackUser.accessToken}`)
.send({ oldParentId: stackAssets[3].id, newParentId: stackAssets[0].id });
expect(status).toBe(200);
const asset = await getAssetInfo({ id: stackAssets[0].id }, { headers: asBearerAuth(stackUser.accessToken) });
// new parent
expect(asset.stack).not.toBeUndefined();
expect(asset.stack).toEqual(
expect.arrayContaining([
expect.objectContaining({ id: stackAssets[1].id }),
expect.objectContaining({ id: stackAssets[2].id }),
expect.objectContaining({ id: stackAssets[3].id }),
]),
);
});
});
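Taken together, the stack tests above follow one lifecycle: children are attached to a parent via updateAssets with a stackParentId, and PUT /assets/stack/parent promotes a new parent while demoting the old one into its stack. A minimal sketch using the same helpers imported in this file; parent, childA, and childB are placeholder assets owned by stackUser:

// attach two children to a parent (same call the suite uses in setup)
await updateAssets(
  { assetBulkUpdateDto: { stackParentId: parent.id, ids: [childA.id, childB.id] } },
  { headers: asBearerAuth(stackUser.accessToken) },
);

// promote childA: the old parent becomes one of its stack children
await request(app)
  .put('/assets/stack/parent')
  .set('Authorization', `Bearer ${stackUser.accessToken}`)
  .send({ oldParentId: parent.id, newParentId: childA.id });

// the new parent's stack now contains the old parent and the remaining child
const info = await getAssetInfo({ id: childA.id }, { headers: asBearerAuth(stackUser.accessToken) });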
describe('POST /assets', () => {
beforeAll(setupTests, 30_000);
@@ -837,12 +886,13 @@ describe('/asset', () => {
expect(body).toEqual(errorDto.badRequest());
});
const tests = [
it.each([
{
input: 'formats/avif/8bit-sRGB.avif',
expected: {
type: AssetTypeEnum.Image,
originalFileName: '8bit-sRGB.avif',
resized: true,
exifInfo: {
description: '',
exifImageHeight: 1080,
@@ -858,6 +908,7 @@ describe('/asset', () => {
expected: {
type: AssetTypeEnum.Image,
originalFileName: 'el_torcal_rocks.jpg',
resized: true,
exifInfo: {
dateTimeOriginal: '2012-08-05T11:39:59.000Z',
exifImageWidth: 512,
@@ -881,6 +932,7 @@ describe('/asset', () => {
expected: {
type: AssetTypeEnum.Image,
originalFileName: '8bit-sRGB.jxl',
resized: true,
exifInfo: {
description: '',
exifImageHeight: 1080,
@@ -896,6 +948,7 @@ describe('/asset', () => {
expected: {
type: AssetTypeEnum.Image,
originalFileName: 'IMG_2682.heic',
resized: true,
fileCreatedAt: '2019-03-21T16:04:22.348Z',
exifInfo: {
dateTimeOriginal: '2019-03-21T16:04:22.348Z',
@@ -920,6 +973,7 @@ describe('/asset', () => {
expected: {
type: AssetTypeEnum.Image,
originalFileName: 'density_plot.png',
resized: true,
exifInfo: {
exifImageWidth: 800,
exifImageHeight: 800,
@@ -934,6 +988,7 @@ describe('/asset', () => {
expected: {
type: AssetTypeEnum.Image,
originalFileName: 'glarus.nef',
resized: true,
fileCreatedAt: '2010-07-20T17:27:12.000Z',
exifInfo: {
make: 'NIKON CORPORATION',
@@ -955,6 +1010,7 @@ describe('/asset', () => {
expected: {
type: AssetTypeEnum.Image,
originalFileName: 'philadelphia.nef',
resized: true,
fileCreatedAt: '2016-09-22T22:10:29.060Z',
exifInfo: {
make: 'NIKON CORPORATION',
@@ -977,6 +1033,7 @@ describe('/asset', () => {
expected: {
type: AssetTypeEnum.Image,
originalFileName: '4_3.rw2',
resized: true,
fileCreatedAt: '2018-05-10T08:42:37.842Z',
exifInfo: {
make: 'Panasonic',
@@ -1000,6 +1057,7 @@ describe('/asset', () => {
expected: {
type: AssetTypeEnum.Image,
originalFileName: '12bit-compressed-(3_2).arw',
resized: true,
fileCreatedAt: '2016-09-27T10:51:44.000Z',
exifInfo: {
make: 'SONY',
@@ -1024,7 +1082,8 @@ describe('/asset', () => {
expected: {
type: AssetTypeEnum.Image,
originalFileName: '14bit-uncompressed-(3_2).arw',
fileCreatedAt: '2016-01-08T14:08:01.000Z',
resized: true,
fileCreatedAt: '2016-01-08T15:08:01.000Z',
exifInfo: {
make: 'SONY',
model: 'ILCE-7M2',
@@ -1036,39 +1095,28 @@ describe('/asset', () => {
iso: 100,
lensModel: 'E 25mm F2',
fileSizeInByte: 49_512_448,
dateTimeOriginal: '2016-01-08T14:08:01.000Z',
dateTimeOriginal: '2016-01-08T15:08:01.000Z',
latitude: null,
longitude: null,
orientation: '1',
},
},
},
];
])(`should upload and generate a thumbnail for $input`, async ({ input, expected }) => {
const filepath = join(testAssetDir, input);
const { id, status } = await utils.createAsset(admin.accessToken, {
assetData: { bytes: await readFile(filepath), filename: basename(filepath) },
});
it(`should upload and generate a thumbnail for different file types`, async () => {
// upload in parallel
const assets = await Promise.all(
tests.map(async ({ input }) => {
const filepath = join(testAssetDir, input);
return utils.createAsset(admin.accessToken, {
assetData: { bytes: await readFile(filepath), filename: basename(filepath) },
});
}),
);
expect(status).toBe(AssetMediaStatus.Created);
for (const { id, status } of assets) {
expect(status).toBe(AssetMediaStatus.Created);
await utils.waitForWebsocketEvent({ event: 'assetUpload', id });
}
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: id });
for (const [i, { id }] of assets.entries()) {
const { expected } = tests[i];
const asset = await utils.getAssetInfo(admin.accessToken, id);
const asset = await utils.getAssetInfo(admin.accessToken, id);
expect(asset.exifInfo).toBeDefined();
expect(asset.exifInfo).toMatchObject(expected.exifInfo);
expect(asset).toMatchObject(expected);
}
expect(asset.exifInfo).toBeDefined();
expect(asset.exifInfo).toMatchObject(expected.exifInfo);
expect(asset).toMatchObject(expected);
});
it('should handle a duplicate', async () => {
@@ -1122,25 +1170,17 @@ describe('/asset', () => {
// into the test here.
it.each([
{
filepath: 'formats/motionphoto/samsung-one-ui-5.jpg',
filepath: 'formats/motionphoto/Samsung One UI 5.jpg',
checksum: 'fr14niqCq6N20HB8rJYEvpsUVtI=',
},
{
filepath: 'formats/motionphoto/samsung-one-ui-6.jpg',
filepath: 'formats/motionphoto/Samsung One UI 6.jpg',
checksum: 'lT9Uviw/FFJYCjfIxAGPTjzAmmw=',
},
{
filepath: 'formats/motionphoto/samsung-one-ui-6.heic',
filepath: 'formats/motionphoto/Samsung One UI 6.heic',
checksum: '/ejgzywvgvzvVhUYVfvkLzFBAF0=',
},
{
filepath: 'formats/motionphoto/pixel-6-pro.jpg',
checksum: 'bFhLGbdK058PSk4FTfrSnoKWykc=',
},
{
filepath: 'formats/motionphoto/pixel-8a.jpg',
checksum: '7YdY+WF0h+CXHbiXpi0HiCMTTjs=',
},
])(`should extract motionphoto video from $filepath`, async ({ filepath, checksum }) => {
const response = await utils.createAsset(admin.accessToken, {
assetData: {

View File

@@ -353,7 +353,7 @@ describe('/libraries', () => {
expect(assets.count).toBe(2);
utils.createImageFile(`${testAssetDir}/temp/directoryA/assetC.png`);
utils.createImageFile(`${testAssetDir}/temp/directoryA/assetB.png`);
await scan(admin.accessToken, library.id);
await utils.waitForWebsocketEvent({ event: 'assetUpload', total: 3 });
@@ -361,11 +361,11 @@ describe('/libraries', () => {
const { assets: newAssets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
expect(newAssets.count).toBe(3);
utils.removeImageFile(`${testAssetDir}/temp/directoryA/assetC.png`);
utils.removeImageFile(`${testAssetDir}/temp/directoryA/assetB.png`);
});
it('should offline a file missing from disk', async () => {
utils.createImageFile(`${testAssetDir}/temp/directoryA/assetC.png`);
it('should offline missing files', async () => {
utils.createImageFile(`${testAssetDir}/temp/directoryA/assetB.png`);
const library = await utils.createLibrary(admin.accessToken, {
ownerId: admin.userId,
importPaths: [`${testAssetDirInternal}/temp`],
@@ -374,40 +374,7 @@ describe('/libraries', () => {
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
const { assets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
expect(assets.count).toBe(3);
utils.removeImageFile(`${testAssetDir}/temp/directoryA/assetC.png`);
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
const { assets: newAssets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
expect(newAssets.count).toBe(3);
expect(newAssets.items).toEqual(
expect.arrayContaining([
expect.objectContaining({
isOffline: true,
originalFileName: 'assetC.png',
}),
]),
);
});
it('should offline a file outside of import paths', async () => {
const library = await utils.createLibrary(admin.accessToken, {
ownerId: admin.userId,
importPaths: [`${testAssetDirInternal}/temp`],
});
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
await request(app)
.put(`/libraries/${library.id}`)
.set('Authorization', `Bearer ${admin.accessToken}`)
.send({ importPaths: [`${testAssetDirInternal}/temp/directoryA`] });
utils.removeImageFile(`${testAssetDir}/temp/directoryA/assetB.png`);
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
@@ -416,45 +383,6 @@ describe('/libraries', () => {
expect(assets.items).toEqual(
expect.arrayContaining([
expect.objectContaining({
isOffline: false,
originalFileName: 'assetA.png',
}),
expect.objectContaining({
isOffline: true,
originalFileName: 'assetB.png',
}),
]),
);
});
it('should offline a file covered by an exclusion pattern', async () => {
const library = await utils.createLibrary(admin.accessToken, {
ownerId: admin.userId,
importPaths: [`${testAssetDirInternal}/temp`],
});
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
await request(app)
.put(`/libraries/${library.id}`)
.set('Authorization', `Bearer ${admin.accessToken}`)
.send({ exclusionPatterns: ['**/directoryB/**'] });
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
const { assets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
expect(assets.count).toBe(2);
expect(assets.items).toEqual(
expect.arrayContaining([
expect.objectContaining({
isOffline: false,
originalFileName: 'assetA.png',
}),
expect.objectContaining({
isOffline: true,
originalFileName: 'assetB.png',
@@ -506,8 +434,6 @@ describe('/libraries', () => {
await utils.waitForWebsocketEvent({ event: 'assetDelete', total: 1 });
expect(existsSync(`${testAssetDir}/temp/offline1/assetA.png`)).toBe(true);
utils.removeImageFile(`${testAssetDir}/temp/offline1/assetA.png`);
});
it('should scan new files', async () => {
@@ -519,14 +445,14 @@ describe('/libraries', () => {
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
utils.createImageFile(`${testAssetDir}/temp/directoryC/assetC.png`);
utils.createImageFile(`${testAssetDir}/temp/directoryA/assetC.png`);
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
utils.removeImageFile(`${testAssetDir}/temp/directoryA/assetC.png`);
const { assets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
expect(assets.count).toBe(3);
expect(assets.items).toEqual(
expect.arrayContaining([
expect.objectContaining({
@@ -534,8 +460,6 @@ describe('/libraries', () => {
}),
]),
);
utils.removeImageFile(`${testAssetDir}/temp/directoryC/assetC.png`);
});
describe('with refreshModifiedFiles=true', () => {
@@ -635,11 +559,10 @@ describe('/libraries', () => {
it('should remove offline files', async () => {
const library = await utils.createLibrary(admin.accessToken, {
ownerId: admin.userId,
importPaths: [`${testAssetDirInternal}/temp/offline`],
importPaths: [`${testAssetDirInternal}/temp/offline2`],
});
utils.createImageFile(`${testAssetDir}/temp/offline/online.png`);
utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`);
utils.createImageFile(`${testAssetDir}/temp/offline2/assetA.png`);
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
@@ -647,9 +570,9 @@ describe('/libraries', () => {
const { assets: initialAssets } = await utils.metadataSearch(admin.accessToken, {
libraryId: library.id,
});
expect(initialAssets.count).toBe(2);
expect(initialAssets.count).toBe(1);
utils.removeImageFile(`${testAssetDir}/temp/offline/offline.png`);
utils.removeImageFile(`${testAssetDir}/temp/offline2/assetA.png`);
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
@@ -670,54 +593,7 @@ describe('/libraries', () => {
const { assets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
expect(assets.count).toBe(1);
utils.removeImageFile(`${testAssetDir}/temp/offline/online.png`);
});
it('should remove offline files from trash', async () => {
const library = await utils.createLibrary(admin.accessToken, {
ownerId: admin.userId,
importPaths: [`${testAssetDirInternal}/temp/offline`],
});
utils.createImageFile(`${testAssetDir}/temp/offline/online.png`);
utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`);
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
const { assets: initialAssets } = await utils.metadataSearch(admin.accessToken, {
libraryId: library.id,
});
expect(initialAssets.count).toBe(2);
utils.removeImageFile(`${testAssetDir}/temp/offline/offline.png`);
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
const { assets: offlineAssets } = await utils.metadataSearch(admin.accessToken, {
libraryId: library.id,
isOffline: true,
});
expect(offlineAssets.count).toBe(1);
const { status } = await request(app)
.post(`/libraries/${library.id}/removeOffline`)
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
expect(status).toBe(204);
await utils.waitForQueueFinish(admin.accessToken, 'library');
await utils.waitForQueueFinish(admin.accessToken, 'backgroundTask');
const { assets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
expect(assets.count).toBe(1);
expect(assets.items[0].isOffline).toBe(false);
expect(assets.items[0].originalPath).toEqual(`${testAssetDirInternal}/temp/offline/online.png`);
utils.removeImageFile(`${testAssetDir}/temp/offline/online.png`);
expect(assets.count).toBe(0);
});
it('should not remove online files', async () => {

View File

@@ -159,75 +159,4 @@ describe('/map', () => {
expect(body).toEqual(expect.objectContaining({ id: 'immich-map-dark' }));
});
});
describe('GET /map/reverse-geocode', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/map/reverse-geocode');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should throw an error if a lat is not provided', async () => {
const { status, body } = await request(app)
.get('/map/reverse-geocode?lon=123')
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['lat must be a number between -90 and 90']));
});
it('should throw an error if a lat is not a number', async () => {
const { status, body } = await request(app)
.get('/map/reverse-geocode?lat=abc&lon=123.456')
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['lat must be a number between -90 and 90']));
});
it('should throw an error if a lat is out of range', async () => {
const { status, body } = await request(app)
.get('/map/reverse-geocode?lat=91&lon=123.456')
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['lat must be a number between -90 and 90']));
});
it('should throw an error if a lon is not provided', async () => {
const { status, body } = await request(app)
.get('/map/reverse-geocode?lat=75')
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['lon must be a number between -180 and 180']));
});
const reverseGeocodeTestCases = [
{
name: 'Vaucluse',
lat: -33.858_977_058_663_13,
lon: 151.278_490_730_270_48,
results: [{ city: 'Vaucluse', state: 'New South Wales', country: 'Australia' }],
},
{
name: 'Ravenhall',
lat: -37.765_732_399_174_75,
lon: 144.752_453_164_883_3,
results: [{ city: 'Ravenhall', state: 'Victoria', country: 'Australia' }],
},
{
name: 'Scarborough',
lat: -31.894_346_156_789_997,
lon: 115.757_617_103_904_64,
results: [{ city: 'Scarborough', state: 'Western Australia', country: 'Australia' }],
},
];
it.each(reverseGeocodeTestCases)(`should resolve to $name`, async ({ lat, lon, results }) => {
const { status, body } = await request(app)
.get(`/map/reverse-geocode?lat=${lat}&lon=${lon}`)
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(Array.isArray(body)).toBe(true);
expect(body.length).toBe(results.length);
expect(body).toEqual(results);
});
});
});
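The reverse-geocode cases above reduce to a single authenticated GET with range-checked coordinates; a minimal sketch, assuming the suite's app and admin token:

// lat must be in [-90, 90] and lon in [-180, 180]; both are required
const { status, body } = await request(app)
  .get('/map/reverse-geocode?lat=-33.858977&lon=151.278491')
  .set('Authorization', `Bearer ${admin.accessToken}`);

// a valid pair resolves to an array of { city, state, country } matches
expect(status).toBe(200);
expect(body).toEqual([{ city: 'Vaucluse', state: 'New South Wales', country: 'Australia' }]);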

View File

@@ -92,14 +92,14 @@ describe(`/oauth`, () => {
it('should return a redirect uri', async () => {
const { status, body } = await request(app)
.post('/oauth/authorize')
.send({ redirectUri: 'http://127.0.0.1:2285/auth/login' });
.send({ redirectUri: 'http://127.0.0.1:2283/auth/login' });
expect(status).toBe(201);
expect(body).toEqual({ url: expect.stringContaining(`${authServer.internal}/auth?`) });
const params = new URL(body.url).searchParams;
expect(params.get('client_id')).toBe('client-default');
expect(params.get('response_type')).toBe('code');
expect(params.get('redirect_uri')).toBe('http://127.0.0.1:2285/auth/login');
expect(params.get('redirect_uri')).toBe('http://127.0.0.1:2283/auth/login');
expect(params.get('state')).toBeDefined();
});
});

View File

@@ -6,19 +6,10 @@ import request from 'supertest';
import { beforeAll, beforeEach, describe, expect, it } from 'vitest';
const invalidBirthday = [
{
birthDate: 'false',
response: ['birthDate must be a string in the format yyyy-MM-dd', 'Birth date cannot be in the future'],
},
{
birthDate: '123567',
response: ['birthDate must be a string in the format yyyy-MM-dd', 'Birth date cannot be in the future'],
},
{
birthDate: 123_567,
response: ['birthDate must be a string in the format yyyy-MM-dd', 'Birth date cannot be in the future'],
},
{ birthDate: '9999-01-01', response: ['Birth date cannot be in the future'] },
{ birthDate: 'false', response: 'birthDate must be a date string' },
{ birthDate: '123567', response: 'birthDate must be a date string' },
{ birthDate: 123_567, response: 'birthDate must be a date string' },
{ birthDate: new Date(9999, 0, 0).toISOString(), response: ['Birth date cannot be in the future'] },
];
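The table above is presumably consumed by a parameterised validation test; a sketch of what that usage could look like (the exact body is outside this hunk, and errorDto and admin are assumed to be available as elsewhere in the suite):

// illustrative only: feeds each invalid birthDate into the create endpoint
it.each(invalidBirthday)('should reject birthDate $birthDate', async ({ birthDate, response }) => {
  const { status, body } = await request(app)
    .post('/people')
    .set('Authorization', `Bearer ${admin.accessToken}`)
    .send({ name: 'New Person', birthDate });

  expect(status).toBe(400);
  expect(body).toEqual(errorDto.badRequest(response));
});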
describe('/people', () => {
@@ -74,7 +65,6 @@ describe('/people', () => {
expect(status).toBe(200);
expect(body).toEqual({
hasNextPage: false,
total: 3,
hidden: 1,
people: [
@@ -90,7 +80,6 @@ describe('/people', () => {
expect(status).toBe(200);
expect(body).toEqual({
hasNextPage: false,
total: 3,
hidden: 1,
people: [
@@ -99,21 +88,6 @@ describe('/people', () => {
],
});
});
it('should support pagination', async () => {
const { status, body } = await request(app)
.get('/people')
.set('Authorization', `Bearer ${admin.accessToken}`)
.query({ withHidden: true, page: 2, size: 1 });
expect(status).toBe(200);
expect(body).toEqual({
hasNextPage: true,
total: 3,
hidden: 1,
people: [expect.objectContaining({ name: 'visible_person' })],
});
});
});
describe('GET /people/:id', () => {
@@ -194,13 +168,13 @@ describe('/people', () => {
.set('Authorization', `Bearer ${admin.accessToken}`)
.send({
name: 'New Person',
birthDate: '1990-01-01',
birthDate: '1990-01-01T05:00:00.000Z',
});
expect(status).toBe(201);
expect(body).toMatchObject({
id: expect.any(String),
name: 'New Person',
birthDate: '1990-01-01',
birthDate: '1990-01-01T05:00:00.000Z',
});
});
});
@@ -242,7 +216,7 @@ describe('/people', () => {
const { status, body } = await request(app)
.put(`/people/${visiblePerson.id}`)
.set('Authorization', `Bearer ${admin.accessToken}`)
.send({ birthDate: '1990-01-01' });
.send({ birthDate: '1990-01-01T05:00:00.000Z' });
expect(status).toBe(200);
expect(body).toMatchObject({ birthDate: '1990-01-01' });
});

View File

@@ -1,4 +1,4 @@
import { AssetMediaResponseDto, LoginResponseDto, deleteAssets, updateAsset } from '@immich/sdk';
import { AssetMediaResponseDto, LoginResponseDto, deleteAssets, getMapMarkers, updateAsset } from '@immich/sdk';
import { DateTime } from 'luxon';
import { readFile } from 'node:fs/promises';
import { join } from 'node:path';
@@ -32,6 +32,9 @@ describe('/search', () => {
let assetOneJpg5: AssetMediaResponseDto;
let assetSprings: AssetMediaResponseDto;
let assetLast: AssetMediaResponseDto;
let cities: string[];
let states: string[];
let countries: string[];
beforeAll(async () => {
await utils.resetDatabase();
@@ -46,9 +49,9 @@ describe('/search', () => {
{ filename: '/albums/nature/silver_fir.jpg' },
{ filename: '/formats/heic/IMG_2682.heic' },
{ filename: '/formats/jpg/el_torcal_rocks.jpg' },
{ filename: '/formats/motionphoto/samsung-one-ui-6.jpg' },
{ filename: '/formats/motionphoto/samsung-one-ui-6.heic' },
{ filename: '/formats/motionphoto/samsung-one-ui-5.jpg' },
{ filename: '/formats/motionphoto/Samsung One UI 6.jpg' },
{ filename: '/formats/motionphoto/Samsung One UI 6.heic' },
{ filename: '/formats/motionphoto/Samsung One UI 5.jpg' },
{ filename: '/metadata/gps-position/thompson-springs.jpg', dto: { isArchived: true } },
@@ -82,7 +85,7 @@ describe('/search', () => {
// note: the coordinates here are not the actual coordinates of the images and are random for most of them
const coordinates = [
{ latitude: 48.853_41, longitude: 2.3488 }, // paris
{ latitude: 35.6895, longitude: 139.691_71 }, // tokyo
{ latitude: 63.0695, longitude: -151.0074 }, // denali
{ latitude: 52.524_37, longitude: 13.410_53 }, // berlin
{ latitude: 1.314_663_1, longitude: 103.845_409_3 }, // singapore
{ latitude: 41.013_84, longitude: 28.949_66 }, // istanbul
@@ -98,15 +101,16 @@ describe('/search', () => {
{ latitude: 31.634_16, longitude: -7.999_94 }, // marrakesh
{ latitude: 38.523_735_4, longitude: -78.488_619_4 }, // tanners ridge
{ latitude: 59.938_63, longitude: 30.314_13 }, // st. petersburg
{ latitude: 35.6895, longitude: 139.691_71 }, // tokyo
];
const updates = coordinates.map((dto, i) =>
updateAsset({ id: assets[i].id, updateAssetDto: dto }, { headers: asBearerAuth(admin.accessToken) }),
const updates = assets.map((asset, i) =>
updateAsset({ id: asset.id, updateAssetDto: coordinates[i] }, { headers: asBearerAuth(admin.accessToken) }),
);
await Promise.all(updates);
for (const [i] of coordinates.entries()) {
await utils.waitForWebsocketEvent({ event: 'assetUpdate', id: assets[i].id });
for (const asset of assets) {
await utils.waitForWebsocketEvent({ event: 'assetUpdate', id: asset.id });
}
[
@@ -133,6 +137,12 @@ describe('/search', () => {
assetLast = assets.at(-1) as AssetMediaResponseDto;
await deleteAssets({ assetBulkDeleteDto: { ids: [assetSilver.id] } }, { headers: asBearerAuth(admin.accessToken) });
const mapMarkers = await getMapMarkers({}, { headers: asBearerAuth(admin.accessToken) });
const nonTrashed = mapMarkers.filter((mark) => mark.id !== assetSilver.id);
cities = [...new Set(nonTrashed.map((mark) => mark.city).filter((entry): entry is string => !!entry))].sort();
states = [...new Set(nonTrashed.map((mark) => mark.state).filter((entry): entry is string => !!entry))].sort();
countries = [...new Set(nonTrashed.map((mark) => mark.country).filter((entry): entry is string => !!entry))].sort();
}, 30_000);
afterAll(async () => {
@@ -305,126 +315,29 @@ describe('/search', () => {
{
should: 'should search by originalFilename with spaces',
deferred: () => ({
dto: { originalFileName: 'samsung-one', type: 'IMAGE' },
dto: { originalFileName: 'Samsung One', type: 'IMAGE' },
assets: [assetOneJpg5, assetOneJpg6, assetOneHeic6],
}),
},
{
should: 'should search by city',
deferred: () => ({
dto: {
city: 'Accra',
includeNull: true,
},
assets: [assetHeic],
}),
},
{
should: "should search city ('')",
deferred: () => ({
dto: {
city: '',
isVisible: true,
includeNull: true,
},
assets: [assetLast],
}),
},
{
should: 'should search city (null)',
deferred: () => ({
dto: {
city: null,
isVisible: true,
includeNull: true,
},
assets: [assetLast],
}),
deferred: () => ({ dto: { city: 'Accra' }, assets: [assetHeic] }),
},
{
should: 'should search by state',
deferred: () => ({
dto: {
state: 'New York',
includeNull: true,
},
assets: [assetDensity],
}),
},
{
should: "should search state ('')",
deferred: () => ({
dto: {
state: '',
isVisible: true,
withExif: true,
includeNull: true,
},
assets: [assetLast, assetNotocactus],
}),
},
{
should: 'should search state (null)',
deferred: () => ({
dto: {
state: null,
isVisible: true,
includeNull: true,
},
assets: [assetLast, assetNotocactus],
}),
deferred: () => ({ dto: { state: 'New York' }, assets: [assetDensity] }),
},
{
should: 'should search by country',
deferred: () => ({
dto: {
country: 'France',
includeNull: true,
},
assets: [assetFalcon],
}),
},
{
should: "should search country ('')",
deferred: () => ({
dto: {
country: '',
isVisible: true,
includeNull: true,
},
assets: [assetLast],
}),
},
{
should: 'should search country (null)',
deferred: () => ({
dto: {
country: null,
isVisible: true,
includeNull: true,
},
assets: [assetLast],
}),
deferred: () => ({ dto: { country: 'France' }, assets: [assetFalcon] }),
},
{
should: 'should search by make',
deferred: () => ({
dto: {
make: 'Canon',
includeNull: true,
},
assets: [assetFalcon, assetDenali],
}),
deferred: () => ({ dto: { make: 'Canon' }, assets: [assetFalcon, assetDenali] }),
},
{
should: 'should search by model',
deferred: () => ({
dto: {
model: 'Canon EOS 7D',
includeNull: true,
},
assets: [assetDenali],
}),
deferred: () => ({ dto: { model: 'Canon EOS 7D' }, assets: [assetDenali] }),
},
{
should: 'should allow searching the upload library (libraryId: null)',
@@ -537,79 +450,32 @@ describe('/search', () => {
it('should get suggestions for country', async () => {
const { status, body } = await request(app)
.get('/search/suggestions?type=country&includeNull=true')
.get('/search/suggestions?type=country')
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(body).toEqual([
'Cuba',
'France',
'Georgia',
'Germany',
'Ghana',
'Japan',
'Morocco',
"People's Republic of China",
'Russian Federation',
'Singapore',
'Spain',
'Switzerland',
'United States of America',
null,
]);
expect(body).toEqual(countries);
expect(status).toBe(200);
});
it('should get suggestions for state', async () => {
const { status, body } = await request(app)
.get('/search/suggestions?type=state&includeNull=true')
.get('/search/suggestions?type=state')
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(body).toEqual([
'Andalusia',
'Berlin',
'Glarus',
'Greater Accra',
'Havana',
'Île-de-France',
'Marrakesh-Safi',
'Mississippi',
'New York',
'Shanghai',
'St.-Petersburg',
'Tbilisi',
'Tokyo',
'Virginia',
null,
]);
expect(body).toHaveLength(states.length);
expect(body).toEqual(expect.arrayContaining(states));
expect(status).toBe(200);
});
it('should get suggestions for city', async () => {
const { status, body } = await request(app)
.get('/search/suggestions?type=city&includeNull=true')
.get('/search/suggestions?type=city')
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(body).toEqual([
'Accra',
'Berlin',
'Glarus',
'Havana',
'Marrakesh',
'Montalbán de Córdoba',
'New York City',
'Novena',
'Paris',
'Philadelphia',
'Saint Petersburg',
'Shanghai',
'Stanley',
'Tbilisi',
'Tokyo',
null,
]);
expect(body).toEqual(cities);
expect(status).toBe(200);
});
it('should get suggestions for camera make', async () => {
const { status, body } = await request(app)
.get('/search/suggestions?type=camera-make&includeNull=true')
.get('/search/suggestions?type=camera-make')
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(body).toEqual([
'Apple',
@@ -619,14 +485,13 @@ describe('/search', () => {
'PENTAX Corporation',
'samsung',
'SONY',
null,
]);
expect(status).toBe(200);
});
it('should get suggestions for camera model', async () => {
const { status, body } = await request(app)
.get('/search/suggestions?type=camera-model&includeNull=true')
.get('/search/suggestions?type=camera-model')
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(body).toEqual([
'Canon EOS 7D',
@@ -641,7 +506,6 @@ describe('/search', () => {
'SM-F711N',
'SM-S906U',
'SM-T970',
null,
]);
expect(status).toBe(200);
});

View File

@@ -254,7 +254,7 @@ describe('/server', () => {
.set('Authorization', `Bearer ${admin.accessToken}`)
.send(serverLicense);
const { status } = await request(app).get('/server/license').set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(404);
expect(status).toBe(200);
});
});

View File

@@ -112,13 +112,6 @@ describe('/shared-links', () => {
expect(resp.header['content-type']).toContain('text/html');
expect(resp.text).toContain(`<meta name="description" content="1 shared photos & videos" />`);
});
it('should have fqdn og:image meta tag for shared asset', async () => {
const resp = await request(shareUrl).get(`/${linkWithAssets.key}`);
expect(resp.status).toBe(200);
expect(resp.header['content-type']).toContain('text/html');
expect(resp.text).toContain(`<meta property="og:image" content="http://`);
});
});
describe('GET /shared-links', () => {

View File

@@ -1,211 +0,0 @@
import { AssetMediaResponseDto, LoginResponseDto, searchStacks } from '@immich/sdk';
import { createUserDto, uuidDto } from 'src/fixtures';
import { errorDto } from 'src/responses';
import { app, asBearerAuth, utils } from 'src/utils';
import request from 'supertest';
import { beforeAll, describe, expect, it } from 'vitest';
describe('/stacks', () => {
let admin: LoginResponseDto;
let user1: LoginResponseDto;
let user2: LoginResponseDto;
let asset: AssetMediaResponseDto;
beforeAll(async () => {
await utils.resetDatabase();
admin = await utils.adminSetup();
[user1, user2] = await Promise.all([
utils.userSetup(admin.accessToken, createUserDto.user1),
utils.userSetup(admin.accessToken, createUserDto.user2),
]);
asset = await utils.createAsset(user1.accessToken);
});
describe('POST /stacks', () => {
it('should require authentication', async () => {
const { status, body } = await request(app)
.post('/stacks')
.send({ assetIds: [asset.id] });
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should require at least two assets', async () => {
const { status, body } = await request(app)
.post('/stacks')
.set('Authorization', `Bearer ${user1.accessToken}`)
.send({ assetIds: [asset.id] });
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest());
});
it('should require a valid id', async () => {
const { status, body } = await request(app)
.post('/stacks')
.set('Authorization', `Bearer ${user1.accessToken}`)
.send({ assetIds: [uuidDto.invalid, uuidDto.invalid] });
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest());
});
it('should require access', async () => {
const user2Asset = await utils.createAsset(user2.accessToken);
const { status, body } = await request(app)
.post('/stacks')
.set('Authorization', `Bearer ${user1.accessToken}`)
.send({ assetIds: [asset.id, user2Asset.id] });
expect(status).toBe(400);
expect(body).toEqual(errorDto.noPermission);
});
it('should create a stack', async () => {
const [asset1, asset2] = await Promise.all([
utils.createAsset(user1.accessToken),
utils.createAsset(user1.accessToken),
]);
const { status, body } = await request(app)
.post('/stacks')
.set('Authorization', `Bearer ${user1.accessToken}`)
.send({ assetIds: [asset1.id, asset2.id] });
expect(status).toBe(201);
expect(body).toEqual({
id: expect.any(String),
primaryAssetId: asset1.id,
assets: [expect.objectContaining({ id: asset1.id }), expect.objectContaining({ id: asset2.id })],
});
});
it('should merge an existing stack', async () => {
const [asset1, asset2, asset3] = await Promise.all([
utils.createAsset(user1.accessToken),
utils.createAsset(user1.accessToken),
utils.createAsset(user1.accessToken),
]);
const response1 = await request(app)
.post('/stacks')
.set('Authorization', `Bearer ${user1.accessToken}`)
.send({ assetIds: [asset1.id, asset2.id] });
expect(response1.status).toBe(201);
const stacksBefore = await searchStacks({}, { headers: asBearerAuth(user1.accessToken) });
const { status, body } = await request(app)
.post('/stacks')
.set('Authorization', `Bearer ${user1.accessToken}`)
.send({ assetIds: [asset1.id, asset3.id] });
expect(status).toBe(201);
expect(body).toEqual({
id: expect.any(String),
primaryAssetId: asset1.id,
assets: expect.arrayContaining([
expect.objectContaining({ id: asset1.id }),
expect.objectContaining({ id: asset2.id }),
expect.objectContaining({ id: asset3.id }),
]),
});
const stacksAfter = await searchStacks({}, { headers: asBearerAuth(user1.accessToken) });
expect(stacksAfter.length).toBe(stacksBefore.length);
});
// it('should require a valid parent id', async () => {
// const { status, body } = await request(app)
// .put('/assets')
// .set('Authorization', `Bearer ${user1.accessToken}`)
// .send({ stackParentId: uuidDto.invalid, ids: [stackAssets[0].id] });
// expect(status).toBe(400);
// expect(body).toEqual(errorDto.badRequest(['stackParentId must be a UUID']));
// });
});
// it('should require access to the parent', async () => {
// const { status, body } = await request(app)
// .put('/assets')
// .set('Authorization', `Bearer ${user1.accessToken}`)
// .send({ stackParentId: stackAssets[3].id, ids: [user1Assets[0].id] });
// expect(status).toBe(400);
// expect(body).toEqual(errorDto.noPermission);
// });
// it('should add stack children', async () => {
// const { status } = await request(app)
// .put('/assets')
// .set('Authorization', `Bearer ${stackUser.accessToken}`)
// .send({ stackParentId: stackAssets[0].id, ids: [stackAssets[3].id] });
// expect(status).toBe(204);
// const asset = await getAssetInfo({ id: stackAssets[0].id }, { headers: asBearerAuth(stackUser.accessToken) });
// expect(asset.stack).not.toBeUndefined();
// expect(asset.stack).toEqual(expect.arrayContaining([expect.objectContaining({ id: stackAssets[3].id })]));
// });
// it('should remove stack children', async () => {
// const { status } = await request(app)
// .put('/assets')
// .set('Authorization', `Bearer ${stackUser.accessToken}`)
// .send({ removeParent: true, ids: [stackAssets[1].id] });
// expect(status).toBe(204);
// const asset = await getAssetInfo({ id: stackAssets[0].id }, { headers: asBearerAuth(stackUser.accessToken) });
// expect(asset.stack).not.toBeUndefined();
// expect(asset.stack).toEqual(
// expect.arrayContaining([
// expect.objectContaining({ id: stackAssets[2].id }),
// expect.objectContaining({ id: stackAssets[3].id }),
// ]),
// );
// });
// it('should remove all stack children', async () => {
// const { status } = await request(app)
// .put('/assets')
// .set('Authorization', `Bearer ${stackUser.accessToken}`)
// .send({ removeParent: true, ids: [stackAssets[2].id, stackAssets[3].id] });
// expect(status).toBe(204);
// const asset = await getAssetInfo({ id: stackAssets[0].id }, { headers: asBearerAuth(stackUser.accessToken) });
// expect(asset.stack).toBeUndefined();
// });
// it('should merge stack children', async () => {
// // create stack after previous test removed stack children
// await updateAssets(
// { assetBulkUpdateDto: { stackParentId: stackAssets[0].id, ids: [stackAssets[1].id, stackAssets[2].id] } },
// { headers: asBearerAuth(stackUser.accessToken) },
// );
// const { status } = await request(app)
// .put('/assets')
// .set('Authorization', `Bearer ${stackUser.accessToken}`)
// .send({ stackParentId: stackAssets[3].id, ids: [stackAssets[0].id] });
// expect(status).toBe(204);
// const asset = await getAssetInfo({ id: stackAssets[3].id }, { headers: asBearerAuth(stackUser.accessToken) });
// expect(asset.stack).not.toBeUndefined();
// expect(asset.stack).toEqual(
// expect.arrayContaining([
// expect.objectContaining({ id: stackAssets[0].id }),
// expect.objectContaining({ id: stackAssets[1].id }),
// expect.objectContaining({ id: stackAssets[2].id }),
// ]),
// );
// });
});

View File

@@ -1,603 +0,0 @@
import {
AssetMediaResponseDto,
LoginResponseDto,
Permission,
TagCreateDto,
TagResponseDto,
createTag,
getAllTags,
tagAssets,
upsertTags,
} from '@immich/sdk';
import { createUserDto, uuidDto } from 'src/fixtures';
import { errorDto } from 'src/responses';
import { app, asBearerAuth, utils } from 'src/utils';
import request from 'supertest';
import { beforeAll, beforeEach, describe, expect, it } from 'vitest';
const create = (accessToken: string, dto: TagCreateDto) =>
createTag({ tagCreateDto: dto }, { headers: asBearerAuth(accessToken) });
const upsert = (accessToken: string, tags: string[]) =>
upsertTags({ tagUpsertDto: { tags } }, { headers: asBearerAuth(accessToken) });
describe('/tags', () => {
let admin: LoginResponseDto;
let user: LoginResponseDto;
let userAsset: AssetMediaResponseDto;
beforeAll(async () => {
await utils.resetDatabase();
admin = await utils.adminSetup();
user = await utils.userSetup(admin.accessToken, createUserDto.user1);
userAsset = await utils.createAsset(user.accessToken);
});
beforeEach(async () => {
// tagging assets eventually triggers metadata extraction which can impact other tests
await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction');
await utils.resetDatabase(['tags']);
});
describe('POST /tags', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).post('/tags').send({ name: 'TagA' });
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should require authorization (api key)', async () => {
const { secret } = await utils.createApiKey(user.accessToken, [Permission.AssetRead]);
const { status, body } = await request(app).post('/tags').set('x-api-key', secret).send({ name: 'TagA' });
expect(status).toBe(403);
expect(body).toEqual(errorDto.missingPermission('tag.create'));
});
it('should work with tag.create', async () => {
const { secret } = await utils.createApiKey(user.accessToken, [Permission.TagCreate]);
const { status, body } = await request(app).post('/tags').set('x-api-key', secret).send({ name: 'TagA' });
expect(body).toEqual({
id: expect.any(String),
name: 'TagA',
value: 'TagA',
createdAt: expect.any(String),
updatedAt: expect.any(String),
});
expect(status).toBe(201);
});
it('should create a tag', async () => {
const { status, body } = await request(app)
.post('/tags')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send({ name: 'TagA' });
expect(body).toEqual({
id: expect.any(String),
name: 'TagA',
value: 'TagA',
createdAt: expect.any(String),
updatedAt: expect.any(String),
});
expect(status).toBe(201);
});
it('should allow multiple users to create tags with the same value', async () => {
await create(admin.accessToken, { name: 'TagA' });
const { status, body } = await request(app)
.post('/tags')
.set('Authorization', `Bearer ${user.accessToken}`)
.send({ name: 'TagA' });
expect(body).toEqual({
id: expect.any(String),
name: 'TagA',
value: 'TagA',
createdAt: expect.any(String),
updatedAt: expect.any(String),
});
expect(status).toBe(201);
});
it('should create a nested tag', async () => {
const parent = await create(admin.accessToken, { name: 'TagA' });
const { status, body } = await request(app)
.post('/tags')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send({ name: 'TagB', parentId: parent.id });
expect(body).toEqual({
id: expect.any(String),
parentId: parent.id,
name: 'TagB',
value: 'TagA/TagB',
createdAt: expect.any(String),
updatedAt: expect.any(String),
});
expect(status).toBe(201);
});
});
describe('GET /tags', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/tags');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should require authorization (api key)', async () => {
const { secret } = await utils.createApiKey(user.accessToken, [Permission.AssetRead]);
const { status, body } = await request(app).get('/tags').set('x-api-key', secret);
expect(status).toBe(403);
expect(body).toEqual(errorDto.missingPermission('tag.read'));
});
it('should start off empty', async () => {
const { status, body } = await request(app).get('/tags').set('Authorization', `Bearer ${admin.accessToken}`);
expect(body).toEqual([]);
expect(status).toEqual(200);
});
it('should return a list of tags', async () => {
const [tagA, tagB, tagC] = await Promise.all([
create(admin.accessToken, { name: 'TagA' }),
create(admin.accessToken, { name: 'TagB' }),
create(admin.accessToken, { name: 'TagC' }),
]);
const { status, body } = await request(app).get('/tags').set('Authorization', `Bearer ${admin.accessToken}`);
expect(body).toHaveLength(3);
expect(body).toEqual([tagA, tagB, tagC]);
expect(status).toEqual(200);
});
    it('should return nested tags', async () => {
await upsert(admin.accessToken, ['TagA/TagB/TagC', 'TagD']);
const { status, body } = await request(app).get('/tags').set('Authorization', `Bearer ${admin.accessToken}`);
expect(body).toHaveLength(4);
expect(status).toEqual(200);
const tags = body as TagResponseDto[];
const tagA = tags.find((tag) => tag.value === 'TagA') as TagResponseDto;
const tagB = tags.find((tag) => tag.value === 'TagA/TagB') as TagResponseDto;
const tagC = tags.find((tag) => tag.value === 'TagA/TagB/TagC') as TagResponseDto;
const tagD = tags.find((tag) => tag.value === 'TagD') as TagResponseDto;
expect(tagA).toEqual(expect.objectContaining({ name: 'TagA', value: 'TagA' }));
expect(tagB).toEqual(expect.objectContaining({ name: 'TagB', value: 'TagA/TagB', parentId: tagA.id }));
expect(tagC).toEqual(expect.objectContaining({ name: 'TagC', value: 'TagA/TagB/TagC', parentId: tagB.id }));
expect(tagD).toEqual(expect.objectContaining({ name: 'TagD', value: 'TagD' }));
});
});
describe('PUT /tags', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).put(`/tags`).send({ name: 'TagA/TagB' });
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should require authorization (api key)', async () => {
const { secret } = await utils.createApiKey(user.accessToken, [Permission.AssetRead]);
const { status, body } = await request(app).put('/tags').set('x-api-key', secret).send({ name: 'TagA' });
expect(status).toBe(403);
expect(body).toEqual(errorDto.missingPermission('tag.create'));
});
it('should upsert tags', async () => {
const { status, body } = await request(app)
.put(`/tags`)
.send({ tags: ['TagA/TagB/TagC/TagD'] })
.set('Authorization', `Bearer ${user.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual([expect.objectContaining({ name: 'TagD', value: 'TagA/TagB/TagC/TagD' })]);
});
it('should upsert tags in parallel without conflicts', async () => {
const [[tag1], [tag2], [tag3], [tag4]] = await Promise.all([
upsert(admin.accessToken, ['TagA/TagB/TagC/TagD']),
upsert(admin.accessToken, ['TagA/TagB/TagC/TagD']),
upsert(admin.accessToken, ['TagA/TagB/TagC/TagD']),
upsert(admin.accessToken, ['TagA/TagB/TagC/TagD']),
]);
const { id, parentId, createdAt } = tag1;
for (const tag of [tag1, tag2, tag3, tag4]) {
expect(tag).toMatchObject({
id,
parentId,
createdAt,
name: 'TagD',
value: 'TagA/TagB/TagC/TagD',
});
}
});
});
describe('PUT /tags/assets', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).put(`/tags/assets`).send({ tagIds: [], assetIds: [] });
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should require authorization (api key)', async () => {
const { secret } = await utils.createApiKey(user.accessToken, [Permission.AssetRead]);
const { status, body } = await request(app)
.put('/tags/assets')
.set('x-api-key', secret)
.send({ assetIds: [], tagIds: [] });
expect(status).toBe(403);
expect(body).toEqual(errorDto.missingPermission('tag.asset'));
});
it('should skip assets that are not owned by the user', async () => {
const [tagA, tagB, tagC, assetA, assetB] = await Promise.all([
create(user.accessToken, { name: 'TagA' }),
create(user.accessToken, { name: 'TagB' }),
create(user.accessToken, { name: 'TagC' }),
utils.createAsset(user.accessToken),
utils.createAsset(admin.accessToken),
]);
const { status, body } = await request(app)
.put(`/tags/assets`)
.send({ tagIds: [tagA.id, tagB.id, tagC.id], assetIds: [assetA.id, assetB.id] })
.set('Authorization', `Bearer ${user.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual({ count: 3 });
});
it('should skip tags that are not owned by the user', async () => {
const [tagA, tagB, tagC, assetA, assetB] = await Promise.all([
create(user.accessToken, { name: 'TagA' }),
create(user.accessToken, { name: 'TagB' }),
create(admin.accessToken, { name: 'TagC' }),
utils.createAsset(user.accessToken),
utils.createAsset(user.accessToken),
]);
const { status, body } = await request(app)
.put(`/tags/assets`)
.send({ tagIds: [tagA.id, tagB.id, tagC.id], assetIds: [assetA.id, assetB.id] })
.set('Authorization', `Bearer ${user.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual({ count: 4 });
});
it('should bulk tag assets', async () => {
const [tagA, tagB, tagC, assetA, assetB] = await Promise.all([
create(user.accessToken, { name: 'TagA' }),
create(user.accessToken, { name: 'TagB' }),
create(user.accessToken, { name: 'TagC' }),
utils.createAsset(user.accessToken),
utils.createAsset(user.accessToken),
]);
const { status, body } = await request(app)
.put(`/tags/assets`)
.send({ tagIds: [tagA.id, tagB.id, tagC.id], assetIds: [assetA.id, assetB.id] })
.set('Authorization', `Bearer ${user.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual({ count: 6 });
});
});
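The counts asserted above follow from bulk tagging applying every (tag, asset) pair the caller owns:

// 3 tags x 1 owned asset  = 3  (the admin-owned asset is skipped)
// 2 owned tags x 2 assets = 4  (the admin-owned tag is skipped)
// 3 tags x 2 assets       = 6  (everything owned, so all pairs are tagged)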
describe('GET /tags/:id', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get(`/tags/${uuidDto.notFound}`);
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should require authorization', async () => {
const tag = await create(user.accessToken, { name: 'TagA' });
const { status, body } = await request(app)
.get(`/tags/${tag.id}`)
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.noPermission);
});
it('should require authorization (api key)', async () => {
const { secret } = await utils.createApiKey(user.accessToken, [Permission.AssetRead]);
const { status, body } = await request(app)
.get(`/tags/${uuidDto.notFound}`)
.set('x-api-key', secret)
.send({ assetIds: [], tagIds: [] });
expect(status).toBe(403);
expect(body).toEqual(errorDto.missingPermission('tag.read'));
});
it('should require a valid uuid', async () => {
const { status, body } = await request(app)
.get(`/tags/${uuidDto.invalid}`)
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
});
it('should get tag details', async () => {
const tag = await create(user.accessToken, { name: 'TagA' });
const { status, body } = await request(app)
.get(`/tags/${tag.id}`)
.set('Authorization', `Bearer ${user.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual({
id: expect.any(String),
name: 'TagA',
value: 'TagA',
createdAt: expect.any(String),
updatedAt: expect.any(String),
});
});
it('should get nested tag details', async () => {
const tagA = await create(user.accessToken, { name: 'TagA' });
const tagB = await create(user.accessToken, { name: 'TagB', parentId: tagA.id });
const tagC = await create(user.accessToken, { name: 'TagC', parentId: tagB.id });
const tagD = await create(user.accessToken, { name: 'TagD', parentId: tagC.id });
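// a nested tag's value is its full path from the root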
const { status, body } = await request(app)
.get(`/tags/${tagD.id}`)
.set('Authorization', `Bearer ${user.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual({
id: expect.any(String),
parentId: tagC.id,
name: 'TagD',
value: 'TagA/TagB/TagC/TagD',
createdAt: expect.any(String),
updatedAt: expect.any(String),
});
});
});
describe('PUT /tags/:id', () => {
it('should require authentication', async () => {
const tag = await create(user.accessToken, { name: 'TagA' });
const { status, body } = await request(app).put(`/tags/${tag.id}`).send({ color: '#000000' });
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should require authorization', async () => {
const tag = await create(admin.accessToken, { name: 'tagA' });
const { status, body } = await request(app)
.put(`/tags/${tag.id}`)
.send({ color: '#000000' })
.set('Authorization', `Bearer ${user.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.noPermission);
});
it('should require authorization (api key)', async () => {
const tag = await create(user.accessToken, { name: 'TagA' });
const { secret } = await utils.createApiKey(user.accessToken, [Permission.AssetRead]);
const { status, body } = await request(app)
.put(`/tags/${tag.id}`)
.set('x-api-key', secret)
.send({ color: '#000000' });
expect(status).toBe(403);
expect(body).toEqual(errorDto.missingPermission('tag.update'));
});
it('should update a tag', async () => {
const tag = await create(user.accessToken, { name: 'tagA' });
const { status, body } = await request(app)
.put(`/tags/${tag.id}`)
.send({ color: '#000000' })
.set('Authorization', `Bearer ${user.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual(expect.objectContaining({ color: `#000000` }));
});
it('should update a tag color without a # prefix', async () => {
const tag = await create(user.accessToken, { name: 'tagA' });
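// the server is expected to normalize the color by prepending the missing '#'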
const { status, body } = await request(app)
.put(`/tags/${tag.id}`)
.send({ color: '000000' })
.set('Authorization', `Bearer ${user.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual(expect.objectContaining({ color: `#000000` }));
});
});
describe('DELETE /tags/:id', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).delete(`/tags/${uuidDto.notFound}`);
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should require authorization', async () => {
const tag = await create(user.accessToken, { name: 'TagA' });
const { status, body } = await request(app)
.delete(`/tags/${tag.id}`)
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.noPermission);
});
it('should require authorization (api key)', async () => {
const tag = await create(user.accessToken, { name: 'TagA' });
const { secret } = await utils.createApiKey(user.accessToken, [Permission.AssetRead]);
const { status, body } = await request(app).delete(`/tags/${tag.id}`).set('x-api-key', secret);
expect(status).toBe(403);
expect(body).toEqual(errorDto.missingPermission('tag.delete'));
});
it('should require a valid uuid', async () => {
const { status, body } = await request(app)
.delete(`/tags/${uuidDto.invalid}`)
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
});
it('should delete a tag', async () => {
const tag = await create(user.accessToken, { name: 'TagA' });
const { status } = await request(app)
.delete(`/tags/${tag.id}`)
.set('Authorization', `Bearer ${user.accessToken}`);
expect(status).toBe(204);
});
it('should delete a nested tag (root)', async () => {
const tagA = await create(user.accessToken, { name: 'TagA' });
await create(user.accessToken, { name: 'TagB', parentId: tagA.id });
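// deleting the root tag should cascade to its child, leaving no tags behind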
const { status } = await request(app)
.delete(`/tags/${tagA.id}`)
.set('Authorization', `Bearer ${user.accessToken}`);
expect(status).toBe(204);
const tags = await getAllTags({ headers: asBearerAuth(user.accessToken) });
expect(tags.length).toBe(0);
});
it('should delete a nested tag (leaf)', async () => {
const tagA = await create(user.accessToken, { name: 'TagA' });
const tagB = await create(user.accessToken, { name: 'TagB', parentId: tagA.id });
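// deleting the leaf should leave the parent tag untouched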
const { status } = await request(app)
.delete(`/tags/${tagB.id}`)
.set('Authorization', `Bearer ${user.accessToken}`);
expect(status).toBe(204);
const tags = await getAllTags({ headers: asBearerAuth(user.accessToken) });
expect(tags.length).toBe(1);
expect(tags[0]).toEqual(tagA);
});
});
describe('PUT /tags/:id/assets', () => {
it('should require authentication', async () => {
const tagA = await create(user.accessToken, { name: 'TagA' });
const { status, body } = await request(app)
.put(`/tags/${tagA.id}/assets`)
.send({ ids: [userAsset.id] });
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should require authorization', async () => {
const tag = await create(user.accessToken, { name: 'TagA' });
const { status, body } = await request(app)
.put(`/tags/${tag.id}/assets`)
.set('Authorization', `Bearer ${admin.accessToken}`)
.send({ ids: [userAsset.id] });
expect(status).toBe(400);
expect(body).toEqual(errorDto.noPermission);
});
it('should require authorization (api key)', async () => {
const tag = await create(user.accessToken, { name: 'TagA' });
const { secret } = await utils.createApiKey(user.accessToken, [Permission.AssetRead]);
const { status, body } = await request(app)
.put(`/tags/${tag.id}/assets`)
.set('x-api-key', secret)
.send({ ids: [userAsset.id] });
expect(status).toBe(403);
expect(body).toEqual(errorDto.missingPermission('tag.asset'));
});
it('should be able to tag own asset', async () => {
const tagA = await create(user.accessToken, { name: 'TagA' });
const { status, body } = await request(app)
.put(`/tags/${tagA.id}/assets`)
.set('Authorization', `Bearer ${user.accessToken}`)
.send({ ids: [userAsset.id] });
expect(status).toBe(200);
expect(body).toEqual([expect.objectContaining({ id: userAsset.id, success: true })]);
});
it("should not be able to add assets to another user's tag", async () => {
const tagA = await create(admin.accessToken, { name: 'TagA' });
const { status, body } = await request(app)
.put(`/tags/${tagA.id}/assets`)
.set('Authorization', `Bearer ${user.accessToken}`)
.send({ ids: [userAsset.id] });
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest('Not found or no tag.asset access'));
});
it('should add duplicate assets only once', async () => {
const tagA = await create(user.accessToken, { name: 'TagA' });
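// the same asset id is submitted twice; only the first attach should succeed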
const { status, body } = await request(app)
.put(`/tags/${tagA.id}/assets`)
.set('Authorization', `Bearer ${user.accessToken}`)
.send({ ids: [userAsset.id, userAsset.id] });
expect(status).toBe(200);
expect(body).toEqual([
expect.objectContaining({ id: userAsset.id, success: true }),
expect.objectContaining({ id: userAsset.id, success: false, error: 'duplicate' }),
]);
});
});
describe('DELETE /tags/:id/assets', () => {
it('should require authentication', async () => {
const tagA = await create(admin.accessToken, { name: 'TagA' });
const { status, body } = await request(app)
.delete(`/tags/${tagA.id}/assets`)
.send({ ids: [userAsset.id] });
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should require authorization', async () => {
const tagA = await create(user.accessToken, { name: 'TagA' });
await tagAssets(
{ id: tagA.id, bulkIdsDto: { ids: [userAsset.id] } },
{ headers: asBearerAuth(user.accessToken) },
);
const { status, body } = await request(app)
.delete(`/tags/${tagA.id}/assets`)
.set('Authorization', `Bearer ${admin.accessToken}`)
.send({ ids: [userAsset.id] });
expect(status).toBe(400);
expect(body).toEqual(errorDto.noPermission);
});
it('should require authorization (api key)', async () => {
const tag = await create(user.accessToken, { name: 'TagA' });
const { secret } = await utils.createApiKey(user.accessToken, [Permission.AssetRead]);
const { status, body } = await request(app)
.delete(`/tags/${tag.id}/assets`)
.set('x-api-key', secret)
.send({ ids: [userAsset.id] });
expect(status).toBe(403);
expect(body).toEqual(errorDto.missingPermission('tag.asset'));
});
it('should be able to remove own asset from own tag', async () => {
const tagA = await create(user.accessToken, { name: 'TagA' });
await tagAssets(
{ id: tagA.id, bulkIdsDto: { ids: [userAsset.id] } },
{ headers: asBearerAuth(user.accessToken) },
);
const { status, body } = await request(app)
.delete(`/tags/${tagA.id}/assets`)
.set('Authorization', `Bearer ${user.accessToken}`)
.send({ ids: [userAsset.id] });
expect(status).toBe(200);
expect(body).toEqual([expect.objectContaining({ id: userAsset.id, success: true })]);
});
it('should remove duplicate assets only once', async () => {
const tagA = await create(user.accessToken, { name: 'TagA' });
await tagAssets(
{ id: tagA.id, bulkIdsDto: { ids: [userAsset.id] } },
{ headers: asBearerAuth(user.accessToken) },
);
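// the second removal of the same id should report not_found since the asset was already detached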
const { status, body } = await request(app)
.delete(`/tags/${tagA.id}/assets`)
.set('Authorization', `Bearer ${user.accessToken}`)
.send({ ids: [userAsset.id, userAsset.id] });
expect(status).toBe(200);
expect(body).toEqual([
expect.objectContaining({ id: userAsset.id, success: true }),
expect.objectContaining({ id: userAsset.id, success: false, error: 'not_found' }),
]);
});
});
});

View File

@@ -42,23 +42,6 @@ describe('/trash', () => {
const after = await getAssetStatistics({ isTrashed: true }, { headers: asBearerAuth(admin.accessToken) });
expect(after.total).toBe(0);
});
it('should empty the trash with archived assets', async () => {
const { id: assetId } = await utils.createAsset(admin.accessToken);
await utils.archiveAssets(admin.accessToken, [assetId]);
await utils.deleteAssets(admin.accessToken, [assetId]);
const before = await getAssetInfo({ id: assetId }, { headers: asBearerAuth(admin.accessToken) });
expect(before).toStrictEqual(expect.objectContaining({ id: assetId, isTrashed: true, isArchived: true }));
const { status } = await request(app).post('/trash/empty').set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(204);
await utils.waitForWebsocketEvent({ event: 'assetDelete', id: assetId });
const after = await getAssetStatistics({ isTrashed: true }, { headers: asBearerAuth(admin.accessToken) });
expect(after.total).toBe(0);
});
});
describe('POST /trash/restore', () => {

View File

@@ -1,11 +1,11 @@
import {
LoginResponseDto,
createStack,
deleteUserAdmin,
getMyUser,
getUserAdmin,
getUserPreferencesAdmin,
login,
updateAssets,
} from '@immich/sdk';
import { Socket } from 'socket.io-client';
import { createUserDto, uuidDto } from 'src/fixtures';
@@ -321,8 +321,8 @@ describe('/admin/users', () => {
utils.createAsset(user.accessToken),
]);
await createStack(
{ stackCreateDto: { assetIds: [asset1.id, asset2.id] } },
await updateAssets(
{ assetBulkUpdateDto: { stackParentId: asset1.id, ids: [asset2.id] } },
{ headers: asBearerAuth(user.accessToken) },
);

Some files were not shown because too many files have changed in this diff.