Compare commits
105 Commits: `feat/unass…` → `chore/hand…`
| Author | SHA1 | Date |
| --- | --- | --- |
|  | 8d2a849edc |  |
|  | fb4fe5d40b |  |
|  | 717961ce7b |  |
|  | 259386cf13 |  |
|  | 7e587c2703 |  |
|  | 4d862525bc |  |
|  | d8d64ecc45 |  |
|  | e1e7de4d4c |  |
|  | df412d60c5 |  |
|  | dbd9782763 |  |
|  | 2519922dd3 |  |
|  | 78b10bbcc6 |  |
|  | 4ec47b4186 |  |
|  | 3c5ba77e86 |  |
|  | 62f8bd80f4 |  |
|  | 69193598cb |  |
|  | f92513be7e |  |
|  | b5e0a1cec8 |  |
|  | 2ffde69caf |  |
|  | 9291c782d5 |  |
|  | b595881084 |  |
|  | 4b49d3a85d |  |
|  | def5f59242 |  |
|  | 9ac2ac2fcb |  |
|  | d5f3d98dfc |  |
|  | 3e118793de |  |
|  | c8f2d994c6 |  |
|  | f2148ddf03 |  |
|  | 2b1b43a7e4 |  |
|  | 7a46f80ddc |  |
|  | c6c480c882 |  |
|  | 4ad97ccded |  |
|  | ca12f3b15f |  |
|  | 86eb2525d7 |  |
|  | 079864dfbe |  |
|  | 051e6cfc0b |  |
|  | 8f42766979 |  |
|  | 7e2a03a8d9 |  |
|  | 1947316b0b |  |
|  | 0f976edf96 |  |
|  | ce985ef8f8 |  |
|  | cf223dc98c |  |
|  | 97ffddee7c |  |
|  | abf6fc25f7 |  |
|  | b2761b12d1 |  |
|  | 588860455f |  |
|  | 643309b27f |  |
|  | e0ec75119f |  |
|  | f446bc8caa |  |
|  | a2bccf23c9 |  |
|  | 6937440772 |  |
|  | 69bce6680f |  |
|  | 454c995e90 |  |
|  | bcff21f72b |  |
|  | 47ec6c41ec |  |
|  | 471cf5eaf7 |  |
|  | b3ee394fdc |  |
|  | 15474e81b2 |  |
|  | bb9e18247b |  |
|  | e7dc1f7968 |  |
|  | 1323c7ee88 |  |
|  | d1135db8cf |  |
|  | 5af67d159f |  |
|  | 203cbbbfdb |  |
|  | 01f52c9021 |  |
|  | 4e16e2520d |  |
|  | 21718cc343 |  |
|  | 7524c746a6 |  |
|  | 69d2fcb43e |  |
|  | 66fced40e7 |  |
|  | 380ed966d8 |  |
|  | afa10ebcb2 |  |
|  | 5ef2553bca |  |
|  | 53562e8439 |  |
|  | 0a53dc412b |  |
|  | 371a5ce0aa |  |
|  | 9fc0a0d935 |  |
|  | 8fc4ce14ab |  |
|  | 4376104e3a |  |
|  | 77d1b9ace6 |  |
|  | 802b4ef190 |  |
|  | 9d0aceb768 |  |
|  | 8315488b99 |  |
|  | 61051ba479 |  |
|  | 30e18aba69 |  |
|  | 12cf116798 |  |
|  | 416399499b |  |
|  | 5463660746 |  |
|  | 5ef144bf79 |  |
|  | db4c66094c |  |
|  | d400925aeb |  |
|  | e459d524a4 |  |
|  | 3f6e61d073 |  |
|  | cc013158d1 |  |
|  | ce524256da |  |
|  | 771df7f09f |  |
|  | ee3530b34c |  |
|  | 8812c3afcf |  |
|  | fbc3790cb6 |  |
|  | 0fc6d69824 |  |
|  | 1f9158c545 |  |
|  | 832084687d |  |
|  | bce916e4c8 |  |
|  | f020d29ab6 |  |
|  | dedf1ecc9d |  |
```diff
@@ -4,6 +4,7 @@
 design/
 docker/
+!docker/scripts
 docs/
 e2e/
 fastlane/
```
`.github/workflows/build-mobile.yml` (vendored, 2 changed lines)

```diff
@@ -45,7 +45,7 @@ jobs:
         uses: subosito/flutter-action@v2
         with:
           channel: 'stable'
-          flutter-version: '3.22.0'
+          flutter-version-file: ./mobile/pubspec.yaml
           cache: true

       - name: Create the Keystore
```
`.github/workflows/docker-cleanup.yml` (vendored, 4 changed lines)

```diff
@@ -35,7 +35,7 @@ jobs:
     steps:
       - name: Clean temporary images
         if: "${{ env.TOKEN != '' }}"
-        uses: stumpylog/image-cleaner-action/ephemeral@v0.6.0
+        uses: stumpylog/image-cleaner-action/ephemeral@v0.7.0
        with:
          token: "${{ env.TOKEN }}"
          owner: "immich-app"
@@ -64,7 +64,7 @@
     steps:
       - name: Clean untagged images
         if: "${{ env.TOKEN != '' }}"
-        uses: stumpylog/image-cleaner-action/untagged@v0.6.0
+        uses: stumpylog/image-cleaner-action/untagged@v0.7.0
        with:
          token: "${{ env.TOKEN }}"
          owner: "immich-app"
```
`.github/workflows/docs-build.yml` (vendored, new file, +43 lines)

```diff
@@ -0,0 +1,43 @@
+name: Docs build
+on:
+  push:
+    branches: [main]
+    paths:
+      - "docs/**"
+  pull_request:
+    branches: [main]
+    paths:
+      - "docs/**"
+  release:
+    types: [published]
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    defaults:
+      run:
+        working-directory: ./docs
+
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - name: Run npm install
+        run: npm ci
+
+      - name: Check formatting
+        run: npm run format
+
+      - name: Run build
+        run: npm run build
+
+      - name: Upload build output
+        uses: actions/upload-artifact@v4
+        with:
+          name: docs-build-output
+          path: docs/build/
+          retention-days: 1
```
`.github/workflows/docs-deploy.yml` (vendored, new file, +189 lines)

```diff
@@ -0,0 +1,189 @@
+name: Docs deploy
+on:
+  workflow_run:
+    workflows: ["Docs build"]
+    types:
+      - completed
+
+jobs:
+  checks:
+    runs-on: ubuntu-latest
+    outputs:
+      parameters: ${{ steps.parameters.outputs.result }}
+    steps:
+      - if: ${{ github.event.workflow_run.conclusion == 'failure' }}
+        run: echo 'The triggering workflow failed' && exit 1
+
+      - name: Determine deploy parameters
+        id: parameters
+        uses: actions/github-script@v7
+        with:
+          script: |
+            const eventType = context.payload.workflow_run.event;
+            const isFork = context.payload.workflow_run.repository.fork;
+
+            let parameters;
+
+            console.log({eventType, isFork});
+
+            if (eventType == "push") {
+              const branch = context.payload.workflow_run.head_branch;
+              console.log({branch});
+              const shouldDeploy = !isFork && branch == "main";
+              parameters = {
+                event: "branch",
+                name: "main",
+                shouldDeploy
+              };
+            } else if (eventType == "pull_request") {
+              let pull_number = context.payload.workflow_run.pull_requests[0]?.number;
+              if(!pull_number) {
+                const response = await github.rest.search.issuesAndPullRequests({q: 'repo:${{ github.repository }} is:pr sha:${{ github.event.workflow_run.head_sha }}',per_page: 1,})
+                const items = response.data.items
+                if (items.length < 1) {
+                  throw new Error("No pull request found for the commit")
+                }
+                const pullRequestNumber = items[0].number
+                console.info("Pull request number is", pullRequestNumber)
+                pull_number = pullRequestNumber
+              }
+              const {data: pr} = await github.rest.pulls.get({
+                owner: context.repo.owner,
+                repo: context.repo.repo,
+                pull_number
+              });
+
+              console.log({pull_number});
+
+              parameters = {
+                event: "pr",
+                name: `pr-${pull_number}`,
+                pr_number: pull_number,
+                shouldDeploy: true
+              };
+            } else if (eventType == "release") {
+              parameters = {
+                event: "release",
+                name: context.payload.workflow_run.head_branch,
+                shouldDeploy: !isFork
+              };
+            }
+
+            console.log(parameters);
+            return parameters;
+
+  deploy:
+    runs-on: ubuntu-latest
+    needs: checks
+    if: ${{ fromJson(needs.checks.outputs.parameters).shouldDeploy }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - name: Load parameters
+        id: parameters
+        uses: actions/github-script@v7
+        with:
+          script: |
+            const json = `${{ needs.checks.outputs.parameters }}`;
+            const parameters = JSON.parse(json);
+            core.setOutput("event", parameters.event);
+            core.setOutput("name", parameters.name);
+            core.setOutput("shouldDeploy", parameters.shouldDeploy);
+
+      - run: |
+          echo "Starting docs deployment for ${{ steps.parameters.outputs.event }} ${{ steps.parameters.outputs.name }}"
+
+      - name: Download artifact
+        uses: actions/github-script@v7
+        with:
+          script: |
+            let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({
+              owner: context.repo.owner,
+              repo: context.repo.repo,
+              run_id: context.payload.workflow_run.id,
+            });
+            let matchArtifact = allArtifacts.data.artifacts.filter((artifact) => {
+              return artifact.name == "docs-build-output"
+            })[0];
+            let download = await github.rest.actions.downloadArtifact({
+              owner: context.repo.owner,
+              repo: context.repo.repo,
+              artifact_id: matchArtifact.id,
+              archive_format: 'zip',
+            });
+            let fs = require('fs');
+            fs.writeFileSync(`${process.env.GITHUB_WORKSPACE}/docs-build-output.zip`, Buffer.from(download.data));
+
+      - name: Unzip artifact
+        run: unzip "${{ github.workspace }}/docs-build-output.zip" -d "${{ github.workspace }}/docs/build"
+
+      - name: Deploy Docs Subdomain
+        env:
+          TF_VAR_prefix_name: ${{ steps.parameters.outputs.name }}
+          TF_VAR_prefix_event_type: ${{ steps.parameters.outputs.event }}
+          CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
+          CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
+          TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
+        uses: gruntwork-io/terragrunt-action@v2
+        with:
+          tg_version: "0.58.12"
+          tofu_version: "1.7.1"
+          tg_dir: "deployment/modules/cloudflare/docs"
+          tg_command: "apply"
+
+      - name: Deploy Docs Subdomain Output
+        id: docs-output
+        env:
+          TF_VAR_prefix_name: ${{ steps.parameters.outputs.name }}
+          TF_VAR_prefix_event_type: ${{ steps.parameters.outputs.event }}
+          CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
+          CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
+          TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
+        uses: gruntwork-io/terragrunt-action@v2
+        with:
+          tg_version: "0.58.12"
+          tofu_version: "1.7.1"
+          tg_dir: "deployment/modules/cloudflare/docs"
+          tg_command: "output -json"
+
+      - name: Output Cleaning
+        id: clean
+        run: |
+          TG_OUT=$(echo '${{ steps.docs-output.outputs.tg_action_output }}' | sed 's|%0A|\n|g ; s|%3C|<|g' | jq -c .)
+          echo "output=$TG_OUT" >> $GITHUB_OUTPUT
+
+      - name: Publish to Cloudflare Pages
+        uses: cloudflare/pages-action@v1
+        with:
+          apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN_PAGES_UPLOAD }}
+          accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
+          projectName: ${{ fromJson(steps.clean.outputs.output).pages_project_name.value }}
+          workingDirectory: "docs"
+          directory: "build"
+          branch: ${{ steps.parameters.outputs.name }}
+          wranglerVersion: '3'
+
+      - name: Deploy Docs Release Domain
+        if: ${{ steps.parameters.outputs.event == 'release' }}
+        env:
+          TF_VAR_prefix_name: ${{ steps.parameters.outputs.name }}
+          CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
+          CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
+          TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
+        uses: gruntwork-io/terragrunt-action@v2
+        with:
+          tg_version: '0.58.12'
+          tofu_version: '1.7.1'
+          tg_dir: 'deployment/modules/cloudflare/docs-release'
+          tg_command: 'apply'
+
+      - name: Comment
+        uses: actions-cool/maintain-one-comment@v3
+        if: ${{ steps.parameters.outputs.event == 'pr' }}
+        with:
+          number: ${{ fromJson(needs.checks.outputs.parameters).pr_number }}
+          body: |
+            📖 Documentation deployed to [${{ fromJson(steps.clean.outputs.output).immich_app_branch_subdomain.value }}](https://${{ fromJson(steps.clean.outputs.output).immich_app_branch_subdomain.value }})
+          emojis: 'rocket'
+          body-include: '<!-- Docs PR URL -->'
```
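The `checks` job condenses every trigger into a single JSON output that the `deploy` job consumes: `{event, name, pr_number?, shouldDeploy}`. A minimal TypeScript sketch of that contract and decision logic; the `WorkflowRun` shape here is our simplification of the webhook payload, not part of the workflow:

```typescript
// Sketch of the decision made by the `checks` job above. `WorkflowRun` is a
// simplified stand-in for the workflow_run webhook payload.
interface WorkflowRun {
  event: 'push' | 'pull_request' | 'release';
  headBranch: string;
  isFork: boolean;
  prNumber?: number;
}

interface DeployParameters {
  event: 'branch' | 'pr' | 'release';
  name: string;
  pr_number?: number;
  shouldDeploy: boolean;
}

function determineParameters(run: WorkflowRun): DeployParameters {
  switch (run.event) {
    case 'push':
      // Only pushes to main on the upstream repository deploy.
      return { event: 'branch', name: 'main', shouldDeploy: !run.isFork && run.headBranch === 'main' };
    case 'pull_request':
      // PR previews always deploy under a pr-<number> prefix; the workflow
      // falls back to a commit-SHA search when the payload lacks the number.
      if (run.prNumber === undefined) throw new Error('No pull request found for the commit');
      return { event: 'pr', name: `pr-${run.prNumber}`, pr_number: run.prNumber, shouldDeploy: true };
    case 'release':
      // Releases deploy under the tag name unless they come from a fork.
      return { event: 'release', name: run.headBranch, shouldDeploy: !run.isFork };
  }
}
```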
`.github/workflows/docs-destroy.yml` (vendored, new file, +32 lines)

```diff
@@ -0,0 +1,32 @@
+name: Docs destroy
+on:
+  pull_request_target:
+    types: [closed]
+
+jobs:
+  deploy:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - name: Destroy Docs Subdomain
+        env:
+          TF_VAR_prefix_name: "pr-${{ github.event.number }}"
+          TF_VAR_prefix_event_type: "pr"
+          CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
+          CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
+          TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
+        uses: gruntwork-io/terragrunt-action@v2
+        with:
+          tg_version: "0.58.12"
+          tofu_version: "1.7.1"
+          tg_dir: "deployment/modules/cloudflare/docs"
+          tg_command: "destroy"
+
+      - name: Comment
+        uses: actions-cool/maintain-one-comment@v3
+        with:
+          number: ${{ github.event.number }}
+          delete: true
+          body-include: '<!-- Docs PR URL -->'
```
`.github/workflows/static_analysis.yml` (vendored, 2 changed lines)

```diff
@@ -23,7 +23,7 @@ jobs:
         uses: subosito/flutter-action@v2
         with:
           channel: 'stable'
-          flutter-version: '3.22.0'
+          flutter-version-file: ./mobile/pubspec.yaml

       - name: Install dependencies
         run: dart pub get
```
`.github/workflows/test.yml` (vendored, 24 changed lines)

```diff
@@ -10,28 +10,6 @@ concurrency:
   cancel-in-progress: true

 jobs:
-  doc-tests:
-    name: Docs
-    runs-on: ubuntu-latest
-    defaults:
-      run:
-        working-directory: ./docs
-
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v4
-
-      - name: Run npm install
-        run: npm ci
-
-      - name: Run formatter
-        run: npm run format
-        if: ${{ !cancelled() }}
-
-      - name: Run build
-        run: npm run build
-        if: ${{ !cancelled() }}
-
   server-unit-tests:
     name: Server
     runs-on: ubuntu-latest
@@ -208,7 +186,7 @@
         uses: subosito/flutter-action@v2
         with:
           channel: 'stable'
-          flutter-version: '3.22.0'
+          flutter-version-file: ./mobile/pubspec.yaml
       - name: Run tests
         working-directory: ./mobile
         run: flutter test -j 1
```
```diff
@@ -31,6 +31,7 @@
   <a href="readme_i18n/README_zh_CN.md">中文</a>
   <a href="readme_i18n/README_ru_RU.md">Русский</a>
   <a href="readme_i18n/README_pt_BR.md">Português Brasileiro</a>
   <a href="readme_i18n/README_sv_SE.md">Svenska</a>
+  <a href="readme_i18n/README_ar_JO.md">العربية</a>
 </p>
```
```diff
@@ -1 +1 @@
-20.13
+20.14
```
```diff
@@ -1,4 +1,4 @@
-FROM node:20-alpine3.19@sha256:291e84d956f1aff38454bbd3da38941461ad569a185c20aa289f71f37ea08e23 as core
+FROM node:20-alpine3.19@sha256:696ae41fb5880949a15ade7879a2deae93b3f0723f757bdb5b8a9e4a744ce27f as core

 WORKDIR /usr/src/open-api/typescript-sdk
 COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./
```
`cli/package-lock.json` (generated, 106 changed lines)

```diff
@@ -54,7 +54,7 @@
         "@oazapfts/runtime": "^1.0.2"
       },
       "devDependencies": {
-        "@types/node": "^20.12.12",
+        "@types/node": "^20.12.13",
         "typescript": "^5.3.3"
       }
     },
@@ -1138,9 +1138,9 @@
       }
     },
     "node_modules/@types/node": {
-      "version": "20.12.12",
-      "resolved": "https://registry.npmjs.org/@types/node/-/node-20.12.12.tgz",
-      "integrity": "sha512-eWLDGF/FOSPtAvEqeRAQ4C8LSA7M1I7i0ky1I8U7kD1J5ITyW3AsRhQrKVoWf5pFKZ2kILsEGJhsI9r93PYnOw==",
+      "version": "20.12.13",
+      "resolved": "https://registry.npmjs.org/@types/node/-/node-20.12.13.tgz",
+      "integrity": "sha512-gBGeanV41c1L171rR7wjbMiEpEI/l5XFQdLLfhr/REwpgDy/4U8y89+i8kRiLzDyZdOkXh+cRaTetUnCYutoXA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1154,17 +1154,17 @@
       "dev": true
     },
     "node_modules/@typescript-eslint/eslint-plugin": {
-      "version": "7.9.0",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.9.0.tgz",
-      "integrity": "sha512-6e+X0X3sFe/G/54aC3jt0txuMTURqLyekmEHViqyA2VnxhLMpvA6nqmcjIy+Cr9tLDHPssA74BP5Mx9HQIxBEA==",
+      "version": "7.11.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.11.0.tgz",
+      "integrity": "sha512-P+qEahbgeHW4JQ/87FuItjBj8O3MYv5gELDzr8QaQ7fsll1gSMTYb6j87MYyxwf3DtD7uGFB9ShwgmCJB5KmaQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
         "@eslint-community/regexpp": "^4.10.0",
-        "@typescript-eslint/scope-manager": "7.9.0",
-        "@typescript-eslint/type-utils": "7.9.0",
-        "@typescript-eslint/utils": "7.9.0",
-        "@typescript-eslint/visitor-keys": "7.9.0",
+        "@typescript-eslint/scope-manager": "7.11.0",
+        "@typescript-eslint/type-utils": "7.11.0",
+        "@typescript-eslint/utils": "7.11.0",
+        "@typescript-eslint/visitor-keys": "7.11.0",
         "graphemer": "^1.4.0",
         "ignore": "^5.3.1",
         "natural-compare": "^1.4.0",
@@ -1188,16 +1188,16 @@
       }
     },
     "node_modules/@typescript-eslint/parser": {
-      "version": "7.9.0",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.9.0.tgz",
-      "integrity": "sha512-qHMJfkL5qvgQB2aLvhUSXxbK7OLnDkwPzFalg458pxQgfxKDfT1ZDbHQM/I6mDIf/svlMkj21kzKuQ2ixJlatQ==",
+      "version": "7.11.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.11.0.tgz",
+      "integrity": "sha512-yimw99teuaXVWsBcPO1Ais02kwJ1jmNA1KxE7ng0aT7ndr1pT1wqj0OJnsYVGKKlc4QJai86l/025L6z8CljOg==",
       "dev": true,
       "license": "BSD-2-Clause",
       "dependencies": {
-        "@typescript-eslint/scope-manager": "7.9.0",
-        "@typescript-eslint/types": "7.9.0",
-        "@typescript-eslint/typescript-estree": "7.9.0",
-        "@typescript-eslint/visitor-keys": "7.9.0",
+        "@typescript-eslint/scope-manager": "7.11.0",
+        "@typescript-eslint/types": "7.11.0",
+        "@typescript-eslint/typescript-estree": "7.11.0",
+        "@typescript-eslint/visitor-keys": "7.11.0",
         "debug": "^4.3.4"
       },
       "engines": {
@@ -1217,14 +1217,14 @@
       }
     },
     "node_modules/@typescript-eslint/scope-manager": {
-      "version": "7.9.0",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.9.0.tgz",
-      "integrity": "sha512-ZwPK4DeCDxr3GJltRz5iZejPFAAr4Wk3+2WIBaj1L5PYK5RgxExu/Y68FFVclN0y6GGwH8q+KgKRCvaTmFBbgQ==",
+      "version": "7.11.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.11.0.tgz",
+      "integrity": "sha512-27tGdVEiutD4POirLZX4YzT180vevUURJl4wJGmm6TrQoiYwuxTIY98PBp6L2oN+JQxzE0URvYlzJaBHIekXAw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@typescript-eslint/types": "7.9.0",
-        "@typescript-eslint/visitor-keys": "7.9.0"
+        "@typescript-eslint/types": "7.11.0",
+        "@typescript-eslint/visitor-keys": "7.11.0"
       },
       "engines": {
         "node": "^18.18.0 || >=20.0.0"
@@ -1235,14 +1235,14 @@
       }
     },
     "node_modules/@typescript-eslint/type-utils": {
-      "version": "7.9.0",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.9.0.tgz",
-      "integrity": "sha512-6Qy8dfut0PFrFRAZsGzuLoM4hre4gjzWJB6sUvdunCYZsYemTkzZNwF1rnGea326PHPT3zn5Lmg32M/xfJfByA==",
+      "version": "7.11.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.11.0.tgz",
+      "integrity": "sha512-WmppUEgYy+y1NTseNMJ6mCFxt03/7jTOy08bcg7bxJJdsM4nuhnchyBbE8vryveaJUf62noH7LodPSo5Z0WUCg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@typescript-eslint/typescript-estree": "7.9.0",
-        "@typescript-eslint/utils": "7.9.0",
+        "@typescript-eslint/typescript-estree": "7.11.0",
+        "@typescript-eslint/utils": "7.11.0",
         "debug": "^4.3.4",
         "ts-api-utils": "^1.3.0"
       },
@@ -1263,9 +1263,9 @@
       }
     },
     "node_modules/@typescript-eslint/types": {
-      "version": "7.9.0",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.9.0.tgz",
-      "integrity": "sha512-oZQD9HEWQanl9UfsbGVcZ2cGaR0YT5476xfWE0oE5kQa2sNK2frxOlkeacLOTh9po4AlUT5rtkGyYM5kew0z5w==",
+      "version": "7.11.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.11.0.tgz",
+      "integrity": "sha512-MPEsDRZTyCiXkD4vd3zywDCifi7tatc4K37KqTprCvaXptP7Xlpdw0NR2hRJTetG5TxbWDB79Ys4kLmHliEo/w==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -1277,14 +1277,14 @@
       }
     },
     "node_modules/@typescript-eslint/typescript-estree": {
-      "version": "7.9.0",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.9.0.tgz",
-      "integrity": "sha512-zBCMCkrb2YjpKV3LA0ZJubtKCDxLttxfdGmwZvTqqWevUPN0FZvSI26FalGFFUZU/9YQK/A4xcQF9o/VVaCKAg==",
+      "version": "7.11.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.11.0.tgz",
+      "integrity": "sha512-cxkhZ2C/iyi3/6U9EPc5y+a6csqHItndvN/CzbNXTNrsC3/ASoYQZEt9uMaEp+xFNjasqQyszp5TumAVKKvJeQ==",
       "dev": true,
       "license": "BSD-2-Clause",
       "dependencies": {
-        "@typescript-eslint/types": "7.9.0",
-        "@typescript-eslint/visitor-keys": "7.9.0",
+        "@typescript-eslint/types": "7.11.0",
+        "@typescript-eslint/visitor-keys": "7.11.0",
         "debug": "^4.3.4",
         "globby": "^11.1.0",
         "is-glob": "^4.0.3",
@@ -1306,16 +1306,16 @@
       }
     },
     "node_modules/@typescript-eslint/utils": {
-      "version": "7.9.0",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.9.0.tgz",
-      "integrity": "sha512-5KVRQCzZajmT4Ep+NEgjXCvjuypVvYHUW7RHlXzNPuak2oWpVoD1jf5xCP0dPAuNIchjC7uQyvbdaSTFaLqSdA==",
+      "version": "7.11.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.11.0.tgz",
+      "integrity": "sha512-xlAWwPleNRHwF37AhrZurOxA1wyXowW4PqVXZVUNCLjB48CqdPJoJWkrpH2nij9Q3Lb7rtWindtoXwxjxlKKCA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
         "@eslint-community/eslint-utils": "^4.4.0",
-        "@typescript-eslint/scope-manager": "7.9.0",
-        "@typescript-eslint/types": "7.9.0",
-        "@typescript-eslint/typescript-estree": "7.9.0"
+        "@typescript-eslint/scope-manager": "7.11.0",
+        "@typescript-eslint/types": "7.11.0",
+        "@typescript-eslint/typescript-estree": "7.11.0"
       },
       "engines": {
         "node": "^18.18.0 || >=20.0.0"
@@ -1329,13 +1329,13 @@
       }
     },
     "node_modules/@typescript-eslint/visitor-keys": {
-      "version": "7.9.0",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.9.0.tgz",
-      "integrity": "sha512-iESPx2TNLDNGQLyjKhUvIKprlP49XNEK+MvIf9nIO7ZZaZdbnfWKHnXAgufpxqfA0YryH8XToi4+CjBgVnFTSQ==",
+      "version": "7.11.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.11.0.tgz",
+      "integrity": "sha512-7syYk4MzjxTEk0g/w3iqtgxnFQspDJfn6QKD36xMuuhTzjcxY7F8EmBLnALjVyaOF1/bVocu3bS/2/F7rXrveQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@typescript-eslint/types": "7.9.0",
+        "@typescript-eslint/types": "7.11.0",
         "eslint-visitor-keys": "^3.4.3"
       },
       "engines": {
@@ -1801,10 +1801,11 @@
       "dev": true
     },
     "node_modules/commander": {
-      "version": "12.0.0",
-      "resolved": "https://registry.npmjs.org/commander/-/commander-12.0.0.tgz",
-      "integrity": "sha512-MwVNWlYjDTtOjX5PiD7o5pK0UrFU/OYgcJfjjK4RaHZETNtjJqrZa9Y9ds88+A+f+d5lv+561eZ+yCKoS3gbAA==",
+      "version": "12.1.0",
+      "resolved": "https://registry.npmjs.org/commander/-/commander-12.1.0.tgz",
+      "integrity": "sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA==",
       "dev": true,
+      "license": "MIT",
       "engines": {
         "node": ">=18"
       }
@@ -4280,10 +4281,11 @@
       }
     },
    "node_modules/vite": {
-      "version": "5.2.11",
-      "resolved": "https://registry.npmjs.org/vite/-/vite-5.2.11.tgz",
-      "integrity": "sha512-HndV31LWW05i1BLPMUCE1B9E9GFbOu1MbenhS58FuK6owSO5qHm7GiCotrNY1YE5rMeQSFBGmT5ZaLEjFizgiQ==",
+      "version": "5.2.12",
+      "resolved": "https://registry.npmjs.org/vite/-/vite-5.2.12.tgz",
+      "integrity": "sha512-/gC8GxzxMK5ntBwb48pR32GGhENnjtY30G4A0jemunsBkiEZFw60s8InGpN8gkhHEkjnRK1aSAxeQgwvFhUHAA==",
       "dev": true,
+      "license": "MIT",
       "dependencies": {
         "esbuild": "^0.20.1",
         "postcss": "^8.4.38",
```

```diff
@@ -62,6 +62,6 @@
     "lodash-es": "^4.17.21"
   },
   "volta": {
-    "node": "20.13.1"
+    "node": "20.14.0"
   }
 }
```
```diff
@@ -1,7 +1,8 @@
 import {
   Action,
   AssetBulkUploadCheckResult,
-  AssetFileUploadResponseDto,
+  AssetMediaResponseDto,
+  AssetMediaStatus,
   addAssetsToAlbum,
   checkBulkUpload,
   createAlbum,
@@ -167,7 +168,7 @@ const uploadFiles = async (files: string[], { dryRun, concurrency }: UploadOptio

     newAssets.push({ id: response.id, filepath });

-    if (response.duplicate) {
+    if (response.status === AssetMediaStatus.Duplicate) {
       duplicateCount++;
       duplicateSize += stats.size ?? 0;
     } else {
@@ -192,7 +193,7 @@ const uploadFiles = async (files: string[], { dryRun, concurrency }: UploadOptio
   return newAssets;
 };

-const uploadFile = async (input: string, stats: Stats): Promise<AssetFileUploadResponseDto> => {
+const uploadFile = async (input: string, stats: Stats): Promise<AssetMediaResponseDto> => {
   const { baseUrl, headers } = defaults;

   const assetPath = path.parse(input);
@@ -225,7 +226,7 @@ const uploadFile = async (input: string, stats: Stats): Promise<AssetFileUploadR
     formData.append('sidecarData', sidecarData);
   }

-  const response = await fetch(`${baseUrl}/asset/upload`, {
+  const response = await fetch(`${baseUrl}/assets`, {
     method: 'post',
     redirect: 'error',
     headers: headers as Record<string, string>,
```
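For context on the CLI change above: uploads now go to `/assets` and report a status enum instead of a boolean `duplicate` flag. A simplified sketch of the new flow; the DTO shapes and status values here are trimmed illustrations, the real types come from the generated SDK:

```typescript
// Simplified sketch of the new upload flow, not the actual CLI source.
type AssetMediaStatus = 'created' | 'duplicate';

interface AssetMediaResponseDto {
  id: string;
  status: AssetMediaStatus;
}

async function uploadAsset(
  baseUrl: string,
  headers: Record<string, string>,
  formData: FormData,
): Promise<AssetMediaResponseDto> {
  // The endpoint moved from `${baseUrl}/asset/upload` to `${baseUrl}/assets`.
  const response = await fetch(`${baseUrl}/assets`, { method: 'post', redirect: 'error', headers, body: formData });
  const dto = (await response.json()) as AssetMediaResponseDto;
  // The boolean `duplicate` flag is gone; callers compare the status enum instead.
  if (dto.status === 'duplicate') {
    console.log(`Skipped duplicate asset ${dto.id}`);
  }
  return dto;
}
```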
```diff
@@ -1,37 +0,0 @@
-import { version } from '../package.json';
-
-export interface ICliVersion {
-  major: number;
-  minor: number;
-  patch: number;
-}
-
-export class CliVersion implements ICliVersion {
-  constructor(
-    public readonly major: number,
-    public readonly minor: number,
-    public readonly patch: number,
-  ) {}
-
-  toString() {
-    return `${this.major}.${this.minor}.${this.patch}`;
-  }
-
-  toJSON() {
-    const { major, minor, patch } = this;
-    return { major, minor, patch };
-  }
-
-  static fromString(version: string): CliVersion {
-    const regex = /v?(?<major>\d+)\.(?<minor>\d+)\.(?<patch>\d+)/i;
-    const matchResult = version.match(regex);
-    if (matchResult) {
-      const [, major, minor, patch] = matchResult.map(Number);
-      return new CliVersion(major, minor, patch);
-    } else {
-      throw new Error(`Invalid version format: ${version}`);
-    }
-  }
-}
-
-export const cliVersion = CliVersion.fromString(version);
```
`deployment/.gitignore` (vendored, new file, +38 lines)

```diff
@@ -0,0 +1,38 @@
+# OpenTofu
+
+# Local .terraform directories
+**/.terraform/*
+
+# .tfstate files
+*.tfstate
+*.tfstate.*
+
+# Crash log files
+crash.log
+crash.*.log
+
+# Ignore override files as they are usually used to override resources locally and so
+# are not checked in
+override.tf
+override.tf.json
+*_override.tf
+*_override.tf.json
+
+# Include override files you do wish to add to version control using negated pattern
+# !example_override.tf
+
+# Include tfplan files to ignore the plan output of command: terraform plan -out=tfplan
+# example: *tfplan*
+
+# Ignore CLI configuration files
+.terraformrc
+terraform.rc
+
+# Terragrunt
+
+# terragrunt cache directories
+**/.terragrunt-cache/*
+
+# Terragrunt debug output file (when using `--terragrunt-debug` option)
+# See: https://terragrunt.gruntwork.io/docs/reference/cli-options/#terragrunt-debug
+terragrunt-debug.tfvars.json
```
`deployment/modules/cloudflare/docs-release/.terraform.lock.hcl` (generated, new file, +38 lines)

```diff
@@ -0,0 +1,38 @@
+# This file is maintained automatically by "tofu init".
+# Manual edits may be lost in future updates.
+
+provider "registry.opentofu.org/cloudflare/cloudflare" {
+  version     = "4.34.0"
+  constraints = "4.34.0"
+  hashes = [
+    "h1:+W0+Xe1AUh7yvHjDbgR9T7CY1UbBC3Y6U7Eo+ucLnJM=",
+    "h1:2+1lKObDDdFZRluvROF3RKtXD66CFT3PfnHOvR6CmfA=",
+    "h1:7vluN2wmw8D9nI11YwTgoGv3hGDXlkt8xqQ4L/JABeQ=",
+    "h1:B0Urm8ZKTJ8cXzSCtEpJ+o+LsD8MXaD6LU59qVbh50Q=",
+    "h1:FpGLCm5oF12FaRti3E4iQJlkVbdCC7toyGVuH8og7KY=",
+    "h1:FunTmrCMDy+rom7YskY0WiL5/Y164zFrrD9xnBxU5NY=",
+    "h1:GrxZhEb+5HzmHF/BvZBdGKBJy6Wyjme0+ABVDz/63to=",
+    "h1:J36dda2K42/oTfHuZ4jKkW5+nI6BTWFRUvo60P17NJg=",
+    "h1:Kq0Wyn+j6zoQeghMYixbnfnyP9ZSIEJbOCzMbaCiAQQ=",
+    "h1:TKxunXCiS/z105sN/kBNFwU6tIKD67JKJ3ZKjwzoCuI=",
+    "h1:TR0URKFQxsRO5/v7bKm5hkD/CTTjsG7aVGllL/Mf25c=",
+    "h1:V+3Qs0Reb6r+8p4XjE5ZFDWYrOIN0x5SwORz4wvHOJ4=",
+    "h1:mZB3Ui7V/lPQMQK53eBOjIHcrul74252dT06Kgn3J+s=",
+    "h1:wJwZrIXxoki8omXLJ7XA7B1KaSrtcLMJp090fRtFRAc=",
+    "zh:02aa46743c1585ada8faa7db23af68ea614053a506f88f05d1090ff5e0e68076",
+    "zh:1e1a545e83e6457a0e15357b23139bc288fb4fbd5e9a5ddfedc95a6a0216b08c",
+    "zh:29eef2621e0b1501f620e615bf73b1b90d5417d745e38af63634bc03250faf87",
+    "zh:3c20989d7e1e141882e6091384bf85fdc83f70f3d29e3e047c493a07de992095",
+    "zh:3d39619379ba29c7ffb15196f0ea72a04c84cfcdf4b39ac42ac4cf4c19f3eae2",
+    "zh:805f4a2774e9279c590b8214aabe6df9dcc22bb995df2530513f2f78c647ce75",
+    "zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f",
+    "zh:8af716f8655a57aa986861a8a7fa1d724594a284bd77c870eaea4db5f8b9732d",
+    "zh:a3d13c93b4e6ee6004782debaa9a17f990f2fe8ec8ba545c232818bb6064aba9",
+    "zh:bfa136acf82d3719473c0064446cc16d1b0303d98b06f55f503b7abeebceadb1",
+    "zh:ca6cf9254ae5436f2efbc01a0e3f7e4aa3c08b45182037b3eb3eb9539b2f7aec",
+    "zh:cba32d5de02674004e0a5955bd5222016d9991ca0553d4bd3bea517cd9def6ab",
+    "zh:d22c8cd527c6d0e84567f57be5911792e2fcd5969e3bba3747489f18bb16705b",
+    "zh:e4eeede9b3e72cdadd6cc252d4cbcf41baee6ecfd12bacd927e2dcbe733ab210",
+    "zh:facdaa787a69f86203cd3cc6922baea0b4a18bd9c36b0a8162e2e88ef6c90655",
+  ]
+}
```
`deployment/modules/cloudflare/docs-release/config.tf` (new file, +11 lines)

```diff
@@ -0,0 +1,11 @@
+terraform {
+  backend "pg" {}
+  required_version = "~> 1.7"
+
+  required_providers {
+    cloudflare = {
+      source  = "cloudflare/cloudflare"
+      version = "4.34.0"
+    }
+  }
+}
```
`deployment/modules/cloudflare/docs-release/domain.tf` (new file, +14 lines)

```diff
@@ -0,0 +1,14 @@
+resource "cloudflare_pages_domain" "immich_app_release_domain" {
+  account_id   = var.cloudflare_account_id
+  project_name = data.terraform_remote_state.cloudflare_account.outputs.immich_app_archive_pages_project_name
+  domain       = "immich.app"
+}
+
+resource "cloudflare_record" "immich_app_release_domain" {
+  name    = "immich.app"
+  proxied = true
+  ttl     = 1
+  type    = "CNAME"
+  value   = data.terraform_remote_state.cloudflare_immich_app_docs.outputs.immich_app_branch_pages_hostname
+  zone_id = data.terraform_remote_state.cloudflare_account.outputs.immich_app_zone_id
+}
```
`deployment/modules/cloudflare/docs-release/providers.tf` (new file, +3 lines)

```diff
@@ -0,0 +1,3 @@
+provider "cloudflare" {
+  api_token = data.terraform_remote_state.api_keys_state.outputs.terraform_key_cloudflare_docs
+}
```
`deployment/modules/cloudflare/docs-release/remote-state.tf` (new file, +27 lines)

```diff
@@ -0,0 +1,27 @@
+data "terraform_remote_state" "api_keys_state" {
+  backend = "pg"
+
+  config = {
+    conn_str    = var.tf_state_postgres_conn_str
+    schema_name = "prod_cloudflare_api_keys"
+  }
+}
+
+data "terraform_remote_state" "cloudflare_account" {
+  backend = "pg"
+
+  config = {
+    conn_str    = var.tf_state_postgres_conn_str
+    schema_name = "prod_cloudflare_account"
+  }
+}
+
+data "terraform_remote_state" "cloudflare_immich_app_docs" {
+  backend = "pg"
+
+  config = {
+    conn_str    = var.tf_state_postgres_conn_str
+    schema_name = "prod_cloudflare_immich_app_docs_${var.prefix_name}"
+  }
+}
```
`deployment/modules/cloudflare/docs-release/terragrunt.hcl` (new file, +20 lines)

```diff
@@ -0,0 +1,20 @@
+terraform {
+  source = "."
+
+  extra_arguments "custom_vars" {
+    commands = get_terraform_commands_that_need_vars()
+  }
+}
+
+include {
+  path = find_in_parent_folders("state.hcl")
+}
+
+remote_state {
+  backend = "pg"
+
+  config = {
+    conn_str    = get_env("TF_STATE_POSTGRES_CONN_STR")
+    schema_name = "prod_cloudflare_immich_app_docs_release"
+  }
+}
```
`deployment/modules/cloudflare/docs-release/variables.tf` (new file, +4 lines)

```diff
@@ -0,0 +1,4 @@
+variable "cloudflare_account_id" {}
+variable "tf_state_postgres_conn_str" {}
+
+variable "prefix_name" {}
```
`deployment/modules/cloudflare/docs/.terraform.lock.hcl` (generated, new file, +38 lines; identical to the docs-release lock file)

```diff
@@ -0,0 +1,38 @@
+# This file is maintained automatically by "tofu init".
+# Manual edits may be lost in future updates.
+
+provider "registry.opentofu.org/cloudflare/cloudflare" {
+  version     = "4.34.0"
+  constraints = "4.34.0"
+  hashes = [
+    "h1:+W0+Xe1AUh7yvHjDbgR9T7CY1UbBC3Y6U7Eo+ucLnJM=",
+    "h1:2+1lKObDDdFZRluvROF3RKtXD66CFT3PfnHOvR6CmfA=",
+    "h1:7vluN2wmw8D9nI11YwTgoGv3hGDXlkt8xqQ4L/JABeQ=",
+    "h1:B0Urm8ZKTJ8cXzSCtEpJ+o+LsD8MXaD6LU59qVbh50Q=",
+    "h1:FpGLCm5oF12FaRti3E4iQJlkVbdCC7toyGVuH8og7KY=",
+    "h1:FunTmrCMDy+rom7YskY0WiL5/Y164zFrrD9xnBxU5NY=",
+    "h1:GrxZhEb+5HzmHF/BvZBdGKBJy6Wyjme0+ABVDz/63to=",
+    "h1:J36dda2K42/oTfHuZ4jKkW5+nI6BTWFRUvo60P17NJg=",
+    "h1:Kq0Wyn+j6zoQeghMYixbnfnyP9ZSIEJbOCzMbaCiAQQ=",
+    "h1:TKxunXCiS/z105sN/kBNFwU6tIKD67JKJ3ZKjwzoCuI=",
+    "h1:TR0URKFQxsRO5/v7bKm5hkD/CTTjsG7aVGllL/Mf25c=",
+    "h1:V+3Qs0Reb6r+8p4XjE5ZFDWYrOIN0x5SwORz4wvHOJ4=",
+    "h1:mZB3Ui7V/lPQMQK53eBOjIHcrul74252dT06Kgn3J+s=",
+    "h1:wJwZrIXxoki8omXLJ7XA7B1KaSrtcLMJp090fRtFRAc=",
+    "zh:02aa46743c1585ada8faa7db23af68ea614053a506f88f05d1090ff5e0e68076",
+    "zh:1e1a545e83e6457a0e15357b23139bc288fb4fbd5e9a5ddfedc95a6a0216b08c",
+    "zh:29eef2621e0b1501f620e615bf73b1b90d5417d745e38af63634bc03250faf87",
+    "zh:3c20989d7e1e141882e6091384bf85fdc83f70f3d29e3e047c493a07de992095",
+    "zh:3d39619379ba29c7ffb15196f0ea72a04c84cfcdf4b39ac42ac4cf4c19f3eae2",
+    "zh:805f4a2774e9279c590b8214aabe6df9dcc22bb995df2530513f2f78c647ce75",
+    "zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f",
+    "zh:8af716f8655a57aa986861a8a7fa1d724594a284bd77c870eaea4db5f8b9732d",
+    "zh:a3d13c93b4e6ee6004782debaa9a17f990f2fe8ec8ba545c232818bb6064aba9",
+    "zh:bfa136acf82d3719473c0064446cc16d1b0303d98b06f55f503b7abeebceadb1",
+    "zh:ca6cf9254ae5436f2efbc01a0e3f7e4aa3c08b45182037b3eb3eb9539b2f7aec",
+    "zh:cba32d5de02674004e0a5955bd5222016d9991ca0553d4bd3bea517cd9def6ab",
+    "zh:d22c8cd527c6d0e84567f57be5911792e2fcd5969e3bba3747489f18bb16705b",
+    "zh:e4eeede9b3e72cdadd6cc252d4cbcf41baee6ecfd12bacd927e2dcbe733ab210",
+    "zh:facdaa787a69f86203cd3cc6922baea0b4a18bd9c36b0a8162e2e88ef6c90655",
+  ]
+}
```
`deployment/modules/cloudflare/docs/config.tf` (new file, +11 lines)

```diff
@@ -0,0 +1,11 @@
+terraform {
+  backend "pg" {}
+  required_version = "~> 1.7"
+
+  required_providers {
+    cloudflare = {
+      source  = "cloudflare/cloudflare"
+      version = "4.34.0"
+    }
+  }
+}
```
`deployment/modules/cloudflare/docs/domain.tf` (new file, +26 lines)

```diff
@@ -0,0 +1,26 @@
+resource "cloudflare_pages_domain" "immich_app_branch_domain" {
+  account_id   = var.cloudflare_account_id
+  project_name = local.is_release ? data.terraform_remote_state.cloudflare_account.outputs.immich_app_archive_pages_project_name : data.terraform_remote_state.cloudflare_account.outputs.immich_app_preview_pages_project_name
+  domain       = "${var.prefix_name}.${local.deploy_domain_prefix}.immich.app"
+}
+
+resource "cloudflare_record" "immich_app_branch_subdomain" {
+  name    = "${var.prefix_name}.${local.deploy_domain_prefix}.immich.app"
+  proxied = true
+  ttl     = 1
+  type    = "CNAME"
+  value   = "${replace(var.prefix_name, "/\\/|\\./", "-")}.${local.is_release ? data.terraform_remote_state.cloudflare_account.outputs.immich_app_archive_pages_project_subdomain : data.terraform_remote_state.cloudflare_account.outputs.immich_app_preview_pages_project_subdomain}"
+  zone_id = data.terraform_remote_state.cloudflare_account.outputs.immich_app_zone_id
+}
+
+output "immich_app_branch_subdomain" {
+  value = cloudflare_record.immich_app_branch_subdomain.hostname
+}
+
+output "immich_app_branch_pages_hostname" {
+  value = cloudflare_record.immich_app_branch_subdomain.value
+}
+
+output "pages_project_name" {
+  value = cloudflare_pages_domain.immich_app_branch_domain.project_name
+}
```
`deployment/modules/cloudflare/docs/locals.tf` (new file, +7 lines)

```diff
@@ -0,0 +1,7 @@
+locals {
+  domain_name          = "immich.app"
+  preview_prefix       = contains(["branch", "pr"], var.prefix_event_type) ? "preview" : ""
+  archive_prefix       = contains(["release"], var.prefix_event_type) ? "archive" : ""
+  deploy_domain_prefix = coalesce(local.preview_prefix, local.archive_prefix)
+  is_release           = contains(["release"], var.prefix_event_type)
+}
```
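This `locals.tf` maps the deploy event type to a subdomain prefix: `branch` and `pr` deploys land under `preview`, `release` deploys under `archive`, and `coalesce` picks the first non-empty value. The same mapping as a TypeScript sketch, ours for illustration only:

```typescript
// Illustration of the prefix logic in locals.tf above.
function deployDomainPrefix(prefixEventType: string): string {
  const previewPrefix = ['branch', 'pr'].includes(prefixEventType) ? 'preview' : '';
  const archivePrefix = prefixEventType === 'release' ? 'archive' : '';
  // HCL's coalesce() returns its first non-empty argument.
  const prefix = previewPrefix || archivePrefix;
  if (!prefix) throw new Error(`unhandled event type: ${prefixEventType}`);
  return prefix; // e.g. "pr-123.preview.immich.app" vs "<tag>.archive.immich.app"
}
```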
`deployment/modules/cloudflare/docs/providers.tf` (new file, +3 lines)

```diff
@@ -0,0 +1,3 @@
+provider "cloudflare" {
+  api_token = data.terraform_remote_state.api_keys_state.outputs.terraform_key_cloudflare_docs
+}
```
`deployment/modules/cloudflare/docs/remote-state.tf` (new file, +17 lines)

```diff
@@ -0,0 +1,17 @@
+data "terraform_remote_state" "api_keys_state" {
+  backend = "pg"
+
+  config = {
+    conn_str    = var.tf_state_postgres_conn_str
+    schema_name = "prod_cloudflare_api_keys"
+  }
+}
+
+data "terraform_remote_state" "cloudflare_account" {
+  backend = "pg"
+
+  config = {
+    conn_str    = var.tf_state_postgres_conn_str
+    schema_name = "prod_cloudflare_account"
+  }
+}
```
`deployment/modules/cloudflare/docs/terragrunt.hcl` (new file, +24 lines)

```diff
@@ -0,0 +1,24 @@
+terraform {
+  source = "."
+
+  extra_arguments "custom_vars" {
+    commands = get_terraform_commands_that_need_vars()
+  }
+}
+
+include {
+  path = find_in_parent_folders("state.hcl")
+}
+
+locals {
+  prefix_name = get_env("TF_VAR_prefix_name")
+}
+
+remote_state {
+  backend = "pg"
+
+  config = {
+    conn_str    = get_env("TF_STATE_POSTGRES_CONN_STR")
+    schema_name = "prod_cloudflare_immich_app_docs_${local.prefix_name}"
+  }
+}
```
`deployment/modules/cloudflare/docs/variables.tf` (new file, +5 lines)

```diff
@@ -0,0 +1,5 @@
+variable "cloudflare_account_id" {}
+variable "tf_state_postgres_conn_str" {}
+
+variable "prefix_name" {}
+variable "prefix_event_type" {}
```
`deployment/state.hcl` (new file, +20 lines)

```diff
@@ -0,0 +1,20 @@
+locals {
+  cloudflare_account_id = get_env("CLOUDFLARE_ACCOUNT_ID")
+  cloudflare_api_token  = get_env("CLOUDFLARE_API_TOKEN")
+
+  tf_state_postgres_conn_str = get_env("TF_STATE_POSTGRES_CONN_STR")
+}
+
+remote_state {
+  backend = "pg"
+
+  config = {
+    conn_str = local.tf_state_postgres_conn_str
+  }
+}
+
+inputs = {
+  cloudflare_account_id = local.cloudflare_account_id
+  cloudflare_api_token  = local.cloudflare_api_token
+  tf_state_postgres_conn_str = local.tf_state_postgres_conn_str
+}
```
```diff
@@ -84,7 +84,7 @@ services:

   redis:
     container_name: immich_redis
-    image: redis:6.2-alpine@sha256:e31ca60b18f7e9b78b573d156702471d4eda038803c0b8e6f01559f350031e93
+    image: redis:6.2-alpine@sha256:d6c2911ac51b289db208767581a5d154544f2b2fe4914ea5056443f62dc6e900
     healthcheck:
       test: redis-cli ping || exit 1
```
```diff
@@ -41,7 +41,7 @@ services:

   redis:
     container_name: immich_redis
-    image: redis:6.2-alpine@sha256:e31ca60b18f7e9b78b573d156702471d4eda038803c0b8e6f01559f350031e93
+    image: redis:6.2-alpine@sha256:d6c2911ac51b289db208767581a5d154544f2b2fe4914ea5056443f62dc6e900
     healthcheck:
       test: redis-cli ping || exit 1
     restart: always
@@ -85,7 +85,7 @@ services:
     command: ['./run.sh', '-disable-reporting']
     ports:
       - 3000:3000
-    image: grafana/grafana:11.0.0-ubuntu@sha256:02e99d1ee0b52dc9d3000c7b5314e7a07e0dfd69cc49bb3f8ce323491ed3406b
+    image: grafana/grafana:11.0.0-ubuntu@sha256:dcd3ae78713958a862732c3608d32c03f0c279c35a2032d74b80b12c5cdc47b8
     volumes:
       - grafana-data:/var/lib/grafana
```
```diff
@@ -43,7 +43,7 @@ services:

   redis:
     container_name: immich_redis
-    image: docker.io/redis:6.2-alpine@sha256:e31ca60b18f7e9b78b573d156702471d4eda038803c0b8e6f01559f350031e93
+    image: docker.io/redis:6.2-alpine@sha256:d6c2911ac51b289db208767581a5d154544f2b2fe4914ea5056443f62dc6e900
     healthcheck:
       test: redis-cli ping || exit 1
     restart: always
```
```diff
@@ -5,6 +5,9 @@ UPLOAD_LOCATION=./library
 # The location where your database files are stored
 DB_DATA_LOCATION=./postgres

+# To set a timezone, uncomment the next line and change Etc/UTC to a TZ identifier from this list: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones#List
+# TZ=Etc/UTC
+
 # The Immich version to use. You can pin this to a specific version like "v1.71.0"
 IMMICH_VERSION=release
```
`docker/scripts/get-cpus.sh` (new executable file, +49 lines)

```diff
@@ -0,0 +1,49 @@
+#!/bin/sh
+
+set -eu
+
+LOG_LEVEL="${IMMICH_LOG_LEVEL:='info'}"
+
+logDebug() {
+  if [ "$LOG_LEVEL" = "debug" ] || [ "$LOG_LEVEL" = "verbose" ]; then
+    echo "DEBUG: $1" >&2
+  fi
+}
+
+if [ -f /sys/fs/cgroup/cgroup.controllers ]; then
+  logDebug "cgroup v2 detected."
+  if [ -f /sys/fs/cgroup/cpu.max ]; then
+    read -r quota period </sys/fs/cgroup/cpu.max
+    if [ "$quota" = "max" ]; then
+      logDebug "No CPU limits set."
+      unset quota period
+    fi
+  else
+    logDebug "/sys/fs/cgroup/cpu.max not found."
+  fi
+else
+  logDebug "cgroup v1 detected."
+
+  if [ -f /sys/fs/cgroup/cpu/cpu.cfs_quota_us ] && [ -f /sys/fs/cgroup/cpu/cpu.cfs_period_us ]; then
+    quota=$(cat /sys/fs/cgroup/cpu/cpu.cfs_quota_us)
+    period=$(cat /sys/fs/cgroup/cpu/cpu.cfs_period_us)
+
+    if [ "$quota" = "-1" ]; then
+      logDebug "No CPU limits set."
+      unset quota period
+    fi
+  else
+    logDebug "/sys/fs/cgroup/cpu/cpu.cfs_quota_us or /sys/fs/cgroup/cpu/cpu.cfs_period_us not found."
+  fi
+fi
+
+if [ -n "${quota:-}" ] && [ -n "${period:-}" ]; then
+  cpus=$((quota / period))
+  if [ "$cpus" -eq 0 ]; then
+    cpus=1
+  fi
+else
+  cpus=$(grep -c processor /proc/cpuinfo)
+fi
+
+echo "$cpus"
```
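The script reduces cgroup CPU limits to a single integer: quota divided by period, floored, clamped to at least 1, with the host core count as the fallback when no limit is set. The same arithmetic as a TypeScript sketch (illustrative only; the shipped implementation is the shell script above):

```typescript
import { cpus } from 'node:os';

// Mirror of get-cpus.sh: a quota of "max" (cgroup v2) or -1 (cgroup v1) means unlimited.
function effectiveCpus(quota: number | 'max', period: number): number {
  if (quota === 'max' || quota < 0) {
    return cpus().length; // no cgroup limit: fall back to the host core count
  }
  // Integer division clamped to a minimum of one CPU,
  // e.g. quota=50000 with period=100000 (half a core) still yields 1.
  return Math.max(1, Math.floor(quota / period));
}
```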
```diff
@@ -1 +1 @@
-20.13
+20.14
```
```diff
@@ -67,7 +67,7 @@ Yes, with an [External Library](/docs/features/libraries.md).

 ### What happens to existing files after I choose a new [Storage Template](/docs/administration/storage-template.mdx)?

-Template changes will only apply to _new_ assets. To retroactively apply the template to previously uploaded assets, run the Storage Migration Job, available on the [Jobs](/docs/administration/jobs.md) page.
+Template changes will only apply to _new_ assets. To retroactively apply the template to previously uploaded assets, run the Storage Migration Job, available on the [Jobs](/docs/administration/jobs-workers/#jobs) page.

 ### Why are only photos and not videos being uploaded to Immich?
```
(Binary image changes: one image added at 55 KiB, one replaced at 11 KiB, two images removed at 113 KiB and 57 KiB.)
`docs/docs/administration/jobs-workers.md` (new file, +55 lines)

```diff
@@ -0,0 +1,55 @@
+# Jobs and Workers
+
+## Workers
+
+### Architecture
+
+The `immich-server` container contains multiple workers:
+
+- `api`: responds to API requests for data and files for the web and mobile app.
+- `microservices`: handles most other work, such as thumbnail generation and video encoding, in the form of _jobs_. Simply put, a job is a request to process data in the background.
+
+## Split workers
+
+If you prefer to throttle or distribute the workers, you can do this using the [environment variables](/docs/install/environment-variables) to specify which container should pick up which tasks.
+
+For example, for a simple setup with one container for the Web/API and one for all other microservices, you can do the following:
+
+Copy the entire `immich-server` block as a new service and make the following changes to the **copy**:
+
+```diff
+- immich-server:
+-   container_name: immich_server
+  ...
+-   ports:
+-     - 2283:3001
++ immich-microservices:
++   container_name: immich_microservices
+```
+
+Once you have two copies of the immich-server service, make the following changes to each one. This will allow one container to only serve the web UI and API, and the other one to handle all other tasks.
+
+```diff
+services:
+  immich-server:
+    ...
++   environment:
++     IMMICH_WORKERS_INCLUDE: 'api'
+
+  immich-microservices:
+    ...
++   environment:
++     IMMICH_WORKERS_EXCLUDE: 'api'
+```
+
+## Jobs
+
+When a new asset is uploaded it kicks off a series of jobs, which include metadata extraction, thumbnail generation, machine learning tasks, and storage template migration, if enabled. To view the status of a job navigate to the Administration -> Jobs page.
+
+Additionally, some jobs run on a schedule, which is every night at midnight. This schedule, with the exception of [External Libraries](/docs/features/libraries) scanning, cannot be changed.
+
+:::info
+Storage Migration job can be run after changing the [Storage Template](/docs/administration/storage-template.mdx), in order to apply the change to the existing library.
+:::
+
+<img src={require('./img/admin-jobs.png').default} width="80%" title="Admin jobs" />
```
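The new page documents splitting workers with `IMMICH_WORKERS_INCLUDE` and `IMMICH_WORKERS_EXCLUDE`. A sketch of how a process could resolve the active worker set from those two variables (our illustration of the documented behaviour, not Immich's actual implementation):

```typescript
// Resolve the active worker set from the documented variables. An empty or
// unset include list means "all workers".
const ALL_WORKERS = ['api', 'microservices'] as const;
type Worker = (typeof ALL_WORKERS)[number];

function resolveWorkers(env: Record<string, string | undefined>): Worker[] {
  const parse = (value?: string) =>
    new Set((value ?? '').split(',').map((part) => part.trim()).filter(Boolean));
  const include = parse(env.IMMICH_WORKERS_INCLUDE);
  const exclude = parse(env.IMMICH_WORKERS_EXCLUDE);
  return ALL_WORKERS.filter((worker) => include.size === 0 || include.has(worker)).filter(
    (worker) => !exclude.has(worker),
  );
}

// resolveWorkers({ IMMICH_WORKERS_INCLUDE: 'api' })  -> ['api']
// resolveWorkers({ IMMICH_WORKERS_EXCLUDE: 'api' })  -> ['microservices']
```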
```diff
@@ -1,13 +0,0 @@
-# Jobs
-
-The `immich-server` responds to API requests for data and files for the web and mobile app. To do this quickly and reliably, it offloads most other work to `immich-microservices` in the form of _jobs_. Simply put, a job is a request to process data in the background. Jobs are picked up automatically by microservices containers.
-
-When a new asset is uploaded it kicks off a series of jobs, which include metadata extraction, thumbnail generation, machine learning tasks, and storage template migration, if enabled. To view the status of a job navigate to the Administration -> Jobs page.
-
-Additionally, some jobs run on a schedule, which is every night at midnight. This schedule, with the exception of [External Libraries](/docs/features/libraries) scanning, cannot be changed.
-
-:::info
-Storage Migration job can be run after changing the [Storage Template](/docs/administration/storage-template.mdx), in order to apply the change to the existing library.
-:::
-
-<img src={require('./img/admin-jobs.png').default} width="80%" title="Admin jobs" />
```
```diff
@@ -117,7 +117,27 @@ Immich has a route (`/api/oauth/mobile-redirect`) that is already configured to

 Here's an example of OAuth configured for Authentik:

 <img src={require('./img/oauth-settings.png').default} title="OAuth settings" />
+Configuration of Authorised redirect URIs (Authentik OAuth2/OpenID Provider)
+
+<img src={require('./img/authentik-redirect-uris-example.webp').default} width='70%' title="Authentik authorised redirect URIs" />
+
+Configuration of OAuth in Immich System Settings
+
+| Setting                      | Value                                                                               |
+| ---------------------------- | ----------------------------------------------------------------------------------- |
+| Issuer URL                   | `https://example.immich.app/application/o/immich/.well-known/openid-configuration` |
+| Client ID                    | AFCj2rM1f4rps**\*\*\*\***\***\*\*\*\***lCLEum6hH9...                                |
+| Client Secret                | 0v89FXkQOWO\***\*\*\*\*\***\*\*\***\*\*\*\*\***mprbvXD549HH6s1iw...                 |
+| Scope                        | openid email profile                                                                |
+| Signing Algorithm            | RS256                                                                               |
+| Storage Label Claim          | preferred_username                                                                  |
+| Storage Quota Claim          | immich_quota                                                                        |
+| Default Storage Quota (GiB)  | 0 (0 for unlimited quota)                                                           |
+| Button Text                  | Sign in with Authentik (optional)                                                   |
+| Auto Register                | Enabled (optional)                                                                  |
+| Auto Launch                  | Enabled (optional)                                                                  |
+| Mobile Redirect URI Override | Disable                                                                             |
+| Mobile Redirect URI          |                                                                                     |

 </details>
@@ -126,11 +146,13 @@ Here's an example of OAuth configured for Authentik:

 ### Google Example

 Here's an example of OAuth configured for Google:

+Configuration of Authorised redirect URIs (Google Console)
+
-<img src={require('./img/google-example.webp').default} width='50%' title="Authorised redirect URIs" />
+<img src={require('./img/google-redirect-uris-example.webp').default} width='50%' title="Google authorised redirect URIs" />

-Configuration of OAuth in System Settings
+Configuration of OAuth in Immich System Settings

 | Setting                      | Value                                                                                                    |
 | ---------------------------- | -------------------------------------------------------------------------------------------------------- |
```
````diff
@@ -77,7 +77,6 @@ immich-admin list-users
     deletedAt: null,
     updatedAt: 2023-09-21T15:42:28.129Z,
     oauthId: '',
-    memoriesEnabled: true
   }
 ]
 ```
````
```diff
@@ -19,6 +19,11 @@ this advice improves throughput, not latency, for example, it will make Smart Se

 It is important to remember that jobs like Smart Search, Face Detection, Facial Recognition, and Transcode Videos require a **lot** of processing power and therefore do not exaggerate the amount of jobs because you're probably thoroughly overloading the server.

+:::danger IMPORTANT
+If you increase the concurrency from the defaults we set, especially for thumbnail generation, make sure you do not increase them past the amount of CPU cores you have available.
+Doing so can impact API responsiveness with no gain in thumbnail generation speed.
+:::
+
 :::info Facial Recognition Concurrency
 The Facial Recognition Concurrency value cannot be changed because
 [DBSCAN](https://www.youtube.com/watch?v=RDZUdRSDOok) is traditionally sequential, but there are parallel implementations of it out there. Our implementation isn't parallel.
```
```diff
@@ -80,7 +80,7 @@ The Immich Microservices image uses the same `Dockerfile` as the Immich Server,
 - Background jobs (file deletion, user deletion)

 :::info
-This list closely matches what is available on the [Administration > Jobs](/docs/administration/jobs.md) page, which provides some remote queue management capabilities.
+This list closely matches what is available on the [Administration > Jobs](/docs/administration/jobs-workers/#jobs) page, which provides some remote queue management capabilities.
 :::

 ### Machine Learning
```
```diff
@@ -44,16 +44,16 @@ If the import paths are edited in a way that an external file is no longer in an

 ### Troubleshooting

-Sometimes, an external library will not scan correctly. This can happen if immich_server or immich_microservices can't access the files. Here are some things to check:
+Sometimes, an external library will not scan correctly. This can happen if Immich can't access the files. Here are some things to check:

 - In the docker-compose file, are the volumes mounted correctly?
-- Are the volumes identical between the `server` and `microservices` container?
+- Are the volumes also mounted to any worker containers?
 - Are the import paths set correctly, and do they match the path set in docker-compose file?
 - Make sure you don't use symlinks in your import libraries, and that you aren't linking across docker mounts.
 - Are the permissions set correctly?
 - Make sure you are using forward slashes (`/`) and not backward slashes.

-To validate that Immich can reach your external library, start a shell inside the container. Run `docker exec -it immich_microservices /bin/bash` to a bash shell. If your import path is `/data/import/photos`, check it with `ls /data/import/photos`. Do the same check for the `immich_server` container. If you cannot access this directory in both the `microservices` and `server` containers, Immich won't be able to import files.
+To validate that Immich can reach your external library, start a shell inside the container. Run `docker exec -it immich_server bash` to a bash shell. If your import path is `/data/import/photos`, check it with `ls /data/import/photos`. Do the same check for the same in any microservices containers.

 ### Exclusion Patterns
```
````diff
@@ -98,7 +98,7 @@ First, we need to plan how we want to organize the libraries. The christmas trip

 ### Mount Docker Volumes

-`immich-server` and `immich-microservices` containers will need access to the gallery. Modify your docker compose file as follows
+The `immich-server` container will need access to the gallery. Modify your docker compose file as follows

 ```diff title="docker-compose.yml"
   immich-server:
@@ -107,15 +107,6 @@ First, we need to plan how we want to organize the libraries. The christmas trip
 +      - /mnt/nas/christmas-trip:/mnt/media/christmas-trip:ro
 +      - /home/user/old-pics:/mnt/media/old-pics:ro
 +      - /mnt/media/videos:/mnt/media/videos:ro
 +      - "C:/Users/user_name/Desktop/my media:/mnt/media/my-media:ro" # import path in Windows system.
 
-
-  immich-microservices:
-    volumes:
-      - ${UPLOAD_LOCATION}:/usr/src/app/upload
-+      - /mnt/nas/christmas-trip:/mnt/media/christmas-trip:ro
-+      - /home/user/old-pics:/mnt/media/old-pics:ro
-+      - /mnt/media/videos:/mnt/media/videos:ro
-+      - "C:/Users/user_name/Desktop/my media:/mnt/media/my-media:ro" # import path in Windows system.
 ```
````
````diff
@@ -9,7 +9,7 @@ It is important to remember to update the backup settings after following the gu
 In our `.env` file, we will define variables that will help us in the future when we want to move to a more advanced server in the future

 ```diff title=".env"
-# You can find documentation for all the supported env variables at https://immich.app/docs/install/environment-variables
+# You can find documentation for all the supported env variables [here](/docs/install/environment-variables)

 # Custom location where your uploaded, thumbnails, and transcoded video files are stored
 - UPLOAD_LOCATION=./library
````
@@ -11,9 +11,8 @@ docker ps -a # see a list of running and stopped containers

```bash
docker exec -it <id or name> <command> # attach to a container with a command
docker exec -it immich_server sh
docker exec -it immich_microservices sh
docker exec -it immich_machine_learning sh
docker exec -it immich_server bash
docker exec -it immich_machine_learning bash
```

## Logs

@@ -22,7 +21,6 @@ docker exec -it immich_machine_learning sh
docker logs <id or name> # see the logs for a specific container (by id or name)

docker logs immich_server
docker logs immich_microservices
docker logs immich_machine_learning
```
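To keep watching output as new lines arrive, Docker's standard follow flag works with any of the container names above:

```bash
# Stream new log lines until interrupted (Ctrl+C to stop).
docker logs --follow immich_server
```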
@@ -6,20 +6,14 @@ in a directory on the same machine.

# Mount the directory into the containers.

Edit `docker-compose.yml` to add two new mount points in the sections `immich-server:` and `immich-microservices:` under `volumes:`
Edit `docker-compose.yml` to add two new mount points in the section `immich-server:` under `volumes:`

```diff
immich-server:
volumes:
+ - ${EXTERNAL_PATH}:/usr/src/app/external

immich-microservices:
volumes:
+ - ${EXTERNAL_PATH}:/usr/src/app/external
```

Be sure to add exactly the same path to both services.

Edit `.env` to define `EXTERNAL_PATH`, substituting in the correct path for your computer:

```
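The contents of that block are truncated by the diff view; as a sketch of the form it takes, with an assumed example path:

```bash
# Hypothetical example path; substitute the directory that holds your photos.
EXTERNAL_PATH=/home/user/photos
```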
@@ -32,7 +32,7 @@ def upload(file):
}

response = requests.post(
f'{BASE_URL}/asset/upload', headers=headers, data=data, files=files)
f'{BASE_URL}/assets', headers=headers, data=data, files=files)

print(response.json())
# {'id': 'ef96f635-61c7-4639-9e60-61a11c4bbfba', 'duplicate': False}
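The change above is purely a rename of the upload endpoint from `/asset/upload` to `/assets`. For a quick manual probe of the renamed route, a hedged sketch with curl; the `x-api-key` header and `assetData` field name are assumptions, not shown in this diff:

```bash
# Assumed header and multipart field names; adjust to the actual API schema.
curl -X POST "$BASE_URL/assets" \
  -H "x-api-key: $API_KEY" \
  -F "assetData=@photo.jpg"
```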
@@ -7,7 +7,7 @@ To alleviate [performance issues on low-memory systems](/docs/FAQ.mdx#why-is-imm

- Start the container by running `docker compose up -d`.

:::info
Starting with version v1.93.0, face detection and facial recognition were split. From now on, face detection is done in the immich_machine_learning service, but facial recognition is done in the immich_microservices service.
Starting with version v1.93.0, face detection and facial recognition were split. From now on, face detection is done in the immich_machine_learning container, but facial recognition is done in the `microservices` worker.
:::

:::note
@@ -17,11 +17,11 @@ If this should not work, try running `docker compose up -d --force-recreate`.

## Docker Compose

| Variable           | Description                     |  Default  | Services                                 |
| :----------------- | :------------------------------ | :-------: | :--------------------------------------- |
| `IMMICH_VERSION`   | Image tags                      | `release` | server, microservices, machine learning |
| `UPLOAD_LOCATION`  | Host Path for uploads           |           | server, microservices                    |
| `DB_DATA_LOCATION` | Host Path for Postgres database |           | database                                 |
| Variable           | Description                     |  Default  | Containers               |
| :----------------- | :------------------------------ | :-------: | :----------------------- |
| `IMMICH_VERSION`   | Image tags                      | `release` | server, machine learning |
| `UPLOAD_LOCATION`  | Host Path for uploads           |           | server                   |
| `DB_DATA_LOCATION` | Host Path for Postgres database |           | database                 |

:::tip
These environment variables are used by the `docker-compose.yml` file and do **NOT** affect the containers directly.
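Taken together, a minimal `.env` sketch for the three compose-level variables above; the paths are placeholders, not recommendations:

```bash
# Illustrative values; both paths must exist on the host.
IMMICH_VERSION=release
UPLOAD_LOCATION=/mnt/storage/immich/upload
DB_DATA_LOCATION=/mnt/storage/immich/postgres
```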
@@ -38,15 +38,17 @@ Regardless of filesystem, it is not recommended to use a network share for your

## General

| Variable                        | Description                                  |         Default          | Services                                 |
| :------------------------------ | :------------------------------------------- | :----------------------: | :--------------------------------------- |
| `TZ`                            | Timezone                                     |                          | microservices                            |
| `IMMICH_ENV`                    | Environment (production, development)        |       `production`       | server, microservices, machine learning |
| `IMMICH_LOG_LEVEL`              | Log Level (verbose, debug, log, warn, error) |          `log`           | server, microservices, machine learning |
| `IMMICH_MEDIA_LOCATION`         | Media Location                               | `./upload`<sup>\*1</sup> | server, microservices                    |
| `IMMICH_CONFIG_FILE`            | Path to config file                          |                          | server, microservices                    |
| `IMMICH_WEB_ROOT`               | Path of root index.html                      |    `/usr/src/app/www`    | server                                   |
| `IMMICH_REVERSE_GEOCODING_ROOT` | Path of reverse geocoding dump directory     |   `/usr/src/resources`   | microservices                            |
| Variable                        | Description                                      |           Default            | Containers               | Workers            |
| :------------------------------ | :----------------------------------------------- | :--------------------------: | :----------------------- | :----------------- |
| `TZ`                            | Timezone                                         |                              | server                   | microservices      |
| `IMMICH_ENV`                    | Environment (production, development)            |         `production`         | server, machine learning | api, microservices |
| `IMMICH_LOG_LEVEL`              | Log Level (verbose, debug, log, warn, error)     |            `log`             | server, machine learning | api, microservices |
| `IMMICH_MEDIA_LOCATION`         | Media Location                                   |   `./upload`<sup>\*1</sup>   | server                   | api, microservices |
| `IMMICH_CONFIG_FILE`            | Path to config file                              |                              | server                   | api, microservices |
| `IMMICH_WEB_ROOT`               | Path of root index.html                          |      `/usr/src/app/www`      | server                   | api                |
| `IMMICH_REVERSE_GEOCODING_ROOT` | Path of reverse geocoding dump directory         |     `/usr/src/resources`     | server                   | microservices      |
| `NO_COLOR`                      | Set to `true` to disable color-coded log output  |           `false`            | server, machine learning |                    |
| `CPU_CORES`                     | Amount of cores available to the immich server   | auto-detected cpu core count | server                   |                    |

\*1: With the default `WORKDIR` of `/usr/src/app`, this path will resolve to `/usr/src/app/upload`.
It only needs to be set if the Immich deployment method is changing.
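As a small illustration of the general settings, a sketch that raises the log verbosity and pins the timezone, using only values documented above:

```bash
# verbose is the most detailed of the documented log levels.
IMMICH_LOG_LEVEL=verbose
TZ="Etc/UTC"
```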
@@ -54,28 +56,39 @@ It only need to be set if the Immich deployment method is changing.

:::tip
`TZ` should be set to a `TZ identifier` from [this list][tz-list]. For example, `TZ="Etc/UTC"`.

`TZ` is only used by `exiftool`, which is present in the microservices container, as a fallback in case the timezone cannot be determined from the image metadata.
`TZ` is used by `exiftool` as a fallback in case the timezone cannot be determined from the image metadata. It is also used for logfile timestamps and cron job execution.
:::
## Workers

| Variable                 | Description                                                                                           | Default | Containers |
| :----------------------- | :---------------------------------------------------------------------------------------------------- | :-----: | :--------- |
| `IMMICH_WORKERS_INCLUDE` | Only run these workers.                                                                               |         | server     |
| `IMMICH_WORKERS_EXCLUDE` | Do not run these workers. Matches against default workers, or `IMMICH_WORKERS_INCLUDE` if specified.  |         | server     |

:::info
Information on the current workers can be found [here](/docs/administration/jobs-workers).
:::
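A sketch of how these two variables can split one deployment into an API-only container and a jobs-only container; the worker names `api` and `microservices` are the ones this page refers to:

```bash
# Container 1: serve only the HTTP API.
IMMICH_WORKERS_INCLUDE=api
# Container 2 (a separate instance): run only background jobs.
IMMICH_WORKERS_INCLUDE=microservices
```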
## Ports

| Variable      | Description    |                 Default                 |
| :------------ | :------------- | :-------------------------------------: |
| `IMMICH_HOST` | Listening host |                `0.0.0.0`                |
| `IMMICH_PORT` | Listening port | 3001 (server), 3003 (machine learning)  |
| Variable      | Description    |                   Default                   |
| :------------ | :------------- | :-----------------------------------------: |
| `IMMICH_HOST` | Listening host |                  `0.0.0.0`                  |
| `IMMICH_PORT` | Listening port | `3001` (server), `3003` (machine learning)  |
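For example, a sketch that narrows the server's listener; the values are illustrative, and any published ports in `docker-compose.yml` would need to match:

```bash
# Bind only to the loopback interface on a non-default port.
IMMICH_HOST=127.0.0.1
IMMICH_PORT=2283
```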
## Database

| Variable                            | Description                                                               |   Default    | Services                                       |
| :---------------------------------- | :------------------------------------------------------------------------ | :----------: | :--------------------------------------------- |
| `DB_URL`                            | Database URL                                                              |              | server, microservices                          |
| `DB_HOSTNAME`                       | Database Host                                                             |  `database`  | server, microservices                          |
| `DB_PORT`                           | Database Port                                                             |    `5432`    | server, microservices                          |
| `DB_USERNAME`                       | Database User                                                             |  `postgres`  | server, microservices, database<sup>\*1</sup>  |
| `DB_PASSWORD`                       | Database Password                                                         |  `postgres`  | server, microservices, database<sup>\*1</sup>  |
| `DB_DATABASE_NAME`                  | Database Name                                                             |   `immich`   | server, microservices, database<sup>\*1</sup>  |
| `DB_VECTOR_EXTENSION`<sup>\*2</sup> | Database Vector Extension (one of [`pgvector`, `pgvecto.rs`])             | `pgvecto.rs` | server, microservices                          |
| `DB_SKIP_MIGRATIONS`                | Whether to skip running migrations on startup (one of [`true`, `false`])  |   `false`    | server, microservices                          |
| Variable                            | Description                                                               |   Default    | Containers                      |
| :---------------------------------- | :------------------------------------------------------------------------ | :----------: | :------------------------------ |
| `DB_URL`                            | Database URL                                                              |              | server                          |
| `DB_HOSTNAME`                       | Database Host                                                             |  `database`  | server                          |
| `DB_PORT`                           | Database Port                                                             |    `5432`    | server                          |
| `DB_USERNAME`                       | Database User                                                             |  `postgres`  | server, database<sup>\*1</sup>  |
| `DB_PASSWORD`                       | Database Password                                                         |  `postgres`  | server, database<sup>\*1</sup>  |
| `DB_DATABASE_NAME`                  | Database Name                                                             |   `immich`   | server, database<sup>\*1</sup>  |
| `DB_VECTOR_EXTENSION`<sup>\*2</sup> | Database Vector Extension (one of [`pgvector`, `pgvecto.rs`])             | `pgvecto.rs` | server                          |
| `DB_SKIP_MIGRATIONS`                | Whether to skip running migrations on startup (one of [`true`, `false`])  |   `false`    | server                          |

\*1: The values of `DB_USERNAME`, `DB_PASSWORD`, and `DB_DATABASE_NAME` are passed to the Postgres container as the variables `POSTGRES_USER`, `POSTGRES_PASSWORD`, and `POSTGRES_DB` in `docker-compose.yml`.
@@ -83,30 +96,34 @@ It only need to be set if the Immich deployment method is changing.

:::info

All `DB_` variables must be provided to all Immich workers, including `api` and `microservices`.

`DB_URL` must be in the format `postgresql://immichdbusername:immichdbpassword@postgreshost:postgresport/immichdatabasename`.
You can require SSL by adding `?sslmode=require` to the end of the `DB_URL` string, or require SSL and skip certificate verification by adding `?sslmode=require&sslmode=no-verify`.

When `DB_URL` is defined, the `DB_HOSTNAME`, `DB_PORT`, `DB_USERNAME`, `DB_PASSWORD` and `DB_DATABASE_NAME` database variables are ignored.

:::
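Combining that format with the defaults from the table above gives a concrete sketch; the credentials, host, and database name shown are the documented defaults, not required values:

```bash
# Default-valued example; append ?sslmode=require to enforce SSL.
DB_URL=postgresql://postgres:postgres@database:5432/immich
```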
## Redis

| Variable         | Description    | Default | Services              |
| :--------------- | :------------- | :-----: | :--------------------- |
| `REDIS_URL`      | Redis URL      |         | server, microservices |
| `REDIS_HOSTNAME` | Redis Host     | `redis` | server, microservices |
| `REDIS_PORT`     | Redis Port     | `6379`  | server, microservices |
| `REDIS_DBINDEX`  | Redis DB Index |   `0`   | server, microservices |
| `REDIS_USERNAME` | Redis Username |         | server, microservices |
| `REDIS_PASSWORD` | Redis Password |         | server, microservices |
| `REDIS_SOCKET`   | Redis Socket   |         | server, microservices |
| Variable         | Description    | Default | Containers |
| :--------------- | :------------- | :-----: | :--------- |
| `REDIS_URL`      | Redis URL      |         | server     |
| `REDIS_SOCKET`   | Redis Socket   |         | server     |
| `REDIS_HOSTNAME` | Redis Host     | `redis` | server     |
| `REDIS_PORT`     | Redis Port     | `6379`  | server     |
| `REDIS_USERNAME` | Redis Username |         | server     |
| `REDIS_PASSWORD` | Redis Password |         | server     |
| `REDIS_DBINDEX`  | Redis DB Index |   `0`   | server     |

:::info
All `REDIS_` variables must be provided to all Immich workers, including `api` and `microservices`.

`REDIS_URL` must start with `ioredis://` and then include a `base64` encoded JSON string for the configuration.
More info can be found in the upstream [ioredis][redis-api] documentation.

- When `REDIS_URL` is defined, the other redis (`REDIS_*`) variables are ignored.
- When `REDIS_SOCKET` is defined, the other redis (`REDIS_*`) variables are ignored.

When `REDIS_URL` or `REDIS_SOCKET` are defined, the `REDIS_HOSTNAME`, `REDIS_PORT`, `REDIS_USERNAME`, `REDIS_PASSWORD`, and `REDIS_DBINDEX` variables are ignored.
:::
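As a sketch of the encoding step, assuming a minimal options object (the exact keys ioredis accepts are documented upstream):

```bash
# Base64-encode a minimal ioredis options object, then prefix it.
echo -n '{"host":"redis","port":6379}' | base64
# -> eyJob3N0IjoicmVkaXMiLCJwb3J0Ijo2Mzc5fQ==
# REDIS_URL=ioredis://eyJob3N0IjoicmVkaXMiLCJwb3J0Ijo2Mzc5fQ==
```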
Redis (Sentinel) URL example JSON before encoding:

@@ -138,7 +155,7 @@ Redis (Sentinel) URL example JSON before encoding:

## Machine Learning

| Variable                                          | Description                                                           |       Default       | Services         |
| Variable                                          | Description                                                           |       Default       | Containers       |
| :----------------------------------------------- | :-------------------------------------------------------------------- | :-----------------: | :--------------- |
| `MACHINE_LEARNING_MODEL_TTL`                      | Inactivity time (s) before a model is unloaded (disabled if \<= 0)    |        `300`        | machine learning |
| `MACHINE_LEARNING_MODEL_TTL_POLL_S`               | Interval (s) between checks for the model TTL (disabled if \<= 0)     |        `10`         | machine learning |
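For instance, a sketch that keeps models resident longer on a host with spare memory; the values are illustrative, not tuned recommendations:

```bash
# Unload idle models after an hour instead of the 300 s default,
# checking for expiry every 30 s.
MACHINE_LEARNING_MODEL_TTL=3600
MACHINE_LEARNING_MODEL_TTL_POLL_S=30
```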
@@ -163,13 +180,13 @@ Other machine learning parameters can be tuned from the admin UI.

## Prometheus

| Variable                       | Description                                                                                    | Default | Services              |
| :----------------------------- | :---------------------------------------------------------------------------------------------- | :-----: | :--------------------- |
| `IMMICH_METRICS`<sup>\*1</sup> | Toggle all metrics (one of [`true`, `false`])                                                  |         | server, microservices |
| `IMMICH_API_METRICS`           | Toggle metrics for endpoints and response times (one of [`true`, `false`])                     |         | server, microservices |
| `IMMICH_HOST_METRICS`          | Toggle metrics for CPU and memory utilization for host and process (one of [`true`, `false`])  |         | server, microservices |
| `IMMICH_IO_METRICS`            | Toggle metrics for database queries, image processing, etc. (one of [`true`, `false`])         |         | server, microservices |
| `IMMICH_JOB_METRICS`           | Toggle metrics for jobs and queues (one of [`true`, `false`])                                  |         | server, microservices |
| Variable                       | Description                                                                                    | Default | Containers | Workers            |
| :----------------------------- | :---------------------------------------------------------------------------------------------- | :-----: | :--------- | :----------------- |
| `IMMICH_METRICS`<sup>\*1</sup> | Toggle all metrics (one of [`true`, `false`])                                                  |         | server     | api, microservices |
| `IMMICH_API_METRICS`           | Toggle metrics for endpoints and response times (one of [`true`, `false`])                     |         | server     | api, microservices |
| `IMMICH_HOST_METRICS`          | Toggle metrics for CPU and memory utilization for host and process (one of [`true`, `false`])  |         | server     | api, microservices |
| `IMMICH_IO_METRICS`            | Toggle metrics for database queries, image processing, etc. (one of [`true`, `false`])         |         | server     | api, microservices |
| `IMMICH_JOB_METRICS`           | Toggle metrics for jobs and queues (one of [`true`, `false`])                                  |         | server     | api, microservices |

\*1: Overridden for a metric group when its corresponding environment variable is set.
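A small sketch of selective metrics using only the toggles from this table; the blanket switch stays on while job metrics are opted out:

```bash
# IMMICH_METRICS enables every group; a per-group variable overrides it.
IMMICH_METRICS=true
IMMICH_JOB_METRICS=false
```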
@@ -100,9 +100,9 @@ const config = {
label: 'Docs',
},
{
to: '/milestones',
to: '/roadmap',
position: 'right',
label: 'Milestones',
label: 'Roadmap',
},
{
to: '/docs/api',

@@ -56,6 +56,6 @@
"node": ">=20"
},
"volta": {
"node": "20.13.1"
"node": "20.14.0"
}
}
@@ -1,28 +1,23 @@
import React from 'react';
import Icon from '@mdi/react';
import { mdiCheckboxMarkedCircleOutline } from '@mdi/js';
import useIsBrowser from '@docusaurus/useIsBrowser';
import { mdiCheckboxBlankCircle, mdiCheckboxMarkedCircle } from '@mdi/js';
import Icon from '@mdi/react';
import React from 'react';

export interface Item {
export type Item = {
icon: string;
iconColor: string;
title: string;
description?: string;
release?: string;
tag?: string;
date: Date;
dateType: DateType;
}

export enum DateType {
RELEASE = 'Release Date',
DATE = 'Date',
}
link?: { url: string; text: string };
done?: false;
getDateLabel: (language: string) => string;
};

interface Props {
items: Item[];
}

export default function Timeline({ items }: Props): JSX.Element {
export function Timeline({ items }: Props): JSX.Element {
const isBrowser = useIsBrowser();

return (
@@ -30,21 +25,15 @@ export default function Timeline({ items }: Props): JSX.Element {
{items.map((item, index) => {
const isFirst = index === 0;
const isLast = index === items.length - 1;

const classNames: string[] = [];

if (isFirst) {
classNames.push('');
}

if (isLast) {
classNames.push('rounded rounded-b-full');
}
const done = item.done ?? true;
const dateLabel = item.getDateLabel(isBrowser ? navigator.language : 'en-US');
const timelineIcon = done ? mdiCheckboxMarkedCircle : mdiCheckboxBlankCircle;
const cardIcon = item.icon;

return (
<li key={index} className="flex min-h-24 w-[700px] max-w-[90vw]">
<li key={index} className={`flex min-h-24 w-[700px] max-w-[90vw] ${done ? '' : 'italic'}`}>
<div className="md:flex justify-start w-36 mr-8 items-center dark:text-immich-dark-primary text-immich-primary hidden">
{isBrowser ? item.date.toLocaleDateString(navigator.language) : ''}
{dateLabel}
</div>
<div className={`${isFirst && 'relative top-[50%]'} ${isLast && 'relative bottom-[50%]'}`}>
<div
@@ -54,33 +43,32 @@ export default function Timeline({ items }: Props): JSX.Element {
></div>
</div>
<div className="z-10 flex items-center bg-immich-primary dark:bg-immich-dark-primary border-2 border-solid rounded-full dark:text-black text-white relative top-[50%] left-[-3px] translate-y-[-50%] translate-x-[-50%] w-8 h-8 shadow-lg ">
<Icon path={mdiCheckboxMarkedCircleOutline} size={1.25} />
{<Icon path={timelineIcon} size={1.25} />}
</div>
<section className=" dark:bg-immich-dark-gray bg-immich-gray dark:border-0 border-gray-200 border border-solid rounded-2xl flex flex-col w-full gap-2 p-4 md:ml-4 my-2 hover:bg-immich-primary/10 dark:hover:bg-immich-dark-primary/10 transition-all">
<div className="m-0 text-lg flex w-full items-center justify-between gap-2">
<p className="m-0 items-start flex gap-2">
<Icon path={item.icon} size={1} />
<span>{item.title}</span>
</p>

<span className="dark:text-immich-dark-primary text-immich-primary">
{item.tag ? (
<a
href={`https://github.com/immich-app/immich/releases/tag/${item.tag}`}
target="_blank"
rel="noopener"
>
[{item.release ?? item.tag}]{' '}
</a>
<section className=" dark:bg-immich-dark-gray bg-immich-gray dark:border-0 border-gray-200 border border-solid rounded-2xl flex flex-row w-full gap-2 p-4 md:ml-4 my-2 hover:bg-immich-primary/10 dark:hover:bg-immich-dark-primary/10 transition-all">
<div className="flex flex-col flex-grow justify-between gap-2">
<div className="flex gap-2 items-center">
{cardIcon === 'immich' ? (
<img src="img/immich-logo.svg" height="30" />
) : (
item.release && <span>[{item.release}]</span>
<Icon path={cardIcon} size={1} color={item.iconColor} />
)}
<p className="m-0 mt-1 text-lg items-start flex gap-2 place-items-center content-center">
<span>{item.title}</span>
</p>
</div>
<p className="m-0 text-sm text-gray-600 dark:text-gray-300">{item.description}</p>
</div>
<div className="flex flex-col justify-between place-items-end">
<span className="dark:text-immich-dark-primary text-immich-primary">
{item.link && (
<a href={item.link.url} target="_blank" rel="noopener">
[{item.link.text}]
</a>
)}
</span>
<div className="md:hidden text-sm text-right">{dateLabel}</div>
</div>
<div className="md:hidden text-xs">
{`${item.dateType} - ${isBrowser ? item.date.toLocaleDateString(navigator.language) : ''}`}
</div>
<p className="m-0 text-sm text-gray-600 dark:text-gray-300">{item.description}</p>
</section>
</li>
);
@@ -15,6 +15,10 @@ button {
font-family: 'Overpass', sans-serif;
}

img {
border-radius: 15px;
}

/* You can override the default Infima variables here. */
:root {
--ifm-color-primary: #4250af;
@@ -42,7 +46,7 @@ button {
}

div[class^='announcementBar_'] {
height: 2rem;
min-height: 2rem;
}

.navbar__brand .navbar__title {
1 docs/static/_redirects vendored

@@ -28,3 +28,4 @@
/docs/features/search /docs/features/smart-search 301
/docs/guides/api-album-sync /docs/community-projects 301
/docs/guides/remove-offline-files /docs/community-projects 301
/milestones /roadmap 301
BIN docs/static/img/ios-app-store-badge.png vendored (Before: 1.8 KiB, After: 70 KiB)

@@ -1 +1 @@
20.13
20.14
@@ -27,7 +27,7 @@ services:
- 2283:3001

redis:
image: redis:6.2-alpine@sha256:e31ca60b18f7e9b78b573d156702471d4eda038803c0b8e6f01559f350031e93
image: redis:6.2-alpine@sha256:d6c2911ac51b289db208767581a5d154544f2b2fe4914ea5056443f62dc6e900

database:
image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
139 e2e/package-lock.json generated
@@ -88,7 +88,7 @@
"@oazapfts/runtime": "^1.0.2"
},
"devDependencies": {
"@types/node": "^20.11.0",
"@types/node": "^20.12.13",
"typescript": "^5.3.3"
}
},
@@ -1230,9 +1230,9 @@
"dev": true
},
"node_modules/@types/node": {
"version": "20.12.12",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.12.12.tgz",
"integrity": "sha512-eWLDGF/FOSPtAvEqeRAQ4C8LSA7M1I7i0ky1I8U7kD1J5ITyW3AsRhQrKVoWf5pFKZ2kILsEGJhsI9r93PYnOw==",
"version": "20.12.13",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.12.13.tgz",
"integrity": "sha512-gBGeanV41c1L171rR7wjbMiEpEI/l5XFQdLLfhr/REwpgDy/4U8y89+i8kRiLzDyZdOkXh+cRaTetUnCYutoXA==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -1344,17 +1344,17 @@
}
},
"node_modules/@typescript-eslint/eslint-plugin": {
"version": "7.9.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.9.0.tgz",
"integrity": "sha512-6e+X0X3sFe/G/54aC3jt0txuMTURqLyekmEHViqyA2VnxhLMpvA6nqmcjIy+Cr9tLDHPssA74BP5Mx9HQIxBEA==",
"version": "7.11.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.11.0.tgz",
"integrity": "sha512-P+qEahbgeHW4JQ/87FuItjBj8O3MYv5gELDzr8QaQ7fsll1gSMTYb6j87MYyxwf3DtD7uGFB9ShwgmCJB5KmaQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@eslint-community/regexpp": "^4.10.0",
"@typescript-eslint/scope-manager": "7.9.0",
"@typescript-eslint/type-utils": "7.9.0",
"@typescript-eslint/utils": "7.9.0",
"@typescript-eslint/visitor-keys": "7.9.0",
"@typescript-eslint/scope-manager": "7.11.0",
"@typescript-eslint/type-utils": "7.11.0",
"@typescript-eslint/utils": "7.11.0",
"@typescript-eslint/visitor-keys": "7.11.0",
"graphemer": "^1.4.0",
"ignore": "^5.3.1",
"natural-compare": "^1.4.0",
@@ -1378,16 +1378,16 @@
}
},
"node_modules/@typescript-eslint/parser": {
"version": "7.9.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.9.0.tgz",
"integrity": "sha512-qHMJfkL5qvgQB2aLvhUSXxbK7OLnDkwPzFalg458pxQgfxKDfT1ZDbHQM/I6mDIf/svlMkj21kzKuQ2ixJlatQ==",
"version": "7.11.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.11.0.tgz",
"integrity": "sha512-yimw99teuaXVWsBcPO1Ais02kwJ1jmNA1KxE7ng0aT7ndr1pT1wqj0OJnsYVGKKlc4QJai86l/025L6z8CljOg==",
"dev": true,
"license": "BSD-2-Clause",
"dependencies": {
"@typescript-eslint/scope-manager": "7.9.0",
"@typescript-eslint/types": "7.9.0",
"@typescript-eslint/typescript-estree": "7.9.0",
"@typescript-eslint/visitor-keys": "7.9.0",
"@typescript-eslint/scope-manager": "7.11.0",
"@typescript-eslint/types": "7.11.0",
"@typescript-eslint/typescript-estree": "7.11.0",
"@typescript-eslint/visitor-keys": "7.11.0",
"debug": "^4.3.4"
},
"engines": {
@@ -1407,14 +1407,14 @@
}
},
"node_modules/@typescript-eslint/scope-manager": {
"version": "7.9.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.9.0.tgz",
"integrity": "sha512-ZwPK4DeCDxr3GJltRz5iZejPFAAr4Wk3+2WIBaj1L5PYK5RgxExu/Y68FFVclN0y6GGwH8q+KgKRCvaTmFBbgQ==",
"version": "7.11.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.11.0.tgz",
"integrity": "sha512-27tGdVEiutD4POirLZX4YzT180vevUURJl4wJGmm6TrQoiYwuxTIY98PBp6L2oN+JQxzE0URvYlzJaBHIekXAw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/types": "7.9.0",
"@typescript-eslint/visitor-keys": "7.9.0"
"@typescript-eslint/types": "7.11.0",
"@typescript-eslint/visitor-keys": "7.11.0"
},
"engines": {
"node": "^18.18.0 || >=20.0.0"
@@ -1425,14 +1425,14 @@
}
},
"node_modules/@typescript-eslint/type-utils": {
"version": "7.9.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.9.0.tgz",
"integrity": "sha512-6Qy8dfut0PFrFRAZsGzuLoM4hre4gjzWJB6sUvdunCYZsYemTkzZNwF1rnGea326PHPT3zn5Lmg32M/xfJfByA==",
"version": "7.11.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.11.0.tgz",
"integrity": "sha512-WmppUEgYy+y1NTseNMJ6mCFxt03/7jTOy08bcg7bxJJdsM4nuhnchyBbE8vryveaJUf62noH7LodPSo5Z0WUCg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/typescript-estree": "7.9.0",
"@typescript-eslint/utils": "7.9.0",
"@typescript-eslint/typescript-estree": "7.11.0",
"@typescript-eslint/utils": "7.11.0",
"debug": "^4.3.4",
"ts-api-utils": "^1.3.0"
},
@@ -1453,9 +1453,9 @@
}
},
"node_modules/@typescript-eslint/types": {
"version": "7.9.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.9.0.tgz",
"integrity": "sha512-oZQD9HEWQanl9UfsbGVcZ2cGaR0YT5476xfWE0oE5kQa2sNK2frxOlkeacLOTh9po4AlUT5rtkGyYM5kew0z5w==",
"version": "7.11.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.11.0.tgz",
"integrity": "sha512-MPEsDRZTyCiXkD4vd3zywDCifi7tatc4K37KqTprCvaXptP7Xlpdw0NR2hRJTetG5TxbWDB79Ys4kLmHliEo/w==",
"dev": true,
"license": "MIT",
"engines": {
@@ -1467,14 +1467,14 @@
}
},
"node_modules/@typescript-eslint/typescript-estree": {
"version": "7.9.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.9.0.tgz",
"integrity": "sha512-zBCMCkrb2YjpKV3LA0ZJubtKCDxLttxfdGmwZvTqqWevUPN0FZvSI26FalGFFUZU/9YQK/A4xcQF9o/VVaCKAg==",
"version": "7.11.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.11.0.tgz",
"integrity": "sha512-cxkhZ2C/iyi3/6U9EPc5y+a6csqHItndvN/CzbNXTNrsC3/ASoYQZEt9uMaEp+xFNjasqQyszp5TumAVKKvJeQ==",
"dev": true,
"license": "BSD-2-Clause",
"dependencies": {
"@typescript-eslint/types": "7.9.0",
"@typescript-eslint/visitor-keys": "7.9.0",
"@typescript-eslint/types": "7.11.0",
"@typescript-eslint/visitor-keys": "7.11.0",
"debug": "^4.3.4",
"globby": "^11.1.0",
"is-glob": "^4.0.3",
@@ -1522,16 +1522,16 @@
}
},
"node_modules/@typescript-eslint/utils": {
"version": "7.9.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.9.0.tgz",
"integrity": "sha512-5KVRQCzZajmT4Ep+NEgjXCvjuypVvYHUW7RHlXzNPuak2oWpVoD1jf5xCP0dPAuNIchjC7uQyvbdaSTFaLqSdA==",
"version": "7.11.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.11.0.tgz",
"integrity": "sha512-xlAWwPleNRHwF37AhrZurOxA1wyXowW4PqVXZVUNCLjB48CqdPJoJWkrpH2nij9Q3Lb7rtWindtoXwxjxlKKCA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@eslint-community/eslint-utils": "^4.4.0",
"@typescript-eslint/scope-manager": "7.9.0",
"@typescript-eslint/types": "7.9.0",
"@typescript-eslint/typescript-estree": "7.9.0"
"@typescript-eslint/scope-manager": "7.11.0",
"@typescript-eslint/types": "7.11.0",
"@typescript-eslint/typescript-estree": "7.11.0"
},
"engines": {
"node": "^18.18.0 || >=20.0.0"
@@ -1545,13 +1545,13 @@
}
},
"node_modules/@typescript-eslint/visitor-keys": {
"version": "7.9.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.9.0.tgz",
"integrity": "sha512-iESPx2TNLDNGQLyjKhUvIKprlP49XNEK+MvIf9nIO7ZZaZdbnfWKHnXAgufpxqfA0YryH8XToi4+CjBgVnFTSQ==",
"version": "7.11.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.11.0.tgz",
"integrity": "sha512-7syYk4MzjxTEk0g/w3iqtgxnFQspDJfn6QKD36xMuuhTzjcxY7F8EmBLnALjVyaOF1/bVocu3bS/2/F7rXrveQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/types": "7.9.0",
"@typescript-eslint/types": "7.11.0",
"eslint-visitor-keys": "^3.4.3"
},
"engines": {
@@ -1831,13 +1831,13 @@
}
},
"node_modules/braces": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
"integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
"integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
"dev": true,
"license": "MIT",
"dependencies": {
"fill-range": "^7.0.1"
"fill-range": "^7.1.1"
},
"engines": {
"node": ">=8"
@@ -2700,10 +2700,11 @@
}
},
"node_modules/exiftool-vendored": {
"version": "26.0.0",
"resolved": "https://registry.npmjs.org/exiftool-vendored/-/exiftool-vendored-26.0.0.tgz",
"integrity": "sha512-2TRxx21ovD95VvdSzHb/sTYYcwhiizQIhhVAbrgua9KoL902QRieREGvaUtfBZNjsptdjonuyku2kUBJCPqsgw==",
"version": "26.1.0",
"resolved": "https://registry.npmjs.org/exiftool-vendored/-/exiftool-vendored-26.1.0.tgz",
"integrity": "sha512-Bhy2Ia86Agt3+PbJJhWeVMqJNXl74XJ0Oygef5F5uCL13fTxlmF8dECHiChyx8bBc3sxIw+2Q3ehWunJh3bs6w==",
"dev": true,
"license": "MIT",
"dependencies": {
"@photostructure/tz-lookup": "^10.0.0",
"@types/luxon": "^3.4.2",
@@ -2712,25 +2713,27 @@
"luxon": "^3.4.4"
},
"optionalDependencies": {
"exiftool-vendored.exe": "12.84.0",
"exiftool-vendored.pl": "12.84.0"
"exiftool-vendored.exe": "12.85.0",
"exiftool-vendored.pl": "12.85.0"
}
},
"node_modules/exiftool-vendored.exe": {
"version": "12.84.0",
"resolved": "https://registry.npmjs.org/exiftool-vendored.exe/-/exiftool-vendored.exe-12.84.0.tgz",
"integrity": "sha512-9ocqJb0Pr9k0TownEMd75payF/XOQLF/swr/l0Ep49D+m609uIZsW09CtowhXmk1KrIFobS3+SkdXK04CSyUwQ==",
"version": "12.85.0",
"resolved": "https://registry.npmjs.org/exiftool-vendored.exe/-/exiftool-vendored.exe-12.85.0.tgz",
"integrity": "sha512-rWsKVp9oXsS79S3bfCNXKeEo4av0xcd7slk/TfPpCa5pojg8ZVXSVfPZMAAlhOuK63YXrKN/e3jRNReeGP+2Gw==",
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
]
},
"node_modules/exiftool-vendored.pl": {
"version": "12.84.0",
"resolved": "https://registry.npmjs.org/exiftool-vendored.pl/-/exiftool-vendored.pl-12.84.0.tgz",
"integrity": "sha512-TxvMRaVYtd24Vupn48zy24LOYItIIWEu4dgt/VlqLwxQItTpvJTV9YH04iZRvaNh9ZdPRgVKWMuuUDBBHv+lAg==",
"version": "12.85.0",
"resolved": "https://registry.npmjs.org/exiftool-vendored.pl/-/exiftool-vendored.pl-12.85.0.tgz",
"integrity": "sha512-AelZQCCfl0a0g7PYx90TqbNGlSu2zDbRfCTjGw6bBBYnJF0NUfUWVhTpa8XGe2lHx1KYikH8AkJaey3esAxMAg==",
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"!win32"
@@ -2818,9 +2821,9 @@
}
},
"node_modules/fill-range": {
"version": "7.0.1",
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
"integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
"integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -3630,13 +3633,13 @@
}
},
"node_modules/micromatch": {
"version": "4.0.5",
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz",
"integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==",
"version": "4.0.7",
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.7.tgz",
"integrity": "sha512-LPP/3KorzCwBxfeUuZmaR6bG2kdeHSbe0P2tY3FLRU4vYrjYz5hI4QZwV0njUx3jeuKe67YukQ1LSPZBKDqO/Q==",
"dev": true,
"license": "MIT",
"dependencies": {
"braces": "^3.0.2",
"braces": "^3.0.3",
"picomatch": "^2.3.1"
},
"engines": {
@@ -47,6 +47,6 @@
"vitest": "^1.3.0"
},
"volta": {
"node": "20.13.1"
"node": "20.14.0"
}
}
@@ -2,7 +2,7 @@ import {
ActivityCreateDto,
AlbumResponseDto,
AlbumUserRole,
AssetFileUploadResponseDto,
AssetMediaResponseDto,
LoginResponseDto,
ReactionType,
createActivity as create,
@@ -17,7 +17,7 @@ import { beforeAll, beforeEach, describe, expect, it } from 'vitest';
describe('/activities', () => {
let admin: LoginResponseDto;
let nonOwner: LoginResponseDto;
let asset: AssetFileUploadResponseDto;
let asset: AssetMediaResponseDto;
let album: AlbumResponseDto;

const createActivity = (dto: ActivityCreateDto, accessToken?: string) =>

@@ -2,7 +2,7 @@ import {
addAssetsToAlbum,
AlbumResponseDto,
AlbumUserRole,
AssetFileUploadResponseDto,
AssetMediaResponseDto,
AssetOrder,
deleteUserAdmin,
getAlbumInfo,
@@ -26,8 +26,8 @@ const user2NotShared = 'user2NotShared';
describe('/albums', () => {
let admin: LoginResponseDto;
let user1: LoginResponseDto;
let user1Asset1: AssetFileUploadResponseDto;
let user1Asset2: AssetFileUploadResponseDto;
let user1Asset1: AssetMediaResponseDto;
let user1Asset2: AssetMediaResponseDto;
let user1Albums: AlbumResponseDto[];
let user2: LoginResponseDto;
let user2Albums: AlbumResponseDto[];

@@ -1,5 +1,6 @@
import {
AssetFileUploadResponseDto,
AssetMediaResponseDto,
AssetMediaStatus,
AssetResponseDto,
AssetTypeEnum,
LoginResponseDto,
@@ -67,10 +68,10 @@ describe('/asset', () => {
let statsUser: LoginResponseDto;
let stackUser: LoginResponseDto;

let user1Assets: AssetFileUploadResponseDto[];
let user2Assets: AssetFileUploadResponseDto[];
let stackAssets: AssetFileUploadResponseDto[];
let locationAsset: AssetFileUploadResponseDto;
let user1Assets: AssetMediaResponseDto[];
let user2Assets: AssetMediaResponseDto[];
let stackAssets: AssetMediaResponseDto[];
let locationAsset: AssetMediaResponseDto;

const setupTests = async () => {
await utils.resetDatabase();
@@ -121,7 +122,7 @@ describe('/asset', () => {
]);

for (const asset of [...user1Assets, ...user2Assets]) {
expect(asset.duplicate).toBe(false);
expect(asset.status).toBe(AssetMediaStatus.Created);
}

await Promise.all([
@@ -164,16 +165,34 @@ describe('/asset', () => {
utils.disconnectWebsocket(websocket);
});

describe('GET /asset/:id', () => {
describe('GET /assets/:id/original', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get(`/asset/${uuidDto.notFound}`);
const { status, body } = await request(app).get(`/assets/${uuidDto.notFound}/original`);

expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

it('should download the file', async () => {
const response = await request(app)
.get(`/assets/${user1Assets[0].id}/original`)
.set('Authorization', `Bearer ${user1.accessToken}`);

expect(response.status).toBe(200);
expect(response.headers['content-type']).toEqual('image/png');
});
});

describe('GET /assets/:id', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get(`/assets/${uuidDto.notFound}`);
expect(body).toEqual(errorDto.unauthorized);
expect(status).toBe(401);
});

it('should require a valid id', async () => {
const { status, body } = await request(app)
.get(`/asset/${uuidDto.invalid}`)
.get(`/assets/${uuidDto.invalid}`)
.set('Authorization', `Bearer ${user1.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
@@ -181,7 +200,7 @@ describe('/asset', () => {

it('should require access', async () => {
const { status, body } = await request(app)
.get(`/asset/${user2Assets[0].id}`)
.get(`/assets/${user2Assets[0].id}`)
.set('Authorization', `Bearer ${user1.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.noPermission);
@@ -189,7 +208,7 @@ describe('/asset', () => {

it('should get the asset info', async () => {
const { status, body } = await request(app)
.get(`/asset/${user1Assets[0].id}`)
.get(`/assets/${user1Assets[0].id}`)
.set('Authorization', `Bearer ${user1.accessToken}`);
expect(status).toBe(200);
expect(body).toMatchObject({ id: user1Assets[0].id });
@@ -201,14 +220,14 @@ describe('/asset', () => {
assetIds: [user1Assets[0].id],
});

const { status, body } = await request(app).get(`/asset/${user1Assets[0].id}?key=${sharedLink.key}`);
const { status, body } = await request(app).get(`/assets/${user1Assets[0].id}?key=${sharedLink.key}`);
expect(status).toBe(200);
expect(body).toMatchObject({ id: user1Assets[0].id });
});

it('should not send people data for shared links for un-authenticated users', async () => {
const { status, body } = await request(app)
.get(`/asset/${user1Assets[0].id}`)
.get(`/assets/${user1Assets[0].id}`)
.set('Authorization', `Bearer ${user1.accessToken}`);

expect(status).toEqual(200);
@@ -231,7 +250,7 @@ describe('/asset', () => {
assetIds: [user1Assets[0].id],
});

const data = await request(app).get(`/asset/${user1Assets[0].id}?key=${sharedLink.key}`);
const data = await request(app).get(`/assets/${user1Assets[0].id}?key=${sharedLink.key}`);
expect(data.status).toBe(200);
expect(data.body).toMatchObject({ people: [] });
});
@@ -239,7 +258,7 @@ describe('/asset', () => {
describe('partner assets', () => {
it('should get the asset info', async () => {
const { status, body } = await request(app)
.get(`/asset/${user1Assets[0].id}`)
.get(`/assets/${user1Assets[0].id}`)
.set('Authorization', `Bearer ${user2.accessToken}`);
expect(status).toBe(200);
expect(body).toMatchObject({ id: user1Assets[0].id });
@@ -249,7 +268,7 @@ describe('/asset', () => {
const asset = await utils.createAsset(user1.accessToken, { isArchived: true });

const { status } = await request(app)
.get(`/asset/${asset.id}`)
.get(`/assets/${asset.id}`)
.set('Authorization', `Bearer ${user2.accessToken}`);
expect(status).toBe(400);
});
@@ -259,16 +278,16 @@ describe('/asset', () => {
await utils.deleteAssets(user1.accessToken, [asset.id]);

const { status } = await request(app)
.get(`/asset/${asset.id}`)
.get(`/assets/${asset.id}`)
.set('Authorization', `Bearer ${user2.accessToken}`);
expect(status).toBe(400);
});
});
});

describe('GET /asset/statistics', () => {
describe('GET /assets/statistics', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/asset/statistics');
const { status, body } = await request(app).get('/assets/statistics');

expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
@@ -276,7 +295,7 @@ describe('/asset', () => {

it('should return stats of all assets', async () => {
const { status, body } = await request(app)
.get('/asset/statistics')
.get('/assets/statistics')
.set('Authorization', `Bearer ${statsUser.accessToken}`);

expect(body).toEqual({ images: 3, videos: 1, total: 4 });
@@ -285,7 +304,7 @@ describe('/asset', () => {

it('should return stats of all favored assets', async () => {
const { status, body } = await request(app)
.get('/asset/statistics')
.get('/assets/statistics')
.set('Authorization', `Bearer ${statsUser.accessToken}`)
.query({ isFavorite: true });

@@ -295,7 +314,7 @@ describe('/asset', () => {

it('should return stats of all archived assets', async () => {
const { status, body } = await request(app)
.get('/asset/statistics')
.get('/assets/statistics')
.set('Authorization', `Bearer ${statsUser.accessToken}`)
.query({ isArchived: true });

@@ -305,7 +324,7 @@ describe('/asset', () => {

it('should return stats of all favored and archived assets', async () => {
const { status, body } = await request(app)
.get('/asset/statistics')
.get('/assets/statistics')
.set('Authorization', `Bearer ${statsUser.accessToken}`)
.query({ isFavorite: true, isArchived: true });

@@ -315,7 +334,7 @@ describe('/asset', () => {

it('should return stats of all assets neither favored nor archived', async () => {
const { status, body } = await request(app)
.get('/asset/statistics')
.get('/assets/statistics')
.set('Authorization', `Bearer ${statsUser.accessToken}`)
.query({ isFavorite: false, isArchived: false });

@@ -324,7 +343,7 @@ describe('/asset', () => {
});
});

describe('GET /asset/random', () => {
describe('GET /assets/random', () => {
beforeAll(async () => {
await Promise.all([
utils.createAsset(user1.accessToken),
@@ -337,7 +356,7 @@ describe('/asset', () => {
});

it('should require authentication', async () => {
const { status, body } = await request(app).get('/asset/random');
const { status, body } = await request(app).get('/assets/random');

expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
@@ -345,7 +364,7 @@ describe('/asset', () => {

it.each(TEN_TIMES)('should return 1 random assets', async () => {
const { status, body } = await request(app)
.get('/asset/random')
.get('/assets/random')
.set('Authorization', `Bearer ${user1.accessToken}`);

expect(status).toBe(200);
@@ -357,7 +376,7 @@ describe('/asset', () => {

it.each(TEN_TIMES)('should return 2 random assets', async () => {
const { status, body } = await request(app)
.get('/asset/random?count=2')
.get('/assets/random?count=2')
.set('Authorization', `Bearer ${user1.accessToken}`);

expect(status).toBe(200);
@@ -374,7 +393,7 @@ describe('/asset', () => {
'should return 1 asset if there are 10 assets in the database but user 2 only has 1',
async () => {
const { status, body } = await request(app)
.get('/asset/random')
.get('/assets/random')
.set('Authorization', `Bearer ${user2.accessToken}`);

expect(status).toBe(200);
@@ -384,23 +403,23 @@ describe('/asset', () => {

it('should return error', async () => {
const { status } = await request(app)
.get('/asset/random?count=ABC')
.get('/assets/random?count=ABC')
.set('Authorization', `Bearer ${user1.accessToken}`);

expect(status).toBe(400);
});
});

describe('PUT /asset/:id', () => {
describe('PUT /assets/:id', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).put(`/asset/:${uuidDto.notFound}`);
const { status, body } = await request(app).put(`/assets/:${uuidDto.notFound}`);
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

it('should require a valid id', async () => {
const { status, body } = await request(app)
.put(`/asset/${uuidDto.invalid}`)
.put(`/assets/${uuidDto.invalid}`)
.set('Authorization', `Bearer ${user1.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
@@ -408,7 +427,7 @@ describe('/asset', () => {

it('should require access', async () => {
const { status, body } = await request(app)
.put(`/asset/${user2Assets[0].id}`)
.put(`/assets/${user2Assets[0].id}`)
.set('Authorization', `Bearer ${user1.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.noPermission);
@@ -419,7 +438,7 @@ describe('/asset', () => {
expect(before.isFavorite).toBe(false);

const { status, body } = await request(app)
.put(`/asset/${user1Assets[0].id}`)
.put(`/assets/${user1Assets[0].id}`)
.set('Authorization', `Bearer ${user1.accessToken}`)
.send({ isFavorite: true });
expect(body).toMatchObject({ id: user1Assets[0].id, isFavorite: true });
@@ -431,7 +450,7 @@ describe('/asset', () => {
expect(before.isArchived).toBe(false);

const { status, body } = await request(app)
.put(`/asset/${user1Assets[0].id}`)
.put(`/assets/${user1Assets[0].id}`)
.set('Authorization', `Bearer ${user1.accessToken}`)
.send({ isArchived: true });
expect(body).toMatchObject({ id: user1Assets[0].id, isArchived: true });
@@ -440,7 +459,7 @@ describe('/asset', () => {

it('should update date time original', async () => {
const { status, body } = await request(app)
.put(`/asset/${user1Assets[0].id}`)
.put(`/assets/${user1Assets[0].id}`)
.set('Authorization', `Bearer ${user1.accessToken}`)
.send({ dateTimeOriginal: '2023-11-19T18:11:00.000-07:00' });

@@ -467,7 +486,7 @@ describe('/asset', () => {
{ latitude: 12, longitude: 181 },
]) {
const { status, body } = await request(app)
.put(`/asset/${user1Assets[0].id}`)
.put(`/assets/${user1Assets[0].id}`)
.send(test)
.set('Authorization', `Bearer ${user1.accessToken}`);
expect(status).toBe(400);
@@ -477,7 +496,7 @@ describe('/asset', () => {

it('should update gps data', async () => {
const { status, body } = await request(app)
.put(`/asset/${user1Assets[0].id}`)
.put(`/assets/${user1Assets[0].id}`)
.set('Authorization', `Bearer ${user1.accessToken}`)
.send({ latitude: 12, longitude: 12 });

@@ -490,7 +509,7 @@ describe('/asset', () => {

it('should set the description', async () => {
const { status, body } = await request(app)
.put(`/asset/${user1Assets[0].id}`)
.put(`/assets/${user1Assets[0].id}`)
.set('Authorization', `Bearer ${user1.accessToken}`)
.send({ description: 'Test asset description' });
expect(body).toMatchObject({
@@ -504,7 +523,7 @@ describe('/asset', () => {

it('should return tagged people', async () => {
const { status, body } = await request(app)
.put(`/asset/${user1Assets[0].id}`)
.put(`/assets/${user1Assets[0].id}`)
.set('Authorization', `Bearer ${user1.accessToken}`)
.send({ isFavorite: true });
expect(status).toEqual(200);
@@ -524,10 +543,10 @@ describe('/asset', () => {
});
});

describe('DELETE /asset', () => {
describe('DELETE /assets', () => {
it('should require authentication', async () => {
const { status, body } = await request(app)
.delete(`/asset`)
.delete(`/assets`)
.send({ ids: [uuidDto.notFound] });

expect(status).toBe(401);
@@ -536,7 +555,7 @@ describe('/asset', () => {

it('should require a valid uuid', async () => {
const { status, body } = await request(app)
.delete(`/asset`)
.delete(`/assets`)
.send({ ids: [uuidDto.invalid] })
.set('Authorization', `Bearer ${admin.accessToken}`);

@@ -546,7 +565,7 @@ describe('/asset', () => {

it('should throw an error when the id is not found', async () => {
const { status, body } = await request(app)
.delete(`/asset`)
.delete(`/assets`)
.send({ ids: [uuidDto.notFound] })
.set('Authorization', `Bearer ${admin.accessToken}`);

@@ -561,7 +580,7 @@ describe('/asset', () => {
expect(before.isTrashed).toBe(false);

const { status } = await request(app)
.delete('/asset')
.delete('/assets')
.send({ ids: [assetId] })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(204);
@@ -571,9 +590,9 @@ describe('/asset', () => {
});
});

describe('GET /asset/thumbnail/:id', () => {
describe('GET /assets/:id/thumbnail', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get(`/asset/thumbnail/${locationAsset.id}`);
const { status, body } = await request(app).get(`/assets/${locationAsset.id}/thumbnail`);

expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
@@ -586,7 +605,7 @@ describe('/asset', () => {
});

const { status, body, type } = await request(app)
.get(`/asset/thumbnail/${locationAsset.id}?format=WEBP`)
.get(`/assets/${locationAsset.id}/thumbnail?format=WEBP`)
.set('Authorization', `Bearer ${admin.accessToken}`);

expect(status).toBe(200);
@@ -598,9 +617,9 @@ describe('/asset', () => {
expect(exifData).not.toHaveProperty('GPSLatitude');
});

it('should not include gps data for jpeg thumbnails', async () => {
it('should not include gps data for jpeg previews', async () => {
const { status, body, type } = await request(app)
.get(`/asset/thumbnail/${locationAsset.id}?format=JPEG`)
.get(`/assets/${locationAsset.id}/thumbnail?size=preview`)
.set('Authorization', `Bearer ${admin.accessToken}`);

expect(status).toBe(200);
@@ -613,9 +632,9 @@ describe('/asset', () => {
});
});

describe('GET /asset/file/:id', () => {
describe('GET /assets/:id/original', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get(`/asset/thumbnail/${locationAsset.id}`);
const { status, body } = await request(app).get(`/assets/${locationAsset.id}/original`);

expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
@@ -623,7 +642,7 @@ describe('/asset', () => {

it('should download the original', async () => {
const { status, body, type } = await request(app)
.get(`/asset/file/${locationAsset.id}`)
.get(`/assets/${locationAsset.id}/original`)
.set('Authorization', `Bearer ${admin.accessToken}`);

expect(status).toBe(200);
@@ -641,98 +660,9 @@ describe('/asset', () => {
});
});

describe('GET /asset/map-marker', () => {
beforeAll(async () => {
const files = [
'formats/avif/8bit-sRGB.avif',
'formats/jpg/el_torcal_rocks.jpg',
'formats/jxl/8bit-sRGB.jxl',
'formats/heic/IMG_2682.heic',
'formats/png/density_plot.png',
'formats/raw/Nikon/D80/glarus.nef',
'formats/raw/Nikon/D700/philadelphia.nef',
'formats/raw/Panasonic/DMC-GH4/4_3.rw2',
'formats/raw/Sony/ILCE-6300/12bit-compressed-(3_2).arw',
'formats/raw/Sony/ILCE-7M2/14bit-uncompressed-(3_2).arw',
];
utils.resetEvents();
const uploadFile = async (input: string) => {
const filepath = join(testAssetDir, input);
const { id } = await utils.createAsset(admin.accessToken, {
assetData: { bytes: await readFile(filepath), filename: basename(filepath) },
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id });
};
const uploads = files.map((f) => uploadFile(f));
await Promise.all(uploads);
}, 30_000);

describe('PUT /assets', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/asset/map-marker');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

// TODO archive one of these assets
it('should get map markers for all non-archived assets', async () => {
const { status, body } = await request(app)
.get('/asset/map-marker')
.query({ isArchived: false })
.set('Authorization', `Bearer ${admin.accessToken}`);

expect(status).toBe(200);
expect(body).toHaveLength(2);
expect(body).toEqual([
{
city: 'Palisade',
country: 'United States of America',
id: expect.any(String),
lat: expect.closeTo(39.115),
lon: expect.closeTo(-108.400_968),
state: 'Colorado',
},
{
city: 'Ralston',
country: 'United States of America',
id: expect.any(String),
lat: expect.closeTo(41.2203),
lon: expect.closeTo(-96.071_625),
state: 'Nebraska',
},
]);
});

// TODO archive one of these assets
it('should get all map markers', async () => {
const { status, body } = await request(app)
.get('/asset/map-marker')
.set('Authorization', `Bearer ${admin.accessToken}`);

expect(status).toBe(200);
expect(body).toEqual([
{
city: 'Palisade',
country: 'United States of America',
id: expect.any(String),
lat: expect.closeTo(39.115),
lon: expect.closeTo(-108.400_968),
state: 'Colorado',
},
{
city: 'Ralston',
country: 'United States of America',
id: expect.any(String),
lat: expect.closeTo(41.2203),
lon: expect.closeTo(-96.071_625),
state: 'Nebraska',
},
]);
});
});

describe('PUT /asset', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).put('/asset');
const { status, body } = await request(app).put('/assets');

expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
@@ -740,7 +670,7 @@ describe('/asset', () => {

it('should require a valid parent id', async () => {
const { status, body } = await request(app)
.put('/asset')
.put('/assets')
.set('Authorization', `Bearer ${user1.accessToken}`)
.send({ stackParentId: uuidDto.invalid, ids: [stackAssets[0].id] });

@@ -750,7 +680,7 @@ describe('/asset', () => {

it('should require access to the parent', async () => {
const { status, body } = await request(app)
.put('/asset')
.put('/assets')
.set('Authorization', `Bearer ${user1.accessToken}`)
.send({ stackParentId: stackAssets[3].id, ids: [user1Assets[0].id] });

@@ -760,7 +690,7 @@ describe('/asset', () => {

it('should add stack children', async () => {
const { status } = await request(app)
.put('/asset')
.put('/assets')
.set('Authorization', `Bearer ${stackUser.accessToken}`)
.send({ stackParentId: stackAssets[0].id, ids: [stackAssets[3].id] });

@@ -773,7 +703,7 @@ describe('/asset', () => {

it('should remove stack children', async () => {
const { status } = await request(app)
.put('/asset')
.put('/assets')
.set('Authorization', `Bearer ${stackUser.accessToken}`)
.send({ removeParent: true, ids: [stackAssets[1].id] });

@@ -791,7 +721,7 @@ describe('/asset', () => {

it('should remove all stack children', async () => {
const { status } = await request(app)
.put('/asset')
.put('/assets')
.set('Authorization', `Bearer ${stackUser.accessToken}`)
.send({ removeParent: true, ids: [stackAssets[2].id, stackAssets[3].id] });

@@ -809,7 +739,7 @@ describe('/asset', () => {
);

const { status } = await request(app)
.put('/asset')
.put('/assets')
.set('Authorization', `Bearer ${stackUser.accessToken}`)
.send({ stackParentId: stackAssets[3].id, ids: [stackAssets[0].id] });

@@ -827,9 +757,9 @@ describe('/asset', () => {
});
});
|
||||
describe('PUT /asset/stack/parent', () => {
|
||||
describe('PUT /assets/stack/parent', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).put('/asset/stack/parent');
|
||||
const { status, body } = await request(app).put('/assets/stack/parent');
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
@@ -837,7 +767,7 @@ describe('/asset', () => {
|
||||
|
||||
it('should require a valid id', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put('/asset/stack/parent')
|
||||
.put('/assets/stack/parent')
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`)
|
||||
.send({ oldParentId: uuidDto.invalid, newParentId: uuidDto.invalid });
|
||||
|
||||
@@ -847,7 +777,7 @@ describe('/asset', () => {
|
||||
|
||||
it('should require access', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put('/asset/stack/parent')
|
||||
.put('/assets/stack/parent')
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`)
|
||||
.send({ oldParentId: stackAssets[3].id, newParentId: stackAssets[0].id });
|
||||
|
||||
@@ -857,7 +787,7 @@ describe('/asset', () => {
|
||||
|
||||
it('should make old parent child of new parent', async () => {
|
||||
const { status } = await request(app)
|
||||
.put('/asset/stack/parent')
|
||||
.put('/assets/stack/parent')
|
||||
.set('Authorization', `Bearer ${stackUser.accessToken}`)
|
||||
.send({ oldParentId: stackAssets[3].id, newParentId: stackAssets[0].id });
|
||||
|
||||
@@ -876,11 +806,11 @@ describe('/asset', () => {
|
||||
);
|
||||
});
|
||||
});
|
||||
describe('POST /asset/upload', () => {
|
||||
describe('POST /assets', () => {
|
||||
beforeAll(setupTests, 30_000);
|
||||
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).post(`/asset/upload`);
|
||||
const { status, body } = await request(app).post(`/assets`);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
expect(status).toBe(401);
|
||||
});
|
||||
@@ -896,7 +826,7 @@ describe('/asset', () => {
|
||||
{ should: 'throw if `isArchived` is not a boolean', dto: { ...makeUploadDto(), isArchived: 'not-a-boolean' } },
|
||||
])('should $should', async ({ dto }) => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/asset/upload')
|
||||
.post('/assets')
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`)
|
||||
.attach('assetData', makeRandomImage(), 'example.png')
|
||||
.field(dto);
|
||||
@@ -1122,11 +1052,11 @@ describe('/asset', () => {
|
||||
},
|
||||
])(`should upload and generate a thumbnail for $input`, async ({ input, expected }) => {
|
||||
const filepath = join(testAssetDir, input);
|
||||
const { id, duplicate } = await utils.createAsset(admin.accessToken, {
|
||||
const { id, status } = await utils.createAsset(admin.accessToken, {
|
||||
assetData: { bytes: await readFile(filepath), filename: basename(filepath) },
|
||||
});
|
||||
|
||||
expect(duplicate).toBe(false);
|
||||
expect(status).toBe(AssetMediaStatus.Created);
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: id });
|
||||
|
||||
@@ -1139,19 +1069,19 @@ describe('/asset', () => {
|
||||
|
||||
it('should handle a duplicate', async () => {
|
||||
const filepath = 'formats/jpeg/el_torcal_rocks.jpeg';
|
||||
const { duplicate } = await utils.createAsset(admin.accessToken, {
|
||||
const { status } = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
bytes: await readFile(join(testAssetDir, filepath)),
|
||||
filename: basename(filepath),
|
||||
},
|
||||
});
|
||||
|
||||
expect(duplicate).toBe(true);
|
||||
expect(status).toBe(AssetMediaStatus.Duplicate);
|
||||
});
|
||||
|
||||
it('should update the used quota', async () => {
|
||||
const { body, status } = await request(app)
|
||||
.post('/asset/upload')
|
||||
.post('/assets')
|
||||
.set('Authorization', `Bearer ${quotaUser.accessToken}`)
|
||||
.field('deviceAssetId', 'example-image')
|
||||
.field('deviceId', 'e2e')
|
||||
@@ -1159,7 +1089,7 @@ describe('/asset', () => {
|
||||
.field('fileModifiedAt', new Date().toISOString())
|
||||
.attach('assetData', makeRandomImage(), 'example.jpg');
|
||||
|
||||
expect(body).toEqual({ id: expect.any(String), duplicate: false });
|
||||
expect(body).toEqual({ id: expect.any(String), status: AssetMediaStatus.Created });
|
||||
expect(status).toBe(201);
|
||||
|
||||
const user = await getMyUser({ headers: asBearerAuth(quotaUser.accessToken) });
|
||||
@@ -1169,7 +1099,7 @@ describe('/asset', () => {
|
||||
|
||||
it('should not upload an asset if it would exceed the quota', async () => {
|
||||
const { body, status } = await request(app)
|
||||
.post('/asset/upload')
|
||||
.post('/assets')
|
||||
.set('Authorization', `Bearer ${quotaUser.accessToken}`)
|
||||
.field('deviceAssetId', 'example-image')
|
||||
.field('deviceId', 'e2e')
|
||||
@@ -1209,7 +1139,7 @@ describe('/asset', () => {
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: response.id });
|
||||
|
||||
expect(response.duplicate).toBe(false);
|
||||
expect(response.status).toBe(AssetMediaStatus.Created);
|
||||
|
||||
const asset = await utils.getAssetInfo(admin.accessToken, response.id);
|
||||
expect(asset.livePhotoVideoId).toBeDefined();
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { AssetFileUploadResponseDto, LoginResponseDto } from '@immich/sdk';
|
||||
import { AssetMediaResponseDto, LoginResponseDto } from '@immich/sdk';
|
||||
import { readFile, writeFile } from 'node:fs/promises';
|
||||
import { errorDto } from 'src/responses';
|
||||
import { app, tempDir, utils } from 'src/utils';
|
||||
@@ -7,8 +7,8 @@ import { beforeAll, describe, expect, it } from 'vitest';
|
||||
|
||||
describe('/download', () => {
|
||||
let admin: LoginResponseDto;
|
||||
let asset1: AssetFileUploadResponseDto;
|
||||
let asset2: AssetFileUploadResponseDto;
|
||||
let asset1: AssetMediaResponseDto;
|
||||
let asset2: AssetMediaResponseDto;
|
||||
|
||||
beforeAll(async () => {
|
||||
await utils.resetDatabase();
|
||||
@@ -73,22 +73,4 @@ describe('/download', () => {
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /download/asset/:id', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).post(`/download/asset/${asset1.id}`);
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should download file', async () => {
|
||||
const response = await request(app)
|
||||
.post(`/download/asset/${asset1.id}`)
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.headers['content-type']).toEqual('image/png');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
162 e2e/src/api/specs/map.e2e-spec.ts Normal file
@@ -0,0 +1,162 @@
import { AssetMediaResponseDto, LoginResponseDto, SharedLinkType } from '@immich/sdk';
import { readFile } from 'node:fs/promises';
import { basename, join } from 'node:path';
import { Socket } from 'socket.io-client';
import { createUserDto } from 'src/fixtures';
import { errorDto } from 'src/responses';
import { app, testAssetDir, utils } from 'src/utils';
import request from 'supertest';
import { afterAll, beforeAll, describe, expect, it } from 'vitest';

describe('/map', () => {
let websocket: Socket;
let admin: LoginResponseDto;
let nonAdmin: LoginResponseDto;
let asset: AssetMediaResponseDto;

beforeAll(async () => {
await utils.resetDatabase();
admin = await utils.adminSetup({ onboarding: false });
nonAdmin = await utils.userSetup(admin.accessToken, createUserDto.user1);

websocket = await utils.connectWebsocket(admin.accessToken);

asset = await utils.createAsset(admin.accessToken);

const files = ['formats/heic/IMG_2682.heic', 'metadata/gps-position/thompson-springs.jpg'];
utils.resetEvents();
const uploadFile = async (input: string) => {
const filepath = join(testAssetDir, input);
const { id } = await utils.createAsset(admin.accessToken, {
assetData: { bytes: await readFile(filepath), filename: basename(filepath) },
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id });
};
await Promise.all(files.map((f) => uploadFile(f)));
});

afterAll(() => {
utils.disconnectWebsocket(websocket);
});

describe('GET /map/markers', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/map/markers');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

// TODO archive one of these assets
it('should get map markers for all non-archived assets', async () => {
const { status, body } = await request(app)
.get('/map/markers')
.query({ isArchived: false })
.set('Authorization', `Bearer ${admin.accessToken}`);

expect(status).toBe(200);
expect(body).toHaveLength(2);
expect(body).toEqual([
{
city: 'Palisade',
country: 'United States of America',
id: expect.any(String),
lat: expect.closeTo(39.115),
lon: expect.closeTo(-108.400_968),
state: 'Colorado',
},
{
city: 'Ralston',
country: 'United States of America',
id: expect.any(String),
lat: expect.closeTo(41.2203),
lon: expect.closeTo(-96.071_625),
state: 'Nebraska',
},
]);
});

// TODO archive one of these assets
it('should get all map markers', async () => {
const { status, body } = await request(app)
.get('/map/markers')
.set('Authorization', `Bearer ${admin.accessToken}`);

expect(status).toBe(200);
expect(body).toEqual([
{
city: 'Palisade',
country: 'United States of America',
id: expect.any(String),
lat: expect.closeTo(39.115),
lon: expect.closeTo(-108.400_968),
state: 'Colorado',
},
{
city: 'Ralston',
country: 'United States of America',
id: expect.any(String),
lat: expect.closeTo(41.2203),
lon: expect.closeTo(-96.071_625),
state: 'Nebraska',
},
]);
});
});

describe('GET /map/style.json', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/map/style.json');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

it('should allow shared link access', async () => {
const sharedLink = await utils.createSharedLink(admin.accessToken, {
type: SharedLinkType.Individual,
assetIds: [asset.id],
});
const { status, body } = await request(app).get(`/map/style.json?key=${sharedLink.key}`).query({ theme: 'dark' });

expect(status).toBe(200);
expect(body).toEqual(expect.objectContaining({ id: 'immich-map-dark' }));
});

it('should throw an error if a theme is not light or dark', async () => {
for (const theme of ['dark1', true, 123, '', null, undefined]) {
const { status, body } = await request(app)
.get('/map/style.json')
.query({ theme })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['theme must be one of the following values: light, dark']));
}
});

it('should return the light style.json', async () => {
const { status, body } = await request(app)
.get('/map/style.json')
.query({ theme: 'light' })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual(expect.objectContaining({ id: 'immich-map-light' }));
});

it('should return the dark style.json', async () => {
const { status, body } = await request(app)
.get('/map/style.json')
.query({ theme: 'dark' })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual(expect.objectContaining({ id: 'immich-map-dark' }));
});

it('should not require admin authentication', async () => {
const { status, body } = await request(app)
.get('/map/style.json')
.query({ theme: 'dark' })
.set('Authorization', `Bearer ${nonAdmin.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual(expect.objectContaining({ id: 'immich-map-dark' }));
});
});
});
@@ -1,5 +1,5 @@
import {
AssetFileUploadResponseDto,
AssetMediaResponseDto,
LoginResponseDto,
MemoryResponseDto,
MemoryType,
@@ -15,9 +15,9 @@ import { beforeAll, describe, expect, it } from 'vitest';
describe('/memories', () => {
let admin: LoginResponseDto;
let user: LoginResponseDto;
let adminAsset: AssetFileUploadResponseDto;
let userAsset1: AssetFileUploadResponseDto;
let userAsset2: AssetFileUploadResponseDto;
let adminAsset: AssetMediaResponseDto;
let userAsset1: AssetMediaResponseDto;
let userAsset2: AssetMediaResponseDto;
let userMemory: MemoryResponseDto;

beforeAll(async () => {

@@ -1,4 +1,4 @@
import { AssetFileUploadResponseDto, LoginResponseDto, deleteAssets, getMapMarkers, updateAsset } from '@immich/sdk';
import { AssetMediaResponseDto, LoginResponseDto, deleteAssets, getMapMarkers, updateAsset } from '@immich/sdk';
import { DateTime } from 'luxon';
import { readFile } from 'node:fs/promises';
import { join } from 'node:path';
@@ -13,25 +13,25 @@ describe('/search', () => {
let admin: LoginResponseDto;
let websocket: Socket;

let assetFalcon: AssetFileUploadResponseDto;
let assetDenali: AssetFileUploadResponseDto;
let assetCyclamen: AssetFileUploadResponseDto;
let assetNotocactus: AssetFileUploadResponseDto;
let assetSilver: AssetFileUploadResponseDto;
let assetDensity: AssetFileUploadResponseDto;
// let assetPhiladelphia: AssetFileUploadResponseDto;
// let assetOrychophragmus: AssetFileUploadResponseDto;
// let assetRidge: AssetFileUploadResponseDto;
// let assetPolemonium: AssetFileUploadResponseDto;
// let assetWood: AssetFileUploadResponseDto;
// let assetGlarus: AssetFileUploadResponseDto;
let assetHeic: AssetFileUploadResponseDto;
let assetRocks: AssetFileUploadResponseDto;
let assetOneJpg6: AssetFileUploadResponseDto;
let assetOneHeic6: AssetFileUploadResponseDto;
let assetOneJpg5: AssetFileUploadResponseDto;
let assetSprings: AssetFileUploadResponseDto;
let assetLast: AssetFileUploadResponseDto;
let assetFalcon: AssetMediaResponseDto;
let assetDenali: AssetMediaResponseDto;
let assetCyclamen: AssetMediaResponseDto;
let assetNotocactus: AssetMediaResponseDto;
let assetSilver: AssetMediaResponseDto;
let assetDensity: AssetMediaResponseDto;
// let assetPhiladelphia: AssetMediaResponseDto;
// let assetOrychophragmus: AssetMediaResponseDto;
// let assetRidge: AssetMediaResponseDto;
// let assetPolemonium: AssetMediaResponseDto;
// let assetWood: AssetMediaResponseDto;
// let assetGlarus: AssetMediaResponseDto;
let assetHeic: AssetMediaResponseDto;
let assetRocks: AssetMediaResponseDto;
let assetOneJpg6: AssetMediaResponseDto;
let assetOneHeic6: AssetMediaResponseDto;
let assetOneJpg5: AssetMediaResponseDto;
let assetSprings: AssetMediaResponseDto;
let assetLast: AssetMediaResponseDto;
let cities: string[];
let states: string[];
let countries: string[];
@@ -66,7 +66,7 @@ describe('/search', () => {
// last asset
{ filename: '/albums/nature/wood_anemones.jpg' },
];
const assets: AssetFileUploadResponseDto[] = [];
const assets: AssetMediaResponseDto[] = [];
for (const { filename, dto } of files) {
const bytes = await readFile(join(testAssetDir, filename));
assets.push(
@@ -134,7 +134,7 @@ describe('/search', () => {
// assetWood,
] = assets;

assetLast = assets.at(-1) as AssetFileUploadResponseDto;
assetLast = assets.at(-1) as AssetMediaResponseDto;

await deleteAssets({ assetBulkDeleteDto: { ids: [assetSilver.id] } }, { headers: asBearerAuth(admin.accessToken) });

@@ -1,6 +1,6 @@
import {
AlbumResponseDto,
AssetFileUploadResponseDto,
AssetMediaResponseDto,
LoginResponseDto,
SharedLinkResponseDto,
SharedLinkType,
@@ -15,8 +15,8 @@ import { beforeAll, describe, expect, it } from 'vitest';

describe('/shared-links', () => {
let admin: LoginResponseDto;
let asset1: AssetFileUploadResponseDto;
let asset2: AssetFileUploadResponseDto;
let asset1: AssetMediaResponseDto;
let asset2: AssetMediaResponseDto;
let user1: LoginResponseDto;
let user2: LoginResponseDto;
let album: AlbumResponseDto;

@@ -1,5 +1,4 @@
import { AssetFileUploadResponseDto, LoginResponseDto, SharedLinkType, getConfig } from '@immich/sdk';
import { createUserDto } from 'src/fixtures';
import { LoginResponseDto, getConfig } from '@immich/sdk';
import { errorDto } from 'src/responses';
import { app, asBearerAuth, utils } from 'src/utils';
import request from 'supertest';
@@ -9,74 +8,10 @@ const getSystemConfig = (accessToken: string) => getConfig({ headers: asBearerAu

describe('/system-config', () => {
let admin: LoginResponseDto;
let nonAdmin: LoginResponseDto;
let asset: AssetFileUploadResponseDto;

beforeAll(async () => {
await utils.resetDatabase();
admin = await utils.adminSetup();
nonAdmin = await utils.userSetup(admin.accessToken, createUserDto.user1);

asset = await utils.createAsset(admin.accessToken);
});

describe('GET /system-config/map/style.json', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/system-config/map/style.json');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

it('should allow shared link access', async () => {
const sharedLink = await utils.createSharedLink(admin.accessToken, {
type: SharedLinkType.Individual,
assetIds: [asset.id],
});
const { status, body } = await request(app)
.get(`/system-config/map/style.json?key=${sharedLink.key}`)
.query({ theme: 'dark' });

expect(status).toBe(200);
expect(body).toEqual(expect.objectContaining({ id: 'immich-map-dark' }));
});

it('should throw an error if a theme is not light or dark', async () => {
for (const theme of ['dark1', true, 123, '', null, undefined]) {
const { status, body } = await request(app)
.get('/system-config/map/style.json')
.query({ theme })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['theme must be one of the following values: light, dark']));
}
});

it('should return the light style.json', async () => {
const { status, body } = await request(app)
.get('/system-config/map/style.json')
.query({ theme: 'light' })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual(expect.objectContaining({ id: 'immich-map-light' }));
});

it('should return the dark style.json', async () => {
const { status, body } = await request(app)
.get('/system-config/map/style.json')
.query({ theme: 'dark' })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual(expect.objectContaining({ id: 'immich-map-dark' }));
});

it('should not require admin authentication', async () => {
const { status, body } = await request(app)
.get('/system-config/map/style.json')
.query({ theme: 'dark' })
.set('Authorization', `Bearer ${nonAdmin.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual(expect.objectContaining({ id: 'immich-map-dark' }));
});
});

describe('PUT /system-config', () => {

@@ -1,4 +1,4 @@
import { AssetFileUploadResponseDto, LoginResponseDto, SharedLinkType, TimeBucketSize } from '@immich/sdk';
import { AssetMediaResponseDto, LoginResponseDto, SharedLinkType, TimeBucketSize } from '@immich/sdk';
import { DateTime } from 'luxon';
import { createUserDto } from 'src/fixtures';
import { errorDto } from 'src/responses';
@@ -19,7 +19,7 @@ describe('/timeline', () => {
let user: LoginResponseDto;
let timeBucketUser: LoginResponseDto;

let userAssets: AssetFileUploadResponseDto[];
let userAssets: AssetMediaResponseDto[];

beforeAll(async () => {
await utils.resetDatabase();

@@ -1,4 +1,11 @@
import { LoginResponseDto, deleteUserAdmin, getMyUser, getUserAdmin, login } from '@immich/sdk';
import {
LoginResponseDto,
deleteUserAdmin,
getMyUser,
getUserAdmin,
getUserPreferencesAdmin,
login,
} from '@immich/sdk';
import { Socket } from 'socket.io-client';
import { createUserDto, uuidDto } from 'src/fixtures';
import { errorDto } from 'src/responses';
@@ -103,15 +110,7 @@ describe('/admin/users', () => {
expect(body).toEqual(errorDto.forbidden);
});

for (const key of [
'password',
'email',
'name',
'quotaSizeInBytes',
'shouldChangePassword',
'memoriesEnabled',
'notify',
]) {
for (const key of ['password', 'email', 'name', 'quotaSizeInBytes', 'shouldChangePassword', 'notify']) {
it(`should not allow null ${key}`, async () => {
const { status, body } = await request(app)
.post(`/admin/users`)
@@ -139,23 +138,6 @@ describe('/admin/users', () => {
});
expect(status).toBe(201);
});

it('should create a user without memories enabled', async () => {
const { status, body } = await request(app)
.post(`/admin/users`)
.send({
email: 'no-memories@immich.cloud',
password: 'Password123',
name: 'No Memories',
memoriesEnabled: false,
})
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(body).toMatchObject({
email: 'no-memories@immich.cloud',
memoriesEnabled: false,
});
expect(status).toBe(201);
});
});

describe('PUT /admin/users/:id', () => {
@@ -173,7 +155,7 @@ describe('/admin/users', () => {
expect(body).toEqual(errorDto.forbidden);
});

for (const key of ['password', 'email', 'name', 'shouldChangePassword', 'memoriesEnabled']) {
for (const key of ['password', 'email', 'name', 'shouldChangePassword']) {
it(`should not allow null ${key}`, async () => {
const { status, body } = await request(app)
.put(`/admin/users/${uuidDto.notFound}`)
@@ -221,22 +203,6 @@ describe('/admin/users', () => {
expect(before.updatedAt).not.toEqual(body.updatedAt);
});

it('should update memories enabled', async () => {
const before = await getUserAdmin({ id: admin.userId }, { headers: asBearerAuth(admin.accessToken) });
const { status, body } = await request(app)
.put(`/admin/users/${admin.userId}`)
.send({ memoriesEnabled: false })
.set('Authorization', `Bearer ${admin.accessToken}`);

expect(status).toBe(200);
expect(body).toMatchObject({
...before,
updatedAt: expect.anything(),
memoriesEnabled: false,
});
expect(before.updatedAt).not.toEqual(body.updatedAt);
});

it('should update password', async () => {
const { status, body } = await request(app)
.put(`/admin/users/${nonAdmin.userId}`)
@@ -254,6 +220,51 @@ describe('/admin/users', () => {
});
});

describe('PUT /admin/users/:id/preferences', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).put(`/admin/users/${userToDelete.userId}/preferences`);
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

it('should update memories enabled', async () => {
const before = await getUserPreferencesAdmin({ id: admin.userId }, { headers: asBearerAuth(admin.accessToken) });
expect(before).toMatchObject({ memories: { enabled: true } });

const { status, body } = await request(app)
.put(`/admin/users/${admin.userId}/preferences`)
.send({ memories: { enabled: false } })
.set('Authorization', `Bearer ${admin.accessToken}`);

expect(status).toBe(200);
expect(body).toMatchObject({ memories: { enabled: false } });

const after = await getUserPreferencesAdmin({ id: admin.userId }, { headers: asBearerAuth(admin.accessToken) });
expect(after).toMatchObject({ memories: { enabled: false } });
});

it('should update the avatar color', async () => {
const { status, body } = await request(app)
.put(`/admin/users/${admin.userId}/preferences`)
.send({ avatar: { color: 'orange' } })
.set('Authorization', `Bearer ${admin.accessToken}`);

expect(status).toBe(200);
expect(body).toEqual({
avatar: { color: 'orange' },
memories: { enabled: false },
emailNotifications: { enabled: true, albumInvite: true, albumUpdate: true },
});

const after = await getUserPreferencesAdmin({ id: admin.userId }, { headers: asBearerAuth(admin.accessToken) });
expect(after).toEqual({
avatar: { color: 'orange' },
memories: { enabled: false },
emailNotifications: { enabled: true, albumInvite: true, albumUpdate: true },
});
});
});

describe('DELETE /admin/users/:id', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).delete(`/admin/users/${userToDelete.userId}`);

@@ -1,4 +1,4 @@
import { LoginResponseDto, SharedLinkType, deleteUserAdmin, getMyUser, login } from '@immich/sdk';
import { LoginResponseDto, SharedLinkType, deleteUserAdmin, getMyPreferences, getMyUser, login } from '@immich/sdk';
import { createUserDto } from 'src/fixtures';
import { errorDto } from 'src/responses';
import { app, asBearerAuth, utils } from 'src/utils';
@@ -69,7 +69,6 @@ describe('/users', () => {
expect(body).toMatchObject({
id: admin.userId,
email: 'admin@immich.cloud',
memoriesEnabled: true,
quotaUsageInBytes: 0,
});
});
@@ -82,7 +81,7 @@ describe('/users', () => {
expect(body).toEqual(errorDto.unauthorized);
});

for (const key of ['email', 'name', 'memoriesEnabled', 'avatarColor']) {
for (const key of ['email', 'name']) {
it(`should not allow null ${key}`, async () => {
const dto = { [key]: null };
const { status, body } = await request(app)
@@ -110,24 +109,6 @@ describe('/users', () => {
});
});

it('should update memories enabled', async () => {
const before = await getMyUser({ headers: asBearerAuth(admin.accessToken) });
const { status, body } = await request(app)
.put(`/users/me`)
.send({ memoriesEnabled: false })
.set('Authorization', `Bearer ${admin.accessToken}`);

expect(status).toBe(200);
expect(body).toMatchObject({
...before,
updatedAt: expect.anything(),
memoriesEnabled: false,
});

const after = await getMyUser({ headers: asBearerAuth(admin.accessToken) });
expect(after.memoriesEnabled).toBe(false);
});

/** @deprecated */
it('should allow a user to change their password (deprecated)', async () => {
const user = await getMyUser({ headers: asBearerAuth(nonAdmin.accessToken) });
@@ -176,6 +157,24 @@ describe('/users', () => {
});
});

describe('PUT /users/me/preferences', () => {
it('should update memories enabled', async () => {
const before = await getMyPreferences({ headers: asBearerAuth(admin.accessToken) });
expect(before).toMatchObject({ memories: { enabled: true } });

const { status, body } = await request(app)
.put(`/users/me/preferences`)
.send({ memories: { enabled: false } })
.set('Authorization', `Bearer ${admin.accessToken}`);

expect(status).toBe(200);
expect(body).toMatchObject({ memories: { enabled: false } });

const after = await getMyPreferences({ headers: asBearerAuth(admin.accessToken) });
expect(after).toMatchObject({ memories: { enabled: false } });
});
});

describe('GET /users/:id', () => {
it('should require authentication', async () => {
const { status } = await request(app).get(`/users/${admin.userId}`);
@@ -194,7 +193,6 @@ describe('/users', () => {

expect(body).not.toMatchObject({
shouldChangePassword: expect.anything(),
memoriesEnabled: expect.anything(),
storageLabel: expect.anything(),
});
});

@@ -1,5 +1,3 @@
import { UserAvatarColor } from '@immich/sdk';

export const uuidDto = {
invalid: 'invalid-uuid',
// valid uuid v4
@@ -70,8 +68,6 @@ export const userDto = {
updatedAt: new Date('2021-01-01'),
tags: [],
assets: [],
memoriesEnabled: true,
avatarColor: UserAvatarColor.Primary,
quotaSizeInBytes: null,
quotaUsageInBytes: 0,
},
@@ -88,8 +84,6 @@ export const userDto = {
updatedAt: new Date('2021-01-01'),
tags: [],
assets: [],
memoriesEnabled: true,
avatarColor: UserAvatarColor.Primary,
quotaSizeInBytes: null,
quotaUsageInBytes: 0,
},

@@ -5,51 +5,61 @@ export const errorDto = {
error: 'Unauthorized',
statusCode: 401,
message: 'Authentication required',
correlationId: expect.any(String),
},
forbidden: {
error: 'Forbidden',
statusCode: 403,
message: expect.any(String),
correlationId: expect.any(String),
},
wrongPassword: {
error: 'Bad Request',
statusCode: 400,
message: 'Wrong password',
correlationId: expect.any(String),
},
invalidToken: {
error: 'Unauthorized',
statusCode: 401,
message: 'Invalid user token',
correlationId: expect.any(String),
},
invalidShareKey: {
error: 'Unauthorized',
statusCode: 401,
message: 'Invalid share key',
correlationId: expect.any(String),
},
invalidSharePassword: {
error: 'Unauthorized',
statusCode: 401,
message: 'Invalid password',
correlationId: expect.any(String),
},
badRequest: (message: any = null) => ({
error: 'Bad Request',
statusCode: 400,
message: message ?? expect.anything(),
correlationId: expect.any(String),
}),
noPermission: {
error: 'Bad Request',
statusCode: 400,
message: expect.stringContaining('Not found or no'),
correlationId: expect.any(String),
},
incorrectLogin: {
error: 'Unauthorized',
statusCode: 401,
message: 'Incorrect email or password',
correlationId: expect.any(String),
},
alreadyHasAdmin: {
error: 'Bad Request',
statusCode: 400,
message: 'The server already has an admin',
correlationId: expect.any(String),
},
};

@@ -68,7 +78,6 @@ export const signupResponseDto = {
updatedAt: expect.any(String),
deletedAt: null,
oauthId: '',
memoriesEnabled: true,
quotaUsageInBytes: 0,
quotaSizeInBytes: null,
status: 'active',

@@ -1,9 +1,9 @@
import {
AllJobStatusResponseDto,
AssetFileUploadResponseDto,
AssetMediaCreateDto,
AssetMediaResponseDto,
AssetResponseDto,
CreateAlbumDto,
CreateAssetDto,
CreateLibraryDto,
MetadataSearchDto,
PersonCreateDto,
@@ -292,7 +292,7 @@ export const utils = {

createAsset: async (
accessToken: string,
dto?: Partial<Omit<CreateAssetDto, 'assetData'>> & { assetData?: AssetData },
dto?: Partial<Omit<AssetMediaCreateDto, 'assetData'>> & { assetData?: AssetData },
) => {
const _dto = {
deviceAssetId: 'test-1',
@@ -310,7 +310,7 @@ export const utils = {
}

const builder = request(app)
.post(`/asset/upload`)
.post(`/assets`)
.attach('assetData', assetData, filename)
.set('Authorization', `Bearer ${accessToken}`);

@@ -320,7 +320,41 @@ export const utils = {

const { body } = await builder;

return body as AssetFileUploadResponseDto;
return body as AssetMediaResponseDto;
},

replaceAsset: async (
accessToken: string,
assetId: string,
dto?: Partial<Omit<AssetMediaCreateDto, 'assetData'>> & { assetData?: AssetData },
) => {
const _dto = {
deviceAssetId: 'test-1',
deviceId: 'test',
fileCreatedAt: new Date().toISOString(),
fileModifiedAt: new Date().toISOString(),
...dto,
};

const assetData = dto?.assetData?.bytes || makeRandomImage();
const filename = dto?.assetData?.filename || 'example.png';

if (dto?.assetData?.bytes) {
console.log(`Uploading ${filename}`);
}

const builder = request(app)
.put(`/assets/${assetId}/original`)
.attach('assetData', assetData, filename)
.set('Authorization', `Bearer ${accessToken}`);

for (const [key, value] of Object.entries(_dto)) {
void builder.field(key, String(value));
}

const { body } = await builder;

return body as AssetMediaResponseDto;
},

createImageFile: (path: string) => {

@@ -1,10 +1,10 @@
import { AssetFileUploadResponseDto, LoginResponseDto, SharedLinkType } from '@immich/sdk';
import { AssetMediaResponseDto, LoginResponseDto, SharedLinkType } from '@immich/sdk';
import { expect, test } from '@playwright/test';
import { utils } from 'src/utils';

test.describe('Detail Panel', () => {
let admin: LoginResponseDto;
let asset: AssetFileUploadResponseDto;
let asset: AssetMediaResponseDto;

test.beforeAll(async () => {
utils.initSdk();

@@ -1,10 +1,10 @@
import { AssetFileUploadResponseDto, LoginResponseDto, SharedLinkType } from '@immich/sdk';
import { AssetMediaResponseDto, LoginResponseDto, SharedLinkType } from '@immich/sdk';
import { expect, test } from '@playwright/test';
import { utils } from 'src/utils';

test.describe('Asset Viewer Navbar', () => {
let admin: LoginResponseDto;
let asset: AssetFileUploadResponseDto;
let asset: AssetMediaResponseDto;

test.beforeAll(async () => {
utils.initSdk();
57 e2e/src/web/specs/photo-viewer.e2e-spec.ts Normal file
@@ -0,0 +1,57 @@
import { AssetMediaResponseDto, LoginResponseDto } from '@immich/sdk';
import { Page, expect, test } from '@playwright/test';
import { utils } from 'src/utils';

function imageLocator(page: Page) {
return page.getByAltText('Image taken on').locator('visible=true');
}
test.describe('Photo Viewer', () => {
let admin: LoginResponseDto;
let asset: AssetMediaResponseDto;

test.beforeAll(async () => {
utils.initSdk();
await utils.resetDatabase();
admin = await utils.adminSetup();
asset = await utils.createAsset(admin.accessToken);
});

test.beforeEach(async ({ context, page }) => {
// before each test, login as user
await utils.setAuthCookies(context, admin.accessToken);
await page.goto('/photos');
await page.waitForLoadState('networkidle');
});

test('initially shows a loading spinner', async ({ page }) => {
await page.route(`/api/assets/${asset.id}/thumbnail**`, async (route) => {
// slow down the request for the thumbnail, so the spinner has a chance to show up
await new Promise((f) => setTimeout(f, 2000));
await route.continue();
});
await page.goto(`/photos/${asset.id}`);
await page.waitForLoadState('load');
// this is the spinner
await page.waitForSelector('svg[role=status]');
await expect(page.getByRole('status')).toBeVisible();
});

test('loads high resolution photo when zoomed', async ({ page }) => {
await page.goto(`/photos/${asset.id}`);
await expect.poll(async () => await imageLocator(page).getAttribute('src')).toContain('thumbnail');
const box = await imageLocator(page).boundingBox();
expect(box).toBeTruthy();
const { x, y, width, height } = box!;
await page.mouse.move(x + width / 2, y + height / 2);
await page.mouse.wheel(0, -1);
await expect.poll(async () => await imageLocator(page).getAttribute('src')).toContain('original');
});

test('reloads photo when checksum changes', async ({ page }) => {
await page.goto(`/photos/${asset.id}`);
await expect.poll(async () => await imageLocator(page).getAttribute('src')).toContain('thumbnail');
const initialSrc = await imageLocator(page).getAttribute('src');
await utils.replaceAsset(admin.accessToken, asset.id);
await expect.poll(async () => await imageLocator(page).getAttribute('src')).not.toBe(initialSrc);
});
});
@@ -1,6 +1,6 @@
import {
AlbumResponseDto,
AssetFileUploadResponseDto,
AssetMediaResponseDto,
LoginResponseDto,
SharedLinkResponseDto,
SharedLinkType,
@@ -11,7 +11,7 @@ import { asBearerAuth, utils } from 'src/utils';

test.describe('Shared Links', () => {
let admin: LoginResponseDto;
let asset: AssetFileUploadResponseDto;
let asset: AssetMediaResponseDto;
let album: AlbumResponseDto;
let sharedLink: SharedLinkResponseDto;
let sharedLinkPassword: SharedLinkResponseDto;

@@ -12,8 +12,6 @@ from rich.logging import RichHandler
from uvicorn import Server
from uvicorn.workers import UvicornWorker

from .schemas import ModelType

class PreloadModelData(BaseModel):
clip: str | None
@@ -21,7 +19,7 @@ class PreloadModelData(BaseModel):

class Settings(BaseSettings):
cache_folder: str = "/cache"
cache_folder: Path = Path("/cache")
model_ttl: int = 300
model_ttl_poll_s: int = 10
host: str = "0.0.0.0"
@@ -55,14 +53,6 @@ def clean_name(model_name: str) -> str:
return model_name.split("/")[-1].translate(_clean_name)

def get_cache_dir(model_name: str, model_type: ModelType) -> Path:
return Path(settings.cache_folder) / model_type.value / clean_name(model_name)

def get_hf_model_name(model_name: str) -> str:
return f"immich-app/{clean_name(model_name)}"

LOG_LEVELS: dict[str, int] = {
"critical": logging.ERROR,
"error": logging.ERROR,

@@ -6,22 +6,34 @@ import threading
import time
from concurrent.futures import ThreadPoolExecutor
from contextlib import asynccontextmanager
from functools import partial
from typing import Any, AsyncGenerator, Callable, Iterator
from zipfile import BadZipFile

import orjson
from fastapi import Depends, FastAPI, Form, HTTPException, UploadFile
from fastapi import Depends, FastAPI, File, Form, HTTPException
from fastapi.responses import ORJSONResponse
from onnxruntime.capi.onnxruntime_pybind11_state import InvalidProtobuf, NoSuchFile
from PIL.Image import Image
from pydantic import ValidationError
from starlette.formparsers import MultiPartParser

from app.models import get_model_deps
from app.models.base import InferenceModel
from app.models.transforms import decode_pil

from .config import PreloadModelData, log, settings
from .models.cache import ModelCache
from .schemas import (
InferenceEntries,
InferenceEntry,
InferenceResponse,
MessageResponse,
ModelIdentity,
ModelTask,
ModelType,
PipelineRequest,
T,
TextResponse,
)

@@ -63,12 +75,21 @@ async def lifespan(_: FastAPI) -> AsyncGenerator[None, None]:
gc.collect()

async def preload_models(preload_models: PreloadModelData) -> None:
log.info(f"Preloading models: {preload_models}")
if preload_models.clip is not None:
await load(await model_cache.get(preload_models.clip, ModelType.CLIP))
if preload_models.facial_recognition is not None:
await load(await model_cache.get(preload_models.facial_recognition, ModelType.FACIAL_RECOGNITION))
async def preload_models(preload: PreloadModelData) -> None:
log.info(f"Preloading models: {preload}")
if preload.clip is not None:
model = await model_cache.get(preload.clip, ModelType.TEXTUAL, ModelTask.SEARCH)
await load(model)

model = await model_cache.get(preload.clip, ModelType.VISUAL, ModelTask.SEARCH)
await load(model)

if preload.facial_recognition is not None:
model = await model_cache.get(preload.facial_recognition, ModelType.DETECTION, ModelTask.FACIAL_RECOGNITION)
await load(model)

model = await model_cache.get(preload.facial_recognition, ModelType.RECOGNITION, ModelTask.FACIAL_RECOGNITION)
await load(model)

def update_state() -> Iterator[None]:
@@ -81,6 +102,27 @@ def update_state() -> Iterator[None]:
active_requests -= 1

def get_entries(entries: str = Form()) -> InferenceEntries:
try:
request: PipelineRequest = orjson.loads(entries)
without_deps: list[InferenceEntry] = []
with_deps: list[InferenceEntry] = []
for task, types in request.items():
for type, entry in types.items():
parsed: InferenceEntry = {
"name": entry["modelName"],
"task": task,
"type": type,
"options": entry.get("options", {}),
}
dep = get_model_deps(parsed["name"], type, task)
(with_deps if dep else without_deps).append(parsed)
return without_deps, with_deps
except (orjson.JSONDecodeError, ValidationError, KeyError, AttributeError) as e:
log.error(f"Invalid request format: {e}")
raise HTTPException(422, "Invalid request format.")
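
For readers tracing the new multipart API: a minimal sketch of the `entries` form field this parser accepts, inferred from the loop above. The task/type key strings and the model name below are illustrative assumptions, not taken from this diff:

import orjson

# Top-level keys are tasks; nested keys are model types; each entry names a
# model and may carry per-request options.
entries = orjson.dumps(
    {
        "facial-recognition": {
            "detection": {"modelName": "buffalo_l", "options": {"minScore": 0.7}},
            "recognition": {"modelName": "buffalo_l"},
        }
    }
).decode()
# get_entries() splits these into (without_deps, with_deps); recognition would
# land in with_deps if its model class declares a dependency on detection.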

app = FastAPI(lifespan=lifespan)

@@ -96,42 +138,63 @@ def ping() -> str:

@app.post("/predict", dependencies=[Depends(update_state)])
async def predict(
model_name: str = Form(alias="modelName"),
model_type: ModelType = Form(alias="modelType"),
options: str = Form(default="{}"),
entries: InferenceEntries = Depends(get_entries),
image: bytes | None = File(default=None),
text: str | None = Form(default=None),
image: UploadFile | None = None,
) -> Any:
if image is not None:
inputs: str | bytes = await image.read()
inputs: Image | str = await run(lambda: decode_pil(image))
elif text is not None:
inputs = text
else:
raise HTTPException(400, "Either image or text must be provided")
try:
kwargs = orjson.loads(options)
except orjson.JSONDecodeError:
raise HTTPException(400, f"Invalid options JSON: {options}")

model = await load(await model_cache.get(model_name, model_type, ttl=settings.model_ttl, **kwargs))
model.configure(**kwargs)
outputs = await run(model.predict, inputs)
return ORJSONResponse(outputs)
response = await run_inference(inputs, entries)
return ORJSONResponse(response)

async def run(func: Callable[..., Any], inputs: Any) -> Any:
async def run_inference(payload: Image | str, entries: InferenceEntries) -> InferenceResponse:
outputs: dict[ModelIdentity, Any] = {}
response: InferenceResponse = {}

async def _run_inference(entry: InferenceEntry) -> None:
model = await model_cache.get(entry["name"], entry["type"], entry["task"], ttl=settings.model_ttl)
inputs = [payload]
for dep in model.depends:
try:
inputs.append(outputs[dep])
except KeyError:
message = f"Task {entry['task']} of type {entry['type']} depends on output of {dep}"
raise HTTPException(400, message)
model = await load(model)
output = await run(model.predict, *inputs, **entry["options"])
outputs[model.identity] = output
response[entry["task"]] = output

without_deps, with_deps = entries
await asyncio.gather(*[_run_inference(entry) for entry in without_deps])
if with_deps:
await asyncio.gather(*[_run_inference(entry) for entry in with_deps])
if isinstance(payload, Image):
response["imageHeight"], response["imageWidth"] = payload.height, payload.width

return response
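
The two-phase gather above is what enforces ordering: entries without dependencies run first and populate `outputs`, and dependent entries then consume those results by ModelIdentity. The response carries one key per requested task, plus image dimensions when the payload was an image. A sketch of the resulting shape, with hypothetical values:

response = {
    "facial-recognition": [...],  # whatever the final model for that task returned
    "imageHeight": 1080,  # appended only for image payloads
    "imageWidth": 1920,
}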

async def run(func: Callable[..., T], *args: Any, **kwargs: Any) -> T:
if thread_pool is None:
return func(inputs)
return await asyncio.get_running_loop().run_in_executor(thread_pool, func, inputs)
return func(*args, **kwargs)
partial_func = partial(func, *args, **kwargs)
return await asyncio.get_running_loop().run_in_executor(thread_pool, partial_func)
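
The switch to functools.partial here is what lets keyword arguments cross the executor boundary, since loop.run_in_executor() only forwards positional arguments. A self-contained sketch of the same pattern:

import asyncio
from concurrent.futures import ThreadPoolExecutor
from functools import partial

def work(x: int, *, scale: int = 1) -> int:
    return x * scale

async def main() -> None:
    pool = ThreadPoolExecutor(max_workers=1)
    loop = asyncio.get_running_loop()
    # loop.run_in_executor(pool, work, 2, scale=3) would raise a TypeError,
    # so the keyword argument is bound into a partial first.
    result = await loop.run_in_executor(pool, partial(work, 2, scale=3))
    print(result)  # 6

asyncio.run(main())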

async def load(model: InferenceModel) -> InferenceModel:
if model.loaded:
return model

def _load(model: InferenceModel) -> None:
def _load(model: InferenceModel) -> InferenceModel:
with lock:
model.load()
return model

try:
await run(_load, model)

@@ -1,24 +1,40 @@
from typing import Any

from app.schemas import ModelType
from app.models.base import InferenceModel
from app.models.clip.textual import MClipTextualEncoder, OpenClipTextualEncoder
from app.models.clip.visual import OpenClipVisualEncoder
from app.schemas import ModelSource, ModelTask, ModelType

from .base import InferenceModel
from .clip import MCLIPEncoder, OpenCLIPEncoder
from .constants import is_insightface, is_mclip, is_openclip
from .facial_recognition import FaceRecognizer
from .constants import get_model_source
from .facial_recognition.detection import FaceDetector
from .facial_recognition.recognition import FaceRecognizer

def from_model_type(model_type: ModelType, model_name: str, **model_kwargs: Any) -> InferenceModel:
match model_type:
case ModelType.CLIP:
if is_openclip(model_name):
return OpenCLIPEncoder(model_name, **model_kwargs)
elif is_mclip(model_name):
return MCLIPEncoder(model_name, **model_kwargs)
case ModelType.FACIAL_RECOGNITION:
if is_insightface(model_name):
return FaceRecognizer(model_name, **model_kwargs)
def get_model_class(model_name: str, model_type: ModelType, model_task: ModelTask) -> type[InferenceModel]:
source = get_model_source(model_name)
match source, model_type, model_task:
case ModelSource.OPENCLIP | ModelSource.MCLIP, ModelType.VISUAL, ModelTask.SEARCH:
return OpenClipVisualEncoder

case ModelSource.OPENCLIP, ModelType.TEXTUAL, ModelTask.SEARCH:
return OpenClipTextualEncoder

case ModelSource.MCLIP, ModelType.TEXTUAL, ModelTask.SEARCH:
return MClipTextualEncoder

case ModelSource.INSIGHTFACE, ModelType.DETECTION, ModelTask.FACIAL_RECOGNITION:
return FaceDetector

case ModelSource.INSIGHTFACE, ModelType.RECOGNITION, ModelTask.FACIAL_RECOGNITION:
return FaceRecognizer

case _:
raise ValueError(f"Unknown model type {model_type}")
raise ValueError(f"Unknown model combination: {source}, {model_type}, {model_task}")

raise ValueError(f"Unknown {model_type} model {model_name}")

def from_model_type(model_name: str, model_type: ModelType, model_task: ModelTask, **kwargs: Any) -> InferenceModel:
return get_model_class(model_name, model_type, model_task)(model_name, **kwargs)

def get_model_deps(model_name: str, model_type: ModelType, model_task: ModelTask) -> list[tuple[ModelType, ModelTask]]:
return get_model_class(model_name, model_type, model_task).depends
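
A quick illustration of how the reworked factory resolves classes; the model name is a plausible example, and its mapping to ModelSource.OPENCLIP is an assumption:

from app.models import from_model_type, get_model_deps
from app.schemas import ModelTask, ModelType

encoder = from_model_type("ViT-B-32__openai", ModelType.VISUAL, ModelTask.SEARCH)
# -> an OpenClipVisualEncoder instance

deps = get_model_deps("ViT-B-32__openai", ModelType.TEXTUAL, ModelTask.SEARCH)
# -> the class-level `depends` list; presumably empty for encoders with no upstream model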

@@ -3,7 +3,7 @@ from __future__ import annotations
from abc import ABC, abstractmethod
from pathlib import Path
from shutil import rmtree
from typing import Any
from typing import Any, ClassVar

import onnxruntime as ort
from huggingface_hub import snapshot_download
@@ -11,13 +11,14 @@ from huggingface_hub import snapshot_download
import ann.ann
from app.models.constants import SUPPORTED_PROVIDERS

from ..config import get_cache_dir, get_hf_model_name, log, settings
from ..schemas import ModelRuntime, ModelType
from ..config import clean_name, log, settings
from ..schemas import ModelFormat, ModelIdentity, ModelSession, ModelTask, ModelType
from .ann import AnnSession

class InferenceModel(ABC):
_model_type: ModelType
depends: ClassVar[list[ModelIdentity]]
identity: ClassVar[ModelIdentity]

def __init__(
self,
@@ -26,16 +27,16 @@ class InferenceModel(ABC):
providers: list[str] | None = None,
provider_options: list[dict[str, Any]] | None = None,
sess_options: ort.SessionOptions | None = None,
preferred_runtime: ModelRuntime | None = None,
preferred_format: ModelFormat | None = None,
**model_kwargs: Any,
) -> None:
self.loaded = False
self.model_name = model_name
self.model_name = clean_name(model_name)
self.cache_dir = Path(cache_dir) if cache_dir is not None else self.cache_dir_default
self.providers = providers if providers is not None else self.providers_default
self.provider_options = provider_options if provider_options is not None else self.provider_options_default
self.sess_options = sess_options if sess_options is not None else self.sess_options_default
self.preferred_runtime = preferred_runtime if preferred_runtime is not None else self.preferred_runtime_default
self.preferred_format = preferred_format if preferred_format is not None else self.preferred_format_default

def download(self) -> None:
if not self.cached:
@@ -47,35 +48,36 @@ class InferenceModel(ABC):
def load(self) -> None:
if self.loaded:
return

self.download()
log.info(f"Loading {self.model_type.replace('-', ' ')} model '{self.model_name}' to memory")
self._load()
self.session = self._load()
self.loaded = True

def predict(self, inputs: Any, **model_kwargs: Any) -> Any:
def predict(self, *inputs: Any, **model_kwargs: Any) -> Any:
self.load()
if model_kwargs:
self.configure(**model_kwargs)
return self._predict(inputs)
return self._predict(*inputs, **model_kwargs)

@abstractmethod
def _predict(self, inputs: Any) -> Any: ...
def _predict(self, *inputs: Any, **model_kwargs: Any) -> Any: ...

def configure(self, **model_kwargs: Any) -> None:
def configure(self, **kwargs: Any) -> None:
pass

def _download(self) -> None:
ignore_patterns = [] if self.preferred_runtime == ModelRuntime.ARMNN else ["*.armnn"]
ignore_patterns = [] if self.preferred_format == ModelFormat.ARMNN else ["*.armnn"]
snapshot_download(
get_hf_model_name(self.model_name),
f"immich-app/{clean_name(self.model_name)}",
cache_dir=self.cache_dir,
local_dir=self.cache_dir,
local_dir_use_symlinks=False,
ignore_patterns=ignore_patterns,
)

@abstractmethod
def _load(self) -> None: ...
def _load(self) -> ModelSession:
return self._make_session(self.model_path)

def clear_cache(self) -> None:
if not self.cache_dir.exists():
@@ -99,7 +101,7 @@ class InferenceModel(ABC):
self.cache_dir.unlink()
self.cache_dir.mkdir(parents=True, exist_ok=True)

def _make_session(self, model_path: Path) -> AnnSession | ort.InferenceSession:
def _make_session(self, model_path: Path) -> ModelSession:
if not model_path.is_file():
onnx_path = model_path.with_suffix(".onnx")
if not onnx_path.is_file():
@@ -124,9 +126,21 @@ class InferenceModel(ABC):
raise ValueError(f"Unsupported model file type: {model_path.suffix}")
return session

@property
def model_dir(self) -> Path:
return self.cache_dir / self.model_type.value

@property
def model_path(self) -> Path:
return self.model_dir / f"model.{self.preferred_format}"

@property
def model_task(self) -> ModelTask:
return self.identity[1]

@property
def model_type(self) -> ModelType:
return self._model_type
return self.identity[0]

@property
def cache_dir(self) -> Path:
@@ -138,11 +152,11 @@ class InferenceModel(ABC):

@property
def cache_dir_default(self) -> Path:
return get_cache_dir(self.model_name, self.model_type)
return settings.cache_folder / self.model_task.value / self.model_name

@property
def cached(self) -> bool:
return self.cache_dir.is_dir() and any(self.cache_dir.iterdir())
return self.model_path.is_file()
|
||||
|
||||
@property
|
||||
def providers(self) -> list[str]:
|
||||
@@ -226,14 +240,14 @@ class InferenceModel(ABC):
|
||||
return sess_options
|
||||
|
||||
@property
|
||||
def preferred_runtime(self) -> ModelRuntime:
|
||||
return self._preferred_runtime
|
||||
def preferred_format(self) -> ModelFormat:
|
||||
return self._preferred_format
|
||||
|
||||
@preferred_runtime.setter
|
||||
def preferred_runtime(self, preferred_runtime: ModelRuntime) -> None:
|
||||
log.debug(f"Setting preferred runtime to {preferred_runtime}")
|
||||
self._preferred_runtime = preferred_runtime
|
||||
@preferred_format.setter
|
||||
def preferred_format(self, preferred_format: ModelFormat) -> None:
|
||||
log.debug(f"Setting preferred format to {preferred_format}")
|
||||
self._preferred_format = preferred_format
|
||||
|
||||
@property
|
||||
def preferred_runtime_default(self) -> ModelRuntime:
|
||||
return ModelRuntime.ARMNN if ann.ann.is_available and settings.ann else ModelRuntime.ONNX
|
||||
def preferred_format_default(self) -> ModelFormat:
|
||||
return ModelFormat.ARMNN if ann.ann.is_available and settings.ann else ModelFormat.ONNX
|
||||
|
||||
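The refactor reduces per-model loading code to a small contract: a subclass declares its identity and depends class variables, returns a ModelSession from _load(), and reads positional inputs in _predict(). A sketch of a hypothetical subclass under those assumptions (not a class from this PR):

from typing import Any

from app.models.base import InferenceModel
from app.schemas import ModelSession, ModelTask, ModelType


class DummyVisualEncoder(InferenceModel):
    depends = []  # consumes no other model's output
    identity = (ModelType.VISUAL, ModelTask.SEARCH)

    def _load(self) -> ModelSession:
        # The base class default already does exactly this; spelled out for clarity.
        return self._make_session(self.model_path)

    def _predict(self, inputs: Any, **kwargs: Any) -> Any:
        # self.session is assigned by load() from the value returned by _load().
        return self.session.run(None, {"image": inputs})[0]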
@@ -5,9 +5,9 @@ from aiocache.lock import OptimisticLock
from aiocache.plugins import TimingPlugin

from app.models import from_model_type
from app.models.base import InferenceModel

from ..schemas import ModelType, has_profiling
from .base import InferenceModel
from ..schemas import ModelTask, ModelType, has_profiling


class ModelCache:
@@ -31,28 +31,21 @@ class ModelCache:
        if profiling:
            plugins.append(TimingPlugin())

        self.revalidate_enable = revalidate
        self.should_revalidate = revalidate

        self.cache = SimpleMemoryCache(timeout=timeout, plugins=plugins, namespace=None)

    async def get(self, model_name: str, model_type: ModelType, **model_kwargs: Any) -> InferenceModel:
        """
        Args:
            model_name: Name of model in the model hub used for the task.
            model_type: Model type or task, which determines which model zoo is used.

        Returns:
            model: The requested model.
        """

        key = f"{model_name}{model_type.value}{model_kwargs.get('mode', '')}"
    async def get(
        self, model_name: str, model_type: ModelType, model_task: ModelTask, **model_kwargs: Any
    ) -> InferenceModel:
        key = f"{model_name}{model_type}{model_task}"

        async with OptimisticLock(self.cache, key) as lock:
            model: InferenceModel | None = await self.cache.get(key)
            if model is None:
                model = from_model_type(model_type, model_name, **model_kwargs)
                model = from_model_type(model_name, model_type, model_task, **model_kwargs)
                await lock.cas(model, ttl=model_kwargs.get("ttl", None))
            elif self.revalidate_enable:
            elif self.should_revalidate:
                await self.revalidate(key, model_kwargs.get("ttl", None))
        return model
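A hedged usage sketch of the reworked cache: the key now spans name, type, and task, so the textual and visual halves of one CLIP model are cached independently. The constructor arguments shown are assumptions beyond what this hunk exposes:

import asyncio

from app.models.cache import ModelCache
from app.schemas import ModelTask, ModelType


async def main() -> None:
    cache = ModelCache(revalidate=True)
    # The first call builds the model under an optimistic lock; later calls hit the cache.
    model = await cache.get("ViT-B-32__openai", ModelType.TEXTUAL, ModelTask.SEARCH, ttl=300)
    print(model.predict("sunset over the ocean"))


asyncio.run(main())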
@@ -1,189 +0,0 @@
import json
from abc import abstractmethod
from functools import cached_property
from io import BytesIO
from pathlib import Path
from typing import Any, Literal

import numpy as np
from numpy.typing import NDArray
from PIL import Image
from tokenizers import Encoding, Tokenizer

from app.config import clean_name, log
from app.models.transforms import crop, get_pil_resampling, normalize, resize, to_numpy
from app.schemas import ModelType

from .base import InferenceModel


class BaseCLIPEncoder(InferenceModel):
    _model_type = ModelType.CLIP

    def __init__(
        self,
        model_name: str,
        cache_dir: Path | str | None = None,
        mode: Literal["text", "vision"] | None = None,
        **model_kwargs: Any,
    ) -> None:
        self.mode = mode
        super().__init__(model_name, cache_dir, **model_kwargs)

    def _load(self) -> None:
        if self.mode == "text" or self.mode is None:
            log.debug(f"Loading clip text model '{self.model_name}'")
            self.text_model = self._make_session(self.textual_path)
            log.debug(f"Loaded clip text model '{self.model_name}'")

        if self.mode == "vision" or self.mode is None:
            log.debug(f"Loading clip vision model '{self.model_name}'")
            self.vision_model = self._make_session(self.visual_path)
            log.debug(f"Loaded clip vision model '{self.model_name}'")

    def _predict(self, image_or_text: Image.Image | str) -> NDArray[np.float32]:
        if isinstance(image_or_text, bytes):
            image_or_text = Image.open(BytesIO(image_or_text))

        match image_or_text:
            case Image.Image():
                if self.mode == "text":
                    raise TypeError("Cannot encode image as text-only model")
                outputs: NDArray[np.float32] = self.vision_model.run(None, self.transform(image_or_text))[0][0]
            case str():
                if self.mode == "vision":
                    raise TypeError("Cannot encode text as vision-only model")
                outputs = self.text_model.run(None, self.tokenize(image_or_text))[0][0]
            case _:
                raise TypeError(f"Expected Image or str, but got: {type(image_or_text)}")

        return outputs

    @abstractmethod
    def tokenize(self, text: str) -> dict[str, NDArray[np.int32]]:
        pass

    @abstractmethod
    def transform(self, image: Image.Image) -> dict[str, NDArray[np.float32]]:
        pass

    @property
    def textual_dir(self) -> Path:
        return self.cache_dir / "textual"

    @property
    def visual_dir(self) -> Path:
        return self.cache_dir / "visual"

    @property
    def model_cfg_path(self) -> Path:
        return self.cache_dir / "config.json"

    @property
    def textual_path(self) -> Path:
        return self.textual_dir / f"model.{self.preferred_runtime}"

    @property
    def visual_path(self) -> Path:
        return self.visual_dir / f"model.{self.preferred_runtime}"

    @property
    def tokenizer_file_path(self) -> Path:
        return self.textual_dir / "tokenizer.json"

    @property
    def tokenizer_cfg_path(self) -> Path:
        return self.textual_dir / "tokenizer_config.json"

    @property
    def preprocess_cfg_path(self) -> Path:
        return self.visual_dir / "preprocess_cfg.json"

    @property
    def cached(self) -> bool:
        return self.textual_path.is_file() and self.visual_path.is_file()

    @cached_property
    def model_cfg(self) -> dict[str, Any]:
        log.debug(f"Loading model config for CLIP model '{self.model_name}'")
        model_cfg: dict[str, Any] = json.load(self.model_cfg_path.open())
        log.debug(f"Loaded model config for CLIP model '{self.model_name}'")
        return model_cfg

    @cached_property
    def tokenizer_file(self) -> dict[str, Any]:
        log.debug(f"Loading tokenizer file for CLIP model '{self.model_name}'")
        tokenizer_file: dict[str, Any] = json.load(self.tokenizer_file_path.open())
        log.debug(f"Loaded tokenizer file for CLIP model '{self.model_name}'")
        return tokenizer_file

    @cached_property
    def tokenizer_cfg(self) -> dict[str, Any]:
        log.debug(f"Loading tokenizer config for CLIP model '{self.model_name}'")
        tokenizer_cfg: dict[str, Any] = json.load(self.tokenizer_cfg_path.open())
        log.debug(f"Loaded tokenizer config for CLIP model '{self.model_name}'")
        return tokenizer_cfg

    @cached_property
    def preprocess_cfg(self) -> dict[str, Any]:
        log.debug(f"Loading visual preprocessing config for CLIP model '{self.model_name}'")
        preprocess_cfg: dict[str, Any] = json.load(self.preprocess_cfg_path.open())
        log.debug(f"Loaded visual preprocessing config for CLIP model '{self.model_name}'")
        return preprocess_cfg


class OpenCLIPEncoder(BaseCLIPEncoder):
    def __init__(
        self,
        model_name: str,
        cache_dir: Path | str | None = None,
        mode: Literal["text", "vision"] | None = None,
        **model_kwargs: Any,
    ) -> None:
        super().__init__(clean_name(model_name), cache_dir, mode, **model_kwargs)

    def _load(self) -> None:
        super()._load()
        self._load_tokenizer()

        size: list[int] | int = self.preprocess_cfg["size"]
        self.size = size[0] if isinstance(size, list) else size

        self.resampling = get_pil_resampling(self.preprocess_cfg["interpolation"])
        self.mean = np.array(self.preprocess_cfg["mean"], dtype=np.float32)
        self.std = np.array(self.preprocess_cfg["std"], dtype=np.float32)

    def _load_tokenizer(self) -> Tokenizer:
        log.debug(f"Loading tokenizer for CLIP model '{self.model_name}'")

        text_cfg: dict[str, Any] = self.model_cfg["text_cfg"]
        context_length: int = text_cfg.get("context_length", 77)
        pad_token: str = self.tokenizer_cfg["pad_token"]

        self.tokenizer: Tokenizer = Tokenizer.from_file(self.tokenizer_file_path.as_posix())

        pad_id: int = self.tokenizer.token_to_id(pad_token)
        self.tokenizer.enable_padding(length=context_length, pad_token=pad_token, pad_id=pad_id)
        self.tokenizer.enable_truncation(max_length=context_length)

        log.debug(f"Loaded tokenizer for CLIP model '{self.model_name}'")

    def tokenize(self, text: str) -> dict[str, NDArray[np.int32]]:
        tokens: Encoding = self.tokenizer.encode(text)
        return {"text": np.array([tokens.ids], dtype=np.int32)}

    def transform(self, image: Image.Image) -> dict[str, NDArray[np.float32]]:
        image = resize(image, self.size)
        image = crop(image, self.size)
        image_np = to_numpy(image)
        image_np = normalize(image_np, self.mean, self.std)
        return {"image": np.expand_dims(image_np.transpose(2, 0, 1), 0)}


class MCLIPEncoder(OpenCLIPEncoder):
    def tokenize(self, text: str) -> dict[str, NDArray[np.int32]]:
        tokens: Encoding = self.tokenizer.encode(text)
        return {
            "input_ids": np.array([tokens.ids], dtype=np.int32),
            "attention_mask": np.array([tokens.attention_mask], dtype=np.int32),
        }
98
machine-learning/app/models/clip/textual.py
Normal file
@@ -0,0 +1,98 @@
import json
from abc import abstractmethod
from functools import cached_property
from pathlib import Path
from typing import Any

import numpy as np
from numpy.typing import NDArray
from tokenizers import Encoding, Tokenizer

from app.config import log
from app.models.base import InferenceModel
from app.schemas import ModelSession, ModelTask, ModelType


class BaseCLIPTextualEncoder(InferenceModel):
    depends = []
    identity = (ModelType.TEXTUAL, ModelTask.SEARCH)

    def _predict(self, inputs: str, **kwargs: Any) -> NDArray[np.float32]:
        res: NDArray[np.float32] = self.session.run(None, self.tokenize(inputs))[0][0]
        return res

    def _load(self) -> ModelSession:
        log.debug(f"Loading tokenizer for CLIP model '{self.model_name}'")
        self.tokenizer = self._load_tokenizer()
        log.debug(f"Loaded tokenizer for CLIP model '{self.model_name}'")

        return super()._load()

    @abstractmethod
    def _load_tokenizer(self) -> Tokenizer:
        pass

    @abstractmethod
    def tokenize(self, text: str) -> dict[str, NDArray[np.int32]]:
        pass

    @property
    def model_cfg_path(self) -> Path:
        return self.cache_dir / "config.json"

    @property
    def tokenizer_file_path(self) -> Path:
        return self.model_dir / "tokenizer.json"

    @property
    def tokenizer_cfg_path(self) -> Path:
        return self.model_dir / "tokenizer_config.json"

    @cached_property
    def model_cfg(self) -> dict[str, Any]:
        log.debug(f"Loading model config for CLIP model '{self.model_name}'")
        model_cfg: dict[str, Any] = json.load(self.model_cfg_path.open())
        log.debug(f"Loaded model config for CLIP model '{self.model_name}'")
        return model_cfg

    @cached_property
    def tokenizer_file(self) -> dict[str, Any]:
        log.debug(f"Loading tokenizer file for CLIP model '{self.model_name}'")
        tokenizer_file: dict[str, Any] = json.load(self.tokenizer_file_path.open())
        log.debug(f"Loaded tokenizer file for CLIP model '{self.model_name}'")
        return tokenizer_file

    @cached_property
    def tokenizer_cfg(self) -> dict[str, Any]:
        log.debug(f"Loading tokenizer config for CLIP model '{self.model_name}'")
        tokenizer_cfg: dict[str, Any] = json.load(self.tokenizer_cfg_path.open())
        log.debug(f"Loaded tokenizer config for CLIP model '{self.model_name}'")
        return tokenizer_cfg


class OpenClipTextualEncoder(BaseCLIPTextualEncoder):
    def _load_tokenizer(self) -> Tokenizer:
        text_cfg: dict[str, Any] = self.model_cfg["text_cfg"]
        context_length: int = text_cfg.get("context_length", 77)
        pad_token: str = self.tokenizer_cfg["pad_token"]

        tokenizer: Tokenizer = Tokenizer.from_file(self.tokenizer_file_path.as_posix())

        pad_id: int = tokenizer.token_to_id(pad_token)
        tokenizer.enable_padding(length=context_length, pad_token=pad_token, pad_id=pad_id)
        tokenizer.enable_truncation(max_length=context_length)

        return tokenizer

    def tokenize(self, text: str) -> dict[str, NDArray[np.int32]]:
        tokens: Encoding = self.tokenizer.encode(text)
        return {"text": np.array([tokens.ids], dtype=np.int32)}


class MClipTextualEncoder(OpenClipTextualEncoder):
    def tokenize(self, text: str) -> dict[str, NDArray[np.int32]]:
        tokens: Encoding = self.tokenizer.encode(text)
        return {
            "input_ids": np.array([tokens.ids], dtype=np.int32),
            "attention_mask": np.array([tokens.attention_mask], dtype=np.int32),
        }
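The padding/truncation setup in _load_tokenizer can be reproduced standalone with the tokenizers library; the hub id and pad token below are assumptions for illustration, not values read from the model's tokenizer_config.json:

import numpy as np
from tokenizers import Tokenizer

tokenizer = Tokenizer.from_pretrained("openai/clip-vit-base-patch32")  # assumed hub id
pad_token = "<|endoftext|>"  # assumed pad token; the encoder reads the real one from tokenizer_config.json
tokenizer.enable_padding(length=77, pad_token=pad_token, pad_id=tokenizer.token_to_id(pad_token))
tokenizer.enable_truncation(max_length=77)

tokens = tokenizer.encode("a photo of a cat")
print(np.array([tokens.ids], dtype=np.int32).shape)  # (1, 77) regardless of input length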
69
machine-learning/app/models/clip/visual.py
Normal file
@@ -0,0 +1,69 @@
import json
from abc import abstractmethod
from functools import cached_property
from pathlib import Path
from typing import Any

import numpy as np
from numpy.typing import NDArray
from PIL import Image

from app.config import log
from app.models.base import InferenceModel
from app.models.transforms import crop_pil, decode_pil, get_pil_resampling, normalize, resize_pil, to_numpy
from app.schemas import ModelSession, ModelTask, ModelType


class BaseCLIPVisualEncoder(InferenceModel):
    depends = []
    identity = (ModelType.VISUAL, ModelTask.SEARCH)

    def _predict(self, inputs: Image.Image | bytes, **kwargs: Any) -> NDArray[np.float32]:
        image = decode_pil(inputs)
        res: NDArray[np.float32] = self.session.run(None, self.transform(image))[0][0]
        return res

    @abstractmethod
    def transform(self, image: Image.Image) -> dict[str, NDArray[np.float32]]:
        pass

    @property
    def model_cfg_path(self) -> Path:
        return self.cache_dir / "config.json"

    @property
    def preprocess_cfg_path(self) -> Path:
        return self.model_dir / "preprocess_cfg.json"

    @cached_property
    def model_cfg(self) -> dict[str, Any]:
        log.debug(f"Loading model config for CLIP model '{self.model_name}'")
        model_cfg: dict[str, Any] = json.load(self.model_cfg_path.open())
        log.debug(f"Loaded model config for CLIP model '{self.model_name}'")
        return model_cfg

    @cached_property
    def preprocess_cfg(self) -> dict[str, Any]:
        log.debug(f"Loading visual preprocessing config for CLIP model '{self.model_name}'")
        preprocess_cfg: dict[str, Any] = json.load(self.preprocess_cfg_path.open())
        log.debug(f"Loaded visual preprocessing config for CLIP model '{self.model_name}'")
        return preprocess_cfg


class OpenClipVisualEncoder(BaseCLIPVisualEncoder):
    def _load(self) -> ModelSession:
        size: list[int] | int = self.preprocess_cfg["size"]
        self.size = size[0] if isinstance(size, list) else size

        self.resampling = get_pil_resampling(self.preprocess_cfg["interpolation"])
        self.mean = np.array(self.preprocess_cfg["mean"], dtype=np.float32)
        self.std = np.array(self.preprocess_cfg["std"], dtype=np.float32)

        return super()._load()

    def transform(self, image: Image.Image) -> dict[str, NDArray[np.float32]]:
        image = resize_pil(image, self.size)
        image = crop_pil(image, self.size)
        image_np = to_numpy(image)
        image_np = normalize(image_np, self.mean, self.std)
        return {"image": np.expand_dims(image_np.transpose(2, 0, 1), 0)}
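For reference, the transform above can be written with plain PIL and numpy to make the output layout explicit. The size, mean, and std below are the common CLIP defaults and are assumptions here; the encoder reads the real values from preprocess_cfg.json:

import numpy as np
from PIL import Image

size = 224
mean = np.array([0.48145466, 0.4578275, 0.40821073], dtype=np.float32)
std = np.array([0.26862954, 0.26130258, 0.27577711], dtype=np.float32)

image = Image.new("RGB", (640, 480))  # stand-in input
scale = size / min(image.size)  # resize so the shorter side equals `size`
image = image.resize((round(image.width * scale), round(image.height * scale)), Image.Resampling.BICUBIC)
left, top = (image.width - size) // 2, (image.height - size) // 2
image = image.crop((left, top, left + size, top + size))  # center crop

image_np = (np.asarray(image, dtype=np.float32) / 255.0 - mean) / std
print(np.expand_dims(image_np.transpose(2, 0, 1), 0).shape)  # (1, 3, 224, 224), NCHW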
@@ -1,4 +1,5 @@
from app.config import clean_name
from app.schemas import ModelSource

_OPENCLIP_MODELS = {
    "RN50__openai",
@@ -54,13 +55,16 @@ _INSIGHTFACE_MODELS = {
SUPPORTED_PROVIDERS = ["CUDAExecutionProvider", "OpenVINOExecutionProvider", "CPUExecutionProvider"]


def is_openclip(model_name: str) -> bool:
    return clean_name(model_name) in _OPENCLIP_MODELS
def get_model_source(model_name: str) -> ModelSource | None:
    cleaned_name = clean_name(model_name)

    if cleaned_name in _INSIGHTFACE_MODELS:
        return ModelSource.INSIGHTFACE

def is_mclip(model_name: str) -> bool:
    return clean_name(model_name) in _MCLIP_MODELS
    if cleaned_name in _MCLIP_MODELS:
        return ModelSource.MCLIP

    if cleaned_name in _OPENCLIP_MODELS:
        return ModelSource.OPENCLIP

def is_insightface(model_name: str) -> bool:
    return clean_name(model_name) in _INSIGHTFACE_MODELS
    return None
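A quick sanity check of the lookup order (InsightFace, then M-CLIP, then OpenCLIP), assuming "RN50__openai" remains in the OpenCLIP list shown above:

from app.models.constants import get_model_source
from app.schemas import ModelSource

assert get_model_source("RN50__openai") is ModelSource.OPENCLIP
assert get_model_source("definitely-not-a-model") is None  # unknown names fall through every list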
@@ -1,90 +0,0 @@
from pathlib import Path
from typing import Any

import cv2
import numpy as np
from insightface.model_zoo import ArcFaceONNX, RetinaFace
from insightface.utils.face_align import norm_crop
from numpy.typing import NDArray

from app.config import clean_name
from app.schemas import Face, ModelType, is_ndarray

from .base import InferenceModel


class FaceRecognizer(InferenceModel):
    _model_type = ModelType.FACIAL_RECOGNITION

    def __init__(
        self,
        model_name: str,
        min_score: float = 0.7,
        cache_dir: Path | str | None = None,
        **model_kwargs: Any,
    ) -> None:
        self.min_score = model_kwargs.pop("minScore", min_score)
        super().__init__(clean_name(model_name), cache_dir, **model_kwargs)

    def _load(self) -> None:
        self.det_model = RetinaFace(session=self._make_session(self.det_file))
        self.rec_model = ArcFaceONNX(
            self.rec_file.with_suffix(".onnx").as_posix(),
            session=self._make_session(self.rec_file),
        )

        self.det_model.prepare(
            ctx_id=0,
            det_thresh=self.min_score,
            input_size=(640, 640),
        )
        self.rec_model.prepare(ctx_id=0)

    def _predict(self, image: NDArray[np.uint8] | bytes) -> list[Face]:
        if isinstance(image, bytes):
            decoded_image = cv2.imdecode(np.frombuffer(image, np.uint8), cv2.IMREAD_COLOR)
        else:
            decoded_image = image
        assert is_ndarray(decoded_image, np.uint8)
        bboxes, kpss = self.det_model.detect(decoded_image)
        if bboxes.size == 0:
            return []
        assert is_ndarray(kpss, np.float32)

        scores = bboxes[:, 4].tolist()
        bboxes = bboxes[:, :4].round().tolist()

        results = []
        height, width, _ = decoded_image.shape
        for (x1, y1, x2, y2), score, kps in zip(bboxes, scores, kpss):
            cropped_img = norm_crop(decoded_image, kps)
            embedding: NDArray[np.float32] = self.rec_model.get_feat(cropped_img)[0]
            face: Face = {
                "imageWidth": width,
                "imageHeight": height,
                "boundingBox": {
                    "x1": x1,
                    "y1": y1,
                    "x2": x2,
                    "y2": y2,
                },
                "score": score,
                "embedding": embedding,
            }
            results.append(face)
        return results

    @property
    def cached(self) -> bool:
        return self.det_file.is_file() and self.rec_file.is_file()

    @property
    def det_file(self) -> Path:
        return self.cache_dir / "detection" / f"model.{self.preferred_runtime}"

    @property
    def rec_file(self) -> Path:
        return self.cache_dir / "recognition" / f"model.{self.preferred_runtime}"

    def configure(self, **model_kwargs: Any) -> None:
        self.det_model.det_thresh = model_kwargs.pop("minScore", self.det_model.det_thresh)
43
machine-learning/app/models/facial_recognition/detection.py
Normal file
@@ -0,0 +1,43 @@
from typing import Any

import numpy as np
import onnxruntime as ort
from numpy.typing import NDArray

from app.models.base import InferenceModel
from app.models.session import ort_has_batch_dim, ort_expand_outputs
from app.models.transforms import decode_pil
from app.schemas import FaceDetectionOutput, ModelSession, ModelTask, ModelType
from .scrfd import SCRFD
from PIL import Image
from PIL.ImageOps import pad


class FaceDetector(InferenceModel):
    depends = []
    identity = (ModelType.DETECTION, ModelTask.FACIAL_RECOGNITION)

    def _load(self) -> ModelSession:
        session = self._make_session(self.model_path)
        if isinstance(session, ort.InferenceSession) and not ort_has_batch_dim(session):
            ort_expand_outputs(session)
        self.model = SCRFD(session=session)

        return session

    def _predict(self, inputs: NDArray[np.uint8] | bytes | Image.Image, **kwargs: Any) -> FaceDetectionOutput:
        inputs = self._transform(inputs)

        [bboxes], [landmarks] = self.model.detect(inputs, threshold=kwargs.pop("minScore", 0.7))
        return {
            "boxes": bboxes[:, :4].round(),
            "scores": bboxes[:, 4],
            "landmarks": landmarks,
        }

    def _detect(self, inputs: NDArray[np.uint8] | bytes) -> tuple[NDArray[np.float32], NDArray[np.float32]]:
        return self.model.detect(inputs)  # type: ignore

    def _transform(self, inputs: NDArray[np.uint8] | bytes | Image.Image) -> NDArray[np.uint8]:
        image = decode_pil(inputs)
        padded = pad(image, (640, 640), method=Image.Resampling.BICUBIC)
        return np.array(padded, dtype=np.uint8)[None, ...]
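A shape check for the _transform step above: ImageOps.pad letterboxes to 640x640 while preserving aspect ratio, and the trailing [None, ...] prepends the batch axis SCRFD expects:

import numpy as np
from PIL import Image
from PIL.ImageOps import pad

image = Image.new("RGB", (1280, 720))  # stand-in input
padded = pad(image, (640, 640), method=Image.Resampling.BICUBIC)
batch = np.array(padded, dtype=np.uint8)[None, ...]
print(padded.size, batch.shape)  # (640, 640) (1, 640, 640, 3)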
64
machine-learning/app/models/facial_recognition/recognition.py
Normal file
@@ -0,0 +1,64 @@
from pathlib import Path
from typing import Any

import numpy as np
import onnxruntime as ort
from insightface.model_zoo import ArcFaceONNX
from insightface.utils.face_align import norm_crop
from numpy.typing import NDArray
from PIL import Image

from app.config import clean_name, log
from app.models.base import InferenceModel
from app.models.session import ort_add_batch_dim, ort_has_batch_dim
from app.models.transforms import decode_cv2
from app.schemas import FaceDetectionOutput, FacialRecognitionOutput, ModelSession, ModelTask, ModelType


class FaceRecognizer(InferenceModel):
    depends = [(ModelType.DETECTION, ModelTask.FACIAL_RECOGNITION)]
    identity = (ModelType.RECOGNITION, ModelTask.FACIAL_RECOGNITION)

    def __init__(
        self,
        model_name: str,
        min_score: float = 0.7,
        cache_dir: Path | str | None = None,
        **model_kwargs: Any,
    ) -> None:
        self.min_score = model_kwargs.pop("minScore", min_score)
        super().__init__(clean_name(model_name), cache_dir, **model_kwargs)

    def _load(self) -> ModelSession:
        session = self._make_session(self.model_path)
        if isinstance(session, ort.InferenceSession) and not ort_has_batch_dim(session):
            log.info(f"Adding batch dimension to recognition model {self.model_name}")
            ort_add_batch_dim(self.model_path, self.model_path)
            session = self._make_session(self.model_path)
        self.model = ArcFaceONNX(
            self.model_path.with_suffix(".onnx").as_posix(),
            session=session,
        )
        return session

    def _predict(
        self, inputs: NDArray[np.uint8] | bytes | Image.Image, faces: FaceDetectionOutput, **kwargs: Any
    ) -> FacialRecognitionOutput:
        if faces["boxes"].shape[0] == 0:
            return []
        inputs = decode_cv2(inputs)
        embeddings: NDArray[np.float32] = self.model.get_feat(self._crop(inputs, faces))
        return self.postprocess(faces, embeddings)

    def postprocess(self, faces: FaceDetectionOutput, embeddings: NDArray[np.float32]) -> FacialRecognitionOutput:
        return [
            {
                "boundingBox": {"x1": x1, "y1": y1, "x2": x2, "y2": y2},
                "embedding": embedding,
                "score": score,
            }
            for (x1, y1, x2, y2), embedding, score in zip(faces["boxes"], embeddings, faces["scores"])
        ]

    def _crop(self, image: NDArray[np.uint8], faces: FaceDetectionOutput) -> list[NDArray[np.uint8]]:
        return [norm_crop(image, landmark) for landmark in faces["landmarks"]]
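The depends declaration above encodes the two-stage pipeline: the detector's output is passed to the recognizer as a second positional input. A hedged end-to-end sketch (the model name and file path are illustrative):

from app.models import from_model_type
from app.schemas import ModelTask, ModelType

detector = from_model_type("buffalo_l", ModelType.DETECTION, ModelTask.FACIAL_RECOGNITION)
recognizer = from_model_type("buffalo_l", ModelType.RECOGNITION, ModelTask.FACIAL_RECOGNITION)

with open("photo.jpg", "rb") as f:  # assumed input image
    image_bytes = f.read()

faces = detector.predict(image_bytes)             # {"boxes": ..., "scores": ..., "landmarks": ...}
results = recognizer.predict(image_bytes, faces)  # [] when no faces were detected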
325
machine-learning/app/models/facial_recognition/scrfd.py
Normal file
@@ -0,0 +1,325 @@
# Based on InsightFace-REST by SthPhoenix https://github.com/SthPhoenix/InsightFace-REST/blob/master/src/api_trt/modules/model_zoo/detectors/scrfd.py
# Primary changes made:
# 1. Removed CuPy-related code
# 2. Adapted proposal generation to be thread-safe
# 3. Added typing
# 4. Assume RGB input
# 5. Removed unused variables

# Copyright 2021 SthPhoenix

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

# http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# -*- coding: utf-8 -*-
# Based on Jia Guo reference implementation at
# https://github.com/deepinsight/insightface/blob/master/detection/scrfd/tools/scrfd.py


from __future__ import division

import cv2
import numpy as np
from numba import njit
from app.schemas import ModelSession
from numpy.typing import NDArray


@njit(cache=True, nogil=True)
def nms(dets, threshold: float = 0.4) -> NDArray[np.float32]:
    x1 = dets[:, 0]
    y1 = dets[:, 1]
    x2 = dets[:, 2]
    y2 = dets[:, 3]
    scores = dets[:, 4]

    areas = (x2 - x1 + 1) * (y2 - y1 + 1)
    order = scores.argsort()[::-1]

    keep = []
    while order.size > 0:
        i = order[0]
        keep.append(i)
        xx1 = np.maximum(x1[i], x1[order[1:]])
        yy1 = np.maximum(y1[i], y1[order[1:]])
        xx2 = np.minimum(x2[i], x2[order[1:]])
        yy2 = np.minimum(y2[i], y2[order[1:]])

        w = np.maximum(0.0, xx2 - xx1 + 1)
        h = np.maximum(0.0, yy2 - yy1 + 1)
        inter = w * h
        ovr = inter / (areas[i] + areas[order[1:]] - inter)

        inds = np.where(ovr <= threshold)[0]
        order = order[inds + 1]

    return np.asarray(keep)
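A worked example of the NMS above: two heavily overlapping detections and one disjoint detection. The overlapping pair has an IoU of roughly 0.82, so the lower-scored box is suppressed at the default 0.4 threshold:

import numpy as np

dets = np.array(
    [
        [0, 0, 100, 100, 0.9],      # kept: highest score
        [5, 5, 105, 105, 0.8],      # suppressed: IoU with the first box is ~0.82 > 0.4
        [200, 200, 300, 300, 0.7],  # kept: no overlap with the first box
    ],
    dtype=np.float32,
)
print(nms(dets, threshold=0.4))  # [0 2]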
@njit(fastmath=True, cache=True, nogil=True)
def single_distance2bbox(point: NDArray[np.float32], distance: NDArray[np.float32], stride: int) -> NDArray[np.float32]:
    """
    Fast conversion of single bbox distances to coordinates

    :param point: Anchor point
    :param distance: Bbox distances from anchor point
    :param stride: Current stride scale
    :return: bbox
    """
    distance[0] = point[0] - distance[0] * stride
    distance[1] = point[1] - distance[1] * stride
    distance[2] = point[0] + distance[2] * stride
    distance[3] = point[1] + distance[3] * stride
    return distance


@njit(fastmath=True, cache=True, nogil=True)
def single_distance2kps(point: NDArray[np.float32], distance: NDArray[np.float32], stride: int) -> NDArray[np.float32]:
    """
    Fast conversion of single keypoint distances to coordinates

    :param point: Anchor point
    :param distance: Keypoint distances from anchor point
    :param stride: Current stride scale
    :return: keypoint
    """
    for ix in range(0, distance.shape[0], 2):
        distance[ix] = distance[ix] * stride + point[0]
        distance[ix + 1] = distance[ix + 1] * stride + point[1]
    return distance


@njit(fastmath=True, cache=True, nogil=True)
def generate_proposals(
    score_blob: NDArray[np.float32],
    bbox_blob: NDArray[np.float32],
    kpss_blob: NDArray[np.float32],
    stride: int,
    anchors: NDArray[np.float32],
    threshold: float,
) -> tuple[NDArray[np.float32], NDArray[np.float32], NDArray[np.float32]]:
    """
    Convert distances from anchors to actual coordinates on source image
    and filter proposals by confidence threshold.

    :param score_blob: Raw scores for stride
    :param bbox_blob: Raw bbox distances for stride
    :param kpss_blob: Raw keypoints distances for stride
    :param stride: Stride scale
    :param anchors: Precomputed anchors for stride
    :param threshold: Confidence threshold
    :return: Filtered scores, bboxes and keypoints
    """

    idxs = []
    for ix in range(score_blob.shape[0]):
        if score_blob[ix][0] > threshold:
            idxs.append(ix)

    score_out = np.empty((len(idxs), 1), dtype="float32")
    bbox_out = np.empty((len(idxs), 4), dtype="float32")
    kpss_out = np.empty((len(idxs), 10), dtype="float32")

    for i in range(len(idxs)):
        ix = idxs[i]
        score_out[i] = score_blob[ix]
        bbox_out[i] = single_distance2bbox(anchors[ix], bbox_blob[ix], stride)
        kpss_out[i] = single_distance2kps(anchors[ix], kpss_blob[ix], stride)

    return score_out, bbox_out, kpss_out


@njit(fastmath=True, cache=True, nogil=True)
def filter(
    bboxes_list: NDArray[np.float32],
    kpss_list: NDArray[np.float32],
    scores_list: NDArray[np.float32],
    nms_threshold: float = 0.4,
) -> tuple[NDArray[np.float32], NDArray[np.float32]]:
    """
    Filter postprocessed network outputs with NMS

    :param bboxes_list: List of bboxes (np.ndarray)
    :param kpss_list: List of keypoints (np.ndarray)
    :param scores_list: List of scores (np.ndarray)
    :return: Face bboxes with scores [t,l,b,r,score], and key points
    """

    pre_det = np.hstack((bboxes_list, scores_list))
    keep = nms(pre_det, threshold=nms_threshold)
    det = pre_det[keep, :]
    kpss = kpss_list[keep, :]
    kpss = kpss.reshape((kpss.shape[0], -1, 2))

    return det, kpss


class SCRFD:
    def __init__(self, session: ModelSession):
        self.session = session
        self.center_cache: dict[tuple[int, int], NDArray[np.float32]] = {}
        self.nms_threshold = 0.4
        self.fmc = 3
        self._feat_stride_fpn = [8, 16, 32]
        self._num_anchors = 2

    def prepare(self, nms_threshold: float = 0.4) -> None:
        """
        Populate class parameters

        :param nms_threshold: Threshold for NMS IoU
        """

        self.nms_threshold = nms_threshold

    def detect(
        self, imgs: NDArray[np.uint8], threshold: float = 0.5
    ) -> tuple[list[NDArray[np.float32]], list[NDArray[np.float32]]]:
        """
        Run detection pipeline for provided images

        :param imgs: Raw images as np.ndarray with NHWC shape
        :param threshold: Confidence threshold
        :return: Face bboxes with scores [t,l,b,r,score], and key points
        """

        height, width = imgs.shape[1:3]
        blob = self._preprocess(imgs)
        net_outs = self._forward(blob)

        batch_bboxes, batch_kpss, batch_scores = self._postprocess(net_outs, height, width, threshold)

        dets_list = []
        kpss_list = []
        for e in range(imgs.shape[0]):
            if len(batch_bboxes[e]) == 0:
                det, kpss = np.zeros((0, 5), dtype="float32"), np.zeros((0, 10), dtype="float32")
            else:
                det, kpss = filter(batch_bboxes[e], batch_kpss[e], batch_scores[e], self.nms_threshold)

            dets_list.append(det)
            kpss_list.append(kpss)

        return dets_list, kpss_list

    @staticmethod
    def _build_anchors(
        input_height: int, input_width: int, strides: list[int], num_anchors: int
    ) -> NDArray[np.float32]:
        """
        Precompute anchor points for provided image size

        :param input_height: Input image height
        :param input_width: Input image width
        :param strides: Model strides
        :param num_anchors: Model num anchors
        :return: box centers
        """

        centers = []
        for stride in strides:
            height = input_height // stride
            width = input_width // stride

            anchor_centers = np.stack(np.mgrid[:height, :width][::-1], axis=-1).astype(np.float32)
            anchor_centers = (anchor_centers * stride).reshape((-1, 2))
            if num_anchors > 1:
                anchor_centers = np.stack([anchor_centers] * num_anchors, axis=1).reshape((-1, 2))
            centers.append(anchor_centers)
        return centers

    def _preprocess(self, images: NDArray[np.uint8]) -> NDArray[np.float32]:
        """
        Normalize images on the CPU and assemble them into an NCHW blob
        (the CuPy GPU path from the original implementation was removed)

        :param images: Raw images as np.ndarray with NHWC shape
        :return: Preprocessed blob
        """

        input_size = tuple(images[0].shape[0:2][::-1])
        return cv2.dnn.blobFromImages(images, 1.0 / 128, input_size, (127.5, 127.5, 127.5), swapRB=False)

    def _forward(self, blob: NDArray[np.float32]) -> list[NDArray[np.float32]]:
        """
        Send input data to inference backend.

        :param blob: Preprocessed images of shape NCHW
        :return: network outputs
        """

        return self.session.run(None, {"input.1": blob})

    def _postprocess(
        self, net_outs: list[NDArray[np.float32]], height: int, width: int, threshold: float
    ) -> tuple[list[NDArray[np.float32]], list[NDArray[np.float32]], list[NDArray[np.float32]]]:
        """
        Precompute anchor points for provided image size and process network outputs

        :param net_outs: Network outputs
        :param height: Input image height
        :param width: Input image width
        :param threshold: Confidence threshold
        :return: filtered bboxes, keypoints and scores
        """

        key = (height, width)

        if not self.center_cache.get(key):
            self.center_cache[key] = self._build_anchors(height, width, self._feat_stride_fpn, self._num_anchors)
        anchor_centers = self.center_cache[key]
        bboxes, kpss, scores = self._process_strides(net_outs, threshold, anchor_centers)
        return bboxes, kpss, scores

    def _process_strides(
        self, net_outs: list[NDArray[np.float32]], threshold: float, anchors: NDArray[np.float32]
    ) -> tuple[list[NDArray[np.float32]], list[NDArray[np.float32]], list[NDArray[np.float32]]]:
        """
        Process network outputs by strides and return proposals filtered by threshold

        :param net_outs: Network outputs
        :param threshold: Confidence threshold
        :param anchors: Precomputed anchor centers for all strides
        :return: filtered bboxes, keypoints and scores
        """

        batch_size = net_outs[0].shape[0]
        bboxes_by_img = []
        kpss_by_img = []
        scores_by_img = []

        for batch in range(batch_size):
            scores_strided = []
            bboxes_strided = []
            kpss_strided = []
            for idx, stride in enumerate(self._feat_stride_fpn):
                score_blob = net_outs[idx][batch]
                bbox_blob = net_outs[idx + self.fmc][batch]
                kpss_blob = net_outs[idx + self.fmc * 2][batch]
                stride_anchors = anchors[idx]
                score_list, bbox_list, kpss_list = generate_proposals(
                    score_blob,
                    bbox_blob,
                    kpss_blob,
                    stride,
                    stride_anchors,
                    threshold,
                )

                scores_strided.append(score_list)
                bboxes_strided.append(bbox_list)
                kpss_strided.append(kpss_list)
            bboxes_by_img.append(np.concatenate(bboxes_strided, axis=0))
            kpss_by_img.append(np.concatenate(kpss_strided, axis=0))
            scores_by_img.append(np.concatenate(scores_strided, axis=0))

        return bboxes_by_img, kpss_by_img, scores_by_img
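To make the anchor layout concrete: for a 640x640 input, the stride-8 feature map is 80x80, and with num_anchors=2 each cell contributes two identical centers, i.e. 12800 anchors at that stride. Given the class above:

centers = SCRFD._build_anchors(640, 640, strides=[8, 16, 32], num_anchors=2)
print([c.shape for c in centers])  # [(12800, 2), (3200, 2), (800, 2)]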
34
machine-learning/app/models/session.py
Normal file
@@ -0,0 +1,34 @@
from pathlib import Path

import numpy as np
import onnx
import onnxruntime as ort
from numpy.typing import NDArray
from onnx.shape_inference import infer_shapes
from onnx.tools.update_model_dims import update_inputs_outputs_dims


def ort_has_batch_dim(session: ort.InferenceSession) -> bool:
    return session.get_inputs()[0].shape[0] == "batch"


def ort_expand_outputs(session: ort.InferenceSession) -> None:
    original_run = session.run

    def run(output_names: list[str], input_feed: dict[str, NDArray[np.float32]]) -> list[NDArray[np.float32]]:
        out: list[NDArray[np.float32]] = original_run(output_names, input_feed)
        out = [np.expand_dims(o, axis=0) for o in out]
        return out

    session.run = run


def ort_add_batch_dim(input_path: Path, output_path: Path) -> None:
    proto = onnx.load(input_path)
    static_input_dims = [shape.dim_value for shape in proto.graph.input[0].type.tensor_type.shape.dim[1:]]
    static_output_dims = [shape.dim_value for shape in proto.graph.output[0].type.tensor_type.shape.dim[1:]]
    input_dims = {proto.graph.input[0].name: ["batch"] + static_input_dims}
    output_dims = {proto.graph.output[0].name: ["batch"] + static_output_dims}
    updated_proto = update_inputs_outputs_dims(proto, input_dims, output_dims)
    inferred = infer_shapes(updated_proto)
    onnx.save(inferred, output_path)
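A sketch of how these helpers combine, mirroring the recognizer's _load() above: probe the session's input shape, rewrite the model file with a symbolic "batch" dimension if needed, then reload. The path is illustrative:

from pathlib import Path

import onnxruntime as ort

model_path = Path("model.onnx")  # assumed to exist on disk
session = ort.InferenceSession(model_path.as_posix())
if not ort_has_batch_dim(session):
    ort_add_batch_dim(model_path, model_path)  # rewrite in place with a "batch" dim
    session = ort.InferenceSession(model_path.as_posix())  # reload the updated model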