Compare commits
72 commits: qr-code-lo… → feature/Ad…
SHA1: f19cf206ba, 7ac30995a8, fe2ddc3644, ec0eb93036, d9a41b8ea0, f30fac971a, fe26ccd1b7, 3f4bbab4eb, 2da9e3152b, 56b85f7479, 8b43066632, 20acdcd884, 22d348beca, 3b0af1c8a9, 61c8237a4d, d740f0283a, 4ada28ac99, 63c01b78e2, 1423cfd53c, 867eec86f5, 86e8effd8e, 49d393216a, 75c9f63757, 63984890df, 1356468c38, c23c53bf6f, 0dcfc43461, d1fd0076cc, ff19502035, 6ef069b537, a03e999bde, ad1ba4be5f, f89e74181b, e2c34f17ba, 23b1256592, 7bbc1d9f68, 8b24c31d20, 7f61ac6983, 4db8f0c666, 3d6a6f77a8, 5698f446f7, eb74fafb00, 24da25dbbf, 9b842d4cca, a99bd94717, 4b568dcbb3, 12ab56c885, eed6465b41, 5f6c16080b, a2aab1f373, 8e076ecfe4, fe702ba6d7, 869839f642, 8885e3105e, 6e51c4ec71, 6bf2e8dbcb, 366f23774a, fd5e931617, d8d87bb565, 6cc1978b2d, 506d2d0f81, f13d13b2ea, 2510684bf7, c8eef5ad4d, 0cb3dc6211, f11080cc2d, efcf773ea0, dc143046e3, e684062569, 5c0538e52c, 84cf0d1670, bfcde05b1c
@@ -11,7 +11,7 @@ body:
   - type: checkboxes
     attributes:
-      label: I have searched the existing feature requests to make sure this is not a duplicate request.
+      label: I have searched the existing feature requests, both open and closed, to make sure this is not a duplicate request.
       options:
         - label: "Yes"
           required: true
.github/ISSUE_TEMPLATE/bug_report.yaml (vendored, 7 changes)
@@ -1,6 +1,13 @@
 name: Report an issue with Immich
 description: Report an issue with Immich
 body:
+  - type: checkboxes
+    attributes:
+      label: I have searched the existing issues, both open and closed, to make sure this is not a duplicate report.
+      options:
+        - label: "Yes"
+          required: true
+
   - type: markdown
     attributes:
       value: |
.github/workflows/cli.yml (vendored, 6 changes)
@@ -56,10 +56,10 @@ jobs:
         uses: actions/checkout@v4

       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.4.0
+        uses: docker/setup-qemu-action@v3.5.0

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3.9.0
+        uses: docker/setup-buildx-action@v3.10.0

       - name: Login to GitHub Container Registry
         uses: docker/login-action@v3
@@ -88,7 +88,7 @@ jobs:
           type=raw,value=latest,enable=${{ github.event_name == 'release' }}

       - name: Build and push image
-        uses: docker/build-push-action@v6.13.0
+        uses: docker/build-push-action@v6.15.0
         with:
           file: cli/Dockerfile
           platforms: linux/amd64,linux/arm64
.github/workflows/docker.yml (vendored, 50 changes)
@@ -5,7 +5,6 @@ on:
   push:
     branches: [main]
   pull_request:
-    branches: [main]
   release:
     types: [published]

@@ -50,23 +49,23 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        suffix: ["", "-cuda", "-openvino", "-armnn"]
+        suffix: ['', '-cuda', '-rocm', '-openvino', '-armnn']
     steps:
       - name: Login to GitHub Container Registry
         uses: docker/login-action@v3
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}
       - name: Re-tag image
         run: |
           REGISTRY_NAME="ghcr.io"
           REPOSITORY=${{ github.repository_owner }}/immich-machine-learning
           TAG_OLD=main${{ matrix.suffix }}
           TAG_PR=${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
           TAG_COMMIT=commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
           docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_PR $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
           docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_COMMIT $REGISTRY_NAME/$REPOSITORY:$TAG_OLD

   retag_server:
     name: Re-Tag Server
@@ -75,7 +74,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        suffix: [""]
+        suffix: ['']
     steps:
       - name: Login to GitHub Container Registry
         uses: docker/login-action@v3
@@ -126,6 +125,11 @@ jobs:
             device: openvino
             suffix: -openvino

+          - platforms: linux/amd64
+            runner: mich
+            device: rocm
+            suffix: -rocm
+
           - platform: linux/arm64
             runner: ubuntu-24.04-arm
             device: armnn
@@ -141,7 +145,7 @@ jobs:
         uses: actions/checkout@v4

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3.9.0
+        uses: docker/setup-buildx-action@v3.10.0

       - name: Login to GitHub Container Registry
         uses: docker/login-action@v3
@@ -171,7 +175,7 @@ jobs:

       - name: Build and push image
         id: build
-        uses: docker/build-push-action@v6.13.0
+        uses: docker/build-push-action@v6.15.0
         with:
           context: ${{ env.context }}
           file: ${{ env.file }}
@@ -251,7 +255,7 @@ jobs:
         id: meta
         uses: docker/metadata-action@v5
         env:
-          DOCKER_METADATA_PR_HEAD_SHA: "true"
+          DOCKER_METADATA_PR_HEAD_SHA: 'true'
         with:
           flavor: |
             # Disable latest tag
@@ -334,7 +338,7 @@ jobs:

       - name: Build and push image
         id: build
-        uses: docker/build-push-action@v6.13.0
+        uses: docker/build-push-action@v6.15.0
         with:
           context: ${{ env.context }}
           file: ${{ env.file }}
@@ -404,7 +408,7 @@ jobs:
         id: meta
         uses: docker/metadata-action@v5
         env:
-          DOCKER_METADATA_PR_HEAD_SHA: "true"
+          DOCKER_METADATA_PR_HEAD_SHA: 'true'
         with:
           flavor: |
             # Disable latest tag
.github/workflows/test.yml (vendored, 2 changes)
@@ -457,7 +457,7 @@ jobs:
     runs-on: ubuntu-latest
     services:
       postgres:
-        image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
+        image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
         env:
           POSTGRES_PASSWORD: postgres
           POSTGRES_USER: postgres
.github/workflows/weblate-lock.yml (vendored, new file, 50 lines)
@@ -0,0 +1,50 @@
+name: Weblate checks
+
+on:
+  pull_request:
+    branches: [main]
+
+jobs:
+  pre-job:
+    runs-on: ubuntu-latest
+    outputs:
+      should_run: ${{ steps.found_paths.outputs.i18n == 'true' && github.head_ref != 'chore/translations'}}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+      - id: found_paths
+        uses: dorny/paths-filter@v3
+        with:
+          filters: |
+            i18n:
+              - 'i18n/!(en)**\.json'
+  enforce-lock:
+    name: Check Weblate Lock
+    runs-on: ubuntu-latest
+    if: ${{ needs.pre-job.outputs.should_run == 'true' }}
+    steps:
+      - name: Check weblate lock
+        run: |
+          if [[ "false" = $(curl https://hosted.weblate.org/api/components/immich/immich/lock/ | jq .locked) ]]; then
+            exit 1
+          fi
+      - name: Find Pull Request
+        uses: juliangruber/find-pull-request-action@v1
+        id: find-pr
+        with:
+          branch: chore/translations
+      - name: Fail if existing weblate PR
+        if: ${{ steps.find-pr.outputs.number }}
+        run: exit 1
+  success-check-lock:
+    name: Weblate Lock Check Success
+    needs: [ enforce-lock ]
+    runs-on: ubuntu-latest
+    if: always()
+    steps:
+      - name: Any jobs failed?
+        if: ${{ contains(needs.*.result, 'failure') }}
+        run: exit 1
+      - name: All jobs passed or skipped
+        if: ${{ !(contains(needs.*.result, 'failure')) }}
+        run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"
@@ -1,4 +1,4 @@
-FROM node:22.13.1-alpine3.20@sha256:c52e20859a92b3eccbd3a36c5e1a90adc20617d8d421d65e8a622e87b5dac963 AS core
+FROM node:22.14.0-alpine3.20@sha256:40be979442621049f40b1d51a26b55e281246b5de4e5f51a18da7beb6e17e3f9 AS core

 WORKDIR /usr/src/open-api/typescript-sdk
 COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./
cli/package-lock.json (generated, 616 changes)
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
 {
   "name": "@immich/cli",
-  "version": "2.2.51",
+  "version": "2.2.52",
   "description": "Command Line Interface (CLI) for Immich",
   "type": "module",
   "exports": "./dist/index.js",
@@ -19,8 +19,9 @@
     "@types/byte-size": "^8.1.0",
     "@types/cli-progress": "^3.11.0",
     "@types/lodash-es": "^4.17.12",
+    "@types/micromatch": "^4.0.9",
     "@types/mock-fs": "^4.13.1",
-    "@types/node": "^22.13.4",
+    "@types/node": "^22.13.5",
     "@typescript-eslint/eslint-plugin": "^8.15.0",
     "@typescript-eslint/parser": "^8.15.0",
     "@vitest/coverage-v8": "^3.0.0",
@@ -31,7 +32,7 @@
     "eslint-config-prettier": "^10.0.0",
     "eslint-plugin-prettier": "^5.1.3",
     "eslint-plugin-unicorn": "^56.0.1",
-    "globals": "^15.9.0",
+    "globals": "^16.0.0",
     "mock-fs": "^5.2.0",
     "prettier": "^3.2.5",
     "prettier-plugin-organize-imports": "^4.0.0",
@@ -62,9 +63,11 @@
     "node": ">=20.0.0"
   },
   "dependencies": {
+    "chokidar": "^4.0.3",
     "fast-glob": "^3.3.2",
     "fastq": "^1.17.1",
-    "lodash-es": "^4.17.21"
+    "lodash-es": "^4.17.21",
+    "micromatch": "^4.0.8"
   },
   "volta": {
     "node": "22.14.0"
@@ -1,12 +1,13 @@
 import * as fs from 'node:fs';
 import * as os from 'node:os';
 import * as path from 'node:path';
-import { describe, expect, it, vi } from 'vitest';
+import { setTimeout as sleep } from 'node:timers/promises';
+import { describe, expect, it, MockedFunction, vi } from 'vitest';

-import { Action, checkBulkUpload, defaults, Reason } from '@immich/sdk';
+import { Action, checkBulkUpload, defaults, getSupportedMediaTypes, Reason } from '@immich/sdk';
 import createFetchMock from 'vitest-fetch-mock';

-import { checkForDuplicates, getAlbumName, uploadFiles, UploadOptionsDto } from './asset';
+import { checkForDuplicates, getAlbumName, startWatch, uploadFiles, UploadOptionsDto } from 'src/commands/asset';

 vi.mock('@immich/sdk');

@@ -199,3 +200,112 @@ describe('checkForDuplicates', () => {
     });
   });
 });
+
+describe('startWatch', () => {
+  let testFolder: string;
+  let checkBulkUploadMocked: MockedFunction<typeof checkBulkUpload>;
+
+  beforeEach(async () => {
+    vi.restoreAllMocks();
+
+    vi.mocked(getSupportedMediaTypes).mockResolvedValue({
+      image: ['.jpg'],
+      sidecar: ['.xmp'],
+      video: ['.mp4'],
+    });
+
+    testFolder = await fs.promises.mkdtemp(path.join(os.tmpdir(), 'test-startWatch-'));
+    checkBulkUploadMocked = vi.mocked(checkBulkUpload);
+    checkBulkUploadMocked.mockResolvedValue({
+      results: [],
+    });
+  });
+
+  it('should start watching a directory and upload new files', async () => {
+    const testFilePath = path.join(testFolder, 'test.jpg');
+
+    await startWatch([testFolder], { concurrency: 1 }, { batchSize: 1, debounceTimeMs: 10 });
+    await sleep(100); // to keep the watcher from treating the test file as an existing file
+    await fs.promises.writeFile(testFilePath, 'testjpg');
+
+    await vi.waitUntil(() => checkBulkUploadMocked.mock.calls.length > 0, 3000);
+    expect(checkBulkUpload).toHaveBeenCalledWith({
+      assetBulkUploadCheckDto: {
+        assets: [
+          expect.objectContaining({
+            id: testFilePath,
+          }),
+        ],
+      },
+    });
+  });
+
+  it('should filter out unsupported files', async () => {
+    const testFilePath = path.join(testFolder, 'test.jpg');
+    const unsupportedFilePath = path.join(testFolder, 'test.txt');
+
+    await startWatch([testFolder], { concurrency: 1 }, { batchSize: 1, debounceTimeMs: 10 });
+    await sleep(100); // to keep the watcher from treating the test files as existing files
+    await fs.promises.writeFile(testFilePath, 'testjpg');
+    await fs.promises.writeFile(unsupportedFilePath, 'testtxt');
+
+    await vi.waitUntil(() => checkBulkUploadMocked.mock.calls.length > 0, 3000);
+    expect(checkBulkUpload).toHaveBeenCalledWith({
+      assetBulkUploadCheckDto: {
+        assets: expect.arrayContaining([
+          expect.objectContaining({
+            id: testFilePath,
+          }),
+        ]),
+      },
+    });
+
+    expect(checkBulkUpload).not.toHaveBeenCalledWith({
+      assetBulkUploadCheckDto: {
+        assets: expect.arrayContaining([
+          expect.objectContaining({
+            id: unsupportedFilePath,
+          }),
+        ]),
+      },
+    });
+  });
+
+  it('should filter out ignored patterns', async () => {
+    const testFilePath = path.join(testFolder, 'test.jpg');
+    const ignoredPattern = 'ignored';
+    const ignoredFolder = path.join(testFolder, ignoredPattern);
+    await fs.promises.mkdir(ignoredFolder, { recursive: true });
+    const ignoredFilePath = path.join(ignoredFolder, 'ignored.jpg');
+
+    await startWatch([testFolder], { concurrency: 1, ignore: ignoredPattern }, { batchSize: 1, debounceTimeMs: 10 });
+    await sleep(100); // to keep the watcher from treating the test files as existing files
+    await fs.promises.writeFile(testFilePath, 'testjpg');
+    await fs.promises.writeFile(ignoredFilePath, 'ignoredjpg');
+
+    await vi.waitUntil(() => checkBulkUploadMocked.mock.calls.length > 0, 3000);
+    expect(checkBulkUpload).toHaveBeenCalledWith({
+      assetBulkUploadCheckDto: {
+        assets: expect.arrayContaining([
+          expect.objectContaining({
+            id: testFilePath,
+          }),
+        ]),
+      },
+    });
+
+    expect(checkBulkUpload).not.toHaveBeenCalledWith({
+      assetBulkUploadCheckDto: {
+        assets: expect.arrayContaining([
+          expect.objectContaining({
+            id: ignoredFilePath,
+          }),
+        ]),
+      },
+    });
+  });
+
+  afterEach(async () => {
+    await fs.promises.rm(testFolder, { recursive: true, force: true });
+  });
+});
@@ -12,13 +12,18 @@ import {
   getSupportedMediaTypes,
 } from '@immich/sdk';
 import byteSize from 'byte-size';
+import { Matcher, watch as watchFs } from 'chokidar';
 import { MultiBar, Presets, SingleBar } from 'cli-progress';
 import { chunk } from 'lodash-es';
+import micromatch from 'micromatch';
 import { Stats, createReadStream } from 'node:fs';
 import { stat, unlink } from 'node:fs/promises';
 import path, { basename } from 'node:path';
 import { Queue } from 'src/queue';
-import { BaseOptions, authenticate, crawl, sha1 } from 'src/utils';
+import { BaseOptions, Batcher, authenticate, crawl, sha1 } from 'src/utils';

+const UPLOAD_WATCH_BATCH_SIZE = 100;
+const UPLOAD_WATCH_DEBOUNCE_TIME_MS = 10_000;
+
 const s = (count: number) => (count === 1 ? '' : 's');

@@ -36,6 +41,8 @@ export interface UploadOptionsDto {
   albumName?: string;
   includeHidden?: boolean;
   concurrency: number;
+  progress?: boolean;
+  watch?: boolean;
 }

 class UploadFile extends File {
@@ -55,19 +62,94 @@ class UploadFile extends File {
   }
 }

+const uploadBatch = async (files: string[], options: UploadOptionsDto) => {
+  const { newFiles, duplicates } = await checkForDuplicates(files, options);
+  const newAssets = await uploadFiles(newFiles, options);
+  await updateAlbums([...newAssets, ...duplicates], options);
+  await deleteFiles(newFiles, options);
+};
+
+export const startWatch = async (
+  paths: string[],
+  options: UploadOptionsDto,
+  {
+    batchSize = UPLOAD_WATCH_BATCH_SIZE,
+    debounceTimeMs = UPLOAD_WATCH_DEBOUNCE_TIME_MS,
+  }: { batchSize?: number; debounceTimeMs?: number } = {},
+) => {
+  const watcherIgnored: Matcher[] = [];
+  const { image, video } = await getSupportedMediaTypes();
+  const extensions = new Set([...image, ...video]);
+
+  if (options.ignore) {
+    watcherIgnored.push((path) => micromatch.contains(path, `**/${options.ignore}`));
+  }
+
+  const pathsBatcher = new Batcher<string>({
+    batchSize,
+    debounceTimeMs,
+    onBatch: async (paths: string[]) => {
+      const uniquePaths = [...new Set(paths)];
+      await uploadBatch(uniquePaths, options);
+    },
+  });
+
+  const onFile = async (path: string, stats?: Stats) => {
+    if (stats?.isDirectory()) {
+      return;
+    }
+    const ext = '.' + path.split('.').pop()?.toLowerCase();
+    if (!ext || !extensions.has(ext)) {
+      return;
+    }
+
+    if (!options.progress) {
+      // logging when progress is disabled as it can cause issues with the progress bar rendering
+      console.log(`Change detected: ${path}`);
+    }
+    pathsBatcher.add(path);
+  };
+  const fsWatcher = watchFs(paths, {
+    ignoreInitial: true,
+    ignored: watcherIgnored,
+    alwaysStat: true,
+    awaitWriteFinish: true,
+    depth: options.recursive ? undefined : 1,
+    persistent: true,
+  })
+    .on('add', onFile)
+    .on('change', onFile)
+    .on('error', (error) => console.error(`Watcher error: ${error}`));
+
+  process.on('SIGINT', async () => {
+    console.log('Exiting...');
+    await fsWatcher.close();
+    process.exit();
+  });
+};
+
 export const upload = async (paths: string[], baseOptions: BaseOptions, options: UploadOptionsDto) => {
   await authenticate(baseOptions);

   const scanFiles = await scan(paths, options);

   if (scanFiles.length === 0) {
-    console.log('No files found, exiting');
-    return;
+    if (options.watch) {
+      console.log('No files found initially.');
+    } else {
+      console.log('No files found, exiting');
+      return;
+    }
   }

-  const { newFiles, duplicates } = await checkForDuplicates(scanFiles, options);
-  const newAssets = await uploadFiles(newFiles, options);
-  await updateAlbums([...newAssets, ...duplicates], options);
-  await deleteFiles(newFiles, options);
+  if (options.watch) {
+    console.log('Watching for changes...');
+    await startWatch(paths, options);
+    // watcher does not handle the initial scan
+    // as the scan() is a more efficient quick start with batched results
+  }
+
+  await uploadBatch(scanFiles, options);
 };

 const scan = async (pathsToCrawl: string[], options: UploadOptionsDto) => {
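A note on the ignore handling in the hunk above: chokidar accepts function-style `Matcher`s, and the CLI wraps the user-supplied pattern in `micromatch.contains()`, which reports whether any part of a path matches the glob. A minimal sketch of that matcher in isolation, assuming `options.ignore = 'ignored'` (the sample paths are invented for illustration):

```ts
import micromatch from 'micromatch';

// Same shape as the matcher pushed into `watcherIgnored` above.
const isIgnored = (filePath: string) => micromatch.contains(filePath, '**/ignored');

console.log(isIgnored('/photos/ignored/a.jpg')); // true  -> chokidar skips this path
console.log(isIgnored('/photos/keep/b.jpg')); // false -> watched, batched, uploaded
```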
@@ -85,19 +167,25 @@ const scan = async (pathsToCrawl: string[], options: UploadOptionsDto) => {
   return files;
 };

-export const checkForDuplicates = async (files: string[], { concurrency, skipHash }: UploadOptionsDto) => {
+export const checkForDuplicates = async (files: string[], { concurrency, skipHash, progress }: UploadOptionsDto) => {
   if (skipHash) {
     console.log('Skipping hash check, assuming all files are new');
     return { newFiles: files, duplicates: [] };
   }

-  const multiBar = new MultiBar(
-    { format: '{message} | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
-    Presets.shades_classic,
-  );
+  let multiBar: MultiBar | undefined;

-  const hashProgressBar = multiBar.create(files.length, 0, { message: 'Hashing files ' });
-  const checkProgressBar = multiBar.create(files.length, 0, { message: 'Checking for duplicates' });
+  if (progress) {
+    multiBar = new MultiBar(
+      { format: '{message} | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
+      Presets.shades_classic,
+    );
+  } else {
+    console.log(`Received ${files.length} files, hashing...`);
+  }
+
+  const hashProgressBar = multiBar?.create(files.length, 0, { message: 'Hashing files ' });
+  const checkProgressBar = multiBar?.create(files.length, 0, { message: 'Checking for duplicates' });

   const newFiles: string[] = [];
   const duplicates: Asset[] = [];
@@ -117,7 +205,7 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
         }
       }

-      checkProgressBar.increment(assets.length);
+      checkProgressBar?.increment(assets.length);
     },
     { concurrency, retry: 3 },
   );
@@ -137,7 +225,7 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
         void checkBulkUploadQueue.push(batch);
       }

-      hashProgressBar.increment();
+      hashProgressBar?.increment();
       return results;
     },
     { concurrency, retry: 3 },
@@ -155,7 +243,7 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas

   await checkBulkUploadQueue.drained();

-  multiBar.stop();
+  multiBar?.stop();

   console.log(`Found ${newFiles.length} new files and ${duplicates.length} duplicate${s(duplicates.length)}`);

@@ -171,7 +259,10 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
   return { newFiles, duplicates };
 };

-export const uploadFiles = async (files: string[], { dryRun, concurrency }: UploadOptionsDto): Promise<Asset[]> => {
+export const uploadFiles = async (
+  files: string[],
+  { dryRun, concurrency, progress }: UploadOptionsDto,
+): Promise<Asset[]> => {
   if (files.length === 0) {
     console.log('All assets were already uploaded, nothing to do.');
     return [];
@@ -191,12 +282,20 @@ export const uploadFiles = async (files: string[], { dryRun, concurrency }: Uplo
     return files.map((filepath) => ({ id: '', filepath }));
   }

-  const uploadProgress = new SingleBar(
-    { format: 'Uploading assets | {bar} | {percentage}% | ETA: {eta_formatted} | {value_formatted}/{total_formatted}' },
-    Presets.shades_classic,
-  );
-  uploadProgress.start(totalSize, 0);
-  uploadProgress.update({ value_formatted: 0, total_formatted: byteSize(totalSize) });
+  let uploadProgress: SingleBar | undefined;

+  if (progress) {
+    uploadProgress = new SingleBar(
+      {
+        format: 'Uploading assets | {bar} | {percentage}% | ETA: {eta_formatted} | {value_formatted}/{total_formatted}',
+      },
+      Presets.shades_classic,
+    );
+  } else {
+    console.log(`Uploading ${files.length} asset${s(files.length)} (${byteSize(totalSize)})`);
+  }
+  uploadProgress?.start(totalSize, 0);
+  uploadProgress?.update({ value_formatted: 0, total_formatted: byteSize(totalSize) });

   let duplicateCount = 0;
   let duplicateSize = 0;
@@ -222,7 +321,7 @@ export const uploadFiles = async (files: string[], { dryRun, concurrency }: Uplo
         successSize += stats.size ?? 0;
       }

-      uploadProgress.update(successSize, { value_formatted: byteSize(successSize + duplicateSize) });
+      uploadProgress?.update(successSize, { value_formatted: byteSize(successSize + duplicateSize) });

       return response;
     },
@@ -235,7 +334,7 @@ export const uploadFiles = async (files: string[], { dryRun, concurrency }: Uplo

   await queue.drained();

-  uploadProgress.stop();
+  uploadProgress?.stop();

   console.log(`Successfully uploaded ${successCount} new asset${s(successCount)} (${byteSize(successSize)})`);
   if (duplicateCount > 0) {
@@ -69,6 +69,13 @@ program
       .default(4),
   )
   .addOption(new Option('--delete', 'Delete local assets after upload').env('IMMICH_DELETE_ASSETS'))
+  .addOption(new Option('--no-progress', 'Hide progress bars').env('IMMICH_PROGRESS_BAR').default(true))
+  .addOption(
+    new Option('--watch', 'Watch for changes and upload automatically')
+      .env('IMMICH_WATCH_CHANGES')
+      .default(false)
+      .implies({ progress: false }),
+  )
   .argument('[paths...]', 'One or more paths to assets to be uploaded')
   .action((paths, options) => upload(paths, program.opts(), options));

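One subtlety in the two new options above: commander's `Option.implies()` only changes another option's value when the user did not set that option explicitly, so `--watch` silences the progress bar by default. A small standalone sketch of the interaction (a toy program, not the actual CLI entry point):

```ts
import { Command, Option } from 'commander';

const program = new Command()
  .addOption(new Option('--no-progress', 'Hide progress bars').default(true))
  .addOption(new Option('--watch', 'Watch for changes').default(false).implies({ progress: false }));

// With --watch present, implies() flips the (otherwise true) progress default to false.
program.parse(['node', 'cli', '--watch'], { from: 'node' });
console.log(program.opts()); // { progress: false, watch: true }
```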
@@ -1,6 +1,7 @@
 import mockfs from 'mock-fs';
 import { readFileSync } from 'node:fs';
-import { CrawlOptions, crawl } from 'src/utils';
+import { Batcher, CrawlOptions, crawl } from 'src/utils';
+import { Mock } from 'vitest';

 interface Test {
   test: string;
@@ -303,3 +304,38 @@ describe('crawl', () => {
     }
   });
 });
+
+describe('Batcher', () => {
+  let batcher: Batcher;
+  let onBatch: Mock;
+  beforeEach(() => {
+    onBatch = vi.fn();
+    batcher = new Batcher({ batchSize: 2, onBatch });
+  });
+
+  it('should trigger onBatch() when a batch limit is reached', async () => {
+    batcher.add('a');
+    batcher.add('b');
+    batcher.add('c');
+    expect(onBatch).toHaveBeenCalledOnce();
+    expect(onBatch).toHaveBeenCalledWith(['a', 'b']);
+  });
+
+  it('should trigger onBatch() when flush() is called', async () => {
+    batcher.add('a');
+    batcher.flush();
+    expect(onBatch).toHaveBeenCalledOnce();
+    expect(onBatch).toHaveBeenCalledWith(['a']);
+  });
+
+  it('should trigger onBatch() when debounce time reached', async () => {
+    vi.useFakeTimers();
+    batcher = new Batcher({ batchSize: 2, debounceTimeMs: 100, onBatch });
+    batcher.add('a');
+    expect(onBatch).not.toHaveBeenCalled();
+    vi.advanceTimersByTime(200);
+    expect(onBatch).toHaveBeenCalledOnce();
+    expect(onBatch).toHaveBeenCalledWith(['a']);
+    vi.useRealTimers();
+  });
+});
@@ -172,3 +172,64 @@ export const sha1 = (filepath: string) => {
     rs.on('end', () => resolve(hash.digest('hex')));
   });
 };
+
+/**
+ * Batches items and calls onBatch to process them
+ * when the batch size is reached or the debounce time has passed.
+ */
+export class Batcher<T = unknown> {
+  private items: T[] = [];
+  private readonly batchSize: number;
+  private readonly debounceTimeMs?: number;
+  private readonly onBatch: (items: T[]) => void;
+  private debounceTimer?: NodeJS.Timeout;
+
+  constructor({
+    batchSize,
+    debounceTimeMs,
+    onBatch,
+  }: {
+    batchSize: number;
+    debounceTimeMs?: number;
+    onBatch: (items: T[]) => Promise<void>;
+  }) {
+    this.batchSize = batchSize;
+    this.debounceTimeMs = debounceTimeMs;
+    this.onBatch = onBatch;
+  }
+
+  private setDebounceTimer() {
+    if (this.debounceTimer) {
+      clearTimeout(this.debounceTimer);
+    }
+    if (this.debounceTimeMs) {
+      this.debounceTimer = setTimeout(() => this.flush(), this.debounceTimeMs);
+    }
+  }
+
+  private clearDebounceTimer() {
+    if (this.debounceTimer) {
+      clearTimeout(this.debounceTimer);
+      this.debounceTimer = undefined;
+    }
+  }
+
+  add(item: T) {
+    this.items.push(item);
+    this.setDebounceTimer();
+    if (this.items.length >= this.batchSize) {
+      this.flush();
+    }
+  }
+
+  flush() {
+    this.clearDebounceTimer();
+    if (this.items.length === 0) {
+      return;
+    }
+
+    this.onBatch(this.items);
+
+    this.items = [];
+  }
+}
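To make the contract this class defines concrete, here is a made-up usage sketch (the item values and sizes are invented; the real consumer is the `pathsBatcher` in `startWatch` above):

```ts
import { Batcher } from 'src/utils';

// Flush when 3 items accumulate, or 5s after the most recent add(), whichever comes first.
const batcher = new Batcher<string>({
  batchSize: 3,
  debounceTimeMs: 5_000,
  onBatch: async (items) => console.log(`processing ${items.length} item(s)`, items),
});

batcher.add('a.jpg');
batcher.add('b.jpg'); // below batchSize: only the debounce timer is armed
batcher.add('c.jpg'); // third item triggers onBatch(['a.jpg', 'b.jpg', 'c.jpg']) immediately
batcher.add('d.jpg'); // sits in the buffer; the 5s debounce timer will flush it
```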
@@ -95,12 +95,12 @@ services:
     image: immich-machine-learning-dev:latest
     # extends:
     #   file: hwaccel.ml.yml
-    #   service: cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
+    #   service: cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl] for accelerated inference
     build:
       context: ../machine-learning
       dockerfile: Dockerfile
       args:
-        - DEVICE=cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
+        - DEVICE=cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl] for accelerated inference
     ports:
       - 3003:3003
     volumes:
@@ -122,7 +122,7 @@ services:

   database:
     container_name: immich_postgres
-    image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
+    image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
     env_file:
       - .env
     environment:
@@ -38,12 +38,12 @@ services:
     image: immich-machine-learning:latest
     # extends:
     #   file: hwaccel.ml.yml
-    #   service: cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
+    #   service: cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl] for accelerated inference
     build:
       context: ../machine-learning
       dockerfile: Dockerfile
       args:
-        - DEVICE=cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
+        - DEVICE=cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl] for accelerated inference
     ports:
       - 3003:3003
     volumes:
@@ -63,7 +63,7 @@ services:

   database:
     container_name: immich_postgres
-    image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
+    image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
     env_file:
       - .env
     environment:
@@ -100,7 +100,7 @@ services:
     container_name: immich_prometheus
     ports:
       - 9090:9090
-    image: prom/prometheus@sha256:5888c188cf09e3f7eebc97369c3b2ce713e844cdbd88ccf36f5047c958aea120
+    image: prom/prometheus@sha256:6927e0919a144aa7616fd0137d4816816d42f6b816de3af269ab065250859a62
     volumes:
       - ./prometheus.yml:/etc/prometheus/prometheus.yml
       - prometheus-data:/prometheus
@@ -112,7 +112,7 @@ services:
     command: ['./run.sh', '-disable-reporting']
     ports:
       - 3000:3000
-    image: grafana/grafana:11.5.1-ubuntu@sha256:9a4ab78cec1a2ec7d1ca5dfd5aacec6412706a1bc9e971fc7184e2f6696a63f5
+    image: grafana/grafana:11.5.2-ubuntu@sha256:8b5858c447e06fd7a89006b562ba7bba7c4d5813600c7982374c41852adefaeb
     volumes:
       - grafana-data:/var/lib/grafana

@@ -33,12 +33,12 @@ services:

   immich-machine-learning:
     container_name: immich_machine_learning
-    # For hardware acceleration, add one of -[armnn, cuda, openvino] to the image tag.
+    # For hardware acceleration, add one of -[armnn, cuda, rocm, openvino] to the image tag.
     # Example tag: ${IMMICH_VERSION:-release}-cuda
     image: ghcr.io/immich-app/immich-machine-learning:${IMMICH_VERSION:-release}
     # extends: # uncomment this section for hardware acceleration - see https://immich.app/docs/features/ml-hardware-acceleration
     #   file: hwaccel.ml.yml
-    #   service: cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference - use the `-wsl` version for WSL2 where applicable
+    #   service: cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl] for accelerated inference - use the `-wsl` version for WSL2 where applicable
     volumes:
       - model-cache:/cache
     env_file:
@@ -56,7 +56,7 @@ services:

   database:
     container_name: immich_postgres
-    image: docker.io/tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
+    image: docker.io/tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
     environment:
       POSTGRES_PASSWORD: ${DB_PASSWORD}
       POSTGRES_USER: ${DB_USERNAME}
@@ -26,6 +26,13 @@ services:
           capabilities:
             - gpu

+  rocm:
+    group_add:
+      - video
+    devices:
+      - /dev/dri:/dev/dri
+      - /dev/kfd:/dev/kfd
+
   openvino:
     device_cgroup_rules:
       - 'c 189:* rmw'
@@ -69,6 +69,8 @@ Navigating to Administration > Settings > Machine Learning Settings > Facial Rec

 :::tip
 It's better to only tweak the parameters here than to set them to something very different unless you're ready to test a variety of options. If you do need to set a parameter to a strict setting, relaxing other settings can be a good option to compensate, and vice versa.
+
+You can learn how to tune the results in this [Guide](/docs/guides/better-facial-clusters).
 :::

 ### Facial recognition model
@@ -68,7 +68,7 @@ In rare cases, the library watcher can hang, preventing Immich from starting up.

 ### Nightly job

-There is an automatic scan job that is scheduled to run once a day. This job also cleans up any libraries stuck in deletion.
+There is an automatic scan job that is scheduled to run once a day. This job also cleans up any libraries stuck in deletion. It is possible to trigger the cleanup by clicking "Scan all libraries" in the library management page.

 ## Usage

@@ -11,6 +11,7 @@ You do not need to redo any machine learning jobs after enabling hardware accele

 - ARM NN (Mali)
 - CUDA (NVIDIA GPUs with [compute capability](https://developer.nvidia.com/cuda-gpus) 5.2 or higher)
+- ROCm (AMD GPUs)
 - OpenVINO (Intel GPUs such as Iris Xe and Arc)

 ## Limitations
@@ -41,6 +42,10 @@ You do not need to redo any machine learning jobs after enabling hardware accele
 - The installed driver must be >= 535 (it must support CUDA 12.2).
 - On Linux (except for WSL2), you also need to have [NVIDIA Container Toolkit][nvct] installed.

+#### ROCm
+
+- The GPU must be supported by ROCm. If it isn't officially supported, you can attempt to use the `HSA_OVERRIDE_GFX_VERSION` environmental variable: `HSA_OVERRIDE_GFX_VERSION=<a supported version, e.g. 10.3.0>`.
+
 #### OpenVINO

 - Integrated GPUs are more likely to experience issues than discrete GPUs, especially for older processors or servers with low RAM.
@@ -51,12 +56,12 @@ You do not need to redo any machine learning jobs after enabling hardware accele

 1. If you do not already have it, download the latest [`hwaccel.ml.yml`][hw-file] file and ensure it's in the same folder as the `docker-compose.yml`.
 2. In the `docker-compose.yml` under `immich-machine-learning`, uncomment the `extends` section and change `cpu` to the appropriate backend.
-3. Still in `immich-machine-learning`, add one of -[armnn, cuda, openvino] to the `image` section's tag at the end of the line.
+3. Still in `immich-machine-learning`, add one of -[armnn, cuda, rocm, openvino] to the `image` section's tag at the end of the line.
 4. Redeploy the `immich-machine-learning` container with these updated settings.

 ### Confirming Device Usage

-You can confirm the device is being recognized and used by checking its utilization. There are many tools to display this, such as `nvtop` for NVIDIA or Intel and `intel_gpu_top` for Intel.
+You can confirm the device is being recognized and used by checking its utilization. There are many tools to display this, such as `nvtop` for NVIDIA or Intel, `intel_gpu_top` for Intel, and `radeontop` for AMD.

 You can also check the logs of the `immich-machine-learning` container. When a Smart Search or Face Detection job begins, or when you search with text in Immich, you should either see a log for `Available ORT providers` containing the relevant provider (e.g. `CUDAExecutionProvider` in the case of CUDA), or a `Loaded ANN model` log entry without errors in the case of ARM NN.

docs/docs/guides/better-facial-clusters.md (new file, 72 lines)
@@ -0,0 +1,72 @@
+# Better Facial Recognition Clusters
+
+## Purpose
+
+This guide explains how to optimize facial recognition in systems with large image libraries. By following these steps, you'll achieve better clustering of faces, reducing the need for manual merging.
+
+---
+
+## Important Notes
+
+- **Best Suited For:** Large image libraries after importing a significant number of images.
+- **Warning:** This method deletes all previously assigned names.
+- **Tip:** **Always take a [backup](/docs/administration/backup-and-restore#database) before proceeding!**
+
+---
+
+## Step-by-Step Instructions
+
+### Objective
+
+To enhance face clustering and ensure the model effectively identifies faces using high-quality initial data.
+
+---
+
+### Steps
+
+#### 1. Adjust Machine Learning Settings
+
+Navigate to:
+**Admin → Administration → Settings → Machine Learning Settings**
+
+Make the following changes:
+
+- **Maximum recognition distance (Optional):**
+  Lower this value, e.g., to **0.4**, if the library contains people with similar facial features.
+- **Minimum recognized faces:**
+  Set this to a **high value** (e.g., 20 for libraries with a large number of assets (~100K+), and 10 for libraries with a medium number of assets (~40K+)).
+  > A high value ensures clusters only include faces that appear at least that many times in the library, improving the initial clustering process.
+
+---
+
+#### 2. Run Reset Jobs
+
+Go to:
+**Admin → Administration → Settings → Jobs**
+
+Perform the following:
+
+1. **FACIAL RECOGNITION → Reset**
+
+> These reset jobs rebuild the recognition model based on the new settings.
+
+---
+
+#### 3. Refine Recognition with Lower Thresholds
+
+Once the reset jobs are complete, refine the recognition as follows:
+
+- **Step 1:**
+  Return to **Minimum recognized faces** in Machine Learning Settings and lower the value to **10** (in medium libraries, lower the value from 10 to 5).
+  > Run the job: **FACIAL RECOGNITION → MISSING Mode**
+- **Step 2:**
+  Lower the value again to **3**.
+  > Run the job: **FACIAL RECOGNITION → MISSING Mode**
+
+:::tip try different values
+For certain libraries with a larger or smaller number of assets, other settings will be better or worse. It is recommended to try different values **before assigning names** and see which settings work best for your library.
+:::
+
+---
@@ -31,6 +31,10 @@ SELECT * FROM "assets" WHERE "originalPath" LIKE 'upload/library/admin/2023/%';
 SELECT * FROM "assets" WHERE "id" = '9f94e60f-65b6-47b7-ae44-a4df7b57f0e9';
 ```

+```sql title="Find by partial ID"
+SELECT * FROM "assets" WHERE "id"::text LIKE '%ab431d3a%';
+```
+
 :::note
 You can calculate the checksum for a particular file by using the command `sha1sum <filename>`.
 :::
@@ -23,12 +23,12 @@ name: immich_remote_ml
 services:
   immich-machine-learning:
     container_name: immich_machine_learning
-    # For hardware acceleration, add one of -[armnn, cuda, openvino] to the image tag.
+    # For hardware acceleration, add one of -[armnn, cuda, rocm, openvino] to the image tag.
     # Example tag: ${IMMICH_VERSION:-release}-cuda
     image: ghcr.io/immich-app/immich-machine-learning:${IMMICH_VERSION:-release}
     # extends:
     #   file: hwaccel.ml.yml
-    #   service: # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference - use the `-wsl` version for WSL2 where applicable
+    #   service: # set to one of [armnn, cuda, rocm, openvino, openvino-wsl] for accelerated inference - use the `-wsl` version for WSL2 where applicable
     volumes:
       - model-cache:/cache
     restart: always
@@ -11,7 +11,7 @@ Just restarting the containers does not replace the environment within the conta
|
|||||||
|
|
||||||
In order to recreate the container using docker compose, run `docker compose up -d`.
|
In order to recreate the container using docker compose, run `docker compose up -d`.
|
||||||
In most cases docker will recognize that the `.env` file has changed and recreate the affected containers.
|
In most cases docker will recognize that the `.env` file has changed and recreate the affected containers.
|
||||||
If this should not work, try running `docker compose up -d --force-recreate`.
|
If this does not work, try running `docker compose up -d --force-recreate`.
|
||||||
|
|
||||||
:::
|
:::
|
||||||
|
|
||||||
@@ -20,8 +20,8 @@ If this should not work, try running `docker compose up -d --force-recreate`.
|
|||||||
| Variable | Description | Default | Containers |
|
| Variable | Description | Default | Containers |
|
||||||
| :----------------- | :------------------------------ | :-------: | :----------------------- |
|
| :----------------- | :------------------------------ | :-------: | :----------------------- |
|
||||||
| `IMMICH_VERSION` | Image tags | `release` | server, machine learning |
|
| `IMMICH_VERSION` | Image tags | `release` | server, machine learning |
|
||||||
| `UPLOAD_LOCATION` | Host Path for uploads | | server |
|
| `UPLOAD_LOCATION` | Host path for uploads | | server |
|
||||||
| `DB_DATA_LOCATION` | Host Path for Postgres database | | database |
|
| `DB_DATA_LOCATION` | Host path for Postgres database | | database |
|
||||||
|
|
||||||
:::tip
|
:::tip
|
||||||
These environment variables are used by the `docker-compose.yml` file and do **NOT** affect the containers directly.
|
These environment variables are used by the `docker-compose.yml` file and do **NOT** affect the containers directly.
|
||||||
@@ -33,15 +33,15 @@ These environment variables are used by the `docker-compose.yml` file and do **N
 | :---------------------------------- | :---------------------------------------------------------------------------------------- | :--------------------------: | :----------------------- | :----------------- |
 | `TZ` | Timezone | <sup>\*1</sup> | server | microservices |
 | `IMMICH_ENV` | Environment (production, development) | `production` | server, machine learning | api, microservices |
-| `IMMICH_LOG_LEVEL` | Log Level (verbose, debug, log, warn, error) | `log` | server, machine learning | api, microservices |
-| `IMMICH_MEDIA_LOCATION` | Media Location inside the container ⚠️**You probably shouldn't set this**<sup>\*2</sup>⚠️ | `./upload`<sup>\*3</sup> | server | api, microservices |
+| `IMMICH_LOG_LEVEL` | Log level (verbose, debug, log, warn, error) | `log` | server, machine learning | api, microservices |
+| `IMMICH_MEDIA_LOCATION` | Media location inside the container ⚠️**You probably shouldn't set this**<sup>\*2</sup>⚠️ | `./upload`<sup>\*3</sup> | server | api, microservices |
 | `IMMICH_CONFIG_FILE` | Path to config file | | server | api, microservices |
 | `NO_COLOR` | Set to `true` to disable color-coded log output | `false` | server, machine learning | |
-| `CPU_CORES` | Amount of cores available to the immich server | auto-detected cpu core count | server | |
+| `CPU_CORES` | Number of cores available to the Immich server | auto-detected CPU core count | server | |
 | `IMMICH_API_METRICS_PORT` | Port for the OTEL metrics | `8081` | server | api |
 | `IMMICH_MICROSERVICES_METRICS_PORT` | Port for the OTEL metrics | `8082` | server | microservices |
 | `IMMICH_PROCESS_INVALID_IMAGES` | When `true`, generate thumbnails for invalid images | | server | microservices |
-| `IMMICH_TRUSTED_PROXIES` | List of comma separated IPs set as trusted proxies | | server | api |
+| `IMMICH_TRUSTED_PROXIES` | List of comma-separated IPs set as trusted proxies | | server | api |
 | `IMMICH_IGNORE_MOUNT_CHECK_ERRORS` | See [System Integrity](/docs/administration/system-integrity) | | server | api, microservices |

 \*1: `TZ` should be set to a `TZ identifier` from [this list][tz-list]. For example, `TZ="Etc/UTC"`.
@@ -50,7 +50,7 @@ These environment variables are used by the `docker-compose.yml` file and do **N
 \*2: This path is where the Immich code looks for the files, which is internal to the docker container. Setting it to a path on your host will certainly break things, you should use the `UPLOAD_LOCATION` variable instead.

 \*3: With the default `WORKDIR` of `/usr/src/app`, this path will resolve to `/usr/src/app/upload`.
-It only need to be set if the Immich deployment method is changing.
+It only needs to be set if the Immich deployment method is changing.

 ## Workers

@@ -75,12 +75,12 @@ Information on the current workers can be found [here](/docs/administration/jobs
 | Variable | Description | Default | Containers |
 | :---------------------------------- | :----------------------------------------------------------------------- | :----------: | :----------------------------- |
 | `DB_URL` | Database URL | | server |
-| `DB_HOSTNAME` | Database Host | `database` | server |
-| `DB_PORT` | Database Port | `5432` | server |
-| `DB_USERNAME` | Database User | `postgres` | server, database<sup>\*1</sup> |
-| `DB_PASSWORD` | Database Password | `postgres` | server, database<sup>\*1</sup> |
-| `DB_DATABASE_NAME` | Database Name | `immich` | server, database<sup>\*1</sup> |
-| `DB_VECTOR_EXTENSION`<sup>\*2</sup> | Database Vector Extension (one of [`pgvector`, `pgvecto.rs`]) | `pgvecto.rs` | server |
+| `DB_HOSTNAME` | Database host | `database` | server |
+| `DB_PORT` | Database port | `5432` | server |
+| `DB_USERNAME` | Database user | `postgres` | server, database<sup>\*1</sup> |
+| `DB_PASSWORD` | Database password | `postgres` | server, database<sup>\*1</sup> |
+| `DB_DATABASE_NAME` | Database name | `immich` | server, database<sup>\*1</sup> |
+| `DB_VECTOR_EXTENSION`<sup>\*2</sup> | Database vector extension (one of [`pgvector`, `pgvecto.rs`]) | `pgvecto.rs` | server |
 | `DB_SKIP_MIGRATIONS` | Whether to skip running migrations on startup (one of [`true`, `false`]) | `false` | server |

 \*1: The values of `DB_USERNAME`, `DB_PASSWORD`, and `DB_DATABASE_NAME` are passed to the Postgres container as the variables `POSTGRES_USER`, `POSTGRES_PASSWORD`, and `POSTGRES_DB` in `docker-compose.yml`.
@@ -103,18 +103,18 @@ When `DB_URL` is defined, the `DB_HOSTNAME`, `DB_PORT`, `DB_USERNAME`, `DB_PASSW
 | Variable | Description | Default | Containers |
 | :--------------- | :------------- | :-----: | :--------- |
 | `REDIS_URL` | Redis URL | | server |
-| `REDIS_SOCKET` | Redis Socket | | server |
-| `REDIS_HOSTNAME` | Redis Host | `redis` | server |
-| `REDIS_PORT` | Redis Port | `6379` | server |
-| `REDIS_USERNAME` | Redis Username | | server |
-| `REDIS_PASSWORD` | Redis Password | | server |
-| `REDIS_DBINDEX` | Redis DB Index | `0` | server |
+| `REDIS_SOCKET` | Redis socket | | server |
+| `REDIS_HOSTNAME` | Redis host | `redis` | server |
+| `REDIS_PORT` | Redis port | `6379` | server |
+| `REDIS_USERNAME` | Redis username | | server |
+| `REDIS_PASSWORD` | Redis password | | server |
+| `REDIS_DBINDEX` | Redis DB index | `0` | server |

 :::info
 All `REDIS_` variables must be provided to all Immich workers, including `api` and `microservices`.

 `REDIS_URL` must start with `ioredis://` and then include a `base64` encoded JSON string for the configuration.
-More info can be found in the upstream [ioredis] documentation.
+More information can be found in the upstream [ioredis] documentation.

 When `REDIS_URL` or `REDIS_SOCKET` are defined, the `REDIS_HOSTNAME`, `REDIS_PORT`, `REDIS_USERNAME`, `REDIS_PASSWORD`, and `REDIS_DBINDEX` variables are ignored.
 :::
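The encoding step itself is mechanical. As a hedged sketch (the JSON keys below are illustrative assumptions; the authoritative option names are in the upstream ioredis documentation):

```python
import base64
import json

# Hypothetical ioredis options; check the ioredis docs for the real key names.
config = {"host": "redis", "port": 6379, "db": 0}

encoded = base64.b64encode(json.dumps(config).encode()).decode()
print(f"REDIS_URL=ioredis://{encoded}")
```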
@@ -181,7 +181,11 @@ Redis (Sentinel) URL example JSON before encoding:

 :::info

-Other machine learning parameters can be tuned from the admin UI.
+While the `textual` model is the only one required for smart search, some users may experience slow first searches
+due to backups triggering loading of the other models into memory, which blocks other requests until completed.
+To avoid this, you can preload the other models (`visual`, `recognition`, and `detection`) if you have enough RAM to do so.
+
+Additional machine learning parameters can be tuned from the admin UI.

 :::

@@ -212,7 +216,7 @@ the `_FILE` variable should be set to the path of a file containing the variable
 details on how to use Docker Secrets in the Postgres image.

 \*2: See [this comment][docker-secrets-example] for an example of how
-to use use a Docker secret for the password in the Redis container.
+to use a Docker secret for the password in the Redis container.

 [tz-list]: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones#List
 [docker-secrets-example]: https://github.com/docker-library/redis/issues/46#issuecomment-335326234
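For context, the `_FILE` convention follows a common Docker pattern: the variable holds a path to a file (typically a mounted secret) whose contents are the real value. A generic sketch with a hypothetical helper name, not the exact Immich implementation:

```python
import os

def read_secret(name: str) -> str | None:
    """Prefer <NAME>_FILE (a path to a mounted secret) over the plain variable."""
    file_path = os.environ.get(f"{name}_FILE")
    if file_path:
        with open(file_path) as f:
            return f.read().strip()
    return os.environ.get(name)

db_password = read_secret("DB_PASSWORD")
```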
docs/package-lock.json (generated, 12 changed lines)
@@ -14070,9 +14070,9 @@
       }
     },
     "node_modules/postcss": {
-      "version": "8.5.2",
-      "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.2.tgz",
-      "integrity": "sha512-MjOadfU3Ys9KYoX0AdkBlFEF1Vx37uCCeN4ZHnmwm9FfpbsGWMZeBLMmmpY+6Ocqod7mkdZ0DT31OlbsFrLlkA==",
+      "version": "8.5.3",
+      "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.3.tgz",
+      "integrity": "sha512-dle9A3yYxlBSrt8Fu+IpjGT8SY8hN0mlaA6GY8t0P5PjIOZemULz/E2Bnm/2dcUOena75OTNkHI76uZBNUUq3A==",
       "funding": [
         {
           "type": "opencollective",
@@ -15734,9 +15734,9 @@
       }
     },
     "node_modules/prettier": {
-      "version": "3.5.1",
-      "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.5.1.tgz",
-      "integrity": "sha512-hPpFQvHwL3Qv5AdRvBFMhnKo4tYxp0ReXiPn2bxkiohEX6mBeBwEpBSQTkD458RaaDKQMYSp4hX4UtfUTA5wDw==",
+      "version": "3.5.2",
+      "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.5.2.tgz",
+      "integrity": "sha512-lc6npv5PH7hVqozBR7lkBNOGXV9vMwROAPlumdBkX0wTbbzPu/U1hk5yL8p2pt4Xoc+2mkT8t/sow2YrV/M5qg==",
       "dev": true,
       "license": "MIT",
       "bin": {
docs/static/archived-versions.json (vendored, 4 changed lines)
@@ -1,4 +1,8 @@
 [
+  {
+    "label": "v1.128.0",
+    "url": "https://v1.128.0.archive.immich.app"
+  },
   {
     "label": "v1.127.0",
     "url": "https://v1.127.0.archive.immich.app"
@@ -37,7 +37,7 @@ services:
     image: redis:6.2-alpine@sha256:148bb5411c184abd288d9aaed139c98123eeb8824c5d3fce03cf721db58066d8

   database:
-    image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
+    image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
     command: -c fsync=off -c shared_preload_libraries=vectors.so
     environment:
       POSTGRES_PASSWORD: postgres
e2e/package-lock.json (generated, 749 changed lines)
File diff suppressed because it is too large.
@@ -1,6 +1,6 @@
 {
   "name": "immich-e2e",
-  "version": "1.127.0",
+  "version": "1.128.0",
   "description": "",
   "main": "index.js",
   "type": "module",
@@ -25,7 +25,7 @@
     "@immich/sdk": "file:../open-api/typescript-sdk",
     "@playwright/test": "^1.44.1",
     "@types/luxon": "^3.4.2",
-    "@types/node": "^22.13.4",
+    "@types/node": "^22.13.5",
    "@types/oidc-provider": "^8.5.1",
    "@types/pg": "^8.11.0",
    "@types/pngjs": "^6.0.4",
@@ -38,7 +38,7 @@
     "eslint-plugin-prettier": "^5.1.3",
     "eslint-plugin-unicorn": "^56.0.1",
     "exiftool-vendored": "^28.3.1",
-    "globals": "^15.9.0",
+    "globals": "^16.0.0",
     "jose": "^5.6.3",
     "luxon": "^3.4.4",
     "oidc-provider": "^8.5.1",
@@ -4,7 +4,6 @@ import {
   AssetResponseDto,
   AssetTypeEnum,
   getAssetInfo,
-  getConfig,
   getMyUser,
   LoginResponseDto,
   SharedLinkType,
@@ -45,8 +44,6 @@ const locationAssetFilepath = `${testAssetDir}/metadata/gps-position/thompson-sp
 const ratingAssetFilepath = `${testAssetDir}/metadata/rating/mongolels.jpg`;
 const facesAssetFilepath = `${testAssetDir}/metadata/faces/portrait.jpg`;

-const getSystemConfig = (accessToken: string) => getConfig({ headers: asBearerAuth(accessToken) });
-
 const readTags = async (bytes: Buffer, filename: string) => {
   const filepath = join(tempDir, filename);
   await writeFile(filepath, bytes);
@@ -228,7 +225,7 @@ describe('/asset', () => {
     });

     it('should get the asset faces', async () => {
-      const config = await getSystemConfig(admin.accessToken);
+      const config = await utils.getSystemConfig(admin.accessToken);
       config.metadata.faces.import = true;
       await updateConfig({ systemConfigDto: config }, { headers: asBearerAuth(admin.accessToken) });

@@ -1,8 +1,9 @@
-import { JobCommand, JobName, LoginResponseDto } from '@immich/sdk';
+import { JobCommand, JobName, LoginResponseDto, updateConfig } from '@immich/sdk';
+import { cpSync, rmSync } from 'node:fs';
 import { readFile } from 'node:fs/promises';
 import { basename } from 'node:path';
 import { errorDto } from 'src/responses';
-import { app, testAssetDir, utils } from 'src/utils';
+import { app, asBearerAuth, testAssetDir, utils } from 'src/utils';
 import request from 'supertest';
 import { afterEach, beforeAll, describe, expect, it } from 'vitest';

@@ -20,6 +21,33 @@ describe('/jobs', () => {
       command: JobCommand.Resume,
       force: false,
     });

+    await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
+      command: JobCommand.Resume,
+      force: false,
+    });
+
+    await utils.jobCommand(admin.accessToken, JobName.FaceDetection, {
+      command: JobCommand.Resume,
+      force: false,
+    });
+
+    await utils.jobCommand(admin.accessToken, JobName.SmartSearch, {
+      command: JobCommand.Resume,
+      force: false,
+    });
+
+    await utils.jobCommand(admin.accessToken, JobName.DuplicateDetection, {
+      command: JobCommand.Resume,
+      force: false,
+    });
+
+    const config = await utils.getSystemConfig(admin.accessToken);
+    config.machineLearning.duplicateDetection.enabled = false;
+    config.machineLearning.enabled = false;
+    config.metadata.faces.import = false;
+    config.machineLearning.clip.enabled = false;
+    await updateConfig({ systemConfigDto: config }, { headers: asBearerAuth(admin.accessToken) });
   });

   it('should require authentication', async () => {
@@ -29,14 +57,7 @@ describe('/jobs', () => {
   });

   it('should queue metadata extraction for missing assets', async () => {
-    const path1 = `${testAssetDir}/formats/raw/Nikon/D700/philadelphia.nef`;
-    const path2 = `${testAssetDir}/formats/raw/Nikon/D80/glarus.nef`;
-
-    await utils.createAsset(admin.accessToken, {
-      assetData: { bytes: await readFile(path1), filename: basename(path1) },
-    });
-
-    await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction');
+    const path = `${testAssetDir}/formats/raw/Nikon/D700/philadelphia.nef`;

     await utils.jobCommand(admin.accessToken, JobName.MetadataExtraction, {
       command: JobCommand.Pause,
@@ -44,7 +65,7 @@ describe('/jobs', () => {
     });

     const { id } = await utils.createAsset(admin.accessToken, {
-      assetData: { bytes: await readFile(path2), filename: basename(path2) },
+      assetData: { bytes: await readFile(path), filename: basename(path) },
     });

     await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction');
@@ -82,5 +103,123 @@ describe('/jobs', () => {
       expect(asset.exifInfo?.make).toBe('NIKON CORPORATION');
     }
   });
+
+  it('should not re-extract metadata for existing assets', async () => {
+    const path = `${testAssetDir}/temp/metadata/asset.jpg`;
+
+    cpSync(`${testAssetDir}/formats/raw/Nikon/D700/philadelphia.nef`, path);
+
+    const { id } = await utils.createAsset(admin.accessToken, {
+      assetData: { bytes: await readFile(path), filename: basename(path) },
+    });
+
+    await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction');
+
+    {
+      const asset = await utils.getAssetInfo(admin.accessToken, id);
+
+      expect(asset.exifInfo).toBeDefined();
+      expect(asset.exifInfo?.model).toBe('NIKON D700');
+    }
+
+    cpSync(`${testAssetDir}/formats/raw/Nikon/D80/glarus.nef`, path);
+
+    await utils.jobCommand(admin.accessToken, JobName.MetadataExtraction, {
+      command: JobCommand.Start,
+      force: false,
+    });
+
+    await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction');
+
+    {
+      const asset = await utils.getAssetInfo(admin.accessToken, id);
+
+      expect(asset.exifInfo).toBeDefined();
+      expect(asset.exifInfo?.model).toBe('NIKON D700');
+    }
+
+    rmSync(path);
+  });
+
+  it('should queue thumbnail extraction for assets missing thumbs', async () => {
+    const path = `${testAssetDir}/albums/nature/tanners_ridge.jpg`;
+
+    await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
+      command: JobCommand.Pause,
+      force: false,
+    });
+
+    const { id } = await utils.createAsset(admin.accessToken, {
+      assetData: { bytes: await readFile(path), filename: basename(path) },
+    });
+
+    await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction);
+    await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration);
+
+    const assetBefore = await utils.getAssetInfo(admin.accessToken, id);
+    expect(assetBefore.thumbhash).toBeNull();
+
+    await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
+      command: JobCommand.Empty,
+      force: false,
+    });
+
+    await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction);
+    await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration);
+
+    await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
+      command: JobCommand.Resume,
+      force: false,
+    });
+
+    await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
+      command: JobCommand.Start,
+      force: false,
+    });
+
+    await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction);
+    await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration);
+
+    const assetAfter = await utils.getAssetInfo(admin.accessToken, id);
+    expect(assetAfter.thumbhash).not.toBeNull();
+  });
+
+  it('should not reload existing thumbnail when running thumb job for missing assets', async () => {
+    const path = `${testAssetDir}/temp/thumbs/asset1.jpg`;
+
+    cpSync(`${testAssetDir}/albums/nature/tanners_ridge.jpg`, path);
+
+    const { id } = await utils.createAsset(admin.accessToken, {
+      assetData: { bytes: await readFile(path), filename: basename(path) },
+    });
+
+    await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction);
+    await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration);
+
+    const assetBefore = await utils.getAssetInfo(admin.accessToken, id);
+
+    cpSync(`${testAssetDir}/albums/nature/notocactus_minimus.jpg`, path);
+
+    await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
+      command: JobCommand.Resume,
+      force: false,
+    });
+
+    // This runs the missing thumbnail job
+    await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
+      command: JobCommand.Start,
+      force: false,
+    });
+
+    await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction);
+    await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration);
+
+    const assetAfter = await utils.getAssetInfo(admin.accessToken, id);
+
+    // Asset 1 thumbnail should be untouched since its thumb should not have been reloaded, even though the file was changed
+    expect(assetAfter.thumbhash).toEqual(assetBefore.thumbhash);
+
+    rmSync(path);
+  });
 });
 });
@@ -1,4 +1,4 @@
-import { LoginResponseDto, getAssetInfo, getAssetStatistics, scanLibrary } from '@immich/sdk';
+import { LoginResponseDto, getAssetInfo, getAssetStatistics } from '@immich/sdk';
 import { existsSync } from 'node:fs';
 import { Socket } from 'socket.io-client';
 import { errorDto } from 'src/responses';
@@ -6,8 +6,6 @@ import { app, asBearerAuth, testAssetDir, testAssetDirInternal, utils } from 'sr
 import request from 'supertest';
 import { afterAll, beforeAll, describe, expect, it } from 'vitest';

-const scan = async (accessToken: string, id: string) => scanLibrary({ id }, { headers: asBearerAuth(accessToken) });
-
 describe('/trash', () => {
   let admin: LoginResponseDto;
   let ws: Socket;
@@ -81,8 +79,7 @@ describe('/trash', () => {

       utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`);

-      await scan(admin.accessToken, library.id);
-      await utils.waitForQueueFinish(admin.accessToken, 'library');
+      await utils.scan(admin.accessToken, library.id);

       const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id });
       expect(assets.items.length).toBe(1);
@@ -90,8 +87,7 @@ describe('/trash', () => {

       await utils.updateLibrary(admin.accessToken, library.id, { exclusionPatterns: ['**/offline/**'] });

-      await scan(admin.accessToken, library.id);
-      await utils.waitForQueueFinish(admin.accessToken, 'library');
+      await utils.scan(admin.accessToken, library.id);

       const assetBefore = await utils.getAssetInfo(admin.accessToken, asset.id);
       expect(assetBefore).toMatchObject({ isTrashed: true, isOffline: true });
@@ -116,8 +112,7 @@ describe('/trash', () => {

       utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`);

-      await scan(admin.accessToken, library.id);
-      await utils.waitForQueueFinish(admin.accessToken, 'library');
+      await utils.scan(admin.accessToken, library.id);

       const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id });
       expect(assets.items.length).toBe(1);
@@ -125,8 +120,7 @@ describe('/trash', () => {

       await utils.updateLibrary(admin.accessToken, library.id, { exclusionPatterns: ['**/offline/**'] });

-      await scan(admin.accessToken, library.id);
-      await utils.waitForQueueFinish(admin.accessToken, 'library');
+      await utils.scan(admin.accessToken, library.id);

       const assetBefore = await utils.getAssetInfo(admin.accessToken, asset.id);
       expect(assetBefore).toMatchObject({ isTrashed: true, isOffline: true });
@@ -180,8 +174,7 @@ describe('/trash', () => {

       utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`);

-      await scan(admin.accessToken, library.id);
-      await utils.waitForQueueFinish(admin.accessToken, 'library');
+      await utils.scan(admin.accessToken, library.id);

       const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id });
       expect(assets.count).toBe(1);
@@ -189,9 +182,7 @@ describe('/trash', () => {

       await utils.updateLibrary(admin.accessToken, library.id, { exclusionPatterns: ['**/offline/**'] });

-      await scan(admin.accessToken, library.id);
-
-      await utils.waitForQueueFinish(admin.accessToken, 'library');
+      await utils.scan(admin.accessToken, library.id);

       const before = await getAssetInfo({ id: assetId }, { headers: asBearerAuth(admin.accessToken) });
       expect(before).toStrictEqual(expect.objectContaining({ id: assetId, isOffline: true }));
@@ -201,6 +192,8 @@ describe('/trash', () => {

       const after = await getAssetInfo({ id: assetId }, { headers: asBearerAuth(admin.accessToken) });
       expect(after).toStrictEqual(expect.objectContaining({ id: assetId, isOffline: true }));
+
+      utils.removeImageFile(`${testAssetDir}/temp/offline/offline.png`);
     });
   });

@@ -238,7 +231,7 @@ describe('/trash', () => {

       utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`);

-      await scan(admin.accessToken, library.id);
+      await utils.scan(admin.accessToken, library.id);
       await utils.waitForQueueFinish(admin.accessToken, 'library');

       const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id });
@@ -247,7 +240,7 @@ describe('/trash', () => {

       await utils.updateLibrary(admin.accessToken, library.id, { exclusionPatterns: ['**/offline/**'] });

-      await scan(admin.accessToken, library.id);
+      await utils.scan(admin.accessToken, library.id);
       await utils.waitForQueueFinish(admin.accessToken, 'library');

       const before = await utils.getAssetInfo(admin.accessToken, assetId);
@@ -261,6 +254,8 @@ describe('/trash', () => {

       const after = await utils.getAssetInfo(admin.accessToken, assetId);
       expect(after.isTrashed).toBe(true);
+
+      utils.removeImageFile(`${testAssetDir}/temp/offline/offline.png`);
     });
   });
 });
@@ -28,6 +28,7 @@ import {
   deleteAssets,
   getAllJobsStatus,
   getAssetInfo,
+  getConfig,
   getConfigDefaults,
   login,
   scanLibrary,
@@ -121,6 +122,7 @@ const execPromise = promisify(exec);
 const onEvent = ({ event, id }: { event: EventType; id: string }) => {
   // console.log(`Received event: ${event} [id=${id}]`);
   const set = events[event];
+
   set.add(id);

   const idCallback = idCallbacks[id];
@@ -415,6 +417,8 @@ export const utils = {
     rmSync(path, { recursive: true });
   },

+  getSystemConfig: (accessToken: string) => getConfig({ headers: asBearerAuth(accessToken) }),
+
   getAssetInfo: (accessToken: string, id: string) => getAssetInfo({ id }, { headers: asBearerAuth(accessToken) }),

   checkExistingAssets: (accessToken: string, checkExistingAssetsDto: CheckExistingAssetsDto) =>
@@ -96,7 +96,7 @@
   "library_scanning_enable_description": "Enable periodic library scanning",
   "library_settings": "External Library",
   "library_settings_description": "Manage external library settings",
-  "library_tasks_description": "Perform library tasks",
+  "library_tasks_description": "Scan external libraries for new and/or changed assets",
   "library_watching_enable_description": "Watch external libraries for file changes",
   "library_watching_settings": "Library watching (EXPERIMENTAL)",
   "library_watching_settings_description": "Automatically watch for changed files",
@@ -336,6 +336,7 @@
   "untracked_files": "Untracked Files",
   "untracked_files_description": "These files are not tracked by the application. They can be the results of failed moves, interrupted uploads, or left behind due to a bug",
   "user_cleanup_job": "User cleanup",
+  "cleanup": "Cleanup",
   "user_delete_delay": "<b>{user}</b>'s account and assets will be scheduled for permanent deletion in {delay, plural, one {# day} other {# days}}.",
   "user_delete_delay_settings": "Delete delay",
   "user_delete_delay_settings_description": "Number of days after removal to permanently delete a user's account and assets. The user deletion job runs at midnight to check for users that are ready for deletion. Changes to this setting will be evaluated at the next execution.",
@@ -393,6 +394,7 @@
   "allow_edits": "Allow edits",
   "allow_public_user_to_download": "Allow public user to download",
   "allow_public_user_to_upload": "Allow public user to upload",
+  "alt_text_qr_code": "QR code image",
   "anti_clockwise": "Anti-clockwise",
   "api_key": "API Key",
   "api_key_description": "This value will only be shown once. Please be sure to copy it before closing the window.",
@@ -889,6 +891,7 @@
   "month": "Month",
   "more": "More",
   "moved_to_trash": "Moved to trash",
+  "mute_memories": "Mute Memories",
   "my_albums": "My albums",
   "name": "Name",
   "name_or_nickname": "Name or nickname",
@@ -1114,6 +1117,7 @@
   "say_something": "Say something",
   "scan_all_libraries": "Scan All Libraries",
   "scan_library": "Scan",
+  "rescan": "Rescan",
   "scan_settings": "Scan Settings",
   "scanning_for_album": "Scanning for album...",
   "search": "Search",
@@ -1302,6 +1306,7 @@
   "unnamed_album": "Unnamed Album",
   "unnamed_album_delete_confirmation": "Are you sure you want to delete this album?",
   "unnamed_share": "Unnamed Share",
+  "unmute_memories": "Unmute Memories",
   "unsaved_change": "Unsaved change",
   "unselect_all": "Unselect all",
   "unselect_all_duplicates": "Unselect all duplicates",
@@ -1352,6 +1357,7 @@
   "view_all": "View All",
   "view_all_users": "View all users",
   "view_in_timeline": "View in timeline",
+  "view_link": "View link",
   "view_links": "View links",
   "view_name": "View",
   "view_next_asset": "View next asset",
@@ -1,6 +1,6 @@
 ARG DEVICE=cpu

-FROM python:3.11-bookworm@sha256:14b4620f59a90f163dfa6bd252b68743f9a41d494a9fde935f9d7669d98094bb AS builder-cpu
+FROM python:3.11-bookworm@sha256:68a8863d0625f42d47e0684f33ca02f19d6094ef859a8af237aaf645195ed477 AS builder-cpu

 FROM builder-cpu AS builder-openvino

@@ -15,6 +15,34 @@ RUN mkdir /opt/armnn && \
     cd /opt/ann && \
     sh build.sh

+# Warning: 25GiB+ disk space required to pull this image
+# TODO: find a way to reduce the image size
+FROM rocm/dev-ubuntu-22.04:6.3.1-complete AS builder-rocm
+
+WORKDIR /code
+
+RUN apt-get update && apt-get install -y --no-install-recommends wget git python3.10-venv
+RUN wget -nv https://github.com/Kitware/CMake/releases/download/v3.30.1/cmake-3.30.1-linux-x86_64.sh && \
+    chmod +x cmake-3.30.1-linux-x86_64.sh && \
+    mkdir -p /code/cmake-3.30.1-linux-x86_64 && \
+    ./cmake-3.30.1-linux-x86_64.sh --skip-license --prefix=/code/cmake-3.30.1-linux-x86_64 && \
+    rm cmake-3.30.1-linux-x86_64.sh
+
+ENV PATH=/code/cmake-3.30.1-linux-x86_64/bin:${PATH}
+
+RUN git clone --single-branch --branch v1.20.1 --recursive "https://github.com/Microsoft/onnxruntime" onnxruntime
+WORKDIR /code/onnxruntime
+# Fix for multi-threading based on comments in https://github.com/microsoft/onnxruntime/pull/19567
+# TODO: find a way to fix this without disabling algo caching
+COPY ./rocm-PR19567.patch /tmp/
+RUN git apply /tmp/rocm-PR19567.patch
+
+RUN /bin/sh ./dockerfiles/scripts/install_common_deps.sh
+# Note: the `parallel` setting uses a substantial amount of RAM
+RUN ./build.sh --allow_running_as_root --config Release --build_wheel --update --build --parallel 13 --cmake_extra_defines\
+    ONNXRUNTIME_VERSION=1.20.1 --use_rocm --rocm_home=/opt/rocm
+RUN mv /code/onnxruntime/build/Linux/Release/dist/*.whl /opt/
+
 FROM builder-${DEVICE} AS builder

 ARG DEVICE
@@ -32,18 +60,21 @@ RUN poetry config installer.max-workers 10 && \
 RUN python3 -m venv /opt/venv

 COPY poetry.lock pyproject.toml ./
+RUN if [ "$DEVICE" = "rocm" ]; then \
+    poetry add /opt/onnxruntime_rocm-*.whl; \
+    fi
 RUN poetry install --sync --no-interaction --no-ansi --no-root --with ${DEVICE} --without dev

-FROM python:3.11-slim-bookworm@sha256:42420f737ba91d509fc60d5ed65ed0492678a90c561e1fa08786ae8ba8b52eda AS prod-cpu
+FROM python:3.11-slim-bookworm@sha256:614c8691ab74150465ec9123378cd4dde7a6e57be9e558c3108df40664667a4c AS prod-cpu

 FROM prod-cpu AS prod-openvino

 RUN apt-get update && \
     apt-get install --no-install-recommends -yqq ocl-icd-libopencl1 wget && \
-    wget https://github.com/intel/intel-graphics-compiler/releases/download/igc-1.0.17384.11/intel-igc-core_1.0.17384.11_amd64.deb && \
-    wget https://github.com/intel/intel-graphics-compiler/releases/download/igc-1.0.17384.11/intel-igc-opencl_1.0.17384.11_amd64.deb && \
-    wget https://github.com/intel/compute-runtime/releases/download/24.31.30508.7/intel-opencl-icd_24.31.30508.7_amd64.deb && \
-    wget https://github.com/intel/compute-runtime/releases/download/24.31.30508.7/libigdgmm12_22.4.1_amd64.deb && \
+    wget -nv https://github.com/intel/intel-graphics-compiler/releases/download/igc-1.0.17384.11/intel-igc-core_1.0.17384.11_amd64.deb && \
+    wget -nv https://github.com/intel/intel-graphics-compiler/releases/download/igc-1.0.17384.11/intel-igc-opencl_1.0.17384.11_amd64.deb && \
+    wget -nv https://github.com/intel/compute-runtime/releases/download/24.31.30508.7/intel-opencl-icd_24.31.30508.7_amd64.deb && \
+    wget -nv https://github.com/intel/compute-runtime/releases/download/24.31.30508.7/libigdgmm12_22.4.1_amd64.deb && \
     dpkg -i *.deb && \
     rm *.deb && \
     apt-get remove wget -yqq && \
@@ -80,11 +111,15 @@ COPY --from=builder-armnn \
     /opt/ann/build.sh \
     /opt/armnn/

+FROM rocm/dev-ubuntu-22.04:6.3.1-complete AS prod-rocm
+
+
 FROM prod-${DEVICE} AS prod

 ARG DEVICE

 RUN apt-get update && \
-    apt-get install -y --no-install-recommends tini $(if ! [ "$DEVICE" = "openvino" ]; then echo "libmimalloc2.0"; fi) && \
+    apt-get install -y --no-install-recommends tini $(if ! [ "$DEVICE" = "openvino" ] && ! [ "$DEVICE" = "rocm" ]; then echo "libmimalloc2.0"; fi) && \
     apt-get autoremove -yqq && \
     apt-get clean && \
     rm -rf /var/lib/apt/lists/*
@@ -7,7 +7,7 @@

 This project uses [Poetry](https://python-poetry.org/docs/#installation), so be sure to install it first.
 Running `poetry install --no-root --with dev --with cpu` will install everything you need in an isolated virtual environment.
-CUDA and OpenVINO are supported as acceleration APIs. To use them, you can replace `--with cpu` with either of `--with cuda` or `--with openvino`. In the case of CUDA, a [compute capability](https://developer.nvidia.com/cuda-gpus) of 5.2 or higher is required.
+CUDA, ROCM and OpenVINO are supported as acceleration APIs. To use them, you can replace `--with cpu` with either of `--with cuda`, `--with rocm` or `--with openvino`. In the case of CUDA, a [compute capability](https://developer.nvidia.com/cuda-gpus) of 5.2 or higher is required.

 To add or remove dependencies, you can use the commands `poetry add $PACKAGE_NAME` and `poetry remove $PACKAGE_NAME`, respectively.
 Be sure to commit the `poetry.lock` and `pyproject.toml` files with `poetry lock --no-update` to reflect any changes in dependencies.
@@ -63,7 +63,12 @@ _INSIGHTFACE_MODELS = {
 }


-SUPPORTED_PROVIDERS = ["CUDAExecutionProvider", "OpenVINOExecutionProvider", "CPUExecutionProvider"]
+SUPPORTED_PROVIDERS = [
+    "CUDAExecutionProvider",
+    "ROCMExecutionProvider",
+    "OpenVINOExecutionProvider",
+    "CPUExecutionProvider",
+]


 def get_model_source(model_name: str) -> ModelSource | None:
@@ -20,9 +20,8 @@ class FaceRecognizer(InferenceModel):
     depends = [(ModelType.DETECTION, ModelTask.FACIAL_RECOGNITION)]
     identity = (ModelType.RECOGNITION, ModelTask.FACIAL_RECOGNITION)

-    def __init__(self, model_name: str, min_score: float = 0.7, **model_kwargs: Any) -> None:
+    def __init__(self, model_name: str, **model_kwargs: Any) -> None:
         super().__init__(model_name, **model_kwargs)
-        self.min_score = model_kwargs.pop("minScore", min_score)
         max_batch_size = settings.max_batch_size.facial_recognition if settings.max_batch_size else None
         self.batch_size = max_batch_size if max_batch_size else self._batch_size_default

@@ -88,7 +88,7 @@ class OrtSession:
         match provider:
             case "CPUExecutionProvider":
                 options = {"arena_extend_strategy": "kSameAsRequested"}
-            case "CUDAExecutionProvider":
+            case "CUDAExecutionProvider" | "ROCMExecutionProvider":
                 options = {"arena_extend_strategy": "kSameAsRequested", "device_id": settings.device_id}
             case "OpenVINOExecutionProvider":
                 options = {
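For background on the change above: in onnxruntime, entries in `provider_options` pair positionally with the `providers` list, which is why the CUDA/ROCm branch supplies a `device_id` for its provider. A minimal sketch (the model path and device id are placeholders):

```python
import onnxruntime as ort

providers = ["ROCMExecutionProvider", "CPUExecutionProvider"]
provider_options = [
    {"arena_extend_strategy": "kSameAsRequested", "device_id": "0"},
    {"arena_extend_strategy": "kSameAsRequested"},
]

# onnxruntime falls back to later providers when an earlier one is unavailable in the installed build.
session = ort.InferenceSession("model.onnx", providers=providers, provider_options=provider_options)
print(session.get_providers())
```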
@@ -324,7 +324,7 @@ class TestAnnSession:
         session.run(None, input_feed)

         ann_session.return_value.execute.assert_called_once_with(123, [input1, input2])
-        np_spy.call_count == 2
+        assert np_spy.call_count == 2
         np_spy.assert_has_calls([mock.call(input1), mock.call(input2)])


@@ -457,11 +457,14 @@ class TestCLIP:


 class TestFaceRecognition:
-    def test_set_min_score(self, mocker: MockerFixture) -> None:
-        mocker.patch.object(FaceRecognizer, "load")
-        face_recognizer = FaceRecognizer("buffalo_s", cache_dir="test_cache", min_score=0.5)
-
-        assert face_recognizer.min_score == 0.5
+    def test_set_min_score(self, snapshot_download: mock.Mock, ort_session: mock.Mock, path: mock.Mock) -> None:
+        path.return_value.__truediv__.return_value.__truediv__.return_value.suffix = ".onnx"
+
+        face_detector = FaceDetector("buffalo_s", min_score=0.5, cache_dir="test_cache")
+        face_detector.load()
+
+        assert face_detector.min_score == 0.5
+        assert face_detector.model.det_thresh == 0.5

     def test_detection(self, cv_image: cv2.Mat, mocker: MockerFixture) -> None:
         mocker.patch.object(FaceDetector, "load")
@@ -14,12 +14,6 @@ byte_image = BytesIO()
 def _(parser: ArgumentParser) -> None:
     parser.add_argument("--clip-model", type=str, default="ViT-B-32::openai")
     parser.add_argument("--face-model", type=str, default="buffalo_l")
-    parser.add_argument(
-        "--tag-min-score",
-        type=int,
-        default=0.0,
-        help="Returns all tags at or above this score. The default returns all tags.",
-    )
     parser.add_argument(
         "--face-min-score",
         type=int,
@@ -74,10 +68,10 @@ class RecognitionFormDataLoadTest(InferenceLoadTest):
         "facial-recognition": {
             "recognition": {
                 "modelName": self.environment.parsed_options.face_model,
-                "options": {"minScore": self.environment.parsed_options.face_min_score},
             },
             "detection": {
                 "modelName": self.environment.parsed_options.face_model,
+                "options": {"minScore": self.environment.parsed_options.face_min_score},
             },
         }
     }
166
machine-learning/poetry.lock
generated
166
machine-learning/poetry.lock
generated
@@ -1,4 +1,4 @@
|
|||||||
# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand.
|
# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "aiocache"
|
name = "aiocache"
|
||||||
@@ -75,33 +75,33 @@ trio = ["trio (>=0.23)"]
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "black"
|
name = "black"
|
||||||
version = "24.10.0"
|
version = "25.1.0"
|
||||||
description = "The uncompromising code formatter."
|
description = "The uncompromising code formatter."
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.9"
files = [
- {file = "black-24.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6668650ea4b685440857138e5fe40cde4d652633b1bdffc62933d0db4ed9812"},
+ {file = "black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32"},
- {file = "black-24.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1c536fcf674217e87b8cc3657b81809d3c085d7bf3ef262ead700da345bfa6ea"},
+ {file = "black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da"},
- {file = "black-24.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:649fff99a20bd06c6f727d2a27f401331dc0cc861fb69cde910fe95b01b5928f"},
+ {file = "black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7"},
- {file = "black-24.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe4d6476887de70546212c99ac9bd803d90b42fc4767f058a0baa895013fbb3e"},
+ {file = "black-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9"},
- {file = "black-24.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a2221696a8224e335c28816a9d331a6c2ae15a2ee34ec857dcf3e45dbfa99ad"},
+ {file = "black-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0"},
- {file = "black-24.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9da3333530dbcecc1be13e69c250ed8dfa67f43c4005fb537bb426e19200d50"},
+ {file = "black-25.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299"},
- {file = "black-24.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4007b1393d902b48b36958a216c20c4482f601569d19ed1df294a496eb366392"},
+ {file = "black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096"},
- {file = "black-24.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:394d4ddc64782e51153eadcaaca95144ac4c35e27ef9b0a42e121ae7e57a9175"},
+ {file = "black-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2"},
- {file = "black-24.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e39e0fae001df40f95bd8cc36b9165c5e2ea88900167bddf258bacef9bbdc3"},
+ {file = "black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b"},
- {file = "black-24.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d37d422772111794b26757c5b55a3eade028aa3fde43121ab7b673d050949d65"},
+ {file = "black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc"},
- {file = "black-24.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14b3502784f09ce2443830e3133dacf2c0110d45191ed470ecb04d0f5f6fcb0f"},
+ {file = "black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f"},
- {file = "black-24.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:30d2c30dc5139211dda799758559d1b049f7f14c580c409d6ad925b74a4208a8"},
+ {file = "black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba"},
- {file = "black-24.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cbacacb19e922a1d75ef2b6ccaefcd6e93a2c05ede32f06a21386a04cedb981"},
+ {file = "black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f"},
- {file = "black-24.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1f93102e0c5bb3907451063e08b9876dbeac810e7da5a8bfb7aeb5a9ef89066b"},
+ {file = "black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3"},
- {file = "black-24.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ddacb691cdcdf77b96f549cf9591701d8db36b2f19519373d60d31746068dbf2"},
+ {file = "black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171"},
- {file = "black-24.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:680359d932801c76d2e9c9068d05c6b107f2584b2a5b88831c83962eb9984c1b"},
+ {file = "black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18"},
- {file = "black-24.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:17374989640fbca88b6a448129cd1745c5eb8d9547b464f281b251dd00155ccd"},
+ {file = "black-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1ee0a0c330f7b5130ce0caed9936a904793576ef4d2b98c40835d6a65afa6a0"},
- {file = "black-24.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:63f626344343083322233f175aaf372d326de8436f5928c042639a4afbbf1d3f"},
+ {file = "black-25.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3df5f1bf91d36002b0a75389ca8663510cf0531cca8aa5c1ef695b46d98655f"},
- {file = "black-24.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfa1d0cb6200857f1923b602f978386a3a2758a65b52e0950299ea014be6800"},
+ {file = "black-25.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9e6827d563a2c820772b32ce8a42828dc6790f095f441beef18f96aa6f8294e"},
- {file = "black-24.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:2cd9c95431d94adc56600710f8813ee27eea544dd118d45896bb734e9d7a0dc7"},
+ {file = "black-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:bacabb307dca5ebaf9c118d2d2f6903da0d62c9faa82bd21a33eecc319559355"},
- {file = "black-24.10.0-py3-none-any.whl", hash = "sha256:3bb2b7a1f7b685f85b11fed1ef10f8a9148bceb49853e47a294a3dd963c1dd7d"},
+ {file = "black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717"},
- {file = "black-24.10.0.tar.gz", hash = "sha256:846ea64c97afe3bc677b761787993be4991810ecc7a4a937816dd6bddedc4875"},
+ {file = "black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666"},
]

[package.dependencies]
@@ -147,10 +147,6 @@ files = [
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a37b8f0391212d29b3a91a799c8e4a2855e0576911cdfb2515487e30e322253d"},
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e84799f09591700a4154154cab9787452925578841a94321d5ee8fb9a9a328f0"},
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f66b5337fa213f1da0d9000bc8dc0cb5b896b726eefd9c6046f699b169c41b9e"},
- {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5dab0844f2cf82be357a0eb11a9087f70c5430b2c241493fc122bb6f2bb0917c"},
- {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e4fe605b917c70283db7dfe5ada75e04561479075761a0b3866c081d035b01c1"},
- {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1e9a65b5736232e7a7f91ff3d02277f11d339bf34099a56cdab6a8b3410a02b2"},
- {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:58d4b711689366d4a03ac7957ab8c28890415e267f9b6589969e74b6e42225ec"},
{file = "Brotli-1.1.0-cp310-cp310-win32.whl", hash = "sha256:be36e3d172dc816333f33520154d708a2657ea63762ec16b62ece02ab5e4daf2"},
{file = "Brotli-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c6244521dda65ea562d5a69b9a26120769b7a9fb3db2fe9545935ed6735b128"},
{file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc"},
@@ -163,14 +159,8 @@ files = [
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9"},
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265"},
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8"},
- {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c247dd99d39e0338a604f8c2b3bc7061d5c2e9e2ac7ba9cc1be5a69cb6cd832f"},
- {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1b2c248cd517c222d89e74669a4adfa5577e06ab68771a529060cf5a156e9757"},
- {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2a24c50840d89ded6c9a8fdc7b6ed3692ed4e86f1c4a4a938e1e92def92933e0"},
- {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f31859074d57b4639318523d6ffdca586ace54271a73ad23ad021acd807eb14b"},
{file = "Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50"},
{file = "Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1"},
- {file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:32d95b80260d79926f5fab3c41701dbb818fde1c9da590e77e571eefd14abe28"},
- {file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b760c65308ff1e462f65d69c12e4ae085cff3b332d894637f6273a12a482d09f"},
{file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409"},
{file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2"},
{file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451"},
@@ -181,24 +171,8 @@ files = [
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180"},
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248"},
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966"},
- {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:87a3044c3a35055527ac75e419dfa9f4f3667a1e887ee80360589eb8c90aabb9"},
- {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c5529b34c1c9d937168297f2c1fde7ebe9ebdd5e121297ff9c043bdb2ae3d6fb"},
- {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ca63e1890ede90b2e4454f9a65135a4d387a4585ff8282bb72964fab893f2111"},
- {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e79e6520141d792237c70bcd7a3b122d00f2613769ae0cb61c52e89fd3443839"},
{file = "Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0"},
{file = "Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951"},
- {file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8bf32b98b75c13ec7cf774164172683d6e7891088f6316e54425fde1efc276d5"},
- {file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7bc37c4d6b87fb1017ea28c9508b36bbcb0c3d18b4260fcdf08b200c74a6aee8"},
- {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c0ef38c7a7014ffac184db9e04debe495d317cc9c6fb10071f7fefd93100a4f"},
- {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91d7cc2a76b5567591d12c01f019dd7afce6ba8cba6571187e21e2fc418ae648"},
- {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a93dde851926f4f2678e704fadeb39e16c35d8baebd5252c9fd94ce8ce68c4a0"},
- {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0db75f47be8b8abc8d9e31bc7aad0547ca26f24a54e6fd10231d623f183d089"},
- {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6967ced6730aed543b8673008b5a391c3b1076d834ca438bbd70635c73775368"},
- {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7eedaa5d036d9336c95915035fb57422054014ebdeb6f3b42eac809928e40d0c"},
- {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d487f5432bf35b60ed625d7e1b448e2dc855422e87469e3f450aa5552b0eb284"},
- {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:832436e59afb93e1836081a20f324cb185836c617659b07b129141a8426973c7"},
- {file = "Brotli-1.1.0-cp313-cp313-win32.whl", hash = "sha256:43395e90523f9c23a3d5bdf004733246fba087f2948f87ab28015f12359ca6a0"},
- {file = "Brotli-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:9011560a466d2eb3f5a6e4929cf4a09be405c64154e12df0dd72713f6500e32b"},
{file = "Brotli-1.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a090ca607cbb6a34b0391776f0cb48062081f5f60ddcce5d11838e67a01928d1"},
{file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de9d02f5bda03d27ede52e8cfe7b865b066fa49258cbab568720aa5be80a47d"},
{file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2333e30a5e00fe0fe55903c8832e08ee9c3b1382aacf4db26664a16528d51b4b"},
@@ -208,10 +182,6 @@ files = [
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:fd5f17ff8f14003595ab414e45fce13d073e0762394f957182e69035c9f3d7c2"},
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:069a121ac97412d1fe506da790b3e69f52254b9df4eb665cd42460c837193354"},
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e93dfc1a1165e385cc8239fab7c036fb2cd8093728cbd85097b284d7b99249a2"},
- {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:aea440a510e14e818e67bfc4027880e2fb500c2ccb20ab21c7a7c8b5b4703d75"},
- {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_i686.whl", hash = "sha256:6974f52a02321b36847cd19d1b8e381bf39939c21efd6ee2fc13a28b0d99348c"},
- {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:a7e53012d2853a07a4a79c00643832161a910674a893d296c9f1259859a289d2"},
- {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:d7702622a8b40c49bffb46e1e3ba2e81268d5c04a34f460978c6b5517a34dd52"},
{file = "Brotli-1.1.0-cp36-cp36m-win32.whl", hash = "sha256:a599669fd7c47233438a56936988a2478685e74854088ef5293802123b5b2460"},
{file = "Brotli-1.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d143fd47fad1db3d7c27a1b1d66162e855b5d50a89666af46e1679c496e8e579"},
{file = "Brotli-1.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:11d00ed0a83fa22d29bc6b64ef636c4552ebafcef57154b4ddd132f5638fbd1c"},
@@ -223,10 +193,6 @@ files = [
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:919e32f147ae93a09fe064d77d5ebf4e35502a8df75c29fb05788528e330fe74"},
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:23032ae55523cc7bccb4f6a0bf368cd25ad9bcdcc1990b64a647e7bbcce9cb5b"},
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:224e57f6eac61cc449f498cc5f0e1725ba2071a3d4f48d5d9dffba42db196438"},
- {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:cb1dac1770878ade83f2ccdf7d25e494f05c9165f5246b46a621cc849341dc01"},
- {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:3ee8a80d67a4334482d9712b8e83ca6b1d9bc7e351931252ebef5d8f7335a547"},
- {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:5e55da2c8724191e5b557f8e18943b1b4839b8efc3ef60d65985bcf6f587dd38"},
- {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:d342778ef319e1026af243ed0a07c97acf3bad33b9f29e7ae6a1f68fd083e90c"},
{file = "Brotli-1.1.0-cp37-cp37m-win32.whl", hash = "sha256:587ca6d3cef6e4e868102672d3bd9dc9698c309ba56d41c2b9c85bbb903cdb95"},
{file = "Brotli-1.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2954c1c23f81c2eaf0b0717d9380bd348578a94161a65b3a2afc62c86467dd68"},
{file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:efa8b278894b14d6da122a72fefcebc28445f2d3f880ac59d46c90f4c13be9a3"},
@@ -239,10 +205,6 @@ files = [
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ab4fbee0b2d9098c74f3057b2bc055a8bd92ccf02f65944a241b4349229185a"},
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:141bd4d93984070e097521ed07e2575b46f817d08f9fa42b16b9b5f27b5ac088"},
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fce1473f3ccc4187f75b4690cfc922628aed4d3dd013d047f95a9b3919a86596"},
- {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d2b35ca2c7f81d173d2fadc2f4f31e88cc5f7a39ae5b6db5513cf3383b0e0ec7"},
- {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:af6fa6817889314555aede9a919612b23739395ce767fe7fcbea9a80bf140fe5"},
- {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:2feb1d960f760a575dbc5ab3b1c00504b24caaf6986e2dc2b01c09c87866a943"},
- {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4410f84b33374409552ac9b6903507cdb31cd30d2501fc5ca13d18f73548444a"},
{file = "Brotli-1.1.0-cp38-cp38-win32.whl", hash = "sha256:db85ecf4e609a48f4b29055f1e144231b90edc90af7481aa731ba2d059226b1b"},
{file = "Brotli-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3d7954194c36e304e1523f55d7042c59dc53ec20dd4e9ea9d151f1b62b4415c0"},
{file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5fb2ce4b8045c78ebbc7b8f3c15062e435d47e7393cc57c25115cfd49883747a"},
@@ -255,10 +217,6 @@ files = [
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:949f3b7c29912693cee0afcf09acd6ebc04c57af949d9bf77d6101ebb61e388c"},
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:89f4988c7203739d48c6f806f1e87a1d96e0806d44f0fba61dba81392c9e474d"},
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:de6551e370ef19f8de1807d0a9aa2cdfdce2e85ce88b122fe9f6b2b076837e59"},
- {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0737ddb3068957cf1b054899b0883830bb1fec522ec76b1098f9b6e0f02d9419"},
- {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4f3607b129417e111e30637af1b56f24f7a49e64763253bbc275c75fa887d4b2"},
- {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:6c6e0c425f22c1c719c42670d561ad682f7bfeeef918edea971a79ac5252437f"},
- {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:494994f807ba0b92092a163a0a283961369a65f6cbe01e8891132b7a320e61eb"},
{file = "Brotli-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f0d8a7a6b5983c2496e364b969f0e526647a06b075d034f3297dc66f3b360c64"},
{file = "Brotli-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cdad5b9014d83ca68c25d2e9444e28e967ef16e80f6b436918c700c117a85467"},
{file = "Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"},
@@ -1331,13 +1289,13 @@ zstd = ["zstandard (>=0.18.0)"]

[[package]]
name = "huggingface-hub"
- version = "0.28.1"
+ version = "0.29.1"
description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub"
optional = false
python-versions = ">=3.8.0"
files = [
- {file = "huggingface_hub-0.28.1-py3-none-any.whl", hash = "sha256:aa6b9a3ffdae939b72c464dbb0d7f99f56e649b55c3d52406f49e0a5a620c0a7"},
+ {file = "huggingface_hub-0.29.1-py3-none-any.whl", hash = "sha256:352f69caf16566c7b6de84b54a822f6238e17ddd8ae3da4f8f2272aea5b198d5"},
- {file = "huggingface_hub-0.28.1.tar.gz", hash = "sha256:893471090c98e3b6efbdfdacafe4052b20b84d59866fb6f54c33d9af18c303ae"},
+ {file = "huggingface_hub-0.29.1.tar.gz", hash = "sha256:9524eae42077b8ff4fc459ceb7a514eca1c1232b775276b009709fe2a084f250"},
]

[package.dependencies]
@@ -1625,23 +1583,23 @@ test = ["pytest (>=7.4)", "pytest-cov (>=4.1)"]

[[package]]
name = "locust"
- version = "2.32.9"
+ version = "2.33.0"
description = "Developer-friendly load testing framework"
optional = false
python-versions = ">=3.9"
files = [
- {file = "locust-2.32.9-py3-none-any.whl", hash = "sha256:d9447c26d2bbaec5a0ace7cadefa1a31820ed392234257b309965a43d5e8d26f"},
+ {file = "locust-2.33.0-py3-none-any.whl", hash = "sha256:77fcc5cc35cceee5e12d99f5bb23bc441d145bdef6967c2e93d6e4d93451553e"},
- {file = "locust-2.32.9.tar.gz", hash = "sha256:4c297afa5cdc3de15dfa79279576e5f33c1d69dd70006b51d079dcbd212201cc"},
+ {file = "locust-2.33.0.tar.gz", hash = "sha256:ba291b7ab2349cc2db540adb8888bc93feb89ea4e4e10d80b935e5065091e8e9"},
]

[package.dependencies]
- ConfigArgParse = ">=1.5.5"
+ configargparse = ">=1.5.5"
flask = ">=2.0.0"
- Flask-Cors = ">=3.0.10"
+ flask-cors = ">=3.0.10"
- Flask-Login = ">=0.6.3"
+ flask-login = ">=0.6.3"
gevent = [
- {version = ">=22.10.2", markers = "python_full_version <= \"3.12.0\""},
+ {version = ">=22.10.2", markers = "python_version <= \"3.12\""},
- {version = ">=24.10.1", markers = "python_full_version > \"3.13.0\""},
+ {version = ">=24.10.1", markers = "python_version > \"3.13\""},
]
geventhttpclient = ">=2.3.1"
msgpack = ">=1.0.0"
@@ -1649,13 +1607,13 @@ psutil = ">=5.9.1"
pywin32 = {version = "*", markers = "sys_platform == \"win32\""}
pyzmq = ">=25.0.0"
requests = [
- {version = ">=2.26.0", markers = "python_full_version <= \"3.11.0\""},
+ {version = ">=2.26.0", markers = "python_version <= \"3.11\""},
- {version = ">=2.32.2", markers = "python_full_version > \"3.11.0\""},
+ {version = ">=2.32.2", markers = "python_version > \"3.11\""},
]
setuptools = ">=70.0.0"
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
- typing_extensions = {version = ">=4.6.0", markers = "python_version < \"3.11\""}
+ typing-extensions = {version = ">=4.6.0", markers = "python_version < \"3.11\""}
- Werkzeug = ">=2.0.0"
+ werkzeug = ">=2.0.0"

[[package]]
name = "markdown-it-py"
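The locust hunk above is mostly lockfile normalization (ConfigArgParse becomes configargparse, Werkzeug becomes werkzeug), but the marker rewrite carries a real semantic change: python_full_version <= "3.12.0" matches only 3.12.0 itself, while python_version <= "3.12" matches every 3.12.x patch release. A minimal sketch of the difference using the packaging library (the environment dict is hypothetical, for illustration only):

    # Sketch: evaluate the old and new markers against a hypothetical
    # CPython 3.12.1 environment. Requires the `packaging` package.
    from packaging.markers import Marker

    old = Marker('python_full_version <= "3.12.0"')
    new = Marker('python_version <= "3.12"')

    env = {"python_version": "3.12", "python_full_version": "3.12.1"}
    print(old.evaluate(env))  # False -- 3.12.1 is newer than 3.12.0
    print(new.evaluate(env))  # True  -- any 3.12.x satisfies the new marker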
@@ -2628,13 +2586,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"

[[package]]
name = "pydantic-settings"
- version = "2.7.1"
+ version = "2.8.1"
description = "Settings management using Pydantic"
optional = false
python-versions = ">=3.8"
files = [
- {file = "pydantic_settings-2.7.1-py3-none-any.whl", hash = "sha256:590be9e6e24d06db33a4262829edef682500ef008565a969c73d39d5f8bfb3fd"},
+ {file = "pydantic_settings-2.8.1-py3-none-any.whl", hash = "sha256:81942d5ac3d905f7f3ee1a70df5dfb62d5569c12f51a5a647defc1c3d9ee2e9c"},
- {file = "pydantic_settings-2.7.1.tar.gz", hash = "sha256:10c9caad35e64bfb3c2fbf70a078c0e25cc92499782e5200747f942a065dec93"},
+ {file = "pydantic_settings-2.8.1.tar.gz", hash = "sha256:d5c663dfbe9db9d5e1c646b2e161da12f0d734d422ee56f567d0ea2cee4e8585"},
]

[package.dependencies]
@@ -3047,29 +3005,29 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"]

[[package]]
name = "ruff"
- version = "0.9.6"
+ version = "0.9.9"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
- {file = "ruff-0.9.6-py3-none-linux_armv6l.whl", hash = "sha256:2f218f356dd2d995839f1941322ff021c72a492c470f0b26a34f844c29cdf5ba"},
+ {file = "ruff-0.9.9-py3-none-linux_armv6l.whl", hash = "sha256:628abb5ea10345e53dff55b167595a159d3e174d6720bf19761f5e467e68d367"},
- {file = "ruff-0.9.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b908ff4df65dad7b251c9968a2e4560836d8f5487c2f0cc238321ed951ea0504"},
+ {file = "ruff-0.9.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b6cd1428e834b35d7493354723543b28cc11dc14d1ce19b685f6e68e07c05ec7"},
- {file = "ruff-0.9.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:b109c0ad2ececf42e75fa99dc4043ff72a357436bb171900714a9ea581ddef83"},
+ {file = "ruff-0.9.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5ee162652869120ad260670706f3cd36cd3f32b0c651f02b6da142652c54941d"},
- {file = "ruff-0.9.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1de4367cca3dac99bcbd15c161404e849bb0bfd543664db39232648dc00112dc"},
+ {file = "ruff-0.9.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3aa0f6b75082c9be1ec5a1db78c6d4b02e2375c3068438241dc19c7c306cc61a"},
- {file = "ruff-0.9.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac3ee4d7c2c92ddfdaedf0bf31b2b176fa7aa8950efc454628d477394d35638b"},
+ {file = "ruff-0.9.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:584cc66e89fb5f80f84b05133dd677a17cdd86901d6479712c96597a3f28e7fe"},
- {file = "ruff-0.9.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5dc1edd1775270e6aa2386119aea692039781429f0be1e0949ea5884e011aa8e"},
+ {file = "ruff-0.9.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abf3369325761a35aba75cd5c55ba1b5eb17d772f12ab168fbfac54be85cf18c"},
- {file = "ruff-0.9.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:4a091729086dffa4bd070aa5dab7e39cc6b9d62eb2bef8f3d91172d30d599666"},
+ {file = "ruff-0.9.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:3403a53a32a90ce929aa2f758542aca9234befa133e29f4933dcef28a24317be"},
- {file = "ruff-0.9.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1bbc6808bf7b15796cef0815e1dfb796fbd383e7dbd4334709642649625e7c5"},
+ {file = "ruff-0.9.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:18454e7fa4e4d72cffe28a37cf6a73cb2594f81ec9f4eca31a0aaa9ccdfb1590"},
- {file = "ruff-0.9.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:589d1d9f25b5754ff230dce914a174a7c951a85a4e9270613a2b74231fdac2f5"},
+ {file = "ruff-0.9.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fadfe2c88724c9617339f62319ed40dcdadadf2888d5afb88bf3adee7b35bfb"},
- {file = "ruff-0.9.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc61dd5131742e21103fbbdcad683a8813be0e3c204472d520d9a5021ca8b217"},
+ {file = "ruff-0.9.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6df104d08c442a1aabcfd254279b8cc1e2cbf41a605aa3e26610ba1ec4acf0b0"},
- {file = "ruff-0.9.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5e2d9126161d0357e5c8f30b0bd6168d2c3872372f14481136d13de9937f79b6"},
+ {file = "ruff-0.9.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d7c62939daf5b2a15af48abbd23bea1efdd38c312d6e7c4cedf5a24e03207e17"},
- {file = "ruff-0.9.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:68660eab1a8e65babb5229a1f97b46e3120923757a68b5413d8561f8a85d4897"},
+ {file = "ruff-0.9.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:9494ba82a37a4b81b6a798076e4a3251c13243fc37967e998efe4cce58c8a8d1"},
- {file = "ruff-0.9.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c4cae6c4cc7b9b4017c71114115db0445b00a16de3bcde0946273e8392856f08"},
+ {file = "ruff-0.9.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:4efd7a96ed6d36ef011ae798bf794c5501a514be369296c672dab7921087fa57"},
- {file = "ruff-0.9.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:19f505b643228b417c1111a2a536424ddde0db4ef9023b9e04a46ed8a1cb4656"},
+ {file = "ruff-0.9.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:ab90a7944c5a1296f3ecb08d1cbf8c2da34c7e68114b1271a431a3ad30cb660e"},
- {file = "ruff-0.9.6-py3-none-win32.whl", hash = "sha256:194d8402bceef1b31164909540a597e0d913c0e4952015a5b40e28c146121b5d"},
+ {file = "ruff-0.9.9-py3-none-win32.whl", hash = "sha256:6b4c376d929c25ecd6d87e182a230fa4377b8e5125a4ff52d506ee8c087153c1"},
- {file = "ruff-0.9.6-py3-none-win_amd64.whl", hash = "sha256:03482d5c09d90d4ee3f40d97578423698ad895c87314c4de39ed2af945633caa"},
+ {file = "ruff-0.9.9-py3-none-win_amd64.whl", hash = "sha256:837982ea24091d4c1700ddb2f63b7070e5baec508e43b01de013dc7eff974ff1"},
- {file = "ruff-0.9.6-py3-none-win_arm64.whl", hash = "sha256:0e2bb706a2be7ddfea4a4af918562fdc1bcb16df255e5fa595bbd800ce322a5a"},
+ {file = "ruff-0.9.9-py3-none-win_arm64.whl", hash = "sha256:3ac78f127517209fe6d96ab00f3ba97cafe38718b23b1db3e96d8b2d39e37ddf"},
- {file = "ruff-0.9.6.tar.gz", hash = "sha256:81761592f72b620ec8fa1068a6fd00e98a5ebee342a3642efd84454f3031dca9"},
+ {file = "ruff-0.9.9.tar.gz", hash = "sha256:0062ed13f22173e85f8f7056f9a24016e692efeea8704d1a5e8011b8aa850933"},
]

[[package]]
@@ -3735,4 +3693,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.10,<4.0"
- content-hash = "b690d5fbd141da3947f4f1dc029aba1b95e7faafd723166f2c4bdc47a66c095e"
+ content-hash = "271a6c2a76b1b6286e02b91489ffd0c42e92daf151ae932514f5416c7869f71d"
@@ -1,6 +1,6 @@
[tool.poetry]
name = "machine-learning"
- version = "1.127.0"
+ version = "1.128.0"
description = ""
authors = ["Hau Tran <alex.tran1502@gmail.com>"]
readme = "README.md"
@@ -47,6 +47,11 @@ optional = true
[tool.poetry.group.cuda.dependencies]
onnxruntime-gpu = {version = "^1.17.0", source = "cuda12"}

+ [tool.poetry.group.rocm]
+ optional = true
+
+ [tool.poetry.group.rocm.dependencies]
+
[tool.poetry.group.openvino]
optional = true

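The new [tool.poetry.group.rocm] group above is declared optional, so a plain `poetry install` skips it; it is only resolved when requested explicitly (e.g. `poetry install --with rocm`). A small sketch of reading that flag back out, assuming Python 3.11+ for tomllib and that the script runs from the repository root:

    # Sketch only: inspect the group declaration in pyproject.toml.
    # The relative path is an assumption about the working directory.
    import tomllib

    with open("machine-learning/pyproject.toml", "rb") as f:
        pyproject = tomllib.load(f)

    rocm = pyproject["tool"]["poetry"]["group"]["rocm"]
    print(rocm.get("optional"))  # True -> installed only via `poetry install --with rocm`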
176 machine-learning/rocm-PR19567.patch Normal file
@@ -0,0 +1,176 @@
From a598a88db258f82a6e4bca75810921bd6bcee7e0 Mon Sep 17 00:00:00 2001
From: David Nieto <dmnieto@gmail.com>
Date: Sat, 17 Feb 2024 11:23:12 -0800
Subject: [PATCH] Disable algo caching in ROCM EP

Similar to the work done by Liangxijun-1001 in
https://github.com/apache/tvm/pull/16178 the ROCM spec mandates calling
miopenFindConvolution*Algorithm() before using any Convolution API

This is the link to the porting guide describing this requirement
https://rocmdocs.amd.com/projects/MIOpen/en/latest/MIOpen_Porting_Guide.html

Thus, this change disables the algo cache and enforces the official
API semantics

Signed-off-by: David Nieto <dmnieto@gmail.com>
---
onnxruntime/core/providers/rocm/nn/conv.cc | 61 +++++++++----------
onnxruntime/core/providers/rocm/nn/conv.h | 6 --
.../core/providers/rocm/nn/conv_transpose.cc | 17 +++---
3 files changed, 36 insertions(+), 48 deletions(-)

diff --git a/onnxruntime/core/providers/rocm/nn/conv.cc b/onnxruntime/core/providers/rocm/nn/conv.cc
index 6214ec7bc0ea..b08aceca48b1 100644
--- a/onnxruntime/core/providers/rocm/nn/conv.cc
+++ b/onnxruntime/core/providers/rocm/nn/conv.cc
@@ -125,10 +125,8 @@ Status Conv<T, NHWC>::UpdateState(OpKernelContext* context, bool bias_expected)
if (input_dims_changed)
s_.last_x_dims = gsl::make_span(x_dims);

- if (w_dims_changed) {
+ if (w_dims_changed)
s_.last_w_dims = gsl::make_span(w_dims);
- s_.cached_benchmark_fwd_results.clear();
- }

ORT_RETURN_IF_ERROR(conv_attrs_.ValidateInputShape(X->Shape(), W->Shape(), channels_last, channels_last));

@@ -277,35 +275,6 @@ Status Conv<T, NHWC>::UpdateState(OpKernelContext* context, bool bias_expected)
HIP_CALL_THROW(hipMalloc(&s_.b_zero, malloc_size));
HIP_CALL_THROW(hipMemsetAsync(s_.b_zero, 0, malloc_size, Stream(context)));
}
-
- if (!s_.cached_benchmark_fwd_results.contains(x_dims_miopen)) {
- miopenConvAlgoPerf_t perf;
- int algo_count = 1;
- const ROCMExecutionProvider* rocm_ep = static_cast<const ROCMExecutionProvider*>(this->Info().GetExecutionProvider());
- static constexpr int num_algos = MIOPEN_CONVOLUTION_FWD_ALGO_COUNT;
- size_t max_ws_size = rocm_ep->GetMiopenConvUseMaxWorkspace() ? GetMaxWorkspaceSize(GetMiopenHandle(context), s_, kAllAlgos, num_algos)
- : AlgoSearchWorkspaceSize;
- IAllocatorUniquePtr<void> algo_search_workspace = GetTransientScratchBuffer<void>(max_ws_size);
- MIOPEN_RETURN_IF_ERROR(miopenFindConvolutionForwardAlgorithm(
- GetMiopenHandle(context),
- s_.x_tensor,
- s_.x_data,
- s_.w_desc,
- s_.w_data,
- s_.conv_desc,
- s_.y_tensor,
- s_.y_data,
- 1, // requestedAlgoCount
- &algo_count, // returnedAlgoCount
- &perf,
- algo_search_workspace.get(),
- max_ws_size,
- false)); // Do not do exhaustive algo search.
- s_.cached_benchmark_fwd_results.insert(x_dims_miopen, {perf.fwd_algo, perf.memory});
- }
- const auto& perf = s_.cached_benchmark_fwd_results.at(x_dims_miopen);
- s_.fwd_algo = perf.fwd_algo;
- s_.workspace_bytes = perf.memory;
} else {
// set Y
s_.Y = context->Output(0, TensorShape(s_.y_dims));
@@ -319,6 +288,34 @@ Status Conv<T, NHWC>::UpdateState(OpKernelContext* context, bool bias_expected)
s_.y_data = reinterpret_cast<HipT*>(s_.Y->MutableData<T>());
}
}
+ {
+ /* FindConvolution must always be called by the runtime */
+ TensorShapeVector x_dims_miopen{x_dims.begin(), x_dims.end()};
+ miopenConvAlgoPerf_t perf;
+ int algo_count = 1;
+ const ROCMExecutionProvider* rocm_ep = static_cast<const ROCMExecutionProvider*>(this->Info().GetExecutionProvider());
+ static constexpr int num_algos = MIOPEN_CONVOLUTION_FWD_ALGO_COUNT;
+ size_t max_ws_size = rocm_ep->GetMiopenConvUseMaxWorkspace() ? GetMaxWorkspaceSize(GetMiopenHandle(context), s_, kAllAlgos, num_algos)
+ : AlgoSearchWorkspaceSize;
+ IAllocatorUniquePtr<void> algo_search_workspace = GetTransientScratchBuffer<void>(max_ws_size);
+ MIOPEN_RETURN_IF_ERROR(miopenFindConvolutionForwardAlgorithm(
+ GetMiopenHandle(context),
+ s_.x_tensor,
+ s_.x_data,
+ s_.w_desc,
+ s_.w_data,
+ s_.conv_desc,
+ s_.y_tensor,
+ s_.y_data,
+ 1, // requestedAlgoCount
+ &algo_count, // returnedAlgoCount
+ &perf,
+ algo_search_workspace.get(),
+ max_ws_size,
+ false)); // Do not do exhaustive algo search.
+ s_.fwd_algo = perf.fwd_algo;
+ s_.workspace_bytes = perf.memory;
+ }
return Status::OK();
}

diff --git a/onnxruntime/core/providers/rocm/nn/conv.h b/onnxruntime/core/providers/rocm/nn/conv.h
index bc9846203e57..d54218f25854 100644
--- a/onnxruntime/core/providers/rocm/nn/conv.h
+++ b/onnxruntime/core/providers/rocm/nn/conv.h
@@ -108,9 +108,6 @@ class lru_unordered_map {
list_type lru_list_;
};

-// cached miopen descriptors
-constexpr size_t MAX_CACHED_ALGO_PERF_RESULTS = 10000;
-
template <typename AlgoPerfType>
struct MiopenConvState {
// if x/w dims changed, update algo and miopenTensors
@@ -148,9 +145,6 @@ struct MiopenConvState {
decltype(AlgoPerfType().memory) memory;
};

- lru_unordered_map<TensorShapeVector, PerfFwdResultParams, vector_hash> cached_benchmark_fwd_results{MAX_CACHED_ALGO_PERF_RESULTS};
- lru_unordered_map<TensorShapeVector, PerfBwdResultParams, vector_hash> cached_benchmark_bwd_results{MAX_CACHED_ALGO_PERF_RESULTS};
-
// Some properties needed to support asymmetric padded Conv nodes
bool post_slicing_required;
TensorShapeVector slice_starts;
diff --git a/onnxruntime/core/providers/rocm/nn/conv_transpose.cc b/onnxruntime/core/providers/rocm/nn/conv_transpose.cc
index 7447113fdf84..45ed4c8ac37a 100644
--- a/onnxruntime/core/providers/rocm/nn/conv_transpose.cc
+++ b/onnxruntime/core/providers/rocm/nn/conv_transpose.cc
@@ -76,7 +76,6 @@ Status ConvTranspose<T, NHWC>::DoConvTranspose(OpKernelContext* context, bool dy

if (w_dims_changed) {
s_.last_w_dims = gsl::make_span(w_dims);
- s_.cached_benchmark_bwd_results.clear();
}

ConvTransposeAttributes::Prepare p;
@@ -127,12 +126,13 @@ Status ConvTranspose<T, NHWC>::DoConvTranspose(OpKernelContext* context, bool dy

y_data = reinterpret_cast<HipT*>(p.Y->MutableData<T>());

- if (!s_.cached_benchmark_bwd_results.contains(x_dims)) {
- IAllocatorUniquePtr<void> algo_search_workspace = GetScratchBuffer<void>(AlgoSearchWorkspaceSize, context->GetComputeStream());
-
- miopenConvAlgoPerf_t perf;
- int algo_count = 1;
- MIOPEN_RETURN_IF_ERROR(miopenFindConvolutionBackwardDataAlgorithm(
+ }
+ // The following is required before calling convolution, we cannot cache the results
+ {
+ IAllocatorUniquePtr<void> algo_search_workspace = GetScratchBuffer<void>(AlgoSearchWorkspaceSize, context->GetComputeStream());
+ miopenConvAlgoPerf_t perf;
+ int algo_count = 1;
+ MIOPEN_RETURN_IF_ERROR(miopenFindConvolutionBackwardDataAlgorithm(
GetMiopenHandle(context),
s_.x_tensor,
x_data,
@@ -147,10 +147,7 @@ Status ConvTranspose<T, NHWC>::DoConvTranspose(OpKernelContext* context, bool dy
algo_search_workspace.get(),
AlgoSearchWorkspaceSize,
false));
- s_.cached_benchmark_bwd_results.insert(x_dims, {perf.bwd_data_algo, perf.memory});
- }

- const auto& perf = s_.cached_benchmark_bwd_results.at(x_dims);
s_.bwd_data_algo = perf.bwd_data_algo;
s_.workspace_bytes = perf.memory;
}
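The vendored patch above removes onnxruntime's LRU cache of MIOpen algo-search results, so miopenFindConvolutionForwardAlgorithm / miopenFindConvolutionBackwardDataAlgorithm run before every convolution, as the MIOpen porting guide requires. This diff does not show how the file is consumed; a plausible sketch, assuming it is applied to an onnxruntime checkout before the ROCm wheel is built (both paths are assumptions):

    # Hypothetical sketch: apply the vendored patch to an onnxruntime
    # source tree. The actual build steps are not part of this diff.
    import subprocess

    subprocess.run(
        ["git", "apply", "/src/immich/machine-learning/rocm-PR19567.patch"],
        cwd="/src/onnxruntime",  # assumed location of the onnxruntime checkout
        check=True,
    )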
@@ -70,6 +70,7 @@ custom_lint:
- lib/infrastructure/repositories/{store,db,log}.repository.dart
- lib/providers/infrastructure/db.provider.dart
# acceptable exceptions for the time being (until Isar is fully replaced)
+ - lib/providers/app_life_cycle.provider.dart
- integration_test/test_utils/general_helper.dart
- lib/main.dart
- lib/pages/album/album_asset_selection.page.dart
@@ -35,8 +35,8 @@ platform :android do
task: 'bundle',
build_type: 'Release',
properties: {
- "android.injected.version.code" => 185,
+ "android.injected.version.code" => 186,
- "android.injected.version.name" => "1.127.0",
+ "android.injected.version.name" => "1.128.0",
}
)
upload_to_play_store(skip_upload_apk: true, skip_upload_images: true, skip_upload_screenshots: true, aab: '../build/app/outputs/bundle/release/app-release.aab')
@@ -108,7 +108,7 @@
"backup_info_card_assets": "elements",
"backup_manual_cancelled": "Cancelled",
"backup_manual_failed": "Failed",
- "backup_manual_in_progress": "Upload already in progress. Try after sometime",
+ "backup_manual_in_progress": "Upload already in progress. Try after some time",
"backup_manual_success": "Success",
"backup_manual_title": "Upload status",
"cache_settings_album_thumbnails": "Library page thumbnails ({} assets)",

@@ -133,7 +133,7 @@
"backup_info_card_assets": "assets",
"backup_manual_cancelled": "Cancelled",
"backup_manual_failed": "Failed",
- "backup_manual_in_progress": "Upload already in progress. Try after sometime",
+ "backup_manual_in_progress": "Upload already in progress. Try after some time",
"backup_manual_success": "Success",
"backup_manual_title": "Upload status",
"backup_options_page_title": "Backup options",

@@ -133,7 +133,7 @@
"backup_info_card_assets": "assets",
"backup_manual_cancelled": "Cancelled",
"backup_manual_failed": "Failed",
- "backup_manual_in_progress": "Upload already in progress. Try after sometime",
+ "backup_manual_in_progress": "Upload already in progress. Try after some time",
"backup_manual_success": "Success",
"backup_manual_title": "Upload status",
"backup_options_page_title": "Backup options",

@@ -133,7 +133,7 @@
"backup_info_card_assets": "assets",
"backup_manual_cancelled": "Cancelled",
"backup_manual_failed": "Failed",
- "backup_manual_in_progress": "Upload already in progress. Try after sometime",
+ "backup_manual_in_progress": "Upload already in progress. Try after some time",
"backup_manual_success": "Success",
"backup_manual_title": "Upload status",
"backup_options_page_title": "Backup options",

@@ -133,7 +133,7 @@
"backup_info_card_assets": "assets",
"backup_manual_cancelled": "Cancelled",
"backup_manual_failed": "Failed",
- "backup_manual_in_progress": "Upload already in progress. Try after sometime",
+ "backup_manual_in_progress": "Upload already in progress. Try after some time",
"backup_manual_success": "Success",
"backup_manual_title": "Upload status",
"backup_options_page_title": "Backup options",

@@ -133,7 +133,7 @@
"backup_info_card_assets": "assets",
"backup_manual_cancelled": "Cancelled",
"backup_manual_failed": "Failed",
- "backup_manual_in_progress": "Upload already in progress. Try after sometime",
+ "backup_manual_in_progress": "Upload already in progress. Try after some time",
"backup_manual_success": "Success",
"backup_manual_title": "Upload status",
"backup_options_page_title": "Backup options",

@@ -133,7 +133,7 @@
"backup_info_card_assets": "assets",
"backup_manual_cancelled": "Cancelled",
"backup_manual_failed": "Failed",
- "backup_manual_in_progress": "Upload already in progress. Try after sometime",
+ "backup_manual_in_progress": "Upload already in progress. Try after some time",
"backup_manual_success": "Success",
"backup_manual_title": "Upload status",
"backup_options_page_title": "Backup options",

@@ -133,7 +133,7 @@
"backup_info_card_assets": "assets",
"backup_manual_cancelled": "Cancelled",
"backup_manual_failed": "Failed",
- "backup_manual_in_progress": "Upload already in progress. Try after sometime",
+ "backup_manual_in_progress": "Upload already in progress. Try after some time",
"backup_manual_success": "Success",
"backup_manual_title": "Upload status",
"backup_options_page_title": "Backup options",

@@ -133,7 +133,7 @@
"backup_info_card_assets": "zapisi",
"backup_manual_cancelled": "Cancelled",
"backup_manual_failed": "Failed",
- "backup_manual_in_progress": "Upload already in progress. Try after sometime",
+ "backup_manual_in_progress": "Upload already in progress. Try after some time",
"backup_manual_success": "Success",
"backup_manual_title": "Upload status",
"backup_options_page_title": "Backup options",

@@ -133,7 +133,7 @@
"backup_info_card_assets": "assets",
"backup_manual_cancelled": "Cancelled",
"backup_manual_failed": "Failed",
- "backup_manual_in_progress": "Upload already in progress. Try after sometime",
+ "backup_manual_in_progress": "Upload already in progress. Try after some time",
"backup_manual_success": "Success",
"backup_manual_title": "Upload status",
"backup_options_page_title": "Backup options",
@@ -541,7 +541,7 @@
CODE_SIGN_ENTITLEMENTS = Runner/RunnerProfile.entitlements;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
- CURRENT_PROJECT_VERSION = 195;
+ CURRENT_PROJECT_VERSION = 196;
CUSTOM_GROUP_ID = group.app.immich.share;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_BITCODE = NO;
@@ -685,7 +685,7 @@
CODE_SIGN_ENTITLEMENTS = Runner/Runner.entitlements;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
- CURRENT_PROJECT_VERSION = 195;
+ CURRENT_PROJECT_VERSION = 196;
CUSTOM_GROUP_ID = group.app.immich.share;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_BITCODE = NO;
@@ -715,7 +715,7 @@
CODE_SIGN_ENTITLEMENTS = Runner/Runner.entitlements;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
- CURRENT_PROJECT_VERSION = 195;
+ CURRENT_PROJECT_VERSION = 196;
CUSTOM_GROUP_ID = group.app.immich.share;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_BITCODE = NO;
@@ -748,7 +748,7 @@
CODE_SIGN_ENTITLEMENTS = ShareExtension/ShareExtension.entitlements;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
- CURRENT_PROJECT_VERSION = 195;
+ CURRENT_PROJECT_VERSION = 196;
CUSTOM_GROUP_ID = group.app.immich.share;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
@@ -791,7 +791,7 @@
CODE_SIGN_ENTITLEMENTS = ShareExtension/ShareExtension.entitlements;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
- CURRENT_PROJECT_VERSION = 195;
+ CURRENT_PROJECT_VERSION = 196;
CUSTOM_GROUP_ID = group.app.immich.share;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
@@ -831,7 +831,7 @@
CODE_SIGN_ENTITLEMENTS = ShareExtension/ShareExtension.entitlements;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
- CURRENT_PROJECT_VERSION = 195;
+ CURRENT_PROJECT_VERSION = 196;
CUSTOM_GROUP_ID = group.app.immich.share;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
@@ -18,13 +18,6 @@ import UIKit
UNUserNotificationCenter.current().delegate = self as? UNUserNotificationCenterDelegate
}

do {
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
try AVAudioSession.sharedInstance().setActive(true)
} catch {
print("Failed to set audio session category. Error: \(error)")
}

GeneratedPluginRegistrant.register(with: self)
BackgroundServicePlugin.registerBackgroundProcessing()

@@ -160,7 +160,7 @@ class BackgroundServicePlugin: NSObject, FlutterPlugin {
}
}

// Called by the flutter code when enabled so that we can turn on the backround services
// Called by the flutter code when enabled so that we can turn on the background services
// and save the callback information to communicate on this method channel
public func handleBackgroundEnable(call: FlutterMethodCall, result: FlutterResult) {

@@ -249,7 +249,7 @@ class BackgroundServicePlugin: NSObject, FlutterPlugin {
result(true)
}

// Returns the number of currently scheduled background processes to Flutter, striclty
// Returns the number of currently scheduled background processes to Flutter, strictly
// for debugging
func handleNumberOfProcesses(call: FlutterMethodCall, result: @escaping FlutterResult) {
BGTaskScheduler.shared.getPendingTaskRequests { requests in
@@ -355,7 +355,7 @@ class BackgroundServicePlugin: NSObject, FlutterPlugin {
let isExpensive = wifiMonitor.currentPath.isExpensive
if (isExpensive) {
// The network is expensive and we have required Wi-Fi
// Therfore, we will simply complete the task without
// Therefore, we will simply complete the task without
// running it
task.setTaskCompleted(success: true)
return
@@ -78,7 +78,7 @@
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>1.127.0</string>
<string>1.128.0</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleURLTypes</key>
@@ -93,7 +93,7 @@
</dict>
</array>
<key>CFBundleVersion</key>
<string>195</string>
<string>196</string>
<key>FLTEnableImpeller</key>
<true/>
<key>ITSAppUsesNonExemptEncryption</key>
@@ -19,7 +19,7 @@ platform :ios do
desc "iOS Release"
lane :release do
increment_version_number(
version_number: "1.127.0"
version_number: "1.128.0"
)
increment_build_number(
build_number: latest_testflight_build_number + 1,
@@ -1,19 +1,15 @@
// ignore_for_file: constant_identifier_names

import 'package:logging/logging.dart';

/// Log levels according to dart logging [Level]
enum LogLevel {
ALL,
all,
FINEST,
finest,
FINER,
finer,
FINE,
fine,
CONFIG,
config,
INFO,
info,
WARNING,
warning,
SEVERE,
severe,
SHOUT,
shout,
OFF,
off,
}

class LogMessage {
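For reference, the rename keeps the constants in the same ordinal positions as `package:logging`'s `Level.LEVELS`, which is what the index-based conversion used elsewhere in this diff relies on. A minimal, self-contained sketch of that relationship (stand-in enum, not the project file itself):

```dart
import 'package:logging/logging.dart';

// Same constants as the renamed enum; lowerCamelCase satisfies the
// `constant_identifier_names` lint, so the ignore directive can be dropped.
enum LogLevel { all, finest, finer, fine, config, info, warning, severe, shout, off }

// Level.LEVELS is ordered ALL..OFF, matching the enum declaration order,
// so an index lookup converts between the two without a switch.
LogLevel toLogLevel(Level level) =>
    LogLevel.values.elementAtOrNull(Level.LEVELS.indexOf(level)) ?? LogLevel.info;

void main() {
  assert(toLogLevel(Level.WARNING) == LogLevel.warning);
  print(toLogLevel(Level.SHOUT)); // LogLevel.shout
}
```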
@@ -1,5 +1,6 @@
import 'dart:async';

import 'package:flutter/foundation.dart';
import 'package:immich_mobile/constants/constants.dart';
import 'package:immich_mobile/domain/interfaces/log.interface.dart';
import 'package:immich_mobile/domain/interfaces/store.interface.dart';
@@ -39,29 +40,29 @@ class LogService {
}

static Future<LogService> init({
required ILogRepository logRepo,
required ILogRepository logRepository,
required IStoreRepository storeRepo,
required IStoreRepository storeRepository,
bool shouldBuffer = true,
}) async {
if (_instance != null) {
return _instance!;
}
_instance = await create(
logRepo: logRepo,
logRepository: logRepository,
storeRepo: storeRepo,
storeRepository: storeRepository,
shouldBuffer: shouldBuffer,
);
return _instance!;
}

static Future<LogService> create({
required ILogRepository logRepo,
required ILogRepository logRepository,
required IStoreRepository storeRepo,
required IStoreRepository storeRepository,
bool shouldBuffer = true,
}) async {
final instance = LogService._(logRepo, storeRepo, shouldBuffer);
final instance = LogService._(logRepository, storeRepository, shouldBuffer);
// Truncate logs to 250
await logRepo.truncate(limit: kLogTruncateLimit);
await logRepository.truncate(limit: kLogTruncateLimit);
// Get log level from store
final level = await instance._storeRepository.tryGet(StoreKey.logLevel);
if (level != null) {
@@ -91,12 +92,13 @@ class LogService {
}

/// Flush pending log messages to persistent storage
Future<void> flush() async {
void flush() {
if (_flushTimer == null) {
return;
}
_flushTimer!.cancel();
await _flushBufferToDatabase();
// TODO: Rename enable this after moving to sqlite - #16504
// await _flushBufferToDatabase();
}

Future<void> dispose() {
@@ -106,6 +108,10 @@ class LogService {
}

void _writeLogToDatabase(LogRecord r) {
if (kDebugMode) {
debugPrint('[${r.level.name}] [${r.time}] ${r.message}');
}

final record = LogMessage(
message: r.message,
level: r.level.toLogLevel(),
@@ -145,7 +151,7 @@ class LoggerUnInitializedException implements Exception {
extension LevelDomainToInfraExtension on Level {
LogLevel toLogLevel() =>
LogLevel.values.elementAtOrNull(Level.LEVELS.indexOf(this)) ??
LogLevel.INFO;
LogLevel.info;
}

extension on LogLevel {
@@ -75,7 +75,7 @@ class StoreService {
}

/// Asynchronously stores the value in the DB and synchronously in the cache
Future<void> put<T>(StoreKey<T> key, T value) async {
Future<void> put<U extends StoreKey<T>, T>(U key, T value) async {
if (_cache[key.id] == value) return;
await _storeRepository.insert(key, value);
_cache[key.id] = value;
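The second type parameter is there so the key, rather than the value, drives inference of `T`. A hedged sketch of the idea with a simplified `StoreKey` stand-in (not the real class); the intent, as I read the change, is that a value whose type doesn't match the key's declared type no longer infers a widened `T`:

```dart
// Simplified stand-in for illustration only.
class StoreKey<T> {
  const StoreKey(this.id);
  final int id;
}

const serverEndpoint = StoreKey<String>(10);

// With `put<T>(StoreKey<T> key, T value)`, a call like
// `put(serverEndpoint, 42)` can infer `T = Object` and still compile.
// Binding the key through `U extends StoreKey<T>` pins `T` to the
// key's type argument, so only a String is accepted here.
Future<void> put<U extends StoreKey<T>, T>(U key, T value) async {
  print('store ${key.id} = $value');
}

Future<void> main() async {
  await put(serverEndpoint, 'https://example.test'); // OK: T = String
  // await put(serverEndpoint, 42); // intended to be a compile-time error
}
```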
@@ -5,29 +5,24 @@ part 'log.entity.g.dart';

@Collection(inheritance: false)
class LoggerMessage {
Id id = Isar.autoIncrement;
final Id id = Isar.autoIncrement;
String message;
final String message;
String? details;
final String? details;
@Enumerated(EnumType.ordinal)
LogLevel level = LogLevel.INFO;
final LogLevel level;
DateTime createdAt;
final DateTime createdAt;
String? context1;
final String? context1;
String? context2;
final String? context2;

LoggerMessage({
const LoggerMessage({
required this.message,
required this.details,
required this.level,
this.level = LogLevel.info,
required this.createdAt,
required this.context1,
required this.context2,
});

@override
String toString() {
return 'LoggerMessage(message: $message, level: $level, createdAt: $createdAt)';
}

LogMessage toDto() {
return LogMessage(
message: message,
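Making every field `final` with a `const` constructor (and a defaulted `level`) turns the entity into an immutable value object; a standalone sketch of the resulting construction behaviour (stand-in types, not the Isar entity itself):

```dart
enum LogLevel { all, finest, finer, fine, config, info, warning, severe, shout, off }

class LoggerMessage {
  const LoggerMessage({
    required this.message,
    this.level = LogLevel.info, // default replaces the old mutable initializer
    required this.createdAt,
  });

  final String message;
  final LogLevel level;
  final DateTime createdAt;
}

void main() {
  // DateTime.now() is not const, so this is a regular runtime construction;
  // the const constructor just guarantees the fields can never be reassigned.
  final msg = LoggerMessage(message: 'hello', createdAt: DateTime.now());
  print(msg.level); // LogLevel.info
}
```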
49
mobile/lib/infrastructure/entities/log.entity.g.dart
generated
@@ -117,10 +117,9 @@ LoggerMessage _loggerMessageDeserialize(
createdAt: reader.readDateTime(offsets[2]),
details: reader.readStringOrNull(offsets[3]),
level: _LoggerMessagelevelValueEnumMap[reader.readByteOrNull(offsets[4])] ??
LogLevel.ALL,
LogLevel.info,
message: reader.readString(offsets[5]),
);
object.id = id;
return object;
}

@@ -141,7 +140,7 @@ P _loggerMessageDeserializeProp<P>(
return (reader.readStringOrNull(offset)) as P;
case 4:
return (_LoggerMessagelevelValueEnumMap[reader.readByteOrNull(offset)] ??
LogLevel.ALL) as P;
LogLevel.info) as P;
case 5:
return (reader.readString(offset)) as P;
default:
@@ -150,28 +149,28 @@ P _loggerMessageDeserializeProp<P>(
}

const _LoggerMessagelevelEnumValueMap = {
'ALL': 0,
'all': 0,
'FINEST': 1,
'finest': 1,
'FINER': 2,
'finer': 2,
'FINE': 3,
'fine': 3,
'CONFIG': 4,
'config': 4,
'INFO': 5,
'info': 5,
'WARNING': 6,
'warning': 6,
'SEVERE': 7,
'severe': 7,
'SHOUT': 8,
'shout': 8,
'OFF': 9,
'off': 9,
};
const _LoggerMessagelevelValueEnumMap = {
0: LogLevel.ALL,
0: LogLevel.all,
1: LogLevel.FINEST,
1: LogLevel.finest,
2: LogLevel.FINER,
2: LogLevel.finer,
3: LogLevel.FINE,
3: LogLevel.fine,
4: LogLevel.CONFIG,
4: LogLevel.config,
5: LogLevel.INFO,
5: LogLevel.info,
6: LogLevel.WARNING,
6: LogLevel.warning,
7: LogLevel.SEVERE,
7: LogLevel.severe,
8: LogLevel.SHOUT,
8: LogLevel.shout,
9: LogLevel.OFF,
9: LogLevel.off,
};

Id _loggerMessageGetId(LoggerMessage object) {
@@ -183,9 +182,7 @@ List<IsarLinkBase<dynamic>> _loggerMessageGetLinks(LoggerMessage object) {
}

void _loggerMessageAttach(
IsarCollection<dynamic> col, Id id, LoggerMessage object) {
IsarCollection<dynamic> col, Id id, LoggerMessage object) {}
object.id = id;
}

extension LoggerMessageQueryWhereSort
on QueryBuilder<LoggerMessage, LoggerMessage, QWhere> {
@@ -5,8 +5,9 @@ part 'store.entity.g.dart';
/// Internal class for `Store`, do not use elsewhere.
@Collection(inheritance: false)
class StoreValue {
const StoreValue(this.id, {this.intValue, this.strValue});
final Id id;
final int? intValue;
final String? strValue;

const StoreValue(this.id, {this.intValue, this.strValue});
}
@@ -3,6 +3,10 @@ import 'package:immich_mobile/entities/asset.entity.dart';
import 'package:immich_mobile/widgets/asset_grid/asset_grid_data_structure.dart';

abstract class ITimelineRepository {
Future<List<int>> getTimelineUserIds(int id);

Stream<List<int>> watchTimelineUsers(int id);

Stream<RenderList> watchArchiveTimeline(int userId);
Stream<RenderList> watchFavoriteTimeline(int userId);
Stream<RenderList> watchTrashTimeline(int userId);
@@ -22,10 +22,6 @@ abstract interface class IUserRepository implements IDatabaseRepository {
Future<User> me();

Future<void> clearTable();

Future<List<int>> getTimelineUserIds(int id);

Stream<List<int>> watchTimelineUsers(int id);
}

enum UserSort { id }
@@ -7,7 +7,7 @@ mixin ErrorLoggerMixin {
abstract final Logger logger;

/// Returns an AsyncValue<T> if the future is successfully executed
/// Else, logs the error to the overrided logger and returns an AsyncError<>
/// Else, logs the error to the overridden logger and returns an AsyncError<>
AsyncFuture<T> guardError<T>(
Future<T> Function() fn, {
required String errorMessage,
@@ -41,16 +41,16 @@ class AppLogPage extends HookConsumerWidget {
}

Widget buildLeadingIcon(LogLevel level) => switch (level) {
LogLevel.INFO => colorStatusIndicator(context.primaryColor),
LogLevel.info => colorStatusIndicator(context.primaryColor),
LogLevel.SEVERE => colorStatusIndicator(Colors.redAccent),
LogLevel.severe => colorStatusIndicator(Colors.redAccent),
LogLevel.WARNING => colorStatusIndicator(Colors.orangeAccent),
LogLevel.warning => colorStatusIndicator(Colors.orangeAccent),
_ => colorStatusIndicator(Colors.grey),
};

Color getTileColor(LogLevel level) => switch (level) {
LogLevel.INFO => Colors.transparent,
LogLevel.info => Colors.transparent,
LogLevel.SEVERE => Colors.redAccent.withOpacity(0.25),
LogLevel.severe => Colors.redAccent.withOpacity(0.25),
LogLevel.WARNING => Colors.orangeAccent.withOpacity(0.25),
LogLevel.warning => Colors.orangeAccent.withOpacity(0.25),
_ => context.primaryColor.withOpacity(0.1),
};

@@ -17,6 +17,7 @@ import 'package:immich_mobile/providers/server_info.provider.dart';
import 'package:immich_mobile/providers/tab.provider.dart';
import 'package:immich_mobile/providers/websocket.provider.dart';
import 'package:immich_mobile/services/background.service.dart';
import 'package:isar/isar.dart';
import 'package:permission_handler/permission_handler.dart';

enum AppLifeCycleEnum {
@@ -114,11 +115,13 @@ class AppLifeCycleNotifier extends StateNotifier<AppLifeCycleEnum> {
_ref.read(websocketProvider.notifier).disconnect();
}

unawaited(LogService.I.flush());
LogService.I.flush();
}

void handleAppDetached() {
Future<void> handleAppDetached() async {
state = AppLifeCycleEnum.detached;
LogService.I.flush();
await Isar.getInstance()?.close();
// no guarantee this is called at all
_ref.read(manualUploadProvider.notifier).cancelBackup();
}
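The detach handler becomes async so the Isar instance can be awaited shut before the process exits; a rough, self-contained sketch of the ordering, with stand-ins for `LogService.I.flush()` and `Isar.getInstance()?.close()`:

```dart
// Stand-ins for the real calls, purely illustrative.
void flushLogs() => print('logs flushed');
Future<void> closeDatabase() async => print('database closed');

// Flush synchronously first, then await the database close so the file
// handle is released cleanly. As the original comment notes, detach is
// best-effort: the OS may kill the process before any of this runs.
Future<void> handleAppDetached() async {
  flushLogs();
  await closeDatabase();
}

void main() async {
  await handleAppDetached();
}
```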
@@ -59,7 +59,11 @@ class AssetNotifier extends StateNotifier<bool> {
await clearAllAssets();
log.info("Manual refresh requested, cleared assets and albums from db");
}
final bool changedUsers = await _userService.refreshUsers();
final users = await _userService.getUsersFromServer();
bool changedUsers = false;
if (users != null) {
changedUsers = await _syncService.syncUsersFromServer(users);
}
final bool newRemote = await _assetService.refreshRemoteAssets();
final bool newLocal = await _albumService.refreshDeviceAlbums();
debugPrint(
@@ -104,7 +104,7 @@ class DownloadStateNotifier extends StateNotifier<DownloadState> {
}

void _taskProgressCallback(TaskProgressUpdate update) {
// Ignore if the task is cancled or completed
// Ignore if the task is canceled or completed
if (update.progress == -2 || update.progress == -1) {
return;
}
@@ -117,7 +117,7 @@ class ShareIntentUploadStateNotifier
}

void _taskProgressCallback(TaskProgressUpdate update) {
// Ignore if the task is cancled or completed
// Ignore if the task is canceled or completed
if (update.progress == downloadFailed ||
update.progress == downloadCompleted) {
return;
@@ -47,7 +47,7 @@ class AuthNotifier extends StateNotifier<AuthState> {
}

/// Validating the url is the alternative connecting server url without
/// saving the infomation to the local database
/// saving the information to the local database
Future<bool> validateAuxilaryServerUrl(String url) async {
try {
final validEndpoint = await _apiService.resolveEndpoint(url);
@@ -6,7 +6,7 @@ import 'package:permission_handler/permission_handler.dart';

class GalleryPermissionNotifier extends StateNotifier<PermissionStatus> {
GalleryPermissionNotifier()
: super(PermissionStatus.denied) // Denied is the intitial state
: super(PermissionStatus.denied) // Denied is the initial state
{
// Sets the initial state
getGalleryPermissionStatus();
@@ -6,7 +6,7 @@ import 'package:immich_mobile/entities/store.entity.dart';
import 'package:immich_mobile/entities/user.entity.dart';
import 'package:immich_mobile/providers/api.provider.dart';
import 'package:immich_mobile/services/api.service.dart';
import 'package:immich_mobile/services/user.service.dart';
import 'package:immich_mobile/services/timeline.service.dart';

class CurrentUserProvider extends StateNotifier<User?> {
CurrentUserProvider(this._apiService) : super(null) {
@@ -46,14 +46,15 @@ final currentUserProvider =
});

class TimelineUserIdsProvider extends StateNotifier<List<int>> {
TimelineUserIdsProvider(this._userService) : super([]) {
TimelineUserIdsProvider(this._timelineService) : super([]) {
_userService.getTimelineUserIds().then((users) => state = users);
_timelineService.getTimelineUserIds().then((users) => state = users);
streamSub =
streamSub = _timelineService
_userService.watchTimelineUserIds().listen((users) => state = users);
.watchTimelineUserIds()
.listen((users) => state = users);
}

late final StreamSubscription<List<int>> streamSub;
final UserService _userService;
final TimelineService _timelineService;

@override
void dispose() {
@@ -64,5 +65,5 @@ class TimelineUserIdsProvider extends StateNotifier<List<int>> {

final timelineUsersIdsProvider =
StateNotifierProvider<TimelineUserIdsProvider, List<int>>((ref) {
return TimelineUserIdsProvider(ref.watch(userServiceProvider));
return TimelineUserIdsProvider(ref.watch(timelineServiceProvider));
});
@@ -2,6 +2,7 @@ import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/constants/enums.dart';
import 'package:immich_mobile/entities/album.entity.dart';
import 'package:immich_mobile/entities/asset.entity.dart';
import 'package:immich_mobile/entities/user.entity.dart';
import 'package:immich_mobile/interfaces/timeline.interface.dart';
import 'package:immich_mobile/providers/db.provider.dart';
import 'package:immich_mobile/repositories/database.repository.dart';
@@ -15,6 +16,28 @@ class TimelineRepository extends DatabaseRepository
implements ITimelineRepository {
TimelineRepository(super.db);

@override
Future<List<int>> getTimelineUserIds(int id) {
return db.users
.filter()
.inTimelineEqualTo(true)
.or()
.isarIdEqualTo(id)
.isarIdProperty()
.findAll();
}

@override
Stream<List<int>> watchTimelineUsers(int id) {
return db.users
.filter()
.inTimelineEqualTo(true)
.or()
.isarIdEqualTo(id)
.isarIdProperty()
.watch();
}

@override
Stream<RenderList> watchArchiveTimeline(int userId) {
final query = db.assets
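For readers unfamiliar with Isar's query builder: the chained `.or()` combines the two predicates, and `.isarIdProperty()` projects only the id column. An in-memory Dart equivalent of the semantics (illustrative only, no Isar required):

```dart
class User {
  const User(this.isarId, this.inTimeline);
  final int isarId;
  final bool inTimeline;
}

// Mirrors db.users.filter().inTimelineEqualTo(true).or().isarIdEqualTo(id)
//                 .isarIdProperty().findAll():
// "everyone flagged for the timeline, plus the user themself", ids only.
List<int> timelineUserIds(List<User> users, int id) => users
    .where((u) => u.inTimeline || u.isarId == id)
    .map((u) => u.isarId)
    .toList();

void main() {
  const users = [User(1, true), User(2, false), User(3, true)];
  print(timelineUserIds(users, 2)); // [1, 2, 3]
}
```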
@@ -70,26 +70,4 @@ class UserRepository extends DatabaseRepository implements IUserRepository {
await db.users.clear();
});
}

@override
Future<List<int>> getTimelineUserIds(int id) {
return db.users
.filter()
.inTimelineEqualTo(true)
.or()
.isarIdEqualTo(id)
.isarIdProperty()
.findAll();
}

@override
Stream<List<int>> watchTimelineUsers(int id) {
return db.users
.filter()
.inTimelineEqualTo(true)
.or()
.isarIdEqualTo(id)
.isarIdProperty()
.watch();
}
}
@@ -169,7 +169,10 @@ class AlbumService {
final Stopwatch sw = Stopwatch()..start();
bool changes = false;
try {
await _userService.refreshUsers();
final users = await _userService.getUsersFromServer();
if (users != null) {
await _syncService.syncUsersFromServer(users);
}
final (sharedAlbum, ownedAlbum) = await (
// Note: `shared: true` is required to get albums that don't belong to
// us due to unusual behaviour on the API but this will also return our
@@ -84,15 +84,17 @@ class ApiService implements Authentication {
/// port - optional (default: based on schema)
/// path - optional
Future<String> resolveEndpoint(String serverUrl) async {
final url = sanitizeUrl(serverUrl);
String url = sanitizeUrl(serverUrl);

if (!await _isEndpointAvailable(serverUrl)) {
throw ApiException(503, "Server is not reachable");
}

// Check for /.well-known/immich
final wellKnownEndpoint = await _getWellKnownEndpoint(url);
if (wellKnownEndpoint.isNotEmpty) return wellKnownEndpoint;
if (wellKnownEndpoint.isNotEmpty) {
url = sanitizeUrl(wellKnownEndpoint);
}

if (!await _isEndpointAvailable(url)) {
throw ApiException(503, "Server is not reachable");
}

// Otherwise, assume the URL provided is the api endpoint
return url;
@@ -128,10 +130,12 @@ class ApiService implements Authentication {
var headers = {"Accept": "application/json"};
headers.addAll(getRequestHeaders());

final res = await client.get(
final res = await client
Uri.parse("$baseUrl/.well-known/immich"),
.get(
headers: headers,
Uri.parse("$baseUrl/.well-known/immich"),
);
headers: headers,
)
.timeout(const Duration(seconds: 5));

if (res.statusCode == 200) {
final data = jsonDecode(res.body);
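The net effect of the two hunks above: the reachability probe moves after the well-known lookup (so it tests the endpoint that will actually be used), and the well-known request gains a five-second timeout. A trimmed-down sketch of the new flow, with hypothetical stubs in place of the private helpers:

```dart
// Hypothetical stubs standing in for the private helpers.
String sanitizeUrl(String url) => url.replaceAll(RegExp(r'/+$'), '');
Future<String> getWellKnownEndpoint(String url) async => '$url/api';
Future<bool> isEndpointAvailable(String url) async => url.startsWith('http');

Future<String> resolveEndpoint(String serverUrl) async {
  String url = sanitizeUrl(serverUrl);

  // 1. Prefer a /.well-known/immich redirect when the server offers one.
  final wellKnown = await getWellKnownEndpoint(url);
  if (wellKnown.isNotEmpty) {
    url = sanitizeUrl(wellKnown);
  }

  // 2. Probe reachability only after the rewrite, so a reachable
  //    well-known target no longer fails on an unreachable origin URL.
  if (!await isEndpointAvailable(url)) {
    throw Exception('Server is not reachable');
  }
  return url;
}

void main() async {
  print(await resolveEndpoint('https://demo.immich.app/'));
}
```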
@@ -75,7 +75,7 @@ class AuthService {
isValid = true;
}
} catch (error) {
_log.severe("Error validating auxilary endpoint", error);
_log.severe("Error validating auxiliary endpoint", error);
} finally {
httpclient.close();
}
@@ -187,7 +187,7 @@ class AuthService {
_log.severe("Cannot resolve endpoint", error);
continue;
} catch (_) {
_log.severe("Auxilary server is not valid");
_log.severe("Auxiliary server is not valid");
continue;
}
}
@@ -329,7 +329,7 @@ class BackgroundService {
try {
_clearErrorNotifications();

// iOS should time out after some threshhold so it doesn't wait
// iOS should time out after some threshold so it doesn't wait
// indefinitely and can run later
// Android is fine to wait here until the lock releases
final waitForLock = Platform.isIOS
@@ -410,7 +410,6 @@ class BackgroundService {
partnerApiRepository,
userApiRepository,
userRepository,
syncSerive,
);
AlbumService albumService = AlbumService(
userService,
@@ -26,7 +26,7 @@ class MemoryService {
try {
final now = DateTime.now();
final data = await _apiService.memoriesApi.searchMemories(
for_: now,
for_: DateTime.utc(now.year, now.month, now.day, 0, 0, 0),
);

if (data == null) {
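The memories query now sends the local calendar date pinned to UTC midnight instead of the raw local timestamp, so clients in different time zones asking on the same local date send comparable values. A quick runnable illustration of the difference:

```dart
void main() {
  final now = DateTime.now();

  // Old value: full local timestamp, time-of-day and offset included.
  print(now.toIso8601String());

  // New value: the local year/month/day reinterpreted as 00:00:00 UTC.
  final forDate = DateTime.utc(now.year, now.month, now.day, 0, 0, 0);
  print(forDate.toIso8601String()); // e.g. 2025-03-01T00:00:00.000Z
}
```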
@@ -6,6 +6,7 @@ import 'package:immich_mobile/entities/album.entity.dart';
import 'package:immich_mobile/entities/asset.entity.dart';
import 'package:immich_mobile/entities/etag.entity.dart';
import 'package:immich_mobile/entities/user.entity.dart';
import 'package:immich_mobile/extensions/collection_extensions.dart';
import 'package:immich_mobile/interfaces/album.interface.dart';
import 'package:immich_mobile/interfaces/album_api.interface.dart';
import 'package:immich_mobile/interfaces/album_media.interface.dart';
@@ -23,7 +24,6 @@ import 'package:immich_mobile/repositories/user.repository.dart';
import 'package:immich_mobile/services/entity.service.dart';
import 'package:immich_mobile/services/hash.service.dart';
import 'package:immich_mobile/utils/async_mutex.dart';
import 'package:immich_mobile/extensions/collection_extensions.dart';
import 'package:immich_mobile/utils/datetime_comparison.dart';
import 'package:immich_mobile/utils/diff.dart';
import 'package:logging/logging.dart';
@@ -639,7 +639,7 @@ class SyncService {
}

/// fast path for common case: only new assets were added to device album
/// returns `true` if successfull, else `false`
/// returns `true` if successful, else `false`
Future<bool> _syncDeviceAlbumFast(Album deviceAlbum, Album dbAlbum) async {
if (!deviceAlbum.modifiedAt.isAfter(dbAlbum.modifiedAt)) {
return false;
@@ -21,12 +21,23 @@ class TimelineService {
final ITimelineRepository _timelineRepository;
final IUserRepository _userRepository;
final AppSettingsService _appSettingsService;
TimelineService(
const TimelineService(
this._timelineRepository,
this._userRepository,
this._appSettingsService,
);

Future<List<int>> getTimelineUserIds() async {
final me = await _userRepository.me();
return _timelineRepository.getTimelineUserIds(me.isarId);
}

Stream<List<int>> watchTimelineUserIds() async* {
final me = await _userRepository.me();
yield* _timelineRepository.watchTimelineUsers(me.isarId);
}

Stream<RenderList> watchHomeTimeline(int userId) {
return _timelineRepository.watchHomeTimeline(userId, _getGroupByOption());
}
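Both new methods resolve the current user once and then delegate to the repository; `watchTimelineUserIds` does this with an `async*` generator and `yield*`. A generic sketch of that pattern (hypothetical names, no Isar involved):

```dart
import 'dart:async';

Future<int> currentUserId() async => 42;

Stream<List<int>> watchTimelineUsers(int id) =>
    Stream<List<int>>.periodic(const Duration(milliseconds: 10), (i) => [id, i])
        .take(3);

// One await up front, then yield* forwards every event of the inner
// stream to the caller unchanged.
Stream<List<int>> watchTimelineUserIds() async* {
  final me = await currentUserId();
  yield* watchTimelineUsers(me);
}

void main() async {
  await for (final users in watchTimelineUserIds()) {
    print(users);
  }
}
```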
@@ -1,14 +1,13 @@
import 'package:collection/collection.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:image_picker/image_picker.dart';
import 'package:immich_mobile/entities/user.entity.dart';
import 'package:immich_mobile/interfaces/partner_api.interface.dart';
import 'package:immich_mobile/interfaces/user.interface.dart';
import 'package:immich_mobile/interfaces/user_api.interface.dart';
import 'package:immich_mobile/repositories/partner_api.repository.dart';
import 'package:immich_mobile/repositories/user.repository.dart';
import 'package:immich_mobile/repositories/user_api.repository.dart';
import 'package:immich_mobile/entities/user.entity.dart';
import 'package:immich_mobile/services/sync.service.dart';
import 'package:immich_mobile/utils/diff.dart';
import 'package:logging/logging.dart';

@@ -17,7 +16,6 @@ final userServiceProvider = Provider(
ref.watch(partnerApiRepositoryProvider),
ref.watch(userApiRepositoryProvider),
ref.watch(userRepositoryProvider),
ref.watch(syncServiceProvider),
),
);

@@ -25,14 +23,12 @@ class UserService {
final IPartnerApiRepository _partnerApiRepository;
final IUserApiRepository _userApiRepository;
final IUserRepository _userRepository;
final SyncService _syncService;
final Logger _log = Logger("UserService");

UserService(
this._partnerApiRepository,
this._userApiRepository,
this._userRepository,
this._syncService,
);

Future<List<User>> getUsers({bool self = false}) {
@@ -98,23 +94,7 @@ class UserService {
return users;
}

Future<bool> refreshUsers() async {
final users = await getUsersFromServer();
if (users == null) return false;
return _syncService.syncUsersFromServer(users);
}

Future<void> clearTable() {
return _userRepository.clearTable();
}

Future<List<int>> getTimelineUserIds() async {
final me = await _userRepository.me();
return _userRepository.getTimelineUserIds(me.isarId);
}

Stream<List<int>> watchTimelineUserIds() async* {
final me = await _userRepository.me();
yield* _userRepository.watchTimelineUsers(me.isarId);
}
}
@@ -49,8 +49,8 @@ abstract final class Bootstrap {
static Future<void> initDomain(Isar db) async {
await StoreService.init(storeRepository: IsarStoreRepository(db));
await LogService.init(
logRepo: IsarLogRepository(db),
logRepository: IsarLogRepository(db),
storeRepo: IsarStoreRepository(db),
storeRepository: IsarStoreRepository(db),
);
}
}
@@ -5,7 +5,7 @@ import 'package:immich_mobile/providers/image/immich_remote_image_provider.dart'
import 'package:immich_mobile/providers/image/immich_remote_thumbnail_provider.dart';

/// [ImageCache] that uses two caches for small and large images
/// so that a single large image does not evict all small iamges
/// so that a single large image does not evict all small images
final class CustomImageCache implements ImageCache {
final _small = ImageCache();
final _large = ImageCache()..maximumSize = 5; // Maximum 5 images
@@ -26,7 +26,7 @@ double getScaleForScaleState(
}

/// Internal class to wraps custom scale boundaries (min, max and initial)
/// Also, stores values regarding the two sizes: the container and teh child.
/// Also, stores values regarding the two sizes: the container and the child.
class ScaleBoundaries {
const ScaleBoundaries(
this._minScale,
@@ -2,13 +2,12 @@ import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
import 'package:flutter_hooks/flutter_hooks.dart' hide Store;
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/store.model.dart';
import 'package:immich_mobile/entities/store.entity.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/models/auth/auxilary_endpoint.model.dart';
import 'package:immich_mobile/providers/network.provider.dart';
import 'package:immich_mobile/services/app_settings.service.dart';
import 'package:immich_mobile/utils/hooks/app_settings_update_hook.dart';
import 'package:immich_mobile/utils/url_helper.dart';
import 'package:immich_mobile/widgets/settings/networking_settings/external_network_preference.dart';
import 'package:immich_mobile/widgets/settings/networking_settings/local_network_preference.dart';
import 'package:immich_mobile/widgets/settings/settings_switch_list_tile.dart';
@@ -18,7 +17,7 @@ class NetworkingSettings extends HookConsumerWidget {

@override
Widget build(BuildContext context, WidgetRef ref) {
final currentEndpoint = Store.get(StoreKey.serverEndpoint);
final currentEndpoint = getServerUrl();
final featureEnabled =
useAppSettingsState(AppSettingsEnum.autoEndpointSwitching);

@@ -102,7 +101,7 @@ class NetworkingSettings extends HookConsumerWidget {
padding: const EdgeInsets.only(top: 8, left: 16, bottom: 8),
child: NetworkPreferenceTitle(
title: "current_server_address".tr().toUpperCase(),
icon: currentEndpoint.startsWith('https')
icon: (currentEndpoint?.startsWith('https') ?? false)
? Icons.https_outlined
: Icons.http_outlined,
),
@@ -119,10 +118,16 @@ class NetworkingSettings extends HookConsumerWidget {
),
),
child: ListTile(
leading:
leading: currentEndpoint != null
const Icon(Icons.check_circle_rounded, color: Colors.green),
? const Icon(
Icons.check_circle_rounded,
color: Colors.green,
)
: const Icon(
Icons.circle_outlined,
),
title: Text(
currentEndpoint,
currentEndpoint ?? "--",
style: TextStyle(
fontSize: 16,
fontFamily: 'Inconsolata',
4
mobile/openapi/README.md
generated
@@ -3,7 +3,7 @@ Immich API

This Dart package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project:

- API version: 1.127.0
- API version: 1.128.0
- Generator version: 7.8.0
- Build package: org.openapitools.codegen.languages.DartClientCodegen

@@ -425,6 +425,8 @@ Class | Method | HTTP request | Description
- [SyncAckDto](doc//SyncAckDto.md)
- [SyncAckSetDto](doc//SyncAckSetDto.md)
- [SyncEntityType](doc//SyncEntityType.md)
- [SyncPartnerDeleteV1](doc//SyncPartnerDeleteV1.md)
- [SyncPartnerV1](doc//SyncPartnerV1.md)
- [SyncRequestType](doc//SyncRequestType.md)
- [SyncStreamDto](doc//SyncStreamDto.md)
- [SyncUserDeleteV1](doc//SyncUserDeleteV1.md)
2
mobile/openapi/lib/api.dart
generated
@@ -232,6 +232,8 @@ part 'model/sync_ack_delete_dto.dart';
part 'model/sync_ack_dto.dart';
part 'model/sync_ack_set_dto.dart';
part 'model/sync_entity_type.dart';
part 'model/sync_partner_delete_v1.dart';
part 'model/sync_partner_v1.dart';
part 'model/sync_request_type.dart';
part 'model/sync_stream_dto.dart';
part 'model/sync_user_delete_v1.dart';
4
mobile/openapi/lib/api_client.dart
generated
@@ -520,6 +520,10 @@ class ApiClient {
return SyncAckSetDto.fromJson(value);
case 'SyncEntityType':
return SyncEntityTypeTypeTransformer().decode(value);
case 'SyncPartnerDeleteV1':
return SyncPartnerDeleteV1.fromJson(value);
case 'SyncPartnerV1':
return SyncPartnerV1.fromJson(value);
case 'SyncRequestType':
return SyncRequestTypeTypeTransformer().decode(value);
case 'SyncStreamDto':
6
mobile/openapi/lib/model/sync_entity_type.dart
generated
@@ -25,11 +25,15 @@ class SyncEntityType {

static const userV1 = SyncEntityType._(r'UserV1');
static const userDeleteV1 = SyncEntityType._(r'UserDeleteV1');
static const partnerV1 = SyncEntityType._(r'PartnerV1');
static const partnerDeleteV1 = SyncEntityType._(r'PartnerDeleteV1');

/// List of all possible values in this [enum][SyncEntityType].
static const values = <SyncEntityType>[
userV1,
userDeleteV1,
partnerV1,
partnerDeleteV1,
];

static SyncEntityType? fromJson(dynamic value) => SyncEntityTypeTypeTransformer().decode(value);
@@ -70,6 +74,8 @@ class SyncEntityTypeTypeTransformer {
switch (data) {
case r'UserV1': return SyncEntityType.userV1;
case r'UserDeleteV1': return SyncEntityType.userDeleteV1;
case r'PartnerV1': return SyncEntityType.partnerV1;
case r'PartnerDeleteV1': return SyncEntityType.partnerDeleteV1;
default:
if (!allowNull) {
throw ArgumentError('Unknown enum value to decode: $data');
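With the two partner constants registered in `values` and the transformer, the generated decoder round-trips the new wire strings; a small stand-alone sketch of the decode path (stub enum, not the generated class):

```dart
enum SyncEntityType { userV1, userDeleteV1, partnerV1, partnerDeleteV1 }

// Mirrors SyncEntityTypeTypeTransformer().decode(): unknown strings fall
// through to null (or throw, when allowNull is false in the generated code).
SyncEntityType? decode(String data) => switch (data) {
      'UserV1' => SyncEntityType.userV1,
      'UserDeleteV1' => SyncEntityType.userDeleteV1,
      'PartnerV1' => SyncEntityType.partnerV1,
      'PartnerDeleteV1' => SyncEntityType.partnerDeleteV1,
      _ => null,
    };

void main() {
  print(decode('PartnerV1')); // SyncEntityType.partnerV1
  print(decode('Bogus')); // null
}
```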
107
mobile/openapi/lib/model/sync_partner_delete_v1.dart
generated
Normal file
@@ -0,0 +1,107 @@
|
|||||||
|
//
|
||||||
|
// AUTO-GENERATED FILE, DO NOT MODIFY!
|
||||||
|
//
|
||||||
|
// @dart=2.18
|
||||||
|
|
||||||
|
// ignore_for_file: unused_element, unused_import
|
||||||
|
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars

part of openapi.api;

class SyncPartnerDeleteV1 {
  /// Returns a new [SyncPartnerDeleteV1] instance.
  SyncPartnerDeleteV1({
    required this.sharedById,
    required this.sharedWithId,
  });

  String sharedById;

  String sharedWithId;

  @override
  bool operator ==(Object other) => identical(this, other) || other is SyncPartnerDeleteV1 &&
    other.sharedById == sharedById &&
    other.sharedWithId == sharedWithId;

  @override
  int get hashCode =>
    // ignore: unnecessary_parenthesis
    (sharedById.hashCode) +
    (sharedWithId.hashCode);

  @override
  String toString() => 'SyncPartnerDeleteV1[sharedById=$sharedById, sharedWithId=$sharedWithId]';

  Map<String, dynamic> toJson() {
    final json = <String, dynamic>{};
    json[r'sharedById'] = this.sharedById;
    json[r'sharedWithId'] = this.sharedWithId;
    return json;
  }

  /// Returns a new [SyncPartnerDeleteV1] instance and imports its values from
  /// [value] if it's a [Map], null otherwise.
  // ignore: prefer_constructors_over_static_methods
  static SyncPartnerDeleteV1? fromJson(dynamic value) {
    upgradeDto(value, "SyncPartnerDeleteV1");
    if (value is Map) {
      final json = value.cast<String, dynamic>();

      return SyncPartnerDeleteV1(
        sharedById: mapValueOfType<String>(json, r'sharedById')!,
        sharedWithId: mapValueOfType<String>(json, r'sharedWithId')!,
      );
    }
    return null;
  }

  static List<SyncPartnerDeleteV1> listFromJson(dynamic json, {bool growable = false,}) {
    final result = <SyncPartnerDeleteV1>[];
    if (json is List && json.isNotEmpty) {
      for (final row in json) {
        final value = SyncPartnerDeleteV1.fromJson(row);
        if (value != null) {
          result.add(value);
        }
      }
    }
    return result.toList(growable: growable);
  }

  static Map<String, SyncPartnerDeleteV1> mapFromJson(dynamic json) {
    final map = <String, SyncPartnerDeleteV1>{};
    if (json is Map && json.isNotEmpty) {
      json = json.cast<String, dynamic>(); // ignore: parameter_assignments
      for (final entry in json.entries) {
        final value = SyncPartnerDeleteV1.fromJson(entry.value);
        if (value != null) {
          map[entry.key] = value;
        }
      }
    }
    return map;
  }

  // maps a json object with a list of SyncPartnerDeleteV1-objects as value to a dart map
  static Map<String, List<SyncPartnerDeleteV1>> mapListFromJson(dynamic json, {bool growable = false,}) {
    final map = <String, List<SyncPartnerDeleteV1>>{};
    if (json is Map && json.isNotEmpty) {
      // ignore: parameter_assignments
      json = json.cast<String, dynamic>();
      for (final entry in json.entries) {
        map[entry.key] = SyncPartnerDeleteV1.listFromJson(entry.value, growable: growable,);
      }
    }
    return map;
  }

  /// The list of required keys that must be present in a JSON.
  static const requiredKeys = <String>{
    'sharedById',
    'sharedWithId',
  };
}
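For reference, a minimal sketch of how this generated model round-trips through JSON. The import path and the ids are assumptions for illustration, not part of the generated file.

import 'package:openapi/api.dart'; // assumed import path for the generated package

void main() {
  // Hypothetical user ids, purely illustrative.
  final dto = SyncPartnerDeleteV1(sharedById: 'user-a', sharedWithId: 'user-b');

  // toJson produces a plain map keyed by the wire field names.
  final json = dto.toJson(); // {sharedById: user-a, sharedWithId: user-b}

  // fromJson is nullable: it returns null for any value that is not a Map.
  final decoded = SyncPartnerDeleteV1.fromJson(json);
  assert(decoded != null && decoded == dto); // == compares both fields
}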
115 mobile/openapi/lib/model/sync_partner_v1.dart generated (Normal file)
@@ -0,0 +1,115 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18

// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars

part of openapi.api;

class SyncPartnerV1 {
  /// Returns a new [SyncPartnerV1] instance.
  SyncPartnerV1({
    required this.inTimeline,
    required this.sharedById,
    required this.sharedWithId,
  });

  bool inTimeline;

  String sharedById;

  String sharedWithId;

  @override
  bool operator ==(Object other) => identical(this, other) || other is SyncPartnerV1 &&
    other.inTimeline == inTimeline &&
    other.sharedById == sharedById &&
    other.sharedWithId == sharedWithId;

  @override
  int get hashCode =>
    // ignore: unnecessary_parenthesis
    (inTimeline.hashCode) +
    (sharedById.hashCode) +
    (sharedWithId.hashCode);

  @override
  String toString() => 'SyncPartnerV1[inTimeline=$inTimeline, sharedById=$sharedById, sharedWithId=$sharedWithId]';

  Map<String, dynamic> toJson() {
    final json = <String, dynamic>{};
    json[r'inTimeline'] = this.inTimeline;
    json[r'sharedById'] = this.sharedById;
    json[r'sharedWithId'] = this.sharedWithId;
    return json;
  }

  /// Returns a new [SyncPartnerV1] instance and imports its values from
  /// [value] if it's a [Map], null otherwise.
  // ignore: prefer_constructors_over_static_methods
  static SyncPartnerV1? fromJson(dynamic value) {
    upgradeDto(value, "SyncPartnerV1");
    if (value is Map) {
      final json = value.cast<String, dynamic>();

      return SyncPartnerV1(
        inTimeline: mapValueOfType<bool>(json, r'inTimeline')!,
        sharedById: mapValueOfType<String>(json, r'sharedById')!,
        sharedWithId: mapValueOfType<String>(json, r'sharedWithId')!,
      );
    }
    return null;
  }

  static List<SyncPartnerV1> listFromJson(dynamic json, {bool growable = false,}) {
    final result = <SyncPartnerV1>[];
    if (json is List && json.isNotEmpty) {
      for (final row in json) {
        final value = SyncPartnerV1.fromJson(row);
        if (value != null) {
          result.add(value);
        }
      }
    }
    return result.toList(growable: growable);
  }

  static Map<String, SyncPartnerV1> mapFromJson(dynamic json) {
    final map = <String, SyncPartnerV1>{};
    if (json is Map && json.isNotEmpty) {
      json = json.cast<String, dynamic>(); // ignore: parameter_assignments
      for (final entry in json.entries) {
        final value = SyncPartnerV1.fromJson(entry.value);
        if (value != null) {
          map[entry.key] = value;
        }
      }
    }
    return map;
  }

  // maps a json object with a list of SyncPartnerV1-objects as value to a dart map
  static Map<String, List<SyncPartnerV1>> mapListFromJson(dynamic json, {bool growable = false,}) {
    final map = <String, List<SyncPartnerV1>>{};
    if (json is Map && json.isNotEmpty) {
      // ignore: parameter_assignments
      json = json.cast<String, dynamic>();
      for (final entry in json.entries) {
        map[entry.key] = SyncPartnerV1.listFromJson(entry.value, growable: growable,);
      }
    }
    return map;
  }

  /// The list of required keys that must be present in a JSON.
  static const requiredKeys = <String>{
    'inTimeline',
    'sharedById',
    'sharedWithId',
  };
}
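As a sketch of the collection helpers (same assumed import path as above), listFromJson decodes a JSON array and silently drops any entry whose fromJson returns null; the payload values are illustrative.

import 'package:openapi/api.dart'; // assumed import path

void main() {
  final payload = [
    {'inTimeline': true, 'sharedById': 'user-a', 'sharedWithId': 'user-b'},
    {'inTimeline': false, 'sharedById': 'user-b', 'sharedWithId': 'user-a'},
  ];

  // growable defaults to false; pass true if the list will be mutated later.
  final partners = SyncPartnerV1.listFromJson(payload, growable: true);
  print(partners.length);           // 2
  print(partners.first.inTimeline); // true
}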
3 mobile/openapi/lib/model/sync_request_type.dart generated
@@ -24,10 +24,12 @@ class SyncRequestType {
   String toJson() => value;

   static const usersV1 = SyncRequestType._(r'UsersV1');
+  static const partnersV1 = SyncRequestType._(r'PartnersV1');

   /// List of all possible values in this [enum][SyncRequestType].
   static const values = <SyncRequestType>[
     usersV1,
+    partnersV1,
   ];

   static SyncRequestType? fromJson(dynamic value) => SyncRequestTypeTypeTransformer().decode(value);

@@ -67,6 +69,7 @@ class SyncRequestTypeTypeTransformer {
     if (data != null) {
       switch (data) {
         case r'UsersV1': return SyncRequestType.usersV1;
+        case r'PartnersV1': return SyncRequestType.partnersV1;
         default:
           if (!allowNull) {
             throw ArgumentError('Unknown enum value to decode: $data');
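A quick sketch of decoding the new enum value (same assumed import path as above). The default of allowNull is not visible in this hunk, so it is passed explicitly rather than relied upon.

import 'package:openapi/api.dart'; // assumed import path

void main() {
  // Known wire strings map onto the enum constants.
  final type = SyncRequestType.fromJson('PartnersV1');
  print(type == SyncRequestType.partnersV1); // true

  // With allowNull set, unknown strings decode to null instead of throwing.
  final unknown = SyncRequestTypeTypeTransformer().decode('NoSuchTypeV1', allowNull: true);
  print(unknown); // null
}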
Some files were not shown because too many files have changed in this diff.