Compare commits: v1.127.0...feature/Ad (102 commits)
Commit SHA1s in this range (author and date columns did not survive extraction):

f19cf206ba, 7ac30995a8, fe2ddc3644, ec0eb93036, d9a41b8ea0, f30fac971a, fe26ccd1b7, 3f4bbab4eb,
2da9e3152b, 56b85f7479, 8b43066632, 20acdcd884, 22d348beca, 3b0af1c8a9, 61c8237a4d, d740f0283a,
4ada28ac99, 63c01b78e2, 1423cfd53c, 867eec86f5, 86e8effd8e, 49d393216a, 75c9f63757, 63984890df,
1356468c38, c23c53bf6f, 0dcfc43461, d1fd0076cc, ff19502035, 6ef069b537, a03e999bde, ad1ba4be5f,
f89e74181b, e2c34f17ba, 23b1256592, 7bbc1d9f68, 8b24c31d20, 7f61ac6983, 4db8f0c666, 3d6a6f77a8,
5698f446f7, eb74fafb00, 24da25dbbf, 9b842d4cca, a99bd94717, 4b568dcbb3, 12ab56c885, eed6465b41,
5f6c16080b, a2aab1f373, 8e076ecfe4, fe702ba6d7, 869839f642, 8885e3105e, 6e51c4ec71, 6bf2e8dbcb,
366f23774a, fd5e931617, d8d87bb565, 6cc1978b2d, 506d2d0f81, f13d13b2ea, 2510684bf7, c8eef5ad4d,
0cb3dc6211, f11080cc2d, efcf773ea0, dc143046e3, e684062569, 5c0538e52c, 84cf0d1670, bfcde05b1c,
b3b15e9b61, 819e56d9ca, 9a98712db7, a185e06399, f2be9f7ad1, 5c879acd5b, 28c664c769, fbd85a89e0,
1c86293035, 4a9d80298b, 362feb1e62, 5503bf7a60, d20e2e268a, a708649504, a808b8610e, c70c9067b0,
082471dfd9, 9a098b4658, 9d705097e8, 6050485ad8, fb907d707d, 7d6cfd09e6, 967c69317b, 128d653fc6,
8b69114924, 4b55888d16, 8fbd650483, c778516ce2, 2969e25ff7, c055e1aefe
@@ -11,7 +11,7 @@ body:
   - type: checkboxes
     attributes:
-      label: I have searched the existing feature requests to make sure this is not a duplicate request.
+      label: I have searched the existing feature requests, both open and closed, to make sure this is not a duplicate request.
       options:
         - label: "Yes"
           required: true
.github/FUNDING.yml: 2 changes (vendored)
@@ -1 +1 @@
-custom: ['https://buy.immich.app']
+custom: ['https://buy.immich.app', 'https://immich.store']
.github/ISSUE_TEMPLATE/bug_report.yaml: 7 changes (vendored)
@@ -1,6 +1,13 @@
 name: Report an issue with Immich
 description: Report an issue with Immich
 body:
+  - type: checkboxes
+    attributes:
+      label: I have searched the existing issues, both open and closed, to make sure this is not a duplicate report.
+      options:
+        - label: "Yes"
+          required: true
+
   - type: markdown
     attributes:
       value: |
.github/workflows/cli.yml: 6 changes (vendored)
@@ -56,10 +56,10 @@ jobs:
         uses: actions/checkout@v4

       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.4.0
+        uses: docker/setup-qemu-action@v3.5.0

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3.9.0
+        uses: docker/setup-buildx-action@v3.10.0

       - name: Login to GitHub Container Registry
         uses: docker/login-action@v3
@@ -88,7 +88,7 @@ jobs:
             type=raw,value=latest,enable=${{ github.event_name == 'release' }}

       - name: Build and push image
-        uses: docker/build-push-action@v6.13.0
+        uses: docker/build-push-action@v6.15.0
         with:
           file: cli/Dockerfile
           platforms: linux/amd64,linux/arm64
.github/workflows/docker.yml: 50 changes (vendored)
@@ -5,7 +5,6 @@ on:
   push:
     branches: [main]
   pull_request:
     branches: [main]
   release:
     types: [published]
@@ -50,23 +49,23 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        suffix: ["", "-cuda", "-openvino", "-armnn"]
+        suffix: ['', '-cuda', '-rocm', '-openvino', '-armnn']
     steps:
       - name: Login to GitHub Container Registry
         uses: docker/login-action@v3
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}
       - name: Re-tag image
         run: |
           REGISTRY_NAME="ghcr.io"
           REPOSITORY=${{ github.repository_owner }}/immich-machine-learning
           TAG_OLD=main${{ matrix.suffix }}
           TAG_PR=${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
           TAG_COMMIT=commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
           docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_PR $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
           docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_COMMIT $REGISTRY_NAME/$REPOSITORY:$TAG_OLD

   retag_server:
     name: Re-Tag Server
@@ -75,7 +74,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        suffix: [""]
+        suffix: ['']
     steps:
       - name: Login to GitHub Container Registry
         uses: docker/login-action@v3
@@ -126,6 +125,11 @@ jobs:
             device: openvino
             suffix: -openvino

+          - platforms: linux/amd64
+            runner: mich
+            device: rocm
+            suffix: -rocm
+
           - platform: linux/arm64
             runner: ubuntu-24.04-arm
             device: armnn
@@ -141,7 +145,7 @@
         uses: actions/checkout@v4

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3.9.0
+        uses: docker/setup-buildx-action@v3.10.0

       - name: Login to GitHub Container Registry
         uses: docker/login-action@v3
@@ -171,7 +175,7 @@
       - name: Build and push image
         id: build
-        uses: docker/build-push-action@v6.13.0
+        uses: docker/build-push-action@v6.15.0
         with:
           context: ${{ env.context }}
           file: ${{ env.file }}
@@ -251,7 +255,7 @@
         id: meta
         uses: docker/metadata-action@v5
         env:
-          DOCKER_METADATA_PR_HEAD_SHA: "true"
+          DOCKER_METADATA_PR_HEAD_SHA: 'true'
         with:
           flavor: |
             # Disable latest tag
@@ -334,7 +338,7 @@
       - name: Build and push image
         id: build
-        uses: docker/build-push-action@v6.13.0
+        uses: docker/build-push-action@v6.15.0
         with:
           context: ${{ env.context }}
           file: ${{ env.file }}
@@ -404,7 +408,7 @@
         id: meta
         uses: docker/metadata-action@v5
         env:
-          DOCKER_METADATA_PR_HEAD_SHA: "true"
+          DOCKER_METADATA_PR_HEAD_SHA: 'true'
         with:
           flavor: |
             # Disable latest tag
.github/workflows/test.yml: 2 changes (vendored)
@@ -457,7 +457,7 @@ jobs:
     runs-on: ubuntu-latest
     services:
       postgres:
-        image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
+        image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
         env:
           POSTGRES_PASSWORD: postgres
           POSTGRES_USER: postgres
.github/workflows/weblate-lock.yml: 50 changes (vendored, new file)
@@ -0,0 +1,50 @@
+name: Weblate checks
+
+on:
+  pull_request:
+    branches: [main]
+
+jobs:
+  pre-job:
+    runs-on: ubuntu-latest
+    outputs:
+      should_run: ${{ steps.found_paths.outputs.i18n == 'true' && github.head_ref != 'chore/translations'}}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+      - id: found_paths
+        uses: dorny/paths-filter@v3
+        with:
+          filters: |
+            i18n:
+              - 'i18n/!(en)**\.json'
+  enforce-lock:
+    name: Check Weblate Lock
+    runs-on: ubuntu-latest
+    if: ${{ needs.pre-job.outputs.should_run == 'true' }}
+    steps:
+      - name: Check weblate lock
+        run: |
+          if [[ "false" = $(curl https://hosted.weblate.org/api/components/immich/immich/lock/ | jq .locked) ]]; then
+            exit 1
+          fi
+      - name: Find Pull Request
+        uses: juliangruber/find-pull-request-action@v1
+        id: find-pr
+        with:
+          branch: chore/translations
+      - name: Fail if existing weblate PR
+        if: ${{ steps.find-pr.outputs.number }}
+        run: exit 1
+  success-check-lock:
+    name: Weblate Lock Check Success
+    needs: [ enforce-lock ]
+    runs-on: ubuntu-latest
+    if: always()
+    steps:
+      - name: Any jobs failed?
+        if: ${{ contains(needs.*.result, 'failure') }}
+        run: exit 1
+      - name: All jobs passed or skipped
+        if: ${{ !(contains(needs.*.result, 'failure')) }}
+        run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"
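For readers who don't parse shell easily, here is a rough TypeScript equivalent of the `Check weblate lock` step above; the Weblate endpoint and the `locked` field come from the workflow itself, everything else is illustrative (run from an async context on Node 18+, where `fetch` is global):

```ts
// Mirrors the workflow step: fail unless the Weblate component is locked.
const response = await fetch('https://hosted.weblate.org/api/components/immich/immich/lock/');
const { locked } = (await response.json()) as { locked: boolean };
if (!locked) {
  process.exit(1); // same effect as `exit 1` in the shell step
}
```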
@@ -1,4 +1,4 @@
-FROM node:22.13.1-alpine3.20@sha256:c52e20859a92b3eccbd3a36c5e1a90adc20617d8d421d65e8a622e87b5dac963 AS core
+FROM node:22.14.0-alpine3.20@sha256:40be979442621049f40b1d51a26b55e281246b5de4e5f51a18da7beb6e17e3f9 AS core

 WORKDIR /usr/src/open-api/typescript-sdk
 COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./
cli/package-lock.json: 616 changes (generated; diff suppressed because it is too large)
@@ -1,6 +1,6 @@
 {
   "name": "@immich/cli",
-  "version": "2.2.51",
+  "version": "2.2.52",
   "description": "Command Line Interface (CLI) for Immich",
   "type": "module",
   "exports": "./dist/index.js",
@@ -19,8 +19,9 @@
     "@types/byte-size": "^8.1.0",
     "@types/cli-progress": "^3.11.0",
     "@types/lodash-es": "^4.17.12",
+    "@types/micromatch": "^4.0.9",
     "@types/mock-fs": "^4.13.1",
-    "@types/node": "^22.13.4",
+    "@types/node": "^22.13.5",
     "@typescript-eslint/eslint-plugin": "^8.15.0",
     "@typescript-eslint/parser": "^8.15.0",
     "@vitest/coverage-v8": "^3.0.0",
@@ -31,7 +32,7 @@
     "eslint-config-prettier": "^10.0.0",
     "eslint-plugin-prettier": "^5.1.3",
     "eslint-plugin-unicorn": "^56.0.1",
-    "globals": "^15.9.0",
+    "globals": "^16.0.0",
     "mock-fs": "^5.2.0",
     "prettier": "^3.2.5",
     "prettier-plugin-organize-imports": "^4.0.0",
@@ -62,11 +63,13 @@
     "node": ">=20.0.0"
   },
   "dependencies": {
+    "chokidar": "^4.0.3",
     "fast-glob": "^3.3.2",
     "fastq": "^1.17.1",
-    "lodash-es": "^4.17.21"
+    "lodash-es": "^4.17.21",
+    "micromatch": "^4.0.8"
   },
   "volta": {
     "node": "22.14.0"
   }
 }
@@ -1,12 +1,13 @@
 import * as fs from 'node:fs';
 import * as os from 'node:os';
 import * as path from 'node:path';
-import { describe, expect, it, vi } from 'vitest';
+import { setTimeout as sleep } from 'node:timers/promises';
+import { describe, expect, it, MockedFunction, vi } from 'vitest';

-import { Action, checkBulkUpload, defaults, Reason } from '@immich/sdk';
+import { Action, checkBulkUpload, defaults, getSupportedMediaTypes, Reason } from '@immich/sdk';
 import createFetchMock from 'vitest-fetch-mock';

-import { checkForDuplicates, getAlbumName, uploadFiles, UploadOptionsDto } from './asset';
+import { checkForDuplicates, getAlbumName, startWatch, uploadFiles, UploadOptionsDto } from 'src/commands/asset';

 vi.mock('@immich/sdk');
@@ -199,3 +200,112 @@ describe('checkForDuplicates', () => {
     });
   });
 });
+
+describe('startWatch', () => {
+  let testFolder: string;
+  let checkBulkUploadMocked: MockedFunction<typeof checkBulkUpload>;
+
+  beforeEach(async () => {
+    vi.restoreAllMocks();
+
+    vi.mocked(getSupportedMediaTypes).mockResolvedValue({
+      image: ['.jpg'],
+      sidecar: ['.xmp'],
+      video: ['.mp4'],
+    });
+
+    testFolder = await fs.promises.mkdtemp(path.join(os.tmpdir(), 'test-startWatch-'));
+    checkBulkUploadMocked = vi.mocked(checkBulkUpload);
+    checkBulkUploadMocked.mockResolvedValue({
+      results: [],
+    });
+  });
+
+  it('should start watching a directory and upload new files', async () => {
+    const testFilePath = path.join(testFolder, 'test.jpg');
+
+    await startWatch([testFolder], { concurrency: 1 }, { batchSize: 1, debounceTimeMs: 10 });
+    await sleep(100); // to debounce the watcher from considering the test file as an existing file
+    await fs.promises.writeFile(testFilePath, 'testjpg');
+
+    await vi.waitUntil(() => checkBulkUploadMocked.mock.calls.length > 0, 3000);
+    expect(checkBulkUpload).toHaveBeenCalledWith({
+      assetBulkUploadCheckDto: {
+        assets: [
+          expect.objectContaining({
+            id: testFilePath,
+          }),
+        ],
+      },
+    });
+  });
+
+  it('should filter out unsupported files', async () => {
+    const testFilePath = path.join(testFolder, 'test.jpg');
+    const unsupportedFilePath = path.join(testFolder, 'test.txt');
+
+    await startWatch([testFolder], { concurrency: 1 }, { batchSize: 1, debounceTimeMs: 10 });
+    await sleep(100); // to debounce the watcher from considering the test file as an existing file
+    await fs.promises.writeFile(testFilePath, 'testjpg');
+    await fs.promises.writeFile(unsupportedFilePath, 'testtxt');
+
+    await vi.waitUntil(() => checkBulkUploadMocked.mock.calls.length > 0, 3000);
+    expect(checkBulkUpload).toHaveBeenCalledWith({
+      assetBulkUploadCheckDto: {
+        assets: expect.arrayContaining([
+          expect.objectContaining({
+            id: testFilePath,
+          }),
+        ]),
+      },
+    });
+
+    expect(checkBulkUpload).not.toHaveBeenCalledWith({
+      assetBulkUploadCheckDto: {
+        assets: expect.arrayContaining([
+          expect.objectContaining({
+            id: unsupportedFilePath,
+          }),
+        ]),
+      },
+    });
+  });
+
+  it('should filter out ignored patterns', async () => {
+    const testFilePath = path.join(testFolder, 'test.jpg');
+    const ignoredPattern = 'ignored';
+    const ignoredFolder = path.join(testFolder, ignoredPattern);
+    await fs.promises.mkdir(ignoredFolder, { recursive: true });
+    const ignoredFilePath = path.join(ignoredFolder, 'ignored.jpg');
+
+    await startWatch([testFolder], { concurrency: 1, ignore: ignoredPattern }, { batchSize: 1, debounceTimeMs: 10 });
+    await sleep(100); // to debounce the watcher from considering the test file as an existing file
+    await fs.promises.writeFile(testFilePath, 'testjpg');
+    await fs.promises.writeFile(ignoredFilePath, 'ignoredjpg');
+
+    await vi.waitUntil(() => checkBulkUploadMocked.mock.calls.length > 0, 3000);
+    expect(checkBulkUpload).toHaveBeenCalledWith({
+      assetBulkUploadCheckDto: {
+        assets: expect.arrayContaining([
+          expect.objectContaining({
+            id: testFilePath,
+          }),
+        ]),
+      },
+    });
+
+    expect(checkBulkUpload).not.toHaveBeenCalledWith({
+      assetBulkUploadCheckDto: {
+        assets: expect.arrayContaining([
+          expect.objectContaining({
+            id: ignoredFilePath,
+          }),
+        ]),
+      },
+    });
+  });
+
+  afterEach(async () => {
+    await fs.promises.rm(testFolder, { recursive: true, force: true });
+  });
+});
@@ -12,13 +12,18 @@ import {
   getSupportedMediaTypes,
 } from '@immich/sdk';
 import byteSize from 'byte-size';
+import { Matcher, watch as watchFs } from 'chokidar';
 import { MultiBar, Presets, SingleBar } from 'cli-progress';
 import { chunk } from 'lodash-es';
+import micromatch from 'micromatch';
 import { Stats, createReadStream } from 'node:fs';
 import { stat, unlink } from 'node:fs/promises';
 import path, { basename } from 'node:path';
 import { Queue } from 'src/queue';
-import { BaseOptions, authenticate, crawl, sha1 } from 'src/utils';
+import { BaseOptions, Batcher, authenticate, crawl, sha1 } from 'src/utils';
+
+const UPLOAD_WATCH_BATCH_SIZE = 100;
+const UPLOAD_WATCH_DEBOUNCE_TIME_MS = 10_000;

 const s = (count: number) => (count === 1 ? '' : 's');

@@ -36,6 +41,8 @@ export interface UploadOptionsDto {
   albumName?: string;
   includeHidden?: boolean;
   concurrency: number;
+  progress?: boolean;
+  watch?: boolean;
 }

 class UploadFile extends File {
@@ -55,19 +62,94 @@
   }
 }

+const uploadBatch = async (files: string[], options: UploadOptionsDto) => {
+  const { newFiles, duplicates } = await checkForDuplicates(files, options);
+  const newAssets = await uploadFiles(newFiles, options);
+  await updateAlbums([...newAssets, ...duplicates], options);
+  await deleteFiles(newFiles, options);
+};
+
+export const startWatch = async (
+  paths: string[],
+  options: UploadOptionsDto,
+  {
+    batchSize = UPLOAD_WATCH_BATCH_SIZE,
+    debounceTimeMs = UPLOAD_WATCH_DEBOUNCE_TIME_MS,
+  }: { batchSize?: number; debounceTimeMs?: number } = {},
+) => {
+  const watcherIgnored: Matcher[] = [];
+  const { image, video } = await getSupportedMediaTypes();
+  const extensions = new Set([...image, ...video]);
+
+  if (options.ignore) {
+    watcherIgnored.push((path) => micromatch.contains(path, `**/${options.ignore}`));
+  }
+
+  const pathsBatcher = new Batcher<string>({
+    batchSize,
+    debounceTimeMs,
+    onBatch: async (paths: string[]) => {
+      const uniquePaths = [...new Set(paths)];
+      await uploadBatch(uniquePaths, options);
+    },
+  });
+
+  const onFile = async (path: string, stats?: Stats) => {
+    if (stats?.isDirectory()) {
+      return;
+    }
+    const ext = '.' + path.split('.').pop()?.toLowerCase();
+    if (!ext || !extensions.has(ext)) {
+      return;
+    }
+
+    if (!options.progress) {
+      // log only when progress bars are disabled, as logging can break the progress bar rendering
+      console.log(`Change detected: ${path}`);
+    }
+    pathsBatcher.add(path);
+  };
+  const fsWatcher = watchFs(paths, {
+    ignoreInitial: true,
+    ignored: watcherIgnored,
+    alwaysStat: true,
+    awaitWriteFinish: true,
+    depth: options.recursive ? undefined : 1,
+    persistent: true,
+  })
+    .on('add', onFile)
+    .on('change', onFile)
+    .on('error', (error) => console.error(`Watcher error: ${error}`));
+
+  process.on('SIGINT', async () => {
+    console.log('Exiting...');
+    await fsWatcher.close();
+    process.exit();
+  });
+};
+
 export const upload = async (paths: string[], baseOptions: BaseOptions, options: UploadOptionsDto) => {
   await authenticate(baseOptions);

   const scanFiles = await scan(paths, options);

   if (scanFiles.length === 0) {
-    console.log('No files found, exiting');
-    return;
+    if (options.watch) {
+      console.log('No files found initially.');
+    } else {
+      console.log('No files found, exiting');
+      return;
+    }
   }

-  const { newFiles, duplicates } = await checkForDuplicates(scanFiles, options);
-  const newAssets = await uploadFiles(newFiles, options);
-  await updateAlbums([...newAssets, ...duplicates], options);
-  await deleteFiles(newFiles, options);
+  if (options.watch) {
+    console.log('Watching for changes...');
+    await startWatch(paths, options);
+    // the watcher does not handle the initial scan,
+    // as scan() is a more efficient quick start with batched results
+  }
+
+  await uploadBatch(scanFiles, options);
 };

 const scan = async (pathsToCrawl: string[], options: UploadOptionsDto) => {
@@ -85,19 +167,25 @@ const scan = async (pathsToCrawl: string[], options: UploadOptionsDto) => {
   return files;
 };

-export const checkForDuplicates = async (files: string[], { concurrency, skipHash }: UploadOptionsDto) => {
+export const checkForDuplicates = async (files: string[], { concurrency, skipHash, progress }: UploadOptionsDto) => {
   if (skipHash) {
     console.log('Skipping hash check, assuming all files are new');
     return { newFiles: files, duplicates: [] };
   }

-  const multiBar = new MultiBar(
-    { format: '{message} | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
-    Presets.shades_classic,
-  );
+  let multiBar: MultiBar | undefined;

-  const hashProgressBar = multiBar.create(files.length, 0, { message: 'Hashing files ' });
-  const checkProgressBar = multiBar.create(files.length, 0, { message: 'Checking for duplicates' });
+  if (progress) {
+    multiBar = new MultiBar(
+      { format: '{message} | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
+      Presets.shades_classic,
+    );
+  } else {
+    console.log(`Received ${files.length} files, hashing...`);
+  }
+
+  const hashProgressBar = multiBar?.create(files.length, 0, { message: 'Hashing files ' });
+  const checkProgressBar = multiBar?.create(files.length, 0, { message: 'Checking for duplicates' });

   const newFiles: string[] = [];
   const duplicates: Asset[] = [];
@@ -117,7 +205,7 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
       }
     }

-    checkProgressBar.increment(assets.length);
+    checkProgressBar?.increment(assets.length);
   },
   { concurrency, retry: 3 },
 );
@@ -137,7 +225,7 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
       void checkBulkUploadQueue.push(batch);
     }

-    hashProgressBar.increment();
+    hashProgressBar?.increment();
     return results;
   },
   { concurrency, retry: 3 },
@@ -155,7 +243,7 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas

   await checkBulkUploadQueue.drained();

-  multiBar.stop();
+  multiBar?.stop();

   console.log(`Found ${newFiles.length} new files and ${duplicates.length} duplicate${s(duplicates.length)}`);

@@ -171,7 +259,10 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
   return { newFiles, duplicates };
 };

-export const uploadFiles = async (files: string[], { dryRun, concurrency }: UploadOptionsDto): Promise<Asset[]> => {
+export const uploadFiles = async (
+  files: string[],
+  { dryRun, concurrency, progress }: UploadOptionsDto,
+): Promise<Asset[]> => {
   if (files.length === 0) {
     console.log('All assets were already uploaded, nothing to do.');
     return [];
@@ -191,12 +282,20 @@ export const uploadFiles = async (files: string[], { dryRun, concurrency }: Uplo
     return files.map((filepath) => ({ id: '', filepath }));
   }

-  const uploadProgress = new SingleBar(
-    { format: 'Uploading assets | {bar} | {percentage}% | ETA: {eta_formatted} | {value_formatted}/{total_formatted}' },
-    Presets.shades_classic,
-  );
-  uploadProgress.start(totalSize, 0);
-  uploadProgress.update({ value_formatted: 0, total_formatted: byteSize(totalSize) });
+  let uploadProgress: SingleBar | undefined;
+
+  if (progress) {
+    uploadProgress = new SingleBar(
+      {
+        format: 'Uploading assets | {bar} | {percentage}% | ETA: {eta_formatted} | {value_formatted}/{total_formatted}',
+      },
+      Presets.shades_classic,
+    );
+  } else {
+    console.log(`Uploading ${files.length} asset${s(files.length)} (${byteSize(totalSize)})`);
+  }
+  uploadProgress?.start(totalSize, 0);
+  uploadProgress?.update({ value_formatted: 0, total_formatted: byteSize(totalSize) });

   let duplicateCount = 0;
   let duplicateSize = 0;
@@ -222,7 +321,7 @@ export const uploadFiles = async (files: string[], { dryRun, concurrency }: Uplo
       successSize += stats.size ?? 0;
     }

-    uploadProgress.update(successSize, { value_formatted: byteSize(successSize + duplicateSize) });
+    uploadProgress?.update(successSize, { value_formatted: byteSize(successSize + duplicateSize) });

     return response;
   },
@@ -235,7 +334,7 @@ export const uploadFiles = async (files: string[], { dryRun, concurrency }: Uplo

   await queue.drained();

-  uploadProgress.stop();
+  uploadProgress?.stop();

   console.log(`Successfully uploaded ${successCount} new asset${s(successCount)} (${byteSize(successSize)})`);
   if (duplicateCount > 0) {
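To make the new watch flow concrete, here is a minimal sketch of calling `startWatch` directly, mirroring how the spec file earlier in this diff drives it; the folder path and option values are illustrative, while the parameter shape and the batch/debounce defaults come from the code above (run from an async entry point):

```ts
import { startWatch } from 'src/commands/asset';

// Watch a folder and upload new or changed media in batches of up to 100
// paths, flushing after 10 seconds of quiet (the defaults defined above).
await startWatch(
  ['/photos'],
  { concurrency: 4, ignore: 'ignored', progress: false, watch: true },
  { batchSize: 100, debounceTimeMs: 10_000 },
);
```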
@@ -69,6 +69,13 @@ program
       .default(4),
   )
   .addOption(new Option('--delete', 'Delete local assets after upload').env('IMMICH_DELETE_ASSETS'))
+  .addOption(new Option('--no-progress', 'Hide progress bars').env('IMMICH_PROGRESS_BAR').default(true))
+  .addOption(
+    new Option('--watch', 'Watch for changes and upload automatically')
+      .env('IMMICH_WATCH_CHANGES')
+      .default(false)
+      .implies({ progress: false }),
+  )
   .argument('[paths...]', 'One or more paths to assets to be uploaded')
   .action((paths, options) => upload(paths, program.opts(), options));
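For context on the `.implies({ progress: false })` call above: Commander applies the implied option value whenever the flag is present, so `--watch` silently turns progress bars off. A standalone sketch of that behavior (illustrative, not Immich code):

```ts
import { Command, Option } from 'commander';

const program = new Command();
program
  .addOption(new Option('--no-progress', 'Hide progress bars').default(true))
  .addOption(new Option('--watch', 'Watch for changes').default(false).implies({ progress: false }));

program.parse(['node', 'cli', '--watch']);
// --watch implies progress: false, so bars are off without passing --no-progress
console.log(program.opts()); // { progress: false, watch: true }
```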
@@ -1,6 +1,7 @@
 import mockfs from 'mock-fs';
 import { readFileSync } from 'node:fs';
-import { CrawlOptions, crawl } from 'src/utils';
+import { Batcher, CrawlOptions, crawl } from 'src/utils';
 import { Mock } from 'vitest';

 interface Test {
   test: string;
@@ -303,3 +304,38 @@ describe('crawl', () => {
     }
   });
 });
+
+describe('Batcher', () => {
+  let batcher: Batcher;
+  let onBatch: Mock;
+  beforeEach(() => {
+    onBatch = vi.fn();
+    batcher = new Batcher({ batchSize: 2, onBatch });
+  });
+
+  it('should trigger onBatch() when a batch limit is reached', async () => {
+    batcher.add('a');
+    batcher.add('b');
+    batcher.add('c');
+    expect(onBatch).toHaveBeenCalledOnce();
+    expect(onBatch).toHaveBeenCalledWith(['a', 'b']);
+  });
+
+  it('should trigger onBatch() when flush() is called', async () => {
+    batcher.add('a');
+    batcher.flush();
+    expect(onBatch).toHaveBeenCalledOnce();
+    expect(onBatch).toHaveBeenCalledWith(['a']);
+  });
+
+  it('should trigger onBatch() when debounce time reached', async () => {
+    vi.useFakeTimers();
+    batcher = new Batcher({ batchSize: 2, debounceTimeMs: 100, onBatch });
+    batcher.add('a');
+    expect(onBatch).not.toHaveBeenCalled();
+    vi.advanceTimersByTime(200);
+    expect(onBatch).toHaveBeenCalledOnce();
+    expect(onBatch).toHaveBeenCalledWith(['a']);
+    vi.useRealTimers();
+  });
+});
@@ -172,3 +172,64 @@ export const sha1 = (filepath: string) => {
     rs.on('end', () => resolve(hash.digest('hex')));
   });
 };
+
+/**
+ * Batches items and calls onBatch to process them
+ * when the batch size is reached or the debounce time has passed.
+ */
+export class Batcher<T = unknown> {
+  private items: T[] = [];
+  private readonly batchSize: number;
+  private readonly debounceTimeMs?: number;
+  private readonly onBatch: (items: T[]) => void;
+  private debounceTimer?: NodeJS.Timeout;
+
+  constructor({
+    batchSize,
+    debounceTimeMs,
+    onBatch,
+  }: {
+    batchSize: number;
+    debounceTimeMs?: number;
+    onBatch: (items: T[]) => Promise<void>;
+  }) {
+    this.batchSize = batchSize;
+    this.debounceTimeMs = debounceTimeMs;
+    this.onBatch = onBatch;
+  }
+
+  private setDebounceTimer() {
+    if (this.debounceTimer) {
+      clearTimeout(this.debounceTimer);
+    }
+    if (this.debounceTimeMs) {
+      this.debounceTimer = setTimeout(() => this.flush(), this.debounceTimeMs);
+    }
+  }
+
+  private clearDebounceTimer() {
+    if (this.debounceTimer) {
+      clearTimeout(this.debounceTimer);
+      this.debounceTimer = undefined;
+    }
+  }
+
+  add(item: T) {
+    this.items.push(item);
+    this.setDebounceTimer();
+    if (this.items.length >= this.batchSize) {
+      this.flush();
+    }
+  }
+
+  flush() {
+    this.clearDebounceTimer();
+    if (this.items.length === 0) {
+      return;
+    }
+
+    this.onBatch(this.items);
+
+    this.items = [];
+  }
+}
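A quick usage sketch of the `Batcher` added above, matching the constructor shape and methods in the diff and the behavior the `utils.spec.ts` tests assert; the batch size, debounce time, and paths are illustrative:

```ts
import { Batcher } from 'src/utils';

// Collect paths and process them 100 at a time, or after 10s without new items.
const batcher = new Batcher<string>({
  batchSize: 100,
  debounceTimeMs: 10_000,
  onBatch: async (paths) => {
    console.log(`processing ${paths.length} paths`);
  },
});

batcher.add('/photos/a.jpg');
batcher.add('/photos/b.jpg');
batcher.flush(); // force the pending batch through immediately
```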
@@ -95,12 +95,12 @@ services:
     image: immich-machine-learning-dev:latest
     # extends:
     #   file: hwaccel.ml.yml
-    #   service: cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
+    #   service: cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl] for accelerated inference
     build:
       context: ../machine-learning
       dockerfile: Dockerfile
       args:
-        - DEVICE=cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
+        - DEVICE=cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl] for accelerated inference
     ports:
       - 3003:3003
     volumes:
@@ -122,7 +122,7 @@ services:
   database:
     container_name: immich_postgres
-    image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
+    image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
     env_file:
       - .env
     environment:
@@ -38,12 +38,12 @@ services:
     image: immich-machine-learning:latest
     # extends:
     #   file: hwaccel.ml.yml
-    #   service: cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
+    #   service: cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl] for accelerated inference
     build:
       context: ../machine-learning
       dockerfile: Dockerfile
       args:
-        - DEVICE=cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
+        - DEVICE=cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl] for accelerated inference
     ports:
       - 3003:3003
     volumes:
@@ -63,7 +63,7 @@ services:
   database:
     container_name: immich_postgres
-    image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
+    image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
     env_file:
       - .env
     environment:
@@ -100,7 +100,7 @@ services:
     container_name: immich_prometheus
     ports:
       - 9090:9090
-    image: prom/prometheus@sha256:5888c188cf09e3f7eebc97369c3b2ce713e844cdbd88ccf36f5047c958aea120
+    image: prom/prometheus@sha256:6927e0919a144aa7616fd0137d4816816d42f6b816de3af269ab065250859a62
     volumes:
       - ./prometheus.yml:/etc/prometheus/prometheus.yml
       - prometheus-data:/prometheus
@@ -112,7 +112,7 @@ services:
     command: ['./run.sh', '-disable-reporting']
     ports:
       - 3000:3000
-    image: grafana/grafana:11.5.1-ubuntu@sha256:9a4ab78cec1a2ec7d1ca5dfd5aacec6412706a1bc9e971fc7184e2f6696a63f5
+    image: grafana/grafana:11.5.2-ubuntu@sha256:8b5858c447e06fd7a89006b562ba7bba7c4d5813600c7982374c41852adefaeb
     volumes:
       - grafana-data:/var/lib/grafana
@@ -33,12 +33,12 @@ services:
   immich-machine-learning:
     container_name: immich_machine_learning
-    # For hardware acceleration, add one of -[armnn, cuda, openvino] to the image tag.
+    # For hardware acceleration, add one of -[armnn, cuda, rocm, openvino] to the image tag.
     # Example tag: ${IMMICH_VERSION:-release}-cuda
     image: ghcr.io/immich-app/immich-machine-learning:${IMMICH_VERSION:-release}
     # extends: # uncomment this section for hardware acceleration - see https://immich.app/docs/features/ml-hardware-acceleration
     #   file: hwaccel.ml.yml
-    #   service: cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference - use the `-wsl` version for WSL2 where applicable
+    #   service: cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl] for accelerated inference - use the `-wsl` version for WSL2 where applicable
     volumes:
       - model-cache:/cache
     env_file:
@@ -56,7 +56,7 @@ services:
   database:
     container_name: immich_postgres
-    image: docker.io/tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
+    image: docker.io/tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
     environment:
       POSTGRES_PASSWORD: ${DB_PASSWORD}
       POSTGRES_USER: ${DB_USERNAME}
@@ -26,6 +26,13 @@ services:
         capabilities:
           - gpu

+  rocm:
+    group_add:
+      - video
+    devices:
+      - /dev/dri:/dev/dri
+      - /dev/kfd:/dev/kfd
+
   openvino:
     device_cgroup_rules:
       - 'c 189:* rmw'
@@ -97,7 +97,7 @@ Make sure to [set your reverse proxy](/docs/administration/reverse-proxy/) to al
 Also, check the disk space of your reverse proxy.
 In some cases, proxies cache requests to disk before passing them on, and if disk space runs out, the request fails.

-If you are using Cloudflare Tunnel, please know that they set a maxiumum filesize of 100 MB that cannot be changed.
+If you are using Cloudflare Tunnel, please know that they set a maximum filesize of 100 MB that cannot be changed.
 At times, files larger than this may work, potentially up to 1 GB. However, the official limit is 100 MB.
 If you are having issues, we recommend switching to a different network deployment.
@@ -170,7 +170,7 @@ If you aren't able to or prefer not to mount Samba on the host (such as Windows
 Below is an example in the `docker-compose.yml`.

 Change your username, password, local IP, and share name, and see below where the line `- originals:/usr/src/app/originals`,
-corrolates to the section where the volume `originals` was created. You can call this whatever you like, and map it to the docker container as you like.
+correlates to the section where the volume `originals` was created. You can call this whatever you like, and map it to the docker container as you like.
 For example you could change `originals:` to `Photos:`, and change `- originals:/usr/src/app/originals` to `Photos:/usr/src/app/photos`.

 ```diff
@@ -98,6 +98,14 @@ The default Immich log level is `Log` (commonly known as `Info`). The Immich adm
 Through this setting, you can manage all the settings related to machine learning in Immich, from the setting of remote machine learning to the model and its parameters.
 You can choose to disable a certain type of machine learning, for example smart search or facial recognition.

+### URL
+
+The built-in (`http://immich-machine-learning:3003`) machine learning server is configured by default, but you can change this or add additional servers.
+
+Hosting the `immich-machine-learning` container on a machine with a more powerful GPU can be helpful for processing a large number of photos (such as during a batch import) or for faster search.
+
+If more than one URL is provided, each server will be attempted one at a time until one responds successfully, in order from first to last. Servers that don't respond will be temporarily ignored until they come back online.
+
 ### Smart Search

 The [smart search](/docs/features/searching) settings allow you to change the [CLIP model](https://openai.com/research/clip). Larger models will typically provide [more accurate search results](https://github.com/immich-app/immich/discussions/11862) but consume more processing power and RAM. When [changing the CLIP model](/docs/FAQ#can-i-use-a-custom-clip-model) it is mandatory to re-run the Smart Search job on all images to fully apply the change.
@@ -69,6 +69,8 @@ Navigating to Administration > Settings > Machine Learning Settings > Facial Rec
 :::tip
 It's better to only tweak the parameters here than to set them to something very different unless you're ready to test a variety of options. If you do need to set a parameter to a strict setting, relaxing other settings can be a good option to compensate, and vice versa.
+
+You can learn how to tune the results in this [guide](/docs/guides/better-facial-clusters).
 :::

 ### Facial recognition model
@@ -68,7 +68,7 @@ In rare cases, the library watcher can hang, preventing Immich from starting up.
 ### Nightly job

-There is an automatic scan job that is scheduled to run once a day. This job also cleans up any libraries stuck in deletion.
+There is an automatic scan job that is scheduled to run once a day. This job also cleans up any libraries stuck in deletion. It is possible to trigger the cleanup by clicking "Scan all libraries" on the library management page.

 ## Usage
@@ -11,6 +11,7 @@ You do not need to redo any machine learning jobs after enabling hardware accele
 - ARM NN (Mali)
 - CUDA (NVIDIA GPUs with [compute capability](https://developer.nvidia.com/cuda-gpus) 5.2 or higher)
+- ROCm (AMD GPUs)
 - OpenVINO (Intel GPUs such as Iris Xe and Arc)

 ## Limitations
@@ -41,6 +42,10 @@ You do not need to redo any machine learning jobs after enabling hardware accele
 - The installed driver must be >= 535 (it must support CUDA 12.2).
 - On Linux (except for WSL2), you also need to have [NVIDIA Container Toolkit][nvct] installed.

+#### ROCm
+
+- The GPU must be supported by ROCm. If it isn't officially supported, you can attempt to use the `HSA_OVERRIDE_GFX_VERSION` environmental variable: `HSA_OVERRIDE_GFX_VERSION=<a supported version, e.g. 10.3.0>`.
+
 #### OpenVINO

 - Integrated GPUs are more likely to experience issues than discrete GPUs, especially for older processors or servers with low RAM.
@@ -51,12 +56,12 @@ You do not need to redo any machine learning jobs after enabling hardware accele

 1. If you do not already have it, download the latest [`hwaccel.ml.yml`][hw-file] file and ensure it's in the same folder as the `docker-compose.yml`.
 2. In the `docker-compose.yml` under `immich-machine-learning`, uncomment the `extends` section and change `cpu` to the appropriate backend.
-3. Still in `immich-machine-learning`, add one of -[armnn, cuda, openvino] to the `image` section's tag at the end of the line.
+3. Still in `immich-machine-learning`, add one of -[armnn, cuda, rocm, openvino] to the `image` section's tag at the end of the line.
 4. Redeploy the `immich-machine-learning` container with these updated settings.

 ### Confirming Device Usage

-You can confirm the device is being recognized and used by checking its utilization. There are many tools to display this, such as `nvtop` for NVIDIA or Intel and `intel_gpu_top` for Intel.
+You can confirm the device is being recognized and used by checking its utilization. There are many tools to display this, such as `nvtop` for NVIDIA or Intel, `intel_gpu_top` for Intel, and `radeontop` for AMD.

 You can also check the logs of the `immich-machine-learning` container. When a Smart Search or Face Detection job begins, or when you search with text in Immich, you should either see a log for `Available ORT providers` containing the relevant provider (e.g. `CUDAExecutionProvider` in the case of CUDA), or a `Loaded ANN model` log entry without errors in the case of ARM NN.
docs/docs/guides/better-facial-clusters.md: 72 changes (new file)

# Better Facial Recognition Clusters

## Purpose

This guide explains how to optimize facial recognition in systems with large image libraries. By following these steps, you'll achieve better clustering of faces, reducing the need for manual merging.

---

## Important Notes

- **Best Suited For:** Large image libraries after importing a significant number of images.
- **Warning:** This method deletes all previously assigned names.
- **Tip:** **Always take a [backup](/docs/administration/backup-and-restore#database) before proceeding!**

---

## Step-by-Step Instructions

### Objective

To enhance face clustering and ensure the model effectively identifies faces using qualitative initial data.

---

### Steps

#### 1. Adjust Machine Learning Settings

Navigate to:
**Admin → Administration → Settings → Machine Learning Settings**

Make the following changes:

- **Maximum recognition distance (Optional):**
  Lower this value, e.g., to **0.4**, if the library contains people with similar facial features.
- **Minimum recognized faces:**
  Set this to a **high value** (e.g., 20 for libraries with a large number of assets (~100K+), and 10 for libraries with a medium number of assets (~40K+)).
  > A high value ensures clusters only include faces that appear at least `value` times in the library, improving the initial clustering process.

---

#### 2. Run Reset Jobs

Go to:
**Admin → Administration → Settings → Jobs**

Perform the following:

1. **FACIAL RECOGNITION → Reset**

> These reset jobs rebuild the recognition model based on the new settings.

---

#### 3. Refine Recognition with Lower Thresholds

Once the reset jobs are complete, refine the recognition as follows:

- **Step 1:**
  Return to **Minimum recognized faces** in Machine Learning Settings and lower the value to **10** (in medium libraries, lower the value from 10 to 5).

  > Run the job: **FACIAL RECOGNITION → MISSING Mode**

- **Step 2:**
  Lower the value again to **3**.
  > Run the job: **FACIAL RECOGNITION → MISSING Mode**

:::tip try different values
For certain libraries with a larger or smaller amount of assets, other settings will be better or worse. It is recommended to try different values **before assigning names** and see which settings work best for your library.
:::

---
@@ -31,6 +31,10 @@ SELECT * FROM "assets" WHERE "originalPath" LIKE 'upload/library/admin/2023/%';
 SELECT * FROM "assets" WHERE "id" = '9f94e60f-65b6-47b7-ae44-a4df7b57f0e9';
 ```

+```sql title="Find by partial ID"
+SELECT * FROM "assets" WHERE "id"::text LIKE '%ab431d3a%';
+```
+
 :::note
 You can calculate the checksum for a particular file by using the command `sha1sum <filename>`.
 :::
@@ -23,12 +23,12 @@ name: immich_remote_ml
 services:
   immich-machine-learning:
     container_name: immich_machine_learning
-    # For hardware acceleration, add one of -[armnn, cuda, openvino] to the image tag.
+    # For hardware acceleration, add one of -[armnn, cuda, rocm, openvino] to the image tag.
     # Example tag: ${IMMICH_VERSION:-release}-cuda
     image: ghcr.io/immich-app/immich-machine-learning:${IMMICH_VERSION:-release}
     # extends:
     #   file: hwaccel.ml.yml
-    #   service: # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference - use the `-wsl` version for WSL2 where applicable
+    #   service: # set to one of [armnn, cuda, rocm, openvino, openvino-wsl] for accelerated inference - use the `-wsl` version for WSL2 where applicable
     volumes:
       - model-cache:/cache
     restart: always
@@ -37,7 +37,7 @@ You can alternatively download these two files from your browser and move them t
 </CodeBlock>

 - Populate `UPLOAD_LOCATION` with your preferred location for storing backup assets. It should be a new directory on the server with enough free space.
-- Consider changing `DB_PASSWORD` to a custom value. Postgres is not publically exposed, so this password is only used for local authentication.
+- Consider changing `DB_PASSWORD` to a custom value. Postgres is not publicly exposed, so this password is only used for local authentication.
   To avoid issues with Docker parsing this value, it is best to use only the characters `A-Za-z0-9`. `pwgen` is a handy utility for this.
 - Set your timezone by uncommenting the `TZ=` line.
 - Populate custom database information if necessary.
@@ -11,7 +11,7 @@ Just restarting the containers does not replace the environment within the conta
 In order to recreate the container using docker compose, run `docker compose up -d`.
 In most cases docker will recognize that the `.env` file has changed and recreate the affected containers.
-If this should not work, try running `docker compose up -d --force-recreate`.
+If this does not work, try running `docker compose up -d --force-recreate`.

 :::

@@ -20,8 +20,8 @@ If this does not work, try running `docker compose up -d --force-recreate`.
 | Variable           | Description                     |  Default  | Containers               |
 | :----------------- | :------------------------------ | :-------: | :----------------------- |
 | `IMMICH_VERSION`   | Image tags                      | `release` | server, machine learning |
-| `UPLOAD_LOCATION`  | Host Path for uploads           |           | server                   |
-| `DB_DATA_LOCATION` | Host Path for Postgres database |           | database                 |
+| `UPLOAD_LOCATION`  | Host path for uploads           |           | server                   |
+| `DB_DATA_LOCATION` | Host path for Postgres database |           | database                 |

 :::tip
 These environment variables are used by the `docker-compose.yml` file and do **NOT** affect the containers directly.
@@ -33,15 +33,15 @@ These environment variables are used by the `docker-compose.yml` file and do **N
 | :---------------------------------- | :----------------------------------------------------------------------------------------- | :--------------------------: | :----------------------- | :----------------- |
 | `TZ`                                | Timezone                                                                                     | <sup>\*1</sup>               | server                   | microservices      |
 | `IMMICH_ENV`                        | Environment (production, development)                                                        | `production`                 | server, machine learning | api, microservices |
-| `IMMICH_LOG_LEVEL`                  | Log Level (verbose, debug, log, warn, error)                                                 | `log`                        | server, machine learning | api, microservices |
-| `IMMICH_MEDIA_LOCATION`             | Media Location inside the container ⚠️**You probably shouldn't set this**<sup>\*2</sup>⚠️    | `./upload`<sup>\*3</sup>     | server                   | api, microservices |
+| `IMMICH_LOG_LEVEL`                  | Log level (verbose, debug, log, warn, error)                                                 | `log`                        | server, machine learning | api, microservices |
+| `IMMICH_MEDIA_LOCATION`             | Media location inside the container ⚠️**You probably shouldn't set this**<sup>\*2</sup>⚠️    | `./upload`<sup>\*3</sup>     | server                   | api, microservices |
 | `IMMICH_CONFIG_FILE`                | Path to config file                                                                          |                              | server                   | api, microservices |
 | `NO_COLOR`                          | Set to `true` to disable color-coded log output                                              | `false`                      | server, machine learning |                    |
-| `CPU_CORES`                         | Amount of cores available to the immich server                                               | auto-detected cpu core count | server                   |                    |
+| `CPU_CORES`                         | Number of cores available to the Immich server                                               | auto-detected CPU core count | server                   |                    |
 | `IMMICH_API_METRICS_PORT`           | Port for the OTEL metrics                                                                    | `8081`                       | server                   | api                |
 | `IMMICH_MICROSERVICES_METRICS_PORT` | Port for the OTEL metrics                                                                    | `8082`                       | server                   | microservices      |
 | `IMMICH_PROCESS_INVALID_IMAGES`     | When `true`, generate thumbnails for invalid images                                          |                              | server                   | microservices      |
-| `IMMICH_TRUSTED_PROXIES`            | List of comma separated IPs set as trusted proxies                                           |                              | server                   | api                |
+| `IMMICH_TRUSTED_PROXIES`            | List of comma-separated IPs set as trusted proxies                                           |                              | server                   | api                |
 | `IMMICH_IGNORE_MOUNT_CHECK_ERRORS`  | See [System Integrity](/docs/administration/system-integrity)                                |                              | server                   | api, microservices |

 \*1: `TZ` should be set to a `TZ identifier` from [this list][tz-list]. For example, `TZ="Etc/UTC"`.
@@ -50,7 +50,7 @@ These environment variables are used by the `docker-compose.yml` file and do **N
 \*2: This path is where the Immich code looks for the files, which is internal to the docker container. Setting it to a path on your host will certainly break things, you should use the `UPLOAD_LOCATION` variable instead.

 \*3: With the default `WORKDIR` of `/usr/src/app`, this path will resolve to `/usr/src/app/upload`.
-It only need to be set if the Immich deployment method is changing.
+It only needs to be set if the Immich deployment method is changing.

 ## Workers

@@ -75,12 +75,12 @@ Information on the current workers can be found [here](/docs/administration/jobs
 | Variable                            | Description                                                               |   Default    | Containers                     |
 | :---------------------------------- | :------------------------------------------------------------------------ | :----------: | :----------------------------- |
 | `DB_URL`                            | Database URL                                                              |              | server                         |
-| `DB_HOSTNAME`                       | Database Host                                                             | `database`   | server                         |
-| `DB_PORT`                           | Database Port                                                             | `5432`       | server                         |
-| `DB_USERNAME`                       | Database User                                                             | `postgres`   | server, database<sup>\*1</sup> |
-| `DB_PASSWORD`                       | Database Password                                                         | `postgres`   | server, database<sup>\*1</sup> |
-| `DB_DATABASE_NAME`                  | Database Name                                                             | `immich`     | server, database<sup>\*1</sup> |
-| `DB_VECTOR_EXTENSION`<sup>\*2</sup> | Database Vector Extension (one of [`pgvector`, `pgvecto.rs`])             | `pgvecto.rs` | server                         |
+| `DB_HOSTNAME`                       | Database host                                                             | `database`   | server                         |
+| `DB_PORT`                           | Database port                                                             | `5432`       | server                         |
+| `DB_USERNAME`                       | Database user                                                             | `postgres`   | server, database<sup>\*1</sup> |
+| `DB_PASSWORD`                       | Database password                                                         | `postgres`   | server, database<sup>\*1</sup> |
+| `DB_DATABASE_NAME`                  | Database name                                                             | `immich`     | server, database<sup>\*1</sup> |
+| `DB_VECTOR_EXTENSION`<sup>\*2</sup> | Database vector extension (one of [`pgvector`, `pgvecto.rs`])             | `pgvecto.rs` | server                         |
 | `DB_SKIP_MIGRATIONS`                | Whether to skip running migrations on startup (one of [`true`, `false`])  | `false`      | server                         |

 \*1: The values of `DB_USERNAME`, `DB_PASSWORD`, and `DB_DATABASE_NAME` are passed to the Postgres container as the variables `POSTGRES_USER`, `POSTGRES_PASSWORD`, and `POSTGRES_DB` in `docker-compose.yml`.
@@ -103,18 +103,18 @@ When `DB_URL` is defined, the `DB_HOSTNAME`, `DB_PORT`, `DB_USERNAME`, `DB_PASSW
 | Variable         | Description    | Default | Containers |
 | :--------------- | :------------- | :-----: | :--------- |
 | `REDIS_URL`      | Redis URL      |         | server     |
-| `REDIS_SOCKET`   | Redis Socket   |         | server     |
-| `REDIS_HOSTNAME` | Redis Host     | `redis` | server     |
-| `REDIS_PORT`     | Redis Port     | `6379`  | server     |
-| `REDIS_USERNAME` | Redis Username |         | server     |
-| `REDIS_PASSWORD` | Redis Password |         | server     |
-| `REDIS_DBINDEX`  | Redis DB Index | `0`     | server     |
+| `REDIS_SOCKET`   | Redis socket   |         | server     |
+| `REDIS_HOSTNAME` | Redis host     | `redis` | server     |
+| `REDIS_PORT`     | Redis port     | `6379`  | server     |
+| `REDIS_USERNAME` | Redis username |         | server     |
+| `REDIS_PASSWORD` | Redis password |         | server     |
+| `REDIS_DBINDEX`  | Redis DB index | `0`     | server     |

 :::info
 All `REDIS_` variables must be provided to all Immich workers, including `api` and `microservices`.

 `REDIS_URL` must start with `ioredis://` and then include a `base64` encoded JSON string for the configuration.
-More info can be found in the upstream [ioredis] documentation.
+More information can be found in the upstream [ioredis] documentation.

 When `REDIS_URL` or `REDIS_SOCKET` are defined, the `REDIS_HOSTNAME`, `REDIS_PORT`, `REDIS_USERNAME`, `REDIS_PASSWORD`, and `REDIS_DBINDEX` variables are ignored.
 :::
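As a hedged illustration of the `REDIS_URL` format described above (the config keys follow ioredis connection options; the exact fields depend on your setup):

```ts
// Build an ioredis:// URL: the prefix plus a base64-encoded JSON configuration.
const config = { host: 'redis', port: 6379, db: 0 };
const redisUrl = 'ioredis://' + Buffer.from(JSON.stringify(config)).toString('base64');
console.log(redisUrl); // ioredis://eyJob3N0Ijoi... (base64 of the JSON above)
```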
|
||||
@@ -168,6 +168,8 @@ Redis (Sentinel) URL example JSON before encoding:
|
||||
| `MACHINE_LEARNING_ANN_TUNING_LEVEL` | ARM-NN GPU tuning level (1: rapid, 2: normal, 3: exhaustive) | `2` | machine learning |
|
||||
| `MACHINE_LEARNING_DEVICE_IDS`<sup>\*4</sup> | Device IDs to use in multi-GPU environments | `0` | machine learning |
|
||||
| `MACHINE_LEARNING_MAX_BATCH_SIZE__FACIAL_RECOGNITION` | Set the maximum number of faces that will be processed at once by the facial recognition model | None (`1` if using OpenVINO) | machine learning |
|
||||
| `MACHINE_LEARNING_PING_TIMEOUT` | How long (ms) to wait for a PING response when checking if an ML server is available | `2000` | server |
|
||||
| `MACHINE_LEARNING_AVAILABILITY_BACKOFF_TIME` | How long to ignore ML servers that are offline before trying again | `30000` | server |
|
||||
|
||||
\*1: It is recommended to begin with this parameter when changing the concurrency levels of the machine learning service and then tune the other ones.
|
||||
|
||||
@@ -179,7 +181,11 @@ Redis (Sentinel) URL example JSON before encoding:
|
||||
|
||||
:::info
|
||||
|
||||
Other machine learning parameters can be tuned from the admin UI.
|
||||
While the `textual` model is the only one required for smart search, some users may experience slow first searches
|
||||
due to backups triggering loading of the other models into memory, which blocks other requests until completed.
|
||||
To avoid this, you can preload the other models (`visual`, `recognition`, and `detection`) if you have enough RAM to do so.
|
||||
|
||||
Additional machine learning parameters can be tuned from the admin UI.
|
||||
|
||||
:::
|
||||
|
||||
@@ -210,7 +216,7 @@ the `_FILE` variable should be set to the path of a file containing the variable

details on how to use Docker Secrets in the Postgres image.

\*2: See [this comment][docker-secrets-example] for an example of how
to use use a Docker secret for the password in the Redis container.
to use a Docker secret for the password in the Redis container.

[tz-list]: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones#List
[docker-secrets-example]: https://github.com/docker-library/redis/issues/46#issuecomment-335326234
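As a rough illustration of the `_FILE` convention described above, a minimal sketch of how such a variable is typically consumed; `readEnvSecret` is a hypothetical helper for illustration, not Immich's actual code:

```typescript
// Hypothetical sketch of the `_FILE` convention: if <NAME>_FILE is set,
// read the secret from that file path; otherwise fall back to the plain
// environment variable. Not Immich's actual implementation.
import { readFileSync } from 'node:fs';

function readEnvSecret(name: string): string | undefined {
  const filePath = process.env[`${name}_FILE`];
  if (filePath) {
    // Docker mounts secrets as files, e.g. /run/secrets/db_password
    return readFileSync(filePath, 'utf8').trim();
  }
  return process.env[name];
}

const dbPassword = readEnvSecret('DB_PASSWORD');
```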
@@ -198,7 +198,7 @@ The **CPU** value was specified in a different format with a default of `4000m`
The **Memory** value was specified in a different format with a default of `8Gi` which is 8 GiB of RAM. The value was specified in bytes or a number with a measurement suffix. Examples: `129M`, `123Mi`, `1000000000`
:::

Enable **GPU Configuration** options if you have a GPU that you will use for [Hardware Transcoding](/docs/features/hardware-transcoding) and/or [Hardware-Accelerated Machine Learning](/docs/features/ml-hardware-acceleration.md). More info: [GPU Passtrough Docs for TrueNAS Apps](https://www.truenas.com/docs/truenasapps/#gpu-passthrough)
Enable **GPU Configuration** options if you have a GPU that you will use for [Hardware Transcoding](/docs/features/hardware-transcoding) and/or [Hardware-Accelerated Machine Learning](/docs/features/ml-hardware-acceleration.md). More info: [GPU Passthrough Docs for TrueNAS Apps](https://www.truenas.com/docs/truenasapps/#gpu-passthrough)

### Install

12
docs/package-lock.json
generated
@@ -14070,9 +14070,9 @@
      }
    },
    "node_modules/postcss": {
      "version": "8.5.2",
      "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.2.tgz",
      "integrity": "sha512-MjOadfU3Ys9KYoX0AdkBlFEF1Vx37uCCeN4ZHnmwm9FfpbsGWMZeBLMmmpY+6Ocqod7mkdZ0DT31OlbsFrLlkA==",
      "version": "8.5.3",
      "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.3.tgz",
      "integrity": "sha512-dle9A3yYxlBSrt8Fu+IpjGT8SY8hN0mlaA6GY8t0P5PjIOZemULz/E2Bnm/2dcUOena75OTNkHI76uZBNUUq3A==",
      "funding": [
        {
          "type": "opencollective",
@@ -15734,9 +15734,9 @@
      }
    },
    "node_modules/prettier": {
      "version": "3.5.1",
      "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.5.1.tgz",
      "integrity": "sha512-hPpFQvHwL3Qv5AdRvBFMhnKo4tYxp0ReXiPn2bxkiohEX6mBeBwEpBSQTkD458RaaDKQMYSp4hX4UtfUTA5wDw==",
      "version": "3.5.2",
      "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.5.2.tgz",
      "integrity": "sha512-lc6npv5PH7hVqozBR7lkBNOGXV9vMwROAPlumdBkX0wTbbzPu/U1hk5yL8p2pt4Xoc+2mkT8t/sow2YrV/M5qg==",
      "dev": true,
      "license": "MIT",
      "bin": {
4
docs/static/archived-versions.json
vendored

@@ -1,4 +1,8 @@
[
  {
    "label": "v1.128.0",
    "url": "https://v1.128.0.archive.immich.app"
  },
  {
    "label": "v1.127.0",
    "url": "https://v1.127.0.archive.immich.app"
@@ -5,7 +5,7 @@ module.exports = {
    preflight: false, // disable Tailwind's reset
  },
  content: ['./src/**/*.{js,jsx,ts,tsx}', './{docs,blog}/**/*.{md,mdx}'], // my markdown stuff is in ../docs, not /src
  darkMode: ['class', '[data-theme="dark"]'], // hooks into docusaurus' dark mode settigns
  darkMode: ['class', '[data-theme="dark"]'], // hooks into docusaurus' dark mode settings
  theme: {
    extend: {
      colors: {
@@ -37,7 +37,7 @@ services:
    image: redis:6.2-alpine@sha256:148bb5411c184abd288d9aaed139c98123eeb8824c5d3fce03cf721db58066d8

  database:
    image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
    image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
    command: -c fsync=off -c shared_preload_libraries=vectors.so
    environment:
      POSTGRES_PASSWORD: postgres
749
e2e/package-lock.json
generated
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
{
  "name": "immich-e2e",
  "version": "1.127.0",
  "version": "1.128.0",
  "description": "",
  "main": "index.js",
  "type": "module",
@@ -25,7 +25,7 @@
    "@immich/sdk": "file:../open-api/typescript-sdk",
    "@playwright/test": "^1.44.1",
    "@types/luxon": "^3.4.2",
    "@types/node": "^22.13.4",
    "@types/node": "^22.13.5",
    "@types/oidc-provider": "^8.5.1",
    "@types/pg": "^8.11.0",
    "@types/pngjs": "^6.0.4",
@@ -38,7 +38,7 @@
    "eslint-plugin-prettier": "^5.1.3",
    "eslint-plugin-unicorn": "^56.0.1",
    "exiftool-vendored": "^28.3.1",
    "globals": "^15.9.0",
    "globals": "^16.0.0",
    "jose": "^5.6.3",
    "luxon": "^3.4.4",
    "oidc-provider": "^8.5.1",
@@ -4,7 +4,6 @@ import {
  AssetResponseDto,
  AssetTypeEnum,
  getAssetInfo,
  getConfig,
  getMyUser,
  LoginResponseDto,
  SharedLinkType,
@@ -45,8 +44,6 @@ const locationAssetFilepath = `${testAssetDir}/metadata/gps-position/thompson-sp
const ratingAssetFilepath = `${testAssetDir}/metadata/rating/mongolels.jpg`;
const facesAssetFilepath = `${testAssetDir}/metadata/faces/portrait.jpg`;

const getSystemConfig = (accessToken: string) => getConfig({ headers: asBearerAuth(accessToken) });

const readTags = async (bytes: Buffer, filename: string) => {
  const filepath = join(tempDir, filename);
  await writeFile(filepath, bytes);
@@ -228,7 +225,7 @@ describe('/asset', () => {
  });

  it('should get the asset faces', async () => {
    const config = await getSystemConfig(admin.accessToken);
    const config = await utils.getSystemConfig(admin.accessToken);
    config.metadata.faces.import = true;
    await updateConfig({ systemConfigDto: config }, { headers: asBearerAuth(admin.accessToken) });
@@ -1,8 +1,9 @@
import { JobCommand, JobName, LoginResponseDto } from '@immich/sdk';
import { JobCommand, JobName, LoginResponseDto, updateConfig } from '@immich/sdk';
import { cpSync, rmSync } from 'node:fs';
import { readFile } from 'node:fs/promises';
import { basename } from 'node:path';
import { errorDto } from 'src/responses';
import { app, testAssetDir, utils } from 'src/utils';
import { app, asBearerAuth, testAssetDir, utils } from 'src/utils';
import request from 'supertest';
import { afterEach, beforeAll, describe, expect, it } from 'vitest';

@@ -20,6 +21,33 @@ describe('/jobs', () => {
      command: JobCommand.Resume,
      force: false,
    });

    await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
      command: JobCommand.Resume,
      force: false,
    });

    await utils.jobCommand(admin.accessToken, JobName.FaceDetection, {
      command: JobCommand.Resume,
      force: false,
    });

    await utils.jobCommand(admin.accessToken, JobName.SmartSearch, {
      command: JobCommand.Resume,
      force: false,
    });

    await utils.jobCommand(admin.accessToken, JobName.DuplicateDetection, {
      command: JobCommand.Resume,
      force: false,
    });

    const config = await utils.getSystemConfig(admin.accessToken);
    config.machineLearning.duplicateDetection.enabled = false;
    config.machineLearning.enabled = false;
    config.metadata.faces.import = false;
    config.machineLearning.clip.enabled = false;
    await updateConfig({ systemConfigDto: config }, { headers: asBearerAuth(admin.accessToken) });
  });

  it('should require authentication', async () => {
@@ -29,14 +57,7 @@ describe('/jobs', () => {
  });

  it('should queue metadata extraction for missing assets', async () => {
    const path1 = `${testAssetDir}/formats/raw/Nikon/D700/philadelphia.nef`;
    const path2 = `${testAssetDir}/formats/raw/Nikon/D80/glarus.nef`;

    await utils.createAsset(admin.accessToken, {
      assetData: { bytes: await readFile(path1), filename: basename(path1) },
    });

    await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction');
    const path = `${testAssetDir}/formats/raw/Nikon/D700/philadelphia.nef`;

    await utils.jobCommand(admin.accessToken, JobName.MetadataExtraction, {
      command: JobCommand.Pause,
@@ -44,7 +65,7 @@ describe('/jobs', () => {
    });

    const { id } = await utils.createAsset(admin.accessToken, {
      assetData: { bytes: await readFile(path2), filename: basename(path2) },
      assetData: { bytes: await readFile(path), filename: basename(path) },
    });

    await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction');
@@ -82,5 +103,123 @@ describe('/jobs', () => {
        expect(asset.exifInfo?.make).toBe('NIKON CORPORATION');
      }
    });

    it('should not re-extract metadata for existing assets', async () => {
      const path = `${testAssetDir}/temp/metadata/asset.jpg`;

      cpSync(`${testAssetDir}/formats/raw/Nikon/D700/philadelphia.nef`, path);

      const { id } = await utils.createAsset(admin.accessToken, {
        assetData: { bytes: await readFile(path), filename: basename(path) },
      });

      await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction');

      {
        const asset = await utils.getAssetInfo(admin.accessToken, id);

        expect(asset.exifInfo).toBeDefined();
        expect(asset.exifInfo?.model).toBe('NIKON D700');
      }

      cpSync(`${testAssetDir}/formats/raw/Nikon/D80/glarus.nef`, path);

      await utils.jobCommand(admin.accessToken, JobName.MetadataExtraction, {
        command: JobCommand.Start,
        force: false,
      });

      await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction');

      {
        const asset = await utils.getAssetInfo(admin.accessToken, id);

        expect(asset.exifInfo).toBeDefined();
        expect(asset.exifInfo?.model).toBe('NIKON D700');
      }

      rmSync(path);
    });

    it('should queue thumbnail extraction for assets missing thumbs', async () => {
      const path = `${testAssetDir}/albums/nature/tanners_ridge.jpg`;

      await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
        command: JobCommand.Pause,
        force: false,
      });

      const { id } = await utils.createAsset(admin.accessToken, {
        assetData: { bytes: await readFile(path), filename: basename(path) },
      });

      await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction);
      await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration);

      const assetBefore = await utils.getAssetInfo(admin.accessToken, id);
      expect(assetBefore.thumbhash).toBeNull();

      await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
        command: JobCommand.Empty,
        force: false,
      });

      await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction);
      await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration);

      await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
        command: JobCommand.Resume,
        force: false,
      });

      await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
        command: JobCommand.Start,
        force: false,
      });

      await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction);
      await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration);

      const assetAfter = await utils.getAssetInfo(admin.accessToken, id);
      expect(assetAfter.thumbhash).not.toBeNull();
    });

    it('should not reload existing thumbnail when running thumb job for missing assets', async () => {
      const path = `${testAssetDir}/temp/thumbs/asset1.jpg`;

      cpSync(`${testAssetDir}/albums/nature/tanners_ridge.jpg`, path);

      const { id } = await utils.createAsset(admin.accessToken, {
        assetData: { bytes: await readFile(path), filename: basename(path) },
      });

      await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction);
      await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration);

      const assetBefore = await utils.getAssetInfo(admin.accessToken, id);

      cpSync(`${testAssetDir}/albums/nature/notocactus_minimus.jpg`, path);

      await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
        command: JobCommand.Resume,
        force: false,
      });

      // This runs the missing thumbnail job
      await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
        command: JobCommand.Start,
        force: false,
      });

      await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction);
      await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration);

      const assetAfter = await utils.getAssetInfo(admin.accessToken, id);

      // Asset 1 thumbnail should be untouched since its thumb should not have been reloaded, even though the file was changed
      expect(assetAfter.thumbhash).toEqual(assetBefore.thumbhash);

      rmSync(path);
    });
  });
});
@@ -526,6 +526,47 @@ describe('/libraries', () => {
      utils.removeImageFile(`${testAssetDir}/temp/reimport/asset.jpg`);
    });

    it('should not reimport a modified file more than once', async () => {
      const library = await utils.createLibrary(admin.accessToken, {
        ownerId: admin.userId,
        importPaths: [`${testAssetDirInternal}/temp/reimport`],
      });

      utils.createImageFile(`${testAssetDir}/temp/reimport/asset.jpg`);
      await utimes(`${testAssetDir}/temp/reimport/asset.jpg`, 447_775_200_000);

      await utils.scan(admin.accessToken, library.id);

      cpSync(`${testAssetDir}/albums/nature/tanners_ridge.jpg`, `${testAssetDir}/temp/reimport/asset.jpg`);
      await utimes(`${testAssetDir}/temp/reimport/asset.jpg`, 447_775_200_001);

      await utils.scan(admin.accessToken, library.id);

      cpSync(`${testAssetDir}/albums/nature/el_torcal_rocks.jpg`, `${testAssetDir}/temp/reimport/asset.jpg`);
      await utimes(`${testAssetDir}/temp/reimport/asset.jpg`, 447_775_200_001);

      await utils.scan(admin.accessToken, library.id);

      const { assets } = await utils.searchAssets(admin.accessToken, {
        libraryId: library.id,
      });

      expect(assets.count).toEqual(1);

      const asset = await utils.getAssetInfo(admin.accessToken, assets.items[0].id);

      expect(asset).toEqual(
        expect.objectContaining({
          originalFileName: 'asset.jpg',
          exifInfo: expect.objectContaining({
            model: 'NIKON D750',
          }),
        }),
      );

      utils.removeImageFile(`${testAssetDir}/temp/reimport/asset.jpg`);
    });

    it('should set an asset offline if its file is missing', async () => {
      const library = await utils.createLibrary(admin.accessToken, {
        ownerId: admin.userId,
@@ -1,4 +1,4 @@
import { LoginResponseDto, getAssetInfo, getAssetStatistics, scanLibrary } from '@immich/sdk';
import { LoginResponseDto, getAssetInfo, getAssetStatistics } from '@immich/sdk';
import { existsSync } from 'node:fs';
import { Socket } from 'socket.io-client';
import { errorDto } from 'src/responses';
@@ -6,8 +6,6 @@ import { app, asBearerAuth, testAssetDir, testAssetDirInternal, utils } from 'sr
import request from 'supertest';
import { afterAll, beforeAll, describe, expect, it } from 'vitest';

const scan = async (accessToken: string, id: string) => scanLibrary({ id }, { headers: asBearerAuth(accessToken) });

describe('/trash', () => {
  let admin: LoginResponseDto;
  let ws: Socket;
@@ -81,8 +79,7 @@ describe('/trash', () => {

      utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`);

      await scan(admin.accessToken, library.id);
      await utils.waitForQueueFinish(admin.accessToken, 'library');
      await utils.scan(admin.accessToken, library.id);

      const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id });
      expect(assets.items.length).toBe(1);
@@ -90,8 +87,7 @@ describe('/trash', () => {

      await utils.updateLibrary(admin.accessToken, library.id, { exclusionPatterns: ['**/offline/**'] });

      await scan(admin.accessToken, library.id);
      await utils.waitForQueueFinish(admin.accessToken, 'library');
      await utils.scan(admin.accessToken, library.id);

      const assetBefore = await utils.getAssetInfo(admin.accessToken, asset.id);
      expect(assetBefore).toMatchObject({ isTrashed: true, isOffline: true });
@@ -116,8 +112,7 @@ describe('/trash', () => {

      utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`);

      await scan(admin.accessToken, library.id);
      await utils.waitForQueueFinish(admin.accessToken, 'library');
      await utils.scan(admin.accessToken, library.id);

      const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id });
      expect(assets.items.length).toBe(1);
@@ -125,8 +120,7 @@ describe('/trash', () => {

      await utils.updateLibrary(admin.accessToken, library.id, { exclusionPatterns: ['**/offline/**'] });

      await scan(admin.accessToken, library.id);
      await utils.waitForQueueFinish(admin.accessToken, 'library');
      await utils.scan(admin.accessToken, library.id);

      const assetBefore = await utils.getAssetInfo(admin.accessToken, asset.id);
      expect(assetBefore).toMatchObject({ isTrashed: true, isOffline: true });
@@ -180,8 +174,7 @@ describe('/trash', () => {

      utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`);

      await scan(admin.accessToken, library.id);
      await utils.waitForQueueFinish(admin.accessToken, 'library');
      await utils.scan(admin.accessToken, library.id);

      const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id });
      expect(assets.count).toBe(1);
@@ -189,9 +182,7 @@ describe('/trash', () => {

      await utils.updateLibrary(admin.accessToken, library.id, { exclusionPatterns: ['**/offline/**'] });

      await scan(admin.accessToken, library.id);

      await utils.waitForQueueFinish(admin.accessToken, 'library');
      await utils.scan(admin.accessToken, library.id);

      const before = await getAssetInfo({ id: assetId }, { headers: asBearerAuth(admin.accessToken) });
      expect(before).toStrictEqual(expect.objectContaining({ id: assetId, isOffline: true }));
@@ -201,6 +192,8 @@ describe('/trash', () => {

      const after = await getAssetInfo({ id: assetId }, { headers: asBearerAuth(admin.accessToken) });
      expect(after).toStrictEqual(expect.objectContaining({ id: assetId, isOffline: true }));

      utils.removeImageFile(`${testAssetDir}/temp/offline/offline.png`);
    });
  });

@@ -238,7 +231,7 @@ describe('/trash', () => {

      utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`);

      await scan(admin.accessToken, library.id);
      await utils.scan(admin.accessToken, library.id);
      await utils.waitForQueueFinish(admin.accessToken, 'library');

      const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id });
@@ -247,7 +240,7 @@ describe('/trash', () => {

      await utils.updateLibrary(admin.accessToken, library.id, { exclusionPatterns: ['**/offline/**'] });

      await scan(admin.accessToken, library.id);
      await utils.scan(admin.accessToken, library.id);
      await utils.waitForQueueFinish(admin.accessToken, 'library');

      const before = await utils.getAssetInfo(admin.accessToken, assetId);
@@ -261,6 +254,8 @@ describe('/trash', () => {

      const after = await utils.getAssetInfo(admin.accessToken, assetId);
      expect(after.isTrashed).toBe(true);

      utils.removeImageFile(`${testAssetDir}/temp/offline/offline.png`);
    });
  });
});
@@ -28,6 +28,7 @@ import {
  deleteAssets,
  getAllJobsStatus,
  getAssetInfo,
  getConfig,
  getConfigDefaults,
  login,
  scanLibrary,
@@ -121,6 +122,7 @@ const execPromise = promisify(exec);
const onEvent = ({ event, id }: { event: EventType; id: string }) => {
  // console.log(`Received event: ${event} [id=${id}]`);
  const set = events[event];

  set.add(id);

  const idCallback = idCallbacks[id];
@@ -415,6 +417,8 @@ export const utils = {
    rmSync(path, { recursive: true });
  },

  getSystemConfig: (accessToken: string) => getConfig({ headers: asBearerAuth(accessToken) }),

  getAssetInfo: (accessToken: string, id: string) => getAssetInfo({ id }, { headers: asBearerAuth(accessToken) }),

  checkExistingAssets: (accessToken: string, checkExistingAssetsDto: CheckExistingAssetsDto) =>
10
i18n/en.json

@@ -96,7 +96,7 @@
  "library_scanning_enable_description": "Enable periodic library scanning",
  "library_settings": "External Library",
  "library_settings_description": "Manage external library settings",
  "library_tasks_description": "Perform library tasks",
  "library_tasks_description": "Scan external libraries for new and/or changed assets",
  "library_watching_enable_description": "Watch external libraries for file changes",
  "library_watching_settings": "Library watching (EXPERIMENTAL)",
  "library_watching_settings_description": "Automatically watch for changed files",
@@ -131,7 +131,7 @@
  "machine_learning_smart_search_description": "Search for images semantically using CLIP embeddings",
  "machine_learning_smart_search_enabled": "Enable smart search",
  "machine_learning_smart_search_enabled_description": "If disabled, images will not be encoded for smart search.",
  "machine_learning_url_description": "The URL of the machine learning server. If more than one URL is provided, each server will be attempted one-at-a-time until one responds successfully, in order from first to last.",
  "machine_learning_url_description": "The URL of the machine learning server. If more than one URL is provided, each server will be attempted one-at-a-time until one responds successfully, in order from first to last. Servers that don't respond will be temporarily ignored until they come back online.",
  "manage_concurrency": "Manage Concurrency",
  "manage_log_settings": "Manage log settings",
  "map_dark_style": "Dark style",
@@ -336,6 +336,7 @@
  "untracked_files": "Untracked Files",
  "untracked_files_description": "These files are not tracked by the application. They can be the results of failed moves, interrupted uploads, or left behind due to a bug",
  "user_cleanup_job": "User cleanup",
  "cleanup": "Cleanup",
  "user_delete_delay": "<b>{user}</b>'s account and assets will be scheduled for permanent deletion in {delay, plural, one {# day} other {# days}}.",
  "user_delete_delay_settings": "Delete delay",
  "user_delete_delay_settings_description": "Number of days after removal to permanently delete a user's account and assets. The user deletion job runs at midnight to check for users that are ready for deletion. Changes to this setting will be evaluated at the next execution.",
@@ -393,6 +394,7 @@
  "allow_edits": "Allow edits",
  "allow_public_user_to_download": "Allow public user to download",
  "allow_public_user_to_upload": "Allow public user to upload",
  "alt_text_qr_code": "QR code image",
  "anti_clockwise": "Anti-clockwise",
  "api_key": "API Key",
  "api_key_description": "This value will only be shown once. Please be sure to copy it before closing the window.",
@@ -889,6 +891,7 @@
  "month": "Month",
  "more": "More",
  "moved_to_trash": "Moved to trash",
  "mute_memories": "Mute Memories",
  "my_albums": "My albums",
  "name": "Name",
  "name_or_nickname": "Name or nickname",
@@ -1114,6 +1117,7 @@
  "say_something": "Say something",
  "scan_all_libraries": "Scan All Libraries",
  "scan_library": "Scan",
  "rescan": "Rescan",
  "scan_settings": "Scan Settings",
  "scanning_for_album": "Scanning for album...",
  "search": "Search",
@@ -1302,6 +1306,7 @@
  "unnamed_album": "Unnamed Album",
  "unnamed_album_delete_confirmation": "Are you sure you want to delete this album?",
  "unnamed_share": "Unnamed Share",
  "unmute_memories": "Unmute Memories",
  "unsaved_change": "Unsaved change",
  "unselect_all": "Unselect all",
  "unselect_all_duplicates": "Unselect all duplicates",
@@ -1352,6 +1357,7 @@
  "view_all": "View All",
  "view_all_users": "View all users",
  "view_in_timeline": "View in timeline",
  "view_link": "View link",
  "view_links": "View links",
  "view_name": "View",
  "view_next_asset": "View next asset",
@@ -1,6 +1,6 @@
ARG DEVICE=cpu

FROM python:3.11-bookworm@sha256:14b4620f59a90f163dfa6bd252b68743f9a41d494a9fde935f9d7669d98094bb AS builder-cpu
FROM python:3.11-bookworm@sha256:68a8863d0625f42d47e0684f33ca02f19d6094ef859a8af237aaf645195ed477 AS builder-cpu

FROM builder-cpu AS builder-openvino

@@ -15,6 +15,34 @@ RUN mkdir /opt/armnn && \
    cd /opt/ann && \
    sh build.sh

# Warning: 25GiB+ disk space required to pull this image
# TODO: find a way to reduce the image size
FROM rocm/dev-ubuntu-22.04:6.3.1-complete AS builder-rocm

WORKDIR /code

RUN apt-get update && apt-get install -y --no-install-recommends wget git python3.10-venv
RUN wget -nv https://github.com/Kitware/CMake/releases/download/v3.30.1/cmake-3.30.1-linux-x86_64.sh && \
    chmod +x cmake-3.30.1-linux-x86_64.sh && \
    mkdir -p /code/cmake-3.30.1-linux-x86_64 && \
    ./cmake-3.30.1-linux-x86_64.sh --skip-license --prefix=/code/cmake-3.30.1-linux-x86_64 && \
    rm cmake-3.30.1-linux-x86_64.sh

ENV PATH=/code/cmake-3.30.1-linux-x86_64/bin:${PATH}

RUN git clone --single-branch --branch v1.20.1 --recursive "https://github.com/Microsoft/onnxruntime" onnxruntime
WORKDIR /code/onnxruntime
# Fix for multi-threading based on comments in https://github.com/microsoft/onnxruntime/pull/19567
# TODO: find a way to fix this without disabling algo caching
COPY ./rocm-PR19567.patch /tmp/
RUN git apply /tmp/rocm-PR19567.patch

RUN /bin/sh ./dockerfiles/scripts/install_common_deps.sh
# Note: the `parallel` setting uses a substantial amount of RAM
RUN ./build.sh --allow_running_as_root --config Release --build_wheel --update --build --parallel 13 --cmake_extra_defines\
    ONNXRUNTIME_VERSION=1.20.1 --use_rocm --rocm_home=/opt/rocm
RUN mv /code/onnxruntime/build/Linux/Release/dist/*.whl /opt/

FROM builder-${DEVICE} AS builder

ARG DEVICE
@@ -32,18 +60,21 @@ RUN poetry config installer.max-workers 10 && \
RUN python3 -m venv /opt/venv

COPY poetry.lock pyproject.toml ./
RUN if [ "$DEVICE" = "rocm" ]; then \
    poetry add /opt/onnxruntime_rocm-*.whl; \
    fi
RUN poetry install --sync --no-interaction --no-ansi --no-root --with ${DEVICE} --without dev

FROM python:3.11-slim-bookworm@sha256:42420f737ba91d509fc60d5ed65ed0492678a90c561e1fa08786ae8ba8b52eda AS prod-cpu
FROM python:3.11-slim-bookworm@sha256:614c8691ab74150465ec9123378cd4dde7a6e57be9e558c3108df40664667a4c AS prod-cpu

FROM prod-cpu AS prod-openvino

RUN apt-get update && \
    apt-get install --no-install-recommends -yqq ocl-icd-libopencl1 wget && \
    wget https://github.com/intel/intel-graphics-compiler/releases/download/igc-1.0.17384.11/intel-igc-core_1.0.17384.11_amd64.deb && \
    wget https://github.com/intel/intel-graphics-compiler/releases/download/igc-1.0.17384.11/intel-igc-opencl_1.0.17384.11_amd64.deb && \
    wget https://github.com/intel/compute-runtime/releases/download/24.31.30508.7/intel-opencl-icd_24.31.30508.7_amd64.deb && \
    wget https://github.com/intel/compute-runtime/releases/download/24.31.30508.7/libigdgmm12_22.4.1_amd64.deb && \
    wget -nv https://github.com/intel/intel-graphics-compiler/releases/download/igc-1.0.17384.11/intel-igc-core_1.0.17384.11_amd64.deb && \
    wget -nv https://github.com/intel/intel-graphics-compiler/releases/download/igc-1.0.17384.11/intel-igc-opencl_1.0.17384.11_amd64.deb && \
    wget -nv https://github.com/intel/compute-runtime/releases/download/24.31.30508.7/intel-opencl-icd_24.31.30508.7_amd64.deb && \
    wget -nv https://github.com/intel/compute-runtime/releases/download/24.31.30508.7/libigdgmm12_22.4.1_amd64.deb && \
    dpkg -i *.deb && \
    rm *.deb && \
    apt-get remove wget -yqq && \
@@ -80,11 +111,15 @@ COPY --from=builder-armnn \
    /opt/ann/build.sh \
    /opt/armnn/

FROM rocm/dev-ubuntu-22.04:6.3.1-complete AS prod-rocm


FROM prod-${DEVICE} AS prod

ARG DEVICE

RUN apt-get update && \
    apt-get install -y --no-install-recommends tini $(if ! [ "$DEVICE" = "openvino" ]; then echo "libmimalloc2.0"; fi) && \
    apt-get install -y --no-install-recommends tini $(if ! [ "$DEVICE" = "openvino" ] && ! [ "$DEVICE" = "rocm" ]; then echo "libmimalloc2.0"; fi) && \
    apt-get autoremove -yqq && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/*
@@ -7,7 +7,7 @@

This project uses [Poetry](https://python-poetry.org/docs/#installation), so be sure to install it first.
Running `poetry install --no-root --with dev --with cpu` will install everything you need in an isolated virtual environment.
CUDA and OpenVINO are supported as acceleration APIs. To use them, you can replace `--with cpu` with either of `--with cuda` or `--with openvino`. In the case of CUDA, a [compute capability](https://developer.nvidia.com/cuda-gpus) of 5.2 or higher is required.
CUDA, ROCM and OpenVINO are supported as acceleration APIs. To use them, you can replace `--with cpu` with either of `--with cuda`, `--with rocm` or `--with openvino`. In the case of CUDA, a [compute capability](https://developer.nvidia.com/cuda-gpus) of 5.2 or higher is required.

To add or remove dependencies, you can use the commands `poetry add $PACKAGE_NAME` and `poetry remove $PACKAGE_NAME`, respectively.
Be sure to commit the `poetry.lock` and `pyproject.toml` files with `poetry lock --no-update` to reflect any changes in dependencies.
@@ -37,4 +37,4 @@ This project utilizes facial recognition models from the [InsightFace](https://g
## License and Use Restrictions
We have received permission to use the InsightFace facial recognition models in our project, as granted via email by Jia Guo (guojia@insightface.ai) on 18th March 2023. However, it's important to note that this permission does not extend to the redistribution or commercial use of their models by third parties. Users and developers interested in using these models should review the licensing terms provided in the InsightFace GitHub repository.

For more information on the capabilities of the InsightFace models and to ensure compliance with their license, please refer to their [official repository](https://github.com/deepinsight/insightface). Adhering to the specified licensing terms is crucial for the respectful and lawful use of their work.
For more information on the capabilities of the InsightFace models and to ensure compliance with their license, please refer to their [official repository](https://github.com/deepinsight/insightface). Adhering to the specified licensing terms is crucial for the respectful and lawful use of their work.
@@ -63,7 +63,12 @@ _INSIGHTFACE_MODELS = {
}


SUPPORTED_PROVIDERS = ["CUDAExecutionProvider", "OpenVINOExecutionProvider", "CPUExecutionProvider"]
SUPPORTED_PROVIDERS = [
    "CUDAExecutionProvider",
    "ROCMExecutionProvider",
    "OpenVINOExecutionProvider",
    "CPUExecutionProvider",
]


def get_model_source(model_name: str) -> ModelSource | None:
@@ -20,9 +20,8 @@ class FaceRecognizer(InferenceModel):
    depends = [(ModelType.DETECTION, ModelTask.FACIAL_RECOGNITION)]
    identity = (ModelType.RECOGNITION, ModelTask.FACIAL_RECOGNITION)

    def __init__(self, model_name: str, min_score: float = 0.7, **model_kwargs: Any) -> None:
    def __init__(self, model_name: str, **model_kwargs: Any) -> None:
        super().__init__(model_name, **model_kwargs)
        self.min_score = model_kwargs.pop("minScore", min_score)
        max_batch_size = settings.max_batch_size.facial_recognition if settings.max_batch_size else None
        self.batch_size = max_batch_size if max_batch_size else self._batch_size_default
@@ -88,7 +88,7 @@ class OrtSession:
        match provider:
            case "CPUExecutionProvider":
                options = {"arena_extend_strategy": "kSameAsRequested"}
            case "CUDAExecutionProvider":
            case "CUDAExecutionProvider" | "ROCMExecutionProvider":
                options = {"arena_extend_strategy": "kSameAsRequested", "device_id": settings.device_id}
            case "OpenVINOExecutionProvider":
                options = {
@@ -324,7 +324,7 @@ class TestAnnSession:
        session.run(None, input_feed)

        ann_session.return_value.execute.assert_called_once_with(123, [input1, input2])
        np_spy.call_count == 2
        assert np_spy.call_count == 2
        np_spy.assert_has_calls([mock.call(input1), mock.call(input2)])
@@ -457,11 +457,14 @@ class TestCLIP:


class TestFaceRecognition:
    def test_set_min_score(self, mocker: MockerFixture) -> None:
        mocker.patch.object(FaceRecognizer, "load")
        face_recognizer = FaceRecognizer("buffalo_s", cache_dir="test_cache", min_score=0.5)
    def test_set_min_score(self, snapshot_download: mock.Mock, ort_session: mock.Mock, path: mock.Mock) -> None:
        path.return_value.__truediv__.return_value.__truediv__.return_value.suffix = ".onnx"

        assert face_recognizer.min_score == 0.5
        face_detector = FaceDetector("buffalo_s", min_score=0.5, cache_dir="test_cache")
        face_detector.load()

        assert face_detector.min_score == 0.5
        assert face_detector.model.det_thresh == 0.5

    def test_detection(self, cv_image: cv2.Mat, mocker: MockerFixture) -> None:
        mocker.patch.object(FaceDetector, "load")
@@ -14,12 +14,6 @@ byte_image = BytesIO()
def _(parser: ArgumentParser) -> None:
    parser.add_argument("--clip-model", type=str, default="ViT-B-32::openai")
    parser.add_argument("--face-model", type=str, default="buffalo_l")
    parser.add_argument(
        "--tag-min-score",
        type=int,
        default=0.0,
        help="Returns all tags at or above this score. The default returns all tags.",
    )
    parser.add_argument(
        "--face-min-score",
        type=int,
@@ -74,10 +68,10 @@ class RecognitionFormDataLoadTest(InferenceLoadTest):
        "facial-recognition": {
            "recognition": {
                "modelName": self.environment.parsed_options.face_model,
                "options": {"minScore": self.environment.parsed_options.face_min_score},
            },
            "detection": {
                "modelName": self.environment.parsed_options.face_model,
                "options": {"minScore": self.environment.parsed_options.face_min_score},
            },
        }
    }
166
machine-learning/poetry.lock
generated

@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand.
# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.

[[package]]
name = "aiocache"
@@ -75,33 +75,33 @@ trio = ["trio (>=0.23)"]

[[package]]
name = "black"
version = "24.10.0"
version = "25.1.0"
description = "The uncompromising code formatter."
optional = false
python-versions = ">=3.9"
files = [
    {file = "black-24.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6668650ea4b685440857138e5fe40cde4d652633b1bdffc62933d0db4ed9812"},
    {file = "black-24.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1c536fcf674217e87b8cc3657b81809d3c085d7bf3ef262ead700da345bfa6ea"},
    {file = "black-24.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:649fff99a20bd06c6f727d2a27f401331dc0cc861fb69cde910fe95b01b5928f"},
    {file = "black-24.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe4d6476887de70546212c99ac9bd803d90b42fc4767f058a0baa895013fbb3e"},
    {file = "black-24.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a2221696a8224e335c28816a9d331a6c2ae15a2ee34ec857dcf3e45dbfa99ad"},
    {file = "black-24.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9da3333530dbcecc1be13e69c250ed8dfa67f43c4005fb537bb426e19200d50"},
    {file = "black-24.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4007b1393d902b48b36958a216c20c4482f601569d19ed1df294a496eb366392"},
    {file = "black-24.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:394d4ddc64782e51153eadcaaca95144ac4c35e27ef9b0a42e121ae7e57a9175"},
    {file = "black-24.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e39e0fae001df40f95bd8cc36b9165c5e2ea88900167bddf258bacef9bbdc3"},
    {file = "black-24.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d37d422772111794b26757c5b55a3eade028aa3fde43121ab7b673d050949d65"},
    {file = "black-24.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14b3502784f09ce2443830e3133dacf2c0110d45191ed470ecb04d0f5f6fcb0f"},
    {file = "black-24.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:30d2c30dc5139211dda799758559d1b049f7f14c580c409d6ad925b74a4208a8"},
    {file = "black-24.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cbacacb19e922a1d75ef2b6ccaefcd6e93a2c05ede32f06a21386a04cedb981"},
    {file = "black-24.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1f93102e0c5bb3907451063e08b9876dbeac810e7da5a8bfb7aeb5a9ef89066b"},
    {file = "black-24.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ddacb691cdcdf77b96f549cf9591701d8db36b2f19519373d60d31746068dbf2"},
    {file = "black-24.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:680359d932801c76d2e9c9068d05c6b107f2584b2a5b88831c83962eb9984c1b"},
    {file = "black-24.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:17374989640fbca88b6a448129cd1745c5eb8d9547b464f281b251dd00155ccd"},
    {file = "black-24.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:63f626344343083322233f175aaf372d326de8436f5928c042639a4afbbf1d3f"},
    {file = "black-24.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfa1d0cb6200857f1923b602f978386a3a2758a65b52e0950299ea014be6800"},
    {file = "black-24.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:2cd9c95431d94adc56600710f8813ee27eea544dd118d45896bb734e9d7a0dc7"},
    {file = "black-24.10.0-py3-none-any.whl", hash = "sha256:3bb2b7a1f7b685f85b11fed1ef10f8a9148bceb49853e47a294a3dd963c1dd7d"},
    {file = "black-24.10.0.tar.gz", hash = "sha256:846ea64c97afe3bc677b761787993be4991810ecc7a4a937816dd6bddedc4875"},
    {file = "black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32"},
    {file = "black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da"},
    {file = "black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7"},
    {file = "black-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9"},
    {file = "black-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0"},
    {file = "black-25.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299"},
    {file = "black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096"},
    {file = "black-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2"},
    {file = "black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b"},
    {file = "black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc"},
    {file = "black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f"},
    {file = "black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba"},
    {file = "black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f"},
    {file = "black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3"},
    {file = "black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171"},
    {file = "black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18"},
    {file = "black-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1ee0a0c330f7b5130ce0caed9936a904793576ef4d2b98c40835d6a65afa6a0"},
    {file = "black-25.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3df5f1bf91d36002b0a75389ca8663510cf0531cca8aa5c1ef695b46d98655f"},
    {file = "black-25.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9e6827d563a2c820772b32ce8a42828dc6790f095f441beef18f96aa6f8294e"},
    {file = "black-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:bacabb307dca5ebaf9c118d2d2f6903da0d62c9faa82bd21a33eecc319559355"},
    {file = "black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717"},
    {file = "black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666"},
]

[package.dependencies]
@@ -147,10 +147,6 @@ files = [
    {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a37b8f0391212d29b3a91a799c8e4a2855e0576911cdfb2515487e30e322253d"},
    {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e84799f09591700a4154154cab9787452925578841a94321d5ee8fb9a9a328f0"},
    {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f66b5337fa213f1da0d9000bc8dc0cb5b896b726eefd9c6046f699b169c41b9e"},
    {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5dab0844f2cf82be357a0eb11a9087f70c5430b2c241493fc122bb6f2bb0917c"},
    {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e4fe605b917c70283db7dfe5ada75e04561479075761a0b3866c081d035b01c1"},
    {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1e9a65b5736232e7a7f91ff3d02277f11d339bf34099a56cdab6a8b3410a02b2"},
    {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:58d4b711689366d4a03ac7957ab8c28890415e267f9b6589969e74b6e42225ec"},
    {file = "Brotli-1.1.0-cp310-cp310-win32.whl", hash = "sha256:be36e3d172dc816333f33520154d708a2657ea63762ec16b62ece02ab5e4daf2"},
    {file = "Brotli-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c6244521dda65ea562d5a69b9a26120769b7a9fb3db2fe9545935ed6735b128"},
    {file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc"},
@@ -163,14 +159,8 @@ files = [
    {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9"},
    {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265"},
    {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8"},
    {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c247dd99d39e0338a604f8c2b3bc7061d5c2e9e2ac7ba9cc1be5a69cb6cd832f"},
    {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1b2c248cd517c222d89e74669a4adfa5577e06ab68771a529060cf5a156e9757"},
    {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2a24c50840d89ded6c9a8fdc7b6ed3692ed4e86f1c4a4a938e1e92def92933e0"},
    {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f31859074d57b4639318523d6ffdca586ace54271a73ad23ad021acd807eb14b"},
    {file = "Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50"},
    {file = "Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1"},
    {file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:32d95b80260d79926f5fab3c41701dbb818fde1c9da590e77e571eefd14abe28"},
    {file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b760c65308ff1e462f65d69c12e4ae085cff3b332d894637f6273a12a482d09f"},
    {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409"},
    {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2"},
    {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451"},
@@ -181,24 +171,8 @@ files = [
    {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180"},
    {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248"},
    {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966"},
    {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:87a3044c3a35055527ac75e419dfa9f4f3667a1e887ee80360589eb8c90aabb9"},
    {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c5529b34c1c9d937168297f2c1fde7ebe9ebdd5e121297ff9c043bdb2ae3d6fb"},
    {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ca63e1890ede90b2e4454f9a65135a4d387a4585ff8282bb72964fab893f2111"},
    {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e79e6520141d792237c70bcd7a3b122d00f2613769ae0cb61c52e89fd3443839"},
    {file = "Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0"},
    {file = "Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951"},
    {file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8bf32b98b75c13ec7cf774164172683d6e7891088f6316e54425fde1efc276d5"},
    {file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7bc37c4d6b87fb1017ea28c9508b36bbcb0c3d18b4260fcdf08b200c74a6aee8"},
    {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c0ef38c7a7014ffac184db9e04debe495d317cc9c6fb10071f7fefd93100a4f"},
    {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91d7cc2a76b5567591d12c01f019dd7afce6ba8cba6571187e21e2fc418ae648"},
    {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a93dde851926f4f2678e704fadeb39e16c35d8baebd5252c9fd94ce8ce68c4a0"},
    {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0db75f47be8b8abc8d9e31bc7aad0547ca26f24a54e6fd10231d623f183d089"},
    {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6967ced6730aed543b8673008b5a391c3b1076d834ca438bbd70635c73775368"},
    {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7eedaa5d036d9336c95915035fb57422054014ebdeb6f3b42eac809928e40d0c"},
    {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d487f5432bf35b60ed625d7e1b448e2dc855422e87469e3f450aa5552b0eb284"},
    {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:832436e59afb93e1836081a20f324cb185836c617659b07b129141a8426973c7"},
    {file = "Brotli-1.1.0-cp313-cp313-win32.whl", hash = "sha256:43395e90523f9c23a3d5bdf004733246fba087f2948f87ab28015f12359ca6a0"},
    {file = "Brotli-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:9011560a466d2eb3f5a6e4929cf4a09be405c64154e12df0dd72713f6500e32b"},
    {file = "Brotli-1.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a090ca607cbb6a34b0391776f0cb48062081f5f60ddcce5d11838e67a01928d1"},
    {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de9d02f5bda03d27ede52e8cfe7b865b066fa49258cbab568720aa5be80a47d"},
    {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2333e30a5e00fe0fe55903c8832e08ee9c3b1382aacf4db26664a16528d51b4b"},
@@ -208,10 +182,6 @@ files = [
    {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:fd5f17ff8f14003595ab414e45fce13d073e0762394f957182e69035c9f3d7c2"},
    {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:069a121ac97412d1fe506da790b3e69f52254b9df4eb665cd42460c837193354"},
    {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e93dfc1a1165e385cc8239fab7c036fb2cd8093728cbd85097b284d7b99249a2"},
    {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:aea440a510e14e818e67bfc4027880e2fb500c2ccb20ab21c7a7c8b5b4703d75"},
    {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_i686.whl", hash = "sha256:6974f52a02321b36847cd19d1b8e381bf39939c21efd6ee2fc13a28b0d99348c"},
    {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:a7e53012d2853a07a4a79c00643832161a910674a893d296c9f1259859a289d2"},
    {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:d7702622a8b40c49bffb46e1e3ba2e81268d5c04a34f460978c6b5517a34dd52"},
    {file = "Brotli-1.1.0-cp36-cp36m-win32.whl", hash = "sha256:a599669fd7c47233438a56936988a2478685e74854088ef5293802123b5b2460"},
    {file = "Brotli-1.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d143fd47fad1db3d7c27a1b1d66162e855b5d50a89666af46e1679c496e8e579"},
    {file = "Brotli-1.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:11d00ed0a83fa22d29bc6b64ef636c4552ebafcef57154b4ddd132f5638fbd1c"},
@@ -223,10 +193,6 @@ files = [
    {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:919e32f147ae93a09fe064d77d5ebf4e35502a8df75c29fb05788528e330fe74"},
    {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:23032ae55523cc7bccb4f6a0bf368cd25ad9bcdcc1990b64a647e7bbcce9cb5b"},
    {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:224e57f6eac61cc449f498cc5f0e1725ba2071a3d4f48d5d9dffba42db196438"},
    {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:cb1dac1770878ade83f2ccdf7d25e494f05c9165f5246b46a621cc849341dc01"},
    {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:3ee8a80d67a4334482d9712b8e83ca6b1d9bc7e351931252ebef5d8f7335a547"},
    {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:5e55da2c8724191e5b557f8e18943b1b4839b8efc3ef60d65985bcf6f587dd38"},
    {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:d342778ef319e1026af243ed0a07c97acf3bad33b9f29e7ae6a1f68fd083e90c"},
    {file = "Brotli-1.1.0-cp37-cp37m-win32.whl", hash = "sha256:587ca6d3cef6e4e868102672d3bd9dc9698c309ba56d41c2b9c85bbb903cdb95"},
    {file = "Brotli-1.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2954c1c23f81c2eaf0b0717d9380bd348578a94161a65b3a2afc62c86467dd68"},
    {file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:efa8b278894b14d6da122a72fefcebc28445f2d3f880ac59d46c90f4c13be9a3"},
@@ -239,10 +205,6 @@ files = [
    {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ab4fbee0b2d9098c74f3057b2bc055a8bd92ccf02f65944a241b4349229185a"},
    {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:141bd4d93984070e097521ed07e2575b46f817d08f9fa42b16b9b5f27b5ac088"},
    {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fce1473f3ccc4187f75b4690cfc922628aed4d3dd013d047f95a9b3919a86596"},
    {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d2b35ca2c7f81d173d2fadc2f4f31e88cc5f7a39ae5b6db5513cf3383b0e0ec7"},
    {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:af6fa6817889314555aede9a919612b23739395ce767fe7fcbea9a80bf140fe5"},
    {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:2feb1d960f760a575dbc5ab3b1c00504b24caaf6986e2dc2b01c09c87866a943"},
    {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4410f84b33374409552ac9b6903507cdb31cd30d2501fc5ca13d18f73548444a"},
    {file = "Brotli-1.1.0-cp38-cp38-win32.whl", hash = "sha256:db85ecf4e609a48f4b29055f1e144231b90edc90af7481aa731ba2d059226b1b"},
    {file = "Brotli-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3d7954194c36e304e1523f55d7042c59dc53ec20dd4e9ea9d151f1b62b4415c0"},
    {file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5fb2ce4b8045c78ebbc7b8f3c15062e435d47e7393cc57c25115cfd49883747a"},
@@ -255,10 +217,6 @@ files = [
    {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:949f3b7c29912693cee0afcf09acd6ebc04c57af949d9bf77d6101ebb61e388c"},
    {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:89f4988c7203739d48c6f806f1e87a1d96e0806d44f0fba61dba81392c9e474d"},
    {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:de6551e370ef19f8de1807d0a9aa2cdfdce2e85ce88b122fe9f6b2b076837e59"},
    {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0737ddb3068957cf1b054899b0883830bb1fec522ec76b1098f9b6e0f02d9419"},
    {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4f3607b129417e111e30637af1b56f24f7a49e64763253bbc275c75fa887d4b2"},
    {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:6c6e0c425f22c1c719c42670d561ad682f7bfeeef918edea971a79ac5252437f"},
    {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:494994f807ba0b92092a163a0a283961369a65f6cbe01e8891132b7a320e61eb"},
    {file = "Brotli-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f0d8a7a6b5983c2496e364b969f0e526647a06b075d034f3297dc66f3b360c64"},
    {file = "Brotli-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cdad5b9014d83ca68c25d2e9444e28e967ef16e80f6b436918c700c117a85467"},
    {file = "Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"},
@@ -1331,13 +1289,13 @@ zstd = ["zstandard (>=0.18.0)"]

[[package]]
name = "huggingface-hub"
version = "0.28.1"
version = "0.29.1"
description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub"
optional = false
python-versions = ">=3.8.0"
files = [
    {file = "huggingface_hub-0.28.1-py3-none-any.whl", hash = "sha256:aa6b9a3ffdae939b72c464dbb0d7f99f56e649b55c3d52406f49e0a5a620c0a7"},
    {file = "huggingface_hub-0.28.1.tar.gz", hash = "sha256:893471090c98e3b6efbdfdacafe4052b20b84d59866fb6f54c33d9af18c303ae"},
    {file = "huggingface_hub-0.29.1-py3-none-any.whl", hash = "sha256:352f69caf16566c7b6de84b54a822f6238e17ddd8ae3da4f8f2272aea5b198d5"},
    {file = "huggingface_hub-0.29.1.tar.gz", hash = "sha256:9524eae42077b8ff4fc459ceb7a514eca1c1232b775276b009709fe2a084f250"},
]

[package.dependencies]
@@ -1625,23 +1583,23 @@ test = ["pytest (>=7.4)", "pytest-cov (>=4.1)"]

[[package]]
name = "locust"
version = "2.32.9"
version = "2.33.0"
description = "Developer-friendly load testing framework"
optional = false
python-versions = ">=3.9"
files = [
    {file = "locust-2.32.9-py3-none-any.whl", hash = "sha256:d9447c26d2bbaec5a0ace7cadefa1a31820ed392234257b309965a43d5e8d26f"},
    {file = "locust-2.32.9.tar.gz", hash = "sha256:4c297afa5cdc3de15dfa79279576e5f33c1d69dd70006b51d079dcbd212201cc"},
    {file = "locust-2.33.0-py3-none-any.whl", hash = "sha256:77fcc5cc35cceee5e12d99f5bb23bc441d145bdef6967c2e93d6e4d93451553e"},
    {file = "locust-2.33.0.tar.gz", hash = "sha256:ba291b7ab2349cc2db540adb8888bc93feb89ea4e4e10d80b935e5065091e8e9"},
]

[package.dependencies]
ConfigArgParse = ">=1.5.5"
configargparse = ">=1.5.5"
flask = ">=2.0.0"
Flask-Cors = ">=3.0.10"
Flask-Login = ">=0.6.3"
flask-cors = ">=3.0.10"
flask-login = ">=0.6.3"
gevent = [
    {version = ">=22.10.2", markers = "python_full_version <= \"3.12.0\""},
    {version = ">=24.10.1", markers = "python_full_version > \"3.13.0\""},
    {version = ">=22.10.2", markers = "python_version <= \"3.12\""},
|
||||
{version = ">=24.10.1", markers = "python_version > \"3.13\""},
|
||||
]
|
||||
geventhttpclient = ">=2.3.1"
|
||||
msgpack = ">=1.0.0"
|
||||
@@ -1649,13 +1607,13 @@ psutil = ">=5.9.1"
|
||||
pywin32 = {version = "*", markers = "sys_platform == \"win32\""}
|
||||
pyzmq = ">=25.0.0"
|
||||
requests = [
|
||||
{version = ">=2.26.0", markers = "python_full_version <= \"3.11.0\""},
|
||||
{version = ">=2.32.2", markers = "python_full_version > \"3.11.0\""},
|
||||
{version = ">=2.26.0", markers = "python_version <= \"3.11\""},
|
||||
{version = ">=2.32.2", markers = "python_version > \"3.11\""},
|
||||
]
|
||||
setuptools = ">=70.0.0"
|
||||
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
|
||||
typing_extensions = {version = ">=4.6.0", markers = "python_version < \"3.11\""}
|
||||
Werkzeug = ">=2.0.0"
|
||||
typing-extensions = {version = ">=4.6.0", markers = "python_version < \"3.11\""}
|
||||
werkzeug = ">=2.0.0"
|
||||
|
||||
[[package]]
|
||||
name = "markdown-it-py"
|
||||
@@ -2628,13 +2586,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
|
||||
|
||||
[[package]]
|
||||
name = "pydantic-settings"
|
||||
version = "2.7.1"
|
||||
version = "2.8.1"
|
||||
description = "Settings management using Pydantic"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "pydantic_settings-2.7.1-py3-none-any.whl", hash = "sha256:590be9e6e24d06db33a4262829edef682500ef008565a969c73d39d5f8bfb3fd"},
|
||||
{file = "pydantic_settings-2.7.1.tar.gz", hash = "sha256:10c9caad35e64bfb3c2fbf70a078c0e25cc92499782e5200747f942a065dec93"},
|
||||
{file = "pydantic_settings-2.8.1-py3-none-any.whl", hash = "sha256:81942d5ac3d905f7f3ee1a70df5dfb62d5569c12f51a5a647defc1c3d9ee2e9c"},
|
||||
{file = "pydantic_settings-2.8.1.tar.gz", hash = "sha256:d5c663dfbe9db9d5e1c646b2e161da12f0d734d422ee56f567d0ea2cee4e8585"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -3047,29 +3005,29 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"]
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.9.6"
|
||||
version = "0.9.9"
|
||||
description = "An extremely fast Python linter and code formatter, written in Rust."
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "ruff-0.9.6-py3-none-linux_armv6l.whl", hash = "sha256:2f218f356dd2d995839f1941322ff021c72a492c470f0b26a34f844c29cdf5ba"},
|
||||
{file = "ruff-0.9.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b908ff4df65dad7b251c9968a2e4560836d8f5487c2f0cc238321ed951ea0504"},
|
||||
{file = "ruff-0.9.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:b109c0ad2ececf42e75fa99dc4043ff72a357436bb171900714a9ea581ddef83"},
|
||||
{file = "ruff-0.9.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1de4367cca3dac99bcbd15c161404e849bb0bfd543664db39232648dc00112dc"},
|
||||
{file = "ruff-0.9.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac3ee4d7c2c92ddfdaedf0bf31b2b176fa7aa8950efc454628d477394d35638b"},
|
||||
{file = "ruff-0.9.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5dc1edd1775270e6aa2386119aea692039781429f0be1e0949ea5884e011aa8e"},
|
||||
{file = "ruff-0.9.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:4a091729086dffa4bd070aa5dab7e39cc6b9d62eb2bef8f3d91172d30d599666"},
|
||||
{file = "ruff-0.9.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1bbc6808bf7b15796cef0815e1dfb796fbd383e7dbd4334709642649625e7c5"},
|
||||
{file = "ruff-0.9.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:589d1d9f25b5754ff230dce914a174a7c951a85a4e9270613a2b74231fdac2f5"},
|
||||
{file = "ruff-0.9.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc61dd5131742e21103fbbdcad683a8813be0e3c204472d520d9a5021ca8b217"},
|
||||
{file = "ruff-0.9.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5e2d9126161d0357e5c8f30b0bd6168d2c3872372f14481136d13de9937f79b6"},
|
||||
{file = "ruff-0.9.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:68660eab1a8e65babb5229a1f97b46e3120923757a68b5413d8561f8a85d4897"},
|
||||
{file = "ruff-0.9.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c4cae6c4cc7b9b4017c71114115db0445b00a16de3bcde0946273e8392856f08"},
|
||||
{file = "ruff-0.9.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:19f505b643228b417c1111a2a536424ddde0db4ef9023b9e04a46ed8a1cb4656"},
|
||||
{file = "ruff-0.9.6-py3-none-win32.whl", hash = "sha256:194d8402bceef1b31164909540a597e0d913c0e4952015a5b40e28c146121b5d"},
|
||||
{file = "ruff-0.9.6-py3-none-win_amd64.whl", hash = "sha256:03482d5c09d90d4ee3f40d97578423698ad895c87314c4de39ed2af945633caa"},
|
||||
{file = "ruff-0.9.6-py3-none-win_arm64.whl", hash = "sha256:0e2bb706a2be7ddfea4a4af918562fdc1bcb16df255e5fa595bbd800ce322a5a"},
|
||||
{file = "ruff-0.9.6.tar.gz", hash = "sha256:81761592f72b620ec8fa1068a6fd00e98a5ebee342a3642efd84454f3031dca9"},
|
||||
{file = "ruff-0.9.9-py3-none-linux_armv6l.whl", hash = "sha256:628abb5ea10345e53dff55b167595a159d3e174d6720bf19761f5e467e68d367"},
|
||||
{file = "ruff-0.9.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b6cd1428e834b35d7493354723543b28cc11dc14d1ce19b685f6e68e07c05ec7"},
|
||||
{file = "ruff-0.9.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5ee162652869120ad260670706f3cd36cd3f32b0c651f02b6da142652c54941d"},
|
||||
{file = "ruff-0.9.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3aa0f6b75082c9be1ec5a1db78c6d4b02e2375c3068438241dc19c7c306cc61a"},
|
||||
{file = "ruff-0.9.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:584cc66e89fb5f80f84b05133dd677a17cdd86901d6479712c96597a3f28e7fe"},
|
||||
{file = "ruff-0.9.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abf3369325761a35aba75cd5c55ba1b5eb17d772f12ab168fbfac54be85cf18c"},
|
||||
{file = "ruff-0.9.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:3403a53a32a90ce929aa2f758542aca9234befa133e29f4933dcef28a24317be"},
|
||||
{file = "ruff-0.9.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:18454e7fa4e4d72cffe28a37cf6a73cb2594f81ec9f4eca31a0aaa9ccdfb1590"},
|
||||
{file = "ruff-0.9.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fadfe2c88724c9617339f62319ed40dcdadadf2888d5afb88bf3adee7b35bfb"},
|
||||
{file = "ruff-0.9.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6df104d08c442a1aabcfd254279b8cc1e2cbf41a605aa3e26610ba1ec4acf0b0"},
|
||||
{file = "ruff-0.9.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d7c62939daf5b2a15af48abbd23bea1efdd38c312d6e7c4cedf5a24e03207e17"},
|
||||
{file = "ruff-0.9.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:9494ba82a37a4b81b6a798076e4a3251c13243fc37967e998efe4cce58c8a8d1"},
|
||||
{file = "ruff-0.9.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:4efd7a96ed6d36ef011ae798bf794c5501a514be369296c672dab7921087fa57"},
|
||||
{file = "ruff-0.9.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:ab90a7944c5a1296f3ecb08d1cbf8c2da34c7e68114b1271a431a3ad30cb660e"},
|
||||
{file = "ruff-0.9.9-py3-none-win32.whl", hash = "sha256:6b4c376d929c25ecd6d87e182a230fa4377b8e5125a4ff52d506ee8c087153c1"},
|
||||
{file = "ruff-0.9.9-py3-none-win_amd64.whl", hash = "sha256:837982ea24091d4c1700ddb2f63b7070e5baec508e43b01de013dc7eff974ff1"},
|
||||
{file = "ruff-0.9.9-py3-none-win_arm64.whl", hash = "sha256:3ac78f127517209fe6d96ab00f3ba97cafe38718b23b1db3e96d8b2d39e37ddf"},
|
||||
{file = "ruff-0.9.9.tar.gz", hash = "sha256:0062ed13f22173e85f8f7056f9a24016e692efeea8704d1a5e8011b8aa850933"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3735,4 +3693,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
|
||||
[metadata]
|
||||
lock-version = "2.0"
|
||||
python-versions = ">=3.10,<4.0"
|
||||
content-hash = "b690d5fbd141da3947f4f1dc029aba1b95e7faafd723166f2c4bdc47a66c095e"
|
||||
content-hash = "271a6c2a76b1b6286e02b91489ffd0c42e92daf151ae932514f5416c7869f71d"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[tool.poetry]
|
||||
name = "machine-learning"
|
||||
version = "1.127.0"
|
||||
version = "1.128.0"
|
||||
description = ""
|
||||
authors = ["Hau Tran <alex.tran1502@gmail.com>"]
|
||||
readme = "README.md"
|
||||
@@ -47,6 +47,11 @@ optional = true
|
||||
[tool.poetry.group.cuda.dependencies]
|
||||
onnxruntime-gpu = {version = "^1.17.0", source = "cuda12"}
|
||||
|
||||
[tool.poetry.group.rocm]
|
||||
optional = true
|
||||
|
||||
[tool.poetry.group.rocm.dependencies]
|
||||
|
||||
[tool.poetry.group.openvino]
|
||||
optional = true
|
||||
|
||||
|
||||
176
machine-learning/rocm-PR19567.patch
Normal file
176
machine-learning/rocm-PR19567.patch
Normal file
@@ -0,0 +1,176 @@
|
||||
From a598a88db258f82a6e4bca75810921bd6bcee7e0 Mon Sep 17 00:00:00 2001
|
||||
From: David Nieto <dmnieto@gmail.com>
|
||||
Date: Sat, 17 Feb 2024 11:23:12 -0800
|
||||
Subject: [PATCH] Disable algo caching in ROCM EP
|
||||
|
||||
Similar to the work done by Liangxijun-1001 in
|
||||
https://github.com/apache/tvm/pull/16178 the ROCM spec mandates calling
|
||||
miopenFindConvolution*Algorithm() before using any Convolution API
|
||||
|
||||
This is the link to the porting guide describing this requirement
|
||||
https://rocmdocs.amd.com/projects/MIOpen/en/latest/MIOpen_Porting_Guide.html
|
||||
|
||||
Thus, this change disables the algo cache and enforces the official
|
||||
API semantics
|
||||
|
||||
Signed-off-by: David Nieto <dmnieto@gmail.com>
|
||||
---
|
||||
onnxruntime/core/providers/rocm/nn/conv.cc | 61 +++++++++----------
|
||||
onnxruntime/core/providers/rocm/nn/conv.h | 6 --
|
||||
.../core/providers/rocm/nn/conv_transpose.cc | 17 +++---
|
||||
3 files changed, 36 insertions(+), 48 deletions(-)
|
||||
|
||||
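Illustration (not part of the patch): the call order the MIOpen porting guide mandates, sketched in isolation in C++ to match the patch below. The handle, tensor descriptors, device buffers, and workspace are assumed to be created elsewhere, and error handling is omitted.

// 1) A Find call must precede use of the convolution API for each
//    shape/descriptor combination; it benchmarks candidates and returns
//    a ranked list of usable algorithms.
miopenConvAlgoPerf_t perf;
int returned_algo_count = 0;
miopenFindConvolutionForwardAlgorithm(
    handle, x_desc, x_data, w_desc, w_data, conv_desc, y_desc, y_data,
    1 /* requestedAlgoCount */, &returned_algo_count, &perf,
    workspace, workspace_size, false /* no exhaustive search */);

// 2) Run the convolution with the algorithm the search selected.
const float alpha = 1.0f, beta = 0.0f;
miopenConvolutionForward(
    handle, &alpha, x_desc, x_data, w_desc, w_data, conv_desc,
    perf.fwd_algo, &beta, y_desc, y_data, workspace, perf.memory);

Caching the perf result across calls, as the code removed below did, skips step 1 for previously seen shapes; the patch removes that cache to restore the mandated semantics.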
diff --git a/onnxruntime/core/providers/rocm/nn/conv.cc b/onnxruntime/core/providers/rocm/nn/conv.cc
index 6214ec7bc0ea..b08aceca48b1 100644
--- a/onnxruntime/core/providers/rocm/nn/conv.cc
+++ b/onnxruntime/core/providers/rocm/nn/conv.cc
@@ -125,10 +125,8 @@ Status Conv<T, NHWC>::UpdateState(OpKernelContext* context, bool bias_expected)
if (input_dims_changed)
s_.last_x_dims = gsl::make_span(x_dims);

- if (w_dims_changed) {
+ if (w_dims_changed)
s_.last_w_dims = gsl::make_span(w_dims);
- s_.cached_benchmark_fwd_results.clear();
- }

ORT_RETURN_IF_ERROR(conv_attrs_.ValidateInputShape(X->Shape(), W->Shape(), channels_last, channels_last));

@@ -277,35 +275,6 @@ Status Conv<T, NHWC>::UpdateState(OpKernelContext* context, bool bias_expected)
HIP_CALL_THROW(hipMalloc(&s_.b_zero, malloc_size));
HIP_CALL_THROW(hipMemsetAsync(s_.b_zero, 0, malloc_size, Stream(context)));
}
-
- if (!s_.cached_benchmark_fwd_results.contains(x_dims_miopen)) {
- miopenConvAlgoPerf_t perf;
- int algo_count = 1;
- const ROCMExecutionProvider* rocm_ep = static_cast<const ROCMExecutionProvider*>(this->Info().GetExecutionProvider());
- static constexpr int num_algos = MIOPEN_CONVOLUTION_FWD_ALGO_COUNT;
- size_t max_ws_size = rocm_ep->GetMiopenConvUseMaxWorkspace() ? GetMaxWorkspaceSize(GetMiopenHandle(context), s_, kAllAlgos, num_algos)
- : AlgoSearchWorkspaceSize;
- IAllocatorUniquePtr<void> algo_search_workspace = GetTransientScratchBuffer<void>(max_ws_size);
- MIOPEN_RETURN_IF_ERROR(miopenFindConvolutionForwardAlgorithm(
- GetMiopenHandle(context),
- s_.x_tensor,
- s_.x_data,
- s_.w_desc,
- s_.w_data,
- s_.conv_desc,
- s_.y_tensor,
- s_.y_data,
- 1, // requestedAlgoCount
- &algo_count, // returnedAlgoCount
- &perf,
- algo_search_workspace.get(),
- max_ws_size,
- false)); // Do not do exhaustive algo search.
- s_.cached_benchmark_fwd_results.insert(x_dims_miopen, {perf.fwd_algo, perf.memory});
- }
- const auto& perf = s_.cached_benchmark_fwd_results.at(x_dims_miopen);
- s_.fwd_algo = perf.fwd_algo;
- s_.workspace_bytes = perf.memory;
} else {
// set Y
s_.Y = context->Output(0, TensorShape(s_.y_dims));
@@ -319,6 +288,34 @@ Status Conv<T, NHWC>::UpdateState(OpKernelContext* context, bool bias_expected)
s_.y_data = reinterpret_cast<HipT*>(s_.Y->MutableData<T>());
}
}
+ {
+ /* FindConvolution must always be called by the runtime */
+ TensorShapeVector x_dims_miopen{x_dims.begin(), x_dims.end()};
+ miopenConvAlgoPerf_t perf;
+ int algo_count = 1;
+ const ROCMExecutionProvider* rocm_ep = static_cast<const ROCMExecutionProvider*>(this->Info().GetExecutionProvider());
+ static constexpr int num_algos = MIOPEN_CONVOLUTION_FWD_ALGO_COUNT;
+ size_t max_ws_size = rocm_ep->GetMiopenConvUseMaxWorkspace() ? GetMaxWorkspaceSize(GetMiopenHandle(context), s_, kAllAlgos, num_algos)
+ : AlgoSearchWorkspaceSize;
+ IAllocatorUniquePtr<void> algo_search_workspace = GetTransientScratchBuffer<void>(max_ws_size);
+ MIOPEN_RETURN_IF_ERROR(miopenFindConvolutionForwardAlgorithm(
+ GetMiopenHandle(context),
+ s_.x_tensor,
+ s_.x_data,
+ s_.w_desc,
+ s_.w_data,
+ s_.conv_desc,
+ s_.y_tensor,
+ s_.y_data,
+ 1, // requestedAlgoCount
+ &algo_count, // returnedAlgoCount
+ &perf,
+ algo_search_workspace.get(),
+ max_ws_size,
+ false)); // Do not do exhaustive algo search.
+ s_.fwd_algo = perf.fwd_algo;
+ s_.workspace_bytes = perf.memory;
+ }
return Status::OK();
}

diff --git a/onnxruntime/core/providers/rocm/nn/conv.h b/onnxruntime/core/providers/rocm/nn/conv.h
index bc9846203e57..d54218f25854 100644
--- a/onnxruntime/core/providers/rocm/nn/conv.h
+++ b/onnxruntime/core/providers/rocm/nn/conv.h
@@ -108,9 +108,6 @@ class lru_unordered_map {
list_type lru_list_;
};

-// cached miopen descriptors
-constexpr size_t MAX_CACHED_ALGO_PERF_RESULTS = 10000;
-
template <typename AlgoPerfType>
struct MiopenConvState {
// if x/w dims changed, update algo and miopenTensors
@@ -148,9 +145,6 @@ struct MiopenConvState {
decltype(AlgoPerfType().memory) memory;
};

- lru_unordered_map<TensorShapeVector, PerfFwdResultParams, vector_hash> cached_benchmark_fwd_results{MAX_CACHED_ALGO_PERF_RESULTS};
- lru_unordered_map<TensorShapeVector, PerfBwdResultParams, vector_hash> cached_benchmark_bwd_results{MAX_CACHED_ALGO_PERF_RESULTS};
-
// Some properties needed to support asymmetric padded Conv nodes
bool post_slicing_required;
TensorShapeVector slice_starts;
diff --git a/onnxruntime/core/providers/rocm/nn/conv_transpose.cc b/onnxruntime/core/providers/rocm/nn/conv_transpose.cc
index 7447113fdf84..45ed4c8ac37a 100644
--- a/onnxruntime/core/providers/rocm/nn/conv_transpose.cc
+++ b/onnxruntime/core/providers/rocm/nn/conv_transpose.cc
@@ -76,7 +76,6 @@ Status ConvTranspose<T, NHWC>::DoConvTranspose(OpKernelContext* context, bool dy

if (w_dims_changed) {
s_.last_w_dims = gsl::make_span(w_dims);
- s_.cached_benchmark_bwd_results.clear();
}

ConvTransposeAttributes::Prepare p;
@@ -127,12 +126,13 @@ Status ConvTranspose<T, NHWC>::DoConvTranspose(OpKernelContext* context, bool dy

y_data = reinterpret_cast<HipT*>(p.Y->MutableData<T>());

- if (!s_.cached_benchmark_bwd_results.contains(x_dims)) {
- IAllocatorUniquePtr<void> algo_search_workspace = GetScratchBuffer<void>(AlgoSearchWorkspaceSize, context->GetComputeStream());
-
- miopenConvAlgoPerf_t perf;
- int algo_count = 1;
- MIOPEN_RETURN_IF_ERROR(miopenFindConvolutionBackwardDataAlgorithm(
+ }
+ // The following is required before calling convolution, we cannot cache the results
+ {
+ IAllocatorUniquePtr<void> algo_search_workspace = GetScratchBuffer<void>(AlgoSearchWorkspaceSize, context->GetComputeStream());
+ miopenConvAlgoPerf_t perf;
+ int algo_count = 1;
+ MIOPEN_RETURN_IF_ERROR(miopenFindConvolutionBackwardDataAlgorithm(
GetMiopenHandle(context),
s_.x_tensor,
x_data,
@@ -147,10 +147,7 @@ Status ConvTranspose<T, NHWC>::DoConvTranspose(OpKernelContext* context, bool dy
algo_search_workspace.get(),
AlgoSearchWorkspaceSize,
false));
- s_.cached_benchmark_bwd_results.insert(x_dims, {perf.bwd_data_algo, perf.memory});
- }

- const auto& perf = s_.cached_benchmark_bwd_results.at(x_dims);
s_.bwd_data_algo = perf.bwd_data_algo;
s_.workspace_bytes = perf.memory;
}
@@ -67,26 +67,27 @@ custom_lint:
- lib/entities/*.entity.dart
- lib/repositories/{album,asset,backup,database,etag,exif_info,user,timeline,partner}.repository.dart
- lib/infrastructure/entities/*.entity.dart
- lib/infrastructure/repositories/{store,db}.repository.dart
- lib/infrastructure/repositories/{store,db,log}.repository.dart
- lib/providers/infrastructure/db.provider.dart
# acceptable exceptions for the time being (until Isar is fully replaced)
- lib/providers/app_life_cycle.provider.dart
- integration_test/test_utils/general_helper.dart
- lib/main.dart
- lib/pages/album/album_asset_selection.page.dart
- lib/routing/router.dart
- lib/services/immich_logger.service.dart # not really a service... more a util
- lib/utils/{db,migration}.dart
- lib/utils/bootstrap.dart
- lib/widgets/asset_grid/asset_grid_data_structure.dart
- test/**.dart
# refactor the remaining providers
- lib/providers/{db,user}.provider.dart
- lib/providers/backup/backup.provider.dart
- lib/providers/db.provider.dart

- import_rule_openapi:
message: openapi must only be used through ApiRepositories
restrict: package:openapi
allowed:
# requried / wanted
# required / wanted
- lib/repositories/*_api.repository.dart
# acceptable exceptions for the time being
- lib/entities/{album,asset,exif_info,user}.entity.dart # to convert DTOs to entities

@@ -35,8 +35,8 @@ platform :android do
task: 'bundle',
build_type: 'Release',
properties: {
"android.injected.version.code" => 185,
"android.injected.version.name" => "1.127.0",
"android.injected.version.code" => 186,
"android.injected.version.name" => "1.128.0",
}
)
upload_to_play_store(skip_upload_apk: true, skip_upload_images: true, skip_upload_screenshots: true, aab: '../build/app/outputs/bundle/release/app-release.aab')

@@ -108,7 +108,7 @@
"backup_info_card_assets": "elements",
"backup_manual_cancelled": "Cancelled",
"backup_manual_failed": "Failed",
"backup_manual_in_progress": "Upload already in progress. Try after sometime",
"backup_manual_in_progress": "Upload already in progress. Try after some time",
"backup_manual_success": "Success",
"backup_manual_title": "Upload status",
"cache_settings_album_thumbnails": "Library page thumbnails ({} assets)",

@@ -133,7 +133,7 @@
"backup_info_card_assets": "assets",
"backup_manual_cancelled": "Cancelled",
"backup_manual_failed": "Failed",
"backup_manual_in_progress": "Upload already in progress. Try after sometime",
"backup_manual_in_progress": "Upload already in progress. Try after some time",
"backup_manual_success": "Success",
"backup_manual_title": "Upload status",
"backup_options_page_title": "Backup options",
@@ -678,4 +678,4 @@
"viewer_unstack": "Un-Stack",
"wifi_name": "WiFi Name",
"your_wifi_name": "Your WiFi name"
}
}

@@ -133,7 +133,7 @@
"backup_info_card_assets": "assets",
"backup_manual_cancelled": "Cancelled",
"backup_manual_failed": "Failed",
"backup_manual_in_progress": "Upload already in progress. Try after sometime",
"backup_manual_in_progress": "Upload already in progress. Try after some time",
"backup_manual_success": "Success",
"backup_manual_title": "Upload status",
"backup_options_page_title": "Backup options",
@@ -678,4 +678,4 @@
"viewer_unstack": "Un-Stack",
"wifi_name": "WiFi Name",
"your_wifi_name": "Your WiFi name"
}
}

@@ -133,7 +133,7 @@
"backup_info_card_assets": "assets",
"backup_manual_cancelled": "Cancelled",
"backup_manual_failed": "Failed",
"backup_manual_in_progress": "Upload already in progress. Try after sometime",
"backup_manual_in_progress": "Upload already in progress. Try after some time",
"backup_manual_success": "Success",
"backup_manual_title": "Upload status",
"backup_options_page_title": "Backup options",

@@ -133,7 +133,7 @@
"backup_info_card_assets": "assets",
"backup_manual_cancelled": "Cancelled",
"backup_manual_failed": "Failed",
"backup_manual_in_progress": "Upload already in progress. Try after sometime",
"backup_manual_in_progress": "Upload already in progress. Try after some time",
"backup_manual_success": "Success",
"backup_manual_title": "Upload status",
"backup_options_page_title": "Backup options",
@@ -678,4 +678,4 @@
"viewer_unstack": "स्टैक रद्द करें",
"wifi_name": "WiFi Name",
"your_wifi_name": "Your WiFi name"
}
}

@@ -133,7 +133,7 @@
"backup_info_card_assets": "assets",
"backup_manual_cancelled": "Cancelled",
"backup_manual_failed": "Failed",
"backup_manual_in_progress": "Upload already in progress. Try after sometime",
"backup_manual_in_progress": "Upload already in progress. Try after some time",
"backup_manual_success": "Success",
"backup_manual_title": "Upload status",
"backup_options_page_title": "Backup options",
@@ -678,4 +678,4 @@
"viewer_unstack": "Un-Stack",
"wifi_name": "WiFi Name",
"your_wifi_name": "Your WiFi name"
}
}

@@ -133,7 +133,7 @@
"backup_info_card_assets": "assets",
"backup_manual_cancelled": "Cancelled",
"backup_manual_failed": "Failed",
"backup_manual_in_progress": "Upload already in progress. Try after sometime",
"backup_manual_in_progress": "Upload already in progress. Try after some time",
"backup_manual_success": "Success",
"backup_manual_title": "Upload status",
"backup_options_page_title": "Backup options",
@@ -678,4 +678,4 @@
"viewer_unstack": "Un-Stack",
"wifi_name": "WiFi Name",
"your_wifi_name": "Your WiFi name"
}
}

@@ -133,7 +133,7 @@
"backup_info_card_assets": "assets",
"backup_manual_cancelled": "Cancelled",
"backup_manual_failed": "Failed",
"backup_manual_in_progress": "Upload already in progress. Try after sometime",
"backup_manual_in_progress": "Upload already in progress. Try after some time",
"backup_manual_success": "Success",
"backup_manual_title": "Upload status",
"backup_options_page_title": "Backup options",
@@ -678,4 +678,4 @@
"viewer_unstack": "Un-Stack",
"wifi_name": "WiFi Name",
"your_wifi_name": "Your WiFi name"
}
}

@@ -133,7 +133,7 @@
"backup_info_card_assets": "zapisi",
"backup_manual_cancelled": "Cancelled",
"backup_manual_failed": "Failed",
"backup_manual_in_progress": "Upload already in progress. Try after sometime",
"backup_manual_in_progress": "Upload already in progress. Try after some time",
"backup_manual_success": "Success",
"backup_manual_title": "Upload status",
"backup_options_page_title": "Backup options",
@@ -678,4 +678,4 @@
"viewer_unstack": "Un-Stack",
"wifi_name": "WiFi Name",
"your_wifi_name": "Your WiFi name"
}
}

@@ -133,7 +133,7 @@
"backup_info_card_assets": "assets",
"backup_manual_cancelled": "Cancelled",
"backup_manual_failed": "Failed",
"backup_manual_in_progress": "Upload already in progress. Try after sometime",
"backup_manual_in_progress": "Upload already in progress. Try after some time",
"backup_manual_success": "Success",
"backup_manual_title": "Upload status",
"backup_options_page_title": "Backup options",
@@ -678,4 +678,4 @@
"viewer_unstack": "Un-Stack",
"wifi_name": "WiFi Name",
"your_wifi_name": "Your WiFi name"
}
}

@@ -7,8 +7,8 @@ import 'package:immich_mobile/entities/store.entity.dart';
import 'package:immich_mobile/main.dart' as app;
import 'package:immich_mobile/providers/db.provider.dart';
import 'package:immich_mobile/providers/infrastructure/db.provider.dart';
import 'package:immich_mobile/utils/bootstrap.dart';
import 'package:integration_test/integration_test.dart';
import 'package:isar/isar.dart';
// ignore: depend_on_referenced_packages
import 'package:meta/meta.dart';

@@ -39,7 +39,8 @@ class ImmichTestHelper {
static Future<void> loadApp(WidgetTester tester) async {
await EasyLocalization.ensureInitialized();
// Clear all data from Isar (reuse existing instance if available)
final db = Isar.getInstance() ?? await app.loadDb();
final db = await Bootstrap.initIsar();
await Bootstrap.initDomain(db);
await Store.clear();
await db.writeTxn(() => db.clear());
// Load main Widget

@@ -541,7 +541,7 @@
CODE_SIGN_ENTITLEMENTS = Runner/RunnerProfile.entitlements;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 194;
CURRENT_PROJECT_VERSION = 196;
CUSTOM_GROUP_ID = group.app.immich.share;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_BITCODE = NO;
@@ -685,7 +685,7 @@
CODE_SIGN_ENTITLEMENTS = Runner/Runner.entitlements;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 194;
CURRENT_PROJECT_VERSION = 196;
CUSTOM_GROUP_ID = group.app.immich.share;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_BITCODE = NO;
@@ -715,7 +715,7 @@
CODE_SIGN_ENTITLEMENTS = Runner/Runner.entitlements;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 194;
CURRENT_PROJECT_VERSION = 196;
CUSTOM_GROUP_ID = group.app.immich.share;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_BITCODE = NO;
@@ -748,7 +748,7 @@
CODE_SIGN_ENTITLEMENTS = ShareExtension/ShareExtension.entitlements;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 194;
CURRENT_PROJECT_VERSION = 196;
CUSTOM_GROUP_ID = group.app.immich.share;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
@@ -791,7 +791,7 @@
CODE_SIGN_ENTITLEMENTS = ShareExtension/ShareExtension.entitlements;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 194;
CURRENT_PROJECT_VERSION = 196;
CUSTOM_GROUP_ID = group.app.immich.share;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
@@ -831,7 +831,7 @@
CODE_SIGN_ENTITLEMENTS = ShareExtension/ShareExtension.entitlements;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 194;
CURRENT_PROJECT_VERSION = 196;
CUSTOM_GROUP_ID = group.app.immich.share;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_USER_SCRIPT_SANDBOXING = YES;

@@ -18,13 +18,6 @@ import UIKit
UNUserNotificationCenter.current().delegate = self as? UNUserNotificationCenterDelegate
}

do {
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
try AVAudioSession.sharedInstance().setActive(true)
} catch {
print("Failed to set audio session category. Error: \(error)")
}

GeneratedPluginRegistrant.register(with: self)
BackgroundServicePlugin.registerBackgroundProcessing()

@@ -160,7 +160,7 @@ class BackgroundServicePlugin: NSObject, FlutterPlugin {
}
}

// Called by the flutter code when enabled so that we can turn on the backround services
// Called by the flutter code when enabled so that we can turn on the background services
// and save the callback information to communicate on this method channel
public func handleBackgroundEnable(call: FlutterMethodCall, result: FlutterResult) {

@@ -249,7 +249,7 @@ class BackgroundServicePlugin: NSObject, FlutterPlugin {
result(true)
}

// Returns the number of currently scheduled background processes to Flutter, striclty
// Returns the number of currently scheduled background processes to Flutter, strictly
// for debugging
func handleNumberOfProcesses(call: FlutterMethodCall, result: @escaping FlutterResult) {
BGTaskScheduler.shared.getPendingTaskRequests { requests in
@@ -355,7 +355,7 @@ class BackgroundServicePlugin: NSObject, FlutterPlugin {
let isExpensive = wifiMonitor.currentPath.isExpensive
if (isExpensive) {
// The network is expensive and we have required Wi-Fi
// Therfore, we will simply complete the task without
// Therefore, we will simply complete the task without
// running it
task.setTaskCompleted(success: true)
return

@@ -78,7 +78,7 @@
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>1.126.1</string>
<string>1.128.0</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleURLTypes</key>
@@ -93,7 +93,7 @@
</dict>
</array>
<key>CFBundleVersion</key>
<string>194</string>
<string>196</string>
<key>FLTEnableImpeller</key>
<true/>
<key>ITSAppUsesNonExemptEncryption</key>

@@ -19,7 +19,7 @@ platform :ios do
desc "iOS Release"
lane :release do
increment_version_number(
version_number: "1.127.0"
version_number: "1.128.0"
)
increment_build_number(
build_number: latest_testflight_build_number + 1,

@@ -1,3 +1,6 @@
const int noDbId = -9223372036854775808; // from Isar
const double downloadCompleted = -1;
const double downloadFailed = -2;

// Number of log entries to retain on app start
const int kLogTruncateLimit = 250;

16
mobile/lib/domain/interfaces/log.interface.dart
Normal file
@@ -0,0 +1,16 @@
import 'dart:async';

import 'package:immich_mobile/domain/models/log.model.dart';

abstract interface class ILogRepository {
Future<bool> insert(LogMessage log);

Future<bool> insertAll(Iterable<LogMessage> logs);

Future<List<LogMessage>> getAll();

Future<bool> deleteAll();

/// Truncates the logs to the most recent [limit]. Defaults to recent 250 logs
Future<void> truncate({int limit = 250});
}
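ILogRepository keeps the domain layer storage-agnostic: the Isar-backed implementation appears further down in this diff, but any backend satisfying the contract works. As a hedged illustration (not part of the branch), an in-memory implementation suitable for unit tests might look like:

import 'package:immich_mobile/domain/interfaces/log.interface.dart';
import 'package:immich_mobile/domain/models/log.model.dart';

// Hypothetical test double; name and behaviour are assumptions, only the
// interface it implements comes from the diff above.
class InMemoryLogRepository implements ILogRepository {
  final List<LogMessage> _logs = [];

  @override
  Future<bool> insert(LogMessage log) async {
    _logs.add(log);
    return true;
  }

  @override
  Future<bool> insertAll(Iterable<LogMessage> logs) async {
    _logs.addAll(logs);
    return true;
  }

  @override
  Future<List<LogMessage>> getAll() async => List.unmodifiable(_logs);

  @override
  Future<bool> deleteAll() async {
    _logs.clear();
    return true;
  }

  @override
  Future<void> truncate({int limit = 250}) async {
    // Keep only the most recent [limit] entries, mirroring the contract above.
    if (_logs.length > limit) {
      _logs.removeRange(0, _logs.length - limit);
    }
  }
}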
65
mobile/lib/domain/models/log.model.dart
Normal file
@@ -0,0 +1,65 @@
/// Log levels according to dart logging [Level]
enum LogLevel {
all,
finest,
finer,
fine,
config,
info,
warning,
severe,
shout,
off,
}

class LogMessage {
final String message;
final LogLevel level;
final DateTime createdAt;
final String? logger;
final String? error;
final String? stack;

const LogMessage({
required this.message,
required this.level,
required this.createdAt,
this.logger,
this.error,
this.stack,
});

@override
bool operator ==(covariant LogMessage other) {
if (identical(this, other)) return true;

return other.message == message &&
other.level == level &&
other.createdAt == createdAt &&
other.logger == logger &&
other.error == error &&
other.stack == stack;
}

@override
int get hashCode {
return message.hashCode ^
level.hashCode ^
createdAt.hashCode ^
logger.hashCode ^
error.hashCode ^
stack.hashCode;
}

@override
String toString() {
return '''LogMessage: {
message: $message,
level: $level,
createdAt: $createdAt,
logger: ${logger ?? '<NA>'},
error: ${error ?? '<NA>'},
stack: ${stack ?? '<NA>'},
}''';
}
}
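Because `==` and `hashCode` are overridden field by field, two independently constructed messages with the same payload compare equal, which lets tests and the UI treat LogMessage as a value object. A small hypothetical example:

// Values here are invented for illustration.
final a = LogMessage(message: 'sync done', level: LogLevel.info, createdAt: DateTime.utc(2025, 1, 1));
final b = LogMessage(message: 'sync done', level: LogLevel.info, createdAt: DateTime.utc(2025, 1, 1));
assert(a == b); // structural equality, not identity
assert(a.hashCode == b.hashCode); // consistent with ==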
159
mobile/lib/domain/services/log.service.dart
Normal file
@@ -0,0 +1,159 @@
import 'dart:async';

import 'package:flutter/foundation.dart';
import 'package:immich_mobile/constants/constants.dart';
import 'package:immich_mobile/domain/interfaces/log.interface.dart';
import 'package:immich_mobile/domain/interfaces/store.interface.dart';
import 'package:immich_mobile/domain/models/log.model.dart';
import 'package:immich_mobile/domain/models/store.model.dart';
import 'package:logging/logging.dart';

class LogService {
final ILogRepository _logRepository;
final IStoreRepository _storeRepository;

final List<LogMessage> _msgBuffer = [];

/// Whether to buffer logs in memory before writing to the database.
/// This is useful when logging in quick succession, as it increases performance
/// and reduces NAND wear. However, it may cause the logs to be lost in case of a crash / in isolates.
final bool _shouldBuffer;
Timer? _flushTimer;

late final StreamSubscription<LogRecord> _logSubscription;

LogService._(
this._logRepository,
this._storeRepository,
this._shouldBuffer,
) {
// Listen to log messages and write them to the database
_logSubscription = Logger.root.onRecord.listen(_writeLogToDatabase);
}

static LogService? _instance;
static LogService get I {
if (_instance == null) {
throw const LoggerUnInitializedException();
}
return _instance!;
}

static Future<LogService> init({
required ILogRepository logRepository,
required IStoreRepository storeRepository,
bool shouldBuffer = true,
}) async {
if (_instance != null) {
return _instance!;
}
_instance = await create(
logRepository: logRepository,
storeRepository: storeRepository,
shouldBuffer: shouldBuffer,
);
return _instance!;
}

static Future<LogService> create({
required ILogRepository logRepository,
required IStoreRepository storeRepository,
bool shouldBuffer = true,
}) async {
final instance = LogService._(logRepository, storeRepository, shouldBuffer);
// Truncate logs to 250
await logRepository.truncate(limit: kLogTruncateLimit);
// Get log level from store
final level = await instance._storeRepository.tryGet(StoreKey.logLevel);
if (level != null) {
Logger.root.level = Level.LEVELS.elementAtOrNull(level) ?? Level.INFO;
}
return instance;
}

Future<void> setlogLevel(LogLevel level) async {
await _storeRepository.insert(StoreKey.logLevel, level.index);
Logger.root.level = level.toLevel();
}

Future<List<LogMessage>> getMessages() async {
final logsFromDb = await _logRepository.getAll();
if (_msgBuffer.isNotEmpty) {
return [..._msgBuffer.reversed, ...logsFromDb];
}
return logsFromDb;
}

Future<void> clearLogs() async {
_flushTimer?.cancel();
_flushTimer = null;
_msgBuffer.clear();
await _logRepository.deleteAll();
}

/// Flush pending log messages to persistent storage
void flush() {
if (_flushTimer == null) {
return;
}
_flushTimer!.cancel();
// TODO: Re-enable this after moving to sqlite - #16504
// await _flushBufferToDatabase();
}

Future<void> dispose() {
_flushTimer?.cancel();
_logSubscription.cancel();
return _flushBufferToDatabase();
}

void _writeLogToDatabase(LogRecord r) {
if (kDebugMode) {
debugPrint('[${r.level.name}] [${r.time}] ${r.message}');
}

final record = LogMessage(
message: r.message,
level: r.level.toLogLevel(),
createdAt: r.time,
logger: r.loggerName,
error: r.error?.toString(),
stack: r.stackTrace?.toString(),
);

if (_shouldBuffer) {
_msgBuffer.add(record);
_flushTimer ??= Timer(
const Duration(seconds: 5),
() => unawaited(_flushBufferToDatabase()),
);
} else {
unawaited(_logRepository.insert(record));
}
}

Future<void> _flushBufferToDatabase() async {
_flushTimer = null;
final buffer = [..._msgBuffer];
_msgBuffer.clear();
await _logRepository.insertAll(buffer);
}
}

class LoggerUnInitializedException implements Exception {
const LoggerUnInitializedException();

@override
String toString() => 'Logger is not initialized. Call init()';
}

/// Log levels according to dart logging [Level]
extension LevelDomainToInfraExtension on Level {
LogLevel toLogLevel() =>
LogLevel.values.elementAtOrNull(Level.LEVELS.indexOf(this)) ??
LogLevel.info;
}

extension on LogLevel {
Level toLevel() => Level.LEVELS.elementAtOrNull(index) ?? Level.INFO;
}
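Taken together: init() wires the singleton, every Logger record is buffered for up to five seconds, and dispose() flushes whatever is left. A plausible wiring sketch (repository classes as defined elsewhere in this diff; the Isar handle `db` comes from the bootstrap code shown later):

await LogService.init(
  logRepository: IsarLogRepository(db),
  storeRepository: IsarStoreRepository(db),
);

// Any logger in the app now feeds the service via Logger.root.onRecord.
Logger('Backup').warning('upload retry scheduled'); // buffered in memory
// ...up to five seconds later the buffer is written in one insertAll() batch.

await LogService.I.dispose(); // cancels the timer and flushes the buffer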
@@ -75,7 +75,7 @@ class StoreService {
}

/// Asynchronously stores the value in the DB and synchronously in the cache
Future<void> put<T>(StoreKey<T> key, T value) async {
Future<void> put<U extends StoreKey<T>, T>(U key, T value) async {
if (_cache[key.id] == value) return;
await _storeRepository.insert(key, value);
_cache[key.id] = value;
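The new `put<U extends StoreKey<T>, T>` signature lets the compiler infer `T` from the concrete key subtype rather than from the value, so mismatched value types fail at compile time. A hedged sketch of the effect (assuming `store` is a StoreService instance and StoreKey.logLevel is a StoreKey<int>, as its use in LogService above suggests):

await store.put(StoreKey.logLevel, LogLevel.warning.index); // T inferred as int from the key
// await store.put(StoreKey.logLevel, 'warning'); // would no longer type-check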
@@ -1,50 +0,0 @@
// ignore_for_file: constant_identifier_names

import 'package:isar/isar.dart';
import 'package:logging/logging.dart';

part 'logger_message.entity.g.dart';

@Collection(inheritance: false)
class LoggerMessage {
Id id = Isar.autoIncrement;
String message;
String? details;
@Enumerated(EnumType.ordinal)
LogLevel level = LogLevel.INFO;
DateTime createdAt;
String? context1;
String? context2;

LoggerMessage({
required this.message,
required this.details,
required this.level,
required this.createdAt,
required this.context1,
required this.context2,
});

@override
String toString() {
return 'InAppLoggerMessage(message: $message, level: $level, createdAt: $createdAt)';
}
}

/// Log levels according to dart logging [Level]
enum LogLevel {
ALL,
FINEST,
FINER,
FINE,
CONFIG,
INFO,
WARNING,
SEVERE,
SHOUT,
OFF,
}

extension LevelExtension on Level {
LogLevel toLogLevel() => LogLevel.values[Level.LEVELS.indexOf(this)];
}
47
mobile/lib/infrastructure/entities/log.entity.dart
Normal file
@@ -0,0 +1,47 @@
import 'package:immich_mobile/domain/models/log.model.dart';
import 'package:isar/isar.dart';

part 'log.entity.g.dart';

@Collection(inheritance: false)
class LoggerMessage {
final Id id = Isar.autoIncrement;
final String message;
final String? details;
@Enumerated(EnumType.ordinal)
final LogLevel level;
final DateTime createdAt;
final String? context1;
final String? context2;

const LoggerMessage({
required this.message,
required this.details,
this.level = LogLevel.info,
required this.createdAt,
required this.context1,
required this.context2,
});

LogMessage toDto() {
return LogMessage(
message: message,
level: level,
createdAt: createdAt,
logger: context1,
error: details,
stack: context2,
);
}

static LoggerMessage fromDto(LogMessage log) {
return LoggerMessage(
message: log.message,
details: log.error,
level: log.level,
createdAt: log.createdAt,
context1: log.logger,
context2: log.stack,
);
}
}
@@ -1,6 +1,6 @@
// GENERATED CODE - DO NOT MODIFY BY HAND

part of 'logger_message.entity.dart';
part of 'log.entity.dart';

// **************************************************************************
// IsarCollectionGenerator
@@ -117,10 +117,9 @@ LoggerMessage _loggerMessageDeserialize(
createdAt: reader.readDateTime(offsets[2]),
details: reader.readStringOrNull(offsets[3]),
level: _LoggerMessagelevelValueEnumMap[reader.readByteOrNull(offsets[4])] ??
LogLevel.ALL,
LogLevel.info,
message: reader.readString(offsets[5]),
);
object.id = id;
return object;
}

@@ -141,7 +140,7 @@ P _loggerMessageDeserializeProp<P>(
return (reader.readStringOrNull(offset)) as P;
case 4:
return (_LoggerMessagelevelValueEnumMap[reader.readByteOrNull(offset)] ??
LogLevel.ALL) as P;
LogLevel.info) as P;
case 5:
return (reader.readString(offset)) as P;
default:
@@ -150,28 +149,28 @@ P _loggerMessageDeserializeProp<P>(
}

const _LoggerMessagelevelEnumValueMap = {
'ALL': 0,
'FINEST': 1,
'FINER': 2,
'FINE': 3,
'CONFIG': 4,
'INFO': 5,
'WARNING': 6,
'SEVERE': 7,
'SHOUT': 8,
'OFF': 9,
'all': 0,
'finest': 1,
'finer': 2,
'fine': 3,
'config': 4,
'info': 5,
'warning': 6,
'severe': 7,
'shout': 8,
'off': 9,
};
const _LoggerMessagelevelValueEnumMap = {
0: LogLevel.ALL,
1: LogLevel.FINEST,
2: LogLevel.FINER,
3: LogLevel.FINE,
4: LogLevel.CONFIG,
5: LogLevel.INFO,
6: LogLevel.WARNING,
7: LogLevel.SEVERE,
8: LogLevel.SHOUT,
9: LogLevel.OFF,
0: LogLevel.all,
1: LogLevel.finest,
2: LogLevel.finer,
3: LogLevel.fine,
4: LogLevel.config,
5: LogLevel.info,
6: LogLevel.warning,
7: LogLevel.severe,
8: LogLevel.shout,
9: LogLevel.off,
};

Id _loggerMessageGetId(LoggerMessage object) {
@@ -183,9 +182,7 @@ List<IsarLinkBase<dynamic>> _loggerMessageGetLinks(LoggerMessage object) {
}

void _loggerMessageAttach(
IsarCollection<dynamic> col, Id id, LoggerMessage object) {
object.id = id;
}
IsarCollection<dynamic> col, Id id, LoggerMessage object) {}

extension LoggerMessageQueryWhereSort
on QueryBuilder<LoggerMessage, LoggerMessage, QWhere> {
@@ -5,8 +5,9 @@ part 'store.entity.g.dart';
/// Internal class for `Store`, do not use elsewhere.
@Collection(inheritance: false)
class StoreValue {
const StoreValue(this.id, {this.intValue, this.strValue});
final Id id;
final int? intValue;
final String? strValue;

const StoreValue(this.id, {this.intValue, this.strValue});
}
53
mobile/lib/infrastructure/repositories/log.repository.dart
Normal file
@@ -0,0 +1,53 @@
import 'package:immich_mobile/domain/interfaces/log.interface.dart';
import 'package:immich_mobile/domain/models/log.model.dart';
import 'package:immich_mobile/infrastructure/entities/log.entity.dart';
import 'package:immich_mobile/infrastructure/repositories/db.repository.dart';
import 'package:isar/isar.dart';

class IsarLogRepository extends IsarDatabaseRepository
implements ILogRepository {
final Isar _db;
const IsarLogRepository(super.db) : _db = db;

@override
Future<bool> deleteAll() async {
await transaction(() async => await _db.loggerMessages.clear());
return true;
}

@override
Future<List<LogMessage>> getAll() async {
final logs =
await _db.loggerMessages.where().sortByCreatedAtDesc().findAll();
return logs.map((l) => l.toDto()).toList();
}

@override
Future<bool> insert(LogMessage log) async {
final logEntity = LoggerMessage.fromDto(log);
await transaction(() async {
await _db.loggerMessages.put(logEntity);
});
return true;
}

@override
Future<bool> insertAll(Iterable<LogMessage> logs) async {
await transaction(() async {
final logEntities =
logs.map((log) => LoggerMessage.fromDto(log)).toList();
await _db.loggerMessages.putAll(logEntities);
});
return true;
}

@override
Future<void> truncate({int limit = 250}) async {
await transaction(() async {
final count = await _db.loggerMessages.count();
if (count <= limit) return;
final toRemove = count - limit;
await _db.loggerMessages.where().limit(toRemove).deleteAll();
});
}
}
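Note on truncate(): an unfiltered where() in Isar iterates in ascending auto-increment id order, so deleting the first `count - limit` matches removes the oldest rows. Worked through with hypothetical numbers:

// With 1000 stored logs and the default limit of 250:
// count = 1000, toRemove = 750 -> the 750 lowest ids (oldest entries) are deleted.
await IsarLogRepository(db).truncate(limit: 250);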
@@ -1,7 +1,7 @@
import 'package:immich_mobile/entities/backup_album.entity.dart';
import 'package:immich_mobile/interfaces/database.interface.dart';

abstract interface class IBackupRepository implements IDatabaseRepository {
abstract interface class IBackupAlbumRepository implements IDatabaseRepository {
Future<List<BackupAlbum>> getAll({BackupAlbumSort? sort});

Future<List<String>> getIdsBySelection(BackupSelection backup);
@@ -3,6 +3,10 @@ import 'package:immich_mobile/entities/asset.entity.dart';
import 'package:immich_mobile/widgets/asset_grid/asset_grid_data_structure.dart';

abstract class ITimelineRepository {
Future<List<int>> getTimelineUserIds(int id);

Stream<List<int>> watchTimelineUsers(int id);

Stream<RenderList> watchArchiveTimeline(int userId);
Stream<RenderList> watchFavoriteTimeline(int userId);
Stream<RenderList> watchTrashTimeline(int userId);

@@ -10,20 +10,7 @@ import 'package:flutter/services.dart';
import 'package:flutter_displaymode/flutter_displaymode.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/constants/locales.dart';
import 'package:immich_mobile/domain/services/store.service.dart';
import 'package:immich_mobile/entities/album.entity.dart';
import 'package:immich_mobile/entities/android_device_asset.entity.dart';
import 'package:immich_mobile/entities/asset.entity.dart';
import 'package:immich_mobile/entities/backup_album.entity.dart';
import 'package:immich_mobile/entities/duplicated_asset.entity.dart';
import 'package:immich_mobile/entities/etag.entity.dart';
import 'package:immich_mobile/entities/exif_info.entity.dart';
import 'package:immich_mobile/entities/ios_device_asset.entity.dart';
import 'package:immich_mobile/entities/logger_message.entity.dart';
import 'package:immich_mobile/entities/user.entity.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/infrastructure/entities/store.entity.dart';
import 'package:immich_mobile/infrastructure/repositories/store.repository.dart';
import 'package:immich_mobile/providers/app_life_cycle.provider.dart';
import 'package:immich_mobile/providers/asset_viewer/share_intent_upload.provider.dart';
import 'package:immich_mobile/providers/db.provider.dart';
@@ -33,23 +20,22 @@ import 'package:immich_mobile/providers/theme.provider.dart';
import 'package:immich_mobile/routing/router.dart';
import 'package:immich_mobile/routing/tab_navigation_observer.dart';
import 'package:immich_mobile/services/background.service.dart';
import 'package:immich_mobile/services/immich_logger.service.dart';
import 'package:immich_mobile/services/local_notification.service.dart';
import 'package:immich_mobile/theme/dynamic_theme.dart';
import 'package:immich_mobile/theme/theme_data.dart';
import 'package:immich_mobile/utils/bootstrap.dart';
import 'package:immich_mobile/utils/cache/widgets_binding.dart';
import 'package:immich_mobile/utils/download.dart';
import 'package:immich_mobile/utils/http_ssl_cert_override.dart';
import 'package:immich_mobile/utils/migration.dart';
import 'package:intl/date_symbol_data_local.dart';
import 'package:isar/isar.dart';
import 'package:logging/logging.dart';
import 'package:path_provider/path_provider.dart';
import 'package:timezone/data/latest.dart';

void main() async {
ImmichWidgetsBinding();
final db = await loadDb();
final db = await Bootstrap.initIsar();
await Bootstrap.initDomain(db);
await initApp();
await migrateDatabaseIfNeeded(db);
HttpOverrides.global = HttpSSLCertOverride();
@@ -80,9 +66,6 @@ Future<void> initApp() async {

await DynamicTheme.fetchSystemPalette();

// Initialize Immich Logger Service
ImmichLogger();

final log = Logger("ImmichErrorLogger");

FlutterError.onError = (details) {
@@ -122,29 +105,6 @@ Future<void> initApp() async {
await FileDownloader().trackTasks();
}

Future<Isar> loadDb() async {
final dir = await getApplicationDocumentsDirectory();
Isar db = await Isar.open(
[
StoreValueSchema,
ExifInfoSchema,
AssetSchema,
AlbumSchema,
UserSchema,
BackupAlbumSchema,
DuplicatedAssetSchema,
LoggerMessageSchema,
ETagSchema,
if (Platform.isAndroid) AndroidDeviceAssetSchema,
if (Platform.isIOS) IOSDeviceAssetSchema,
],
directory: dir.path,
maxSizeMiB: 1024,
);
await StoreService.init(storeRepository: IsarStoreRepository(db));
return db;
}

class ImmichApp extends ConsumerStatefulWidget {
const ImmichApp({super.key});
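main() now defers to a Bootstrap facade in lib/utils/bootstrap.dart, whose body is not shown in this diff. Based on the removed loadDb() above and the service initializers introduced earlier, a hypothetical reconstruction (an assumption, not the branch's actual code) might be:

class Bootstrap {
  const Bootstrap._();

  static Future<Isar> initIsar() async {
    // Same schema list and limits as the removed loadDb().
    final dir = await getApplicationDocumentsDirectory();
    return Isar.open(
      [
        StoreValueSchema,
        ExifInfoSchema,
        AssetSchema,
        AlbumSchema,
        UserSchema,
        BackupAlbumSchema,
        DuplicatedAssetSchema,
        LoggerMessageSchema,
        ETagSchema,
        if (Platform.isAndroid) AndroidDeviceAssetSchema,
        if (Platform.isIOS) IOSDeviceAssetSchema,
      ],
      directory: dir.path,
      maxSizeMiB: 1024,
    );
  }

  static Future<void> initDomain(Isar db) async {
    // Wire the domain-layer singletons against their Isar repositories.
    await StoreService.init(storeRepository: IsarStoreRepository(db));
    await LogService.init(
      logRepository: IsarLogRepository(db),
      storeRepository: IsarStoreRepository(db),
    );
  }
}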
@@ -7,7 +7,7 @@ mixin ErrorLoggerMixin {
abstract final Logger logger;

/// Returns an AsyncValue<T> if the future is successfully executed
/// Else, logs the error to the overrided logger and returns an AsyncError<>
/// Else, logs the error to the overridden logger and returns an AsyncError<>
AsyncFuture<T> guardError<T>(
Future<T> Function() fn, {
required String errorMessage,
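The hunk is cut off after `errorMessage`; assuming the remaining parameters keep their defaults, the mixin would be used roughly like this (the fetch call is hypothetical, only guardError and logger come from the code above):

// Inside a service that mixes in ErrorLoggerMixin:
final result = await guardError(
  () => _apiClient.fetchAlbums(), // hypothetical fallible call
  errorMessage: 'Failed to fetch albums',
);
// On failure the error is logged via `logger` and an AsyncError is returned
// instead of the exception propagating to the caller.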
@@ -2,10 +2,11 @@ import 'package:auto_route/auto_route.dart';
import 'package:flutter/material.dart';
import 'package:flutter_hooks/flutter_hooks.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/log.model.dart';
import 'package:immich_mobile/domain/services/log.service.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/extensions/theme_extensions.dart';
import 'package:immich_mobile/routing/router.dart';
import 'package:immich_mobile/entities/logger_message.entity.dart';
import 'package:immich_mobile/services/immich_logger.service.dart';
import 'package:intl/intl.dart';

@@ -17,8 +18,11 @@ class AppLogPage extends HookConsumerWidget {

@override
Widget build(BuildContext context, WidgetRef ref) {
final immichLogger = ImmichLogger();
final logMessages = useState(immichLogger.messages);
final immichLogger = LogService.I;
final shouldReload = useState(false);
final logMessages = useFuture(
useMemoized(() => immichLogger.getMessages(), [shouldReload.value]),
);

Widget colorStatusIndicator(Color color) {
return Column(
@@ -37,16 +41,16 @@ class AppLogPage extends HookConsumerWidget {
|
||||
}
|
||||
|
||||
Widget buildLeadingIcon(LogLevel level) => switch (level) {
|
||||
LogLevel.INFO => colorStatusIndicator(context.primaryColor),
|
||||
LogLevel.SEVERE => colorStatusIndicator(Colors.redAccent),
|
||||
LogLevel.WARNING => colorStatusIndicator(Colors.orangeAccent),
|
||||
LogLevel.info => colorStatusIndicator(context.primaryColor),
|
||||
LogLevel.severe => colorStatusIndicator(Colors.redAccent),
|
||||
LogLevel.warning => colorStatusIndicator(Colors.orangeAccent),
|
||||
_ => colorStatusIndicator(Colors.grey),
|
||||
};
|
||||
|
||||
Color getTileColor(LogLevel level) => switch (level) {
|
||||
LogLevel.INFO => Colors.transparent,
|
||||
LogLevel.SEVERE => Colors.redAccent.withOpacity(0.25),
|
||||
LogLevel.WARNING => Colors.orangeAccent.withOpacity(0.25),
|
||||
LogLevel.info => Colors.transparent,
|
||||
LogLevel.severe => Colors.redAccent.withOpacity(0.25),
|
||||
LogLevel.warning => Colors.orangeAccent.withOpacity(0.25),
|
||||
_ => context.primaryColor.withOpacity(0.1),
|
||||
};
|
||||
|
||||
@@ -71,7 +75,7 @@ class AppLogPage extends HookConsumerWidget {
|
||||
),
|
||||
onPressed: () {
|
||||
immichLogger.clearLogs();
|
||||
logMessages.value = [];
|
||||
shouldReload.value = !shouldReload.value;
|
||||
},
|
||||
),
|
||||
Builder(
|
||||
@@ -84,7 +88,7 @@ class AppLogPage extends HookConsumerWidget {
|
||||
size: 20.0,
|
||||
),
|
||||
onPressed: () {
|
||||
immichLogger.shareLogs(iconContext);
|
||||
ImmichLogger.shareLogs(iconContext);
|
||||
},
|
||||
);
|
||||
},
|
||||
@@ -105,9 +109,9 @@ class AppLogPage extends HookConsumerWidget {
|
||||
separatorBuilder: (context, index) {
|
||||
return const Divider(height: 0);
|
||||
},
|
||||
itemCount: logMessages.value.length,
|
||||
itemCount: logMessages.data?.length ?? 0,
|
||||
itemBuilder: (context, index) {
|
||||
var logMessage = logMessages.value[index];
|
||||
var logMessage = logMessages.data![index];
|
||||
return ListTile(
|
||||
onTap: () => context.pushRoute(
|
||||
AppLogDetailRoute(
|
||||
@@ -128,7 +132,7 @@ class AppLogPage extends HookConsumerWidget {
|
||||
),
|
||||
),
|
||||
subtitle: Text(
|
||||
"at ${DateFormat("HH:mm:ss.SSS").format(logMessage.createdAt)} in ${logMessage.context1}",
|
||||
"at ${DateFormat("HH:mm:ss.SSS").format(logMessage.createdAt)} in ${logMessage.logger}",
|
||||
style: TextStyle(
|
||||
fontSize: 12.0,
|
||||
color: context.colorScheme.onSurfaceSecondary,
|
||||
|
||||
@@ -1,15 +1,15 @@
import 'package:auto_route/auto_route.dart';
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/entities/logger_message.entity.dart';
import 'package:flutter/services.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/log.model.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';

@RoutePage()
class AppLogDetailPage extends HookConsumerWidget {
  const AppLogDetailPage({super.key, required this.logMessage});

  final LoggerMessage logMessage;
  final LogMessage logMessage;

  @override
  Widget build(BuildContext context, WidgetRef ref) {
@@ -126,14 +126,14 @@ class AppLogDetailPage extends HookConsumerWidget {
        child: ListView(
          children: [
            buildTextWithCopyButton("MESSAGE", logMessage.message),
            if (logMessage.details != null)
              buildTextWithCopyButton("DETAILS", logMessage.details.toString()),
            if (logMessage.context1 != null)
              buildLogContext1(logMessage.context1.toString()),
            if (logMessage.context2 != null)
            if (logMessage.error != null)
              buildTextWithCopyButton("DETAILS", logMessage.error.toString()),
            if (logMessage.logger != null)
              buildLogContext1(logMessage.logger.toString()),
            if (logMessage.stack != null)
              buildTextWithCopyButton(
                "STACK TRACE",
                logMessage.context2.toString(),
                logMessage.stack.toString(),
              ),
          ],
        ),

@@ -110,7 +110,7 @@ class PhotosPage extends HookConsumerWidget {
          : const SizedBox(),
      renderListProvider: timelineUsers.length > 1
          ? multiUsersTimelineProvider(timelineUsers)
          : singleUserTimelineProvider(currentUser!.isarId),
          : singleUserTimelineProvider(currentUser?.isarId),
      buildLoadingIndicator: buildLoadingIndicator,
      onRefresh: refreshAssets,
      stackEnabled: true,

@@ -1,20 +1,23 @@
import 'dart:async';

import 'package:flutter/foundation.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/providers/album/album.provider.dart';
import 'package:immich_mobile/services/background.service.dart';
import 'package:immich_mobile/domain/services/log.service.dart';
import 'package:immich_mobile/models/backup/backup_state.model.dart';
import 'package:immich_mobile/providers/album/album.provider.dart';
import 'package:immich_mobile/providers/asset.provider.dart';
import 'package:immich_mobile/providers/auth.provider.dart';
import 'package:immich_mobile/providers/backup/backup.provider.dart';
import 'package:immich_mobile/providers/backup/ios_background_settings.provider.dart';
import 'package:immich_mobile/providers/backup/manual_upload.provider.dart';
import 'package:immich_mobile/providers/auth.provider.dart';
import 'package:immich_mobile/providers/memory.provider.dart';
import 'package:immich_mobile/providers/gallery_permission.provider.dart';
import 'package:immich_mobile/providers/memory.provider.dart';
import 'package:immich_mobile/providers/notification_permission.provider.dart';
import 'package:immich_mobile/providers/asset.provider.dart';
import 'package:immich_mobile/providers/server_info.provider.dart';
import 'package:immich_mobile/providers/tab.provider.dart';
import 'package:immich_mobile/providers/websocket.provider.dart';
import 'package:immich_mobile/services/immich_logger.service.dart';
import 'package:immich_mobile/services/background.service.dart';
import 'package:isar/isar.dart';
import 'package:permission_handler/permission_handler.dart';

enum AppLifeCycleEnum {
@@ -112,11 +115,13 @@ class AppLifeCycleNotifier extends StateNotifier<AppLifeCycleEnum> {
      _ref.read(websocketProvider.notifier).disconnect();
    }

    ImmichLogger().flush();
    LogService.I.flush();
  }

  void handleAppDetached() {
  Future<void> handleAppDetached() async {
    state = AppLifeCycleEnum.detached;
    LogService.I.flush();
    await Isar.getInstance()?.close();
    // no guarantee this is called at all
    _ref.read(manualUploadProvider.notifier).cancelBackup();
  }

@@ -59,7 +59,11 @@ class AssetNotifier extends StateNotifier<bool> {
      await clearAllAssets();
      log.info("Manual refresh requested, cleared assets and albums from db");
    }
    final bool changedUsers = await _userService.refreshUsers();
    final users = await _userService.getUsersFromServer();
    bool changedUsers = false;
    if (users != null) {
      changedUsers = await _syncService.syncUsersFromServer(users);
    }
    final bool newRemote = await _assetService.refreshRemoteAssets();
    final bool newLocal = await _albumService.refreshDeviceAlbums();
    debugPrint(

@@ -104,7 +104,7 @@ class DownloadStateNotifier extends StateNotifier<DownloadState> {
  }

  void _taskProgressCallback(TaskProgressUpdate update) {
    // Ignore if the task is cancled or completed
    // Ignore if the task is canceled or completed
    if (update.progress == -2 || update.progress == -1) {
      return;
    }

@@ -117,7 +117,7 @@ class ShareIntentUploadStateNotifier
  }

  void _taskProgressCallback(TaskProgressUpdate update) {
    // Ignore if the task is cancled or completed
    // Ignore if the task is canceled or completed
    if (update.progress == downloadFailed ||
        update.progress == downloadCompleted) {
      return;

@@ -47,7 +47,7 @@ class AuthNotifier extends StateNotifier<AuthState> {
  }

  /// Validating the url is the alternative connecting server url without
  /// saving the infomation to the local database
  /// saving the information to the local database
  Future<bool> validateAuxilaryServerUrl(String url) async {
    try {
      final validEndpoint = await _apiService.resolveEndpoint(url);

@@ -10,7 +10,7 @@ import 'package:immich_mobile/entities/album.entity.dart';
import 'package:immich_mobile/entities/backup_album.entity.dart';
import 'package:immich_mobile/entities/store.entity.dart';
import 'package:immich_mobile/interfaces/album_media.interface.dart';
import 'package:immich_mobile/interfaces/backup.interface.dart';
import 'package:immich_mobile/interfaces/backup_album.interface.dart';
import 'package:immich_mobile/interfaces/file_media.interface.dart';
import 'package:immich_mobile/models/auth/auth_state.model.dart';
import 'package:immich_mobile/models/backup/available_album.model.dart';
@@ -23,21 +23,34 @@ import 'package:immich_mobile/models/server_info/server_disk_info.model.dart';
import 'package:immich_mobile/providers/app_life_cycle.provider.dart';
import 'package:immich_mobile/providers/auth.provider.dart';
import 'package:immich_mobile/providers/backup/error_backup_list.provider.dart';
import 'package:immich_mobile/providers/db.provider.dart';
import 'package:immich_mobile/providers/gallery_permission.provider.dart';
import 'package:immich_mobile/repositories/album_media.repository.dart';
import 'package:immich_mobile/repositories/backup.repository.dart';
import 'package:immich_mobile/repositories/file_media.repository.dart';
import 'package:immich_mobile/services/background.service.dart';
import 'package:immich_mobile/services/backup.service.dart';
import 'package:immich_mobile/services/backup_album.service.dart';
import 'package:immich_mobile/services/server_info.service.dart';
import 'package:immich_mobile/utils/backup_progress.dart';
import 'package:immich_mobile/utils/diff.dart';
import 'package:isar/isar.dart';
import 'package:logging/logging.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:photo_manager/photo_manager.dart' show PMProgressHandler;

final backupProvider =
    StateNotifierProvider<BackupNotifier, BackUpState>((ref) {
  return BackupNotifier(
    ref.watch(backupServiceProvider),
    ref.watch(serverInfoServiceProvider),
    ref.watch(authProvider),
    ref.watch(backgroundServiceProvider),
    ref.watch(galleryPermissionNotifier.notifier),
    ref.watch(albumMediaRepositoryProvider),
    ref.watch(fileMediaRepositoryProvider),
    ref.watch(backupAlbumServiceProvider),
    ref,
  );
});

class BackupNotifier extends StateNotifier<BackUpState> {
  BackupNotifier(
    this._backupService,
@@ -45,10 +58,9 @@ class BackupNotifier extends StateNotifier<BackUpState> {
    this._authState,
    this._backgroundService,
    this._galleryPermissionNotifier,
    this._db,
    this._albumMediaRepository,
    this._fileMediaRepository,
    this._backupRepository,
    this._backupAlbumService,
    this.ref,
  ) : super(
          BackUpState(
@@ -96,10 +108,9 @@ class BackupNotifier extends StateNotifier<BackUpState> {
  final AuthState _authState;
  final BackgroundService _backgroundService;
  final GalleryPermissionNotifier _galleryPermissionNotifier;
  final Isar _db;
  final IAlbumMediaRepository _albumMediaRepository;
  final IFileMediaRepository _fileMediaRepository;
  final IBackupRepository _backupRepository;
  final BackupAlbumService _backupAlbumService;
  final Ref ref;

  ///
@@ -260,9 +271,9 @@ class BackupNotifier extends StateNotifier<BackUpState> {
    state = state.copyWith(availableAlbums: availableAlbums);

    final List<BackupAlbum> excludedBackupAlbums =
        await _backupRepository.getAllBySelection(BackupSelection.exclude);
        await _backupAlbumService.getAllBySelection(BackupSelection.exclude);
    final List<BackupAlbum> selectedBackupAlbums =
        await _backupRepository.getAllBySelection(BackupSelection.select);
        await _backupAlbumService.getAllBySelection(BackupSelection.select);

    final Set<AvailableAlbum> selectedAlbums = {};
    for (final BackupAlbum ba in selectedBackupAlbums) {
@@ -439,7 +450,7 @@ class BackupNotifier extends StateNotifier<BackUpState> {
  }

  /// Save user selection of selected albums and excluded albums to database
  Future<void> _updatePersistentAlbumsSelection() {
  Future<void> _updatePersistentAlbumsSelection() async {
    final epoch = DateTime.fromMillisecondsSinceEpoch(0, isUtc: true);
    final selected = state.selectedBackupAlbums.map(
      (e) => BackupAlbum(e.id, e.lastBackup ?? epoch, BackupSelection.select),
@@ -447,29 +458,30 @@ class BackupNotifier extends StateNotifier<BackUpState> {
    final excluded = state.excludedBackupAlbums.map(
      (e) => BackupAlbum(e.id, e.lastBackup ?? epoch, BackupSelection.exclude),
    );
    final backupAlbums = selected.followedBy(excluded).toList();
    backupAlbums.sortBy((e) => e.id);
    return _db.writeTxn(() async {
      final dbAlbums = await _db.backupAlbums.where().sortById().findAll();
      final List<int> toDelete = [];
      final List<BackupAlbum> toUpsert = [];
      // stores the most recent `lastBackup` per album but always keeps the `selection` the user just made
      diffSortedListsSync(
        dbAlbums,
        backupAlbums,
        compare: (BackupAlbum a, BackupAlbum b) => a.id.compareTo(b.id),
        both: (BackupAlbum a, BackupAlbum b) {
          b.lastBackup =
              a.lastBackup.isAfter(b.lastBackup) ? a.lastBackup : b.lastBackup;
          toUpsert.add(b);
          return true;
        },
        onlyFirst: (BackupAlbum a) => toDelete.add(a.isarId),
        onlySecond: (BackupAlbum b) => toUpsert.add(b),
      );
      await _db.backupAlbums.deleteAll(toDelete);
      await _db.backupAlbums.putAll(toUpsert);
    });
    final candidates = selected.followedBy(excluded).toList();
    candidates.sortBy((e) => e.id);

    final savedBackupAlbums =
        await _backupAlbumService.getAll(sort: BackupAlbumSort.id);
    final List<int> toDelete = [];
    final List<BackupAlbum> toUpsert = [];

    diffSortedListsSync(
      savedBackupAlbums,
      candidates,
      compare: (BackupAlbum a, BackupAlbum b) => a.id.compareTo(b.id),
      both: (BackupAlbum a, BackupAlbum b) {
        b.lastBackup =
            a.lastBackup.isAfter(b.lastBackup) ? a.lastBackup : b.lastBackup;
        toUpsert.add(b);
        return true;
      },
      onlyFirst: (BackupAlbum a) => toDelete.add(a.isarId),
      onlySecond: (BackupAlbum b) => toUpsert.add(b),
    );

    await _backupAlbumService.deleteAll(toDelete);
    await _backupAlbumService.updateAll(toUpsert);
  }

  /// Invoke backup process
@@ -686,14 +698,10 @@ class BackupNotifier extends StateNotifier<BackUpState> {
  }

  Future<void> resumeBackup() async {
    final List<BackupAlbum> selectedBackupAlbums = await _db.backupAlbums
        .filter()
        .selectionEqualTo(BackupSelection.select)
        .findAll();
    final List<BackupAlbum> excludedBackupAlbums = await _db.backupAlbums
        .filter()
        .selectionEqualTo(BackupSelection.exclude)
        .findAll();
    final List<BackupAlbum> selectedBackupAlbums =
        await _backupAlbumService.getAllBySelection(BackupSelection.select);
    final List<BackupAlbum> excludedBackupAlbums =
        await _backupAlbumService.getAllBySelection(BackupSelection.exclude);
    Set<AvailableAlbum> selectedAlbums = state.selectedBackupAlbums;
    Set<AvailableAlbum> excludedAlbums = state.excludedBackupAlbums;
    if (selectedAlbums.isNotEmpty) {
@@ -756,23 +764,8 @@ class BackupNotifier extends StateNotifier<BackUpState> {
  }

  BackUpProgressEnum get backupProgress => state.backupProgress;

  void updateBackupProgress(BackUpProgressEnum backupProgress) {
    state = state.copyWith(backupProgress: backupProgress);
  }
}

final backupProvider =
    StateNotifierProvider<BackupNotifier, BackUpState>((ref) {
  return BackupNotifier(
    ref.watch(backupServiceProvider),
    ref.watch(serverInfoServiceProvider),
    ref.watch(authProvider),
    ref.watch(backgroundServiceProvider),
    ref.watch(galleryPermissionNotifier.notifier),
    ref.watch(dbProvider),
    ref.watch(albumMediaRepositoryProvider),
    ref.watch(fileMediaRepositoryProvider),
    ref.watch(backupRepositoryProvider),
    ref,
  );
});

@@ -9,7 +9,6 @@ import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/entities/backup_album.entity.dart';
import 'package:immich_mobile/models/backup/backup_candidate.model.dart';
import 'package:immich_mobile/models/backup/success_upload_asset.model.dart';
import 'package:immich_mobile/repositories/backup.repository.dart';
import 'package:immich_mobile/repositories/file_media.repository.dart';
import 'package:immich_mobile/services/background.service.dart';
import 'package:immich_mobile/models/backup/backup_state.model.dart';
@@ -24,6 +23,7 @@ import 'package:immich_mobile/providers/app_settings.provider.dart';
import 'package:immich_mobile/services/app_settings.service.dart';
import 'package:immich_mobile/entities/asset.entity.dart';
import 'package:immich_mobile/providers/app_life_cycle.provider.dart';
import 'package:immich_mobile/services/backup_album.service.dart';
import 'package:immich_mobile/services/local_notification.service.dart';
import 'package:immich_mobile/widgets/common/immich_toast.dart';
import 'package:immich_mobile/utils/backup_progress.dart';
@@ -37,7 +37,7 @@ final manualUploadProvider =
    ref.watch(localNotificationService),
    ref.watch(backupProvider.notifier),
    ref.watch(backupServiceProvider),
    ref.watch(backupRepositoryProvider),
    ref.watch(backupAlbumServiceProvider),
    ref,
  );
});
@@ -47,14 +47,14 @@ class ManualUploadNotifier extends StateNotifier<ManualUploadState> {
  final LocalNotificationService _localNotificationService;
  final BackupNotifier _backupProvider;
  final BackupService _backupService;
  final BackupRepository _backupRepository;
  final BackupAlbumService _backupAlbumService;
  final Ref ref;

  ManualUploadNotifier(
    this._localNotificationService,
    this._backupProvider,
    this._backupService,
    this._backupRepository,
    this._backupAlbumService,
    this.ref,
  ) : super(
          ManualUploadState(
@@ -210,9 +210,9 @@ class ManualUploadNotifier extends StateNotifier<ManualUploadState> {
    }

    final selectedBackupAlbums =
        await _backupRepository.getAllBySelection(BackupSelection.select);
    final excludedBackupAlbums =
        await _backupRepository.getAllBySelection(BackupSelection.exclude);
        await _backupAlbumService.getAllBySelection(BackupSelection.select);
    final excludedBackupAlbums = await _backupAlbumService
        .getAllBySelection(BackupSelection.exclude);

    // Get candidates from selected albums and excluded albums
    Set<BackupCandidate> candidates =

@@ -6,7 +6,7 @@ import 'package:permission_handler/permission_handler.dart';

class GalleryPermissionNotifier extends StateNotifier<PermissionStatus> {
  GalleryPermissionNotifier()
      : super(PermissionStatus.denied) // Denied is the intitial state
      : super(PermissionStatus.denied) // Denied is the initial state
  {
    // Sets the initial state
    getGalleryPermissionStatus();

@@ -8,6 +8,7 @@ import 'package:immich_mobile/entities/user.entity.dart';

class PartnerSharedWithNotifier extends StateNotifier<List<User>> {
  final PartnerService _partnerService;
  late final StreamSubscription<List<User>> streamSub;

  PartnerSharedWithNotifier(this._partnerService) : super([]) {
    Function eq = const ListEquality<User>().equals;
@@ -16,7 +17,7 @@ class PartnerSharedWithNotifier extends StateNotifier<List<User>> {
        state = partners;
      }
    }).then((_) {
      _partnerService.watchSharedWith().listen((partners) {
      streamSub = _partnerService.watchSharedWith().listen((partners) {
        if (!eq(state, partners)) {
          state = partners;
        }
@@ -27,6 +28,14 @@ class PartnerSharedWithNotifier extends StateNotifier<List<User>> {
  Future<bool> updatePartner(User partner, {required bool inTimeline}) {
    return _partnerService.updatePartner(partner, inTimeline: inTimeline);
  }

  @override
  void dispose() {
    if (mounted) {
      streamSub.cancel();
    }
    super.dispose();
  }
}

final partnerSharedWithProvider =
@@ -38,6 +47,7 @@ final partnerSharedWithProvider =

class PartnerSharedByNotifier extends StateNotifier<List<User>> {
  final PartnerService _partnerService;
  late final StreamSubscription<List<User>> streamSub;

  PartnerSharedByNotifier(this._partnerService) : super([]) {
    Function eq = const ListEquality<User>().equals;
@@ -54,11 +64,11 @@ class PartnerSharedByNotifier extends StateNotifier<List<User>> {
    });
  }

  late final StreamSubscription<List<User>> streamSub;

  @override
  void dispose() {
    streamSub.cancel();
    if (mounted) {
      streamSub.cancel();
    }
    super.dispose();
  }
}

@@ -5,8 +5,12 @@ import 'package:immich_mobile/providers/locale_provider.dart';
import 'package:immich_mobile/services/timeline.service.dart';
import 'package:immich_mobile/widgets/asset_grid/asset_grid_data_structure.dart';

final singleUserTimelineProvider = StreamProvider.family<RenderList, int>(
final singleUserTimelineProvider = StreamProvider.family<RenderList, int?>(
  (ref, userId) {
    if (userId == null) {
      return const Stream.empty();
    }

    ref.watch(localeProvider);
    final timelineService = ref.watch(timelineServiceProvider);
    return timelineService.watchHomeTimeline(userId);

@@ -5,9 +5,8 @@ import 'package:immich_mobile/domain/models/store.model.dart';
import 'package:immich_mobile/entities/store.entity.dart';
import 'package:immich_mobile/entities/user.entity.dart';
import 'package:immich_mobile/providers/api.provider.dart';
import 'package:immich_mobile/providers/db.provider.dart';
import 'package:immich_mobile/services/api.service.dart';
import 'package:isar/isar.dart';
import 'package:immich_mobile/services/timeline.service.dart';

class CurrentUserProvider extends StateNotifier<User?> {
  CurrentUserProvider(this._apiService) : super(null) {
@@ -47,18 +46,15 @@ final currentUserProvider =
});

class TimelineUserIdsProvider extends StateNotifier<List<int>> {
  TimelineUserIdsProvider(Isar db, User? currentUser) : super([]) {
    final query = db.users
        .filter()
        .inTimelineEqualTo(true)
        .or()
        .isarIdEqualTo(currentUser?.isarId ?? Isar.autoIncrement)
        .isarIdProperty();
    query.findAll().then((users) => state = users);
    streamSub = query.watch().listen((users) => state = users);
  TimelineUserIdsProvider(this._timelineService) : super([]) {
    _timelineService.getTimelineUserIds().then((users) => state = users);
    streamSub = _timelineService
        .watchTimelineUserIds()
        .listen((users) => state = users);
  }

  late final StreamSubscription<List<int>> streamSub;
  final TimelineService _timelineService;

  @override
  void dispose() {
@@ -69,8 +65,5 @@ class TimelineUserIdsProvider extends StateNotifier<List<int>> {

final timelineUsersIdsProvider =
    StateNotifierProvider<TimelineUserIdsProvider, List<int>>((ref) {
  return TimelineUserIdsProvider(
    ref.watch(dbProvider),
    ref.watch(currentUserProvider),
  );
  return TimelineUserIdsProvider(ref.watch(timelineServiceProvider));
});

@@ -1,15 +1,16 @@
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/entities/backup_album.entity.dart';
import 'package:immich_mobile/interfaces/backup.interface.dart';
import 'package:immich_mobile/interfaces/backup_album.interface.dart';
import 'package:immich_mobile/providers/db.provider.dart';
import 'package:immich_mobile/repositories/database.repository.dart';
import 'package:isar/isar.dart';

final backupRepositoryProvider =
    Provider((ref) => BackupRepository(ref.watch(dbProvider)));
final backupAlbumRepositoryProvider =
    Provider((ref) => BackupAlbumRepository(ref.watch(dbProvider)));

class BackupRepository extends DatabaseRepository implements IBackupRepository {
  BackupRepository(super.db);
class BackupAlbumRepository extends DatabaseRepository
    implements IBackupAlbumRepository {
  BackupAlbumRepository(super.db);

  @override
  Future<List<BackupAlbum>> getAll({BackupAlbumSort? sort}) {

@@ -2,6 +2,7 @@ import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/constants/enums.dart';
import 'package:immich_mobile/entities/album.entity.dart';
import 'package:immich_mobile/entities/asset.entity.dart';
import 'package:immich_mobile/entities/user.entity.dart';
import 'package:immich_mobile/interfaces/timeline.interface.dart';
import 'package:immich_mobile/providers/db.provider.dart';
import 'package:immich_mobile/repositories/database.repository.dart';
@@ -15,6 +16,28 @@ class TimelineRepository extends DatabaseRepository
    implements ITimelineRepository {
  TimelineRepository(super.db);

  @override
  Future<List<int>> getTimelineUserIds(int id) {
    return db.users
        .filter()
        .inTimelineEqualTo(true)
        .or()
        .isarIdEqualTo(id)
        .isarIdProperty()
        .findAll();
  }

  @override
  Stream<List<int>> watchTimelineUsers(int id) {
    return db.users
        .filter()
        .inTimelineEqualTo(true)
        .or()
        .isarIdEqualTo(id)
        .isarIdProperty()
        .watch();
  }

  @override
  Stream<RenderList> watchArchiveTimeline(int userId) {
    final query = db.assets

Some files were not shown because too many files have changed in this diff