Compare commits: `qr-code-lo...feat/rocm` (64 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 2da9e3152b |  |
|  | 56b85f7479 |  |
|  | 8b43066632 |  |
|  | 20acdcd884 |  |
|  | 22d348beca |  |
|  | 3b0af1c8a9 |  |
|  | 61c8237a4d |  |
|  | d740f0283a |  |
|  | 4ada28ac99 |  |
|  | 63c01b78e2 |  |
|  | 1423cfd53c |  |
|  | 867eec86f5 |  |
|  | 86e8effd8e |  |
|  | 49d393216a |  |
|  | 75c9f63757 |  |
|  | 63984890df |  |
|  | 1356468c38 |  |
|  | c23c53bf6f |  |
|  | 0dcfc43461 |  |
|  | d1fd0076cc |  |
|  | ff19502035 |  |
|  | 6ef069b537 |  |
|  | a03e999bde |  |
|  | ad1ba4be5f |  |
|  | f89e74181b |  |
|  | e2c34f17ba |  |
|  | 23b1256592 |  |
|  | 7bbc1d9f68 |  |
|  | 8b24c31d20 |  |
|  | 7f61ac6983 |  |
|  | 4db8f0c666 |  |
|  | 3d6a6f77a8 |  |
|  | 5698f446f7 |  |
|  | eb74fafb00 |  |
|  | 24da25dbbf |  |
|  | 9b842d4cca |  |
|  | a99bd94717 |  |
|  | 4b568dcbb3 |  |
|  | 12ab56c885 |  |
|  | eed6465b41 |  |
|  | 5f6c16080b |  |
|  | a2aab1f373 |  |
|  | 8e076ecfe4 |  |
|  | fe702ba6d7 |  |
|  | 869839f642 |  |
|  | 8885e3105e |  |
|  | 6e51c4ec71 |  |
|  | 6bf2e8dbcb |  |
|  | 366f23774a |  |
|  | fd5e931617 |  |
|  | d8d87bb565 |  |
|  | 6cc1978b2d |  |
|  | 506d2d0f81 |  |
|  | f13d13b2ea |  |
|  | 2510684bf7 |  |
|  | c8eef5ad4d |  |
|  | 0cb3dc6211 |  |
|  | f11080cc2d |  |
|  | efcf773ea0 |  |
|  | dc143046e3 |  |
|  | e684062569 |  |
|  | 5c0538e52c |  |
|  | 84cf0d1670 |  |
|  | bfcde05b1c |  |
```diff
@@ -11,7 +11,7 @@ body:
   - type: checkboxes
     attributes:
-      label: I have searched the existing feature requests to make sure this is not a duplicate request.
+      label: I have searched the existing feature requests, both open and closed, to make sure this is not a duplicate request.
       options:
         - label: "Yes"
           required: true
```
**.github/ISSUE_TEMPLATE/bug_report.yaml** (vendored, 7 changes)

```diff
@@ -1,6 +1,13 @@
 name: Report an issue with Immich
 description: Report an issue with Immich
 body:
+  - type: checkboxes
+    attributes:
+      label: I have searched the existing issues, both open and closed, to make sure this is not a duplicate report.
+      options:
+        - label: "Yes"
+          required: true
+
   - type: markdown
     attributes:
       value: |
```
**.github/workflows/cli.yml** (vendored, 6 changes)

```diff
@@ -56,10 +56,10 @@ jobs:
         uses: actions/checkout@v4

       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.4.0
+        uses: docker/setup-qemu-action@v3.5.0

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3.9.0
+        uses: docker/setup-buildx-action@v3.10.0

       - name: Login to GitHub Container Registry
         uses: docker/login-action@v3
@@ -88,7 +88,7 @@ jobs:
           type=raw,value=latest,enable=${{ github.event_name == 'release' }}

       - name: Build and push image
-        uses: docker/build-push-action@v6.13.0
+        uses: docker/build-push-action@v6.15.0
         with:
           file: cli/Dockerfile
           platforms: linux/amd64,linux/arm64
```
**.github/workflows/docker.yml** (vendored, 7 changes)

```diff
@@ -5,7 +5,6 @@ on:
   push:
     branches: [main]
   pull_request:
-    branches: [main]
   release:
     types: [published]

@@ -141,7 +140,7 @@ jobs:
         uses: actions/checkout@v4

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3.9.0
+        uses: docker/setup-buildx-action@v3.10.0

       - name: Login to GitHub Container Registry
         uses: docker/login-action@v3
@@ -171,7 +170,7 @@ jobs:

       - name: Build and push image
         id: build
-        uses: docker/build-push-action@v6.13.0
+        uses: docker/build-push-action@v6.15.0
         with:
           context: ${{ env.context }}
           file: ${{ env.file }}
@@ -334,7 +333,7 @@ jobs:

       - name: Build and push image
         id: build
-        uses: docker/build-push-action@v6.13.0
+        uses: docker/build-push-action@v6.15.0
         with:
           context: ${{ env.context }}
           file: ${{ env.file }}
```
**.github/workflows/test.yml** (vendored, 2 changes)

```diff
@@ -457,7 +457,7 @@ jobs:
     runs-on: ubuntu-latest
     services:
       postgres:
-        image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
+        image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
         env:
           POSTGRES_PASSWORD: postgres
           POSTGRES_USER: postgres
```
**.github/workflows/weblate-lock.yml** (vendored, new file, +50)

```yaml
name: Weblate checks

on:
  pull_request:
    branches: [main]

jobs:
  pre-job:
    runs-on: ubuntu-latest
    outputs:
      should_run: ${{ steps.found_paths.outputs.i18n == 'true' && github.head_ref != 'chore/translations'}}
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - id: found_paths
        uses: dorny/paths-filter@v3
        with:
          filters: |
            i18n:
              - 'i18n/!(en)**\.json'

  enforce-lock:
    name: Check Weblate Lock
    runs-on: ubuntu-latest
    if: ${{ needs.pre-job.outputs.should_run == 'true' }}
    steps:
      - name: Check weblate lock
        run: |
          if [[ "false" = $(curl https://hosted.weblate.org/api/components/immich/immich/lock/ | jq .locked) ]]; then
            exit 1
          fi
      - name: Find Pull Request
        uses: juliangruber/find-pull-request-action@v1
        id: find-pr
        with:
          branch: chore/translations
      - name: Fail if existing weblate PR
        if: ${{ steps.find-pr.outputs.number }}
        run: exit 1

  success-check-lock:
    name: Weblate Lock Check Success
    needs: [enforce-lock]
    runs-on: ubuntu-latest
    if: always()
    steps:
      - name: Any jobs failed?
        if: ${{ contains(needs.*.result, 'failure') }}
        run: exit 1
      - name: All jobs passed or skipped
        if: ${{ !(contains(needs.*.result, 'failure')) }}
        run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"
```
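The lock check in `enforce-lock` above assumes the Weblate components API returns a JSON body with a boolean `locked` field. As a minimal TypeScript sketch of the same check (illustrative only, not part of this diff; assumes Node 18+ with a global `fetch`):

```ts
// Mirrors the workflow's `curl ... | jq .locked` step: fail when the
// Weblate component is not locked. Response shape is an assumption.
const checkWeblateLock = async (): Promise<void> => {
  const response = await fetch('https://hosted.weblate.org/api/components/immich/immich/lock/');
  const { locked } = (await response.json()) as { locked: boolean };
  if (!locked) {
    process.exit(1); // same effect as the workflow's `exit 1`
  }
};

await checkWeblateLock();
```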
```diff
@@ -1,4 +1,4 @@
-FROM node:22.13.1-alpine3.20@sha256:c52e20859a92b3eccbd3a36c5e1a90adc20617d8d421d65e8a622e87b5dac963 AS core
+FROM node:22.14.0-alpine3.20@sha256:40be979442621049f40b1d51a26b55e281246b5de4e5f51a18da7beb6e17e3f9 AS core

 WORKDIR /usr/src/open-api/typescript-sdk
 COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./
```
**cli/package-lock.json** (generated, 616 changes) — diff suppressed because it is too large.
```diff
@@ -1,6 +1,6 @@
 {
   "name": "@immich/cli",
-  "version": "2.2.51",
+  "version": "2.2.52",
   "description": "Command Line Interface (CLI) for Immich",
   "type": "module",
   "exports": "./dist/index.js",
@@ -19,8 +19,9 @@
     "@types/byte-size": "^8.1.0",
     "@types/cli-progress": "^3.11.0",
     "@types/lodash-es": "^4.17.12",
+    "@types/micromatch": "^4.0.9",
     "@types/mock-fs": "^4.13.1",
-    "@types/node": "^22.13.4",
+    "@types/node": "^22.13.5",
     "@typescript-eslint/eslint-plugin": "^8.15.0",
     "@typescript-eslint/parser": "^8.15.0",
     "@vitest/coverage-v8": "^3.0.0",
@@ -31,7 +32,7 @@
     "eslint-config-prettier": "^10.0.0",
     "eslint-plugin-prettier": "^5.1.3",
     "eslint-plugin-unicorn": "^56.0.1",
-    "globals": "^15.9.0",
+    "globals": "^16.0.0",
     "mock-fs": "^5.2.0",
     "prettier": "^3.2.5",
     "prettier-plugin-organize-imports": "^4.0.0",
@@ -62,11 +63,13 @@
     "node": ">=20.0.0"
   },
   "dependencies": {
+    "chokidar": "^4.0.3",
     "fast-glob": "^3.3.2",
     "fastq": "^1.17.1",
-    "lodash-es": "^4.17.21"
+    "lodash-es": "^4.17.21",
+    "micromatch": "^4.0.8"
   },
   "volta": {
     "node": "22.14.0"
   }
 }
```
```diff
@@ -1,12 +1,13 @@
 import * as fs from 'node:fs';
 import * as os from 'node:os';
 import * as path from 'node:path';
-import { describe, expect, it, vi } from 'vitest';
+import { setTimeout as sleep } from 'node:timers/promises';
+import { describe, expect, it, MockedFunction, vi } from 'vitest';

-import { Action, checkBulkUpload, defaults, Reason } from '@immich/sdk';
+import { Action, checkBulkUpload, defaults, getSupportedMediaTypes, Reason } from '@immich/sdk';
 import createFetchMock from 'vitest-fetch-mock';

-import { checkForDuplicates, getAlbumName, uploadFiles, UploadOptionsDto } from './asset';
+import { checkForDuplicates, getAlbumName, startWatch, uploadFiles, UploadOptionsDto } from 'src/commands/asset';

 vi.mock('@immich/sdk');

@@ -199,3 +200,112 @@ describe('checkForDuplicates', () => {
     });
   });
 });
+
+describe('startWatch', () => {
+  let testFolder: string;
+  let checkBulkUploadMocked: MockedFunction<typeof checkBulkUpload>;
+
+  beforeEach(async () => {
+    vi.restoreAllMocks();
+
+    vi.mocked(getSupportedMediaTypes).mockResolvedValue({
+      image: ['.jpg'],
+      sidecar: ['.xmp'],
+      video: ['.mp4'],
+    });
+
+    testFolder = await fs.promises.mkdtemp(path.join(os.tmpdir(), 'test-startWatch-'));
+    checkBulkUploadMocked = vi.mocked(checkBulkUpload);
+    checkBulkUploadMocked.mockResolvedValue({
+      results: [],
+    });
+  });
+
+  it('should start watching a directory and upload new files', async () => {
+    const testFilePath = path.join(testFolder, 'test.jpg');
+
+    await startWatch([testFolder], { concurrency: 1 }, { batchSize: 1, debounceTimeMs: 10 });
+    await sleep(100); // to debounce the watcher from considering the test file as an existing file
+    await fs.promises.writeFile(testFilePath, 'testjpg');
+
+    await vi.waitUntil(() => checkBulkUploadMocked.mock.calls.length > 0, 3000);
+    expect(checkBulkUpload).toHaveBeenCalledWith({
+      assetBulkUploadCheckDto: {
+        assets: [
+          expect.objectContaining({
+            id: testFilePath,
+          }),
+        ],
+      },
+    });
+  });
+
+  it('should filter out unsupported files', async () => {
+    const testFilePath = path.join(testFolder, 'test.jpg');
+    const unsupportedFilePath = path.join(testFolder, 'test.txt');
+
+    await startWatch([testFolder], { concurrency: 1 }, { batchSize: 1, debounceTimeMs: 10 });
+    await sleep(100); // to debounce the watcher from considering the test file as an existing file
+    await fs.promises.writeFile(testFilePath, 'testjpg');
+    await fs.promises.writeFile(unsupportedFilePath, 'testtxt');
+
+    await vi.waitUntil(() => checkBulkUploadMocked.mock.calls.length > 0, 3000);
+    expect(checkBulkUpload).toHaveBeenCalledWith({
+      assetBulkUploadCheckDto: {
+        assets: expect.arrayContaining([
+          expect.objectContaining({
+            id: testFilePath,
+          }),
+        ]),
+      },
+    });
+
+    expect(checkBulkUpload).not.toHaveBeenCalledWith({
+      assetBulkUploadCheckDto: {
+        assets: expect.arrayContaining([
+          expect.objectContaining({
+            id: unsupportedFilePath,
+          }),
+        ]),
+      },
+    });
+  });
+
+  it('should filter out ignored patterns', async () => {
+    const testFilePath = path.join(testFolder, 'test.jpg');
+    const ignoredPattern = 'ignored';
+    const ignoredFolder = path.join(testFolder, ignoredPattern);
+    await fs.promises.mkdir(ignoredFolder, { recursive: true });
+    const ignoredFilePath = path.join(ignoredFolder, 'ignored.jpg');
+
+    await startWatch([testFolder], { concurrency: 1, ignore: ignoredPattern }, { batchSize: 1, debounceTimeMs: 10 });
+    await sleep(100); // to debounce the watcher from considering the test file as an existing file
+    await fs.promises.writeFile(testFilePath, 'testjpg');
+    await fs.promises.writeFile(ignoredFilePath, 'ignoredjpg');
+
+    await vi.waitUntil(() => checkBulkUploadMocked.mock.calls.length > 0, 3000);
+    expect(checkBulkUpload).toHaveBeenCalledWith({
+      assetBulkUploadCheckDto: {
+        assets: expect.arrayContaining([
+          expect.objectContaining({
+            id: testFilePath,
+          }),
+        ]),
+      },
+    });
+
+    expect(checkBulkUpload).not.toHaveBeenCalledWith({
+      assetBulkUploadCheckDto: {
+        assets: expect.arrayContaining([
+          expect.objectContaining({
+            id: ignoredFilePath,
+          }),
+        ]),
+      },
+    });
+  });
+
+  afterEach(async () => {
+    await fs.promises.rm(testFolder, { recursive: true, force: true });
+  });
+});
```
```diff
@@ -12,13 +12,18 @@ import {
   getSupportedMediaTypes,
 } from '@immich/sdk';
 import byteSize from 'byte-size';
+import { Matcher, watch as watchFs } from 'chokidar';
 import { MultiBar, Presets, SingleBar } from 'cli-progress';
 import { chunk } from 'lodash-es';
+import micromatch from 'micromatch';
 import { Stats, createReadStream } from 'node:fs';
 import { stat, unlink } from 'node:fs/promises';
 import path, { basename } from 'node:path';
 import { Queue } from 'src/queue';
-import { BaseOptions, authenticate, crawl, sha1 } from 'src/utils';
+import { BaseOptions, Batcher, authenticate, crawl, sha1 } from 'src/utils';
+
+const UPLOAD_WATCH_BATCH_SIZE = 100;
+const UPLOAD_WATCH_DEBOUNCE_TIME_MS = 10_000;

 const s = (count: number) => (count === 1 ? '' : 's');

@@ -36,6 +41,8 @@ export interface UploadOptionsDto {
   albumName?: string;
   includeHidden?: boolean;
   concurrency: number;
+  progress?: boolean;
+  watch?: boolean;
 }

 class UploadFile extends File {
@@ -55,19 +62,94 @@ class UploadFile extends File {
   }
 }

+const uploadBatch = async (files: string[], options: UploadOptionsDto) => {
+  const { newFiles, duplicates } = await checkForDuplicates(files, options);
+  const newAssets = await uploadFiles(newFiles, options);
+  await updateAlbums([...newAssets, ...duplicates], options);
+  await deleteFiles(newFiles, options);
+};
+
+export const startWatch = async (
+  paths: string[],
+  options: UploadOptionsDto,
+  {
+    batchSize = UPLOAD_WATCH_BATCH_SIZE,
+    debounceTimeMs = UPLOAD_WATCH_DEBOUNCE_TIME_MS,
+  }: { batchSize?: number; debounceTimeMs?: number } = {},
+) => {
+  const watcherIgnored: Matcher[] = [];
+  const { image, video } = await getSupportedMediaTypes();
+  const extensions = new Set([...image, ...video]);
+
+  if (options.ignore) {
+    watcherIgnored.push((path) => micromatch.contains(path, `**/${options.ignore}`));
+  }
+
+  const pathsBatcher = new Batcher<string>({
+    batchSize,
+    debounceTimeMs,
+    onBatch: async (paths: string[]) => {
+      const uniquePaths = [...new Set(paths)];
+      await uploadBatch(uniquePaths, options);
+    },
+  });
+
+  const onFile = async (path: string, stats?: Stats) => {
+    if (stats?.isDirectory()) {
+      return;
+    }
+    const ext = '.' + path.split('.').pop()?.toLowerCase();
+    if (!ext || !extensions.has(ext)) {
+      return;
+    }
+
+    if (!options.progress) {
+      // only log when progress is disabled, as logging can break the progress bar rendering
+      console.log(`Change detected: ${path}`);
+    }
+    pathsBatcher.add(path);
+  };
+  const fsWatcher = watchFs(paths, {
+    ignoreInitial: true,
+    ignored: watcherIgnored,
+    alwaysStat: true,
+    awaitWriteFinish: true,
+    depth: options.recursive ? undefined : 1,
+    persistent: true,
+  })
+    .on('add', onFile)
+    .on('change', onFile)
+    .on('error', (error) => console.error(`Watcher error: ${error}`));
+
+  process.on('SIGINT', async () => {
+    console.log('Exiting...');
+    await fsWatcher.close();
+    process.exit();
+  });
+};
+
 export const upload = async (paths: string[], baseOptions: BaseOptions, options: UploadOptionsDto) => {
   await authenticate(baseOptions);

   const scanFiles = await scan(paths, options);

   if (scanFiles.length === 0) {
-    console.log('No files found, exiting');
-    return;
+    if (options.watch) {
+      console.log('No files found initially.');
+    } else {
+      console.log('No files found, exiting');
+      return;
+    }
   }

-  const { newFiles, duplicates } = await checkForDuplicates(scanFiles, options);
-  const newAssets = await uploadFiles(newFiles, options);
-  await updateAlbums([...newAssets, ...duplicates], options);
-  await deleteFiles(newFiles, options);
+  if (options.watch) {
+    console.log('Watching for changes...');
+    await startWatch(paths, options);
+    // the watcher does not handle the initial scan,
+    // as scan() is a more efficient quick start with batched results
+  }
+
+  await uploadBatch(scanFiles, options);
 };

 const scan = async (pathsToCrawl: string[], options: UploadOptionsDto) => {
@@ -85,19 +167,25 @@ const scan = async (pathsToCrawl: string[], options: UploadOptionsDto) => {
   return files;
 };

-export const checkForDuplicates = async (files: string[], { concurrency, skipHash }: UploadOptionsDto) => {
+export const checkForDuplicates = async (files: string[], { concurrency, skipHash, progress }: UploadOptionsDto) => {
   if (skipHash) {
     console.log('Skipping hash check, assuming all files are new');
     return { newFiles: files, duplicates: [] };
   }

-  const multiBar = new MultiBar(
-    { format: '{message} | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
-    Presets.shades_classic,
-  );
+  let multiBar: MultiBar | undefined;
+
+  if (progress) {
+    multiBar = new MultiBar(
+      { format: '{message} | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
+      Presets.shades_classic,
+    );
+  } else {
+    console.log(`Received ${files.length} files, hashing...`);
+  }

-  const hashProgressBar = multiBar.create(files.length, 0, { message: 'Hashing files ' });
-  const checkProgressBar = multiBar.create(files.length, 0, { message: 'Checking for duplicates' });
+  const hashProgressBar = multiBar?.create(files.length, 0, { message: 'Hashing files ' });
+  const checkProgressBar = multiBar?.create(files.length, 0, { message: 'Checking for duplicates' });

   const newFiles: string[] = [];
   const duplicates: Asset[] = [];
@@ -117,7 +205,7 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
         }
       }

-      checkProgressBar.increment(assets.length);
+      checkProgressBar?.increment(assets.length);
     },
     { concurrency, retry: 3 },
   );
@@ -137,7 +225,7 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
       void checkBulkUploadQueue.push(batch);
     }

-    hashProgressBar.increment();
+    hashProgressBar?.increment();
     return results;
   },
   { concurrency, retry: 3 },
@@ -155,7 +243,7 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas

   await checkBulkUploadQueue.drained();

-  multiBar.stop();
+  multiBar?.stop();

   console.log(`Found ${newFiles.length} new files and ${duplicates.length} duplicate${s(duplicates.length)}`);

@@ -171,7 +259,10 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
   return { newFiles, duplicates };
 };

-export const uploadFiles = async (files: string[], { dryRun, concurrency }: UploadOptionsDto): Promise<Asset[]> => {
+export const uploadFiles = async (
+  files: string[],
+  { dryRun, concurrency, progress }: UploadOptionsDto,
+): Promise<Asset[]> => {
   if (files.length === 0) {
     console.log('All assets were already uploaded, nothing to do.');
     return [];
@@ -191,12 +282,20 @@ export const uploadFiles = async (files: string[], { dryRun, concurrency }: Uplo
     return files.map((filepath) => ({ id: '', filepath }));
   }

-  const uploadProgress = new SingleBar(
-    { format: 'Uploading assets | {bar} | {percentage}% | ETA: {eta_formatted} | {value_formatted}/{total_formatted}' },
-    Presets.shades_classic,
-  );
-  uploadProgress.start(totalSize, 0);
-  uploadProgress.update({ value_formatted: 0, total_formatted: byteSize(totalSize) });
+  let uploadProgress: SingleBar | undefined;
+
+  if (progress) {
+    uploadProgress = new SingleBar(
+      {
+        format: 'Uploading assets | {bar} | {percentage}% | ETA: {eta_formatted} | {value_formatted}/{total_formatted}',
+      },
+      Presets.shades_classic,
+    );
+  } else {
+    console.log(`Uploading ${files.length} asset${s(files.length)} (${byteSize(totalSize)})`);
+  }
+  uploadProgress?.start(totalSize, 0);
+  uploadProgress?.update({ value_formatted: 0, total_formatted: byteSize(totalSize) });

   let duplicateCount = 0;
   let duplicateSize = 0;
@@ -222,7 +321,7 @@ export const uploadFiles = async (files: string[], { dryRun, concurrency }: Uplo
       successSize += stats.size ?? 0;
     }

-    uploadProgress.update(successSize, { value_formatted: byteSize(successSize + duplicateSize) });
+    uploadProgress?.update(successSize, { value_formatted: byteSize(successSize + duplicateSize) });

     return response;
   },
@@ -235,7 +334,7 @@ export const uploadFiles = async (files: string[], { dryRun, concurrency }: Uplo

   await queue.drained();

-  uploadProgress.stop();
+  uploadProgress?.stop();

   console.log(`Successfully uploaded ${successCount} new asset${s(successCount)} (${byteSize(successSize)})`);
   if (duplicateCount > 0) {
```
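One detail worth noting in `startWatch` above: the user-supplied `--ignore` value is wrapped as `**/${options.ignore}` and tested with `micromatch.contains()`, so the pattern can match anywhere inside a watched path, not just at its start. A small illustration with hypothetical paths (not part of the diff):

```ts
import micromatch from 'micromatch';

// Mirrors the matcher pushed into `watcherIgnored` in the diff above.
const ignore = 'ignored';
const isIgnored = (filePath: string) => micromatch.contains(filePath, `**/${ignore}`);

console.log(isIgnored('/photos/ignored/file.jpg')); // true — pattern matches mid-path
console.log(isIgnored('/photos/keep/file.jpg')); // false
```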
```diff
@@ -69,6 +69,13 @@ program
       .default(4),
   )
   .addOption(new Option('--delete', 'Delete local assets after upload').env('IMMICH_DELETE_ASSETS'))
+  .addOption(new Option('--no-progress', 'Hide progress bars').env('IMMICH_PROGRESS_BAR').default(true))
+  .addOption(
+    new Option('--watch', 'Watch for changes and upload automatically')
+      .env('IMMICH_WATCH_CHANGES')
+      .default(false)
+      .implies({ progress: false }),
+  )
   .argument('[paths...]', 'One or more paths to assets to be uploaded')
   .action((paths, options) => upload(paths, program.opts(), options));
```
```diff
@@ -1,6 +1,7 @@
 import mockfs from 'mock-fs';
 import { readFileSync } from 'node:fs';
-import { CrawlOptions, crawl } from 'src/utils';
+import { Batcher, CrawlOptions, crawl } from 'src/utils';
+import { Mock } from 'vitest';

 interface Test {
   test: string;
@@ -303,3 +304,38 @@ describe('crawl', () => {
     }
   });
 });
+
+describe('Batcher', () => {
+  let batcher: Batcher;
+  let onBatch: Mock;
+  beforeEach(() => {
+    onBatch = vi.fn();
+    batcher = new Batcher({ batchSize: 2, onBatch });
+  });
+
+  it('should trigger onBatch() when a batch limit is reached', async () => {
+    batcher.add('a');
+    batcher.add('b');
+    batcher.add('c');
+    expect(onBatch).toHaveBeenCalledOnce();
+    expect(onBatch).toHaveBeenCalledWith(['a', 'b']);
+  });
+
+  it('should trigger onBatch() when flush() is called', async () => {
+    batcher.add('a');
+    batcher.flush();
+    expect(onBatch).toHaveBeenCalledOnce();
+    expect(onBatch).toHaveBeenCalledWith(['a']);
+  });
+
+  it('should trigger onBatch() when debounce time reached', async () => {
+    vi.useFakeTimers();
+    batcher = new Batcher({ batchSize: 2, debounceTimeMs: 100, onBatch });
+    batcher.add('a');
+    expect(onBatch).not.toHaveBeenCalled();
+    vi.advanceTimersByTime(200);
+    expect(onBatch).toHaveBeenCalledOnce();
+    expect(onBatch).toHaveBeenCalledWith(['a']);
+    vi.useRealTimers();
+  });
+});
```
```diff
@@ -172,3 +172,64 @@ export const sha1 = (filepath: string) => {
     rs.on('end', () => resolve(hash.digest('hex')));
   });
 };
+
+/**
+ * Batches items and calls onBatch to process them
+ * when the batch size is reached or the debounce time has passed.
+ */
+export class Batcher<T = unknown> {
+  private items: T[] = [];
+  private readonly batchSize: number;
+  private readonly debounceTimeMs?: number;
+  private readonly onBatch: (items: T[]) => void;
+  private debounceTimer?: NodeJS.Timeout;
+
+  constructor({
+    batchSize,
+    debounceTimeMs,
+    onBatch,
+  }: {
+    batchSize: number;
+    debounceTimeMs?: number;
+    onBatch: (items: T[]) => Promise<void>;
+  }) {
+    this.batchSize = batchSize;
+    this.debounceTimeMs = debounceTimeMs;
+    this.onBatch = onBatch;
+  }
+
+  private setDebounceTimer() {
+    if (this.debounceTimer) {
+      clearTimeout(this.debounceTimer);
+    }
+    if (this.debounceTimeMs) {
+      this.debounceTimer = setTimeout(() => this.flush(), this.debounceTimeMs);
+    }
+  }
+
+  private clearDebounceTimer() {
+    if (this.debounceTimer) {
+      clearTimeout(this.debounceTimer);
+      this.debounceTimer = undefined;
+    }
+  }
+
+  add(item: T) {
+    this.items.push(item);
+    this.setDebounceTimer();
+    if (this.items.length >= this.batchSize) {
+      this.flush();
+    }
+  }
+
+  flush() {
+    this.clearDebounceTimer();
+    if (this.items.length === 0) {
+      return;
+    }
+
+    this.onBatch(this.items);
+
+    this.items = [];
+  }
+}
```
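For readers skimming the diff, a minimal usage sketch of the new `Batcher` (hypothetical sizes and handler; behavior follows the implementation above — a full batch flushes immediately, a partial batch flushes once the debounce timer fires):

```ts
import { Batcher } from 'src/utils'; // the class added in the diff above

const batcher = new Batcher<string>({
  batchSize: 3,
  debounceTimeMs: 5000,
  onBatch: async (items) => console.log(`processing ${items.length} item(s)`),
});

batcher.add('a');
batcher.add('b');
batcher.add('c'); // third item reaches batchSize → onBatch(['a', 'b', 'c']) fires now

batcher.add('d'); // alone in the next batch → onBatch(['d']) fires ~5 s later
```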
```diff
@@ -122,7 +122,7 @@ services:

   database:
     container_name: immich_postgres
-    image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
+    image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
     env_file:
       - .env
     environment:
```
```diff
@@ -63,7 +63,7 @@ services:

   database:
     container_name: immich_postgres
-    image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
+    image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
     env_file:
       - .env
     environment:
@@ -100,7 +100,7 @@ services:
     container_name: immich_prometheus
     ports:
       - 9090:9090
-    image: prom/prometheus@sha256:5888c188cf09e3f7eebc97369c3b2ce713e844cdbd88ccf36f5047c958aea120
+    image: prom/prometheus@sha256:6927e0919a144aa7616fd0137d4816816d42f6b816de3af269ab065250859a62
     volumes:
       - ./prometheus.yml:/etc/prometheus/prometheus.yml
      - prometheus-data:/prometheus
@@ -112,7 +112,7 @@ services:
     command: ['./run.sh', '-disable-reporting']
     ports:
       - 3000:3000
-    image: grafana/grafana:11.5.1-ubuntu@sha256:9a4ab78cec1a2ec7d1ca5dfd5aacec6412706a1bc9e971fc7184e2f6696a63f5
+    image: grafana/grafana:11.5.2-ubuntu@sha256:8b5858c447e06fd7a89006b562ba7bba7c4d5813600c7982374c41852adefaeb
     volumes:
       - grafana-data:/var/lib/grafana
```
```diff
@@ -56,7 +56,7 @@ services:

   database:
     container_name: immich_postgres
-    image: docker.io/tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
+    image: docker.io/tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
     environment:
       POSTGRES_PASSWORD: ${DB_PASSWORD}
       POSTGRES_USER: ${DB_USERNAME}
```
```diff
@@ -69,6 +69,8 @@ Navigating to Administration > Settings > Machine Learning Settings > Facial Rec

 :::tip
 It's better to only tweak the parameters here than to set them to something very different unless you're ready to test a variety of options. If you do need to set a parameter to a strict setting, relaxing other settings can be a good option to compensate, and vice versa.
+
+You can learn how to tune the results in this [Guide](/docs/guides/better-facial-clusters).
 :::

 ### Facial recognition model
```
```diff
@@ -68,7 +68,7 @@ In rare cases, the library watcher can hang, preventing Immich from starting up.

 ### Nightly job

-There is an automatic scan job that is scheduled to run once a day. This job also cleans up any libraries stuck in deletion.
+There is an automatic scan job that is scheduled to run once a day. This job also cleans up any libraries stuck in deletion. It is possible to trigger the cleanup by clicking "Scan all libraries" in the library management page.

 ## Usage
```
**docs/docs/guides/better-facial-clusters.md** (new file, +72)

```md
# Better Facial Recognition Clusters

## Purpose

This guide explains how to optimize facial recognition in systems with large image libraries. By following these steps, you'll achieve better clustering of faces, reducing the need for manual merging.

---

## Important Notes

- **Best Suited For:** Large image libraries after importing a significant number of images.
- **Warning:** This method deletes all previously assigned names.
- **Tip:** **Always take a [backup](/docs/administration/backup-and-restore#database) before proceeding!**

---

## Step-by-Step Instructions

### Objective

To enhance face clustering and ensure the model effectively identifies faces using qualitative initial data.

---

### Steps

#### 1. Adjust Machine Learning Settings

Navigate to:
**Admin → Administration → Settings → Machine Learning Settings**

Make the following changes:

- **Maximum recognition distance (Optional):**
  Lower this value, e.g., to **0.4**, if the library contains people with similar facial features.
- **Minimum recognized faces:**
  Set this to a **high value** (e.g., 20 for libraries with a large number of assets (~100K+), and 10 for libraries with a medium number of assets (~40K+)).
  > A high value ensures clusters only include faces that appear at least `value` times (e.g., 20) in the library, improving the initial clustering process.

---

#### 2. Run Reset Jobs

Go to:
**Admin → Administration → Settings → Jobs**

Perform the following:

1. **FACIAL RECOGNITION → Reset**

> These reset jobs rebuild the recognition model based on the new settings.

---

#### 3. Refine Recognition with Lower Thresholds

Once the reset jobs are complete, refine the recognition as follows:

- **Step 1:**
  Return to **Minimum recognized faces** in Machine Learning Settings and lower the value to **10** (for medium libraries, lower the value from 10 to 5).
  > Run the job: **FACIAL RECOGNITION → MISSING Mode**

- **Step 2:**
  Lower the value again to **3**.
  > Run the job: **FACIAL RECOGNITION → MISSING Mode**

:::tip try different values
For certain libraries with a larger or smaller amount of assets, other settings will be better or worse. It is recommended to try different values **before assigning names** and see which settings work best for your library.
:::

---
```
````diff
@@ -31,6 +31,10 @@ SELECT * FROM "assets" WHERE "originalPath" LIKE 'upload/library/admin/2023/%';
 SELECT * FROM "assets" WHERE "id" = '9f94e60f-65b6-47b7-ae44-a4df7b57f0e9';
 ```

+```sql title="Find by partial ID"
+SELECT * FROM "assets" WHERE "id"::text LIKE '%ab431d3a%';
+```
+
 :::note
 You can calculate the checksum for a particular file by using the command `sha1sum <filename>`.
 :::
````
```diff
@@ -11,7 +11,7 @@ Just restarting the containers does not replace the environment within the conta

 In order to recreate the container using docker compose, run `docker compose up -d`.
 In most cases docker will recognize that the `.env` file has changed and recreate the affected containers.
-If this should not work, try running `docker compose up -d --force-recreate`.
+If this does not work, try running `docker compose up -d --force-recreate`.

 :::

@@ -20,8 +20,8 @@ If this should not work, try running `docker compose up -d --force-recreate`.
 | Variable           | Description                     |  Default  | Containers               |
 | :----------------- | :------------------------------ | :-------: | :----------------------- |
 | `IMMICH_VERSION`   | Image tags                      | `release` | server, machine learning |
-| `UPLOAD_LOCATION`  | Host Path for uploads           |           | server                   |
-| `DB_DATA_LOCATION` | Host Path for Postgres database |           | database                 |
+| `UPLOAD_LOCATION`  | Host path for uploads           |           | server                   |
+| `DB_DATA_LOCATION` | Host path for Postgres database |           | database                 |

 :::tip
 These environment variables are used by the `docker-compose.yml` file and do **NOT** affect the containers directly.
@@ -33,15 +33,15 @@ These environment variables are used by the `docker-compose.yml` file and do **N
 | :---------------------------------- | :---------------------------------------------------------------------------------------- | :--------------------------: | :----------------------- | :----------------- |
 | `TZ`                                | Timezone                                                                                   | <sup>\*1</sup>               | server                   | microservices      |
 | `IMMICH_ENV`                        | Environment (production, development)                                                      | `production`                 | server, machine learning | api, microservices |
-| `IMMICH_LOG_LEVEL`                  | Log Level (verbose, debug, log, warn, error)                                               | `log`                        | server, machine learning | api, microservices |
-| `IMMICH_MEDIA_LOCATION`             | Media Location inside the container ⚠️**You probably shouldn't set this**<sup>\*2</sup>⚠️ | `./upload`<sup>\*3</sup>     | server                   | api, microservices |
+| `IMMICH_LOG_LEVEL`                  | Log level (verbose, debug, log, warn, error)                                               | `log`                        | server, machine learning | api, microservices |
+| `IMMICH_MEDIA_LOCATION`             | Media location inside the container ⚠️**You probably shouldn't set this**<sup>\*2</sup>⚠️ | `./upload`<sup>\*3</sup>     | server                   | api, microservices |
 | `IMMICH_CONFIG_FILE`                | Path to config file                                                                        |                              | server                   | api, microservices |
 | `NO_COLOR`                          | Set to `true` to disable color-coded log output                                            | `false`                      | server, machine learning |                    |
-| `CPU_CORES`                         | Amount of cores available to the immich server                                             | auto-detected cpu core count | server                   |                    |
+| `CPU_CORES`                         | Number of cores available to the Immich server                                             | auto-detected CPU core count | server                   |                    |
 | `IMMICH_API_METRICS_PORT`           | Port for the OTEL metrics                                                                  | `8081`                       | server                   | api                |
 | `IMMICH_MICROSERVICES_METRICS_PORT` | Port for the OTEL metrics                                                                  | `8082`                       | server                   | microservices      |
 | `IMMICH_PROCESS_INVALID_IMAGES`     | When `true`, generate thumbnails for invalid images                                        |                              | server                   | microservices      |
-| `IMMICH_TRUSTED_PROXIES`            | List of comma separated IPs set as trusted proxies                                         |                              | server                   | api                |
+| `IMMICH_TRUSTED_PROXIES`            | List of comma-separated IPs set as trusted proxies                                         |                              | server                   | api                |
 | `IMMICH_IGNORE_MOUNT_CHECK_ERRORS`  | See [System Integrity](/docs/administration/system-integrity)                              |                              | server                   | api, microservices |

 \*1: `TZ` should be set to a `TZ identifier` from [this list][tz-list]. For example, `TZ="Etc/UTC"`.
@@ -50,7 +50,7 @@ These environment variables are used by the `docker-compose.yml` file and do **N
 \*2: This path is where the Immich code looks for the files, which is internal to the docker container. Setting it to a path on your host will certainly break things, you should use the `UPLOAD_LOCATION` variable instead.

 \*3: With the default `WORKDIR` of `/usr/src/app`, this path will resolve to `/usr/src/app/upload`.
-It only need to be set if the Immich deployment method is changing.
+It only needs to be set if the Immich deployment method is changing.

 ## Workers

@@ -75,12 +75,12 @@ Information on the current workers can be found [here](/docs/administration/jobs
 | Variable                            | Description                                                               |   Default    | Containers                     |
 | :---------------------------------- | :------------------------------------------------------------------------ | :----------: | :----------------------------- |
 | `DB_URL`                            | Database URL                                                              |              | server                         |
-| `DB_HOSTNAME`                       | Database Host                                                             | `database`   | server                         |
-| `DB_PORT`                           | Database Port                                                             | `5432`       | server                         |
-| `DB_USERNAME`                       | Database User                                                             | `postgres`   | server, database<sup>\*1</sup> |
-| `DB_PASSWORD`                       | Database Password                                                         | `postgres`   | server, database<sup>\*1</sup> |
-| `DB_DATABASE_NAME`                  | Database Name                                                             | `immich`     | server, database<sup>\*1</sup> |
-| `DB_VECTOR_EXTENSION`<sup>\*2</sup> | Database Vector Extension (one of [`pgvector`, `pgvecto.rs`])             | `pgvecto.rs` | server                         |
+| `DB_HOSTNAME`                       | Database host                                                             | `database`   | server                         |
+| `DB_PORT`                           | Database port                                                             | `5432`       | server                         |
+| `DB_USERNAME`                       | Database user                                                             | `postgres`   | server, database<sup>\*1</sup> |
+| `DB_PASSWORD`                       | Database password                                                         | `postgres`   | server, database<sup>\*1</sup> |
+| `DB_DATABASE_NAME`                  | Database name                                                             | `immich`     | server, database<sup>\*1</sup> |
+| `DB_VECTOR_EXTENSION`<sup>\*2</sup> | Database vector extension (one of [`pgvector`, `pgvecto.rs`])             | `pgvecto.rs` | server                         |
 | `DB_SKIP_MIGRATIONS`                | Whether to skip running migrations on startup (one of [`true`, `false`])  | `false`      | server                         |

 \*1: The values of `DB_USERNAME`, `DB_PASSWORD`, and `DB_DATABASE_NAME` are passed to the Postgres container as the variables `POSTGRES_USER`, `POSTGRES_PASSWORD`, and `POSTGRES_DB` in `docker-compose.yml`.
@@ -103,18 +103,18 @@ When `DB_URL` is defined, the `DB_HOSTNAME`, `DB_PORT`, `DB_USERNAME`, `DB_PASSW
 | Variable         | Description    | Default | Containers |
 | :--------------- | :------------- | :-----: | :--------- |
 | `REDIS_URL`      | Redis URL      |         | server     |
-| `REDIS_SOCKET`   | Redis Socket   |         | server     |
-| `REDIS_HOSTNAME` | Redis Host     | `redis` | server     |
-| `REDIS_PORT`     | Redis Port     | `6379`  | server     |
-| `REDIS_USERNAME` | Redis Username |         | server     |
-| `REDIS_PASSWORD` | Redis Password |         | server     |
-| `REDIS_DBINDEX`  | Redis DB Index | `0`     | server     |
+| `REDIS_SOCKET`   | Redis socket   |         | server     |
+| `REDIS_HOSTNAME` | Redis host     | `redis` | server     |
+| `REDIS_PORT`     | Redis port     | `6379`  | server     |
+| `REDIS_USERNAME` | Redis username |         | server     |
+| `REDIS_PASSWORD` | Redis password |         | server     |
+| `REDIS_DBINDEX`  | Redis DB index | `0`     | server     |

 :::info
 All `REDIS_` variables must be provided to all Immich workers, including `api` and `microservices`.

 `REDIS_URL` must start with `ioredis://` and then include a `base64` encoded JSON string for the configuration.
-More info can be found in the upstream [ioredis] documentation.
+More information can be found in the upstream [ioredis] documentation.

 When `REDIS_URL` or `REDIS_SOCKET` are defined, the `REDIS_HOSTNAME`, `REDIS_PORT`, `REDIS_USERNAME`, `REDIS_PASSWORD`, and `REDIS_DBINDEX` variables are ignored.
 :::
```
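As a rough sketch of the `REDIS_URL` note above, such a URL can be produced by base64-encoding the JSON configuration (the sentinel fields below mirror the Sentinel example referenced in the next hunk; the exact keys ioredis accepts are documented upstream):

```ts
// Sketch: build an `ioredis://`-style REDIS_URL from a base64-encoded JSON config.
// The sentinel hosts/name are illustrative placeholders, not real infrastructure.
const config = {
  sentinels: [
    { host: 'redis-sentinel-1', port: 26379 },
    { host: 'redis-sentinel-2', port: 26379 },
  ],
  name: 'redis-sentinel',
};

const encoded = Buffer.from(JSON.stringify(config)).toString('base64');
console.log(`REDIS_URL=ioredis://${encoded}`);
```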
```diff
@@ -181,7 +181,11 @@ Redis (Sentinel) URL example JSON before encoding:

 :::info

-Other machine learning parameters can be tuned from the admin UI.
+While the `textual` model is the only one required for smart search, some users may experience slow first searches
+due to backups triggering loading of the other models into memory, which blocks other requests until completed.
+To avoid this, you can preload the other models (`visual`, `recognition`, and `detection`) if you have enough RAM to do so.
+
+Additional machine learning parameters can be tuned from the admin UI.

 :::
@@ -212,7 +216,7 @@ the `_FILE` variable should be set to the path of a file containing the variable
 details on how to use Docker Secrets in the Postgres image.

 \*2: See [this comment][docker-secrets-example] for an example of how
-to use use a Docker secret for the password in the Redis container.
+to use a Docker secret for the password in the Redis container.

 [tz-list]: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones#List
 [docker-secrets-example]: https://github.com/docker-library/redis/issues/46#issuecomment-335326234
```
**docs/package-lock.json** (generated, 12 changes)

```diff
@@ -14070,9 +14070,9 @@
     }
   },
   "node_modules/postcss": {
-    "version": "8.5.2",
-    "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.2.tgz",
-    "integrity": "sha512-MjOadfU3Ys9KYoX0AdkBlFEF1Vx37uCCeN4ZHnmwm9FfpbsGWMZeBLMmmpY+6Ocqod7mkdZ0DT31OlbsFrLlkA==",
+    "version": "8.5.3",
+    "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.3.tgz",
+    "integrity": "sha512-dle9A3yYxlBSrt8Fu+IpjGT8SY8hN0mlaA6GY8t0P5PjIOZemULz/E2Bnm/2dcUOena75OTNkHI76uZBNUUq3A==",
     "funding": [
       {
         "type": "opencollective",
@@ -15734,9 +15734,9 @@
     }
   },
   "node_modules/prettier": {
-    "version": "3.5.1",
-    "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.5.1.tgz",
-    "integrity": "sha512-hPpFQvHwL3Qv5AdRvBFMhnKo4tYxp0ReXiPn2bxkiohEX6mBeBwEpBSQTkD458RaaDKQMYSp4hX4UtfUTA5wDw==",
+    "version": "3.5.2",
+    "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.5.2.tgz",
+    "integrity": "sha512-lc6npv5PH7hVqozBR7lkBNOGXV9vMwROAPlumdBkX0wTbbzPu/U1hk5yL8p2pt4Xoc+2mkT8t/sow2YrV/M5qg==",
     "dev": true,
     "license": "MIT",
     "bin": {
```
**docs/static/archived-versions.json** (vendored, 4 changes)

```diff
@@ -1,4 +1,8 @@
 [
+  {
+    "label": "v1.128.0",
+    "url": "https://v1.128.0.archive.immich.app"
+  },
   {
     "label": "v1.127.0",
     "url": "https://v1.127.0.archive.immich.app"
```
```diff
@@ -37,7 +37,7 @@ services:
     image: redis:6.2-alpine@sha256:148bb5411c184abd288d9aaed139c98123eeb8824c5d3fce03cf721db58066d8

   database:
-    image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
+    image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
     command: -c fsync=off -c shared_preload_libraries=vectors.so
     environment:
       POSTGRES_PASSWORD: postgres
```
**e2e/package-lock.json** (generated, 749 changes) — diff suppressed because it is too large.
```diff
@@ -1,6 +1,6 @@
 {
   "name": "immich-e2e",
-  "version": "1.127.0",
+  "version": "1.128.0",
   "description": "",
   "main": "index.js",
   "type": "module",
@@ -25,7 +25,7 @@
     "@immich/sdk": "file:../open-api/typescript-sdk",
     "@playwright/test": "^1.44.1",
     "@types/luxon": "^3.4.2",
-    "@types/node": "^22.13.4",
+    "@types/node": "^22.13.5",
     "@types/oidc-provider": "^8.5.1",
     "@types/pg": "^8.11.0",
     "@types/pngjs": "^6.0.4",
@@ -38,7 +38,7 @@
     "eslint-plugin-prettier": "^5.1.3",
     "eslint-plugin-unicorn": "^56.0.1",
     "exiftool-vendored": "^28.3.1",
-    "globals": "^15.9.0",
+    "globals": "^16.0.0",
     "jose": "^5.6.3",
     "luxon": "^3.4.4",
     "oidc-provider": "^8.5.1",
```
```diff
@@ -4,7 +4,6 @@ import {
   AssetResponseDto,
   AssetTypeEnum,
   getAssetInfo,
-  getConfig,
   getMyUser,
   LoginResponseDto,
   SharedLinkType,
@@ -45,8 +44,6 @@ const locationAssetFilepath = `${testAssetDir}/metadata/gps-position/thompson-sp
 const ratingAssetFilepath = `${testAssetDir}/metadata/rating/mongolels.jpg`;
 const facesAssetFilepath = `${testAssetDir}/metadata/faces/portrait.jpg`;

-const getSystemConfig = (accessToken: string) => getConfig({ headers: asBearerAuth(accessToken) });
-
 const readTags = async (bytes: Buffer, filename: string) => {
   const filepath = join(tempDir, filename);
   await writeFile(filepath, bytes);
@@ -228,7 +225,7 @@ describe('/asset', () => {
   });

   it('should get the asset faces', async () => {
-    const config = await getSystemConfig(admin.accessToken);
+    const config = await utils.getSystemConfig(admin.accessToken);
     config.metadata.faces.import = true;
     await updateConfig({ systemConfigDto: config }, { headers: asBearerAuth(admin.accessToken) });
```
```diff
@@ -1,8 +1,9 @@
-import { JobCommand, JobName, LoginResponseDto } from '@immich/sdk';
+import { JobCommand, JobName, LoginResponseDto, updateConfig } from '@immich/sdk';
 import { cpSync, rmSync } from 'node:fs';
 import { readFile } from 'node:fs/promises';
 import { basename } from 'node:path';
 import { errorDto } from 'src/responses';
-import { app, testAssetDir, utils } from 'src/utils';
+import { app, asBearerAuth, testAssetDir, utils } from 'src/utils';
 import request from 'supertest';
 import { afterEach, beforeAll, describe, expect, it } from 'vitest';

@@ -20,6 +21,33 @@ describe('/jobs', () => {
       command: JobCommand.Resume,
       force: false,
     });
+
+    await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
+      command: JobCommand.Resume,
+      force: false,
+    });
+
+    await utils.jobCommand(admin.accessToken, JobName.FaceDetection, {
+      command: JobCommand.Resume,
+      force: false,
+    });
+
+    await utils.jobCommand(admin.accessToken, JobName.SmartSearch, {
+      command: JobCommand.Resume,
+      force: false,
+    });
+
+    await utils.jobCommand(admin.accessToken, JobName.DuplicateDetection, {
+      command: JobCommand.Resume,
+      force: false,
+    });
+
+    const config = await utils.getSystemConfig(admin.accessToken);
+    config.machineLearning.duplicateDetection.enabled = false;
+    config.machineLearning.enabled = false;
+    config.metadata.faces.import = false;
+    config.machineLearning.clip.enabled = false;
+    await updateConfig({ systemConfigDto: config }, { headers: asBearerAuth(admin.accessToken) });
   });

   it('should require authentication', async () => {
@@ -29,14 +57,7 @@ describe('/jobs', () => {
   });

   it('should queue metadata extraction for missing assets', async () => {
-    const path1 = `${testAssetDir}/formats/raw/Nikon/D700/philadelphia.nef`;
-    const path2 = `${testAssetDir}/formats/raw/Nikon/D80/glarus.nef`;
-
-    await utils.createAsset(admin.accessToken, {
-      assetData: { bytes: await readFile(path1), filename: basename(path1) },
-    });
-
-    await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction');
+    const path = `${testAssetDir}/formats/raw/Nikon/D700/philadelphia.nef`;

     await utils.jobCommand(admin.accessToken, JobName.MetadataExtraction, {
       command: JobCommand.Pause,
@@ -44,7 +65,7 @@ describe('/jobs', () => {
     });

     const { id } = await utils.createAsset(admin.accessToken, {
-      assetData: { bytes: await readFile(path2), filename: basename(path2) },
+      assetData: { bytes: await readFile(path), filename: basename(path) },
     });

     await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction');
@@ -82,5 +103,123 @@ describe('/jobs', () => {
       expect(asset.exifInfo?.make).toBe('NIKON CORPORATION');
     }
   });
+
+  it('should not re-extract metadata for existing assets', async () => {
+    const path = `${testAssetDir}/temp/metadata/asset.jpg`;
+
+    cpSync(`${testAssetDir}/formats/raw/Nikon/D700/philadelphia.nef`, path);
+
+    const { id } = await utils.createAsset(admin.accessToken, {
+      assetData: { bytes: await readFile(path), filename: basename(path) },
+    });
+
+    await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction');
+
+    {
+      const asset = await utils.getAssetInfo(admin.accessToken, id);
+
+      expect(asset.exifInfo).toBeDefined();
+      expect(asset.exifInfo?.model).toBe('NIKON D700');
+    }
+
+    cpSync(`${testAssetDir}/formats/raw/Nikon/D80/glarus.nef`, path);
+
+    await utils.jobCommand(admin.accessToken, JobName.MetadataExtraction, {
+      command: JobCommand.Start,
+      force: false,
+    });
+
+    await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction');
+
+    {
+      const asset = await utils.getAssetInfo(admin.accessToken, id);
+
+      expect(asset.exifInfo).toBeDefined();
+      expect(asset.exifInfo?.model).toBe('NIKON D700');
+    }
+
+    rmSync(path);
+  });
+
+  it('should queue thumbnail extraction for assets missing thumbs', async () => {
+    const path = `${testAssetDir}/albums/nature/tanners_ridge.jpg`;
+
+    await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
+      command: JobCommand.Pause,
+      force: false,
+    });
+
+    const { id } = await utils.createAsset(admin.accessToken, {
+      assetData: { bytes: await readFile(path), filename: basename(path) },
+    });
+
+    await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction);
+    await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration);
+
+    const assetBefore = await utils.getAssetInfo(admin.accessToken, id);
+    expect(assetBefore.thumbhash).toBeNull();
+
+    await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
+      command: JobCommand.Empty,
+      force: false,
+    });
+
+    await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction);
+    await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration);
+
+    await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
+      command: JobCommand.Resume,
+      force: false,
+    });
+
+    await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
+      command: JobCommand.Start,
+      force: false,
+    });
+
+    await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction);
+    await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration);
+
+    const assetAfter = await utils.getAssetInfo(admin.accessToken, id);
+    expect(assetAfter.thumbhash).not.toBeNull();
+  });
+
+  it('should not reload existing thumbnail when running thumb job for missing assets', async () => {
+    const path = `${testAssetDir}/temp/thumbs/asset1.jpg`;
+
+    cpSync(`${testAssetDir}/albums/nature/tanners_ridge.jpg`, path);
+
+    const { id } = await utils.createAsset(admin.accessToken, {
+      assetData: { bytes: await readFile(path), filename: basename(path) },
+    });
+
+    await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction);
+    await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration);
+
+    const assetBefore = await utils.getAssetInfo(admin.accessToken, id);
+
+    cpSync(`${testAssetDir}/albums/nature/notocactus_minimus.jpg`, path);
+
+    await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
+      command: JobCommand.Resume,
+      force: false,
+    });
+
+    // This runs the missing thumbnail job
+    await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
+      command: JobCommand.Start,
+      force: false,
+    });
+
+    await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction);
+    await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration);
+
+    const assetAfter = await utils.getAssetInfo(admin.accessToken, id);
+
+    // Asset 1 thumbnail should be untouched since its thumb should not have been reloaded, even though the file was changed
+    expect(assetAfter.thumbhash).toEqual(assetBefore.thumbhash);
+
+    rmSync(path);
+  });
 });
```
```diff
@@ -1,4 +1,4 @@
-import { LoginResponseDto, getAssetInfo, getAssetStatistics, scanLibrary } from '@immich/sdk';
+import { LoginResponseDto, getAssetInfo, getAssetStatistics } from '@immich/sdk';
 import { existsSync } from 'node:fs';
 import { Socket } from 'socket.io-client';
 import { errorDto } from 'src/responses';
@@ -6,8 +6,6 @@ import { app, asBearerAuth, testAssetDir, testAssetDirInternal, utils } from 'sr
 import request from 'supertest';
 import { afterAll, beforeAll, describe, expect, it } from 'vitest';

-const scan = async (accessToken: string, id: string) => scanLibrary({ id }, { headers: asBearerAuth(accessToken) });
-
 describe('/trash', () => {
   let admin: LoginResponseDto;
   let ws: Socket;
@@ -81,8 +79,7 @@ describe('/trash', () => {

     utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`);

-    await scan(admin.accessToken, library.id);
-    await utils.waitForQueueFinish(admin.accessToken, 'library');
+    await utils.scan(admin.accessToken, library.id);

     const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id });
     expect(assets.items.length).toBe(1);
@@ -90,8 +87,7 @@ describe('/trash', () => {

     await utils.updateLibrary(admin.accessToken, library.id, { exclusionPatterns: ['**/offline/**'] });

-    await scan(admin.accessToken, library.id);
-    await utils.waitForQueueFinish(admin.accessToken, 'library');
+    await utils.scan(admin.accessToken, library.id);

     const assetBefore = await utils.getAssetInfo(admin.accessToken, asset.id);
     expect(assetBefore).toMatchObject({ isTrashed: true, isOffline: true });
@@ -116,8 +112,7 @@ describe('/trash', () => {

     utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`);

-    await scan(admin.accessToken, library.id);
-    await utils.waitForQueueFinish(admin.accessToken, 'library');
+    await utils.scan(admin.accessToken, library.id);

     const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id });
     expect(assets.items.length).toBe(1);
@@ -125,8 +120,7 @@ describe('/trash', () => {

     await utils.updateLibrary(admin.accessToken, library.id, { exclusionPatterns: ['**/offline/**'] });

-    await scan(admin.accessToken, library.id);
-    await utils.waitForQueueFinish(admin.accessToken, 'library');
+    await utils.scan(admin.accessToken, library.id);

     const assetBefore = await utils.getAssetInfo(admin.accessToken, asset.id);
     expect(assetBefore).toMatchObject({ isTrashed: true, isOffline: true });
@@ -180,8 +174,7 @@ describe('/trash', () => {

     utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`);

-    await scan(admin.accessToken, library.id);
-    await utils.waitForQueueFinish(admin.accessToken, 'library');
+    await utils.scan(admin.accessToken, library.id);

     const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id });
     expect(assets.count).toBe(1);
@@ -189,9 +182,7 @@ describe('/trash', () => {

     await utils.updateLibrary(admin.accessToken, library.id, { exclusionPatterns: ['**/offline/**'] });

-    await scan(admin.accessToken, library.id);
-
-    await utils.waitForQueueFinish(admin.accessToken, 'library');
+    await utils.scan(admin.accessToken, library.id);

     const before = await getAssetInfo({ id: assetId }, { headers: asBearerAuth(admin.accessToken) });
     expect(before).toStrictEqual(expect.objectContaining({ id: assetId, isOffline: true }));
@@ -201,6 +192,8 @@ describe('/trash', () => {

     const after = await getAssetInfo({ id: assetId }, { headers: asBearerAuth(admin.accessToken) });
     expect(after).toStrictEqual(expect.objectContaining({ id: assetId, isOffline: true }));
+
+    utils.removeImageFile(`${testAssetDir}/temp/offline/offline.png`);
   });
 });
@@ -238,7 +231,7 @@ describe('/trash', () => {

     utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`);

-    await scan(admin.accessToken, library.id);
+    await utils.scan(admin.accessToken, library.id);
     await utils.waitForQueueFinish(admin.accessToken, 'library');

     const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id });
@@ -247,7 +240,7 @@ describe('/trash', () => {

     await utils.updateLibrary(admin.accessToken, library.id, { exclusionPatterns: ['**/offline/**'] });

-    await scan(admin.accessToken, library.id);
+    await utils.scan(admin.accessToken, library.id);
     await utils.waitForQueueFinish(admin.accessToken, 'library');

     const before = await utils.getAssetInfo(admin.accessToken, assetId);
@@ -261,6 +254,8 @@ describe('/trash', () => {

     const after = await utils.getAssetInfo(admin.accessToken, assetId);
     expect(after.isTrashed).toBe(true);
+
+    utils.removeImageFile(`${testAssetDir}/temp/offline/offline.png`);
   });
 });
```
@@ -28,6 +28,7 @@ import {
deleteAssets,
getAllJobsStatus,
getAssetInfo,
getConfig,
getConfigDefaults,
login,
scanLibrary,
@@ -121,6 +122,7 @@ const execPromise = promisify(exec);
const onEvent = ({ event, id }: { event: EventType; id: string }) => {
// console.log(`Received event: ${event} [id=${id}]`);
const set = events[event];

set.add(id);

const idCallback = idCallbacks[id];
@@ -415,6 +417,8 @@ export const utils = {
rmSync(path, { recursive: true });
},

getSystemConfig: (accessToken: string) => getConfig({ headers: asBearerAuth(accessToken) }),

getAssetInfo: (accessToken: string, id: string) => getAssetInfo({ id }, { headers: asBearerAuth(accessToken) }),

checkExistingAssets: (accessToken: string, checkExistingAssetsDto: CheckExistingAssetsDto) =>
@@ -96,7 +96,7 @@
"library_scanning_enable_description": "Enable periodic library scanning",
"library_settings": "External Library",
"library_settings_description": "Manage external library settings",
"library_tasks_description": "Perform library tasks",
"library_tasks_description": "Scan external libraries for new and/or changed assets",
"library_watching_enable_description": "Watch external libraries for file changes",
"library_watching_settings": "Library watching (EXPERIMENTAL)",
"library_watching_settings_description": "Automatically watch for changed files",
@@ -336,6 +336,7 @@
"untracked_files": "Untracked Files",
"untracked_files_description": "These files are not tracked by the application. They can be the results of failed moves, interrupted uploads, or left behind due to a bug",
"user_cleanup_job": "User cleanup",
"cleanup": "Cleanup",
"user_delete_delay": "<b>{user}</b>'s account and assets will be scheduled for permanent deletion in {delay, plural, one {# day} other {# days}}.",
"user_delete_delay_settings": "Delete delay",
"user_delete_delay_settings_description": "Number of days after removal to permanently delete a user's account and assets. The user deletion job runs at midnight to check for users that are ready for deletion. Changes to this setting will be evaluated at the next execution.",
@@ -393,6 +394,7 @@
"allow_edits": "Allow edits",
"allow_public_user_to_download": "Allow public user to download",
"allow_public_user_to_upload": "Allow public user to upload",
"alt_text_qr_code": "QR code image",
"anti_clockwise": "Anti-clockwise",
"api_key": "API Key",
"api_key_description": "This value will only be shown once. Please be sure to copy it before closing the window.",
@@ -889,6 +891,7 @@
"month": "Month",
"more": "More",
"moved_to_trash": "Moved to trash",
"mute_memories": "Mute Memories",
"my_albums": "My albums",
"name": "Name",
"name_or_nickname": "Name or nickname",
@@ -1114,6 +1117,7 @@
"say_something": "Say something",
"scan_all_libraries": "Scan All Libraries",
"scan_library": "Scan",
"rescan": "Rescan",
"scan_settings": "Scan Settings",
"scanning_for_album": "Scanning for album...",
"search": "Search",
@@ -1302,6 +1306,7 @@
"unnamed_album": "Unnamed Album",
"unnamed_album_delete_confirmation": "Are you sure you want to delete this album?",
"unnamed_share": "Unnamed Share",
"unmute_memories": "Unmute Memories",
"unsaved_change": "Unsaved change",
"unselect_all": "Unselect all",
"unselect_all_duplicates": "Unselect all duplicates",
@@ -1352,6 +1357,7 @@
"view_all": "View All",
"view_all_users": "View all users",
"view_in_timeline": "View in timeline",
"view_link": "View link",
"view_links": "View links",
"view_name": "View",
"view_next_asset": "View next asset",
@@ -1,6 +1,6 @@
ARG DEVICE=cpu

FROM python:3.11-bookworm@sha256:14b4620f59a90f163dfa6bd252b68743f9a41d494a9fde935f9d7669d98094bb AS builder-cpu
FROM python:3.11-bookworm@sha256:68a8863d0625f42d47e0684f33ca02f19d6094ef859a8af237aaf645195ed477 AS builder-cpu

FROM builder-cpu AS builder-openvino

@@ -34,7 +34,7 @@ RUN python3 -m venv /opt/venv
COPY poetry.lock pyproject.toml ./
RUN poetry install --sync --no-interaction --no-ansi --no-root --with ${DEVICE} --without dev

FROM python:3.11-slim-bookworm@sha256:42420f737ba91d509fc60d5ed65ed0492678a90c561e1fa08786ae8ba8b52eda AS prod-cpu
FROM python:3.11-slim-bookworm@sha256:614c8691ab74150465ec9123378cd4dde7a6e57be9e558c3108df40664667a4c AS prod-cpu

FROM prod-cpu AS prod-openvino
@@ -20,9 +20,8 @@ class FaceRecognizer(InferenceModel):
depends = [(ModelType.DETECTION, ModelTask.FACIAL_RECOGNITION)]
identity = (ModelType.RECOGNITION, ModelTask.FACIAL_RECOGNITION)

def __init__(self, model_name: str, min_score: float = 0.7, **model_kwargs: Any) -> None:
def __init__(self, model_name: str, **model_kwargs: Any) -> None:
super().__init__(model_name, **model_kwargs)
self.min_score = model_kwargs.pop("minScore", min_score)
max_batch_size = settings.max_batch_size.facial_recognition if settings.max_batch_size else None
self.batch_size = max_batch_size if max_batch_size else self._batch_size_default
@@ -324,7 +324,7 @@ class TestAnnSession:
session.run(None, input_feed)

ann_session.return_value.execute.assert_called_once_with(123, [input1, input2])
np_spy.call_count == 2
assert np_spy.call_count == 2
np_spy.assert_has_calls([mock.call(input1), mock.call(input2)])

@@ -457,11 +457,14 @@ class TestCLIP:


class TestFaceRecognition:
def test_set_min_score(self, mocker: MockerFixture) -> None:
mocker.patch.object(FaceRecognizer, "load")
face_recognizer = FaceRecognizer("buffalo_s", cache_dir="test_cache", min_score=0.5)
def test_set_min_score(self, snapshot_download: mock.Mock, ort_session: mock.Mock, path: mock.Mock) -> None:
path.return_value.__truediv__.return_value.__truediv__.return_value.suffix = ".onnx"

assert face_recognizer.min_score == 0.5
face_detector = FaceDetector("buffalo_s", min_score=0.5, cache_dir="test_cache")
face_detector.load()

assert face_detector.min_score == 0.5
assert face_detector.model.det_thresh == 0.5

def test_detection(self, cv_image: cv2.Mat, mocker: MockerFixture) -> None:
mocker.patch.object(FaceDetector, "load")
@@ -14,12 +14,6 @@ byte_image = BytesIO()
def _(parser: ArgumentParser) -> None:
parser.add_argument("--clip-model", type=str, default="ViT-B-32::openai")
parser.add_argument("--face-model", type=str, default="buffalo_l")
parser.add_argument(
"--tag-min-score",
type=int,
default=0.0,
help="Returns all tags at or above this score. The default returns all tags.",
)
parser.add_argument(
"--face-min-score",
type=int,
@@ -74,10 +68,10 @@ class RecognitionFormDataLoadTest(InferenceLoadTest):
"facial-recognition": {
"recognition": {
"modelName": self.environment.parsed_options.face_model,
"options": {"minScore": self.environment.parsed_options.face_min_score},
},
"detection": {
"modelName": self.environment.parsed_options.face_model,
"options": {"minScore": self.environment.parsed_options.face_min_score},
},
}
}
machine-learning/poetry.lock: 120 changed lines (generated)
@@ -75,33 +75,33 @@ trio = ["trio (>=0.23)"]

[[package]]
name = "black"
version = "24.10.0"
version = "25.1.0"
description = "The uncompromising code formatter."
optional = false
python-versions = ">=3.9"
files = [
{file = "black-24.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6668650ea4b685440857138e5fe40cde4d652633b1bdffc62933d0db4ed9812"},
{file = "black-24.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1c536fcf674217e87b8cc3657b81809d3c085d7bf3ef262ead700da345bfa6ea"},
{file = "black-24.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:649fff99a20bd06c6f727d2a27f401331dc0cc861fb69cde910fe95b01b5928f"},
{file = "black-24.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe4d6476887de70546212c99ac9bd803d90b42fc4767f058a0baa895013fbb3e"},
{file = "black-24.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a2221696a8224e335c28816a9d331a6c2ae15a2ee34ec857dcf3e45dbfa99ad"},
{file = "black-24.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9da3333530dbcecc1be13e69c250ed8dfa67f43c4005fb537bb426e19200d50"},
{file = "black-24.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4007b1393d902b48b36958a216c20c4482f601569d19ed1df294a496eb366392"},
{file = "black-24.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:394d4ddc64782e51153eadcaaca95144ac4c35e27ef9b0a42e121ae7e57a9175"},
{file = "black-24.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e39e0fae001df40f95bd8cc36b9165c5e2ea88900167bddf258bacef9bbdc3"},
{file = "black-24.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d37d422772111794b26757c5b55a3eade028aa3fde43121ab7b673d050949d65"},
{file = "black-24.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14b3502784f09ce2443830e3133dacf2c0110d45191ed470ecb04d0f5f6fcb0f"},
{file = "black-24.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:30d2c30dc5139211dda799758559d1b049f7f14c580c409d6ad925b74a4208a8"},
{file = "black-24.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cbacacb19e922a1d75ef2b6ccaefcd6e93a2c05ede32f06a21386a04cedb981"},
{file = "black-24.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1f93102e0c5bb3907451063e08b9876dbeac810e7da5a8bfb7aeb5a9ef89066b"},
{file = "black-24.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ddacb691cdcdf77b96f549cf9591701d8db36b2f19519373d60d31746068dbf2"},
{file = "black-24.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:680359d932801c76d2e9c9068d05c6b107f2584b2a5b88831c83962eb9984c1b"},
{file = "black-24.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:17374989640fbca88b6a448129cd1745c5eb8d9547b464f281b251dd00155ccd"},
{file = "black-24.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:63f626344343083322233f175aaf372d326de8436f5928c042639a4afbbf1d3f"},
{file = "black-24.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfa1d0cb6200857f1923b602f978386a3a2758a65b52e0950299ea014be6800"},
{file = "black-24.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:2cd9c95431d94adc56600710f8813ee27eea544dd118d45896bb734e9d7a0dc7"},
{file = "black-24.10.0-py3-none-any.whl", hash = "sha256:3bb2b7a1f7b685f85b11fed1ef10f8a9148bceb49853e47a294a3dd963c1dd7d"},
{file = "black-24.10.0.tar.gz", hash = "sha256:846ea64c97afe3bc677b761787993be4991810ecc7a4a937816dd6bddedc4875"},
{file = "black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32"},
{file = "black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da"},
{file = "black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7"},
{file = "black-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9"},
{file = "black-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0"},
{file = "black-25.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299"},
{file = "black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096"},
{file = "black-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2"},
{file = "black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b"},
{file = "black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc"},
{file = "black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f"},
{file = "black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba"},
{file = "black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f"},
{file = "black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3"},
{file = "black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171"},
{file = "black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18"},
{file = "black-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1ee0a0c330f7b5130ce0caed9936a904793576ef4d2b98c40835d6a65afa6a0"},
{file = "black-25.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3df5f1bf91d36002b0a75389ca8663510cf0531cca8aa5c1ef695b46d98655f"},
{file = "black-25.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9e6827d563a2c820772b32ce8a42828dc6790f095f441beef18f96aa6f8294e"},
{file = "black-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:bacabb307dca5ebaf9c118d2d2f6903da0d62c9faa82bd21a33eecc319559355"},
{file = "black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717"},
{file = "black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666"},
]

[package.dependencies]
@@ -1331,13 +1331,13 @@ zstd = ["zstandard (>=0.18.0)"]

[[package]]
name = "huggingface-hub"
version = "0.28.1"
version = "0.29.1"
description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub"
optional = false
python-versions = ">=3.8.0"
files = [
{file = "huggingface_hub-0.28.1-py3-none-any.whl", hash = "sha256:aa6b9a3ffdae939b72c464dbb0d7f99f56e649b55c3d52406f49e0a5a620c0a7"},
{file = "huggingface_hub-0.28.1.tar.gz", hash = "sha256:893471090c98e3b6efbdfdacafe4052b20b84d59866fb6f54c33d9af18c303ae"},
{file = "huggingface_hub-0.29.1-py3-none-any.whl", hash = "sha256:352f69caf16566c7b6de84b54a822f6238e17ddd8ae3da4f8f2272aea5b198d5"},
{file = "huggingface_hub-0.29.1.tar.gz", hash = "sha256:9524eae42077b8ff4fc459ceb7a514eca1c1232b775276b009709fe2a084f250"},
]

[package.dependencies]
@@ -1625,23 +1625,23 @@ test = ["pytest (>=7.4)", "pytest-cov (>=4.1)"]

[[package]]
name = "locust"
version = "2.32.9"
version = "2.33.0"
description = "Developer-friendly load testing framework"
optional = false
python-versions = ">=3.9"
files = [
{file = "locust-2.32.9-py3-none-any.whl", hash = "sha256:d9447c26d2bbaec5a0ace7cadefa1a31820ed392234257b309965a43d5e8d26f"},
{file = "locust-2.32.9.tar.gz", hash = "sha256:4c297afa5cdc3de15dfa79279576e5f33c1d69dd70006b51d079dcbd212201cc"},
{file = "locust-2.33.0-py3-none-any.whl", hash = "sha256:77fcc5cc35cceee5e12d99f5bb23bc441d145bdef6967c2e93d6e4d93451553e"},
{file = "locust-2.33.0.tar.gz", hash = "sha256:ba291b7ab2349cc2db540adb8888bc93feb89ea4e4e10d80b935e5065091e8e9"},
]

[package.dependencies]
ConfigArgParse = ">=1.5.5"
configargparse = ">=1.5.5"
flask = ">=2.0.0"
Flask-Cors = ">=3.0.10"
Flask-Login = ">=0.6.3"
flask-cors = ">=3.0.10"
flask-login = ">=0.6.3"
gevent = [
{version = ">=22.10.2", markers = "python_full_version <= \"3.12.0\""},
{version = ">=24.10.1", markers = "python_full_version > \"3.13.0\""},
{version = ">=22.10.2", markers = "python_version <= \"3.12\""},
{version = ">=24.10.1", markers = "python_version > \"3.13\""},
]
geventhttpclient = ">=2.3.1"
msgpack = ">=1.0.0"
@@ -1649,13 +1649,13 @@ psutil = ">=5.9.1"
pywin32 = {version = "*", markers = "sys_platform == \"win32\""}
pyzmq = ">=25.0.0"
requests = [
{version = ">=2.26.0", markers = "python_full_version <= \"3.11.0\""},
{version = ">=2.32.2", markers = "python_full_version > \"3.11.0\""},
{version = ">=2.26.0", markers = "python_version <= \"3.11\""},
{version = ">=2.32.2", markers = "python_version > \"3.11\""},
]
setuptools = ">=70.0.0"
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
typing_extensions = {version = ">=4.6.0", markers = "python_version < \"3.11\""}
Werkzeug = ">=2.0.0"
typing-extensions = {version = ">=4.6.0", markers = "python_version < \"3.11\""}
werkzeug = ">=2.0.0"

[[package]]
name = "markdown-it-py"
@@ -2628,13 +2628,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"

[[package]]
name = "pydantic-settings"
version = "2.7.1"
version = "2.8.1"
description = "Settings management using Pydantic"
optional = false
python-versions = ">=3.8"
files = [
{file = "pydantic_settings-2.7.1-py3-none-any.whl", hash = "sha256:590be9e6e24d06db33a4262829edef682500ef008565a969c73d39d5f8bfb3fd"},
{file = "pydantic_settings-2.7.1.tar.gz", hash = "sha256:10c9caad35e64bfb3c2fbf70a078c0e25cc92499782e5200747f942a065dec93"},
{file = "pydantic_settings-2.8.1-py3-none-any.whl", hash = "sha256:81942d5ac3d905f7f3ee1a70df5dfb62d5569c12f51a5a647defc1c3d9ee2e9c"},
{file = "pydantic_settings-2.8.1.tar.gz", hash = "sha256:d5c663dfbe9db9d5e1c646b2e161da12f0d734d422ee56f567d0ea2cee4e8585"},
]

[package.dependencies]
@@ -3047,29 +3047,29 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"]

[[package]]
name = "ruff"
version = "0.9.6"
version = "0.9.9"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
{file = "ruff-0.9.6-py3-none-linux_armv6l.whl", hash = "sha256:2f218f356dd2d995839f1941322ff021c72a492c470f0b26a34f844c29cdf5ba"},
{file = "ruff-0.9.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b908ff4df65dad7b251c9968a2e4560836d8f5487c2f0cc238321ed951ea0504"},
{file = "ruff-0.9.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:b109c0ad2ececf42e75fa99dc4043ff72a357436bb171900714a9ea581ddef83"},
{file = "ruff-0.9.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1de4367cca3dac99bcbd15c161404e849bb0bfd543664db39232648dc00112dc"},
{file = "ruff-0.9.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac3ee4d7c2c92ddfdaedf0bf31b2b176fa7aa8950efc454628d477394d35638b"},
{file = "ruff-0.9.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5dc1edd1775270e6aa2386119aea692039781429f0be1e0949ea5884e011aa8e"},
{file = "ruff-0.9.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:4a091729086dffa4bd070aa5dab7e39cc6b9d62eb2bef8f3d91172d30d599666"},
{file = "ruff-0.9.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1bbc6808bf7b15796cef0815e1dfb796fbd383e7dbd4334709642649625e7c5"},
{file = "ruff-0.9.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:589d1d9f25b5754ff230dce914a174a7c951a85a4e9270613a2b74231fdac2f5"},
{file = "ruff-0.9.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc61dd5131742e21103fbbdcad683a8813be0e3c204472d520d9a5021ca8b217"},
{file = "ruff-0.9.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5e2d9126161d0357e5c8f30b0bd6168d2c3872372f14481136d13de9937f79b6"},
{file = "ruff-0.9.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:68660eab1a8e65babb5229a1f97b46e3120923757a68b5413d8561f8a85d4897"},
{file = "ruff-0.9.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c4cae6c4cc7b9b4017c71114115db0445b00a16de3bcde0946273e8392856f08"},
{file = "ruff-0.9.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:19f505b643228b417c1111a2a536424ddde0db4ef9023b9e04a46ed8a1cb4656"},
{file = "ruff-0.9.6-py3-none-win32.whl", hash = "sha256:194d8402bceef1b31164909540a597e0d913c0e4952015a5b40e28c146121b5d"},
{file = "ruff-0.9.6-py3-none-win_amd64.whl", hash = "sha256:03482d5c09d90d4ee3f40d97578423698ad895c87314c4de39ed2af945633caa"},
{file = "ruff-0.9.6-py3-none-win_arm64.whl", hash = "sha256:0e2bb706a2be7ddfea4a4af918562fdc1bcb16df255e5fa595bbd800ce322a5a"},
{file = "ruff-0.9.6.tar.gz", hash = "sha256:81761592f72b620ec8fa1068a6fd00e98a5ebee342a3642efd84454f3031dca9"},
{file = "ruff-0.9.9-py3-none-linux_armv6l.whl", hash = "sha256:628abb5ea10345e53dff55b167595a159d3e174d6720bf19761f5e467e68d367"},
{file = "ruff-0.9.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b6cd1428e834b35d7493354723543b28cc11dc14d1ce19b685f6e68e07c05ec7"},
{file = "ruff-0.9.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5ee162652869120ad260670706f3cd36cd3f32b0c651f02b6da142652c54941d"},
{file = "ruff-0.9.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3aa0f6b75082c9be1ec5a1db78c6d4b02e2375c3068438241dc19c7c306cc61a"},
{file = "ruff-0.9.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:584cc66e89fb5f80f84b05133dd677a17cdd86901d6479712c96597a3f28e7fe"},
{file = "ruff-0.9.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abf3369325761a35aba75cd5c55ba1b5eb17d772f12ab168fbfac54be85cf18c"},
{file = "ruff-0.9.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:3403a53a32a90ce929aa2f758542aca9234befa133e29f4933dcef28a24317be"},
{file = "ruff-0.9.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:18454e7fa4e4d72cffe28a37cf6a73cb2594f81ec9f4eca31a0aaa9ccdfb1590"},
{file = "ruff-0.9.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fadfe2c88724c9617339f62319ed40dcdadadf2888d5afb88bf3adee7b35bfb"},
{file = "ruff-0.9.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6df104d08c442a1aabcfd254279b8cc1e2cbf41a605aa3e26610ba1ec4acf0b0"},
{file = "ruff-0.9.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d7c62939daf5b2a15af48abbd23bea1efdd38c312d6e7c4cedf5a24e03207e17"},
{file = "ruff-0.9.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:9494ba82a37a4b81b6a798076e4a3251c13243fc37967e998efe4cce58c8a8d1"},
{file = "ruff-0.9.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:4efd7a96ed6d36ef011ae798bf794c5501a514be369296c672dab7921087fa57"},
{file = "ruff-0.9.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:ab90a7944c5a1296f3ecb08d1cbf8c2da34c7e68114b1271a431a3ad30cb660e"},
{file = "ruff-0.9.9-py3-none-win32.whl", hash = "sha256:6b4c376d929c25ecd6d87e182a230fa4377b8e5125a4ff52d506ee8c087153c1"},
{file = "ruff-0.9.9-py3-none-win_amd64.whl", hash = "sha256:837982ea24091d4c1700ddb2f63b7070e5baec508e43b01de013dc7eff974ff1"},
{file = "ruff-0.9.9-py3-none-win_arm64.whl", hash = "sha256:3ac78f127517209fe6d96ab00f3ba97cafe38718b23b1db3e96d8b2d39e37ddf"},
{file = "ruff-0.9.9.tar.gz", hash = "sha256:0062ed13f22173e85f8f7056f9a24016e692efeea8704d1a5e8011b8aa850933"},
]

[[package]]
@@ -1,6 +1,6 @@
[tool.poetry]
name = "machine-learning"
version = "1.127.0"
version = "1.128.0"
description = ""
authors = ["Hau Tran <alex.tran1502@gmail.com>"]
readme = "README.md"
@@ -35,8 +35,8 @@ platform :android do
task: 'bundle',
build_type: 'Release',
properties: {
"android.injected.version.code" => 185,
"android.injected.version.name" => "1.127.0",
"android.injected.version.code" => 186,
"android.injected.version.name" => "1.128.0",
}
)
upload_to_play_store(skip_upload_apk: true, skip_upload_images: true, skip_upload_screenshots: true, aab: '../build/app/outputs/bundle/release/app-release.aab')
@@ -108,7 +108,7 @@
"backup_info_card_assets": "elements",
"backup_manual_cancelled": "Cancelled",
"backup_manual_failed": "Failed",
"backup_manual_in_progress": "Upload already in progress. Try after sometime",
"backup_manual_in_progress": "Upload already in progress. Try after some time",
"backup_manual_success": "Success",
"backup_manual_title": "Upload status",
"cache_settings_album_thumbnails": "Library page thumbnails ({} assets)",

@@ -133,7 +133,7 @@
"backup_info_card_assets": "assets",
"backup_manual_cancelled": "Cancelled",
"backup_manual_failed": "Failed",
"backup_manual_in_progress": "Upload already in progress. Try after sometime",
"backup_manual_in_progress": "Upload already in progress. Try after some time",
"backup_manual_success": "Success",
"backup_manual_title": "Upload status",
"backup_options_page_title": "Backup options",
@@ -678,4 +678,4 @@
"viewer_unstack": "Un-Stack",
"wifi_name": "WiFi Name",
"your_wifi_name": "Your WiFi name"
}
}

@@ -133,7 +133,7 @@
"backup_info_card_assets": "assets",
"backup_manual_cancelled": "Cancelled",
"backup_manual_failed": "Failed",
"backup_manual_in_progress": "Upload already in progress. Try after sometime",
"backup_manual_in_progress": "Upload already in progress. Try after some time",
"backup_manual_success": "Success",
"backup_manual_title": "Upload status",
"backup_options_page_title": "Backup options",
@@ -678,4 +678,4 @@
"viewer_unstack": "Un-Stack",
"wifi_name": "WiFi Name",
"your_wifi_name": "Your WiFi name"
}
}

@@ -133,7 +133,7 @@
"backup_info_card_assets": "assets",
"backup_manual_cancelled": "Cancelled",
"backup_manual_failed": "Failed",
"backup_manual_in_progress": "Upload already in progress. Try after sometime",
"backup_manual_in_progress": "Upload already in progress. Try after some time",
"backup_manual_success": "Success",
"backup_manual_title": "Upload status",
"backup_options_page_title": "Backup options",

@@ -133,7 +133,7 @@
"backup_info_card_assets": "assets",
"backup_manual_cancelled": "Cancelled",
"backup_manual_failed": "Failed",
"backup_manual_in_progress": "Upload already in progress. Try after sometime",
"backup_manual_in_progress": "Upload already in progress. Try after some time",
"backup_manual_success": "Success",
"backup_manual_title": "Upload status",
"backup_options_page_title": "Backup options",
@@ -678,4 +678,4 @@
"viewer_unstack": "स्टैक रद्द करें",
"wifi_name": "WiFi Name",
"your_wifi_name": "Your WiFi name"
}
}

@@ -133,7 +133,7 @@
"backup_info_card_assets": "assets",
"backup_manual_cancelled": "Cancelled",
"backup_manual_failed": "Failed",
"backup_manual_in_progress": "Upload already in progress. Try after sometime",
"backup_manual_in_progress": "Upload already in progress. Try after some time",
"backup_manual_success": "Success",
"backup_manual_title": "Upload status",
"backup_options_page_title": "Backup options",
@@ -678,4 +678,4 @@
"viewer_unstack": "Un-Stack",
"wifi_name": "WiFi Name",
"your_wifi_name": "Your WiFi name"
}
}

@@ -133,7 +133,7 @@
"backup_info_card_assets": "assets",
"backup_manual_cancelled": "Cancelled",
"backup_manual_failed": "Failed",
"backup_manual_in_progress": "Upload already in progress. Try after sometime",
"backup_manual_in_progress": "Upload already in progress. Try after some time",
"backup_manual_success": "Success",
"backup_manual_title": "Upload status",
"backup_options_page_title": "Backup options",
@@ -678,4 +678,4 @@
"viewer_unstack": "Un-Stack",
"wifi_name": "WiFi Name",
"your_wifi_name": "Your WiFi name"
}
}

@@ -133,7 +133,7 @@
"backup_info_card_assets": "assets",
"backup_manual_cancelled": "Cancelled",
"backup_manual_failed": "Failed",
"backup_manual_in_progress": "Upload already in progress. Try after sometime",
"backup_manual_in_progress": "Upload already in progress. Try after some time",
"backup_manual_success": "Success",
"backup_manual_title": "Upload status",
"backup_options_page_title": "Backup options",
@@ -678,4 +678,4 @@
"viewer_unstack": "Un-Stack",
"wifi_name": "WiFi Name",
"your_wifi_name": "Your WiFi name"
}
}

@@ -133,7 +133,7 @@
"backup_info_card_assets": "zapisi",
"backup_manual_cancelled": "Cancelled",
"backup_manual_failed": "Failed",
"backup_manual_in_progress": "Upload already in progress. Try after sometime",
"backup_manual_in_progress": "Upload already in progress. Try after some time",
"backup_manual_success": "Success",
"backup_manual_title": "Upload status",
"backup_options_page_title": "Backup options",
@@ -678,4 +678,4 @@
"viewer_unstack": "Un-Stack",
"wifi_name": "WiFi Name",
"your_wifi_name": "Your WiFi name"
}
}

@@ -133,7 +133,7 @@
"backup_info_card_assets": "assets",
"backup_manual_cancelled": "Cancelled",
"backup_manual_failed": "Failed",
"backup_manual_in_progress": "Upload already in progress. Try after sometime",
"backup_manual_in_progress": "Upload already in progress. Try after some time",
"backup_manual_success": "Success",
"backup_manual_title": "Upload status",
"backup_options_page_title": "Backup options",
@@ -678,4 +678,4 @@
"viewer_unstack": "Un-Stack",
"wifi_name": "WiFi Name",
"your_wifi_name": "Your WiFi name"
}
}
@@ -541,7 +541,7 @@
CODE_SIGN_ENTITLEMENTS = Runner/RunnerProfile.entitlements;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 195;
CURRENT_PROJECT_VERSION = 196;
CUSTOM_GROUP_ID = group.app.immich.share;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_BITCODE = NO;
@@ -685,7 +685,7 @@
CODE_SIGN_ENTITLEMENTS = Runner/Runner.entitlements;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 195;
CURRENT_PROJECT_VERSION = 196;
CUSTOM_GROUP_ID = group.app.immich.share;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_BITCODE = NO;
@@ -715,7 +715,7 @@
CODE_SIGN_ENTITLEMENTS = Runner/Runner.entitlements;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 195;
CURRENT_PROJECT_VERSION = 196;
CUSTOM_GROUP_ID = group.app.immich.share;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_BITCODE = NO;
@@ -748,7 +748,7 @@
CODE_SIGN_ENTITLEMENTS = ShareExtension/ShareExtension.entitlements;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 195;
CURRENT_PROJECT_VERSION = 196;
CUSTOM_GROUP_ID = group.app.immich.share;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
@@ -791,7 +791,7 @@
CODE_SIGN_ENTITLEMENTS = ShareExtension/ShareExtension.entitlements;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 195;
CURRENT_PROJECT_VERSION = 196;
CUSTOM_GROUP_ID = group.app.immich.share;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
@@ -831,7 +831,7 @@
CODE_SIGN_ENTITLEMENTS = ShareExtension/ShareExtension.entitlements;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 195;
CURRENT_PROJECT_VERSION = 196;
CUSTOM_GROUP_ID = group.app.immich.share;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
@@ -18,13 +18,6 @@ import UIKit
UNUserNotificationCenter.current().delegate = self as? UNUserNotificationCenterDelegate
}

do {
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
try AVAudioSession.sharedInstance().setActive(true)
} catch {
print("Failed to set audio session category. Error: \(error)")
}

GeneratedPluginRegistrant.register(with: self)
BackgroundServicePlugin.registerBackgroundProcessing()

@@ -160,7 +160,7 @@ class BackgroundServicePlugin: NSObject, FlutterPlugin {
}
}

// Called by the flutter code when enabled so that we can turn on the backround services
// Called by the flutter code when enabled so that we can turn on the background services
// and save the callback information to communicate on this method channel
public func handleBackgroundEnable(call: FlutterMethodCall, result: FlutterResult) {

@@ -249,7 +249,7 @@ class BackgroundServicePlugin: NSObject, FlutterPlugin {
result(true)
}

// Returns the number of currently scheduled background processes to Flutter, striclty
// Returns the number of currently scheduled background processes to Flutter, strictly
// for debugging
func handleNumberOfProcesses(call: FlutterMethodCall, result: @escaping FlutterResult) {
BGTaskScheduler.shared.getPendingTaskRequests { requests in
@@ -355,7 +355,7 @@ class BackgroundServicePlugin: NSObject, FlutterPlugin {
let isExpensive = wifiMonitor.currentPath.isExpensive
if (isExpensive) {
// The network is expensive and we have required Wi-Fi
// Therfore, we will simply complete the task without
// Therefore, we will simply complete the task without
// running it
task.setTaskCompleted(success: true)
return
@@ -78,7 +78,7 @@
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>1.127.0</string>
<string>1.128.0</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleURLTypes</key>
@@ -93,7 +93,7 @@
</dict>
</array>
<key>CFBundleVersion</key>
<string>195</string>
<string>196</string>
<key>FLTEnableImpeller</key>
<true/>
<key>ITSAppUsesNonExemptEncryption</key>
@@ -19,7 +19,7 @@ platform :ios do
desc "iOS Release"
lane :release do
increment_version_number(
version_number: "1.127.0"
version_number: "1.128.0"
)
increment_build_number(
build_number: latest_testflight_build_number + 1,
@@ -1,19 +1,15 @@
// ignore_for_file: constant_identifier_names

import 'package:logging/logging.dart';

/// Log levels according to dart logging [Level]
enum LogLevel {
ALL,
FINEST,
FINER,
FINE,
CONFIG,
INFO,
WARNING,
SEVERE,
SHOUT,
OFF,
all,
finest,
finer,
fine,
config,
info,
warning,
severe,
shout,
off,
}

class LogMessage {
@@ -39,29 +39,29 @@ class LogService {
}

static Future<LogService> init({
required ILogRepository logRepo,
required IStoreRepository storeRepo,
required ILogRepository logRepository,
required IStoreRepository storeRepository,
bool shouldBuffer = true,
}) async {
if (_instance != null) {
return _instance!;
}
_instance = await create(
logRepo: logRepo,
storeRepo: storeRepo,
logRepository: logRepository,
storeRepository: storeRepository,
shouldBuffer: shouldBuffer,
);
return _instance!;
}

static Future<LogService> create({
required ILogRepository logRepo,
required IStoreRepository storeRepo,
required ILogRepository logRepository,
required IStoreRepository storeRepository,
bool shouldBuffer = true,
}) async {
final instance = LogService._(logRepo, storeRepo, shouldBuffer);
final instance = LogService._(logRepository, storeRepository, shouldBuffer);
// Truncate logs to 250
await logRepo.truncate(limit: kLogTruncateLimit);
await logRepository.truncate(limit: kLogTruncateLimit);
// Get log level from store
final level = await instance._storeRepository.tryGet(StoreKey.logLevel);
if (level != null) {
@@ -145,7 +145,7 @@ class LoggerUnInitializedException implements Exception {
extension LevelDomainToInfraExtension on Level {
LogLevel toLogLevel() =>
LogLevel.values.elementAtOrNull(Level.LEVELS.indexOf(this)) ??
LogLevel.INFO;
LogLevel.info;
}

extension on LogLevel {

@@ -75,7 +75,7 @@ class StoreService {
}

/// Asynchronously stores the value in the DB and synchronously in the cache
Future<void> put<T>(StoreKey<T> key, T value) async {
Future<void> put<U extends StoreKey<T>, T>(U key, T value) async {
if (_cache[key.id] == value) return;
await _storeRepository.insert(key, value);
_cache[key.id] = value;
@@ -5,29 +5,24 @@ part 'log.entity.g.dart';

@Collection(inheritance: false)
class LoggerMessage {
Id id = Isar.autoIncrement;
String message;
String? details;
final Id id = Isar.autoIncrement;
final String message;
final String? details;
@Enumerated(EnumType.ordinal)
LogLevel level = LogLevel.INFO;
DateTime createdAt;
String? context1;
String? context2;
final LogLevel level;
final DateTime createdAt;
final String? context1;
final String? context2;

LoggerMessage({
const LoggerMessage({
required this.message,
required this.details,
required this.level,
this.level = LogLevel.info,
required this.createdAt,
required this.context1,
required this.context2,
});

@override
String toString() {
return 'LoggerMessage(message: $message, level: $level, createdAt: $createdAt)';
}

LogMessage toDto() {
return LogMessage(
message: message,
mobile/lib/infrastructure/entities/log.entity.g.dart: 49 changed lines (generated)
@@ -117,10 +117,9 @@ LoggerMessage _loggerMessageDeserialize(
createdAt: reader.readDateTime(offsets[2]),
details: reader.readStringOrNull(offsets[3]),
level: _LoggerMessagelevelValueEnumMap[reader.readByteOrNull(offsets[4])] ??
LogLevel.ALL,
LogLevel.info,
message: reader.readString(offsets[5]),
);
object.id = id;
return object;
}

@@ -141,7 +140,7 @@ P _loggerMessageDeserializeProp<P>(
return (reader.readStringOrNull(offset)) as P;
case 4:
return (_LoggerMessagelevelValueEnumMap[reader.readByteOrNull(offset)] ??
LogLevel.ALL) as P;
LogLevel.info) as P;
case 5:
return (reader.readString(offset)) as P;
default:
@@ -150,28 +149,28 @@ P _loggerMessageDeserializeProp<P>(
}

const _LoggerMessagelevelEnumValueMap = {
'ALL': 0,
'FINEST': 1,
'FINER': 2,
'FINE': 3,
'CONFIG': 4,
'INFO': 5,
'WARNING': 6,
'SEVERE': 7,
'SHOUT': 8,
'OFF': 9,
'all': 0,
'finest': 1,
'finer': 2,
'fine': 3,
'config': 4,
'info': 5,
'warning': 6,
'severe': 7,
'shout': 8,
'off': 9,
};
const _LoggerMessagelevelValueEnumMap = {
0: LogLevel.ALL,
1: LogLevel.FINEST,
2: LogLevel.FINER,
3: LogLevel.FINE,
4: LogLevel.CONFIG,
5: LogLevel.INFO,
6: LogLevel.WARNING,
7: LogLevel.SEVERE,
8: LogLevel.SHOUT,
9: LogLevel.OFF,
0: LogLevel.all,
1: LogLevel.finest,
2: LogLevel.finer,
3: LogLevel.fine,
4: LogLevel.config,
5: LogLevel.info,
6: LogLevel.warning,
7: LogLevel.severe,
8: LogLevel.shout,
9: LogLevel.off,
};

Id _loggerMessageGetId(LoggerMessage object) {
@@ -183,9 +182,7 @@ List<IsarLinkBase<dynamic>> _loggerMessageGetLinks(LoggerMessage object) {
}

void _loggerMessageAttach(
IsarCollection<dynamic> col, Id id, LoggerMessage object) {
object.id = id;
}
IsarCollection<dynamic> col, Id id, LoggerMessage object) {}

extension LoggerMessageQueryWhereSort
on QueryBuilder<LoggerMessage, LoggerMessage, QWhere> {
@@ -5,8 +5,9 @@ part 'store.entity.g.dart';
/// Internal class for `Store`, do not use elsewhere.
@Collection(inheritance: false)
class StoreValue {
const StoreValue(this.id, {this.intValue, this.strValue});
final Id id;
final int? intValue;
final String? strValue;

const StoreValue(this.id, {this.intValue, this.strValue});
}
@@ -3,6 +3,10 @@ import 'package:immich_mobile/entities/asset.entity.dart';
import 'package:immich_mobile/widgets/asset_grid/asset_grid_data_structure.dart';

abstract class ITimelineRepository {
Future<List<int>> getTimelineUserIds(int id);

Stream<List<int>> watchTimelineUsers(int id);

Stream<RenderList> watchArchiveTimeline(int userId);
Stream<RenderList> watchFavoriteTimeline(int userId);
Stream<RenderList> watchTrashTimeline(int userId);

@@ -22,10 +22,6 @@ abstract interface class IUserRepository implements IDatabaseRepository {
Future<User> me();

Future<void> clearTable();

Future<List<int>> getTimelineUserIds(int id);

Stream<List<int>> watchTimelineUsers(int id);
}

enum UserSort { id }
@@ -7,7 +7,7 @@ mixin ErrorLoggerMixin {
abstract final Logger logger;

/// Returns an AsyncValue<T> if the future is successfully executed
/// Else, logs the error to the overrided logger and returns an AsyncError<>
/// Else, logs the error to the overridden logger and returns an AsyncError<>
AsyncFuture<T> guardError<T>(
Future<T> Function() fn, {
required String errorMessage,

@@ -41,16 +41,16 @@ class AppLogPage extends HookConsumerWidget {
}

Widget buildLeadingIcon(LogLevel level) => switch (level) {
LogLevel.INFO => colorStatusIndicator(context.primaryColor),
LogLevel.SEVERE => colorStatusIndicator(Colors.redAccent),
LogLevel.WARNING => colorStatusIndicator(Colors.orangeAccent),
LogLevel.info => colorStatusIndicator(context.primaryColor),
LogLevel.severe => colorStatusIndicator(Colors.redAccent),
LogLevel.warning => colorStatusIndicator(Colors.orangeAccent),
_ => colorStatusIndicator(Colors.grey),
};

Color getTileColor(LogLevel level) => switch (level) {
LogLevel.INFO => Colors.transparent,
LogLevel.SEVERE => Colors.redAccent.withOpacity(0.25),
LogLevel.WARNING => Colors.orangeAccent.withOpacity(0.25),
LogLevel.info => Colors.transparent,
LogLevel.severe => Colors.redAccent.withOpacity(0.25),
LogLevel.warning => Colors.orangeAccent.withOpacity(0.25),
_ => context.primaryColor.withOpacity(0.1),
};
@@ -59,7 +59,11 @@ class AssetNotifier extends StateNotifier<bool> {
await clearAllAssets();
log.info("Manual refresh requested, cleared assets and albums from db");
}
final bool changedUsers = await _userService.refreshUsers();
final users = await _userService.getUsersFromServer();
bool changedUsers = false;
if (users != null) {
changedUsers = await _syncService.syncUsersFromServer(users);
}
final bool newRemote = await _assetService.refreshRemoteAssets();
final bool newLocal = await _albumService.refreshDeviceAlbums();
debugPrint(

@@ -104,7 +104,7 @@ class DownloadStateNotifier extends StateNotifier<DownloadState> {
}

void _taskProgressCallback(TaskProgressUpdate update) {
// Ignore if the task is cancled or completed
// Ignore if the task is canceled or completed
if (update.progress == -2 || update.progress == -1) {
return;
}

@@ -117,7 +117,7 @@ class ShareIntentUploadStateNotifier
}

void _taskProgressCallback(TaskProgressUpdate update) {
// Ignore if the task is cancled or completed
// Ignore if the task is canceled or completed
if (update.progress == downloadFailed ||
update.progress == downloadCompleted) {
return;
@@ -47,7 +47,7 @@ class AuthNotifier extends StateNotifier<AuthState> {
}

/// Validating the url is the alternative connecting server url without
/// saving the infomation to the local database
/// saving the information to the local database
Future<bool> validateAuxilaryServerUrl(String url) async {
try {
final validEndpoint = await _apiService.resolveEndpoint(url);

@@ -6,7 +6,7 @@ import 'package:permission_handler/permission_handler.dart';

class GalleryPermissionNotifier extends StateNotifier<PermissionStatus> {
GalleryPermissionNotifier()
: super(PermissionStatus.denied) // Denied is the intitial state
: super(PermissionStatus.denied) // Denied is the initial state
{
// Sets the initial state
getGalleryPermissionStatus();
@@ -6,7 +6,7 @@ import 'package:immich_mobile/entities/store.entity.dart';
import 'package:immich_mobile/entities/user.entity.dart';
import 'package:immich_mobile/providers/api.provider.dart';
import 'package:immich_mobile/services/api.service.dart';
import 'package:immich_mobile/services/user.service.dart';
import 'package:immich_mobile/services/timeline.service.dart';

class CurrentUserProvider extends StateNotifier<User?> {
CurrentUserProvider(this._apiService) : super(null) {
@@ -46,14 +46,15 @@ final currentUserProvider =
});

class TimelineUserIdsProvider extends StateNotifier<List<int>> {
TimelineUserIdsProvider(this._userService) : super([]) {
_userService.getTimelineUserIds().then((users) => state = users);
streamSub =
_userService.watchTimelineUserIds().listen((users) => state = users);
TimelineUserIdsProvider(this._timelineService) : super([]) {
_timelineService.getTimelineUserIds().then((users) => state = users);
streamSub = _timelineService
.watchTimelineUserIds()
.listen((users) => state = users);
}

late final StreamSubscription<List<int>> streamSub;
final UserService _userService;
final TimelineService _timelineService;

@override
void dispose() {
@@ -64,5 +65,5 @@ class TimelineUserIdsProvider extends StateNotifier<List<int>> {

final timelineUsersIdsProvider =
StateNotifierProvider<TimelineUserIdsProvider, List<int>>((ref) {
return TimelineUserIdsProvider(ref.watch(userServiceProvider));
return TimelineUserIdsProvider(ref.watch(timelineServiceProvider));
});
@@ -2,6 +2,7 @@ import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/constants/enums.dart';
import 'package:immich_mobile/entities/album.entity.dart';
import 'package:immich_mobile/entities/asset.entity.dart';
import 'package:immich_mobile/entities/user.entity.dart';
import 'package:immich_mobile/interfaces/timeline.interface.dart';
import 'package:immich_mobile/providers/db.provider.dart';
import 'package:immich_mobile/repositories/database.repository.dart';
@@ -15,6 +16,28 @@ class TimelineRepository extends DatabaseRepository
implements ITimelineRepository {
TimelineRepository(super.db);

@override
Future<List<int>> getTimelineUserIds(int id) {
return db.users
.filter()
.inTimelineEqualTo(true)
.or()
.isarIdEqualTo(id)
.isarIdProperty()
.findAll();
}

@override
Stream<List<int>> watchTimelineUsers(int id) {
return db.users
.filter()
.inTimelineEqualTo(true)
.or()
.isarIdEqualTo(id)
.isarIdProperty()
.watch();
}

@override
Stream<RenderList> watchArchiveTimeline(int userId) {
final query = db.assets
@@ -70,26 +70,4 @@ class UserRepository extends DatabaseRepository implements IUserRepository {
await db.users.clear();
});
}

@override
Future<List<int>> getTimelineUserIds(int id) {
return db.users
.filter()
.inTimelineEqualTo(true)
.or()
.isarIdEqualTo(id)
.isarIdProperty()
.findAll();
}

@override
Stream<List<int>> watchTimelineUsers(int id) {
return db.users
.filter()
.inTimelineEqualTo(true)
.or()
.isarIdEqualTo(id)
.isarIdProperty()
.watch();
}
}
@@ -169,7 +169,10 @@ class AlbumService {
final Stopwatch sw = Stopwatch()..start();
bool changes = false;
try {
await _userService.refreshUsers();
final users = await _userService.getUsersFromServer();
if (users != null) {
await _syncService.syncUsersFromServer(users);
}
final (sharedAlbum, ownedAlbum) = await (
// Note: `shared: true` is required to get albums that don't belong to
// us due to unusual behaviour on the API but this will also return our
@@ -84,15 +84,17 @@ class ApiService implements Authentication {
/// port - optional (default: based on schema)
/// path - optional
Future<String> resolveEndpoint(String serverUrl) async {
final url = sanitizeUrl(serverUrl);

if (!await _isEndpointAvailable(serverUrl)) {
throw ApiException(503, "Server is not reachable");
}
String url = sanitizeUrl(serverUrl);

// Check for /.well-known/immich
final wellKnownEndpoint = await _getWellKnownEndpoint(url);
if (wellKnownEndpoint.isNotEmpty) return wellKnownEndpoint;
if (wellKnownEndpoint.isNotEmpty) {
url = sanitizeUrl(wellKnownEndpoint);
}

if (!await _isEndpointAvailable(url)) {
throw ApiException(503, "Server is not reachable");
}

// Otherwise, assume the URL provided is the api endpoint
return url;
@@ -128,10 +130,12 @@ class ApiService implements Authentication {
var headers = {"Accept": "application/json"};
headers.addAll(getRequestHeaders());

final res = await client.get(
Uri.parse("$baseUrl/.well-known/immich"),
headers: headers,
);
final res = await client
.get(
Uri.parse("$baseUrl/.well-known/immich"),
headers: headers,
)
.timeout(const Duration(seconds: 5));

if (res.statusCode == 200) {
final data = jsonDecode(res.body);
@@ -75,7 +75,7 @@ class AuthService {
         isValid = true;
       }
     } catch (error) {
-      _log.severe("Error validating auxilary endpoint", error);
+      _log.severe("Error validating auxiliary endpoint", error);
     } finally {
       httpclient.close();
     }
@@ -187,7 +187,7 @@ class AuthService {
         _log.severe("Cannot resolve endpoint", error);
         continue;
       } catch (_) {
-        _log.severe("Auxilary server is not valid");
+        _log.severe("Auxiliary server is not valid");
         continue;
       }
     }
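Aside from the spelling fixes, the surrounding code shows the probe-then-cleanup shape used when validating an auxiliary endpoint: the request runs inside try/catch and the client is always closed in `finally`. A condensed sketch of that pattern, where the probe logic is a placeholder rather than AuthService's actual check:

```dart
import 'dart:io';

/// Sketch of the probe-with-cleanup pattern; the GET request stands in
/// for the real validation call.
Future<bool> validateAuxiliaryEndpoint(Uri endpoint) async {
  final httpClient = HttpClient();
  var isValid = false;
  try {
    final req = await httpClient.getUrl(endpoint);
    final res = await req.close();
    if (res.statusCode == HttpStatus.ok) {
      isValid = true;
    }
  } catch (error) {
    // Mirrors the corrected log message in the diff.
    print('Error validating auxiliary endpoint: $error');
  } finally {
    httpClient.close(); // always release the client
  }
  return isValid;
}
```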
@@ -329,7 +329,7 @@ class BackgroundService {
     try {
       _clearErrorNotifications();

-      // iOS should time out after some threshhold so it doesn't wait
+      // iOS should time out after some threshold so it doesn't wait
       // indefinitely and can run later
       // Android is fine to wait here until the lock releases
       final waitForLock = Platform.isIOS
@@ -410,7 +410,6 @@ class BackgroundService {
       partnerApiRepository,
       userApiRepository,
       userRepository,
-      syncSerive,
     );
     AlbumService albumService = AlbumService(
       userService,
@@ -26,7 +26,7 @@ class MemoryService {
     try {
       final now = DateTime.now();
       final data = await _apiService.memoriesApi.searchMemories(
-        for_: now,
+        for_: DateTime.utc(now.year, now.month, now.day, 0, 0, 0),
      );

      if (data == null) {
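The change stops passing the raw local timestamp and instead pins the `for_` argument to UTC midnight, so memories are requested for the calendar day rather than for an exact instant. The normalization in isolation:

```dart
void main() {
  final now = DateTime.now();

  // Keep only the calendar date and pin it to UTC,
  // as the new `for_:` argument does.
  final day = DateTime.utc(now.year, now.month, now.day);

  print(day.toIso8601String()); // e.g. 2025-02-26T00:00:00.000Z
}
```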
@@ -6,6 +6,7 @@ import 'package:immich_mobile/entities/album.entity.dart';
 import 'package:immich_mobile/entities/asset.entity.dart';
 import 'package:immich_mobile/entities/etag.entity.dart';
 import 'package:immich_mobile/entities/user.entity.dart';
+import 'package:immich_mobile/extensions/collection_extensions.dart';
 import 'package:immich_mobile/interfaces/album.interface.dart';
 import 'package:immich_mobile/interfaces/album_api.interface.dart';
 import 'package:immich_mobile/interfaces/album_media.interface.dart';
@@ -23,7 +24,6 @@ import 'package:immich_mobile/repositories/user.repository.dart';
 import 'package:immich_mobile/services/entity.service.dart';
 import 'package:immich_mobile/services/hash.service.dart';
 import 'package:immich_mobile/utils/async_mutex.dart';
-import 'package:immich_mobile/extensions/collection_extensions.dart';
 import 'package:immich_mobile/utils/datetime_comparison.dart';
 import 'package:immich_mobile/utils/diff.dart';
 import 'package:logging/logging.dart';
@@ -639,7 +639,7 @@ class SyncService {
   }

   /// fast path for common case: only new assets were added to device album
-  /// returns `true` if successfull, else `false`
+  /// returns `true` if successful, else `false`
   Future<bool> _syncDeviceAlbumFast(Album deviceAlbum, Album dbAlbum) async {
     if (!deviceAlbum.modifiedAt.isAfter(dbAlbum.modifiedAt)) {
       return false;
@@ -21,12 +21,23 @@ class TimelineService {
   final ITimelineRepository _timelineRepository;
+  final IUserRepository _userRepository;
   final AppSettingsService _appSettingsService;

-  TimelineService(
+  const TimelineService(
     this._timelineRepository,
+    this._userRepository,
     this._appSettingsService,
   );

+  Future<List<int>> getTimelineUserIds() async {
+    final me = await _userRepository.me();
+    return _timelineRepository.getTimelineUserIds(me.isarId);
+  }
+
+  Stream<List<int>> watchTimelineUserIds() async* {
+    final me = await _userRepository.me();
+    yield* _timelineRepository.watchTimelineUsers(me.isarId);
+  }
+
   Stream<RenderList> watchHomeTimeline(int userId) {
     return _timelineRepository.watchHomeTimeline(userId, _getGroupByOption());
   }
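The new `watchTimelineUserIds` pairs `async*` with `yield*`: it awaits the current user once, then forwards the repository stream unchanged. The same shape in a self-contained sketch, where the two stand-in functions below replace the repository calls:

```dart
// Stand-ins for the repository calls, simplified to top-level functions.
Future<int> currentUserId() async => 1;
Stream<List<int>> watchTimelineUsers(int id) =>
    Stream.fromIterable([
      [id],
      [id, 2],
    ]);

// Await a prerequisite once, then delegate the whole stream with yield*.
Stream<List<int>> watchTimelineUserIds() async* {
  final me = await currentUserId();
  yield* watchTimelineUsers(me);
}

void main() {
  watchTimelineUserIds().listen(print); // [1] then [1, 2]
}
```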
@@ -1,14 +1,13 @@
 import 'package:collection/collection.dart';
 import 'package:hooks_riverpod/hooks_riverpod.dart';
 import 'package:image_picker/image_picker.dart';
+import 'package:immich_mobile/entities/user.entity.dart';
 import 'package:immich_mobile/interfaces/partner_api.interface.dart';
 import 'package:immich_mobile/interfaces/user.interface.dart';
 import 'package:immich_mobile/interfaces/user_api.interface.dart';
 import 'package:immich_mobile/repositories/partner_api.repository.dart';
 import 'package:immich_mobile/repositories/user.repository.dart';
 import 'package:immich_mobile/repositories/user_api.repository.dart';
-import 'package:immich_mobile/entities/user.entity.dart';
-import 'package:immich_mobile/services/sync.service.dart';
 import 'package:immich_mobile/utils/diff.dart';
 import 'package:logging/logging.dart';
@@ -17,7 +16,6 @@ final userServiceProvider = Provider(
     ref.watch(partnerApiRepositoryProvider),
     ref.watch(userApiRepositoryProvider),
     ref.watch(userRepositoryProvider),
-    ref.watch(syncServiceProvider),
   ),
 );
@@ -25,14 +23,12 @@ class UserService {
   final IPartnerApiRepository _partnerApiRepository;
   final IUserApiRepository _userApiRepository;
   final IUserRepository _userRepository;
-  final SyncService _syncService;
   final Logger _log = Logger("UserService");

   UserService(
     this._partnerApiRepository,
     this._userApiRepository,
     this._userRepository,
-    this._syncService,
   );

   Future<List<User>> getUsers({bool self = false}) {
@@ -98,23 +94,7 @@ class UserService {
     return users;
   }

-  Future<bool> refreshUsers() async {
-    final users = await getUsersFromServer();
-    if (users == null) return false;
-    return _syncService.syncUsersFromServer(users);
-  }
-
   Future<void> clearTable() {
     return _userRepository.clearTable();
   }
-
-  Future<List<int>> getTimelineUserIds() async {
-    final me = await _userRepository.me();
-    return _userRepository.getTimelineUserIds(me.isarId);
-  }
-
-  Stream<List<int>> watchTimelineUserIds() async* {
-    final me = await _userRepository.me();
-    yield* _userRepository.watchTimelineUsers(me.isarId);
-  }
 }
@@ -49,8 +49,8 @@ abstract final class Bootstrap {
   static Future<void> initDomain(Isar db) async {
     await StoreService.init(storeRepository: IsarStoreRepository(db));
     await LogService.init(
-      logRepo: IsarLogRepository(db),
-      storeRepo: IsarStoreRepository(db),
+      logRepository: IsarLogRepository(db),
+      storeRepository: IsarStoreRepository(db),
     );
   }
 }
@@ -5,7 +5,7 @@ import 'package:immich_mobile/providers/image/immich_remote_image_provider.dart';
 import 'package:immich_mobile/providers/image/immich_remote_thumbnail_provider.dart';

 /// [ImageCache] that uses two caches for small and large images
-/// so that a single large image does not evict all small iamges
+/// so that a single large image does not evict all small images
 final class CustomImageCache implements ImageCache {
   final _small = ImageCache();
   final _large = ImageCache()..maximumSize = 5; // Maximum 5 images
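The corrected comment states the motivation: thumbnails and full-size images live in separate `ImageCache` instances, so a handful of large decodes cannot evict hundreds of thumbnails. A rough sketch of the routing idea; the key predicate here is hypothetical, while the real `CustomImageCache` implements the full `ImageCache` interface and dispatches on its provider key types:

```dart
import 'package:flutter/painting.dart';

/// Sketch of the two-tier idea: small and large images are kept in
/// separate caches so one large decode cannot flush all thumbnails.
class TwoTierImageCache {
  final ImageCache small = ImageCache();
  final ImageCache large = ImageCache()..maximumSize = 5;

  // Hypothetical routing predicate; the real class dispatches on its
  // own image provider key types instead of inspecting strings.
  ImageCache _cacheFor(Object key) =>
      key.toString().contains('thumbnail') ? small : large;

  void evict(Object key) => _cacheFor(key).evict(key);
}
```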
@@ -26,7 +26,7 @@ double getScaleForScaleState(
 }

 /// Internal class to wraps custom scale boundaries (min, max and initial)
-/// Also, stores values regarding the two sizes: the container and teh child.
+/// Also, stores values regarding the two sizes: the container and the child.
 class ScaleBoundaries {
   const ScaleBoundaries(
     this._minScale,
@@ -2,13 +2,12 @@ import 'package:easy_localization/easy_localization.dart';
 import 'package:flutter/material.dart';
 import 'package:flutter_hooks/flutter_hooks.dart' hide Store;
 import 'package:hooks_riverpod/hooks_riverpod.dart';
-import 'package:immich_mobile/entities/store.entity.dart';
+import 'package:immich_mobile/domain/models/store.model.dart';
 import 'package:immich_mobile/extensions/build_context_extensions.dart';
 import 'package:immich_mobile/models/auth/auxilary_endpoint.model.dart';
 import 'package:immich_mobile/providers/network.provider.dart';
 import 'package:immich_mobile/services/app_settings.service.dart';
 import 'package:immich_mobile/utils/hooks/app_settings_update_hook.dart';
 import 'package:immich_mobile/utils/url_helper.dart';
 import 'package:immich_mobile/widgets/settings/networking_settings/external_network_preference.dart';
 import 'package:immich_mobile/widgets/settings/networking_settings/local_network_preference.dart';
 import 'package:immich_mobile/widgets/settings/settings_switch_list_tile.dart';
@@ -18,7 +17,7 @@ class NetworkingSettings extends HookConsumerWidget {

   @override
   Widget build(BuildContext context, WidgetRef ref) {
-    final currentEndpoint = Store.get(StoreKey.serverEndpoint);
+    final currentEndpoint = getServerUrl();
     final featureEnabled =
         useAppSettingsState(AppSettingsEnum.autoEndpointSwitching);
@@ -102,7 +101,7 @@ class NetworkingSettings extends HookConsumerWidget {
                 padding: const EdgeInsets.only(top: 8, left: 16, bottom: 8),
                 child: NetworkPreferenceTitle(
                   title: "current_server_address".tr().toUpperCase(),
-                  icon: currentEndpoint.startsWith('https')
+                  icon: (currentEndpoint?.startsWith('https') ?? false)
                       ? Icons.https_outlined
                       : Icons.http_outlined,
                 ),
@@ -119,10 +118,16 @@ class NetworkingSettings extends HookConsumerWidget {
               ),
             ),
             child: ListTile(
-              leading:
-                  const Icon(Icons.check_circle_rounded, color: Colors.green),
+              leading: currentEndpoint != null
+                  ? const Icon(
+                      Icons.check_circle_rounded,
+                      color: Colors.green,
+                    )
+                  : const Icon(
+                      Icons.circle_outlined,
+                    ),
               title: Text(
-                currentEndpoint,
+                currentEndpoint ?? "--",
                 style: TextStyle(
                   fontSize: 16,
                   fontFamily: 'Inconsolata',
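`getServerUrl()` can return null before any endpoint is stored, so every use is now guarded: `?.startsWith` with `?? false` for the icon choice and `?? "--"` for the label. The guard pattern on its own:

```dart
void main() {
  String? currentEndpoint; // null until a server URL has been stored

  final isHttps = currentEndpoint?.startsWith('https') ?? false;
  print('${currentEndpoint ?? "--"} (https: $isHttps)'); // -- (https: false)

  currentEndpoint = 'https://demo.immich.app/api';
  // After the assignment the local variable is promoted to non-null.
  print(currentEndpoint.startsWith('https')); // true
}
```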
mobile/openapi/README.md (4 changes, generated)
@@ -3,7 +3,7 @@ Immich API

 This Dart package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project:

-- API version: 1.127.0
+- API version: 1.128.0
 - Generator version: 7.8.0
 - Build package: org.openapitools.codegen.languages.DartClientCodegen
@@ -425,6 +425,8 @@ Class | Method | HTTP request | Description
 - [SyncAckDto](doc//SyncAckDto.md)
 - [SyncAckSetDto](doc//SyncAckSetDto.md)
 - [SyncEntityType](doc//SyncEntityType.md)
+- [SyncPartnerDeleteV1](doc//SyncPartnerDeleteV1.md)
+- [SyncPartnerV1](doc//SyncPartnerV1.md)
 - [SyncRequestType](doc//SyncRequestType.md)
 - [SyncStreamDto](doc//SyncStreamDto.md)
 - [SyncUserDeleteV1](doc//SyncUserDeleteV1.md)
mobile/openapi/lib/api.dart (2 changes, generated)
@@ -232,6 +232,8 @@ part 'model/sync_ack_delete_dto.dart';
 part 'model/sync_ack_dto.dart';
 part 'model/sync_ack_set_dto.dart';
 part 'model/sync_entity_type.dart';
+part 'model/sync_partner_delete_v1.dart';
+part 'model/sync_partner_v1.dart';
 part 'model/sync_request_type.dart';
 part 'model/sync_stream_dto.dart';
 part 'model/sync_user_delete_v1.dart';
mobile/openapi/lib/api_client.dart (4 changes, generated)
@@ -520,6 +520,10 @@ class ApiClient {
           return SyncAckSetDto.fromJson(value);
         case 'SyncEntityType':
           return SyncEntityTypeTypeTransformer().decode(value);
+        case 'SyncPartnerDeleteV1':
+          return SyncPartnerDeleteV1.fromJson(value);
+        case 'SyncPartnerV1':
+          return SyncPartnerV1.fromJson(value);
         case 'SyncRequestType':
           return SyncRequestTypeTypeTransformer().decode(value);
         case 'SyncStreamDto':
mobile/openapi/lib/model/sync_entity_type.dart (6 changes, generated)
@@ -25,11 +25,15 @@ class SyncEntityType {

   static const userV1 = SyncEntityType._(r'UserV1');
   static const userDeleteV1 = SyncEntityType._(r'UserDeleteV1');
+  static const partnerV1 = SyncEntityType._(r'PartnerV1');
+  static const partnerDeleteV1 = SyncEntityType._(r'PartnerDeleteV1');

   /// List of all possible values in this [enum][SyncEntityType].
   static const values = <SyncEntityType>[
     userV1,
     userDeleteV1,
+    partnerV1,
+    partnerDeleteV1,
   ];

   static SyncEntityType? fromJson(dynamic value) => SyncEntityTypeTypeTransformer().decode(value);
@@ -70,6 +74,8 @@ class SyncEntityTypeTypeTransformer {
       switch (data) {
         case r'UserV1': return SyncEntityType.userV1;
         case r'UserDeleteV1': return SyncEntityType.userDeleteV1;
+        case r'PartnerV1': return SyncEntityType.partnerV1;
+        case r'PartnerDeleteV1': return SyncEntityType.partnerDeleteV1;
         default:
           if (!allowNull) {
             throw ArgumentError('Unknown enum value to decode: $data');
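With `PartnerV1` and `PartnerDeleteV1` added to the enum and its transformer, a sync-stream consumer can dispatch on the decoded type. A hedged sketch of such a dispatch; the handlers are hypothetical, and a plain Dart enum stands in for the generated class:

```dart
// Simplified stand-in for the generated SyncEntityType values.
enum SyncEntityType { userV1, userDeleteV1, partnerV1, partnerDeleteV1 }

void handleSyncEvent(SyncEntityType type, Map<String, dynamic> json) {
  switch (type) {
    case SyncEntityType.partnerV1:
      // New in this release: upsert the partner relation,
      // including its inTimeline flag.
      print('upsert partner: $json');
    case SyncEntityType.partnerDeleteV1:
      print('remove partner: $json');
    case SyncEntityType.userV1:
    case SyncEntityType.userDeleteV1:
      print('user event: $json');
  }
}

void main() {
  handleSyncEvent(
    SyncEntityType.partnerV1,
    {'inTimeline': true, 'sharedById': 'a', 'sharedWithId': 'b'},
  );
}
```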
mobile/openapi/lib/model/sync_partner_delete_v1.dart (new file, 107 lines, generated)
@@ -0,0 +1,107 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18

// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars

part of openapi.api;

class SyncPartnerDeleteV1 {
  /// Returns a new [SyncPartnerDeleteV1] instance.
  SyncPartnerDeleteV1({
    required this.sharedById,
    required this.sharedWithId,
  });

  String sharedById;

  String sharedWithId;

  @override
  bool operator ==(Object other) => identical(this, other) || other is SyncPartnerDeleteV1 &&
    other.sharedById == sharedById &&
    other.sharedWithId == sharedWithId;

  @override
  int get hashCode =>
    // ignore: unnecessary_parenthesis
    (sharedById.hashCode) +
    (sharedWithId.hashCode);

  @override
  String toString() => 'SyncPartnerDeleteV1[sharedById=$sharedById, sharedWithId=$sharedWithId]';

  Map<String, dynamic> toJson() {
    final json = <String, dynamic>{};
      json[r'sharedById'] = this.sharedById;
      json[r'sharedWithId'] = this.sharedWithId;
    return json;
  }

  /// Returns a new [SyncPartnerDeleteV1] instance and imports its values from
  /// [value] if it's a [Map], null otherwise.
  // ignore: prefer_constructors_over_static_methods
  static SyncPartnerDeleteV1? fromJson(dynamic value) {
    upgradeDto(value, "SyncPartnerDeleteV1");
    if (value is Map) {
      final json = value.cast<String, dynamic>();

      return SyncPartnerDeleteV1(
        sharedById: mapValueOfType<String>(json, r'sharedById')!,
        sharedWithId: mapValueOfType<String>(json, r'sharedWithId')!,
      );
    }
    return null;
  }

  static List<SyncPartnerDeleteV1> listFromJson(dynamic json, {bool growable = false,}) {
    final result = <SyncPartnerDeleteV1>[];
    if (json is List && json.isNotEmpty) {
      for (final row in json) {
        final value = SyncPartnerDeleteV1.fromJson(row);
        if (value != null) {
          result.add(value);
        }
      }
    }
    return result.toList(growable: growable);
  }

  static Map<String, SyncPartnerDeleteV1> mapFromJson(dynamic json) {
    final map = <String, SyncPartnerDeleteV1>{};
    if (json is Map && json.isNotEmpty) {
      json = json.cast<String, dynamic>(); // ignore: parameter_assignments
      for (final entry in json.entries) {
        final value = SyncPartnerDeleteV1.fromJson(entry.value);
        if (value != null) {
          map[entry.key] = value;
        }
      }
    }
    return map;
  }

  // maps a json object with a list of SyncPartnerDeleteV1-objects as value to a dart map
  static Map<String, List<SyncPartnerDeleteV1>> mapListFromJson(dynamic json, {bool growable = false,}) {
    final map = <String, List<SyncPartnerDeleteV1>>{};
    if (json is Map && json.isNotEmpty) {
      // ignore: parameter_assignments
      json = json.cast<String, dynamic>();
      for (final entry in json.entries) {
        map[entry.key] = SyncPartnerDeleteV1.listFromJson(entry.value, growable: growable,);
      }
    }
    return map;
  }

  /// The list of required keys that must be present in a JSON.
  static const requiredKeys = <String>{
    'sharedById',
    'sharedWithId',
  };
}
mobile/openapi/lib/model/sync_partner_v1.dart (new file, 115 lines, generated)
@@ -0,0 +1,115 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18

// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars

part of openapi.api;

class SyncPartnerV1 {
  /// Returns a new [SyncPartnerV1] instance.
  SyncPartnerV1({
    required this.inTimeline,
    required this.sharedById,
    required this.sharedWithId,
  });

  bool inTimeline;

  String sharedById;

  String sharedWithId;

  @override
  bool operator ==(Object other) => identical(this, other) || other is SyncPartnerV1 &&
    other.inTimeline == inTimeline &&
    other.sharedById == sharedById &&
    other.sharedWithId == sharedWithId;

  @override
  int get hashCode =>
    // ignore: unnecessary_parenthesis
    (inTimeline.hashCode) +
    (sharedById.hashCode) +
    (sharedWithId.hashCode);

  @override
  String toString() => 'SyncPartnerV1[inTimeline=$inTimeline, sharedById=$sharedById, sharedWithId=$sharedWithId]';

  Map<String, dynamic> toJson() {
    final json = <String, dynamic>{};
      json[r'inTimeline'] = this.inTimeline;
      json[r'sharedById'] = this.sharedById;
      json[r'sharedWithId'] = this.sharedWithId;
    return json;
  }

  /// Returns a new [SyncPartnerV1] instance and imports its values from
  /// [value] if it's a [Map], null otherwise.
  // ignore: prefer_constructors_over_static_methods
  static SyncPartnerV1? fromJson(dynamic value) {
    upgradeDto(value, "SyncPartnerV1");
    if (value is Map) {
      final json = value.cast<String, dynamic>();

      return SyncPartnerV1(
        inTimeline: mapValueOfType<bool>(json, r'inTimeline')!,
        sharedById: mapValueOfType<String>(json, r'sharedById')!,
        sharedWithId: mapValueOfType<String>(json, r'sharedWithId')!,
      );
    }
    return null;
  }

  static List<SyncPartnerV1> listFromJson(dynamic json, {bool growable = false,}) {
    final result = <SyncPartnerV1>[];
    if (json is List && json.isNotEmpty) {
      for (final row in json) {
        final value = SyncPartnerV1.fromJson(row);
        if (value != null) {
          result.add(value);
        }
      }
    }
    return result.toList(growable: growable);
  }

  static Map<String, SyncPartnerV1> mapFromJson(dynamic json) {
    final map = <String, SyncPartnerV1>{};
    if (json is Map && json.isNotEmpty) {
      json = json.cast<String, dynamic>(); // ignore: parameter_assignments
      for (final entry in json.entries) {
        final value = SyncPartnerV1.fromJson(entry.value);
        if (value != null) {
          map[entry.key] = value;
        }
      }
    }
    return map;
  }

  // maps a json object with a list of SyncPartnerV1-objects as value to a dart map
  static Map<String, List<SyncPartnerV1>> mapListFromJson(dynamic json, {bool growable = false,}) {
    final map = <String, List<SyncPartnerV1>>{};
    if (json is Map && json.isNotEmpty) {
      // ignore: parameter_assignments
      json = json.cast<String, dynamic>();
      for (final entry in json.entries) {
        map[entry.key] = SyncPartnerV1.listFromJson(entry.value, growable: growable,);
      }
    }
    return map;
  }

  /// The list of required keys that must be present in a JSON.
  static const requiredKeys = <String>{
    'inTimeline',
    'sharedById',
    'sharedWithId',
  };
}
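The generated model returns null for anything that is not a map and asserts its required keys, so decoding stays null-safe end to end. A usage sketch against the class above, assuming the generated `openapi` package is in scope:

```dart
// Assumes the generated openapi package (and SyncPartnerV1 above) is imported.
void main() {
  final dto = SyncPartnerV1.fromJson({
    'inTimeline': true,
    'sharedById': 'user-a',
    'sharedWithId': 'user-b',
  });

  if (dto != null) {
    print('${dto.sharedById} -> ${dto.sharedWithId} '
        '(inTimeline: ${dto.inTimeline})');
  }

  // Non-map input decodes to null instead of throwing.
  print(SyncPartnerV1.fromJson('not a map')); // null
}
```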
mobile/openapi/lib/model/sync_request_type.dart (3 changes, generated)
@@ -24,10 +24,12 @@ class SyncRequestType {
   String toJson() => value;

   static const usersV1 = SyncRequestType._(r'UsersV1');
+  static const partnersV1 = SyncRequestType._(r'PartnersV1');

   /// List of all possible values in this [enum][SyncRequestType].
   static const values = <SyncRequestType>[
     usersV1,
+    partnersV1,
   ];

   static SyncRequestType? fromJson(dynamic value) => SyncRequestTypeTypeTransformer().decode(value);
@@ -67,6 +69,7 @@ class SyncRequestTypeTypeTransformer {
     if (data != null) {
       switch (data) {
         case r'UsersV1': return SyncRequestType.usersV1;
+        case r'PartnersV1': return SyncRequestType.partnersV1;
         default:
           if (!allowNull) {
             throw ArgumentError('Unknown enum value to decode: $data');
@@ -1012,8 +1012,8 @@ packages:
     dependency: "direct main"
     description:
       path: "."
-      ref: "4530808"
-      resolved-ref: "4530808a6d04c9992de184c423c9e87fbf6a53eb"
+      ref: "5459d54"
+      resolved-ref: "5459d54cdc1cf4d99e2193b310052f1ebb5dcf43"
       url: "https://github.com/immich-app/native_video_player"
     source: git
   version: "1.3.1"
@@ -2,7 +2,7 @@ name: immich_mobile
 description: Immich - selfhosted backup media file on mobile phone

 publish_to: 'none'
-version: 1.127.0+185
+version: 1.128.0+186

 environment:
   sdk: '>=3.3.0 <4.0.0'
@@ -65,7 +65,7 @@ dependencies:
   native_video_player:
     git:
       url: https://github.com/immich-app/native_video_player
-      ref: '4530808'
+      ref: '5459d54'

   #image editing packages
   crop_image: ^1.0.13
@@ -14,14 +14,14 @@ import '../../test_utils.dart';

 final _kInfoLog = LogMessage(
   message: '#Info Message',
-  level: LogLevel.INFO,
+  level: LogLevel.info,
   createdAt: DateTime(2025, 2, 26),
   logger: 'Info Logger',
 );

 final _kWarnLog = LogMessage(
   message: '#Warn Message',
-  level: LogLevel.WARNING,
+  level: LogLevel.warning,
   createdAt: DateTime(2025, 2, 27),
   logger: 'Warn Logger',
 );
@@ -40,13 +40,15 @@ void main() {
     when(() => mockLogRepo.truncate(limit: any(named: 'limit')))
         .thenAnswer((_) async => {});
     when(() => mockStoreRepo.tryGet<int>(StoreKey.logLevel))
-        .thenAnswer((_) async => LogLevel.FINE.index);
+        .thenAnswer((_) async => LogLevel.fine.index);
     when(() => mockLogRepo.getAll()).thenAnswer((_) async => []);
     when(() => mockLogRepo.insert(any())).thenAnswer((_) async => true);
     when(() => mockLogRepo.insertAll(any())).thenAnswer((_) async => true);

-    sut =
-        await LogService.create(logRepo: mockLogRepo, storeRepo: mockStoreRepo);
+    sut = await LogService.create(
+      logRepository: mockLogRepo,
+      storeRepository: mockStoreRepo,
+    );
   });

   tearDown(() async {
@@ -72,14 +74,14 @@ void main() {
     setUp(() async {
       when(() => mockStoreRepo.insert<int>(StoreKey.logLevel, any()))
           .thenAnswer((_) async => true);
-      await sut.setlogLevel(LogLevel.SHOUT);
+      await sut.setlogLevel(LogLevel.shout);
     });

     test('Updates the log level in store', () {
       final index = verify(
         () => mockStoreRepo.insert<int>(StoreKey.logLevel, captureAny()),
       ).captured.firstOrNull;
-      expect(index, LogLevel.SHOUT.index);
+      expect(index, LogLevel.shout.index);
     });

     test('Sets log level on logger', () {
@@ -91,8 +93,8 @@ void main() {
     test('Buffers logs until timer elapses', () {
       TestUtils.fakeAsync((time) async {
         sut = await LogService.create(
-          logRepo: mockLogRepo,
-          storeRepo: mockStoreRepo,
+          logRepository: mockLogRepo,
+          storeRepository: mockStoreRepo,
           shouldBuffer: true,
         );
@@ -109,8 +111,8 @@ void main() {
     test('Batch inserts all logs on timer', () {
       TestUtils.fakeAsync((time) async {
         sut = await LogService.create(
-          logRepo: mockLogRepo,
-          storeRepo: mockStoreRepo,
+          logRepository: mockLogRepo,
+          storeRepository: mockStoreRepo,
           shouldBuffer: true,
         );
@@ -131,8 +133,8 @@ void main() {
     test('Does not buffer when off', () {
       TestUtils.fakeAsync((time) async {
         sut = await LogService.create(
-          logRepo: mockLogRepo,
-          storeRepo: mockStoreRepo,
+          logRepository: mockLogRepo,
+          storeRepository: mockStoreRepo,
           shouldBuffer: false,
         );
@@ -165,8 +167,8 @@ void main() {
     test('Combines result from both DB + Buffer', () {
       TestUtils.fakeAsync((time) async {
         sut = await LogService.create(
-          logRepo: mockLogRepo,
-          storeRepo: mockStoreRepo,
+          logRepository: mockLogRepo,
+          storeRepository: mockStoreRepo,
           shouldBuffer: true,
         );
@@ -72,8 +72,8 @@ void main() {
     await StoreService.init(storeRepository: IsarStoreRepository(db));
     await Store.put(StoreKey.currentUser, owner);
     await LogService.init(
-      logRepo: IsarLogRepository(db),
-      storeRepo: IsarStoreRepository(db),
+      logRepository: IsarLogRepository(db),
+      storeRepository: IsarStoreRepository(db),
     );
   });
   final List<Asset> initialAssets = [
@@ -2,6 +2,7 @@ import 'package:flutter_test/flutter_test.dart';
 import 'package:immich_mobile/entities/backup_album.entity.dart';
 import 'package:immich_mobile/services/album.service.dart';
 import 'package:mocktail/mocktail.dart';

 import '../fixtures/album.stub.dart';
 import '../fixtures/asset.stub.dart';
 import '../fixtures/user.stub.dart';
@@ -83,7 +84,9 @@ void main() {

   group('refreshRemoteAlbums', () {
     test('is working', () async {
-      when(() => userService.refreshUsers()).thenAnswer((_) async => true);
+      when(() => userService.getUsersFromServer()).thenAnswer((_) async => []);
+      when(() => syncService.syncUsersFromServer(any()))
+          .thenAnswer((_) async => true);
       when(() => albumApiRepository.getAll(shared: true))
           .thenAnswer((_) async => [AlbumStub.sharedWithUser]);
@@ -99,7 +102,8 @@ void main() {
       ).thenAnswer((_) async => true);
       final result = await sut.refreshRemoteAlbums();
       expect(result, true);
-      verify(() => userService.refreshUsers()).called(1);
+      verify(() => userService.getUsersFromServer()).called(1);
+      verify(() => syncService.syncUsersFromServer([])).called(1);
       verify(() => albumApiRepository.getAll(shared: true)).called(1);
       verify(() => albumApiRepository.getAll(shared: null)).called(1);
       verify(
@@ -7655,7 +7655,7 @@
   "info": {
     "title": "Immich",
     "description": "Immich API",
-    "version": "1.127.0",
+    "version": "1.128.0",
     "contact": {}
   },
   "tags": [],
@@ -12052,13 +12052,50 @@
     "SyncEntityType": {
       "enum": [
         "UserV1",
-        "UserDeleteV1"
+        "UserDeleteV1",
+        "PartnerV1",
+        "PartnerDeleteV1"
       ],
       "type": "string"
     },
+    "SyncPartnerDeleteV1": {
+      "properties": {
+        "sharedById": {
+          "type": "string"
+        },
+        "sharedWithId": {
+          "type": "string"
+        }
+      },
+      "required": [
+        "sharedById",
+        "sharedWithId"
+      ],
+      "type": "object"
+    },
+    "SyncPartnerV1": {
+      "properties": {
+        "inTimeline": {
+          "type": "boolean"
+        },
+        "sharedById": {
+          "type": "string"
+        },
+        "sharedWithId": {
+          "type": "string"
+        }
+      },
+      "required": [
+        "inTimeline",
+        "sharedById",
+        "sharedWithId"
+      ],
+      "type": "object"
+    },
     "SyncRequestType": {
       "enum": [
-        "UsersV1"
+        "UsersV1",
+        "PartnersV1"
       ],
       "type": "string"
     },
open-api/typescript-sdk/package-lock.json (12 changes, generated)
@@ -1,18 +1,18 @@
 {
   "name": "@immich/sdk",
-  "version": "1.127.0",
+  "version": "1.128.0",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "@immich/sdk",
-      "version": "1.127.0",
+      "version": "1.128.0",
       "license": "GNU Affero General Public License version 3",
       "dependencies": {
         "@oazapfts/runtime": "^1.0.2"
       },
       "devDependencies": {
-        "@types/node": "^22.13.4",
+        "@types/node": "^22.13.5",
         "typescript": "^5.3.3"
       }
     },
@@ -23,9 +23,9 @@
       "license": "MIT"
     },
     "node_modules/@types/node": {
-      "version": "22.13.4",
-      "resolved": "https://registry.npmjs.org/@types/node/-/node-22.13.4.tgz",
-      "integrity": "sha512-ywP2X0DYtX3y08eFVx5fNIw7/uIv8hYUKgXoK8oayJlLnKcRfEYCxWMVE1XagUdVtCJlZT1AU4LXEABW+L1Peg==",
+      "version": "22.13.8",
+      "resolved": "https://registry.npmjs.org/@types/node/-/node-22.13.8.tgz",
+      "integrity": "sha512-G3EfaZS+iOGYWLLRCEAXdWK9my08oHNZ+FHluRiggIYJPOXzhOiDgpVCUHaUvyIC5/fj7C/p637jdzC666AOKQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1,6 +1,6 @@
 {
   "name": "@immich/sdk",
-  "version": "1.127.0",
+  "version": "1.128.0",
   "description": "Auto-generated TypeScript SDK for the Immich API",
   "type": "module",
   "main": "./build/index.js",
@@ -19,7 +19,7 @@
     "@oazapfts/runtime": "^1.0.2"
   },
   "devDependencies": {
-    "@types/node": "^22.13.4",
+    "@types/node": "^22.13.5",
     "typescript": "^5.3.3"
   },
   "repository": {
@@ -1,6 +1,6 @@
 /**
  * Immich
- * 1.127.0
+ * 1.128.0
  * DO NOT MODIFY - This file has been generated using oazapfts.
  * See https://www.npmjs.com/package/oazapfts
  */
@@ -3645,10 +3645,13 @@ export enum Error2 {
 }
 export enum SyncEntityType {
     UserV1 = "UserV1",
-    UserDeleteV1 = "UserDeleteV1"
+    UserDeleteV1 = "UserDeleteV1",
+    PartnerV1 = "PartnerV1",
+    PartnerDeleteV1 = "PartnerDeleteV1"
 }
 export enum SyncRequestType {
-    UsersV1 = "UsersV1"
+    UsersV1 = "UsersV1",
+    PartnersV1 = "PartnersV1"
 }
 export enum TranscodeHWAccel {
     Nvenc = "nvenc",
Some files were not shown because too many files have changed in this diff.