Compare commits

...

73 Commits

Author SHA1 Message Date
mertalev
12381f6b3c use Date 2025-05-06 21:34:59 -04:00
mertalev
c9728a107e rename patch 2025-05-06 18:08:32 -04:00
mertalev
35d91aa6bf silly generator 2025-05-06 18:06:59 -04:00
mertalev
4174575785 fix 2025-05-06 14:55:00 -04:00
mertalev
606d4b66d0 fix spec 2025-05-05 11:16:13 -04:00
mertalev
71cc045405 string tuple 2025-05-05 10:35:12 -04:00
mertalev
21bbf2f5e2 linting, fix expected response 2025-05-05 10:03:20 -04:00
mertalev
3ace02b3e7 update timeline tests 2025-05-05 09:54:40 -04:00
mertalev
85359bfc1a update sql 2025-05-05 09:43:51 -04:00
mertalev
f7712c332e update sql 2025-05-05 09:38:24 -04:00
mertalev
b20440e4d5 update alt text tests 2025-05-05 09:18:23 -04:00
mertalev
1d885c1a20 update references to description 2025-05-05 09:12:07 -04:00
mertalev
ef9245487c openapi 2025-05-04 20:12:40 -04:00
mertalev
a3a2ced3a9 stack as tuple 2025-05-04 20:11:48 -04:00
mertalev
8837f5b4fb openapi 2025-05-04 19:26:09 -04:00
mertalev
97cc9e223e push aggregation to query 2025-05-04 19:24:08 -04:00
Min Idzelis
07c03b8a79 test 2025-05-03 14:24:00 +00:00
Min Idzelis
5a3e32fc3c lint 2025-05-03 14:07:39 +00:00
Min Idzelis
5520db10af lint/tests 2025-05-03 13:58:46 +00:00
Min Idzelis
ee08fd012d tests 2025-05-03 13:47:21 +00:00
Min Idzelis
f7fd213260 tests 2025-05-03 13:45:13 +00:00
Min Idzelis
73cd236756 date->string 2025-05-03 13:33:58 +00:00
Min Idzelis
bf0be6a655 openapi battle 2025-05-03 02:43:06 +00:00
Min Idzelis
6e8993c6eb Merge branch 'lighter_buckets_web' into lighter_buckets_server 2025-05-03 02:08:15 +00:00
Min Idzelis
aea2c9506d Use nulls, make-sql 2025-05-03 02:06:34 +00:00
Min Idzelis
8011605e6f lint 2025-05-02 23:29:14 +00:00
Min Idzelis
0ed2a2fd2e Merge remote-tracking branch 'origin/lighter_buckets_web' into lighter_buckets_server 2025-05-02 23:24:34 +00:00
Min Idzelis
9d527b37f0 Merge branch 'main' into lighter_buckets_web 2025-05-02 19:19:46 -04:00
Daniel Dietzler
62fc5b3c7d refactor: introduce modal manager (#18039) 2025-05-02 18:41:42 -04:00
Daniel Dietzler
15d431ba6a refactor: dialog callbacks (#18034) 2025-05-02 13:34:53 -04:00
Jason Rasmussen
5d21ba3166 chore: logging clean up (#18031) 2025-05-02 12:34:35 -05:00
Min Idzelis
1d3a546646 missing import 2025-05-02 01:23:40 +00:00
Min Idzelis
c16348e3fd Merge remote-tracking branch 'origin/main' into lighter_buckets_web 2025-05-02 00:56:41 +00:00
Thomas
da7a81b752 chore(server): split album update notifications into multiple jobs (#17879)
We would like to move away from the concept of finding and removing pending
jobs. The only place this is used is for album update notifications, and this
is done so that users who initially uploaded assets to an album will also
receive a notification if someone else then adds assets to the same album. This
can also be achieved with a job for each recipient. Multiple jobs also have the
advantages that they scale better for albums with many users, notifications can
be sent concurrently, retries are possible without sending duplicate
notifications, and it's clear which recipient a job failed for.
2025-04-30 17:45:35 -04:00
Jason Rasmussen
becdc3dcf5 refactor: job on-done (#18004) 2025-04-30 17:02:53 -04:00
Eli Gao
84b51e3cbb fix(server): double rotation on HEIF files (#18002)
* fix(server): double rotation on HEIF/HEIC files

* Update server/src/services/media.service.ts

* formatting

---------

Co-authored-by: Mert <101130780+mertalev@users.noreply.github.com>
2025-04-30 20:33:18 +00:00
Jason Rasmussen
b845184c80 chore: remove old memory lane implementation (#18000) 2025-04-30 14:23:32 -04:00
Jason Rasmussen
1fde02ee1e chore: remove unused types and code (#17999) 2025-04-30 13:41:23 -04:00
Jason Rasmussen
526c02297c refactor: stream queue migration (#17997) 2025-04-30 16:23:13 +00:00
Alex
732b06eec8 refactor: stream for sidecar (#17995)
* refactor: stream for sidecar

* chore: make sql

---------

Co-authored-by: Jason Rasmussen <jason@rasm.me>
2025-04-30 10:53:51 -05:00
Daniel Dietzler
436cff72b5 refactor: activity manager (#17943) 2025-04-30 15:50:38 +00:00
Jason Rasmussen
be5cc2cdf5 refactor: stream detect faces (#17996) 2025-04-30 11:25:30 -04:00
Jason Rasmussen
094a41ac9a chore: remove audit file report (#17994) 2025-04-30 11:17:23 -04:00
Daniel Dietzler
ebad6a008f fix: add missing translations to face editor (#17993) 2025-04-30 10:07:21 -05:00
Jason Rasmussen
0c261ffbe2 fix: queue in batches (#17989) 2025-04-30 10:52:51 -04:00
Jason Rasmussen
6df6103c67 chore: better immich-web logging (#17992) 2025-04-30 09:48:24 -05:00
Jason Rasmussen
8c5116bc1d refactor: stream search duplicates (#17991) 2025-04-30 10:40:32 -04:00
bo0tzz
e3812a0e36 chore: also run e2e tests on arm64 (#17822)
Co-authored-by: github-actions <41898282+github-actions[bot]@users.noreply.github.com>
2025-04-30 14:22:10 +02:00
Min Idzelis
4b1ced439b feat: improve/refactor focus handling (#17796)
* feat: improve focus

* test

* lint

* use modulus in loop
2025-04-30 00:19:38 -04:00
Min Idzelis
15d5460afb test 2025-04-29 13:52:00 +00:00
Min Idzelis
bc5d4b45a6 Adapt web client to consume new server response format 2025-04-29 13:45:40 +00:00
Min Idzelis
077703adcc Merge branch 'lighter_buckets_web' into lighter_buckets_server 2025-04-29 02:00:43 +00:00
Min Idzelis
580a0117c4 fix after merge 2025-04-29 01:50:39 +00:00
Min Idzelis
ffda7364dd Merge remote-tracking branch 'origin/main' into lighter_buckets_web 2025-04-29 01:35:20 +00:00
Min Idzelis
236973e329 unneeded cast 2025-04-29 01:26:53 +00:00
Min Idzelis
cd8806eac0 revert settings 2025-04-28 13:04:00 +00:00
Min Idzelis
7f934583cf lint 2025-04-24 02:07:45 +00:00
Min Idzelis
6308ae71a1 fix: flappy e2e test 2025-04-24 02:01:27 +00:00
Min Idzelis
bfefa36f04 feat(server): lighter buckets 2025-04-24 01:51:16 +00:00
Min Idzelis
50cfc461a9 missing import 2025-04-24 00:20:08 +00:00
Min Idzelis
77121a0e07 tests 2025-04-24 00:09:11 +00:00
Min Idzelis
89bfa692b1 update tests 2025-04-23 23:40:00 +00:00
Min Idzelis
683a10f0fe Merge branch 'main' into lighter_buckets_web 2025-04-23 19:20:27 -04:00
Min Idzelis
a5eaaddec4 test fix 2025-04-23 23:19:48 +00:00
Min Idzelis
d76c50ff22 Merge remote-tracking branch 'origin/main' into lighter_buckets_web 2025-04-23 21:41:20 +00:00
Min Idzelis
0795f8a761 re-add alt-text 2025-04-23 21:41:09 +00:00
Min Idzelis
6cb7fffe91 empty - trigger ci 2025-04-20 14:08:52 +00:00
Min Idzelis
9f6120a134 ensure keys on getAssetInfo, alt-text 2025-04-20 12:51:26 +00:00
Min Idzelis
f3fe043c22 Remove generics from AssetInteraction 2025-04-20 03:47:51 +00:00
Min Idzelis
9b7e9bc7b8 weird ssr 2025-04-20 03:27:18 +00:00
Min Idzelis
c1e699ebaf GalleryViewer 2025-04-20 02:51:32 +00:00
Min Idzelis
3b9490e28d Merge remote-tracking branch 'origin/main' into lighter_buckets_web 2025-04-19 22:46:41 +00:00
Min Idzelis
5a8f9f3b5c feat(web): lighter timeline buckets 2025-04-19 22:43:08 +00:00
175 changed files with 3223 additions and 4610 deletions
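
The per-recipient fan-out described in commit da7a81b752 above can be pictured with a minimal TypeScript sketch. This is only an illustration of the idea, not Immich's actual notification service: the JobQueue interface and the 'album-update-notification' job name are hypothetical.

interface AlbumUpdateJob {
  albumId: string;
  recipientId: string;
}

interface JobQueue {
  enqueue(name: string, data: AlbumUpdateJob): Promise<void>;
}

// Instead of one album-level job that must be found and removed when the album
// changes again, enqueue one small job per recipient. Each job can be retried
// independently, recipients can be notified concurrently, and a failure
// identifies exactly which recipient was affected.
async function queueAlbumUpdateNotifications(
  queue: JobQueue,
  albumId: string,
  recipientIds: string[],
): Promise<void> {
  await Promise.all(
    recipientIds.map((recipientId) =>
      queue.enqueue('album-update-notification', { albumId, recipientId }),
    ),
  );
}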


@@ -338,12 +338,15 @@ jobs:
name: End-to-End Tests (Server & CLI)
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_e2e_server_cli == 'true' }}
runs-on: mich
runs-on: ${{ matrix.runner }}
permissions:
contents: read
defaults:
run:
working-directory: ./e2e
strategy:
matrix:
runner: [mich, ubuntu-24.04-arm]
steps:
- name: Checkout code
@@ -383,12 +386,15 @@ jobs:
name: End-to-End Tests (Web)
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_e2e_web == 'true' }}
runs-on: mich
runs-on: ${{ matrix.runner }}
permissions:
contents: read
defaults:
run:
working-directory: ./e2e
strategy:
matrix:
runner: [mich, ubuntu-24.04-arm]
steps:
- name: Checkout code
@@ -423,6 +429,21 @@ jobs:
run: npx playwright test
if: ${{ !cancelled() }}
success-check-e2e:
name: End-to-End Tests Success
needs: [e2e-tests-server-cli, e2e-tests-web]
permissions: {}
runs-on: ubuntu-latest
if: always()
steps:
- name: Any jobs failed?
if: ${{ contains(needs.*.result, 'failure') }}
run: exit 1
- name: All jobs passed or skipped
if: ${{ !(contains(needs.*.result, 'failure')) }}
# zizmor: ignore[template-injection]
run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"
mobile-unit-tests:
name: Unit Test Mobile
needs: pre-job

api.mustache Normal file

@@ -0,0 +1,194 @@
{{>header}}
{{>part_of}}
{{#operations}}
class {{{classname}}} {
{{{classname}}}([ApiClient? apiClient]) : apiClient = apiClient ?? defaultApiClient;
final ApiClient apiClient;
{{#operation}}
{{#summary}}
/// {{{.}}}
{{/summary}}
{{#notes}}
{{#summary}}
///
{{/summary}}
/// {{{notes}}}
///
/// Note: This method returns the HTTP [Response].
{{/notes}}
{{^notes}}
{{#summary}}
///
/// Note: This method returns the HTTP [Response].
{{/summary}}
{{^summary}}
/// Performs an HTTP '{{{httpMethod}}} {{{path}}}' operation and returns the [Response].
{{/summary}}
{{/notes}}
{{#hasParams}}
{{#summary}}
///
{{/summary}}
{{^summary}}
{{#notes}}
///
{{/notes}}
{{/summary}}
/// Parameters:
///
{{/hasParams}}
{{#allParams}}
/// * [{{{dataType}}}] {{{paramName}}}{{#required}} (required){{/required}}{{#optional}} (optional){{/optional}}:
{{#description}}
/// {{{.}}}
{{/description}}
{{^-last}}
///
{{/-last}}
{{/allParams}}
Future<Response> {{{nickname}}}WithHttpInfo({{#allParams}}{{#required}}{{{dataType}}} {{{paramName}}},{{^-last}} {{/-last}}{{/required}}{{/allParams}}{{#hasOptionalParams}}{ {{#allParams}}{{^required}}{{{dataType}}}? {{{paramName}}},{{^-last}} {{/-last}}{{/required}}{{/allParams}} }{{/hasOptionalParams}}) async {
// ignore: prefer_const_declarations
final path = r'{{{path}}}'{{#pathParams}}
.replaceAll({{=<% %>=}}'{<% baseName %>}'<%={{ }}=%>, {{{paramName}}}{{^isString}}.toString(){{/isString}}){{/pathParams}};
// ignore: prefer_final_locals
Object? postBody{{#bodyParam}} = {{{paramName}}}{{/bodyParam}};
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
{{#hasQueryParams}}
{{#queryParams}}
{{^required}}
if ({{{paramName}}} != null) {
{{/required}}
queryParams.addAll(_queryParams('{{{collectionFormat}}}', '{{{baseName}}}', {{{paramName}}}));
{{^required}}
}
{{/required}}
{{/queryParams}}
{{/hasQueryParams}}
{{#hasHeaderParams}}
{{#headerParams}}
{{#required}}
headerParams[r'{{{baseName}}}'] = parameterToString({{{paramName}}});
{{/required}}
{{^required}}
if ({{{paramName}}} != null) {
headerParams[r'{{{baseName}}}'] = parameterToString({{{paramName}}});
}
{{/required}}
{{/headerParams}}
{{/hasHeaderParams}}
const contentTypes = <String>[{{#prioritizedContentTypes}}'{{{mediaType}}}'{{^-last}}, {{/-last}}{{/prioritizedContentTypes}}];
{{#isMultipart}}
bool hasFields = false;
final mp = MultipartRequest('{{{httpMethod}}}', Uri.parse(path));
{{#formParams}}
{{^isFile}}
if ({{{paramName}}} != null) {
hasFields = true;
mp.fields[r'{{{baseName}}}'] = parameterToString({{{paramName}}});
}
{{/isFile}}
{{#isFile}}
if ({{{paramName}}} != null) {
hasFields = true;
mp.fields[r'{{{baseName}}}'] = {{{paramName}}}.field;
mp.files.add({{{paramName}}});
}
{{/isFile}}
{{/formParams}}
if (hasFields) {
postBody = mp;
}
{{/isMultipart}}
{{^isMultipart}}
{{#formParams}}
{{^isFile}}
if ({{{paramName}}} != null) {
formParams[r'{{{baseName}}}'] = parameterToString({{{paramName}}});
}
{{/isFile}}
{{/formParams}}
{{/isMultipart}}
return apiClient.invokeAPI(
path,
'{{{httpMethod}}}',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
{{#summary}}
/// {{{.}}}
{{/summary}}
{{#notes}}
{{#summary}}
///
{{/summary}}
/// {{{notes}}}
{{/notes}}
{{#hasParams}}
{{#summary}}
///
{{/summary}}
{{^summary}}
{{#notes}}
///
{{/notes}}
{{/summary}}
/// Parameters:
///
{{/hasParams}}
{{#allParams}}
/// * [{{{dataType}}}] {{{paramName}}}{{#required}} (required){{/required}}{{#optional}} (optional){{/optional}}:
{{#description}}
/// {{{.}}}
{{/description}}
{{^-last}}
///
{{/-last}}
{{/allParams}}
Future<{{#returnType}}{{{.}}}?{{/returnType}}{{^returnType}}void{{/returnType}}> {{{nickname}}}({{#allParams}}{{#required}}{{{dataType}}} {{{paramName}}},{{^-last}} {{/-last}}{{/required}}{{/allParams}}{{#hasOptionalParams}}{ {{#allParams}}{{^required}}{{{dataType}}}? {{{paramName}}},{{^-last}} {{/-last}}{{/required}}{{/allParams}} }{{/hasOptionalParams}}) async {
final response = await {{{nickname}}}WithHttpInfo({{#allParams}}{{#required}}{{{paramName}}},{{^-last}} {{/-last}}{{/required}}{{/allParams}}{{#hasOptionalParams}} {{#allParams}}{{^required}}{{{paramName}}}: {{{paramName}}},{{^-last}} {{/-last}}{{/required}}{{/allParams}} {{/hasOptionalParams}});
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
{{#returnType}}
// When a remote server returns no body with a status of 204, we shall not decode it.
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
// FormatException when trying to decode an empty string.
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
{{#native_serialization}}
{{#isArray}}
final responseBody = await _decodeBodyBytes(response);
return (await apiClient.deserializeAsync(responseBody, '{{{returnType}}}') as List)
.cast<{{{returnBaseType}}}>()
.{{#uniqueItems}}toSet(){{/uniqueItems}}{{^uniqueItems}}toList(growable: false){{/uniqueItems}};
{{/isArray}}
{{^isArray}}
{{#isMap}}
return {{{returnType}}}.from(await apiClient.deserializeAsync(await _decodeBodyBytes(response), '{{{returnType}}}'),);
{{/isMap}}
{{^isMap}}
return await apiClient.deserializeAsync(await _decodeBodyBytes(response), '{{{returnType}}}',) as {{{returnType}}};
{{/isMap}}{{/isArray}}{{/native_serialization}}
}
return null;
{{/returnType}}
}
{{/operation}}
}
{{/operations}}


@@ -1,43 +0,0 @@
import { deleteAssets, getAuditFiles, updateAsset, type LoginResponseDto } from '@immich/sdk';
import { asBearerAuth, utils } from 'src/utils';
import { beforeAll, describe, expect, it } from 'vitest';
describe('/audits', () => {
let admin: LoginResponseDto;
beforeAll(async () => {
await utils.resetDatabase();
await utils.resetFilesystem();
admin = await utils.adminSetup();
});
// TODO: Enable these tests again once #7436 is resolved as these were flaky
describe.skip('GET :/file-report', () => {
it('excludes assets without issues from report', async () => {
const [trashedAsset, archivedAsset] = await Promise.all([
utils.createAsset(admin.accessToken),
utils.createAsset(admin.accessToken),
utils.createAsset(admin.accessToken),
]);
await Promise.all([
deleteAssets({ assetBulkDeleteDto: { ids: [trashedAsset.id] } }, { headers: asBearerAuth(admin.accessToken) }),
updateAsset(
{
id: archivedAsset.id,
updateAssetDto: { isArchived: true },
},
{ headers: asBearerAuth(admin.accessToken) },
),
]);
const body = await getAuditFiles({
headers: asBearerAuth(admin.accessToken),
});
expect(body.orphans).toHaveLength(0);
expect(body.extras).toHaveLength(0);
});
});
});


@@ -1,4 +1,4 @@
import { AssetMediaResponseDto, LoginResponseDto, SharedLinkType, TimeBucketSize } from '@immich/sdk';
import { AssetMediaResponseDto, LoginResponseDto, SharedLinkType } from '@immich/sdk';
import { DateTime } from 'luxon';
import { createUserDto } from 'src/fixtures';
import { errorDto } from 'src/responses';
@@ -52,7 +52,7 @@ describe('/timeline', () => {
describe('GET /timeline/buckets', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/timeline/buckets').query({ size: TimeBucketSize.Month });
const { status, body } = await request(app).get('/timeline/buckets').query({});
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
@@ -61,7 +61,7 @@ describe('/timeline', () => {
const { status, body } = await request(app)
.get('/timeline/buckets')
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
.query({ size: TimeBucketSize.Month });
.query({});
expect(status).toBe(200);
expect(body).toEqual(
@@ -78,33 +78,17 @@ describe('/timeline', () => {
assetIds: userAssets.map(({ id }) => id),
});
const { status, body } = await request(app)
.get('/timeline/buckets')
.query({ key: sharedLink.key, size: TimeBucketSize.Month });
const { status, body } = await request(app).get('/timeline/buckets').query({ key: sharedLink.key });
expect(status).toBe(400);
expect(body).toEqual(errorDto.noPermission);
});
it('should get time buckets by day', async () => {
const { status, body } = await request(app)
.get('/timeline/buckets')
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
.query({ size: TimeBucketSize.Day });
expect(status).toBe(200);
expect(body).toEqual([
{ count: 2, timeBucket: '1970-02-11T00:00:00.000Z' },
{ count: 1, timeBucket: '1970-02-10T00:00:00.000Z' },
{ count: 1, timeBucket: '1970-01-01T00:00:00.000Z' },
]);
});
it('should return error if time bucket is requested with partners asset and archived', async () => {
const req1 = await request(app)
.get('/timeline/buckets')
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
.query({ size: TimeBucketSize.Month, withPartners: true, isArchived: true });
.query({ withPartners: true, isArchived: true });
expect(req1.status).toBe(400);
expect(req1.body).toEqual(errorDto.badRequest());
@@ -112,7 +96,7 @@ describe('/timeline', () => {
const req2 = await request(app)
.get('/timeline/buckets')
.set('Authorization', `Bearer ${user.accessToken}`)
.query({ size: TimeBucketSize.Month, withPartners: true, isArchived: undefined });
.query({ withPartners: true, isArchived: undefined });
expect(req2.status).toBe(400);
expect(req2.body).toEqual(errorDto.badRequest());
@@ -122,7 +106,7 @@ describe('/timeline', () => {
const req1 = await request(app)
.get('/timeline/buckets')
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
.query({ size: TimeBucketSize.Month, withPartners: true, isFavorite: true });
.query({ withPartners: true, isFavorite: true });
expect(req1.status).toBe(400);
expect(req1.body).toEqual(errorDto.badRequest());
@@ -130,7 +114,7 @@ describe('/timeline', () => {
const req2 = await request(app)
.get('/timeline/buckets')
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
.query({ size: TimeBucketSize.Month, withPartners: true, isFavorite: false });
.query({ withPartners: true, isFavorite: false });
expect(req2.status).toBe(400);
expect(req2.body).toEqual(errorDto.badRequest());
@@ -140,7 +124,7 @@ describe('/timeline', () => {
const req = await request(app)
.get('/timeline/buckets')
.set('Authorization', `Bearer ${user.accessToken}`)
.query({ size: TimeBucketSize.Month, withPartners: true, isTrashed: true });
.query({ withPartners: true, isTrashed: true });
expect(req.status).toBe(400);
expect(req.body).toEqual(errorDto.badRequest());
@@ -150,7 +134,6 @@ describe('/timeline', () => {
describe('GET /timeline/bucket', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/timeline/bucket').query({
size: TimeBucketSize.Month,
timeBucket: '1900-01-01',
});
@@ -161,11 +144,27 @@ describe('/timeline', () => {
it('should handle 5 digit years', async () => {
const { status, body } = await request(app)
.get('/timeline/bucket')
.query({ size: TimeBucketSize.Month, timeBucket: '012345-01-01' })
.query({ timeBucket: '012345-01-01' })
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual([]);
expect(body).toEqual({
city: [],
country: [],
duration: [],
id: [],
isArchived: [],
isFavorite: [],
isImage: [],
isTrashed: [],
livePhotoVideoId: [],
localDateTime: [],
ownerId: [],
projectionType: [],
ratio: [],
status: [],
thumbhash: [],
});
});
// TODO enable date string validation while still accepting 5 digit years
@@ -173,7 +172,7 @@ describe('/timeline', () => {
// const { status, body } = await request(app)
// .get('/timeline/bucket')
// .set('Authorization', `Bearer ${user.accessToken}`)
// .query({ size: TimeBucketSize.Month, timeBucket: 'foo' });
// .query({ timeBucket: 'foo' });
// expect(status).toBe(400);
// expect(body).toEqual(errorDto.badRequest);
@@ -183,10 +182,26 @@ describe('/timeline', () => {
const { status, body } = await request(app)
.get('/timeline/bucket')
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
.query({ size: TimeBucketSize.Month, timeBucket: '1970-02-10' });
.query({ timeBucket: '1970-02-10' });
expect(status).toBe(200);
expect(body).toEqual([]);
expect(body).toEqual({
city: [],
country: [],
duration: [],
id: [],
isArchived: [],
isFavorite: [],
isImage: [],
isTrashed: [],
livePhotoVideoId: [],
localDateTime: [],
ownerId: [],
projectionType: [],
ratio: [],
status: [],
thumbhash: [],
});
});
});
});


@@ -1260,6 +1260,7 @@
"no_favorites_message": "Add favorites to quickly find your best pictures and videos",
"no_libraries_message": "Create an external library to view your photos and videos",
"no_name": "No Name",
"no_people_found": "No matching people found",
"no_places": "No places",
"no_results": "No results",
"no_results_description": "Try a synonym or more general keyword",
@@ -1572,6 +1573,7 @@
"select_keep_all": "Select keep all",
"select_library_owner": "Select library owner",
"select_new_face": "Select new face",
"select_person_to_tag": "Select a person to tag",
"select_photos": "Select photos",
"select_trash_all": "Select trash all",
"select_user_for_sharing_page_err_album": "Failed to create album",


@@ -100,7 +100,6 @@ Class | Method | HTTP request | Description
*AssetsApi* | [**getAllUserAssetsByDeviceId**](doc//AssetsApi.md#getalluserassetsbydeviceid) | **GET** /assets/device/{deviceId} | getAllUserAssetsByDeviceId
*AssetsApi* | [**getAssetInfo**](doc//AssetsApi.md#getassetinfo) | **GET** /assets/{id} |
*AssetsApi* | [**getAssetStatistics**](doc//AssetsApi.md#getassetstatistics) | **GET** /assets/statistics |
*AssetsApi* | [**getMemoryLane**](doc//AssetsApi.md#getmemorylane) | **GET** /assets/memory-lane |
*AssetsApi* | [**getRandom**](doc//AssetsApi.md#getrandom) | **GET** /assets/random |
*AssetsApi* | [**playAssetVideo**](doc//AssetsApi.md#playassetvideo) | **GET** /assets/{id}/video/playback |
*AssetsApi* | [**replaceAsset**](doc//AssetsApi.md#replaceasset) | **PUT** /assets/{id}/original | replaceAsset
@@ -122,9 +121,6 @@ Class | Method | HTTP request | Description
*FacesApi* | [**deleteFace**](doc//FacesApi.md#deleteface) | **DELETE** /faces/{id} |
*FacesApi* | [**getFaces**](doc//FacesApi.md#getfaces) | **GET** /faces |
*FacesApi* | [**reassignFacesById**](doc//FacesApi.md#reassignfacesbyid) | **PUT** /faces/{id} |
*FileReportsApi* | [**fixAuditFiles**](doc//FileReportsApi.md#fixauditfiles) | **POST** /reports/fix |
*FileReportsApi* | [**getAuditFiles**](doc//FileReportsApi.md#getauditfiles) | **GET** /reports |
*FileReportsApi* | [**getFileChecksums**](doc//FileReportsApi.md#getfilechecksums) | **POST** /reports/checksum |
*JobsApi* | [**createJob**](doc//JobsApi.md#createjob) | **POST** /jobs |
*JobsApi* | [**getAllJobsStatus**](doc//JobsApi.md#getalljobsstatus) | **GET** /jobs |
*JobsApi* | [**sendJobCommand**](doc//JobsApi.md#sendjobcommand) | **PUT** /jobs/{id} |
@@ -332,11 +328,6 @@ Class | Method | HTTP request | Description
- [ExifResponseDto](doc//ExifResponseDto.md)
- [FaceDto](doc//FaceDto.md)
- [FacialRecognitionConfig](doc//FacialRecognitionConfig.md)
- [FileChecksumDto](doc//FileChecksumDto.md)
- [FileChecksumResponseDto](doc//FileChecksumResponseDto.md)
- [FileReportDto](doc//FileReportDto.md)
- [FileReportFixDto](doc//FileReportFixDto.md)
- [FileReportItemDto](doc//FileReportItemDto.md)
- [FoldersResponse](doc//FoldersResponse.md)
- [FoldersUpdate](doc//FoldersUpdate.md)
- [ImageFormat](doc//ImageFormat.md)
@@ -361,7 +352,6 @@ Class | Method | HTTP request | Description
- [MemoriesResponse](doc//MemoriesResponse.md)
- [MemoriesUpdate](doc//MemoriesUpdate.md)
- [MemoryCreateDto](doc//MemoryCreateDto.md)
- [MemoryLaneResponseDto](doc//MemoryLaneResponseDto.md)
- [MemoryResponseDto](doc//MemoryResponseDto.md)
- [MemoryType](doc//MemoryType.md)
- [MemoryUpdateDto](doc//MemoryUpdateDto.md)
@@ -381,8 +371,6 @@ Class | Method | HTTP request | Description
- [OnThisDayDto](doc//OnThisDayDto.md)
- [PartnerDirection](doc//PartnerDirection.md)
- [PartnerResponseDto](doc//PartnerResponseDto.md)
- [PathEntityType](doc//PathEntityType.md)
- [PathType](doc//PathType.md)
- [PeopleResponse](doc//PeopleResponse.md)
- [PeopleResponseDto](doc//PeopleResponseDto.md)
- [PeopleUpdate](doc//PeopleUpdate.md)
@@ -489,8 +477,8 @@ Class | Method | HTTP request | Description
- [TemplateDto](doc//TemplateDto.md)
- [TemplateResponseDto](doc//TemplateResponseDto.md)
- [TestEmailResponseDto](doc//TestEmailResponseDto.md)
- [TimeBucketResponseDto](doc//TimeBucketResponseDto.md)
- [TimeBucketSize](doc//TimeBucketSize.md)
- [TimeBucketAssetResponseDto](doc//TimeBucketAssetResponseDto.md)
- [TimeBucketsResponseDto](doc//TimeBucketsResponseDto.md)
- [ToneMapping](doc//ToneMapping.md)
- [TranscodeHWAccel](doc//TranscodeHWAccel.md)
- [TranscodePolicy](doc//TranscodePolicy.md)


@@ -39,7 +39,6 @@ part 'api/deprecated_api.dart';
part 'api/download_api.dart';
part 'api/duplicates_api.dart';
part 'api/faces_api.dart';
part 'api/file_reports_api.dart';
part 'api/jobs_api.dart';
part 'api/libraries_api.dart';
part 'api/map_api.dart';
@@ -133,11 +132,6 @@ part 'model/email_notifications_update.dart';
part 'model/exif_response_dto.dart';
part 'model/face_dto.dart';
part 'model/facial_recognition_config.dart';
part 'model/file_checksum_dto.dart';
part 'model/file_checksum_response_dto.dart';
part 'model/file_report_dto.dart';
part 'model/file_report_fix_dto.dart';
part 'model/file_report_item_dto.dart';
part 'model/folders_response.dart';
part 'model/folders_update.dart';
part 'model/image_format.dart';
@@ -162,7 +156,6 @@ part 'model/map_reverse_geocode_response_dto.dart';
part 'model/memories_response.dart';
part 'model/memories_update.dart';
part 'model/memory_create_dto.dart';
part 'model/memory_lane_response_dto.dart';
part 'model/memory_response_dto.dart';
part 'model/memory_type.dart';
part 'model/memory_update_dto.dart';
@@ -182,8 +175,6 @@ part 'model/o_auth_token_endpoint_auth_method.dart';
part 'model/on_this_day_dto.dart';
part 'model/partner_direction.dart';
part 'model/partner_response_dto.dart';
part 'model/path_entity_type.dart';
part 'model/path_type.dart';
part 'model/people_response.dart';
part 'model/people_response_dto.dart';
part 'model/people_update.dart';
@@ -290,8 +281,8 @@ part 'model/tags_update.dart';
part 'model/template_dto.dart';
part 'model/template_response_dto.dart';
part 'model/test_email_response_dto.dart';
part 'model/time_bucket_response_dto.dart';
part 'model/time_bucket_size.dart';
part 'model/time_bucket_asset_response_dto.dart';
part 'model/time_buckets_response_dto.dart';
part 'model/tone_mapping.dart';
part 'model/transcode_hw_accel.dart';
part 'model/transcode_policy.dart';


@@ -404,63 +404,6 @@ class AssetsApi {
return null;
}
/// Performs an HTTP 'GET /assets/memory-lane' operation and returns the [Response].
/// Parameters:
///
/// * [int] day (required):
///
/// * [int] month (required):
Future<Response> getMemoryLaneWithHttpInfo(int day, int month,) async {
// ignore: prefer_const_declarations
final apiPath = r'/assets/memory-lane';
// ignore: prefer_final_locals
Object? postBody;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
queryParams.addAll(_queryParams('', 'day', day));
queryParams.addAll(_queryParams('', 'month', month));
const contentTypes = <String>[];
return apiClient.invokeAPI(
apiPath,
'GET',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
/// Parameters:
///
/// * [int] day (required):
///
/// * [int] month (required):
Future<List<MemoryLaneResponseDto>?> getMemoryLane(int day, int month,) async {
final response = await getMemoryLaneWithHttpInfo(day, month,);
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
// When a remote server returns no body with a status of 204, we shall not decode it.
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
// FormatException when trying to decode an empty string.
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
final responseBody = await _decodeBodyBytes(response);
return (await apiClient.deserializeAsync(responseBody, 'List<MemoryLaneResponseDto>') as List)
.cast<MemoryLaneResponseDto>()
.toList(growable: false);
}
return null;
}
/// This property was deprecated in v1.116.0
///
/// Note: This method returns the HTTP [Response].


@@ -1,148 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class FileReportsApi {
FileReportsApi([ApiClient? apiClient]) : apiClient = apiClient ?? defaultApiClient;
final ApiClient apiClient;
/// Performs an HTTP 'POST /reports/fix' operation and returns the [Response].
/// Parameters:
///
/// * [FileReportFixDto] fileReportFixDto (required):
Future<Response> fixAuditFilesWithHttpInfo(FileReportFixDto fileReportFixDto,) async {
// ignore: prefer_const_declarations
final apiPath = r'/reports/fix';
// ignore: prefer_final_locals
Object? postBody = fileReportFixDto;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
const contentTypes = <String>['application/json'];
return apiClient.invokeAPI(
apiPath,
'POST',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
/// Parameters:
///
/// * [FileReportFixDto] fileReportFixDto (required):
Future<void> fixAuditFiles(FileReportFixDto fileReportFixDto,) async {
final response = await fixAuditFilesWithHttpInfo(fileReportFixDto,);
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
}
/// Performs an HTTP 'GET /reports' operation and returns the [Response].
Future<Response> getAuditFilesWithHttpInfo() async {
// ignore: prefer_const_declarations
final apiPath = r'/reports';
// ignore: prefer_final_locals
Object? postBody;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
const contentTypes = <String>[];
return apiClient.invokeAPI(
apiPath,
'GET',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
Future<FileReportDto?> getAuditFiles() async {
final response = await getAuditFilesWithHttpInfo();
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
// When a remote server returns no body with a status of 204, we shall not decode it.
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
// FormatException when trying to decode an empty string.
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'FileReportDto',) as FileReportDto;
}
return null;
}
/// Performs an HTTP 'POST /reports/checksum' operation and returns the [Response].
/// Parameters:
///
/// * [FileChecksumDto] fileChecksumDto (required):
Future<Response> getFileChecksumsWithHttpInfo(FileChecksumDto fileChecksumDto,) async {
// ignore: prefer_const_declarations
final apiPath = r'/reports/checksum';
// ignore: prefer_final_locals
Object? postBody = fileChecksumDto;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
const contentTypes = <String>['application/json'];
return apiClient.invokeAPI(
apiPath,
'POST',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
/// Parameters:
///
/// * [FileChecksumDto] fileChecksumDto (required):
Future<List<FileChecksumResponseDto>?> getFileChecksums(FileChecksumDto fileChecksumDto,) async {
final response = await getFileChecksumsWithHttpInfo(fileChecksumDto,);
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
// When a remote server returns no body with a status of 204, we shall not decode it.
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
// FormatException when trying to decode an empty string.
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
final responseBody = await _decodeBodyBytes(response);
return (await apiClient.deserializeAsync(responseBody, 'List<FileChecksumResponseDto>') as List)
.cast<FileChecksumResponseDto>()
.toList(growable: false);
}
return null;
}
}


@@ -19,8 +19,6 @@ class TimelineApi {
/// Performs an HTTP 'GET /timeline/bucket' operation and returns the [Response].
/// Parameters:
///
/// * [TimeBucketSize] size (required):
///
/// * [String] timeBucket (required):
///
/// * [String] albumId:
@@ -35,6 +33,10 @@ class TimelineApi {
///
/// * [AssetOrder] order:
///
/// * [num] page:
///
/// * [num] pageSize:
///
/// * [String] personId:
///
/// * [String] tagId:
@@ -44,7 +46,7 @@ class TimelineApi {
/// * [bool] withPartners:
///
/// * [bool] withStacked:
Future<Response> getTimeBucketWithHttpInfo(TimeBucketSize size, String timeBucket, { String? albumId, bool? isArchived, bool? isFavorite, bool? isTrashed, String? key, AssetOrder? order, String? personId, String? tagId, String? userId, bool? withPartners, bool? withStacked, }) async {
Future<Response> getTimeBucketWithHttpInfo(String timeBucket, { String? albumId, bool? isArchived, bool? isFavorite, bool? isTrashed, String? key, AssetOrder? order, num? page, num? pageSize, String? personId, String? tagId, String? userId, bool? withPartners, bool? withStacked, }) async {
// ignore: prefer_const_declarations
final apiPath = r'/timeline/bucket';
@@ -73,10 +75,15 @@ class TimelineApi {
if (order != null) {
queryParams.addAll(_queryParams('', 'order', order));
}
if (page != null) {
queryParams.addAll(_queryParams('', 'page', page));
}
if (pageSize != null) {
queryParams.addAll(_queryParams('', 'pageSize', pageSize));
}
if (personId != null) {
queryParams.addAll(_queryParams('', 'personId', personId));
}
queryParams.addAll(_queryParams('', 'size', size));
if (tagId != null) {
queryParams.addAll(_queryParams('', 'tagId', tagId));
}
@@ -107,8 +114,6 @@ class TimelineApi {
/// Parameters:
///
/// * [TimeBucketSize] size (required):
///
/// * [String] timeBucket (required):
///
/// * [String] albumId:
@@ -123,6 +128,10 @@ class TimelineApi {
///
/// * [AssetOrder] order:
///
/// * [num] page:
///
/// * [num] pageSize:
///
/// * [String] personId:
///
/// * [String] tagId:
@@ -132,8 +141,8 @@ class TimelineApi {
/// * [bool] withPartners:
///
/// * [bool] withStacked:
Future<List<AssetResponseDto>?> getTimeBucket(TimeBucketSize size, String timeBucket, { String? albumId, bool? isArchived, bool? isFavorite, bool? isTrashed, String? key, AssetOrder? order, String? personId, String? tagId, String? userId, bool? withPartners, bool? withStacked, }) async {
final response = await getTimeBucketWithHttpInfo(size, timeBucket, albumId: albumId, isArchived: isArchived, isFavorite: isFavorite, isTrashed: isTrashed, key: key, order: order, personId: personId, tagId: tagId, userId: userId, withPartners: withPartners, withStacked: withStacked, );
Future<TimeBucketAssetResponseDto?> getTimeBucket(String timeBucket, { String? albumId, bool? isArchived, bool? isFavorite, bool? isTrashed, String? key, AssetOrder? order, num? page, num? pageSize, String? personId, String? tagId, String? userId, bool? withPartners, bool? withStacked, }) async {
final response = await getTimeBucketWithHttpInfo(timeBucket, albumId: albumId, isArchived: isArchived, isFavorite: isFavorite, isTrashed: isTrashed, key: key, order: order, page: page, pageSize: pageSize, personId: personId, tagId: tagId, userId: userId, withPartners: withPartners, withStacked: withStacked, );
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
@@ -141,11 +150,8 @@ class TimelineApi {
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
// FormatException when trying to decode an empty string.
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
final responseBody = await _decodeBodyBytes(response);
return (await apiClient.deserializeAsync(responseBody, 'List<AssetResponseDto>') as List)
.cast<AssetResponseDto>()
.toList(growable: false);
return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'TimeBucketAssetResponseDto',) as TimeBucketAssetResponseDto;
}
return null;
}
@@ -153,8 +159,6 @@ class TimelineApi {
/// Performs an HTTP 'GET /timeline/buckets' operation and returns the [Response].
/// Parameters:
///
/// * [TimeBucketSize] size (required):
///
/// * [String] albumId:
///
/// * [bool] isArchived:
@@ -176,7 +180,7 @@ class TimelineApi {
/// * [bool] withPartners:
///
/// * [bool] withStacked:
Future<Response> getTimeBucketsWithHttpInfo(TimeBucketSize size, { String? albumId, bool? isArchived, bool? isFavorite, bool? isTrashed, String? key, AssetOrder? order, String? personId, String? tagId, String? userId, bool? withPartners, bool? withStacked, }) async {
Future<Response> getTimeBucketsWithHttpInfo({ String? albumId, bool? isArchived, bool? isFavorite, bool? isTrashed, String? key, AssetOrder? order, String? personId, String? tagId, String? userId, bool? withPartners, bool? withStacked, }) async {
// ignore: prefer_const_declarations
final apiPath = r'/timeline/buckets';
@@ -208,7 +212,6 @@ class TimelineApi {
if (personId != null) {
queryParams.addAll(_queryParams('', 'personId', personId));
}
queryParams.addAll(_queryParams('', 'size', size));
if (tagId != null) {
queryParams.addAll(_queryParams('', 'tagId', tagId));
}
@@ -238,8 +241,6 @@ class TimelineApi {
/// Parameters:
///
/// * [TimeBucketSize] size (required):
///
/// * [String] albumId:
///
/// * [bool] isArchived:
@@ -261,8 +262,8 @@ class TimelineApi {
/// * [bool] withPartners:
///
/// * [bool] withStacked:
Future<List<TimeBucketResponseDto>?> getTimeBuckets(TimeBucketSize size, { String? albumId, bool? isArchived, bool? isFavorite, bool? isTrashed, String? key, AssetOrder? order, String? personId, String? tagId, String? userId, bool? withPartners, bool? withStacked, }) async {
final response = await getTimeBucketsWithHttpInfo(size, albumId: albumId, isArchived: isArchived, isFavorite: isFavorite, isTrashed: isTrashed, key: key, order: order, personId: personId, tagId: tagId, userId: userId, withPartners: withPartners, withStacked: withStacked, );
Future<List<TimeBucketsResponseDto>?> getTimeBuckets({ String? albumId, bool? isArchived, bool? isFavorite, bool? isTrashed, String? key, AssetOrder? order, String? personId, String? tagId, String? userId, bool? withPartners, bool? withStacked, }) async {
final response = await getTimeBucketsWithHttpInfo( albumId: albumId, isArchived: isArchived, isFavorite: isFavorite, isTrashed: isTrashed, key: key, order: order, personId: personId, tagId: tagId, userId: userId, withPartners: withPartners, withStacked: withStacked, );
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
@@ -271,8 +272,8 @@ class TimelineApi {
// FormatException when trying to decode an empty string.
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
final responseBody = await _decodeBodyBytes(response);
return (await apiClient.deserializeAsync(responseBody, 'List<TimeBucketResponseDto>') as List)
.cast<TimeBucketResponseDto>()
return (await apiClient.deserializeAsync(responseBody, 'List<TimeBucketsResponseDto>') as List)
.cast<TimeBucketsResponseDto>()
.toList(growable: false);
}


@@ -320,16 +320,6 @@ class ApiClient {
return FaceDto.fromJson(value);
case 'FacialRecognitionConfig':
return FacialRecognitionConfig.fromJson(value);
case 'FileChecksumDto':
return FileChecksumDto.fromJson(value);
case 'FileChecksumResponseDto':
return FileChecksumResponseDto.fromJson(value);
case 'FileReportDto':
return FileReportDto.fromJson(value);
case 'FileReportFixDto':
return FileReportFixDto.fromJson(value);
case 'FileReportItemDto':
return FileReportItemDto.fromJson(value);
case 'FoldersResponse':
return FoldersResponse.fromJson(value);
case 'FoldersUpdate':
@@ -378,8 +368,6 @@ class ApiClient {
return MemoriesUpdate.fromJson(value);
case 'MemoryCreateDto':
return MemoryCreateDto.fromJson(value);
case 'MemoryLaneResponseDto':
return MemoryLaneResponseDto.fromJson(value);
case 'MemoryResponseDto':
return MemoryResponseDto.fromJson(value);
case 'MemoryType':
@@ -418,10 +406,6 @@ class ApiClient {
return PartnerDirectionTypeTransformer().decode(value);
case 'PartnerResponseDto':
return PartnerResponseDto.fromJson(value);
case 'PathEntityType':
return PathEntityTypeTypeTransformer().decode(value);
case 'PathType':
return PathTypeTypeTransformer().decode(value);
case 'PeopleResponse':
return PeopleResponse.fromJson(value);
case 'PeopleResponseDto':
@@ -634,10 +618,10 @@ class ApiClient {
return TemplateResponseDto.fromJson(value);
case 'TestEmailResponseDto':
return TestEmailResponseDto.fromJson(value);
case 'TimeBucketResponseDto':
return TimeBucketResponseDto.fromJson(value);
case 'TimeBucketSize':
return TimeBucketSizeTypeTransformer().decode(value);
case 'TimeBucketAssetResponseDto':
return TimeBucketAssetResponseDto.fromJson(value);
case 'TimeBucketsResponseDto':
return TimeBucketsResponseDto.fromJson(value);
case 'ToneMapping':
return ToneMappingTypeTransformer().decode(value);
case 'TranscodeHWAccel':


@@ -112,12 +112,6 @@ String parameterToString(dynamic value) {
if (value is PartnerDirection) {
return PartnerDirectionTypeTransformer().encode(value).toString();
}
if (value is PathEntityType) {
return PathEntityTypeTypeTransformer().encode(value).toString();
}
if (value is PathType) {
return PathTypeTypeTransformer().encode(value).toString();
}
if (value is Permission) {
return PermissionTypeTransformer().encode(value).toString();
}
@@ -142,9 +136,6 @@ String parameterToString(dynamic value) {
if (value is SyncRequestType) {
return SyncRequestTypeTypeTransformer().encode(value).toString();
}
if (value is TimeBucketSize) {
return TimeBucketSizeTypeTransformer().encode(value).toString();
}
if (value is ToneMapping) {
return ToneMappingTypeTransformer().encode(value).toString();
}


@@ -1,101 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class FileChecksumDto {
/// Returns a new [FileChecksumDto] instance.
FileChecksumDto({
this.filenames = const [],
});
List<String> filenames;
@override
bool operator ==(Object other) => identical(this, other) || other is FileChecksumDto &&
_deepEquality.equals(other.filenames, filenames);
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(filenames.hashCode);
@override
String toString() => 'FileChecksumDto[filenames=$filenames]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'filenames'] = this.filenames;
return json;
}
/// Returns a new [FileChecksumDto] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static FileChecksumDto? fromJson(dynamic value) {
upgradeDto(value, "FileChecksumDto");
if (value is Map) {
final json = value.cast<String, dynamic>();
return FileChecksumDto(
filenames: json[r'filenames'] is Iterable
? (json[r'filenames'] as Iterable).cast<String>().toList(growable: false)
: const [],
);
}
return null;
}
static List<FileChecksumDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <FileChecksumDto>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = FileChecksumDto.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, FileChecksumDto> mapFromJson(dynamic json) {
final map = <String, FileChecksumDto>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = FileChecksumDto.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of FileChecksumDto-objects as value to a dart map
static Map<String, List<FileChecksumDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<FileChecksumDto>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = FileChecksumDto.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'filenames',
};
}


@@ -1,107 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class FileChecksumResponseDto {
/// Returns a new [FileChecksumResponseDto] instance.
FileChecksumResponseDto({
required this.checksum,
required this.filename,
});
String checksum;
String filename;
@override
bool operator ==(Object other) => identical(this, other) || other is FileChecksumResponseDto &&
other.checksum == checksum &&
other.filename == filename;
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(checksum.hashCode) +
(filename.hashCode);
@override
String toString() => 'FileChecksumResponseDto[checksum=$checksum, filename=$filename]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'checksum'] = this.checksum;
json[r'filename'] = this.filename;
return json;
}
/// Returns a new [FileChecksumResponseDto] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static FileChecksumResponseDto? fromJson(dynamic value) {
upgradeDto(value, "FileChecksumResponseDto");
if (value is Map) {
final json = value.cast<String, dynamic>();
return FileChecksumResponseDto(
checksum: mapValueOfType<String>(json, r'checksum')!,
filename: mapValueOfType<String>(json, r'filename')!,
);
}
return null;
}
static List<FileChecksumResponseDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <FileChecksumResponseDto>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = FileChecksumResponseDto.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, FileChecksumResponseDto> mapFromJson(dynamic json) {
final map = <String, FileChecksumResponseDto>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = FileChecksumResponseDto.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of FileChecksumResponseDto-objects as value to a dart map
static Map<String, List<FileChecksumResponseDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<FileChecksumResponseDto>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = FileChecksumResponseDto.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'checksum',
'filename',
};
}


@@ -1,109 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class FileReportDto {
/// Returns a new [FileReportDto] instance.
FileReportDto({
this.extras = const [],
this.orphans = const [],
});
List<String> extras;
List<FileReportItemDto> orphans;
@override
bool operator ==(Object other) => identical(this, other) || other is FileReportDto &&
_deepEquality.equals(other.extras, extras) &&
_deepEquality.equals(other.orphans, orphans);
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(extras.hashCode) +
(orphans.hashCode);
@override
String toString() => 'FileReportDto[extras=$extras, orphans=$orphans]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'extras'] = this.extras;
json[r'orphans'] = this.orphans;
return json;
}
/// Returns a new [FileReportDto] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static FileReportDto? fromJson(dynamic value) {
upgradeDto(value, "FileReportDto");
if (value is Map) {
final json = value.cast<String, dynamic>();
return FileReportDto(
extras: json[r'extras'] is Iterable
? (json[r'extras'] as Iterable).cast<String>().toList(growable: false)
: const [],
orphans: FileReportItemDto.listFromJson(json[r'orphans']),
);
}
return null;
}
static List<FileReportDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <FileReportDto>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = FileReportDto.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, FileReportDto> mapFromJson(dynamic json) {
final map = <String, FileReportDto>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = FileReportDto.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of FileReportDto-objects as value to a dart map
static Map<String, List<FileReportDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<FileReportDto>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = FileReportDto.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'extras',
'orphans',
};
}


@@ -1,99 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class FileReportFixDto {
/// Returns a new [FileReportFixDto] instance.
FileReportFixDto({
this.items = const [],
});
List<FileReportItemDto> items;
@override
bool operator ==(Object other) => identical(this, other) || other is FileReportFixDto &&
_deepEquality.equals(other.items, items);
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(items.hashCode);
@override
String toString() => 'FileReportFixDto[items=$items]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'items'] = this.items;
return json;
}
/// Returns a new [FileReportFixDto] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static FileReportFixDto? fromJson(dynamic value) {
upgradeDto(value, "FileReportFixDto");
if (value is Map) {
final json = value.cast<String, dynamic>();
return FileReportFixDto(
items: FileReportItemDto.listFromJson(json[r'items']),
);
}
return null;
}
static List<FileReportFixDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <FileReportFixDto>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = FileReportFixDto.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, FileReportFixDto> mapFromJson(dynamic json) {
final map = <String, FileReportFixDto>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = FileReportFixDto.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of FileReportFixDto-objects as value to a dart map
static Map<String, List<FileReportFixDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<FileReportFixDto>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = FileReportFixDto.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'items',
};
}


@@ -1,140 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class FileReportItemDto {
/// Returns a new [FileReportItemDto] instance.
FileReportItemDto({
this.checksum,
required this.entityId,
required this.entityType,
required this.pathType,
required this.pathValue,
});
///
/// Please note: This property should have been non-nullable! Since the specification file
/// does not include a default value (using the "default:" property), however, the generated
/// source code must fall back to having a nullable type.
/// Consider adding a "default:" property in the specification file to hide this note.
///
String? checksum;
String entityId;
PathEntityType entityType;
PathType pathType;
String pathValue;
@override
bool operator ==(Object other) => identical(this, other) || other is FileReportItemDto &&
other.checksum == checksum &&
other.entityId == entityId &&
other.entityType == entityType &&
other.pathType == pathType &&
other.pathValue == pathValue;
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(checksum == null ? 0 : checksum!.hashCode) +
(entityId.hashCode) +
(entityType.hashCode) +
(pathType.hashCode) +
(pathValue.hashCode);
@override
String toString() => 'FileReportItemDto[checksum=$checksum, entityId=$entityId, entityType=$entityType, pathType=$pathType, pathValue=$pathValue]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
if (this.checksum != null) {
json[r'checksum'] = this.checksum;
} else {
// json[r'checksum'] = null;
}
json[r'entityId'] = this.entityId;
json[r'entityType'] = this.entityType;
json[r'pathType'] = this.pathType;
json[r'pathValue'] = this.pathValue;
return json;
}
/// Returns a new [FileReportItemDto] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static FileReportItemDto? fromJson(dynamic value) {
upgradeDto(value, "FileReportItemDto");
if (value is Map) {
final json = value.cast<String, dynamic>();
return FileReportItemDto(
checksum: mapValueOfType<String>(json, r'checksum'),
entityId: mapValueOfType<String>(json, r'entityId')!,
entityType: PathEntityType.fromJson(json[r'entityType'])!,
pathType: PathType.fromJson(json[r'pathType'])!,
pathValue: mapValueOfType<String>(json, r'pathValue')!,
);
}
return null;
}
static List<FileReportItemDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <FileReportItemDto>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = FileReportItemDto.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, FileReportItemDto> mapFromJson(dynamic json) {
final map = <String, FileReportItemDto>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = FileReportItemDto.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of FileReportItemDto-objects as value to a dart map
static Map<String, List<FileReportItemDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<FileReportItemDto>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = FileReportItemDto.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'entityId',
'entityType',
'pathType',
'pathValue',
};
}

View File

@@ -1,107 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class MemoryLaneResponseDto {
/// Returns a new [MemoryLaneResponseDto] instance.
MemoryLaneResponseDto({
this.assets = const [],
required this.yearsAgo,
});
List<AssetResponseDto> assets;
int yearsAgo;
@override
bool operator ==(Object other) => identical(this, other) || other is MemoryLaneResponseDto &&
_deepEquality.equals(other.assets, assets) &&
other.yearsAgo == yearsAgo;
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(assets.hashCode) +
(yearsAgo.hashCode);
@override
String toString() => 'MemoryLaneResponseDto[assets=$assets, yearsAgo=$yearsAgo]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'assets'] = this.assets;
json[r'yearsAgo'] = this.yearsAgo;
return json;
}
/// Returns a new [MemoryLaneResponseDto] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static MemoryLaneResponseDto? fromJson(dynamic value) {
upgradeDto(value, "MemoryLaneResponseDto");
if (value is Map) {
final json = value.cast<String, dynamic>();
return MemoryLaneResponseDto(
assets: AssetResponseDto.listFromJson(json[r'assets']),
yearsAgo: mapValueOfType<int>(json, r'yearsAgo')!,
);
}
return null;
}
static List<MemoryLaneResponseDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <MemoryLaneResponseDto>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = MemoryLaneResponseDto.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, MemoryLaneResponseDto> mapFromJson(dynamic json) {
final map = <String, MemoryLaneResponseDto>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = MemoryLaneResponseDto.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of MemoryLaneResponseDto-objects as value to a dart map
static Map<String, List<MemoryLaneResponseDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<MemoryLaneResponseDto>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = MemoryLaneResponseDto.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'assets',
'yearsAgo',
};
}

View File

@@ -1,88 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class PathEntityType {
/// Instantiate a new enum with the provided [value].
const PathEntityType._(this.value);
/// The underlying value of this enum member.
final String value;
@override
String toString() => value;
String toJson() => value;
static const asset = PathEntityType._(r'asset');
static const person = PathEntityType._(r'person');
static const user = PathEntityType._(r'user');
/// List of all possible values in this [enum][PathEntityType].
static const values = <PathEntityType>[
asset,
person,
user,
];
static PathEntityType? fromJson(dynamic value) => PathEntityTypeTypeTransformer().decode(value);
static List<PathEntityType> listFromJson(dynamic json, {bool growable = false,}) {
final result = <PathEntityType>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = PathEntityType.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
}
/// Transformation class that can [encode] an instance of [PathEntityType] to String,
/// and [decode] dynamic data back to [PathEntityType].
class PathEntityTypeTypeTransformer {
factory PathEntityTypeTypeTransformer() => _instance ??= const PathEntityTypeTypeTransformer._();
const PathEntityTypeTypeTransformer._();
String encode(PathEntityType data) => data.value;
/// Decodes a [dynamic value][data] to a PathEntityType.
///
/// If [allowNull] is true and the [dynamic value][data] cannot be decoded successfully,
/// then null is returned. However, if [allowNull] is false and the [dynamic value][data]
/// cannot be decoded successfully, then an [UnimplementedError] is thrown.
///
/// The [allowNull] is very handy when an API changes and a new enum value is added or removed,
/// and users are still using an old app with the old code.
PathEntityType? decode(dynamic data, {bool allowNull = true}) {
if (data != null) {
switch (data) {
case r'asset': return PathEntityType.asset;
case r'person': return PathEntityType.person;
case r'user': return PathEntityType.user;
default:
if (!allowNull) {
throw ArgumentError('Unknown enum value to decode: $data');
}
}
}
return null;
}
/// Singleton [PathEntityTypeTypeTransformer] instance.
static PathEntityTypeTypeTransformer? _instance;
}

View File

@@ -1,103 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class PathType {
/// Instantiate a new enum with the provided [value].
const PathType._(this.value);
/// The underlying value of this enum member.
final String value;
@override
String toString() => value;
String toJson() => value;
static const original = PathType._(r'original');
static const fullsize = PathType._(r'fullsize');
static const preview = PathType._(r'preview');
static const thumbnail = PathType._(r'thumbnail');
static const encodedVideo = PathType._(r'encoded_video');
static const sidecar = PathType._(r'sidecar');
static const face = PathType._(r'face');
static const profile = PathType._(r'profile');
/// List of all possible values in this [enum][PathType].
static const values = <PathType>[
original,
fullsize,
preview,
thumbnail,
encodedVideo,
sidecar,
face,
profile,
];
static PathType? fromJson(dynamic value) => PathTypeTypeTransformer().decode(value);
static List<PathType> listFromJson(dynamic json, {bool growable = false,}) {
final result = <PathType>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = PathType.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
}
/// Transformation class that can [encode] an instance of [PathType] to String,
/// and [decode] dynamic data back to [PathType].
class PathTypeTypeTransformer {
factory PathTypeTypeTransformer() => _instance ??= const PathTypeTypeTransformer._();
const PathTypeTypeTransformer._();
String encode(PathType data) => data.value;
/// Decodes a [dynamic value][data] to a PathType.
///
/// If [allowNull] is true and the [dynamic value][data] cannot be decoded successfully,
/// then null is returned. However, if [allowNull] is false and the [dynamic value][data]
/// cannot be decoded successfully, then an [UnimplementedError] is thrown.
///
/// The [allowNull] is very handy when an API changes and a new enum value is added or removed,
/// and users are still using an old app with the old code.
PathType? decode(dynamic data, {bool allowNull = true}) {
if (data != null) {
switch (data) {
case r'original': return PathType.original;
case r'fullsize': return PathType.fullsize;
case r'preview': return PathType.preview;
case r'thumbnail': return PathType.thumbnail;
case r'encoded_video': return PathType.encodedVideo;
case r'sidecar': return PathType.sidecar;
case r'face': return PathType.face;
case r'profile': return PathType.profile;
default:
if (!allowNull) {
throw ArgumentError('Unknown enum value to decode: $data');
}
}
}
return null;
}
/// Singleton [PathTypeTypeTransformer] instance.
static PathTypeTypeTransformer? _instance;
}

View File

@@ -0,0 +1,243 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class TimeBucketAssetResponseDto {
/// Returns a new [TimeBucketAssetResponseDto] instance.
TimeBucketAssetResponseDto({
this.city = const [],
this.country = const [],
this.duration = const [],
this.id = const [],
this.isArchived = const [],
this.isFavorite = const [],
this.isImage = const [],
this.isTrashed = const [],
this.livePhotoVideoId = const [],
this.localDateTime = const [],
this.ownerId = const [],
this.projectionType = const [],
this.ratio = const [],
this.stack = const [],
this.thumbhash = const [],
});
List<String?> city;
List<String?> country;
List<String?> duration;
List<String> id;
List<num> isArchived;
List<num> isFavorite;
List<num> isImage;
List<num> isTrashed;
List<String?> livePhotoVideoId;
List<String> localDateTime;
List<String> ownerId;
List<String?> projectionType;
List<num> ratio;
/// (stack ID, stack asset count) tuple
List<List<String>?> stack;
List<String?> thumbhash;
@override
bool operator ==(Object other) => identical(this, other) || other is TimeBucketAssetResponseDto &&
_deepEquality.equals(other.city, city) &&
_deepEquality.equals(other.country, country) &&
_deepEquality.equals(other.duration, duration) &&
_deepEquality.equals(other.id, id) &&
_deepEquality.equals(other.isArchived, isArchived) &&
_deepEquality.equals(other.isFavorite, isFavorite) &&
_deepEquality.equals(other.isImage, isImage) &&
_deepEquality.equals(other.isTrashed, isTrashed) &&
_deepEquality.equals(other.livePhotoVideoId, livePhotoVideoId) &&
_deepEquality.equals(other.localDateTime, localDateTime) &&
_deepEquality.equals(other.ownerId, ownerId) &&
_deepEquality.equals(other.projectionType, projectionType) &&
_deepEquality.equals(other.ratio, ratio) &&
_deepEquality.equals(other.stack, stack) &&
_deepEquality.equals(other.thumbhash, thumbhash);
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(city.hashCode) +
(country.hashCode) +
(duration.hashCode) +
(id.hashCode) +
(isArchived.hashCode) +
(isFavorite.hashCode) +
(isImage.hashCode) +
(isTrashed.hashCode) +
(livePhotoVideoId.hashCode) +
(localDateTime.hashCode) +
(ownerId.hashCode) +
(projectionType.hashCode) +
(ratio.hashCode) +
(stack.hashCode) +
(thumbhash.hashCode);
@override
String toString() => 'TimeBucketAssetResponseDto[city=$city, country=$country, duration=$duration, id=$id, isArchived=$isArchived, isFavorite=$isFavorite, isImage=$isImage, isTrashed=$isTrashed, livePhotoVideoId=$livePhotoVideoId, localDateTime=$localDateTime, ownerId=$ownerId, projectionType=$projectionType, ratio=$ratio, stack=$stack, thumbhash=$thumbhash]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'city'] = this.city;
json[r'country'] = this.country;
json[r'duration'] = this.duration;
json[r'id'] = this.id;
json[r'isArchived'] = this.isArchived;
json[r'isFavorite'] = this.isFavorite;
json[r'isImage'] = this.isImage;
json[r'isTrashed'] = this.isTrashed;
json[r'livePhotoVideoId'] = this.livePhotoVideoId;
json[r'localDateTime'] = this.localDateTime;
json[r'ownerId'] = this.ownerId;
json[r'projectionType'] = this.projectionType;
json[r'ratio'] = this.ratio;
json[r'stack'] = this.stack;
json[r'thumbhash'] = this.thumbhash;
return json;
}
/// Returns a new [TimeBucketAssetResponseDto] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static TimeBucketAssetResponseDto? fromJson(dynamic value) {
upgradeDto(value, "TimeBucketAssetResponseDto");
if (value is Map) {
final json = value.cast<String, dynamic>();
return TimeBucketAssetResponseDto(
city: json[r'city'] is Iterable
? (json[r'city'] as Iterable).cast<String>().toList(growable: false)
: const [],
country: json[r'country'] is Iterable
? (json[r'country'] as Iterable).cast<String>().toList(growable: false)
: const [],
duration: json[r'duration'] is Iterable
? (json[r'duration'] as Iterable).cast<String>().toList(growable: false)
: const [],
id: json[r'id'] is Iterable
? (json[r'id'] as Iterable).cast<String>().toList(growable: false)
: const [],
isArchived: json[r'isArchived'] is Iterable
? (json[r'isArchived'] as Iterable).cast<num>().toList(growable: false)
: const [],
isFavorite: json[r'isFavorite'] is Iterable
? (json[r'isFavorite'] as Iterable).cast<num>().toList(growable: false)
: const [],
isImage: json[r'isImage'] is Iterable
? (json[r'isImage'] as Iterable).cast<num>().toList(growable: false)
: const [],
isTrashed: json[r'isTrashed'] is Iterable
? (json[r'isTrashed'] as Iterable).cast<num>().toList(growable: false)
: const [],
livePhotoVideoId: json[r'livePhotoVideoId'] is Iterable
? (json[r'livePhotoVideoId'] as Iterable).cast<String>().toList(growable: false)
: const [],
localDateTime: json[r'localDateTime'] is Iterable
? (json[r'localDateTime'] as Iterable).cast<String>().toList(growable: false)
: const [],
ownerId: json[r'ownerId'] is Iterable
? (json[r'ownerId'] as Iterable).cast<String>().toList(growable: false)
: const [],
projectionType: json[r'projectionType'] is Iterable
? (json[r'projectionType'] as Iterable).cast<String>().toList(growable: false)
: const [],
ratio: json[r'ratio'] is Iterable
? (json[r'ratio'] as Iterable).cast<num>().toList(growable: false)
: const [],
stack: json[r'stack'] is List
? (json[r'stack'] as List).map((e) =>
e == null ? null : (e as List).cast<String>()
).toList()
: const [],
thumbhash: json[r'thumbhash'] is Iterable
? (json[r'thumbhash'] as Iterable).cast<String>().toList(growable: false)
: const [],
);
}
return null;
}
static List<TimeBucketAssetResponseDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <TimeBucketAssetResponseDto>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = TimeBucketAssetResponseDto.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, TimeBucketAssetResponseDto> mapFromJson(dynamic json) {
final map = <String, TimeBucketAssetResponseDto>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = TimeBucketAssetResponseDto.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of TimeBucketAssetResponseDto-objects as value to a dart map
static Map<String, List<TimeBucketAssetResponseDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<TimeBucketAssetResponseDto>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = TimeBucketAssetResponseDto.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'city',
'country',
'duration',
'id',
'isArchived',
'isFavorite',
'isImage',
'isTrashed',
'livePhotoVideoId',
'localDateTime',
'ownerId',
'projectionType',
'ratio',
'thumbhash',
};
}
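The bucket payload above is columnar: every property is a parallel array indexed by asset position, booleans travel as numbers, and each `stack` entry is a nullable (stack ID, stack asset count) string pair. A minimal TypeScript sketch of turning one bucket back into per-asset records follows; the `TimeBucketAssetResponseDto` type is the one from the regenerated web SDK further down, while `BucketAsset`, `toStack`, and `toAssets` are illustrative helpers, not part of the generated code:

// illustrative helper: a stack entry is a [stackId, assetCount] string tuple or null
const toStack = (tuple: string[] | null | undefined) =>
  tuple ? { id: tuple[0], assetCount: Number(tuple[1]) } : null;

interface BucketAsset {
  id: string;
  ownerId: string;
  localDateTime: string;
  ratio: number;
  isImage: boolean;
  isFavorite: boolean;
  isArchived: boolean;
  isTrashed: boolean;
  city: string | null;
  country: string | null;
  duration: string | null;
  livePhotoVideoId: string | null;
  projectionType: string | null;
  thumbhash: string | null;
  stack: { id: string; assetCount: number } | null;
}

const toAssets = (bucket: TimeBucketAssetResponseDto): BucketAsset[] =>
  bucket.id.map((id, i) => ({
    id,
    ownerId: bucket.ownerId[i],
    localDateTime: bucket.localDateTime[i],
    ratio: bucket.ratio[i],
    // boolean columns are transported as 0/1 numbers in the columnar payload
    isImage: !!bucket.isImage[i],
    isFavorite: !!bucket.isFavorite[i],
    isArchived: !!bucket.isArchived[i],
    isTrashed: !!bucket.isTrashed[i],
    city: bucket.city[i],
    country: bucket.country[i],
    duration: bucket.duration[i],
    livePhotoVideoId: bucket.livePhotoVideoId[i],
    projectionType: bucket.projectionType[i],
    thumbhash: bucket.thumbhash[i],
    stack: toStack(bucket.stack?.[i]),
  }));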

View File

@@ -1,85 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class TimeBucketSize {
/// Instantiate a new enum with the provided [value].
const TimeBucketSize._(this.value);
/// The underlying value of this enum member.
final String value;
@override
String toString() => value;
String toJson() => value;
static const DAY = TimeBucketSize._(r'DAY');
static const MONTH = TimeBucketSize._(r'MONTH');
/// List of all possible values in this [enum][TimeBucketSize].
static const values = <TimeBucketSize>[
DAY,
MONTH,
];
static TimeBucketSize? fromJson(dynamic value) => TimeBucketSizeTypeTransformer().decode(value);
static List<TimeBucketSize> listFromJson(dynamic json, {bool growable = false,}) {
final result = <TimeBucketSize>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = TimeBucketSize.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
}
/// Transformation class that can [encode] an instance of [TimeBucketSize] to String,
/// and [decode] dynamic data back to [TimeBucketSize].
class TimeBucketSizeTypeTransformer {
factory TimeBucketSizeTypeTransformer() => _instance ??= const TimeBucketSizeTypeTransformer._();
const TimeBucketSizeTypeTransformer._();
String encode(TimeBucketSize data) => data.value;
/// Decodes a [dynamic value][data] to a TimeBucketSize.
///
/// If [allowNull] is true and the [dynamic value][data] cannot be decoded successfully,
/// then null is returned. However, if [allowNull] is false and the [dynamic value][data]
/// cannot be decoded successfully, then an [UnimplementedError] is thrown.
///
/// The [allowNull] is very handy when an API changes and a new enum value is added or removed,
/// and users are still using an old app with the old code.
TimeBucketSize? decode(dynamic data, {bool allowNull = true}) {
if (data != null) {
switch (data) {
case r'DAY': return TimeBucketSize.DAY;
case r'MONTH': return TimeBucketSize.MONTH;
default:
if (!allowNull) {
throw ArgumentError('Unknown enum value to decode: $data');
}
}
}
return null;
}
/// Singleton [TimeBucketSizeTypeTransformer] instance.
static TimeBucketSizeTypeTransformer? _instance;
}

View File

@@ -10,9 +10,9 @@
part of openapi.api;
class TimeBucketResponseDto {
/// Returns a new [TimeBucketResponseDto] instance.
TimeBucketResponseDto({
class TimeBucketsResponseDto {
/// Returns a new [TimeBucketsResponseDto] instance.
TimeBucketsResponseDto({
required this.count,
required this.timeBucket,
});
@@ -22,7 +22,7 @@ class TimeBucketResponseDto {
String timeBucket;
@override
bool operator ==(Object other) => identical(this, other) || other is TimeBucketResponseDto &&
bool operator ==(Object other) => identical(this, other) || other is TimeBucketsResponseDto &&
other.count == count &&
other.timeBucket == timeBucket;
@@ -33,7 +33,7 @@ class TimeBucketResponseDto {
(timeBucket.hashCode);
@override
String toString() => 'TimeBucketResponseDto[count=$count, timeBucket=$timeBucket]';
String toString() => 'TimeBucketsResponseDto[count=$count, timeBucket=$timeBucket]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
@@ -42,15 +42,15 @@ class TimeBucketResponseDto {
return json;
}
/// Returns a new [TimeBucketResponseDto] instance and imports its values from
/// Returns a new [TimeBucketsResponseDto] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static TimeBucketResponseDto? fromJson(dynamic value) {
upgradeDto(value, "TimeBucketResponseDto");
static TimeBucketsResponseDto? fromJson(dynamic value) {
upgradeDto(value, "TimeBucketsResponseDto");
if (value is Map) {
final json = value.cast<String, dynamic>();
return TimeBucketResponseDto(
return TimeBucketsResponseDto(
count: mapValueOfType<int>(json, r'count')!,
timeBucket: mapValueOfType<String>(json, r'timeBucket')!,
);
@@ -58,11 +58,11 @@ class TimeBucketResponseDto {
return null;
}
static List<TimeBucketResponseDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <TimeBucketResponseDto>[];
static List<TimeBucketsResponseDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <TimeBucketsResponseDto>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = TimeBucketResponseDto.fromJson(row);
final value = TimeBucketsResponseDto.fromJson(row);
if (value != null) {
result.add(value);
}
@@ -71,12 +71,12 @@ class TimeBucketResponseDto {
return result.toList(growable: growable);
}
static Map<String, TimeBucketResponseDto> mapFromJson(dynamic json) {
final map = <String, TimeBucketResponseDto>{};
static Map<String, TimeBucketsResponseDto> mapFromJson(dynamic json) {
final map = <String, TimeBucketsResponseDto>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = TimeBucketResponseDto.fromJson(entry.value);
final value = TimeBucketsResponseDto.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
@@ -85,14 +85,14 @@ class TimeBucketResponseDto {
return map;
}
// maps a json object with a list of TimeBucketResponseDto-objects as value to a dart map
static Map<String, List<TimeBucketResponseDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<TimeBucketResponseDto>>{};
// maps a json object with a list of TimeBucketsResponseDto-objects as value to a dart map
static Map<String, List<TimeBucketsResponseDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<TimeBucketsResponseDto>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = TimeBucketResponseDto.listFromJson(entry.value, growable: growable,);
map[entry.key] = TimeBucketsResponseDto.listFromJson(entry.value, growable: growable,);
}
}
return map;

View File

@@ -1,5 +1,5 @@
#!/usr/bin/env bash
OPENAPI_GENERATOR_VERSION=v7.8.0
OPENAPI_GENERATOR_VERSION=v7.12.0
# usage: ./bin/generate-open-api.sh
@@ -8,6 +8,7 @@ function dart {
cd ./templates/mobile/serialization/native
wget -O native_class.mustache https://raw.githubusercontent.com/OpenAPITools/openapi-generator/$OPENAPI_GENERATOR_VERSION/modules/openapi-generator/src/main/resources/dart2/serialization/native/native_class.mustache
patch --no-backup-if-mismatch -u native_class.mustache <native_class.mustache.patch
patch --no-backup-if-mismatch -u native_class.mustache <native_class_nullable_items_in_arrays.patch
cd ../../
wget -O api.mustache https://raw.githubusercontent.com/OpenAPITools/openapi-generator/$OPENAPI_GENERATOR_VERSION/modules/openapi-generator/src/main/resources/dart2/api.mustache

View File

@@ -0,0 +1,301 @@
class {{{classname}}} {
{{>dart_constructor}}
{{#vars}}
{{#description}}
/// {{{.}}}
{{/description}}
{{^isEnum}}
{{#minimum}}
{{#description}}
///
{{/description}}
/// Minimum value: {{{.}}}
{{/minimum}}
{{#maximum}}
{{#description}}
{{^minimum}}
///
{{/minimum}}
{{/description}}
/// Maximum value: {{{.}}}
{{/maximum}}
{{^isNullable}}
{{^required}}
{{^defaultValue}}
///
/// Please note: This property should have been non-nullable! Since the specification file
/// does not include a default value (using the "default:" property), however, the generated
/// source code must fall back to having a nullable type.
/// Consider adding a "default:" property in the specification file to hide this note.
///
{{/defaultValue}}
{{/required}}
{{/isNullable}}
{{/isEnum}}
{{{datatypeWithEnum}}}{{#isNullable}}?{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}?{{/defaultValue}}{{/required}}{{/isNullable}} {{{name}}};
{{/vars}}
@override
bool operator ==(Object other) => identical(this, other) || other is {{{classname}}} &&
{{#vars}}
{{#isMap}}_deepEquality.equals(other.{{{name}}}, {{{name}}}){{/isMap}}{{^isMap}}{{#isArray}}_deepEquality.equals(other.{{{name}}}, {{{name}}}){{/isArray}}{{^isArray}}other.{{{name}}} == {{{name}}}{{/isArray}}{{/isMap}}{{^-last}} &&{{/-last}}{{#-last}};{{/-last}}
{{/vars}}
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
{{#vars}}
({{#isNullable}}{{{name}}} == null ? 0 : {{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}{{{name}}} == null ? 0 : {{/defaultValue}}{{/required}}{{/isNullable}}{{{name}}}{{#isNullable}}!{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}!{{/defaultValue}}{{/required}}{{/isNullable}}.hashCode){{^-last}} +{{/-last}}{{#-last}};{{/-last}}
{{/vars}}
@override
String toString() => '{{{classname}}}[{{#vars}}{{{name}}}=${{{name}}}{{^-last}}, {{/-last}}{{/vars}}]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
{{#vars}}
{{#isNullable}}
if (this.{{{name}}} != null) {
{{/isNullable}}
{{^isNullable}}
{{^required}}
{{^defaultValue}}
if (this.{{{name}}} != null) {
{{/defaultValue}}
{{/required}}
{{/isNullable}}
{{#isDateTime}}
{{#pattern}}
json[r'{{{baseName}}}'] = _isEpochMarker(r'{{{pattern}}}')
? this.{{{name}}}{{#isNullable}}!{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}!{{/defaultValue}}{{/required}}{{/isNullable}}.millisecondsSinceEpoch
: this.{{{name}}}{{#isNullable}}!{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}!{{/defaultValue}}{{/required}}{{/isNullable}}.toUtc().toIso8601String();
{{/pattern}}
{{^pattern}}
json[r'{{{baseName}}}'] = this.{{{name}}}{{#isNullable}}!{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}!{{/defaultValue}}{{/required}}{{/isNullable}}.toUtc().toIso8601String();
{{/pattern}}
{{/isDateTime}}
{{#isDate}}
{{#pattern}}
json[r'{{{baseName}}}'] = _isEpochMarker(r'{{{pattern}}}')
? this.{{{name}}}{{#isNullable}}!{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}!{{/defaultValue}}{{/required}}{{/isNullable}}.millisecondsSinceEpoch
: _dateFormatter.format(this.{{{name}}}{{#isNullable}}!{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}!{{/defaultValue}}{{/required}}{{/isNullable}}.toUtc());
{{/pattern}}
{{^pattern}}
json[r'{{{baseName}}}'] = _dateFormatter.format(this.{{{name}}}{{#isNullable}}!{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}!{{/defaultValue}}{{/required}}{{/isNullable}}.toUtc());
{{/pattern}}
{{/isDate}}
{{^isDateTime}}
{{^isDate}}
json[r'{{{baseName}}}'] = this.{{{name}}}{{#isArray}}{{#uniqueItems}}{{#isNullable}}!{{/isNullable}}.toList(growable: false){{/uniqueItems}}{{/isArray}};
{{/isDate}}
{{/isDateTime}}
{{#isNullable}}
} else {
json[r'{{{baseName}}}'] = null;
}
{{/isNullable}}
{{^isNullable}}
{{^required}}
{{^defaultValue}}
} else {
json[r'{{{baseName}}}'] = null;
}
{{/defaultValue}}
{{/required}}
{{/isNullable}}
{{/vars}}
return json;
}
/// Returns a new [{{{classname}}}] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static {{{classname}}}? fromJson(dynamic value) {
if (value is Map) {
final json = value.cast<String, dynamic>();
// Ensure that the map contains the required keys.
// Note 1: the values aren't checked for validity beyond being non-null.
// Note 2: this code is stripped in release mode!
assert(() {
requiredKeys.forEach((key) {
assert(json.containsKey(key), 'Required key "{{{classname}}}[$key]" is missing from JSON.');
assert(json[key] != null, 'Required key "{{{classname}}}[$key]" has a null value in JSON.');
});
return true;
}());
return {{{classname}}}(
{{#vars}}
{{#isDateTime}}
{{{name}}}: mapDateTime(json, r'{{{baseName}}}', r'{{{pattern}}}'){{#required}}{{^isNullable}}!{{/isNullable}}{{/required}}{{^required}}{{#defaultValue}} ?? {{{.}}}{{/defaultValue}}{{/required}},
{{/isDateTime}}
{{#isDate}}
{{{name}}}: mapDateTime(json, r'{{{baseName}}}', r'{{{pattern}}}'){{#required}}{{^isNullable}}!{{/isNullable}}{{/required}}{{^required}}{{#defaultValue}} ?? {{{.}}}{{/defaultValue}}{{/required}},
{{/isDate}}
{{^isDateTime}}
{{^isDate}}
{{#complexType}}
{{#isArray}}
{{#items.isArray}}
{{{name}}}: json[r'{{{baseName}}}'] is List
? (json[r'{{{baseName}}}'] as List).map((e) =>
{{#items.complexType}}
{{items.complexType}}.listFromJson(json[r'{{{baseName}}}']){{#uniqueItems}}.toSet(){{/uniqueItems}}
{{/items.complexType}}
{{^items.complexType}}
e == null ? {{#items.isNullable}}null{{/items.isNullable}}{{^items.isNullable}}const <{{items.items.dataType}}>[]{{/items.isNullable}} : (e as List).cast<{{items.items.dataType}}>()
{{/items.complexType}}
).toList()
: {{#isNullable}}null{{/isNullable}}{{^isNullable}}const []{{/isNullable}},
{{/items.isArray}}
{{^items.isArray}}
{{{name}}}: {{{complexType}}}.listFromJson(json[r'{{{baseName}}}']){{#uniqueItems}}.toSet(){{/uniqueItems}},
{{/items.isArray}}
{{/isArray}}
{{^isArray}}
{{#isMap}}
{{#items.isArray}}
{{{name}}}: json[r'{{{baseName}}}'] == null
? {{#defaultValue}}{{{.}}}{{/defaultValue}}{{^defaultValue}}null{{/defaultValue}}
{{#items.complexType}}
: {{items.complexType}}.mapListFromJson(json[r'{{{baseName}}}']),
{{/items.complexType}}
{{^items.complexType}}
: mapCastOfType<String, List>(json, r'{{{baseName}}}'),
{{/items.complexType}}
{{/items.isArray}}
{{^items.isArray}}
{{#items.isMap}}
{{#items.complexType}}
{{{name}}}: {{items.complexType}}.mapFromJson(json[r'{{{baseName}}}']),
{{/items.complexType}}
{{^items.complexType}}
{{{name}}}: mapCastOfType<String, dynamic>(json, r'{{{baseName}}}'){{#required}}{{^isNullable}}!{{/isNullable}}{{/required}}{{^required}}{{#defaultValue}} ?? {{{.}}}{{/defaultValue}}{{/required}},
{{/items.complexType}}
{{/items.isMap}}
{{^items.isMap}}
{{#items.complexType}}
{{{name}}}: {{{items.complexType}}}.mapFromJson(json[r'{{{baseName}}}']),
{{/items.complexType}}
{{^items.complexType}}
{{{name}}}: mapCastOfType<String, {{items.dataType}}>(json, r'{{{baseName}}}'){{#required}}{{^isNullable}}!{{/isNullable}}{{/required}}{{^required}}{{#defaultValue}} ?? {{{.}}}{{/defaultValue}}{{/required}},
{{/items.complexType}}
{{/items.isMap}}
{{/items.isArray}}
{{/isMap}}
{{^isMap}}
{{#isBinary}}
{{{name}}}: null, // No support for decoding binary content from JSON
{{/isBinary}}
{{^isBinary}}
{{{name}}}: {{{complexType}}}.fromJson(json[r'{{{baseName}}}']){{#required}}{{^isNullable}}!{{/isNullable}}{{/required}}{{^required}}{{#defaultValue}} ?? {{{.}}}{{/defaultValue}}{{/required}},
{{/isBinary}}
{{/isMap}}
{{/isArray}}
{{/complexType}}
{{^complexType}}
{{#isArray}}
{{#isEnum}}
{{{name}}}: {{{items.datatypeWithEnum}}}.listFromJson(json[r'{{{baseName}}}']){{#uniqueItems}}.toSet(){{/uniqueItems}},
{{/isEnum}}
{{^isEnum}}
{{{name}}}: json[r'{{{baseName}}}'] is Iterable
? (json[r'{{{baseName}}}'] as Iterable).cast<{{{items.datatype}}}>().{{#uniqueItems}}toSet(){{/uniqueItems}}{{^uniqueItems}}toList(growable: false){{/uniqueItems}}
: {{#defaultValue}}{{{.}}}{{/defaultValue}}{{^defaultValue}}null{{/defaultValue}},
{{/isEnum}}
{{/isArray}}
{{^isArray}}
{{#isMap}}
{{{name}}}: mapCastOfType<String, {{{items.datatype}}}>(json, r'{{{baseName}}}'){{#required}}{{^isNullable}}!{{/isNullable}}{{/required}}{{^required}}{{#defaultValue}} ?? {{{.}}}{{/defaultValue}}{{/required}},
{{/isMap}}
{{^isMap}}
{{#isNumber}}
{{{name}}}: {{#isNullable}}json[r'{{{baseName}}}'] == null
? {{#defaultValue}}{{{.}}}{{/defaultValue}}{{^defaultValue}}null{{/defaultValue}}
: {{/isNullable}}{{{datatypeWithEnum}}}.parse('${json[r'{{{baseName}}}']}'),
{{/isNumber}}
{{^isNumber}}
{{^isEnum}}
{{{name}}}: mapValueOfType<{{{datatypeWithEnum}}}>(json, r'{{{baseName}}}'){{#required}}{{^isNullable}}!{{/isNullable}}{{/required}}{{^required}}{{#defaultValue}} ?? {{{.}}}{{/defaultValue}}{{/required}},
{{/isEnum}}
{{#isEnum}}
{{{name}}}: {{{enumName}}}.fromJson(json[r'{{{baseName}}}']){{#required}}{{^isNullable}}!{{/isNullable}}{{/required}}{{^required}}{{#defaultValue}} ?? {{{.}}}{{/defaultValue}}{{/required}},
{{/isEnum}}
{{/isNumber}}
{{/isMap}}
{{/isArray}}
{{/complexType}}
{{/isDate}}
{{/isDateTime}}
{{/vars}}
);
}
return null;
}
static List<{{{classname}}}> listFromJson(dynamic json, {bool growable = false,}) {
final result = <{{{classname}}}>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = {{{classname}}}.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, {{{classname}}}> mapFromJson(dynamic json) {
final map = <String, {{{classname}}}>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = {{{classname}}}.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of {{{classname}}}-objects as value to a dart map
static Map<String, List<{{{classname}}}>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<{{{classname}}}>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = {{{classname}}}.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
{{#vars}}
{{#required}}
'{{{baseName}}}',
{{/required}}
{{/vars}}
};
}
{{#vars}}
{{^isModel}}
{{#isEnum}}
{{^isContainer}}
{{>serialization/native/native_enum_inline}}
{{/isContainer}}
{{#isContainer}}
{{#mostInnerItems}}
{{>serialization/native/native_enum_inline}}
{{/mostInnerItems}}
{{/isContainer}}
{{/isEnum}}
{{/isModel}}
{{/vars}}

View File

@@ -1726,62 +1726,6 @@
]
}
},
"/assets/memory-lane": {
"get": {
"operationId": "getMemoryLane",
"parameters": [
{
"name": "day",
"required": true,
"in": "query",
"schema": {
"minimum": 1,
"maximum": 31,
"type": "integer"
}
},
{
"name": "month",
"required": true,
"in": "query",
"schema": {
"minimum": 1,
"maximum": 12,
"type": "integer"
}
}
],
"responses": {
"200": {
"content": {
"application/json": {
"schema": {
"items": {
"$ref": "#/components/schemas/MemoryLaneResponseDto"
},
"type": "array"
}
}
},
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"tags": [
"Assets"
]
}
},
"/assets/random": {
"get": {
"deprecated": true,
@@ -4651,118 +4595,6 @@
]
}
},
"/reports": {
"get": {
"operationId": "getAuditFiles",
"parameters": [],
"responses": {
"200": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/FileReportDto"
}
}
},
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"tags": [
"File Reports"
]
}
},
"/reports/checksum": {
"post": {
"operationId": "getFileChecksums",
"parameters": [],
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/FileChecksumDto"
}
}
},
"required": true
},
"responses": {
"201": {
"content": {
"application/json": {
"schema": {
"items": {
"$ref": "#/components/schemas/FileChecksumResponseDto"
},
"type": "array"
}
}
},
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"tags": [
"File Reports"
]
}
},
"/reports/fix": {
"post": {
"operationId": "fixAuditFiles",
"parameters": [],
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/FileReportFixDto"
}
}
},
"required": true
},
"responses": {
"201": {
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"tags": [
"File Reports"
]
}
},
"/search/cities": {
"get": {
"operationId": "getAssetsByCity",
@@ -7117,6 +6949,24 @@
"$ref": "#/components/schemas/AssetOrder"
}
},
{
"name": "page",
"required": false,
"in": "query",
"schema": {
"minimum": 1,
"type": "number"
}
},
{
"name": "pageSize",
"required": false,
"in": "query",
"schema": {
"minimum": 1,
"type": "number"
}
},
{
"name": "personId",
"required": false,
@@ -7126,14 +6976,6 @@
"type": "string"
}
},
{
"name": "size",
"required": true,
"in": "query",
"schema": {
"$ref": "#/components/schemas/TimeBucketSize"
}
},
{
"name": "tagId",
"required": false,
@@ -7182,10 +7024,7 @@
"content": {
"application/json": {
"schema": {
"items": {
"$ref": "#/components/schemas/AssetResponseDto"
},
"type": "array"
"$ref": "#/components/schemas/TimeBucketAssetResponseDto"
}
}
},
@@ -7270,14 +7109,6 @@
"type": "string"
}
},
{
"name": "size",
"required": true,
"in": "query",
"schema": {
"$ref": "#/components/schemas/TimeBucketSize"
}
},
{
"name": "tagId",
"required": false,
@@ -7319,7 +7150,7 @@
"application/json": {
"schema": {
"items": {
"$ref": "#/components/schemas/TimeBucketResponseDto"
"$ref": "#/components/schemas/TimeBucketsResponseDto"
},
"type": "array"
}
@@ -9749,105 +9580,6 @@
],
"type": "object"
},
"FileChecksumDto": {
"properties": {
"filenames": {
"items": {
"type": "string"
},
"type": "array"
}
},
"required": [
"filenames"
],
"type": "object"
},
"FileChecksumResponseDto": {
"properties": {
"checksum": {
"type": "string"
},
"filename": {
"type": "string"
}
},
"required": [
"checksum",
"filename"
],
"type": "object"
},
"FileReportDto": {
"properties": {
"extras": {
"items": {
"type": "string"
},
"type": "array"
},
"orphans": {
"items": {
"$ref": "#/components/schemas/FileReportItemDto"
},
"type": "array"
}
},
"required": [
"extras",
"orphans"
],
"type": "object"
},
"FileReportFixDto": {
"properties": {
"items": {
"items": {
"$ref": "#/components/schemas/FileReportItemDto"
},
"type": "array"
}
},
"required": [
"items"
],
"type": "object"
},
"FileReportItemDto": {
"properties": {
"checksum": {
"type": "string"
},
"entityId": {
"format": "uuid",
"type": "string"
},
"entityType": {
"allOf": [
{
"$ref": "#/components/schemas/PathEntityType"
}
]
},
"pathType": {
"allOf": [
{
"$ref": "#/components/schemas/PathType"
}
]
},
"pathValue": {
"type": "string"
}
},
"required": [
"entityId",
"entityType",
"pathType",
"pathValue"
],
"type": "object"
},
"FoldersResponse": {
"properties": {
"enabled": {
@@ -10328,24 +10060,6 @@
],
"type": "object"
},
"MemoryLaneResponseDto": {
"properties": {
"assets": {
"items": {
"$ref": "#/components/schemas/AssetResponseDto"
},
"type": "array"
},
"yearsAgo": {
"type": "integer"
}
},
"required": [
"assets",
"yearsAgo"
],
"type": "object"
},
"MemoryResponseDto": {
"properties": {
"assets": {
@@ -10889,27 +10603,6 @@
],
"type": "object"
},
"PathEntityType": {
"enum": [
"asset",
"person",
"user"
],
"type": "string"
},
"PathType": {
"enum": [
"original",
"fullsize",
"preview",
"thumbnail",
"encoded_video",
"sidecar",
"face",
"profile"
],
"type": "string"
},
"PeopleResponse": {
"properties": {
"enabled": {
@@ -13867,7 +13560,131 @@
],
"type": "object"
},
"TimeBucketResponseDto": {
"TimeBucketAssetResponseDto": {
"properties": {
"city": {
"items": {
"nullable": true,
"type": "string"
},
"type": "array"
},
"country": {
"items": {
"nullable": true,
"type": "string"
},
"type": "array"
},
"duration": {
"items": {
"nullable": true,
"type": "string"
},
"type": "array"
},
"id": {
"items": {
"type": "string"
},
"type": "array"
},
"isArchived": {
"items": {
"type": "number"
},
"type": "array"
},
"isFavorite": {
"items": {
"type": "number"
},
"type": "array"
},
"isImage": {
"items": {
"type": "number"
},
"type": "array"
},
"isTrashed": {
"items": {
"type": "number"
},
"type": "array"
},
"livePhotoVideoId": {
"items": {
"nullable": true,
"type": "string"
},
"type": "array"
},
"localDateTime": {
"items": {
"type": "string"
},
"type": "array"
},
"ownerId": {
"items": {
"type": "string"
},
"type": "array"
},
"projectionType": {
"items": {
"nullable": true,
"type": "string"
},
"type": "array"
},
"ratio": {
"items": {
"type": "number"
},
"type": "array"
},
"stack": {
"description": "(stack ID, stack asset count) tuple",
"items": {
"items": {
"type": "string"
},
"maxItems": 2,
"minItems": 2,
"nullable": true,
"type": "array"
},
"type": "array"
},
"thumbhash": {
"items": {
"nullable": true,
"type": "string"
},
"type": "array"
}
},
"required": [
"city",
"country",
"duration",
"id",
"isArchived",
"isFavorite",
"isImage",
"isTrashed",
"livePhotoVideoId",
"localDateTime",
"ownerId",
"projectionType",
"ratio",
"thumbhash"
],
"type": "object"
},
"TimeBucketsResponseDto": {
"properties": {
"count": {
"type": "integer"
@@ -13882,13 +13699,6 @@
],
"type": "object"
},
"TimeBucketSize": {
"enum": [
"DAY",
"MONTH"
],
"type": "string"
},
"ToneMapping": {
"enum": [
"hable",

View File

@@ -32,7 +32,7 @@ class {{{classname}}} {
{{/required}}
{{/isNullable}}
{{/isEnum}}
{{{datatypeWithEnum}}}{{#isNullable}}?{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}?{{/defaultValue}}{{/required}}{{/isNullable}} {{{name}}};
{{#isArray}}{{#uniqueItems}}Set{{/uniqueItems}}{{^uniqueItems}}List{{/uniqueItems}}<{{{items.dataType}}}{{#items.isNullable}}?{{/items.isNullable}}>{{/isArray}}{{^isArray}}{{{datatypeWithEnum}}}{{/isArray}}{{#isNullable}}?{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}?{{/defaultValue}}{{/required}}{{/isNullable}} {{{name}}};
{{/vars}}
@override

View File

@@ -0,0 +1,13 @@
diff --git a/open-api/templates/mobile/serialization/native/native_class.mustache b/open-api/templates/mobile/serialization/native/native_class.mustache
index 9a7b1439b..9f40d5b0b 100644
--- a/open-api/templates/mobile/serialization/native/native_class.mustache
+++ b/open-api/templates/mobile/serialization/native/native_class.mustache
@@ -32,7 +32,7 @@ class {{{classname}}} {
{{/required}}
{{/isNullable}}
{{/isEnum}}
- {{{datatypeWithEnum}}}{{#isNullable}}?{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}?{{/defaultValue}}{{/required}}{{/isNullable}} {{{name}}};
+ {{#isArray}}{{#uniqueItems}}Set{{/uniqueItems}}{{^uniqueItems}}List{{/uniqueItems}}<{{{items.dataType}}}{{#items.isNullable}}?{{/items.isNullable}}>{{/isArray}}{{^isArray}}{{{datatypeWithEnum}}}{{/isArray}}{{#isNullable}}?{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}?{{/defaultValue}}{{/required}}{{/isNullable}} {{{name}}};
{{/vars}}
@override

View File

@@ -462,10 +462,6 @@ export type AssetJobsDto = {
assetIds: string[];
name: AssetJobName;
};
export type MemoryLaneResponseDto = {
assets: AssetResponseDto[];
yearsAgo: number;
};
export type AssetStatsResponseDto = {
images: number;
total: number;
@@ -800,27 +796,6 @@ export type AssetFaceUpdateDto = {
export type PersonStatisticsResponseDto = {
assets: number;
};
export type FileReportItemDto = {
checksum?: string;
entityId: string;
entityType: PathEntityType;
pathType: PathType;
pathValue: string;
};
export type FileReportDto = {
extras: string[];
orphans: FileReportItemDto[];
};
export type FileChecksumDto = {
filenames: string[];
};
export type FileChecksumResponseDto = {
checksum: string;
filename: string;
};
export type FileReportFixDto = {
items: FileReportItemDto[];
};
export type SearchExploreItem = {
data: AssetResponseDto;
value: string;
@@ -1409,7 +1384,25 @@ export type TagBulkAssetsResponseDto = {
export type TagUpdateDto = {
color?: string | null;
};
export type TimeBucketResponseDto = {
export type TimeBucketAssetResponseDto = {
city: (string | null)[];
country: (string | null)[];
duration: (string | null)[];
id: string[];
isArchived: number[];
isFavorite: number[];
isImage: number[];
isTrashed: number[];
livePhotoVideoId: (string | null)[];
localDateTime: string[];
ownerId: string[];
projectionType: (string | null)[];
ratio: number[];
/** (stack ID, stack asset count) tuple */
stack?: (string[] | null)[];
thumbhash: (string | null)[];
};
export type TimeBucketsResponseDto = {
count: number;
timeBucket: string;
};
@@ -1887,20 +1880,6 @@ export function runAssetJobs({ assetJobsDto }: {
body: assetJobsDto
})));
}
export function getMemoryLane({ day, month }: {
day: number;
month: number;
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchJson<{
status: 200;
data: MemoryLaneResponseDto[];
}>(`/assets/memory-lane${QS.query(QS.explode({
day,
month
}))}`, {
...opts
}));
}
/**
* This property was deprecated in v1.116.0
*/
@@ -2663,35 +2642,6 @@ export function getPersonThumbnail({ id }: {
...opts
}));
}
export function getAuditFiles(opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchJson<{
status: 200;
data: FileReportDto;
}>("/reports", {
...opts
}));
}
export function getFileChecksums({ fileChecksumDto }: {
fileChecksumDto: FileChecksumDto;
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchJson<{
status: 201;
data: FileChecksumResponseDto[];
}>("/reports/checksum", oazapfts.json({
...opts,
method: "POST",
body: fileChecksumDto
})));
}
export function fixAuditFiles({ fileReportFixDto }: {
fileReportFixDto: FileReportFixDto;
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchText("/reports/fix", oazapfts.json({
...opts,
method: "POST",
body: fileReportFixDto
})));
}
export function getAssetsByCity(opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchJson<{
status: 200;
@@ -3310,15 +3260,16 @@ export function tagAssets({ id, bulkIdsDto }: {
body: bulkIdsDto
})));
}
export function getTimeBucket({ albumId, isArchived, isFavorite, isTrashed, key, order, personId, size, tagId, timeBucket, userId, withPartners, withStacked }: {
export function getTimeBucket({ albumId, isArchived, isFavorite, isTrashed, key, order, page, pageSize, personId, tagId, timeBucket, userId, withPartners, withStacked }: {
albumId?: string;
isArchived?: boolean;
isFavorite?: boolean;
isTrashed?: boolean;
key?: string;
order?: AssetOrder;
page?: number;
pageSize?: number;
personId?: string;
size: TimeBucketSize;
tagId?: string;
timeBucket: string;
userId?: string;
@@ -3327,7 +3278,7 @@ export function getTimeBucket({ albumId, isArchived, isFavorite, isTrashed, key,
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchJson<{
status: 200;
data: AssetResponseDto[];
data: TimeBucketAssetResponseDto;
}>(`/timeline/bucket${QS.query(QS.explode({
albumId,
isArchived,
@@ -3335,8 +3286,9 @@ export function getTimeBucket({ albumId, isArchived, isFavorite, isTrashed, key,
isTrashed,
key,
order,
page,
pageSize,
personId,
size,
tagId,
timeBucket,
userId,
@@ -3346,7 +3298,7 @@ export function getTimeBucket({ albumId, isArchived, isFavorite, isTrashed, key,
...opts
}));
}
export function getTimeBuckets({ albumId, isArchived, isFavorite, isTrashed, key, order, personId, size, tagId, userId, withPartners, withStacked }: {
export function getTimeBuckets({ albumId, isArchived, isFavorite, isTrashed, key, order, personId, tagId, userId, withPartners, withStacked }: {
albumId?: string;
isArchived?: boolean;
isFavorite?: boolean;
@@ -3354,7 +3306,6 @@ export function getTimeBuckets({ albumId, isArchived, isFavorite, isTrashed, key
key?: string;
order?: AssetOrder;
personId?: string;
size: TimeBucketSize;
tagId?: string;
userId?: string;
withPartners?: boolean;
@@ -3362,7 +3313,7 @@ export function getTimeBuckets({ albumId, isArchived, isFavorite, isTrashed, key
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchJson<{
status: 200;
data: TimeBucketResponseDto[];
data: TimeBucketsResponseDto[];
}>(`/timeline/buckets${QS.query(QS.explode({
albumId,
isArchived,
@@ -3371,7 +3322,6 @@ export function getTimeBuckets({ albumId, isArchived, isFavorite, isTrashed, key
key,
order,
personId,
size,
tagId,
userId,
withPartners,
@@ -3751,21 +3701,6 @@ export enum PartnerDirection {
SharedBy = "shared-by",
SharedWith = "shared-with"
}
export enum PathEntityType {
Asset = "asset",
Person = "person",
User = "user"
}
export enum PathType {
Original = "original",
Fullsize = "fullsize",
Preview = "preview",
Thumbnail = "thumbnail",
EncodedVideo = "encoded_video",
Sidecar = "sidecar",
Face = "face",
Profile = "profile"
}
export enum SearchSuggestionType {
Country = "country",
State = "state",
@@ -3865,7 +3800,3 @@ export enum OAuthTokenEndpointAuthMethod {
ClientSecretPost = "client_secret_post",
ClientSecretBasic = "client_secret_basic"
}
export enum TimeBucketSize {
Day = "DAY",
Month = "MONTH"
}
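For the regenerated web client, `getTimeBuckets` no longer takes `size` and returns `TimeBucketsResponseDto[]`, while `getTimeBucket` pages with `page`/`pageSize` and returns a single columnar `TimeBucketAssetResponseDto`. A usage sketch against those signatures (the import path and parameter values are illustrative only):

import { getTimeBucket, getTimeBuckets } from './generated-sdk';

async function loadTimeline() {
  // one row per bucket: { count, timeBucket }
  const buckets = await getTimeBuckets({ withPartners: true });
  const firstBucket = buckets[0];

  // fetch the first page of the newest bucket; the old `size: TimeBucketSize` parameter is gone
  const page = await getTimeBucket({
    timeBucket: firstBucket.timeBucket,
    page: 1,
    pageSize: 100,
  });

  console.log(`${firstBucket.timeBucket}: loaded ${page.id.length} of ${firstBucket.count} assets`);
}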

View File

@@ -28,7 +28,7 @@
"archiver": "^7.0.0",
"async-lock": "^1.4.0",
"bcrypt": "^5.1.1",
"bullmq": "^4.8.0",
"bullmq": "^5.51.0",
"chokidar": "^3.5.3",
"class-transformer": "^0.5.1",
"class-validator": "^0.14.0",
@@ -6886,63 +6886,20 @@
}
},
"node_modules/bullmq": {
"version": "4.18.2",
"resolved": "https://registry.npmjs.org/bullmq/-/bullmq-4.18.2.tgz",
"integrity": "sha512-Cx0O98IlGiFw7UBa+zwGz+nH0Pcl1wfTvMVBlsMna3s0219hXroVovh1xPRgomyUcbyciHiugGCkW0RRNZDHYQ==",
"version": "5.51.0",
"resolved": "https://registry.npmjs.org/bullmq/-/bullmq-5.51.0.tgz",
"integrity": "sha512-YjX+CO2U4nmbCq2ZgNb/Hnu6Xk953j8EFmp0eehTuudavPyNstoZsbnyvvM6PX9rfD9clhcc5kRLyyWoFEM3Lg==",
"license": "MIT",
"dependencies": {
"cron-parser": "^4.6.0",
"glob": "^8.0.3",
"ioredis": "^5.3.2",
"lodash": "^4.17.21",
"msgpackr": "^1.6.2",
"cron-parser": "^4.9.0",
"ioredis": "^5.4.1",
"msgpackr": "^1.11.2",
"node-abort-controller": "^3.1.1",
"semver": "^7.5.4",
"tslib": "^2.0.0",
"uuid": "^9.0.0"
}
},
"node_modules/bullmq/node_modules/brace-expansion": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
"integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
"license": "MIT",
"dependencies": {
"balanced-match": "^1.0.0"
}
},
"node_modules/bullmq/node_modules/glob": {
"version": "8.1.0",
"resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz",
"integrity": "sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==",
"deprecated": "Glob versions prior to v9 are no longer supported",
"license": "ISC",
"dependencies": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^5.0.1",
"once": "^1.3.0"
},
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/bullmq/node_modules/minimatch": {
"version": "5.1.6",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz",
"integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==",
"license": "ISC",
"dependencies": {
"brace-expansion": "^2.0.1"
},
"engines": {
"node": ">=10"
}
},
"node_modules/busboy": {
"version": "1.6.0",
"resolved": "https://registry.npmjs.org/busboy/-/busboy-1.6.0.tgz",

View File

@@ -53,7 +53,7 @@
"archiver": "^7.0.0",
"async-lock": "^1.4.0",
"bcrypt": "^5.1.1",
"bullmq": "^4.8.0",
"bullmq": "^5.51.0",
"chokidar": "^3.5.3",
"class-transformer": "^0.5.1",
"class-validator": "^0.14.0",

View File

@@ -67,7 +67,7 @@ const runQuery = async (query: string) => {
const runMigrations = async () => {
const configRepository = new ConfigRepository();
const logger = new LoggingRepository(undefined, configRepository);
const logger = LoggingRepository.create();
const db = getDatabaseClient();
const databaseRepository = new DatabaseRepository(db, logger, configRepository);
await databaseRepository.runMigrations();

View File

@@ -72,7 +72,9 @@ class SqlGenerator {
await rm(this.options.targetDir, { force: true, recursive: true });
await mkdir(this.options.targetDir);
process.env.DB_HOSTNAME = 'localhost';
if (!process.env.DB_HOSTNAME) {
process.env.DB_HOSTNAME = 'localhost';
}
const { database, cls, otel } = new ConfigRepository().getEnv();
const moduleFixture = await Test.createTestingModule({
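The added guard only falls back to `localhost` when `DB_HOSTNAME` is not already set, so the SQL generator can be pointed at another database via the environment. A one-line sketch of the same idea, shown only as a close equivalent (unlike the `if (!...)` guard, nullish assignment would preserve an explicitly empty string):

process.env.DB_HOSTNAME ??= 'localhost'; // assign only when unset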

View File

@@ -1,7 +1,7 @@
import { Body, Controller, Delete, Get, HttpCode, HttpStatus, Param, Post, Put, Query } from '@nestjs/common';
import { ApiOperation, ApiTags } from '@nestjs/swagger';
import { EndpointLifecycle } from 'src/decorators';
import { AssetResponseDto, MemoryLaneResponseDto } from 'src/dtos/asset-response.dto';
import { AssetResponseDto } from 'src/dtos/asset-response.dto';
import {
AssetBulkDeleteDto,
AssetBulkUpdateDto,
@@ -13,7 +13,6 @@ import {
UpdateAssetDto,
} from 'src/dtos/asset.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { MemoryLaneDto } from 'src/dtos/search.dto';
import { RouteKey } from 'src/enum';
import { Auth, Authenticated } from 'src/middleware/auth.guard';
import { AssetService } from 'src/services/asset.service';
@@ -24,12 +23,6 @@ import { UUIDParamDto } from 'src/validation';
export class AssetController {
constructor(private service: AssetService) {}
@Get('memory-lane')
@Authenticated()
getMemoryLane(@Auth() auth: AuthDto, @Query() dto: MemoryLaneDto): Promise<MemoryLaneResponseDto[]> {
return this.service.getMemoryLane(auth, dto);
}
@Get('random')
@Authenticated()
@EndpointLifecycle({ deprecatedAt: 'v1.116.0' })

View File

@@ -1,29 +0,0 @@
import { Body, Controller, Get, Post } from '@nestjs/common';
import { ApiTags } from '@nestjs/swagger';
import { FileChecksumDto, FileChecksumResponseDto, FileReportDto, FileReportFixDto } from 'src/dtos/audit.dto';
import { Authenticated } from 'src/middleware/auth.guard';
import { AuditService } from 'src/services/audit.service';
@ApiTags('File Reports')
@Controller('reports')
export class ReportController {
constructor(private service: AuditService) {}
@Get()
@Authenticated({ admin: true })
getAuditFiles(): Promise<FileReportDto> {
return this.service.getFileReport();
}
@Post('checksum')
@Authenticated({ admin: true })
getFileChecksums(@Body() dto: FileChecksumDto): Promise<FileChecksumResponseDto[]> {
return this.service.getChecksums(dto);
}
@Post('fix')
@Authenticated({ admin: true })
fixAuditFiles(@Body() dto: FileReportFixDto): Promise<void> {
return this.service.fixItems(dto.items);
}
}

View File

@@ -8,7 +8,6 @@ import { AuthController } from 'src/controllers/auth.controller';
import { DownloadController } from 'src/controllers/download.controller';
import { DuplicateController } from 'src/controllers/duplicate.controller';
import { FaceController } from 'src/controllers/face.controller';
import { ReportController } from 'src/controllers/file-report.controller';
import { JobController } from 'src/controllers/job.controller';
import { LibraryController } from 'src/controllers/library.controller';
import { MapController } from 'src/controllers/map.controller';
@@ -53,7 +52,6 @@ export const controllers = [
OAuthController,
PartnerController,
PersonController,
ReportController,
SearchController,
ServerController,
SessionController,

View File

@@ -46,7 +46,7 @@ export class SearchController {
@Get('explore')
@Authenticated()
getExploreData(@Auth() auth: AuthDto): Promise<SearchExploreResponseDto[]> {
return this.service.getExploreData(auth) as Promise<SearchExploreResponseDto[]>;
return this.service.getExploreData(auth);
}
@Get('person')

View File

@@ -1,8 +1,8 @@
import { Controller, Get, Query } from '@nestjs/common';
import { Controller, Get, Query, Res } from '@nestjs/common';
import { ApiTags } from '@nestjs/swagger';
import { AssetResponseDto } from 'src/dtos/asset-response.dto';
import { Response } from 'express';
import { AuthDto } from 'src/dtos/auth.dto';
import { TimeBucketAssetDto, TimeBucketDto, TimeBucketResponseDto } from 'src/dtos/time-bucket.dto';
import { TimeBucketAssetDto, TimeBucketAssetResponseDto, TimeBucketDto } from 'src/dtos/time-bucket.dto';
import { Permission } from 'src/enum';
import { Auth, Authenticated } from 'src/middleware/auth.guard';
import { TimelineService } from 'src/services/timeline.service';
@@ -14,13 +14,19 @@ export class TimelineController {
@Get('buckets')
@Authenticated({ permission: Permission.ASSET_READ, sharedLink: true })
getTimeBuckets(@Auth() auth: AuthDto, @Query() dto: TimeBucketDto): Promise<TimeBucketResponseDto[]> {
getTimeBuckets(@Auth() auth: AuthDto, @Query() dto: TimeBucketDto) {
return this.service.getTimeBuckets(auth, dto);
}
@Get('bucket')
@Authenticated({ permission: Permission.ASSET_READ, sharedLink: true })
getTimeBucket(@Auth() auth: AuthDto, @Query() dto: TimeBucketAssetDto): Promise<AssetResponseDto[]> {
return this.service.getTimeBucket(auth, dto) as Promise<AssetResponseDto[]>;
async getTimeBucket(
@Auth() auth: AuthDto,
@Query() dto: TimeBucketAssetDto,
@Res({ passthrough: true }) res: Response,
): Promise<TimeBucketAssetResponseDto> {
res.contentType('application/json');
const jsonBucket = await this.service.getTimeBucket(auth, dto);
return jsonBucket as unknown as TimeBucketAssetResponseDto;
}
}

View File

@@ -165,6 +165,12 @@ export type Stack = {
assetCount?: number;
};
export type TimelineStack = {
id: string;
primaryAssetId: string;
assetCount: number;
};
export type AuthSharedLink = {
id: string;
expiresAt: Date | null;

View File

@@ -13,6 +13,7 @@ import {
import { TagResponseDto, mapTag } from 'src/dtos/tag.dto';
import { UserResponseDto, mapUser } from 'src/dtos/user.dto';
import { AssetStatus, AssetType } from 'src/enum';
import { hexOrBufferToBase64 } from 'src/utils/bytes';
import { mimeTypes } from 'src/utils/mime-types';
export class SanitizedAssetResponseDto {
@@ -140,15 +141,6 @@ const mapStack = (entity: { stack?: Stack | null }) => {
};
};
// if an asset is jsonified in the DB before being returned, its buffer fields will be hex-encoded strings
export const hexOrBufferToBase64 = (encoded: string | Buffer) => {
if (typeof encoded === 'string') {
return Buffer.from(encoded.slice(2), 'hex').toString('base64');
}
return encoded.toString('base64');
};
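
A hedged usage sketch of the helper above, which this change relocates to src/utils/bytes and imports at the top of the file: Postgres bytea values serialized to JSON inside the database arrive as backslash-x-prefixed hex strings, rows fetched directly expose Buffers, and both forms normalize to the same base64 string.

import { hexOrBufferToBase64 } from 'src/utils/bytes';

// value as it appears after to_json()/json_agg inside Postgres: '\x' followed by hex digits
const fromJsonRow = hexOrBufferToBase64(String.raw`\xdeadbeef`); // '3q2+7w=='
// value as it appears when the driver returns the column directly
const fromBuffer = hexOrBufferToBase64(Buffer.from('deadbeef', 'hex')); // '3q2+7w=='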
export function mapAsset(entity: MapAsset, options: AssetMapOptions = {}): AssetResponseDto {
const { stripMetadata = false, withStack = false } = options;
@@ -191,7 +183,7 @@ export function mapAsset(entity: MapAsset, options: AssetMapOptions = {}): Asset
tags: entity.tags?.map((tag) => mapTag(tag)),
people: peopleWithFaces(entity.faces),
unassignedFaces: entity.faces?.filter((face) => !face.person).map((a) => mapFacesWithoutPerson(a)),
checksum: hexOrBufferToBase64(entity.checksum),
checksum: hexOrBufferToBase64(entity.checksum)!,
stack: withStack ? mapStack(entity) : undefined,
isOffline: entity.isOffline,
hasMetadata: true,
@@ -199,10 +191,3 @@ export function mapAsset(entity: MapAsset, options: AssetMapOptions = {}): Asset
resized: true,
};
}
export class MemoryLaneResponseDto {
@ApiProperty({ type: 'integer' })
yearsAgo!: number;
assets!: AssetResponseDto[];
}

View File

@@ -1,73 +0,0 @@
import { ApiProperty } from '@nestjs/swagger';
import { Type } from 'class-transformer';
import { IsArray, IsEnum, IsString, IsUUID, ValidateNested } from 'class-validator';
import { AssetPathType, EntityType, PathType, PersonPathType, UserPathType } from 'src/enum';
import { Optional, ValidateDate, ValidateUUID } from 'src/validation';
const PathEnum = Object.values({ ...AssetPathType, ...PersonPathType, ...UserPathType });
export class AuditDeletesDto {
@ValidateDate()
after!: Date;
@ApiProperty({ enum: EntityType, enumName: 'EntityType' })
@IsEnum(EntityType)
entityType!: EntityType;
@Optional()
@IsUUID('4')
@ApiProperty({ format: 'uuid' })
userId?: string;
}
export enum PathEntityType {
ASSET = 'asset',
PERSON = 'person',
USER = 'user',
}
export class AuditDeletesResponseDto {
needsFullSync!: boolean;
ids!: string[];
}
export class FileReportDto {
orphans!: FileReportItemDto[];
extras!: string[];
}
export class FileChecksumDto {
@IsString({ each: true })
filenames!: string[];
}
export class FileChecksumResponseDto {
filename!: string;
checksum!: string;
}
export class FileReportFixDto {
@IsArray()
@ValidateNested({ each: true })
@Type(() => FileReportItemDto)
items!: FileReportItemDto[];
}
// used both as request and response dto
export class FileReportItemDto {
@ValidateUUID()
entityId!: string;
@ApiProperty({ enumName: 'PathEntityType', enum: PathEntityType })
@IsEnum(PathEntityType)
entityType!: PathEntityType;
@ApiProperty({ enumName: 'PathType', enum: PathEnum })
@IsEnum(PathEnum)
pathType!: PathType;
@IsString()
pathValue!: string;
checksum?: string;
}

View File

@@ -1,15 +1,11 @@
import { ApiProperty } from '@nestjs/swagger';
import { IsEnum, IsNotEmpty, IsString } from 'class-validator';
import { IsEnum, IsInt, IsString, Min } from 'class-validator';
import { AssetOrder } from 'src/enum';
import { TimeBucketSize } from 'src/repositories/asset.repository';
import { TimeBucketAssets, TimelineStack } from 'src/services/timeline.service.types';
import { Optional, ValidateBoolean, ValidateUUID } from 'src/validation';
export class TimeBucketDto {
@IsNotEmpty()
@IsEnum(TimeBucketSize)
@ApiProperty({ enum: TimeBucketSize, enumName: 'TimeBucketSize' })
size!: TimeBucketSize;
@ValidateUUID({ optional: true })
userId?: string;
@@ -46,9 +42,75 @@ export class TimeBucketDto {
export class TimeBucketAssetDto extends TimeBucketDto {
@IsString()
timeBucket!: string;
@IsInt()
@Min(1)
@Optional()
page?: number;
@IsInt()
@Min(1)
@Optional()
pageSize?: number;
}
export class TimeBucketResponseDto {
export class TimelineStackResponseDto implements TimelineStack {
id!: string;
primaryAssetId!: string;
assetCount!: number;
}
export class TimeBucketAssetResponseDto implements TimeBucketAssets {
id!: string[];
ownerId!: string[];
ratio!: number[];
isFavorite!: number[];
isArchived!: number[];
isTrashed!: number[];
isImage!: number[];
@ApiProperty({ type: 'array', items: { type: 'string', nullable: true } })
thumbhash!: (string | null)[];
localDateTime!: string[];
@ApiProperty({ type: 'array', items: { type: 'string', nullable: true } })
duration!: (string | null)[];
// id, count
@ApiProperty({
type: 'array',
items: {
type: 'array',
items: { type: 'string' },
minItems: 2,
maxItems: 2,
nullable: true,
},
description: '(stack ID, stack asset count) tuple',
})
stack?: ([string, string] | null)[];
@ApiProperty({ type: 'array', items: { type: 'string', nullable: true } })
projectionType!: (string | null)[];
@ApiProperty({ type: 'array', items: { type: 'string', nullable: true } })
livePhotoVideoId!: (string | null)[];
@ApiProperty({ type: 'array', items: { type: 'string', nullable: true } })
city!: (string | null)[];
@ApiProperty({ type: 'array', items: { type: 'string', nullable: true } })
country!: (string | null)[];
}
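
TimeBucketAssetResponseDto is columnar: each field is an index-aligned array, so position i across every array describes asset i, which keeps bucket payloads compact. A hypothetical client-side sketch, not part of this diff, of folding such a response back into per-asset objects:

interface BucketColumns {
  id: string[];
  ownerId: string[];
  ratio: number[];
  thumbhash: (string | null)[];
  stack?: ([string, string] | null)[];
}

function toAssetRows(bucket: BucketColumns) {
  // every array is index-aligned, so the i-th element of each column belongs to the same asset
  return bucket.id.map((id, i) => ({
    id,
    ownerId: bucket.ownerId[i],
    ratio: bucket.ratio[i],
    thumbhash: bucket.thumbhash[i],
    // a (stack ID, stack asset count) tuple, or null when the asset is not stacked
    stack: bucket.stack?.[i] ?? null,
  }));
}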
export class TimeBucketsResponseDto {
@ApiProperty({ type: 'string' })
timeBucket!: string;

View File

@@ -194,15 +194,16 @@ where
"asset_files"."assetId" = $1
and "asset_files"."type" = $2
-- AssetJobRepository.streamForEncodeClip
-- AssetJobRepository.streamForSearchDuplicates
select
"assets"."id"
from
"assets"
inner join "asset_job_status" as "job_status" on "assetId" = "assets"."id"
where
"job_status"."previewAt" is not null
and "assets"."isVisible" = $1
"assets"."isVisible" = $1
and "assets"."deletedAt" is null
and "job_status"."previewAt" is not null
and not exists (
select
from
@@ -210,7 +211,25 @@ where
where
"assetId" = "assets"."id"
)
and "job_status"."duplicatesDetectedAt" is null
-- AssetJobRepository.streamForEncodeClip
select
"assets"."id"
from
"assets"
inner join "asset_job_status" as "job_status" on "assetId" = "assets"."id"
where
"assets"."isVisible" = $1
and "assets"."deletedAt" is null
and "job_status"."previewAt" is not null
and not exists (
select
from
"smart_search"
where
"assetId" = "assets"."id"
)
-- AssetJobRepository.getForClipEncoding
select
@@ -450,3 +469,37 @@ from
"assets"
where
"assets"."deletedAt" <= $1
-- AssetJobRepository.streamForSidecar
select
"assets"."id"
from
"assets"
where
(
"assets"."sidecarPath" = $1
or "assets"."sidecarPath" is null
)
and "assets"."isVisible" = $2
-- AssetJobRepository.streamForDetectFacesJob
select
"assets"."id"
from
"assets"
inner join "asset_job_status" as "job_status" on "assetId" = "assets"."id"
where
"assets"."isVisible" = $1
and "assets"."deletedAt" is null
and "job_status"."previewAt" is not null
and "job_status"."facesRecognizedAt" is null
order by
"assets"."createdAt" desc
-- AssetJobRepository.streamForMigrationJob
select
"id"
from
"assets"
where
"assets"."deletedAt" is null

View File

@@ -232,35 +232,16 @@ where
limit
$3
-- AssetRepository.getWithout (sidecar)
select
"assets".*
from
"assets"
where
(
"assets"."sidecarPath" = $1
or "assets"."sidecarPath" is null
)
and "assets"."isVisible" = $2
and "deletedAt" is null
order by
"createdAt"
limit
$3
offset
$4
-- AssetRepository.getTimeBuckets
with
"assets" as (
select
date_trunc($1, "localDateTime" at time zone 'UTC') at time zone 'UTC' as "timeBucket"
date_trunc('MONTH', "localDateTime" at time zone 'UTC') at time zone 'UTC' as "timeBucket"
from
"assets"
where
"assets"."deletedAt" is null
and "assets"."isVisible" = $2
and "assets"."isVisible" = $1
)
select
"timeBucket",
@@ -273,37 +254,95 @@ order by
"timeBucket" desc
-- AssetRepository.getTimeBucket
select
"assets".*,
to_json("exif") as "exifInfo",
to_json("stacked_assets") as "stack"
from
"assets"
left join "exif" on "assets"."id" = "exif"."assetId"
left join "asset_stack" on "asset_stack"."id" = "assets"."stackId"
left join lateral (
with
"cte" as (
select
"asset_stack".*,
count("stacked") as "assetCount"
"assets"."duration",
"assets"."id",
assets."isArchived"::int as "isArchived",
assets."isFavorite"::int as "isFavorite",
(assets.type = 'IMAGE')::int as "isImage",
(assets."deletedAt" is null)::int as "isTrashed",
(assets.type = 'VIDEO')::int as "isVideo",
"assets"."livePhotoVideoId",
"assets"."localDateTime",
"assets"."ownerId",
"assets"."status",
encode("assets"."thumbhash", 'base64') as "thumbhash",
"exif"."city",
"exif"."country",
"exif"."projectionType",
coalesce(
case
when exif."exifImageHeight" = 0
or exif."exifImageWidth" = 0 then 1
when "exif"."orientation" in ('5', '6', '7', '8', '-90', '90') then round(
exif."exifImageHeight"::numeric / exif."exifImageWidth"::numeric,
3
)
else round(
exif."exifImageWidth"::numeric / exif."exifImageHeight"::numeric,
3
)
end,
1
) as "ratio",
"stack"
from
"assets" as "stacked"
"assets"
inner join "exif" on "assets"."id" = "exif"."assetId"
left join lateral (
select
array[stacked."stackId"::text, count('stacked')::text] as "stack"
from
"assets" as "stacked"
where
"stacked"."stackId" = "assets"."stackId"
and "stacked"."deletedAt" is null
and "stacked"."isArchived" = $1
group by
"stacked"."stackId"
) as "stacked_assets" on true
where
"stacked"."stackId" = "asset_stack"."id"
and "stacked"."deletedAt" is null
and "stacked"."isArchived" = $1
group by
"asset_stack"."id"
) as "stacked_assets" on "asset_stack"."id" is not null
where
(
"asset_stack"."primaryAssetId" = "assets"."id"
or "assets"."stackId" is null
"assets"."deletedAt" is null
and "assets"."isVisible" = $2
and date_trunc('MONTH', "localDateTime" at time zone 'UTC') at time zone 'UTC' = $3
and not exists (
select
from
"asset_stack"
where
"asset_stack"."id" = "assets"."stackId"
and "asset_stack"."primaryAssetId" != "assets"."id"
)
order by
"assets"."localDateTime" desc
),
"agg" as (
select
coalesce(array_agg("city"), '{}') as "city",
coalesce(array_agg("country"), '{}') as "country",
coalesce(array_agg("duration"), '{}') as "duration",
coalesce(array_agg("id"), '{}') as "id",
coalesce(array_agg("isArchived"), '{}') as "isArchived",
coalesce(array_agg("isFavorite"), '{}') as "isFavorite",
coalesce(array_agg("isImage"), '{}') as "isImage",
coalesce(array_agg("isTrashed"), '{}') as "isTrashed",
coalesce(array_agg("livePhotoVideoId"), '{}') as "livePhotoVideoId",
coalesce(array_agg("localDateTime"), '{}') as "localDateTime",
coalesce(array_agg("ownerId"), '{}') as "ownerId",
coalesce(array_agg("projectionType"), '{}') as "projectionType",
coalesce(array_agg("ratio"), '{}') as "ratio",
coalesce(array_agg("status"), '{}') as "status",
coalesce(array_agg("thumbhash"), '{}') as "thumbhash",
coalesce(json_agg("stack"), '[]') as "stack"
from
"cte"
)
and "assets"."deletedAt" is null
and "assets"."isVisible" = $2
and date_trunc($3, "localDateTime" at time zone 'UTC') at time zone 'UTC' = $4
order by
"assets"."localDateTime" desc
select
to_json(agg)::text as "assets"
from
"agg"
-- AssetRepository.getDuplicates
with

View File

@@ -135,20 +135,33 @@ export class AssetJobRepository {
.execute();
}
@GenerateSql({ params: [], stream: true })
streamForEncodeClip(force?: boolean) {
private assetsWithPreviews() {
return this.db
.selectFrom('assets')
.select(['assets.id'])
.innerJoin('asset_job_status as job_status', 'assetId', 'assets.id')
.where('job_status.previewAt', 'is not', null)
.where('assets.isVisible', '=', true)
.where('assets.deletedAt', 'is', null)
.innerJoin('asset_job_status as job_status', 'assetId', 'assets.id')
.where('job_status.previewAt', 'is not', null);
}
@GenerateSql({ params: [], stream: true })
streamForSearchDuplicates(force?: boolean) {
return this.assetsWithPreviews()
.where((eb) => eb.not((eb) => eb.exists(eb.selectFrom('smart_search').whereRef('assetId', '=', 'assets.id'))))
.$if(!force, (qb) => qb.where('job_status.duplicatesDetectedAt', 'is', null))
.select(['assets.id'])
.stream();
}
@GenerateSql({ params: [], stream: true })
streamForEncodeClip(force?: boolean) {
return this.assetsWithPreviews()
.select(['assets.id'])
.$if(!force, (qb) =>
qb.where((eb) =>
eb.not((eb) => eb.exists(eb.selectFrom('smart_search').whereRef('assetId', '=', 'assets.id'))),
),
)
.where('assets.deletedAt', 'is', null)
.stream();
}
@@ -309,4 +322,30 @@ export class AssetJobRepository {
.where('assets.deletedAt', '<=', trashedBefore)
.stream();
}
@GenerateSql({ params: [], stream: true })
streamForSidecar(force?: boolean) {
return this.db
.selectFrom('assets')
.select(['assets.id'])
.$if(!force, (qb) =>
qb.where((eb) => eb.or([eb('assets.sidecarPath', '=', ''), eb('assets.sidecarPath', 'is', null)])),
)
.where('assets.isVisible', '=', true)
.stream();
}
@GenerateSql({ params: [], stream: true })
streamForDetectFacesJob(force?: boolean) {
return this.assetsWithPreviews()
.$if(!force, (qb) => qb.where('job_status.facesRecognizedAt', 'is', null))
.select(['assets.id'])
.orderBy('assets.createdAt', 'desc')
.stream();
}
@GenerateSql({ params: [DummyValue.DATE], stream: true })
streamForMigrationJob() {
return this.db.selectFrom('assets').select(['id']).where('assets.deletedAt', 'is', null).stream();
}
}

View File

@@ -7,13 +7,11 @@ import { AssetFiles, AssetJobStatus, Assets, DB, Exif } from 'src/db';
import { Chunked, ChunkedArray, DummyValue, GenerateSql } from 'src/decorators';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { AssetFileType, AssetOrder, AssetStatus, AssetType } from 'src/enum';
import { AssetSearchOptions, SearchExploreItem, SearchExploreItemSet } from 'src/repositories/search.repository';
import {
anyUuid,
asUuid,
hasPeople,
removeUndefinedKeys,
searchAssetBuilder,
truncatedDate,
unnest,
withExif,
@@ -27,7 +25,6 @@ import {
withTags,
} from 'src/utils/database';
import { globToSqlPattern } from 'src/utils/misc';
import { PaginationOptions, paginationHelper } from 'src/utils/pagination';
export type AssetStats = Record<AssetType, number>;
@@ -45,15 +42,6 @@ export interface LivePhotoSearchOptions {
type: AssetType;
}
export enum WithoutProperty {
THUMBNAIL = 'thumbnail',
ENCODED_VIDEO = 'encoded-video',
EXIF = 'exif',
DUPLICATE = 'duplicate',
FACES = 'faces',
SIDECAR = 'sidecar',
}
export enum WithProperty {
SIDECAR = 'sidecar',
}
@@ -79,7 +67,6 @@ export interface AssetBuilderOptions {
}
export interface TimeBucketOptions extends AssetBuilderOptions {
size: TimeBucketSize;
order?: AssetOrder;
}
@@ -335,10 +322,6 @@ export class AssetRepository {
return assets.map((asset) => asset.deviceAssetId);
}
getByUserId(pagination: PaginationOptions, userId: string, options: Omit<AssetSearchOptions, 'userIds'> = {}) {
return this.getAll(pagination, { ...options, userIds: [userId] });
}
@GenerateSql({ params: [DummyValue.UUID, DummyValue.STRING] })
getByLibraryIdAndOriginalPath(libraryId: string, originalPath: string) {
return this.db
@@ -350,16 +333,6 @@ export class AssetRepository {
.executeTakeFirst();
}
async getAll(pagination: PaginationOptions, { orderDirection, ...options }: AssetSearchOptions = {}) {
const builder = searchAssetBuilder(this.db, options)
.select(withFiles)
.orderBy('assets.createdAt', orderDirection ?? 'asc')
.limit(pagination.take + 1)
.offset(pagination.skip ?? 0);
const items = await builder.execute();
return paginationHelper(items, pagination.take);
}
/**
* Get assets by device's Id on the database
* @param ownerId
@@ -529,68 +502,6 @@ export class AssetRepository {
.executeTakeFirst();
}
@GenerateSql(
...Object.values(WithProperty).map((property) => ({
name: property,
params: [DummyValue.PAGINATION, property],
})),
)
async getWithout(pagination: PaginationOptions, property: WithoutProperty) {
const items = await this.db
.selectFrom('assets')
.selectAll('assets')
.$if(property === WithoutProperty.DUPLICATE, (qb) =>
qb
.innerJoin('asset_job_status as job_status', 'assets.id', 'job_status.assetId')
.where('job_status.duplicatesDetectedAt', 'is', null)
.where('job_status.previewAt', 'is not', null)
.where((eb) => eb.exists(eb.selectFrom('smart_search').where('assetId', '=', eb.ref('assets.id'))))
.where('assets.isVisible', '=', true),
)
.$if(property === WithoutProperty.ENCODED_VIDEO, (qb) =>
qb
.where('assets.type', '=', AssetType.VIDEO)
.where((eb) => eb.or([eb('assets.encodedVideoPath', 'is', null), eb('assets.encodedVideoPath', '=', '')])),
)
.$if(property === WithoutProperty.EXIF, (qb) =>
qb
.leftJoin('asset_job_status as job_status', 'assets.id', 'job_status.assetId')
.where((eb) => eb.or([eb('job_status.metadataExtractedAt', 'is', null), eb('assetId', 'is', null)]))
.where('assets.isVisible', '=', true),
)
.$if(property === WithoutProperty.FACES, (qb) =>
qb
.innerJoin('asset_job_status as job_status', 'assetId', 'assets.id')
.where('job_status.previewAt', 'is not', null)
.where('job_status.facesRecognizedAt', 'is', null)
.where('assets.isVisible', '=', true),
)
.$if(property === WithoutProperty.SIDECAR, (qb) =>
qb
.where((eb) => eb.or([eb('assets.sidecarPath', '=', ''), eb('assets.sidecarPath', 'is', null)]))
.where('assets.isVisible', '=', true),
)
.$if(property === WithoutProperty.THUMBNAIL, (qb) =>
qb
.innerJoin('asset_job_status as job_status', 'assetId', 'assets.id')
.where('assets.isVisible', '=', true)
.where((eb) =>
eb.or([
eb('job_status.previewAt', 'is', null),
eb('job_status.thumbnailAt', 'is', null),
eb('assets.thumbhash', 'is', null),
]),
),
)
.where('deletedAt', 'is', null)
.limit(pagination.take + 1)
.offset(pagination.skip ?? 0)
.orderBy('createdAt')
.execute();
return paginationHelper(items, pagination.take);
}
getStatistics(ownerId: string, { isArchived, isFavorite, isTrashed }: AssetStatsOptions): Promise<AssetStats> {
return this.db
.selectFrom('assets')
@@ -627,7 +538,7 @@ export class AssetRepository {
.with('assets', (qb) =>
qb
.selectFrom('assets')
.select(truncatedDate<Date>(options.size).as('timeBucket'))
.select(truncatedDate<Date>(TimeBucketSize.MONTH).as('timeBucket'))
.$if(!!options.isTrashed, (qb) => qb.where('assets.status', '!=', AssetStatus.DELETED))
.where('assets.deletedAt', options.isTrashed ? 'is not' : 'is', null)
.where('assets.isVisible', '=', true)
@@ -669,53 +580,125 @@ export class AssetRepository {
);
}
@GenerateSql({ params: [DummyValue.TIME_BUCKET, { size: TimeBucketSize.MONTH, withStacked: true }] })
async getTimeBucket(timeBucket: string, options: TimeBucketOptions) {
return this.db
.selectFrom('assets')
.selectAll('assets')
.$call(withExif)
.$if(!!options.albumId, (qb) =>
@GenerateSql({
params: [DummyValue.TIME_BUCKET, { size: TimeBucketSize.MONTH, withStacked: true }, { skip: 0, take: 1000 }],
})
getTimeBucket(timeBucket: string, options: TimeBucketOptions) {
const query = this.db
.with('cte', (qb) =>
qb
.innerJoin('albums_assets_assets', 'albums_assets_assets.assetsId', 'assets.id')
.where('albums_assets_assets.albumsId', '=', options.albumId!),
.selectFrom('assets')
.innerJoin('exif', 'assets.id', 'exif.assetId')
.select((eb) => [
'assets.duration',
'assets.id',
sql`assets."isArchived"::int`.as('isArchived'),
sql`assets."isFavorite"::int`.as('isFavorite'),
sql`(assets.type = 'IMAGE')::int`.as('isImage'),
sql`(assets."deletedAt" is null)::int`.as('isTrashed'),
sql`(assets.type = 'VIDEO')::int`.as('isVideo'),
'assets.livePhotoVideoId',
'assets.localDateTime',
'assets.ownerId',
'assets.status',
eb.fn('encode', ['assets.thumbhash', sql.lit('base64')]).as('thumbhash'),
'exif.city',
'exif.country',
'exif.projectionType',
eb.fn
.coalesce(
eb
.case()
.when(sql`exif."exifImageHeight" = 0 or exif."exifImageWidth" = 0`)
.then(eb.lit(1))
.when('exif.orientation', 'in', sql<string>`('5', '6', '7', '8', '-90', '90')`)
.then(sql`round(exif."exifImageHeight"::numeric / exif."exifImageWidth"::numeric, 3)`)
.else(sql`round(exif."exifImageWidth"::numeric / exif."exifImageHeight"::numeric, 3)`)
.end(),
eb.lit(1),
)
.as('ratio'),
])
.where('assets.deletedAt', options.isTrashed ? 'is not' : 'is', null)
.where('assets.isVisible', '=', true)
.where(truncatedDate(TimeBucketSize.MONTH), '=', timeBucket.replace(/^[+-]/, ''))
.$if(!!options.albumId, (qb) =>
qb.where((eb) =>
eb.exists(
eb
.selectFrom('albums_assets_assets')
.whereRef('albums_assets_assets.assetsId', '=', 'assets.id')
.where('albums_assets_assets.albumsId', '=', asUuid(options.albumId!)),
),
),
)
.$if(!!options.personId, (qb) => hasPeople(qb, [options.personId!]))
.$if(!!options.userIds, (qb) => qb.where('assets.ownerId', '=', anyUuid(options.userIds!)))
.$if(options.isArchived !== undefined, (qb) => qb.where('assets.isArchived', '=', options.isArchived!))
.$if(options.isFavorite !== undefined, (qb) => qb.where('assets.isFavorite', '=', options.isFavorite!))
.$if(!!options.withStacked, (qb) =>
qb
.where((eb) =>
eb.not(
eb.exists(
eb
.selectFrom('asset_stack')
.whereRef('asset_stack.id', '=', 'assets.stackId')
.whereRef('asset_stack.primaryAssetId', '!=', 'assets.id'),
),
),
)
.leftJoinLateral(
(eb) =>
eb
.selectFrom('assets as stacked')
.select(sql`array[stacked."stackId"::text, count('stacked')::text]`.as('stack'))
.whereRef('stacked.stackId', '=', 'assets.stackId')
.where('stacked.deletedAt', 'is', null)
.where('stacked.isArchived', '=', false)
.groupBy('stacked.stackId')
.as('stacked_assets'),
(join) => join.onTrue(),
)
.select('stack'),
)
.$if(!!options.assetType, (qb) => qb.where('assets.type', '=', options.assetType!))
.$if(options.isDuplicate !== undefined, (qb) =>
qb.where('assets.duplicateId', options.isDuplicate ? 'is not' : 'is', null),
)
.$if(!!options.isTrashed, (qb) => qb.where('assets.status', '!=', AssetStatus.DELETED))
.$if(!!options.tagId, (qb) => withTagId(qb, options.tagId!))
.orderBy('assets.localDateTime', options.order ?? 'desc'),
)
.$if(!!options.personId, (qb) => hasPeople(qb, [options.personId!]))
.$if(!!options.userIds, (qb) => qb.where('assets.ownerId', '=', anyUuid(options.userIds!)))
.$if(options.isArchived !== undefined, (qb) => qb.where('assets.isArchived', '=', options.isArchived!))
.$if(options.isFavorite !== undefined, (qb) => qb.where('assets.isFavorite', '=', options.isFavorite!))
.$if(!!options.withStacked, (qb) =>
.with('agg', (qb) =>
qb
.leftJoin('asset_stack', 'asset_stack.id', 'assets.stackId')
.where((eb) =>
eb.or([eb('asset_stack.primaryAssetId', '=', eb.ref('assets.id')), eb('assets.stackId', 'is', null)]),
)
.leftJoinLateral(
(eb) =>
eb
.selectFrom('assets as stacked')
.selectAll('asset_stack')
.select((eb) => eb.fn.count(eb.table('stacked')).as('assetCount'))
.whereRef('stacked.stackId', '=', 'asset_stack.id')
.where('stacked.deletedAt', 'is', null)
.where('stacked.isArchived', '=', false)
.groupBy('asset_stack.id')
.as('stacked_assets'),
(join) => join.on('asset_stack.id', 'is not', null),
)
.select((eb) => eb.fn.toJson(eb.table('stacked_assets').$castTo<Stack | null>()).as('stack')),
.selectFrom('cte')
.select((eb) => [
eb.fn.coalesce(eb.fn('array_agg', ['city']), sql.lit('{}')).as('city'),
eb.fn.coalesce(eb.fn('array_agg', ['country']), sql.lit('{}')).as('country'),
eb.fn.coalesce(eb.fn('array_agg', ['duration']), sql.lit('{}')).as('duration'),
eb.fn.coalesce(eb.fn('array_agg', ['id']), sql.lit('{}')).as('id'),
eb.fn.coalesce(eb.fn('array_agg', ['isArchived']), sql.lit('{}')).as('isArchived'),
eb.fn.coalesce(eb.fn('array_agg', ['isFavorite']), sql.lit('{}')).as('isFavorite'),
eb.fn.coalesce(eb.fn('array_agg', ['isImage']), sql.lit('{}')).as('isImage'),
// TODO: isTrashed is redundant as it will always be all 0s or 1s depending on the options
eb.fn.coalesce(eb.fn('array_agg', ['isTrashed']), sql.lit('{}')).as('isTrashed'),
eb.fn.coalesce(eb.fn('array_agg', ['livePhotoVideoId']), sql.lit('{}')).as('livePhotoVideoId'),
eb.fn.coalesce(eb.fn('array_agg', ['localDateTime']), sql.lit('{}')).as('localDateTime'),
eb.fn.coalesce(eb.fn('array_agg', ['ownerId']), sql.lit('{}')).as('ownerId'),
eb.fn.coalesce(eb.fn('array_agg', ['projectionType']), sql.lit('{}')).as('projectionType'),
eb.fn.coalesce(eb.fn('array_agg', ['ratio']), sql.lit('{}')).as('ratio'),
eb.fn.coalesce(eb.fn('array_agg', ['status']), sql.lit('{}')).as('status'),
eb.fn.coalesce(eb.fn('array_agg', ['thumbhash']), sql.lit('{}')).as('thumbhash'),
])
.$if(!!options.withStacked, (qb) =>
qb.select((eb) => eb.fn.coalesce(eb.fn('json_agg', ['stack']), sql.lit('[]')).as('stack')),
),
)
.$if(!!options.assetType, (qb) => qb.where('assets.type', '=', options.assetType!))
.$if(options.isDuplicate !== undefined, (qb) =>
qb.where('assets.duplicateId', options.isDuplicate ? 'is not' : 'is', null),
)
.$if(!!options.isTrashed, (qb) => qb.where('assets.status', '!=', AssetStatus.DELETED))
.$if(!!options.tagId, (qb) => withTagId(qb, options.tagId!))
.where('assets.deletedAt', options.isTrashed ? 'is not' : 'is', null)
.where('assets.isVisible', '=', true)
.where(truncatedDate(options.size), '=', timeBucket.replace(/^[+-]/, ''))
.orderBy('assets.localDateTime', options.order ?? 'desc')
.execute();
.selectFrom('agg')
.select(sql<string>`to_json(agg)::text`.as('assets'));
return query.executeTakeFirstOrThrow();
}
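
The query above pushes all aggregation into Postgres: the cte selects one row per asset, the agg CTE collapses every column into an array with array_agg, and to_json(agg)::text turns the single aggregate row into a JSON string the service can return untouched. A reduced, hedged sketch of that shape, assuming a Kysely instance db over a simplified assets table (column names here are illustrative, not the real schema):

import { Kysely, sql } from 'kysely';

async function bucketAsJson(db: Kysely<any>, timeBucket: string) {
  return db
    .with('cte', (qb) =>
      qb
        .selectFrom('assets')
        .select(['assets.id', 'assets.localDateTime'])
        .where(sql`date_trunc('MONTH', "localDateTime")`, '=', timeBucket),
    )
    .with('agg', (qb) =>
      qb
        .selectFrom('cte')
        .select((eb) => [
          // coalesce to an empty array so an empty bucket still serializes cleanly
          eb.fn.coalesce(eb.fn('array_agg', ['id']), sql.lit('{}')).as('id'),
          eb.fn.coalesce(eb.fn('array_agg', ['localDateTime']), sql.lit('{}')).as('localDateTime'),
        ]),
    )
    .selectFrom('agg')
    .select(sql<string>`to_json(agg)::text`.as('assets'))
    .executeTakeFirstOrThrow();
}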
@GenerateSql({ params: [DummyValue.UUID] })
@@ -774,10 +757,7 @@ export class AssetRepository {
}
@GenerateSql({ params: [DummyValue.UUID, { minAssetsPerField: 5, maxFields: 12 }] })
async getAssetIdByCity(
ownerId: string,
{ minAssetsPerField, maxFields }: AssetExploreFieldOptions,
): Promise<SearchExploreItem<string>> {
async getAssetIdByCity(ownerId: string, { minAssetsPerField, maxFields }: AssetExploreFieldOptions) {
const items = await this.db
.with('cities', (qb) =>
qb
@@ -792,6 +772,7 @@ export class AssetRepository {
.innerJoin('cities', 'exif.city', 'cities.city')
.distinctOn('exif.city')
.select(['assetId as data', 'exif.city as value'])
.$narrowType<{ value: NotNull }>()
.where('ownerId', '=', asUuid(ownerId))
.where('isVisible', '=', true)
.where('isArchived', '=', false)
@@ -800,7 +781,7 @@ export class AssetRepository {
.limit(maxFields)
.execute();
return { fieldName: 'exifInfo.city', items: items as SearchExploreItemSet<string> };
return { fieldName: 'exifInfo.city', items };
}
@GenerateSql({

View File

@@ -19,7 +19,7 @@ import { ReleaseNotification, ServerVersionResponseDto } from 'src/dtos/server.d
import { ImmichWorker, MetadataKey, QueueName } from 'src/enum';
import { ConfigRepository } from 'src/repositories/config.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { JobItem } from 'src/types';
import { JobItem, JobSource } from 'src/types';
import { handlePromiseError } from 'src/utils/misc';
type EmitHandlers = Partial<{ [T in EmitEvent]: Array<EventItem<T>> }>;
@@ -48,7 +48,7 @@ type EventMap = {
'config.validate': [{ newConfig: SystemConfig; oldConfig: SystemConfig }];
// album events
'album.update': [{ id: string; recipientIds: string[] }];
'album.update': [{ id: string; recipientId: string }];
'album.invite': [{ id: string; userId: string }];
// asset events
@@ -58,6 +58,7 @@ type EventMap = {
'asset.show': [{ assetId: string; userId: string }];
'asset.trash': [{ assetId: string; userId: string }];
'asset.delete': [{ assetId: string; userId: string }];
'asset.metadataExtracted': [{ assetId: string; userId: string; source?: JobSource }];
// asset bulk events
'assets.trash': [{ assetIds: string[]; userId: string }];

View File

@@ -9,7 +9,7 @@ import { JobName, JobStatus, MetadataKey, QueueCleanType, QueueName } from 'src/
import { ConfigRepository } from 'src/repositories/config.repository';
import { EventRepository } from 'src/repositories/event.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { IEntityJob, JobCounts, JobItem, JobOf, QueueStatus } from 'src/types';
import { JobCounts, JobItem, JobOf, QueueStatus } from 'src/types';
import { getKeyByValue, getMethodNames, ImmichStartupError } from 'src/utils/misc';
type JobMapItem = {
@@ -206,7 +206,10 @@ export class JobRepository {
private getJobOptions(item: JobItem): JobsOptions | null {
switch (item.name) {
case JobName.NOTIFY_ALBUM_UPDATE: {
return { jobId: item.data.id, delay: item.data?.delay };
return {
jobId: `${item.data.id}/${item.data.recipientId}`,
delay: item.data?.delay,
};
}
case JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE: {
return { jobId: item.data.id };
@@ -227,19 +230,12 @@ export class JobRepository {
return this.moduleRef.get<Queue>(getQueueToken(queue), { strict: false });
}
public async removeJob(jobId: string, name: JobName): Promise<IEntityJob | undefined> {
const existingJob = await this.getQueue(this.getQueueName(name)).getJob(jobId);
if (!existingJob) {
return;
}
try {
/** @deprecated */
// todo: remove this when asset notifications no longer need it.
public async removeJob(name: JobName, jobID: string): Promise<void> {
const existingJob = await this.getQueue(this.getQueueName(name)).getJob(jobID);
if (existingJob) {
await existingJob.remove();
} catch (error: any) {
if (error.message?.includes('Missing key for job')) {
return;
}
throw error;
}
return existingJob.data;
}
}
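
The composite jobId above gives every (album, recipient) pair its own notification job, and the queue deduplicates on that id. A hedged illustration, not project code, of the behaviour this relies on in BullMQ, where adding a job whose jobId is already queued or delayed does not create a second job; the delay value here is purely illustrative:

import { Queue } from 'bullmq';

async function notifyAlbumUpdate(queue: Queue, albumId: string, recipientId: string) {
  await queue.add(
    'notify-album-update',
    { id: albumId, recipientId },
    // at most one pending job per (album, recipient); repeated album updates collapse into it
    { jobId: `${albumId}/${recipientId}`, delay: 5 * 60 * 1000 },
  );
}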

View File

@@ -6,7 +6,7 @@ import { AssetFaces, DB, FaceSearch, Person } from 'src/db';
import { ChunkedArray, DummyValue, GenerateSql } from 'src/decorators';
import { AssetFileType, SourceType } from 'src/enum';
import { removeUndefinedKeys } from 'src/utils/database';
import { PaginationOptions } from 'src/utils/pagination';
import { paginationHelper, PaginationOptions } from 'src/utils/pagination';
export interface PersonSearchOptions {
minimumFaceCount: number;
@@ -200,11 +200,7 @@ export class PersonRepository {
.limit(pagination.take + 1)
.execute();
if (items.length > pagination.take) {
return { items: items.slice(0, -1), hasNextPage: true };
}
return { items, hasNextPage: false };
return paginationHelper(items, pagination.take);
}
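
paginationHelper replaces the inline hasNextPage bookkeeping removed in this hunk. Based on those removed lines, it presumably encapsulates the fetch-one-extra-row pattern; a hedged sketch:

function paginationHelper<T>(items: T[], take: number): { items: T[]; hasNextPage: boolean } {
  // callers query take + 1 rows; the extra row only signals that another page exists
  if (items.length > take) {
    return { items: items.slice(0, -1), hasNextPage: true };
  }
  return { items, hasNextPage: false };
}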
@GenerateSql()

View File

@@ -8,41 +8,10 @@ import { MapAsset } from 'src/dtos/asset-response.dto';
import { AssetStatus, AssetType } from 'src/enum';
import { ConfigRepository } from 'src/repositories/config.repository';
import { anyUuid, asUuid, searchAssetBuilder, vectorIndexQuery } from 'src/utils/database';
import { paginationHelper } from 'src/utils/pagination';
import { isValidInteger } from 'src/validation';
export interface SearchResult<T> {
/** total matches */
total: number;
/** collection size */
count: number;
/** current page */
page: number;
/** items for page */
items: T[];
/** score */
distances: number[];
facets: SearchFacet[];
}
export interface SearchFacet {
fieldName: string;
counts: Array<{
count: number;
value: string;
}>;
}
export type SearchExploreItemSet<T> = Array<{
value: string;
data: T;
}>;
export interface SearchExploreItem<T> {
fieldName: string;
items: SearchExploreItemSet<T>;
}
export interface SearchAssetIDOptions {
export interface SearchAssetIdOptions {
checksum?: Buffer;
deviceAssetId?: string;
id?: string;
@@ -54,7 +23,7 @@ export interface SearchUserIdOptions {
userIds?: string[];
}
export type SearchIdOptions = SearchAssetIDOptions & SearchUserIdOptions;
export type SearchIdOptions = SearchAssetIdOptions & SearchUserIdOptions;
export interface SearchStatusOptions {
isArchived?: boolean;
@@ -144,8 +113,6 @@ type BaseAssetSearchOptions = SearchDateOptions &
export type AssetSearchOptions = BaseAssetSearchOptions & SearchRelationOptions;
export type AssetSearchOneToOneRelationOptions = BaseAssetSearchOptions & SearchOneToOneRelationOptions;
export type AssetSearchBuilderOptions = Omit<AssetSearchOptions, 'orderDirection'>;
export type SmartSearchOptions = SearchDateOptions &
@@ -226,9 +193,8 @@ export class SearchRepository {
.limit(pagination.size + 1)
.offset((pagination.page - 1) * pagination.size)
.execute();
const hasNextPage = items.length > pagination.size;
items.splice(pagination.size);
return { items, hasNextPage };
return paginationHelper(items, pagination.size);
}
@GenerateSql({
@@ -283,9 +249,7 @@ export class SearchRepository {
.offset((pagination.page - 1) * pagination.size)
.execute();
const hasNextPage = items.length > pagination.size;
items.splice(pagination.size);
return { items, hasNextPage };
return paginationHelper(items, pagination.size);
}
@GenerateSql({

View File

@@ -606,7 +606,7 @@ describe(AlbumService.name, () => {
expect(mocks.album.addAssetIds).toHaveBeenCalledWith('album-123', ['asset-1', 'asset-2', 'asset-3']);
expect(mocks.event.emit).toHaveBeenCalledWith('album.update', {
id: 'album-123',
recipientIds: ['admin_id'],
recipientId: 'admin_id',
});
});

View File

@@ -170,8 +170,8 @@ export class AlbumService extends BaseService {
(userId) => userId !== auth.user.id,
);
if (allUsersExceptUs.length > 0) {
await this.eventRepository.emit('album.update', { id, recipientIds: allUsersExceptUs });
for (const recipientId of allUsersExceptUs) {
await this.eventRepository.emit('album.update', { id, recipientId });
}
}

View File

@@ -1,6 +1,6 @@
import { BadRequestException } from '@nestjs/common';
import { DateTime } from 'luxon';
import { MapAsset, mapAsset } from 'src/dtos/asset-response.dto';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { AssetJobName, AssetStatsResponseDto } from 'src/dtos/asset.dto';
import { AssetStatus, AssetType, JobName, JobStatus } from 'src/enum';
import { AssetStats } from 'src/repositories/asset.repository';
@@ -11,7 +11,6 @@ import { faceStub } from 'test/fixtures/face.stub';
import { userStub } from 'test/fixtures/user.stub';
import { factory } from 'test/small.factory';
import { makeStream, newTestService, ServiceMocks } from 'test/utils';
import { vitest } from 'vitest';
const stats: AssetStats = {
[AssetType.IMAGE]: 10,
@@ -44,62 +43,6 @@ describe(AssetService.name, () => {
mockGetById([assetStub.livePhotoStillAsset, assetStub.livePhotoMotionAsset]);
});
describe('getMemoryLane', () => {
beforeAll(() => {
vitest.useFakeTimers();
vitest.setSystemTime(new Date('2024-01-15'));
});
afterAll(() => {
vitest.useRealTimers();
});
it('should group the assets correctly', async () => {
const image1 = { ...assetStub.image, localDateTime: new Date(2023, 1, 15, 0, 0, 0) };
const image2 = { ...assetStub.image, localDateTime: new Date(2023, 1, 15, 1, 0, 0) };
const image3 = { ...assetStub.image, localDateTime: new Date(2015, 1, 15) };
const image4 = { ...assetStub.image, localDateTime: new Date(2009, 1, 15) };
mocks.partner.getAll.mockResolvedValue([]);
mocks.asset.getByDayOfYear.mockResolvedValue([
{
year: 2023,
assets: [image1, image2],
},
{
year: 2015,
assets: [image3],
},
{
year: 2009,
assets: [image4],
},
] as any);
await expect(sut.getMemoryLane(authStub.admin, { day: 15, month: 1 })).resolves.toEqual([
{ yearsAgo: 1, title: '1 year ago', assets: [mapAsset(image1), mapAsset(image2)] },
{ yearsAgo: 9, title: '9 years ago', assets: [mapAsset(image3)] },
{ yearsAgo: 15, title: '15 years ago', assets: [mapAsset(image4)] },
]);
expect(mocks.asset.getByDayOfYear.mock.calls).toEqual([[[authStub.admin.user.id], { day: 15, month: 1 }]]);
});
it('should get memories with partners with inTimeline enabled', async () => {
const partner = factory.partner();
const auth = factory.auth({ user: { id: partner.sharedWithId } });
mocks.partner.getAll.mockResolvedValue([partner]);
mocks.asset.getByDayOfYear.mockResolvedValue([]);
await sut.getMemoryLane(auth, { day: 15, month: 1 });
expect(mocks.asset.getByDayOfYear.mock.calls).toEqual([
[[auth.user.id, partner.sharedById], { day: 15, month: 1 }],
]);
});
});
describe('getStatistics', () => {
it('should get the statistics for a user, excluding archived assets', async () => {
mocks.asset.getStatistics.mockResolvedValue(stats);

View File

@@ -3,13 +3,7 @@ import _ from 'lodash';
import { DateTime, Duration } from 'luxon';
import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { OnJob } from 'src/decorators';
import {
AssetResponseDto,
MapAsset,
MemoryLaneResponseDto,
SanitizedAssetResponseDto,
mapAsset,
} from 'src/dtos/asset-response.dto';
import { AssetResponseDto, MapAsset, SanitizedAssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
import {
AssetBulkDeleteDto,
AssetBulkUpdateDto,
@@ -20,7 +14,6 @@ import {
mapStats,
} from 'src/dtos/asset.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { MemoryLaneDto } from 'src/dtos/search.dto';
import { AssetStatus, JobName, JobStatus, Permission, QueueName } from 'src/enum';
import { BaseService } from 'src/services/base.service';
import { ISidecarWriteJob, JobItem, JobOf } from 'src/types';
@@ -28,26 +21,6 @@ import { getAssetFiles, getMyPartnerIds, onAfterUnlink, onBeforeLink, onBeforeUn
@Injectable()
export class AssetService extends BaseService {
async getMemoryLane(auth: AuthDto, dto: MemoryLaneDto): Promise<MemoryLaneResponseDto[]> {
const partnerIds = await getMyPartnerIds({
userId: auth.user.id,
repository: this.partnerRepository,
timelineEnabled: true,
});
const userIds = [auth.user.id, ...partnerIds];
const groups = await this.assetRepository.getByDayOfYear(userIds, dto);
return groups.map(({ year, assets }) => {
const yearsAgo = DateTime.utc().year - year;
return {
yearsAgo,
// TODO move this to clients
title: `${yearsAgo} year${yearsAgo > 1 ? 's' : ''} ago`,
assets: assets.map((asset) => mapAsset(asset, { auth })),
};
});
}
async getStatistics(auth: AuthDto, dto: AssetStatsDto) {
const stats = await this.assetRepository.getStatistics(auth.user.id, dto);
return mapStats(stats);

View File

@@ -1,6 +1,4 @@
import { BadRequestException } from '@nestjs/common';
import { FileReportItemDto } from 'src/dtos/audit.dto';
import { AssetFileType, AssetPathType, JobStatus, PersonPathType, UserPathType } from 'src/enum';
import { JobStatus } from 'src/enum';
import { AuditService } from 'src/services/audit.service';
import { newTestService, ServiceMocks } from 'test/utils';
@@ -25,148 +23,4 @@ describe(AuditService.name, () => {
expect(mocks.audit.removeBefore).toHaveBeenCalledWith(expect.any(Date));
});
});
describe('getChecksums', () => {
it('should fail if the file is not in the immich path', async () => {
await expect(sut.getChecksums({ filenames: ['foo/bar'] })).rejects.toBeInstanceOf(BadRequestException);
expect(mocks.crypto.hashFile).not.toHaveBeenCalled();
});
it('should get checksum for valid file', async () => {
await expect(sut.getChecksums({ filenames: ['./upload/my-file.jpg'] })).resolves.toEqual([
{ filename: './upload/my-file.jpg', checksum: expect.any(String) },
]);
expect(mocks.crypto.hashFile).toHaveBeenCalledWith('./upload/my-file.jpg');
});
});
describe('fixItems', () => {
it('should fail if the file is not in the immich path', async () => {
await expect(
sut.fixItems([
{ entityId: 'my-id', pathType: AssetPathType.ORIGINAL, pathValue: 'foo/bar' } as FileReportItemDto,
]),
).rejects.toBeInstanceOf(BadRequestException);
expect(mocks.asset.update).not.toHaveBeenCalled();
expect(mocks.asset.upsertFile).not.toHaveBeenCalled();
expect(mocks.person.update).not.toHaveBeenCalled();
expect(mocks.user.update).not.toHaveBeenCalled();
});
it('should update encoded video path', async () => {
await sut.fixItems([
{
entityId: 'my-id',
pathType: AssetPathType.ENCODED_VIDEO,
pathValue: './upload/my-video.mp4',
} as FileReportItemDto,
]);
expect(mocks.asset.update).toHaveBeenCalledWith({ id: 'my-id', encodedVideoPath: './upload/my-video.mp4' });
expect(mocks.asset.upsertFile).not.toHaveBeenCalled();
expect(mocks.person.update).not.toHaveBeenCalled();
expect(mocks.user.update).not.toHaveBeenCalled();
});
it('should update preview path', async () => {
await sut.fixItems([
{
entityId: 'my-id',
pathType: AssetPathType.PREVIEW,
pathValue: './upload/my-preview.png',
} as FileReportItemDto,
]);
expect(mocks.asset.upsertFile).toHaveBeenCalledWith({
assetId: 'my-id',
type: AssetFileType.PREVIEW,
path: './upload/my-preview.png',
});
expect(mocks.asset.update).not.toHaveBeenCalled();
expect(mocks.person.update).not.toHaveBeenCalled();
expect(mocks.user.update).not.toHaveBeenCalled();
});
it('should update thumbnail path', async () => {
await sut.fixItems([
{
entityId: 'my-id',
pathType: AssetPathType.THUMBNAIL,
pathValue: './upload/my-thumbnail.webp',
} as FileReportItemDto,
]);
expect(mocks.asset.upsertFile).toHaveBeenCalledWith({
assetId: 'my-id',
type: AssetFileType.THUMBNAIL,
path: './upload/my-thumbnail.webp',
});
expect(mocks.asset.update).not.toHaveBeenCalled();
expect(mocks.person.update).not.toHaveBeenCalled();
expect(mocks.user.update).not.toHaveBeenCalled();
});
it('should update original path', async () => {
await sut.fixItems([
{
entityId: 'my-id',
pathType: AssetPathType.ORIGINAL,
pathValue: './upload/my-original.png',
} as FileReportItemDto,
]);
expect(mocks.asset.update).toHaveBeenCalledWith({ id: 'my-id', originalPath: './upload/my-original.png' });
expect(mocks.asset.upsertFile).not.toHaveBeenCalled();
expect(mocks.person.update).not.toHaveBeenCalled();
expect(mocks.user.update).not.toHaveBeenCalled();
});
it('should update sidecar path', async () => {
await sut.fixItems([
{
entityId: 'my-id',
pathType: AssetPathType.SIDECAR,
pathValue: './upload/my-sidecar.xmp',
} as FileReportItemDto,
]);
expect(mocks.asset.update).toHaveBeenCalledWith({ id: 'my-id', sidecarPath: './upload/my-sidecar.xmp' });
expect(mocks.asset.upsertFile).not.toHaveBeenCalled();
expect(mocks.person.update).not.toHaveBeenCalled();
expect(mocks.user.update).not.toHaveBeenCalled();
});
it('should update face path', async () => {
await sut.fixItems([
{
entityId: 'my-id',
pathType: PersonPathType.FACE,
pathValue: './upload/my-face.jpg',
} as FileReportItemDto,
]);
expect(mocks.person.update).toHaveBeenCalledWith({ id: 'my-id', thumbnailPath: './upload/my-face.jpg' });
expect(mocks.asset.update).not.toHaveBeenCalled();
expect(mocks.asset.upsertFile).not.toHaveBeenCalled();
expect(mocks.user.update).not.toHaveBeenCalled();
});
it('should update profile path', async () => {
await sut.fixItems([
{
entityId: 'my-id',
pathType: UserPathType.PROFILE,
pathValue: './upload/my-profile-pic.jpg',
} as FileReportItemDto,
]);
expect(mocks.user.update).toHaveBeenCalledWith('my-id', { profileImagePath: './upload/my-profile-pic.jpg' });
expect(mocks.asset.update).not.toHaveBeenCalled();
expect(mocks.asset.upsertFile).not.toHaveBeenCalled();
expect(mocks.person.update).not.toHaveBeenCalled();
});
});
});

View File

@@ -1,23 +1,9 @@
import { BadRequestException, Injectable } from '@nestjs/common';
import { Injectable } from '@nestjs/common';
import { DateTime } from 'luxon';
import { resolve } from 'node:path';
import { AUDIT_LOG_MAX_DURATION, JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { StorageCore } from 'src/cores/storage.core';
import { AUDIT_LOG_MAX_DURATION } from 'src/constants';
import { OnJob } from 'src/decorators';
import { FileChecksumDto, FileChecksumResponseDto, FileReportItemDto, PathEntityType } from 'src/dtos/audit.dto';
import {
AssetFileType,
AssetPathType,
JobName,
JobStatus,
PersonPathType,
QueueName,
StorageFolder,
UserPathType,
} from 'src/enum';
import { JobName, JobStatus, QueueName } from 'src/enum';
import { BaseService } from 'src/services/base.service';
import { getAssetFiles } from 'src/utils/asset.util';
import { usePagination } from 'src/utils/pagination';
@Injectable()
export class AuditService extends BaseService {
@@ -26,187 +12,4 @@ export class AuditService extends BaseService {
await this.auditRepository.removeBefore(DateTime.now().minus(AUDIT_LOG_MAX_DURATION).toJSDate());
return JobStatus.SUCCESS;
}
async getChecksums(dto: FileChecksumDto) {
const results: FileChecksumResponseDto[] = [];
for (const filename of dto.filenames) {
if (!StorageCore.isImmichPath(filename)) {
throw new BadRequestException(
`Could not get the checksum of ${filename} because the file isn't accessible by Immich`,
);
}
const checksum = await this.cryptoRepository.hashFile(filename);
results.push({ filename, checksum: checksum.toString('base64') });
}
return results;
}
async fixItems(items: FileReportItemDto[]) {
for (const { entityId: id, pathType, pathValue } of items) {
if (!StorageCore.isImmichPath(pathValue)) {
throw new BadRequestException(
`Could not fix item ${id} with path ${pathValue} because the file isn't accessible by Immich`,
);
}
switch (pathType) {
case AssetPathType.ENCODED_VIDEO: {
await this.assetRepository.update({ id, encodedVideoPath: pathValue });
break;
}
case AssetPathType.PREVIEW: {
await this.assetRepository.upsertFile({ assetId: id, type: AssetFileType.PREVIEW, path: pathValue });
break;
}
case AssetPathType.THUMBNAIL: {
await this.assetRepository.upsertFile({ assetId: id, type: AssetFileType.THUMBNAIL, path: pathValue });
break;
}
case AssetPathType.ORIGINAL: {
await this.assetRepository.update({ id, originalPath: pathValue });
break;
}
case AssetPathType.SIDECAR: {
await this.assetRepository.update({ id, sidecarPath: pathValue });
break;
}
case PersonPathType.FACE: {
await this.personRepository.update({ id, thumbnailPath: pathValue });
break;
}
case UserPathType.PROFILE: {
await this.userRepository.update(id, { profileImagePath: pathValue });
break;
}
}
}
}
private fullPath(filename: string) {
return resolve(filename);
}
async getFileReport() {
const hasFile = (items: Set<string>, filename: string) => items.has(filename) || items.has(this.fullPath(filename));
const crawl = async (folder: StorageFolder) =>
new Set(
await this.storageRepository.crawl({
includeHidden: true,
pathsToCrawl: [StorageCore.getBaseFolder(folder)],
}),
);
const uploadFiles = await crawl(StorageFolder.UPLOAD);
const libraryFiles = await crawl(StorageFolder.LIBRARY);
const thumbFiles = await crawl(StorageFolder.THUMBNAILS);
const videoFiles = await crawl(StorageFolder.ENCODED_VIDEO);
const profileFiles = await crawl(StorageFolder.PROFILE);
const allFiles = new Set<string>();
for (const list of [libraryFiles, thumbFiles, videoFiles, profileFiles, uploadFiles]) {
for (const item of list) {
allFiles.add(item);
}
}
const track = (filename: string | null | undefined) => {
if (!filename) {
return;
}
allFiles.delete(filename);
allFiles.delete(this.fullPath(filename));
};
this.logger.log(
`Found ${libraryFiles.size} original files, ${thumbFiles.size} thumbnails, ${videoFiles.size} encoded videos, ${profileFiles.size} profile files`,
);
const pagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (options) =>
this.assetRepository.getAll(options, { withDeleted: true, withArchived: true }),
);
let assetCount = 0;
const orphans: FileReportItemDto[] = [];
for await (const assets of pagination) {
assetCount += assets.length;
for (const { id, files, originalPath, encodedVideoPath, isExternal, checksum } of assets) {
const { fullsizeFile, previewFile, thumbnailFile } = getAssetFiles(files);
for (const file of [
originalPath,
fullsizeFile?.path,
previewFile?.path,
encodedVideoPath,
thumbnailFile?.path,
]) {
track(file);
}
const entity = { entityId: id, entityType: PathEntityType.ASSET, checksum: checksum.toString('base64') };
if (
originalPath &&
!hasFile(libraryFiles, originalPath) &&
!hasFile(uploadFiles, originalPath) &&
// Android motion assets
!hasFile(videoFiles, originalPath) &&
// ignore external library assets
!isExternal
) {
orphans.push({ ...entity, pathType: AssetPathType.ORIGINAL, pathValue: originalPath });
}
if (previewFile && !hasFile(thumbFiles, previewFile.path)) {
orphans.push({ ...entity, pathType: AssetPathType.PREVIEW, pathValue: previewFile.path });
}
if (thumbnailFile && !hasFile(thumbFiles, thumbnailFile.path)) {
orphans.push({ ...entity, pathType: AssetPathType.THUMBNAIL, pathValue: thumbnailFile.path });
}
if (encodedVideoPath && !hasFile(videoFiles, encodedVideoPath)) {
orphans.push({ ...entity, pathType: AssetPathType.ENCODED_VIDEO, pathValue: encodedVideoPath });
}
}
}
const users = await this.userRepository.getList();
for (const { id, profileImagePath } of users) {
track(profileImagePath);
const entity = { entityId: id, entityType: PathEntityType.USER };
if (profileImagePath && !hasFile(profileFiles, profileImagePath)) {
orphans.push({ ...entity, pathType: UserPathType.PROFILE, pathValue: profileImagePath });
}
}
let peopleCount = 0;
for await (const { id, thumbnailPath } of this.personRepository.getAll()) {
track(thumbnailPath);
const entity = { entityId: id, entityType: PathEntityType.PERSON };
if (thumbnailPath && !hasFile(thumbFiles, thumbnailPath)) {
orphans.push({ ...entity, pathType: PersonPathType.FACE, pathValue: thumbnailPath });
}
if (peopleCount === JOBS_ASSET_PAGINATION_SIZE) {
this.logger.log(`Found ${assetCount} assets, ${users.length} users, ${peopleCount} people`);
peopleCount = 0;
}
}
this.logger.log(`Found ${assetCount} assets, ${users.length} users, ${peopleCount} people`);
const extras: string[] = [];
for (const file of allFiles) {
extras.push(file);
}
// send as absolute paths
for (const orphan of orphans) {
orphan.pathValue = this.fullPath(orphan.pathValue);
}
return { orphans, extras };
}
}

View File

@@ -1,10 +1,9 @@
import { AssetFileType, AssetType, JobName, JobStatus } from 'src/enum';
import { WithoutProperty } from 'src/repositories/asset.repository';
import { DuplicateService } from 'src/services/duplicate.service';
import { SearchService } from 'src/services/search.service';
import { assetStub } from 'test/fixtures/asset.stub';
import { authStub } from 'test/fixtures/auth.stub';
import { newTestService, ServiceMocks } from 'test/utils';
import { makeStream, newTestService, ServiceMocks } from 'test/utils';
import { beforeEach, vitest } from 'vitest';
vitest.useFakeTimers();
@@ -113,14 +112,11 @@ describe(SearchService.name, () => {
});
it('should queue missing assets', async () => {
mocks.asset.getWithout.mockResolvedValue({
items: [assetStub.image],
hasNextPage: false,
});
mocks.assetJob.streamForSearchDuplicates.mockReturnValue(makeStream([assetStub.image]));
await sut.handleQueueSearchDuplicates({});
expect(mocks.asset.getWithout).toHaveBeenCalledWith({ skip: 0, take: 1000 }, WithoutProperty.DUPLICATE);
expect(mocks.assetJob.streamForSearchDuplicates).toHaveBeenCalledWith(undefined);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.DUPLICATE_DETECTION,
@@ -130,14 +126,11 @@ describe(SearchService.name, () => {
});
it('should queue all assets', async () => {
mocks.asset.getAll.mockResolvedValue({
items: [assetStub.image],
hasNextPage: false,
});
mocks.assetJob.streamForSearchDuplicates.mockReturnValue(makeStream([assetStub.image]));
await sut.handleQueueSearchDuplicates({ force: true });
expect(mocks.asset.getAll).toHaveBeenCalled();
expect(mocks.assetJob.streamForSearchDuplicates).toHaveBeenCalledWith(true);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.DUPLICATE_DETECTION,

View File

@@ -5,13 +5,11 @@ import { mapAsset } from 'src/dtos/asset-response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { DuplicateResponseDto } from 'src/dtos/duplicate.dto';
import { AssetFileType, JobName, JobStatus, QueueName } from 'src/enum';
import { WithoutProperty } from 'src/repositories/asset.repository';
import { AssetDuplicateResult } from 'src/repositories/search.repository';
import { BaseService } from 'src/services/base.service';
import { JobOf } from 'src/types';
import { JobItem, JobOf } from 'src/types';
import { getAssetFile } from 'src/utils/asset.util';
import { isDuplicateDetectionEnabled } from 'src/utils/misc';
import { usePagination } from 'src/utils/pagination';
@Injectable()
export class DuplicateService extends BaseService {
@@ -30,18 +28,22 @@ export class DuplicateService extends BaseService {
return JobStatus.SKIPPED;
}
const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => {
return force
? this.assetRepository.getAll(pagination, { isVisible: true })
: this.assetRepository.getWithout(pagination, WithoutProperty.DUPLICATE);
});
let jobs: JobItem[] = [];
const queueAll = async () => {
await this.jobRepository.queueAll(jobs);
jobs = [];
};
for await (const assets of assetPagination) {
await this.jobRepository.queueAll(
assets.map((asset) => ({ name: JobName.DUPLICATE_DETECTION, data: { id: asset.id } })),
);
const assets = this.assetJobRepository.streamForSearchDuplicates(force);
for await (const asset of assets) {
jobs.push({ name: JobName.DUPLICATE_DETECTION, data: { id: asset.id } });
if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
await queueAll();
}
}
await queueAll();
return JobStatus.SUCCESS;
}
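
This hunk, like the MediaService changes further down, swaps full-table pagination for streaming with chunked queueing: ids are read row by row and flushed to the job queue whenever the buffer reaches JOBS_ASSET_PAGINATION_SIZE. A hedged, generic version of that batching pattern, not taken from the codebase:

async function* inBatches<T>(source: AsyncIterable<T>, size: number): AsyncGenerator<T[]> {
  let batch: T[] = [];
  for await (const item of source) {
    batch.push(item);
    if (batch.length >= size) {
      // flush a full chunk and start accumulating the next one
      yield batch;
      batch = [];
    }
  }
  if (batch.length > 0) {
    yield batch; // flush the final partial chunk
  }
}

With a helper like this, the loop above could be written as a single for await over inBatches(assets, JOBS_ASSET_PAGINATION_SIZE) that queues each chunk directly.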

View File

@@ -239,10 +239,6 @@ describe(JobService.name, () => {
item: { name: JobName.SIDECAR_DISCOVERY, data: { id: 'asset-1' } },
jobs: [JobName.METADATA_EXTRACTION],
},
{
item: { name: JobName.METADATA_EXTRACTION, data: { id: 'asset-1' } },
jobs: [JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE],
},
{
item: { name: JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE, data: { id: 'asset-1', source: 'upload' } },
jobs: [JobName.GENERATE_THUMBNAILS],

View File

@@ -264,17 +264,6 @@ export class JobService extends BaseService {
break;
}
case JobName.METADATA_EXTRACTION: {
if (item.data.source === 'sidecar-write') {
const [asset] = await this.assetRepository.getByIdsWithAllRelationsButStacks([item.data.id]);
if (asset) {
this.eventRepository.clientSend('on_asset_update', asset.ownerId, mapAsset(asset));
}
}
await this.jobRepository.queue({ name: JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE, data: item.data });
break;
}
case JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE: {
if (item.data.source === 'upload' || item.data.source === 'copy') {
await this.jobRepository.queue({ name: JobName.GENERATE_THUMBNAILS, data: item.data });

View File

@@ -273,7 +273,6 @@ describe(LibraryService.name, () => {
mocks.library.get.mockResolvedValue(library);
mocks.storage.walk.mockImplementation(async function* generator() {});
mocks.asset.getAll.mockResolvedValue({ items: [assetStub.external], hasNextPage: false });
mocks.asset.getLibraryAssetCount.mockResolvedValue(1);
mocks.asset.detectOfflineExternalAssets.mockResolvedValue({ numUpdatedRows: BigInt(1) });
@@ -292,7 +291,6 @@ describe(LibraryService.name, () => {
mocks.library.get.mockResolvedValue(library);
mocks.storage.walk.mockImplementation(async function* generator() {});
mocks.asset.getAll.mockResolvedValue({ items: [assetStub.external], hasNextPage: false });
mocks.asset.getLibraryAssetCount.mockResolvedValue(0);
mocks.asset.detectOfflineExternalAssets.mockResolvedValue({ numUpdatedRows: BigInt(1) });

View File

@@ -38,10 +38,6 @@ describe(MediaService.name, () => {
describe('handleQueueGenerateThumbnails', () => {
it('should queue all assets', async () => {
mocks.assetJob.streamForThumbnailJob.mockReturnValue(makeStream([assetStub.image]));
mocks.asset.getAll.mockResolvedValue({
items: [assetStub.image],
hasNextPage: false,
});
mocks.person.getAll.mockReturnValue(makeStream([personStub.newThumbnail]));
mocks.person.getFacesByIds.mockResolvedValue([faceStub.face1]);
@@ -67,10 +63,6 @@ describe(MediaService.name, () => {
it('should queue trashed assets when force is true', async () => {
mocks.assetJob.streamForThumbnailJob.mockReturnValue(makeStream([assetStub.archived]));
mocks.asset.getAll.mockResolvedValue({
items: [assetStub.trashed],
hasNextPage: false,
});
mocks.person.getAll.mockReturnValue(makeStream());
await sut.handleQueueGenerateThumbnails({ force: true });
@@ -171,7 +163,7 @@ describe(MediaService.name, () => {
describe('handleQueueMigration', () => {
it('should remove empty directories and queue jobs', async () => {
mocks.asset.getAll.mockResolvedValue({ hasNextPage: false, items: [assetStub.image] });
mocks.assetJob.streamForMigrationJob.mockReturnValue(makeStream([assetStub.image]));
mocks.job.getJobCounts.mockResolvedValue({ active: 1, waiting: 0 } as JobCounts);
mocks.person.getAll.mockReturnValue(makeStream([personStub.withName]));

View File

@@ -36,7 +36,6 @@ import {
import { getAssetFiles } from 'src/utils/asset.util';
import { BaseConfig, ThumbnailConfig } from 'src/utils/media';
import { mimeTypes } from 'src/utils/mime-types';
import { usePagination } from 'src/utils/pagination';
@Injectable()
export class MediaService extends BaseService {
@@ -50,18 +49,26 @@ export class MediaService extends BaseService {
@OnJob({ name: JobName.QUEUE_GENERATE_THUMBNAILS, queue: QueueName.THUMBNAIL_GENERATION })
async handleQueueGenerateThumbnails({ force }: JobOf<JobName.QUEUE_GENERATE_THUMBNAILS>): Promise<JobStatus> {
const thumbJobs: JobItem[] = [];
let jobs: JobItem[] = [];
const queueAll = async () => {
await this.jobRepository.queueAll(jobs);
jobs = [];
};
for await (const asset of this.assetJobRepository.streamForThumbnailJob(!!force)) {
const { previewFile, thumbnailFile } = getAssetFiles(asset.files);
if (!previewFile || !thumbnailFile || !asset.thumbhash || force) {
thumbJobs.push({ name: JobName.GENERATE_THUMBNAILS, data: { id: asset.id } });
continue;
jobs.push({ name: JobName.GENERATE_THUMBNAILS, data: { id: asset.id } });
}
if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
await queueAll();
}
}
await this.jobRepository.queueAll(thumbJobs);
const jobs: JobItem[] = [];
await queueAll();
const people = this.personRepository.getAll(force ? undefined : { thumbnailPath: '' });
@@ -76,32 +83,36 @@ export class MediaService extends BaseService {
}
jobs.push({ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id: person.id } });
if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
await queueAll();
}
}
await this.jobRepository.queueAll(jobs);
await queueAll();
return JobStatus.SUCCESS;
}
@OnJob({ name: JobName.QUEUE_MIGRATION, queue: QueueName.MIGRATION })
async handleQueueMigration(): Promise<JobStatus> {
const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) =>
this.assetRepository.getAll(pagination),
);
const { active, waiting } = await this.jobRepository.getJobCounts(QueueName.MIGRATION);
if (active === 1 && waiting === 0) {
await this.storageCore.removeEmptyDirs(StorageFolder.THUMBNAILS);
await this.storageCore.removeEmptyDirs(StorageFolder.ENCODED_VIDEO);
}
for await (const assets of assetPagination) {
await this.jobRepository.queueAll(
assets.map((asset) => ({ name: JobName.MIGRATE_ASSET, data: { id: asset.id } })),
);
let jobs: JobItem[] = [];
const assets = this.assetJobRepository.streamForMigrationJob();
for await (const asset of assets) {
jobs.push({ name: JobName.MIGRATE_ASSET, data: { id: asset.id } });
if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
await this.jobRepository.queueAll(jobs);
jobs = [];
}
}
let jobs: { name: JobName.MIGRATE_PERSON; data: { id: string } }[] = [];
await this.jobRepository.queueAll(jobs);
jobs = [];
for await (const person of this.personRepository.getAll()) {
jobs.push({ name: JobName.MIGRATE_PERSON, data: { id: person.id } });
@@ -255,7 +266,9 @@ export class MediaService extends BaseService {
const { info, data, colorspace } = await this.decodeImage(
extracted ? extracted.buffer : asset.originalPath,
asset.exifInfo,
// only specify orientation to extracted images which don't have EXIF orientation data
// or it can double rotate the image
extracted ? asset.exifInfo : { ...asset.exifInfo, orientation: null },
convertFullsize ? undefined : image.preview.size,
);
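
The MediaService hunks above drop `usePagination` in favor of streaming assets from the repository and flushing jobs in fixed-size batches. A minimal sketch of that pattern, assuming placeholder names — `streamAssets` and `BATCH_SIZE` stand in for the real repository stream and `JOBS_ASSET_PAGINATION_SIZE`:

```typescript
// Sketch of the stream-and-batch queueing used above; streamAssets and BATCH_SIZE
// are placeholders, not the actual repository API.
type JobItem = { name: string; data: { id: string } };

const BATCH_SIZE = 1000;

async function queueInBatches(
  streamAssets: () => AsyncIterable<{ id: string }>,
  queueAll: (jobs: JobItem[]) => Promise<void>,
): Promise<void> {
  let jobs: JobItem[] = [];
  for await (const asset of streamAssets()) {
    jobs.push({ name: 'GENERATE_THUMBNAILS', data: { id: asset.id } });
    if (jobs.length >= BATCH_SIZE) {
      await queueAll(jobs); // flush a full batch instead of buffering the whole table
      jobs = [];
    }
  }
  await queueAll(jobs); // flush whatever is left
}
```

The same flush-on-threshold shape recurs in the sidecar, migration, and face-detection queue handlers later in this diff.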

View File

@@ -5,7 +5,6 @@ import { constants } from 'node:fs/promises';
import { defaults } from 'src/config';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { AssetType, ExifOrientation, ImmichWorker, JobName, JobStatus, SourceType } from 'src/enum';
import { WithoutProperty } from 'src/repositories/asset.repository';
import { ImmichTags } from 'src/repositories/metadata.repository';
import { MetadataService } from 'src/services/metadata.service';
import { assetStub } from 'test/fixtures/asset.stub';
@@ -144,7 +143,8 @@ describe(MetadataService.name, () => {
it('should handle an asset that could not be found', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(void 0);
await expect(sut.handleMetadataExtraction({ id: assetStub.image.id })).resolves.toBe(JobStatus.FAILED);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id);
expect(mocks.asset.upsertExif).not.toHaveBeenCalled();
@@ -527,7 +527,7 @@ describe(MetadataService.name, () => {
ContainerDirectory: [{ Foo: 100 }],
});
await expect(sut.handleMetadataExtraction({ id: assetStub.image.id })).resolves.toBe(JobStatus.SUCCESS);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
});
it('should extract the correct video orientation', async () => {
@@ -1202,7 +1202,7 @@ describe(MetadataService.name, () => {
it('should handle livePhotoCID not set', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
await expect(sut.handleMetadataExtraction({ id: assetStub.image.id })).resolves.toBe(JobStatus.SUCCESS);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id);
expect(mocks.asset.findLivePhotoMatch).not.toHaveBeenCalled();
@@ -1215,9 +1215,7 @@ describe(MetadataService.name, () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.livePhotoMotionAsset);
mockReadTags({ ContentIdentifier: 'CID' });
await expect(sut.handleMetadataExtraction({ id: assetStub.livePhotoMotionAsset.id })).resolves.toBe(
JobStatus.SUCCESS,
);
await sut.handleMetadataExtraction({ id: assetStub.livePhotoMotionAsset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.livePhotoMotionAsset.id);
expect(mocks.asset.findLivePhotoMatch).toHaveBeenCalledWith({
@@ -1236,9 +1234,7 @@ describe(MetadataService.name, () => {
mocks.asset.findLivePhotoMatch.mockResolvedValue(assetStub.livePhotoMotionAsset);
mockReadTags({ ContentIdentifier: 'CID' });
await expect(sut.handleMetadataExtraction({ id: assetStub.livePhotoStillAsset.id })).resolves.toBe(
JobStatus.SUCCESS,
);
await sut.handleMetadataExtraction({ id: assetStub.livePhotoStillAsset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.livePhotoStillAsset.id);
expect(mocks.asset.findLivePhotoMatch).toHaveBeenCalledWith({
@@ -1262,9 +1258,7 @@ describe(MetadataService.name, () => {
mocks.asset.findLivePhotoMatch.mockResolvedValue(assetStub.livePhotoMotionAsset);
mockReadTags({ ContentIdentifier: 'CID' });
await expect(sut.handleMetadataExtraction({ id: assetStub.livePhotoStillAsset.id })).resolves.toBe(
JobStatus.SUCCESS,
);
await sut.handleMetadataExtraction({ id: assetStub.livePhotoStillAsset.id });
expect(mocks.event.emit).toHaveBeenCalledWith('asset.hide', {
userId: assetStub.livePhotoMotionAsset.ownerId,
@@ -1280,10 +1274,12 @@ describe(MetadataService.name, () => {
mocks.asset.findLivePhotoMatch.mockResolvedValue(assetStub.livePhotoMotionAsset);
mockReadTags({ ContentIdentifier: 'CID' });
await expect(sut.handleMetadataExtraction({ id: assetStub.livePhotoStillAsset.id })).resolves.toBe(
JobStatus.SUCCESS,
);
await sut.handleMetadataExtraction({ id: assetStub.livePhotoStillAsset.id });
expect(mocks.event.emit).toHaveBeenCalledWith('asset.metadataExtracted', {
assetId: assetStub.livePhotoStillAsset.id,
userId: assetStub.livePhotoStillAsset.ownerId,
});
expect(mocks.asset.findLivePhotoMatch).toHaveBeenCalledWith({
ownerId: 'user-id',
otherAssetId: 'live-photo-still-asset',
@@ -1346,12 +1342,11 @@ describe(MetadataService.name, () => {
describe('handleQueueSidecar', () => {
it('should queue assets with sidecar files', async () => {
mocks.asset.getAll.mockResolvedValue({ items: [assetStub.sidecar], hasNextPage: false });
mocks.assetJob.streamForSidecar.mockReturnValue(makeStream([assetStub.image]));
await sut.handleQueueSidecar({ force: true });
expect(mocks.assetJob.streamForSidecar).toHaveBeenCalledWith(true);
expect(mocks.asset.getAll).toHaveBeenCalledWith({ take: 1000, skip: 0 });
expect(mocks.asset.getWithout).not.toHaveBeenCalled();
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.SIDECAR_SYNC,
@@ -1361,12 +1356,11 @@ describe(MetadataService.name, () => {
});
it('should queue assets without sidecar files', async () => {
mocks.asset.getWithout.mockResolvedValue({ items: [assetStub.image], hasNextPage: false });
mocks.assetJob.streamForSidecar.mockReturnValue(makeStream([assetStub.image]));
await sut.handleQueueSidecar({ force: false });
expect(mocks.asset.getWithout).toHaveBeenCalledWith({ take: 1000, skip: 0 }, WithoutProperty.SIDECAR);
expect(mocks.asset.getAll).not.toHaveBeenCalled();
expect(mocks.assetJob.streamForSidecar).toHaveBeenCalledWith(false);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.SIDECAR_DISCOVERY,

View File

@@ -22,14 +22,12 @@ import {
QueueName,
SourceType,
} from 'src/enum';
import { WithoutProperty } from 'src/repositories/asset.repository';
import { ArgOf } from 'src/repositories/event.repository';
import { ReverseGeocodeResult } from 'src/repositories/map.repository';
import { ImmichTags } from 'src/repositories/metadata.repository';
import { BaseService } from 'src/services/base.service';
import { JobOf } from 'src/types';
import { JobItem, JobOf } from 'src/types';
import { isFaceImportEnabled } from 'src/utils/misc';
import { usePagination } from 'src/utils/pagination';
import { upsertTags } from 'src/utils/tag';
/** look for a date from these tags (in order) */
@@ -184,14 +182,14 @@ export class MetadataService extends BaseService {
}
@OnJob({ name: JobName.METADATA_EXTRACTION, queue: QueueName.METADATA_EXTRACTION })
async handleMetadataExtraction(data: JobOf<JobName.METADATA_EXTRACTION>): Promise<JobStatus> {
async handleMetadataExtraction(data: JobOf<JobName.METADATA_EXTRACTION>) {
const [{ metadata, reverseGeocoding }, asset] = await Promise.all([
this.getConfig({ withCache: true }),
this.assetJobRepository.getForMetadataExtraction(data.id),
]);
if (!asset) {
return JobStatus.FAILED;
return;
}
const [exifTags, stats] = await Promise.all([
@@ -285,27 +283,31 @@ export class MetadataService extends BaseService {
await this.assetRepository.upsertJobStatus({ assetId: asset.id, metadataExtractedAt: new Date() });
return JobStatus.SUCCESS;
await this.eventRepository.emit('asset.metadataExtracted', {
assetId: asset.id,
userId: asset.ownerId,
source: data.source,
});
}
@OnJob({ name: JobName.QUEUE_SIDECAR, queue: QueueName.SIDECAR })
async handleQueueSidecar(job: JobOf<JobName.QUEUE_SIDECAR>): Promise<JobStatus> {
const { force } = job;
const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => {
return force
? this.assetRepository.getAll(pagination)
: this.assetRepository.getWithout(pagination, WithoutProperty.SIDECAR);
});
async handleQueueSidecar({ force }: JobOf<JobName.QUEUE_SIDECAR>): Promise<JobStatus> {
let jobs: JobItem[] = [];
const queueAll = async () => {
await this.jobRepository.queueAll(jobs);
jobs = [];
};
for await (const assets of assetPagination) {
await this.jobRepository.queueAll(
assets.map((asset) => ({
name: force ? JobName.SIDECAR_SYNC : JobName.SIDECAR_DISCOVERY,
data: { id: asset.id },
})),
);
const assets = this.assetJobRepository.streamForSidecar(force);
for await (const asset of assets) {
jobs.push({ name: force ? JobName.SIDECAR_SYNC : JobName.SIDECAR_DISCOVERY, data: { id: asset.id } });
if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
await queueAll();
}
}
await queueAll();
return JobStatus.SUCCESS;
}
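
`handleMetadataExtraction` no longer returns a `JobStatus`; it emits an `asset.metadataExtracted` event instead. A sketch of that payload as it appears above (the listener summary in the comments is condensed from the other hunks in this diff, not the full handlers):

```typescript
// Shape of the 'asset.metadataExtracted' event emitted above; JobSource matches
// the type added in src/types.ts later in this diff.
type JobSource = 'upload' | 'sidecar-write' | 'copy';

interface AssetMetadataExtractedEvent {
  assetId: string;
  userId: string;
  source?: JobSource;
}

// Example payload. Listeners shown elsewhere in this diff:
// - StorageTemplateService queues STORAGE_TEMPLATE_MIGRATION_SINGLE for every event,
// - NotificationService sends an on_asset_update websocket message when source === 'sidecar-write',
// - the STORAGE_TEMPLATE_MIGRATION_SINGLE completion handler at the top of this section
//   queues GENERATE_THUMBNAILS when the source is 'upload' or 'copy'.
const example: AssetMetadataExtractedEvent = {
  assetId: 'asset-1',
  userId: 'user-1',
  source: 'sidecar-write',
};
```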

View File

@@ -154,10 +154,10 @@ describe(NotificationService.name, () => {
describe('onAlbumUpdateEvent', () => {
it('should queue notify album update event', async () => {
await sut.onAlbumUpdate({ id: 'album', recipientIds: ['42'] });
await sut.onAlbumUpdate({ id: 'album', recipientId: '42' });
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.NOTIFY_ALBUM_UPDATE,
data: { id: 'album', recipientIds: ['42'], delay: 300_000 },
data: { id: 'album', recipientId: '42', delay: 300_000 },
});
});
});
@@ -414,14 +414,14 @@ describe(NotificationService.name, () => {
describe('handleAlbumUpdate', () => {
it('should skip if album could not be found', async () => {
await expect(sut.handleAlbumUpdate({ id: '', recipientIds: ['1'] })).resolves.toBe(JobStatus.SKIPPED);
await expect(sut.handleAlbumUpdate({ id: '', recipientId: '1' })).resolves.toBe(JobStatus.SKIPPED);
expect(mocks.user.get).not.toHaveBeenCalled();
});
it('should skip if owner could not be found', async () => {
mocks.album.getById.mockResolvedValue(albumStub.emptyWithValidThumbnail);
await expect(sut.handleAlbumUpdate({ id: '', recipientIds: ['1'] })).resolves.toBe(JobStatus.SKIPPED);
await expect(sut.handleAlbumUpdate({ id: '', recipientId: '1' })).resolves.toBe(JobStatus.SKIPPED);
expect(mocks.systemMetadata.get).not.toHaveBeenCalled();
});
@@ -434,7 +434,7 @@ describe(NotificationService.name, () => {
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([]);
await sut.handleAlbumUpdate({ id: '', recipientIds: [userStub.user1.id] });
await sut.handleAlbumUpdate({ id: '', recipientId: userStub.user1.id });
expect(mocks.user.get).toHaveBeenCalledWith(userStub.user1.id, { withDeleted: false });
expect(mocks.email.renderEmail).not.toHaveBeenCalled();
});
@@ -456,7 +456,7 @@ describe(NotificationService.name, () => {
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([]);
await sut.handleAlbumUpdate({ id: '', recipientIds: [userStub.user1.id] });
await sut.handleAlbumUpdate({ id: '', recipientId: userStub.user1.id });
expect(mocks.user.get).toHaveBeenCalledWith(userStub.user1.id, { withDeleted: false });
expect(mocks.email.renderEmail).not.toHaveBeenCalled();
});
@@ -478,7 +478,7 @@ describe(NotificationService.name, () => {
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([]);
await sut.handleAlbumUpdate({ id: '', recipientIds: [userStub.user1.id] });
await sut.handleAlbumUpdate({ id: '', recipientId: userStub.user1.id });
expect(mocks.user.get).toHaveBeenCalledWith(userStub.user1.id, { withDeleted: false });
expect(mocks.email.renderEmail).not.toHaveBeenCalled();
});
@@ -492,21 +492,21 @@ describe(NotificationService.name, () => {
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([]);
await sut.handleAlbumUpdate({ id: '', recipientIds: [userStub.user1.id] });
await sut.handleAlbumUpdate({ id: '', recipientId: userStub.user1.id });
expect(mocks.user.get).toHaveBeenCalledWith(userStub.user1.id, { withDeleted: false });
expect(mocks.email.renderEmail).toHaveBeenCalled();
expect(mocks.job.queue).toHaveBeenCalled();
});
it('should add new recipients for new images if job is already queued', async () => {
mocks.job.removeJob.mockResolvedValue({ id: '1', recipientIds: ['2', '3', '4'] } as INotifyAlbumUpdateJob);
await sut.onAlbumUpdate({ id: '1', recipientIds: ['1', '2', '3'] } as INotifyAlbumUpdateJob);
await sut.onAlbumUpdate({ id: '1', recipientId: '2' } as INotifyAlbumUpdateJob);
expect(mocks.job.removeJob).toHaveBeenCalledWith(JobName.NOTIFY_ALBUM_UPDATE, '1/2');
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.NOTIFY_ALBUM_UPDATE,
data: {
id: '1',
delay: 300_000,
recipientIds: ['1', '2', '3', '4'],
recipientId: '2',
},
});
});

View File

@@ -1,5 +1,6 @@
import { BadRequestException, Injectable } from '@nestjs/common';
import { OnEvent, OnJob } from 'src/decorators';
import { mapAsset } from 'src/dtos/asset-response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import {
mapNotification,
@@ -22,7 +23,7 @@ import {
import { EmailTemplate } from 'src/repositories/email.repository';
import { ArgOf } from 'src/repositories/event.repository';
import { BaseService } from 'src/services/base.service';
import { EmailImageAttachment, IEntityJob, INotifyAlbumUpdateJob, JobItem, JobOf } from 'src/types';
import { EmailImageAttachment, JobOf } from 'src/types';
import { getFilenameExtension } from 'src/utils/file';
import { getExternalDomain } from 'src/utils/misc';
import { isEqualObject } from 'src/utils/object';
@@ -152,6 +153,18 @@ export class NotificationService extends BaseService {
this.eventRepository.clientSend('on_asset_trash', userId, assetIds);
}
@OnEvent({ name: 'asset.metadataExtracted' })
async onAssetMetadataExtracted({ assetId, userId, source }: ArgOf<'asset.metadataExtracted'>) {
if (source !== 'sidecar-write') {
return;
}
const [asset] = await this.assetRepository.getByIdsWithAllRelationsButStacks([assetId]);
if (asset) {
this.eventRepository.clientSend('on_asset_update', userId, mapAsset(asset));
}
}
@OnEvent({ name: 'assets.restore' })
onAssetsRestore({ assetIds, userId }: ArgOf<'assets.restore'>) {
this.eventRepository.clientSend('on_asset_restore', userId, assetIds);
@@ -185,30 +198,12 @@ export class NotificationService extends BaseService {
}
@OnEvent({ name: 'album.update' })
async onAlbumUpdate({ id, recipientIds }: ArgOf<'album.update'>) {
// if recipientIds is empty, album likely only has one user part of it, don't queue notification if so
if (recipientIds.length === 0) {
return;
}
const job: JobItem = {
async onAlbumUpdate({ id, recipientId }: ArgOf<'album.update'>) {
await this.jobRepository.removeJob(JobName.NOTIFY_ALBUM_UPDATE, `${id}/${recipientId}`);
await this.jobRepository.queue({
name: JobName.NOTIFY_ALBUM_UPDATE,
data: { id, recipientIds, delay: NotificationService.albumUpdateEmailDelayMs },
};
const previousJobData = await this.jobRepository.removeJob(id, JobName.NOTIFY_ALBUM_UPDATE);
if (previousJobData && this.isAlbumUpdateJob(previousJobData)) {
for (const id of previousJobData.recipientIds) {
if (!recipientIds.includes(id)) {
recipientIds.push(id);
}
}
}
await this.jobRepository.queue(job);
}
private isAlbumUpdateJob(job: IEntityJob): job is INotifyAlbumUpdateJob {
return 'recipientIds' in job;
data: { id, recipientId, delay: NotificationService.albumUpdateEmailDelayMs },
});
}
@OnEvent({ name: 'album.invite' })
@@ -399,7 +394,7 @@ export class NotificationService extends BaseService {
}
@OnJob({ name: JobName.NOTIFY_ALBUM_UPDATE, queue: QueueName.NOTIFICATION })
async handleAlbumUpdate({ id, recipientIds }: JobOf<JobName.NOTIFY_ALBUM_UPDATE>) {
async handleAlbumUpdate({ id, recipientId }: JobOf<JobName.NOTIFY_ALBUM_UPDATE>) {
const album = await this.albumRepository.getById(id, { withAssets: false });
if (!album) {
@@ -411,49 +406,44 @@ export class NotificationService extends BaseService {
return JobStatus.SKIPPED;
}
const recipients = [...album.albumUsers.map((user) => user.user), owner].filter((user) =>
recipientIds.includes(user.id),
);
const attachment = await this.getAlbumThumbnailAttachment(album);
const { server, templates } = await this.getConfig({ withCache: false });
for (const recipient of recipients) {
const user = await this.userRepository.get(recipient.id, { withDeleted: false });
if (!user) {
continue;
}
const { emailNotifications } = getPreferences(user.metadata);
if (!emailNotifications.enabled || !emailNotifications.albumUpdate) {
continue;
}
const { html, text } = await this.emailRepository.renderEmail({
template: EmailTemplate.ALBUM_UPDATE,
data: {
baseUrl: getExternalDomain(server),
albumId: album.id,
albumName: album.albumName,
recipientName: recipient.name,
cid: attachment ? attachment.cid : undefined,
},
customTemplate: templates.email.albumUpdateTemplate,
});
await this.jobRepository.queue({
name: JobName.SEND_EMAIL,
data: {
to: recipient.email,
subject: `New media has been added to an album - ${album.albumName}`,
html,
text,
imageAttachments: attachment ? [attachment] : undefined,
},
});
const user = await this.userRepository.get(recipientId, { withDeleted: false });
if (!user) {
return JobStatus.SKIPPED;
}
const { emailNotifications } = getPreferences(user.metadata);
if (!emailNotifications.enabled || !emailNotifications.albumUpdate) {
return JobStatus.SKIPPED;
}
const { html, text } = await this.emailRepository.renderEmail({
template: EmailTemplate.ALBUM_UPDATE,
data: {
baseUrl: getExternalDomain(server),
albumId: album.id,
albumName: album.albumName,
recipientName: user.name,
cid: attachment ? attachment.cid : undefined,
},
customTemplate: templates.email.albumUpdateTemplate,
});
await this.jobRepository.queue({
name: JobName.SEND_EMAIL,
data: {
to: user.email,
subject: `New media has been added to an album - ${album.albumName}`,
html,
text,
imageAttachments: attachment ? [attachment] : undefined,
},
});
return JobStatus.SUCCESS;
}
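
`handleAlbumUpdate` now handles a single recipient, and `onAlbumUpdate` de-duplicates by removing any pending job keyed per album and recipient before queueing a fresh delayed one. A hedged sketch of that keying — the `${id}/${recipientId}` key and the 300 000 ms delay are taken from the hunks above, while `jobRepository` here is a stand-in exposing only the two calls used:

```typescript
// Sketch of the per-recipient notify-album-update queueing shown above.
const ALBUM_UPDATE_DELAY_MS = 300_000;

interface NotifyAlbumUpdateJob {
  id: string;
  recipientId: string;
  delay: number;
}

async function onAlbumUpdate(
  { id, recipientId }: { id: string; recipientId: string },
  jobRepository: {
    removeJob: (name: string, key: string) => Promise<unknown>;
    queue: (job: { name: string; data: NotifyAlbumUpdateJob }) => Promise<void>;
  },
) {
  // a repeat update within the delay window replaces the pending job for this recipient
  await jobRepository.removeJob('NOTIFY_ALBUM_UPDATE', `${id}/${recipientId}`);
  await jobRepository.queue({
    name: 'NOTIFY_ALBUM_UPDATE',
    data: { id, recipientId, delay: ALBUM_UPDATE_DELAY_MS },
  });
}
```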

View File

@@ -2,7 +2,6 @@ import { BadRequestException, NotFoundException } from '@nestjs/common';
import { BulkIdErrorReason } from 'src/dtos/asset-ids.response.dto';
import { mapFaces, mapPerson, PersonResponseDto } from 'src/dtos/person.dto';
import { CacheControl, Colorspace, ImageFormat, JobName, JobStatus, SourceType, SystemMetadataKey } from 'src/enum';
import { WithoutProperty } from 'src/repositories/asset.repository';
import { DetectedFaces } from 'src/repositories/machine-learning.repository';
import { FaceSearchResult } from 'src/repositories/search.repository';
import { PersonService } from 'src/services/person.service';
@@ -455,14 +454,11 @@ describe(PersonService.name, () => {
});
it('should queue missing assets', async () => {
mocks.asset.getWithout.mockResolvedValue({
items: [assetStub.image],
hasNextPage: false,
});
mocks.assetJob.streamForDetectFacesJob.mockReturnValue(makeStream([assetStub.image]));
await sut.handleQueueDetectFaces({ force: false });
expect(mocks.asset.getWithout).toHaveBeenCalledWith({ skip: 0, take: 1000 }, WithoutProperty.FACES);
expect(mocks.assetJob.streamForDetectFacesJob).toHaveBeenCalledWith(false);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.FACE_DETECTION,
@@ -472,10 +468,7 @@ describe(PersonService.name, () => {
});
it('should queue all assets', async () => {
mocks.asset.getAll.mockResolvedValue({
items: [assetStub.image],
hasNextPage: false,
});
mocks.assetJob.streamForDetectFacesJob.mockReturnValue(makeStream([assetStub.image]));
mocks.person.getAllWithoutFaces.mockResolvedValue([personStub.withName]);
await sut.handleQueueDetectFaces({ force: true });
@@ -483,7 +476,7 @@ describe(PersonService.name, () => {
expect(mocks.person.deleteFaces).toHaveBeenCalledWith({ sourceType: SourceType.MACHINE_LEARNING });
expect(mocks.person.delete).toHaveBeenCalledWith([personStub.withName.id]);
expect(mocks.storage.unlink).toHaveBeenCalledWith(personStub.withName.thumbnailPath);
expect(mocks.asset.getAll).toHaveBeenCalled();
expect(mocks.assetJob.streamForDetectFacesJob).toHaveBeenCalledWith(true);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.FACE_DETECTION,
@@ -493,17 +486,14 @@ describe(PersonService.name, () => {
});
it('should refresh all assets', async () => {
mocks.asset.getAll.mockResolvedValue({
items: [assetStub.image],
hasNextPage: false,
});
mocks.assetJob.streamForDetectFacesJob.mockReturnValue(makeStream([assetStub.image]));
await sut.handleQueueDetectFaces({ force: undefined });
expect(mocks.person.delete).not.toHaveBeenCalled();
expect(mocks.person.deleteFaces).not.toHaveBeenCalled();
expect(mocks.storage.unlink).not.toHaveBeenCalled();
expect(mocks.asset.getAll).toHaveBeenCalled();
expect(mocks.assetJob.streamForDetectFacesJob).toHaveBeenCalledWith(undefined);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.FACE_DETECTION,
@@ -516,16 +506,13 @@ describe(PersonService.name, () => {
it('should delete existing people and faces if forced', async () => {
mocks.person.getAll.mockReturnValue(makeStream([faceStub.face1.person, personStub.randomPerson]));
mocks.person.getAllFaces.mockReturnValue(makeStream([faceStub.face1]));
mocks.asset.getAll.mockResolvedValue({
items: [assetStub.image],
hasNextPage: false,
});
mocks.assetJob.streamForDetectFacesJob.mockReturnValue(makeStream([assetStub.image]));
mocks.person.getAllWithoutFaces.mockResolvedValue([personStub.randomPerson]);
mocks.person.deleteFaces.mockResolvedValue();
await sut.handleQueueDetectFaces({ force: true });
expect(mocks.asset.getAll).toHaveBeenCalled();
expect(mocks.assetJob.streamForDetectFacesJob).toHaveBeenCalledWith(true);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.FACE_DETECTION,

View File

@@ -36,7 +36,6 @@ import {
SourceType,
SystemMetadataKey,
} from 'src/enum';
import { WithoutProperty } from 'src/repositories/asset.repository';
import { BoundingBox } from 'src/repositories/machine-learning.repository';
import { UpdateFacesData } from 'src/repositories/person.repository';
import { BaseService } from 'src/services/base.service';
@@ -44,7 +43,6 @@ import { CropOptions, ImageDimensions, InputDimensions, JobItem, JobOf } from 's
import { ImmichFileResponse } from 'src/utils/file';
import { mimeTypes } from 'src/utils/mime-types';
import { isFaceImportEnabled, isFacialRecognitionEnabled } from 'src/utils/misc';
import { usePagination } from 'src/utils/pagination';
@Injectable()
export class PersonService extends BaseService {
@@ -265,23 +263,19 @@ export class PersonService extends BaseService {
await this.handlePersonCleanup();
}
const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => {
return force === false
? this.assetRepository.getWithout(pagination, WithoutProperty.FACES)
: this.assetRepository.getAll(pagination, {
orderDirection: 'desc',
withFaces: true,
withArchived: true,
isVisible: true,
});
});
let jobs: JobItem[] = [];
const assets = this.assetJobRepository.streamForDetectFacesJob(force);
for await (const asset of assets) {
jobs.push({ name: JobName.FACE_DETECTION, data: { id: asset.id } });
for await (const assets of assetPagination) {
await this.jobRepository.queueAll(
assets.map((asset) => ({ name: JobName.FACE_DETECTION, data: { id: asset.id } })),
);
if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
await this.jobRepository.queueAll(jobs);
jobs = [];
}
}
await this.jobRepository.queueAll(jobs);
if (force === undefined) {
await this.jobRepository.queue({ name: JobName.PERSON_CLEANUP });
}

View File

@@ -15,7 +15,6 @@ import {
SmartSearchDto,
} from 'src/dtos/search.dto';
import { AssetOrder } from 'src/enum';
import { SearchExploreItem } from 'src/repositories/search.repository';
import { BaseService } from 'src/services/base.service';
import { getMyPartnerIds } from 'src/utils/asset.util';
import { isSmartSearchEnabled } from 'src/utils/misc';
@@ -32,7 +31,7 @@ export class SearchService extends BaseService {
return places.map((place) => mapPlaces(place));
}
async getExploreData(auth: AuthDto): Promise<SearchExploreItem<AssetResponseDto>[]> {
async getExploreData(auth: AuthDto) {
const options = { maxFields: 12, minAssetsPerField: 5 };
const cities = await this.assetRepository.getAssetIdByCity(auth.user.id, options);
const assets = await this.assetRepository.getByIdsWithAllRelationsButStacks(cities.items.map(({ data }) => data));

View File

@@ -151,7 +151,6 @@ describe(SmartInfoService.name, () => {
await sut.handleQueueEncodeClip({});
expect(mocks.asset.getWithout).not.toHaveBeenCalled();
expect(mocks.search.setDimensionSize).not.toHaveBeenCalled();
});

View File

@@ -116,6 +116,11 @@ export class StorageTemplateService extends BaseService {
return { ...storageTokens, presetOptions: storagePresets };
}
@OnEvent({ name: 'asset.metadataExtracted' })
async onAssetMetadataExtracted({ source, assetId }: ArgOf<'asset.metadataExtracted'>) {
await this.jobRepository.queue({ name: JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE, data: { source, id: assetId } });
}
@OnJob({ name: JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE, queue: QueueName.STORAGE_TEMPLATE_MIGRATION })
async handleMigrationSingle({ id }: JobOf<JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE>): Promise<JobStatus> {
const config = await this.getConfig({ withCache: true });

View File

@@ -4,7 +4,7 @@ import { DateTime } from 'luxon';
import { Writable } from 'node:stream';
import { AUDIT_LOG_MAX_DURATION } from 'src/constants';
import { SessionSyncCheckpoints } from 'src/db';
import { AssetResponseDto, hexOrBufferToBase64, mapAsset } from 'src/dtos/asset-response.dto';
import { AssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import {
AssetDeltaSyncDto,
@@ -18,6 +18,7 @@ import { DatabaseAction, EntityType, Permission, SyncEntityType, SyncRequestType
import { BaseService } from 'src/services/base.service';
import { SyncAck } from 'src/types';
import { getMyPartnerIds } from 'src/utils/asset.util';
import { hexOrBufferToBase64 } from 'src/utils/bytes';
import { setIsEqual } from 'src/utils/set';
import { fromAck, serialize } from 'src/utils/sync';
@@ -141,7 +142,7 @@ export class SyncService extends BaseService {
updateId,
data: {
...data,
checksum: hexOrBufferToBase64(checksum),
checksum: hexOrBufferToBase64(checksum)!,
thumbhash: thumbhash ? hexOrBufferToBase64(thumbhash) : null,
},
}),
@@ -171,7 +172,7 @@ export class SyncService extends BaseService {
updateId,
data: {
...data,
checksum: hexOrBufferToBase64(checksum),
checksum: hexOrBufferToBase64(checksum)!,
thumbhash: thumbhash ? hexOrBufferToBase64(thumbhash) : null,
},
}),

View File

@@ -1,9 +1,6 @@
import { BadRequestException } from '@nestjs/common';
import { TimeBucketSize } from 'src/repositories/asset.repository';
import { TimelineService } from 'src/services/timeline.service';
import { assetStub } from 'test/fixtures/asset.stub';
import { authStub } from 'test/fixtures/auth.stub';
import { factory } from 'test/small.factory';
import { newTestService, ServiceMocks } from 'test/utils';
describe(TimelineService.name, () => {
@@ -18,13 +15,10 @@ describe(TimelineService.name, () => {
it("should return buckets if userId and albumId aren't set", async () => {
mocks.asset.getTimeBuckets.mockResolvedValue([{ timeBucket: 'bucket', count: 1 }]);
await expect(
sut.getTimeBuckets(authStub.admin, {
size: TimeBucketSize.DAY,
}),
).resolves.toEqual(expect.arrayContaining([{ timeBucket: 'bucket', count: 1 }]));
await expect(sut.getTimeBuckets(authStub.admin, {})).resolves.toEqual(
expect.arrayContaining([{ timeBucket: 'bucket', count: 1 }]),
);
expect(mocks.asset.getTimeBuckets).toHaveBeenCalledWith({
size: TimeBucketSize.DAY,
userIds: [authStub.admin.user.id],
});
});
@@ -33,35 +27,34 @@ describe(TimelineService.name, () => {
describe('getTimeBucket', () => {
it('should return the assets for a album time bucket if user has album.read', async () => {
mocks.access.album.checkOwnerAccess.mockResolvedValue(new Set(['album-id']));
mocks.asset.getTimeBucket.mockResolvedValue([assetStub.image]);
const json = `[{ id: ['asset-id'] }]`;
mocks.asset.getTimeBucket.mockResolvedValue({ assets: json });
await expect(
sut.getTimeBucket(authStub.admin, { size: TimeBucketSize.DAY, timeBucket: 'bucket', albumId: 'album-id' }),
).resolves.toEqual(expect.arrayContaining([expect.objectContaining({ id: 'asset-id' })]));
await expect(sut.getTimeBucket(authStub.admin, { timeBucket: 'bucket', albumId: 'album-id' })).resolves.toEqual(
json,
);
expect(mocks.access.album.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['album-id']));
expect(mocks.asset.getTimeBucket).toHaveBeenCalledWith('bucket', {
size: TimeBucketSize.DAY,
timeBucket: 'bucket',
albumId: 'album-id',
});
});
it('should return the assets for a archive time bucket if user has archive.read', async () => {
mocks.asset.getTimeBucket.mockResolvedValue([assetStub.image]);
const json = `[{ id: ['asset-id'] }]`;
mocks.asset.getTimeBucket.mockResolvedValue({ assets: json });
await expect(
sut.getTimeBucket(authStub.admin, {
size: TimeBucketSize.DAY,
timeBucket: 'bucket',
isArchived: true,
userId: authStub.admin.user.id,
}),
).resolves.toEqual(expect.arrayContaining([expect.objectContaining({ id: 'asset-id' })]));
).resolves.toEqual(json);
expect(mocks.asset.getTimeBucket).toHaveBeenCalledWith(
'bucket',
expect.objectContaining({
size: TimeBucketSize.DAY,
timeBucket: 'bucket',
isArchived: true,
userIds: [authStub.admin.user.id],
@@ -70,20 +63,19 @@ describe(TimelineService.name, () => {
});
it('should include partner shared assets', async () => {
mocks.asset.getTimeBucket.mockResolvedValue([assetStub.image]);
const json = `[{ id: ['asset-id'] }]`;
mocks.asset.getTimeBucket.mockResolvedValue({ assets: json });
mocks.partner.getAll.mockResolvedValue([]);
await expect(
sut.getTimeBucket(authStub.admin, {
size: TimeBucketSize.DAY,
timeBucket: 'bucket',
isArchived: false,
userId: authStub.admin.user.id,
withPartners: true,
}),
).resolves.toEqual(expect.arrayContaining([expect.objectContaining({ id: 'asset-id' })]));
).resolves.toEqual(json);
expect(mocks.asset.getTimeBucket).toHaveBeenCalledWith('bucket', {
size: TimeBucketSize.DAY,
timeBucket: 'bucket',
isArchived: false,
withPartners: true,
@@ -92,62 +84,37 @@ describe(TimelineService.name, () => {
});
it('should check permissions to read tag', async () => {
mocks.asset.getTimeBucket.mockResolvedValue([assetStub.image]);
const json = `[{ id: ['asset-id'] }]`;
mocks.asset.getTimeBucket.mockResolvedValue({ assets: json });
mocks.access.tag.checkOwnerAccess.mockResolvedValue(new Set(['tag-123']));
await expect(
sut.getTimeBucket(authStub.admin, {
size: TimeBucketSize.DAY,
timeBucket: 'bucket',
userId: authStub.admin.user.id,
tagId: 'tag-123',
}),
).resolves.toEqual(expect.arrayContaining([expect.objectContaining({ id: 'asset-id' })]));
).resolves.toEqual(json);
expect(mocks.asset.getTimeBucket).toHaveBeenCalledWith('bucket', {
size: TimeBucketSize.DAY,
tagId: 'tag-123',
timeBucket: 'bucket',
userIds: [authStub.admin.user.id],
});
});
it('should strip metadata if showExif is disabled', async () => {
mocks.access.album.checkSharedLinkAccess.mockResolvedValue(new Set(['album-id']));
mocks.asset.getTimeBucket.mockResolvedValue([assetStub.image]);
const auth = factory.auth({ sharedLink: { showExif: false } });
const buckets = await sut.getTimeBucket(auth, {
size: TimeBucketSize.DAY,
timeBucket: 'bucket',
isArchived: true,
albumId: 'album-id',
});
expect(buckets).toEqual([expect.objectContaining({ id: 'asset-id' })]);
expect(buckets[0]).not.toHaveProperty('exif');
expect(mocks.asset.getTimeBucket).toHaveBeenCalledWith('bucket', {
size: TimeBucketSize.DAY,
timeBucket: 'bucket',
isArchived: true,
albumId: 'album-id',
});
});
it('should return the assets for a library time bucket if user has library.read', async () => {
mocks.asset.getTimeBucket.mockResolvedValue([assetStub.image]);
const json = `[{ id: ['asset-id'] }]`;
mocks.asset.getTimeBucket.mockResolvedValue({ assets: json });
await expect(
sut.getTimeBucket(authStub.admin, {
size: TimeBucketSize.DAY,
timeBucket: 'bucket',
userId: authStub.admin.user.id,
}),
).resolves.toEqual(expect.arrayContaining([expect.objectContaining({ id: 'asset-id' })]));
).resolves.toEqual(json);
expect(mocks.asset.getTimeBucket).toHaveBeenCalledWith(
'bucket',
expect.objectContaining({
size: TimeBucketSize.DAY,
timeBucket: 'bucket',
userIds: [authStub.admin.user.id],
}),
@@ -157,7 +124,6 @@ describe(TimelineService.name, () => {
it('should throw an error if withParners is true and isArchived true or undefined', async () => {
await expect(
sut.getTimeBucket(authStub.admin, {
size: TimeBucketSize.DAY,
timeBucket: 'bucket',
isArchived: true,
withPartners: true,
@@ -167,7 +133,6 @@ describe(TimelineService.name, () => {
await expect(
sut.getTimeBucket(authStub.admin, {
size: TimeBucketSize.DAY,
timeBucket: 'bucket',
isArchived: undefined,
withPartners: true,
@@ -179,7 +144,6 @@ describe(TimelineService.name, () => {
it('should throw an error if withParners is true and isFavorite is either true or false', async () => {
await expect(
sut.getTimeBucket(authStub.admin, {
size: TimeBucketSize.DAY,
timeBucket: 'bucket',
isFavorite: true,
withPartners: true,
@@ -189,7 +153,6 @@ describe(TimelineService.name, () => {
await expect(
sut.getTimeBucket(authStub.admin, {
size: TimeBucketSize.DAY,
timeBucket: 'bucket',
isFavorite: false,
withPartners: true,
@@ -201,7 +164,6 @@ describe(TimelineService.name, () => {
it('should throw an error if withParners is true and isTrash is true', async () => {
await expect(
sut.getTimeBucket(authStub.admin, {
size: TimeBucketSize.DAY,
timeBucket: 'bucket',
isTrashed: true,
withPartners: true,

View File

@@ -1,7 +1,7 @@
import { BadRequestException, Injectable } from '@nestjs/common';
import { AssetResponseDto, SanitizedAssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
import { Stack } from 'src/database';
import { AuthDto } from 'src/dtos/auth.dto';
import { TimeBucketAssetDto, TimeBucketDto, TimeBucketResponseDto } from 'src/dtos/time-bucket.dto';
import { TimeBucketAssetDto, TimeBucketDto, TimeBucketsResponseDto } from 'src/dtos/time-bucket.dto';
import { Permission } from 'src/enum';
import { TimeBucketOptions } from 'src/repositories/asset.repository';
import { BaseService } from 'src/services/base.service';
@@ -9,22 +9,32 @@ import { getMyPartnerIds } from 'src/utils/asset.util';
@Injectable()
export class TimelineService extends BaseService {
async getTimeBuckets(auth: AuthDto, dto: TimeBucketDto): Promise<TimeBucketResponseDto[]> {
async getTimeBuckets(auth: AuthDto, dto: TimeBucketDto): Promise<TimeBucketsResponseDto[]> {
await this.timeBucketChecks(auth, dto);
const timeBucketOptions = await this.buildTimeBucketOptions(auth, dto);
return this.assetRepository.getTimeBuckets(timeBucketOptions);
return await this.assetRepository.getTimeBuckets(timeBucketOptions);
}
async getTimeBucket(
auth: AuthDto,
dto: TimeBucketAssetDto,
): Promise<AssetResponseDto[] | SanitizedAssetResponseDto[]> {
// pre-jsonified response
async getTimeBucket(auth: AuthDto, dto: TimeBucketAssetDto): Promise<string> {
await this.timeBucketChecks(auth, dto);
const timeBucketOptions = await this.buildTimeBucketOptions(auth, dto);
const assets = await this.assetRepository.getTimeBucket(dto.timeBucket, timeBucketOptions);
return !auth.sharedLink || auth.sharedLink?.showExif
? assets.map((asset) => mapAsset(asset, { withStack: true, auth }))
: assets.map((asset) => mapAsset(asset, { stripMetadata: true, auth }));
const timeBucketOptions = await this.buildTimeBucketOptions(auth, { ...dto });
// TODO: use id cursor for pagination
const bucket = await this.assetRepository.getTimeBucket(dto.timeBucket, timeBucketOptions);
return bucket.assets;
}
mapStack(entity?: Stack | null) {
if (!entity) {
return null;
}
return {
id: entity.id!,
primaryAssetId: entity.primaryAssetId!,
assetCount: entity.assetCount as number,
};
}
private async buildTimeBucketOptions(auth: AuthDto, dto: TimeBucketDto): Promise<TimeBucketOptions> {

View File

@@ -0,0 +1,28 @@
export type TimelineStack = {
id: string;
primaryAssetId: string;
assetCount: number;
};
export type AssetDescription = {
city: string | null;
country: string | null;
};
export type TimeBucketAssets = {
id: string[];
ownerId: string[];
ratio: number[];
isFavorite: number[];
isArchived: number[];
isTrashed: number[];
isImage: number[];
thumbhash: (string | null)[];
localDateTime: string[];
stack?: ([string, string] | null)[];
duration: (string | null)[];
projectionType: (string | null)[];
livePhotoVideoId: (string | null)[];
city: (string | null)[];
country: (string | null)[];
};
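
`TimeBucketAssets` is a columnar (struct-of-arrays) layout: index i of every array describes asset i in the bucket. A hypothetical two-asset bucket to illustrate — all values are invented, and the `stack` tuple is assumed to pair a stack id with its asset count, following the `TimelineStack`/`mapStack` shapes above:

```typescript
// Hypothetical bucket: position 0 of every array is the first asset, position 1 the second.
const bucket: TimeBucketAssets = {
  id: ['asset-1', 'asset-2'],
  ownerId: ['user-1', 'user-1'],
  ratio: [1.5, 0.75],
  isFavorite: [1, 0],
  isArchived: [0, 0],
  isTrashed: [0, 0],
  isImage: [1, 0],
  thumbhash: ['1QcSHQRnh493V4dIh4eXh1h4kJUI', null],
  localDateTime: ['2025-05-01T10:00:00.000', '2025-05-01T11:30:00.000'],
  stack: [null, ['stack-1', '3']], // assumed [stackId, assetCount as string]
  duration: [null, '0:00:05.00000'],
  projectionType: [null, null],
  livePhotoVideoId: [null, null],
  city: ['Oslo', null],
  country: ['Norway', null],
};
```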

View File

@@ -177,9 +177,10 @@ export interface IDelayedJob extends IBaseJob {
delay?: number;
}
export type JobSource = 'upload' | 'sidecar-write' | 'copy';
export interface IEntityJob extends IBaseJob {
id: string;
source?: 'upload' | 'sidecar-write' | 'copy';
source?: JobSource;
notify?: boolean;
}
@@ -251,7 +252,7 @@ export interface INotifyAlbumInviteJob extends IEntityJob {
}
export interface INotifyAlbumUpdateJob extends IEntityJob, IDelayedJob {
recipientIds: string[];
recipientId: string;
}
export interface JobCounts {

View File

@@ -197,3 +197,16 @@ export const asRequest = (request: AuthRequest, file: Express.Multer.File) => {
file: mapToUploadFile(file as ImmichFile),
};
};
function isRotated90CW(orientation: number) {
return orientation === 5 || orientation === 6 || orientation === 90;
}
function isRotated270CW(orientation: number) {
return orientation === 7 || orientation === 8 || orientation === -90;
}
export function isFlipped(orientation?: string | null) {
const value = Number(orientation);
return value && (isRotated270CW(value) || isRotated90CW(value));
}
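
The orientation helpers above treat EXIF values 5/6 (or a literal 90) as 90° clockwise and 7/8 (or -90) as 270°; `isFlipped` is truthy for any of them, which is typically when stored width and height need to be swapped for display (that use is an assumption, not shown here). For example:

```typescript
// isFlipped takes the string-typed orientation column and is truthy only for 90°/270° rotations.
isFlipped('6');  // => true  (EXIF 6: rotated 90° CW)
isFlipped('8');  // => true  (EXIF 8: rotated 270° CW)
isFlipped('1');  // => false (normal orientation)
isFlipped(null); // => 0, falsy (Number(null) === 0)
```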

View File

@@ -22,3 +22,15 @@ export function asHumanReadable(bytes: number, precision = 1): string {
return `${remainder.toFixed(magnitude == 0 ? 0 : precision)} ${units[magnitude]}`;
}
// if an asset is jsonified in the DB before being returned, its buffer fields will be hex-encoded strings
export const hexOrBufferToBase64 = (encoded: string | Buffer | null) => {
if (!encoded) {
return null;
}
if (typeof encoded === 'string') {
return Buffer.from(encoded.slice(2), 'hex').toString('base64');
}
return encoded.toString('base64');
};
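
A quick usage sketch of the helper above. The two characters dropped by `slice(2)` are assumed to be the `\x` prefix that Postgres bytea values carry once a row has been serialized to JSON; the sync service hunks earlier apply `!` at call sites where the checksum is known to be non-null:

```typescript
hexOrBufferToBase64('\\x48656c6c6f');      // => 'SGVsbG8=' ("Hello" as \x-prefixed hex)
hexOrBufferToBase64(Buffer.from('Hello')); // => 'SGVsbG8=' (raw buffer from a non-jsonified row)
hexOrBufferToBase64(null);                 // => null (hence the non-null assertions in sync.service.ts)
```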

View File

@@ -262,7 +262,7 @@ export function withTags(eb: ExpressionBuilder<DB, 'assets'>) {
}
export function truncatedDate<O>(size: TimeBucketSize) {
return sql<O>`date_trunc(${size}, "localDateTime" at time zone 'UTC') at time zone 'UTC'`;
return sql<O>`date_trunc(${sql.lit(size)}, "localDateTime" at time zone 'UTC') at time zone 'UTC'`;
}
export function withTagId<O>(qb: SelectQueryBuilder<DB, 'assets', O>, tagId: string) {
@@ -276,6 +276,7 @@ export function withTagId<O>(qb: SelectQueryBuilder<DB, 'assets', O>, tagId: str
),
);
}
const joinDeduplicationPlugin = new DeduplicateJoinsPlugin();
/** TODO: This should only be used for search-related queries, not as a general purpose query builder */
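
The `truncatedDate` change above swaps a bound parameter for `sql.lit`, so the bucket size is inlined into the SQL text instead of travelling as `$1`. A minimal sketch, assuming the size is a trusted enum value such as `'day'` (never user input) and using illustrative literals rather than the actual `TimeBucketSize` values:

```typescript
import { sql } from 'kysely';

// Parameterized form (before): the size is sent as a bind parameter, e.g.
//   date_trunc($1, "localDateTime" at time zone 'UTC') at time zone 'UTC'
const parameterized = (size: string) =>
  sql`date_trunc(${size}, "localDateTime" at time zone 'UTC') at time zone 'UTC'`;

// Literal form (after): the size is inlined into the SQL text, e.g.
//   date_trunc('day', "localDateTime" at time zone 'UTC') at time zone 'UTC'
const literal = (size: 'day' | 'month') =>
  sql`date_trunc(${sql.lit(size)}, "localDateTime" at time zone 'UTC') at time zone 'UTC'`;
```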

View File

@@ -8,22 +8,6 @@ export interface PaginationResult<T> {
hasNextPage: boolean;
}
export type Paginated<T> = Promise<PaginationResult<T>>;
/** @deprecated use `this.db. ... .stream()` instead */
export async function* usePagination<T>(
pageSize: number,
getNextPage: (pagination: PaginationOptions) => PaginationResult<T> | Paginated<T>,
) {
let hasNextPage = true;
for (let skip = 0; hasNextPage; skip += pageSize) {
const result = await getNextPage({ take: pageSize, skip });
hasNextPage = result.hasNextPage;
yield result.items;
}
}
export function paginationHelper<Entity extends object>(items: Entity[], take: number): PaginationResult<Entity> {
const hasNextPage = items.length > take;
items.splice(take);

View File

@@ -14,7 +14,6 @@ import { LoggingRepository } from 'src/repositories/logging.repository';
import { bootstrapTelemetry } from 'src/repositories/telemetry.repository';
import { ApiService } from 'src/services/api.service';
import { isStartUpError, useSwagger } from 'src/utils/misc';
async function bootstrap() {
process.title = 'immich-api';

View File

@@ -257,6 +257,10 @@ export const assetStub = {
duplicateId: null,
isOffline: false,
stack: null,
orientation: '',
projectionType: null,
height: 3840,
width: 2160,
}),
trashed: Object.freeze({

View File

@@ -142,18 +142,15 @@ export const getRepository = <K extends keyof RepositoriesTypes>(key: K, db: Kys
}
case 'database': {
const configRepo = new ConfigRepository();
return new DatabaseRepository(db, new LoggingRepository(undefined, configRepo), configRepo);
return new DatabaseRepository(db, LoggingRepository.create(), new ConfigRepository());
}
case 'email': {
const logger = new LoggingRepository(undefined, new ConfigRepository());
return new EmailRepository(logger);
return new EmailRepository(LoggingRepository.create());
}
case 'logger': {
const configMock = { getEnv: () => ({ noColor: false }) };
return new LoggingRepository(undefined, configMock as ConfigRepository);
return LoggingRepository.create();
}
case 'memory': {

View File

@@ -42,7 +42,7 @@ const globalSetup = async () => {
const db = new Kysely<DB>(getKyselyConfig({ connectionType: 'url', url: postgresUrl }));
const configRepository = new ConfigRepository();
const logger = new LoggingRepository(undefined, configRepository);
const logger = LoggingRepository.create();
await new DatabaseRepository(db, logger, configRepository).runMigrations();
await db.destroy();

View File

@@ -13,14 +13,11 @@ export const newAssetRepositoryMock = (): Mocked<RepositoryInterface<AssetReposi
getByIds: vitest.fn().mockResolvedValue([]),
getByIdsWithAllRelationsButStacks: vitest.fn().mockResolvedValue([]),
getByDeviceIds: vitest.fn(),
getByUserId: vitest.fn(),
getById: vitest.fn(),
getWithout: vitest.fn(),
getByChecksum: vitest.fn(),
getByChecksums: vitest.fn(),
getUploadAssetIdByChecksum: vitest.fn(),
getRandom: vitest.fn(),
getAll: vitest.fn().mockResolvedValue({ items: [], hasNextPage: false }),
getAllByDeviceId: vitest.fn(),
getLivePhotoCount: vitest.fn(),
getLibraryAssetCount: vitest.fn(),

View File

@@ -10,6 +10,7 @@ const envData: EnvData = {
buildMetadata: {},
bull: {
config: {
connection: {},
prefix: 'immich_bull',
},
queues: [{ name: 'queue-1' }],

View File

@@ -0,0 +1,6 @@
{
"name": "typescript-sdk",
"lockfileVersion": 3,
"requires": true,
"packages": {}
}

View File

@@ -5,10 +5,17 @@ TYPESCRIPT_SDK=/usr/src/open-api/typescript-sdk
npm --prefix "$TYPESCRIPT_SDK" install
npm --prefix "$TYPESCRIPT_SDK" run build
COUNT=0
UPSTREAM="${IMMICH_SERVER_URL:-http://immich-server:2283/}"
until wget --spider --quiet "${UPSTREAM}/api/server/config"; do
echo 'waiting for api server...'
until wget --spider --quiet "${UPSTREAM}/api/server/config" > /dev/null 2>&1; do
if [ $((COUNT % 10)) -eq 0 ]; then
echo "Waiting for $UPSTREAM to start..."
fi
COUNT=$((COUNT + 1))
sleep 1
done
echo "Connected to $UPSTREAM"
node ./node_modules/.bin/vite dev --host 0.0.0.0 --port 3000

web/package-lock.json (generated)
View File

@@ -11,7 +11,7 @@
"dependencies": {
"@formatjs/icu-messageformat-parser": "^2.9.8",
"@immich/sdk": "file:../open-api/typescript-sdk",
"@immich/ui": "^0.18.1",
"@immich/ui": "^0.19.0",
"@mapbox/mapbox-gl-rtl-text": "0.2.3",
"@mdi/js": "^7.4.47",
"@photo-sphere-viewer/core": "^5.11.5",
@@ -1320,9 +1320,9 @@
"link": true
},
"node_modules/@immich/ui": {
"version": "0.18.1",
"resolved": "https://registry.npmjs.org/@immich/ui/-/ui-0.18.1.tgz",
"integrity": "sha512-XWWO6OTfH3MektyxCn0hWefZyOGyWwwx/2zHinuShpxTHSyfveJ4mOkFP8DkyMz0dnvJ1EfdkPBMkld3y5R/Hw==",
"version": "0.19.0",
"resolved": "https://registry.npmjs.org/@immich/ui/-/ui-0.19.0.tgz",
"integrity": "sha512-XVjSUoQVIoe83pxM4q8kmlejb2xep/TZEfoGbasI7takEGKNiWEyXr5eZaXZCSVgq78fcNRr4jyWz290ZAXh7A==",
"license": "GNU Affero General Public License version 3",
"dependencies": {
"@mdi/js": "^7.4.47",

View File

@@ -27,7 +27,7 @@
"dependencies": {
"@formatjs/icu-messageformat-parser": "^2.9.8",
"@immich/sdk": "file:../open-api/typescript-sdk",
"@immich/ui": "^0.18.1",
"@immich/ui": "^0.19.0",
"@mapbox/mapbox-gl-rtl-text": "0.2.3",
"@mdi/js": "^7.4.47",
"@photo-sphere-viewer/core": "^5.11.5",

View File

@@ -8,7 +8,6 @@
--immich-primary: 66 80 175;
--immich-bg: 255 255 255;
--immich-fg: 0 0 0;
--immich-gray: 246 246 244;
--immich-error: 229 115 115;
--immich-success: 129 199 132;
--immich-warning: 255 183 77;
@@ -33,6 +32,7 @@
--immich-ui-warning: 255 170 0;
--immich-ui-info: 14 165 233;
--immich-ui-default-border: 209 213 219;
--immich-ui-gray: 246 246 246;
}
.dark {
@@ -45,6 +45,7 @@
--immich-ui-warning: 255 170 0;
--immich-ui-info: 14 165 233;
--immich-ui-default-border: 55 65 81;
--immich-ui-gray: 33 33 33;
}
}

View File

@@ -1,8 +1,11 @@
import FocusTrapTest from '$lib/actions/__test__/focus-trap-test.svelte';
import { setDefaultTabbleOptions } from '$lib/utils/focus-util';
import { render, screen } from '@testing-library/svelte';
import userEvent from '@testing-library/user-event';
import { tick } from 'svelte';
setDefaultTabbleOptions({ displayCheck: 'none' });
describe('focusTrap action', () => {
const user = userEvent.setup();
@@ -38,6 +41,7 @@ describe('focusTrap action', () => {
const openButton = screen.getByText('Open');
await user.click(openButton);
await tick();
expect(document.activeElement).toEqual(screen.getByTestId('one'));
screen.getByText('Close').click();

View File

@@ -1,5 +1,5 @@
import { shortcuts } from '$lib/actions/shortcut';
import { getFocusable } from '$lib/utils/focus-util';
import { getTabbable } from '$lib/utils/focus-util';
import { tick } from 'svelte';
interface Options {
@@ -18,18 +18,21 @@ export function focusTrap(container: HTMLElement, options?: Options) {
};
};
const setInitialFocus = () => {
const focusableElement = getFocusable(container)[0];
// Use tick() to ensure focus trap works correctly inside <Portal />
void tick().then(() => focusableElement?.focus());
const setInitialFocus = async () => {
const focusableElement = getTabbable(container, false)[0];
if (focusableElement) {
// Use tick() to ensure focus trap works correctly inside <Portal />
await tick();
focusableElement?.focus();
}
};
if (withDefaults(options).active) {
setInitialFocus();
void setInitialFocus();
}
const getFocusableElements = () => {
const focusableElements = getFocusable(container);
const focusableElements = getTabbable(container);
return [
focusableElements.at(0), //
focusableElements.at(-1),
@@ -67,7 +70,7 @@ export function focusTrap(container: HTMLElement, options?: Options) {
update(newOptions?: Options) {
options = newOptions;
if (withDefaults(options).active) {
setInitialFocus();
void setInitialFocus();
}
},
destroy() {

View File

@@ -47,8 +47,7 @@
<ConfirmDialog
title={$t('delete_user')}
confirmText={forceDelete ? $t('permanently_delete') : $t('delete')}
onConfirm={handleDeleteUser}
{onCancel}
onClose={(confirmed) => (confirmed ? handleDeleteUser() : onCancel())}
disabled={deleteButtonDisabled}
>
{#snippet promptSnippet()}

View File

@@ -33,8 +33,7 @@
title={$t('restore_user')}
confirmText={$t('continue')}
confirmColor="success"
onConfirm={handleRestoreUser}
{onCancel}
onClose={(confirmed) => (confirmed ? handleRestoreUser() : onCancel())}
>
{#snippet promptSnippet()}
<p>

View File

@@ -49,8 +49,7 @@
{#if isConfirmOpen}
<ConfirmDialog
title={$t('admin.disable_login')}
onCancel={() => (isConfirmOpen = false)}
onConfirm={() => handleSave(true)}
onClose={(confirmed) => (confirmed ? handleSave(true) : (isConfirmOpen = false))}
>
{#snippet promptSnippet()}
<div class="flex flex-col gap-4">

View File

@@ -1,27 +1,27 @@
<script lang="ts">
import Icon from '$lib/components/elements/icon.svelte';
import ButtonContextMenu from '$lib/components/shared-components/context-menu/button-context-menu.svelte';
import MenuOption from '$lib/components/shared-components/context-menu/menu-option.svelte';
import ConfirmDialog from '$lib/components/shared-components/dialog/confirm-dialog.svelte';
import FullScreenModal from '$lib/components/shared-components/full-screen-modal.svelte';
import SettingSwitch from '$lib/components/shared-components/settings/setting-switch.svelte';
import UserAvatar from '$lib/components/shared-components/user-avatar.svelte';
import { handleError } from '$lib/utils/handle-error';
import {
updateAlbumInfo,
AlbumUserRole,
AssetOrder,
removeUserFromAlbum,
updateAlbumInfo,
updateAlbumUser,
type AlbumResponseDto,
type UserResponseDto,
AssetOrder,
AlbumUserRole,
updateAlbumUser,
} from '@immich/sdk';
import { mdiArrowDownThin, mdiArrowUpThin, mdiPlus, mdiDotsVertical } from '@mdi/js';
import FullScreenModal from '$lib/components/shared-components/full-screen-modal.svelte';
import UserAvatar from '$lib/components/shared-components/user-avatar.svelte';
import SettingSwitch from '$lib/components/shared-components/settings/setting-switch.svelte';
import SettingDropdown from '../shared-components/settings/setting-dropdown.svelte';
import type { RenderedOption } from '../elements/dropdown.svelte';
import { handleError } from '$lib/utils/handle-error';
import { mdiArrowDownThin, mdiArrowUpThin, mdiDotsVertical, mdiPlus } from '@mdi/js';
import { findKey } from 'lodash-es';
import { t } from 'svelte-i18n';
import ButtonContextMenu from '$lib/components/shared-components/context-menu/button-context-menu.svelte';
import ConfirmDialog from '$lib/components/shared-components/dialog/confirm-dialog.svelte';
import type { RenderedOption } from '../elements/dropdown.svelte';
import { notificationController, NotificationType } from '../shared-components/notification/notification';
import MenuOption from '$lib/components/shared-components/context-menu/menu-option.svelte';
import SettingDropdown from '../shared-components/settings/setting-dropdown.svelte';
interface Props {
album: AlbumResponseDto;
@@ -195,7 +195,6 @@
title={$t('album_remove_user')}
prompt={$t('album_remove_user_confirmation', { values: { user: selectedRemoveUser.name } })}
confirmText={$t('remove_user')}
onConfirm={handleRemoveUser}
onCancel={() => (selectedRemoveUser = null)}
onClose={(confirmed) => (confirmed ? handleRemoveUser() : (selectedRemoveUser = null))}
/>
{/if}

View File

@@ -1,25 +1,25 @@
<script lang="ts">
import { shortcut } from '$lib/actions/shortcut';
import SelectAllAssets from '$lib/components/photos-page/actions/select-all-assets.svelte';
import AssetSelectControlBar from '$lib/components/photos-page/asset-select-control-bar.svelte';
import { AssetInteraction } from '$lib/stores/asset-interaction.svelte';
import { assetViewingStore } from '$lib/stores/asset-viewing.store';
import { AssetStore } from '$lib/stores/assets-store.svelte';
import { dragAndDropFilesStore } from '$lib/stores/drag-and-drop-files.store';
import { handlePromiseError } from '$lib/utils';
import { cancelMultiselect, downloadAlbum } from '$lib/utils/asset-utils';
import { fileUploadHandler, openFileUploadDialog } from '$lib/utils/file-uploader';
import type { AlbumResponseDto, SharedLinkResponseDto, UserResponseDto } from '@immich/sdk';
import { AssetStore } from '$lib/stores/assets-store.svelte';
import { cancelMultiselect, downloadAlbum } from '$lib/utils/asset-utils';
import { mdiFileImagePlusOutline, mdiFolderDownloadOutline } from '@mdi/js';
import { onDestroy } from 'svelte';
import { t } from 'svelte-i18n';
import CircleIconButton from '../elements/buttons/circle-icon-button.svelte';
import DownloadAction from '../photos-page/actions/download-action.svelte';
import AssetGrid from '../photos-page/asset-grid.svelte';
import AssetSelectControlBar from '../photos-page/asset-select-control-bar.svelte';
import ControlAppBar from '../shared-components/control-app-bar.svelte';
import ImmichLogoSmallLink from '../shared-components/immich-logo-small-link.svelte';
import ThemeButton from '../shared-components/theme-button.svelte';
import { shortcut } from '$lib/actions/shortcut';
import { mdiFileImagePlusOutline, mdiFolderDownloadOutline } from '@mdi/js';
import { handlePromiseError } from '$lib/utils';
import AlbumSummary from './album-summary.svelte';
import { t } from 'svelte-i18n';
import { onDestroy } from 'svelte';
import { AssetInteraction } from '$lib/stores/asset-interaction.svelte';
interface Props {
sharedLink: SharedLinkResponseDto;

View File

@@ -1,22 +1,22 @@
<script lang="ts">
import ButtonContextMenu from '$lib/components/shared-components/context-menu/button-context-menu.svelte';
import FullScreenModal from '$lib/components/shared-components/full-screen-modal.svelte';
import {
AlbumUserRole,
getMyUser,
removeUserFromAlbum,
updateAlbumUser,
type AlbumResponseDto,
type UserResponseDto,
updateAlbumUser,
AlbumUserRole,
} from '@immich/sdk';
import { mdiDotsVertical } from '@mdi/js';
import { onMount } from 'svelte';
import { t } from 'svelte-i18n';
import { handleError } from '../../utils/handle-error';
import ConfirmDialog from '../shared-components/dialog/confirm-dialog.svelte';
import MenuOption from '../shared-components/context-menu/menu-option.svelte';
import ConfirmDialog from '../shared-components/dialog/confirm-dialog.svelte';
import { NotificationType, notificationController } from '../shared-components/notification/notification';
import UserAvatar from '../shared-components/user-avatar.svelte';
import FullScreenModal from '$lib/components/shared-components/full-screen-modal.svelte';
import { t } from 'svelte-i18n';
import ButtonContextMenu from '$lib/components/shared-components/context-menu/button-context-menu.svelte';
interface Props {
album: AlbumResponseDto;
@@ -144,8 +144,7 @@
title={$t('album_leave')}
prompt={$t('album_leave_confirmation', { values: { album: album.albumName } })}
confirmText={$t('leave')}
onConfirm={handleRemoveUser}
onCancel={() => (selectedRemoveUser = null)}
onClose={(confirmed) => (confirmed ? handleRemoveUser() : (selectedRemoveUser = null))}
/>
{/if}
@@ -154,7 +153,6 @@
title={$t('album_remove_user')}
prompt={$t('album_remove_user_confirmation', { values: { user: selectedRemoveUser.name } })}
confirmText={$t('remove_user')}
onConfirm={handleRemoveUser}
onCancel={() => (selectedRemoveUser = null)}
onClose={(confirmed) => (confirmed ? handleRemoveUser() : (selectedRemoveUser = null))}
/>
{/if}
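
Several dialogs in this diff replace separate `onConfirm`/`onCancel` props with a single `onClose` callback that receives whether the user confirmed. A hedged sketch of that callback shape — the type and handlers here are illustrative, not ConfirmDialog's actual interface:

```typescript
// Illustrative onClose(confirmed) handler mirroring the component usages above.
type OnClose = (confirmed: boolean) => void;

const handleRemoveUser = () => console.log('user removed');
const dismiss = () => console.log('dialog dismissed');

const onClose: OnClose = (confirmed) => (confirmed ? handleRemoveUser() : dismiss());

onClose(true);  // confirm path
onClose(false); // cancel path
```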

Some files were not shown because too many files have changed in this diff.