Compare commits

..

1 commit

Author        SHA1         Message   Date
Min Idzelis   9806e94f6f   wip       2025-04-30 03:50:13 +00:00

181 changed files with 4670 additions and 3272 deletions

View File

@@ -338,15 +338,12 @@ jobs:
name: End-to-End Tests (Server & CLI)
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_e2e_server_cli == 'true' }}
runs-on: ${{ matrix.runner }}
runs-on: mich
permissions:
contents: read
defaults:
run:
working-directory: ./e2e
strategy:
matrix:
runner: [mich, ubuntu-24.04-arm]
steps:
- name: Checkout code
@@ -386,15 +383,12 @@ jobs:
name: End-to-End Tests (Web)
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_e2e_web == 'true' }}
runs-on: ${{ matrix.runner }}
runs-on: mich
permissions:
contents: read
defaults:
run:
working-directory: ./e2e
strategy:
matrix:
runner: [mich, ubuntu-24.04-arm]
steps:
- name: Checkout code
@@ -429,21 +423,6 @@ jobs:
run: npx playwright test
if: ${{ !cancelled() }}
success-check-e2e:
name: End-to-End Tests Success
needs: [e2e-tests-server-cli, e2e-tests-web]
permissions: {}
runs-on: ubuntu-latest
if: always()
steps:
- name: Any jobs failed?
if: ${{ contains(needs.*.result, 'failure') }}
run: exit 1
- name: All jobs passed or skipped
if: ${{ !(contains(needs.*.result, 'failure')) }}
# zizmor: ignore[template-injection]
run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"
mobile-unit-tests:
name: Unit Test Mobile
needs: pre-job

View File

@@ -1,194 +0,0 @@
{{>header}}
{{>part_of}}
{{#operations}}
class {{{classname}}} {
{{{classname}}}([ApiClient? apiClient]) : apiClient = apiClient ?? defaultApiClient;
final ApiClient apiClient;
{{#operation}}
{{#summary}}
/// {{{.}}}
{{/summary}}
{{#notes}}
{{#summary}}
///
{{/summary}}
/// {{{notes}}}
///
/// Note: This method returns the HTTP [Response].
{{/notes}}
{{^notes}}
{{#summary}}
///
/// Note: This method returns the HTTP [Response].
{{/summary}}
{{^summary}}
/// Performs an HTTP '{{{httpMethod}}} {{{path}}}' operation and returns the [Response].
{{/summary}}
{{/notes}}
{{#hasParams}}
{{#summary}}
///
{{/summary}}
{{^summary}}
{{#notes}}
///
{{/notes}}
{{/summary}}
/// Parameters:
///
{{/hasParams}}
{{#allParams}}
/// * [{{{dataType}}}] {{{paramName}}}{{#required}} (required){{/required}}{{#optional}} (optional){{/optional}}:
{{#description}}
/// {{{.}}}
{{/description}}
{{^-last}}
///
{{/-last}}
{{/allParams}}
Future<Response> {{{nickname}}}WithHttpInfo({{#allParams}}{{#required}}{{{dataType}}} {{{paramName}}},{{^-last}} {{/-last}}{{/required}}{{/allParams}}{{#hasOptionalParams}}{ {{#allParams}}{{^required}}{{{dataType}}}? {{{paramName}}},{{^-last}} {{/-last}}{{/required}}{{/allParams}} }{{/hasOptionalParams}}) async {
// ignore: prefer_const_declarations
final path = r'{{{path}}}'{{#pathParams}}
.replaceAll({{=<% %>=}}'{<% baseName %>}'<%={{ }}=%>, {{{paramName}}}{{^isString}}.toString(){{/isString}}){{/pathParams}};
// ignore: prefer_final_locals
Object? postBody{{#bodyParam}} = {{{paramName}}}{{/bodyParam}};
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
{{#hasQueryParams}}
{{#queryParams}}
{{^required}}
if ({{{paramName}}} != null) {
{{/required}}
queryParams.addAll(_queryParams('{{{collectionFormat}}}', '{{{baseName}}}', {{{paramName}}}));
{{^required}}
}
{{/required}}
{{/queryParams}}
{{/hasQueryParams}}
{{#hasHeaderParams}}
{{#headerParams}}
{{#required}}
headerParams[r'{{{baseName}}}'] = parameterToString({{{paramName}}});
{{/required}}
{{^required}}
if ({{{paramName}}} != null) {
headerParams[r'{{{baseName}}}'] = parameterToString({{{paramName}}});
}
{{/required}}
{{/headerParams}}
{{/hasHeaderParams}}
const contentTypes = <String>[{{#prioritizedContentTypes}}'{{{mediaType}}}'{{^-last}}, {{/-last}}{{/prioritizedContentTypes}}];
{{#isMultipart}}
bool hasFields = false;
final mp = MultipartRequest('{{{httpMethod}}}', Uri.parse(path));
{{#formParams}}
{{^isFile}}
if ({{{paramName}}} != null) {
hasFields = true;
mp.fields[r'{{{baseName}}}'] = parameterToString({{{paramName}}});
}
{{/isFile}}
{{#isFile}}
if ({{{paramName}}} != null) {
hasFields = true;
mp.fields[r'{{{baseName}}}'] = {{{paramName}}}.field;
mp.files.add({{{paramName}}});
}
{{/isFile}}
{{/formParams}}
if (hasFields) {
postBody = mp;
}
{{/isMultipart}}
{{^isMultipart}}
{{#formParams}}
{{^isFile}}
if ({{{paramName}}} != null) {
formParams[r'{{{baseName}}}'] = parameterToString({{{paramName}}});
}
{{/isFile}}
{{/formParams}}
{{/isMultipart}}
return apiClient.invokeAPI(
path,
'{{{httpMethod}}}',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
{{#summary}}
/// {{{.}}}
{{/summary}}
{{#notes}}
{{#summary}}
///
{{/summary}}
/// {{{notes}}}
{{/notes}}
{{#hasParams}}
{{#summary}}
///
{{/summary}}
{{^summary}}
{{#notes}}
///
{{/notes}}
{{/summary}}
/// Parameters:
///
{{/hasParams}}
{{#allParams}}
/// * [{{{dataType}}}] {{{paramName}}}{{#required}} (required){{/required}}{{#optional}} (optional){{/optional}}:
{{#description}}
/// {{{.}}}
{{/description}}
{{^-last}}
///
{{/-last}}
{{/allParams}}
Future<{{#returnType}}{{{.}}}?{{/returnType}}{{^returnType}}void{{/returnType}}> {{{nickname}}}({{#allParams}}{{#required}}{{{dataType}}} {{{paramName}}},{{^-last}} {{/-last}}{{/required}}{{/allParams}}{{#hasOptionalParams}}{ {{#allParams}}{{^required}}{{{dataType}}}? {{{paramName}}},{{^-last}} {{/-last}}{{/required}}{{/allParams}} }{{/hasOptionalParams}}) async {
final response = await {{{nickname}}}WithHttpInfo({{#allParams}}{{#required}}{{{paramName}}},{{^-last}} {{/-last}}{{/required}}{{/allParams}}{{#hasOptionalParams}} {{#allParams}}{{^required}}{{{paramName}}}: {{{paramName}}},{{^-last}} {{/-last}}{{/required}}{{/allParams}} {{/hasOptionalParams}});
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
{{#returnType}}
// When a remote server returns no body with a status of 204, we shall not decode it.
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
// FormatException when trying to decode an empty string.
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
{{#native_serialization}}
{{#isArray}}
final responseBody = await _decodeBodyBytes(response);
return (await apiClient.deserializeAsync(responseBody, '{{{returnType}}}') as List)
.cast<{{{returnBaseType}}}>()
.{{#uniqueItems}}toSet(){{/uniqueItems}}{{^uniqueItems}}toList(growable: false){{/uniqueItems}};
{{/isArray}}
{{^isArray}}
{{#isMap}}
return {{{returnType}}}.from(await apiClient.deserializeAsync(await _decodeBodyBytes(response), '{{{returnType}}}'),);
{{/isMap}}
{{^isMap}}
return await apiClient.deserializeAsync(await _decodeBodyBytes(response), '{{{returnType}}}',) as {{{returnType}}};
{{/isMap}}{{/isArray}}{{/native_serialization}}
}
return null;
{{/returnType}}
}
{{/operation}}
}
{{/operations}}
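For orientation, here is a minimal sketch of the Dart code that this (now removed) api.mustache template expands to, for a hypothetical 'GET /widgets/{id}' operation. WidgetsApi, getWidget, WidgetResponseDto and the 'detailed' query parameter are illustrative names, not part of the generator's real output.

// Hypothetical expansion of the template above; class, method and DTO names are invented.
part of openapi.api;

class WidgetsApi {
  WidgetsApi([ApiClient? apiClient]) : apiClient = apiClient ?? defaultApiClient;

  final ApiClient apiClient;

  /// Performs an HTTP 'GET /widgets/{id}' operation and returns the [Response].
  Future<Response> getWidgetWithHttpInfo(String id, { bool? detailed, }) async {
    final path = r'/widgets/{id}'.replaceAll('{id}', id);
    Object? postBody;
    final queryParams = <QueryParam>[];
    final headerParams = <String, String>{};
    final formParams = <String, String>{};
    if (detailed != null) {
      queryParams.addAll(_queryParams('', 'detailed', detailed));
    }
    const contentTypes = <String>[];
    return apiClient.invokeAPI(path, 'GET', queryParams, postBody, headerParams, formParams,
      contentTypes.isEmpty ? null : contentTypes.first,);
  }

  /// Typed wrapper: throws [ApiException] on >= 400 and returns null for an empty body.
  Future<WidgetResponseDto?> getWidget(String id, { bool? detailed, }) async {
    final response = await getWidgetWithHttpInfo(id, detailed: detailed);
    if (response.statusCode >= HttpStatus.badRequest) {
      throw ApiException(response.statusCode, await _decodeBodyBytes(response));
    }
    if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
      return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'WidgetResponseDto',) as WidgetResponseDto;
    }
    return null;
  }
}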

View File

@@ -0,0 +1,43 @@
import { deleteAssets, getAuditFiles, updateAsset, type LoginResponseDto } from '@immich/sdk';
import { asBearerAuth, utils } from 'src/utils';
import { beforeAll, describe, expect, it } from 'vitest';
describe('/audits', () => {
let admin: LoginResponseDto;
beforeAll(async () => {
await utils.resetDatabase();
await utils.resetFilesystem();
admin = await utils.adminSetup();
});
// TODO: Enable these tests again once #7436 is resolved as these were flaky
describe.skip('GET :/file-report', () => {
it('excludes assets without issues from report', async () => {
const [trashedAsset, archivedAsset] = await Promise.all([
utils.createAsset(admin.accessToken),
utils.createAsset(admin.accessToken),
utils.createAsset(admin.accessToken),
]);
await Promise.all([
deleteAssets({ assetBulkDeleteDto: { ids: [trashedAsset.id] } }, { headers: asBearerAuth(admin.accessToken) }),
updateAsset(
{
id: archivedAsset.id,
updateAssetDto: { isArchived: true },
},
{ headers: asBearerAuth(admin.accessToken) },
),
]);
const body = await getAuditFiles({
headers: asBearerAuth(admin.accessToken),
});
expect(body.orphans).toHaveLength(0);
expect(body.extras).toHaveLength(0);
});
});
});

View File

@@ -1,4 +1,4 @@
import { AssetMediaResponseDto, LoginResponseDto, SharedLinkType } from '@immich/sdk';
import { AssetMediaResponseDto, LoginResponseDto, SharedLinkType, TimeBucketSize } from '@immich/sdk';
import { DateTime } from 'luxon';
import { createUserDto } from 'src/fixtures';
import { errorDto } from 'src/responses';
@@ -52,7 +52,7 @@ describe('/timeline', () => {
describe('GET /timeline/buckets', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/timeline/buckets').query({});
const { status, body } = await request(app).get('/timeline/buckets').query({ size: TimeBucketSize.Month });
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
@@ -61,7 +61,7 @@ describe('/timeline', () => {
const { status, body } = await request(app)
.get('/timeline/buckets')
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
.query({});
.query({ size: TimeBucketSize.Month });
expect(status).toBe(200);
expect(body).toEqual(
@@ -78,17 +78,33 @@ describe('/timeline', () => {
assetIds: userAssets.map(({ id }) => id),
});
const { status, body } = await request(app).get('/timeline/buckets').query({ key: sharedLink.key });
const { status, body } = await request(app)
.get('/timeline/buckets')
.query({ key: sharedLink.key, size: TimeBucketSize.Month });
expect(status).toBe(400);
expect(body).toEqual(errorDto.noPermission);
});
it('should get time buckets by day', async () => {
const { status, body } = await request(app)
.get('/timeline/buckets')
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
.query({ size: TimeBucketSize.Day });
expect(status).toBe(200);
expect(body).toEqual([
{ count: 2, timeBucket: '1970-02-11T00:00:00.000Z' },
{ count: 1, timeBucket: '1970-02-10T00:00:00.000Z' },
{ count: 1, timeBucket: '1970-01-01T00:00:00.000Z' },
]);
});
it('should return error if time bucket is requested with partners asset and archived', async () => {
const req1 = await request(app)
.get('/timeline/buckets')
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
.query({ withPartners: true, isArchived: true });
.query({ size: TimeBucketSize.Month, withPartners: true, isArchived: true });
expect(req1.status).toBe(400);
expect(req1.body).toEqual(errorDto.badRequest());
@@ -96,7 +112,7 @@ describe('/timeline', () => {
const req2 = await request(app)
.get('/timeline/buckets')
.set('Authorization', `Bearer ${user.accessToken}`)
.query({ withPartners: true, isArchived: undefined });
.query({ size: TimeBucketSize.Month, withPartners: true, isArchived: undefined });
expect(req2.status).toBe(400);
expect(req2.body).toEqual(errorDto.badRequest());
@@ -106,7 +122,7 @@ describe('/timeline', () => {
const req1 = await request(app)
.get('/timeline/buckets')
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
.query({ withPartners: true, isFavorite: true });
.query({ size: TimeBucketSize.Month, withPartners: true, isFavorite: true });
expect(req1.status).toBe(400);
expect(req1.body).toEqual(errorDto.badRequest());
@@ -114,7 +130,7 @@ describe('/timeline', () => {
const req2 = await request(app)
.get('/timeline/buckets')
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
.query({ withPartners: true, isFavorite: false });
.query({ size: TimeBucketSize.Month, withPartners: true, isFavorite: false });
expect(req2.status).toBe(400);
expect(req2.body).toEqual(errorDto.badRequest());
@@ -124,7 +140,7 @@ describe('/timeline', () => {
const req = await request(app)
.get('/timeline/buckets')
.set('Authorization', `Bearer ${user.accessToken}`)
.query({ withPartners: true, isTrashed: true });
.query({ size: TimeBucketSize.Month, withPartners: true, isTrashed: true });
expect(req.status).toBe(400);
expect(req.body).toEqual(errorDto.badRequest());
@@ -134,6 +150,7 @@ describe('/timeline', () => {
describe('GET /timeline/bucket', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/timeline/bucket').query({
size: TimeBucketSize.Month,
timeBucket: '1900-01-01',
});
@@ -144,27 +161,11 @@ describe('/timeline', () => {
it('should handle 5 digit years', async () => {
const { status, body } = await request(app)
.get('/timeline/bucket')
.query({ timeBucket: '012345-01-01' })
.query({ size: TimeBucketSize.Month, timeBucket: '012345-01-01' })
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual({
city: [],
country: [],
duration: [],
id: [],
isArchived: [],
isFavorite: [],
isImage: [],
isTrashed: [],
livePhotoVideoId: [],
localDateTime: [],
ownerId: [],
projectionType: [],
ratio: [],
status: [],
thumbhash: [],
});
expect(body).toEqual([]);
});
// TODO enable date string validation while still accepting 5 digit years
@@ -172,7 +173,7 @@ describe('/timeline', () => {
// const { status, body } = await request(app)
// .get('/timeline/bucket')
// .set('Authorization', `Bearer ${user.accessToken}`)
// .query({ timeBucket: 'foo' });
// .query({ size: TimeBucketSize.Month, timeBucket: 'foo' });
// expect(status).toBe(400);
// expect(body).toEqual(errorDto.badRequest);
@@ -182,26 +183,10 @@ describe('/timeline', () => {
const { status, body } = await request(app)
.get('/timeline/bucket')
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
.query({ timeBucket: '1970-02-10' });
.query({ size: TimeBucketSize.Month, timeBucket: '1970-02-10' });
expect(status).toBe(200);
expect(body).toEqual({
city: [],
country: [],
duration: [],
id: [],
isArchived: [],
isFavorite: [],
isImage: [],
isTrashed: [],
livePhotoVideoId: [],
localDateTime: [],
ownerId: [],
projectionType: [],
ratio: [],
status: [],
thumbhash: [],
});
expect(body).toEqual([]);
});
});
});

View File

@@ -1260,7 +1260,6 @@
"no_favorites_message": "Add favorites to quickly find your best pictures and videos",
"no_libraries_message": "Create an external library to view your photos and videos",
"no_name": "No Name",
"no_people_found": "No matching people found",
"no_places": "No places",
"no_results": "No results",
"no_results_description": "Try a synonym or more general keyword",
@@ -1573,7 +1572,6 @@
"select_keep_all": "Select keep all",
"select_library_owner": "Select library owner",
"select_new_face": "Select new face",
"select_person_to_tag": "Select a person to tag",
"select_photos": "Select photos",
"select_trash_all": "Select trash all",
"select_user_for_sharing_page_err_album": "Failed to create album",

View File

@@ -100,6 +100,7 @@ Class | Method | HTTP request | Description
*AssetsApi* | [**getAllUserAssetsByDeviceId**](doc//AssetsApi.md#getalluserassetsbydeviceid) | **GET** /assets/device/{deviceId} | getAllUserAssetsByDeviceId
*AssetsApi* | [**getAssetInfo**](doc//AssetsApi.md#getassetinfo) | **GET** /assets/{id} |
*AssetsApi* | [**getAssetStatistics**](doc//AssetsApi.md#getassetstatistics) | **GET** /assets/statistics |
*AssetsApi* | [**getMemoryLane**](doc//AssetsApi.md#getmemorylane) | **GET** /assets/memory-lane |
*AssetsApi* | [**getRandom**](doc//AssetsApi.md#getrandom) | **GET** /assets/random |
*AssetsApi* | [**playAssetVideo**](doc//AssetsApi.md#playassetvideo) | **GET** /assets/{id}/video/playback |
*AssetsApi* | [**replaceAsset**](doc//AssetsApi.md#replaceasset) | **PUT** /assets/{id}/original | replaceAsset
@@ -121,6 +122,9 @@ Class | Method | HTTP request | Description
*FacesApi* | [**deleteFace**](doc//FacesApi.md#deleteface) | **DELETE** /faces/{id} |
*FacesApi* | [**getFaces**](doc//FacesApi.md#getfaces) | **GET** /faces |
*FacesApi* | [**reassignFacesById**](doc//FacesApi.md#reassignfacesbyid) | **PUT** /faces/{id} |
*FileReportsApi* | [**fixAuditFiles**](doc//FileReportsApi.md#fixauditfiles) | **POST** /reports/fix |
*FileReportsApi* | [**getAuditFiles**](doc//FileReportsApi.md#getauditfiles) | **GET** /reports |
*FileReportsApi* | [**getFileChecksums**](doc//FileReportsApi.md#getfilechecksums) | **POST** /reports/checksum |
*JobsApi* | [**createJob**](doc//JobsApi.md#createjob) | **POST** /jobs |
*JobsApi* | [**getAllJobsStatus**](doc//JobsApi.md#getalljobsstatus) | **GET** /jobs |
*JobsApi* | [**sendJobCommand**](doc//JobsApi.md#sendjobcommand) | **PUT** /jobs/{id} |
@@ -328,6 +332,11 @@ Class | Method | HTTP request | Description
- [ExifResponseDto](doc//ExifResponseDto.md)
- [FaceDto](doc//FaceDto.md)
- [FacialRecognitionConfig](doc//FacialRecognitionConfig.md)
- [FileChecksumDto](doc//FileChecksumDto.md)
- [FileChecksumResponseDto](doc//FileChecksumResponseDto.md)
- [FileReportDto](doc//FileReportDto.md)
- [FileReportFixDto](doc//FileReportFixDto.md)
- [FileReportItemDto](doc//FileReportItemDto.md)
- [FoldersResponse](doc//FoldersResponse.md)
- [FoldersUpdate](doc//FoldersUpdate.md)
- [ImageFormat](doc//ImageFormat.md)
@@ -352,6 +361,7 @@ Class | Method | HTTP request | Description
- [MemoriesResponse](doc//MemoriesResponse.md)
- [MemoriesUpdate](doc//MemoriesUpdate.md)
- [MemoryCreateDto](doc//MemoryCreateDto.md)
- [MemoryLaneResponseDto](doc//MemoryLaneResponseDto.md)
- [MemoryResponseDto](doc//MemoryResponseDto.md)
- [MemoryType](doc//MemoryType.md)
- [MemoryUpdateDto](doc//MemoryUpdateDto.md)
@@ -371,6 +381,8 @@ Class | Method | HTTP request | Description
- [OnThisDayDto](doc//OnThisDayDto.md)
- [PartnerDirection](doc//PartnerDirection.md)
- [PartnerResponseDto](doc//PartnerResponseDto.md)
- [PathEntityType](doc//PathEntityType.md)
- [PathType](doc//PathType.md)
- [PeopleResponse](doc//PeopleResponse.md)
- [PeopleResponseDto](doc//PeopleResponseDto.md)
- [PeopleUpdate](doc//PeopleUpdate.md)
@@ -477,8 +489,8 @@ Class | Method | HTTP request | Description
- [TemplateDto](doc//TemplateDto.md)
- [TemplateResponseDto](doc//TemplateResponseDto.md)
- [TestEmailResponseDto](doc//TestEmailResponseDto.md)
- [TimeBucketAssetResponseDto](doc//TimeBucketAssetResponseDto.md)
- [TimeBucketsResponseDto](doc//TimeBucketsResponseDto.md)
- [TimeBucketResponseDto](doc//TimeBucketResponseDto.md)
- [TimeBucketSize](doc//TimeBucketSize.md)
- [ToneMapping](doc//ToneMapping.md)
- [TranscodeHWAccel](doc//TranscodeHWAccel.md)
- [TranscodePolicy](doc//TranscodePolicy.md)

View File

@@ -39,6 +39,7 @@ part 'api/deprecated_api.dart';
part 'api/download_api.dart';
part 'api/duplicates_api.dart';
part 'api/faces_api.dart';
part 'api/file_reports_api.dart';
part 'api/jobs_api.dart';
part 'api/libraries_api.dart';
part 'api/map_api.dart';
@@ -132,6 +133,11 @@ part 'model/email_notifications_update.dart';
part 'model/exif_response_dto.dart';
part 'model/face_dto.dart';
part 'model/facial_recognition_config.dart';
part 'model/file_checksum_dto.dart';
part 'model/file_checksum_response_dto.dart';
part 'model/file_report_dto.dart';
part 'model/file_report_fix_dto.dart';
part 'model/file_report_item_dto.dart';
part 'model/folders_response.dart';
part 'model/folders_update.dart';
part 'model/image_format.dart';
@@ -156,6 +162,7 @@ part 'model/map_reverse_geocode_response_dto.dart';
part 'model/memories_response.dart';
part 'model/memories_update.dart';
part 'model/memory_create_dto.dart';
part 'model/memory_lane_response_dto.dart';
part 'model/memory_response_dto.dart';
part 'model/memory_type.dart';
part 'model/memory_update_dto.dart';
@@ -175,6 +182,8 @@ part 'model/o_auth_token_endpoint_auth_method.dart';
part 'model/on_this_day_dto.dart';
part 'model/partner_direction.dart';
part 'model/partner_response_dto.dart';
part 'model/path_entity_type.dart';
part 'model/path_type.dart';
part 'model/people_response.dart';
part 'model/people_response_dto.dart';
part 'model/people_update.dart';
@@ -281,8 +290,8 @@ part 'model/tags_update.dart';
part 'model/template_dto.dart';
part 'model/template_response_dto.dart';
part 'model/test_email_response_dto.dart';
part 'model/time_bucket_asset_response_dto.dart';
part 'model/time_buckets_response_dto.dart';
part 'model/time_bucket_response_dto.dart';
part 'model/time_bucket_size.dart';
part 'model/tone_mapping.dart';
part 'model/transcode_hw_accel.dart';
part 'model/transcode_policy.dart';

View File

@@ -404,6 +404,63 @@ class AssetsApi {
return null;
}
/// Performs an HTTP 'GET /assets/memory-lane' operation and returns the [Response].
/// Parameters:
///
/// * [int] day (required):
///
/// * [int] month (required):
Future<Response> getMemoryLaneWithHttpInfo(int day, int month,) async {
// ignore: prefer_const_declarations
final apiPath = r'/assets/memory-lane';
// ignore: prefer_final_locals
Object? postBody;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
queryParams.addAll(_queryParams('', 'day', day));
queryParams.addAll(_queryParams('', 'month', month));
const contentTypes = <String>[];
return apiClient.invokeAPI(
apiPath,
'GET',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
/// Parameters:
///
/// * [int] day (required):
///
/// * [int] month (required):
Future<List<MemoryLaneResponseDto>?> getMemoryLane(int day, int month,) async {
final response = await getMemoryLaneWithHttpInfo(day, month,);
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
// When a remote server returns no body with a status of 204, we shall not decode it.
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
// FormatException when trying to decode an empty string.
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
final responseBody = await _decodeBodyBytes(response);
return (await apiClient.deserializeAsync(responseBody, 'List<MemoryLaneResponseDto>') as List)
.cast<MemoryLaneResponseDto>()
.toList(growable: false);
}
return null;
}
/// This property was deprecated in v1.116.0
///
/// Note: This method returns the HTTP [Response].
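A minimal usage sketch of the restored getMemoryLane call above. The package import path and the assumption that defaultApiClient is already configured (base path, authentication) are mine, not part of this diff.

import 'package:openapi/api.dart'; // assumed import path for the generated mobile client

Future<void> printMemoryLane() async {
  // Assumes defaultApiClient has already been given a base path and credentials elsewhere.
  final assetsApi = AssetsApi();

  // day and month are required positional parameters on the generated method.
  final lanes = await assetsApi.getMemoryLane(24, 12);

  for (final lane in lanes ?? const <MemoryLaneResponseDto>[]) {
    // MemoryLaneResponseDto (added later in this diff) carries yearsAgo and an asset list.
    print('${lane.yearsAgo} years ago: ${lane.assets.length} assets');
  }
}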

View File

@@ -0,0 +1,148 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class FileReportsApi {
FileReportsApi([ApiClient? apiClient]) : apiClient = apiClient ?? defaultApiClient;
final ApiClient apiClient;
/// Performs an HTTP 'POST /reports/fix' operation and returns the [Response].
/// Parameters:
///
/// * [FileReportFixDto] fileReportFixDto (required):
Future<Response> fixAuditFilesWithHttpInfo(FileReportFixDto fileReportFixDto,) async {
// ignore: prefer_const_declarations
final apiPath = r'/reports/fix';
// ignore: prefer_final_locals
Object? postBody = fileReportFixDto;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
const contentTypes = <String>['application/json'];
return apiClient.invokeAPI(
apiPath,
'POST',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
/// Parameters:
///
/// * [FileReportFixDto] fileReportFixDto (required):
Future<void> fixAuditFiles(FileReportFixDto fileReportFixDto,) async {
final response = await fixAuditFilesWithHttpInfo(fileReportFixDto,);
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
}
/// Performs an HTTP 'GET /reports' operation and returns the [Response].
Future<Response> getAuditFilesWithHttpInfo() async {
// ignore: prefer_const_declarations
final apiPath = r'/reports';
// ignore: prefer_final_locals
Object? postBody;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
const contentTypes = <String>[];
return apiClient.invokeAPI(
apiPath,
'GET',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
Future<FileReportDto?> getAuditFiles() async {
final response = await getAuditFilesWithHttpInfo();
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
// When a remote server returns no body with a status of 204, we shall not decode it.
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
// FormatException when trying to decode an empty string.
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'FileReportDto',) as FileReportDto;
}
return null;
}
/// Performs an HTTP 'POST /reports/checksum' operation and returns the [Response].
/// Parameters:
///
/// * [FileChecksumDto] fileChecksumDto (required):
Future<Response> getFileChecksumsWithHttpInfo(FileChecksumDto fileChecksumDto,) async {
// ignore: prefer_const_declarations
final apiPath = r'/reports/checksum';
// ignore: prefer_final_locals
Object? postBody = fileChecksumDto;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
const contentTypes = <String>['application/json'];
return apiClient.invokeAPI(
apiPath,
'POST',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
/// Parameters:
///
/// * [FileChecksumDto] fileChecksumDto (required):
Future<List<FileChecksumResponseDto>?> getFileChecksums(FileChecksumDto fileChecksumDto,) async {
final response = await getFileChecksumsWithHttpInfo(fileChecksumDto,);
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
// When a remote server returns no body with a status of 204, we shall not decode it.
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
// FormatException when trying to decode an empty string.
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
final responseBody = await _decodeBodyBytes(response);
return (await apiClient.deserializeAsync(responseBody, 'List<FileChecksumResponseDto>') as List)
.cast<FileChecksumResponseDto>()
.toList(growable: false);
}
return null;
}
}
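A usage sketch tying the three new FileReportsApi endpoints together. The import path, the pre-configured defaultApiClient, and the choice to checksum the reported extras are assumptions for illustration.

import 'package:openapi/api.dart'; // assumed import path for the generated client

Future<void> reviewFileReport() async {
  // Assumes defaultApiClient is already configured with a base path and credentials.
  final reportsApi = FileReportsApi();

  // GET /reports: fetch the file report (extras and orphans lists).
  final report = await reportsApi.getAuditFiles();
  if (report == null) {
    return;
  }
  print('extras: ${report.extras.length}, orphans: ${report.orphans.length}');

  // POST /reports/checksum: request checksums for a list of filenames.
  final checksums = await reportsApi.getFileChecksums(FileChecksumDto(filenames: report.extras));
  print('checksummed ${checksums?.length ?? 0} files');

  // POST /reports/fix: submit report items to be fixed.
  if (report.orphans.isNotEmpty) {
    await reportsApi.fixAuditFiles(FileReportFixDto(items: report.orphans));
  }
}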

View File

@@ -19,6 +19,8 @@ class TimelineApi {
/// Performs an HTTP 'GET /timeline/bucket' operation and returns the [Response].
/// Parameters:
///
/// * [TimeBucketSize] size (required):
///
/// * [String] timeBucket (required):
///
/// * [String] albumId:
@@ -33,10 +35,6 @@ class TimelineApi {
///
/// * [AssetOrder] order:
///
/// * [num] page:
///
/// * [num] pageSize:
///
/// * [String] personId:
///
/// * [String] tagId:
@@ -46,7 +44,7 @@ class TimelineApi {
/// * [bool] withPartners:
///
/// * [bool] withStacked:
Future<Response> getTimeBucketWithHttpInfo(String timeBucket, { String? albumId, bool? isArchived, bool? isFavorite, bool? isTrashed, String? key, AssetOrder? order, num? page, num? pageSize, String? personId, String? tagId, String? userId, bool? withPartners, bool? withStacked, }) async {
Future<Response> getTimeBucketWithHttpInfo(TimeBucketSize size, String timeBucket, { String? albumId, bool? isArchived, bool? isFavorite, bool? isTrashed, String? key, AssetOrder? order, String? personId, String? tagId, String? userId, bool? withPartners, bool? withStacked, }) async {
// ignore: prefer_const_declarations
final apiPath = r'/timeline/bucket';
@@ -75,15 +73,10 @@ class TimelineApi {
if (order != null) {
queryParams.addAll(_queryParams('', 'order', order));
}
if (page != null) {
queryParams.addAll(_queryParams('', 'page', page));
}
if (pageSize != null) {
queryParams.addAll(_queryParams('', 'pageSize', pageSize));
}
if (personId != null) {
queryParams.addAll(_queryParams('', 'personId', personId));
}
queryParams.addAll(_queryParams('', 'size', size));
if (tagId != null) {
queryParams.addAll(_queryParams('', 'tagId', tagId));
}
@@ -114,6 +107,8 @@ class TimelineApi {
/// Parameters:
///
/// * [TimeBucketSize] size (required):
///
/// * [String] timeBucket (required):
///
/// * [String] albumId:
@@ -128,10 +123,6 @@ class TimelineApi {
///
/// * [AssetOrder] order:
///
/// * [num] page:
///
/// * [num] pageSize:
///
/// * [String] personId:
///
/// * [String] tagId:
@@ -141,8 +132,8 @@ class TimelineApi {
/// * [bool] withPartners:
///
/// * [bool] withStacked:
Future<TimeBucketAssetResponseDto?> getTimeBucket(String timeBucket, { String? albumId, bool? isArchived, bool? isFavorite, bool? isTrashed, String? key, AssetOrder? order, num? page, num? pageSize, String? personId, String? tagId, String? userId, bool? withPartners, bool? withStacked, }) async {
final response = await getTimeBucketWithHttpInfo(timeBucket, albumId: albumId, isArchived: isArchived, isFavorite: isFavorite, isTrashed: isTrashed, key: key, order: order, page: page, pageSize: pageSize, personId: personId, tagId: tagId, userId: userId, withPartners: withPartners, withStacked: withStacked, );
Future<List<AssetResponseDto>?> getTimeBucket(TimeBucketSize size, String timeBucket, { String? albumId, bool? isArchived, bool? isFavorite, bool? isTrashed, String? key, AssetOrder? order, String? personId, String? tagId, String? userId, bool? withPartners, bool? withStacked, }) async {
final response = await getTimeBucketWithHttpInfo(size, timeBucket, albumId: albumId, isArchived: isArchived, isFavorite: isFavorite, isTrashed: isTrashed, key: key, order: order, personId: personId, tagId: tagId, userId: userId, withPartners: withPartners, withStacked: withStacked, );
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
@@ -150,8 +141,11 @@ class TimelineApi {
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
// FormatException when trying to decode an empty string.
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'TimeBucketAssetResponseDto',) as TimeBucketAssetResponseDto;
final responseBody = await _decodeBodyBytes(response);
return (await apiClient.deserializeAsync(responseBody, 'List<AssetResponseDto>') as List)
.cast<AssetResponseDto>()
.toList(growable: false);
}
return null;
}
@@ -159,6 +153,8 @@ class TimelineApi {
/// Performs an HTTP 'GET /timeline/buckets' operation and returns the [Response].
/// Parameters:
///
/// * [TimeBucketSize] size (required):
///
/// * [String] albumId:
///
/// * [bool] isArchived:
@@ -180,7 +176,7 @@ class TimelineApi {
/// * [bool] withPartners:
///
/// * [bool] withStacked:
Future<Response> getTimeBucketsWithHttpInfo({ String? albumId, bool? isArchived, bool? isFavorite, bool? isTrashed, String? key, AssetOrder? order, String? personId, String? tagId, String? userId, bool? withPartners, bool? withStacked, }) async {
Future<Response> getTimeBucketsWithHttpInfo(TimeBucketSize size, { String? albumId, bool? isArchived, bool? isFavorite, bool? isTrashed, String? key, AssetOrder? order, String? personId, String? tagId, String? userId, bool? withPartners, bool? withStacked, }) async {
// ignore: prefer_const_declarations
final apiPath = r'/timeline/buckets';
@@ -212,6 +208,7 @@ class TimelineApi {
if (personId != null) {
queryParams.addAll(_queryParams('', 'personId', personId));
}
queryParams.addAll(_queryParams('', 'size', size));
if (tagId != null) {
queryParams.addAll(_queryParams('', 'tagId', tagId));
}
@@ -241,6 +238,8 @@ class TimelineApi {
/// Parameters:
///
/// * [TimeBucketSize] size (required):
///
/// * [String] albumId:
///
/// * [bool] isArchived:
@@ -262,8 +261,8 @@ class TimelineApi {
/// * [bool] withPartners:
///
/// * [bool] withStacked:
Future<List<TimeBucketsResponseDto>?> getTimeBuckets({ String? albumId, bool? isArchived, bool? isFavorite, bool? isTrashed, String? key, AssetOrder? order, String? personId, String? tagId, String? userId, bool? withPartners, bool? withStacked, }) async {
final response = await getTimeBucketsWithHttpInfo( albumId: albumId, isArchived: isArchived, isFavorite: isFavorite, isTrashed: isTrashed, key: key, order: order, personId: personId, tagId: tagId, userId: userId, withPartners: withPartners, withStacked: withStacked, );
Future<List<TimeBucketResponseDto>?> getTimeBuckets(TimeBucketSize size, { String? albumId, bool? isArchived, bool? isFavorite, bool? isTrashed, String? key, AssetOrder? order, String? personId, String? tagId, String? userId, bool? withPartners, bool? withStacked, }) async {
final response = await getTimeBucketsWithHttpInfo(size, albumId: albumId, isArchived: isArchived, isFavorite: isFavorite, isTrashed: isTrashed, key: key, order: order, personId: personId, tagId: tagId, userId: userId, withPartners: withPartners, withStacked: withStacked, );
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
@@ -272,8 +271,8 @@ class TimelineApi {
// FormatException when trying to decode an empty string.
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
final responseBody = await _decodeBodyBytes(response);
return (await apiClient.deserializeAsync(responseBody, 'List<TimeBucketsResponseDto>') as List)
.cast<TimeBucketsResponseDto>()
return (await apiClient.deserializeAsync(responseBody, 'List<TimeBucketResponseDto>') as List)
.cast<TimeBucketResponseDto>()
.toList(growable: false);
}
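A usage sketch of the reverted timeline client: size is now a required positional argument on both calls, page/pageSize are gone, and getTimeBucket returns a plain asset list again. The import path, the TimeBucketSize.MONTH member name, and the count/timeBucket fields on TimeBucketResponseDto are assumptions inferred from the rest of this diff (the e2e test earlier asserts that bucket shape).

import 'package:openapi/api.dart'; // assumed import path for the generated client

Future<void> loadTimeline() async {
  final timelineApi = TimelineApi();

  // size is the new required positional parameter; paging parameters no longer exist.
  final buckets = await timelineApi.getTimeBuckets(TimeBucketSize.MONTH);

  for (final bucket in buckets ?? const <TimeBucketResponseDto>[]) {
    // Fetch the assets of one bucket; the return type is List<AssetResponseDto> again.
    final assets = await timelineApi.getTimeBucket(TimeBucketSize.MONTH, bucket.timeBucket);
    print('${bucket.timeBucket}: ${assets?.length ?? 0} of ${bucket.count} assets loaded');
  }
}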

View File

@@ -320,6 +320,16 @@ class ApiClient {
return FaceDto.fromJson(value);
case 'FacialRecognitionConfig':
return FacialRecognitionConfig.fromJson(value);
case 'FileChecksumDto':
return FileChecksumDto.fromJson(value);
case 'FileChecksumResponseDto':
return FileChecksumResponseDto.fromJson(value);
case 'FileReportDto':
return FileReportDto.fromJson(value);
case 'FileReportFixDto':
return FileReportFixDto.fromJson(value);
case 'FileReportItemDto':
return FileReportItemDto.fromJson(value);
case 'FoldersResponse':
return FoldersResponse.fromJson(value);
case 'FoldersUpdate':
@@ -368,6 +378,8 @@ class ApiClient {
return MemoriesUpdate.fromJson(value);
case 'MemoryCreateDto':
return MemoryCreateDto.fromJson(value);
case 'MemoryLaneResponseDto':
return MemoryLaneResponseDto.fromJson(value);
case 'MemoryResponseDto':
return MemoryResponseDto.fromJson(value);
case 'MemoryType':
@@ -406,6 +418,10 @@ class ApiClient {
return PartnerDirectionTypeTransformer().decode(value);
case 'PartnerResponseDto':
return PartnerResponseDto.fromJson(value);
case 'PathEntityType':
return PathEntityTypeTypeTransformer().decode(value);
case 'PathType':
return PathTypeTypeTransformer().decode(value);
case 'PeopleResponse':
return PeopleResponse.fromJson(value);
case 'PeopleResponseDto':
@@ -618,10 +634,10 @@ class ApiClient {
return TemplateResponseDto.fromJson(value);
case 'TestEmailResponseDto':
return TestEmailResponseDto.fromJson(value);
case 'TimeBucketAssetResponseDto':
return TimeBucketAssetResponseDto.fromJson(value);
case 'TimeBucketsResponseDto':
return TimeBucketsResponseDto.fromJson(value);
case 'TimeBucketResponseDto':
return TimeBucketResponseDto.fromJson(value);
case 'TimeBucketSize':
return TimeBucketSizeTypeTransformer().decode(value);
case 'ToneMapping':
return ToneMappingTypeTransformer().decode(value);
case 'TranscodeHWAccel':

View File

@@ -112,6 +112,12 @@ String parameterToString(dynamic value) {
if (value is PartnerDirection) {
return PartnerDirectionTypeTransformer().encode(value).toString();
}
if (value is PathEntityType) {
return PathEntityTypeTypeTransformer().encode(value).toString();
}
if (value is PathType) {
return PathTypeTypeTransformer().encode(value).toString();
}
if (value is Permission) {
return PermissionTypeTransformer().encode(value).toString();
}
@@ -136,6 +142,9 @@ String parameterToString(dynamic value) {
if (value is SyncRequestType) {
return SyncRequestTypeTypeTransformer().encode(value).toString();
}
if (value is TimeBucketSize) {
return TimeBucketSizeTypeTransformer().encode(value).toString();
}
if (value is ToneMapping) {
return ToneMappingTypeTransformer().encode(value).toString();
}
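As a small illustration of the new branches, passing one of the new enum values through parameterToString should simply return its wire value; the value strings come from the PathType and PathEntityType definitions later in this diff, and the import path is an assumption.

import 'package:openapi/api.dart'; // assumed import path for the generated client

void main() {
  // PathType.sidecar wraps the raw value 'sidecar'; the transformer's encode() returns it
  // and parameterToString passes it through unchanged.
  print(parameterToString(PathType.sidecar));      // sidecar
  print(parameterToString(PathEntityType.person)); // person
}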

View File

@@ -0,0 +1,101 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class FileChecksumDto {
/// Returns a new [FileChecksumDto] instance.
FileChecksumDto({
this.filenames = const [],
});
List<String> filenames;
@override
bool operator ==(Object other) => identical(this, other) || other is FileChecksumDto &&
_deepEquality.equals(other.filenames, filenames);
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(filenames.hashCode);
@override
String toString() => 'FileChecksumDto[filenames=$filenames]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'filenames'] = this.filenames;
return json;
}
/// Returns a new [FileChecksumDto] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static FileChecksumDto? fromJson(dynamic value) {
upgradeDto(value, "FileChecksumDto");
if (value is Map) {
final json = value.cast<String, dynamic>();
return FileChecksumDto(
filenames: json[r'filenames'] is Iterable
? (json[r'filenames'] as Iterable).cast<String>().toList(growable: false)
: const [],
);
}
return null;
}
static List<FileChecksumDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <FileChecksumDto>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = FileChecksumDto.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, FileChecksumDto> mapFromJson(dynamic json) {
final map = <String, FileChecksumDto>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = FileChecksumDto.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of FileChecksumDto-objects as value to a dart map
static Map<String, List<FileChecksumDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<FileChecksumDto>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = FileChecksumDto.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'filenames',
};
}

View File

@@ -0,0 +1,107 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class FileChecksumResponseDto {
/// Returns a new [FileChecksumResponseDto] instance.
FileChecksumResponseDto({
required this.checksum,
required this.filename,
});
String checksum;
String filename;
@override
bool operator ==(Object other) => identical(this, other) || other is FileChecksumResponseDto &&
other.checksum == checksum &&
other.filename == filename;
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(checksum.hashCode) +
(filename.hashCode);
@override
String toString() => 'FileChecksumResponseDto[checksum=$checksum, filename=$filename]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'checksum'] = this.checksum;
json[r'filename'] = this.filename;
return json;
}
/// Returns a new [FileChecksumResponseDto] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static FileChecksumResponseDto? fromJson(dynamic value) {
upgradeDto(value, "FileChecksumResponseDto");
if (value is Map) {
final json = value.cast<String, dynamic>();
return FileChecksumResponseDto(
checksum: mapValueOfType<String>(json, r'checksum')!,
filename: mapValueOfType<String>(json, r'filename')!,
);
}
return null;
}
static List<FileChecksumResponseDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <FileChecksumResponseDto>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = FileChecksumResponseDto.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, FileChecksumResponseDto> mapFromJson(dynamic json) {
final map = <String, FileChecksumResponseDto>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = FileChecksumResponseDto.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of FileChecksumResponseDto-objects as value to a dart map
static Map<String, List<FileChecksumResponseDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<FileChecksumResponseDto>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = FileChecksumResponseDto.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'checksum',
'filename',
};
}

View File

@@ -0,0 +1,109 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class FileReportDto {
/// Returns a new [FileReportDto] instance.
FileReportDto({
this.extras = const [],
this.orphans = const [],
});
List<String> extras;
List<FileReportItemDto> orphans;
@override
bool operator ==(Object other) => identical(this, other) || other is FileReportDto &&
_deepEquality.equals(other.extras, extras) &&
_deepEquality.equals(other.orphans, orphans);
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(extras.hashCode) +
(orphans.hashCode);
@override
String toString() => 'FileReportDto[extras=$extras, orphans=$orphans]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'extras'] = this.extras;
json[r'orphans'] = this.orphans;
return json;
}
/// Returns a new [FileReportDto] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static FileReportDto? fromJson(dynamic value) {
upgradeDto(value, "FileReportDto");
if (value is Map) {
final json = value.cast<String, dynamic>();
return FileReportDto(
extras: json[r'extras'] is Iterable
? (json[r'extras'] as Iterable).cast<String>().toList(growable: false)
: const [],
orphans: FileReportItemDto.listFromJson(json[r'orphans']),
);
}
return null;
}
static List<FileReportDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <FileReportDto>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = FileReportDto.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, FileReportDto> mapFromJson(dynamic json) {
final map = <String, FileReportDto>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = FileReportDto.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of FileReportDto-objects as value to a dart map
static Map<String, List<FileReportDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<FileReportDto>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = FileReportDto.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'extras',
'orphans',
};
}

View File

@@ -0,0 +1,99 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class FileReportFixDto {
/// Returns a new [FileReportFixDto] instance.
FileReportFixDto({
this.items = const [],
});
List<FileReportItemDto> items;
@override
bool operator ==(Object other) => identical(this, other) || other is FileReportFixDto &&
_deepEquality.equals(other.items, items);
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(items.hashCode);
@override
String toString() => 'FileReportFixDto[items=$items]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'items'] = this.items;
return json;
}
/// Returns a new [FileReportFixDto] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static FileReportFixDto? fromJson(dynamic value) {
upgradeDto(value, "FileReportFixDto");
if (value is Map) {
final json = value.cast<String, dynamic>();
return FileReportFixDto(
items: FileReportItemDto.listFromJson(json[r'items']),
);
}
return null;
}
static List<FileReportFixDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <FileReportFixDto>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = FileReportFixDto.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, FileReportFixDto> mapFromJson(dynamic json) {
final map = <String, FileReportFixDto>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = FileReportFixDto.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of FileReportFixDto-objects as value to a dart map
static Map<String, List<FileReportFixDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<FileReportFixDto>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = FileReportFixDto.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'items',
};
}

View File

@@ -0,0 +1,140 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class FileReportItemDto {
/// Returns a new [FileReportItemDto] instance.
FileReportItemDto({
this.checksum,
required this.entityId,
required this.entityType,
required this.pathType,
required this.pathValue,
});
///
/// Please note: This property should have been non-nullable! Since the specification file
/// does not include a default value (using the "default:" property), however, the generated
/// source code must fall back to having a nullable type.
/// Consider adding a "default:" property in the specification file to hide this note.
///
String? checksum;
String entityId;
PathEntityType entityType;
PathType pathType;
String pathValue;
@override
bool operator ==(Object other) => identical(this, other) || other is FileReportItemDto &&
other.checksum == checksum &&
other.entityId == entityId &&
other.entityType == entityType &&
other.pathType == pathType &&
other.pathValue == pathValue;
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(checksum == null ? 0 : checksum!.hashCode) +
(entityId.hashCode) +
(entityType.hashCode) +
(pathType.hashCode) +
(pathValue.hashCode);
@override
String toString() => 'FileReportItemDto[checksum=$checksum, entityId=$entityId, entityType=$entityType, pathType=$pathType, pathValue=$pathValue]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
if (this.checksum != null) {
json[r'checksum'] = this.checksum;
} else {
// json[r'checksum'] = null;
}
json[r'entityId'] = this.entityId;
json[r'entityType'] = this.entityType;
json[r'pathType'] = this.pathType;
json[r'pathValue'] = this.pathValue;
return json;
}
/// Returns a new [FileReportItemDto] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static FileReportItemDto? fromJson(dynamic value) {
upgradeDto(value, "FileReportItemDto");
if (value is Map) {
final json = value.cast<String, dynamic>();
return FileReportItemDto(
checksum: mapValueOfType<String>(json, r'checksum'),
entityId: mapValueOfType<String>(json, r'entityId')!,
entityType: PathEntityType.fromJson(json[r'entityType'])!,
pathType: PathType.fromJson(json[r'pathType'])!,
pathValue: mapValueOfType<String>(json, r'pathValue')!,
);
}
return null;
}
static List<FileReportItemDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <FileReportItemDto>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = FileReportItemDto.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, FileReportItemDto> mapFromJson(dynamic json) {
final map = <String, FileReportItemDto>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = FileReportItemDto.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of FileReportItemDto-objects as value to a dart map
static Map<String, List<FileReportItemDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<FileReportItemDto>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = FileReportItemDto.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'entityId',
'entityType',
'pathType',
'pathValue',
};
}
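A small sketch of round-tripping this generated model from a JSON map; the id and path values are invented, and the 'asset'/'sidecar' wire values come from the enum definitions below.

import 'package:openapi/api.dart'; // assumed import path for the generated client

void main() {
  // fromJson returns null if the input is not a Map; checksum is the only optional key.
  final item = FileReportItemDto.fromJson({
    'entityId': '00000000-0000-0000-0000-000000000000', // illustrative id
    'entityType': 'asset',
    'pathType': 'sidecar',
    'pathValue': 'upload/library/example.xmp',          // illustrative path
  });

  print(item);           // FileReportItemDto[checksum=null, entityId=..., ...]
  print(item?.toJson()); // enum fields stay typed; jsonEncode would call their toJson()
}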

View File

@@ -0,0 +1,107 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class MemoryLaneResponseDto {
/// Returns a new [MemoryLaneResponseDto] instance.
MemoryLaneResponseDto({
this.assets = const [],
required this.yearsAgo,
});
List<AssetResponseDto> assets;
int yearsAgo;
@override
bool operator ==(Object other) => identical(this, other) || other is MemoryLaneResponseDto &&
_deepEquality.equals(other.assets, assets) &&
other.yearsAgo == yearsAgo;
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(assets.hashCode) +
(yearsAgo.hashCode);
@override
String toString() => 'MemoryLaneResponseDto[assets=$assets, yearsAgo=$yearsAgo]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'assets'] = this.assets;
json[r'yearsAgo'] = this.yearsAgo;
return json;
}
/// Returns a new [MemoryLaneResponseDto] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static MemoryLaneResponseDto? fromJson(dynamic value) {
upgradeDto(value, "MemoryLaneResponseDto");
if (value is Map) {
final json = value.cast<String, dynamic>();
return MemoryLaneResponseDto(
assets: AssetResponseDto.listFromJson(json[r'assets']),
yearsAgo: mapValueOfType<int>(json, r'yearsAgo')!,
);
}
return null;
}
static List<MemoryLaneResponseDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <MemoryLaneResponseDto>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = MemoryLaneResponseDto.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, MemoryLaneResponseDto> mapFromJson(dynamic json) {
final map = <String, MemoryLaneResponseDto>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = MemoryLaneResponseDto.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of MemoryLaneResponseDto-objects as value to a dart map
static Map<String, List<MemoryLaneResponseDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<MemoryLaneResponseDto>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = MemoryLaneResponseDto.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'assets',
'yearsAgo',
};
}

View File

@@ -0,0 +1,88 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class PathEntityType {
/// Instantiate a new enum with the provided [value].
const PathEntityType._(this.value);
/// The underlying value of this enum member.
final String value;
@override
String toString() => value;
String toJson() => value;
static const asset = PathEntityType._(r'asset');
static const person = PathEntityType._(r'person');
static const user = PathEntityType._(r'user');
/// List of all possible values in this [enum][PathEntityType].
static const values = <PathEntityType>[
asset,
person,
user,
];
static PathEntityType? fromJson(dynamic value) => PathEntityTypeTypeTransformer().decode(value);
static List<PathEntityType> listFromJson(dynamic json, {bool growable = false,}) {
final result = <PathEntityType>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = PathEntityType.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
}
/// Transformation class that can [encode] an instance of [PathEntityType] to String,
/// and [decode] dynamic data back to [PathEntityType].
class PathEntityTypeTypeTransformer {
factory PathEntityTypeTypeTransformer() => _instance ??= const PathEntityTypeTypeTransformer._();
const PathEntityTypeTypeTransformer._();
String encode(PathEntityType data) => data.value;
/// Decodes a [dynamic value][data] to a PathEntityType.
///
/// If [allowNull] is true and the [dynamic value][data] cannot be decoded successfully,
/// then null is returned. However, if [allowNull] is false and the [dynamic value][data]
/// cannot be decoded successfully, then an [UnimplementedError] is thrown.
///
/// The [allowNull] is very handy when an API changes and a new enum value is added or removed,
/// and users are still using an old app with the old code.
PathEntityType? decode(dynamic data, {bool allowNull = true}) {
if (data != null) {
switch (data) {
case r'asset': return PathEntityType.asset;
case r'person': return PathEntityType.person;
case r'user': return PathEntityType.user;
default:
if (!allowNull) {
throw ArgumentError('Unknown enum value to decode: $data');
}
}
}
return null;
}
/// Singleton [PathEntityTypeTypeTransformer] instance.
static PathEntityTypeTypeTransformer? _instance;
}

mobile/openapi/lib/model/path_type.dart (generated, new file, 103 lines)
View File

@@ -0,0 +1,103 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class PathType {
/// Instantiate a new enum with the provided [value].
const PathType._(this.value);
/// The underlying value of this enum member.
final String value;
@override
String toString() => value;
String toJson() => value;
static const original = PathType._(r'original');
static const fullsize = PathType._(r'fullsize');
static const preview = PathType._(r'preview');
static const thumbnail = PathType._(r'thumbnail');
static const encodedVideo = PathType._(r'encoded_video');
static const sidecar = PathType._(r'sidecar');
static const face = PathType._(r'face');
static const profile = PathType._(r'profile');
/// List of all possible values in this [enum][PathType].
static const values = <PathType>[
original,
fullsize,
preview,
thumbnail,
encodedVideo,
sidecar,
face,
profile,
];
static PathType? fromJson(dynamic value) => PathTypeTypeTransformer().decode(value);
static List<PathType> listFromJson(dynamic json, {bool growable = false,}) {
final result = <PathType>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = PathType.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
}
/// Transformation class that can [encode] an instance of [PathType] to String,
/// and [decode] dynamic data back to [PathType].
class PathTypeTypeTransformer {
factory PathTypeTypeTransformer() => _instance ??= const PathTypeTypeTransformer._();
const PathTypeTypeTransformer._();
String encode(PathType data) => data.value;
/// Decodes a [dynamic value][data] to a PathType.
///
/// If [allowNull] is true and the [dynamic value][data] cannot be decoded successfully,
/// then null is returned. However, if [allowNull] is false and the [dynamic value][data]
/// cannot be decoded successfully, then an [UnimplementedError] is thrown.
///
/// The [allowNull] is very handy when an API changes and a new enum value is added or removed,
/// and users are still using an old app with the old code.
PathType? decode(dynamic data, {bool allowNull = true}) {
if (data != null) {
switch (data) {
case r'original': return PathType.original;
case r'fullsize': return PathType.fullsize;
case r'preview': return PathType.preview;
case r'thumbnail': return PathType.thumbnail;
case r'encoded_video': return PathType.encodedVideo;
case r'sidecar': return PathType.sidecar;
case r'face': return PathType.face;
case r'profile': return PathType.profile;
default:
if (!allowNull) {
throw ArgumentError('Unknown enum value to decode: $data');
}
}
}
return null;
}
/// Singleton [PathTypeTypeTransformer] instance.
static PathTypeTypeTransformer? _instance;
}

View File

@@ -1,243 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class TimeBucketAssetResponseDto {
/// Returns a new [TimeBucketAssetResponseDto] instance.
TimeBucketAssetResponseDto({
this.city = const [],
this.country = const [],
this.duration = const [],
this.id = const [],
this.isArchived = const [],
this.isFavorite = const [],
this.isImage = const [],
this.isTrashed = const [],
this.livePhotoVideoId = const [],
this.localDateTime = const [],
this.ownerId = const [],
this.projectionType = const [],
this.ratio = const [],
this.stack = const [],
this.thumbhash = const [],
});
List<String?> city;
List<String?> country;
List<String?> duration;
List<String> id;
List<num> isArchived;
List<num> isFavorite;
List<num> isImage;
List<num> isTrashed;
List<String?> livePhotoVideoId;
List<String> localDateTime;
List<String> ownerId;
List<String?> projectionType;
List<num> ratio;
/// (stack ID, stack asset count) tuple
List<List<String>?> stack;
List<String?> thumbhash;
@override
bool operator ==(Object other) => identical(this, other) || other is TimeBucketAssetResponseDto &&
_deepEquality.equals(other.city, city) &&
_deepEquality.equals(other.country, country) &&
_deepEquality.equals(other.duration, duration) &&
_deepEquality.equals(other.id, id) &&
_deepEquality.equals(other.isArchived, isArchived) &&
_deepEquality.equals(other.isFavorite, isFavorite) &&
_deepEquality.equals(other.isImage, isImage) &&
_deepEquality.equals(other.isTrashed, isTrashed) &&
_deepEquality.equals(other.livePhotoVideoId, livePhotoVideoId) &&
_deepEquality.equals(other.localDateTime, localDateTime) &&
_deepEquality.equals(other.ownerId, ownerId) &&
_deepEquality.equals(other.projectionType, projectionType) &&
_deepEquality.equals(other.ratio, ratio) &&
_deepEquality.equals(other.stack, stack) &&
_deepEquality.equals(other.thumbhash, thumbhash);
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(city.hashCode) +
(country.hashCode) +
(duration.hashCode) +
(id.hashCode) +
(isArchived.hashCode) +
(isFavorite.hashCode) +
(isImage.hashCode) +
(isTrashed.hashCode) +
(livePhotoVideoId.hashCode) +
(localDateTime.hashCode) +
(ownerId.hashCode) +
(projectionType.hashCode) +
(ratio.hashCode) +
(stack.hashCode) +
(thumbhash.hashCode);
@override
String toString() => 'TimeBucketAssetResponseDto[city=$city, country=$country, duration=$duration, id=$id, isArchived=$isArchived, isFavorite=$isFavorite, isImage=$isImage, isTrashed=$isTrashed, livePhotoVideoId=$livePhotoVideoId, localDateTime=$localDateTime, ownerId=$ownerId, projectionType=$projectionType, ratio=$ratio, stack=$stack, thumbhash=$thumbhash]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'city'] = this.city;
json[r'country'] = this.country;
json[r'duration'] = this.duration;
json[r'id'] = this.id;
json[r'isArchived'] = this.isArchived;
json[r'isFavorite'] = this.isFavorite;
json[r'isImage'] = this.isImage;
json[r'isTrashed'] = this.isTrashed;
json[r'livePhotoVideoId'] = this.livePhotoVideoId;
json[r'localDateTime'] = this.localDateTime;
json[r'ownerId'] = this.ownerId;
json[r'projectionType'] = this.projectionType;
json[r'ratio'] = this.ratio;
json[r'stack'] = this.stack;
json[r'thumbhash'] = this.thumbhash;
return json;
}
/// Returns a new [TimeBucketAssetResponseDto] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static TimeBucketAssetResponseDto? fromJson(dynamic value) {
upgradeDto(value, "TimeBucketAssetResponseDto");
if (value is Map) {
final json = value.cast<String, dynamic>();
return TimeBucketAssetResponseDto(
city: json[r'city'] is Iterable
? (json[r'city'] as Iterable).cast<String>().toList(growable: false)
: const [],
country: json[r'country'] is Iterable
? (json[r'country'] as Iterable).cast<String>().toList(growable: false)
: const [],
duration: json[r'duration'] is Iterable
? (json[r'duration'] as Iterable).cast<String>().toList(growable: false)
: const [],
id: json[r'id'] is Iterable
? (json[r'id'] as Iterable).cast<String>().toList(growable: false)
: const [],
isArchived: json[r'isArchived'] is Iterable
? (json[r'isArchived'] as Iterable).cast<num>().toList(growable: false)
: const [],
isFavorite: json[r'isFavorite'] is Iterable
? (json[r'isFavorite'] as Iterable).cast<num>().toList(growable: false)
: const [],
isImage: json[r'isImage'] is Iterable
? (json[r'isImage'] as Iterable).cast<num>().toList(growable: false)
: const [],
isTrashed: json[r'isTrashed'] is Iterable
? (json[r'isTrashed'] as Iterable).cast<num>().toList(growable: false)
: const [],
livePhotoVideoId: json[r'livePhotoVideoId'] is Iterable
? (json[r'livePhotoVideoId'] as Iterable).cast<String>().toList(growable: false)
: const [],
localDateTime: json[r'localDateTime'] is Iterable
? (json[r'localDateTime'] as Iterable).cast<String>().toList(growable: false)
: const [],
ownerId: json[r'ownerId'] is Iterable
? (json[r'ownerId'] as Iterable).cast<String>().toList(growable: false)
: const [],
projectionType: json[r'projectionType'] is Iterable
? (json[r'projectionType'] as Iterable).cast<String>().toList(growable: false)
: const [],
ratio: json[r'ratio'] is Iterable
? (json[r'ratio'] as Iterable).cast<num>().toList(growable: false)
: const [],
stack: json[r'stack'] is List
? (json[r'stack'] as List).map((e) =>
e == null ? null : (e as List).cast<String>()
).toList()
: const [],
thumbhash: json[r'thumbhash'] is Iterable
? (json[r'thumbhash'] as Iterable).cast<String>().toList(growable: false)
: const [],
);
}
return null;
}
static List<TimeBucketAssetResponseDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <TimeBucketAssetResponseDto>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = TimeBucketAssetResponseDto.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, TimeBucketAssetResponseDto> mapFromJson(dynamic json) {
final map = <String, TimeBucketAssetResponseDto>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = TimeBucketAssetResponseDto.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of TimeBucketAssetResponseDto-objects as value to a dart map
static Map<String, List<TimeBucketAssetResponseDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<TimeBucketAssetResponseDto>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = TimeBucketAssetResponseDto.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'city',
'country',
'duration',
'id',
'isArchived',
'isFavorite',
'isImage',
'isTrashed',
'livePhotoVideoId',
'localDateTime',
'ownerId',
'projectionType',
'ratio',
'thumbhash',
};
}

View File

@@ -10,9 +10,9 @@
part of openapi.api;
class TimeBucketsResponseDto {
/// Returns a new [TimeBucketsResponseDto] instance.
TimeBucketsResponseDto({
class TimeBucketResponseDto {
/// Returns a new [TimeBucketResponseDto] instance.
TimeBucketResponseDto({
required this.count,
required this.timeBucket,
});
@@ -22,7 +22,7 @@ class TimeBucketsResponseDto {
String timeBucket;
@override
bool operator ==(Object other) => identical(this, other) || other is TimeBucketsResponseDto &&
bool operator ==(Object other) => identical(this, other) || other is TimeBucketResponseDto &&
other.count == count &&
other.timeBucket == timeBucket;
@@ -33,7 +33,7 @@ class TimeBucketsResponseDto {
(timeBucket.hashCode);
@override
String toString() => 'TimeBucketsResponseDto[count=$count, timeBucket=$timeBucket]';
String toString() => 'TimeBucketResponseDto[count=$count, timeBucket=$timeBucket]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
@@ -42,15 +42,15 @@ class TimeBucketsResponseDto {
return json;
}
/// Returns a new [TimeBucketsResponseDto] instance and imports its values from
/// Returns a new [TimeBucketResponseDto] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static TimeBucketsResponseDto? fromJson(dynamic value) {
upgradeDto(value, "TimeBucketsResponseDto");
static TimeBucketResponseDto? fromJson(dynamic value) {
upgradeDto(value, "TimeBucketResponseDto");
if (value is Map) {
final json = value.cast<String, dynamic>();
return TimeBucketsResponseDto(
return TimeBucketResponseDto(
count: mapValueOfType<int>(json, r'count')!,
timeBucket: mapValueOfType<String>(json, r'timeBucket')!,
);
@@ -58,11 +58,11 @@ class TimeBucketsResponseDto {
return null;
}
static List<TimeBucketsResponseDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <TimeBucketsResponseDto>[];
static List<TimeBucketResponseDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <TimeBucketResponseDto>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = TimeBucketsResponseDto.fromJson(row);
final value = TimeBucketResponseDto.fromJson(row);
if (value != null) {
result.add(value);
}
@@ -71,12 +71,12 @@ class TimeBucketsResponseDto {
return result.toList(growable: growable);
}
static Map<String, TimeBucketsResponseDto> mapFromJson(dynamic json) {
final map = <String, TimeBucketsResponseDto>{};
static Map<String, TimeBucketResponseDto> mapFromJson(dynamic json) {
final map = <String, TimeBucketResponseDto>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = TimeBucketsResponseDto.fromJson(entry.value);
final value = TimeBucketResponseDto.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
@@ -85,14 +85,14 @@ class TimeBucketsResponseDto {
return map;
}
// maps a json object with a list of TimeBucketsResponseDto-objects as value to a dart map
static Map<String, List<TimeBucketsResponseDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<TimeBucketsResponseDto>>{};
// maps a json object with a list of TimeBucketResponseDto-objects as value to a dart map
static Map<String, List<TimeBucketResponseDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<TimeBucketResponseDto>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = TimeBucketsResponseDto.listFromJson(entry.value, growable: growable,);
map[entry.key] = TimeBucketResponseDto.listFromJson(entry.value, growable: growable,);
}
}
return map;

View File

@@ -0,0 +1,85 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class TimeBucketSize {
/// Instantiate a new enum with the provided [value].
const TimeBucketSize._(this.value);
/// The underlying value of this enum member.
final String value;
@override
String toString() => value;
String toJson() => value;
static const DAY = TimeBucketSize._(r'DAY');
static const MONTH = TimeBucketSize._(r'MONTH');
/// List of all possible values in this [enum][TimeBucketSize].
static const values = <TimeBucketSize>[
DAY,
MONTH,
];
static TimeBucketSize? fromJson(dynamic value) => TimeBucketSizeTypeTransformer().decode(value);
static List<TimeBucketSize> listFromJson(dynamic json, {bool growable = false,}) {
final result = <TimeBucketSize>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = TimeBucketSize.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
}
/// Transformation class that can [encode] an instance of [TimeBucketSize] to String,
/// and [decode] dynamic data back to [TimeBucketSize].
class TimeBucketSizeTypeTransformer {
factory TimeBucketSizeTypeTransformer() => _instance ??= const TimeBucketSizeTypeTransformer._();
const TimeBucketSizeTypeTransformer._();
String encode(TimeBucketSize data) => data.value;
/// Decodes a [dynamic value][data] to a TimeBucketSize.
///
/// If [allowNull] is true and the [dynamic value][data] cannot be decoded successfully,
/// then null is returned. However, if [allowNull] is false and the [dynamic value][data]
/// cannot be decoded successfully, then an [UnimplementedError] is thrown.
///
/// The [allowNull] is very handy when an API changes and a new enum value is added or removed,
/// and users are still using an old app with the old code.
TimeBucketSize? decode(dynamic data, {bool allowNull = true}) {
if (data != null) {
switch (data) {
case r'DAY': return TimeBucketSize.DAY;
case r'MONTH': return TimeBucketSize.MONTH;
default:
if (!allowNull) {
throw ArgumentError('Unknown enum value to decode: $data');
}
}
}
return null;
}
/// Singleton [TimeBucketSizeTypeTransformer] instance.
static TimeBucketSizeTypeTransformer? _instance;
}

View File

@@ -1,5 +1,5 @@
#!/usr/bin/env bash
OPENAPI_GENERATOR_VERSION=v7.12.0
OPENAPI_GENERATOR_VERSION=v7.8.0
# usage: ./bin/generate-open-api.sh
@@ -8,7 +8,6 @@ function dart {
cd ./templates/mobile/serialization/native
wget -O native_class.mustache https://raw.githubusercontent.com/OpenAPITools/openapi-generator/$OPENAPI_GENERATOR_VERSION/modules/openapi-generator/src/main/resources/dart2/serialization/native/native_class.mustache
patch --no-backup-if-mismatch -u native_class.mustache <native_class.mustache.patch
patch --no-backup-if-mismatch -u native_class.mustache <native_class_nullable_items_in_arrays.patch
cd ../../
wget -O api.mustache https://raw.githubusercontent.com/OpenAPITools/openapi-generator/$OPENAPI_GENERATOR_VERSION/modules/openapi-generator/src/main/resources/dart2/api.mustache

View File

@@ -1,301 +0,0 @@
class {{{classname}}} {
{{>dart_constructor}}
{{#vars}}
{{#description}}
/// {{{.}}}
{{/description}}
{{^isEnum}}
{{#minimum}}
{{#description}}
///
{{/description}}
/// Minimum value: {{{.}}}
{{/minimum}}
{{#maximum}}
{{#description}}
{{^minimum}}
///
{{/minimum}}
{{/description}}
/// Maximum value: {{{.}}}
{{/maximum}}
{{^isNullable}}
{{^required}}
{{^defaultValue}}
///
/// Please note: This property should have been non-nullable! Since the specification file
/// does not include a default value (using the "default:" property), however, the generated
/// source code must fall back to having a nullable type.
/// Consider adding a "default:" property in the specification file to hide this note.
///
{{/defaultValue}}
{{/required}}
{{/isNullable}}
{{/isEnum}}
{{{datatypeWithEnum}}}{{#isNullable}}?{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}?{{/defaultValue}}{{/required}}{{/isNullable}} {{{name}}};
{{/vars}}
@override
bool operator ==(Object other) => identical(this, other) || other is {{{classname}}} &&
{{#vars}}
{{#isMap}}_deepEquality.equals(other.{{{name}}}, {{{name}}}){{/isMap}}{{^isMap}}{{#isArray}}_deepEquality.equals(other.{{{name}}}, {{{name}}}){{/isArray}}{{^isArray}}other.{{{name}}} == {{{name}}}{{/isArray}}{{/isMap}}{{^-last}} &&{{/-last}}{{#-last}};{{/-last}}
{{/vars}}
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
{{#vars}}
({{#isNullable}}{{{name}}} == null ? 0 : {{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}{{{name}}} == null ? 0 : {{/defaultValue}}{{/required}}{{/isNullable}}{{{name}}}{{#isNullable}}!{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}!{{/defaultValue}}{{/required}}{{/isNullable}}.hashCode){{^-last}} +{{/-last}}{{#-last}};{{/-last}}
{{/vars}}
@override
String toString() => '{{{classname}}}[{{#vars}}{{{name}}}=${{{name}}}{{^-last}}, {{/-last}}{{/vars}}]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
{{#vars}}
{{#isNullable}}
if (this.{{{name}}} != null) {
{{/isNullable}}
{{^isNullable}}
{{^required}}
{{^defaultValue}}
if (this.{{{name}}} != null) {
{{/defaultValue}}
{{/required}}
{{/isNullable}}
{{#isDateTime}}
{{#pattern}}
json[r'{{{baseName}}}'] = _isEpochMarker(r'{{{pattern}}}')
? this.{{{name}}}{{#isNullable}}!{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}!{{/defaultValue}}{{/required}}{{/isNullable}}.millisecondsSinceEpoch
: this.{{{name}}}{{#isNullable}}!{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}!{{/defaultValue}}{{/required}}{{/isNullable}}.toUtc().toIso8601String();
{{/pattern}}
{{^pattern}}
json[r'{{{baseName}}}'] = this.{{{name}}}{{#isNullable}}!{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}!{{/defaultValue}}{{/required}}{{/isNullable}}.toUtc().toIso8601String();
{{/pattern}}
{{/isDateTime}}
{{#isDate}}
{{#pattern}}
json[r'{{{baseName}}}'] = _isEpochMarker(r'{{{pattern}}}')
? this.{{{name}}}{{#isNullable}}!{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}!{{/defaultValue}}{{/required}}{{/isNullable}}.millisecondsSinceEpoch
: _dateFormatter.format(this.{{{name}}}{{#isNullable}}!{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}!{{/defaultValue}}{{/required}}{{/isNullable}}.toUtc());
{{/pattern}}
{{^pattern}}
json[r'{{{baseName}}}'] = _dateFormatter.format(this.{{{name}}}{{#isNullable}}!{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}!{{/defaultValue}}{{/required}}{{/isNullable}}.toUtc());
{{/pattern}}
{{/isDate}}
{{^isDateTime}}
{{^isDate}}
json[r'{{{baseName}}}'] = this.{{{name}}}{{#isArray}}{{#uniqueItems}}{{#isNullable}}!{{/isNullable}}.toList(growable: false){{/uniqueItems}}{{/isArray}};
{{/isDate}}
{{/isDateTime}}
{{#isNullable}}
} else {
json[r'{{{baseName}}}'] = null;
}
{{/isNullable}}
{{^isNullable}}
{{^required}}
{{^defaultValue}}
} else {
json[r'{{{baseName}}}'] = null;
}
{{/defaultValue}}
{{/required}}
{{/isNullable}}
{{/vars}}
return json;
}
/// Returns a new [{{{classname}}}] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static {{{classname}}}? fromJson(dynamic value) {
if (value is Map) {
final json = value.cast<String, dynamic>();
// Ensure that the map contains the required keys.
// Note 1: the values aren't checked for validity beyond being non-null.
// Note 2: this code is stripped in release mode!
assert(() {
requiredKeys.forEach((key) {
assert(json.containsKey(key), 'Required key "{{{classname}}}[$key]" is missing from JSON.');
assert(json[key] != null, 'Required key "{{{classname}}}[$key]" has a null value in JSON.');
});
return true;
}());
return {{{classname}}}(
{{#vars}}
{{#isDateTime}}
{{{name}}}: mapDateTime(json, r'{{{baseName}}}', r'{{{pattern}}}'){{#required}}{{^isNullable}}!{{/isNullable}}{{/required}}{{^required}}{{#defaultValue}} ?? {{{.}}}{{/defaultValue}}{{/required}},
{{/isDateTime}}
{{#isDate}}
{{{name}}}: mapDateTime(json, r'{{{baseName}}}', r'{{{pattern}}}'){{#required}}{{^isNullable}}!{{/isNullable}}{{/required}}{{^required}}{{#defaultValue}} ?? {{{.}}}{{/defaultValue}}{{/required}},
{{/isDate}}
{{^isDateTime}}
{{^isDate}}
{{#complexType}}
{{#isArray}}
{{#items.isArray}}
{{{name}}}: json[r'{{{baseName}}}'] is List
? (json[r'{{{baseName}}}'] as List).map((e) =>
{{#items.complexType}}
{{items.complexType}}.listFromJson(json[r'{{{baseName}}}']){{#uniqueItems}}.toSet(){{/uniqueItems}}
{{/items.complexType}}
{{^items.complexType}}
e == null ? {{#items.isNullable}}null{{/items.isNullable}}{{^items.isNullable}}const <{{items.items.dataType}}>[]{{/items.isNullable}} : (e as List).cast<{{items.items.dataType}}>()
{{/items.complexType}}
).toList()
: {{#isNullable}}null{{/isNullable}}{{^isNullable}}const []{{/isNullable}},
{{/items.isArray}}
{{^items.isArray}}
{{{name}}}: {{{complexType}}}.listFromJson(json[r'{{{baseName}}}']){{#uniqueItems}}.toSet(){{/uniqueItems}},
{{/items.isArray}}
{{/isArray}}
{{^isArray}}
{{#isMap}}
{{#items.isArray}}
{{{name}}}: json[r'{{{baseName}}}'] == null
? {{#defaultValue}}{{{.}}}{{/defaultValue}}{{^defaultValue}}null{{/defaultValue}}
{{#items.complexType}}
: {{items.complexType}}.mapListFromJson(json[r'{{{baseName}}}']),
{{/items.complexType}}
{{^items.complexType}}
: mapCastOfType<String, List>(json, r'{{{baseName}}}'),
{{/items.complexType}}
{{/items.isArray}}
{{^items.isArray}}
{{#items.isMap}}
{{#items.complexType}}
{{{name}}}: {{items.complexType}}.mapFromJson(json[r'{{{baseName}}}']),
{{/items.complexType}}
{{^items.complexType}}
{{{name}}}: mapCastOfType<String, dynamic>(json, r'{{{baseName}}}'){{#required}}{{^isNullable}}!{{/isNullable}}{{/required}}{{^required}}{{#defaultValue}} ?? {{{.}}}{{/defaultValue}}{{/required}},
{{/items.complexType}}
{{/items.isMap}}
{{^items.isMap}}
{{#items.complexType}}
{{{name}}}: {{{items.complexType}}}.mapFromJson(json[r'{{{baseName}}}']),
{{/items.complexType}}
{{^items.complexType}}
{{{name}}}: mapCastOfType<String, {{items.dataType}}>(json, r'{{{baseName}}}'){{#required}}{{^isNullable}}!{{/isNullable}}{{/required}}{{^required}}{{#defaultValue}} ?? {{{.}}}{{/defaultValue}}{{/required}},
{{/items.complexType}}
{{/items.isMap}}
{{/items.isArray}}
{{/isMap}}
{{^isMap}}
{{#isBinary}}
{{{name}}}: null, // No support for decoding binary content from JSON
{{/isBinary}}
{{^isBinary}}
{{{name}}}: {{{complexType}}}.fromJson(json[r'{{{baseName}}}']){{#required}}{{^isNullable}}!{{/isNullable}}{{/required}}{{^required}}{{#defaultValue}} ?? {{{.}}}{{/defaultValue}}{{/required}},
{{/isBinary}}
{{/isMap}}
{{/isArray}}
{{/complexType}}
{{^complexType}}
{{#isArray}}
{{#isEnum}}
{{{name}}}: {{{items.datatypeWithEnum}}}.listFromJson(json[r'{{{baseName}}}']){{#uniqueItems}}.toSet(){{/uniqueItems}},
{{/isEnum}}
{{^isEnum}}
{{{name}}}: json[r'{{{baseName}}}'] is Iterable
? (json[r'{{{baseName}}}'] as Iterable).cast<{{{items.datatype}}}>().{{#uniqueItems}}toSet(){{/uniqueItems}}{{^uniqueItems}}toList(growable: false){{/uniqueItems}}
: {{#defaultValue}}{{{.}}}{{/defaultValue}}{{^defaultValue}}null{{/defaultValue}},
{{/isEnum}}
{{/isArray}}
{{^isArray}}
{{#isMap}}
{{{name}}}: mapCastOfType<String, {{{items.datatype}}}>(json, r'{{{baseName}}}'){{#required}}{{^isNullable}}!{{/isNullable}}{{/required}}{{^required}}{{#defaultValue}} ?? {{{.}}}{{/defaultValue}}{{/required}},
{{/isMap}}
{{^isMap}}
{{#isNumber}}
{{{name}}}: {{#isNullable}}json[r'{{{baseName}}}'] == null
? {{#defaultValue}}{{{.}}}{{/defaultValue}}{{^defaultValue}}null{{/defaultValue}}
: {{/isNullable}}{{{datatypeWithEnum}}}.parse('${json[r'{{{baseName}}}']}'),
{{/isNumber}}
{{^isNumber}}
{{^isEnum}}
{{{name}}}: mapValueOfType<{{{datatypeWithEnum}}}>(json, r'{{{baseName}}}'){{#required}}{{^isNullable}}!{{/isNullable}}{{/required}}{{^required}}{{#defaultValue}} ?? {{{.}}}{{/defaultValue}}{{/required}},
{{/isEnum}}
{{#isEnum}}
{{{name}}}: {{{enumName}}}.fromJson(json[r'{{{baseName}}}']){{#required}}{{^isNullable}}!{{/isNullable}}{{/required}}{{^required}}{{#defaultValue}} ?? {{{.}}}{{/defaultValue}}{{/required}},
{{/isEnum}}
{{/isNumber}}
{{/isMap}}
{{/isArray}}
{{/complexType}}
{{/isDate}}
{{/isDateTime}}
{{/vars}}
);
}
return null;
}
static List<{{{classname}}}> listFromJson(dynamic json, {bool growable = false,}) {
final result = <{{{classname}}}>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = {{{classname}}}.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, {{{classname}}}> mapFromJson(dynamic json) {
final map = <String, {{{classname}}}>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = {{{classname}}}.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of {{{classname}}}-objects as value to a dart map
static Map<String, List<{{{classname}}}>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<{{{classname}}}>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = {{{classname}}}.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
{{#vars}}
{{#required}}
'{{{baseName}}}',
{{/required}}
{{/vars}}
};
}
{{#vars}}
{{^isModel}}
{{#isEnum}}
{{^isContainer}}
{{>serialization/native/native_enum_inline}}
{{/isContainer}}
{{#isContainer}}
{{#mostInnerItems}}
{{>serialization/native/native_enum_inline}}
{{/mostInnerItems}}
{{/isContainer}}
{{/isEnum}}
{{/isModel}}
{{/vars}}

View File

@@ -1726,6 +1726,62 @@
]
}
},
"/assets/memory-lane": {
"get": {
"operationId": "getMemoryLane",
"parameters": [
{
"name": "day",
"required": true,
"in": "query",
"schema": {
"minimum": 1,
"maximum": 31,
"type": "integer"
}
},
{
"name": "month",
"required": true,
"in": "query",
"schema": {
"minimum": 1,
"maximum": 12,
"type": "integer"
}
}
],
"responses": {
"200": {
"content": {
"application/json": {
"schema": {
"items": {
"$ref": "#/components/schemas/MemoryLaneResponseDto"
},
"type": "array"
}
}
},
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"tags": [
"Assets"
]
}
},
"/assets/random": {
"get": {
"deprecated": true,
@@ -4595,6 +4651,118 @@
]
}
},
"/reports": {
"get": {
"operationId": "getAuditFiles",
"parameters": [],
"responses": {
"200": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/FileReportDto"
}
}
},
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"tags": [
"File Reports"
]
}
},
"/reports/checksum": {
"post": {
"operationId": "getFileChecksums",
"parameters": [],
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/FileChecksumDto"
}
}
},
"required": true
},
"responses": {
"201": {
"content": {
"application/json": {
"schema": {
"items": {
"$ref": "#/components/schemas/FileChecksumResponseDto"
},
"type": "array"
}
}
},
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"tags": [
"File Reports"
]
}
},
"/reports/fix": {
"post": {
"operationId": "fixAuditFiles",
"parameters": [],
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/FileReportFixDto"
}
}
},
"required": true
},
"responses": {
"201": {
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"tags": [
"File Reports"
]
}
},
"/search/cities": {
"get": {
"operationId": "getAssetsByCity",
@@ -6949,24 +7117,6 @@
"$ref": "#/components/schemas/AssetOrder"
}
},
{
"name": "page",
"required": false,
"in": "query",
"schema": {
"minimum": 1,
"type": "number"
}
},
{
"name": "pageSize",
"required": false,
"in": "query",
"schema": {
"minimum": 1,
"type": "number"
}
},
{
"name": "personId",
"required": false,
@@ -6976,6 +7126,14 @@
"type": "string"
}
},
{
"name": "size",
"required": true,
"in": "query",
"schema": {
"$ref": "#/components/schemas/TimeBucketSize"
}
},
{
"name": "tagId",
"required": false,
@@ -7024,7 +7182,10 @@
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/TimeBucketAssetResponseDto"
"items": {
"$ref": "#/components/schemas/AssetResponseDto"
},
"type": "array"
}
}
},
@@ -7109,6 +7270,14 @@
"type": "string"
}
},
{
"name": "size",
"required": true,
"in": "query",
"schema": {
"$ref": "#/components/schemas/TimeBucketSize"
}
},
{
"name": "tagId",
"required": false,
@@ -7150,7 +7319,7 @@
"application/json": {
"schema": {
"items": {
"$ref": "#/components/schemas/TimeBucketsResponseDto"
"$ref": "#/components/schemas/TimeBucketResponseDto"
},
"type": "array"
}
@@ -9580,6 +9749,105 @@
],
"type": "object"
},
"FileChecksumDto": {
"properties": {
"filenames": {
"items": {
"type": "string"
},
"type": "array"
}
},
"required": [
"filenames"
],
"type": "object"
},
"FileChecksumResponseDto": {
"properties": {
"checksum": {
"type": "string"
},
"filename": {
"type": "string"
}
},
"required": [
"checksum",
"filename"
],
"type": "object"
},
"FileReportDto": {
"properties": {
"extras": {
"items": {
"type": "string"
},
"type": "array"
},
"orphans": {
"items": {
"$ref": "#/components/schemas/FileReportItemDto"
},
"type": "array"
}
},
"required": [
"extras",
"orphans"
],
"type": "object"
},
"FileReportFixDto": {
"properties": {
"items": {
"items": {
"$ref": "#/components/schemas/FileReportItemDto"
},
"type": "array"
}
},
"required": [
"items"
],
"type": "object"
},
"FileReportItemDto": {
"properties": {
"checksum": {
"type": "string"
},
"entityId": {
"format": "uuid",
"type": "string"
},
"entityType": {
"allOf": [
{
"$ref": "#/components/schemas/PathEntityType"
}
]
},
"pathType": {
"allOf": [
{
"$ref": "#/components/schemas/PathType"
}
]
},
"pathValue": {
"type": "string"
}
},
"required": [
"entityId",
"entityType",
"pathType",
"pathValue"
],
"type": "object"
},
"FoldersResponse": {
"properties": {
"enabled": {
@@ -10060,6 +10328,24 @@
],
"type": "object"
},
"MemoryLaneResponseDto": {
"properties": {
"assets": {
"items": {
"$ref": "#/components/schemas/AssetResponseDto"
},
"type": "array"
},
"yearsAgo": {
"type": "integer"
}
},
"required": [
"assets",
"yearsAgo"
],
"type": "object"
},
"MemoryResponseDto": {
"properties": {
"assets": {
@@ -10603,6 +10889,27 @@
],
"type": "object"
},
"PathEntityType": {
"enum": [
"asset",
"person",
"user"
],
"type": "string"
},
"PathType": {
"enum": [
"original",
"fullsize",
"preview",
"thumbnail",
"encoded_video",
"sidecar",
"face",
"profile"
],
"type": "string"
},
"PeopleResponse": {
"properties": {
"enabled": {
@@ -13560,131 +13867,7 @@
],
"type": "object"
},
"TimeBucketAssetResponseDto": {
"properties": {
"city": {
"items": {
"nullable": true,
"type": "string"
},
"type": "array"
},
"country": {
"items": {
"nullable": true,
"type": "string"
},
"type": "array"
},
"duration": {
"items": {
"nullable": true,
"type": "string"
},
"type": "array"
},
"id": {
"items": {
"type": "string"
},
"type": "array"
},
"isArchived": {
"items": {
"type": "number"
},
"type": "array"
},
"isFavorite": {
"items": {
"type": "number"
},
"type": "array"
},
"isImage": {
"items": {
"type": "number"
},
"type": "array"
},
"isTrashed": {
"items": {
"type": "number"
},
"type": "array"
},
"livePhotoVideoId": {
"items": {
"nullable": true,
"type": "string"
},
"type": "array"
},
"localDateTime": {
"items": {
"type": "string"
},
"type": "array"
},
"ownerId": {
"items": {
"type": "string"
},
"type": "array"
},
"projectionType": {
"items": {
"nullable": true,
"type": "string"
},
"type": "array"
},
"ratio": {
"items": {
"type": "number"
},
"type": "array"
},
"stack": {
"description": "(stack ID, stack asset count) tuple",
"items": {
"items": {
"type": "string"
},
"maxItems": 2,
"minItems": 2,
"nullable": true,
"type": "array"
},
"type": "array"
},
"thumbhash": {
"items": {
"nullable": true,
"type": "string"
},
"type": "array"
}
},
"required": [
"city",
"country",
"duration",
"id",
"isArchived",
"isFavorite",
"isImage",
"isTrashed",
"livePhotoVideoId",
"localDateTime",
"ownerId",
"projectionType",
"ratio",
"thumbhash"
],
"type": "object"
},
"TimeBucketsResponseDto": {
"TimeBucketResponseDto": {
"properties": {
"count": {
"type": "integer"
@@ -13699,6 +13882,13 @@
],
"type": "object"
},
"TimeBucketSize": {
"enum": [
"DAY",
"MONTH"
],
"type": "string"
},
"ToneMapping": {
"enum": [
"hable",

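The spec hunks above restore the /assets/memory-lane and /reports routes and make TimeBucketSize a required timeline parameter. Purely as an illustrative sketch (not part of this diff), a raw request against the restored memory-lane route could look like the TypeScript below; the base URL and the x-api-key header name are assumptions.

// Illustrative sketch only: GET /assets/memory-lane as described by the restored
// OpenAPI path. The base URL and the x-api-key header name are assumptions.
type MemoryLaneResponse = {
  assets: unknown[]; // AssetResponseDto[] in the spec; left loose here
  yearsAgo: number;
};

async function fetchMemoryLane(day: number, month: number): Promise<MemoryLaneResponse[]> {
  const query = new URLSearchParams({ day: String(day), month: String(month) });
  const res = await fetch(`https://immich.example.com/api/assets/memory-lane?${query}`, {
    headers: { 'x-api-key': '<api-key placeholder>' },
  });
  if (!res.ok) {
    throw new Error(`memory-lane request failed: ${res.status}`);
  }
  return res.json() as Promise<MemoryLaneResponse[]>;
}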
View File

@@ -32,7 +32,7 @@ class {{{classname}}} {
{{/required}}
{{/isNullable}}
{{/isEnum}}
{{#isArray}}{{#uniqueItems}}Set{{/uniqueItems}}{{^uniqueItems}}List{{/uniqueItems}}<{{{items.dataType}}}{{#items.isNullable}}?{{/items.isNullable}}>{{/isArray}}{{^isArray}}{{{datatypeWithEnum}}}{{/isArray}}{{#isNullable}}?{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}?{{/defaultValue}}{{/required}}{{/isNullable}} {{{name}}};
{{{datatypeWithEnum}}}{{#isNullable}}?{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}?{{/defaultValue}}{{/required}}{{/isNullable}} {{{name}}};
{{/vars}}
@override

View File

@@ -1,13 +0,0 @@
diff --git a/open-api/templates/mobile/serialization/native/native_class.mustache b/open-api/templates/mobile/serialization/native/native_class.mustache
index 9a7b1439b..9f40d5b0b 100644
--- a/open-api/templates/mobile/serialization/native/native_class.mustache
+++ b/open-api/templates/mobile/serialization/native/native_class.mustache
@@ -32,7 +32,7 @@ class {{{classname}}} {
{{/required}}
{{/isNullable}}
{{/isEnum}}
- {{{datatypeWithEnum}}}{{#isNullable}}?{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}?{{/defaultValue}}{{/required}}{{/isNullable}} {{{name}}};
+ {{#isArray}}{{#uniqueItems}}Set{{/uniqueItems}}{{^uniqueItems}}List{{/uniqueItems}}<{{{items.dataType}}}{{#items.isNullable}}?{{/items.isNullable}}>{{/isArray}}{{^isArray}}{{{datatypeWithEnum}}}{{/isArray}}{{#isNullable}}?{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}?{{/defaultValue}}{{/required}}{{/isNullable}} {{{name}}};
{{/vars}}
@override

View File

@@ -462,6 +462,10 @@ export type AssetJobsDto = {
assetIds: string[];
name: AssetJobName;
};
export type MemoryLaneResponseDto = {
assets: AssetResponseDto[];
yearsAgo: number;
};
export type AssetStatsResponseDto = {
images: number;
total: number;
@@ -796,6 +800,27 @@ export type AssetFaceUpdateDto = {
export type PersonStatisticsResponseDto = {
assets: number;
};
export type FileReportItemDto = {
checksum?: string;
entityId: string;
entityType: PathEntityType;
pathType: PathType;
pathValue: string;
};
export type FileReportDto = {
extras: string[];
orphans: FileReportItemDto[];
};
export type FileChecksumDto = {
filenames: string[];
};
export type FileChecksumResponseDto = {
checksum: string;
filename: string;
};
export type FileReportFixDto = {
items: FileReportItemDto[];
};
export type SearchExploreItem = {
data: AssetResponseDto;
value: string;
@@ -1384,25 +1409,7 @@ export type TagBulkAssetsResponseDto = {
export type TagUpdateDto = {
color?: string | null;
};
export type TimeBucketAssetResponseDto = {
city: (string | null)[];
country: (string | null)[];
duration: (string | null)[];
id: string[];
isArchived: number[];
isFavorite: number[];
isImage: number[];
isTrashed: number[];
livePhotoVideoId: (string | null)[];
localDateTime: string[];
ownerId: string[];
projectionType: (string | null)[];
ratio: number[];
/** (stack ID, stack asset count) tuple */
stack?: (string[] | null)[];
thumbhash: (string | null)[];
};
export type TimeBucketsResponseDto = {
export type TimeBucketResponseDto = {
count: number;
timeBucket: string;
};
@@ -1880,6 +1887,20 @@ export function runAssetJobs({ assetJobsDto }: {
body: assetJobsDto
})));
}
export function getMemoryLane({ day, month }: {
day: number;
month: number;
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchJson<{
status: 200;
data: MemoryLaneResponseDto[];
}>(`/assets/memory-lane${QS.query(QS.explode({
day,
month
}))}`, {
...opts
}));
}
/**
* This property was deprecated in v1.116.0
*/
@@ -2642,6 +2663,35 @@ export function getPersonThumbnail({ id }: {
...opts
}));
}
export function getAuditFiles(opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchJson<{
status: 200;
data: FileReportDto;
}>("/reports", {
...opts
}));
}
export function getFileChecksums({ fileChecksumDto }: {
fileChecksumDto: FileChecksumDto;
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchJson<{
status: 201;
data: FileChecksumResponseDto[];
}>("/reports/checksum", oazapfts.json({
...opts,
method: "POST",
body: fileChecksumDto
})));
}
export function fixAuditFiles({ fileReportFixDto }: {
fileReportFixDto: FileReportFixDto;
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchText("/reports/fix", oazapfts.json({
...opts,
method: "POST",
body: fileReportFixDto
})));
}
export function getAssetsByCity(opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchJson<{
status: 200;
@@ -3260,16 +3310,15 @@ export function tagAssets({ id, bulkIdsDto }: {
body: bulkIdsDto
})));
}
export function getTimeBucket({ albumId, isArchived, isFavorite, isTrashed, key, order, page, pageSize, personId, tagId, timeBucket, userId, withPartners, withStacked }: {
export function getTimeBucket({ albumId, isArchived, isFavorite, isTrashed, key, order, personId, size, tagId, timeBucket, userId, withPartners, withStacked }: {
albumId?: string;
isArchived?: boolean;
isFavorite?: boolean;
isTrashed?: boolean;
key?: string;
order?: AssetOrder;
page?: number;
pageSize?: number;
personId?: string;
size: TimeBucketSize;
tagId?: string;
timeBucket: string;
userId?: string;
@@ -3278,7 +3327,7 @@ export function getTimeBucket({ albumId, isArchived, isFavorite, isTrashed, key,
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchJson<{
status: 200;
data: TimeBucketAssetResponseDto;
data: AssetResponseDto[];
}>(`/timeline/bucket${QS.query(QS.explode({
albumId,
isArchived,
@@ -3286,9 +3335,8 @@ export function getTimeBucket({ albumId, isArchived, isFavorite, isTrashed, key,
isTrashed,
key,
order,
page,
pageSize,
personId,
size,
tagId,
timeBucket,
userId,
@@ -3298,7 +3346,7 @@ export function getTimeBucket({ albumId, isArchived, isFavorite, isTrashed, key,
...opts
}));
}
export function getTimeBuckets({ albumId, isArchived, isFavorite, isTrashed, key, order, personId, tagId, userId, withPartners, withStacked }: {
export function getTimeBuckets({ albumId, isArchived, isFavorite, isTrashed, key, order, personId, size, tagId, userId, withPartners, withStacked }: {
albumId?: string;
isArchived?: boolean;
isFavorite?: boolean;
@@ -3306,6 +3354,7 @@ export function getTimeBuckets({ albumId, isArchived, isFavorite, isTrashed, key
key?: string;
order?: AssetOrder;
personId?: string;
size: TimeBucketSize;
tagId?: string;
userId?: string;
withPartners?: boolean;
@@ -3313,7 +3362,7 @@ export function getTimeBuckets({ albumId, isArchived, isFavorite, isTrashed, key
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchJson<{
status: 200;
data: TimeBucketsResponseDto[];
data: TimeBucketResponseDto[];
}>(`/timeline/buckets${QS.query(QS.explode({
albumId,
isArchived,
@@ -3322,6 +3371,7 @@ export function getTimeBuckets({ albumId, isArchived, isFavorite, isTrashed, key
key,
order,
personId,
size,
tagId,
userId,
withPartners,
@@ -3701,6 +3751,21 @@ export enum PartnerDirection {
SharedBy = "shared-by",
SharedWith = "shared-with"
}
export enum PathEntityType {
Asset = "asset",
Person = "person",
User = "user"
}
export enum PathType {
Original = "original",
Fullsize = "fullsize",
Preview = "preview",
Thumbnail = "thumbnail",
EncodedVideo = "encoded_video",
Sidecar = "sidecar",
Face = "face",
Profile = "profile"
}
export enum SearchSuggestionType {
Country = "country",
State = "state",
@@ -3800,3 +3865,7 @@ export enum OAuthTokenEndpointAuthMethod {
ClientSecretPost = "client_secret_post",
ClientSecretBasic = "client_secret_basic"
}
export enum TimeBucketSize {
Day = "DAY",
Month = "MONTH"
}
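For orientation only, a hedged usage sketch of the regenerated timeline functions above might look as follows; the import path and auth wiring are assumptions, while the call shapes (required size, TimeBucketResponseDto[] buckets, AssetResponseDto[] bucket contents) come from the diff.

// Sketch of consuming the regenerated SDK; import path and auth setup are assumed.
import { getTimeBucket, getTimeBuckets, TimeBucketSize } from './sdk';

async function listMonthBuckets(): Promise<void> {
  // `size` is now a required query parameter on both endpoints.
  const buckets = await getTimeBuckets({ size: TimeBucketSize.Month });
  for (const bucket of buckets) {
    // Buckets now return AssetResponseDto[] instead of the removed
    // columnar TimeBucketAssetResponseDto shape.
    const assets = await getTimeBucket({ size: TimeBucketSize.Month, timeBucket: bucket.timeBucket });
    console.log(bucket.timeBucket, bucket.count, assets.length);
  }
}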

View File

@@ -28,7 +28,7 @@
"archiver": "^7.0.0",
"async-lock": "^1.4.0",
"bcrypt": "^5.1.1",
"bullmq": "^5.51.0",
"bullmq": "^4.8.0",
"chokidar": "^3.5.3",
"class-transformer": "^0.5.1",
"class-validator": "^0.14.0",
@@ -6886,20 +6886,63 @@
}
},
"node_modules/bullmq": {
"version": "5.51.0",
"resolved": "https://registry.npmjs.org/bullmq/-/bullmq-5.51.0.tgz",
"integrity": "sha512-YjX+CO2U4nmbCq2ZgNb/Hnu6Xk953j8EFmp0eehTuudavPyNstoZsbnyvvM6PX9rfD9clhcc5kRLyyWoFEM3Lg==",
"version": "4.18.2",
"resolved": "https://registry.npmjs.org/bullmq/-/bullmq-4.18.2.tgz",
"integrity": "sha512-Cx0O98IlGiFw7UBa+zwGz+nH0Pcl1wfTvMVBlsMna3s0219hXroVovh1xPRgomyUcbyciHiugGCkW0RRNZDHYQ==",
"license": "MIT",
"dependencies": {
"cron-parser": "^4.9.0",
"ioredis": "^5.4.1",
"msgpackr": "^1.11.2",
"cron-parser": "^4.6.0",
"glob": "^8.0.3",
"ioredis": "^5.3.2",
"lodash": "^4.17.21",
"msgpackr": "^1.6.2",
"node-abort-controller": "^3.1.1",
"semver": "^7.5.4",
"tslib": "^2.0.0",
"uuid": "^9.0.0"
}
},
"node_modules/bullmq/node_modules/brace-expansion": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
"integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
"license": "MIT",
"dependencies": {
"balanced-match": "^1.0.0"
}
},
"node_modules/bullmq/node_modules/glob": {
"version": "8.1.0",
"resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz",
"integrity": "sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==",
"deprecated": "Glob versions prior to v9 are no longer supported",
"license": "ISC",
"dependencies": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^5.0.1",
"once": "^1.3.0"
},
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/bullmq/node_modules/minimatch": {
"version": "5.1.6",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz",
"integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==",
"license": "ISC",
"dependencies": {
"brace-expansion": "^2.0.1"
},
"engines": {
"node": ">=10"
}
},
"node_modules/busboy": {
"version": "1.6.0",
"resolved": "https://registry.npmjs.org/busboy/-/busboy-1.6.0.tgz",

View File

@@ -53,7 +53,7 @@
"archiver": "^7.0.0",
"async-lock": "^1.4.0",
"bcrypt": "^5.1.1",
"bullmq": "^5.51.0",
"bullmq": "^4.8.0",
"chokidar": "^3.5.3",
"class-transformer": "^0.5.1",
"class-validator": "^0.14.0",

View File

@@ -67,7 +67,7 @@ const runQuery = async (query: string) => {
const runMigrations = async () => {
const configRepository = new ConfigRepository();
const logger = LoggingRepository.create();
const logger = new LoggingRepository(undefined, configRepository);
const db = getDatabaseClient();
const databaseRepository = new DatabaseRepository(db, logger, configRepository);
await databaseRepository.runMigrations();

View File

@@ -72,9 +72,7 @@ class SqlGenerator {
await rm(this.options.targetDir, { force: true, recursive: true });
await mkdir(this.options.targetDir);
if (!process.env.DB_HOSTNAME) {
process.env.DB_HOSTNAME = 'localhost';
}
process.env.DB_HOSTNAME = 'localhost';
const { database, cls, otel } = new ConfigRepository().getEnv();
const moduleFixture = await Test.createTestingModule({

View File

@@ -1,7 +1,7 @@
import { Body, Controller, Delete, Get, HttpCode, HttpStatus, Param, Post, Put, Query } from '@nestjs/common';
import { ApiOperation, ApiTags } from '@nestjs/swagger';
import { EndpointLifecycle } from 'src/decorators';
import { AssetResponseDto } from 'src/dtos/asset-response.dto';
import { AssetResponseDto, MemoryLaneResponseDto } from 'src/dtos/asset-response.dto';
import {
AssetBulkDeleteDto,
AssetBulkUpdateDto,
@@ -13,6 +13,7 @@ import {
UpdateAssetDto,
} from 'src/dtos/asset.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { MemoryLaneDto } from 'src/dtos/search.dto';
import { RouteKey } from 'src/enum';
import { Auth, Authenticated } from 'src/middleware/auth.guard';
import { AssetService } from 'src/services/asset.service';
@@ -23,6 +24,12 @@ import { UUIDParamDto } from 'src/validation';
export class AssetController {
constructor(private service: AssetService) {}
@Get('memory-lane')
@Authenticated()
getMemoryLane(@Auth() auth: AuthDto, @Query() dto: MemoryLaneDto): Promise<MemoryLaneResponseDto[]> {
return this.service.getMemoryLane(auth, dto);
}
@Get('random')
@Authenticated()
@EndpointLifecycle({ deprecatedAt: 'v1.116.0' })

View File

@@ -0,0 +1,29 @@
import { Body, Controller, Get, Post } from '@nestjs/common';
import { ApiTags } from '@nestjs/swagger';
import { FileChecksumDto, FileChecksumResponseDto, FileReportDto, FileReportFixDto } from 'src/dtos/audit.dto';
import { Authenticated } from 'src/middleware/auth.guard';
import { AuditService } from 'src/services/audit.service';
@ApiTags('File Reports')
@Controller('reports')
export class ReportController {
constructor(private service: AuditService) {}
@Get()
@Authenticated({ admin: true })
getAuditFiles(): Promise<FileReportDto> {
return this.service.getFileReport();
}
@Post('checksum')
@Authenticated({ admin: true })
getFileChecksums(@Body() dto: FileChecksumDto): Promise<FileChecksumResponseDto[]> {
return this.service.getChecksums(dto);
}
@Post('fix')
@Authenticated({ admin: true })
fixAuditFiles(@Body() dto: FileReportFixDto): Promise<void> {
return this.service.fixItems(dto.items);
}
}
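The controller above only exposes the existing AuditService over /reports. As a non-authoritative sketch, an admin client could drive it through the SDK functions added earlier in this diff; the import path and the lack of error handling are assumptions.

// Sketch of the /reports flow using getAuditFiles, getFileChecksums and
// fixAuditFiles from this diff; the import path is an assumption.
import { fixAuditFiles, getAuditFiles, getFileChecksums } from './sdk';

async function reviewFileReport(): Promise<void> {
  const report = await getAuditFiles();
  console.log(`extras: ${report.extras.length}, orphans: ${report.orphans.length}`);

  // Look up checksums for a handful of untracked files found on disk.
  const checksums = await getFileChecksums({
    fileChecksumDto: { filenames: report.extras.slice(0, 10) },
  });
  for (const { filename, checksum } of checksums) {
    console.log(`${filename}: ${checksum}`);
  }

  // Repair orphaned records once they have been reviewed.
  await fixAuditFiles({ fileReportFixDto: { items: report.orphans } });
}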

View File

@@ -8,6 +8,7 @@ import { AuthController } from 'src/controllers/auth.controller';
import { DownloadController } from 'src/controllers/download.controller';
import { DuplicateController } from 'src/controllers/duplicate.controller';
import { FaceController } from 'src/controllers/face.controller';
import { ReportController } from 'src/controllers/file-report.controller';
import { JobController } from 'src/controllers/job.controller';
import { LibraryController } from 'src/controllers/library.controller';
import { MapController } from 'src/controllers/map.controller';
@@ -52,6 +53,7 @@ export const controllers = [
OAuthController,
PartnerController,
PersonController,
ReportController,
SearchController,
ServerController,
SessionController,

View File

@@ -46,7 +46,7 @@ export class SearchController {
@Get('explore')
@Authenticated()
getExploreData(@Auth() auth: AuthDto): Promise<SearchExploreResponseDto[]> {
return this.service.getExploreData(auth);
return this.service.getExploreData(auth) as Promise<SearchExploreResponseDto[]>;
}
@Get('person')

View File

@@ -1,8 +1,8 @@
import { Controller, Get, Query, Res } from '@nestjs/common';
import { Controller, Get, Query } from '@nestjs/common';
import { ApiTags } from '@nestjs/swagger';
import { Response } from 'express';
import { AssetResponseDto } from 'src/dtos/asset-response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { TimeBucketAssetDto, TimeBucketAssetResponseDto, TimeBucketDto } from 'src/dtos/time-bucket.dto';
import { TimeBucketAssetDto, TimeBucketDto, TimeBucketResponseDto } from 'src/dtos/time-bucket.dto';
import { Permission } from 'src/enum';
import { Auth, Authenticated } from 'src/middleware/auth.guard';
import { TimelineService } from 'src/services/timeline.service';
@@ -14,19 +14,13 @@ export class TimelineController {
@Get('buckets')
@Authenticated({ permission: Permission.ASSET_READ, sharedLink: true })
getTimeBuckets(@Auth() auth: AuthDto, @Query() dto: TimeBucketDto) {
getTimeBuckets(@Auth() auth: AuthDto, @Query() dto: TimeBucketDto): Promise<TimeBucketResponseDto[]> {
return this.service.getTimeBuckets(auth, dto);
}
@Get('bucket')
@Authenticated({ permission: Permission.ASSET_READ, sharedLink: true })
async getTimeBucket(
@Auth() auth: AuthDto,
@Query() dto: TimeBucketAssetDto,
@Res({ passthrough: true }) res: Response,
): Promise<TimeBucketAssetResponseDto> {
res.contentType('application/json');
const jsonBucket = await this.service.getTimeBucket(auth, dto);
return jsonBucket as unknown as TimeBucketAssetResponseDto;
getTimeBucket(@Auth() auth: AuthDto, @Query() dto: TimeBucketAssetDto): Promise<AssetResponseDto[]> {
return this.service.getTimeBucket(auth, dto) as Promise<AssetResponseDto[]>;
}
}

View File

@@ -165,12 +165,6 @@ export type Stack = {
assetCount?: number;
};
export type TimelineStack = {
id: string;
primaryAssetId: string;
assetCount: number;
};
export type AuthSharedLink = {
id: string;
expiresAt: Date | null;

View File

@@ -13,7 +13,6 @@ import {
import { TagResponseDto, mapTag } from 'src/dtos/tag.dto';
import { UserResponseDto, mapUser } from 'src/dtos/user.dto';
import { AssetStatus, AssetType } from 'src/enum';
import { hexOrBufferToBase64 } from 'src/utils/bytes';
import { mimeTypes } from 'src/utils/mime-types';
export class SanitizedAssetResponseDto {
@@ -141,6 +140,15 @@ const mapStack = (entity: { stack?: Stack | null }) => {
};
};
// if an asset is jsonified in the DB before being returned, its buffer fields will be hex-encoded strings
export const hexOrBufferToBase64 = (encoded: string | Buffer) => {
if (typeof encoded === 'string') {
return Buffer.from(encoded.slice(2), 'hex').toString('base64');
}
return encoded.toString('base64');
};
export function mapAsset(entity: MapAsset, options: AssetMapOptions = {}): AssetResponseDto {
const { stripMetadata = false, withStack = false } = options;
@@ -183,7 +191,7 @@ export function mapAsset(entity: MapAsset, options: AssetMapOptions = {}): Asset
tags: entity.tags?.map((tag) => mapTag(tag)),
people: peopleWithFaces(entity.faces),
unassignedFaces: entity.faces?.filter((face) => !face.person).map((a) => mapFacesWithoutPerson(a)),
checksum: hexOrBufferToBase64(entity.checksum)!,
checksum: hexOrBufferToBase64(entity.checksum),
stack: withStack ? mapStack(entity) : undefined,
isOffline: entity.isOffline,
hasMetadata: true,
@@ -191,3 +199,10 @@ export function mapAsset(entity: MapAsset, options: AssetMapOptions = {}): Asset
resized: true,
};
}
export class MemoryLaneResponseDto {
@ApiProperty({ type: 'integer' })
yearsAgo!: number;
assets!: AssetResponseDto[];
}
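The inlined hexOrBufferToBase64 helper above handles checksums that arrive either as raw buffers or as Postgres hex-encoded strings (the \x-prefixed form produced when a row is jsonified). A standalone sketch with made-up sample values:

// Standalone copy of the helper above, with made-up sample values.
const hexOrBufferToBase64 = (encoded: string | Buffer): string => {
  if (typeof encoded === 'string') {
    // Postgres renders bytea as '\x<hex>' once jsonified; drop the two-character
    // prefix before decoding the hex payload.
    return Buffer.from(encoded.slice(2), 'hex').toString('base64');
  }
  return encoded.toString('base64');
};

// Both input forms decode to the same base64 checksum.
console.log(hexOrBufferToBase64('\\x48656c6c6f'));      // SGVsbG8=
console.log(hexOrBufferToBase64(Buffer.from('Hello'))); // SGVsbG8=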

View File

@@ -0,0 +1,73 @@
import { ApiProperty } from '@nestjs/swagger';
import { Type } from 'class-transformer';
import { IsArray, IsEnum, IsString, IsUUID, ValidateNested } from 'class-validator';
import { AssetPathType, EntityType, PathType, PersonPathType, UserPathType } from 'src/enum';
import { Optional, ValidateDate, ValidateUUID } from 'src/validation';
const PathEnum = Object.values({ ...AssetPathType, ...PersonPathType, ...UserPathType });
export class AuditDeletesDto {
@ValidateDate()
after!: Date;
@ApiProperty({ enum: EntityType, enumName: 'EntityType' })
@IsEnum(EntityType)
entityType!: EntityType;
@Optional()
@IsUUID('4')
@ApiProperty({ format: 'uuid' })
userId?: string;
}
export enum PathEntityType {
ASSET = 'asset',
PERSON = 'person',
USER = 'user',
}
export class AuditDeletesResponseDto {
needsFullSync!: boolean;
ids!: string[];
}
export class FileReportDto {
orphans!: FileReportItemDto[];
extras!: string[];
}
export class FileChecksumDto {
@IsString({ each: true })
filenames!: string[];
}
export class FileChecksumResponseDto {
filename!: string;
checksum!: string;
}
export class FileReportFixDto {
@IsArray()
@ValidateNested({ each: true })
@Type(() => FileReportItemDto)
items!: FileReportItemDto[];
}
// used both as request and response dto
export class FileReportItemDto {
@ValidateUUID()
entityId!: string;
@ApiProperty({ enumName: 'PathEntityType', enum: PathEntityType })
@IsEnum(PathEntityType)
entityType!: PathEntityType;
@ApiProperty({ enumName: 'PathType', enum: PathEnum })
@IsEnum(PathEnum)
pathType!: PathType;
@IsString()
pathValue!: string;
checksum?: string;
}

View File

@@ -1,11 +1,15 @@
import { ApiProperty } from '@nestjs/swagger';
import { IsEnum, IsInt, IsString, Min } from 'class-validator';
import { IsEnum, IsNotEmpty, IsString } from 'class-validator';
import { AssetOrder } from 'src/enum';
import { TimeBucketAssets, TimelineStack } from 'src/services/timeline.service.types';
import { TimeBucketSize } from 'src/repositories/asset.repository';
import { Optional, ValidateBoolean, ValidateUUID } from 'src/validation';
export class TimeBucketDto {
@IsNotEmpty()
@IsEnum(TimeBucketSize)
@ApiProperty({ enum: TimeBucketSize, enumName: 'TimeBucketSize' })
size!: TimeBucketSize;
@ValidateUUID({ optional: true })
userId?: string;
@@ -42,75 +46,9 @@ export class TimeBucketDto {
export class TimeBucketAssetDto extends TimeBucketDto {
@IsString()
timeBucket!: string;
@IsInt()
@Min(1)
@Optional()
page?: number;
@IsInt()
@Min(1)
@Optional()
pageSize?: number;
}
export class TimelineStackResponseDto implements TimelineStack {
id!: string;
primaryAssetId!: string;
assetCount!: number;
}
export class TimeBucketAssetResponseDto implements TimeBucketAssets {
id!: string[];
ownerId!: string[];
ratio!: number[];
isFavorite!: number[];
isArchived!: number[];
isTrashed!: number[];
isImage!: number[];
@ApiProperty({ type: 'array', items: { type: 'string', nullable: true } })
thumbhash!: (string | null)[];
localDateTime!: string[];
@ApiProperty({ type: 'array', items: { type: 'string', nullable: true } })
duration!: (string | null)[];
// id, count
@ApiProperty({
type: 'array',
items: {
type: 'array',
items: { type: 'string' },
minItems: 2,
maxItems: 2,
nullable: true,
},
description: '(stack ID, stack asset count) tuple',
})
stack?: ([string, string] | null)[];
@ApiProperty({ type: 'array', items: { type: 'string', nullable: true } })
projectionType!: (string | null)[];
@ApiProperty({ type: 'array', items: { type: 'string', nullable: true } })
livePhotoVideoId!: (string | null)[];
@ApiProperty({ type: 'array', items: { type: 'string', nullable: true } })
city!: (string | null)[];
@ApiProperty({ type: 'array', items: { type: 'string', nullable: true } })
country!: (string | null)[];
}
export class TimeBucketsResponseDto {
export class TimeBucketResponseDto {
@ApiProperty({ type: 'string' })
timeBucket!: string;

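With size now a required, validated enum on TimeBucketDto, timeline requests that omit it should be rejected by class-validator. A minimal stand-in sketch of that check, mirroring only the new field rather than the real DTO:

// Minimal stand-in mirroring the new required `size` field; not the real DTO.
import 'reflect-metadata';
import { plainToInstance } from 'class-transformer';
import { IsEnum, IsNotEmpty, validate } from 'class-validator';

enum TimeBucketSize {
  DAY = 'DAY',
  MONTH = 'MONTH',
}

class TimeBucketQuery {
  @IsNotEmpty()
  @IsEnum(TimeBucketSize)
  size!: TimeBucketSize;
}

async function main(): Promise<void> {
  const missingSize = await validate(plainToInstance(TimeBucketQuery, {}));
  console.log(missingSize.length > 0); // true: `size` is now mandatory

  const withSize = await validate(plainToInstance(TimeBucketQuery, { size: 'MONTH' }));
  console.log(withSize.length === 0); // true
}

void main();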
View File

@@ -194,25 +194,6 @@ where
"asset_files"."assetId" = $1
and "asset_files"."type" = $2
-- AssetJobRepository.streamForSearchDuplicates
select
"assets"."id"
from
"assets"
inner join "asset_job_status" as "job_status" on "assetId" = "assets"."id"
where
"assets"."isVisible" = $1
and "assets"."deletedAt" is null
and "job_status"."previewAt" is not null
and not exists (
select
from
"smart_search"
where
"assetId" = "assets"."id"
)
and "job_status"."duplicatesDetectedAt" is null
-- AssetJobRepository.streamForEncodeClip
select
"assets"."id"
@@ -220,9 +201,8 @@ from
"assets"
inner join "asset_job_status" as "job_status" on "assetId" = "assets"."id"
where
"assets"."isVisible" = $1
and "assets"."deletedAt" is null
and "job_status"."previewAt" is not null
"job_status"."previewAt" is not null
and "assets"."isVisible" = $1
and not exists (
select
from
@@ -230,6 +210,7 @@ where
where
"assetId" = "assets"."id"
)
and "assets"."deletedAt" is null
-- AssetJobRepository.getForClipEncoding
select
@@ -469,37 +450,3 @@ from
"assets"
where
"assets"."deletedAt" <= $1
-- AssetJobRepository.streamForSidecar
select
"assets"."id"
from
"assets"
where
(
"assets"."sidecarPath" = $1
or "assets"."sidecarPath" is null
)
and "assets"."isVisible" = $2
-- AssetJobRepository.streamForDetectFacesJob
select
"assets"."id"
from
"assets"
inner join "asset_job_status" as "job_status" on "assetId" = "assets"."id"
where
"assets"."isVisible" = $1
and "assets"."deletedAt" is null
and "job_status"."previewAt" is not null
and "job_status"."facesRecognizedAt" is null
order by
"assets"."createdAt" desc
-- AssetJobRepository.streamForMigrationJob
select
"id"
from
"assets"
where
"assets"."deletedAt" is null

View File

@@ -232,16 +232,35 @@ where
limit
$3
-- AssetRepository.getWithout (sidecar)
select
"assets".*
from
"assets"
where
(
"assets"."sidecarPath" = $1
or "assets"."sidecarPath" is null
)
and "assets"."isVisible" = $2
and "deletedAt" is null
order by
"createdAt"
limit
$3
offset
$4
-- AssetRepository.getTimeBuckets
with
"assets" as (
select
date_trunc('MONTH', "localDateTime" at time zone 'UTC') at time zone 'UTC' as "timeBucket"
date_trunc($1, "localDateTime" at time zone 'UTC') at time zone 'UTC' as "timeBucket"
from
"assets"
where
"assets"."deletedAt" is null
and "assets"."isVisible" = $1
and "assets"."isVisible" = $2
)
select
"timeBucket",
@@ -254,95 +273,37 @@ order by
"timeBucket" desc
-- AssetRepository.getTimeBucket
with
"cte" as (
select
"assets"."duration",
"assets"."id",
assets."isArchived"::int as "isArchived",
assets."isFavorite"::int as "isFavorite",
(assets.type = 'IMAGE')::int as "isImage",
(assets."deletedAt" is null)::int as "isTrashed",
(assets.type = 'VIDEO')::int as "isVideo",
"assets"."livePhotoVideoId",
"assets"."localDateTime",
"assets"."ownerId",
"assets"."status",
encode("assets"."thumbhash", 'base64') as "thumbhash",
"exif"."city",
"exif"."country",
"exif"."projectionType",
coalesce(
case
when exif."exifImageHeight" = 0
or exif."exifImageWidth" = 0 then 1
when "exif"."orientation" in ('5', '6', '7', '8', '-90', '90') then round(
exif."exifImageHeight"::numeric / exif."exifImageWidth"::numeric,
3
)
else round(
exif."exifImageWidth"::numeric / exif."exifImageHeight"::numeric,
3
)
end,
1
) as "ratio",
"stack"
from
"assets"
inner join "exif" on "assets"."id" = "exif"."assetId"
left join lateral (
select
array[stacked."stackId"::text, count('stacked')::text] as "stack"
from
"assets" as "stacked"
where
"stacked"."stackId" = "assets"."stackId"
and "stacked"."deletedAt" is null
and "stacked"."isArchived" = $1
group by
"stacked"."stackId"
) as "stacked_assets" on true
where
"assets"."deletedAt" is null
and "assets"."isVisible" = $2
and date_trunc('MONTH', "localDateTime" at time zone 'UTC') at time zone 'UTC' = $3
and not exists (
select
from
"asset_stack"
where
"asset_stack"."id" = "assets"."stackId"
and "asset_stack"."primaryAssetId" != "assets"."id"
)
order by
"assets"."localDateTime" desc
),
"agg" as (
select
coalesce(array_agg("city"), '{}') as "city",
coalesce(array_agg("country"), '{}') as "country",
coalesce(array_agg("duration"), '{}') as "duration",
coalesce(array_agg("id"), '{}') as "id",
coalesce(array_agg("isArchived"), '{}') as "isArchived",
coalesce(array_agg("isFavorite"), '{}') as "isFavorite",
coalesce(array_agg("isImage"), '{}') as "isImage",
coalesce(array_agg("isTrashed"), '{}') as "isTrashed",
coalesce(array_agg("livePhotoVideoId"), '{}') as "livePhotoVideoId",
coalesce(array_agg("localDateTime"), '{}') as "localDateTime",
coalesce(array_agg("ownerId"), '{}') as "ownerId",
coalesce(array_agg("projectionType"), '{}') as "projectionType",
coalesce(array_agg("ratio"), '{}') as "ratio",
coalesce(array_agg("status"), '{}') as "status",
coalesce(array_agg("thumbhash"), '{}') as "thumbhash",
coalesce(json_agg("stack"), '[]') as "stack"
from
"cte"
)
select
to_json(agg)::text as "assets"
"assets".*,
to_json("exif") as "exifInfo",
to_json("stacked_assets") as "stack"
from
"agg"
"assets"
left join "exif" on "assets"."id" = "exif"."assetId"
left join "asset_stack" on "asset_stack"."id" = "assets"."stackId"
left join lateral (
select
"asset_stack".*,
count("stacked") as "assetCount"
from
"assets" as "stacked"
where
"stacked"."stackId" = "asset_stack"."id"
and "stacked"."deletedAt" is null
and "stacked"."isArchived" = $1
group by
"asset_stack"."id"
) as "stacked_assets" on "asset_stack"."id" is not null
where
(
"asset_stack"."primaryAssetId" = "assets"."id"
or "assets"."stackId" is null
)
and "assets"."deletedAt" is null
and "assets"."isVisible" = $2
and date_trunc($3, "localDateTime" at time zone 'UTC') at time zone 'UTC' = $4
order by
"assets"."localDateTime" desc
-- AssetRepository.getDuplicates
with
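
The getTimeBucket SQL above derives a display ratio from the EXIF dimensions: fall back to 1 when either dimension is 0, swap width and height for the rotated orientations ('5', '6', '7', '8', '-90', '90'), and round to three decimals. A small TypeScript sketch of the same rule for readability; exifRatio is an illustrative name, not code from this diff.

// Mirrors the coalesce/case expression in the SQL above.
const ROTATED_ORIENTATIONS = new Set(['5', '6', '7', '8', '-90', '90']);

function exifRatio(width: number, height: number, orientation: string | null): number {
  if (width === 0 || height === 0) {
    return 1; // missing dimensions: assume square
  }
  const ratio = ROTATED_ORIENTATIONS.has(orientation ?? '')
    ? height / width // sideways images: height is the visual width
    : width / height;
  return Math.round(ratio * 1000) / 1000; // round(numeric, 3)
}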

View File

@@ -135,33 +135,20 @@ export class AssetJobRepository {
.execute();
}
private assetsWithPreviews() {
return this.db
.selectFrom('assets')
.where('assets.isVisible', '=', true)
.where('assets.deletedAt', 'is', null)
.innerJoin('asset_job_status as job_status', 'assetId', 'assets.id')
.where('job_status.previewAt', 'is not', null);
}
@GenerateSql({ params: [], stream: true })
streamForSearchDuplicates(force?: boolean) {
return this.assetsWithPreviews()
.where((eb) => eb.not((eb) => eb.exists(eb.selectFrom('smart_search').whereRef('assetId', '=', 'assets.id'))))
.$if(!force, (qb) => qb.where('job_status.duplicatesDetectedAt', 'is', null))
.select(['assets.id'])
.stream();
}
@GenerateSql({ params: [], stream: true })
streamForEncodeClip(force?: boolean) {
return this.assetsWithPreviews()
return this.db
.selectFrom('assets')
.select(['assets.id'])
.innerJoin('asset_job_status as job_status', 'assetId', 'assets.id')
.where('job_status.previewAt', 'is not', null)
.where('assets.isVisible', '=', true)
.$if(!force, (qb) =>
qb.where((eb) =>
eb.not((eb) => eb.exists(eb.selectFrom('smart_search').whereRef('assetId', '=', 'assets.id'))),
),
)
.where('assets.deletedAt', 'is', null)
.stream();
}
@@ -322,30 +309,4 @@ export class AssetJobRepository {
.where('assets.deletedAt', '<=', trashedBefore)
.stream();
}
@GenerateSql({ params: [], stream: true })
streamForSidecar(force?: boolean) {
return this.db
.selectFrom('assets')
.select(['assets.id'])
.$if(!force, (qb) =>
qb.where((eb) => eb.or([eb('assets.sidecarPath', '=', ''), eb('assets.sidecarPath', 'is', null)])),
)
.where('assets.isVisible', '=', true)
.stream();
}
@GenerateSql({ params: [], stream: true })
streamForDetectFacesJob(force?: boolean) {
return this.assetsWithPreviews()
.$if(!force, (qb) => qb.where('job_status.facesRecognizedAt', 'is', null))
.select(['assets.id'])
.orderBy('assets.createdAt', 'desc')
.stream();
}
@GenerateSql({ params: [DummyValue.DATE], stream: true })
streamForMigrationJob() {
return this.db.selectFrom('assets').select(['id']).where('assets.deletedAt', 'is', null).stream();
}
}

View File

@@ -7,11 +7,13 @@ import { AssetFiles, AssetJobStatus, Assets, DB, Exif } from 'src/db';
import { Chunked, ChunkedArray, DummyValue, GenerateSql } from 'src/decorators';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { AssetFileType, AssetOrder, AssetStatus, AssetType } from 'src/enum';
import { AssetSearchOptions, SearchExploreItem, SearchExploreItemSet } from 'src/repositories/search.repository';
import {
anyUuid,
asUuid,
hasPeople,
removeUndefinedKeys,
searchAssetBuilder,
truncatedDate,
unnest,
withExif,
@@ -25,6 +27,7 @@ import {
withTags,
} from 'src/utils/database';
import { globToSqlPattern } from 'src/utils/misc';
import { PaginationOptions, paginationHelper } from 'src/utils/pagination';
export type AssetStats = Record<AssetType, number>;
@@ -42,6 +45,15 @@ export interface LivePhotoSearchOptions {
type: AssetType;
}
export enum WithoutProperty {
THUMBNAIL = 'thumbnail',
ENCODED_VIDEO = 'encoded-video',
EXIF = 'exif',
DUPLICATE = 'duplicate',
FACES = 'faces',
SIDECAR = 'sidecar',
}
export enum WithProperty {
SIDECAR = 'sidecar',
}
@@ -67,6 +79,7 @@ export interface AssetBuilderOptions {
}
export interface TimeBucketOptions extends AssetBuilderOptions {
size: TimeBucketSize;
order?: AssetOrder;
}
@@ -322,6 +335,10 @@ export class AssetRepository {
return assets.map((asset) => asset.deviceAssetId);
}
getByUserId(pagination: PaginationOptions, userId: string, options: Omit<AssetSearchOptions, 'userIds'> = {}) {
return this.getAll(pagination, { ...options, userIds: [userId] });
}
@GenerateSql({ params: [DummyValue.UUID, DummyValue.STRING] })
getByLibraryIdAndOriginalPath(libraryId: string, originalPath: string) {
return this.db
@@ -333,6 +350,16 @@ export class AssetRepository {
.executeTakeFirst();
}
async getAll(pagination: PaginationOptions, { orderDirection, ...options }: AssetSearchOptions = {}) {
const builder = searchAssetBuilder(this.db, options)
.select(withFiles)
.orderBy('assets.createdAt', orderDirection ?? 'asc')
.limit(pagination.take + 1)
.offset(pagination.skip ?? 0);
const items = await builder.execute();
return paginationHelper(items, pagination.take);
}
/**
* Get assets by device's Id on the database
* @param ownerId
@@ -502,6 +529,68 @@ export class AssetRepository {
.executeTakeFirst();
}
@GenerateSql(
...Object.values(WithProperty).map((property) => ({
name: property,
params: [DummyValue.PAGINATION, property],
})),
)
async getWithout(pagination: PaginationOptions, property: WithoutProperty) {
const items = await this.db
.selectFrom('assets')
.selectAll('assets')
.$if(property === WithoutProperty.DUPLICATE, (qb) =>
qb
.innerJoin('asset_job_status as job_status', 'assets.id', 'job_status.assetId')
.where('job_status.duplicatesDetectedAt', 'is', null)
.where('job_status.previewAt', 'is not', null)
.where((eb) => eb.exists(eb.selectFrom('smart_search').where('assetId', '=', eb.ref('assets.id'))))
.where('assets.isVisible', '=', true),
)
.$if(property === WithoutProperty.ENCODED_VIDEO, (qb) =>
qb
.where('assets.type', '=', AssetType.VIDEO)
.where((eb) => eb.or([eb('assets.encodedVideoPath', 'is', null), eb('assets.encodedVideoPath', '=', '')])),
)
.$if(property === WithoutProperty.EXIF, (qb) =>
qb
.leftJoin('asset_job_status as job_status', 'assets.id', 'job_status.assetId')
.where((eb) => eb.or([eb('job_status.metadataExtractedAt', 'is', null), eb('assetId', 'is', null)]))
.where('assets.isVisible', '=', true),
)
.$if(property === WithoutProperty.FACES, (qb) =>
qb
.innerJoin('asset_job_status as job_status', 'assetId', 'assets.id')
.where('job_status.previewAt', 'is not', null)
.where('job_status.facesRecognizedAt', 'is', null)
.where('assets.isVisible', '=', true),
)
.$if(property === WithoutProperty.SIDECAR, (qb) =>
qb
.where((eb) => eb.or([eb('assets.sidecarPath', '=', ''), eb('assets.sidecarPath', 'is', null)]))
.where('assets.isVisible', '=', true),
)
.$if(property === WithoutProperty.THUMBNAIL, (qb) =>
qb
.innerJoin('asset_job_status as job_status', 'assetId', 'assets.id')
.where('assets.isVisible', '=', true)
.where((eb) =>
eb.or([
eb('job_status.previewAt', 'is', null),
eb('job_status.thumbnailAt', 'is', null),
eb('assets.thumbhash', 'is', null),
]),
),
)
.where('deletedAt', 'is', null)
.limit(pagination.take + 1)
.offset(pagination.skip ?? 0)
.orderBy('createdAt')
.execute();
return paginationHelper(items, pagination.take);
}
getStatistics(ownerId: string, { isArchived, isFavorite, isTrashed }: AssetStatsOptions): Promise<AssetStats> {
return this.db
.selectFrom('assets')
@@ -538,7 +627,7 @@ export class AssetRepository {
.with('assets', (qb) =>
qb
.selectFrom('assets')
.select(truncatedDate<Date>(TimeBucketSize.MONTH).as('timeBucket'))
.select(truncatedDate<Date>(options.size).as('timeBucket'))
.$if(!!options.isTrashed, (qb) => qb.where('assets.status', '!=', AssetStatus.DELETED))
.where('assets.deletedAt', options.isTrashed ? 'is not' : 'is', null)
.where('assets.isVisible', '=', true)
@@ -580,125 +669,53 @@ export class AssetRepository {
);
}
@GenerateSql({
params: [DummyValue.TIME_BUCKET, { size: TimeBucketSize.MONTH, withStacked: true }, { skip: 0, take: 1000 }],
})
getTimeBucket(timeBucket: string, options: TimeBucketOptions) {
const query = this.db
.with('cte', (qb) =>
@GenerateSql({ params: [DummyValue.TIME_BUCKET, { size: TimeBucketSize.MONTH, withStacked: true }] })
async getTimeBucket(timeBucket: string, options: TimeBucketOptions) {
return this.db
.selectFrom('assets')
.selectAll('assets')
.$call(withExif)
.$if(!!options.albumId, (qb) =>
qb
.selectFrom('assets')
.innerJoin('exif', 'assets.id', 'exif.assetId')
.select((eb) => [
'assets.duration',
'assets.id',
sql`assets."isArchived"::int`.as('isArchived'),
sql`assets."isFavorite"::int`.as('isFavorite'),
sql`(assets.type = 'IMAGE')::int`.as('isImage'),
sql`(assets."deletedAt" is null)::int`.as('isTrashed'),
sql`(assets.type = 'VIDEO')::int`.as('isVideo'),
'assets.livePhotoVideoId',
'assets.localDateTime',
'assets.ownerId',
'assets.status',
eb.fn('encode', ['assets.thumbhash', sql.lit('base64')]).as('thumbhash'),
'exif.city',
'exif.country',
'exif.projectionType',
eb.fn
.coalesce(
eb
.case()
.when(sql`exif."exifImageHeight" = 0 or exif."exifImageWidth" = 0`)
.then(eb.lit(1))
.when('exif.orientation', 'in', sql<string>`('5', '6', '7', '8', '-90', '90')`)
.then(sql`round(exif."exifImageHeight"::numeric / exif."exifImageWidth"::numeric, 3)`)
.else(sql`round(exif."exifImageWidth"::numeric / exif."exifImageHeight"::numeric, 3)`)
.end(),
eb.lit(1),
)
.as('ratio'),
])
.where('assets.deletedAt', options.isTrashed ? 'is not' : 'is', null)
.where('assets.isVisible', '=', true)
.where(truncatedDate(TimeBucketSize.MONTH), '=', timeBucket.replace(/^[+-]/, ''))
.$if(!!options.albumId, (qb) =>
qb.where((eb) =>
eb.exists(
eb
.selectFrom('albums_assets_assets')
.whereRef('albums_assets_assets.assetsId', '=', 'assets.id')
.where('albums_assets_assets.albumsId', '=', asUuid(options.albumId!)),
),
),
)
.$if(!!options.personId, (qb) => hasPeople(qb, [options.personId!]))
.$if(!!options.userIds, (qb) => qb.where('assets.ownerId', '=', anyUuid(options.userIds!)))
.$if(options.isArchived !== undefined, (qb) => qb.where('assets.isArchived', '=', options.isArchived!))
.$if(options.isFavorite !== undefined, (qb) => qb.where('assets.isFavorite', '=', options.isFavorite!))
.$if(!!options.withStacked, (qb) =>
qb
.where((eb) =>
eb.not(
eb.exists(
eb
.selectFrom('asset_stack')
.whereRef('asset_stack.id', '=', 'assets.stackId')
.whereRef('asset_stack.primaryAssetId', '!=', 'assets.id'),
),
),
)
.leftJoinLateral(
(eb) =>
eb
.selectFrom('assets as stacked')
.select(sql`array[stacked."stackId"::text, count('stacked')::text]`.as('stack'))
.whereRef('stacked.stackId', '=', 'assets.stackId')
.where('stacked.deletedAt', 'is', null)
.where('stacked.isArchived', '=', false)
.groupBy('stacked.stackId')
.as('stacked_assets'),
(join) => join.onTrue(),
)
.select('stack'),
)
.$if(!!options.assetType, (qb) => qb.where('assets.type', '=', options.assetType!))
.$if(options.isDuplicate !== undefined, (qb) =>
qb.where('assets.duplicateId', options.isDuplicate ? 'is not' : 'is', null),
)
.$if(!!options.isTrashed, (qb) => qb.where('assets.status', '!=', AssetStatus.DELETED))
.$if(!!options.tagId, (qb) => withTagId(qb, options.tagId!))
.orderBy('assets.localDateTime', options.order ?? 'desc'),
.innerJoin('albums_assets_assets', 'albums_assets_assets.assetsId', 'assets.id')
.where('albums_assets_assets.albumsId', '=', options.albumId!),
)
.with('agg', (qb) =>
.$if(!!options.personId, (qb) => hasPeople(qb, [options.personId!]))
.$if(!!options.userIds, (qb) => qb.where('assets.ownerId', '=', anyUuid(options.userIds!)))
.$if(options.isArchived !== undefined, (qb) => qb.where('assets.isArchived', '=', options.isArchived!))
.$if(options.isFavorite !== undefined, (qb) => qb.where('assets.isFavorite', '=', options.isFavorite!))
.$if(!!options.withStacked, (qb) =>
qb
.selectFrom('cte')
.select((eb) => [
eb.fn.coalesce(eb.fn('array_agg', ['city']), sql.lit('{}')).as('city'),
eb.fn.coalesce(eb.fn('array_agg', ['country']), sql.lit('{}')).as('country'),
eb.fn.coalesce(eb.fn('array_agg', ['duration']), sql.lit('{}')).as('duration'),
eb.fn.coalesce(eb.fn('array_agg', ['id']), sql.lit('{}')).as('id'),
eb.fn.coalesce(eb.fn('array_agg', ['isArchived']), sql.lit('{}')).as('isArchived'),
eb.fn.coalesce(eb.fn('array_agg', ['isFavorite']), sql.lit('{}')).as('isFavorite'),
eb.fn.coalesce(eb.fn('array_agg', ['isImage']), sql.lit('{}')).as('isImage'),
// TODO: isTrashed is redundant as it will always be all 0s or 1s depending on the options
eb.fn.coalesce(eb.fn('array_agg', ['isTrashed']), sql.lit('{}')).as('isTrashed'),
eb.fn.coalesce(eb.fn('array_agg', ['livePhotoVideoId']), sql.lit('{}')).as('livePhotoVideoId'),
eb.fn.coalesce(eb.fn('array_agg', ['localDateTime']), sql.lit('{}')).as('localDateTime'),
eb.fn.coalesce(eb.fn('array_agg', ['ownerId']), sql.lit('{}')).as('ownerId'),
eb.fn.coalesce(eb.fn('array_agg', ['projectionType']), sql.lit('{}')).as('projectionType'),
eb.fn.coalesce(eb.fn('array_agg', ['ratio']), sql.lit('{}')).as('ratio'),
eb.fn.coalesce(eb.fn('array_agg', ['status']), sql.lit('{}')).as('status'),
eb.fn.coalesce(eb.fn('array_agg', ['thumbhash']), sql.lit('{}')).as('thumbhash'),
])
.$if(!!options.withStacked, (qb) =>
qb.select((eb) => eb.fn.coalesce(eb.fn('json_agg', ['stack']), sql.lit('[]')).as('stack')),
),
.leftJoin('asset_stack', 'asset_stack.id', 'assets.stackId')
.where((eb) =>
eb.or([eb('asset_stack.primaryAssetId', '=', eb.ref('assets.id')), eb('assets.stackId', 'is', null)]),
)
.leftJoinLateral(
(eb) =>
eb
.selectFrom('assets as stacked')
.selectAll('asset_stack')
.select((eb) => eb.fn.count(eb.table('stacked')).as('assetCount'))
.whereRef('stacked.stackId', '=', 'asset_stack.id')
.where('stacked.deletedAt', 'is', null)
.where('stacked.isArchived', '=', false)
.groupBy('asset_stack.id')
.as('stacked_assets'),
(join) => join.on('asset_stack.id', 'is not', null),
)
.select((eb) => eb.fn.toJson(eb.table('stacked_assets').$castTo<Stack | null>()).as('stack')),
)
.selectFrom('agg')
.select(sql<string>`to_json(agg)::text`.as('assets'));
return query.executeTakeFirstOrThrow();
.$if(!!options.assetType, (qb) => qb.where('assets.type', '=', options.assetType!))
.$if(options.isDuplicate !== undefined, (qb) =>
qb.where('assets.duplicateId', options.isDuplicate ? 'is not' : 'is', null),
)
.$if(!!options.isTrashed, (qb) => qb.where('assets.status', '!=', AssetStatus.DELETED))
.$if(!!options.tagId, (qb) => withTagId(qb, options.tagId!))
.where('assets.deletedAt', options.isTrashed ? 'is not' : 'is', null)
.where('assets.isVisible', '=', true)
.where(truncatedDate(options.size), '=', timeBucket.replace(/^[+-]/, ''))
.orderBy('assets.localDateTime', options.order ?? 'desc')
.execute();
}
@GenerateSql({ params: [DummyValue.UUID] })
@@ -757,7 +774,10 @@ export class AssetRepository {
}
@GenerateSql({ params: [DummyValue.UUID, { minAssetsPerField: 5, maxFields: 12 }] })
async getAssetIdByCity(ownerId: string, { minAssetsPerField, maxFields }: AssetExploreFieldOptions) {
async getAssetIdByCity(
ownerId: string,
{ minAssetsPerField, maxFields }: AssetExploreFieldOptions,
): Promise<SearchExploreItem<string>> {
const items = await this.db
.with('cities', (qb) =>
qb
@@ -772,7 +792,6 @@ export class AssetRepository {
.innerJoin('cities', 'exif.city', 'cities.city')
.distinctOn('exif.city')
.select(['assetId as data', 'exif.city as value'])
.$narrowType<{ value: NotNull }>()
.where('ownerId', '=', asUuid(ownerId))
.where('isVisible', '=', true)
.where('isArchived', '=', false)
@@ -781,7 +800,7 @@ export class AssetRepository {
.limit(maxFields)
.execute();
return { fieldName: 'exifInfo.city', items };
return { fieldName: 'exifInfo.city', items: items as SearchExploreItemSet<string> };
}
@GenerateSql({
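
The getAll and getWithout methods above fetch pagination.take + 1 rows and pass the result to paginationHelper, so hasNextPage can be reported without a separate count query. A minimal sketch of that helper, assuming the { items, hasNextPage } shape that PersonRepository and SearchRepository compute inline further below; the real export in src/utils/pagination is not shown in this diff.

// Assumed shapes for src/utils/pagination (not part of this diff).
export interface PaginationOptions {
  take: number;
  skip?: number;
}

export function paginationHelper<T>(items: T[], take: number): { items: T[]; hasNextPage: boolean } {
  // The query asked for take + 1 rows; an extra row means another page exists.
  const hasNextPage = items.length > take;
  return { items: hasNextPage ? items.slice(0, take) : items, hasNextPage };
}

Fetching one extra row keeps pagination to a single round trip per page, at the cost of callers having to trim the surplus item, which is exactly what the helper centralizes.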

View File

@@ -19,7 +19,7 @@ import { ReleaseNotification, ServerVersionResponseDto } from 'src/dtos/server.d
import { ImmichWorker, MetadataKey, QueueName } from 'src/enum';
import { ConfigRepository } from 'src/repositories/config.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { JobItem, JobSource } from 'src/types';
import { JobItem } from 'src/types';
import { handlePromiseError } from 'src/utils/misc';
type EmitHandlers = Partial<{ [T in EmitEvent]: Array<EventItem<T>> }>;
@@ -48,7 +48,7 @@ type EventMap = {
'config.validate': [{ newConfig: SystemConfig; oldConfig: SystemConfig }];
// album events
'album.update': [{ id: string; recipientId: string }];
'album.update': [{ id: string; recipientIds: string[] }];
'album.invite': [{ id: string; userId: string }];
// asset events
@@ -58,7 +58,6 @@ type EventMap = {
'asset.show': [{ assetId: string; userId: string }];
'asset.trash': [{ assetId: string; userId: string }];
'asset.delete': [{ assetId: string; userId: string }];
'asset.metadataExtracted': [{ assetId: string; userId: string; source?: JobSource }];
// asset bulk events
'assets.trash': [{ assetIds: string[]; userId: string }];

View File

@@ -9,7 +9,7 @@ import { JobName, JobStatus, MetadataKey, QueueCleanType, QueueName } from 'src/
import { ConfigRepository } from 'src/repositories/config.repository';
import { EventRepository } from 'src/repositories/event.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { JobCounts, JobItem, JobOf, QueueStatus } from 'src/types';
import { IEntityJob, JobCounts, JobItem, JobOf, QueueStatus } from 'src/types';
import { getKeyByValue, getMethodNames, ImmichStartupError } from 'src/utils/misc';
type JobMapItem = {
@@ -206,10 +206,7 @@ export class JobRepository {
private getJobOptions(item: JobItem): JobsOptions | null {
switch (item.name) {
case JobName.NOTIFY_ALBUM_UPDATE: {
return {
jobId: `${item.data.id}/${item.data.recipientId}`,
delay: item.data?.delay,
};
return { jobId: item.data.id, delay: item.data?.delay };
}
case JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE: {
return { jobId: item.data.id };
@@ -230,12 +227,19 @@ export class JobRepository {
return this.moduleRef.get<Queue>(getQueueToken(queue), { strict: false });
}
/** @deprecated */
// todo: remove this when asset notifications no longer need it.
public async removeJob(name: JobName, jobID: string): Promise<void> {
const existingJob = await this.getQueue(this.getQueueName(name)).getJob(jobID);
if (existingJob) {
await existingJob.remove();
public async removeJob(jobId: string, name: JobName): Promise<IEntityJob | undefined> {
const existingJob = await this.getQueue(this.getQueueName(name)).getJob(jobId);
if (!existingJob) {
return;
}
try {
await existingJob.remove();
} catch (error: any) {
if (error.message?.includes('Missing key for job')) {
return;
}
throw error;
}
return existingJob.data;
}
}
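
The removeJob change above returns the previous job's data (or undefined) and tolerates BullMQ's "Missing key for job" race when the job was already consumed. A hedged sketch of how a caller might use that return value to debounce album-update notifications by merging recipients before re-queueing; the data shape, the JobQueueLike interface, and the delay value are assumptions for illustration, not code from this diff.

import { JobName } from 'src/enum';

// Assumed data shape for NOTIFY_ALBUM_UPDATE jobs.
interface AlbumUpdateJobData {
  id: string;
  recipientIds: string[];
  delay?: number;
}

// Narrow view of the job repository used by this sketch.
interface JobQueueLike {
  removeJob(jobId: string, name: JobName): Promise<AlbumUpdateJobData | undefined>;
  queue(item: { name: JobName; data: AlbumUpdateJobData }): Promise<void>;
}

async function debounceAlbumUpdate(jobs: JobQueueLike, id: string, recipientIds: string[]) {
  // Pull any pending job for this album; its data says who was already queued.
  const previous = await jobs.removeJob(id, JobName.NOTIFY_ALBUM_UPDATE);
  const merged = [...new Set([...(previous?.recipientIds ?? []), ...recipientIds])];
  await jobs.queue({
    name: JobName.NOTIFY_ALBUM_UPDATE,
    data: { id, recipientIds: merged, delay: 300_000 }, // placeholder delay
  });
}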

View File

@@ -6,7 +6,7 @@ import { AssetFaces, DB, FaceSearch, Person } from 'src/db';
import { ChunkedArray, DummyValue, GenerateSql } from 'src/decorators';
import { AssetFileType, SourceType } from 'src/enum';
import { removeUndefinedKeys } from 'src/utils/database';
import { paginationHelper, PaginationOptions } from 'src/utils/pagination';
import { PaginationOptions } from 'src/utils/pagination';
export interface PersonSearchOptions {
minimumFaceCount: number;
@@ -200,7 +200,11 @@ export class PersonRepository {
.limit(pagination.take + 1)
.execute();
return paginationHelper(items, pagination.take);
if (items.length > pagination.take) {
return { items: items.slice(0, -1), hasNextPage: true };
}
return { items, hasNextPage: false };
}
@GenerateSql()

View File

@@ -8,10 +8,41 @@ import { MapAsset } from 'src/dtos/asset-response.dto';
import { AssetStatus, AssetType } from 'src/enum';
import { ConfigRepository } from 'src/repositories/config.repository';
import { anyUuid, asUuid, searchAssetBuilder, vectorIndexQuery } from 'src/utils/database';
import { paginationHelper } from 'src/utils/pagination';
import { isValidInteger } from 'src/validation';
export interface SearchAssetIdOptions {
export interface SearchResult<T> {
/** total matches */
total: number;
/** collection size */
count: number;
/** current page */
page: number;
/** items for page */
items: T[];
/** score */
distances: number[];
facets: SearchFacet[];
}
export interface SearchFacet {
fieldName: string;
counts: Array<{
count: number;
value: string;
}>;
}
export type SearchExploreItemSet<T> = Array<{
value: string;
data: T;
}>;
export interface SearchExploreItem<T> {
fieldName: string;
items: SearchExploreItemSet<T>;
}
export interface SearchAssetIDOptions {
checksum?: Buffer;
deviceAssetId?: string;
id?: string;
@@ -23,7 +54,7 @@ export interface SearchUserIdOptions {
userIds?: string[];
}
export type SearchIdOptions = SearchAssetIdOptions & SearchUserIdOptions;
export type SearchIdOptions = SearchAssetIDOptions & SearchUserIdOptions;
export interface SearchStatusOptions {
isArchived?: boolean;
@@ -113,6 +144,8 @@ type BaseAssetSearchOptions = SearchDateOptions &
export type AssetSearchOptions = BaseAssetSearchOptions & SearchRelationOptions;
export type AssetSearchOneToOneRelationOptions = BaseAssetSearchOptions & SearchOneToOneRelationOptions;
export type AssetSearchBuilderOptions = Omit<AssetSearchOptions, 'orderDirection'>;
export type SmartSearchOptions = SearchDateOptions &
@@ -193,8 +226,9 @@ export class SearchRepository {
.limit(pagination.size + 1)
.offset((pagination.page - 1) * pagination.size)
.execute();
return paginationHelper(items, pagination.size);
const hasNextPage = items.length > pagination.size;
items.splice(pagination.size);
return { items, hasNextPage };
}
@GenerateSql({
@@ -249,7 +283,9 @@ export class SearchRepository {
.offset((pagination.page - 1) * pagination.size)
.execute();
return paginationHelper(items, pagination.size);
const hasNextPage = items.length > pagination.size;
items.splice(pagination.size);
return { items, hasNextPage };
}
@GenerateSql({

View File

@@ -606,7 +606,7 @@ describe(AlbumService.name, () => {
expect(mocks.album.addAssetIds).toHaveBeenCalledWith('album-123', ['asset-1', 'asset-2', 'asset-3']);
expect(mocks.event.emit).toHaveBeenCalledWith('album.update', {
id: 'album-123',
recipientId: 'admin_id',
recipientIds: ['admin_id'],
});
});

View File

@@ -170,8 +170,8 @@ export class AlbumService extends BaseService {
(userId) => userId !== auth.user.id,
);
for (const recipientId of allUsersExceptUs) {
await this.eventRepository.emit('album.update', { id, recipientId });
if (allUsersExceptUs.length > 0) {
await this.eventRepository.emit('album.update', { id, recipientIds: allUsersExceptUs });
}
}

View File

@@ -1,6 +1,6 @@
import { BadRequestException } from '@nestjs/common';
import { DateTime } from 'luxon';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { MapAsset, mapAsset } from 'src/dtos/asset-response.dto';
import { AssetJobName, AssetStatsResponseDto } from 'src/dtos/asset.dto';
import { AssetStatus, AssetType, JobName, JobStatus } from 'src/enum';
import { AssetStats } from 'src/repositories/asset.repository';
@@ -11,6 +11,7 @@ import { faceStub } from 'test/fixtures/face.stub';
import { userStub } from 'test/fixtures/user.stub';
import { factory } from 'test/small.factory';
import { makeStream, newTestService, ServiceMocks } from 'test/utils';
import { vitest } from 'vitest';
const stats: AssetStats = {
[AssetType.IMAGE]: 10,
@@ -43,6 +44,62 @@ describe(AssetService.name, () => {
mockGetById([assetStub.livePhotoStillAsset, assetStub.livePhotoMotionAsset]);
});
describe('getMemoryLane', () => {
beforeAll(() => {
vitest.useFakeTimers();
vitest.setSystemTime(new Date('2024-01-15'));
});
afterAll(() => {
vitest.useRealTimers();
});
it('should group the assets correctly', async () => {
const image1 = { ...assetStub.image, localDateTime: new Date(2023, 1, 15, 0, 0, 0) };
const image2 = { ...assetStub.image, localDateTime: new Date(2023, 1, 15, 1, 0, 0) };
const image3 = { ...assetStub.image, localDateTime: new Date(2015, 1, 15) };
const image4 = { ...assetStub.image, localDateTime: new Date(2009, 1, 15) };
mocks.partner.getAll.mockResolvedValue([]);
mocks.asset.getByDayOfYear.mockResolvedValue([
{
year: 2023,
assets: [image1, image2],
},
{
year: 2015,
assets: [image3],
},
{
year: 2009,
assets: [image4],
},
] as any);
await expect(sut.getMemoryLane(authStub.admin, { day: 15, month: 1 })).resolves.toEqual([
{ yearsAgo: 1, title: '1 year ago', assets: [mapAsset(image1), mapAsset(image2)] },
{ yearsAgo: 9, title: '9 years ago', assets: [mapAsset(image3)] },
{ yearsAgo: 15, title: '15 years ago', assets: [mapAsset(image4)] },
]);
expect(mocks.asset.getByDayOfYear.mock.calls).toEqual([[[authStub.admin.user.id], { day: 15, month: 1 }]]);
});
it('should get memories with partners with inTimeline enabled', async () => {
const partner = factory.partner();
const auth = factory.auth({ user: { id: partner.sharedWithId } });
mocks.partner.getAll.mockResolvedValue([partner]);
mocks.asset.getByDayOfYear.mockResolvedValue([]);
await sut.getMemoryLane(auth, { day: 15, month: 1 });
expect(mocks.asset.getByDayOfYear.mock.calls).toEqual([
[[auth.user.id, partner.sharedById], { day: 15, month: 1 }],
]);
});
});
describe('getStatistics', () => {
it('should get the statistics for a user, excluding archived assets', async () => {
mocks.asset.getStatistics.mockResolvedValue(stats);

View File

@@ -3,7 +3,13 @@ import _ from 'lodash';
import { DateTime, Duration } from 'luxon';
import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { OnJob } from 'src/decorators';
import { AssetResponseDto, MapAsset, SanitizedAssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
import {
AssetResponseDto,
MapAsset,
MemoryLaneResponseDto,
SanitizedAssetResponseDto,
mapAsset,
} from 'src/dtos/asset-response.dto';
import {
AssetBulkDeleteDto,
AssetBulkUpdateDto,
@@ -14,6 +20,7 @@ import {
mapStats,
} from 'src/dtos/asset.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { MemoryLaneDto } from 'src/dtos/search.dto';
import { AssetStatus, JobName, JobStatus, Permission, QueueName } from 'src/enum';
import { BaseService } from 'src/services/base.service';
import { ISidecarWriteJob, JobItem, JobOf } from 'src/types';
@@ -21,6 +28,26 @@ import { getAssetFiles, getMyPartnerIds, onAfterUnlink, onBeforeLink, onBeforeUn
@Injectable()
export class AssetService extends BaseService {
async getMemoryLane(auth: AuthDto, dto: MemoryLaneDto): Promise<MemoryLaneResponseDto[]> {
const partnerIds = await getMyPartnerIds({
userId: auth.user.id,
repository: this.partnerRepository,
timelineEnabled: true,
});
const userIds = [auth.user.id, ...partnerIds];
const groups = await this.assetRepository.getByDayOfYear(userIds, dto);
return groups.map(({ year, assets }) => {
const yearsAgo = DateTime.utc().year - year;
return {
yearsAgo,
// TODO move this to clients
title: `${yearsAgo} year${yearsAgo > 1 ? 's' : ''} ago`,
assets: assets.map((asset) => mapAsset(asset, { auth })),
};
});
}
async getStatistics(auth: AuthDto, dto: AssetStatsDto) {
const stats = await this.assetRepository.getStatistics(auth.user.id, dto);
return mapStats(stats);

View File

@@ -1,4 +1,6 @@
import { JobStatus } from 'src/enum';
import { BadRequestException } from '@nestjs/common';
import { FileReportItemDto } from 'src/dtos/audit.dto';
import { AssetFileType, AssetPathType, JobStatus, PersonPathType, UserPathType } from 'src/enum';
import { AuditService } from 'src/services/audit.service';
import { newTestService, ServiceMocks } from 'test/utils';
@@ -23,4 +25,148 @@ describe(AuditService.name, () => {
expect(mocks.audit.removeBefore).toHaveBeenCalledWith(expect.any(Date));
});
});
describe('getChecksums', () => {
it('should fail if the file is not in the immich path', async () => {
await expect(sut.getChecksums({ filenames: ['foo/bar'] })).rejects.toBeInstanceOf(BadRequestException);
expect(mocks.crypto.hashFile).not.toHaveBeenCalled();
});
it('should get checksum for valid file', async () => {
await expect(sut.getChecksums({ filenames: ['./upload/my-file.jpg'] })).resolves.toEqual([
{ filename: './upload/my-file.jpg', checksum: expect.any(String) },
]);
expect(mocks.crypto.hashFile).toHaveBeenCalledWith('./upload/my-file.jpg');
});
});
describe('fixItems', () => {
it('should fail if the file is not in the immich path', async () => {
await expect(
sut.fixItems([
{ entityId: 'my-id', pathType: AssetPathType.ORIGINAL, pathValue: 'foo/bar' } as FileReportItemDto,
]),
).rejects.toBeInstanceOf(BadRequestException);
expect(mocks.asset.update).not.toHaveBeenCalled();
expect(mocks.asset.upsertFile).not.toHaveBeenCalled();
expect(mocks.person.update).not.toHaveBeenCalled();
expect(mocks.user.update).not.toHaveBeenCalled();
});
it('should update encoded video path', async () => {
await sut.fixItems([
{
entityId: 'my-id',
pathType: AssetPathType.ENCODED_VIDEO,
pathValue: './upload/my-video.mp4',
} as FileReportItemDto,
]);
expect(mocks.asset.update).toHaveBeenCalledWith({ id: 'my-id', encodedVideoPath: './upload/my-video.mp4' });
expect(mocks.asset.upsertFile).not.toHaveBeenCalled();
expect(mocks.person.update).not.toHaveBeenCalled();
expect(mocks.user.update).not.toHaveBeenCalled();
});
it('should update preview path', async () => {
await sut.fixItems([
{
entityId: 'my-id',
pathType: AssetPathType.PREVIEW,
pathValue: './upload/my-preview.png',
} as FileReportItemDto,
]);
expect(mocks.asset.upsertFile).toHaveBeenCalledWith({
assetId: 'my-id',
type: AssetFileType.PREVIEW,
path: './upload/my-preview.png',
});
expect(mocks.asset.update).not.toHaveBeenCalled();
expect(mocks.person.update).not.toHaveBeenCalled();
expect(mocks.user.update).not.toHaveBeenCalled();
});
it('should update thumbnail path', async () => {
await sut.fixItems([
{
entityId: 'my-id',
pathType: AssetPathType.THUMBNAIL,
pathValue: './upload/my-thumbnail.webp',
} as FileReportItemDto,
]);
expect(mocks.asset.upsertFile).toHaveBeenCalledWith({
assetId: 'my-id',
type: AssetFileType.THUMBNAIL,
path: './upload/my-thumbnail.webp',
});
expect(mocks.asset.update).not.toHaveBeenCalled();
expect(mocks.person.update).not.toHaveBeenCalled();
expect(mocks.user.update).not.toHaveBeenCalled();
});
it('should update original path', async () => {
await sut.fixItems([
{
entityId: 'my-id',
pathType: AssetPathType.ORIGINAL,
pathValue: './upload/my-original.png',
} as FileReportItemDto,
]);
expect(mocks.asset.update).toHaveBeenCalledWith({ id: 'my-id', originalPath: './upload/my-original.png' });
expect(mocks.asset.upsertFile).not.toHaveBeenCalled();
expect(mocks.person.update).not.toHaveBeenCalled();
expect(mocks.user.update).not.toHaveBeenCalled();
});
it('should update sidecar path', async () => {
await sut.fixItems([
{
entityId: 'my-id',
pathType: AssetPathType.SIDECAR,
pathValue: './upload/my-sidecar.xmp',
} as FileReportItemDto,
]);
expect(mocks.asset.update).toHaveBeenCalledWith({ id: 'my-id', sidecarPath: './upload/my-sidecar.xmp' });
expect(mocks.asset.upsertFile).not.toHaveBeenCalled();
expect(mocks.person.update).not.toHaveBeenCalled();
expect(mocks.user.update).not.toHaveBeenCalled();
});
it('should update face path', async () => {
await sut.fixItems([
{
entityId: 'my-id',
pathType: PersonPathType.FACE,
pathValue: './upload/my-face.jpg',
} as FileReportItemDto,
]);
expect(mocks.person.update).toHaveBeenCalledWith({ id: 'my-id', thumbnailPath: './upload/my-face.jpg' });
expect(mocks.asset.update).not.toHaveBeenCalled();
expect(mocks.asset.upsertFile).not.toHaveBeenCalled();
expect(mocks.user.update).not.toHaveBeenCalled();
});
it('should update profile path', async () => {
await sut.fixItems([
{
entityId: 'my-id',
pathType: UserPathType.PROFILE,
pathValue: './upload/my-profile-pic.jpg',
} as FileReportItemDto,
]);
expect(mocks.user.update).toHaveBeenCalledWith('my-id', { profileImagePath: './upload/my-profile-pic.jpg' });
expect(mocks.asset.update).not.toHaveBeenCalled();
expect(mocks.asset.upsertFile).not.toHaveBeenCalled();
expect(mocks.person.update).not.toHaveBeenCalled();
});
});
});

View File

@@ -1,9 +1,23 @@
import { Injectable } from '@nestjs/common';
import { BadRequestException, Injectable } from '@nestjs/common';
import { DateTime } from 'luxon';
import { AUDIT_LOG_MAX_DURATION } from 'src/constants';
import { resolve } from 'node:path';
import { AUDIT_LOG_MAX_DURATION, JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { StorageCore } from 'src/cores/storage.core';
import { OnJob } from 'src/decorators';
import { JobName, JobStatus, QueueName } from 'src/enum';
import { FileChecksumDto, FileChecksumResponseDto, FileReportItemDto, PathEntityType } from 'src/dtos/audit.dto';
import {
AssetFileType,
AssetPathType,
JobName,
JobStatus,
PersonPathType,
QueueName,
StorageFolder,
UserPathType,
} from 'src/enum';
import { BaseService } from 'src/services/base.service';
import { getAssetFiles } from 'src/utils/asset.util';
import { usePagination } from 'src/utils/pagination';
@Injectable()
export class AuditService extends BaseService {
@@ -12,4 +26,187 @@ export class AuditService extends BaseService {
await this.auditRepository.removeBefore(DateTime.now().minus(AUDIT_LOG_MAX_DURATION).toJSDate());
return JobStatus.SUCCESS;
}
async getChecksums(dto: FileChecksumDto) {
const results: FileChecksumResponseDto[] = [];
for (const filename of dto.filenames) {
if (!StorageCore.isImmichPath(filename)) {
throw new BadRequestException(
`Could not get the checksum of ${filename} because the file isn't accessible by Immich`,
);
}
const checksum = await this.cryptoRepository.hashFile(filename);
results.push({ filename, checksum: checksum.toString('base64') });
}
return results;
}
async fixItems(items: FileReportItemDto[]) {
for (const { entityId: id, pathType, pathValue } of items) {
if (!StorageCore.isImmichPath(pathValue)) {
throw new BadRequestException(
`Could not fix item ${id} with path ${pathValue} because the file isn't accessible by Immich`,
);
}
switch (pathType) {
case AssetPathType.ENCODED_VIDEO: {
await this.assetRepository.update({ id, encodedVideoPath: pathValue });
break;
}
case AssetPathType.PREVIEW: {
await this.assetRepository.upsertFile({ assetId: id, type: AssetFileType.PREVIEW, path: pathValue });
break;
}
case AssetPathType.THUMBNAIL: {
await this.assetRepository.upsertFile({ assetId: id, type: AssetFileType.THUMBNAIL, path: pathValue });
break;
}
case AssetPathType.ORIGINAL: {
await this.assetRepository.update({ id, originalPath: pathValue });
break;
}
case AssetPathType.SIDECAR: {
await this.assetRepository.update({ id, sidecarPath: pathValue });
break;
}
case PersonPathType.FACE: {
await this.personRepository.update({ id, thumbnailPath: pathValue });
break;
}
case UserPathType.PROFILE: {
await this.userRepository.update(id, { profileImagePath: pathValue });
break;
}
}
}
}
private fullPath(filename: string) {
return resolve(filename);
}
async getFileReport() {
const hasFile = (items: Set<string>, filename: string) => items.has(filename) || items.has(this.fullPath(filename));
const crawl = async (folder: StorageFolder) =>
new Set(
await this.storageRepository.crawl({
includeHidden: true,
pathsToCrawl: [StorageCore.getBaseFolder(folder)],
}),
);
const uploadFiles = await crawl(StorageFolder.UPLOAD);
const libraryFiles = await crawl(StorageFolder.LIBRARY);
const thumbFiles = await crawl(StorageFolder.THUMBNAILS);
const videoFiles = await crawl(StorageFolder.ENCODED_VIDEO);
const profileFiles = await crawl(StorageFolder.PROFILE);
const allFiles = new Set<string>();
for (const list of [libraryFiles, thumbFiles, videoFiles, profileFiles, uploadFiles]) {
for (const item of list) {
allFiles.add(item);
}
}
const track = (filename: string | null | undefined) => {
if (!filename) {
return;
}
allFiles.delete(filename);
allFiles.delete(this.fullPath(filename));
};
this.logger.log(
`Found ${libraryFiles.size} original files, ${thumbFiles.size} thumbnails, ${videoFiles.size} encoded videos, ${profileFiles.size} profile files`,
);
const pagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (options) =>
this.assetRepository.getAll(options, { withDeleted: true, withArchived: true }),
);
let assetCount = 0;
const orphans: FileReportItemDto[] = [];
for await (const assets of pagination) {
assetCount += assets.length;
for (const { id, files, originalPath, encodedVideoPath, isExternal, checksum } of assets) {
const { fullsizeFile, previewFile, thumbnailFile } = getAssetFiles(files);
for (const file of [
originalPath,
fullsizeFile?.path,
previewFile?.path,
encodedVideoPath,
thumbnailFile?.path,
]) {
track(file);
}
const entity = { entityId: id, entityType: PathEntityType.ASSET, checksum: checksum.toString('base64') };
if (
originalPath &&
!hasFile(libraryFiles, originalPath) &&
!hasFile(uploadFiles, originalPath) &&
// Android motion assets
!hasFile(videoFiles, originalPath) &&
// ignore external library assets
!isExternal
) {
orphans.push({ ...entity, pathType: AssetPathType.ORIGINAL, pathValue: originalPath });
}
if (previewFile && !hasFile(thumbFiles, previewFile.path)) {
orphans.push({ ...entity, pathType: AssetPathType.PREVIEW, pathValue: previewFile.path });
}
if (thumbnailFile && !hasFile(thumbFiles, thumbnailFile.path)) {
orphans.push({ ...entity, pathType: AssetPathType.THUMBNAIL, pathValue: thumbnailFile.path });
}
if (encodedVideoPath && !hasFile(videoFiles, encodedVideoPath)) {
orphans.push({ ...entity, pathType: AssetPathType.THUMBNAIL, pathValue: encodedVideoPath });
}
}
}
const users = await this.userRepository.getList();
for (const { id, profileImagePath } of users) {
track(profileImagePath);
const entity = { entityId: id, entityType: PathEntityType.USER };
if (profileImagePath && !hasFile(profileFiles, profileImagePath)) {
orphans.push({ ...entity, pathType: UserPathType.PROFILE, pathValue: profileImagePath });
}
}
let peopleCount = 0;
for await (const { id, thumbnailPath } of this.personRepository.getAll()) {
track(thumbnailPath);
const entity = { entityId: id, entityType: PathEntityType.PERSON };
if (thumbnailPath && !hasFile(thumbFiles, thumbnailPath)) {
orphans.push({ ...entity, pathType: PersonPathType.FACE, pathValue: thumbnailPath });
}
if (peopleCount === JOBS_ASSET_PAGINATION_SIZE) {
this.logger.log(`Found ${assetCount} assets, ${users.length} users, ${peopleCount} people`);
peopleCount = 0;
}
}
this.logger.log(`Found ${assetCount} assets, ${users.length} users, ${peopleCount} people`);
const extras: string[] = [];
for (const file of allFiles) {
extras.push(file);
}
// send as absolute paths
for (const orphan of orphans) {
orphan.pathValue = this.fullPath(orphan.pathValue);
}
return { orphans, extras };
}
}
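
getFileReport above drives its asset scan with for await over usePagination(JOBS_ASSET_PAGINATION_SIZE, ...). A minimal sketch of an async-generator helper with that contract, assuming the callback receives { take, skip } and resolves to { items, hasNextPage }; the actual implementation in src/utils/pagination is not shown in this diff.

// Assumed contract: pageFn receives { take, skip } and reports hasNextPage.
export async function* usePagination<T>(
  pageSize: number,
  pageFn: (options: { take: number; skip: number }) => Promise<{ items: T[]; hasNextPage: boolean }>,
): AsyncGenerator<T[]> {
  let skip = 0;
  let hasNextPage = true;
  while (hasNextPage) {
    const page = await pageFn({ take: pageSize, skip });
    hasNextPage = page.hasNextPage;
    skip += pageSize;
    if (page.items.length > 0) {
      yield page.items; // one page of entities per iteration
    }
  }
}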

View File

@@ -1,9 +1,10 @@
import { AssetFileType, AssetType, JobName, JobStatus } from 'src/enum';
import { WithoutProperty } from 'src/repositories/asset.repository';
import { DuplicateService } from 'src/services/duplicate.service';
import { SearchService } from 'src/services/search.service';
import { assetStub } from 'test/fixtures/asset.stub';
import { authStub } from 'test/fixtures/auth.stub';
import { makeStream, newTestService, ServiceMocks } from 'test/utils';
import { newTestService, ServiceMocks } from 'test/utils';
import { beforeEach, vitest } from 'vitest';
vitest.useFakeTimers();
@@ -112,11 +113,14 @@ describe(SearchService.name, () => {
});
it('should queue missing assets', async () => {
mocks.assetJob.streamForSearchDuplicates.mockReturnValue(makeStream([assetStub.image]));
mocks.asset.getWithout.mockResolvedValue({
items: [assetStub.image],
hasNextPage: false,
});
await sut.handleQueueSearchDuplicates({});
expect(mocks.assetJob.streamForSearchDuplicates).toHaveBeenCalledWith(undefined);
expect(mocks.asset.getWithout).toHaveBeenCalledWith({ skip: 0, take: 1000 }, WithoutProperty.DUPLICATE);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.DUPLICATE_DETECTION,
@@ -126,11 +130,14 @@ describe(SearchService.name, () => {
});
it('should queue all assets', async () => {
mocks.assetJob.streamForSearchDuplicates.mockReturnValue(makeStream([assetStub.image]));
mocks.asset.getAll.mockResolvedValue({
items: [assetStub.image],
hasNextPage: false,
});
await sut.handleQueueSearchDuplicates({ force: true });
expect(mocks.assetJob.streamForSearchDuplicates).toHaveBeenCalledWith(true);
expect(mocks.asset.getAll).toHaveBeenCalled();
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.DUPLICATE_DETECTION,

View File

@@ -5,11 +5,13 @@ import { mapAsset } from 'src/dtos/asset-response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { DuplicateResponseDto } from 'src/dtos/duplicate.dto';
import { AssetFileType, JobName, JobStatus, QueueName } from 'src/enum';
import { WithoutProperty } from 'src/repositories/asset.repository';
import { AssetDuplicateResult } from 'src/repositories/search.repository';
import { BaseService } from 'src/services/base.service';
import { JobItem, JobOf } from 'src/types';
import { JobOf } from 'src/types';
import { getAssetFile } from 'src/utils/asset.util';
import { isDuplicateDetectionEnabled } from 'src/utils/misc';
import { usePagination } from 'src/utils/pagination';
@Injectable()
export class DuplicateService extends BaseService {
@@ -28,22 +30,18 @@ export class DuplicateService extends BaseService {
return JobStatus.SKIPPED;
}
let jobs: JobItem[] = [];
const queueAll = async () => {
await this.jobRepository.queueAll(jobs);
jobs = [];
};
const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => {
return force
? this.assetRepository.getAll(pagination, { isVisible: true })
: this.assetRepository.getWithout(pagination, WithoutProperty.DUPLICATE);
});
const assets = this.assetJobRepository.streamForSearchDuplicates(force);
for await (const asset of assets) {
jobs.push({ name: JobName.DUPLICATE_DETECTION, data: { id: asset.id } });
if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
await queueAll();
}
for await (const assets of assetPagination) {
await this.jobRepository.queueAll(
assets.map((asset) => ({ name: JobName.DUPLICATE_DETECTION, data: { id: asset.id } })),
);
}
await queueAll();
return JobStatus.SUCCESS;
}

View File

@@ -239,6 +239,10 @@ describe(JobService.name, () => {
item: { name: JobName.SIDECAR_DISCOVERY, data: { id: 'asset-1' } },
jobs: [JobName.METADATA_EXTRACTION],
},
{
item: { name: JobName.METADATA_EXTRACTION, data: { id: 'asset-1' } },
jobs: [JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE],
},
{
item: { name: JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE, data: { id: 'asset-1', source: 'upload' } },
jobs: [JobName.GENERATE_THUMBNAILS],

View File

@@ -264,6 +264,17 @@ export class JobService extends BaseService {
break;
}
case JobName.METADATA_EXTRACTION: {
if (item.data.source === 'sidecar-write') {
const [asset] = await this.assetRepository.getByIdsWithAllRelationsButStacks([item.data.id]);
if (asset) {
this.eventRepository.clientSend('on_asset_update', asset.ownerId, mapAsset(asset));
}
}
await this.jobRepository.queue({ name: JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE, data: item.data });
break;
}
case JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE: {
if (item.data.source === 'upload' || item.data.source === 'copy') {
await this.jobRepository.queue({ name: JobName.GENERATE_THUMBNAILS, data: item.data });

View File

@@ -273,6 +273,7 @@ describe(LibraryService.name, () => {
mocks.library.get.mockResolvedValue(library);
mocks.storage.walk.mockImplementation(async function* generator() {});
mocks.asset.getAll.mockResolvedValue({ items: [assetStub.external], hasNextPage: false });
mocks.asset.getLibraryAssetCount.mockResolvedValue(1);
mocks.asset.detectOfflineExternalAssets.mockResolvedValue({ numUpdatedRows: BigInt(1) });
@@ -291,6 +292,7 @@ describe(LibraryService.name, () => {
mocks.library.get.mockResolvedValue(library);
mocks.storage.walk.mockImplementation(async function* generator() {});
mocks.asset.getAll.mockResolvedValue({ items: [assetStub.external], hasNextPage: false });
mocks.asset.getLibraryAssetCount.mockResolvedValue(0);
mocks.asset.detectOfflineExternalAssets.mockResolvedValue({ numUpdatedRows: BigInt(1) });

View File

@@ -38,6 +38,10 @@ describe(MediaService.name, () => {
describe('handleQueueGenerateThumbnails', () => {
it('should queue all assets', async () => {
mocks.assetJob.streamForThumbnailJob.mockReturnValue(makeStream([assetStub.image]));
mocks.asset.getAll.mockResolvedValue({
items: [assetStub.image],
hasNextPage: false,
});
mocks.person.getAll.mockReturnValue(makeStream([personStub.newThumbnail]));
mocks.person.getFacesByIds.mockResolvedValue([faceStub.face1]);
@@ -63,6 +67,10 @@ describe(MediaService.name, () => {
it('should queue trashed assets when force is true', async () => {
mocks.assetJob.streamForThumbnailJob.mockReturnValue(makeStream([assetStub.archived]));
mocks.asset.getAll.mockResolvedValue({
items: [assetStub.trashed],
hasNextPage: false,
});
mocks.person.getAll.mockReturnValue(makeStream());
await sut.handleQueueGenerateThumbnails({ force: true });
@@ -163,7 +171,7 @@ describe(MediaService.name, () => {
describe('handleQueueMigration', () => {
it('should remove empty directories and queue jobs', async () => {
mocks.assetJob.streamForMigrationJob.mockReturnValue(makeStream([assetStub.image]));
mocks.asset.getAll.mockResolvedValue({ hasNextPage: false, items: [assetStub.image] });
mocks.job.getJobCounts.mockResolvedValue({ active: 1, waiting: 0 } as JobCounts);
mocks.person.getAll.mockReturnValue(makeStream([personStub.withName]));

View File

@@ -36,6 +36,7 @@ import {
import { getAssetFiles } from 'src/utils/asset.util';
import { BaseConfig, ThumbnailConfig } from 'src/utils/media';
import { mimeTypes } from 'src/utils/mime-types';
import { usePagination } from 'src/utils/pagination';
@Injectable()
export class MediaService extends BaseService {
@@ -49,26 +50,18 @@ export class MediaService extends BaseService {
@OnJob({ name: JobName.QUEUE_GENERATE_THUMBNAILS, queue: QueueName.THUMBNAIL_GENERATION })
async handleQueueGenerateThumbnails({ force }: JobOf<JobName.QUEUE_GENERATE_THUMBNAILS>): Promise<JobStatus> {
let jobs: JobItem[] = [];
const queueAll = async () => {
await this.jobRepository.queueAll(jobs);
jobs = [];
};
const thumbJobs: JobItem[] = [];
for await (const asset of this.assetJobRepository.streamForThumbnailJob(!!force)) {
const { previewFile, thumbnailFile } = getAssetFiles(asset.files);
if (!previewFile || !thumbnailFile || !asset.thumbhash || force) {
jobs.push({ name: JobName.GENERATE_THUMBNAILS, data: { id: asset.id } });
}
if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
await queueAll();
thumbJobs.push({ name: JobName.GENERATE_THUMBNAILS, data: { id: asset.id } });
continue;
}
}
await this.jobRepository.queueAll(thumbJobs);
await queueAll();
const jobs: JobItem[] = [];
const people = this.personRepository.getAll(force ? undefined : { thumbnailPath: '' });
@@ -83,36 +76,32 @@ export class MediaService extends BaseService {
}
jobs.push({ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id: person.id } });
if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
await queueAll();
}
}
await queueAll();
await this.jobRepository.queueAll(jobs);
return JobStatus.SUCCESS;
}
@OnJob({ name: JobName.QUEUE_MIGRATION, queue: QueueName.MIGRATION })
async handleQueueMigration(): Promise<JobStatus> {
const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) =>
this.assetRepository.getAll(pagination),
);
const { active, waiting } = await this.jobRepository.getJobCounts(QueueName.MIGRATION);
if (active === 1 && waiting === 0) {
await this.storageCore.removeEmptyDirs(StorageFolder.THUMBNAILS);
await this.storageCore.removeEmptyDirs(StorageFolder.ENCODED_VIDEO);
}
let jobs: JobItem[] = [];
const assets = this.assetJobRepository.streamForMigrationJob();
for await (const asset of assets) {
jobs.push({ name: JobName.MIGRATE_ASSET, data: { id: asset.id } });
if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
await this.jobRepository.queueAll(jobs);
jobs = [];
}
for await (const assets of assetPagination) {
await this.jobRepository.queueAll(
assets.map((asset) => ({ name: JobName.MIGRATE_ASSET, data: { id: asset.id } })),
);
}
await this.jobRepository.queueAll(jobs);
jobs = [];
let jobs: { name: JobName.MIGRATE_PERSON; data: { id: string } }[] = [];
for await (const person of this.personRepository.getAll()) {
jobs.push({ name: JobName.MIGRATE_PERSON, data: { id: person.id } });
@@ -266,9 +255,7 @@ export class MediaService extends BaseService {
const { info, data, colorspace } = await this.decodeImage(
extracted ? extracted.buffer : asset.originalPath,
// only specify orientation for extracted images that don't have EXIF orientation data,
// otherwise it can double-rotate the image
extracted ? asset.exifInfo : { ...asset.exifInfo, orientation: null },
asset.exifInfo,
convertFullsize ? undefined : image.preview.size,
);
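
Both versions of handleQueueGenerateThumbnails and handleQueueMigration above alternate between iterating asset ids and queueing jobs in fixed-size batches, flushing whenever JOBS_ASSET_PAGINATION_SIZE is reached. A small sketch of that batch-and-flush loop as a standalone helper, assuming an async iterable source; queueInBatches is an illustrative name, not part of this diff.

// Generic batch-and-flush, mirroring the "push, flush at JOBS_ASSET_PAGINATION_SIZE" loops above.
async function queueInBatches<T>(
  source: AsyncIterable<T>,
  batchSize: number,
  queueAll: (batch: T[]) => Promise<void>,
): Promise<void> {
  let batch: T[] = [];
  for await (const item of source) {
    batch.push(item);
    if (batch.length >= batchSize) {
      await queueAll(batch);
      batch = [];
    }
  }
  if (batch.length > 0) {
    await queueAll(batch); // flush the remainder
  }
}

Batching keeps memory bounded while streaming arbitrarily many rows, and it avoids one queue round trip per asset.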

View File

@@ -5,6 +5,7 @@ import { constants } from 'node:fs/promises';
import { defaults } from 'src/config';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { AssetType, ExifOrientation, ImmichWorker, JobName, JobStatus, SourceType } from 'src/enum';
import { WithoutProperty } from 'src/repositories/asset.repository';
import { ImmichTags } from 'src/repositories/metadata.repository';
import { MetadataService } from 'src/services/metadata.service';
import { assetStub } from 'test/fixtures/asset.stub';
@@ -143,8 +144,7 @@ describe(MetadataService.name, () => {
it('should handle an asset that could not be found', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(void 0);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
await expect(sut.handleMetadataExtraction({ id: assetStub.image.id })).resolves.toBe(JobStatus.FAILED);
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id);
expect(mocks.asset.upsertExif).not.toHaveBeenCalled();
@@ -527,7 +527,7 @@ describe(MetadataService.name, () => {
ContainerDirectory: [{ Foo: 100 }],
});
await sut.handleMetadataExtraction({ id: assetStub.image.id });
await expect(sut.handleMetadataExtraction({ id: assetStub.image.id })).resolves.toBe(JobStatus.SUCCESS);
});
it('should extract the correct video orientation', async () => {
@@ -1202,7 +1202,7 @@ describe(MetadataService.name, () => {
it('should handle livePhotoCID not set', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
await expect(sut.handleMetadataExtraction({ id: assetStub.image.id })).resolves.toBe(JobStatus.SUCCESS);
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id);
expect(mocks.asset.findLivePhotoMatch).not.toHaveBeenCalled();
@@ -1215,7 +1215,9 @@ describe(MetadataService.name, () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.livePhotoMotionAsset);
mockReadTags({ ContentIdentifier: 'CID' });
await sut.handleMetadataExtraction({ id: assetStub.livePhotoMotionAsset.id });
await expect(sut.handleMetadataExtraction({ id: assetStub.livePhotoMotionAsset.id })).resolves.toBe(
JobStatus.SUCCESS,
);
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.livePhotoMotionAsset.id);
expect(mocks.asset.findLivePhotoMatch).toHaveBeenCalledWith({
@@ -1234,7 +1236,9 @@ describe(MetadataService.name, () => {
mocks.asset.findLivePhotoMatch.mockResolvedValue(assetStub.livePhotoMotionAsset);
mockReadTags({ ContentIdentifier: 'CID' });
await sut.handleMetadataExtraction({ id: assetStub.livePhotoStillAsset.id });
await expect(sut.handleMetadataExtraction({ id: assetStub.livePhotoStillAsset.id })).resolves.toBe(
JobStatus.SUCCESS,
);
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.livePhotoStillAsset.id);
expect(mocks.asset.findLivePhotoMatch).toHaveBeenCalledWith({
@@ -1258,7 +1262,9 @@ describe(MetadataService.name, () => {
mocks.asset.findLivePhotoMatch.mockResolvedValue(assetStub.livePhotoMotionAsset);
mockReadTags({ ContentIdentifier: 'CID' });
await sut.handleMetadataExtraction({ id: assetStub.livePhotoStillAsset.id });
await expect(sut.handleMetadataExtraction({ id: assetStub.livePhotoStillAsset.id })).resolves.toBe(
JobStatus.SUCCESS,
);
expect(mocks.event.emit).toHaveBeenCalledWith('asset.hide', {
userId: assetStub.livePhotoMotionAsset.ownerId,
@@ -1274,12 +1280,10 @@ describe(MetadataService.name, () => {
mocks.asset.findLivePhotoMatch.mockResolvedValue(assetStub.livePhotoMotionAsset);
mockReadTags({ ContentIdentifier: 'CID' });
await sut.handleMetadataExtraction({ id: assetStub.livePhotoStillAsset.id });
await expect(sut.handleMetadataExtraction({ id: assetStub.livePhotoStillAsset.id })).resolves.toBe(
JobStatus.SUCCESS,
);
expect(mocks.event.emit).toHaveBeenCalledWith('asset.metadataExtracted', {
assetId: assetStub.livePhotoStillAsset.id,
userId: assetStub.livePhotoStillAsset.ownerId,
});
expect(mocks.asset.findLivePhotoMatch).toHaveBeenCalledWith({
ownerId: 'user-id',
otherAssetId: 'live-photo-still-asset',
@@ -1342,11 +1346,12 @@ describe(MetadataService.name, () => {
describe('handleQueueSidecar', () => {
it('should queue assets with sidecar files', async () => {
mocks.assetJob.streamForSidecar.mockReturnValue(makeStream([assetStub.image]));
mocks.asset.getAll.mockResolvedValue({ items: [assetStub.sidecar], hasNextPage: false });
await sut.handleQueueSidecar({ force: true });
expect(mocks.assetJob.streamForSidecar).toHaveBeenCalledWith(true);
expect(mocks.asset.getAll).toHaveBeenCalledWith({ take: 1000, skip: 0 });
expect(mocks.asset.getWithout).not.toHaveBeenCalled();
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.SIDECAR_SYNC,
@@ -1356,11 +1361,12 @@ describe(MetadataService.name, () => {
});
it('should queue assets without sidecar files', async () => {
mocks.assetJob.streamForSidecar.mockReturnValue(makeStream([assetStub.image]));
mocks.asset.getWithout.mockResolvedValue({ items: [assetStub.image], hasNextPage: false });
await sut.handleQueueSidecar({ force: false });
expect(mocks.assetJob.streamForSidecar).toHaveBeenCalledWith(false);
expect(mocks.asset.getWithout).toHaveBeenCalledWith({ take: 1000, skip: 0 }, WithoutProperty.SIDECAR);
expect(mocks.asset.getAll).not.toHaveBeenCalled();
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.SIDECAR_DISCOVERY,

View File

@@ -22,12 +22,14 @@ import {
QueueName,
SourceType,
} from 'src/enum';
import { WithoutProperty } from 'src/repositories/asset.repository';
import { ArgOf } from 'src/repositories/event.repository';
import { ReverseGeocodeResult } from 'src/repositories/map.repository';
import { ImmichTags } from 'src/repositories/metadata.repository';
import { BaseService } from 'src/services/base.service';
import { JobItem, JobOf } from 'src/types';
import { JobOf } from 'src/types';
import { isFaceImportEnabled } from 'src/utils/misc';
import { usePagination } from 'src/utils/pagination';
import { upsertTags } from 'src/utils/tag';
/** look for a date from these tags (in order) */
@@ -182,14 +184,14 @@ export class MetadataService extends BaseService {
}
@OnJob({ name: JobName.METADATA_EXTRACTION, queue: QueueName.METADATA_EXTRACTION })
async handleMetadataExtraction(data: JobOf<JobName.METADATA_EXTRACTION>) {
async handleMetadataExtraction(data: JobOf<JobName.METADATA_EXTRACTION>): Promise<JobStatus> {
const [{ metadata, reverseGeocoding }, asset] = await Promise.all([
this.getConfig({ withCache: true }),
this.assetJobRepository.getForMetadataExtraction(data.id),
]);
if (!asset) {
return;
return JobStatus.FAILED;
}
const [exifTags, stats] = await Promise.all([
@@ -283,31 +285,27 @@ export class MetadataService extends BaseService {
await this.assetRepository.upsertJobStatus({ assetId: asset.id, metadataExtractedAt: new Date() });
await this.eventRepository.emit('asset.metadataExtracted', {
assetId: asset.id,
userId: asset.ownerId,
source: data.source,
});
return JobStatus.SUCCESS;
}
@OnJob({ name: JobName.QUEUE_SIDECAR, queue: QueueName.SIDECAR })
async handleQueueSidecar({ force }: JobOf<JobName.QUEUE_SIDECAR>): Promise<JobStatus> {
let jobs: JobItem[] = [];
const queueAll = async () => {
await this.jobRepository.queueAll(jobs);
jobs = [];
};
async handleQueueSidecar(job: JobOf<JobName.QUEUE_SIDECAR>): Promise<JobStatus> {
const { force } = job;
const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => {
return force
? this.assetRepository.getAll(pagination)
: this.assetRepository.getWithout(pagination, WithoutProperty.SIDECAR);
});
const assets = this.assetJobRepository.streamForSidecar(force);
for await (const asset of assets) {
jobs.push({ name: force ? JobName.SIDECAR_SYNC : JobName.SIDECAR_DISCOVERY, data: { id: asset.id } });
if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
await queueAll();
}
for await (const assets of assetPagination) {
await this.jobRepository.queueAll(
assets.map((asset) => ({
name: force ? JobName.SIDECAR_SYNC : JobName.SIDECAR_DISCOVERY,
data: { id: asset.id },
})),
);
}
await queueAll();
return JobStatus.SUCCESS;
}
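
Reviewer note on the handleQueueSidecar hunk above: it toggles between a streamed iterator with manual batching (push job items, flush whenever a JOBS_ASSET_PAGINATION_SIZE batch fills up) and the usePagination generator. A minimal, self-contained sketch of the batching pattern follows; JobItem, queueAll, the asset stream, and PAGE_SIZE are simplified stand-ins rather than the project's real types and repositories.

```typescript
// Sketch: batch job items and flush them in page-sized chunks.
// JobItem, queueAll and the asset stream are simplified stand-ins.
type JobItem = { name: string; data: { id: string } };

const PAGE_SIZE = 1000; // plays the role of JOBS_ASSET_PAGINATION_SIZE

async function queueAll(jobs: JobItem[]): Promise<void> {
  // In the real service this delegates to jobRepository.queueAll(jobs).
  console.log(`queueing ${jobs.length} jobs`);
}

async function* streamAssets(count: number): AsyncGenerator<{ id: string }> {
  for (let i = 0; i < count; i++) {
    yield { id: `asset-${i}` };
  }
}

async function handleQueueSidecar(force: boolean): Promise<void> {
  let jobs: JobItem[] = [];
  for await (const asset of streamAssets(2500)) {
    jobs.push({ name: force ? 'sidecar-sync' : 'sidecar-discovery', data: { id: asset.id } });
    if (jobs.length >= PAGE_SIZE) {
      await queueAll(jobs);
      jobs = [];
    }
  }
  // Flush the final partial batch, mirroring the trailing queueAll() in the stream-based variant.
  await queueAll(jobs);
}

void handleQueueSidecar(true);
```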

View File

@@ -154,10 +154,10 @@ describe(NotificationService.name, () => {
describe('onAlbumUpdateEvent', () => {
it('should queue notify album update event', async () => {
await sut.onAlbumUpdate({ id: 'album', recipientId: '42' });
await sut.onAlbumUpdate({ id: 'album', recipientIds: ['42'] });
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.NOTIFY_ALBUM_UPDATE,
data: { id: 'album', recipientId: '42', delay: 300_000 },
data: { id: 'album', recipientIds: ['42'], delay: 300_000 },
});
});
});
@@ -414,14 +414,14 @@ describe(NotificationService.name, () => {
describe('handleAlbumUpdate', () => {
it('should skip if album could not be found', async () => {
await expect(sut.handleAlbumUpdate({ id: '', recipientId: '1' })).resolves.toBe(JobStatus.SKIPPED);
await expect(sut.handleAlbumUpdate({ id: '', recipientIds: ['1'] })).resolves.toBe(JobStatus.SKIPPED);
expect(mocks.user.get).not.toHaveBeenCalled();
});
it('should skip if owner could not be found', async () => {
mocks.album.getById.mockResolvedValue(albumStub.emptyWithValidThumbnail);
await expect(sut.handleAlbumUpdate({ id: '', recipientId: '1' })).resolves.toBe(JobStatus.SKIPPED);
await expect(sut.handleAlbumUpdate({ id: '', recipientIds: ['1'] })).resolves.toBe(JobStatus.SKIPPED);
expect(mocks.systemMetadata.get).not.toHaveBeenCalled();
});
@@ -434,7 +434,7 @@ describe(NotificationService.name, () => {
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([]);
await sut.handleAlbumUpdate({ id: '', recipientId: userStub.user1.id });
await sut.handleAlbumUpdate({ id: '', recipientIds: [userStub.user1.id] });
expect(mocks.user.get).toHaveBeenCalledWith(userStub.user1.id, { withDeleted: false });
expect(mocks.email.renderEmail).not.toHaveBeenCalled();
});
@@ -456,7 +456,7 @@ describe(NotificationService.name, () => {
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([]);
await sut.handleAlbumUpdate({ id: '', recipientId: userStub.user1.id });
await sut.handleAlbumUpdate({ id: '', recipientIds: [userStub.user1.id] });
expect(mocks.user.get).toHaveBeenCalledWith(userStub.user1.id, { withDeleted: false });
expect(mocks.email.renderEmail).not.toHaveBeenCalled();
});
@@ -478,7 +478,7 @@ describe(NotificationService.name, () => {
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([]);
await sut.handleAlbumUpdate({ id: '', recipientId: userStub.user1.id });
await sut.handleAlbumUpdate({ id: '', recipientIds: [userStub.user1.id] });
expect(mocks.user.get).toHaveBeenCalledWith(userStub.user1.id, { withDeleted: false });
expect(mocks.email.renderEmail).not.toHaveBeenCalled();
});
@@ -492,21 +492,21 @@ describe(NotificationService.name, () => {
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([]);
await sut.handleAlbumUpdate({ id: '', recipientId: userStub.user1.id });
await sut.handleAlbumUpdate({ id: '', recipientIds: [userStub.user1.id] });
expect(mocks.user.get).toHaveBeenCalledWith(userStub.user1.id, { withDeleted: false });
expect(mocks.email.renderEmail).toHaveBeenCalled();
expect(mocks.job.queue).toHaveBeenCalled();
});
it('should add new recipients for new images if job is already queued', async () => {
await sut.onAlbumUpdate({ id: '1', recipientId: '2' } as INotifyAlbumUpdateJob);
expect(mocks.job.removeJob).toHaveBeenCalledWith(JobName.NOTIFY_ALBUM_UPDATE, '1/2');
mocks.job.removeJob.mockResolvedValue({ id: '1', recipientIds: ['2', '3', '4'] } as INotifyAlbumUpdateJob);
await sut.onAlbumUpdate({ id: '1', recipientIds: ['1', '2', '3'] } as INotifyAlbumUpdateJob);
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.NOTIFY_ALBUM_UPDATE,
data: {
id: '1',
delay: 300_000,
recipientId: '2',
recipientIds: ['1', '2', '3', '4'],
},
});
});

View File

@@ -1,6 +1,5 @@
import { BadRequestException, Injectable } from '@nestjs/common';
import { OnEvent, OnJob } from 'src/decorators';
import { mapAsset } from 'src/dtos/asset-response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import {
mapNotification,
@@ -23,7 +22,7 @@ import {
import { EmailTemplate } from 'src/repositories/email.repository';
import { ArgOf } from 'src/repositories/event.repository';
import { BaseService } from 'src/services/base.service';
import { EmailImageAttachment, JobOf } from 'src/types';
import { EmailImageAttachment, IEntityJob, INotifyAlbumUpdateJob, JobItem, JobOf } from 'src/types';
import { getFilenameExtension } from 'src/utils/file';
import { getExternalDomain } from 'src/utils/misc';
import { isEqualObject } from 'src/utils/object';
@@ -153,18 +152,6 @@ export class NotificationService extends BaseService {
this.eventRepository.clientSend('on_asset_trash', userId, assetIds);
}
@OnEvent({ name: 'asset.metadataExtracted' })
async onAssetMetadataExtracted({ assetId, userId, source }: ArgOf<'asset.metadataExtracted'>) {
if (source !== 'sidecar-write') {
return;
}
const [asset] = await this.assetRepository.getByIdsWithAllRelationsButStacks([assetId]);
if (asset) {
this.eventRepository.clientSend('on_asset_update', userId, mapAsset(asset));
}
}
@OnEvent({ name: 'assets.restore' })
onAssetsRestore({ assetIds, userId }: ArgOf<'assets.restore'>) {
this.eventRepository.clientSend('on_asset_restore', userId, assetIds);
@@ -198,12 +185,30 @@ export class NotificationService extends BaseService {
}
@OnEvent({ name: 'album.update' })
async onAlbumUpdate({ id, recipientId }: ArgOf<'album.update'>) {
await this.jobRepository.removeJob(JobName.NOTIFY_ALBUM_UPDATE, `${id}/${recipientId}`);
await this.jobRepository.queue({
async onAlbumUpdate({ id, recipientIds }: ArgOf<'album.update'>) {
// if recipientIds is empty, album likely only has one user part of it, don't queue notification if so
if (recipientIds.length === 0) {
return;
}
const job: JobItem = {
name: JobName.NOTIFY_ALBUM_UPDATE,
data: { id, recipientId, delay: NotificationService.albumUpdateEmailDelayMs },
});
data: { id, recipientIds, delay: NotificationService.albumUpdateEmailDelayMs },
};
const previousJobData = await this.jobRepository.removeJob(id, JobName.NOTIFY_ALBUM_UPDATE);
if (previousJobData && this.isAlbumUpdateJob(previousJobData)) {
for (const id of previousJobData.recipientIds) {
if (!recipientIds.includes(id)) {
recipientIds.push(id);
}
}
}
await this.jobRepository.queue(job);
}
private isAlbumUpdateJob(job: IEntityJob): job is INotifyAlbumUpdateJob {
return 'recipientIds' in job;
}
@OnEvent({ name: 'album.invite' })
@@ -394,7 +399,7 @@ export class NotificationService extends BaseService {
}
@OnJob({ name: JobName.NOTIFY_ALBUM_UPDATE, queue: QueueName.NOTIFICATION })
async handleAlbumUpdate({ id, recipientId }: JobOf<JobName.NOTIFY_ALBUM_UPDATE>) {
async handleAlbumUpdate({ id, recipientIds }: JobOf<JobName.NOTIFY_ALBUM_UPDATE>) {
const album = await this.albumRepository.getById(id, { withAssets: false });
if (!album) {
@@ -406,44 +411,49 @@ export class NotificationService extends BaseService {
return JobStatus.SKIPPED;
}
const recipients = [...album.albumUsers.map((user) => user.user), owner].filter((user) =>
recipientIds.includes(user.id),
);
const attachment = await this.getAlbumThumbnailAttachment(album);
const { server, templates } = await this.getConfig({ withCache: false });
const user = await this.userRepository.get(recipientId, { withDeleted: false });
if (!user) {
return JobStatus.SKIPPED;
for (const recipient of recipients) {
const user = await this.userRepository.get(recipient.id, { withDeleted: false });
if (!user) {
continue;
}
const { emailNotifications } = getPreferences(user.metadata);
if (!emailNotifications.enabled || !emailNotifications.albumUpdate) {
continue;
}
const { html, text } = await this.emailRepository.renderEmail({
template: EmailTemplate.ALBUM_UPDATE,
data: {
baseUrl: getExternalDomain(server),
albumId: album.id,
albumName: album.albumName,
recipientName: recipient.name,
cid: attachment ? attachment.cid : undefined,
},
customTemplate: templates.email.albumUpdateTemplate,
});
await this.jobRepository.queue({
name: JobName.SEND_EMAIL,
data: {
to: recipient.email,
subject: `New media has been added to an album - ${album.albumName}`,
html,
text,
imageAttachments: attachment ? [attachment] : undefined,
},
});
}
const { emailNotifications } = getPreferences(user.metadata);
if (!emailNotifications.enabled || !emailNotifications.albumUpdate) {
return JobStatus.SKIPPED;
}
const { html, text } = await this.emailRepository.renderEmail({
template: EmailTemplate.ALBUM_UPDATE,
data: {
baseUrl: getExternalDomain(server),
albumId: album.id,
albumName: album.albumName,
recipientName: user.name,
cid: attachment ? attachment.cid : undefined,
},
customTemplate: templates.email.albumUpdateTemplate,
});
await this.jobRepository.queue({
name: JobName.SEND_EMAIL,
data: {
to: user.email,
subject: `New media has been added to an album - ${album.albumName}`,
html,
text,
imageAttachments: attachment ? [attachment] : undefined,
},
});
return JobStatus.SUCCESS;
}
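
The onAlbumUpdate change above skips empty recipient lists and, when a NOTIFY_ALBUM_UPDATE job was already queued, folds the removed job's recipientIds into the new one. A small sketch of that merge, using plain stand-ins for the project's IEntityJob / INotifyAlbumUpdateJob interfaces:

```typescript
// Sketch: merge recipient ids from a previously queued job into the new one,
// skipping duplicates. The job shapes are simplified stand-ins.
type EntityJob = { id: string };
type NotifyAlbumUpdateJob = EntityJob & { recipientIds: string[] };

const isAlbumUpdateJob = (job: EntityJob): job is NotifyAlbumUpdateJob => 'recipientIds' in job;

function mergeRecipients(recipientIds: string[], previousJob?: EntityJob): string[] {
  if (previousJob && isAlbumUpdateJob(previousJob)) {
    for (const id of previousJob.recipientIds) {
      if (!recipientIds.includes(id)) {
        recipientIds.push(id);
      }
    }
  }
  return recipientIds;
}

// Mirrors the spec above: ['1', '2', '3'] merged with a removed job holding ['2', '3', '4'].
const previous: NotifyAlbumUpdateJob = { id: '1', recipientIds: ['2', '3', '4'] };
console.log(mergeRecipients(['1', '2', '3'], previous)); // -> ['1', '2', '3', '4']
```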

View File

@@ -2,6 +2,7 @@ import { BadRequestException, NotFoundException } from '@nestjs/common';
import { BulkIdErrorReason } from 'src/dtos/asset-ids.response.dto';
import { mapFaces, mapPerson, PersonResponseDto } from 'src/dtos/person.dto';
import { CacheControl, Colorspace, ImageFormat, JobName, JobStatus, SourceType, SystemMetadataKey } from 'src/enum';
import { WithoutProperty } from 'src/repositories/asset.repository';
import { DetectedFaces } from 'src/repositories/machine-learning.repository';
import { FaceSearchResult } from 'src/repositories/search.repository';
import { PersonService } from 'src/services/person.service';
@@ -454,11 +455,14 @@ describe(PersonService.name, () => {
});
it('should queue missing assets', async () => {
mocks.assetJob.streamForDetectFacesJob.mockReturnValue(makeStream([assetStub.image]));
mocks.asset.getWithout.mockResolvedValue({
items: [assetStub.image],
hasNextPage: false,
});
await sut.handleQueueDetectFaces({ force: false });
expect(mocks.assetJob.streamForDetectFacesJob).toHaveBeenCalledWith(false);
expect(mocks.asset.getWithout).toHaveBeenCalledWith({ skip: 0, take: 1000 }, WithoutProperty.FACES);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.FACE_DETECTION,
@@ -468,7 +472,10 @@ describe(PersonService.name, () => {
});
it('should queue all assets', async () => {
mocks.assetJob.streamForDetectFacesJob.mockReturnValue(makeStream([assetStub.image]));
mocks.asset.getAll.mockResolvedValue({
items: [assetStub.image],
hasNextPage: false,
});
mocks.person.getAllWithoutFaces.mockResolvedValue([personStub.withName]);
await sut.handleQueueDetectFaces({ force: true });
@@ -476,7 +483,7 @@ describe(PersonService.name, () => {
expect(mocks.person.deleteFaces).toHaveBeenCalledWith({ sourceType: SourceType.MACHINE_LEARNING });
expect(mocks.person.delete).toHaveBeenCalledWith([personStub.withName.id]);
expect(mocks.storage.unlink).toHaveBeenCalledWith(personStub.withName.thumbnailPath);
expect(mocks.assetJob.streamForDetectFacesJob).toHaveBeenCalledWith(true);
expect(mocks.asset.getAll).toHaveBeenCalled();
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.FACE_DETECTION,
@@ -486,14 +493,17 @@ describe(PersonService.name, () => {
});
it('should refresh all assets', async () => {
mocks.assetJob.streamForDetectFacesJob.mockReturnValue(makeStream([assetStub.image]));
mocks.asset.getAll.mockResolvedValue({
items: [assetStub.image],
hasNextPage: false,
});
await sut.handleQueueDetectFaces({ force: undefined });
expect(mocks.person.delete).not.toHaveBeenCalled();
expect(mocks.person.deleteFaces).not.toHaveBeenCalled();
expect(mocks.storage.unlink).not.toHaveBeenCalled();
expect(mocks.assetJob.streamForDetectFacesJob).toHaveBeenCalledWith(undefined);
expect(mocks.asset.getAll).toHaveBeenCalled();
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.FACE_DETECTION,
@@ -506,13 +516,16 @@ describe(PersonService.name, () => {
it('should delete existing people and faces if forced', async () => {
mocks.person.getAll.mockReturnValue(makeStream([faceStub.face1.person, personStub.randomPerson]));
mocks.person.getAllFaces.mockReturnValue(makeStream([faceStub.face1]));
mocks.assetJob.streamForDetectFacesJob.mockReturnValue(makeStream([assetStub.image]));
mocks.asset.getAll.mockResolvedValue({
items: [assetStub.image],
hasNextPage: false,
});
mocks.person.getAllWithoutFaces.mockResolvedValue([personStub.randomPerson]);
mocks.person.deleteFaces.mockResolvedValue();
await sut.handleQueueDetectFaces({ force: true });
expect(mocks.assetJob.streamForDetectFacesJob).toHaveBeenCalledWith(true);
expect(mocks.asset.getAll).toHaveBeenCalled();
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.FACE_DETECTION,

View File

@@ -36,6 +36,7 @@ import {
SourceType,
SystemMetadataKey,
} from 'src/enum';
import { WithoutProperty } from 'src/repositories/asset.repository';
import { BoundingBox } from 'src/repositories/machine-learning.repository';
import { UpdateFacesData } from 'src/repositories/person.repository';
import { BaseService } from 'src/services/base.service';
@@ -43,6 +44,7 @@ import { CropOptions, ImageDimensions, InputDimensions, JobItem, JobOf } from 's
import { ImmichFileResponse } from 'src/utils/file';
import { mimeTypes } from 'src/utils/mime-types';
import { isFaceImportEnabled, isFacialRecognitionEnabled } from 'src/utils/misc';
import { usePagination } from 'src/utils/pagination';
@Injectable()
export class PersonService extends BaseService {
@@ -263,19 +265,23 @@ export class PersonService extends BaseService {
await this.handlePersonCleanup();
}
let jobs: JobItem[] = [];
const assets = this.assetJobRepository.streamForDetectFacesJob(force);
for await (const asset of assets) {
jobs.push({ name: JobName.FACE_DETECTION, data: { id: asset.id } });
const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => {
return force === false
? this.assetRepository.getWithout(pagination, WithoutProperty.FACES)
: this.assetRepository.getAll(pagination, {
orderDirection: 'desc',
withFaces: true,
withArchived: true,
isVisible: true,
});
});
if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
await this.jobRepository.queueAll(jobs);
jobs = [];
}
for await (const assets of assetPagination) {
await this.jobRepository.queueAll(
assets.map((asset) => ({ name: JobName.FACE_DETECTION, data: { id: asset.id } })),
);
}
await this.jobRepository.queueAll(jobs);
if (force === undefined) {
await this.jobRepository.queue({ name: JobName.PERSON_CLEANUP });
}

View File

@@ -15,6 +15,7 @@ import {
SmartSearchDto,
} from 'src/dtos/search.dto';
import { AssetOrder } from 'src/enum';
import { SearchExploreItem } from 'src/repositories/search.repository';
import { BaseService } from 'src/services/base.service';
import { getMyPartnerIds } from 'src/utils/asset.util';
import { isSmartSearchEnabled } from 'src/utils/misc';
@@ -31,7 +32,7 @@ export class SearchService extends BaseService {
return places.map((place) => mapPlaces(place));
}
async getExploreData(auth: AuthDto) {
async getExploreData(auth: AuthDto): Promise<SearchExploreItem<AssetResponseDto>[]> {
const options = { maxFields: 12, minAssetsPerField: 5 };
const cities = await this.assetRepository.getAssetIdByCity(auth.user.id, options);
const assets = await this.assetRepository.getByIdsWithAllRelationsButStacks(cities.items.map(({ data }) => data));

View File

@@ -151,6 +151,7 @@ describe(SmartInfoService.name, () => {
await sut.handleQueueEncodeClip({});
expect(mocks.asset.getWithout).not.toHaveBeenCalled();
expect(mocks.search.setDimensionSize).not.toHaveBeenCalled();
});

View File

@@ -116,11 +116,6 @@ export class StorageTemplateService extends BaseService {
return { ...storageTokens, presetOptions: storagePresets };
}
@OnEvent({ name: 'asset.metadataExtracted' })
async onAssetMetadataExtracted({ source, assetId }: ArgOf<'asset.metadataExtracted'>) {
await this.jobRepository.queue({ name: JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE, data: { source, id: assetId } });
}
@OnJob({ name: JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE, queue: QueueName.STORAGE_TEMPLATE_MIGRATION })
async handleMigrationSingle({ id }: JobOf<JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE>): Promise<JobStatus> {
const config = await this.getConfig({ withCache: true });

View File

@@ -4,7 +4,7 @@ import { DateTime } from 'luxon';
import { Writable } from 'node:stream';
import { AUDIT_LOG_MAX_DURATION } from 'src/constants';
import { SessionSyncCheckpoints } from 'src/db';
import { AssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
import { AssetResponseDto, hexOrBufferToBase64, mapAsset } from 'src/dtos/asset-response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import {
AssetDeltaSyncDto,
@@ -18,7 +18,6 @@ import { DatabaseAction, EntityType, Permission, SyncEntityType, SyncRequestType
import { BaseService } from 'src/services/base.service';
import { SyncAck } from 'src/types';
import { getMyPartnerIds } from 'src/utils/asset.util';
import { hexOrBufferToBase64 } from 'src/utils/bytes';
import { setIsEqual } from 'src/utils/set';
import { fromAck, serialize } from 'src/utils/sync';
@@ -142,7 +141,7 @@ export class SyncService extends BaseService {
updateId,
data: {
...data,
checksum: hexOrBufferToBase64(checksum)!,
checksum: hexOrBufferToBase64(checksum),
thumbhash: thumbhash ? hexOrBufferToBase64(thumbhash) : null,
},
}),
@@ -172,7 +171,7 @@ export class SyncService extends BaseService {
updateId,
data: {
...data,
checksum: hexOrBufferToBase64(checksum)!,
checksum: hexOrBufferToBase64(checksum),
thumbhash: thumbhash ? hexOrBufferToBase64(thumbhash) : null,
},
}),

View File

@@ -1,6 +1,9 @@
import { BadRequestException } from '@nestjs/common';
import { TimeBucketSize } from 'src/repositories/asset.repository';
import { TimelineService } from 'src/services/timeline.service';
import { assetStub } from 'test/fixtures/asset.stub';
import { authStub } from 'test/fixtures/auth.stub';
import { factory } from 'test/small.factory';
import { newTestService, ServiceMocks } from 'test/utils';
describe(TimelineService.name, () => {
@@ -15,10 +18,13 @@ describe(TimelineService.name, () => {
it("should return buckets if userId and albumId aren't set", async () => {
mocks.asset.getTimeBuckets.mockResolvedValue([{ timeBucket: 'bucket', count: 1 }]);
await expect(sut.getTimeBuckets(authStub.admin, {})).resolves.toEqual(
expect.arrayContaining([{ timeBucket: 'bucket', count: 1 }]),
);
await expect(
sut.getTimeBuckets(authStub.admin, {
size: TimeBucketSize.DAY,
}),
).resolves.toEqual(expect.arrayContaining([{ timeBucket: 'bucket', count: 1 }]));
expect(mocks.asset.getTimeBuckets).toHaveBeenCalledWith({
size: TimeBucketSize.DAY,
userIds: [authStub.admin.user.id],
});
});
@@ -27,34 +33,35 @@ describe(TimelineService.name, () => {
describe('getTimeBucket', () => {
it('should return the assets for a album time bucket if user has album.read', async () => {
mocks.access.album.checkOwnerAccess.mockResolvedValue(new Set(['album-id']));
const json = `[{ id: ['asset-id'] }]`;
mocks.asset.getTimeBucket.mockResolvedValue({ assets: json });
mocks.asset.getTimeBucket.mockResolvedValue([assetStub.image]);
await expect(sut.getTimeBucket(authStub.admin, { timeBucket: 'bucket', albumId: 'album-id' })).resolves.toEqual(
json,
);
await expect(
sut.getTimeBucket(authStub.admin, { size: TimeBucketSize.DAY, timeBucket: 'bucket', albumId: 'album-id' }),
).resolves.toEqual(expect.arrayContaining([expect.objectContaining({ id: 'asset-id' })]));
expect(mocks.access.album.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['album-id']));
expect(mocks.asset.getTimeBucket).toHaveBeenCalledWith('bucket', {
size: TimeBucketSize.DAY,
timeBucket: 'bucket',
albumId: 'album-id',
});
});
it('should return the assets for a archive time bucket if user has archive.read', async () => {
const json = `[{ id: ['asset-id'] }]`;
mocks.asset.getTimeBucket.mockResolvedValue({ assets: json });
mocks.asset.getTimeBucket.mockResolvedValue([assetStub.image]);
await expect(
sut.getTimeBucket(authStub.admin, {
size: TimeBucketSize.DAY,
timeBucket: 'bucket',
isArchived: true,
userId: authStub.admin.user.id,
}),
).resolves.toEqual(json);
).resolves.toEqual(expect.arrayContaining([expect.objectContaining({ id: 'asset-id' })]));
expect(mocks.asset.getTimeBucket).toHaveBeenCalledWith(
'bucket',
expect.objectContaining({
size: TimeBucketSize.DAY,
timeBucket: 'bucket',
isArchived: true,
userIds: [authStub.admin.user.id],
@@ -63,19 +70,20 @@ describe(TimelineService.name, () => {
});
it('should include partner shared assets', async () => {
const json = `[{ id: ['asset-id'] }]`;
mocks.asset.getTimeBucket.mockResolvedValue({ assets: json });
mocks.asset.getTimeBucket.mockResolvedValue([assetStub.image]);
mocks.partner.getAll.mockResolvedValue([]);
await expect(
sut.getTimeBucket(authStub.admin, {
size: TimeBucketSize.DAY,
timeBucket: 'bucket',
isArchived: false,
userId: authStub.admin.user.id,
withPartners: true,
}),
).resolves.toEqual(json);
).resolves.toEqual(expect.arrayContaining([expect.objectContaining({ id: 'asset-id' })]));
expect(mocks.asset.getTimeBucket).toHaveBeenCalledWith('bucket', {
size: TimeBucketSize.DAY,
timeBucket: 'bucket',
isArchived: false,
withPartners: true,
@@ -84,37 +92,62 @@ describe(TimelineService.name, () => {
});
it('should check permissions to read tag', async () => {
const json = `[{ id: ['asset-id'] }]`;
mocks.asset.getTimeBucket.mockResolvedValue({ assets: json });
mocks.asset.getTimeBucket.mockResolvedValue([assetStub.image]);
mocks.access.tag.checkOwnerAccess.mockResolvedValue(new Set(['tag-123']));
await expect(
sut.getTimeBucket(authStub.admin, {
size: TimeBucketSize.DAY,
timeBucket: 'bucket',
userId: authStub.admin.user.id,
tagId: 'tag-123',
}),
).resolves.toEqual(json);
).resolves.toEqual(expect.arrayContaining([expect.objectContaining({ id: 'asset-id' })]));
expect(mocks.asset.getTimeBucket).toHaveBeenCalledWith('bucket', {
size: TimeBucketSize.DAY,
tagId: 'tag-123',
timeBucket: 'bucket',
userIds: [authStub.admin.user.id],
});
});
it('should strip metadata if showExif is disabled', async () => {
mocks.access.album.checkSharedLinkAccess.mockResolvedValue(new Set(['album-id']));
mocks.asset.getTimeBucket.mockResolvedValue([assetStub.image]);
const auth = factory.auth({ sharedLink: { showExif: false } });
const buckets = await sut.getTimeBucket(auth, {
size: TimeBucketSize.DAY,
timeBucket: 'bucket',
isArchived: true,
albumId: 'album-id',
});
expect(buckets).toEqual([expect.objectContaining({ id: 'asset-id' })]);
expect(buckets[0]).not.toHaveProperty('exif');
expect(mocks.asset.getTimeBucket).toHaveBeenCalledWith('bucket', {
size: TimeBucketSize.DAY,
timeBucket: 'bucket',
isArchived: true,
albumId: 'album-id',
});
});
it('should return the assets for a library time bucket if user has library.read', async () => {
const json = `[{ id: ['asset-id'] }]`;
mocks.asset.getTimeBucket.mockResolvedValue({ assets: json });
mocks.asset.getTimeBucket.mockResolvedValue([assetStub.image]);
await expect(
sut.getTimeBucket(authStub.admin, {
size: TimeBucketSize.DAY,
timeBucket: 'bucket',
userId: authStub.admin.user.id,
}),
).resolves.toEqual(json);
).resolves.toEqual(expect.arrayContaining([expect.objectContaining({ id: 'asset-id' })]));
expect(mocks.asset.getTimeBucket).toHaveBeenCalledWith(
'bucket',
expect.objectContaining({
size: TimeBucketSize.DAY,
timeBucket: 'bucket',
userIds: [authStub.admin.user.id],
}),
@@ -124,6 +157,7 @@ describe(TimelineService.name, () => {
it('should throw an error if withParners is true and isArchived true or undefined', async () => {
await expect(
sut.getTimeBucket(authStub.admin, {
size: TimeBucketSize.DAY,
timeBucket: 'bucket',
isArchived: true,
withPartners: true,
@@ -133,6 +167,7 @@ describe(TimelineService.name, () => {
await expect(
sut.getTimeBucket(authStub.admin, {
size: TimeBucketSize.DAY,
timeBucket: 'bucket',
isArchived: undefined,
withPartners: true,
@@ -144,6 +179,7 @@ describe(TimelineService.name, () => {
it('should throw an error if withParners is true and isFavorite is either true or false', async () => {
await expect(
sut.getTimeBucket(authStub.admin, {
size: TimeBucketSize.DAY,
timeBucket: 'bucket',
isFavorite: true,
withPartners: true,
@@ -153,6 +189,7 @@ describe(TimelineService.name, () => {
await expect(
sut.getTimeBucket(authStub.admin, {
size: TimeBucketSize.DAY,
timeBucket: 'bucket',
isFavorite: false,
withPartners: true,
@@ -164,6 +201,7 @@ describe(TimelineService.name, () => {
it('should throw an error if withParners is true and isTrash is true', async () => {
await expect(
sut.getTimeBucket(authStub.admin, {
size: TimeBucketSize.DAY,
timeBucket: 'bucket',
isTrashed: true,
withPartners: true,

View File

@@ -1,7 +1,7 @@
import { BadRequestException, Injectable } from '@nestjs/common';
import { Stack } from 'src/database';
import { AssetResponseDto, SanitizedAssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { TimeBucketAssetDto, TimeBucketDto, TimeBucketsResponseDto } from 'src/dtos/time-bucket.dto';
import { TimeBucketAssetDto, TimeBucketDto, TimeBucketResponseDto } from 'src/dtos/time-bucket.dto';
import { Permission } from 'src/enum';
import { TimeBucketOptions } from 'src/repositories/asset.repository';
import { BaseService } from 'src/services/base.service';
@@ -9,32 +9,22 @@ import { getMyPartnerIds } from 'src/utils/asset.util';
@Injectable()
export class TimelineService extends BaseService {
async getTimeBuckets(auth: AuthDto, dto: TimeBucketDto): Promise<TimeBucketsResponseDto[]> {
async getTimeBuckets(auth: AuthDto, dto: TimeBucketDto): Promise<TimeBucketResponseDto[]> {
await this.timeBucketChecks(auth, dto);
const timeBucketOptions = await this.buildTimeBucketOptions(auth, dto);
return await this.assetRepository.getTimeBuckets(timeBucketOptions);
return this.assetRepository.getTimeBuckets(timeBucketOptions);
}
// pre-jsonified response
async getTimeBucket(auth: AuthDto, dto: TimeBucketAssetDto): Promise<string> {
async getTimeBucket(
auth: AuthDto,
dto: TimeBucketAssetDto,
): Promise<AssetResponseDto[] | SanitizedAssetResponseDto[]> {
await this.timeBucketChecks(auth, dto);
const timeBucketOptions = await this.buildTimeBucketOptions(auth, { ...dto });
// TODO: use id cursor for pagination
const bucket = await this.assetRepository.getTimeBucket(dto.timeBucket, timeBucketOptions);
return bucket.assets;
}
mapStack(entity?: Stack | null) {
if (!entity) {
return null;
}
return {
id: entity.id!,
primaryAssetId: entity.primaryAssetId!,
assetCount: entity.assetCount as number,
};
const timeBucketOptions = await this.buildTimeBucketOptions(auth, dto);
const assets = await this.assetRepository.getTimeBucket(dto.timeBucket, timeBucketOptions);
return !auth.sharedLink || auth.sharedLink?.showExif
? assets.map((asset) => mapAsset(asset, { withStack: true, auth }))
: assets.map((asset) => mapAsset(asset, { stripMetadata: true, auth }));
}
private async buildTimeBucketOptions(auth: AuthDto, dto: TimeBucketDto): Promise<TimeBucketOptions> {
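
getTimeBucket above now maps repository rows through mapAsset and strips metadata when the request arrives via a shared link with showExif disabled. A minimal sketch of that gate; the Asset/Auth shapes and the local mapAsset are stand-ins for the real DTO helpers, which also handle stacks and ownership:

```typescript
// Sketch: strip EXIF from mapped assets when a shared link disallows it.
// The asset/auth shapes and mapAsset stand in for the real DTO helpers.
type Asset = { id: string; exif?: { city?: string } };
type Auth = { sharedLink?: { showExif: boolean } };

function mapAsset(asset: Asset, options: { stripMetadata?: boolean } = {}) {
  const { exif, ...rest } = asset;
  return options.stripMetadata ? rest : { ...rest, exif };
}

function mapTimeBucket(assets: Asset[], auth: Auth) {
  return !auth.sharedLink || auth.sharedLink.showExif
    ? assets.map((asset) => mapAsset(asset))
    : assets.map((asset) => mapAsset(asset, { stripMetadata: true }));
}

const assets: Asset[] = [{ id: 'asset-id', exif: { city: 'Oslo' } }];
console.log(mapTimeBucket(assets, { sharedLink: { showExif: false } })); // exif stripped
console.log(mapTimeBucket(assets, {})); // exif kept
```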

View File

@@ -1,28 +0,0 @@
export type TimelineStack = {
id: string;
primaryAssetId: string;
assetCount: number;
};
export type AssetDescription = {
city: string | null;
country: string | null;
};
export type TimeBucketAssets = {
id: string[];
ownerId: string[];
ratio: number[];
isFavorite: number[];
isArchived: number[];
isTrashed: number[];
isImage: number[];
thumbhash: (string | null)[];
localDateTime: string[];
stack?: ([string, string] | null)[];
duration: (string | null)[];
projectionType: (string | null)[];
livePhotoVideoId: (string | null)[];
city: (string | null)[];
country: (string | null)[];
};

View File

@@ -177,10 +177,9 @@ export interface IDelayedJob extends IBaseJob {
delay?: number;
}
export type JobSource = 'upload' | 'sidecar-write' | 'copy';
export interface IEntityJob extends IBaseJob {
id: string;
source?: JobSource;
source?: 'upload' | 'sidecar-write' | 'copy';
notify?: boolean;
}
@@ -252,7 +251,7 @@ export interface INotifyAlbumInviteJob extends IEntityJob {
}
export interface INotifyAlbumUpdateJob extends IEntityJob, IDelayedJob {
recipientId: string;
recipientIds: string[];
}
export interface JobCounts {

View File

@@ -197,16 +197,3 @@ export const asRequest = (request: AuthRequest, file: Express.Multer.File) => {
file: mapToUploadFile(file as ImmichFile),
};
};
function isRotated90CW(orientation: number) {
return orientation === 5 || orientation === 6 || orientation === 90;
}
function isRotated270CW(orientation: number) {
return orientation === 7 || orientation === 8 || orientation === -90;
}
export function isFlipped(orientation?: string | null) {
const value = Number(orientation);
return value && (isRotated270CW(value) || isRotated90CW(value));
}
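
For reference, the asset.util.ts helpers removed above treat EXIF orientations 5, 6, and 90 as a 90° clockwise rotation and 7, 8, and -90 as 270°, so isFlipped reports whether an orientation string implies such a rotation. A tiny usage sketch with the helpers copied as shown in the hunk (the sample orientation values are illustrative):

```typescript
// Copied from the hunk above: orientations that imply a 90°/270° rotation.
function isRotated90CW(orientation: number) {
  return orientation === 5 || orientation === 6 || orientation === 90;
}

function isRotated270CW(orientation: number) {
  return orientation === 7 || orientation === 8 || orientation === -90;
}

export function isFlipped(orientation?: string | null) {
  const value = Number(orientation);
  return value && (isRotated270CW(value) || isRotated90CW(value));
}

// EXIF orientation 6 means "rotate 90° CW"; orientation 1 is the default upright image.
console.log(Boolean(isFlipped('6'))); // true
console.log(Boolean(isFlipped('1'))); // false
```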

View File

@@ -22,15 +22,3 @@ export function asHumanReadable(bytes: number, precision = 1): string {
return `${remainder.toFixed(magnitude == 0 ? 0 : precision)} ${units[magnitude]}`;
}
// if an asset is jsonified in the DB before being returned, its buffer fields will be hex-encoded strings
export const hexOrBufferToBase64 = (encoded: string | Buffer | null) => {
if (!encoded) {
return null;
}
if (typeof encoded === 'string') {
return Buffer.from(encoded.slice(2), 'hex').toString('base64');
}
return encoded.toString('base64');
};
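
The hexOrBufferToBase64 helper removed from the bytes util above (and re-imported from asset-response.dto elsewhere in this diff) normalizes bytea values that come back as hex strings after being jsonified in the database, per the comment in the hunk. A quick usage sketch with the function copied verbatim; the sample checksum value is made up:

```typescript
// Copied from the hunk above: jsonified buffer columns arrive as hex strings
// with a two-character prefix (e.g. Postgres's "\x"), which slice(2) drops.
export const hexOrBufferToBase64 = (encoded: string | Buffer | null) => {
  if (!encoded) {
    return null;
  }
  if (typeof encoded === 'string') {
    return Buffer.from(encoded.slice(2), 'hex').toString('base64');
  }
  return encoded.toString('base64');
};

const checksum = Buffer.from('immich');
console.log(hexOrBufferToBase64(checksum)); // 'aW1taWNo'
console.log(hexOrBufferToBase64('\\x' + checksum.toString('hex'))); // 'aW1taWNo'
```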

View File

@@ -262,7 +262,7 @@ export function withTags(eb: ExpressionBuilder<DB, 'assets'>) {
}
export function truncatedDate<O>(size: TimeBucketSize) {
return sql<O>`date_trunc(${sql.lit(size)}, "localDateTime" at time zone 'UTC') at time zone 'UTC'`;
return sql<O>`date_trunc(${size}, "localDateTime" at time zone 'UTC') at time zone 'UTC'`;
}
export function withTagId<O>(qb: SelectQueryBuilder<DB, 'assets', O>, tagId: string) {
@@ -276,7 +276,6 @@ export function withTagId<O>(qb: SelectQueryBuilder<DB, 'assets', O>, tagId: str
),
);
}
const joinDeduplicationPlugin = new DeduplicateJoinsPlugin();
/** TODO: This should only be used for search-related queries, not as a general purpose query builder */
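
The truncatedDate hunk above switches between sql.lit(size), which inlines the bucket size into the SQL text, and a plain ${size} interpolation, which Kysely passes as a bind parameter. The toy tagged template below only illustrates that literal-vs-parameter distinction; it is not Kysely's implementation, and the helper names are invented for the sketch:

```typescript
// Toy illustration of literal vs. parameterized interpolation in a SQL template tag.
// This is not Kysely; it only demonstrates why the two spellings compile differently.
type Literal = { literal: string };
const lit = (value: string): Literal => ({ literal: `'${value.replaceAll("'", "''")}'` });

function toySql(strings: TemplateStringsArray, ...values: (string | number | Literal)[]) {
  const parameters: (string | number)[] = [];
  let text = strings[0];
  values.forEach((value, i) => {
    if (typeof value === 'object') {
      text += value.literal; // inlined into the SQL text, like sql.lit(size)
    } else {
      parameters.push(value); // sent separately, like a plain ${size} interpolation
      text += `$${parameters.length}`;
    }
    text += strings[i + 1];
  });
  return { text, parameters };
}

const size = 'DAY';
console.log(toySql`date_trunc(${lit(size)}, "localDateTime")`);
// { text: `date_trunc('DAY', "localDateTime")`, parameters: [] }
console.log(toySql`date_trunc(${size}, "localDateTime")`);
// { text: 'date_trunc($1, "localDateTime")', parameters: ['DAY'] }
```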

View File

@@ -8,6 +8,22 @@ export interface PaginationResult<T> {
hasNextPage: boolean;
}
export type Paginated<T> = Promise<PaginationResult<T>>;
/** @deprecated use `this.db. ... .stream()` instead */
export async function* usePagination<T>(
pageSize: number,
getNextPage: (pagination: PaginationOptions) => PaginationResult<T> | Paginated<T>,
) {
let hasNextPage = true;
for (let skip = 0; hasNextPage; skip += pageSize) {
const result = await getNextPage({ take: pageSize, skip });
hasNextPage = result.hasNextPage;
yield result.items;
}
}
export function paginationHelper<Entity extends object>(items: Entity[], take: number): PaginationResult<Entity> {
const hasNextPage = items.length > take;
items.splice(take);
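
usePagination, re-added above and marked deprecated in favour of `.stream()`, keeps requesting pages until hasNextPage is false and yields each page's items. A self-contained usage sketch follows; the in-memory page source stands in for a repository call such as assetRepository.getAll(pagination), and the Paginated<T> alias is replaced by an inline Promise type:

```typescript
// Types and generator as shown in the hunk; the in-memory source is a stand-in
// for a repository query.
type PaginationOptions = { take: number; skip: number };
type PaginationResult<T> = { items: T[]; hasNextPage: boolean };

async function* usePagination<T>(
  pageSize: number,
  getNextPage: (pagination: PaginationOptions) => PaginationResult<T> | Promise<PaginationResult<T>>,
) {
  let hasNextPage = true;
  for (let skip = 0; hasNextPage; skip += pageSize) {
    const result = await getNextPage({ take: pageSize, skip });
    hasNextPage = result.hasNextPage;
    yield result.items;
  }
}

const all = Array.from({ length: 25 }, (_, i) => ({ id: `asset-${i}` }));

async function main() {
  const pages = usePagination(10, ({ take, skip }) => ({
    items: all.slice(skip, skip + take),
    hasNextPage: skip + take < all.length,
  }));
  for await (const page of pages) {
    console.log(`queueing ${page.length} jobs`); // 10, 10, 5
  }
}

void main();
```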

View File

@@ -14,6 +14,7 @@ import { LoggingRepository } from 'src/repositories/logging.repository';
import { bootstrapTelemetry } from 'src/repositories/telemetry.repository';
import { ApiService } from 'src/services/api.service';
import { isStartUpError, useSwagger } from 'src/utils/misc';
async function bootstrap() {
process.title = 'immich-api';

View File

@@ -257,10 +257,6 @@ export const assetStub = {
duplicateId: null,
isOffline: false,
stack: null,
orientation: '',
projectionType: null,
height: 3840,
width: 2160,
}),
trashed: Object.freeze({

View File

@@ -142,15 +142,18 @@ export const getRepository = <K extends keyof RepositoriesTypes>(key: K, db: Kys
}
case 'database': {
return new DatabaseRepository(db, LoggingRepository.create(), new ConfigRepository());
const configRepo = new ConfigRepository();
return new DatabaseRepository(db, new LoggingRepository(undefined, configRepo), configRepo);
}
case 'email': {
return new EmailRepository(LoggingRepository.create());
const logger = new LoggingRepository(undefined, new ConfigRepository());
return new EmailRepository(logger);
}
case 'logger': {
return LoggingRepository.create();
const configMock = { getEnv: () => ({ noColor: false }) };
return new LoggingRepository(undefined, configMock as ConfigRepository);
}
case 'memory': {

View File

@@ -42,7 +42,7 @@ const globalSetup = async () => {
const db = new Kysely<DB>(getKyselyConfig({ connectionType: 'url', url: postgresUrl }));
const configRepository = new ConfigRepository();
const logger = LoggingRepository.create();
const logger = new LoggingRepository(undefined, configRepository);
await new DatabaseRepository(db, logger, configRepository).runMigrations();
await db.destroy();

View File

@@ -13,11 +13,14 @@ export const newAssetRepositoryMock = (): Mocked<RepositoryInterface<AssetReposi
getByIds: vitest.fn().mockResolvedValue([]),
getByIdsWithAllRelationsButStacks: vitest.fn().mockResolvedValue([]),
getByDeviceIds: vitest.fn(),
getByUserId: vitest.fn(),
getById: vitest.fn(),
getWithout: vitest.fn(),
getByChecksum: vitest.fn(),
getByChecksums: vitest.fn(),
getUploadAssetIdByChecksum: vitest.fn(),
getRandom: vitest.fn(),
getAll: vitest.fn().mockResolvedValue({ items: [], hasNextPage: false }),
getAllByDeviceId: vitest.fn(),
getLivePhotoCount: vitest.fn(),
getLibraryAssetCount: vitest.fn(),

View File

@@ -10,7 +10,6 @@ const envData: EnvData = {
buildMetadata: {},
bull: {
config: {
connection: {},
prefix: 'immich_bull',
},
queues: [{ name: 'queue-1' }],

View File

@@ -1,6 +0,0 @@
{
"name": "typescript-sdk",
"lockfileVersion": 3,
"requires": true,
"packages": {}
}

View File

@@ -5,17 +5,10 @@ TYPESCRIPT_SDK=/usr/src/open-api/typescript-sdk
npm --prefix "$TYPESCRIPT_SDK" install
npm --prefix "$TYPESCRIPT_SDK" run build
COUNT=0
UPSTREAM="${IMMICH_SERVER_URL:-http://immich-server:2283/}"
until wget --spider --quiet "${UPSTREAM}/api/server/config" > /dev/null 2>&1; do
if [ $((COUNT % 10)) -eq 0 ]; then
echo "Waiting for $UPSTREAM to start..."
fi
COUNT=$((COUNT + 1))
until wget --spider --quiet "${UPSTREAM}/api/server/config"; do
echo 'waiting for api server...'
sleep 1
done
echo "Connected to $UPSTREAM"
node ./node_modules/.bin/vite dev --host 0.0.0.0 --port 3000

8 web/package-lock.json (generated)
View File

@@ -11,7 +11,7 @@
"dependencies": {
"@formatjs/icu-messageformat-parser": "^2.9.8",
"@immich/sdk": "file:../open-api/typescript-sdk",
"@immich/ui": "^0.19.0",
"@immich/ui": "^0.18.1",
"@mapbox/mapbox-gl-rtl-text": "0.2.3",
"@mdi/js": "^7.4.47",
"@photo-sphere-viewer/core": "^5.11.5",
@@ -1320,9 +1320,9 @@
"link": true
},
"node_modules/@immich/ui": {
"version": "0.19.0",
"resolved": "https://registry.npmjs.org/@immich/ui/-/ui-0.19.0.tgz",
"integrity": "sha512-XVjSUoQVIoe83pxM4q8kmlejb2xep/TZEfoGbasI7takEGKNiWEyXr5eZaXZCSVgq78fcNRr4jyWz290ZAXh7A==",
"version": "0.18.1",
"resolved": "https://registry.npmjs.org/@immich/ui/-/ui-0.18.1.tgz",
"integrity": "sha512-XWWO6OTfH3MektyxCn0hWefZyOGyWwwx/2zHinuShpxTHSyfveJ4mOkFP8DkyMz0dnvJ1EfdkPBMkld3y5R/Hw==",
"license": "GNU Affero General Public License version 3",
"dependencies": {
"@mdi/js": "^7.4.47",

View File

@@ -27,7 +27,7 @@
"dependencies": {
"@formatjs/icu-messageformat-parser": "^2.9.8",
"@immich/sdk": "file:../open-api/typescript-sdk",
"@immich/ui": "^0.19.0",
"@immich/ui": "^0.18.1",
"@mapbox/mapbox-gl-rtl-text": "0.2.3",
"@mdi/js": "^7.4.47",
"@photo-sphere-viewer/core": "^5.11.5",

View File

@@ -8,6 +8,7 @@
--immich-primary: 66 80 175;
--immich-bg: 255 255 255;
--immich-fg: 0 0 0;
--immich-gray: 246 246 244;
--immich-error: 229 115 115;
--immich-success: 129 199 132;
--immich-warning: 255 183 77;
@@ -32,7 +33,9 @@
--immich-ui-warning: 255 170 0;
--immich-ui-info: 14 165 233;
--immich-ui-default-border: 209 213 219;
--immich-ui-gray: 246 246 246;
}
* {
--tw-ring-offset-width: 0px;
}
.dark {
@@ -45,7 +48,6 @@
--immich-ui-warning: 255 170 0;
--immich-ui-info: 14 165 233;
--immich-ui-default-border: 55 65 81;
--immich-ui-gray: 33 33 33;
}
}

View File

@@ -1,11 +1,8 @@
import FocusTrapTest from '$lib/actions/__test__/focus-trap-test.svelte';
import { setDefaultTabbleOptions } from '$lib/utils/focus-util';
import { render, screen } from '@testing-library/svelte';
import userEvent from '@testing-library/user-event';
import { tick } from 'svelte';
setDefaultTabbleOptions({ displayCheck: 'none' });
describe('focusTrap action', () => {
const user = userEvent.setup();
@@ -41,7 +38,6 @@ describe('focusTrap action', () => {
const openButton = screen.getByText('Open');
await user.click(openButton);
await tick();
expect(document.activeElement).toEqual(screen.getByTestId('one'));
screen.getByText('Close').click();

View File

@@ -1,5 +1,5 @@
import { shortcuts } from '$lib/actions/shortcut';
import { getTabbable } from '$lib/utils/focus-util';
import { getFocusable } from '$lib/utils/focus-util';
import { tick } from 'svelte';
interface Options {
@@ -18,21 +18,18 @@ export function focusTrap(container: HTMLElement, options?: Options) {
};
};
const setInitialFocus = async () => {
const focusableElement = getTabbable(container, false)[0];
if (focusableElement) {
// Use tick() to ensure focus trap works correctly inside <Portal />
await tick();
focusableElement?.focus();
}
const setInitialFocus = () => {
const focusableElement = getFocusable(container)[0];
// Use tick() to ensure focus trap works correctly inside <Portal />
void tick().then(() => focusableElement?.focus());
};
if (withDefaults(options).active) {
void setInitialFocus();
setInitialFocus();
}
const getFocusableElements = () => {
const focusableElements = getTabbable(container);
const focusableElements = getFocusable(container);
return [
focusableElements.at(0), //
focusableElements.at(-1),
@@ -70,7 +67,7 @@ export function focusTrap(container: HTMLElement, options?: Options) {
update(newOptions?: Options) {
options = newOptions;
if (withDefaults(options).active) {
void setInitialFocus();
setInitialFocus();
}
},
destroy() {

View File

@@ -47,7 +47,8 @@
<ConfirmDialog
title={$t('delete_user')}
confirmText={forceDelete ? $t('permanently_delete') : $t('delete')}
onClose={(confirmed) => (confirmed ? handleDeleteUser() : onCancel())}
onConfirm={handleDeleteUser}
{onCancel}
disabled={deleteButtonDisabled}
>
{#snippet promptSnippet()}
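
Several Svelte hunks in this diff toggle ConfirmDialog between a single onClose(confirmed) callback and separate onConfirm/onCancel props. A type-level sketch of the two shapes and a small adapter between them; the prop and handler names simply mirror the diff and are not the component's actual exported types:

```typescript
// Sketch: the two confirm-dialog callback shapes seen in the hunks, plus an
// adapter from the split form to the single onClose(confirmed) form.
type CloseProps = { onClose: (confirmed: boolean) => void };
type SplitProps = { onConfirm: () => void; onCancel: () => void };

const toCloseProps = ({ onConfirm, onCancel }: SplitProps): CloseProps => ({
  onClose: (confirmed) => (confirmed ? onConfirm() : onCancel()),
});

// Usage: the same handlers work against either prop style.
const handlers: SplitProps = {
  onConfirm: () => console.log('deleting user'),
  onCancel: () => console.log('dialog dismissed'),
};

const dialogProps = toCloseProps(handlers);
dialogProps.onClose(true); // 'deleting user'
dialogProps.onClose(false); // 'dialog dismissed'
```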

View File

@@ -33,7 +33,8 @@
title={$t('restore_user')}
confirmText={$t('continue')}
confirmColor="success"
onClose={(confirmed) => (confirmed ? handleRestoreUser() : onCancel())}
onConfirm={handleRestoreUser}
{onCancel}
>
{#snippet promptSnippet()}
<p>

View File

@@ -49,7 +49,8 @@
{#if isConfirmOpen}
<ConfirmDialog
title={$t('admin.disable_login')}
onClose={(confirmed) => (confirmed ? handleSave(true) : (isConfirmOpen = false))}
onCancel={() => (isConfirmOpen = false)}
onConfirm={() => handleSave(true)}
>
{#snippet promptSnippet()}
<div class="flex flex-col gap-4">

View File

@@ -1,27 +1,27 @@
<script lang="ts">
import Icon from '$lib/components/elements/icon.svelte';
import ButtonContextMenu from '$lib/components/shared-components/context-menu/button-context-menu.svelte';
import MenuOption from '$lib/components/shared-components/context-menu/menu-option.svelte';
import ConfirmDialog from '$lib/components/shared-components/dialog/confirm-dialog.svelte';
import FullScreenModal from '$lib/components/shared-components/full-screen-modal.svelte';
import SettingSwitch from '$lib/components/shared-components/settings/setting-switch.svelte';
import UserAvatar from '$lib/components/shared-components/user-avatar.svelte';
import { handleError } from '$lib/utils/handle-error';
import {
AlbumUserRole,
AssetOrder,
removeUserFromAlbum,
updateAlbumInfo,
updateAlbumUser,
removeUserFromAlbum,
type AlbumResponseDto,
type UserResponseDto,
AssetOrder,
AlbumUserRole,
updateAlbumUser,
} from '@immich/sdk';
import { mdiArrowDownThin, mdiArrowUpThin, mdiDotsVertical, mdiPlus } from '@mdi/js';
import { mdiArrowDownThin, mdiArrowUpThin, mdiPlus, mdiDotsVertical } from '@mdi/js';
import FullScreenModal from '$lib/components/shared-components/full-screen-modal.svelte';
import UserAvatar from '$lib/components/shared-components/user-avatar.svelte';
import SettingSwitch from '$lib/components/shared-components/settings/setting-switch.svelte';
import SettingDropdown from '../shared-components/settings/setting-dropdown.svelte';
import type { RenderedOption } from '../elements/dropdown.svelte';
import { handleError } from '$lib/utils/handle-error';
import { findKey } from 'lodash-es';
import { t } from 'svelte-i18n';
import type { RenderedOption } from '../elements/dropdown.svelte';
import ButtonContextMenu from '$lib/components/shared-components/context-menu/button-context-menu.svelte';
import ConfirmDialog from '$lib/components/shared-components/dialog/confirm-dialog.svelte';
import { notificationController, NotificationType } from '../shared-components/notification/notification';
import SettingDropdown from '../shared-components/settings/setting-dropdown.svelte';
import MenuOption from '$lib/components/shared-components/context-menu/menu-option.svelte';
interface Props {
album: AlbumResponseDto;
@@ -195,6 +195,7 @@
title={$t('album_remove_user')}
prompt={$t('album_remove_user_confirmation', { values: { user: selectedRemoveUser.name } })}
confirmText={$t('remove_user')}
onClose={(confirmed) => (confirmed ? handleRemoveUser() : (selectedRemoveUser = null))}
onConfirm={handleRemoveUser}
onCancel={() => (selectedRemoveUser = null)}
/>
{/if}

View File

@@ -1,25 +1,25 @@
<script lang="ts">
import { shortcut } from '$lib/actions/shortcut';
import SelectAllAssets from '$lib/components/photos-page/actions/select-all-assets.svelte';
import AssetSelectControlBar from '$lib/components/photos-page/asset-select-control-bar.svelte';
import { AssetInteraction } from '$lib/stores/asset-interaction.svelte';
import { assetViewingStore } from '$lib/stores/asset-viewing.store';
import { AssetStore } from '$lib/stores/assets-store.svelte';
import { dragAndDropFilesStore } from '$lib/stores/drag-and-drop-files.store';
import { handlePromiseError } from '$lib/utils';
import { cancelMultiselect, downloadAlbum } from '$lib/utils/asset-utils';
import { fileUploadHandler, openFileUploadDialog } from '$lib/utils/file-uploader';
import type { AlbumResponseDto, SharedLinkResponseDto, UserResponseDto } from '@immich/sdk';
import { mdiFileImagePlusOutline, mdiFolderDownloadOutline } from '@mdi/js';
import { onDestroy } from 'svelte';
import { t } from 'svelte-i18n';
import { AssetStore } from '$lib/stores/assets-store.svelte';
import { cancelMultiselect, downloadAlbum } from '$lib/utils/asset-utils';
import CircleIconButton from '../elements/buttons/circle-icon-button.svelte';
import DownloadAction from '../photos-page/actions/download-action.svelte';
import AssetGrid from '../photos-page/asset-grid.svelte';
import AssetSelectControlBar from '../photos-page/asset-select-control-bar.svelte';
import ControlAppBar from '../shared-components/control-app-bar.svelte';
import ImmichLogoSmallLink from '../shared-components/immich-logo-small-link.svelte';
import ThemeButton from '../shared-components/theme-button.svelte';
import { shortcut } from '$lib/actions/shortcut';
import { mdiFileImagePlusOutline, mdiFolderDownloadOutline } from '@mdi/js';
import { handlePromiseError } from '$lib/utils';
import AlbumSummary from './album-summary.svelte';
import { t } from 'svelte-i18n';
import { onDestroy } from 'svelte';
import { AssetInteraction } from '$lib/stores/asset-interaction.svelte';
interface Props {
sharedLink: SharedLinkResponseDto;

View File

@@ -1,22 +1,22 @@
<script lang="ts">
import ButtonContextMenu from '$lib/components/shared-components/context-menu/button-context-menu.svelte';
import FullScreenModal from '$lib/components/shared-components/full-screen-modal.svelte';
import {
AlbumUserRole,
getMyUser,
removeUserFromAlbum,
updateAlbumUser,
type AlbumResponseDto,
type UserResponseDto,
updateAlbumUser,
AlbumUserRole,
} from '@immich/sdk';
import { mdiDotsVertical } from '@mdi/js';
import { onMount } from 'svelte';
import { t } from 'svelte-i18n';
import { handleError } from '../../utils/handle-error';
import MenuOption from '../shared-components/context-menu/menu-option.svelte';
import ConfirmDialog from '../shared-components/dialog/confirm-dialog.svelte';
import MenuOption from '../shared-components/context-menu/menu-option.svelte';
import { NotificationType, notificationController } from '../shared-components/notification/notification';
import UserAvatar from '../shared-components/user-avatar.svelte';
import FullScreenModal from '$lib/components/shared-components/full-screen-modal.svelte';
import { t } from 'svelte-i18n';
import ButtonContextMenu from '$lib/components/shared-components/context-menu/button-context-menu.svelte';
interface Props {
album: AlbumResponseDto;
@@ -144,7 +144,8 @@
title={$t('album_leave')}
prompt={$t('album_leave_confirmation', { values: { album: album.albumName } })}
confirmText={$t('leave')}
onClose={(confirmed) => (confirmed ? handleRemoveUser() : (selectedRemoveUser = null))}
onConfirm={handleRemoveUser}
onCancel={() => (selectedRemoveUser = null)}
/>
{/if}
@@ -153,6 +154,7 @@
title={$t('album_remove_user')}
prompt={$t('album_remove_user_confirmation', { values: { user: selectedRemoveUser.name } })}
confirmText={$t('remove_user')}
onClose={(confirmed) => (confirmed ? handleRemoveUser() : (selectedRemoveUser = null))}
onConfirm={handleRemoveUser}
onCancel={() => (selectedRemoveUser = null)}
/>
{/if}

Some files were not shown because too many files have changed in this diff.