refactor: enum casing (#19946)

This commit is contained in:
Jason Rasmussen
2025-07-15 14:50:13 -04:00
committed by GitHub
parent 920d7de349
commit e73abe0762
174 changed files with 2675 additions and 2459 deletions

View File

@@ -18,7 +18,7 @@ import { BaseService } from 'src/services/base.service';
@Injectable()
export class ActivityService extends BaseService {
async getAll(auth: AuthDto, dto: ActivitySearchDto): Promise<ActivityResponseDto[]> {
await this.requireAccess({ auth, permission: Permission.ALBUM_READ, ids: [dto.albumId] });
await this.requireAccess({ auth, permission: Permission.AlbumRead, ids: [dto.albumId] });
const activities = await this.activityRepository.search({
userId: dto.userId,
albumId: dto.albumId,
@@ -30,12 +30,12 @@ export class ActivityService extends BaseService {
}
async getStatistics(auth: AuthDto, dto: ActivityDto): Promise<ActivityStatisticsResponseDto> {
await this.requireAccess({ auth, permission: Permission.ALBUM_READ, ids: [dto.albumId] });
await this.requireAccess({ auth, permission: Permission.AlbumRead, ids: [dto.albumId] });
return await this.activityRepository.getStatistics({ albumId: dto.albumId, assetId: dto.assetId });
}
async create(auth: AuthDto, dto: ActivityCreateDto): Promise<MaybeDuplicate<ActivityResponseDto>> {
await this.requireAccess({ auth, permission: Permission.ACTIVITY_CREATE, ids: [dto.albumId] });
await this.requireAccess({ auth, permission: Permission.ActivityCreate, ids: [dto.albumId] });
const common = {
userId: auth.user.id,
@@ -69,7 +69,7 @@ export class ActivityService extends BaseService {
}
async delete(auth: AuthDto, id: string): Promise<void> {
await this.requireAccess({ auth, permission: Permission.ACTIVITY_DELETE, ids: [id] });
await this.requireAccess({ auth, permission: Permission.ActivityDelete, ids: [id] });
await this.activityRepository.delete(id);
}
}

View File

@@ -146,7 +146,7 @@ describe(AlbumService.name, () => {
await sut.create(authStub.admin, {
albumName: 'Empty album',
albumUsers: [{ userId: 'user-id', role: AlbumUserRole.EDITOR }],
albumUsers: [{ userId: 'user-id', role: AlbumUserRole.Editor }],
description: '',
assetIds: ['123'],
});
@@ -160,7 +160,7 @@ describe(AlbumService.name, () => {
albumThumbnailAssetId: '123',
},
['123'],
[{ userId: 'user-id', role: AlbumUserRole.EDITOR }],
[{ userId: 'user-id', role: AlbumUserRole.Editor }],
);
expect(mocks.user.get).toHaveBeenCalledWith('user-id', {});
@@ -177,10 +177,10 @@ describe(AlbumService.name, () => {
mocks.user.get.mockResolvedValue(userStub.user1);
mocks.user.getMetadata.mockResolvedValue([
{
key: UserMetadataKey.PREFERENCES,
key: UserMetadataKey.Preferences,
value: {
albums: {
defaultAssetOrder: AssetOrder.ASC,
defaultAssetOrder: AssetOrder.Asc,
},
},
},
@@ -189,7 +189,7 @@ describe(AlbumService.name, () => {
await sut.create(authStub.admin, {
albumName: 'Empty album',
albumUsers: [{ userId: 'user-id', role: AlbumUserRole.EDITOR }],
albumUsers: [{ userId: 'user-id', role: AlbumUserRole.Editor }],
description: '',
assetIds: ['123'],
});
@@ -203,7 +203,7 @@ describe(AlbumService.name, () => {
albumThumbnailAssetId: '123',
},
['123'],
[{ userId: 'user-id', role: AlbumUserRole.EDITOR }],
[{ userId: 'user-id', role: AlbumUserRole.Editor }],
);
expect(mocks.user.get).toHaveBeenCalledWith('user-id', {});
@@ -220,7 +220,7 @@ describe(AlbumService.name, () => {
await expect(
sut.create(authStub.admin, {
albumName: 'Empty album',
albumUsers: [{ userId: 'user-3', role: AlbumUserRole.EDITOR }],
albumUsers: [{ userId: 'user-3', role: AlbumUserRole.Editor }],
}),
).rejects.toBeInstanceOf(BadRequestException);
expect(mocks.user.get).toHaveBeenCalledWith('user-3', {});
@@ -262,7 +262,7 @@ describe(AlbumService.name, () => {
await expect(
sut.create(authStub.admin, {
albumName: 'Empty album',
albumUsers: [{ userId: userStub.admin.id, role: AlbumUserRole.EDITOR }],
albumUsers: [{ userId: userStub.admin.id, role: AlbumUserRole.Editor }],
}),
).rejects.toBeInstanceOf(BadRequestException);
expect(mocks.album.create).not.toHaveBeenCalled();
@@ -404,7 +404,7 @@ describe(AlbumService.name, () => {
mocks.albumUser.create.mockResolvedValue({
usersId: userStub.user2.id,
albumsId: albumStub.sharedWithAdmin.id,
role: AlbumUserRole.EDITOR,
role: AlbumUserRole.Editor,
});
await sut.addUsers(authStub.user1, albumStub.sharedWithAdmin.id, {
albumUsers: [{ userId: authStub.user2.user.id }],
@@ -512,11 +512,11 @@ describe(AlbumService.name, () => {
mocks.albumUser.update.mockResolvedValue(null as any);
await sut.updateUser(authStub.user1, albumStub.sharedWithAdmin.id, userStub.admin.id, {
role: AlbumUserRole.EDITOR,
role: AlbumUserRole.Editor,
});
expect(mocks.albumUser.update).toHaveBeenCalledWith(
{ albumsId: albumStub.sharedWithAdmin.id, usersId: userStub.admin.id },
{ role: AlbumUserRole.EDITOR },
{ role: AlbumUserRole.Editor },
);
});
});
@@ -585,7 +585,7 @@ describe(AlbumService.name, () => {
expect(mocks.access.album.checkSharedAlbumAccess).toHaveBeenCalledWith(
authStub.user1.user.id,
new Set(['album-123']),
AlbumUserRole.VIEWER,
AlbumUserRole.Viewer,
);
});
@@ -596,7 +596,7 @@ describe(AlbumService.name, () => {
expect(mocks.access.album.checkSharedAlbumAccess).toHaveBeenCalledWith(
authStub.admin.user.id,
new Set(['album-123']),
AlbumUserRole.VIEWER,
AlbumUserRole.Viewer,
);
});
});

View File

@@ -71,7 +71,7 @@ export class AlbumService extends BaseService {
}
async get(auth: AuthDto, id: string, dto: AlbumInfoDto): Promise<AlbumResponseDto> {
await this.requireAccess({ auth, permission: Permission.ALBUM_READ, ids: [id] });
await this.requireAccess({ auth, permission: Permission.AlbumRead, ids: [id] });
await this.albumRepository.updateThumbnails();
const withAssets = dto.withoutAssets === undefined ? true : !dto.withoutAssets;
const album = await this.findOrFail(id, { withAssets });
@@ -102,7 +102,7 @@ export class AlbumService extends BaseService {
const allowedAssetIdsSet = await this.checkAccess({
auth,
permission: Permission.ASSET_SHARE,
permission: Permission.AssetShare,
ids: dto.assetIds || [],
});
const assetIds = [...allowedAssetIdsSet].map((id) => id);
@@ -129,7 +129,7 @@ export class AlbumService extends BaseService {
}
async update(auth: AuthDto, id: string, dto: UpdateAlbumDto): Promise<AlbumResponseDto> {
await this.requireAccess({ auth, permission: Permission.ALBUM_UPDATE, ids: [id] });
await this.requireAccess({ auth, permission: Permission.AlbumUpdate, ids: [id] });
const album = await this.findOrFail(id, { withAssets: true });
@@ -152,13 +152,13 @@ export class AlbumService extends BaseService {
}
async delete(auth: AuthDto, id: string): Promise<void> {
await this.requireAccess({ auth, permission: Permission.ALBUM_DELETE, ids: [id] });
await this.requireAccess({ auth, permission: Permission.AlbumDelete, ids: [id] });
await this.albumRepository.delete(id);
}
async addAssets(auth: AuthDto, id: string, dto: BulkIdsDto): Promise<BulkIdResponseDto[]> {
const album = await this.findOrFail(id, { withAssets: false });
await this.requireAccess({ auth, permission: Permission.ALBUM_ADD_ASSET, ids: [id] });
await this.requireAccess({ auth, permission: Permission.AlbumAddAsset, ids: [id] });
const results = await addAssets(
auth,
@@ -187,13 +187,13 @@ export class AlbumService extends BaseService {
}
async removeAssets(auth: AuthDto, id: string, dto: BulkIdsDto): Promise<BulkIdResponseDto[]> {
await this.requireAccess({ auth, permission: Permission.ALBUM_REMOVE_ASSET, ids: [id] });
await this.requireAccess({ auth, permission: Permission.AlbumRemoveAsset, ids: [id] });
const album = await this.findOrFail(id, { withAssets: false });
const results = await removeAssets(
auth,
{ access: this.accessRepository, bulk: this.albumRepository },
{ parentId: id, assetIds: dto.ids, canAlwaysRemove: Permission.ALBUM_DELETE },
{ parentId: id, assetIds: dto.ids, canAlwaysRemove: Permission.AlbumDelete },
);
const removedIds = results.filter(({ success }) => success).map(({ id }) => id);
@@ -205,7 +205,7 @@ export class AlbumService extends BaseService {
}
async addUsers(auth: AuthDto, id: string, { albumUsers }: AddUsersDto): Promise<AlbumResponseDto> {
await this.requireAccess({ auth, permission: Permission.ALBUM_SHARE, ids: [id] });
await this.requireAccess({ auth, permission: Permission.AlbumShare, ids: [id] });
const album = await this.findOrFail(id, { withAssets: false });
@@ -249,14 +249,14 @@ export class AlbumService extends BaseService {
// non-admin can remove themselves
if (auth.user.id !== userId) {
await this.requireAccess({ auth, permission: Permission.ALBUM_SHARE, ids: [id] });
await this.requireAccess({ auth, permission: Permission.AlbumShare, ids: [id] });
}
await this.albumUserRepository.delete({ albumsId: id, usersId: userId });
}
async updateUser(auth: AuthDto, id: string, userId: string, dto: UpdateAlbumUserDto): Promise<void> {
await this.requireAccess({ auth, permission: Permission.ALBUM_SHARE, ids: [id] });
await this.requireAccess({ auth, permission: Permission.AlbumShare, ids: [id] });
await this.albumUserRepository.update({ albumsId: id, usersId: userId }, { role: dto.role });
}

View File

@@ -15,7 +15,7 @@ describe(ApiKeyService.name, () => {
describe('create', () => {
it('should create a new key', async () => {
const auth = factory.auth();
const apiKey = factory.apiKey({ userId: auth.user.id, permissions: [Permission.ALL] });
const apiKey = factory.apiKey({ userId: auth.user.id, permissions: [Permission.All] });
const key = 'super-secret';
mocks.crypto.randomBytesAsText.mockReturnValue(key);
@@ -41,12 +41,12 @@ describe(ApiKeyService.name, () => {
mocks.crypto.randomBytesAsText.mockReturnValue(key);
mocks.apiKey.create.mockResolvedValue(apiKey);
await sut.create(auth, { permissions: [Permission.ALL] });
await sut.create(auth, { permissions: [Permission.All] });
expect(mocks.apiKey.create).toHaveBeenCalledWith({
key: 'super-secret (hashed)',
name: 'API Key',
permissions: [Permission.ALL],
permissions: [Permission.All],
userId: auth.user.id,
});
expect(mocks.crypto.randomBytesAsText).toHaveBeenCalled();
@@ -54,9 +54,9 @@ describe(ApiKeyService.name, () => {
});
it('should throw an error if the api key does not have sufficient permissions', async () => {
const auth = factory.auth({ apiKey: { permissions: [Permission.ASSET_READ] } });
const auth = factory.auth({ apiKey: { permissions: [Permission.AssetRead] } });
await expect(sut.create(auth, { permissions: [Permission.ASSET_UPDATE] })).rejects.toBeInstanceOf(
await expect(sut.create(auth, { permissions: [Permission.AssetUpdate] })).rejects.toBeInstanceOf(
BadRequestException,
);
});
@@ -69,7 +69,7 @@ describe(ApiKeyService.name, () => {
mocks.apiKey.getById.mockResolvedValue(void 0);
await expect(sut.update(auth, id, { name: 'New Name', permissions: [Permission.ALL] })).rejects.toBeInstanceOf(
await expect(sut.update(auth, id, { name: 'New Name', permissions: [Permission.All] })).rejects.toBeInstanceOf(
BadRequestException,
);
@@ -84,18 +84,18 @@ describe(ApiKeyService.name, () => {
mocks.apiKey.getById.mockResolvedValue(apiKey);
mocks.apiKey.update.mockResolvedValue(apiKey);
await sut.update(auth, apiKey.id, { name: newName, permissions: [Permission.ALL] });
await sut.update(auth, apiKey.id, { name: newName, permissions: [Permission.All] });
expect(mocks.apiKey.update).toHaveBeenCalledWith(auth.user.id, apiKey.id, {
name: newName,
permissions: [Permission.ALL],
permissions: [Permission.All],
});
});
it('should update permissions', async () => {
const auth = factory.auth();
const apiKey = factory.apiKey({ userId: auth.user.id });
const newPermissions = [Permission.ACTIVITY_CREATE, Permission.ACTIVITY_READ, Permission.ACTIVITY_UPDATE];
const newPermissions = [Permission.ActivityCreate, Permission.ActivityRead, Permission.ActivityUpdate];
mocks.apiKey.getById.mockResolvedValue(apiKey);
mocks.apiKey.update.mockResolvedValue(apiKey);

View File

@@ -157,7 +157,7 @@ const assetEntity = Object.freeze({
ownerId: 'user_id_1',
deviceAssetId: 'device_asset_id_1',
deviceId: 'device_id_1',
type: AssetType.VIDEO,
type: AssetType.Video,
originalPath: 'fake_path/asset_1.jpeg',
fileModifiedAt: new Date('2022-06-19T23:41:36.910Z'),
fileCreatedAt: new Date('2022-06-19T23:41:36.910Z'),
@@ -177,7 +177,7 @@ const assetEntity = Object.freeze({
const existingAsset = Object.freeze({
...assetEntity,
duration: null,
type: AssetType.IMAGE,
type: AssetType.Image,
checksum: Buffer.from('_getExistingAsset', 'utf8'),
libraryId: 'libraryId',
originalFileName: 'existing-filename.jpeg',
@@ -384,7 +384,7 @@ describe(AssetMediaService.name, () => {
});
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.DELETE_FILES,
name: JobName.DeleteFiles,
data: { files: ['fake_path/asset_1.jpeg', undefined] },
});
expect(mocks.user.updateUsage).not.toHaveBeenCalled();
@@ -409,7 +409,7 @@ describe(AssetMediaService.name, () => {
);
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.DELETE_FILES,
name: JobName.DeleteFiles,
data: { files: ['fake_path/asset_1.jpeg', undefined] },
});
expect(mocks.user.updateUsage).not.toHaveBeenCalled();
@@ -437,7 +437,7 @@ describe(AssetMediaService.name, () => {
it('should hide the linked motion asset', async () => {
mocks.asset.getById.mockResolvedValueOnce({
...assetStub.livePhotoMotionAsset,
visibility: AssetVisibility.TIMELINE,
visibility: AssetVisibility.Timeline,
});
mocks.asset.create.mockResolvedValueOnce(assetStub.livePhotoStillAsset);
@@ -455,7 +455,7 @@ describe(AssetMediaService.name, () => {
expect(mocks.asset.getById).toHaveBeenCalledWith('live-photo-motion-asset');
expect(mocks.asset.update).toHaveBeenCalledWith({
id: 'live-photo-motion-asset',
visibility: AssetVisibility.HIDDEN,
visibility: AssetVisibility.Hidden,
});
});
@@ -506,7 +506,7 @@ describe(AssetMediaService.name, () => {
new ImmichFileResponse({
path: '/original/path.jpg',
contentType: 'image/jpeg',
cacheControl: CacheControl.PRIVATE_WITH_CACHE,
cacheControl: CacheControl.PrivateWithCache,
}),
);
});
@@ -546,7 +546,7 @@ describe(AssetMediaService.name, () => {
{
id: '42',
path: '/path/to/preview',
type: AssetFileType.THUMBNAIL,
type: AssetFileType.Thumbnail,
},
],
});
@@ -563,7 +563,7 @@ describe(AssetMediaService.name, () => {
{
id: '42',
path: '/path/to/preview.jpg',
type: AssetFileType.PREVIEW,
type: AssetFileType.Preview,
},
],
});
@@ -573,7 +573,7 @@ describe(AssetMediaService.name, () => {
).resolves.toEqual(
new ImmichFileResponse({
path: '/path/to/preview.jpg',
cacheControl: CacheControl.PRIVATE_WITH_CACHE,
cacheControl: CacheControl.PrivateWithCache,
contentType: 'image/jpeg',
fileName: 'asset-id_thumbnail.jpg',
}),
@@ -588,7 +588,7 @@ describe(AssetMediaService.name, () => {
).resolves.toEqual(
new ImmichFileResponse({
path: '/uploads/user-id/thumbs/path.jpg',
cacheControl: CacheControl.PRIVATE_WITH_CACHE,
cacheControl: CacheControl.PrivateWithCache,
contentType: 'image/jpeg',
fileName: 'asset-id_preview.jpg',
}),
@@ -603,7 +603,7 @@ describe(AssetMediaService.name, () => {
).resolves.toEqual(
new ImmichFileResponse({
path: '/uploads/user-id/webp/path.ext',
cacheControl: CacheControl.PRIVATE_WITH_CACHE,
cacheControl: CacheControl.PrivateWithCache,
contentType: 'application/octet-stream',
fileName: 'asset-id_thumbnail.ext',
}),
@@ -640,7 +640,7 @@ describe(AssetMediaService.name, () => {
await expect(sut.playbackVideo(authStub.admin, assetStub.hasEncodedVideo.id)).resolves.toEqual(
new ImmichFileResponse({
path: assetStub.hasEncodedVideo.encodedVideoPath!,
cacheControl: CacheControl.PRIVATE_WITH_CACHE,
cacheControl: CacheControl.PrivateWithCache,
contentType: 'video/mp4',
}),
);
@@ -653,7 +653,7 @@ describe(AssetMediaService.name, () => {
await expect(sut.playbackVideo(authStub.admin, assetStub.video.id)).resolves.toEqual(
new ImmichFileResponse({
path: assetStub.video.originalPath,
cacheControl: CacheControl.PRIVATE_WITH_CACHE,
cacheControl: CacheControl.PrivateWithCache,
contentType: 'application/octet-stream',
}),
);
@@ -723,7 +723,7 @@ describe(AssetMediaService.name, () => {
expect(mocks.asset.updateAll).toHaveBeenCalledWith([copiedAsset.id], {
deletedAt: expect.any(Date),
status: AssetStatus.TRASHED,
status: AssetStatus.Trashed,
});
expect(mocks.user.updateUsage).toHaveBeenCalledWith(authStub.user1.user.id, updatedFile.size);
expect(mocks.storage.utimes).toHaveBeenCalledWith(
@@ -754,7 +754,7 @@ describe(AssetMediaService.name, () => {
expect(mocks.asset.updateAll).toHaveBeenCalledWith([copiedAsset.id], {
deletedAt: expect.any(Date),
status: AssetStatus.TRASHED,
status: AssetStatus.Trashed,
});
expect(mocks.user.updateUsage).toHaveBeenCalledWith(authStub.user1.user.id, updatedFile.size);
expect(mocks.storage.utimes).toHaveBeenCalledWith(
@@ -783,7 +783,7 @@ describe(AssetMediaService.name, () => {
expect(mocks.asset.updateAll).toHaveBeenCalledWith([copiedAsset.id], {
deletedAt: expect.any(Date),
status: AssetStatus.TRASHED,
status: AssetStatus.Trashed,
});
expect(mocks.user.updateUsage).toHaveBeenCalledWith(authStub.user1.user.id, updatedFile.size);
expect(mocks.storage.utimes).toHaveBeenCalledWith(
@@ -815,7 +815,7 @@ describe(AssetMediaService.name, () => {
expect(mocks.asset.create).not.toHaveBeenCalled();
expect(mocks.asset.updateAll).not.toHaveBeenCalled();
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.DELETE_FILES,
name: JobName.DeleteFiles,
data: { files: [updatedFile.originalPath, undefined] },
});
expect(mocks.user.updateUsage).not.toHaveBeenCalled();
@@ -912,7 +912,7 @@ describe(AssetMediaService.name, () => {
await sut.onUploadError(request, file);
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.DELETE_FILES,
name: JobName.DeleteFiles,
data: { files: ['upload/upload/user-id/ra/nd/random-uuid.jpg'] },
});
});

View File

@@ -106,9 +106,9 @@ export class AssetMediaService extends BaseService {
getUploadFolder({ auth, fieldName, file }: UploadRequest): string {
auth = requireUploadAccess(auth);
let folder = StorageCore.getNestedFolder(StorageFolder.UPLOAD, auth.user.id, file.uuid);
let folder = StorageCore.getNestedFolder(StorageFolder.Upload, auth.user.id, file.uuid);
if (fieldName === UploadFieldName.PROFILE_DATA) {
folder = StorageCore.getFolderLocation(StorageFolder.PROFILE, auth.user.id);
folder = StorageCore.getFolderLocation(StorageFolder.Profile, auth.user.id);
}
this.storageRepository.mkdirSync(folder);
@@ -121,7 +121,7 @@ export class AssetMediaService extends BaseService {
const uploadFolder = this.getUploadFolder(asRequest(request, file));
const uploadPath = `${uploadFolder}/${uploadFilename}`;
await this.jobRepository.queue({ name: JobName.DELETE_FILES, data: { files: [uploadPath] } });
await this.jobRepository.queue({ name: JobName.DeleteFiles, data: { files: [uploadPath] } });
}
async uploadAsset(
@@ -133,7 +133,7 @@ export class AssetMediaService extends BaseService {
try {
await this.requireAccess({
auth,
permission: Permission.ASSET_UPLOAD,
permission: Permission.AssetUpload,
// do not need an id here, but the interface requires it
ids: [auth.user.id],
});
@@ -164,7 +164,7 @@ export class AssetMediaService extends BaseService {
sidecarFile?: UploadFile,
): Promise<AssetMediaResponseDto> {
try {
await this.requireAccess({ auth, permission: Permission.ASSET_UPDATE, ids: [id] });
await this.requireAccess({ auth, permission: Permission.AssetUpdate, ids: [id] });
const asset = await this.assetRepository.getById(id);
if (!asset) {
@@ -179,7 +179,7 @@ export class AssetMediaService extends BaseService {
// but the local variable holds the original file data paths.
const copiedPhoto = await this.createCopy(asset);
// and immediate trash it
await this.assetRepository.updateAll([copiedPhoto.id], { deletedAt: new Date(), status: AssetStatus.TRASHED });
await this.assetRepository.updateAll([copiedPhoto.id], { deletedAt: new Date(), status: AssetStatus.Trashed });
await this.eventRepository.emit('AssetTrash', { assetId: copiedPhoto.id, userId: auth.user.id });
await this.userRepository.updateUsage(auth.user.id, file.size);
@@ -191,14 +191,14 @@ export class AssetMediaService extends BaseService {
}
async downloadOriginal(auth: AuthDto, id: string): Promise<ImmichFileResponse> {
await this.requireAccess({ auth, permission: Permission.ASSET_DOWNLOAD, ids: [id] });
await this.requireAccess({ auth, permission: Permission.AssetDownload, ids: [id] });
const asset = await this.findOrFail(id);
return new ImmichFileResponse({
path: asset.originalPath,
contentType: mimeTypes.lookup(asset.originalPath),
cacheControl: CacheControl.PRIVATE_WITH_CACHE,
cacheControl: CacheControl.PrivateWithCache,
});
}
@@ -207,7 +207,7 @@ export class AssetMediaService extends BaseService {
id: string,
dto: AssetMediaOptionsDto,
): Promise<ImmichFileResponse | AssetMediaRedirectResponse> {
await this.requireAccess({ auth, permission: Permission.ASSET_VIEW, ids: [id] });
await this.requireAccess({ auth, permission: Permission.AssetView, ids: [id] });
const asset = await this.findOrFail(id);
const size = dto.size ?? AssetMediaSize.THUMBNAIL;
@@ -240,16 +240,16 @@ export class AssetMediaService extends BaseService {
fileName,
path: filepath,
contentType: mimeTypes.lookup(filepath),
cacheControl: CacheControl.PRIVATE_WITH_CACHE,
cacheControl: CacheControl.PrivateWithCache,
});
}
async playbackVideo(auth: AuthDto, id: string): Promise<ImmichFileResponse> {
await this.requireAccess({ auth, permission: Permission.ASSET_VIEW, ids: [id] });
await this.requireAccess({ auth, permission: Permission.AssetView, ids: [id] });
const asset = await this.findOrFail(id);
if (asset.type !== AssetType.VIDEO) {
if (asset.type !== AssetType.Video) {
throw new BadRequestException('Asset is not a video');
}
@@ -258,7 +258,7 @@ export class AssetMediaService extends BaseService {
return new ImmichFileResponse({
path: filepath,
contentType: mimeTypes.lookup(filepath),
cacheControl: CacheControl.PRIVATE_WITH_CACHE,
cacheControl: CacheControl.PrivateWithCache,
});
}
@@ -312,7 +312,7 @@ export class AssetMediaService extends BaseService {
): Promise<AssetMediaResponseDto> {
// clean up files
await this.jobRepository.queue({
name: JobName.DELETE_FILES,
name: JobName.DeleteFiles,
data: { files: [file.originalPath, sidecarFile?.originalPath] },
});
@@ -365,7 +365,7 @@ export class AssetMediaService extends BaseService {
await this.storageRepository.utimes(file.originalPath, new Date(), new Date(dto.fileModifiedAt));
await this.assetRepository.upsertExif({ assetId, fileSizeInByte: file.size });
await this.jobRepository.queue({
name: JobName.METADATA_EXTRACTION,
name: JobName.MetadataExtraction,
data: { id: assetId, source: 'upload' },
});
}
@@ -394,7 +394,7 @@ export class AssetMediaService extends BaseService {
const { size } = await this.storageRepository.stat(created.originalPath);
await this.assetRepository.upsertExif({ assetId: created.id, fileSizeInByte: size });
await this.jobRepository.queue({ name: JobName.METADATA_EXTRACTION, data: { id: created.id, source: 'copy' } });
await this.jobRepository.queue({ name: JobName.MetadataExtraction, data: { id: created.id, source: 'copy' } });
return created;
}
@@ -416,7 +416,7 @@ export class AssetMediaService extends BaseService {
type: mimeTypes.assetType(file.originalPath),
isFavorite: dto.isFavorite,
duration: dto.duration || null,
visibility: dto.visibility ?? AssetVisibility.TIMELINE,
visibility: dto.visibility ?? AssetVisibility.Timeline,
livePhotoVideoId: dto.livePhotoVideoId,
originalFileName: dto.filename || file.originalName,
sidecarPath: sidecarFile?.originalPath,
@@ -427,7 +427,7 @@ export class AssetMediaService extends BaseService {
}
await this.storageRepository.utimes(file.originalPath, new Date(), new Date(dto.fileModifiedAt));
await this.assetRepository.upsertExif({ assetId: asset.id, fileSizeInByte: file.size });
await this.jobRepository.queue({ name: JobName.METADATA_EXTRACTION, data: { id: asset.id, source: 'upload' } });
await this.jobRepository.queue({ name: JobName.MetadataExtraction, data: { id: asset.id, source: 'upload' } });
return asset;
}

View File

@@ -13,10 +13,10 @@ import { factory } from 'test/small.factory';
import { makeStream, newTestService, ServiceMocks } from 'test/utils';
const stats: AssetStats = {
[AssetType.IMAGE]: 10,
[AssetType.VIDEO]: 23,
[AssetType.AUDIO]: 0,
[AssetType.OTHER]: 0,
[AssetType.Image]: 10,
[AssetType.Video]: 23,
[AssetType.Audio]: 0,
[AssetType.Other]: 0,
};
const statResponse: AssetStatsResponseDto = {
@@ -46,21 +46,21 @@ describe(AssetService.name, () => {
describe('getStatistics', () => {
it('should get the statistics for a user, excluding archived assets', async () => {
mocks.asset.getStatistics.mockResolvedValue(stats);
await expect(sut.getStatistics(authStub.admin, { visibility: AssetVisibility.TIMELINE })).resolves.toEqual(
await expect(sut.getStatistics(authStub.admin, { visibility: AssetVisibility.Timeline })).resolves.toEqual(
statResponse,
);
expect(mocks.asset.getStatistics).toHaveBeenCalledWith(authStub.admin.user.id, {
visibility: AssetVisibility.TIMELINE,
visibility: AssetVisibility.Timeline,
});
});
it('should get the statistics for a user for archived assets', async () => {
mocks.asset.getStatistics.mockResolvedValue(stats);
await expect(sut.getStatistics(authStub.admin, { visibility: AssetVisibility.ARCHIVE })).resolves.toEqual(
await expect(sut.getStatistics(authStub.admin, { visibility: AssetVisibility.Archive })).resolves.toEqual(
statResponse,
);
expect(mocks.asset.getStatistics).toHaveBeenCalledWith(authStub.admin.user.id, {
visibility: AssetVisibility.ARCHIVE,
visibility: AssetVisibility.Archive,
});
});
@@ -202,7 +202,7 @@ describe(AssetService.name, () => {
describe('update', () => {
it('should require asset write access for the id', async () => {
await expect(
sut.update(authStub.admin, 'asset-1', { visibility: AssetVisibility.TIMELINE }),
sut.update(authStub.admin, 'asset-1', { visibility: AssetVisibility.Timeline }),
).rejects.toBeInstanceOf(BadRequestException);
expect(mocks.asset.update).not.toHaveBeenCalled();
@@ -253,7 +253,7 @@ describe(AssetService.name, () => {
});
expect(mocks.asset.update).not.toHaveBeenCalledWith({
id: assetStub.livePhotoMotionAsset.id,
visibility: AssetVisibility.TIMELINE,
visibility: AssetVisibility.Timeline,
});
expect(mocks.event.emit).not.toHaveBeenCalledWith('AssetShow', {
assetId: assetStub.livePhotoMotionAsset.id,
@@ -277,7 +277,7 @@ describe(AssetService.name, () => {
});
expect(mocks.asset.update).not.toHaveBeenCalledWith({
id: assetStub.livePhotoMotionAsset.id,
visibility: AssetVisibility.TIMELINE,
visibility: AssetVisibility.Timeline,
});
expect(mocks.event.emit).not.toHaveBeenCalledWith('AssetShow', {
assetId: assetStub.livePhotoMotionAsset.id,
@@ -301,7 +301,7 @@ describe(AssetService.name, () => {
});
expect(mocks.asset.update).not.toHaveBeenCalledWith({
id: assetStub.livePhotoMotionAsset.id,
visibility: AssetVisibility.TIMELINE,
visibility: AssetVisibility.Timeline,
});
expect(mocks.event.emit).not.toHaveBeenCalledWith('AssetShow', {
assetId: assetStub.livePhotoMotionAsset.id,
@@ -314,7 +314,7 @@ describe(AssetService.name, () => {
mocks.asset.getById.mockResolvedValueOnce({
...assetStub.livePhotoMotionAsset,
ownerId: authStub.admin.user.id,
visibility: AssetVisibility.TIMELINE,
visibility: AssetVisibility.Timeline,
});
mocks.asset.getById.mockResolvedValueOnce(assetStub.image);
mocks.asset.update.mockResolvedValue(assetStub.image);
@@ -325,7 +325,7 @@ describe(AssetService.name, () => {
expect(mocks.asset.update).toHaveBeenCalledWith({
id: assetStub.livePhotoMotionAsset.id,
visibility: AssetVisibility.HIDDEN,
visibility: AssetVisibility.Hidden,
});
expect(mocks.event.emit).toHaveBeenCalledWith('AssetHide', {
assetId: assetStub.livePhotoMotionAsset.id,
@@ -392,10 +392,10 @@ describe(AssetService.name, () => {
it('should update all assets', async () => {
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1', 'asset-2']));
await sut.updateAll(authStub.admin, { ids: ['asset-1', 'asset-2'], visibility: AssetVisibility.ARCHIVE });
await sut.updateAll(authStub.admin, { ids: ['asset-1', 'asset-2'], visibility: AssetVisibility.Archive });
expect(mocks.asset.updateAll).toHaveBeenCalledWith(['asset-1', 'asset-2'], {
visibility: AssetVisibility.ARCHIVE,
visibility: AssetVisibility.Archive,
});
});
@@ -428,7 +428,7 @@ describe(AssetService.name, () => {
expect(mocks.asset.updateAll).toHaveBeenCalled();
expect(mocks.asset.updateAllExif).toHaveBeenCalledWith(['asset-1'], { latitude: 0, longitude: 0 });
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{ name: JobName.SIDECAR_WRITE, data: { id: 'asset-1', latitude: 0, longitude: 0 } },
{ name: JobName.SidecarWrite, data: { id: 'asset-1', latitude: 0, longitude: 0 } },
]);
});
@@ -451,7 +451,7 @@ describe(AssetService.name, () => {
longitude: 50,
});
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{ name: JobName.SIDECAR_WRITE, data: { id: 'asset-1', dateTimeOriginal, latitude: 30, longitude: 50 } },
{ name: JobName.SidecarWrite, data: { id: 'asset-1', dateTimeOriginal, latitude: 30, longitude: 50 } },
]);
});
@@ -497,7 +497,7 @@ describe(AssetService.name, () => {
expect(mocks.asset.updateAll).toHaveBeenCalledWith(['asset1', 'asset2'], {
deletedAt: expect.any(Date),
status: AssetStatus.TRASHED,
status: AssetStatus.Trashed,
});
expect(mocks.job.queue.mock.calls).toEqual([]);
});
@@ -518,11 +518,11 @@ describe(AssetService.name, () => {
mocks.assetJob.streamForDeletedJob.mockReturnValue(makeStream([asset]));
mocks.systemMetadata.get.mockResolvedValue({ trash: { enabled: false } });
await expect(sut.handleAssetDeletionCheck()).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleAssetDeletionCheck()).resolves.toBe(JobStatus.Success);
expect(mocks.assetJob.streamForDeletedJob).toHaveBeenCalledWith(new Date());
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{ name: JobName.ASSET_DELETION, data: { id: asset.id, deleteOnDisk: true } },
{ name: JobName.AssetDeletion, data: { id: asset.id, deleteOnDisk: true } },
]);
});
@@ -532,11 +532,11 @@ describe(AssetService.name, () => {
mocks.assetJob.streamForDeletedJob.mockReturnValue(makeStream([asset]));
mocks.systemMetadata.get.mockResolvedValue({ trash: { enabled: true, days: 7 } });
await expect(sut.handleAssetDeletionCheck()).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleAssetDeletionCheck()).resolves.toBe(JobStatus.Success);
expect(mocks.assetJob.streamForDeletedJob).toHaveBeenCalledWith(DateTime.now().minus({ days: 7 }).toJSDate());
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{ name: JobName.ASSET_DELETION, data: { id: asset.id, deleteOnDisk: true } },
{ name: JobName.AssetDeletion, data: { id: asset.id, deleteOnDisk: true } },
]);
});
});
@@ -552,7 +552,7 @@ describe(AssetService.name, () => {
expect(mocks.job.queue.mock.calls).toEqual([
[
{
name: JobName.DELETE_FILES,
name: JobName.DeleteFiles,
data: {
files: [
'/uploads/user-id/webp/path.ext',
@@ -606,7 +606,7 @@ describe(AssetService.name, () => {
expect(mocks.job.queue.mock.calls).toEqual([
[
{
name: JobName.ASSET_DELETION,
name: JobName.AssetDeletion,
data: {
id: assetStub.livePhotoMotionAsset.id,
deleteOnDisk: true,
@@ -615,7 +615,7 @@ describe(AssetService.name, () => {
],
[
{
name: JobName.DELETE_FILES,
name: JobName.DeleteFiles,
data: {
files: [
'/uploads/user-id/webp/path.ext',
@@ -643,7 +643,7 @@ describe(AssetService.name, () => {
expect(mocks.job.queue.mock.calls).toEqual([
[
{
name: JobName.DELETE_FILES,
name: JobName.DeleteFiles,
data: {
files: [
'/uploads/user-id/webp/path.ext',
@@ -668,7 +668,7 @@ describe(AssetService.name, () => {
it('should fail if asset could not be found', async () => {
mocks.assetJob.getForAssetDeletion.mockResolvedValue(void 0);
await expect(sut.handleAssetDeletion({ id: assetStub.image.id, deleteOnDisk: true })).resolves.toBe(
JobStatus.FAILED,
JobStatus.Failed,
);
});
});
@@ -679,7 +679,7 @@ describe(AssetService.name, () => {
await sut.run(authStub.admin, { assetIds: ['asset-1'], name: AssetJobName.REFRESH_FACES });
expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.FACE_DETECTION, data: { id: 'asset-1' } }]);
expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.FaceDetection, data: { id: 'asset-1' } }]);
});
it('should run the refresh metadata job', async () => {
@@ -687,7 +687,7 @@ describe(AssetService.name, () => {
await sut.run(authStub.admin, { assetIds: ['asset-1'], name: AssetJobName.REFRESH_METADATA });
expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.METADATA_EXTRACTION, data: { id: 'asset-1' } }]);
expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.MetadataExtraction, data: { id: 'asset-1' } }]);
});
it('should run the refresh thumbnails job', async () => {
@@ -695,7 +695,7 @@ describe(AssetService.name, () => {
await sut.run(authStub.admin, { assetIds: ['asset-1'], name: AssetJobName.REGENERATE_THUMBNAIL });
expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.GENERATE_THUMBNAILS, data: { id: 'asset-1' } }]);
expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.GenerateThumbnails, data: { id: 'asset-1' } }]);
});
it('should run the transcode video', async () => {
@@ -703,7 +703,7 @@ describe(AssetService.name, () => {
await sut.run(authStub.admin, { assetIds: ['asset-1'], name: AssetJobName.TRANSCODE_VIDEO });
expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.VIDEO_CONVERSION, data: { id: 'asset-1' } }]);
expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.VideoConversation, data: { id: 'asset-1' } }]);
});
});

View File

@@ -23,7 +23,7 @@ import { getAssetFiles, getMyPartnerIds, onAfterUnlink, onBeforeLink, onBeforeUn
@Injectable()
export class AssetService extends BaseService {
async getStatistics(auth: AuthDto, dto: AssetStatsDto) {
if (dto.visibility === AssetVisibility.LOCKED) {
if (dto.visibility === AssetVisibility.Locked) {
requireElevatedPermission(auth);
}
@@ -46,7 +46,7 @@ export class AssetService extends BaseService {
}
async get(auth: AuthDto, id: string): Promise<AssetResponseDto | SanitizedAssetResponseDto> {
await this.requireAccess({ auth, permission: Permission.ASSET_READ, ids: [id] });
await this.requireAccess({ auth, permission: Permission.AssetRead, ids: [id] });
const asset = await this.assetRepository.getById(id, {
exifInfo: true,
@@ -78,7 +78,7 @@ export class AssetService extends BaseService {
}
async update(auth: AuthDto, id: string, dto: UpdateAssetDto): Promise<AssetResponseDto> {
await this.requireAccess({ auth, permission: Permission.ASSET_UPDATE, ids: [id] });
await this.requireAccess({ auth, permission: Permission.AssetUpdate, ids: [id] });
const { description, dateTimeOriginal, latitude, longitude, rating, ...rest } = dto;
const repos = { asset: this.assetRepository, event: this.eventRepository };
@@ -114,7 +114,7 @@ export class AssetService extends BaseService {
async updateAll(auth: AuthDto, dto: AssetBulkUpdateDto): Promise<void> {
const { ids, description, dateTimeOriginal, latitude, longitude, ...options } = dto;
await this.requireAccess({ auth, permission: Permission.ASSET_UPDATE, ids });
await this.requireAccess({ auth, permission: Permission.AssetUpdate, ids });
if (
description !== undefined ||
@@ -125,7 +125,7 @@ export class AssetService extends BaseService {
await this.assetRepository.updateAllExif(ids, { description, dateTimeOriginal, latitude, longitude });
await this.jobRepository.queueAll(
ids.map((id) => ({
name: JobName.SIDECAR_WRITE,
name: JobName.SidecarWrite,
data: { id, description, dateTimeOriginal, latitude, longitude },
})),
);
@@ -139,13 +139,13 @@ export class AssetService extends BaseService {
) {
await this.assetRepository.updateAll(ids, options);
if (options.visibility === AssetVisibility.LOCKED) {
if (options.visibility === AssetVisibility.Locked) {
await this.albumRepository.removeAssetsFromAll(ids);
}
}
}
@OnJob({ name: JobName.ASSET_DELETION_CHECK, queue: QueueName.BACKGROUND_TASK })
@OnJob({ name: JobName.AssetDeletionCheck, queue: QueueName.BackgroundTask })
async handleAssetDeletionCheck(): Promise<JobStatus> {
const config = await this.getConfig({ withCache: false });
const trashedDays = config.trash.enabled ? config.trash.days : 0;
@@ -158,7 +158,7 @@ export class AssetService extends BaseService {
if (chunk.length > 0) {
await this.jobRepository.queueAll(
chunk.map(({ id, isOffline }) => ({
name: JobName.ASSET_DELETION,
name: JobName.AssetDeletion,
data: { id, deleteOnDisk: !isOffline },
})),
);
@@ -176,17 +176,17 @@ export class AssetService extends BaseService {
await queueChunk();
return JobStatus.SUCCESS;
return JobStatus.Success;
}
@OnJob({ name: JobName.ASSET_DELETION, queue: QueueName.BACKGROUND_TASK })
async handleAssetDeletion(job: JobOf<JobName.ASSET_DELETION>): Promise<JobStatus> {
@OnJob({ name: JobName.AssetDeletion, queue: QueueName.BackgroundTask })
async handleAssetDeletion(job: JobOf<JobName.AssetDeletion>): Promise<JobStatus> {
const { id, deleteOnDisk } = job;
const asset = await this.assetJobRepository.getForAssetDeletion(id);
if (!asset) {
return JobStatus.FAILED;
return JobStatus.Failed;
}
// Replace the parent of the stack children with a new asset
@@ -215,7 +215,7 @@ export class AssetService extends BaseService {
const count = await this.assetRepository.getLivePhotoCount(asset.livePhotoVideoId);
if (count === 0) {
await this.jobRepository.queue({
name: JobName.ASSET_DELETION,
name: JobName.AssetDeletion,
data: { id: asset.livePhotoVideoId, deleteOnDisk },
});
}
@@ -228,18 +228,18 @@ export class AssetService extends BaseService {
files.push(asset.sidecarPath, asset.originalPath);
}
await this.jobRepository.queue({ name: JobName.DELETE_FILES, data: { files } });
await this.jobRepository.queue({ name: JobName.DeleteFiles, data: { files } });
return JobStatus.SUCCESS;
return JobStatus.Success;
}
async deleteAll(auth: AuthDto, dto: AssetBulkDeleteDto): Promise<void> {
const { ids, force } = dto;
await this.requireAccess({ auth, permission: Permission.ASSET_DELETE, ids });
await this.requireAccess({ auth, permission: Permission.AssetDelete, ids });
await this.assetRepository.updateAll(ids, {
deletedAt: new Date(),
status: force ? AssetStatus.DELETED : AssetStatus.TRASHED,
status: force ? AssetStatus.Deleted : AssetStatus.Trashed,
});
await this.eventRepository.emit(force ? 'AssetDeleteAll' : 'AssetTrashAll', {
assetIds: ids,
@@ -248,29 +248,29 @@ export class AssetService extends BaseService {
}
async run(auth: AuthDto, dto: AssetJobsDto) {
await this.requireAccess({ auth, permission: Permission.ASSET_UPDATE, ids: dto.assetIds });
await this.requireAccess({ auth, permission: Permission.AssetUpdate, ids: dto.assetIds });
const jobs: JobItem[] = [];
for (const id of dto.assetIds) {
switch (dto.name) {
case AssetJobName.REFRESH_FACES: {
jobs.push({ name: JobName.FACE_DETECTION, data: { id } });
jobs.push({ name: JobName.FaceDetection, data: { id } });
break;
}
case AssetJobName.REFRESH_METADATA: {
jobs.push({ name: JobName.METADATA_EXTRACTION, data: { id } });
jobs.push({ name: JobName.MetadataExtraction, data: { id } });
break;
}
case AssetJobName.REGENERATE_THUMBNAIL: {
jobs.push({ name: JobName.GENERATE_THUMBNAILS, data: { id } });
jobs.push({ name: JobName.GenerateThumbnails, data: { id } });
break;
}
case AssetJobName.TRANSCODE_VIDEO: {
jobs.push({ name: JobName.VIDEO_CONVERSION, data: { id } });
jobs.push({ name: JobName.VideoConversation, data: { id } });
break;
}
}
@@ -292,7 +292,7 @@ export class AssetService extends BaseService {
const writes = _.omitBy({ description, dateTimeOriginal, latitude, longitude, rating }, _.isUndefined);
if (Object.keys(writes).length > 0) {
await this.assetRepository.upsertExif({ assetId: id, ...writes });
await this.jobRepository.queue({ name: JobName.SIDECAR_WRITE, data: { id, ...writes } });
await this.jobRepository.queue({ name: JobName.SidecarWrite, data: { id, ...writes } });
}
}
}

View File

@@ -18,7 +18,7 @@ describe(AuditService.name, () => {
it('should delete old audit entries', async () => {
mocks.audit.removeBefore.mockResolvedValue();
await expect(sut.handleCleanup()).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleCleanup()).resolves.toBe(JobStatus.Success);
expect(mocks.audit.removeBefore).toHaveBeenCalledWith(expect.any(Date));
});

View File

@@ -7,9 +7,9 @@ import { BaseService } from 'src/services/base.service';
@Injectable()
export class AuditService extends BaseService {
@OnJob({ name: JobName.CLEAN_OLD_AUDIT_LOGS, queue: QueueName.BACKGROUND_TASK })
@OnJob({ name: JobName.CleanOldAuditLogs, queue: QueueName.BackgroundTask })
async handleCleanup(): Promise<JobStatus> {
await this.auditRepository.removeBefore(DateTime.now().minus(AUDIT_LOG_MAX_DURATION).toJSDate());
return JobStatus.SUCCESS;
return JobStatus.Success;
}
}

View File

@@ -154,7 +154,7 @@ describe(AuthService.name, () => {
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.enabled);
await expect(sut.logout(auth, AuthType.OAUTH)).resolves.toEqual({
await expect(sut.logout(auth, AuthType.OAuth)).resolves.toEqual({
successful: true,
redirectUri: 'http://end-session-endpoint',
});
@@ -163,7 +163,7 @@ describe(AuthService.name, () => {
it('should return the default redirect', async () => {
const auth = factory.auth();
await expect(sut.logout(auth, AuthType.PASSWORD)).resolves.toEqual({
await expect(sut.logout(auth, AuthType.Password)).resolves.toEqual({
successful: true,
redirectUri: '/auth/login?autoLaunch=0',
});
@@ -173,7 +173,7 @@ describe(AuthService.name, () => {
const auth = { user: { id: '123' }, session: { id: 'token123' } } as AuthDto;
mocks.session.delete.mockResolvedValue();
await expect(sut.logout(auth, AuthType.PASSWORD)).resolves.toEqual({
await expect(sut.logout(auth, AuthType.Password)).resolves.toEqual({
successful: true,
redirectUri: '/auth/login?autoLaunch=0',
});
@@ -185,7 +185,7 @@ describe(AuthService.name, () => {
it('should return the default redirect if auth type is OAUTH but oauth is not enabled', async () => {
const auth = { user: { id: '123' } } as AuthDto;
await expect(sut.logout(auth, AuthType.OAUTH)).resolves.toEqual({
await expect(sut.logout(auth, AuthType.OAuth)).resolves.toEqual({
successful: true,
redirectUri: '/auth/login?autoLaunch=0',
});
@@ -463,7 +463,7 @@ describe(AuthService.name, () => {
sut.authenticate({
headers: { 'x-api-key': 'auth_token' },
queryParams: {},
metadata: { adminRoute: false, sharedLinkRoute: false, uri: 'test', permission: Permission.ASSET_READ },
metadata: { adminRoute: false, sharedLinkRoute: false, uri: 'test', permission: Permission.AssetRead },
}),
).rejects.toBeInstanceOf(ForbiddenException);
});

View File

@@ -194,13 +194,13 @@ export class AuthService extends BaseService {
}
private async validate({ headers, queryParams }: Omit<ValidateRequest, 'metadata'>): Promise<AuthDto> {
const shareKey = (headers[ImmichHeader.SHARED_LINK_KEY] || queryParams[ImmichQuery.SHARED_LINK_KEY]) as string;
const session = (headers[ImmichHeader.USER_TOKEN] ||
headers[ImmichHeader.SESSION_TOKEN] ||
queryParams[ImmichQuery.SESSION_KEY] ||
const shareKey = (headers[ImmichHeader.SharedLinkKey] || queryParams[ImmichQuery.SharedLinkKey]) as string;
const session = (headers[ImmichHeader.UserToken] ||
headers[ImmichHeader.SessionToken] ||
queryParams[ImmichQuery.SessionKey] ||
this.getBearerToken(headers) ||
this.getCookieToken(headers)) as string;
const apiKey = (headers[ImmichHeader.API_KEY] || queryParams[ImmichQuery.API_KEY]) as string;
const apiKey = (headers[ImmichHeader.ApiKey] || queryParams[ImmichQuery.ApiKey]) as string;
if (shareKey) {
return this.validateSharedLink(shareKey);
@@ -321,7 +321,7 @@ export class AuthService extends BaseService {
const { contentType, data } = await this.oauthRepository.getProfilePicture(url);
const extensionWithDot = mimeTypes.toExtension(contentType || 'image/jpeg') ?? 'jpg';
const profileImagePath = join(
StorageCore.getFolderLocation(StorageFolder.PROFILE, user.id),
StorageCore.getFolderLocation(StorageFolder.Profile, user.id),
`${this.cryptoRepository.randomUUID()}${extensionWithDot}`,
);
@@ -330,7 +330,7 @@ export class AuthService extends BaseService {
await this.userRepository.update(user.id, { profileImagePath, profileChangedAt: new Date() });
if (oldPath) {
await this.jobRepository.queue({ name: JobName.DELETE_FILES, data: { files: [oldPath] } });
await this.jobRepository.queue({ name: JobName.DeleteFiles, data: { files: [oldPath] } });
}
} catch (error: Error | any) {
this.logger.warn(`Unable to sync oauth profile picture: ${error}`, error?.stack);
@@ -366,7 +366,7 @@ export class AuthService extends BaseService {
}
private async getLogoutEndpoint(authType: AuthType): Promise<string> {
if (authType !== AuthType.OAUTH) {
if (authType !== AuthType.OAuth) {
return LOGIN_URL;
}
@@ -389,17 +389,17 @@ export class AuthService extends BaseService {
private getCookieToken(headers: IncomingHttpHeaders): string | null {
const cookies = parse(headers.cookie || '');
return cookies[ImmichCookie.ACCESS_TOKEN] || null;
return cookies[ImmichCookie.AccessToken] || null;
}
private getCookieOauthState(headers: IncomingHttpHeaders): string | null {
const cookies = parse(headers.cookie || '');
return cookies[ImmichCookie.OAUTH_STATE] || null;
return cookies[ImmichCookie.OAuthState] || null;
}
private getCookieCodeVerifier(headers: IncomingHttpHeaders): string | null {
const cookies = parse(headers.cookie || '');
return cookies[ImmichCookie.OAUTH_CODE_VERIFIER] || null;
return cookies[ImmichCookie.OAuthCodeVerifier] || null;
}
async validateSharedLink(key: string | string[]): Promise<AuthDto> {

View File

@@ -38,7 +38,7 @@ describe(BackupService.name, () => {
});
it('should not initialise backup database job when running on microservices', async () => {
mocks.config.getWorker.mockReturnValue(ImmichWorker.MICROSERVICES);
mocks.config.getWorker.mockReturnValue(ImmichWorker.Microservices);
await sut.onConfigInit({ newConfig: systemConfigStub.backupEnabled as SystemConfig });
expect(mocks.cron.create).not.toHaveBeenCalled();
@@ -98,10 +98,10 @@ describe(BackupService.name, () => {
await sut.cleanupDatabaseBackups();
expect(mocks.storage.unlink).toHaveBeenCalledTimes(2);
expect(mocks.storage.unlink).toHaveBeenCalledWith(
`${StorageCore.getBaseFolder(StorageFolder.BACKUPS)}/immich-db-backup-123.sql.gz.tmp`,
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-123.sql.gz.tmp`,
);
expect(mocks.storage.unlink).toHaveBeenCalledWith(
`${StorageCore.getBaseFolder(StorageFolder.BACKUPS)}/immich-db-backup-345.sql.gz.tmp`,
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-345.sql.gz.tmp`,
);
});
@@ -111,7 +111,7 @@ describe(BackupService.name, () => {
await sut.cleanupDatabaseBackups();
expect(mocks.storage.unlink).toHaveBeenCalledTimes(1);
expect(mocks.storage.unlink).toHaveBeenCalledWith(
`${StorageCore.getBaseFolder(StorageFolder.BACKUPS)}/immich-db-backup-1.sql.gz`,
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-1.sql.gz`,
);
});
@@ -125,10 +125,10 @@ describe(BackupService.name, () => {
await sut.cleanupDatabaseBackups();
expect(mocks.storage.unlink).toHaveBeenCalledTimes(2);
expect(mocks.storage.unlink).toHaveBeenCalledWith(
`${StorageCore.getBaseFolder(StorageFolder.BACKUPS)}/immich-db-backup-1.sql.gz.tmp`,
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-1.sql.gz.tmp`,
);
expect(mocks.storage.unlink).toHaveBeenCalledWith(
`${StorageCore.getBaseFolder(StorageFolder.BACKUPS)}/immich-db-backup-2.sql.gz`,
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-2.sql.gz`,
);
});
});
@@ -145,13 +145,13 @@ describe(BackupService.name, () => {
it('should run a database backup successfully', async () => {
const result = await sut.handleBackupDatabase();
expect(result).toBe(JobStatus.SUCCESS);
expect(result).toBe(JobStatus.Success);
expect(mocks.storage.createWriteStream).toHaveBeenCalled();
});
it('should rename file on success', async () => {
const result = await sut.handleBackupDatabase();
expect(result).toBe(JobStatus.SUCCESS);
expect(result).toBe(JobStatus.Success);
expect(mocks.storage.rename).toHaveBeenCalled();
});
@@ -219,7 +219,7 @@ describe(BackupService.name, () => {
mocks.database.getPostgresVersion.mockResolvedValue(postgresVersion);
const result = await sut.handleBackupDatabase();
expect(mocks.process.spawn).not.toHaveBeenCalled();
expect(result).toBe(JobStatus.FAILED);
expect(result).toBe(JobStatus.Failed);
});
});
});

View File

@@ -14,7 +14,7 @@ import { handlePromiseError } from 'src/utils/misc';
export class BackupService extends BaseService {
private backupLock = false;
@OnEvent({ name: 'ConfigInit', workers: [ImmichWorker.MICROSERVICES] })
@OnEvent({ name: 'ConfigInit', workers: [ImmichWorker.Microservices] })
async onConfigInit({
newConfig: {
backup: { database },
@@ -26,7 +26,7 @@ export class BackupService extends BaseService {
this.cronRepository.create({
name: 'backupDatabase',
expression: database.cronExpression,
onTick: () => handlePromiseError(this.jobRepository.queue({ name: JobName.BACKUP_DATABASE }), this.logger),
onTick: () => handlePromiseError(this.jobRepository.queue({ name: JobName.BackupDatabase }), this.logger),
start: database.enabled,
});
}
@@ -51,7 +51,7 @@ export class BackupService extends BaseService {
backup: { database: config },
} = await this.getConfig({ withCache: false });
const backupsFolder = StorageCore.getBaseFolder(StorageFolder.BACKUPS);
const backupsFolder = StorageCore.getBaseFolder(StorageFolder.Backups);
const files = await this.storageRepository.readdir(backupsFolder);
const failedBackups = files.filter((file) => file.match(/immich-db-backup-\d+\.sql\.gz\.tmp$/));
const backups = files
@@ -68,7 +68,7 @@ export class BackupService extends BaseService {
this.logger.debug(`Database Backup Cleanup Finished, deleted ${toDelete.length} backups`);
}
@OnJob({ name: JobName.BACKUP_DATABASE, queue: QueueName.BACKUP_DATABASE })
@OnJob({ name: JobName.BackupDatabase, queue: QueueName.BackupDatabase })
async handleBackupDatabase(): Promise<JobStatus> {
this.logger.debug(`Database Backup Started`);
const { database } = this.configRepository.getEnv();
@@ -92,7 +92,7 @@ export class BackupService extends BaseService {
databaseParams.push('--clean', '--if-exists');
const databaseVersion = await this.databaseRepository.getPostgresVersion();
const backupFilePath = path.join(
StorageCore.getBaseFolder(StorageFolder.BACKUPS),
StorageCore.getBaseFolder(StorageFolder.Backups),
`immich-db-backup-${DateTime.now().toFormat("yyyyLLdd'T'HHmmss")}-v${serverVersion.toString()}-pg${databaseVersion.split(' ')[0]}.sql.gz.tmp`,
);
const databaseSemver = semver.coerce(databaseVersion);
@@ -100,7 +100,7 @@ export class BackupService extends BaseService {
if (!databaseMajorVersion || !databaseSemver || !semver.satisfies(databaseSemver, '>=14.0.0 <18.0.0')) {
this.logger.error(`Database Backup Failure: Unsupported PostgreSQL version: ${databaseVersion}`);
return JobStatus.FAILED;
return JobStatus.Failed;
}
this.logger.log(`Database Backup Starting. Database Version: ${databaseMajorVersion}`);
@@ -179,6 +179,6 @@ export class BackupService extends BaseService {
this.logger.log(`Database Backup Success`);
await this.cleanupDatabaseBackups();
return JobStatus.SUCCESS;
return JobStatus.Success;
}
}

View File

@@ -19,7 +19,7 @@ describe(DatabaseService.name, () => {
({ sut, mocks } = newTestService(DatabaseService));
extensionRange = '0.2.x';
mocks.database.getVectorExtension.mockResolvedValue(DatabaseExtension.VECTORCHORD);
mocks.database.getVectorExtension.mockResolvedValue(DatabaseExtension.VectorChord);
mocks.database.getExtensionVersionRange.mockReturnValue(extensionRange);
versionBelowRange = '0.1.0';
@@ -28,7 +28,7 @@ describe(DatabaseService.name, () => {
versionAboveRange = '0.3.0';
mocks.database.getExtensionVersions.mockResolvedValue([
{
name: DatabaseExtension.VECTORCHORD,
name: DatabaseExtension.VectorChord,
installedVersion: null,
availableVersion: minVersionInRange,
},
@@ -49,9 +49,9 @@ describe(DatabaseService.name, () => {
});
describe.each(<Array<{ extension: VectorExtension; extensionName: string }>>[
{ extension: DatabaseExtension.VECTOR, extensionName: EXTENSION_NAMES[DatabaseExtension.VECTOR] },
{ extension: DatabaseExtension.VECTORS, extensionName: EXTENSION_NAMES[DatabaseExtension.VECTORS] },
{ extension: DatabaseExtension.VECTORCHORD, extensionName: EXTENSION_NAMES[DatabaseExtension.VECTORCHORD] },
{ extension: DatabaseExtension.Vector, extensionName: EXTENSION_NAMES[DatabaseExtension.Vector] },
{ extension: DatabaseExtension.Vectors, extensionName: EXTENSION_NAMES[DatabaseExtension.Vectors] },
{ extension: DatabaseExtension.VectorChord, extensionName: EXTENSION_NAMES[DatabaseExtension.VectorChord] },
])('should work with $extensionName', ({ extension, extensionName }) => {
beforeEach(() => {
mocks.database.getExtensionVersions.mockResolvedValue([
@@ -292,8 +292,8 @@ describe(DatabaseService.name, () => {
await expect(sut.onBootstrap()).resolves.toBeUndefined();
expect(mocks.database.reindexVectorsIfNeeded).toHaveBeenCalledExactlyOnceWith([
VectorIndex.CLIP,
VectorIndex.FACE,
VectorIndex.Clip,
VectorIndex.Face,
]);
expect(mocks.database.reindexVectorsIfNeeded).toHaveBeenCalledTimes(1);
expect(mocks.database.runMigrations).toHaveBeenCalledTimes(1);
@@ -306,8 +306,8 @@ describe(DatabaseService.name, () => {
await expect(sut.onBootstrap()).rejects.toBeDefined();
expect(mocks.database.reindexVectorsIfNeeded).toHaveBeenCalledExactlyOnceWith([
VectorIndex.CLIP,
VectorIndex.FACE,
VectorIndex.Clip,
VectorIndex.Face,
]);
expect(mocks.database.runMigrations).not.toHaveBeenCalled();
expect(mocks.logger.fatal).not.toHaveBeenCalled();
@@ -330,7 +330,7 @@ describe(DatabaseService.name, () => {
database: 'immich',
},
skipMigrations: true,
vectorExtension: DatabaseExtension.VECTORS,
vectorExtension: DatabaseExtension.Vectors,
},
}),
);
@@ -356,12 +356,12 @@ describe(DatabaseService.name, () => {
it(`should drop unused extension`, async () => {
mocks.database.getExtensionVersions.mockResolvedValue([
{
name: DatabaseExtension.VECTORS,
name: DatabaseExtension.Vectors,
installedVersion: minVersionInRange,
availableVersion: minVersionInRange,
},
{
name: DatabaseExtension.VECTORCHORD,
name: DatabaseExtension.VectorChord,
installedVersion: null,
availableVersion: minVersionInRange,
},
@@ -369,19 +369,19 @@ describe(DatabaseService.name, () => {
await expect(sut.onBootstrap()).resolves.toBeUndefined();
expect(mocks.database.createExtension).toHaveBeenCalledExactlyOnceWith(DatabaseExtension.VECTORCHORD);
expect(mocks.database.dropExtension).toHaveBeenCalledExactlyOnceWith(DatabaseExtension.VECTORS);
expect(mocks.database.createExtension).toHaveBeenCalledExactlyOnceWith(DatabaseExtension.VectorChord);
expect(mocks.database.dropExtension).toHaveBeenCalledExactlyOnceWith(DatabaseExtension.Vectors);
});
it(`should warn if unused extension could not be dropped`, async () => {
mocks.database.getExtensionVersions.mockResolvedValue([
{
name: DatabaseExtension.VECTORS,
name: DatabaseExtension.Vectors,
installedVersion: minVersionInRange,
availableVersion: minVersionInRange,
},
{
name: DatabaseExtension.VECTORCHORD,
name: DatabaseExtension.VectorChord,
installedVersion: null,
availableVersion: minVersionInRange,
},
@@ -390,8 +390,8 @@ describe(DatabaseService.name, () => {
await expect(sut.onBootstrap()).resolves.toBeUndefined();
expect(mocks.database.createExtension).toHaveBeenCalledExactlyOnceWith(DatabaseExtension.VECTORCHORD);
expect(mocks.database.dropExtension).toHaveBeenCalledExactlyOnceWith(DatabaseExtension.VECTORS);
expect(mocks.database.createExtension).toHaveBeenCalledExactlyOnceWith(DatabaseExtension.VectorChord);
expect(mocks.database.dropExtension).toHaveBeenCalledExactlyOnceWith(DatabaseExtension.Vectors);
expect(mocks.logger.warn).toHaveBeenCalledTimes(1);
expect(mocks.logger.warn.mock.calls[0][0]).toContain('DROP EXTENSION vectors');
});
@@ -399,12 +399,12 @@ describe(DatabaseService.name, () => {
it(`should not try to drop pgvector when using vectorchord`, async () => {
mocks.database.getExtensionVersions.mockResolvedValue([
{
name: DatabaseExtension.VECTOR,
name: DatabaseExtension.Vector,
installedVersion: minVersionInRange,
availableVersion: minVersionInRange,
},
{
name: DatabaseExtension.VECTORCHORD,
name: DatabaseExtension.VectorChord,
installedVersion: minVersionInRange,
availableVersion: minVersionInRange,
},

View File

@@ -100,7 +100,7 @@ export class DatabaseService extends BaseService {
}
try {
await this.databaseRepository.reindexVectorsIfNeeded([VectorIndex.CLIP, VectorIndex.FACE]);
await this.databaseRepository.reindexVectorsIfNeeded([VectorIndex.Clip, VectorIndex.Face]);
} catch (error) {
this.logger.warn(
'Could not run vector reindexing checks. If the extension was updated, please restart the Postgres instance. If you are upgrading directly from a version below 1.107.2, please upgrade to 1.107.2 first.',
@@ -109,7 +109,7 @@ export class DatabaseService extends BaseService {
}
for (const { name: dbName, installedVersion } of extensionVersions) {
const isDepended = dbName === DatabaseExtension.VECTOR && extension === DatabaseExtension.VECTORCHORD;
const isDepended = dbName === DatabaseExtension.Vector && extension === DatabaseExtension.VectorChord;
if (dbName !== extension && installedVersion && !isDepended) {
await this.dropExtension(dbName);
}
@@ -120,8 +120,8 @@ export class DatabaseService extends BaseService {
await this.databaseRepository.runMigrations();
}
await Promise.all([
this.databaseRepository.prewarm(VectorIndex.CLIP),
this.databaseRepository.prewarm(VectorIndex.FACE),
this.databaseRepository.prewarm(VectorIndex.Clip),
this.databaseRepository.prewarm(VectorIndex.Face),
]);
});
}

View File

@@ -17,15 +17,15 @@ export class DownloadService extends BaseService {
if (dto.assetIds) {
const assetIds = dto.assetIds;
await this.requireAccess({ auth, permission: Permission.ASSET_DOWNLOAD, ids: assetIds });
await this.requireAccess({ auth, permission: Permission.AssetDownload, ids: assetIds });
assets = this.downloadRepository.downloadAssetIds(assetIds);
} else if (dto.albumId) {
const albumId = dto.albumId;
await this.requireAccess({ auth, permission: Permission.ALBUM_DOWNLOAD, ids: [albumId] });
await this.requireAccess({ auth, permission: Permission.AlbumDownload, ids: [albumId] });
assets = this.downloadRepository.downloadAlbumId(albumId);
} else if (dto.userId) {
const userId = dto.userId;
await this.requireAccess({ auth, permission: Permission.TIMELINE_DOWNLOAD, ids: [userId] });
await this.requireAccess({ auth, permission: Permission.TimelineDownload, ids: [userId] });
assets = this.downloadRepository.downloadUserId(userId);
} else {
throw new BadRequestException('assetIds, albumId, or userId is required');
@@ -81,7 +81,7 @@ export class DownloadService extends BaseService {
}
async downloadArchive(auth: AuthDto, dto: AssetIdsDto): Promise<ImmichReadStream> {
await this.requireAccess({ auth, permission: Permission.ASSET_DOWNLOAD, ids: dto.assetIds });
await this.requireAccess({ auth, permission: Permission.AssetDownload, ids: dto.assetIds });
const zip = this.storageRepository.createZipStream();
const assets = await this.assetRepository.getByIds(dto.assetIds);

View File

@@ -12,10 +12,10 @@ const hasEmbedding = {
id: 'asset-1',
ownerId: 'user-id',
stackId: null,
type: AssetType.IMAGE,
type: AssetType.Image,
duplicateId: null,
embedding: '[1, 2, 3, 4]',
visibility: AssetVisibility.TIMELINE,
visibility: AssetVisibility.Timeline,
};
const hasDupe = {
@@ -78,7 +78,7 @@ describe(SearchService.name, () => {
},
});
await expect(sut.handleQueueSearchDuplicates({})).resolves.toBe(JobStatus.SKIPPED);
await expect(sut.handleQueueSearchDuplicates({})).resolves.toBe(JobStatus.Skipped);
expect(mocks.job.queue).not.toHaveBeenCalled();
expect(mocks.job.queueAll).not.toHaveBeenCalled();
expect(mocks.systemMetadata.get).toHaveBeenCalled();
@@ -94,7 +94,7 @@ describe(SearchService.name, () => {
},
});
await expect(sut.handleQueueSearchDuplicates({})).resolves.toBe(JobStatus.SKIPPED);
await expect(sut.handleQueueSearchDuplicates({})).resolves.toBe(JobStatus.Skipped);
expect(mocks.job.queue).not.toHaveBeenCalled();
expect(mocks.job.queueAll).not.toHaveBeenCalled();
expect(mocks.systemMetadata.get).toHaveBeenCalled();
@@ -108,7 +108,7 @@ describe(SearchService.name, () => {
expect(mocks.assetJob.streamForSearchDuplicates).toHaveBeenCalledWith(undefined);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.DUPLICATE_DETECTION,
name: JobName.DuplicateDetection,
data: { id: assetStub.image.id },
},
]);
@@ -122,7 +122,7 @@ describe(SearchService.name, () => {
expect(mocks.assetJob.streamForSearchDuplicates).toHaveBeenCalledWith(true);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.DUPLICATE_DETECTION,
name: JobName.DuplicateDetection,
data: { id: assetStub.image.id },
},
]);
@@ -154,7 +154,7 @@ describe(SearchService.name, () => {
const result = await sut.handleSearchDuplicates({ id });
expect(result).toBe(JobStatus.SKIPPED);
expect(result).toBe(JobStatus.Skipped);
expect(mocks.assetJob.getForSearchDuplicatesJob).not.toHaveBeenCalled();
});
@@ -171,7 +171,7 @@ describe(SearchService.name, () => {
const result = await sut.handleSearchDuplicates({ id });
expect(result).toBe(JobStatus.SKIPPED);
expect(result).toBe(JobStatus.Skipped);
expect(mocks.assetJob.getForSearchDuplicatesJob).not.toHaveBeenCalled();
});
@@ -180,7 +180,7 @@ describe(SearchService.name, () => {
const result = await sut.handleSearchDuplicates({ id: assetStub.image.id });
expect(result).toBe(JobStatus.FAILED);
expect(result).toBe(JobStatus.Failed);
expect(mocks.logger.error).toHaveBeenCalledWith(`Asset ${assetStub.image.id} not found`);
});
@@ -190,7 +190,7 @@ describe(SearchService.name, () => {
const result = await sut.handleSearchDuplicates({ id });
expect(result).toBe(JobStatus.SKIPPED);
expect(result).toBe(JobStatus.Skipped);
expect(mocks.logger.debug).toHaveBeenCalledWith(`Asset ${id} is part of a stack, skipping`);
});
@@ -198,12 +198,12 @@ describe(SearchService.name, () => {
const id = assetStub.livePhotoMotionAsset.id;
mocks.assetJob.getForSearchDuplicatesJob.mockResolvedValue({
...hasEmbedding,
visibility: AssetVisibility.HIDDEN,
visibility: AssetVisibility.Hidden,
});
const result = await sut.handleSearchDuplicates({ id });
expect(result).toBe(JobStatus.SKIPPED);
expect(result).toBe(JobStatus.Skipped);
expect(mocks.logger.debug).toHaveBeenCalledWith(`Asset ${id} is not visible, skipping`);
});
@@ -212,7 +212,7 @@ describe(SearchService.name, () => {
const result = await sut.handleSearchDuplicates({ id: assetStub.image.id });
expect(result).toBe(JobStatus.FAILED);
expect(result).toBe(JobStatus.Failed);
expect(mocks.logger.debug).toHaveBeenCalledWith(`Asset ${assetStub.image.id} is missing embedding`);
});
@@ -226,7 +226,7 @@ describe(SearchService.name, () => {
const result = await sut.handleSearchDuplicates({ id: hasEmbedding.id });
expect(result).toBe(JobStatus.SUCCESS);
expect(result).toBe(JobStatus.Success);
expect(mocks.duplicateRepository.search).toHaveBeenCalledWith({
assetId: hasEmbedding.id,
embedding: hasEmbedding.embedding,
@@ -253,7 +253,7 @@ describe(SearchService.name, () => {
const result = await sut.handleSearchDuplicates({ id: hasEmbedding.id });
expect(result).toBe(JobStatus.SUCCESS);
expect(result).toBe(JobStatus.Success);
expect(mocks.duplicateRepository.search).toHaveBeenCalledWith({
assetId: hasEmbedding.id,
embedding: hasEmbedding.embedding,
@@ -277,7 +277,7 @@ describe(SearchService.name, () => {
const result = await sut.handleSearchDuplicates({ id: hasDupe.id });
expect(result).toBe(JobStatus.SUCCESS);
expect(result).toBe(JobStatus.Success);
expect(mocks.asset.update).toHaveBeenCalledWith({ id: hasDupe.id, duplicateId: null });
expect(mocks.asset.upsertJobStatus).toHaveBeenCalledWith({
assetId: hasDupe.id,

View File

@@ -29,11 +29,11 @@ export class DuplicateService extends BaseService {
await this.duplicateRepository.deleteAll(auth.user.id, dto.ids);
}
@OnJob({ name: JobName.QUEUE_DUPLICATE_DETECTION, queue: QueueName.DUPLICATE_DETECTION })
async handleQueueSearchDuplicates({ force }: JobOf<JobName.QUEUE_DUPLICATE_DETECTION>): Promise<JobStatus> {
@OnJob({ name: JobName.QueueDuplicateDetection, queue: QueueName.DuplicateDetection })
async handleQueueSearchDuplicates({ force }: JobOf<JobName.QueueDuplicateDetection>): Promise<JobStatus> {
const { machineLearning } = await this.getConfig({ withCache: false });
if (!isDuplicateDetectionEnabled(machineLearning)) {
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
let jobs: JobItem[] = [];
@@ -44,7 +44,7 @@ export class DuplicateService extends BaseService {
const assets = this.assetJobRepository.streamForSearchDuplicates(force);
for await (const asset of assets) {
jobs.push({ name: JobName.DUPLICATE_DETECTION, data: { id: asset.id } });
jobs.push({ name: JobName.DuplicateDetection, data: { id: asset.id } });
if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
await queueAll();
}
@@ -52,40 +52,40 @@ export class DuplicateService extends BaseService {
await queueAll();
return JobStatus.SUCCESS;
return JobStatus.Success;
}
@OnJob({ name: JobName.DUPLICATE_DETECTION, queue: QueueName.DUPLICATE_DETECTION })
async handleSearchDuplicates({ id }: JobOf<JobName.DUPLICATE_DETECTION>): Promise<JobStatus> {
@OnJob({ name: JobName.DuplicateDetection, queue: QueueName.DuplicateDetection })
async handleSearchDuplicates({ id }: JobOf<JobName.DuplicateDetection>): Promise<JobStatus> {
const { machineLearning } = await this.getConfig({ withCache: true });
if (!isDuplicateDetectionEnabled(machineLearning)) {
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
const asset = await this.assetJobRepository.getForSearchDuplicatesJob(id);
if (!asset) {
this.logger.error(`Asset ${id} not found`);
return JobStatus.FAILED;
return JobStatus.Failed;
}
if (asset.stackId) {
this.logger.debug(`Asset ${id} is part of a stack, skipping`);
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
if (asset.visibility === AssetVisibility.HIDDEN) {
if (asset.visibility === AssetVisibility.Hidden) {
this.logger.debug(`Asset ${id} is not visible, skipping`);
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
if (asset.visibility === AssetVisibility.LOCKED) {
if (asset.visibility === AssetVisibility.Locked) {
this.logger.debug(`Asset ${id} is locked, skipping`);
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
if (!asset.embedding) {
this.logger.debug(`Asset ${id} is missing embedding`);
return JobStatus.FAILED;
return JobStatus.Failed;
}
const duplicateAssets = await this.duplicateRepository.search({
@@ -110,7 +110,7 @@ export class DuplicateService extends BaseService {
const duplicatesDetectedAt = new Date();
await this.assetRepository.upsertJobStatus(...assetIds.map((assetId) => ({ assetId, duplicatesDetectedAt })));
return JobStatus.SUCCESS;
return JobStatus.Success;
}
private async updateDuplicates(

View File

@@ -13,7 +13,7 @@ describe(JobService.name, () => {
beforeEach(() => {
({ sut, mocks } = newTestService(JobService, {}));
mocks.config.getWorker.mockReturnValue(ImmichWorker.MICROSERVICES);
mocks.config.getWorker.mockReturnValue(ImmichWorker.Microservices);
});
it('should work', () => {
@@ -25,10 +25,10 @@ describe(JobService.name, () => {
sut.onConfigUpdate({ newConfig: defaults, oldConfig: {} as SystemConfig });
expect(mocks.job.setConcurrency).toHaveBeenCalledTimes(15);
expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(5, QueueName.FACIAL_RECOGNITION, 1);
expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(7, QueueName.DUPLICATE_DETECTION, 1);
expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(8, QueueName.BACKGROUND_TASK, 5);
expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(9, QueueName.STORAGE_TEMPLATE_MIGRATION, 1);
expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(5, QueueName.FacialRecognition, 1);
expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(7, QueueName.DuplicateDetection, 1);
expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(8, QueueName.BackgroundTask, 5);
expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(9, QueueName.StorageTemplateMigration, 1);
});
});
@@ -37,16 +37,16 @@ describe(JobService.name, () => {
await sut.handleNightlyJobs();
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{ name: JobName.ASSET_DELETION_CHECK },
{ name: JobName.USER_DELETE_CHECK },
{ name: JobName.PERSON_CLEANUP },
{ name: JobName.MEMORIES_CLEANUP },
{ name: JobName.CLEAN_OLD_SESSION_TOKENS },
{ name: JobName.CLEAN_OLD_AUDIT_LOGS },
{ name: JobName.MEMORIES_CREATE },
{ name: JobName.USER_SYNC_USAGE },
{ name: JobName.QUEUE_GENERATE_THUMBNAILS, data: { force: false } },
{ name: JobName.QUEUE_FACIAL_RECOGNITION, data: { force: false, nightly: true } },
{ name: JobName.AssetDeletionCheck },
{ name: JobName.UserDeleteCheck },
{ name: JobName.PersonCleanup },
{ name: JobName.MemoriesCleanup },
{ name: JobName.CleanOldSessionTokens },
{ name: JobName.CleanOldAuditLogs },
{ name: JobName.MemoriesCreate },
{ name: JobName.UserSyncUsage },
{ name: JobName.QueueGenerateThumbnails, data: { force: false } },
{ name: JobName.QueueFacialRecognition, data: { force: false, nightly: true } },
]);
});
});
@@ -82,49 +82,49 @@ describe(JobService.name, () => {
};
await expect(sut.getAllJobsStatus()).resolves.toEqual({
[QueueName.BACKGROUND_TASK]: expectedJobStatus,
[QueueName.DUPLICATE_DETECTION]: expectedJobStatus,
[QueueName.SMART_SEARCH]: expectedJobStatus,
[QueueName.METADATA_EXTRACTION]: expectedJobStatus,
[QueueName.SEARCH]: expectedJobStatus,
[QueueName.STORAGE_TEMPLATE_MIGRATION]: expectedJobStatus,
[QueueName.MIGRATION]: expectedJobStatus,
[QueueName.THUMBNAIL_GENERATION]: expectedJobStatus,
[QueueName.VIDEO_CONVERSION]: expectedJobStatus,
[QueueName.FACE_DETECTION]: expectedJobStatus,
[QueueName.FACIAL_RECOGNITION]: expectedJobStatus,
[QueueName.SIDECAR]: expectedJobStatus,
[QueueName.LIBRARY]: expectedJobStatus,
[QueueName.NOTIFICATION]: expectedJobStatus,
[QueueName.BACKUP_DATABASE]: expectedJobStatus,
[QueueName.BackgroundTask]: expectedJobStatus,
[QueueName.DuplicateDetection]: expectedJobStatus,
[QueueName.SmartSearch]: expectedJobStatus,
[QueueName.MetadataExtraction]: expectedJobStatus,
[QueueName.Search]: expectedJobStatus,
[QueueName.StorageTemplateMigration]: expectedJobStatus,
[QueueName.Migration]: expectedJobStatus,
[QueueName.ThumbnailGeneration]: expectedJobStatus,
[QueueName.VideoConversion]: expectedJobStatus,
[QueueName.FaceDetection]: expectedJobStatus,
[QueueName.FacialRecognition]: expectedJobStatus,
[QueueName.Sidecar]: expectedJobStatus,
[QueueName.Library]: expectedJobStatus,
[QueueName.Notification]: expectedJobStatus,
[QueueName.BackupDatabase]: expectedJobStatus,
});
});
});
describe('handleCommand', () => {
it('should handle a pause command', async () => {
await sut.handleCommand(QueueName.METADATA_EXTRACTION, { command: JobCommand.PAUSE, force: false });
await sut.handleCommand(QueueName.MetadataExtraction, { command: JobCommand.Pause, force: false });
expect(mocks.job.pause).toHaveBeenCalledWith(QueueName.METADATA_EXTRACTION);
expect(mocks.job.pause).toHaveBeenCalledWith(QueueName.MetadataExtraction);
});
it('should handle a resume command', async () => {
await sut.handleCommand(QueueName.METADATA_EXTRACTION, { command: JobCommand.RESUME, force: false });
await sut.handleCommand(QueueName.MetadataExtraction, { command: JobCommand.Resume, force: false });
expect(mocks.job.resume).toHaveBeenCalledWith(QueueName.METADATA_EXTRACTION);
expect(mocks.job.resume).toHaveBeenCalledWith(QueueName.MetadataExtraction);
});
it('should handle an empty command', async () => {
await sut.handleCommand(QueueName.METADATA_EXTRACTION, { command: JobCommand.EMPTY, force: false });
await sut.handleCommand(QueueName.MetadataExtraction, { command: JobCommand.Empty, force: false });
expect(mocks.job.empty).toHaveBeenCalledWith(QueueName.METADATA_EXTRACTION);
expect(mocks.job.empty).toHaveBeenCalledWith(QueueName.MetadataExtraction);
});
it('should not start a job that is already running', async () => {
mocks.job.getQueueStatus.mockResolvedValue({ isActive: true, isPaused: false });
await expect(
sut.handleCommand(QueueName.VIDEO_CONVERSION, { command: JobCommand.START, force: false }),
sut.handleCommand(QueueName.VideoConversion, { command: JobCommand.Start, force: false }),
).rejects.toBeInstanceOf(BadRequestException);
expect(mocks.job.queue).not.toHaveBeenCalled();
@@ -134,80 +134,80 @@ describe(JobService.name, () => {
it('should handle a start video conversion command', async () => {
mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
await sut.handleCommand(QueueName.VIDEO_CONVERSION, { command: JobCommand.START, force: false });
await sut.handleCommand(QueueName.VideoConversion, { command: JobCommand.Start, force: false });
expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QUEUE_VIDEO_CONVERSION, data: { force: false } });
expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QueueVideoConversion, data: { force: false } });
});
it('should handle a start storage template migration command', async () => {
mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
await sut.handleCommand(QueueName.STORAGE_TEMPLATE_MIGRATION, { command: JobCommand.START, force: false });
await sut.handleCommand(QueueName.StorageTemplateMigration, { command: JobCommand.Start, force: false });
expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.STORAGE_TEMPLATE_MIGRATION });
expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.StorageTemplateMigration });
});
it('should handle a start smart search command', async () => {
mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
await sut.handleCommand(QueueName.SMART_SEARCH, { command: JobCommand.START, force: false });
await sut.handleCommand(QueueName.SmartSearch, { command: JobCommand.Start, force: false });
expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QUEUE_SMART_SEARCH, data: { force: false } });
expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QueueSmartSearch, data: { force: false } });
});
it('should handle a start metadata extraction command', async () => {
mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
await sut.handleCommand(QueueName.METADATA_EXTRACTION, { command: JobCommand.START, force: false });
await sut.handleCommand(QueueName.MetadataExtraction, { command: JobCommand.Start, force: false });
expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QUEUE_METADATA_EXTRACTION, data: { force: false } });
expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QueueMetadataExtraction, data: { force: false } });
});
it('should handle a start sidecar command', async () => {
mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
await sut.handleCommand(QueueName.SIDECAR, { command: JobCommand.START, force: false });
await sut.handleCommand(QueueName.Sidecar, { command: JobCommand.Start, force: false });
expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QUEUE_SIDECAR, data: { force: false } });
expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QueueSidecar, data: { force: false } });
});
it('should handle a start thumbnail generation command', async () => {
mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
await sut.handleCommand(QueueName.THUMBNAIL_GENERATION, { command: JobCommand.START, force: false });
await sut.handleCommand(QueueName.ThumbnailGeneration, { command: JobCommand.Start, force: false });
expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QUEUE_GENERATE_THUMBNAILS, data: { force: false } });
expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QueueGenerateThumbnails, data: { force: false } });
});
it('should handle a start face detection command', async () => {
mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
await sut.handleCommand(QueueName.FACE_DETECTION, { command: JobCommand.START, force: false });
await sut.handleCommand(QueueName.FaceDetection, { command: JobCommand.Start, force: false });
expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QUEUE_FACE_DETECTION, data: { force: false } });
expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QueueFaceDetection, data: { force: false } });
});
it('should handle a start facial recognition command', async () => {
mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
await sut.handleCommand(QueueName.FACIAL_RECOGNITION, { command: JobCommand.START, force: false });
await sut.handleCommand(QueueName.FacialRecognition, { command: JobCommand.Start, force: false });
expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QUEUE_FACIAL_RECOGNITION, data: { force: false } });
expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QueueFacialRecognition, data: { force: false } });
});
it('should handle a start backup database command', async () => {
mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
await sut.handleCommand(QueueName.BACKUP_DATABASE, { command: JobCommand.START, force: false });
await sut.handleCommand(QueueName.BackupDatabase, { command: JobCommand.Start, force: false });
expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.BACKUP_DATABASE, data: { force: false } });
expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.BackupDatabase, data: { force: false } });
});
it('should throw a bad request when an invalid queue is used', async () => {
mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
await expect(
sut.handleCommand(QueueName.BACKGROUND_TASK, { command: JobCommand.START, force: false }),
sut.handleCommand(QueueName.BackgroundTask, { command: JobCommand.Start, force: false }),
).rejects.toBeInstanceOf(BadRequestException);
expect(mocks.job.queue).not.toHaveBeenCalled();
@@ -217,10 +217,10 @@ describe(JobService.name, () => {
describe('onJobStart', () => {
it('should process a successful job', async () => {
mocks.job.run.mockResolvedValue(JobStatus.SUCCESS);
mocks.job.run.mockResolvedValue(JobStatus.Success);
await sut.onJobStart(QueueName.BACKGROUND_TASK, {
name: JobName.DELETE_FILES,
await sut.onJobStart(QueueName.BackgroundTask, {
name: JobName.DeleteFiles,
data: { files: ['path/to/file'] },
});
@@ -232,55 +232,55 @@ describe(JobService.name, () => {
const tests: Array<{ item: JobItem; jobs: JobName[]; stub?: any }> = [
{
item: { name: JobName.SIDECAR_SYNC, data: { id: 'asset-1' } },
jobs: [JobName.METADATA_EXTRACTION],
item: { name: JobName.SidecarSync, data: { id: 'asset-1' } },
jobs: [JobName.MetadataExtraction],
},
{
item: { name: JobName.SIDECAR_DISCOVERY, data: { id: 'asset-1' } },
jobs: [JobName.METADATA_EXTRACTION],
item: { name: JobName.SidecarDiscovery, data: { id: 'asset-1' } },
jobs: [JobName.MetadataExtraction],
},
{
item: { name: JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE, data: { id: 'asset-1', source: 'upload' } },
jobs: [JobName.GENERATE_THUMBNAILS],
item: { name: JobName.StorageTemplateMigrationSingle, data: { id: 'asset-1', source: 'upload' } },
jobs: [JobName.GenerateThumbnails],
},
{
item: { name: JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE, data: { id: 'asset-1' } },
item: { name: JobName.StorageTemplateMigrationSingle, data: { id: 'asset-1' } },
jobs: [],
},
{
item: { name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id: 'asset-1' } },
item: { name: JobName.GeneratePersonThumbnail, data: { id: 'asset-1' } },
jobs: [],
},
{
item: { name: JobName.GENERATE_THUMBNAILS, data: { id: 'asset-1' } },
item: { name: JobName.GenerateThumbnails, data: { id: 'asset-1' } },
jobs: [],
stub: [assetStub.image],
},
{
item: { name: JobName.GENERATE_THUMBNAILS, data: { id: 'asset-1' } },
item: { name: JobName.GenerateThumbnails, data: { id: 'asset-1' } },
jobs: [],
stub: [assetStub.video],
},
{
item: { name: JobName.GENERATE_THUMBNAILS, data: { id: 'asset-1', source: 'upload' } },
jobs: [JobName.SMART_SEARCH, JobName.FACE_DETECTION],
item: { name: JobName.GenerateThumbnails, data: { id: 'asset-1', source: 'upload' } },
jobs: [JobName.SmartSearch, JobName.FaceDetection],
stub: [assetStub.livePhotoStillAsset],
},
{
item: { name: JobName.GENERATE_THUMBNAILS, data: { id: 'asset-1', source: 'upload' } },
jobs: [JobName.SMART_SEARCH, JobName.FACE_DETECTION, JobName.VIDEO_CONVERSION],
item: { name: JobName.GenerateThumbnails, data: { id: 'asset-1', source: 'upload' } },
jobs: [JobName.SmartSearch, JobName.FaceDetection, JobName.VideoConversion],
stub: [assetStub.video],
},
{
item: { name: JobName.SMART_SEARCH, data: { id: 'asset-1' } },
item: { name: JobName.SmartSearch, data: { id: 'asset-1' } },
jobs: [],
},
{
item: { name: JobName.FACE_DETECTION, data: { id: 'asset-1' } },
item: { name: JobName.FaceDetection, data: { id: 'asset-1' } },
jobs: [],
},
{
item: { name: JobName.FACIAL_RECOGNITION, data: { id: 'asset-1' } },
item: { name: JobName.FacialRecognition, data: { id: 'asset-1' } },
jobs: [],
},
];
@@ -291,9 +291,9 @@ describe(JobService.name, () => {
mocks.asset.getByIdsWithAllRelationsButStacks.mockResolvedValue(stub);
}
mocks.job.run.mockResolvedValue(JobStatus.SUCCESS);
mocks.job.run.mockResolvedValue(JobStatus.Success);
await sut.onJobStart(QueueName.BACKGROUND_TASK, item);
await sut.onJobStart(QueueName.BackgroundTask, item);
if (jobs.length > 1) {
expect(mocks.job.queueAll).toHaveBeenCalledWith(
@@ -308,9 +308,9 @@ describe(JobService.name, () => {
});
it(`should not queue any jobs when ${item.name} fails`, async () => {
mocks.job.run.mockResolvedValue(JobStatus.FAILED);
mocks.job.run.mockResolvedValue(JobStatus.Failed);
await sut.onJobStart(QueueName.BACKGROUND_TASK, item);
await sut.onJobStart(QueueName.BackgroundTask, item);
expect(mocks.job.queueAll).not.toHaveBeenCalled();
});

View File

@@ -27,28 +27,28 @@ import { handlePromiseError } from 'src/utils/misc';
const asJobItem = (dto: JobCreateDto): JobItem => {
switch (dto.name) {
case ManualJobName.TAG_CLEANUP: {
return { name: JobName.TAG_CLEANUP };
case ManualJobName.TagCleanup: {
return { name: JobName.TagCleanup };
}
case ManualJobName.PERSON_CLEANUP: {
return { name: JobName.PERSON_CLEANUP };
case ManualJobName.PersonCleanup: {
return { name: JobName.PersonCleanup };
}
case ManualJobName.USER_CLEANUP: {
return { name: JobName.USER_DELETE_CHECK };
case ManualJobName.UserCleanup: {
return { name: JobName.UserDeleteCheck };
}
case ManualJobName.MEMORY_CLEANUP: {
return { name: JobName.MEMORIES_CLEANUP };
case ManualJobName.MemoryCleanup: {
return { name: JobName.MemoriesCleanup };
}
case ManualJobName.MEMORY_CREATE: {
return { name: JobName.MEMORIES_CREATE };
case ManualJobName.MemoryCreate: {
return { name: JobName.MemoriesCreate };
}
case ManualJobName.BACKUP_DATABASE: {
return { name: JobName.BACKUP_DATABASE };
case ManualJobName.BackupDatabase: {
return { name: JobName.BackupDatabase };
}
default: {
@@ -69,7 +69,7 @@ export class JobService extends BaseService {
@OnEvent({ name: 'ConfigInit' })
async onConfigInit({ newConfig: config }: ArgOf<'ConfigInit'>) {
if (this.worker === ImmichWorker.MICROSERVICES) {
if (this.worker === ImmichWorker.Microservices) {
this.updateQueueConcurrency(config);
return;
}
@@ -89,7 +89,7 @@ export class JobService extends BaseService {
@OnEvent({ name: 'ConfigUpdate', server: true })
onConfigUpdate({ newConfig: config }: ArgOf<'ConfigUpdate'>) {
if (this.worker === ImmichWorker.MICROSERVICES) {
if (this.worker === ImmichWorker.Microservices) {
this.updateQueueConcurrency(config);
return;
}
@@ -104,7 +104,7 @@ export class JobService extends BaseService {
@OnEvent({ name: 'AppBootstrap', priority: BootstrapEventPriority.JobService })
onBootstrap() {
this.jobRepository.setup(this.services);
if (this.worker === ImmichWorker.MICROSERVICES) {
if (this.worker === ImmichWorker.Microservices) {
this.jobRepository.startWorkers();
}
}
@@ -133,28 +133,28 @@ export class JobService extends BaseService {
this.logger.debug(`Handling command: queue=${queueName},command=${dto.command},force=${dto.force}`);
switch (dto.command) {
case JobCommand.START: {
case JobCommand.Start: {
await this.start(queueName, dto);
break;
}
case JobCommand.PAUSE: {
case JobCommand.Pause: {
await this.jobRepository.pause(queueName);
break;
}
case JobCommand.RESUME: {
case JobCommand.Resume: {
await this.jobRepository.resume(queueName);
break;
}
case JobCommand.EMPTY: {
case JobCommand.Empty: {
await this.jobRepository.empty(queueName);
break;
}
case JobCommand.CLEAR_FAILED: {
const failedJobs = await this.jobRepository.clear(queueName, QueueCleanType.FAILED);
case JobCommand.ClearFailed: {
const failedJobs = await this.jobRepository.clear(queueName, QueueCleanType.Failed);
this.logger.debug(`Cleared failed jobs: ${failedJobs}`);
break;
}
@@ -189,52 +189,52 @@ export class JobService extends BaseService {
this.telemetryRepository.jobs.addToCounter(`immich.queues.${snakeCase(name)}.started`, 1);
switch (name) {
case QueueName.VIDEO_CONVERSION: {
return this.jobRepository.queue({ name: JobName.QUEUE_VIDEO_CONVERSION, data: { force } });
case QueueName.VideoConversion: {
return this.jobRepository.queue({ name: JobName.QueueVideoConversion, data: { force } });
}
case QueueName.STORAGE_TEMPLATE_MIGRATION: {
return this.jobRepository.queue({ name: JobName.STORAGE_TEMPLATE_MIGRATION });
case QueueName.StorageTemplateMigration: {
return this.jobRepository.queue({ name: JobName.StorageTemplateMigration });
}
case QueueName.MIGRATION: {
return this.jobRepository.queue({ name: JobName.QUEUE_MIGRATION });
case QueueName.Migration: {
return this.jobRepository.queue({ name: JobName.QueueMigration });
}
case QueueName.SMART_SEARCH: {
return this.jobRepository.queue({ name: JobName.QUEUE_SMART_SEARCH, data: { force } });
case QueueName.SmartSearch: {
return this.jobRepository.queue({ name: JobName.QueueSmartSearch, data: { force } });
}
case QueueName.DUPLICATE_DETECTION: {
return this.jobRepository.queue({ name: JobName.QUEUE_DUPLICATE_DETECTION, data: { force } });
case QueueName.DuplicateDetection: {
return this.jobRepository.queue({ name: JobName.QueueDuplicateDetection, data: { force } });
}
case QueueName.METADATA_EXTRACTION: {
return this.jobRepository.queue({ name: JobName.QUEUE_METADATA_EXTRACTION, data: { force } });
case QueueName.MetadataExtraction: {
return this.jobRepository.queue({ name: JobName.QueueMetadataExtraction, data: { force } });
}
case QueueName.SIDECAR: {
return this.jobRepository.queue({ name: JobName.QUEUE_SIDECAR, data: { force } });
case QueueName.Sidecar: {
return this.jobRepository.queue({ name: JobName.QueueSidecar, data: { force } });
}
case QueueName.THUMBNAIL_GENERATION: {
return this.jobRepository.queue({ name: JobName.QUEUE_GENERATE_THUMBNAILS, data: { force } });
case QueueName.ThumbnailGeneration: {
return this.jobRepository.queue({ name: JobName.QueueGenerateThumbnails, data: { force } });
}
case QueueName.FACE_DETECTION: {
return this.jobRepository.queue({ name: JobName.QUEUE_FACE_DETECTION, data: { force } });
case QueueName.FaceDetection: {
return this.jobRepository.queue({ name: JobName.QueueFaceDetection, data: { force } });
}
case QueueName.FACIAL_RECOGNITION: {
return this.jobRepository.queue({ name: JobName.QUEUE_FACIAL_RECOGNITION, data: { force } });
case QueueName.FacialRecognition: {
return this.jobRepository.queue({ name: JobName.QueueFacialRecognition, data: { force } });
}
case QueueName.LIBRARY: {
return this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_SCAN_ALL, data: { force } });
case QueueName.Library: {
return this.jobRepository.queue({ name: JobName.LibraryQueueScanAll, data: { force } });
}
case QueueName.BACKUP_DATABASE: {
return this.jobRepository.queue({ name: JobName.BACKUP_DATABASE, data: { force } });
case QueueName.BackupDatabase: {
return this.jobRepository.queue({ name: JobName.BackupDatabase, data: { force } });
}
default: {
@@ -251,7 +251,7 @@ export class JobService extends BaseService {
const status = await this.jobRepository.run(job);
const jobMetric = `immich.jobs.${job.name.replaceAll('-', '_')}.${status}`;
this.telemetryRepository.jobs.addToCounter(jobMetric, 1);
if (status === JobStatus.SUCCESS || status == JobStatus.SKIPPED) {
if (status === JobStatus.Success || status == JobStatus.Skipped) {
await this.onDone(job);
}
} catch (error: Error | any) {
@@ -263,10 +263,10 @@ export class JobService extends BaseService {
private isConcurrentQueue(name: QueueName): name is ConcurrentQueueName {
return ![
QueueName.FACIAL_RECOGNITION,
QueueName.STORAGE_TEMPLATE_MIGRATION,
QueueName.DUPLICATE_DETECTION,
QueueName.BACKUP_DATABASE,
QueueName.FacialRecognition,
QueueName.StorageTemplateMigration,
QueueName.DuplicateDetection,
QueueName.BackupDatabase,
].includes(name);
}
@@ -276,29 +276,29 @@ export class JobService extends BaseService {
if (config.nightlyTasks.databaseCleanup) {
jobs.push(
{ name: JobName.ASSET_DELETION_CHECK },
{ name: JobName.USER_DELETE_CHECK },
{ name: JobName.PERSON_CLEANUP },
{ name: JobName.MEMORIES_CLEANUP },
{ name: JobName.CLEAN_OLD_SESSION_TOKENS },
{ name: JobName.CLEAN_OLD_AUDIT_LOGS },
{ name: JobName.AssetDeletionCheck },
{ name: JobName.UserDeleteCheck },
{ name: JobName.PersonCleanup },
{ name: JobName.MemoriesCleanup },
{ name: JobName.CleanOldSessionTokens },
{ name: JobName.CleanOldAuditLogs },
);
}
if (config.nightlyTasks.generateMemories) {
jobs.push({ name: JobName.MEMORIES_CREATE });
jobs.push({ name: JobName.MemoriesCreate });
}
if (config.nightlyTasks.syncQuotaUsage) {
jobs.push({ name: JobName.USER_SYNC_USAGE });
jobs.push({ name: JobName.UserSyncUsage });
}
if (config.nightlyTasks.missingThumbnails) {
jobs.push({ name: JobName.QUEUE_GENERATE_THUMBNAILS, data: { force: false } });
jobs.push({ name: JobName.QueueGenerateThumbnails, data: { force: false } });
}
if (config.nightlyTasks.clusterNewFaces) {
jobs.push({ name: JobName.QUEUE_FACIAL_RECOGNITION, data: { force: false, nightly: true } });
jobs.push({ name: JobName.QueueFacialRecognition, data: { force: false, nightly: true } });
}
await this.jobRepository.queueAll(jobs);
@@ -309,28 +309,28 @@ export class JobService extends BaseService {
*/
private async onDone(item: JobItem) {
switch (item.name) {
case JobName.SIDECAR_SYNC:
case JobName.SIDECAR_DISCOVERY: {
await this.jobRepository.queue({ name: JobName.METADATA_EXTRACTION, data: item.data });
case JobName.SidecarSync:
case JobName.SidecarDiscovery: {
await this.jobRepository.queue({ name: JobName.MetadataExtraction, data: item.data });
break;
}
case JobName.SIDECAR_WRITE: {
case JobName.SidecarWrite: {
await this.jobRepository.queue({
name: JobName.METADATA_EXTRACTION,
name: JobName.MetadataExtraction,
data: { id: item.data.id, source: 'sidecar-write' },
});
break;
}
case JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE: {
case JobName.StorageTemplateMigrationSingle: {
if (item.data.source === 'upload' || item.data.source === 'copy') {
await this.jobRepository.queue({ name: JobName.GENERATE_THUMBNAILS, data: item.data });
await this.jobRepository.queue({ name: JobName.GenerateThumbnails, data: item.data });
}
break;
}
case JobName.GENERATE_PERSON_THUMBNAIL: {
case JobName.GeneratePersonThumbnail: {
const { id } = item.data;
const person = await this.personRepository.getById(id);
if (person) {
@@ -339,7 +339,7 @@ export class JobService extends BaseService {
break;
}
case JobName.GENERATE_THUMBNAILS: {
case JobName.GenerateThumbnails: {
if (!item.data.notify && item.data.source !== 'upload') {
break;
}
@@ -351,16 +351,16 @@ export class JobService extends BaseService {
}
const jobs: JobItem[] = [
{ name: JobName.SMART_SEARCH, data: item.data },
{ name: JobName.FACE_DETECTION, data: item.data },
{ name: JobName.SmartSearch, data: item.data },
{ name: JobName.FaceDetection, data: item.data },
];
if (asset.type === AssetType.VIDEO) {
jobs.push({ name: JobName.VIDEO_CONVERSION, data: item.data });
if (asset.type === AssetType.Video) {
jobs.push({ name: JobName.VideoConversion, data: item.data });
}
await this.jobRepository.queueAll(jobs);
if (asset.visibility === AssetVisibility.TIMELINE || asset.visibility === AssetVisibility.ARCHIVE) {
if (asset.visibility === AssetVisibility.Timeline || asset.visibility === AssetVisibility.Archive) {
this.eventRepository.clientSend('on_upload_success', asset.ownerId, mapAsset(asset));
if (asset.exifInfo) {
const exif = asset.exifInfo;
@@ -417,14 +417,14 @@ export class JobService extends BaseService {
break;
}
case JobName.SMART_SEARCH: {
case JobName.SmartSearch: {
if (item.data.source === 'upload') {
await this.jobRepository.queue({ name: JobName.DUPLICATE_DETECTION, data: item.data });
await this.jobRepository.queue({ name: JobName.DuplicateDetection, data: item.data });
}
break;
}
case JobName.USER_DELETION: {
case JobName.UserDeletion: {
this.eventRepository.clientBroadcast('on_user_delete', item.data.id);
break;
}

View File

@@ -27,7 +27,7 @@ describe(LibraryService.name, () => {
({ sut, mocks } = newTestService(LibraryService, {}));
mocks.database.tryLock.mockResolvedValue(true);
mocks.config.getWorker.mockReturnValue(ImmichWorker.MICROSERVICES);
mocks.config.getWorker.mockReturnValue(ImmichWorker.Microservices);
});
it('should work', () => {
@@ -173,7 +173,7 @@ describe(LibraryService.name, () => {
await sut.handleQueueSyncFiles({ id: library.id });
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.LIBRARY_SYNC_FILES,
name: JobName.LibrarySyncFiles,
data: {
libraryId: library.id,
paths: ['/data/user1/photo.jpg'],
@@ -185,7 +185,7 @@ describe(LibraryService.name, () => {
it('should fail when library is not found', async () => {
const library = factory.library({ importPaths: ['/foo', '/bar'] });
await expect(sut.handleQueueSyncFiles({ id: library.id })).resolves.toBe(JobStatus.SKIPPED);
await expect(sut.handleQueueSyncFiles({ id: library.id })).resolves.toBe(JobStatus.Skipped);
});
it('should ignore import paths that do not exist', async () => {
@@ -228,7 +228,7 @@ describe(LibraryService.name, () => {
await sut.handleQueueSyncFiles({ id: library.id });
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.LIBRARY_SYNC_FILES,
name: JobName.LibrarySyncFiles,
data: {
libraryId: library.id,
paths: ['/data/user1/photo.jpg'],
@@ -240,7 +240,7 @@ describe(LibraryService.name, () => {
it("should fail when library can't be found", async () => {
const library = factory.library({ importPaths: ['/foo', '/bar'] });
await expect(sut.handleQueueSyncFiles({ id: library.id })).resolves.toBe(JobStatus.SKIPPED);
await expect(sut.handleQueueSyncFiles({ id: library.id })).resolves.toBe(JobStatus.Skipped);
});
it('should ignore import paths that do not exist', async () => {
@@ -282,7 +282,7 @@ describe(LibraryService.name, () => {
const response = await sut.handleQueueSyncAssets({ id: library.id });
expect(response).toBe(JobStatus.SUCCESS);
expect(response).toBe(JobStatus.Success);
expect(mocks.asset.detectOfflineExternalAssets).toHaveBeenCalledWith(
library.id,
library.importPaths,
@@ -300,7 +300,7 @@ describe(LibraryService.name, () => {
const response = await sut.handleQueueSyncAssets({ id: library.id });
expect(response).toBe(JobStatus.SUCCESS);
expect(response).toBe(JobStatus.Success);
expect(mocks.asset.detectOfflineExternalAssets).not.toHaveBeenCalled();
});
@@ -317,7 +317,7 @@ describe(LibraryService.name, () => {
const response = await sut.handleQueueSyncAssets({ id: library.id });
expect(mocks.job.queue).toBeCalledWith({
name: JobName.LIBRARY_SYNC_ASSETS,
name: JobName.LibrarySyncAssets,
data: {
libraryId: library.id,
importPaths: library.importPaths,
@@ -328,7 +328,7 @@ describe(LibraryService.name, () => {
},
});
expect(response).toBe(JobStatus.SUCCESS);
expect(response).toBe(JobStatus.Success);
expect(mocks.asset.detectOfflineExternalAssets).toHaveBeenCalledWith(
library.id,
library.importPaths,
@@ -337,7 +337,7 @@ describe(LibraryService.name, () => {
});
it("should fail if library can't be found", async () => {
await expect(sut.handleQueueSyncAssets({ id: newUuid() })).resolves.toBe(JobStatus.SKIPPED);
await expect(sut.handleQueueSyncAssets({ id: newUuid() })).resolves.toBe(JobStatus.Skipped);
});
});
@@ -355,7 +355,7 @@ describe(LibraryService.name, () => {
mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.external]);
mocks.storage.stat.mockRejectedValue(new Error('ENOENT, no such file or directory'));
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.Success);
expect(mocks.asset.updateAll).toHaveBeenCalledWith([assetStub.external.id], {
isOffline: true,
@@ -376,7 +376,7 @@ describe(LibraryService.name, () => {
mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.external]);
mocks.storage.stat.mockRejectedValue(new Error('Could not read file'));
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.Success);
expect(mocks.asset.updateAll).toHaveBeenCalledWith([assetStub.external.id], {
isOffline: true,
@@ -397,7 +397,7 @@ describe(LibraryService.name, () => {
mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.trashedOffline]);
mocks.storage.stat.mockRejectedValue(new Error('Could not read file'));
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.Success);
expect(mocks.asset.updateAll).not.toHaveBeenCalled();
});
@@ -415,7 +415,7 @@ describe(LibraryService.name, () => {
mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.trashedOffline]);
mocks.storage.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats);
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.Success);
expect(mocks.asset.updateAll).toHaveBeenCalledWith([assetStub.external.id], {
isOffline: false,
@@ -436,7 +436,7 @@ describe(LibraryService.name, () => {
mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.trashedOffline]);
mocks.storage.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats);
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.Success);
expect(mocks.asset.updateAll).not.toHaveBeenCalled();
@@ -456,7 +456,7 @@ describe(LibraryService.name, () => {
mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.trashedOffline]);
mocks.storage.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats);
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.Success);
expect(mocks.asset.updateAll).not.toHaveBeenCalled();
@@ -476,7 +476,7 @@ describe(LibraryService.name, () => {
mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.external]);
mocks.storage.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats);
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.Success);
expect(mocks.asset.updateAll).not.toHaveBeenCalled();
});
@@ -494,7 +494,7 @@ describe(LibraryService.name, () => {
mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.trashedOffline]);
mocks.storage.stat.mockResolvedValue({ mtime: assetStub.trashedOffline.fileModifiedAt } as Stats);
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.Success);
expect(mocks.asset.updateAll).toHaveBeenCalledWith(
[assetStub.trashedOffline.id],
@@ -523,11 +523,11 @@ describe(LibraryService.name, () => {
mocks.assetJob.getForSyncAssets.mockResolvedValue([assetStub.external]);
mocks.storage.stat.mockResolvedValue({ mtime } as Stats);
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.Success);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.SIDECAR_DISCOVERY,
name: JobName.SidecarDiscovery,
data: {
id: assetStub.external.id,
source: 'upload',
@@ -557,7 +557,7 @@ describe(LibraryService.name, () => {
mocks.asset.createAll.mockResolvedValue([assetStub.image]);
mocks.library.get.mockResolvedValue(library);
await expect(sut.handleSyncFiles(mockLibraryJob)).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleSyncFiles(mockLibraryJob)).resolves.toBe(JobStatus.Success);
expect(mocks.asset.createAll).toHaveBeenCalledWith([
expect.objectContaining({
@@ -565,7 +565,7 @@ describe(LibraryService.name, () => {
libraryId: library.id,
originalPath: '/data/user1/photo.jpg',
deviceId: 'Library Import',
type: AssetType.IMAGE,
type: AssetType.Image,
originalFileName: 'photo.jpg',
isExternal: true,
}),
@@ -573,7 +573,7 @@ describe(LibraryService.name, () => {
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.SIDECAR_DISCOVERY,
name: JobName.SidecarDiscovery,
data: {
id: assetStub.image.id,
source: 'upload',
@@ -592,7 +592,7 @@ describe(LibraryService.name, () => {
mocks.library.get.mockResolvedValue(library);
await expect(sut.handleSyncFiles(mockLibraryJob)).resolves.toBe(JobStatus.FAILED);
await expect(sut.handleSyncFiles(mockLibraryJob)).resolves.toBe(JobStatus.Failed);
expect(mocks.asset.createAll.mock.calls).toEqual([]);
});
@@ -607,7 +607,7 @@ describe(LibraryService.name, () => {
await sut.delete(library.id);
expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.LIBRARY_DELETE, data: { id: library.id } });
expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.LibraryDelete, data: { id: library.id } });
expect(mocks.library.softDelete).toHaveBeenCalledWith(library.id);
});
@@ -620,7 +620,7 @@ describe(LibraryService.name, () => {
await sut.delete(library.id);
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.LIBRARY_DELETE,
name: JobName.LibraryDelete,
data: { id: library.id },
});
@@ -838,11 +838,11 @@ describe(LibraryService.name, () => {
const library2 = factory.library({ deletedAt: new Date() });
mocks.library.getAllDeleted.mockResolvedValue([library1, library2]);
await expect(sut.handleQueueCleanup()).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleQueueCleanup()).resolves.toBe(JobStatus.Success);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{ name: JobName.LIBRARY_DELETE, data: { id: library1.id } },
{ name: JobName.LIBRARY_DELETE, data: { id: library2.id } },
{ name: JobName.LibraryDelete, data: { id: library1.id } },
{ name: JobName.LibraryDelete, data: { id: library2.id } },
]);
});
});
@@ -968,7 +968,7 @@ describe(LibraryService.name, () => {
await sut.watchAll();
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.LIBRARY_SYNC_FILES,
name: JobName.LibrarySyncFiles,
data: {
libraryId: library.id,
paths: ['/foo/photo.jpg'],
@@ -989,7 +989,7 @@ describe(LibraryService.name, () => {
await sut.watchAll();
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.LIBRARY_SYNC_FILES,
name: JobName.LibrarySyncFiles,
data: {
libraryId: library.id,
paths: ['/foo/photo.jpg'],
@@ -1010,7 +1010,7 @@ describe(LibraryService.name, () => {
await sut.watchAll();
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.LIBRARY_ASSET_REMOVAL,
name: JobName.LibraryAssetRemoval,
data: {
libraryId: library.id,
paths: [assetStub.image.originalPath],
@@ -1106,7 +1106,7 @@ describe(LibraryService.name, () => {
mocks.library.get.mockResolvedValue(library);
mocks.library.streamAssetIds.mockReturnValue(makeStream([]));
await expect(sut.handleDeleteLibrary({ id: library.id })).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleDeleteLibrary({ id: library.id })).resolves.toBe(JobStatus.Success);
expect(mocks.library.delete).toHaveBeenCalled();
});
@@ -1117,7 +1117,7 @@ describe(LibraryService.name, () => {
mocks.library.get.mockResolvedValue(library);
mocks.library.streamAssetIds.mockReturnValue(makeStream([assetStub.image1]));
await expect(sut.handleDeleteLibrary({ id: library.id })).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleDeleteLibrary({ id: library.id })).resolves.toBe(JobStatus.Success);
});
});
@@ -1131,11 +1131,11 @@ describe(LibraryService.name, () => {
expect(mocks.job.queue).toHaveBeenCalledTimes(2);
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.LIBRARY_QUEUE_SYNC_FILES,
name: JobName.LibraryQueueSyncFiles,
data: { id: library.id },
});
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.LIBRARY_QUEUE_SYNC_ASSETS,
name: JobName.LibraryQueueSyncAssets,
data: { id: library.id },
});
});
@@ -1147,14 +1147,14 @@ describe(LibraryService.name, () => {
mocks.library.getAll.mockResolvedValue([library]);
await expect(sut.handleQueueScanAll()).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleQueueScanAll()).resolves.toBe(JobStatus.Success);
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.LIBRARY_QUEUE_CLEANUP,
name: JobName.LibraryQueueCleanup,
data: {},
});
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{ name: JobName.LIBRARY_QUEUE_SYNC_FILES, data: { id: library.id } },
{ name: JobName.LibraryQueueSyncFiles, data: { id: library.id } },
]);
});
});

View File

@@ -32,7 +32,7 @@ export class LibraryService extends BaseService {
private lock = false;
private watchers: Record<string, () => Promise<void>> = {};
@OnEvent({ name: 'ConfigInit', workers: [ImmichWorker.MICROSERVICES] })
@OnEvent({ name: 'ConfigInit', workers: [ImmichWorker.Microservices] })
async onConfigInit({
newConfig: {
library: { watch, scan },
@@ -47,8 +47,7 @@ export class LibraryService extends BaseService {
this.cronRepository.create({
name: CronJob.LibraryScan,
expression: scan.cronExpression,
onTick: () =>
handlePromiseError(this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_SCAN_ALL }), this.logger),
onTick: () => handlePromiseError(this.jobRepository.queue({ name: JobName.LibraryQueueScanAll }), this.logger),
start: scan.enabled,
});
}
@@ -103,7 +102,7 @@ export class LibraryService extends BaseService {
if (matcher(path)) {
this.logger.debug(`File ${event} event received for ${path} in library ${library.id}}`);
await this.jobRepository.queue({
name: JobName.LIBRARY_SYNC_FILES,
name: JobName.LibrarySyncFiles,
data: { libraryId: library.id, paths: [path] },
});
} else {
@@ -114,7 +113,7 @@ export class LibraryService extends BaseService {
const deletionHandler = async (path: string) => {
this.logger.debug(`File unlink event received for ${path} in library ${library.id}}`);
await this.jobRepository.queue({
name: JobName.LIBRARY_ASSET_REMOVAL,
name: JobName.LibraryAssetRemoval,
data: { libraryId: library.id, paths: [path] },
});
};
@@ -199,7 +198,7 @@ export class LibraryService extends BaseService {
return libraries.map((library) => mapLibrary(library));
}
@OnJob({ name: JobName.LIBRARY_QUEUE_CLEANUP, queue: QueueName.LIBRARY })
@OnJob({ name: JobName.LibraryQueueCleanup, queue: QueueName.Library })
async handleQueueCleanup(): Promise<JobStatus> {
this.logger.log('Checking for any libraries pending deletion...');
const pendingDeletions = await this.libraryRepository.getAllDeleted();
@@ -208,11 +207,11 @@ export class LibraryService extends BaseService {
this.logger.log(`Found ${pendingDeletions.length} ${libraryString} pending deletion, cleaning up...`);
await this.jobRepository.queueAll(
pendingDeletions.map((libraryToDelete) => ({ name: JobName.LIBRARY_DELETE, data: { id: libraryToDelete.id } })),
pendingDeletions.map((libraryToDelete) => ({ name: JobName.LibraryDelete, data: { id: libraryToDelete.id } })),
);
}
return JobStatus.SUCCESS;
return JobStatus.Success;
}
async create(dto: CreateLibraryDto): Promise<LibraryResponseDto> {
@@ -225,16 +224,16 @@ export class LibraryService extends BaseService {
return mapLibrary(library);
}
@OnJob({ name: JobName.LIBRARY_SYNC_FILES, queue: QueueName.LIBRARY })
async handleSyncFiles(job: JobOf<JobName.LIBRARY_SYNC_FILES>): Promise<JobStatus> {
@OnJob({ name: JobName.LibrarySyncFiles, queue: QueueName.Library })
async handleSyncFiles(job: JobOf<JobName.LibrarySyncFiles>): Promise<JobStatus> {
const library = await this.libraryRepository.get(job.libraryId);
// We need to check if the library still exists as it could have been deleted after the scan was queued
if (!library) {
this.logger.debug(`Library ${job.libraryId} not found, skipping file import`);
return JobStatus.FAILED;
return JobStatus.Failed;
} else if (library.deletedAt) {
this.logger.debug(`Library ${job.libraryId} is deleted, won't import assets into it`);
return JobStatus.FAILED;
return JobStatus.Failed;
}
const assetImports: Insertable<AssetTable>[] = [];
@@ -263,7 +262,7 @@ export class LibraryService extends BaseService {
await this.queuePostSyncJobs(assetIds);
return JobStatus.SUCCESS;
return JobStatus.Success;
}
private async validateImportPath(importPath: string): Promise<ValidateLibraryImportPathResponseDto> {
@@ -339,11 +338,11 @@ export class LibraryService extends BaseService {
}
await this.libraryRepository.softDelete(id);
await this.jobRepository.queue({ name: JobName.LIBRARY_DELETE, data: { id } });
await this.jobRepository.queue({ name: JobName.LibraryDelete, data: { id } });
}
@OnJob({ name: JobName.LIBRARY_DELETE, queue: QueueName.LIBRARY })
async handleDeleteLibrary(job: JobOf<JobName.LIBRARY_DELETE>): Promise<JobStatus> {
@OnJob({ name: JobName.LibraryDelete, queue: QueueName.Library })
async handleDeleteLibrary(job: JobOf<JobName.LibraryDelete>): Promise<JobStatus> {
const libraryId = job.id;
await this.assetRepository.updateByLibraryId(libraryId, { deletedAt: new Date() });
@@ -356,7 +355,7 @@ export class LibraryService extends BaseService {
assetsFound = true;
this.logger.debug(`Queueing deletion of ${chunk.length} asset(s) in library ${libraryId}`);
await this.jobRepository.queueAll(
chunk.map((id) => ({ name: JobName.ASSET_DELETION, data: { id, deleteOnDisk: false } })),
chunk.map((id) => ({ name: JobName.AssetDeletion, data: { id, deleteOnDisk: false } })),
);
chunk = [];
}
@@ -379,7 +378,7 @@ export class LibraryService extends BaseService {
await this.libraryRepository.delete(libraryId);
}
return JobStatus.SUCCESS;
return JobStatus.Success;
}
private async processEntity(filePath: string, ownerId: string, libraryId: string) {
@@ -398,7 +397,7 @@ export class LibraryService extends BaseService {
// TODO: device asset id is deprecated, remove it
deviceAssetId: `${basename(assetPath)}`.replaceAll(/\s+/g, ''),
deviceId: 'Library Import',
type: mimeTypes.isVideo(assetPath) ? AssetType.VIDEO : AssetType.IMAGE,
type: mimeTypes.isVideo(assetPath) ? AssetType.Video : AssetType.Image,
originalFileName: parse(assetPath).base,
isExternal: true,
livePhotoVideoId: null,
@@ -411,7 +410,7 @@ export class LibraryService extends BaseService {
// We queue a sidecar discovery which, in turn, queues metadata extraction
await this.jobRepository.queueAll(
assetIds.map((assetId) => ({
name: JobName.SIDECAR_DISCOVERY,
name: JobName.SidecarDiscovery,
data: { id: assetId, source: 'upload' },
})),
);
@@ -423,30 +422,30 @@ export class LibraryService extends BaseService {
this.logger.log(`Starting to scan library ${id}`);
await this.jobRepository.queue({
name: JobName.LIBRARY_QUEUE_SYNC_FILES,
name: JobName.LibraryQueueSyncFiles,
data: {
id,
},
});
await this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_SYNC_ASSETS, data: { id } });
await this.jobRepository.queue({ name: JobName.LibraryQueueSyncAssets, data: { id } });
}
async queueScanAll() {
await this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_SCAN_ALL, data: {} });
await this.jobRepository.queue({ name: JobName.LibraryQueueScanAll, data: {} });
}
@OnJob({ name: JobName.LIBRARY_QUEUE_SCAN_ALL, queue: QueueName.LIBRARY })
@OnJob({ name: JobName.LibraryQueueScanAll, queue: QueueName.Library })
async handleQueueScanAll(): Promise<JobStatus> {
this.logger.log(`Initiating scan of all external libraries...`);
await this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_CLEANUP, data: {} });
await this.jobRepository.queue({ name: JobName.LibraryQueueCleanup, data: {} });
const libraries = await this.libraryRepository.getAll(true);
await this.jobRepository.queueAll(
libraries.map((library) => ({
name: JobName.LIBRARY_QUEUE_SYNC_FILES,
name: JobName.LibraryQueueSyncFiles,
data: {
id: library.id,
},
@@ -454,18 +453,18 @@ export class LibraryService extends BaseService {
);
await this.jobRepository.queueAll(
libraries.map((library) => ({
name: JobName.LIBRARY_QUEUE_SYNC_ASSETS,
name: JobName.LibraryQueueSyncAssets,
data: {
id: library.id,
},
})),
);
return JobStatus.SUCCESS;
return JobStatus.Success;
}
@OnJob({ name: JobName.LIBRARY_SYNC_ASSETS, queue: QueueName.LIBRARY })
async handleSyncAssets(job: JobOf<JobName.LIBRARY_SYNC_ASSETS>): Promise<JobStatus> {
@OnJob({ name: JobName.LibrarySyncAssets, queue: QueueName.Library })
async handleSyncAssets(job: JobOf<JobName.LibrarySyncAssets>): Promise<JobStatus> {
const assets = await this.assetJobRepository.getForSyncAssets(job.assetIds);
const assetIdsToOffline: string[] = [];
@@ -486,7 +485,7 @@ export class LibraryService extends BaseService {
const action = this.checkExistingAsset(asset, stat);
switch (action) {
case AssetSyncResult.OFFLINE: {
if (asset.status === AssetStatus.TRASHED) {
if (asset.status === AssetStatus.Trashed) {
trashedAssetIdsToOffline.push(asset.id);
} else {
assetIdsToOffline.push(asset.id);
@@ -511,7 +510,7 @@ export class LibraryService extends BaseService {
if (!isExcluded) {
this.logger.debug(`Offline asset ${asset.originalPath} is now online in library ${job.libraryId}`);
if (asset.status === AssetStatus.TRASHED) {
if (asset.status === AssetStatus.Trashed) {
trashedAssetIdsToOnline.push(asset.id);
} else {
assetIdsToOnline.push(asset.id);
@@ -557,7 +556,7 @@ export class LibraryService extends BaseService {
`Checked existing asset(s): ${assetIdsToOffline.length + trashedAssetIdsToOffline.length} offlined, ${assetIdsToOnline.length + trashedAssetIdsToOnline.length} onlined, ${assetIdsToUpdate.length} updated, ${remainingCount} unchanged of current batch of ${assets.length} (Total progress: ${job.progressCounter} of ${job.totalAssets}, ${cumulativePercentage} %) in library ${job.libraryId}.`,
);
return JobStatus.SUCCESS;
return JobStatus.Success;
}
private checkExistingAsset(
@@ -585,7 +584,7 @@ export class LibraryService extends BaseService {
return AssetSyncResult.OFFLINE;
}
if (asset.isOffline && asset.status !== AssetStatus.DELETED) {
if (asset.isOffline && asset.status !== AssetStatus.Deleted) {
// Only perform the expensive check if the asset is offline
return AssetSyncResult.CHECK_OFFLINE;
}
@@ -599,12 +598,12 @@ export class LibraryService extends BaseService {
return AssetSyncResult.DO_NOTHING;
}
@OnJob({ name: JobName.LIBRARY_QUEUE_SYNC_FILES, queue: QueueName.LIBRARY })
async handleQueueSyncFiles(job: JobOf<JobName.LIBRARY_QUEUE_SYNC_FILES>): Promise<JobStatus> {
@OnJob({ name: JobName.LibraryQueueSyncFiles, queue: QueueName.Library })
async handleQueueSyncFiles(job: JobOf<JobName.LibraryQueueSyncFiles>): Promise<JobStatus> {
const library = await this.libraryRepository.get(job.id);
if (!library) {
this.logger.debug(`Library ${job.id} not found, skipping refresh`);
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
this.logger.debug(`Validating import paths for library ${library.id}...`);
@@ -623,7 +622,7 @@ export class LibraryService extends BaseService {
if (validImportPaths.length === 0) {
this.logger.warn(`No valid import paths found for library ${library.id}`);
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
const pathsOnDisk = this.storageRepository.walk({
@@ -646,7 +645,7 @@ export class LibraryService extends BaseService {
importCount += paths.length;
await this.jobRepository.queue({
name: JobName.LIBRARY_SYNC_FILES,
name: JobName.LibrarySyncFiles,
data: {
libraryId: library.id,
paths,
@@ -666,11 +665,11 @@ export class LibraryService extends BaseService {
await this.libraryRepository.update(job.id, { refreshedAt: new Date() });
return JobStatus.SUCCESS;
return JobStatus.Success;
}
@OnJob({ name: JobName.LIBRARY_ASSET_REMOVAL, queue: QueueName.LIBRARY })
async handleAssetRemoval(job: JobOf<JobName.LIBRARY_ASSET_REMOVAL>): Promise<JobStatus> {
@OnJob({ name: JobName.LibraryAssetRemoval, queue: QueueName.Library })
async handleAssetRemoval(job: JobOf<JobName.LibraryAssetRemoval>): Promise<JobStatus> {
// This is only for handling file unlink events via the file watcher
this.logger.verbose(`Deleting asset(s) ${job.paths} from library ${job.libraryId}`);
for (const assetPath of job.paths) {
@@ -680,20 +679,20 @@ export class LibraryService extends BaseService {
}
}
return JobStatus.SUCCESS;
return JobStatus.Success;
}
@OnJob({ name: JobName.LIBRARY_QUEUE_SYNC_ASSETS, queue: QueueName.LIBRARY })
async handleQueueSyncAssets(job: JobOf<JobName.LIBRARY_QUEUE_SYNC_ASSETS>): Promise<JobStatus> {
@OnJob({ name: JobName.LibraryQueueSyncAssets, queue: QueueName.Library })
async handleQueueSyncAssets(job: JobOf<JobName.LibraryQueueSyncAssets>): Promise<JobStatus> {
const library = await this.libraryRepository.get(job.id);
if (!library) {
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
const assetCount = await this.assetRepository.getLibraryAssetCount(job.id);
if (!assetCount) {
this.logger.log(`Library ${library.id} is empty, no need to check assets`);
return JobStatus.SUCCESS;
return JobStatus.Success;
}
this.logger.log(
@@ -713,7 +712,7 @@ export class LibraryService extends BaseService {
);
if (affectedAssetCount === assetCount) {
return JobStatus.SUCCESS;
return JobStatus.Success;
}
let chunk: string[] = [];
@@ -724,7 +723,7 @@ export class LibraryService extends BaseService {
count += chunk.length;
await this.jobRepository.queue({
name: JobName.LIBRARY_SYNC_ASSETS,
name: JobName.LibrarySyncAssets,
data: {
libraryId: library.id,
importPaths: library.importPaths,
@@ -758,7 +757,7 @@ export class LibraryService extends BaseService {
this.logger.log(`Finished queuing ${count} asset check(s) for library ${library.id}`);
return JobStatus.SUCCESS;
return JobStatus.Success;
}
private async findOrFail(id: string) {

File diff suppressed because it is too large Load Diff

View File

@@ -18,7 +18,7 @@ import {
QueueName,
RawExtractedFormat,
StorageFolder,
TranscodeHWAccel,
TranscodeHardwareAcceleration,
TranscodePolicy,
TranscodeTarget,
VideoCodec,
@@ -57,8 +57,8 @@ export class MediaService extends BaseService {
this.videoInterfaces = { dri, mali };
}
@OnJob({ name: JobName.QUEUE_GENERATE_THUMBNAILS, queue: QueueName.THUMBNAIL_GENERATION })
async handleQueueGenerateThumbnails({ force }: JobOf<JobName.QUEUE_GENERATE_THUMBNAILS>): Promise<JobStatus> {
@OnJob({ name: JobName.QueueGenerateThumbnails, queue: QueueName.ThumbnailGeneration })
async handleQueueGenerateThumbnails({ force }: JobOf<JobName.QueueGenerateThumbnails>): Promise<JobStatus> {
let jobs: JobItem[] = [];
const queueAll = async () => {
@@ -70,7 +70,7 @@ export class MediaService extends BaseService {
const { previewFile, thumbnailFile } = getAssetFiles(asset.files);
if (!previewFile || !thumbnailFile || !asset.thumbhash || force) {
jobs.push({ name: JobName.GENERATE_THUMBNAILS, data: { id: asset.id } });
jobs.push({ name: JobName.GenerateThumbnails, data: { id: asset.id } });
}
if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
@@ -92,7 +92,7 @@ export class MediaService extends BaseService {
await this.personRepository.update({ id: person.id, faceAssetId: face.id });
}
jobs.push({ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id: person.id } });
jobs.push({ name: JobName.GeneratePersonThumbnail, data: { id: person.id } });
if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
await queueAll();
}
@@ -100,21 +100,21 @@ export class MediaService extends BaseService {
await queueAll();
return JobStatus.SUCCESS;
return JobStatus.Success;
}
@OnJob({ name: JobName.QUEUE_MIGRATION, queue: QueueName.MIGRATION })
@OnJob({ name: JobName.QueueMigration, queue: QueueName.Migration })
async handleQueueMigration(): Promise<JobStatus> {
const { active, waiting } = await this.jobRepository.getJobCounts(QueueName.MIGRATION);
const { active, waiting } = await this.jobRepository.getJobCounts(QueueName.Migration);
if (active === 1 && waiting === 0) {
await this.storageCore.removeEmptyDirs(StorageFolder.THUMBNAILS);
await this.storageCore.removeEmptyDirs(StorageFolder.ENCODED_VIDEO);
await this.storageCore.removeEmptyDirs(StorageFolder.Thumbnails);
await this.storageCore.removeEmptyDirs(StorageFolder.EncodedVideo);
}
let jobs: JobItem[] = [];
const assets = this.assetJobRepository.streamForMigrationJob();
for await (const asset of assets) {
jobs.push({ name: JobName.MIGRATE_ASSET, data: { id: asset.id } });
jobs.push({ name: JobName.MigrateAsset, data: { id: asset.id } });
if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
await this.jobRepository.queueAll(jobs);
jobs = [];
@@ -125,7 +125,7 @@ export class MediaService extends BaseService {
jobs = [];
for await (const person of this.personRepository.getAll()) {
jobs.push({ name: JobName.MIGRATE_PERSON, data: { id: person.id } });
jobs.push({ name: JobName.MigratePerson, data: { id: person.id } });
if (jobs.length === JOBS_ASSET_PAGINATION_SIZE) {
await this.jobRepository.queueAll(jobs);
@@ -135,36 +135,36 @@ export class MediaService extends BaseService {
await this.jobRepository.queueAll(jobs);
return JobStatus.SUCCESS;
return JobStatus.Success;
}
@OnJob({ name: JobName.MIGRATE_ASSET, queue: QueueName.MIGRATION })
async handleAssetMigration({ id }: JobOf<JobName.MIGRATE_ASSET>): Promise<JobStatus> {
@OnJob({ name: JobName.MigrateAsset, queue: QueueName.Migration })
async handleAssetMigration({ id }: JobOf<JobName.MigrateAsset>): Promise<JobStatus> {
const { image } = await this.getConfig({ withCache: true });
const asset = await this.assetJobRepository.getForMigrationJob(id);
if (!asset) {
return JobStatus.FAILED;
return JobStatus.Failed;
}
await this.storageCore.moveAssetImage(asset, AssetPathType.FULLSIZE, image.fullsize.format);
await this.storageCore.moveAssetImage(asset, AssetPathType.PREVIEW, image.preview.format);
await this.storageCore.moveAssetImage(asset, AssetPathType.THUMBNAIL, image.thumbnail.format);
await this.storageCore.moveAssetImage(asset, AssetPathType.FullSize, image.fullsize.format);
await this.storageCore.moveAssetImage(asset, AssetPathType.Preview, image.preview.format);
await this.storageCore.moveAssetImage(asset, AssetPathType.Thumbnail, image.thumbnail.format);
await this.storageCore.moveAssetVideo(asset);
return JobStatus.SUCCESS;
return JobStatus.Success;
}
@OnJob({ name: JobName.GENERATE_THUMBNAILS, queue: QueueName.THUMBNAIL_GENERATION })
async handleGenerateThumbnails({ id }: JobOf<JobName.GENERATE_THUMBNAILS>): Promise<JobStatus> {
@OnJob({ name: JobName.GenerateThumbnails, queue: QueueName.ThumbnailGeneration })
async handleGenerateThumbnails({ id }: JobOf<JobName.GenerateThumbnails>): Promise<JobStatus> {
const asset = await this.assetJobRepository.getForGenerateThumbnailJob(id);
if (!asset) {
this.logger.warn(`Thumbnail generation failed for asset ${id}: not found`);
return JobStatus.FAILED;
return JobStatus.Failed;
}
if (asset.visibility === AssetVisibility.HIDDEN) {
if (asset.visibility === AssetVisibility.Hidden) {
this.logger.verbose(`Thumbnail generation skipped for asset ${id}: not visible`);
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
let generated: {
@@ -173,27 +173,27 @@ export class MediaService extends BaseService {
fullsizePath?: string;
thumbhash: Buffer;
};
if (asset.type === AssetType.VIDEO || asset.originalFileName.toLowerCase().endsWith('.gif')) {
if (asset.type === AssetType.Video || asset.originalFileName.toLowerCase().endsWith('.gif')) {
generated = await this.generateVideoThumbnails(asset);
} else if (asset.type === AssetType.IMAGE) {
} else if (asset.type === AssetType.Image) {
generated = await this.generateImageThumbnails(asset);
} else {
this.logger.warn(`Skipping thumbnail generation for asset ${id}: ${asset.type} is not an image or video`);
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
const { previewFile, thumbnailFile, fullsizeFile } = getAssetFiles(asset.files);
const toUpsert: UpsertFileOptions[] = [];
if (previewFile?.path !== generated.previewPath) {
toUpsert.push({ assetId: asset.id, path: generated.previewPath, type: AssetFileType.PREVIEW });
toUpsert.push({ assetId: asset.id, path: generated.previewPath, type: AssetFileType.Preview });
}
if (thumbnailFile?.path !== generated.thumbnailPath) {
toUpsert.push({ assetId: asset.id, path: generated.thumbnailPath, type: AssetFileType.THUMBNAIL });
toUpsert.push({ assetId: asset.id, path: generated.thumbnailPath, type: AssetFileType.Thumbnail });
}
if (generated.fullsizePath && fullsizeFile?.path !== generated.fullsizePath) {
toUpsert.push({ assetId: asset.id, path: generated.fullsizePath, type: AssetFileType.FULLSIZE });
toUpsert.push({ assetId: asset.id, path: generated.fullsizePath, type: AssetFileType.FullSize });
}
if (toUpsert.length > 0) {
@@ -230,7 +230,7 @@ export class MediaService extends BaseService {
await this.assetRepository.upsertJobStatus({ assetId: asset.id, previewAt: new Date(), thumbnailAt: new Date() });
return JobStatus.SUCCESS;
return JobStatus.Success;
}
private async extractImage(originalPath: string, minSize: number) {
@@ -244,7 +244,7 @@ export class MediaService extends BaseService {
private async decodeImage(thumbSource: string | Buffer, exifInfo: Exif, targetSize?: number) {
const { image } = await this.getConfig({ withCache: true });
const colorspace = this.isSRGB(exifInfo) ? Colorspace.SRGB : image.colorspace;
const colorspace = this.isSRGB(exifInfo) ? Colorspace.Srgb : image.colorspace;
const decodeOptions: DecodeToBufferOptions = {
colorspace,
processInvalidImages: process.env.IMMICH_PROCESS_INVALID_IMAGES === 'true',
@@ -264,8 +264,8 @@ export class MediaService extends BaseService {
exifInfo: Exif;
}) {
const { image } = await this.getConfig({ withCache: true });
const previewPath = StorageCore.getImagePath(asset, AssetPathType.PREVIEW, image.preview.format);
const thumbnailPath = StorageCore.getImagePath(asset, AssetPathType.THUMBNAIL, image.thumbnail.format);
const previewPath = StorageCore.getImagePath(asset, AssetPathType.Preview, image.preview.format);
const thumbnailPath = StorageCore.getImagePath(asset, AssetPathType.Thumbnail, image.thumbnail.format);
this.storageCore.ensureFolders(previewPath);
// Handle embedded preview extraction for RAW files
@@ -294,11 +294,11 @@ export class MediaService extends BaseService {
if (convertFullsize) {
// convert a new fullsize image from the same source as the thumbnail
fullsizePath = StorageCore.getImagePath(asset, AssetPathType.FULLSIZE, image.fullsize.format);
fullsizePath = StorageCore.getImagePath(asset, AssetPathType.FullSize, image.fullsize.format);
const fullsizeOptions = { format: image.fullsize.format, quality: image.fullsize.quality, ...thumbnailOptions };
promises.push(this.mediaRepository.generateThumbnail(data, fullsizeOptions, fullsizePath));
} else if (generateFullsize && extracted && extracted.format === RawExtractedFormat.JPEG) {
fullsizePath = StorageCore.getImagePath(asset, AssetPathType.FULLSIZE, extracted.format);
} else if (generateFullsize && extracted && extracted.format === RawExtractedFormat.Jpeg) {
fullsizePath = StorageCore.getImagePath(asset, AssetPathType.FullSize, extracted.format);
this.storageCore.ensureFolders(fullsizePath);
// Write the buffer to disk with essential EXIF data
@@ -317,25 +317,25 @@ export class MediaService extends BaseService {
return { previewPath, thumbnailPath, fullsizePath, thumbhash: outputs[0] as Buffer };
}
@OnJob({ name: JobName.GENERATE_PERSON_THUMBNAIL, queue: QueueName.THUMBNAIL_GENERATION })
async handleGeneratePersonThumbnail({ id }: JobOf<JobName.GENERATE_PERSON_THUMBNAIL>): Promise<JobStatus> {
@OnJob({ name: JobName.GeneratePersonThumbnail, queue: QueueName.ThumbnailGeneration })
async handleGeneratePersonThumbnail({ id }: JobOf<JobName.GeneratePersonThumbnail>): Promise<JobStatus> {
const { machineLearning, metadata, image } = await this.getConfig({ withCache: true });
if (!isFacialRecognitionEnabled(machineLearning) && !isFaceImportEnabled(metadata)) {
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
const data = await this.personRepository.getDataForThumbnailGenerationJob(id);
if (!data) {
this.logger.error(`Could not generate person thumbnail for ${id}: missing data`);
return JobStatus.FAILED;
return JobStatus.Failed;
}
const { ownerId, x1, y1, x2, y2, oldWidth, oldHeight, exifOrientation, previewPath, originalPath } = data;
let inputImage: string | Buffer;
if (data.type === AssetType.VIDEO) {
if (data.type === AssetType.Video) {
if (!previewPath) {
this.logger.error(`Could not generate person thumbnail for video ${id}: missing preview path`);
return JobStatus.FAILED;
return JobStatus.Failed;
}
inputImage = previewPath;
} else if (image.extractEmbedded && mimeTypes.isRaw(originalPath)) {
@@ -357,7 +357,7 @@ export class MediaService extends BaseService {
const thumbnailOptions = {
colorspace: image.colorspace,
format: ImageFormat.JPEG,
format: ImageFormat.Jpeg,
raw: info,
quality: image.thumbnail.quality,
crop: this.getCrop(
@@ -371,7 +371,7 @@ export class MediaService extends BaseService {
await this.mediaRepository.generateThumbnail(decodedImage, thumbnailOptions, thumbnailPath);
await this.personRepository.update({ id, thumbnailPath });
return JobStatus.SUCCESS;
return JobStatus.Success;
}
private getCrop(dims: { old: ImageDimensions; new: ImageDimensions }, { x1, y1, x2, y2 }: BoundingBox): CropOptions {
@@ -411,8 +411,8 @@ export class MediaService extends BaseService {
private async generateVideoThumbnails(asset: ThumbnailPathEntity & { originalPath: string }) {
const { image, ffmpeg } = await this.getConfig({ withCache: true });
const previewPath = StorageCore.getImagePath(asset, AssetPathType.PREVIEW, image.preview.format);
const thumbnailPath = StorageCore.getImagePath(asset, AssetPathType.THUMBNAIL, image.thumbnail.format);
const previewPath = StorageCore.getImagePath(asset, AssetPathType.Preview, image.preview.format);
const thumbnailPath = StorageCore.getImagePath(asset, AssetPathType.Thumbnail, image.thumbnail.format);
this.storageCore.ensureFolders(previewPath);
const { format, audioStreams, videoStreams } = await this.mediaRepository.probe(asset.originalPath);
@@ -424,9 +424,9 @@ export class MediaService extends BaseService {
const previewConfig = ThumbnailConfig.create({ ...ffmpeg, targetResolution: image.preview.size.toString() });
const thumbnailConfig = ThumbnailConfig.create({ ...ffmpeg, targetResolution: image.thumbnail.size.toString() });
const previewOptions = previewConfig.getCommand(TranscodeTarget.VIDEO, mainVideoStream, mainAudioStream, format);
const previewOptions = previewConfig.getCommand(TranscodeTarget.Video, mainVideoStream, mainAudioStream, format);
const thumbnailOptions = thumbnailConfig.getCommand(
TranscodeTarget.VIDEO,
TranscodeTarget.Video,
mainVideoStream,
mainAudioStream,
format,
@@ -443,13 +443,13 @@ export class MediaService extends BaseService {
return { previewPath, thumbnailPath, thumbhash };
}
@OnJob({ name: JobName.QUEUE_VIDEO_CONVERSION, queue: QueueName.VIDEO_CONVERSION })
async handleQueueVideoConversion(job: JobOf<JobName.QUEUE_VIDEO_CONVERSION>): Promise<JobStatus> {
@OnJob({ name: JobName.QueueVideoConversion, queue: QueueName.VideoConversion })
async handleQueueVideoConversion(job: JobOf<JobName.QueueVideoConversion>): Promise<JobStatus> {
const { force } = job;
let queue: { name: JobName.VIDEO_CONVERSION; data: { id: string } }[] = [];
let queue: { name: JobName.VideoConversion; data: { id: string } }[] = [];
for await (const asset of this.assetJobRepository.streamForVideoConversion(force)) {
queue.push({ name: JobName.VIDEO_CONVERSION, data: { id: asset.id } });
queue.push({ name: JobName.VideoConversion, data: { id: asset.id } });
if (queue.length >= JOBS_ASSET_PAGINATION_SIZE) {
await this.jobRepository.queueAll(queue);
@@ -459,14 +459,14 @@ export class MediaService extends BaseService {
await this.jobRepository.queueAll(queue);
return JobStatus.SUCCESS;
return JobStatus.Success;
}
@OnJob({ name: JobName.VIDEO_CONVERSION, queue: QueueName.VIDEO_CONVERSION })
async handleVideoConversion({ id }: JobOf<JobName.VIDEO_CONVERSION>): Promise<JobStatus> {
@OnJob({ name: JobName.VideoConversion, queue: QueueName.VideoConversion })
async handleVideoConversion({ id }: JobOf<JobName.VideoConversion>): Promise<JobStatus> {
const asset = await this.assetJobRepository.getForVideoConversion(id);
if (!asset) {
return JobStatus.FAILED;
return JobStatus.Failed;
}
const input = asset.originalPath;
@@ -474,35 +474,35 @@ export class MediaService extends BaseService {
this.storageCore.ensureFolders(output);
const { videoStreams, audioStreams, format } = await this.mediaRepository.probe(input, {
countFrames: this.logger.isLevelEnabled(LogLevel.DEBUG), // makes frame count more reliable for progress logs
countFrames: this.logger.isLevelEnabled(LogLevel.Debug), // makes frame count more reliable for progress logs
});
const videoStream = this.getMainStream(videoStreams);
const audioStream = this.getMainStream(audioStreams);
if (!videoStream || !format.formatName) {
return JobStatus.FAILED;
return JobStatus.Failed;
}
if (!videoStream.height || !videoStream.width) {
this.logger.warn(`Skipped transcoding for asset ${asset.id}: no video streams found`);
return JobStatus.FAILED;
return JobStatus.Failed;
}
let { ffmpeg } = await this.getConfig({ withCache: true });
const target = this.getTranscodeTarget(ffmpeg, videoStream, audioStream);
if (target === TranscodeTarget.NONE && !this.isRemuxRequired(ffmpeg, format)) {
if (target === TranscodeTarget.None && !this.isRemuxRequired(ffmpeg, format)) {
if (asset.encodedVideoPath) {
this.logger.log(`Transcoded video exists for asset ${asset.id}, but is no longer required. Deleting...`);
await this.jobRepository.queue({ name: JobName.DELETE_FILES, data: { files: [asset.encodedVideoPath] } });
await this.jobRepository.queue({ name: JobName.DeleteFiles, data: { files: [asset.encodedVideoPath] } });
await this.assetRepository.update({ id: asset.id, encodedVideoPath: null });
} else {
this.logger.verbose(`Asset ${asset.id} does not require transcoding based on current policy, skipping`);
}
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
const command = BaseConfig.create(ffmpeg, this.videoInterfaces).getCommand(target, videoStream, audioStream);
if (ffmpeg.accel === TranscodeHWAccel.DISABLED) {
if (ffmpeg.accel === TranscodeHardwareAcceleration.Disabled) {
this.logger.log(`Transcoding video ${asset.id} without hardware acceleration`);
} else {
this.logger.log(
@@ -514,8 +514,8 @@ export class MediaService extends BaseService {
await this.mediaRepository.transcode(input, output, command);
} catch (error: any) {
this.logger.error(`Error occurred during transcoding: ${error.message}`);
if (ffmpeg.accel === TranscodeHWAccel.DISABLED) {
return JobStatus.FAILED;
if (ffmpeg.accel === TranscodeHardwareAcceleration.Disabled) {
return JobStatus.Failed;
}
let partialFallbackSuccess = false;
@@ -533,7 +533,7 @@ export class MediaService extends BaseService {
if (!partialFallbackSuccess) {
this.logger.error(`Retrying with ${ffmpeg.accel.toUpperCase()} acceleration disabled`);
ffmpeg = { ...ffmpeg, accel: TranscodeHWAccel.DISABLED };
ffmpeg = { ...ffmpeg, accel: TranscodeHardwareAcceleration.Disabled };
const command = BaseConfig.create(ffmpeg, this.videoInterfaces).getCommand(target, videoStream, audioStream);
await this.mediaRepository.transcode(input, output, command);
}
@@ -543,7 +543,7 @@ export class MediaService extends BaseService {
await this.assetRepository.update({ id: asset.id, encodedVideoPath: output });
return JobStatus.SUCCESS;
return JobStatus.Success;
}
private getMainStream<T extends VideoStreamInfo | AudioStreamInfo>(streams: T[]): T {
@@ -561,18 +561,18 @@ export class MediaService extends BaseService {
const isVideoTranscodeRequired = this.isVideoTranscodeRequired(config, videoStream);
if (isAudioTranscodeRequired && isVideoTranscodeRequired) {
return TranscodeTarget.ALL;
return TranscodeTarget.All;
}
if (isAudioTranscodeRequired) {
return TranscodeTarget.AUDIO;
return TranscodeTarget.Audio;
}
if (isVideoTranscodeRequired) {
return TranscodeTarget.VIDEO;
return TranscodeTarget.Video;
}
return TranscodeTarget.NONE;
return TranscodeTarget.None;
}
private isAudioTranscodeRequired(ffmpegConfig: SystemConfigFFmpegDto, stream?: AudioStreamInfo): boolean {
@@ -581,15 +581,15 @@ export class MediaService extends BaseService {
}
switch (ffmpegConfig.transcode) {
case TranscodePolicy.DISABLED: {
case TranscodePolicy.Disabled: {
return false;
}
case TranscodePolicy.ALL: {
case TranscodePolicy.All: {
return true;
}
case TranscodePolicy.REQUIRED:
case TranscodePolicy.OPTIMAL:
case TranscodePolicy.BITRATE: {
case TranscodePolicy.Required:
case TranscodePolicy.Optimal:
case TranscodePolicy.Bitrate: {
return !ffmpegConfig.acceptedAudioCodecs.includes(stream.codecName as AudioCodec);
}
default: {
@@ -608,19 +608,19 @@ export class MediaService extends BaseService {
const isRequired = !isTargetVideoCodec || !stream.pixelFormat.endsWith('420p');
switch (ffmpegConfig.transcode) {
case TranscodePolicy.DISABLED: {
case TranscodePolicy.Disabled: {
return false;
}
case TranscodePolicy.ALL: {
case TranscodePolicy.All: {
return true;
}
case TranscodePolicy.REQUIRED: {
case TranscodePolicy.Required: {
return isRequired;
}
case TranscodePolicy.OPTIMAL: {
case TranscodePolicy.Optimal: {
return isRequired || isLargerThanTargetRes;
}
case TranscodePolicy.BITRATE: {
case TranscodePolicy.Bitrate: {
return isRequired || isLargerThanTargetBitrate;
}
default: {
@@ -630,12 +630,12 @@ export class MediaService extends BaseService {
}
private isRemuxRequired(ffmpegConfig: SystemConfigFFmpegDto, { formatName, formatLongName }: VideoFormat): boolean {
if (ffmpegConfig.transcode === TranscodePolicy.DISABLED) {
if (ffmpegConfig.transcode === TranscodePolicy.Disabled) {
return false;
}
const name = formatLongName === 'QuickTime / MOV' ? VideoContainer.MOV : (formatName as VideoContainer);
return name !== VideoContainer.MP4 && !ffmpegConfig.acceptedContainers.includes(name);
const name = formatLongName === 'QuickTime / MOV' ? VideoContainer.Mov : (formatName as VideoContainer);
return name !== VideoContainer.Mp4 && !ffmpegConfig.acceptedContainers.includes(name);
}
isSRGB({ colorspace, profileDescription, bitsPerSample }: Exif): boolean {

View File

@@ -12,7 +12,7 @@ const DAYS = 3;
@Injectable()
export class MemoryService extends BaseService {
@OnJob({ name: JobName.MEMORIES_CREATE, queue: QueueName.BACKGROUND_TASK })
@OnJob({ name: JobName.MemoriesCreate, queue: QueueName.BackgroundTask })
async onMemoriesCreate() {
const users = await this.userRepository.getList({ withDeleted: false });
const usersIds = await Promise.all(
@@ -26,7 +26,7 @@ export class MemoryService extends BaseService {
);
await this.databaseRepository.withLock(DatabaseLock.MemoryCreation, async () => {
const state = await this.systemMetadataRepository.get(SystemMetadataKey.MEMORIES_STATE);
const state = await this.systemMetadataRepository.get(SystemMetadataKey.MemoriesState);
const start = DateTime.utc().startOf('day').minus({ days: DAYS });
const lastOnThisDayDate = state?.lastOnThisDayDate ? DateTime.fromISO(state.lastOnThisDayDate) : start;
@@ -43,7 +43,7 @@ export class MemoryService extends BaseService {
this.logger.error(`Failed to create memories for ${target.toISO()}`, error);
}
// update system metadata even when there is an error to minimize the chance of duplicates
await this.systemMetadataRepository.set(SystemMetadataKey.MEMORIES_STATE, {
await this.systemMetadataRepository.set(SystemMetadataKey.MemoriesState, {
...state,
lastOnThisDayDate: target.toISO(),
});
@@ -60,7 +60,7 @@ export class MemoryService extends BaseService {
this.memoryRepository.create(
{
ownerId,
type: MemoryType.ON_THIS_DAY,
type: MemoryType.OnThisDay,
data: { year },
memoryAt: target.set({ year }).toISO()!,
showAt,
@@ -72,7 +72,7 @@ export class MemoryService extends BaseService {
);
}
@OnJob({ name: JobName.MEMORIES_CLEANUP, queue: QueueName.BACKGROUND_TASK })
@OnJob({ name: JobName.MemoriesCleanup, queue: QueueName.BackgroundTask })
async onMemoriesCleanup() {
await this.memoryRepository.cleanup();
}
@@ -87,7 +87,7 @@ export class MemoryService extends BaseService {
}
async get(auth: AuthDto, id: string): Promise<MemoryResponseDto> {
await this.requireAccess({ auth, permission: Permission.MEMORY_READ, ids: [id] });
await this.requireAccess({ auth, permission: Permission.MemoryRead, ids: [id] });
const memory = await this.findOrFail(id);
return mapMemory(memory, auth);
}
@@ -98,7 +98,7 @@ export class MemoryService extends BaseService {
const assetIds = dto.assetIds || [];
const allowedAssetIds = await this.checkAccess({
auth,
permission: Permission.ASSET_SHARE,
permission: Permission.AssetShare,
ids: assetIds,
});
const memory = await this.memoryRepository.create(
@@ -117,7 +117,7 @@ export class MemoryService extends BaseService {
}
async update(auth: AuthDto, id: string, dto: MemoryUpdateDto): Promise<MemoryResponseDto> {
await this.requireAccess({ auth, permission: Permission.MEMORY_UPDATE, ids: [id] });
await this.requireAccess({ auth, permission: Permission.MemoryUpdate, ids: [id] });
const memory = await this.memoryRepository.update(id, {
isSaved: dto.isSaved,
@@ -129,12 +129,12 @@ export class MemoryService extends BaseService {
}
async remove(auth: AuthDto, id: string): Promise<void> {
await this.requireAccess({ auth, permission: Permission.MEMORY_DELETE, ids: [id] });
await this.requireAccess({ auth, permission: Permission.MemoryDelete, ids: [id] });
await this.memoryRepository.delete(id);
}
async addAssets(auth: AuthDto, id: string, dto: BulkIdsDto): Promise<BulkIdResponseDto[]> {
await this.requireAccess({ auth, permission: Permission.MEMORY_READ, ids: [id] });
await this.requireAccess({ auth, permission: Permission.MemoryRead, ids: [id] });
const repos = { access: this.accessRepository, bulk: this.memoryRepository };
const results = await addAssets(auth, repos, { parentId: id, assetIds: dto.ids });
@@ -148,13 +148,13 @@ export class MemoryService extends BaseService {
}
async removeAssets(auth: AuthDto, id: string, dto: BulkIdsDto): Promise<BulkIdResponseDto[]> {
await this.requireAccess({ auth, permission: Permission.MEMORY_UPDATE, ids: [id] });
await this.requireAccess({ auth, permission: Permission.MemoryUpdate, ids: [id] });
const repos = { access: this.accessRepository, bulk: this.memoryRepository };
const results = await removeAssets(auth, repos, {
parentId: id,
assetIds: dto.ids,
canAlwaysRemove: Permission.MEMORY_DELETE,
canAlwaysRemove: Permission.MemoryDelete,
});
const hasSuccess = results.find(({ success }) => success);

View File

@@ -50,7 +50,7 @@ describe(MetadataService.name, () => {
mockReadTags();
mocks.config.getWorker.mockReturnValue(ImmichWorker.MICROSERVICES);
mocks.config.getWorker.mockReturnValue(ImmichWorker.Microservices);
delete process.env.TZ;
});
@@ -102,11 +102,11 @@ describe(MetadataService.name, () => {
it('should queue metadata extraction for all assets without exif values', async () => {
mocks.assetJob.streamForMetadataExtraction.mockReturnValue(makeStream([assetStub.image]));
await expect(sut.handleQueueMetadataExtraction({ force: false })).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleQueueMetadataExtraction({ force: false })).resolves.toBe(JobStatus.Success);
expect(mocks.assetJob.streamForMetadataExtraction).toHaveBeenCalledWith(false);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.METADATA_EXTRACTION,
name: JobName.MetadataExtraction,
data: { id: assetStub.image.id },
},
]);
@@ -115,11 +115,11 @@ describe(MetadataService.name, () => {
it('should queue metadata extraction for all assets', async () => {
mocks.assetJob.streamForMetadataExtraction.mockReturnValue(makeStream([assetStub.image]));
await expect(sut.handleQueueMetadataExtraction({ force: true })).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleQueueMetadataExtraction({ force: true })).resolves.toBe(JobStatus.Success);
expect(mocks.assetJob.streamForMetadataExtraction).toHaveBeenCalledWith(true);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.METADATA_EXTRACTION,
name: JobName.MetadataExtraction,
data: { id: assetStub.image.id },
},
]);
@@ -506,7 +506,7 @@ describe(MetadataService.name, () => {
it('should not apply motion photos if asset is video', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue({
...assetStub.livePhotoMotionAsset,
visibility: AssetVisibility.TIMELINE,
visibility: AssetVisibility.Timeline,
});
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
@@ -516,7 +516,7 @@ describe(MetadataService.name, () => {
expect(mocks.job.queue).not.toHaveBeenCalled();
expect(mocks.job.queueAll).not.toHaveBeenCalled();
expect(mocks.asset.update).not.toHaveBeenCalledWith(
expect.objectContaining({ assetType: AssetType.VIDEO, visibility: AssetVisibility.HIDDEN }),
expect.objectContaining({ assetType: AssetType.Video, visibility: AssetVisibility.Hidden }),
);
});
@@ -583,13 +583,13 @@ describe(MetadataService.name, () => {
fileCreatedAt: assetStub.livePhotoWithOriginalFileName.fileCreatedAt,
fileModifiedAt: assetStub.livePhotoWithOriginalFileName.fileModifiedAt,
id: fileStub.livePhotoMotion.uuid,
visibility: AssetVisibility.HIDDEN,
visibility: AssetVisibility.Hidden,
libraryId: assetStub.livePhotoWithOriginalFileName.libraryId,
localDateTime: assetStub.livePhotoWithOriginalFileName.fileCreatedAt,
originalFileName: 'asset_1.mp4',
originalPath: 'upload/encoded-video/user-id/li/ve/live-photo-motion-asset-MP.mp4',
ownerId: assetStub.livePhotoWithOriginalFileName.ownerId,
type: AssetType.VIDEO,
type: AssetType.Video,
});
expect(mocks.user.updateUsage).toHaveBeenCalledWith(assetStub.livePhotoMotionAsset.ownerId, 512);
expect(mocks.storage.createFile).toHaveBeenCalledWith(assetStub.livePhotoMotionAsset.originalPath, video);
@@ -599,7 +599,7 @@ describe(MetadataService.name, () => {
});
expect(mocks.asset.update).toHaveBeenCalledTimes(3);
expect(mocks.job.queue).toHaveBeenCalledExactlyOnceWith({
name: JobName.VIDEO_CONVERSION,
name: JobName.VideoConversion,
data: { id: assetStub.livePhotoMotionAsset.id },
});
});
@@ -641,13 +641,13 @@ describe(MetadataService.name, () => {
fileCreatedAt: assetStub.livePhotoWithOriginalFileName.fileCreatedAt,
fileModifiedAt: assetStub.livePhotoWithOriginalFileName.fileModifiedAt,
id: fileStub.livePhotoMotion.uuid,
visibility: AssetVisibility.HIDDEN,
visibility: AssetVisibility.Hidden,
libraryId: assetStub.livePhotoWithOriginalFileName.libraryId,
localDateTime: assetStub.livePhotoWithOriginalFileName.fileCreatedAt,
originalFileName: 'asset_1.mp4',
originalPath: 'upload/encoded-video/user-id/li/ve/live-photo-motion-asset-MP.mp4',
ownerId: assetStub.livePhotoWithOriginalFileName.ownerId,
type: AssetType.VIDEO,
type: AssetType.Video,
});
expect(mocks.user.updateUsage).toHaveBeenCalledWith(assetStub.livePhotoMotionAsset.ownerId, 512);
expect(mocks.storage.createFile).toHaveBeenCalledWith(assetStub.livePhotoMotionAsset.originalPath, video);
@@ -657,7 +657,7 @@ describe(MetadataService.name, () => {
});
expect(mocks.asset.update).toHaveBeenCalledTimes(3);
expect(mocks.job.queue).toHaveBeenCalledExactlyOnceWith({
name: JobName.VIDEO_CONVERSION,
name: JobName.VideoConversion,
data: { id: assetStub.livePhotoMotionAsset.id },
});
});
@@ -699,13 +699,13 @@ describe(MetadataService.name, () => {
fileCreatedAt: assetStub.livePhotoWithOriginalFileName.fileCreatedAt,
fileModifiedAt: assetStub.livePhotoWithOriginalFileName.fileModifiedAt,
id: fileStub.livePhotoMotion.uuid,
visibility: AssetVisibility.HIDDEN,
visibility: AssetVisibility.Hidden,
libraryId: assetStub.livePhotoWithOriginalFileName.libraryId,
localDateTime: assetStub.livePhotoWithOriginalFileName.fileCreatedAt,
originalFileName: 'asset_1.mp4',
originalPath: 'upload/encoded-video/user-id/li/ve/live-photo-motion-asset-MP.mp4',
ownerId: assetStub.livePhotoWithOriginalFileName.ownerId,
type: AssetType.VIDEO,
type: AssetType.Video,
});
expect(mocks.user.updateUsage).toHaveBeenCalledWith(assetStub.livePhotoMotionAsset.ownerId, 512);
expect(mocks.storage.createFile).toHaveBeenCalledWith(assetStub.livePhotoMotionAsset.originalPath, video);
@@ -715,7 +715,7 @@ describe(MetadataService.name, () => {
});
expect(mocks.asset.update).toHaveBeenCalledTimes(3);
expect(mocks.job.queue).toHaveBeenCalledExactlyOnceWith({
name: JobName.VIDEO_CONVERSION,
name: JobName.VideoConversion,
data: { id: assetStub.livePhotoMotionAsset.id },
});
});
@@ -737,7 +737,7 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: assetStub.livePhotoWithOriginalFileName.id });
expect(mocks.job.queue).toHaveBeenNthCalledWith(1, {
name: JobName.ASSET_DELETION,
name: JobName.AssetDeletion,
data: { id: assetStub.livePhotoWithOriginalFileName.livePhotoVideoId, deleteOnDisk: true },
});
});
@@ -778,7 +778,7 @@ describe(MetadataService.name, () => {
mocks.crypto.hashSha1.mockReturnValue(randomBytes(512));
mocks.asset.getByChecksum.mockResolvedValue({
...assetStub.livePhotoMotionAsset,
visibility: AssetVisibility.TIMELINE,
visibility: AssetVisibility.Timeline,
});
const video = randomBytes(512);
mocks.storage.readFile.mockResolvedValue(video);
@@ -786,7 +786,7 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: assetStub.livePhotoStillAsset.id });
expect(mocks.asset.update).toHaveBeenCalledWith({
id: assetStub.livePhotoMotionAsset.id,
visibility: AssetVisibility.HIDDEN,
visibility: AssetVisibility.Hidden,
});
expect(mocks.asset.update).toHaveBeenCalledWith({
id: assetStub.livePhotoStillAsset.id,
@@ -1106,7 +1106,7 @@ describe(MetadataService.name, () => {
boundingBoxX2: 200,
boundingBoxY1: 20,
boundingBoxY2: 60,
sourceType: SourceType.EXIF,
sourceType: SourceType.Exif,
},
],
[],
@@ -1116,7 +1116,7 @@ describe(MetadataService.name, () => {
]);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.GENERATE_PERSON_THUMBNAIL,
name: JobName.GeneratePersonThumbnail,
data: { id: personStub.withName.id },
},
]);
@@ -1145,7 +1145,7 @@ describe(MetadataService.name, () => {
boundingBoxX2: 200,
boundingBoxY1: 20,
boundingBoxY2: 60,
sourceType: SourceType.EXIF,
sourceType: SourceType.Exif,
},
],
[],
@@ -1234,7 +1234,7 @@ describe(MetadataService.name, () => {
boundingBoxX2: x2,
boundingBoxY1: y1,
boundingBoxY2: y2,
sourceType: SourceType.EXIF,
sourceType: SourceType.Exif,
},
],
[],
@@ -1244,7 +1244,7 @@ describe(MetadataService.name, () => {
]);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.GENERATE_PERSON_THUMBNAIL,
name: JobName.GeneratePersonThumbnail,
data: { id: personStub.withName.id },
},
]);
@@ -1308,7 +1308,7 @@ describe(MetadataService.name, () => {
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id);
expect(mocks.asset.findLivePhotoMatch).not.toHaveBeenCalled();
expect(mocks.asset.update).not.toHaveBeenCalledWith(
expect.objectContaining({ visibility: AssetVisibility.HIDDEN }),
expect.objectContaining({ visibility: AssetVisibility.Hidden }),
);
expect(mocks.album.removeAssetsFromAll).not.toHaveBeenCalled();
});
@@ -1326,10 +1326,10 @@ describe(MetadataService.name, () => {
ownerId: assetStub.livePhotoMotionAsset.ownerId,
otherAssetId: assetStub.livePhotoMotionAsset.id,
libraryId: null,
type: AssetType.IMAGE,
type: AssetType.Image,
});
expect(mocks.asset.update).not.toHaveBeenCalledWith(
expect.objectContaining({ visibility: AssetVisibility.HIDDEN }),
expect.objectContaining({ visibility: AssetVisibility.Hidden }),
);
expect(mocks.album.removeAssetsFromAll).not.toHaveBeenCalled();
});
@@ -1346,7 +1346,7 @@ describe(MetadataService.name, () => {
livePhotoCID: 'CID',
ownerId: assetStub.livePhotoStillAsset.ownerId,
otherAssetId: assetStub.livePhotoStillAsset.id,
type: AssetType.VIDEO,
type: AssetType.Video,
});
expect(mocks.asset.update).toHaveBeenCalledWith({
id: assetStub.livePhotoStillAsset.id,
@@ -1354,7 +1354,7 @@ describe(MetadataService.name, () => {
});
expect(mocks.asset.update).toHaveBeenCalledWith({
id: assetStub.livePhotoMotionAsset.id,
visibility: AssetVisibility.HIDDEN,
visibility: AssetVisibility.Hidden,
});
expect(mocks.album.removeAssetsFromAll).toHaveBeenCalledWith([assetStub.livePhotoMotionAsset.id]);
});
@@ -1457,7 +1457,7 @@ describe(MetadataService.name, () => {
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.SIDECAR_SYNC,
name: JobName.SidecarSync,
data: { id: assetStub.sidecar.id },
},
]);
@@ -1471,7 +1471,7 @@ describe(MetadataService.name, () => {
expect(mocks.assetJob.streamForSidecar).toHaveBeenCalledWith(false);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.SIDECAR_DISCOVERY,
name: JobName.SidecarDiscovery,
data: { id: assetStub.image.id },
},
]);
@@ -1481,13 +1481,13 @@ describe(MetadataService.name, () => {
describe('handleSidecarSync', () => {
it('should do nothing if asset could not be found', async () => {
mocks.asset.getByIds.mockResolvedValue([]);
await expect(sut.handleSidecarSync({ id: assetStub.image.id })).resolves.toBe(JobStatus.FAILED);
await expect(sut.handleSidecarSync({ id: assetStub.image.id })).resolves.toBe(JobStatus.Failed);
expect(mocks.asset.update).not.toHaveBeenCalled();
});
it('should do nothing if asset has no sidecar path', async () => {
mocks.asset.getByIds.mockResolvedValue([assetStub.image]);
await expect(sut.handleSidecarSync({ id: assetStub.image.id })).resolves.toBe(JobStatus.FAILED);
await expect(sut.handleSidecarSync({ id: assetStub.image.id })).resolves.toBe(JobStatus.Failed);
expect(mocks.asset.update).not.toHaveBeenCalled();
});
@@ -1495,7 +1495,7 @@ describe(MetadataService.name, () => {
mocks.asset.getByIds.mockResolvedValue([assetStub.sidecar]);
mocks.storage.checkFileExists.mockResolvedValue(true);
await expect(sut.handleSidecarSync({ id: assetStub.sidecar.id })).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleSidecarSync({ id: assetStub.sidecar.id })).resolves.toBe(JobStatus.Success);
expect(mocks.storage.checkFileExists).toHaveBeenCalledWith(
`${assetStub.sidecar.originalPath}.xmp`,
constants.R_OK,
@@ -1511,7 +1511,7 @@ describe(MetadataService.name, () => {
mocks.storage.checkFileExists.mockResolvedValueOnce(false);
mocks.storage.checkFileExists.mockResolvedValueOnce(true);
await expect(sut.handleSidecarSync({ id: assetStub.sidecarWithoutExt.id })).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleSidecarSync({ id: assetStub.sidecarWithoutExt.id })).resolves.toBe(JobStatus.Success);
expect(mocks.storage.checkFileExists).toHaveBeenNthCalledWith(
2,
assetStub.sidecarWithoutExt.sidecarPath,
@@ -1528,7 +1528,7 @@ describe(MetadataService.name, () => {
mocks.storage.checkFileExists.mockResolvedValueOnce(true);
mocks.storage.checkFileExists.mockResolvedValueOnce(true);
await expect(sut.handleSidecarSync({ id: assetStub.sidecar.id })).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleSidecarSync({ id: assetStub.sidecar.id })).resolves.toBe(JobStatus.Success);
expect(mocks.storage.checkFileExists).toHaveBeenNthCalledWith(1, assetStub.sidecar.sidecarPath, constants.R_OK);
expect(mocks.storage.checkFileExists).toHaveBeenNthCalledWith(
2,
@@ -1545,7 +1545,7 @@ describe(MetadataService.name, () => {
mocks.asset.getByIds.mockResolvedValue([assetStub.sidecar]);
mocks.storage.checkFileExists.mockResolvedValue(false);
await expect(sut.handleSidecarSync({ id: assetStub.sidecar.id })).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleSidecarSync({ id: assetStub.sidecar.id })).resolves.toBe(JobStatus.Success);
expect(mocks.storage.checkFileExists).toHaveBeenCalledWith(
`${assetStub.sidecar.originalPath}.xmp`,
constants.R_OK,
@@ -1603,14 +1603,14 @@ describe(MetadataService.name, () => {
describe('handleSidecarWrite', () => {
it('should skip assets that do not exist anymore', async () => {
mocks.assetJob.getForSidecarWriteJob.mockResolvedValue(void 0);
await expect(sut.handleSidecarWrite({ id: 'asset-123' })).resolves.toBe(JobStatus.FAILED);
await expect(sut.handleSidecarWrite({ id: 'asset-123' })).resolves.toBe(JobStatus.Failed);
expect(mocks.metadata.writeTags).not.toHaveBeenCalled();
});
it('should skip jobs with no metadata', async () => {
const asset = factory.jobAssets.sidecarWrite();
mocks.assetJob.getForSidecarWriteJob.mockResolvedValue(asset);
await expect(sut.handleSidecarWrite({ id: asset.id })).resolves.toBe(JobStatus.SKIPPED);
await expect(sut.handleSidecarWrite({ id: asset.id })).resolves.toBe(JobStatus.Skipped);
expect(mocks.metadata.writeTags).not.toHaveBeenCalled();
});
@@ -1629,7 +1629,7 @@ describe(MetadataService.name, () => {
longitude: gps,
dateTimeOriginal: date,
}),
).resolves.toBe(JobStatus.SUCCESS);
).resolves.toBe(JobStatus.Success);
expect(mocks.metadata.writeTags).toHaveBeenCalledWith(asset.sidecarPath, {
Description: description,
ImageDescription: description,

View File

@@ -126,7 +126,7 @@ type Dates = {
@Injectable()
export class MetadataService extends BaseService {
@OnEvent({ name: 'AppBootstrap', workers: [ImmichWorker.MICROSERVICES] })
@OnEvent({ name: 'AppBootstrap', workers: [ImmichWorker.Microservices] })
async onBootstrap() {
this.logger.log('Bootstrapping metadata service');
await this.init();
@@ -137,12 +137,12 @@ export class MetadataService extends BaseService {
await this.metadataRepository.teardown();
}
@OnEvent({ name: 'ConfigInit', workers: [ImmichWorker.MICROSERVICES] })
@OnEvent({ name: 'ConfigInit', workers: [ImmichWorker.Microservices] })
onConfigInit({ newConfig }: ArgOf<'ConfigInit'>) {
this.metadataRepository.setMaxConcurrency(newConfig.job.metadataExtraction.concurrency);
}
@OnEvent({ name: 'ConfigUpdate', workers: [ImmichWorker.MICROSERVICES], server: true })
@OnEvent({ name: 'ConfigUpdate', workers: [ImmichWorker.Microservices], server: true })
onConfigUpdate({ newConfig }: ArgOf<'ConfigUpdate'>) {
this.metadataRepository.setMaxConcurrency(newConfig.job.metadataExtraction.concurrency);
}
@@ -151,9 +151,9 @@ export class MetadataService extends BaseService {
this.logger.log('Initializing metadata service');
try {
await this.jobRepository.pause(QueueName.METADATA_EXTRACTION);
await this.jobRepository.pause(QueueName.MetadataExtraction);
await this.databaseRepository.withLock(DatabaseLock.GeodataImport, () => this.mapRepository.init());
await this.jobRepository.resume(QueueName.METADATA_EXTRACTION);
await this.jobRepository.resume(QueueName.MetadataExtraction);
this.logger.log(`Initialized local reverse geocoder`);
} catch (error: Error | any) {
@@ -170,7 +170,7 @@ export class MetadataService extends BaseService {
return;
}
const otherType = asset.type === AssetType.VIDEO ? AssetType.IMAGE : AssetType.VIDEO;
const otherType = asset.type === AssetType.Video ? AssetType.Image : AssetType.Video;
const match = await this.assetRepository.findLivePhotoMatch({
livePhotoCID: exifInfo.livePhotoCID,
ownerId: asset.ownerId,
@@ -183,23 +183,23 @@ export class MetadataService extends BaseService {
return;
}
const [photoAsset, motionAsset] = asset.type === AssetType.IMAGE ? [asset, match] : [match, asset];
const [photoAsset, motionAsset] = asset.type === AssetType.Image ? [asset, match] : [match, asset];
await Promise.all([
this.assetRepository.update({ id: photoAsset.id, livePhotoVideoId: motionAsset.id }),
this.assetRepository.update({ id: motionAsset.id, visibility: AssetVisibility.HIDDEN }),
this.assetRepository.update({ id: motionAsset.id, visibility: AssetVisibility.Hidden }),
this.albumRepository.removeAssetsFromAll([motionAsset.id]),
]);
await this.eventRepository.emit('AssetHide', { assetId: motionAsset.id, userId: motionAsset.ownerId });
}
@OnJob({ name: JobName.QUEUE_METADATA_EXTRACTION, queue: QueueName.METADATA_EXTRACTION })
async handleQueueMetadataExtraction(job: JobOf<JobName.QUEUE_METADATA_EXTRACTION>): Promise<JobStatus> {
@OnJob({ name: JobName.QueueMetadataExtraction, queue: QueueName.MetadataExtraction })
async handleQueueMetadataExtraction(job: JobOf<JobName.QueueMetadataExtraction>): Promise<JobStatus> {
const { force } = job;
let queue: { name: JobName.METADATA_EXTRACTION; data: { id: string } }[] = [];
let queue: { name: JobName.MetadataExtraction; data: { id: string } }[] = [];
for await (const asset of this.assetJobRepository.streamForMetadataExtraction(force)) {
queue.push({ name: JobName.METADATA_EXTRACTION, data: { id: asset.id } });
queue.push({ name: JobName.MetadataExtraction, data: { id: asset.id } });
if (queue.length >= JOBS_ASSET_PAGINATION_SIZE) {
await this.jobRepository.queueAll(queue);
@@ -208,11 +208,11 @@ export class MetadataService extends BaseService {
}
await this.jobRepository.queueAll(queue);
return JobStatus.SUCCESS;
return JobStatus.Success;
}
@OnJob({ name: JobName.METADATA_EXTRACTION, queue: QueueName.METADATA_EXTRACTION })
async handleMetadataExtraction(data: JobOf<JobName.METADATA_EXTRACTION>) {
@OnJob({ name: JobName.MetadataExtraction, queue: QueueName.MetadataExtraction })
async handleMetadataExtraction(data: JobOf<JobName.MetadataExtraction>) {
const [{ metadata, reverseGeocoding }, asset] = await Promise.all([
this.getConfig({ withCache: true }),
this.assetJobRepository.getForMetadataExtraction(data.id),
@@ -320,8 +320,8 @@ export class MetadataService extends BaseService {
});
}
@OnJob({ name: JobName.QUEUE_SIDECAR, queue: QueueName.SIDECAR })
async handleQueueSidecar({ force }: JobOf<JobName.QUEUE_SIDECAR>): Promise<JobStatus> {
@OnJob({ name: JobName.QueueSidecar, queue: QueueName.Sidecar })
async handleQueueSidecar({ force }: JobOf<JobName.QueueSidecar>): Promise<JobStatus> {
let jobs: JobItem[] = [];
const queueAll = async () => {
await this.jobRepository.queueAll(jobs);
@@ -330,7 +330,7 @@ export class MetadataService extends BaseService {
const assets = this.assetJobRepository.streamForSidecar(force);
for await (const asset of assets) {
jobs.push({ name: force ? JobName.SIDECAR_SYNC : JobName.SIDECAR_DISCOVERY, data: { id: asset.id } });
jobs.push({ name: force ? JobName.SidecarSync : JobName.SidecarDiscovery, data: { id: asset.id } });
if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
await queueAll();
}
@@ -338,35 +338,35 @@ export class MetadataService extends BaseService {
await queueAll();
return JobStatus.SUCCESS;
return JobStatus.Success;
}
@OnJob({ name: JobName.SIDECAR_SYNC, queue: QueueName.SIDECAR })
handleSidecarSync({ id }: JobOf<JobName.SIDECAR_SYNC>): Promise<JobStatus> {
@OnJob({ name: JobName.SidecarSync, queue: QueueName.Sidecar })
handleSidecarSync({ id }: JobOf<JobName.SidecarSync>): Promise<JobStatus> {
return this.processSidecar(id, true);
}
@OnJob({ name: JobName.SIDECAR_DISCOVERY, queue: QueueName.SIDECAR })
handleSidecarDiscovery({ id }: JobOf<JobName.SIDECAR_DISCOVERY>): Promise<JobStatus> {
@OnJob({ name: JobName.SidecarDiscovery, queue: QueueName.Sidecar })
handleSidecarDiscovery({ id }: JobOf<JobName.SidecarDiscovery>): Promise<JobStatus> {
return this.processSidecar(id, false);
}
@OnEvent({ name: 'AssetTag' })
async handleTagAsset({ assetId }: ArgOf<'AssetTag'>) {
await this.jobRepository.queue({ name: JobName.SIDECAR_WRITE, data: { id: assetId, tags: true } });
await this.jobRepository.queue({ name: JobName.SidecarWrite, data: { id: assetId, tags: true } });
}
@OnEvent({ name: 'AssetUntag' })
async handleUntagAsset({ assetId }: ArgOf<'AssetUntag'>) {
await this.jobRepository.queue({ name: JobName.SIDECAR_WRITE, data: { id: assetId, tags: true } });
await this.jobRepository.queue({ name: JobName.SidecarWrite, data: { id: assetId, tags: true } });
}
@OnJob({ name: JobName.SIDECAR_WRITE, queue: QueueName.SIDECAR })
async handleSidecarWrite(job: JobOf<JobName.SIDECAR_WRITE>): Promise<JobStatus> {
@OnJob({ name: JobName.SidecarWrite, queue: QueueName.Sidecar })
async handleSidecarWrite(job: JobOf<JobName.SidecarWrite>): Promise<JobStatus> {
const { id, description, dateTimeOriginal, latitude, longitude, rating, tags } = job;
const asset = await this.assetJobRepository.getForSidecarWriteJob(id);
if (!asset) {
return JobStatus.FAILED;
return JobStatus.Failed;
}
const tagsList = (asset.tags || []).map((tag) => tag.value);
@@ -386,7 +386,7 @@ export class MetadataService extends BaseService {
);
if (Object.keys(exif).length === 0) {
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
await this.metadataRepository.writeTags(sidecarPath, exif);
@@ -395,7 +395,7 @@ export class MetadataService extends BaseService {
await this.assetRepository.update({ id, sidecarPath });
}
return JobStatus.SUCCESS;
return JobStatus.Success;
}
private getImageDimensions(exifTags: ImmichTags): { width?: number; height?: number } {
@@ -416,7 +416,7 @@ export class MetadataService extends BaseService {
sidecarPath: string | null;
type: AssetType;
}): Promise<ImmichTags> {
if (!asset.sidecarPath && asset.type === AssetType.IMAGE) {
if (!asset.sidecarPath && asset.type === AssetType.Image) {
return this.metadataRepository.readTags(asset.originalPath);
}
@@ -431,7 +431,7 @@ export class MetadataService extends BaseService {
const [mediaTags, sidecarTags, videoTags] = await Promise.all([
this.metadataRepository.readTags(asset.originalPath),
asset.sidecarPath ? this.metadataRepository.readTags(asset.sidecarPath) : null,
asset.type === AssetType.VIDEO ? this.getVideoTags(asset.originalPath) : null,
asset.type === AssetType.Video ? this.getVideoTags(asset.originalPath) : null,
]);
// prefer dates from sidecar tags
@@ -488,7 +488,7 @@ export class MetadataService extends BaseService {
}
private isMotionPhoto(asset: { type: AssetType }, tags: ImmichTags): boolean {
return asset.type === AssetType.IMAGE && !!(tags.MotionPhoto || tags.MicroVideo);
return asset.type === AssetType.Image && !!(tags.MotionPhoto || tags.MicroVideo);
}
private async applyMotionPhotos(asset: Asset, tags: ImmichTags, dates: Dates, stats: Stats) {
@@ -558,10 +558,10 @@ export class MetadataService extends BaseService {
});
// Hide the motion photo video asset if it's not already hidden to prepare for linking
if (motionAsset.visibility === AssetVisibility.TIMELINE) {
if (motionAsset.visibility === AssetVisibility.Timeline) {
await this.assetRepository.update({
id: motionAsset.id,
visibility: AssetVisibility.HIDDEN,
visibility: AssetVisibility.Hidden,
});
this.logger.log(`Hid unlinked motion photo video asset (${motionAsset.id})`);
}
@@ -570,7 +570,7 @@ export class MetadataService extends BaseService {
motionAsset = await this.assetRepository.create({
id: motionAssetId,
libraryId: asset.libraryId,
type: AssetType.VIDEO,
type: AssetType.Video,
fileCreatedAt: dates.dateTimeOriginal,
fileModifiedAt: stats.mtime,
localDateTime: dates.localDateTime,
@@ -578,7 +578,7 @@ export class MetadataService extends BaseService {
ownerId: asset.ownerId,
originalPath: StorageCore.getAndroidMotionPath(asset, motionAssetId),
originalFileName: `${path.parse(asset.originalFileName).name}.mp4`,
visibility: AssetVisibility.HIDDEN,
visibility: AssetVisibility.Hidden,
deviceAssetId: 'NONE',
deviceId: 'NONE',
});
@@ -597,7 +597,7 @@ export class MetadataService extends BaseService {
// note asset.livePhotoVideoId is not motionAsset.id yet
if (asset.livePhotoVideoId) {
await this.jobRepository.queue({
name: JobName.ASSET_DELETION,
name: JobName.AssetDeletion,
data: { id: asset.livePhotoVideoId, deleteOnDisk: true },
});
this.logger.log(`Removed old motion photo video asset (${asset.livePhotoVideoId})`);
@@ -612,7 +612,7 @@ export class MetadataService extends BaseService {
this.logger.log(`Wrote motion photo video to ${motionAsset.originalPath}`);
await this.handleMetadataExtraction({ id: motionAsset.id });
await this.jobRepository.queue({ name: JobName.VIDEO_CONVERSION, data: { id: motionAsset.id } });
await this.jobRepository.queue({ name: JobName.VideoConversion, data: { id: motionAsset.id } });
}
this.logger.debug(`Finished motion photo video extraction for asset ${asset.id}: ${asset.originalPath}`);
@@ -740,7 +740,7 @@ export class MetadataService extends BaseService {
boundingBoxY1: Math.floor((region.Area.Y - region.Area.H / 2) * imageHeight),
boundingBoxX2: Math.floor((region.Area.X + region.Area.W / 2) * imageWidth),
boundingBoxY2: Math.floor((region.Area.Y + region.Area.H / 2) * imageHeight),
sourceType: SourceType.EXIF,
sourceType: SourceType.Exif,
};
facesToAdd.push(face);
@@ -753,11 +753,11 @@ export class MetadataService extends BaseService {
if (missing.length > 0) {
this.logger.debugFn(() => `Creating missing persons: ${missing.map((p) => `${p.name}/${p.id}`)}`);
const newPersonIds = await this.personRepository.createAll(missing);
const jobs = newPersonIds.map((id) => ({ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id } }) as const);
const jobs = newPersonIds.map((id) => ({ name: JobName.GeneratePersonThumbnail, data: { id } }) as const);
await this.jobRepository.queueAll(jobs);
}
const facesToRemove = asset.faces.filter((face) => face.sourceType === SourceType.EXIF).map((face) => face.id);
const facesToRemove = asset.faces.filter((face) => face.sourceType === SourceType.Exif).map((face) => face.id);
if (facesToRemove.length > 0) {
this.logger.debug(`Removing ${facesToRemove.length} faces for asset ${asset.id}: ${asset.originalPath}`);
}
@@ -894,15 +894,15 @@ export class MetadataService extends BaseService {
const [asset] = await this.assetRepository.getByIds([id]);
if (!asset) {
return JobStatus.FAILED;
return JobStatus.Failed;
}
if (isSync && !asset.sidecarPath) {
return JobStatus.FAILED;
return JobStatus.Failed;
}
if (!isSync && (asset.visibility === AssetVisibility.HIDDEN || asset.sidecarPath) && !asset.isExternal) {
return JobStatus.FAILED;
if (!isSync && (asset.visibility === AssetVisibility.Hidden || asset.sidecarPath) && !asset.isExternal) {
return JobStatus.Failed;
}
// XMP sidecars can come in two filename formats. For a photo named photo.ext, the filenames are photo.ext.xmp and photo.xmp
@@ -927,22 +927,22 @@ export class MetadataService extends BaseService {
if (sidecarPath !== asset.sidecarPath) {
await this.assetRepository.update({ id: asset.id, sidecarPath });
}
return JobStatus.SUCCESS;
return JobStatus.Success;
}
if (sidecarPath) {
this.logger.debug(`Detected sidecar at '${sidecarPath}' for asset ${asset.id}: ${asset.originalPath}`);
await this.assetRepository.update({ id: asset.id, sidecarPath });
return JobStatus.SUCCESS;
return JobStatus.Success;
}
if (!isSync) {
return JobStatus.FAILED;
return JobStatus.Failed;
}
this.logger.debug(`No sidecar found for asset ${asset.id}: ${asset.originalPath}`);
await this.assetRepository.update({ id: asset.id, sidecarPath: null });
return JobStatus.SUCCESS;
return JobStatus.Success;
}
}

View File

@@ -131,7 +131,7 @@ describe(NotificationService.name, () => {
it('should queue the generate thumbnail job', async () => {
await sut.onAssetShow({ assetId: 'asset-id', userId: 'user-id' });
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.GENERATE_THUMBNAILS,
name: JobName.GenerateThumbnails,
data: { id: 'asset-id', notify: true },
});
});
@@ -146,7 +146,7 @@ describe(NotificationService.name, () => {
it('should queue notify signup event if notify is true', async () => {
await sut.onUserSignup({ id: '', notify: true });
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.NOTIFY_SIGNUP,
name: JobName.NotifySignup,
data: { id: '', tempPassword: undefined },
});
});
@@ -156,7 +156,7 @@ describe(NotificationService.name, () => {
it('should queue notify album update event', async () => {
await sut.onAlbumUpdate({ id: 'album', recipientId: '42' });
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.NOTIFY_ALBUM_UPDATE,
name: JobName.NotifyAlbumUpdate,
data: { id: 'album', recipientId: '42', delay: 300_000 },
});
});
@@ -166,7 +166,7 @@ describe(NotificationService.name, () => {
it('should queue notify album invite event', async () => {
await sut.onAlbumInvite({ id: '', userId: '42' });
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.NOTIFY_ALBUM_INVITE,
name: JobName.NotifyAlbumInvite,
data: { id: '', recipientId: '42' },
});
});
@@ -242,7 +242,7 @@ describe(NotificationService.name, () => {
describe('handleUserSignup', () => {
it('should skip if user could not be found', async () => {
await expect(sut.handleUserSignup({ id: '' })).resolves.toBe(JobStatus.SKIPPED);
await expect(sut.handleUserSignup({ id: '' })).resolves.toBe(JobStatus.Skipped);
});
it('should be successful', async () => {
@@ -250,9 +250,9 @@ describe(NotificationService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({ server: {} });
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
await expect(sut.handleUserSignup({ id: '' })).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleUserSignup({ id: '' })).resolves.toBe(JobStatus.Success);
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.SEND_EMAIL,
name: JobName.SendMail,
data: expect.objectContaining({ subject: 'Welcome to Immich' }),
});
});
@@ -260,14 +260,14 @@ describe(NotificationService.name, () => {
describe('handleAlbumInvite', () => {
it('should skip if album could not be found', async () => {
await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.SKIPPED);
await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.Skipped);
expect(mocks.user.get).not.toHaveBeenCalled();
});
it('should skip if recipient could not be found', async () => {
mocks.album.getById.mockResolvedValue(albumStub.empty);
await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.SKIPPED);
await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.Skipped);
expect(mocks.job.queue).not.toHaveBeenCalled();
});
@@ -277,13 +277,13 @@ describe(NotificationService.name, () => {
...userStub.user1,
metadata: [
{
key: UserMetadataKey.PREFERENCES,
key: UserMetadataKey.Preferences,
value: { emailNotifications: { enabled: false, albumInvite: true } },
},
],
});
await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.SKIPPED);
await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.Skipped);
});
it('should skip if the recipient has email notifications for album invite disabled', async () => {
@@ -292,13 +292,13 @@ describe(NotificationService.name, () => {
...userStub.user1,
metadata: [
{
key: UserMetadataKey.PREFERENCES,
key: UserMetadataKey.Preferences,
value: { emailNotifications: { enabled: true, albumInvite: false } },
},
],
});
await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.SKIPPED);
await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.Skipped);
});
it('should send invite email', async () => {
@@ -307,7 +307,7 @@ describe(NotificationService.name, () => {
...userStub.user1,
metadata: [
{
key: UserMetadataKey.PREFERENCES,
key: UserMetadataKey.Preferences,
value: { emailNotifications: { enabled: true, albumInvite: true } },
},
],
@@ -315,9 +315,9 @@ describe(NotificationService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({ server: {} });
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.Success);
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.SEND_EMAIL,
name: JobName.SendMail,
data: expect.objectContaining({ subject: expect.stringContaining('You have been added to a shared album') }),
});
});
@@ -328,7 +328,7 @@ describe(NotificationService.name, () => {
...userStub.user1,
metadata: [
{
key: UserMetadataKey.PREFERENCES,
key: UserMetadataKey.Preferences,
value: { emailNotifications: { enabled: true, albumInvite: true } },
},
],
@@ -337,13 +337,13 @@ describe(NotificationService.name, () => {
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([]);
await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.Success);
expect(mocks.assetJob.getAlbumThumbnailFiles).toHaveBeenCalledWith(
albumStub.emptyWithValidThumbnail.albumThumbnailAssetId,
AssetFileType.THUMBNAIL,
AssetFileType.Thumbnail,
);
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.SEND_EMAIL,
name: JobName.SendMail,
data: expect.objectContaining({
subject: expect.stringContaining('You have been added to a shared album'),
imageAttachments: undefined,
@@ -357,7 +357,7 @@ describe(NotificationService.name, () => {
...userStub.user1,
metadata: [
{
key: UserMetadataKey.PREFERENCES,
key: UserMetadataKey.Preferences,
value: { emailNotifications: { enabled: true, albumInvite: true } },
},
],
@@ -365,16 +365,16 @@ describe(NotificationService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({ server: {} });
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([
{ id: '1', type: AssetFileType.THUMBNAIL, path: 'path-to-thumb.jpg' },
{ id: '1', type: AssetFileType.Thumbnail, path: 'path-to-thumb.jpg' },
]);
await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.Success);
expect(mocks.assetJob.getAlbumThumbnailFiles).toHaveBeenCalledWith(
albumStub.emptyWithValidThumbnail.albumThumbnailAssetId,
AssetFileType.THUMBNAIL,
AssetFileType.Thumbnail,
);
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.SEND_EMAIL,
name: JobName.SendMail,
data: expect.objectContaining({
subject: expect.stringContaining('You have been added to a shared album'),
imageAttachments: [{ filename: 'album-thumbnail.jpg', path: expect.anything(), cid: expect.anything() }],
@@ -388,7 +388,7 @@ describe(NotificationService.name, () => {
...userStub.user1,
metadata: [
{
key: UserMetadataKey.PREFERENCES,
key: UserMetadataKey.Preferences,
value: { emailNotifications: { enabled: true, albumInvite: true } },
},
],
@@ -397,13 +397,13 @@ describe(NotificationService.name, () => {
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([assetStub.image.files[2]]);
await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.Success);
expect(mocks.assetJob.getAlbumThumbnailFiles).toHaveBeenCalledWith(
albumStub.emptyWithValidThumbnail.albumThumbnailAssetId,
AssetFileType.THUMBNAIL,
AssetFileType.Thumbnail,
);
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.SEND_EMAIL,
name: JobName.SendMail,
data: expect.objectContaining({
subject: expect.stringContaining('You have been added to a shared album'),
imageAttachments: [{ filename: 'album-thumbnail.ext', path: expect.anything(), cid: expect.anything() }],
@@ -414,14 +414,14 @@ describe(NotificationService.name, () => {
describe('handleAlbumUpdate', () => {
it('should skip if album could not be found', async () => {
await expect(sut.handleAlbumUpdate({ id: '', recipientId: '1' })).resolves.toBe(JobStatus.SKIPPED);
await expect(sut.handleAlbumUpdate({ id: '', recipientId: '1' })).resolves.toBe(JobStatus.Skipped);
expect(mocks.user.get).not.toHaveBeenCalled();
});
it('should skip if owner could not be found', async () => {
mocks.album.getById.mockResolvedValue(albumStub.emptyWithValidThumbnail);
await expect(sut.handleAlbumUpdate({ id: '', recipientId: '1' })).resolves.toBe(JobStatus.SKIPPED);
await expect(sut.handleAlbumUpdate({ id: '', recipientId: '1' })).resolves.toBe(JobStatus.Skipped);
expect(mocks.systemMetadata.get).not.toHaveBeenCalled();
});
@@ -448,7 +448,7 @@ describe(NotificationService.name, () => {
...userStub.user1,
metadata: [
{
key: UserMetadataKey.PREFERENCES,
key: UserMetadataKey.Preferences,
value: { emailNotifications: { enabled: false, albumUpdate: true } },
},
],
@@ -470,7 +470,7 @@ describe(NotificationService.name, () => {
...userStub.user1,
metadata: [
{
key: UserMetadataKey.PREFERENCES,
key: UserMetadataKey.Preferences,
value: { emailNotifications: { enabled: true, albumUpdate: false } },
},
],
@@ -500,9 +500,9 @@ describe(NotificationService.name, () => {
it('should add new recipients for new images if job is already queued', async () => {
await sut.onAlbumUpdate({ id: '1', recipientId: '2' } as INotifyAlbumUpdateJob);
expect(mocks.job.removeJob).toHaveBeenCalledWith(JobName.NOTIFY_ALBUM_UPDATE, '1/2');
expect(mocks.job.removeJob).toHaveBeenCalledWith(JobName.NotifyAlbumUpdate, '1/2');
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.NOTIFY_ALBUM_UPDATE,
name: JobName.NotifyAlbumUpdate,
data: {
id: '1',
delay: 300_000,
@@ -515,7 +515,7 @@ describe(NotificationService.name, () => {
describe('handleSendEmail', () => {
it('should skip if smtp notifications are disabled', async () => {
mocks.systemMetadata.get.mockResolvedValue({ notifications: { smtp: { enabled: false } } });
await expect(sut.handleSendEmail({ html: '', subject: '', text: '', to: '' })).resolves.toBe(JobStatus.SKIPPED);
await expect(sut.handleSendEmail({ html: '', subject: '', text: '', to: '' })).resolves.toBe(JobStatus.Skipped);
});
it('should send mail successfully', async () => {
@@ -524,7 +524,7 @@ describe(NotificationService.name, () => {
});
mocks.email.sendEmail.mockResolvedValue({ messageId: '', response: '' });
await expect(sut.handleSendEmail({ html: '', subject: '', text: '', to: '' })).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleSendEmail({ html: '', subject: '', text: '', to: '' })).resolves.toBe(JobStatus.Success);
expect(mocks.email.sendEmail).toHaveBeenCalledWith(expect.objectContaining({ replyTo: 'test@immich.app' }));
});
@@ -534,7 +534,7 @@ describe(NotificationService.name, () => {
});
mocks.email.sendEmail.mockResolvedValue({ messageId: '', response: '' });
await expect(sut.handleSendEmail({ html: '', subject: '', text: '', to: '' })).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleSendEmail({ html: '', subject: '', text: '', to: '' })).resolves.toBe(JobStatus.Success);
expect(mocks.email.sendEmail).toHaveBeenCalledWith(expect.objectContaining({ replyTo: 'demo@immich.app' }));
});
});

View File

@@ -39,19 +39,19 @@ export class NotificationService extends BaseService {
}
async updateAll(auth: AuthDto, dto: NotificationUpdateAllDto) {
await this.requireAccess({ auth, ids: dto.ids, permission: Permission.NOTIFICATION_UPDATE });
await this.requireAccess({ auth, ids: dto.ids, permission: Permission.NotificationUpdate });
await this.notificationRepository.updateAll(dto.ids, {
readAt: dto.readAt,
});
}
async deleteAll(auth: AuthDto, dto: NotificationDeleteAllDto) {
await this.requireAccess({ auth, ids: dto.ids, permission: Permission.NOTIFICATION_DELETE });
await this.requireAccess({ auth, ids: dto.ids, permission: Permission.NotificationDelete });
await this.notificationRepository.deleteAll(dto.ids);
}
async get(auth: AuthDto, id: string) {
await this.requireAccess({ auth, ids: [id], permission: Permission.NOTIFICATION_READ });
await this.requireAccess({ auth, ids: [id], permission: Permission.NotificationRead });
const item = await this.notificationRepository.get(id);
if (!item) {
throw new BadRequestException('Notification not found');
@@ -60,7 +60,7 @@ export class NotificationService extends BaseService {
}
async update(auth: AuthDto, id: string, dto: NotificationUpdateDto) {
await this.requireAccess({ auth, ids: [id], permission: Permission.NOTIFICATION_UPDATE });
await this.requireAccess({ auth, ids: [id], permission: Permission.NotificationUpdate });
const item = await this.notificationRepository.update(id, {
readAt: dto.readAt,
});
@@ -68,11 +68,11 @@ export class NotificationService extends BaseService {
}
async delete(auth: AuthDto, id: string) {
await this.requireAccess({ auth, ids: [id], permission: Permission.NOTIFICATION_DELETE });
await this.requireAccess({ auth, ids: [id], permission: Permission.NotificationDelete });
await this.notificationRepository.delete(id);
}
@OnJob({ name: JobName.NOTIFICATIONS_CLEANUP, queue: QueueName.BACKGROUND_TASK })
@OnJob({ name: JobName.NotificationsCleanup, queue: QueueName.BackgroundTask })
async onNotificationsCleanup() {
await this.notificationRepository.cleanup();
}
@@ -87,7 +87,7 @@ export class NotificationService extends BaseService {
this.logger.error(`Unable to run job handler (${job.name}): ${error}`, error?.stack, JSON.stringify(job.data));
switch (job.name) {
case JobName.BACKUP_DATABASE: {
case JobName.BackupDatabase: {
const errorMessage = error instanceof Error ? error.message : error;
const item = await this.notificationRepository.create({
userId: admin.id,
@@ -135,7 +135,7 @@ export class NotificationService extends BaseService {
@OnEvent({ name: 'AssetShow' })
async onAssetShow({ assetId }: ArgOf<'AssetShow'>) {
await this.jobRepository.queue({ name: JobName.GENERATE_THUMBNAILS, data: { id: assetId, notify: true } });
await this.jobRepository.queue({ name: JobName.GenerateThumbnails, data: { id: assetId, notify: true } });
}
@OnEvent({ name: 'AssetTrash' })
@@ -193,22 +193,22 @@ export class NotificationService extends BaseService {
@OnEvent({ name: 'UserSignup' })
async onUserSignup({ notify, id, tempPassword }: ArgOf<'UserSignup'>) {
if (notify) {
await this.jobRepository.queue({ name: JobName.NOTIFY_SIGNUP, data: { id, tempPassword } });
await this.jobRepository.queue({ name: JobName.NotifySignup, data: { id, tempPassword } });
}
}
@OnEvent({ name: 'AlbumUpdate' })
async onAlbumUpdate({ id, recipientId }: ArgOf<'AlbumUpdate'>) {
await this.jobRepository.removeJob(JobName.NOTIFY_ALBUM_UPDATE, `${id}/${recipientId}`);
await this.jobRepository.removeJob(JobName.NotifyAlbumUpdate, `${id}/${recipientId}`);
await this.jobRepository.queue({
name: JobName.NOTIFY_ALBUM_UPDATE,
name: JobName.NotifyAlbumUpdate,
data: { id, recipientId, delay: NotificationService.albumUpdateEmailDelayMs },
});
}
@OnEvent({ name: 'AlbumInvite' })
async onAlbumInvite({ id, userId }: ArgOf<'AlbumInvite'>) {
await this.jobRepository.queue({ name: JobName.NOTIFY_ALBUM_INVITE, data: { id, recipientId: userId } });
await this.jobRepository.queue({ name: JobName.NotifyAlbumInvite, data: { id, recipientId: userId } });
}
@OnEvent({ name: 'SessionDelete' })
@@ -313,11 +313,11 @@ export class NotificationService extends BaseService {
return { name, html: templateResponse };
}
@OnJob({ name: JobName.NOTIFY_SIGNUP, queue: QueueName.NOTIFICATION })
async handleUserSignup({ id, tempPassword }: JobOf<JobName.NOTIFY_SIGNUP>) {
@OnJob({ name: JobName.NotifySignup, queue: QueueName.Notification })
async handleUserSignup({ id, tempPassword }: JobOf<JobName.NotifySignup>) {
const user = await this.userRepository.get(id, { withDeleted: false });
if (!user) {
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
const { server, templates } = await this.getConfig({ withCache: true });
@@ -333,7 +333,7 @@ export class NotificationService extends BaseService {
});
await this.jobRepository.queue({
name: JobName.SEND_EMAIL,
name: JobName.SendMail,
data: {
to: user.email,
subject: 'Welcome to Immich',
@@ -342,25 +342,25 @@ export class NotificationService extends BaseService {
},
});
return JobStatus.SUCCESS;
return JobStatus.Success;
}
@OnJob({ name: JobName.NOTIFY_ALBUM_INVITE, queue: QueueName.NOTIFICATION })
async handleAlbumInvite({ id, recipientId }: JobOf<JobName.NOTIFY_ALBUM_INVITE>) {
@OnJob({ name: JobName.NotifyAlbumInvite, queue: QueueName.Notification })
async handleAlbumInvite({ id, recipientId }: JobOf<JobName.NotifyAlbumInvite>) {
const album = await this.albumRepository.getById(id, { withAssets: false });
if (!album) {
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
const recipient = await this.userRepository.get(recipientId, { withDeleted: false });
if (!recipient) {
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
const { emailNotifications } = getPreferences(recipient.metadata);
if (!emailNotifications.enabled || !emailNotifications.albumInvite) {
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
const attachment = await this.getAlbumThumbnailAttachment(album);
@@ -380,7 +380,7 @@ export class NotificationService extends BaseService {
});
await this.jobRepository.queue({
name: JobName.SEND_EMAIL,
name: JobName.SendMail,
data: {
to: recipient.email,
subject: `You have been added to a shared album - ${album.albumName}`,
@@ -390,20 +390,20 @@ export class NotificationService extends BaseService {
},
});
return JobStatus.SUCCESS;
return JobStatus.Success;
}
@OnJob({ name: JobName.NOTIFY_ALBUM_UPDATE, queue: QueueName.NOTIFICATION })
async handleAlbumUpdate({ id, recipientId }: JobOf<JobName.NOTIFY_ALBUM_UPDATE>) {
@OnJob({ name: JobName.NotifyAlbumUpdate, queue: QueueName.Notification })
async handleAlbumUpdate({ id, recipientId }: JobOf<JobName.NotifyAlbumUpdate>) {
const album = await this.albumRepository.getById(id, { withAssets: false });
if (!album) {
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
const owner = await this.userRepository.get(album.ownerId, { withDeleted: false });
if (!owner) {
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
const attachment = await this.getAlbumThumbnailAttachment(album);
@@ -412,13 +412,13 @@ export class NotificationService extends BaseService {
const user = await this.userRepository.get(recipientId, { withDeleted: false });
if (!user) {
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
const { emailNotifications } = getPreferences(user.metadata);
if (!emailNotifications.enabled || !emailNotifications.albumUpdate) {
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
const { html, text } = await this.emailRepository.renderEmail({
@@ -434,7 +434,7 @@ export class NotificationService extends BaseService {
});
await this.jobRepository.queue({
name: JobName.SEND_EMAIL,
name: JobName.SendMail,
data: {
to: user.email,
subject: `New media has been added to an album - ${album.albumName}`,
@@ -444,14 +444,14 @@ export class NotificationService extends BaseService {
},
});
return JobStatus.SUCCESS;
return JobStatus.Success;
}
@OnJob({ name: JobName.SEND_EMAIL, queue: QueueName.NOTIFICATION })
async handleSendEmail(data: JobOf<JobName.SEND_EMAIL>): Promise<JobStatus> {
@OnJob({ name: JobName.SendMail, queue: QueueName.Notification })
async handleSendEmail(data: JobOf<JobName.SendMail>): Promise<JobStatus> {
const { notifications } = await this.getConfig({ withCache: false });
if (!notifications.smtp.enabled) {
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
const { to, subject, html, text: plain } = data;
@@ -468,7 +468,7 @@ export class NotificationService extends BaseService {
this.logger.log(`Sent mail with id: ${response.messageId} status: ${response.response}`);
return JobStatus.SUCCESS;
return JobStatus.Success;
}
private async getAlbumThumbnailAttachment(album: {
@@ -480,7 +480,7 @@ export class NotificationService extends BaseService {
const albumThumbnailFiles = await this.assetJobRepository.getAlbumThumbnailFiles(
album.albumThumbnailAssetId,
AssetFileType.THUMBNAIL,
AssetFileType.Thumbnail,
);
if (albumThumbnailFiles.length !== 1) {

View File

@@ -40,7 +40,7 @@ export class PartnerService extends BaseService {
}
async update(auth: AuthDto, sharedById: string, dto: UpdatePartnerDto): Promise<PartnerResponseDto> {
await this.requireAccess({ auth, permission: Permission.PARTNER_UPDATE, ids: [sharedById] });
await this.requireAccess({ auth, permission: Permission.PartnerUpdate, ids: [sharedById] });
const partnerId: PartnerIds = { sharedById, sharedWithId: auth.user.id };
const entity = await this.partnerRepository.update(partnerId, { inTimeline: dto.inTimeline });

View File

@@ -182,7 +182,7 @@ describe(PersonService.name, () => {
new ImmichFileResponse({
path: '/path/to/thumbnail.jpg',
contentType: 'image/jpeg',
cacheControl: CacheControl.PRIVATE_WITHOUT_CACHE,
cacheControl: CacheControl.PrivateWithoutCache,
}),
);
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['person-1']));
@@ -276,7 +276,7 @@ describe(PersonService.name, () => {
},
]);
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.GENERATE_PERSON_THUMBNAIL,
name: JobName.GeneratePersonThumbnail,
data: { id: 'person-1' },
});
expect(mocks.access.person.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['person-1']));
@@ -337,7 +337,7 @@ describe(PersonService.name, () => {
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.GENERATE_PERSON_THUMBNAIL,
name: JobName.GeneratePersonThumbnail,
data: { id: personStub.newThumbnail.id },
},
]);
@@ -346,7 +346,7 @@ describe(PersonService.name, () => {
describe('handlePersonMigration', () => {
it('should not move person files', async () => {
await expect(sut.handlePersonMigration(personStub.noName)).resolves.toBe(JobStatus.FAILED);
await expect(sut.handlePersonMigration(personStub.noName)).resolves.toBe(JobStatus.Failed);
});
});
@@ -373,7 +373,7 @@ describe(PersonService.name, () => {
await sut.createNewFeaturePhoto([personStub.newThumbnail.id]);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.GENERATE_PERSON_THUMBNAIL,
name: JobName.GeneratePersonThumbnail,
data: { id: personStub.newThumbnail.id },
},
]);
@@ -447,7 +447,7 @@ describe(PersonService.name, () => {
it('should skip if machine learning is disabled', async () => {
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.machineLearningDisabled);
await expect(sut.handleQueueDetectFaces({})).resolves.toBe(JobStatus.SKIPPED);
await expect(sut.handleQueueDetectFaces({})).resolves.toBe(JobStatus.Skipped);
expect(mocks.job.queue).not.toHaveBeenCalled();
expect(mocks.job.queueAll).not.toHaveBeenCalled();
expect(mocks.systemMetadata.get).toHaveBeenCalled();
@@ -462,7 +462,7 @@ describe(PersonService.name, () => {
expect(mocks.person.vacuum).not.toHaveBeenCalled();
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.FACE_DETECTION,
name: JobName.FaceDetection,
data: { id: assetStub.image.id },
},
]);
@@ -474,14 +474,14 @@ describe(PersonService.name, () => {
await sut.handleQueueDetectFaces({ force: true });
expect(mocks.person.deleteFaces).toHaveBeenCalledWith({ sourceType: SourceType.MACHINE_LEARNING });
expect(mocks.person.deleteFaces).toHaveBeenCalledWith({ sourceType: SourceType.MachineLearning });
expect(mocks.person.delete).toHaveBeenCalledWith([personStub.withName.id]);
expect(mocks.person.vacuum).toHaveBeenCalledWith({ reindexVectors: true });
expect(mocks.storage.unlink).toHaveBeenCalledWith(personStub.withName.thumbnailPath);
expect(mocks.assetJob.streamForDetectFacesJob).toHaveBeenCalledWith(true);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.FACE_DETECTION,
name: JobName.FaceDetection,
data: { id: assetStub.image.id },
},
]);
@@ -499,11 +499,11 @@ describe(PersonService.name, () => {
expect(mocks.assetJob.streamForDetectFacesJob).toHaveBeenCalledWith(undefined);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.FACE_DETECTION,
name: JobName.FaceDetection,
data: { id: assetStub.image.id },
},
]);
expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.PERSON_CLEANUP });
expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.PersonCleanup });
});
it('should delete existing people and faces if forced', async () => {
@@ -518,7 +518,7 @@ describe(PersonService.name, () => {
expect(mocks.assetJob.streamForDetectFacesJob).toHaveBeenCalledWith(true);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.FACE_DETECTION,
name: JobName.FaceDetection,
data: { id: assetStub.image.id },
},
]);
@@ -540,7 +540,7 @@ describe(PersonService.name, () => {
});
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.machineLearningDisabled);
await expect(sut.handleQueueRecognizeFaces({})).resolves.toBe(JobStatus.SKIPPED);
await expect(sut.handleQueueRecognizeFaces({})).resolves.toBe(JobStatus.Skipped);
expect(mocks.job.queueAll).not.toHaveBeenCalled();
expect(mocks.systemMetadata.get).toHaveBeenCalled();
expect(mocks.systemMetadata.set).not.toHaveBeenCalled();
@@ -556,7 +556,7 @@ describe(PersonService.name, () => {
delayed: 0,
});
await expect(sut.handleQueueRecognizeFaces({})).resolves.toBe(JobStatus.SKIPPED);
await expect(sut.handleQueueRecognizeFaces({})).resolves.toBe(JobStatus.Skipped);
expect(mocks.job.queueAll).not.toHaveBeenCalled();
expect(mocks.systemMetadata.set).not.toHaveBeenCalled();
});
@@ -577,15 +577,15 @@ describe(PersonService.name, () => {
expect(mocks.person.getAllFaces).toHaveBeenCalledWith({
personId: null,
sourceType: SourceType.MACHINE_LEARNING,
sourceType: SourceType.MachineLearning,
});
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.FACIAL_RECOGNITION,
name: JobName.FacialRecognition,
data: { id: faceStub.face1.id, deferred: false },
},
]);
expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.FACIAL_RECOGNITION_STATE, {
expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.FacialRecognitionState, {
lastRun: expect.any(String),
});
expect(mocks.person.vacuum).not.toHaveBeenCalled();
@@ -609,11 +609,11 @@ describe(PersonService.name, () => {
expect(mocks.person.getAllFaces).toHaveBeenCalledWith(undefined);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.FACIAL_RECOGNITION,
name: JobName.FacialRecognition,
data: { id: faceStub.face1.id, deferred: false },
},
]);
expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.FACIAL_RECOGNITION_STATE, {
expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.FacialRecognitionState, {
lastRun: expect.any(String),
});
expect(mocks.person.vacuum).toHaveBeenCalledWith({ reindexVectors: false });
@@ -637,19 +637,19 @@ describe(PersonService.name, () => {
await sut.handleQueueRecognizeFaces({ force: false, nightly: true });
expect(mocks.systemMetadata.get).toHaveBeenCalledWith(SystemMetadataKey.FACIAL_RECOGNITION_STATE);
expect(mocks.systemMetadata.get).toHaveBeenCalledWith(SystemMetadataKey.FacialRecognitionState);
expect(mocks.person.getLatestFaceDate).toHaveBeenCalledOnce();
expect(mocks.person.getAllFaces).toHaveBeenCalledWith({
personId: null,
sourceType: SourceType.MACHINE_LEARNING,
sourceType: SourceType.MachineLearning,
});
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.FACIAL_RECOGNITION,
name: JobName.FacialRecognition,
data: { id: faceStub.face1.id, deferred: false },
},
]);
expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.FACIAL_RECOGNITION_STATE, {
expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.FacialRecognitionState, {
lastRun: expect.any(String),
});
expect(mocks.person.vacuum).not.toHaveBeenCalled();
@@ -665,7 +665,7 @@ describe(PersonService.name, () => {
await sut.handleQueueRecognizeFaces({ force: true, nightly: true });
expect(mocks.systemMetadata.get).toHaveBeenCalledWith(SystemMetadataKey.FACIAL_RECOGNITION_STATE);
expect(mocks.systemMetadata.get).toHaveBeenCalledWith(SystemMetadataKey.FacialRecognitionState);
expect(mocks.person.getLatestFaceDate).toHaveBeenCalledOnce();
expect(mocks.person.getAllFaces).not.toHaveBeenCalled();
expect(mocks.job.queueAll).not.toHaveBeenCalled();
@@ -690,10 +690,10 @@ describe(PersonService.name, () => {
await sut.handleQueueRecognizeFaces({ force: true });
expect(mocks.person.deleteFaces).not.toHaveBeenCalled();
expect(mocks.person.unassignFaces).toHaveBeenCalledWith({ sourceType: SourceType.MACHINE_LEARNING });
expect(mocks.person.unassignFaces).toHaveBeenCalledWith({ sourceType: SourceType.MachineLearning });
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.FACIAL_RECOGNITION,
name: JobName.FacialRecognition,
data: { id: faceStub.face1.id, deferred: false },
},
]);
@@ -711,7 +711,7 @@ describe(PersonService.name, () => {
it('should skip if machine learning is disabled', async () => {
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.machineLearningDisabled);
await expect(sut.handleDetectFaces({ id: 'foo' })).resolves.toBe(JobStatus.SKIPPED);
await expect(sut.handleDetectFaces({ id: 'foo' })).resolves.toBe(JobStatus.Skipped);
expect(mocks.asset.getByIds).not.toHaveBeenCalled();
expect(mocks.systemMetadata.get).toHaveBeenCalled();
});
@@ -754,8 +754,8 @@ describe(PersonService.name, () => {
expect(mocks.person.refreshFaces).toHaveBeenCalledWith([face], [], [faceSearch]);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{ name: JobName.QUEUE_FACIAL_RECOGNITION, data: { force: false } },
{ name: JobName.FACIAL_RECOGNITION, data: { id: faceId } },
{ name: JobName.QueueFacialRecognition, data: { force: false } },
{ name: JobName.FacialRecognition, data: { id: faceId } },
]);
expect(mocks.person.reassignFace).not.toHaveBeenCalled();
expect(mocks.person.reassignFaces).not.toHaveBeenCalled();
@@ -790,8 +790,8 @@ describe(PersonService.name, () => {
expect(mocks.person.refreshFaces).toHaveBeenCalledWith([face], [faceStub.primaryFace1.id], [faceSearch]);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{ name: JobName.QUEUE_FACIAL_RECOGNITION, data: { force: false } },
{ name: JobName.FACIAL_RECOGNITION, data: { id: faceId } },
{ name: JobName.QueueFacialRecognition, data: { force: false } },
{ name: JobName.FacialRecognition, data: { id: faceId } },
]);
expect(mocks.person.reassignFace).not.toHaveBeenCalled();
expect(mocks.person.reassignFaces).not.toHaveBeenCalled();
@@ -830,8 +830,8 @@ describe(PersonService.name, () => {
expect(mocks.person.refreshFaces).toHaveBeenCalledWith([face], [], [faceSearch]);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{ name: JobName.QUEUE_FACIAL_RECOGNITION, data: { force: false } },
{ name: JobName.FACIAL_RECOGNITION, data: { id: faceId } },
{ name: JobName.QueueFacialRecognition, data: { force: false } },
{ name: JobName.FacialRecognition, data: { id: faceId } },
]);
expect(mocks.person.reassignFace).not.toHaveBeenCalled();
expect(mocks.person.reassignFaces).not.toHaveBeenCalled();
@@ -840,7 +840,7 @@ describe(PersonService.name, () => {
describe('handleRecognizeFaces', () => {
it('should fail if face does not exist', async () => {
expect(await sut.handleRecognizeFaces({ id: faceStub.face1.id })).toBe(JobStatus.FAILED);
expect(await sut.handleRecognizeFaces({ id: faceStub.face1.id })).toBe(JobStatus.Failed);
expect(mocks.person.reassignFaces).not.toHaveBeenCalled();
expect(mocks.person.create).not.toHaveBeenCalled();
@@ -850,7 +850,7 @@ describe(PersonService.name, () => {
const face = { ...faceStub.face1, asset: null };
mocks.person.getFaceForFacialRecognitionJob.mockResolvedValue(face);
expect(await sut.handleRecognizeFaces({ id: faceStub.face1.id })).toBe(JobStatus.FAILED);
expect(await sut.handleRecognizeFaces({ id: faceStub.face1.id })).toBe(JobStatus.Failed);
expect(mocks.person.reassignFaces).not.toHaveBeenCalled();
expect(mocks.person.create).not.toHaveBeenCalled();
@@ -859,7 +859,7 @@ describe(PersonService.name, () => {
it('should skip if face already has an assigned person', async () => {
mocks.person.getFaceForFacialRecognitionJob.mockResolvedValue(faceStub.face1);
expect(await sut.handleRecognizeFaces({ id: faceStub.face1.id })).toBe(JobStatus.SKIPPED);
expect(await sut.handleRecognizeFaces({ id: faceStub.face1.id })).toBe(JobStatus.Skipped);
expect(mocks.person.reassignFaces).not.toHaveBeenCalled();
expect(mocks.person.create).not.toHaveBeenCalled();
@@ -1008,7 +1008,7 @@ describe(PersonService.name, () => {
await sut.handleRecognizeFaces({ id: faceStub.noPerson1.id });
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.FACIAL_RECOGNITION,
name: JobName.FacialRecognition,
data: { id: faceStub.noPerson1.id, deferred: true },
});
expect(mocks.search.searchFaces).toHaveBeenCalledTimes(1);
@@ -1161,7 +1161,7 @@ describe(PersonService.name, () => {
id: faceStub.face1.id,
imageHeight: 1024,
imageWidth: 1024,
sourceType: SourceType.MACHINE_LEARNING,
sourceType: SourceType.MachineLearning,
person: mapPerson(personStub.withName),
});
});

View File

@@ -78,7 +78,7 @@ export class PersonService extends BaseService {
}
async reassignFaces(auth: AuthDto, personId: string, dto: AssetFaceUpdateDto): Promise<PersonResponseDto[]> {
await this.requireAccess({ auth, permission: Permission.PERSON_UPDATE, ids: [personId] });
await this.requireAccess({ auth, permission: Permission.PersonUpdate, ids: [personId] });
const person = await this.findOrFail(personId);
const result: PersonResponseDto[] = [];
const changeFeaturePhoto: string[] = [];
@@ -86,7 +86,7 @@ export class PersonService extends BaseService {
const faces = await this.personRepository.getFacesByIds([{ personId: data.personId, assetId: data.assetId }]);
for (const face of faces) {
await this.requireAccess({ auth, permission: Permission.PERSON_CREATE, ids: [face.id] });
await this.requireAccess({ auth, permission: Permission.PersonCreate, ids: [face.id] });
if (person.faceAssetId === null) {
changeFeaturePhoto.push(person.id);
}
@@ -107,8 +107,8 @@ export class PersonService extends BaseService {
}
async reassignFacesById(auth: AuthDto, personId: string, dto: FaceDto): Promise<PersonResponseDto> {
await this.requireAccess({ auth, permission: Permission.PERSON_UPDATE, ids: [personId] });
await this.requireAccess({ auth, permission: Permission.PERSON_CREATE, ids: [dto.id] });
await this.requireAccess({ auth, permission: Permission.PersonUpdate, ids: [personId] });
await this.requireAccess({ auth, permission: Permission.PersonCreate, ids: [dto.id] });
const face = await this.personRepository.getFaceById(dto.id);
const person = await this.findOrFail(personId);
@@ -124,7 +124,7 @@ export class PersonService extends BaseService {
}
async getFacesById(auth: AuthDto, dto: FaceDto): Promise<AssetFaceResponseDto[]> {
await this.requireAccess({ auth, permission: Permission.ASSET_READ, ids: [dto.id] });
await this.requireAccess({ auth, permission: Permission.AssetRead, ids: [dto.id] });
const faces = await this.personRepository.getFaces(dto.id);
return faces.map((asset) => mapFaces(asset, auth));
}
@@ -140,7 +140,7 @@ export class PersonService extends BaseService {
if (assetFace) {
await this.personRepository.update({ id: personId, faceAssetId: assetFace.id });
jobs.push({ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id: personId } });
jobs.push({ name: JobName.GeneratePersonThumbnail, data: { id: personId } });
}
}
@@ -148,17 +148,17 @@ export class PersonService extends BaseService {
}
async getById(auth: AuthDto, id: string): Promise<PersonResponseDto> {
await this.requireAccess({ auth, permission: Permission.PERSON_READ, ids: [id] });
await this.requireAccess({ auth, permission: Permission.PersonRead, ids: [id] });
return this.findOrFail(id).then(mapPerson);
}
async getStatistics(auth: AuthDto, id: string): Promise<PersonStatisticsResponseDto> {
await this.requireAccess({ auth, permission: Permission.PERSON_READ, ids: [id] });
await this.requireAccess({ auth, permission: Permission.PersonRead, ids: [id] });
return this.personRepository.getStatistics(id);
}
async getThumbnail(auth: AuthDto, id: string): Promise<ImmichFileResponse> {
await this.requireAccess({ auth, permission: Permission.PERSON_READ, ids: [id] });
await this.requireAccess({ auth, permission: Permission.PersonRead, ids: [id] });
const person = await this.personRepository.getById(id);
if (!person || !person.thumbnailPath) {
throw new NotFoundException();
@@ -167,7 +167,7 @@ export class PersonService extends BaseService {
return new ImmichFileResponse({
path: person.thumbnailPath,
contentType: mimeTypes.lookup(person.thumbnailPath),
cacheControl: CacheControl.PRIVATE_WITHOUT_CACHE,
cacheControl: CacheControl.PrivateWithoutCache,
});
}
@@ -185,13 +185,13 @@ export class PersonService extends BaseService {
}
async update(auth: AuthDto, id: string, dto: PersonUpdateDto): Promise<PersonResponseDto> {
await this.requireAccess({ auth, permission: Permission.PERSON_UPDATE, ids: [id] });
await this.requireAccess({ auth, permission: Permission.PersonUpdate, ids: [id] });
const { name, birthDate, isHidden, featureFaceAssetId: assetId, isFavorite, color } = dto;
// TODO: set by faceId directly
let faceId: string | undefined = undefined;
if (assetId) {
await this.requireAccess({ auth, permission: Permission.ASSET_READ, ids: [assetId] });
await this.requireAccess({ auth, permission: Permission.AssetRead, ids: [assetId] });
const [face] = await this.personRepository.getFacesByIds([{ personId: id, assetId }]);
if (!face) {
throw new BadRequestException('Invalid assetId for feature face');
@@ -211,7 +211,7 @@ export class PersonService extends BaseService {
});
if (assetId) {
await this.jobRepository.queue({ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id } });
await this.jobRepository.queue({ name: JobName.GeneratePersonThumbnail, data: { id } });
}
return mapPerson(person);
@@ -242,7 +242,7 @@ export class PersonService extends BaseService {
}
async deleteAll(auth: AuthDto, { ids }: BulkIdsDto): Promise<void> {
await this.requireAccess({ auth, permission: Permission.PERSON_DELETE, ids });
await this.requireAccess({ auth, permission: Permission.PersonDelete, ids });
const people = await this.personRepository.getForPeopleDelete(ids);
await this.removeAllPeople(people);
}
@@ -254,22 +254,22 @@ export class PersonService extends BaseService {
this.logger.debug(`Deleted ${people.length} people`);
}
@OnJob({ name: JobName.PERSON_CLEANUP, queue: QueueName.BACKGROUND_TASK })
@OnJob({ name: JobName.PersonCleanup, queue: QueueName.BackgroundTask })
async handlePersonCleanup(): Promise<JobStatus> {
const people = await this.personRepository.getAllWithoutFaces();
await this.removeAllPeople(people);
return JobStatus.SUCCESS;
return JobStatus.Success;
}
@OnJob({ name: JobName.QUEUE_FACE_DETECTION, queue: QueueName.FACE_DETECTION })
async handleQueueDetectFaces({ force }: JobOf<JobName.QUEUE_FACE_DETECTION>): Promise<JobStatus> {
@OnJob({ name: JobName.QueueFaceDetection, queue: QueueName.FaceDetection })
async handleQueueDetectFaces({ force }: JobOf<JobName.QueueFaceDetection>): Promise<JobStatus> {
const { machineLearning } = await this.getConfig({ withCache: false });
if (!isFacialRecognitionEnabled(machineLearning)) {
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
if (force) {
await this.personRepository.deleteFaces({ sourceType: SourceType.MACHINE_LEARNING });
await this.personRepository.deleteFaces({ sourceType: SourceType.MachineLearning });
await this.handlePersonCleanup();
await this.personRepository.vacuum({ reindexVectors: true });
}
@@ -277,7 +277,7 @@ export class PersonService extends BaseService {
let jobs: JobItem[] = [];
const assets = this.assetJobRepository.streamForDetectFacesJob(force);
for await (const asset of assets) {
jobs.push({ name: JobName.FACE_DETECTION, data: { id: asset.id } });
jobs.push({ name: JobName.FaceDetection, data: { id: asset.id } });
if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
await this.jobRepository.queueAll(jobs);
@@ -288,27 +288,27 @@ export class PersonService extends BaseService {
await this.jobRepository.queueAll(jobs);
if (force === undefined) {
await this.jobRepository.queue({ name: JobName.PERSON_CLEANUP });
await this.jobRepository.queue({ name: JobName.PersonCleanup });
}
return JobStatus.SUCCESS;
return JobStatus.Success;
}
@OnJob({ name: JobName.FACE_DETECTION, queue: QueueName.FACE_DETECTION })
async handleDetectFaces({ id }: JobOf<JobName.FACE_DETECTION>): Promise<JobStatus> {
@OnJob({ name: JobName.FaceDetection, queue: QueueName.FaceDetection })
async handleDetectFaces({ id }: JobOf<JobName.FaceDetection>): Promise<JobStatus> {
const { machineLearning } = await this.getConfig({ withCache: true });
if (!isFacialRecognitionEnabled(machineLearning)) {
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
const asset = await this.assetJobRepository.getForDetectFacesJob(id);
const previewFile = asset?.files[0];
if (!asset || asset.files.length !== 1 || !previewFile) {
return JobStatus.FAILED;
return JobStatus.Failed;
}
if (asset.visibility === AssetVisibility.HIDDEN) {
return JobStatus.SKIPPED;
if (asset.visibility === AssetVisibility.Hidden) {
return JobStatus.Skipped;
}
const { imageHeight, imageWidth, faces } = await this.machineLearningRepository.detectFaces(
@@ -323,7 +323,7 @@ export class PersonService extends BaseService {
const mlFaceIds = new Set<string>();
for (const face of asset.faces) {
if (face.sourceType === SourceType.MACHINE_LEARNING) {
if (face.sourceType === SourceType.MachineLearning) {
mlFaceIds.add(face.id);
}
}
@@ -368,15 +368,15 @@ export class PersonService extends BaseService {
if (facesToAdd.length > 0) {
this.logger.log(`Detected ${facesToAdd.length} new faces in asset ${id}`);
const jobs = facesToAdd.map((face) => ({ name: JobName.FACIAL_RECOGNITION, data: { id: face.id } }) as const);
await this.jobRepository.queueAll([{ name: JobName.QUEUE_FACIAL_RECOGNITION, data: { force: false } }, ...jobs]);
const jobs = facesToAdd.map((face) => ({ name: JobName.FacialRecognition, data: { id: face.id } }) as const);
await this.jobRepository.queueAll([{ name: JobName.QueueFacialRecognition, data: { force: false } }, ...jobs]);
} else if (embeddings.length > 0) {
this.logger.log(`Added ${embeddings.length} face embeddings for asset ${id}`);
}
await this.assetRepository.upsertJobStatus({ assetId: asset.id, facesRecognizedAt: new Date() });
return JobStatus.SUCCESS;
return JobStatus.Success;
}
private iou(
@@ -396,50 +396,50 @@ export class PersonService extends BaseService {
return intersection / union;
}
@OnJob({ name: JobName.QUEUE_FACIAL_RECOGNITION, queue: QueueName.FACIAL_RECOGNITION })
async handleQueueRecognizeFaces({ force, nightly }: JobOf<JobName.QUEUE_FACIAL_RECOGNITION>): Promise<JobStatus> {
@OnJob({ name: JobName.QueueFacialRecognition, queue: QueueName.FacialRecognition })
async handleQueueRecognizeFaces({ force, nightly }: JobOf<JobName.QueueFacialRecognition>): Promise<JobStatus> {
const { machineLearning } = await this.getConfig({ withCache: false });
if (!isFacialRecognitionEnabled(machineLearning)) {
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
await this.jobRepository.waitForQueueCompletion(QueueName.THUMBNAIL_GENERATION, QueueName.FACE_DETECTION);
await this.jobRepository.waitForQueueCompletion(QueueName.ThumbnailGeneration, QueueName.FaceDetection);
if (nightly) {
const [state, latestFaceDate] = await Promise.all([
this.systemMetadataRepository.get(SystemMetadataKey.FACIAL_RECOGNITION_STATE),
this.systemMetadataRepository.get(SystemMetadataKey.FacialRecognitionState),
this.personRepository.getLatestFaceDate(),
]);
if (state?.lastRun && latestFaceDate && state.lastRun > latestFaceDate) {
this.logger.debug('Skipping facial recognition nightly since no face has been added since the last run');
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
}
const { waiting } = await this.jobRepository.getJobCounts(QueueName.FACIAL_RECOGNITION);
const { waiting } = await this.jobRepository.getJobCounts(QueueName.FacialRecognition);
if (force) {
await this.personRepository.unassignFaces({ sourceType: SourceType.MACHINE_LEARNING });
await this.personRepository.unassignFaces({ sourceType: SourceType.MachineLearning });
await this.handlePersonCleanup();
await this.personRepository.vacuum({ reindexVectors: false });
} else if (waiting) {
this.logger.debug(
`Skipping facial recognition queueing because ${waiting} job${waiting > 1 ? 's are' : ' is'} already queued`,
);
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
await this.databaseRepository.prewarm(VectorIndex.FACE);
await this.databaseRepository.prewarm(VectorIndex.Face);
const lastRun = new Date().toISOString();
const facePagination = this.personRepository.getAllFaces(
force ? undefined : { personId: null, sourceType: SourceType.MACHINE_LEARNING },
force ? undefined : { personId: null, sourceType: SourceType.MachineLearning },
);
let jobs: { name: JobName.FACIAL_RECOGNITION; data: { id: string; deferred: false } }[] = [];
let jobs: { name: JobName.FacialRecognition; data: { id: string; deferred: false } }[] = [];
for await (const face of facePagination) {
jobs.push({ name: JobName.FACIAL_RECOGNITION, data: { id: face.id, deferred: false } });
jobs.push({ name: JobName.FacialRecognition, data: { id: face.id, deferred: false } });
if (jobs.length === JOBS_ASSET_PAGINATION_SIZE) {
await this.jobRepository.queueAll(jobs);
@@ -449,37 +449,37 @@ export class PersonService extends BaseService {
await this.jobRepository.queueAll(jobs);
await this.systemMetadataRepository.set(SystemMetadataKey.FACIAL_RECOGNITION_STATE, { lastRun });
await this.systemMetadataRepository.set(SystemMetadataKey.FacialRecognitionState, { lastRun });
return JobStatus.SUCCESS;
return JobStatus.Success;
}
@OnJob({ name: JobName.FACIAL_RECOGNITION, queue: QueueName.FACIAL_RECOGNITION })
async handleRecognizeFaces({ id, deferred }: JobOf<JobName.FACIAL_RECOGNITION>): Promise<JobStatus> {
@OnJob({ name: JobName.FacialRecognition, queue: QueueName.FacialRecognition })
async handleRecognizeFaces({ id, deferred }: JobOf<JobName.FacialRecognition>): Promise<JobStatus> {
const { machineLearning } = await this.getConfig({ withCache: true });
if (!isFacialRecognitionEnabled(machineLearning)) {
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
const face = await this.personRepository.getFaceForFacialRecognitionJob(id);
if (!face || !face.asset) {
this.logger.warn(`Face ${id} not found`);
return JobStatus.FAILED;
return JobStatus.Failed;
}
if (face.sourceType !== SourceType.MACHINE_LEARNING) {
if (face.sourceType !== SourceType.MachineLearning) {
this.logger.warn(`Skipping face ${id} due to source ${face.sourceType}`);
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
if (!face.faceSearch?.embedding) {
this.logger.warn(`Face ${id} does not have an embedding`);
return JobStatus.FAILED;
return JobStatus.Failed;
}
if (face.personId) {
this.logger.debug(`Face ${id} already has a person assigned`);
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
const matches = await this.searchRepository.searchFaces({
@@ -493,18 +493,18 @@ export class PersonService extends BaseService {
// `matches` also includes the face itself
if (machineLearning.facialRecognition.minFaces > 1 && matches.length <= 1) {
this.logger.debug(`Face ${id} only matched the face itself, skipping`);
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
this.logger.debug(`Face ${id} has ${matches.length} matches`);
const isCore =
matches.length >= machineLearning.facialRecognition.minFaces &&
face.asset.visibility === AssetVisibility.TIMELINE;
face.asset.visibility === AssetVisibility.Timeline;
if (!isCore && !deferred) {
this.logger.debug(`Deferring non-core face ${id} for later processing`);
await this.jobRepository.queue({ name: JobName.FACIAL_RECOGNITION, data: { id, deferred: true } });
return JobStatus.SKIPPED;
await this.jobRepository.queue({ name: JobName.FacialRecognition, data: { id, deferred: true } });
return JobStatus.Skipped;
}
let personId = matches.find((match) => match.personId)?.personId;
@@ -526,7 +526,7 @@ export class PersonService extends BaseService {
if (isCore && !personId) {
this.logger.log(`Creating new person for face ${id}`);
const newPerson = await this.personRepository.create({ ownerId: face.asset.ownerId, faceAssetId: face.id });
await this.jobRepository.queue({ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id: newPerson.id } });
await this.jobRepository.queue({ name: JobName.GeneratePersonThumbnail, data: { id: newPerson.id } });
personId = newPerson.id;
}
@@ -535,19 +535,19 @@ export class PersonService extends BaseService {
await this.personRepository.reassignFaces({ faceIds: [id], newPersonId: personId });
}
return JobStatus.SUCCESS;
return JobStatus.Success;
}
@OnJob({ name: JobName.MIGRATE_PERSON, queue: QueueName.MIGRATION })
async handlePersonMigration({ id }: JobOf<JobName.MIGRATE_PERSON>): Promise<JobStatus> {
@OnJob({ name: JobName.MigratePerson, queue: QueueName.Migration })
async handlePersonMigration({ id }: JobOf<JobName.MigratePerson>): Promise<JobStatus> {
const person = await this.personRepository.getById(id);
if (!person) {
return JobStatus.FAILED;
return JobStatus.Failed;
}
await this.storageCore.movePersonFile(person, PersonPathType.FACE);
await this.storageCore.movePersonFile(person, PersonPathType.Face);
return JobStatus.SUCCESS;
return JobStatus.Success;
}
async mergePerson(auth: AuthDto, id: string, dto: MergePersonDto): Promise<BulkIdResponseDto[]> {
@@ -556,7 +556,7 @@ export class PersonService extends BaseService {
throw new BadRequestException('Cannot merge a person into themselves');
}
await this.requireAccess({ auth, permission: Permission.PERSON_UPDATE, ids: [id] });
await this.requireAccess({ auth, permission: Permission.PersonUpdate, ids: [id] });
let primaryPerson = await this.findOrFail(id);
const primaryName = primaryPerson.name || primaryPerson.id;
@@ -564,7 +564,7 @@ export class PersonService extends BaseService {
const allowedIds = await this.checkAccess({
auth,
permission: Permission.PERSON_MERGE,
permission: Permission.PersonMerge,
ids: mergeIds,
});
@@ -623,8 +623,8 @@ export class PersonService extends BaseService {
// TODO return a asset face response
async createFace(auth: AuthDto, dto: AssetFaceCreateDto): Promise<void> {
await Promise.all([
this.requireAccess({ auth, permission: Permission.ASSET_READ, ids: [dto.assetId] }),
this.requireAccess({ auth, permission: Permission.PERSON_READ, ids: [dto.personId] }),
this.requireAccess({ auth, permission: Permission.AssetRead, ids: [dto.assetId] }),
this.requireAccess({ auth, permission: Permission.PersonRead, ids: [dto.personId] }),
]);
await this.personRepository.createAssetFace({
@@ -636,12 +636,12 @@ export class PersonService extends BaseService {
boundingBoxX2: dto.x + dto.width,
boundingBoxY1: dto.y,
boundingBoxY2: dto.y + dto.height,
sourceType: SourceType.MANUAL,
sourceType: SourceType.Manual,
});
}
async deleteFace(auth: AuthDto, id: string, dto: AssetFaceDeleteDto): Promise<void> {
await this.requireAccess({ auth, permission: Permission.FACE_DELETE, ids: [id] });
await this.requireAccess({ auth, permission: Permission.FaceDelete, ids: [id] });
return dto.force ? this.personRepository.deleteAssetFace(id) : this.personRepository.softDeleteAssetFaces(id);
}

View File

@@ -46,7 +46,7 @@ export class SearchService extends BaseService {
}
async searchMetadata(auth: AuthDto, dto: MetadataSearchDto): Promise<SearchResponseDto> {
if (dto.visibility === AssetVisibility.LOCKED) {
if (dto.visibility === AssetVisibility.Locked) {
requireElevatedPermission(auth);
}
@@ -65,7 +65,7 @@ export class SearchService extends BaseService {
...dto,
checksum,
userIds,
orderDirection: dto.order ?? AssetOrder.DESC,
orderDirection: dto.order ?? AssetOrder.Desc,
},
);
@@ -82,7 +82,7 @@ export class SearchService extends BaseService {
}
async searchRandom(auth: AuthDto, dto: RandomSearchDto): Promise<AssetResponseDto[]> {
if (dto.visibility === AssetVisibility.LOCKED) {
if (dto.visibility === AssetVisibility.Locked) {
requireElevatedPermission(auth);
}
@@ -92,7 +92,7 @@ export class SearchService extends BaseService {
}
async searchSmart(auth: AuthDto, dto: SmartSearchDto): Promise<SearchResponseDto> {
if (dto.visibility === AssetVisibility.LOCKED) {
if (dto.visibility === AssetVisibility.Locked) {
requireElevatedPermission(auth);
}

View File

@@ -256,7 +256,7 @@ describe(ServerService.name, () => {
const license = { licenseKey: 'IMSV-license-key', activationKey: 'activation-key' };
await sut.setLicense(license);
expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.LICENSE, expect.any(Object));
expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.License, expect.any(Object));
});
it('should not save license if invalid', async () => {

View File

@@ -27,7 +27,7 @@ export class ServerService extends BaseService {
async onBootstrap(): Promise<void> {
const featureFlags = await this.getFeatures();
if (featureFlags.configFile) {
await this.systemMetadataRepository.set(SystemMetadataKey.ADMIN_ONBOARDING, {
await this.systemMetadataRepository.set(SystemMetadataKey.AdminOnboarding, {
isOnboarded: true,
});
}
@@ -38,7 +38,7 @@ export class ServerService extends BaseService {
const version = `v${serverVersion.toString()}`;
const { buildMetadata } = this.configRepository.getEnv();
const buildVersions = await this.serverInfoRepository.getBuildVersions();
const licensed = await this.systemMetadataRepository.get(SystemMetadataKey.LICENSE);
const licensed = await this.systemMetadataRepository.get(SystemMetadataKey.License);
return {
version,
@@ -60,7 +60,7 @@ export class ServerService extends BaseService {
}
async getStorage(): Promise<ServerStorageResponseDto> {
const libraryBase = StorageCore.getBaseFolder(StorageFolder.LIBRARY);
const libraryBase = StorageCore.getBaseFolder(StorageFolder.Library);
const diskInfo = await this.storageRepository.checkDiskUsage(libraryBase);
const usagePercentage = (((diskInfo.total - diskInfo.free) / diskInfo.total) * 100).toFixed(2);
@@ -111,7 +111,7 @@ export class ServerService extends BaseService {
async getSystemConfig(): Promise<ServerConfigDto> {
const config = await this.getConfig({ withCache: false });
const isInitialized = await this.userRepository.hasAdmin();
const onboarding = await this.systemMetadataRepository.get(SystemMetadataKey.ADMIN_ONBOARDING);
const onboarding = await this.systemMetadataRepository.get(SystemMetadataKey.AdminOnboarding);
return {
loginPageMessage: config.server.loginPageMessage,
@@ -163,11 +163,11 @@ export class ServerService extends BaseService {
}
async deleteLicense(): Promise<void> {
await this.systemMetadataRepository.delete(SystemMetadataKey.LICENSE);
await this.systemMetadataRepository.delete(SystemMetadataKey.License);
}
async getLicense(): Promise<LicenseResponseDto> {
const license = await this.systemMetadataRepository.get(SystemMetadataKey.LICENSE);
const license = await this.systemMetadataRepository.get(SystemMetadataKey.License);
if (!license) {
throw new NotFoundException();
}
@@ -186,7 +186,7 @@ export class ServerService extends BaseService {
const licenseData = { ...dto, activatedAt: new Date() };
await this.systemMetadataRepository.set(SystemMetadataKey.LICENSE, licenseData);
await this.systemMetadataRepository.set(SystemMetadataKey.License, licenseData);
return licenseData;
}

View File

@@ -19,7 +19,7 @@ describe('SessionService', () => {
describe('handleCleanup', () => {
it('should clean sessions', async () => {
mocks.session.cleanup.mockResolvedValue([]);
await expect(sut.handleCleanup()).resolves.toEqual(JobStatus.SUCCESS);
await expect(sut.handleCleanup()).resolves.toEqual(JobStatus.Success);
});
});

View File

@@ -14,7 +14,7 @@ import { BaseService } from 'src/services/base.service';
@Injectable()
export class SessionService extends BaseService {
@OnJob({ name: JobName.CLEAN_OLD_SESSION_TOKENS, queue: QueueName.BACKGROUND_TASK })
@OnJob({ name: JobName.CleanOldSessionTokens, queue: QueueName.BackgroundTask })
async handleCleanup(): Promise<JobStatus> {
const sessions = await this.sessionRepository.cleanup();
for (const session of sessions) {
@@ -23,7 +23,7 @@ export class SessionService extends BaseService {
this.logger.log(`Deleted ${sessions.length} expired session tokens`);
return JobStatus.SUCCESS;
return JobStatus.Success;
}
async create(auth: AuthDto, dto: SessionCreateDto): Promise<SessionCreateResponseDto> {
@@ -51,7 +51,7 @@ export class SessionService extends BaseService {
}
async update(auth: AuthDto, id: string, dto: SessionUpdateDto): Promise<SessionResponseDto> {
await this.requireAccess({ auth, permission: Permission.SESSION_UPDATE, ids: [id] });
await this.requireAccess({ auth, permission: Permission.SessionUpdate, ids: [id] });
if (Object.values(dto).filter((prop) => prop !== undefined).length === 0) {
throw new BadRequestException('No fields to update');
@@ -65,12 +65,12 @@ export class SessionService extends BaseService {
}
async delete(auth: AuthDto, id: string): Promise<void> {
await this.requireAccess({ auth, permission: Permission.AUTH_DEVICE_DELETE, ids: [id] });
await this.requireAccess({ auth, permission: Permission.AuthDeviceDelete, ids: [id] });
await this.sessionRepository.delete(id);
}
async lock(auth: AuthDto, id: string): Promise<void> {
await this.requireAccess({ auth, permission: Permission.SESSION_LOCK, ids: [id] });
await this.requireAccess({ auth, permission: Permission.SessionLock, ids: [id] });
await this.sessionRepository.update(id, { pinExpiresAt: null });
}

View File

@@ -95,26 +95,26 @@ describe(SharedLinkService.name, () => {
describe('create', () => {
it('should not allow an album shared link without an albumId', async () => {
await expect(sut.create(authStub.admin, { type: SharedLinkType.ALBUM, assetIds: [] })).rejects.toBeInstanceOf(
await expect(sut.create(authStub.admin, { type: SharedLinkType.Album, assetIds: [] })).rejects.toBeInstanceOf(
BadRequestException,
);
});
it('should not allow non-owners to create album shared links', async () => {
await expect(
sut.create(authStub.admin, { type: SharedLinkType.ALBUM, assetIds: [], albumId: 'album-1' }),
sut.create(authStub.admin, { type: SharedLinkType.Album, assetIds: [], albumId: 'album-1' }),
).rejects.toBeInstanceOf(BadRequestException);
});
it('should not allow individual shared links with no assets', async () => {
await expect(
sut.create(authStub.admin, { type: SharedLinkType.INDIVIDUAL, assetIds: [] }),
sut.create(authStub.admin, { type: SharedLinkType.Individual, assetIds: [] }),
).rejects.toBeInstanceOf(BadRequestException);
});
it('should require asset ownership to make an individual shared link', async () => {
await expect(
sut.create(authStub.admin, { type: SharedLinkType.INDIVIDUAL, assetIds: ['asset-1'] }),
sut.create(authStub.admin, { type: SharedLinkType.Individual, assetIds: ['asset-1'] }),
).rejects.toBeInstanceOf(BadRequestException);
});
@@ -122,14 +122,14 @@ describe(SharedLinkService.name, () => {
mocks.access.album.checkOwnerAccess.mockResolvedValue(new Set([albumStub.oneAsset.id]));
mocks.sharedLink.create.mockResolvedValue(sharedLinkStub.valid);
await sut.create(authStub.admin, { type: SharedLinkType.ALBUM, albumId: albumStub.oneAsset.id });
await sut.create(authStub.admin, { type: SharedLinkType.Album, albumId: albumStub.oneAsset.id });
expect(mocks.access.album.checkOwnerAccess).toHaveBeenCalledWith(
authStub.admin.user.id,
new Set([albumStub.oneAsset.id]),
);
expect(mocks.sharedLink.create).toHaveBeenCalledWith({
type: SharedLinkType.ALBUM,
type: SharedLinkType.Album,
userId: authStub.admin.user.id,
albumId: albumStub.oneAsset.id,
allowDownload: true,
@@ -146,7 +146,7 @@ describe(SharedLinkService.name, () => {
mocks.sharedLink.create.mockResolvedValue(sharedLinkStub.individual);
await sut.create(authStub.admin, {
type: SharedLinkType.INDIVIDUAL,
type: SharedLinkType.Individual,
assetIds: [assetStub.image.id],
showMetadata: true,
allowDownload: true,
@@ -159,7 +159,7 @@ describe(SharedLinkService.name, () => {
false,
);
expect(mocks.sharedLink.create).toHaveBeenCalledWith({
type: SharedLinkType.INDIVIDUAL,
type: SharedLinkType.Individual,
userId: authStub.admin.user.id,
albumId: null,
allowDownload: true,
@@ -177,7 +177,7 @@ describe(SharedLinkService.name, () => {
mocks.sharedLink.create.mockResolvedValue(sharedLinkStub.individual);
await sut.create(authStub.admin, {
type: SharedLinkType.INDIVIDUAL,
type: SharedLinkType.Individual,
assetIds: [assetStub.image.id],
showMetadata: false,
allowDownload: true,
@@ -190,7 +190,7 @@ describe(SharedLinkService.name, () => {
false,
);
expect(mocks.sharedLink.create).toHaveBeenCalledWith({
type: SharedLinkType.INDIVIDUAL,
type: SharedLinkType.Individual,
userId: authStub.admin.user.id,
albumId: null,
allowDownload: false,

View File

@@ -45,20 +45,20 @@ export class SharedLinkService extends BaseService {
async create(auth: AuthDto, dto: SharedLinkCreateDto): Promise<SharedLinkResponseDto> {
switch (dto.type) {
case SharedLinkType.ALBUM: {
case SharedLinkType.Album: {
if (!dto.albumId) {
throw new BadRequestException('Invalid albumId');
}
await this.requireAccess({ auth, permission: Permission.ALBUM_SHARE, ids: [dto.albumId] });
await this.requireAccess({ auth, permission: Permission.AlbumShare, ids: [dto.albumId] });
break;
}
case SharedLinkType.INDIVIDUAL: {
case SharedLinkType.Individual: {
if (!dto.assetIds || dto.assetIds.length === 0) {
throw new BadRequestException('Invalid assetIds');
}
await this.requireAccess({ auth, permission: Permission.ASSET_SHARE, ids: dto.assetIds });
await this.requireAccess({ auth, permission: Permission.AssetShare, ids: dto.assetIds });
break;
}
@@ -113,7 +113,7 @@ export class SharedLinkService extends BaseService {
async addAssets(auth: AuthDto, id: string, dto: AssetIdsDto): Promise<AssetIdsResponseDto[]> {
const sharedLink = await this.findOrFail(auth.user.id, id);
if (sharedLink.type !== SharedLinkType.INDIVIDUAL) {
if (sharedLink.type !== SharedLinkType.Individual) {
throw new BadRequestException('Invalid shared link type');
}
@@ -121,7 +121,7 @@ export class SharedLinkService extends BaseService {
const notPresentAssetIds = dto.assetIds.filter((assetId) => !existingAssetIds.has(assetId));
const allowedAssetIds = await this.checkAccess({
auth,
permission: Permission.ASSET_SHARE,
permission: Permission.AssetShare,
ids: notPresentAssetIds,
});
@@ -153,7 +153,7 @@ export class SharedLinkService extends BaseService {
async removeAssets(auth: AuthDto, id: string, dto: AssetIdsDto): Promise<AssetIdsResponseDto[]> {
const sharedLink = await this.findOrFail(auth.user.id, id);
if (sharedLink.type !== SharedLinkType.INDIVIDUAL) {
if (sharedLink.type !== SharedLinkType.Individual) {
throw new BadRequestException('Invalid shared link type');
}

View File

@@ -14,7 +14,7 @@ describe(SmartInfoService.name, () => {
({ sut, mocks } = newTestService(SmartInfoService));
mocks.asset.getByIds.mockResolvedValue([assetStub.image]);
mocks.config.getWorker.mockReturnValue(ImmichWorker.MICROSERVICES);
mocks.config.getWorker.mockReturnValue(ImmichWorker.Microservices);
});
it('should work', () => {
@@ -160,7 +160,7 @@ describe(SmartInfoService.name, () => {
await sut.handleQueueEncodeClip({ force: false });
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{ name: JobName.SMART_SEARCH, data: { id: assetStub.image.id } },
{ name: JobName.SmartSearch, data: { id: assetStub.image.id } },
]);
expect(mocks.assetJob.streamForEncodeClip).toHaveBeenCalledWith(false);
expect(mocks.database.setDimensionSize).not.toHaveBeenCalled();
@@ -172,7 +172,7 @@ describe(SmartInfoService.name, () => {
await sut.handleQueueEncodeClip({ force: true });
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{ name: JobName.SMART_SEARCH, data: { id: assetStub.image.id } },
{ name: JobName.SmartSearch, data: { id: assetStub.image.id } },
]);
expect(mocks.assetJob.streamForEncodeClip).toHaveBeenCalledWith(true);
expect(mocks.database.setDimensionSize).toHaveBeenCalledExactlyOnceWith(512);
@@ -183,7 +183,7 @@ describe(SmartInfoService.name, () => {
it('should do nothing if machine learning is disabled', async () => {
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.machineLearningDisabled);
expect(await sut.handleEncodeClip({ id: '123' })).toEqual(JobStatus.SKIPPED);
expect(await sut.handleEncodeClip({ id: '123' })).toEqual(JobStatus.Skipped);
expect(mocks.asset.getByIds).not.toHaveBeenCalled();
expect(mocks.machineLearning.encodeImage).not.toHaveBeenCalled();
@@ -192,7 +192,7 @@ describe(SmartInfoService.name, () => {
it('should skip assets without a resize path', async () => {
mocks.assetJob.getForClipEncoding.mockResolvedValue({ ...assetStub.noResizePath, files: [] });
expect(await sut.handleEncodeClip({ id: assetStub.noResizePath.id })).toEqual(JobStatus.FAILED);
expect(await sut.handleEncodeClip({ id: assetStub.noResizePath.id })).toEqual(JobStatus.Failed);
expect(mocks.search.upsert).not.toHaveBeenCalled();
expect(mocks.machineLearning.encodeImage).not.toHaveBeenCalled();
@@ -202,7 +202,7 @@ describe(SmartInfoService.name, () => {
mocks.machineLearning.encodeImage.mockResolvedValue('[0.01, 0.02, 0.03]');
mocks.assetJob.getForClipEncoding.mockResolvedValue({ ...assetStub.image, files: [assetStub.image.files[1]] });
expect(await sut.handleEncodeClip({ id: assetStub.image.id })).toEqual(JobStatus.SUCCESS);
expect(await sut.handleEncodeClip({ id: assetStub.image.id })).toEqual(JobStatus.Success);
expect(mocks.machineLearning.encodeImage).toHaveBeenCalledWith(
['http://immich-machine-learning:3003'],
@@ -218,7 +218,7 @@ describe(SmartInfoService.name, () => {
files: [assetStub.image.files[1]],
});
expect(await sut.handleEncodeClip({ id: assetStub.livePhotoMotionAsset.id })).toEqual(JobStatus.SKIPPED);
expect(await sut.handleEncodeClip({ id: assetStub.livePhotoMotionAsset.id })).toEqual(JobStatus.Skipped);
expect(mocks.machineLearning.encodeImage).not.toHaveBeenCalled();
expect(mocks.search.upsert).not.toHaveBeenCalled();
@@ -227,7 +227,7 @@ describe(SmartInfoService.name, () => {
it('should fail if asset could not be found', async () => {
mocks.assetJob.getForClipEncoding.mockResolvedValue(void 0);
expect(await sut.handleEncodeClip({ id: assetStub.image.id })).toEqual(JobStatus.FAILED);
expect(await sut.handleEncodeClip({ id: assetStub.image.id })).toEqual(JobStatus.Failed);
expect(mocks.machineLearning.encodeImage).not.toHaveBeenCalled();
expect(mocks.search.upsert).not.toHaveBeenCalled();
@@ -238,7 +238,7 @@ describe(SmartInfoService.name, () => {
mocks.database.isBusy.mockReturnValue(true);
mocks.assetJob.getForClipEncoding.mockResolvedValue({ ...assetStub.image, files: [assetStub.image.files[1]] });
expect(await sut.handleEncodeClip({ id: assetStub.image.id })).toEqual(JobStatus.SUCCESS);
expect(await sut.handleEncodeClip({ id: assetStub.image.id })).toEqual(JobStatus.Success);
expect(mocks.database.wait).toHaveBeenCalledWith(512);
expect(mocks.machineLearning.encodeImage).toHaveBeenCalledWith(

View File

@@ -10,12 +10,12 @@ import { getCLIPModelInfo, isSmartSearchEnabled } from 'src/utils/misc';
@Injectable()
export class SmartInfoService extends BaseService {
@OnEvent({ name: 'ConfigInit', workers: [ImmichWorker.MICROSERVICES] })
@OnEvent({ name: 'ConfigInit', workers: [ImmichWorker.Microservices] })
async onConfigInit({ newConfig }: ArgOf<'ConfigInit'>) {
await this.init(newConfig);
}
@OnEvent({ name: 'ConfigUpdate', workers: [ImmichWorker.MICROSERVICES], server: true })
@OnEvent({ name: 'ConfigUpdate', workers: [ImmichWorker.Microservices], server: true })
async onConfigUpdate({ oldConfig, newConfig }: ArgOf<'ConfigUpdate'>) {
await this.init(newConfig, oldConfig);
}
@@ -64,11 +64,11 @@ export class SmartInfoService extends BaseService {
});
}
@OnJob({ name: JobName.QUEUE_SMART_SEARCH, queue: QueueName.SMART_SEARCH })
async handleQueueEncodeClip({ force }: JobOf<JobName.QUEUE_SMART_SEARCH>): Promise<JobStatus> {
@OnJob({ name: JobName.QueueSmartSearch, queue: QueueName.SmartSearch })
async handleQueueEncodeClip({ force }: JobOf<JobName.QueueSmartSearch>): Promise<JobStatus> {
const { machineLearning } = await this.getConfig({ withCache: false });
if (!isSmartSearchEnabled(machineLearning)) {
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
if (force) {
@@ -80,7 +80,7 @@ export class SmartInfoService extends BaseService {
let queue: JobItem[] = [];
const assets = this.assetJobRepository.streamForEncodeClip(force);
for await (const asset of assets) {
queue.push({ name: JobName.SMART_SEARCH, data: { id: asset.id } });
queue.push({ name: JobName.SmartSearch, data: { id: asset.id } });
if (queue.length >= JOBS_ASSET_PAGINATION_SIZE) {
await this.jobRepository.queueAll(queue);
queue = [];
@@ -89,23 +89,23 @@ export class SmartInfoService extends BaseService {
await this.jobRepository.queueAll(queue);
return JobStatus.SUCCESS;
return JobStatus.Success;
}
@OnJob({ name: JobName.SMART_SEARCH, queue: QueueName.SMART_SEARCH })
async handleEncodeClip({ id }: JobOf<JobName.SMART_SEARCH>): Promise<JobStatus> {
@OnJob({ name: JobName.SmartSearch, queue: QueueName.SmartSearch })
async handleEncodeClip({ id }: JobOf<JobName.SmartSearch>): Promise<JobStatus> {
const { machineLearning } = await this.getConfig({ withCache: true });
if (!isSmartSearchEnabled(machineLearning)) {
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
const asset = await this.assetJobRepository.getForClipEncoding(id);
if (!asset || asset.files.length !== 1) {
return JobStatus.FAILED;
return JobStatus.Failed;
}
if (asset.visibility === AssetVisibility.HIDDEN) {
return JobStatus.SKIPPED;
if (asset.visibility === AssetVisibility.Hidden) {
return JobStatus.Skipped;
}
const embedding = await this.machineLearningRepository.encodeImage(
@@ -122,11 +122,11 @@ export class SmartInfoService extends BaseService {
const newConfig = await this.getConfig({ withCache: true });
if (machineLearning.clip.modelName !== newConfig.machineLearning.clip.modelName) {
// Skip the job if the the model has changed since the embedding was generated.
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
await this.searchRepository.upsert(asset.id, embedding);
return JobStatus.SUCCESS;
return JobStatus.Success;
}
}

View File

@@ -17,7 +17,7 @@ export class StackService extends BaseService {
}
async create(auth: AuthDto, dto: StackCreateDto): Promise<StackResponseDto> {
await this.requireAccess({ auth, permission: Permission.ASSET_UPDATE, ids: dto.assetIds });
await this.requireAccess({ auth, permission: Permission.AssetUpdate, ids: dto.assetIds });
const stack = await this.stackRepository.create({ ownerId: auth.user.id }, dto.assetIds);
@@ -27,13 +27,13 @@ export class StackService extends BaseService {
}
async get(auth: AuthDto, id: string): Promise<StackResponseDto> {
await this.requireAccess({ auth, permission: Permission.STACK_READ, ids: [id] });
await this.requireAccess({ auth, permission: Permission.StackRead, ids: [id] });
const stack = await this.findOrFail(id);
return mapStack(stack, { auth });
}
async update(auth: AuthDto, id: string, dto: StackUpdateDto): Promise<StackResponseDto> {
await this.requireAccess({ auth, permission: Permission.STACK_UPDATE, ids: [id] });
await this.requireAccess({ auth, permission: Permission.StackUpdate, ids: [id] });
const stack = await this.findOrFail(id);
if (dto.primaryAssetId && !stack.assets.some(({ id }) => id === dto.primaryAssetId)) {
throw new BadRequestException('Primary asset must be in the stack');
@@ -47,13 +47,13 @@ export class StackService extends BaseService {
}
async delete(auth: AuthDto, id: string): Promise<void> {
await this.requireAccess({ auth, permission: Permission.STACK_DELETE, ids: [id] });
await this.requireAccess({ auth, permission: Permission.StackDelete, ids: [id] });
await this.stackRepository.delete(id);
await this.eventRepository.emit('StackDelete', { stackId: id, userId: auth.user.id });
}
async deleteAll(auth: AuthDto, dto: BulkIdsDto): Promise<void> {
await this.requireAccess({ auth, permission: Permission.STACK_DELETE, ids: dto.ids });
await this.requireAccess({ auth, permission: Permission.StackDelete, ids: dto.ids });
await this.stackRepository.deleteAll(dto.ids);
await this.eventRepository.emit('StackDeleteAll', { stackIds: dto.ids, userId: auth.user.id });
}

View File

@@ -96,7 +96,7 @@ describe(StorageTemplateService.name, () => {
it('should skip when storage template is disabled', async () => {
mocks.systemMetadata.get.mockResolvedValue({ storageTemplate: { enabled: false } });
await expect(sut.handleMigrationSingle({ id: testAsset.id })).resolves.toBe(JobStatus.SKIPPED);
await expect(sut.handleMigrationSingle({ id: testAsset.id })).resolves.toBe(JobStatus.Skipped);
expect(mocks.asset.getByIds).not.toHaveBeenCalled();
expect(mocks.storage.checkFileExists).not.toHaveBeenCalled();
@@ -119,7 +119,7 @@ describe(StorageTemplateService.name, () => {
mocks.move.create.mockResolvedValueOnce({
id: '123',
entityId: stillAsset.id,
pathType: AssetPathType.ORIGINAL,
pathType: AssetPathType.Original,
oldPath: stillAsset.originalPath,
newPath: newStillPicturePath,
});
@@ -127,12 +127,12 @@ describe(StorageTemplateService.name, () => {
mocks.move.create.mockResolvedValueOnce({
id: '124',
entityId: motionAsset.id,
pathType: AssetPathType.ORIGINAL,
pathType: AssetPathType.Original,
oldPath: motionAsset.originalPath,
newPath: newMotionPicturePath,
});
await expect(sut.handleMigrationSingle({ id: stillAsset.id })).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleMigrationSingle({ id: stillAsset.id })).resolves.toBe(JobStatus.Success);
expect(mocks.storage.checkFileExists).toHaveBeenCalledTimes(2);
expect(mocks.asset.update).toHaveBeenCalledWith({ id: stillAsset.id, originalPath: newStillPicturePath });
@@ -152,13 +152,13 @@ describe(StorageTemplateService.name, () => {
mocks.assetJob.getForStorageTemplateJob.mockResolvedValueOnce(asset);
mocks.album.getByAssetId.mockResolvedValueOnce([album]);
expect(await sut.handleMigrationSingle({ id: asset.id })).toBe(JobStatus.SUCCESS);
expect(await sut.handleMigrationSingle({ id: asset.id })).toBe(JobStatus.Success);
expect(mocks.move.create).toHaveBeenCalledWith({
entityId: asset.id,
newPath: `upload/library/${user.id}/${asset.fileCreatedAt.getFullYear()}/${album.albumName}/${asset.originalFileName}`,
oldPath: asset.originalPath,
pathType: AssetPathType.ORIGINAL,
pathType: AssetPathType.Original,
});
});
@@ -172,14 +172,14 @@ describe(StorageTemplateService.name, () => {
mocks.user.get.mockResolvedValue(user);
mocks.assetJob.getForStorageTemplateJob.mockResolvedValueOnce(asset);
expect(await sut.handleMigrationSingle({ id: asset.id })).toBe(JobStatus.SUCCESS);
expect(await sut.handleMigrationSingle({ id: asset.id })).toBe(JobStatus.Success);
const month = (asset.fileCreatedAt.getMonth() + 1).toString().padStart(2, '0');
expect(mocks.move.create).toHaveBeenCalledWith({
entityId: asset.id,
newPath: `upload/library/${user.id}/${asset.fileCreatedAt.getFullYear()}/other/${month}/${asset.originalFileName}`,
oldPath: asset.originalPath,
pathType: AssetPathType.ORIGINAL,
pathType: AssetPathType.Original,
});
});
@@ -206,14 +206,14 @@ describe(StorageTemplateService.name, () => {
},
]);
expect(await sut.handleMigrationSingle({ id: asset.id })).toBe(JobStatus.SUCCESS);
expect(await sut.handleMigrationSingle({ id: asset.id })).toBe(JobStatus.Success);
const month = (asset.fileCreatedAt.getMonth() + 1).toString().padStart(2, '0');
expect(mocks.move.create).toHaveBeenCalledWith({
entityId: asset.id,
newPath: `upload/library/${user.id}/${asset.fileCreatedAt.getFullYear()}/${month} - ${album.albumName}/${asset.originalFileName}`,
oldPath: asset.originalPath,
pathType: AssetPathType.ORIGINAL,
pathType: AssetPathType.Original,
});
});
@@ -229,14 +229,14 @@ describe(StorageTemplateService.name, () => {
mocks.user.get.mockResolvedValue(user);
mocks.assetJob.getForStorageTemplateJob.mockResolvedValueOnce(asset);
expect(await sut.handleMigrationSingle({ id: asset.id })).toBe(JobStatus.SUCCESS);
expect(await sut.handleMigrationSingle({ id: asset.id })).toBe(JobStatus.Success);
const month = (asset.fileCreatedAt.getMonth() + 1).toString().padStart(2, '0');
expect(mocks.move.create).toHaveBeenCalledWith({
entityId: asset.id,
newPath: `upload/library/${user.id}/${asset.fileCreatedAt.getFullYear()}/${month}/${asset.originalFileName}`,
oldPath: asset.originalPath,
pathType: AssetPathType.ORIGINAL,
pathType: AssetPathType.Original,
});
});
@@ -251,7 +251,7 @@ describe(StorageTemplateService.name, () => {
mocks.move.getByEntity.mockResolvedValue({
id: '123',
entityId: asset.id,
pathType: AssetPathType.ORIGINAL,
pathType: AssetPathType.Original,
oldPath: asset.originalPath,
newPath: previousFailedNewPath,
});
@@ -259,12 +259,12 @@ describe(StorageTemplateService.name, () => {
mocks.move.update.mockResolvedValue({
id: '123',
entityId: asset.id,
pathType: AssetPathType.ORIGINAL,
pathType: AssetPathType.Original,
oldPath: asset.originalPath,
newPath,
});
await expect(sut.handleMigrationSingle({ id: asset.id })).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleMigrationSingle({ id: asset.id })).resolves.toBe(JobStatus.Success);
expect(mocks.assetJob.getForStorageTemplateJob).toHaveBeenCalledWith(asset.id);
expect(mocks.storage.checkFileExists).toHaveBeenCalledTimes(3);
@@ -293,7 +293,7 @@ describe(StorageTemplateService.name, () => {
mocks.move.getByEntity.mockResolvedValue({
id: '123',
entityId: asset.id,
pathType: AssetPathType.ORIGINAL,
pathType: AssetPathType.Original,
oldPath: asset.originalPath,
newPath: previousFailedNewPath,
});
@@ -301,12 +301,12 @@ describe(StorageTemplateService.name, () => {
mocks.move.update.mockResolvedValue({
id: '123',
entityId: asset.id,
pathType: AssetPathType.ORIGINAL,
pathType: AssetPathType.Original,
oldPath: previousFailedNewPath,
newPath,
});
await expect(sut.handleMigrationSingle({ id: asset.id })).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleMigrationSingle({ id: asset.id })).resolves.toBe(JobStatus.Success);
expect(mocks.assetJob.getForStorageTemplateJob).toHaveBeenCalledWith(asset.id);
expect(mocks.storage.checkFileExists).toHaveBeenCalledTimes(3);
@@ -328,19 +328,19 @@ describe(StorageTemplateService.name, () => {
mocks.move.create.mockResolvedValue({
id: '123',
entityId: testAsset.id,
pathType: AssetPathType.ORIGINAL,
pathType: AssetPathType.Original,
oldPath: testAsset.originalPath,
newPath,
});
await expect(sut.handleMigrationSingle({ id: testAsset.id })).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleMigrationSingle({ id: testAsset.id })).resolves.toBe(JobStatus.Success);
expect(mocks.assetJob.getForStorageTemplateJob).toHaveBeenCalledWith(testAsset.id);
expect(mocks.storage.checkFileExists).toHaveBeenCalledTimes(1);
expect(mocks.storage.stat).toHaveBeenCalledWith(newPath);
expect(mocks.move.create).toHaveBeenCalledWith({
entityId: testAsset.id,
pathType: AssetPathType.ORIGINAL,
pathType: AssetPathType.Original,
oldPath: testAsset.originalPath,
newPath,
});
@@ -370,7 +370,7 @@ describe(StorageTemplateService.name, () => {
mocks.move.getByEntity.mockResolvedValue({
id: '123',
entityId: testAsset.id,
pathType: AssetPathType.ORIGINAL,
pathType: AssetPathType.Original,
oldPath: testAsset.originalPath,
newPath: previousFailedNewPath,
});
@@ -378,12 +378,12 @@ describe(StorageTemplateService.name, () => {
mocks.move.update.mockResolvedValue({
id: '123',
entityId: testAsset.id,
pathType: AssetPathType.ORIGINAL,
pathType: AssetPathType.Original,
oldPath: previousFailedNewPath,
newPath,
});
await expect(sut.handleMigrationSingle({ id: testAsset.id })).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleMigrationSingle({ id: testAsset.id })).resolves.toBe(JobStatus.Success);
expect(mocks.assetJob.getForStorageTemplateJob).toHaveBeenCalledWith(testAsset.id);
expect(mocks.storage.checkFileExists).toHaveBeenCalledTimes(3);
@@ -417,7 +417,7 @@ describe(StorageTemplateService.name, () => {
mocks.move.create.mockResolvedValue({
id: '123',
entityId: asset.id,
pathType: AssetPathType.ORIGINAL,
pathType: AssetPathType.Original,
oldPath,
newPath,
});
@@ -472,7 +472,7 @@ describe(StorageTemplateService.name, () => {
mocks.move.create.mockResolvedValue({
id: '123',
entityId: assetStub.image.id,
pathType: AssetPathType.ORIGINAL,
pathType: AssetPathType.Original,
oldPath: assetStub.image.originalPath,
newPath,
});
@@ -492,7 +492,7 @@ describe(StorageTemplateService.name, () => {
mocks.move.create.mockResolvedValue({
id: '123',
entityId: asset.id,
pathType: AssetPathType.ORIGINAL,
pathType: AssetPathType.Original,
oldPath: asset.originalPath,
newPath: `upload/library/${user.storageLabel}/2023/2023-02-23/${asset.originalFileName}`,
});
@@ -520,7 +520,7 @@ describe(StorageTemplateService.name, () => {
mocks.move.create.mockResolvedValue({
id: '123',
entityId: asset.id,
pathType: AssetPathType.ORIGINAL,
pathType: AssetPathType.Original,
oldPath,
newPath,
});
@@ -559,7 +559,7 @@ describe(StorageTemplateService.name, () => {
mocks.move.create.mockResolvedValue({
id: '123',
entityId: asset.id,
pathType: AssetPathType.ORIGINAL,
pathType: AssetPathType.Original,
oldPath: asset.originalPath,
newPath: `upload/library/user-id/2022/2022-06-19/${asset.originalFileName}`,
});
@@ -592,7 +592,7 @@ describe(StorageTemplateService.name, () => {
mocks.move.create.mockResolvedValue({
id: 'move-123',
entityId: asset.id,
pathType: AssetPathType.ORIGINAL,
pathType: AssetPathType.Original,
oldPath: asset.originalPath,
newPath: '',
});
@@ -622,7 +622,7 @@ describe(StorageTemplateService.name, () => {
mocks.move.create.mockResolvedValue({
id: '123',
entityId: asset.id,
pathType: AssetPathType.ORIGINAL,
pathType: AssetPathType.Original,
oldPath: `upload/library/${user.id}/2022/2022-06-19/IMG_7065.heic`,
newPath: `upload/library/${user.id}/2023/2023-02-23/IMG_7065.heic`,
});
@@ -648,7 +648,7 @@ describe(StorageTemplateService.name, () => {
mocks.move.create.mockResolvedValue({
id: '123',
entityId: asset.id,
pathType: AssetPathType.ORIGINAL,
pathType: AssetPathType.Original,
oldPath: `upload/library/${user.id}/2022/2022-06-19/IMG_7065.HEIC`,
newPath: `upload/library/${user.id}/2023/2023-02-23/IMG_7065.heic`,
});
@@ -674,7 +674,7 @@ describe(StorageTemplateService.name, () => {
mocks.move.create.mockResolvedValue({
id: '123',
entityId: asset.id,
pathType: AssetPathType.ORIGINAL,
pathType: AssetPathType.Original,
oldPath: `upload/library/${user.id}/2022/2022-06-19/IMG_7065.JPEG`,
newPath: `upload/library/${user.id}/2023/2023-02-23/IMG_7065.jpg`,
});
@@ -700,7 +700,7 @@ describe(StorageTemplateService.name, () => {
mocks.move.create.mockResolvedValue({
id: '123',
entityId: asset.id,
pathType: AssetPathType.ORIGINAL,
pathType: AssetPathType.Original,
oldPath: `upload/library/${user.id}/2022/2022-06-19/IMG_7065.JPG`,
newPath: `upload/library/${user.id}/2023/2023-02-23/IMG_7065.jpg`,
});

View File

@@ -97,7 +97,7 @@ export class StorageTemplateService extends BaseService {
asset: {
fileCreatedAt: new Date(),
originalPath: '/upload/test/IMG_123.jpg',
type: AssetType.IMAGE,
type: AssetType.Image,
id: 'd587e44b-f8c0-4832-9ba3-43268bbf5d4e',
} as StorageAsset,
filename: 'IMG_123',
@@ -118,20 +118,20 @@ export class StorageTemplateService extends BaseService {
@OnEvent({ name: 'AssetMetadataExtracted' })
async onAssetMetadataExtracted({ source, assetId }: ArgOf<'AssetMetadataExtracted'>) {
await this.jobRepository.queue({ name: JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE, data: { source, id: assetId } });
await this.jobRepository.queue({ name: JobName.StorageTemplateMigrationSingle, data: { source, id: assetId } });
}
@OnJob({ name: JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE, queue: QueueName.STORAGE_TEMPLATE_MIGRATION })
async handleMigrationSingle({ id }: JobOf<JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE>): Promise<JobStatus> {
@OnJob({ name: JobName.StorageTemplateMigrationSingle, queue: QueueName.StorageTemplateMigration })
async handleMigrationSingle({ id }: JobOf<JobName.StorageTemplateMigrationSingle>): Promise<JobStatus> {
const config = await this.getConfig({ withCache: true });
const storageTemplateEnabled = config.storageTemplate.enabled;
if (!storageTemplateEnabled) {
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
const asset = await this.assetJobRepository.getForStorageTemplateJob(id);
if (!asset) {
return JobStatus.FAILED;
return JobStatus.Failed;
}
const user = await this.userRepository.get(asset.ownerId, {});
@@ -143,22 +143,22 @@ export class StorageTemplateService extends BaseService {
if (asset.livePhotoVideoId) {
const livePhotoVideo = await this.assetJobRepository.getForStorageTemplateJob(asset.livePhotoVideoId);
if (!livePhotoVideo) {
return JobStatus.FAILED;
return JobStatus.Failed;
}
const motionFilename = getLivePhotoMotionFilename(filename, livePhotoVideo.originalPath);
await this.moveAsset(livePhotoVideo, { storageLabel, filename: motionFilename });
}
return JobStatus.SUCCESS;
return JobStatus.Success;
}
@OnJob({ name: JobName.STORAGE_TEMPLATE_MIGRATION, queue: QueueName.STORAGE_TEMPLATE_MIGRATION })
@OnJob({ name: JobName.StorageTemplateMigration, queue: QueueName.StorageTemplateMigration })
async handleMigration(): Promise<JobStatus> {
this.logger.log('Starting storage template migration');
const { storageTemplate } = await this.getConfig({ withCache: true });
const { enabled } = storageTemplate;
if (!enabled) {
this.logger.log('Storage template migration disabled, skipping');
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
await this.moveRepository.cleanMoveHistory();
@@ -174,12 +174,12 @@ export class StorageTemplateService extends BaseService {
}
this.logger.debug('Cleaning up empty directories...');
const libraryFolder = StorageCore.getBaseFolder(StorageFolder.LIBRARY);
const libraryFolder = StorageCore.getBaseFolder(StorageFolder.Library);
await this.storageRepository.removeEmptyDirs(libraryFolder);
this.logger.log('Finished storage template migration');
return JobStatus.SUCCESS;
return JobStatus.Success;
}
@OnEvent({ name: 'AssetDelete' })
@@ -208,7 +208,7 @@ export class StorageTemplateService extends BaseService {
try {
await this.storageCore.moveFile({
entityId: id,
pathType: AssetPathType.ORIGINAL,
pathType: AssetPathType.Original,
oldPath,
newPath,
assetInfo: { sizeInBytes: fileSizeInByte, checksum },
@@ -216,7 +216,7 @@ export class StorageTemplateService extends BaseService {
if (sidecarPath) {
await this.storageCore.moveFile({
entityId: id,
pathType: AssetPathType.SIDECAR,
pathType: AssetPathType.Sidecar,
oldPath: sidecarPath,
newPath: `${newPath}.xmp`,
});
@@ -357,8 +357,8 @@ export class StorageTemplateService extends BaseService {
const substitutions: Record<string, string> = {
filename,
ext: extension,
filetype: asset.type == AssetType.IMAGE ? 'IMG' : 'VID',
filetypefull: asset.type == AssetType.IMAGE ? 'IMAGE' : 'VIDEO',
filetype: asset.type == AssetType.Image ? 'IMG' : 'VID',
filetypefull: asset.type == AssetType.Image ? 'IMAGE' : 'VIDEO',
assetId: asset.id,
assetIdShort: asset.id.slice(-12),
//just throw into the root if it doesn't belong to an album

View File

@@ -22,7 +22,7 @@ describe(StorageService.name, () => {
await expect(sut.onBootstrap()).resolves.toBeUndefined();
expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.SYSTEM_FLAGS, {
expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.SystemFlags, {
mountChecks: {
backups: true,
'encoded-video': true,
@@ -60,7 +60,7 @@ describe(StorageService.name, () => {
await expect(sut.onBootstrap()).resolves.toBeUndefined();
expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.SYSTEM_FLAGS, {
expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.SystemFlags, {
mountChecks: {
backups: true,
'encoded-video': true,

View File

@@ -17,7 +17,7 @@ export class StorageService extends BaseService {
await this.databaseRepository.withLock(DatabaseLock.SystemFileMounts, async () => {
const flags =
(await this.systemMetadataRepository.get(SystemMetadataKey.SYSTEM_FLAGS)) ||
(await this.systemMetadataRepository.get(SystemMetadataKey.SystemFlags)) ||
({ mountChecks: {} } as SystemFlags);
if (!flags.mountChecks) {
@@ -46,7 +46,7 @@ export class StorageService extends BaseService {
}
if (updated) {
await this.systemMetadataRepository.set(SystemMetadataKey.SYSTEM_FLAGS, flags);
await this.systemMetadataRepository.set(SystemMetadataKey.SystemFlags, flags);
this.logger.log('Successfully enabled system mount folders checks');
}
@@ -62,8 +62,8 @@ export class StorageService extends BaseService {
});
}
@OnJob({ name: JobName.DELETE_FILES, queue: QueueName.BACKGROUND_TASK })
async handleDeleteFiles(job: JobOf<JobName.DELETE_FILES>): Promise<JobStatus> {
@OnJob({ name: JobName.DeleteFiles, queue: QueueName.BackgroundTask })
async handleDeleteFiles(job: JobOf<JobName.DeleteFiles>): Promise<JobStatus> {
const { files } = job;
// TODO: one job per file
@@ -79,7 +79,7 @@ export class StorageService extends BaseService {
}
}
return JobStatus.SUCCESS;
return JobStatus.Success;
}
private async verifyReadAccess(folder: StorageFolder) {

View File

@@ -640,7 +640,7 @@ export class SyncService extends BaseService {
async getFullSync(auth: AuthDto, dto: AssetFullSyncDto): Promise<AssetResponseDto[]> {
// mobile implementation is faster if this is a single id
const userId = dto.userId || auth.user.id;
await this.requireAccess({ auth, permission: Permission.TIMELINE_READ, ids: [userId] });
await this.requireAccess({ auth, permission: Permission.TimelineRead, ids: [userId] });
const assets = await this.assetRepository.getAllForUserFullSync({
ownerId: userId,
updatedUntil: dto.updatedUntil,
@@ -664,7 +664,7 @@ export class SyncService extends BaseService {
return FULL_SYNC;
}
await this.requireAccess({ auth, permission: Permission.TIMELINE_READ, ids: dto.userIds });
await this.requireAccess({ auth, permission: Permission.TimelineRead, ids: dto.userIds });
const limit = 10_000;
const upserted = await this.assetRepository.getChangedDeltaSync({ limit, updatedAfter: dto.updatedAfter, userIds });
@@ -676,8 +676,8 @@ export class SyncService extends BaseService {
const deleted = await this.auditRepository.getAfter(dto.updatedAfter, {
userIds,
entityType: EntityType.ASSET,
action: DatabaseAction.DELETE,
entityType: EntityType.Asset,
action: DatabaseAction.Delete,
});
const result = {
@@ -686,7 +686,7 @@ export class SyncService extends BaseService {
// do not return archived assets for partner users
.filter(
(a) =>
a.ownerId === auth.user.id || (a.ownerId !== auth.user.id && a.visibility === AssetVisibility.TIMELINE),
a.ownerId === auth.user.id || (a.ownerId !== auth.user.id && a.visibility === AssetVisibility.Timeline),
)
.map((a) =>
mapAsset(a, {

View File

@@ -9,7 +9,7 @@ import {
OAuthTokenEndpointAuthMethod,
QueueName,
ToneMapping,
TranscodeHWAccel,
TranscodeHardwareAcceleration,
TranscodePolicy,
VideoCodec,
VideoContainer,
@@ -28,17 +28,17 @@ const partialConfig = {
const updatedConfig = Object.freeze<SystemConfig>({
job: {
[QueueName.BACKGROUND_TASK]: { concurrency: 5 },
[QueueName.SMART_SEARCH]: { concurrency: 2 },
[QueueName.METADATA_EXTRACTION]: { concurrency: 5 },
[QueueName.FACE_DETECTION]: { concurrency: 2 },
[QueueName.SEARCH]: { concurrency: 5 },
[QueueName.SIDECAR]: { concurrency: 5 },
[QueueName.LIBRARY]: { concurrency: 5 },
[QueueName.MIGRATION]: { concurrency: 5 },
[QueueName.THUMBNAIL_GENERATION]: { concurrency: 3 },
[QueueName.VIDEO_CONVERSION]: { concurrency: 1 },
[QueueName.NOTIFICATION]: { concurrency: 5 },
[QueueName.BackgroundTask]: { concurrency: 5 },
[QueueName.SmartSearch]: { concurrency: 2 },
[QueueName.MetadataExtraction]: { concurrency: 5 },
[QueueName.FaceDetection]: { concurrency: 2 },
[QueueName.Search]: { concurrency: 5 },
[QueueName.Sidecar]: { concurrency: 5 },
[QueueName.Library]: { concurrency: 5 },
[QueueName.Migration]: { concurrency: 5 },
[QueueName.ThumbnailGeneration]: { concurrency: 3 },
[QueueName.VideoConversion]: { concurrency: 1 },
[QueueName.Notification]: { concurrency: 5 },
},
backup: {
database: {
@@ -51,28 +51,28 @@ const updatedConfig = Object.freeze<SystemConfig>({
crf: 30,
threads: 0,
preset: 'ultrafast',
targetAudioCodec: AudioCodec.AAC,
acceptedAudioCodecs: [AudioCodec.AAC, AudioCodec.MP3, AudioCodec.LIBOPUS, AudioCodec.PCMS16LE],
targetAudioCodec: AudioCodec.Aac,
acceptedAudioCodecs: [AudioCodec.Aac, AudioCodec.Mp3, AudioCodec.LibOpus, AudioCodec.PcmS16le],
targetResolution: '720',
targetVideoCodec: VideoCodec.H264,
acceptedVideoCodecs: [VideoCodec.H264],
acceptedContainers: [VideoContainer.MOV, VideoContainer.OGG, VideoContainer.WEBM],
acceptedContainers: [VideoContainer.Mov, VideoContainer.Ogg, VideoContainer.Webm],
maxBitrate: '0',
bframes: -1,
refs: 0,
gopSize: 0,
temporalAQ: false,
cqMode: CQMode.AUTO,
cqMode: CQMode.Auto,
twoPass: false,
preferredHwDevice: 'auto',
transcode: TranscodePolicy.REQUIRED,
accel: TranscodeHWAccel.DISABLED,
transcode: TranscodePolicy.Required,
accel: TranscodeHardwareAcceleration.Disabled,
accelDecode: false,
tonemap: ToneMapping.HABLE,
tonemap: ToneMapping.Hable,
},
logging: {
enabled: true,
level: LogLevel.LOG,
level: LogLevel.Log,
},
metadata: {
faces: {
@@ -128,7 +128,7 @@ const updatedConfig = Object.freeze<SystemConfig>({
scope: 'openid email profile',
signingAlgorithm: 'RS256',
profileSigningAlgorithm: 'none',
tokenEndpointAuthMethod: OAuthTokenEndpointAuthMethod.CLIENT_SECRET_POST,
tokenEndpointAuthMethod: OAuthTokenEndpointAuthMethod.ClientSecretPost,
timeout: 30_000,
storageLabelClaim: 'preferred_username',
storageQuotaClaim: 'immich_quota',
@@ -150,15 +150,15 @@ const updatedConfig = Object.freeze<SystemConfig>({
image: {
thumbnail: {
size: 250,
format: ImageFormat.WEBP,
format: ImageFormat.Webp,
quality: 80,
},
preview: {
size: 1440,
format: ImageFormat.JPEG,
format: ImageFormat.Jpeg,
quality: 80,
},
fullsize: { enabled: false, format: ImageFormat.JPEG, quality: 80 },
fullsize: { enabled: false, format: ImageFormat.Jpeg, quality: 80 },
colorspace: Colorspace.P3,
extractEmbedded: false,
},

View File

@@ -30,12 +30,12 @@ describe(SystemMetadataService.name, () => {
describe('updateAdminOnboarding', () => {
it('should update isOnboarded to true', async () => {
await expect(sut.updateAdminOnboarding({ isOnboarded: true })).resolves.toBeUndefined();
expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.ADMIN_ONBOARDING, { isOnboarded: true });
expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.AdminOnboarding, { isOnboarded: true });
});
it('should update isOnboarded to false', async () => {
await expect(sut.updateAdminOnboarding({ isOnboarded: false })).resolves.toBeUndefined();
expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.ADMIN_ONBOARDING, { isOnboarded: false });
expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.AdminOnboarding, { isOnboarded: false });
});
});

View File

@@ -11,23 +11,23 @@ import { BaseService } from 'src/services/base.service';
@Injectable()
export class SystemMetadataService extends BaseService {
async getAdminOnboarding(): Promise<AdminOnboardingResponseDto> {
const value = await this.systemMetadataRepository.get(SystemMetadataKey.ADMIN_ONBOARDING);
const value = await this.systemMetadataRepository.get(SystemMetadataKey.AdminOnboarding);
return { isOnboarded: false, ...value };
}
async updateAdminOnboarding(dto: AdminOnboardingUpdateDto): Promise<void> {
await this.systemMetadataRepository.set(SystemMetadataKey.ADMIN_ONBOARDING, {
await this.systemMetadataRepository.set(SystemMetadataKey.AdminOnboarding, {
isOnboarded: dto.isOnboarded,
});
}
async getReverseGeocodingState(): Promise<ReverseGeocodingStateResponseDto> {
const value = await this.systemMetadataRepository.get(SystemMetadataKey.REVERSE_GEOCODING_STATE);
const value = await this.systemMetadataRepository.get(SystemMetadataKey.ReverseGeocodingState);
return { lastUpdate: null, lastImportFileName: null, ...value };
}
async getVersionCheckState(): Promise<VersionCheckStateResponseDto> {
const value = await this.systemMetadataRepository.get(SystemMetadataKey.VERSION_CHECK_STATE);
const value = await this.systemMetadataRepository.get(SystemMetadataKey.VersionCheckState);
return { checkedAt: null, releaseVersion: null, ...value };
}
}

View File

@@ -278,7 +278,7 @@ describe(TagService.name, () => {
it('should delete empty tags', async () => {
mocks.tag.deleteEmptyTags.mockResolvedValue();
await expect(sut.handleTagCleanup()).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleTagCleanup()).resolves.toBe(JobStatus.Success);
expect(mocks.tag.deleteEmptyTags).toHaveBeenCalled();
});

View File

@@ -26,7 +26,7 @@ export class TagService extends BaseService {
}
async get(auth: AuthDto, id: string): Promise<TagResponseDto> {
await this.requireAccess({ auth, permission: Permission.TAG_READ, ids: [id] });
await this.requireAccess({ auth, permission: Permission.TagRead, ids: [id] });
const tag = await this.findOrFail(id);
return mapTag(tag);
}
@@ -34,7 +34,7 @@ export class TagService extends BaseService {
async create(auth: AuthDto, dto: TagCreateDto) {
let parent;
if (dto.parentId) {
await this.requireAccess({ auth, permission: Permission.TAG_READ, ids: [dto.parentId] });
await this.requireAccess({ auth, permission: Permission.TagRead, ids: [dto.parentId] });
parent = await this.tagRepository.get(dto.parentId);
if (!parent) {
throw new BadRequestException('Tag not found');
@@ -55,7 +55,7 @@ export class TagService extends BaseService {
}
async update(auth: AuthDto, id: string, dto: TagUpdateDto): Promise<TagResponseDto> {
await this.requireAccess({ auth, permission: Permission.TAG_UPDATE, ids: [id] });
await this.requireAccess({ auth, permission: Permission.TagUpdate, ids: [id] });
const { color } = dto;
const tag = await this.tagRepository.update(id, { color });
@@ -68,7 +68,7 @@ export class TagService extends BaseService {
}
async remove(auth: AuthDto, id: string): Promise<void> {
await this.requireAccess({ auth, permission: Permission.TAG_DELETE, ids: [id] });
await this.requireAccess({ auth, permission: Permission.TagDelete, ids: [id] });
// TODO sync tag changes for affected assets
@@ -77,8 +77,8 @@ export class TagService extends BaseService {
async bulkTagAssets(auth: AuthDto, dto: TagBulkAssetsDto): Promise<TagBulkAssetsResponseDto> {
const [tagIds, assetIds] = await Promise.all([
this.checkAccess({ auth, permission: Permission.TAG_ASSET, ids: dto.tagIds }),
this.checkAccess({ auth, permission: Permission.ASSET_UPDATE, ids: dto.assetIds }),
this.checkAccess({ auth, permission: Permission.TagAsset, ids: dto.tagIds }),
this.checkAccess({ auth, permission: Permission.AssetUpdate, ids: dto.assetIds }),
]);
const items: Insertable<TagAssetTable>[] = [];
@@ -97,7 +97,7 @@ export class TagService extends BaseService {
}
async addAssets(auth: AuthDto, id: string, dto: BulkIdsDto): Promise<BulkIdResponseDto[]> {
await this.requireAccess({ auth, permission: Permission.TAG_ASSET, ids: [id] });
await this.requireAccess({ auth, permission: Permission.TagAsset, ids: [id] });
const results = await addAssets(
auth,
@@ -115,12 +115,12 @@ export class TagService extends BaseService {
}
async removeAssets(auth: AuthDto, id: string, dto: BulkIdsDto): Promise<BulkIdResponseDto[]> {
await this.requireAccess({ auth, permission: Permission.TAG_ASSET, ids: [id] });
await this.requireAccess({ auth, permission: Permission.TagAsset, ids: [id] });
const results = await removeAssets(
auth,
{ access: this.accessRepository, bulk: this.tagRepository },
{ parentId: id, assetIds: dto.ids, canAlwaysRemove: Permission.TAG_DELETE },
{ parentId: id, assetIds: dto.ids, canAlwaysRemove: Permission.TagDelete },
);
for (const { id: assetId, success } of results) {
@@ -132,10 +132,10 @@ export class TagService extends BaseService {
return results;
}
@OnJob({ name: JobName.TAG_CLEANUP, queue: QueueName.BACKGROUND_TASK })
@OnJob({ name: JobName.TagCleanup, queue: QueueName.BackgroundTask })
async handleTagCleanup() {
await this.tagRepository.deleteEmptyTags();
return JobStatus.SUCCESS;
return JobStatus.Success;
}
private async findOrFail(id: string) {

View File

@@ -49,7 +49,7 @@ describe(TimelineService.name, () => {
await expect(
sut.getTimeBucket(authStub.admin, {
timeBucket: 'bucket',
visibility: AssetVisibility.ARCHIVE,
visibility: AssetVisibility.Archive,
userId: authStub.admin.user.id,
}),
).resolves.toEqual(json);
@@ -57,7 +57,7 @@ describe(TimelineService.name, () => {
'bucket',
expect.objectContaining({
timeBucket: 'bucket',
visibility: AssetVisibility.ARCHIVE,
visibility: AssetVisibility.Archive,
userIds: [authStub.admin.user.id],
}),
);
@@ -71,14 +71,14 @@ describe(TimelineService.name, () => {
await expect(
sut.getTimeBucket(authStub.admin, {
timeBucket: 'bucket',
visibility: AssetVisibility.TIMELINE,
visibility: AssetVisibility.Timeline,
userId: authStub.admin.user.id,
withPartners: true,
}),
).resolves.toEqual(json);
expect(mocks.asset.getTimeBucket).toHaveBeenCalledWith('bucket', {
timeBucket: 'bucket',
visibility: AssetVisibility.TIMELINE,
visibility: AssetVisibility.Timeline,
withPartners: true,
userIds: [authStub.admin.user.id],
});
@@ -126,7 +126,7 @@ describe(TimelineService.name, () => {
await expect(
sut.getTimeBucket(authStub.admin, {
timeBucket: 'bucket',
visibility: AssetVisibility.ARCHIVE,
visibility: AssetVisibility.Archive,
withPartners: true,
userId: authStub.admin.user.id,
}),

View File

@@ -45,29 +45,29 @@ export class TimelineService extends BaseService {
}
private async timeBucketChecks(auth: AuthDto, dto: TimeBucketDto) {
if (dto.visibility === AssetVisibility.LOCKED) {
if (dto.visibility === AssetVisibility.Locked) {
requireElevatedPermission(auth);
}
if (dto.albumId) {
await this.requireAccess({ auth, permission: Permission.ALBUM_READ, ids: [dto.albumId] });
await this.requireAccess({ auth, permission: Permission.AlbumRead, ids: [dto.albumId] });
} else {
dto.userId = dto.userId || auth.user.id;
}
if (dto.userId) {
await this.requireAccess({ auth, permission: Permission.TIMELINE_READ, ids: [dto.userId] });
if (dto.visibility === AssetVisibility.ARCHIVE) {
await this.requireAccess({ auth, permission: Permission.ARCHIVE_READ, ids: [dto.userId] });
await this.requireAccess({ auth, permission: Permission.TimelineRead, ids: [dto.userId] });
if (dto.visibility === AssetVisibility.Archive) {
await this.requireAccess({ auth, permission: Permission.ArchiveRead, ids: [dto.userId] });
}
}
if (dto.tagId) {
await this.requireAccess({ auth, permission: Permission.TAG_READ, ids: [dto.tagId] });
await this.requireAccess({ auth, permission: Permission.TagRead, ids: [dto.tagId] });
}
if (dto.withPartners) {
const requestedArchived = dto.visibility === AssetVisibility.ARCHIVE || dto.visibility === undefined;
const requestedArchived = dto.visibility === AssetVisibility.Archive || dto.visibility === undefined;
const requestedFavorite = dto.isFavorite === true || dto.isFavorite === false;
const requestedTrash = dto.isTrashed === true;

View File

@@ -77,24 +77,24 @@ describe(TrashService.name, () => {
mocks.trash.empty.mockResolvedValue(1);
await expect(sut.empty(authStub.user1)).resolves.toEqual({ count: 1 });
expect(mocks.trash.empty).toHaveBeenCalledWith('user-id');
expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QUEUE_TRASH_EMPTY, data: {} });
expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QueueTrashEmpty, data: {} });
});
});
describe('onAssetsDelete', () => {
it('should queue the empty trash job', async () => {
await expect(sut.onAssetsDelete()).resolves.toBeUndefined();
expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QUEUE_TRASH_EMPTY, data: {} });
expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.QueueTrashEmpty, data: {} });
});
});
describe('handleQueueEmptyTrash', () => {
it('should queue asset delete jobs', async () => {
mocks.trash.getDeletedIds.mockReturnValue(makeAssetIdStream(1));
await expect(sut.handleQueueEmptyTrash()).resolves.toEqual(JobStatus.SUCCESS);
await expect(sut.handleQueueEmptyTrash()).resolves.toEqual(JobStatus.Success);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.ASSET_DELETION,
name: JobName.AssetDeletion,
data: { id: 'asset-1', deleteOnDisk: true },
},
]);

View File

@@ -15,7 +15,7 @@ export class TrashService extends BaseService {
return { count: 0 };
}
await this.requireAccess({ auth, permission: Permission.ASSET_DELETE, ids });
await this.requireAccess({ auth, permission: Permission.AssetDelete, ids });
await this.trashRepository.restoreAll(ids);
await this.eventRepository.emit('AssetRestoreAll', { assetIds: ids, userId: auth.user.id });
@@ -35,17 +35,17 @@ export class TrashService extends BaseService {
async empty(auth: AuthDto): Promise<TrashResponseDto> {
const count = await this.trashRepository.empty(auth.user.id);
if (count > 0) {
await this.jobRepository.queue({ name: JobName.QUEUE_TRASH_EMPTY, data: {} });
await this.jobRepository.queue({ name: JobName.QueueTrashEmpty, data: {} });
}
return { count };
}
@OnEvent({ name: 'AssetDeleteAll' })
async onAssetsDelete() {
await this.jobRepository.queue({ name: JobName.QUEUE_TRASH_EMPTY, data: {} });
await this.jobRepository.queue({ name: JobName.QueueTrashEmpty, data: {} });
}
@OnJob({ name: JobName.QUEUE_TRASH_EMPTY, queue: QueueName.BACKGROUND_TASK })
@OnJob({ name: JobName.QueueTrashEmpty, queue: QueueName.BackgroundTask })
async handleQueueEmptyTrash() {
const assets = this.trashRepository.getDeletedIds();
@@ -67,14 +67,14 @@ export class TrashService extends BaseService {
this.logger.log(`Queued ${count} asset(s) for deletion from the trash`);
return JobStatus.SUCCESS;
return JobStatus.Success;
}
private async handleBatch(ids: string[]) {
this.logger.debug(`Queueing ${ids.length} asset(s) for deletion from the trash`);
await this.jobRepository.queueAll(
ids.map((assetId) => ({
name: JobName.ASSET_DELETION,
name: JobName.AssetDeletion,
data: {
id: assetId,
deleteOnDisk: true,

View File

@@ -140,7 +140,7 @@ describe(UserAdminService.name, () => {
await expect(sut.delete(authStub.admin, userStub.user1.id, {})).resolves.toEqual(mapUserAdmin(userStub.user1));
expect(mocks.user.update).toHaveBeenCalledWith(userStub.user1.id, {
status: UserStatus.DELETED,
status: UserStatus.Deleted,
deletedAt: expect.any(Date),
});
});
@@ -154,11 +154,11 @@ describe(UserAdminService.name, () => {
);
expect(mocks.user.update).toHaveBeenCalledWith(userStub.user1.id, {
status: UserStatus.REMOVING,
status: UserStatus.Removing,
deletedAt: expect.any(Date),
});
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.USER_DELETION,
name: JobName.UserDeletion,
data: { id: userStub.user1.id, force: true },
});
});

View File

@@ -100,11 +100,11 @@ export class UserAdminService extends BaseService {
await this.albumRepository.softDeleteAll(id);
const status = force ? UserStatus.REMOVING : UserStatus.DELETED;
const status = force ? UserStatus.Removing : UserStatus.Deleted;
const user = await this.userRepository.update(id, { status, deletedAt: new Date() });
if (force) {
await this.jobRepository.queue({ name: JobName.USER_DELETION, data: { id: user.id, force } });
await this.jobRepository.queue({ name: JobName.UserDeletion, data: { id: user.id, force } });
}
return mapUserAdmin(user);
@@ -134,7 +134,7 @@ export class UserAdminService extends BaseService {
const newPreferences = mergePreferences(getPreferences(metadata), dto);
await this.userRepository.upsertMetadata(id, {
key: UserMetadataKey.PREFERENCES,
key: UserMetadataKey.Preferences,
value: getPreferencesPartial(newPreferences),
});

View File

@@ -122,7 +122,7 @@ describe(UserService.name, () => {
await sut.createProfileImage(authStub.admin, file);
expect(mocks.job.queue.mock.calls).toEqual([[{ name: JobName.DELETE_FILES, data: { files } }]]);
expect(mocks.job.queue.mock.calls).toEqual([[{ name: JobName.DeleteFiles, data: { files } }]]);
});
it('should not delete the profile image if it has not been set', async () => {
@@ -156,7 +156,7 @@ describe(UserService.name, () => {
await sut.deleteProfileImage(authStub.admin);
expect(mocks.job.queue.mock.calls).toEqual([[{ name: JobName.DELETE_FILES, data: { files } }]]);
expect(mocks.job.queue.mock.calls).toEqual([[{ name: JobName.DeleteFiles, data: { files } }]]);
});
});
@@ -185,7 +185,7 @@ describe(UserService.name, () => {
new ImmichFileResponse({
path: '/path/to/profile.jpg',
contentType: 'image/jpeg',
cacheControl: CacheControl.NONE,
cacheControl: CacheControl.None,
}),
);
@@ -211,7 +211,7 @@ describe(UserService.name, () => {
await sut.handleUserDeleteCheck();
expect(mocks.user.getDeletedAfter).toHaveBeenCalled();
expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.USER_DELETION, data: { id: user.id } }]);
expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.UserDeletion, data: { id: user.id } }]);
});
});
@@ -266,7 +266,7 @@ describe(UserService.name, () => {
await sut.setLicense(authStub.user1, license);
expect(mocks.user.upsertMetadata).toHaveBeenCalledWith(authStub.user1.user.id, {
key: UserMetadataKey.LICENSE,
key: UserMetadataKey.License,
value: expect.any(Object),
});
});
@@ -279,7 +279,7 @@ describe(UserService.name, () => {
await sut.setLicense(authStub.user1, license);
expect(mocks.user.upsertMetadata).toHaveBeenCalledWith(authStub.user1.user.id, {
key: UserMetadataKey.LICENSE,
key: UserMetadataKey.License,
value: expect.any(Object),
});
});

View File

@@ -78,7 +78,7 @@ export class UserService extends BaseService {
const updated = mergePreferences(getPreferences(metadata), dto);
await this.userRepository.upsertMetadata(auth.user.id, {
key: UserMetadataKey.PREFERENCES,
key: UserMetadataKey.Preferences,
value: getPreferencesPartial(updated),
});
@@ -99,7 +99,7 @@ export class UserService extends BaseService {
});
if (oldpath !== '') {
await this.jobRepository.queue({ name: JobName.DELETE_FILES, data: { files: [oldpath] } });
await this.jobRepository.queue({ name: JobName.DeleteFiles, data: { files: [oldpath] } });
}
return {
@@ -115,7 +115,7 @@ export class UserService extends BaseService {
throw new BadRequestException("Can't delete a missing profile Image");
}
await this.userRepository.update(auth.user.id, { profileImagePath: '', profileChangedAt: new Date() });
await this.jobRepository.queue({ name: JobName.DELETE_FILES, data: { files: [user.profileImagePath] } });
await this.jobRepository.queue({ name: JobName.DeleteFiles, data: { files: [user.profileImagePath] } });
}
async getProfileImage(id: string): Promise<ImmichFileResponse> {
@@ -127,7 +127,7 @@ export class UserService extends BaseService {
return new ImmichFileResponse({
path: user.profileImagePath,
contentType: 'image/jpeg',
cacheControl: CacheControl.NONE,
cacheControl: CacheControl.None,
});
}
@@ -135,7 +135,7 @@ export class UserService extends BaseService {
const metadata = await this.userRepository.getMetadata(auth.user.id);
const license = metadata.find(
(item): item is UserMetadataItem<UserMetadataKey.LICENSE> => item.key === UserMetadataKey.LICENSE,
(item): item is UserMetadataItem<UserMetadataKey.License> => item.key === UserMetadataKey.License,
);
if (!license) {
throw new NotFoundException();
@@ -144,7 +144,7 @@ export class UserService extends BaseService {
}
async deleteLicense({ user }: AuthDto): Promise<void> {
await this.userRepository.deleteMetadata(user.id, UserMetadataKey.LICENSE);
await this.userRepository.deleteMetadata(user.id, UserMetadataKey.License);
}
async setLicense(auth: AuthDto, license: LicenseKeyDto): Promise<LicenseResponseDto> {
@@ -173,7 +173,7 @@ export class UserService extends BaseService {
const activatedAt = new Date();
await this.userRepository.upsertMetadata(auth.user.id, {
key: UserMetadataKey.LICENSE,
key: UserMetadataKey.License,
value: { ...license, activatedAt: activatedAt.toISOString() },
});
@@ -184,7 +184,7 @@ export class UserService extends BaseService {
const metadata = await this.userRepository.getMetadata(auth.user.id);
const onboardingData = metadata.find(
(item): item is UserMetadataItem<UserMetadataKey.ONBOARDING> => item.key === UserMetadataKey.ONBOARDING,
(item): item is UserMetadataItem<UserMetadataKey.Onboarding> => item.key === UserMetadataKey.Onboarding,
)?.value;
if (!onboardingData) {
@@ -197,12 +197,12 @@ export class UserService extends BaseService {
}
async deleteOnboarding({ user }: AuthDto): Promise<void> {
await this.userRepository.deleteMetadata(user.id, UserMetadataKey.ONBOARDING);
await this.userRepository.deleteMetadata(user.id, UserMetadataKey.Onboarding);
}
async setOnboarding(auth: AuthDto, onboarding: OnboardingDto): Promise<OnboardingResponseDto> {
await this.userRepository.upsertMetadata(auth.user.id, {
key: UserMetadataKey.ONBOARDING,
key: UserMetadataKey.Onboarding,
value: {
isOnboarded: onboarding.isOnboarded,
},
@@ -213,42 +213,42 @@ export class UserService extends BaseService {
};
}
@OnJob({ name: JobName.USER_SYNC_USAGE, queue: QueueName.BACKGROUND_TASK })
@OnJob({ name: JobName.userSyncUsage, queue: QueueName.BackgroundTask })
async handleUserSyncUsage(): Promise<JobStatus> {
await this.userRepository.syncUsage();
return JobStatus.SUCCESS;
return JobStatus.Success;
}
@OnJob({ name: JobName.USER_DELETE_CHECK, queue: QueueName.BACKGROUND_TASK })
@OnJob({ name: JobName.UserDeleteCheck, queue: QueueName.BackgroundTask })
async handleUserDeleteCheck(): Promise<JobStatus> {
const config = await this.getConfig({ withCache: false });
const users = await this.userRepository.getDeletedAfter(DateTime.now().minus({ days: config.user.deleteDelay }));
await this.jobRepository.queueAll(users.map((user) => ({ name: JobName.USER_DELETION, data: { id: user.id } })));
return JobStatus.SUCCESS;
await this.jobRepository.queueAll(users.map((user) => ({ name: JobName.UserDeletion, data: { id: user.id } })));
return JobStatus.Success;
}
@OnJob({ name: JobName.USER_DELETION, queue: QueueName.BACKGROUND_TASK })
async handleUserDelete({ id, force }: JobOf<JobName.USER_DELETION>): Promise<JobStatus> {
@OnJob({ name: JobName.UserDeletion, queue: QueueName.BackgroundTask })
async handleUserDelete({ id, force }: JobOf<JobName.UserDeletion>): Promise<JobStatus> {
const config = await this.getConfig({ withCache: false });
const user = await this.userRepository.get(id, { withDeleted: true });
if (!user) {
return JobStatus.FAILED;
return JobStatus.Failed;
}
// just for extra protection here
if (!force && !this.isReadyForDeletion(user, config.user.deleteDelay)) {
this.logger.warn(`Skipped user that was not ready for deletion: id=${id}`);
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
this.logger.log(`Deleting user: ${user.id}`);
const folders = [
StorageCore.getLibraryFolder(user),
StorageCore.getFolderLocation(StorageFolder.UPLOAD, user.id),
StorageCore.getFolderLocation(StorageFolder.PROFILE, user.id),
StorageCore.getFolderLocation(StorageFolder.THUMBNAILS, user.id),
StorageCore.getFolderLocation(StorageFolder.ENCODED_VIDEO, user.id),
StorageCore.getFolderLocation(StorageFolder.Upload, user.id),
StorageCore.getFolderLocation(StorageFolder.Profile, user.id),
StorageCore.getFolderLocation(StorageFolder.Thumbnails, user.id),
StorageCore.getFolderLocation(StorageFolder.EncodedVideo, user.id),
];
for (const folder of folders) {
@@ -260,7 +260,7 @@ export class UserService extends BaseService {
await this.albumRepository.deleteAll(user.id);
await this.userRepository.delete(user, true);
return JobStatus.SUCCESS;
return JobStatus.Success;
}
private isReadyForDeletion(user: { id: string; deletedAt?: Date | null }, deleteDelay: number): boolean {

View File

@@ -72,18 +72,18 @@ describe(VersionService.name, () => {
describe('handQueueVersionCheck', () => {
it('should queue a version check job', async () => {
await expect(sut.handleQueueVersionCheck()).resolves.toBeUndefined();
expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.VERSION_CHECK, data: {} });
expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.VersionCheck, data: {} });
});
});
describe('handVersionCheck', () => {
beforeEach(() => {
mocks.config.getEnv.mockReturnValue(mockEnvData({ environment: ImmichEnvironment.PRODUCTION }));
mocks.config.getEnv.mockReturnValue(mockEnvData({ environment: ImmichEnvironment.Production }));
});
it('should not run in dev mode', async () => {
mocks.config.getEnv.mockReturnValue(mockEnvData({ environment: ImmichEnvironment.DEVELOPMENT }));
await expect(sut.handleVersionCheck()).resolves.toEqual(JobStatus.SKIPPED);
mocks.config.getEnv.mockReturnValue(mockEnvData({ environment: ImmichEnvironment.Development }));
await expect(sut.handleVersionCheck()).resolves.toEqual(JobStatus.Skipped);
});
it('should not run if the last check was < 60 minutes ago', async () => {
@@ -91,12 +91,12 @@ describe(VersionService.name, () => {
checkedAt: DateTime.utc().minus({ minutes: 5 }).toISO(),
releaseVersion: '1.0.0',
});
await expect(sut.handleVersionCheck()).resolves.toEqual(JobStatus.SKIPPED);
await expect(sut.handleVersionCheck()).resolves.toEqual(JobStatus.Skipped);
});
it('should not run if version check is disabled', async () => {
mocks.systemMetadata.get.mockResolvedValue({ newVersionCheck: { enabled: false } });
await expect(sut.handleVersionCheck()).resolves.toEqual(JobStatus.SKIPPED);
await expect(sut.handleVersionCheck()).resolves.toEqual(JobStatus.Skipped);
});
it('should run if it has been > 60 minutes', async () => {
@@ -105,7 +105,7 @@ describe(VersionService.name, () => {
checkedAt: DateTime.utc().minus({ minutes: 65 }).toISO(),
releaseVersion: '1.0.0',
});
await expect(sut.handleVersionCheck()).resolves.toEqual(JobStatus.SUCCESS);
await expect(sut.handleVersionCheck()).resolves.toEqual(JobStatus.Success);
expect(mocks.systemMetadata.set).toHaveBeenCalled();
expect(mocks.logger.log).toHaveBeenCalled();
expect(mocks.event.clientBroadcast).toHaveBeenCalled();
@@ -113,8 +113,8 @@ describe(VersionService.name, () => {
it('should not notify if the version is equal', async () => {
mocks.serverInfo.getGitHubRelease.mockResolvedValue(mockRelease(serverVersion.toString()));
await expect(sut.handleVersionCheck()).resolves.toEqual(JobStatus.SUCCESS);
expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.VERSION_CHECK_STATE, {
await expect(sut.handleVersionCheck()).resolves.toEqual(JobStatus.Success);
expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.VersionCheckState, {
checkedAt: expect.any(String),
releaseVersion: serverVersion.toString(),
});
@@ -123,7 +123,7 @@ describe(VersionService.name, () => {
it('should handle a github error', async () => {
mocks.serverInfo.getGitHubRelease.mockRejectedValue(new Error('GitHub is down'));
await expect(sut.handleVersionCheck()).resolves.toEqual(JobStatus.FAILED);
await expect(sut.handleVersionCheck()).resolves.toEqual(JobStatus.Failed);
expect(mocks.systemMetadata.set).not.toHaveBeenCalled();
expect(mocks.event.clientBroadcast).not.toHaveBeenCalled();
expect(mocks.logger.warn).toHaveBeenCalled();

View File

@@ -41,7 +41,7 @@ export class VersionService extends BaseService {
const needsNewMemories = semver.lt(previousVersion, '1.129.0');
if (needsNewMemories) {
await this.jobRepository.queue({ name: JobName.MEMORIES_CREATE });
await this.jobRepository.queue({ name: JobName.MemoriesCreate });
}
}
});
@@ -56,31 +56,31 @@ export class VersionService extends BaseService {
}
async handleQueueVersionCheck() {
await this.jobRepository.queue({ name: JobName.VERSION_CHECK, data: {} });
await this.jobRepository.queue({ name: JobName.VersionCheck, data: {} });
}
@OnJob({ name: JobName.VERSION_CHECK, queue: QueueName.BACKGROUND_TASK })
@OnJob({ name: JobName.VersionCheck, queue: QueueName.BackgroundTask })
async handleVersionCheck(): Promise<JobStatus> {
try {
this.logger.debug('Running version check');
const { environment } = this.configRepository.getEnv();
if (environment === ImmichEnvironment.DEVELOPMENT) {
return JobStatus.SKIPPED;
if (environment === ImmichEnvironment.Development) {
return JobStatus.Skipped;
}
const { newVersionCheck } = await this.getConfig({ withCache: true });
if (!newVersionCheck.enabled) {
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
const versionCheck = await this.systemMetadataRepository.get(SystemMetadataKey.VERSION_CHECK_STATE);
const versionCheck = await this.systemMetadataRepository.get(SystemMetadataKey.VersionCheckState);
if (versionCheck?.checkedAt) {
const lastUpdate = DateTime.fromISO(versionCheck.checkedAt);
const elapsedTime = DateTime.now().diff(lastUpdate).as('minutes');
// check once per hour (max)
if (elapsedTime < 60) {
return JobStatus.SKIPPED;
return JobStatus.Skipped;
}
}
@@ -88,7 +88,7 @@ export class VersionService extends BaseService {
await this.serverInfoRepository.getGitHubRelease();
const metadata: VersionCheckMetadata = { checkedAt: DateTime.utc().toISO(), releaseVersion };
await this.systemMetadataRepository.set(SystemMetadataKey.VERSION_CHECK_STATE, metadata);
await this.systemMetadataRepository.set(SystemMetadataKey.VersionCheckState, metadata);
if (semver.gt(releaseVersion, serverVersion)) {
this.logger.log(`Found ${releaseVersion}, released at ${new Date(publishedAt).toLocaleString()}`);
@@ -96,16 +96,16 @@ export class VersionService extends BaseService {
}
} catch (error: Error | any) {
this.logger.warn(`Unable to run version check: ${error}`, error?.stack);
return JobStatus.FAILED;
return JobStatus.Failed;
}
return JobStatus.SUCCESS;
return JobStatus.Success;
}
@OnEvent({ name: 'WebsocketConnect' })
async onWebsocketConnection({ userId }: ArgOf<'WebsocketConnect'>) {
this.eventRepository.clientSend('on_server_version', userId, serverVersion);
const metadata = await this.systemMetadataRepository.get(SystemMetadataKey.VERSION_CHECK_STATE);
const metadata = await this.systemMetadataRepository.get(SystemMetadataKey.VersionCheckState);
if (metadata) {
this.eventRepository.clientSend('on_new_release', userId, asNotification(metadata));
}