@@ -0,0 +1,182 @@
import { BadRequestException } from '@nestjs/common';
import { ReactionType } from 'src/dtos/activity.dto';
import { IActivityRepository } from 'src/interfaces/activity.repository';
import { ActivityService } from 'src/services/activity.service';
import { activityStub } from 'test/fixtures/activity.stub';
import { authStub } from 'test/fixtures/auth.stub';
import { IAccessRepositoryMock, newAccessRepositoryMock } from 'test/repositories/access.repository.mock';
import { newActivityRepositoryMock } from 'test/repositories/activity.repository.mock';

describe(ActivityService.name, () => {
  let sut: ActivityService;
  let accessMock: IAccessRepositoryMock;
  let activityMock: jest.Mocked<IActivityRepository>;

  beforeEach(() => {
    accessMock = newAccessRepositoryMock();
    activityMock = newActivityRepositoryMock();

    sut = new ActivityService(accessMock, activityMock);
  });

  it('should work', () => {
    expect(sut).toBeDefined();
  });

  describe('getAll', () => {
    it('should get all', async () => {
      accessMock.album.checkOwnerAccess.mockResolvedValue(new Set(['album-id']));
      activityMock.search.mockResolvedValue([]);

      await expect(sut.getAll(authStub.admin, { assetId: 'asset-id', albumId: 'album-id' })).resolves.toEqual([]);

      expect(activityMock.search).toHaveBeenCalledWith({
        assetId: 'asset-id',
        albumId: 'album-id',
        isLiked: undefined,
      });
    });

    it('should filter by type=like', async () => {
      accessMock.album.checkOwnerAccess.mockResolvedValue(new Set(['album-id']));
      activityMock.search.mockResolvedValue([]);

      await expect(
        sut.getAll(authStub.admin, { assetId: 'asset-id', albumId: 'album-id', type: ReactionType.LIKE }),
      ).resolves.toEqual([]);

      expect(activityMock.search).toHaveBeenCalledWith({
        assetId: 'asset-id',
        albumId: 'album-id',
        isLiked: true,
      });
    });

    it('should filter by type=comment', async () => {
      accessMock.album.checkOwnerAccess.mockResolvedValue(new Set(['album-id']));
      activityMock.search.mockResolvedValue([]);

      await expect(
        sut.getAll(authStub.admin, { assetId: 'asset-id', albumId: 'album-id', type: ReactionType.COMMENT }),
      ).resolves.toEqual([]);

      expect(activityMock.search).toHaveBeenCalledWith({
        assetId: 'asset-id',
        albumId: 'album-id',
        isLiked: false,
      });
    });
  });

  describe('getStatistics', () => {
    it('should get the comment count', async () => {
      activityMock.getStatistics.mockResolvedValue(1);
      accessMock.album.checkOwnerAccess.mockResolvedValue(new Set([activityStub.oneComment.albumId]));
      await expect(
        sut.getStatistics(authStub.admin, {
          assetId: 'asset-id',
          albumId: activityStub.oneComment.albumId,
        }),
      ).resolves.toEqual({ comments: 1 });
    });
  });

  describe('addComment', () => {
    it('should require access to the album', async () => {
      await expect(
        sut.create(authStub.admin, {
          albumId: 'album-id',
          assetId: 'asset-id',
          type: ReactionType.COMMENT,
          comment: 'comment',
        }),
      ).rejects.toBeInstanceOf(BadRequestException);
    });

    it('should create a comment', async () => {
      accessMock.activity.checkCreateAccess.mockResolvedValue(new Set(['album-id']));
      activityMock.create.mockResolvedValue(activityStub.oneComment);

      await sut.create(authStub.admin, {
        albumId: 'album-id',
        assetId: 'asset-id',
        type: ReactionType.COMMENT,
        comment: 'comment',
      });

      expect(activityMock.create).toHaveBeenCalledWith({
        userId: 'admin_id',
        albumId: 'album-id',
        assetId: 'asset-id',
        comment: 'comment',
        isLiked: false,
      });
    });

    it('should fail because activity is disabled for the album', async () => {
      accessMock.album.checkOwnerAccess.mockResolvedValue(new Set(['album-id']));
      activityMock.create.mockResolvedValue(activityStub.oneComment);

      await expect(
        sut.create(authStub.admin, {
          albumId: 'album-id',
          assetId: 'asset-id',
          type: ReactionType.COMMENT,
          comment: 'comment',
        }),
      ).rejects.toBeInstanceOf(BadRequestException);
    });

    it('should create a like', async () => {
      accessMock.activity.checkCreateAccess.mockResolvedValue(new Set(['album-id']));
      activityMock.create.mockResolvedValue(activityStub.liked);
      activityMock.search.mockResolvedValue([]);

      await sut.create(authStub.admin, {
        albumId: 'album-id',
        assetId: 'asset-id',
        type: ReactionType.LIKE,
      });

      expect(activityMock.create).toHaveBeenCalledWith({
        userId: 'admin_id',
        albumId: 'album-id',
        assetId: 'asset-id',
        isLiked: true,
      });
    });

    it('should skip if like exists', async () => {
      accessMock.album.checkOwnerAccess.mockResolvedValue(new Set(['album-id']));
      accessMock.activity.checkCreateAccess.mockResolvedValue(new Set(['album-id']));
      activityMock.search.mockResolvedValue([activityStub.liked]);

      await sut.create(authStub.admin, {
        albumId: 'album-id',
        assetId: 'asset-id',
        type: ReactionType.LIKE,
      });

      expect(activityMock.create).not.toHaveBeenCalled();
    });
  });

  describe('delete', () => {
    it('should require access', async () => {
      await expect(sut.delete(authStub.admin, activityStub.oneComment.id)).rejects.toBeInstanceOf(BadRequestException);
      expect(activityMock.delete).not.toHaveBeenCalled();
    });

    it('should let the activity owner delete a comment', async () => {
      accessMock.activity.checkOwnerAccess.mockResolvedValue(new Set(['activity-id']));
      await sut.delete(authStub.admin, 'activity-id');
      expect(activityMock.delete).toHaveBeenCalledWith('activity-id');
    });

    it('should let the album owner delete a comment', async () => {
      accessMock.activity.checkAlbumOwnerAccess.mockResolvedValue(new Set(['activity-id']));
      await sut.delete(authStub.admin, 'activity-id');
      expect(activityMock.delete).toHaveBeenCalledWith('activity-id');
    });
  });
});
@@ -0,0 +1,85 @@
import { Inject, Injectable } from '@nestjs/common';
import { AccessCore, Permission } from 'src/cores/access.core';
import {
  ActivityCreateDto,
  ActivityDto,
  ActivityResponseDto,
  ActivitySearchDto,
  ActivityStatisticsResponseDto,
  MaybeDuplicate,
  ReactionLevel,
  ReactionType,
  mapActivity,
} from 'src/dtos/activity.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { ActivityEntity } from 'src/entities/activity.entity';
import { IAccessRepository } from 'src/interfaces/access.repository';
import { IActivityRepository } from 'src/interfaces/activity.repository';

@Injectable()
export class ActivityService {
  private access: AccessCore;

  constructor(
    @Inject(IAccessRepository) accessRepository: IAccessRepository,
    @Inject(IActivityRepository) private repository: IActivityRepository,
  ) {
    this.access = AccessCore.create(accessRepository);
  }

  async getAll(auth: AuthDto, dto: ActivitySearchDto): Promise<ActivityResponseDto[]> {
    await this.access.requirePermission(auth, Permission.ALBUM_READ, dto.albumId);
    const activities = await this.repository.search({
      userId: dto.userId,
      albumId: dto.albumId,
      assetId: dto.level === ReactionLevel.ALBUM ? null : dto.assetId,
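      // an absent type filter leaves isLiked undefined, which matches both likes and comments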
      isLiked: dto.type && dto.type === ReactionType.LIKE,
    });

    return activities.map((activity) => mapActivity(activity));
  }

  async getStatistics(auth: AuthDto, dto: ActivityDto): Promise<ActivityStatisticsResponseDto> {
    await this.access.requirePermission(auth, Permission.ALBUM_READ, dto.albumId);
    return { comments: await this.repository.getStatistics(dto.assetId, dto.albumId) };
  }

  async create(auth: AuthDto, dto: ActivityCreateDto): Promise<MaybeDuplicate<ActivityResponseDto>> {
    await this.access.requirePermission(auth, Permission.ACTIVITY_CREATE, dto.albumId);

    const common = {
      userId: auth.user.id,
      assetId: dto.assetId,
      albumId: dto.albumId,
    };

    let activity: ActivityEntity | null = null;
    let duplicate = false;

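    // likes are idempotent: look for an existing like first and only create a new record when none exists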
    if (dto.type === ReactionType.LIKE) {
      delete dto.comment;
      [activity] = await this.repository.search({
        ...common,
        // `null` will search for an album like
        assetId: dto.assetId ?? null,
        isLiked: true,
      });
      duplicate = !!activity;
    }

    if (!activity) {
      activity = await this.repository.create({
        ...common,
        isLiked: dto.type === ReactionType.LIKE,
        comment: dto.comment,
      });
    }

    return { duplicate, value: mapActivity(activity) };
  }

  async delete(auth: AuthDto, id: string): Promise<void> {
    await this.access.requirePermission(auth, Permission.ACTIVITY_DELETE, id);
    await this.repository.delete(id);
  }
}
@@ -0,0 +1,762 @@
import { BadRequestException } from '@nestjs/common';
import _ from 'lodash';
import { BulkIdErrorReason } from 'src/dtos/asset-ids.response.dto';
import { IAlbumRepository } from 'src/interfaces/album.repository';
import { IAssetRepository } from 'src/interfaces/asset.repository';
import { IUserRepository } from 'src/interfaces/user.repository';
import { AlbumService } from 'src/services/album.service';
import { albumStub } from 'test/fixtures/album.stub';
import { authStub } from 'test/fixtures/auth.stub';
import { userStub } from 'test/fixtures/user.stub';
import { IAccessRepositoryMock, newAccessRepositoryMock } from 'test/repositories/access.repository.mock';
import { newAlbumRepositoryMock } from 'test/repositories/album.repository.mock';
import { newAssetRepositoryMock } from 'test/repositories/asset.repository.mock';
import { newUserRepositoryMock } from 'test/repositories/user.repository.mock';

describe(AlbumService.name, () => {
  let sut: AlbumService;
  let accessMock: IAccessRepositoryMock;
  let albumMock: jest.Mocked<IAlbumRepository>;
  let assetMock: jest.Mocked<IAssetRepository>;
  let userMock: jest.Mocked<IUserRepository>;

  beforeEach(() => {
    accessMock = newAccessRepositoryMock();
    albumMock = newAlbumRepositoryMock();
    assetMock = newAssetRepositoryMock();
    userMock = newUserRepositoryMock();

    sut = new AlbumService(accessMock, albumMock, assetMock, userMock);
  });

  it('should work', () => {
    expect(sut).toBeDefined();
  });

  describe('getCount', () => {
    it('should get the album count', async () => {
      albumMock.getOwned.mockResolvedValue([]);
      albumMock.getShared.mockResolvedValue([]);
      albumMock.getNotShared.mockResolvedValue([]);

      await expect(sut.getCount(authStub.admin)).resolves.toEqual({
        owned: 0,
        shared: 0,
        notShared: 0,
      });

      expect(albumMock.getOwned).toHaveBeenCalledWith(authStub.admin.user.id);
      expect(albumMock.getShared).toHaveBeenCalledWith(authStub.admin.user.id);
      expect(albumMock.getNotShared).toHaveBeenCalledWith(authStub.admin.user.id);
    });
  });

  describe('getAll', () => {
    it('gets list of albums for auth user', async () => {
      albumMock.getOwned.mockResolvedValue([albumStub.empty, albumStub.sharedWithUser]);
      albumMock.getMetadataForIds.mockResolvedValue([
        { albumId: albumStub.empty.id, assetCount: 0, startDate: undefined, endDate: undefined },
        { albumId: albumStub.sharedWithUser.id, assetCount: 0, startDate: undefined, endDate: undefined },
      ]);
      albumMock.getInvalidThumbnail.mockResolvedValue([]);

      const result = await sut.getAll(authStub.admin, {});
      expect(result).toHaveLength(2);
      expect(result[0].id).toEqual(albumStub.empty.id);
      expect(result[1].id).toEqual(albumStub.sharedWithUser.id);
    });

    it('gets list of albums that have a specific asset', async () => {
      albumMock.getByAssetId.mockResolvedValue([albumStub.oneAsset]);
      albumMock.getMetadataForIds.mockResolvedValue([
        {
          albumId: albumStub.oneAsset.id,
          assetCount: 1,
          startDate: new Date('1970-01-01'),
          endDate: new Date('1970-01-01'),
        },
      ]);
      albumMock.getInvalidThumbnail.mockResolvedValue([]);

      const result = await sut.getAll(authStub.admin, { assetId: albumStub.oneAsset.id });
      expect(result).toHaveLength(1);
      expect(result[0].id).toEqual(albumStub.oneAsset.id);
      expect(albumMock.getByAssetId).toHaveBeenCalledTimes(1);
    });

    it('gets list of albums that are shared', async () => {
      albumMock.getShared.mockResolvedValue([albumStub.sharedWithUser]);
      albumMock.getMetadataForIds.mockResolvedValue([
        { albumId: albumStub.sharedWithUser.id, assetCount: 0, startDate: undefined, endDate: undefined },
      ]);
      albumMock.getInvalidThumbnail.mockResolvedValue([]);

      const result = await sut.getAll(authStub.admin, { shared: true });
      expect(result).toHaveLength(1);
      expect(result[0].id).toEqual(albumStub.sharedWithUser.id);
      expect(albumMock.getShared).toHaveBeenCalledTimes(1);
    });

    it('gets list of albums that are NOT shared', async () => {
      albumMock.getNotShared.mockResolvedValue([albumStub.empty]);
      albumMock.getMetadataForIds.mockResolvedValue([
        { albumId: albumStub.empty.id, assetCount: 0, startDate: undefined, endDate: undefined },
      ]);
      albumMock.getInvalidThumbnail.mockResolvedValue([]);

      const result = await sut.getAll(authStub.admin, { shared: false });
      expect(result).toHaveLength(1);
      expect(result[0].id).toEqual(albumStub.empty.id);
      expect(albumMock.getNotShared).toHaveBeenCalledTimes(1);
    });
  });

  it('counts assets correctly', async () => {
    albumMock.getOwned.mockResolvedValue([albumStub.oneAsset]);
    albumMock.getMetadataForIds.mockResolvedValue([
      {
        albumId: albumStub.oneAsset.id,
        assetCount: 1,
        startDate: new Date('1970-01-01'),
        endDate: new Date('1970-01-01'),
      },
    ]);
    albumMock.getInvalidThumbnail.mockResolvedValue([]);

    const result = await sut.getAll(authStub.admin, {});

    expect(result).toHaveLength(1);
    expect(result[0].assetCount).toEqual(1);
    expect(albumMock.getOwned).toHaveBeenCalledTimes(1);
  });

  it('updates the album thumbnail by listing all albums', async () => {
    albumMock.getOwned.mockResolvedValue([albumStub.oneAssetInvalidThumbnail]);
    albumMock.getMetadataForIds.mockResolvedValue([
      {
        albumId: albumStub.oneAssetInvalidThumbnail.id,
        assetCount: 1,
        startDate: new Date('1970-01-01'),
        endDate: new Date('1970-01-01'),
      },
    ]);
    albumMock.getInvalidThumbnail.mockResolvedValue([albumStub.oneAssetInvalidThumbnail.id]);
    albumMock.update.mockResolvedValue(albumStub.oneAssetValidThumbnail);
    assetMock.getFirstAssetForAlbumId.mockResolvedValue(albumStub.oneAssetInvalidThumbnail.assets[0]);

    const result = await sut.getAll(authStub.admin, {});

    expect(result).toHaveLength(1);
    expect(albumMock.getInvalidThumbnail).toHaveBeenCalledTimes(1);
    expect(albumMock.update).toHaveBeenCalledTimes(1);
  });

  it('removes the thumbnail for an empty album', async () => {
    albumMock.getOwned.mockResolvedValue([albumStub.emptyWithInvalidThumbnail]);
    albumMock.getMetadataForIds.mockResolvedValue([
      {
        albumId: albumStub.emptyWithInvalidThumbnail.id,
        assetCount: 1,
        startDate: new Date('1970-01-01'),
        endDate: new Date('1970-01-01'),
      },
    ]);
    albumMock.getInvalidThumbnail.mockResolvedValue([albumStub.emptyWithInvalidThumbnail.id]);
    albumMock.update.mockResolvedValue(albumStub.emptyWithValidThumbnail);
    assetMock.getFirstAssetForAlbumId.mockResolvedValue(null);

    const result = await sut.getAll(authStub.admin, {});

    expect(result).toHaveLength(1);
    expect(albumMock.getInvalidThumbnail).toHaveBeenCalledTimes(1);
    expect(albumMock.update).toHaveBeenCalledTimes(1);
  });

  describe('create', () => {
    it('creates album', async () => {
      albumMock.create.mockResolvedValue(albumStub.empty);
      userMock.get.mockResolvedValue(userStub.user1);

      await sut.create(authStub.admin, {
        albumName: 'Empty album',
        sharedWithUserIds: ['user-id'],
        description: '',
        assetIds: ['123'],
      });

      expect(albumMock.create).toHaveBeenCalledWith({
        ownerId: authStub.admin.user.id,
        albumName: albumStub.empty.albumName,
        description: albumStub.empty.description,
        sharedUsers: [{ id: 'user-id' }],
        assets: [{ id: '123' }],
        albumThumbnailAssetId: '123',
      });

      expect(userMock.get).toHaveBeenCalledWith('user-id', {});
    });

    it('should require valid userIds', async () => {
      userMock.get.mockResolvedValue(null);
      await expect(
        sut.create(authStub.admin, {
          albumName: 'Empty album',
          sharedWithUserIds: ['user-3'],
        }),
      ).rejects.toBeInstanceOf(BadRequestException);
      expect(userMock.get).toHaveBeenCalledWith('user-3', {});
      expect(albumMock.create).not.toHaveBeenCalled();
    });
  });

  describe('update', () => {
    it('should prevent updating an album that does not exist', async () => {
      albumMock.getById.mockResolvedValue(null);

      await expect(
        sut.update(authStub.user1, 'invalid-id', {
          albumName: 'new album name',
        }),
      ).rejects.toBeInstanceOf(BadRequestException);

      expect(albumMock.update).not.toHaveBeenCalled();
    });

    it('should prevent updating a not owned album (shared with auth user)', async () => {
      await expect(
        sut.update(authStub.admin, albumStub.sharedWithAdmin.id, {
          albumName: 'new album name',
        }),
      ).rejects.toBeInstanceOf(BadRequestException);
    });

    it('should require a valid thumbnail asset id', async () => {
      accessMock.album.checkOwnerAccess.mockResolvedValue(new Set(['album-4']));
      albumMock.getById.mockResolvedValue(albumStub.oneAsset);
      albumMock.update.mockResolvedValue(albumStub.oneAsset);
      albumMock.hasAsset.mockResolvedValue(false);

      await expect(
        sut.update(authStub.admin, albumStub.oneAsset.id, {
          albumThumbnailAssetId: 'not-in-album',
        }),
      ).rejects.toBeInstanceOf(BadRequestException);

      expect(albumMock.hasAsset).toHaveBeenCalledWith({ albumId: 'album-4', assetId: 'not-in-album' });
      expect(albumMock.update).not.toHaveBeenCalled();
    });

    it('should allow the owner to update the album', async () => {
      accessMock.album.checkOwnerAccess.mockResolvedValue(new Set(['album-4']));

      albumMock.getById.mockResolvedValue(albumStub.oneAsset);
      albumMock.update.mockResolvedValue(albumStub.oneAsset);

      await sut.update(authStub.admin, albumStub.oneAsset.id, {
        albumName: 'new album name',
      });

      expect(albumMock.update).toHaveBeenCalledTimes(1);
      expect(albumMock.update).toHaveBeenCalledWith({
        id: 'album-4',
        albumName: 'new album name',
      });
    });
  });

  describe('delete', () => {
    it('should throw an error for an album not found', async () => {
      accessMock.album.checkOwnerAccess.mockResolvedValue(new Set([albumStub.sharedWithAdmin.id]));
      albumMock.getById.mockResolvedValue(null);

      await expect(sut.delete(authStub.admin, albumStub.sharedWithAdmin.id)).rejects.toBeInstanceOf(
        BadRequestException,
      );

      expect(albumMock.delete).not.toHaveBeenCalled();
    });

    it('should not let a shared user delete the album', async () => {
      albumMock.getById.mockResolvedValue(albumStub.sharedWithAdmin);

      await expect(sut.delete(authStub.admin, albumStub.sharedWithAdmin.id)).rejects.toBeInstanceOf(
        BadRequestException,
      );

      expect(albumMock.delete).not.toHaveBeenCalled();
    });

    it('should let the owner delete an album', async () => {
      accessMock.album.checkOwnerAccess.mockResolvedValue(new Set([albumStub.empty.id]));
      albumMock.getById.mockResolvedValue(albumStub.empty);

      await sut.delete(authStub.admin, albumStub.empty.id);

      expect(albumMock.delete).toHaveBeenCalledTimes(1);
      expect(albumMock.delete).toHaveBeenCalledWith(albumStub.empty);
    });
  });

  describe('addUsers', () => {
    it('should throw an error if the auth user is not the owner', async () => {
      await expect(
        sut.addUsers(authStub.admin, albumStub.sharedWithAdmin.id, { sharedUserIds: ['user-1'] }),
      ).rejects.toBeInstanceOf(BadRequestException);
      expect(albumMock.update).not.toHaveBeenCalled();
    });

    it('should throw an error if the userId is already added', async () => {
      accessMock.album.checkOwnerAccess.mockResolvedValue(new Set([albumStub.sharedWithAdmin.id]));
      albumMock.getById.mockResolvedValue(albumStub.sharedWithAdmin);
      await expect(
        sut.addUsers(authStub.user1, albumStub.sharedWithAdmin.id, { sharedUserIds: [authStub.admin.user.id] }),
      ).rejects.toBeInstanceOf(BadRequestException);
      expect(albumMock.update).not.toHaveBeenCalled();
    });

    it('should throw an error if the userId does not exist', async () => {
      accessMock.album.checkOwnerAccess.mockResolvedValue(new Set([albumStub.sharedWithAdmin.id]));
      albumMock.getById.mockResolvedValue(albumStub.sharedWithAdmin);
      userMock.get.mockResolvedValue(null);
      await expect(
        sut.addUsers(authStub.user1, albumStub.sharedWithAdmin.id, { sharedUserIds: ['user-3'] }),
      ).rejects.toBeInstanceOf(BadRequestException);
      expect(albumMock.update).not.toHaveBeenCalled();
    });

    it('should add valid shared users', async () => {
      accessMock.album.checkOwnerAccess.mockResolvedValue(new Set([albumStub.sharedWithAdmin.id]));
      albumMock.getById.mockResolvedValue(_.cloneDeep(albumStub.sharedWithAdmin));
      albumMock.update.mockResolvedValue(albumStub.sharedWithAdmin);
      userMock.get.mockResolvedValue(userStub.user2);
      await sut.addUsers(authStub.user1, albumStub.sharedWithAdmin.id, { sharedUserIds: [authStub.user2.user.id] });
      expect(albumMock.update).toHaveBeenCalledWith({
        id: albumStub.sharedWithAdmin.id,
        updatedAt: expect.any(Date),
        sharedUsers: [userStub.admin, { id: authStub.user2.user.id }],
      });
    });
  });

  describe('removeUser', () => {
    it('should require a valid album id', async () => {
      accessMock.album.checkOwnerAccess.mockResolvedValue(new Set(['album-1']));
      albumMock.getById.mockResolvedValue(null);
      await expect(sut.removeUser(authStub.admin, 'album-1', 'user-1')).rejects.toBeInstanceOf(BadRequestException);
      expect(albumMock.update).not.toHaveBeenCalled();
    });

    it('should remove a shared user from an owned album', async () => {
      accessMock.album.checkOwnerAccess.mockResolvedValue(new Set([albumStub.sharedWithUser.id]));
      albumMock.getById.mockResolvedValue(albumStub.sharedWithUser);

      await expect(
        sut.removeUser(authStub.admin, albumStub.sharedWithUser.id, userStub.user1.id),
      ).resolves.toBeUndefined();

      expect(albumMock.update).toHaveBeenCalledTimes(1);
      expect(albumMock.update).toHaveBeenCalledWith({
        id: albumStub.sharedWithUser.id,
        updatedAt: expect.any(Date),
        sharedUsers: [],
      });
      expect(albumMock.getById).toHaveBeenCalledWith(albumStub.sharedWithUser.id, { withAssets: false });
    });

    it('should prevent removing a shared user from a not-owned album (shared with auth user)', async () => {
      albumMock.getById.mockResolvedValue(albumStub.sharedWithMultiple);

      await expect(
        sut.removeUser(authStub.user1, albumStub.sharedWithMultiple.id, authStub.user2.user.id),
      ).rejects.toBeInstanceOf(BadRequestException);

      expect(albumMock.update).not.toHaveBeenCalled();
      expect(accessMock.album.checkOwnerAccess).toHaveBeenCalledWith(
        authStub.user1.user.id,
        new Set([albumStub.sharedWithMultiple.id]),
      );
    });

    it('should allow a shared user to remove themselves', async () => {
      albumMock.getById.mockResolvedValue(albumStub.sharedWithUser);

      await sut.removeUser(authStub.user1, albumStub.sharedWithUser.id, authStub.user1.user.id);

      expect(albumMock.update).toHaveBeenCalledTimes(1);
      expect(albumMock.update).toHaveBeenCalledWith({
        id: albumStub.sharedWithUser.id,
        updatedAt: expect.any(Date),
        sharedUsers: [],
      });
    });

    it('should allow a shared user to remove themselves using "me"', async () => {
      albumMock.getById.mockResolvedValue(albumStub.sharedWithUser);

      await sut.removeUser(authStub.user1, albumStub.sharedWithUser.id, 'me');

      expect(albumMock.update).toHaveBeenCalledTimes(1);
      expect(albumMock.update).toHaveBeenCalledWith({
        id: albumStub.sharedWithUser.id,
        updatedAt: expect.any(Date),
        sharedUsers: [],
      });
    });

    it('should not allow the owner to be removed', async () => {
      albumMock.getById.mockResolvedValue(albumStub.empty);

      await expect(sut.removeUser(authStub.admin, albumStub.empty.id, authStub.admin.user.id)).rejects.toBeInstanceOf(
        BadRequestException,
      );

      expect(albumMock.update).not.toHaveBeenCalled();
    });

    it('should throw an error for a user not in the album', async () => {
      albumMock.getById.mockResolvedValue(albumStub.empty);

      await expect(sut.removeUser(authStub.admin, albumStub.empty.id, 'user-3')).rejects.toBeInstanceOf(
        BadRequestException,
      );

      expect(albumMock.update).not.toHaveBeenCalled();
    });
  });

  describe('getAlbumInfo', () => {
    it('should get a shared album', async () => {
      albumMock.getById.mockResolvedValue(albumStub.oneAsset);
      accessMock.album.checkOwnerAccess.mockResolvedValue(new Set([albumStub.oneAsset.id]));
      albumMock.getMetadataForIds.mockResolvedValue([
        {
          albumId: albumStub.oneAsset.id,
          assetCount: 1,
          startDate: new Date('1970-01-01'),
          endDate: new Date('1970-01-01'),
        },
      ]);

      await sut.get(authStub.admin, albumStub.oneAsset.id, {});

      expect(albumMock.getById).toHaveBeenCalledWith(albumStub.oneAsset.id, { withAssets: true });
      expect(accessMock.album.checkOwnerAccess).toHaveBeenCalledWith(
        authStub.admin.user.id,
        new Set([albumStub.oneAsset.id]),
      );
    });

    it('should get a shared album via a shared link', async () => {
      albumMock.getById.mockResolvedValue(albumStub.oneAsset);
      accessMock.album.checkSharedLinkAccess.mockResolvedValue(new Set(['album-123']));
      albumMock.getMetadataForIds.mockResolvedValue([
        {
          albumId: albumStub.oneAsset.id,
          assetCount: 1,
          startDate: new Date('1970-01-01'),
          endDate: new Date('1970-01-01'),
        },
      ]);

      await sut.get(authStub.adminSharedLink, 'album-123', {});

      expect(albumMock.getById).toHaveBeenCalledWith('album-123', { withAssets: true });
      expect(accessMock.album.checkSharedLinkAccess).toHaveBeenCalledWith(
        authStub.adminSharedLink.sharedLink?.id,
        new Set(['album-123']),
      );
    });

    it('should get a shared album via shared with user', async () => {
      albumMock.getById.mockResolvedValue(albumStub.oneAsset);
      accessMock.album.checkSharedAlbumAccess.mockResolvedValue(new Set(['album-123']));
      albumMock.getMetadataForIds.mockResolvedValue([
        {
          albumId: albumStub.oneAsset.id,
          assetCount: 1,
          startDate: new Date('1970-01-01'),
          endDate: new Date('1970-01-01'),
        },
      ]);

      await sut.get(authStub.user1, 'album-123', {});

      expect(albumMock.getById).toHaveBeenCalledWith('album-123', { withAssets: true });
      expect(accessMock.album.checkSharedAlbumAccess).toHaveBeenCalledWith(
        authStub.user1.user.id,
        new Set(['album-123']),
      );
    });

    it('should throw an error for no access', async () => {
      await expect(sut.get(authStub.admin, 'album-123', {})).rejects.toBeInstanceOf(BadRequestException);

      expect(accessMock.album.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['album-123']));
      expect(accessMock.album.checkSharedAlbumAccess).toHaveBeenCalledWith(
        authStub.admin.user.id,
        new Set(['album-123']),
      );
    });
  });

  describe('addAssets', () => {
    it('should allow the owner to add assets', async () => {
      accessMock.album.checkOwnerAccess.mockResolvedValue(new Set(['album-123']));
      accessMock.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1', 'asset-2', 'asset-3']));
      albumMock.getById.mockResolvedValue(_.cloneDeep(albumStub.oneAsset));
      albumMock.getAssetIds.mockResolvedValueOnce(new Set());

      await expect(
        sut.addAssets(authStub.admin, 'album-123', { ids: ['asset-1', 'asset-2', 'asset-3'] }),
      ).resolves.toEqual([
        { success: true, id: 'asset-1' },
        { success: true, id: 'asset-2' },
        { success: true, id: 'asset-3' },
      ]);

      expect(albumMock.update).toHaveBeenCalledWith({
        id: 'album-123',
        updatedAt: expect.any(Date),
        albumThumbnailAssetId: 'asset-1',
      });
      expect(albumMock.addAssets).toHaveBeenCalledWith({
        albumId: 'album-123',
        assetIds: ['asset-1', 'asset-2', 'asset-3'],
      });
    });

    it('should not set the thumbnail if the album has one already', async () => {
      accessMock.album.checkOwnerAccess.mockResolvedValue(new Set(['album-123']));
      accessMock.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1']));
      albumMock.getById.mockResolvedValue(_.cloneDeep({ ...albumStub.empty, albumThumbnailAssetId: 'asset-id' }));
      albumMock.getAssetIds.mockResolvedValueOnce(new Set());

      await expect(sut.addAssets(authStub.admin, 'album-123', { ids: ['asset-1'] })).resolves.toEqual([
        { success: true, id: 'asset-1' },
      ]);

      expect(albumMock.update).toHaveBeenCalledWith({
        id: 'album-123',
        updatedAt: expect.any(Date),
        albumThumbnailAssetId: 'asset-id',
      });
      expect(albumMock.addAssets).toHaveBeenCalled();
    });

    it('should allow a shared user to add assets', async () => {
      accessMock.album.checkSharedAlbumAccess.mockResolvedValue(new Set(['album-123']));
      accessMock.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1', 'asset-2', 'asset-3']));
      albumMock.getById.mockResolvedValue(_.cloneDeep(albumStub.sharedWithUser));
      albumMock.getAssetIds.mockResolvedValueOnce(new Set());

      await expect(
        sut.addAssets(authStub.user1, 'album-123', { ids: ['asset-1', 'asset-2', 'asset-3'] }),
      ).resolves.toEqual([
        { success: true, id: 'asset-1' },
        { success: true, id: 'asset-2' },
        { success: true, id: 'asset-3' },
      ]);

      expect(albumMock.update).toHaveBeenCalledWith({
        id: 'album-123',
        updatedAt: expect.any(Date),
        albumThumbnailAssetId: 'asset-1',
      });
      expect(albumMock.addAssets).toHaveBeenCalledWith({
        albumId: 'album-123',
        assetIds: ['asset-1', 'asset-2', 'asset-3'],
      });
    });

    it('should allow a shared link user to add assets', async () => {
      accessMock.album.checkSharedLinkAccess.mockResolvedValue(new Set(['album-123']));
      accessMock.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1', 'asset-2', 'asset-3']));
      albumMock.getById.mockResolvedValue(_.cloneDeep(albumStub.oneAsset));
      albumMock.getAssetIds.mockResolvedValueOnce(new Set());

      await expect(
        sut.addAssets(authStub.adminSharedLink, 'album-123', { ids: ['asset-1', 'asset-2', 'asset-3'] }),
      ).resolves.toEqual([
        { success: true, id: 'asset-1' },
        { success: true, id: 'asset-2' },
        { success: true, id: 'asset-3' },
      ]);

      expect(albumMock.update).toHaveBeenCalledWith({
        id: 'album-123',
        updatedAt: expect.any(Date),
        albumThumbnailAssetId: 'asset-1',
      });
      expect(albumMock.addAssets).toHaveBeenCalledWith({
        albumId: 'album-123',
        assetIds: ['asset-1', 'asset-2', 'asset-3'],
      });

      expect(accessMock.album.checkSharedLinkAccess).toHaveBeenCalledWith(
        authStub.adminSharedLink.sharedLink?.id,
        new Set(['album-123']),
      );
    });

    it('should allow adding assets shared via partner sharing', async () => {
      accessMock.album.checkOwnerAccess.mockResolvedValue(new Set(['album-123']));
      accessMock.asset.checkPartnerAccess.mockResolvedValue(new Set(['asset-1']));
      albumMock.getById.mockResolvedValue(_.cloneDeep(albumStub.oneAsset));
      albumMock.getAssetIds.mockResolvedValueOnce(new Set());

      await expect(sut.addAssets(authStub.admin, 'album-123', { ids: ['asset-1'] })).resolves.toEqual([
        { success: true, id: 'asset-1' },
      ]);

      expect(albumMock.update).toHaveBeenCalledWith({
        id: 'album-123',
        updatedAt: expect.any(Date),
        albumThumbnailAssetId: 'asset-1',
      });
      expect(accessMock.asset.checkPartnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['asset-1']));
    });

    it('should skip duplicate assets', async () => {
      accessMock.album.checkOwnerAccess.mockResolvedValue(new Set(['album-123']));
      accessMock.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-id']));
      albumMock.getById.mockResolvedValue(_.cloneDeep(albumStub.oneAsset));
      albumMock.getAssetIds.mockResolvedValueOnce(new Set(['asset-id']));

      await expect(sut.addAssets(authStub.admin, 'album-123', { ids: ['asset-id'] })).resolves.toEqual([
        { success: false, id: 'asset-id', error: BulkIdErrorReason.DUPLICATE },
      ]);

      expect(albumMock.update).not.toHaveBeenCalled();
    });

    it('should skip assets not shared with user', async () => {
      accessMock.album.checkOwnerAccess.mockResolvedValue(new Set(['album-123']));
      albumMock.getById.mockResolvedValue(albumStub.oneAsset);
      albumMock.getAssetIds.mockResolvedValueOnce(new Set());

      await expect(sut.addAssets(authStub.admin, 'album-123', { ids: ['asset-1'] })).resolves.toEqual([
        { success: false, id: 'asset-1', error: BulkIdErrorReason.NO_PERMISSION },
      ]);

      expect(accessMock.asset.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['asset-1']));
      expect(accessMock.asset.checkPartnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['asset-1']));
    });

    it('should not allow unauthorized access to the album', async () => {
      albumMock.getById.mockResolvedValue(albumStub.oneAsset);

      await expect(
        sut.addAssets(authStub.admin, 'album-123', { ids: ['asset-1', 'asset-2', 'asset-3'] }),
      ).rejects.toBeInstanceOf(BadRequestException);

      expect(accessMock.album.checkOwnerAccess).toHaveBeenCalled();
      expect(accessMock.album.checkSharedAlbumAccess).toHaveBeenCalled();
    });

    it('should not allow unauthorized shared link access to the album', async () => {
      albumMock.getById.mockResolvedValue(albumStub.oneAsset);

      await expect(
        sut.addAssets(authStub.adminSharedLink, 'album-123', { ids: ['asset-1', 'asset-2', 'asset-3'] }),
      ).rejects.toBeInstanceOf(BadRequestException);

      expect(accessMock.album.checkSharedLinkAccess).toHaveBeenCalled();
    });
  });

  describe('removeAssets', () => {
    it('should allow the owner to remove assets', async () => {
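      // two queued results: the first satisfies the album read check, the second the remove-asset permission check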
      accessMock.album.checkOwnerAccess.mockResolvedValueOnce(new Set(['album-123']));
      accessMock.album.checkOwnerAccess.mockResolvedValueOnce(new Set(['asset-id']));
      albumMock.getById.mockResolvedValue(_.cloneDeep(albumStub.oneAsset));
      albumMock.getAssetIds.mockResolvedValueOnce(new Set(['asset-id']));

      await expect(sut.removeAssets(authStub.admin, 'album-123', { ids: ['asset-id'] })).resolves.toEqual([
        { success: true, id: 'asset-id' },
      ]);

      expect(albumMock.update).toHaveBeenCalledWith({ id: 'album-123', updatedAt: expect.any(Date) });
      expect(albumMock.removeAssets).toHaveBeenCalledWith('album-123', ['asset-id']);
    });

    it('should skip assets not in the album', async () => {
      accessMock.album.checkOwnerAccess.mockResolvedValue(new Set(['album-123']));
      albumMock.getById.mockResolvedValue(_.cloneDeep(albumStub.empty));
      albumMock.getAssetIds.mockResolvedValueOnce(new Set());

      await expect(sut.removeAssets(authStub.admin, 'album-123', { ids: ['asset-id'] })).resolves.toEqual([
        { success: false, id: 'asset-id', error: BulkIdErrorReason.NOT_FOUND },
      ]);

      expect(albumMock.update).not.toHaveBeenCalled();
    });

    it('should skip assets without user permission to remove', async () => {
      accessMock.album.checkSharedAlbumAccess.mockResolvedValue(new Set(['album-123']));
      albumMock.getById.mockResolvedValue(_.cloneDeep(albumStub.oneAsset));
      albumMock.getAssetIds.mockResolvedValueOnce(new Set(['asset-id']));

      await expect(sut.removeAssets(authStub.admin, 'album-123', { ids: ['asset-id'] })).resolves.toEqual([
        {
          success: false,
          id: 'asset-id',
          error: BulkIdErrorReason.NO_PERMISSION,
        },
      ]);

      expect(albumMock.update).not.toHaveBeenCalled();
    });

    it('should reset the thumbnail if it is removed', async () => {
      accessMock.album.checkOwnerAccess.mockResolvedValueOnce(new Set(['album-123']));
      accessMock.album.checkOwnerAccess.mockResolvedValueOnce(new Set(['asset-id']));
      albumMock.getById.mockResolvedValue(_.cloneDeep(albumStub.twoAssets));
      albumMock.getAssetIds.mockResolvedValueOnce(new Set(['asset-id']));

      await expect(sut.removeAssets(authStub.admin, 'album-123', { ids: ['asset-id'] })).resolves.toEqual([
        { success: true, id: 'asset-id' },
      ]);

      expect(albumMock.update).toHaveBeenCalledWith({
        id: 'album-123',
        updatedAt: expect.any(Date),
      });
      expect(albumMock.updateThumbnails).toHaveBeenCalled();
    });
  });

  // // it('removes assets from shared album (shared with auth user)', async () => {
  // //   const albumEntity = _getOwnedSharedAlbum();
  // //   albumRepositoryMock.get.mockImplementation(() => Promise.resolve<AlbumEntity>(albumEntity));
  // //   albumRepositoryMock.removeAssets.mockImplementation(() => Promise.resolve<AlbumEntity>(albumEntity));

  // //   await expect(
  // //     sut.removeAssetsFromAlbum(
  // //       auth,
  // //       {
  // //         ids: ['1'],
  // //       },
  // //       albumEntity.id,
  // //     ),
  // //   ).resolves.toBeUndefined();
  // //   expect(albumRepositoryMock.removeAssets).toHaveBeenCalledTimes(1);
  // //   expect(albumRepositoryMock.removeAssets).toHaveBeenCalledWith(albumEntity, {
  // //     ids: ['1'],
  // //   });
  // // });

  // it('prevents removing assets from a not owned / shared album', async () => {
  //   const albumEntity = _getNotOwnedNotSharedAlbum();

  //   const albumResponse: AddAssetsResponseDto = {
  //     alreadyInAlbum: [],
  //     successfullyAdded: 1,
  //   };

  //   const albumId = albumEntity.id;

  //   albumRepositoryMock.get.mockImplementation(() => Promise.resolve<AlbumEntity>(albumEntity));
  //   albumRepositoryMock.addAssets.mockImplementation(() => Promise.resolve<AddAssetsResponseDto>(albumResponse));

  //   await expect(sut.removeAssets(auth, albumId, { ids: ['1'] })).rejects.toBeInstanceOf(ForbiddenException);
  // });
});
@@ -0,0 +1,310 @@
import { BadRequestException, Inject, Injectable } from '@nestjs/common';
import { AccessCore, Permission } from 'src/cores/access.core';
import {
  AddUsersDto,
  AlbumCountResponseDto,
  AlbumInfoDto,
  AlbumResponseDto,
  CreateAlbumDto,
  GetAlbumsDto,
  UpdateAlbumDto,
  mapAlbum,
  mapAlbumWithAssets,
  mapAlbumWithoutAssets,
} from 'src/dtos/album.dto';
import { BulkIdErrorReason, BulkIdResponseDto, BulkIdsDto } from 'src/dtos/asset-ids.response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { AlbumEntity } from 'src/entities/album.entity';
import { AssetEntity } from 'src/entities/asset.entity';
import { UserEntity } from 'src/entities/user.entity';
import { IAccessRepository } from 'src/interfaces/access.repository';
import { AlbumAssetCount, AlbumInfoOptions, IAlbumRepository } from 'src/interfaces/album.repository';
import { IAssetRepository } from 'src/interfaces/asset.repository';
import { IUserRepository } from 'src/interfaces/user.repository';
import { setUnion } from 'src/utils';

@Injectable()
export class AlbumService {
  private access: AccessCore;
  constructor(
    @Inject(IAccessRepository) accessRepository: IAccessRepository,
    @Inject(IAlbumRepository) private albumRepository: IAlbumRepository,
    @Inject(IAssetRepository) private assetRepository: IAssetRepository,
    @Inject(IUserRepository) private userRepository: IUserRepository,
  ) {
    this.access = AccessCore.create(accessRepository);
  }

  async getCount(auth: AuthDto): Promise<AlbumCountResponseDto> {
    const [owned, shared, notShared] = await Promise.all([
      this.albumRepository.getOwned(auth.user.id),
      this.albumRepository.getShared(auth.user.id),
      this.albumRepository.getNotShared(auth.user.id),
    ]);

    return {
      owned: owned.length,
      shared: shared.length,
      notShared: notShared.length,
    };
  }

  async getAll({ user: { id: ownerId } }: AuthDto, { assetId, shared }: GetAlbumsDto): Promise<AlbumResponseDto[]> {
    const invalidAlbumIds = await this.albumRepository.getInvalidThumbnail();
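    // reset any invalid thumbnail to the album's first asset, or to null when the album is empty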
    for (const albumId of invalidAlbumIds) {
      const newThumbnail = await this.assetRepository.getFirstAssetForAlbumId(albumId);
      await this.albumRepository.update({ id: albumId, albumThumbnailAsset: newThumbnail });
    }

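    // filter precedence: a specific asset id wins, then the shared flag, otherwise all albums owned by the user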
    let albums: AlbumEntity[];
    if (assetId) {
      albums = await this.albumRepository.getByAssetId(ownerId, assetId);
    } else if (shared === true) {
      albums = await this.albumRepository.getShared(ownerId);
    } else if (shared === false) {
      albums = await this.albumRepository.getNotShared(ownerId);
    } else {
      albums = await this.albumRepository.getOwned(ownerId);
    }

    // Get asset count for each album. Then map the result to an object:
    // { [albumId]: assetCount }
    const results = await this.albumRepository.getMetadataForIds(albums.map((album) => album.id));
    const albumMetadata: Record<string, AlbumAssetCount> = {};
    for (const metadata of results) {
      const { albumId, assetCount, startDate, endDate } = metadata;
      albumMetadata[albumId] = {
        albumId,
        assetCount,
        startDate,
        endDate,
      };
    }

    return Promise.all(
      albums.map(async (album) => {
        const lastModifiedAsset = await this.assetRepository.getLastUpdatedAssetForAlbumId(album.id);
        return {
          ...mapAlbumWithoutAssets(album),
          sharedLinks: undefined,
          startDate: albumMetadata[album.id].startDate,
          endDate: albumMetadata[album.id].endDate,
          assetCount: albumMetadata[album.id].assetCount,
          lastModifiedAssetTimestamp: lastModifiedAsset?.fileModifiedAt,
        };
      }),
    );
  }

  async get(auth: AuthDto, id: string, dto: AlbumInfoDto): Promise<AlbumResponseDto> {
    await this.access.requirePermission(auth, Permission.ALBUM_READ, id);
    await this.albumRepository.updateThumbnails();
    const withAssets = dto.withoutAssets === undefined ? true : !dto.withoutAssets;
    const album = await this.findOrFail(id, { withAssets });
    const [albumMetadataForIds] = await this.albumRepository.getMetadataForIds([album.id]);

    return {
      ...mapAlbum(album, withAssets, auth),
      startDate: albumMetadataForIds.startDate,
      endDate: albumMetadataForIds.endDate,
      assetCount: albumMetadataForIds.assetCount,
    };
  }

  async create(auth: AuthDto, dto: CreateAlbumDto): Promise<AlbumResponseDto> {
    for (const userId of dto.sharedWithUserIds || []) {
      const exists = await this.userRepository.get(userId, {});
      if (!exists) {
        throw new BadRequestException('User not found');
      }
    }

    const album = await this.albumRepository.create({
      ownerId: auth.user.id,
      albumName: dto.albumName,
      description: dto.description,
      sharedUsers: dto.sharedWithUserIds?.map((value) => ({ id: value }) as UserEntity) ?? [],
      assets: (dto.assetIds || []).map((id) => ({ id }) as AssetEntity),
      albumThumbnailAssetId: dto.assetIds?.[0] || null,
    });

    return mapAlbumWithAssets(album);
  }

  async update(auth: AuthDto, id: string, dto: UpdateAlbumDto): Promise<AlbumResponseDto> {
    await this.access.requirePermission(auth, Permission.ALBUM_UPDATE, id);

    const album = await this.findOrFail(id, { withAssets: true });

    if (dto.albumThumbnailAssetId) {
      const valid = await this.albumRepository.hasAsset({ albumId: id, assetId: dto.albumThumbnailAssetId });
      if (!valid) {
        throw new BadRequestException('Invalid album thumbnail');
      }
    }
    const updatedAlbum = await this.albumRepository.update({
      id: album.id,
      albumName: dto.albumName,
      description: dto.description,
      albumThumbnailAssetId: dto.albumThumbnailAssetId,
      isActivityEnabled: dto.isActivityEnabled,
      order: dto.order,
    });

    return mapAlbumWithoutAssets(updatedAlbum);
  }

  async delete(auth: AuthDto, id: string): Promise<void> {
    await this.access.requirePermission(auth, Permission.ALBUM_DELETE, id);

    const album = await this.findOrFail(id, { withAssets: false });

    await this.albumRepository.delete(album);
  }

  async addAssets(auth: AuthDto, id: string, dto: BulkIdsDto): Promise<BulkIdResponseDto[]> {
    const album = await this.findOrFail(id, { withAssets: false });

    await this.access.requirePermission(auth, Permission.ALBUM_READ, id);

    const existingAssetIds = await this.albumRepository.getAssetIds(id, dto.ids);
    const notPresentAssetIds = dto.ids.filter((id) => !existingAssetIds.has(id));
    const allowedAssetIds = await this.access.checkAccess(auth, Permission.ASSET_SHARE, notPresentAssetIds);

    const results: BulkIdResponseDto[] = [];
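    // classify every requested id: already in the album -> DUPLICATE, not allowed -> NO_PERMISSION, otherwise success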
    for (const assetId of dto.ids) {
      const hasAsset = existingAssetIds.has(assetId);
      if (hasAsset) {
        results.push({ id: assetId, success: false, error: BulkIdErrorReason.DUPLICATE });
        continue;
      }

      const hasAccess = allowedAssetIds.has(assetId);
      if (!hasAccess) {
        results.push({ id: assetId, success: false, error: BulkIdErrorReason.NO_PERMISSION });
        continue;
      }

      results.push({ id: assetId, success: true });
    }

    const newAssetIds = results.filter(({ success }) => success).map(({ id }) => id);
    if (newAssetIds.length > 0) {
      await this.albumRepository.addAssets({ albumId: id, assetIds: newAssetIds });
      await this.albumRepository.update({
        id,
        updatedAt: new Date(),
        albumThumbnailAssetId: album.albumThumbnailAssetId ?? newAssetIds[0],
      });
    }

    return results;
  }

  async removeAssets(auth: AuthDto, id: string, dto: BulkIdsDto): Promise<BulkIdResponseDto[]> {
    const album = await this.findOrFail(id, { withAssets: false });

    await this.access.requirePermission(auth, Permission.ALBUM_READ, id);

    const existingAssetIds = await this.albumRepository.getAssetIds(id, dto.ids);
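    // either permission is enough to remove an asset: ALBUM_REMOVE_ASSET on the album side or ASSET_SHARE on the asset side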
    const canRemove = await this.access.checkAccess(auth, Permission.ALBUM_REMOVE_ASSET, existingAssetIds);
    const canShare = await this.access.checkAccess(auth, Permission.ASSET_SHARE, existingAssetIds);
    const allowedAssetIds = setUnion(canRemove, canShare);

    const results: BulkIdResponseDto[] = [];
    for (const assetId of dto.ids) {
      const hasAsset = existingAssetIds.has(assetId);
      if (!hasAsset) {
        results.push({ id: assetId, success: false, error: BulkIdErrorReason.NOT_FOUND });
        continue;
      }

      const hasAccess = allowedAssetIds.has(assetId);
      if (!hasAccess) {
        results.push({ id: assetId, success: false, error: BulkIdErrorReason.NO_PERMISSION });
        continue;
      }

      results.push({ id: assetId, success: true });
    }

    const removedIds = results.filter(({ success }) => success).map(({ id }) => id);
    if (removedIds.length > 0) {
      await this.albumRepository.removeAssets(id, removedIds);
      await this.albumRepository.update({ id, updatedAt: new Date() });
      if (album.albumThumbnailAssetId && removedIds.includes(album.albumThumbnailAssetId)) {
        await this.albumRepository.updateThumbnails();
      }
    }

    return results;
  }

  async addUsers(auth: AuthDto, id: string, dto: AddUsersDto): Promise<AlbumResponseDto> {
    await this.access.requirePermission(auth, Permission.ALBUM_SHARE, id);

    const album = await this.findOrFail(id, { withAssets: false });

    for (const userId of dto.sharedUserIds) {
      if (album.ownerId === userId) {
        throw new BadRequestException('Cannot be shared with owner');
      }

      const exists = album.sharedUsers.find((user) => user.id === userId);
      if (exists) {
        throw new BadRequestException('User already added');
      }

      const user = await this.userRepository.get(userId, {});
      if (!user) {
        throw new BadRequestException('User not found');
      }

      album.sharedUsers.push({ id: userId } as UserEntity);
    }

    return this.albumRepository
      .update({
        id: album.id,
        updatedAt: new Date(),
        sharedUsers: album.sharedUsers,
      })
      .then(mapAlbumWithoutAssets);
  }

  async removeUser(auth: AuthDto, id: string, userId: string | 'me'): Promise<void> {
    if (userId === 'me') {
      userId = auth.user.id;
    }

    const album = await this.findOrFail(id, { withAssets: false });

    if (album.ownerId === userId) {
      throw new BadRequestException('Cannot remove album owner');
    }

    const exists = album.sharedUsers.find((user) => user.id === userId);
    if (!exists) {
      throw new BadRequestException('Album not shared with user');
    }

    // a user can always remove themselves; removing anyone else requires album share permission
    if (auth.user.id !== userId) {
      await this.access.requirePermission(auth, Permission.ALBUM_SHARE, id);
    }

    await this.albumRepository.update({
      id: album.id,
      updatedAt: new Date(),
      sharedUsers: album.sharedUsers.filter((user) => user.id !== userId),
    });
  }

  private async findOrFail(id: string, options: AlbumInfoOptions) {
    const album = await this.albumRepository.getById(id, options);
    if (!album) {
      throw new BadRequestException('Album not found');
    }
    return album;
  }
}
@@ -0,0 +1,115 @@
import { BadRequestException } from '@nestjs/common';
import { IKeyRepository } from 'src/interfaces/api-key.repository';
import { ICryptoRepository } from 'src/interfaces/crypto.repository';
import { APIKeyService } from 'src/services/api-key.service';
import { keyStub } from 'test/fixtures/api-key.stub';
import { authStub } from 'test/fixtures/auth.stub';
import { newKeyRepositoryMock } from 'test/repositories/api-key.repository.mock';
import { newCryptoRepositoryMock } from 'test/repositories/crypto.repository.mock';

describe(APIKeyService.name, () => {
  let sut: APIKeyService;
  let keyMock: jest.Mocked<IKeyRepository>;
  let cryptoMock: jest.Mocked<ICryptoRepository>;

  beforeEach(() => {
    cryptoMock = newCryptoRepositoryMock();
    keyMock = newKeyRepositoryMock();
    sut = new APIKeyService(cryptoMock, keyMock);
  });

  describe('create', () => {
    it('should create a new key', async () => {
      keyMock.create.mockResolvedValue(keyStub.admin);
      await sut.create(authStub.admin, { name: 'Test Key' });
      expect(keyMock.create).toHaveBeenCalledWith({
        key: 'cmFuZG9tLWJ5dGVz (hashed)',
        name: 'Test Key',
        userId: authStub.admin.user.id,
      });
      expect(cryptoMock.randomBytes).toHaveBeenCalled();
      expect(cryptoMock.hashSha256).toHaveBeenCalled();
    });

    it('should not require a name', async () => {
      keyMock.create.mockResolvedValue(keyStub.admin);

      await sut.create(authStub.admin, {});

      expect(keyMock.create).toHaveBeenCalledWith({
        key: 'cmFuZG9tLWJ5dGVz (hashed)',
        name: 'API Key',
        userId: authStub.admin.user.id,
      });
      expect(cryptoMock.randomBytes).toHaveBeenCalled();
      expect(cryptoMock.hashSha256).toHaveBeenCalled();
    });
  });

  describe('update', () => {
    it('should throw an error if the key is not found', async () => {
      keyMock.getById.mockResolvedValue(null);

      await expect(sut.update(authStub.admin, 'random-guid', { name: 'New Name' })).rejects.toBeInstanceOf(
        BadRequestException,
      );

      expect(keyMock.update).not.toHaveBeenCalledWith('random-guid');
    });

    it('should update a key', async () => {
      keyMock.getById.mockResolvedValue(keyStub.admin);
      keyMock.update.mockResolvedValue(keyStub.admin);

      await sut.update(authStub.admin, 'random-guid', { name: 'New Name' });

      expect(keyMock.update).toHaveBeenCalledWith(authStub.admin.user.id, 'random-guid', { name: 'New Name' });
    });
  });

  describe('delete', () => {
    it('should throw an error if the key is not found', async () => {
      keyMock.getById.mockResolvedValue(null);

      await expect(sut.delete(authStub.admin, 'random-guid')).rejects.toBeInstanceOf(BadRequestException);

      expect(keyMock.delete).not.toHaveBeenCalledWith('random-guid');
    });

    it('should delete a key', async () => {
      keyMock.getById.mockResolvedValue(keyStub.admin);

      await sut.delete(authStub.admin, 'random-guid');

      expect(keyMock.delete).toHaveBeenCalledWith(authStub.admin.user.id, 'random-guid');
    });
  });

  describe('getById', () => {
    it('should throw an error if the key is not found', async () => {
      keyMock.getById.mockResolvedValue(null);

      await expect(sut.getById(authStub.admin, 'random-guid')).rejects.toBeInstanceOf(BadRequestException);

      expect(keyMock.getById).toHaveBeenCalledWith(authStub.admin.user.id, 'random-guid');
    });

    it('should get a key by id', async () => {
      keyMock.getById.mockResolvedValue(keyStub.admin);

      await sut.getById(authStub.admin, 'random-guid');

      expect(keyMock.getById).toHaveBeenCalledWith(authStub.admin.user.id, 'random-guid');
    });
  });

  describe('getAll', () => {
    it('should return all the keys for a user', async () => {
      keyMock.getByUserId.mockResolvedValue([keyStub.admin]);

      await expect(sut.getAll(authStub.admin)).resolves.toHaveLength(1);

      expect(keyMock.getByUserId).toHaveBeenCalledWith(authStub.admin.user.id);
    });
  });
});
@@ -0,0 +1,67 @@
import { BadRequestException, Inject, Injectable } from '@nestjs/common';
import { APIKeyCreateDto, APIKeyCreateResponseDto, APIKeyResponseDto } from 'src/dtos/api-key.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { APIKeyEntity } from 'src/entities/api-key.entity';
import { IKeyRepository } from 'src/interfaces/api-key.repository';
import { ICryptoRepository } from 'src/interfaces/crypto.repository';

@Injectable()
export class APIKeyService {
  constructor(
    @Inject(ICryptoRepository) private crypto: ICryptoRepository,
    @Inject(IKeyRepository) private repository: IKeyRepository,
  ) {}
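
  // The raw secret is returned to the caller exactly once; only its SHA-256 hash
  // is persisted, so a lost secret cannot be recovered and must be re-issued.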
  async create(auth: AuthDto, dto: APIKeyCreateDto): Promise<APIKeyCreateResponseDto> {
    const secret = this.crypto.randomBytes(32).toString('base64').replaceAll(/\W/g, '');
    const entity = await this.repository.create({
      key: this.crypto.hashSha256(secret),
      name: dto.name || 'API Key',
      userId: auth.user.id,
    });

    return { secret, apiKey: this.map(entity) };
  }

  async update(auth: AuthDto, id: string, dto: APIKeyCreateDto): Promise<APIKeyResponseDto> {
    const exists = await this.repository.getById(auth.user.id, id);
    if (!exists) {
      throw new BadRequestException('API Key not found');
    }

    const key = await this.repository.update(auth.user.id, id, { name: dto.name });

    return this.map(key);
  }

  async delete(auth: AuthDto, id: string): Promise<void> {
    const exists = await this.repository.getById(auth.user.id, id);
    if (!exists) {
      throw new BadRequestException('API Key not found');
    }

    await this.repository.delete(auth.user.id, id);
  }

  async getById(auth: AuthDto, id: string): Promise<APIKeyResponseDto> {
    const key = await this.repository.getById(auth.user.id, id);
    if (!key) {
      throw new BadRequestException('API Key not found');
    }
    return this.map(key);
  }

  async getAll(auth: AuthDto): Promise<APIKeyResponseDto[]> {
    const keys = await this.repository.getByUserId(auth.user.id);
    return keys.map((key) => this.map(key));
  }

  private map(entity: APIKeyEntity): APIKeyResponseDto {
    return {
      id: entity.id,
      name: entity.name,
      createdAt: entity.createdAt,
      updatedAt: entity.updatedAt,
    };
  }
}
File diff suppressed because it is too large
@@ -0,0 +1,553 @@
import { BadRequestException, Inject } from '@nestjs/common';
import _ from 'lodash';
import { DateTime, Duration } from 'luxon';
import { extname } from 'node:path';
import sanitize from 'sanitize-filename';
import { AccessCore, Permission } from 'src/cores/access.core';
import { StorageCore, StorageFolder } from 'src/cores/storage.core';
import { SystemConfigCore } from 'src/cores/system-config.core';
import { mimeTypes } from 'src/domain/domain.constant';
import { JOBS_ASSET_PAGINATION_SIZE, JobName } from 'src/domain/job/job.constants';
import { IAssetDeletionJob, ISidecarWriteJob } from 'src/domain/job/job.interface';
import {
  AssetResponseDto,
  MemoryLaneResponseDto,
  SanitizedAssetResponseDto,
  mapAsset,
} from 'src/dtos/asset-response.dto';
import {
  AssetBulkDeleteDto,
  AssetBulkUpdateDto,
  AssetJobName,
  AssetJobsDto,
  AssetStatsDto,
  UpdateAssetDto,
  UploadFieldName,
  mapStats,
} from 'src/dtos/asset.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { MapMarkerDto, MapMarkerResponseDto, MemoryLaneDto } from 'src/dtos/search.dto';
import { UpdateStackParentDto } from 'src/dtos/stack.dto';
import { TimeBucketAssetDto, TimeBucketDto, TimeBucketResponseDto } from 'src/dtos/time-bucket.dto';
import { AssetEntity } from 'src/entities/asset.entity';
import { LibraryType } from 'src/entities/library.entity';
import { ImmichLogger } from 'src/infra/logger';
import { IAccessRepository } from 'src/interfaces/access.repository';
import { IAssetStackRepository } from 'src/interfaces/asset-stack.repository';
import { IAssetRepository, TimeBucketOptions } from 'src/interfaces/asset.repository';
import { ClientEvent, ICommunicationRepository } from 'src/interfaces/communication.repository';
import { IJobRepository, JobItem, JobStatus } from 'src/interfaces/job.repository';
import { IPartnerRepository } from 'src/interfaces/partner.repository';
import { IStorageRepository } from 'src/interfaces/storage.repository';
import { ISystemConfigRepository } from 'src/interfaces/system-config.repository';
import { IUserRepository } from 'src/interfaces/user.repository';
import { usePagination } from 'src/utils';

export interface UploadRequest {
  auth: AuthDto | null;
  fieldName: UploadFieldName;
  file: UploadFile;
}

export interface UploadFile {
  uuid: string;
  checksum: Buffer;
  originalPath: string;
  originalName: string;
  size: number;
}

export class AssetService {
  private logger = new ImmichLogger(AssetService.name);
  private access: AccessCore;
  private configCore: SystemConfigCore;

  constructor(
    @Inject(IAccessRepository) accessRepository: IAccessRepository,
    @Inject(IAssetRepository) private assetRepository: IAssetRepository,
    @Inject(IJobRepository) private jobRepository: IJobRepository,
    @Inject(ISystemConfigRepository) configRepository: ISystemConfigRepository,
    @Inject(IStorageRepository) private storageRepository: IStorageRepository,
    @Inject(IUserRepository) private userRepository: IUserRepository,
    @Inject(ICommunicationRepository) private communicationRepository: ICommunicationRepository,
    @Inject(IPartnerRepository) private partnerRepository: IPartnerRepository,
    @Inject(IAssetStackRepository) private assetStackRepository: IAssetStackRepository,
  ) {
    this.access = AccessCore.create(accessRepository);
    this.configCore = SystemConfigCore.create(configRepository);
  }

  canUploadFile({ auth, fieldName, file }: UploadRequest): true {
    this.access.requireUploadAccess(auth);

    const filename = file.originalName;

    switch (fieldName) {
      case UploadFieldName.ASSET_DATA: {
        if (mimeTypes.isAsset(filename)) {
          return true;
        }
        break;
      }

      case UploadFieldName.LIVE_PHOTO_DATA: {
        if (mimeTypes.isVideo(filename)) {
          return true;
        }
        break;
      }

      case UploadFieldName.SIDECAR_DATA: {
        if (mimeTypes.isSidecar(filename)) {
          return true;
        }
        break;
      }

      case UploadFieldName.PROFILE_DATA: {
        if (mimeTypes.isProfile(filename)) {
          return true;
        }
        break;
      }
    }

    this.logger.error(`Unsupported file type ${filename}`);
    throw new BadRequestException(`Unsupported file type ${filename}`);
  }

  getUploadFilename({ auth, fieldName, file }: UploadRequest): string {
    this.access.requireUploadAccess(auth);

    const originalExtension = extname(file.originalName);

    const lookup = {
      [UploadFieldName.ASSET_DATA]: originalExtension,
      [UploadFieldName.LIVE_PHOTO_DATA]: '.mov',
      [UploadFieldName.SIDECAR_DATA]: '.xmp',
      [UploadFieldName.PROFILE_DATA]: originalExtension,
    };

    return sanitize(`${file.uuid}${lookup[fieldName]}`);
  }

  getUploadFolder({ auth, fieldName, file }: UploadRequest): string {
    auth = this.access.requireUploadAccess(auth);

    let folder = StorageCore.getNestedFolder(StorageFolder.UPLOAD, auth.user.id, file.uuid);
    if (fieldName === UploadFieldName.PROFILE_DATA) {
      folder = StorageCore.getFolderLocation(StorageFolder.PROFILE, auth.user.id);
    }

    this.storageRepository.mkdirSync(folder);

    return folder;
  }

  async getMapMarkers(auth: AuthDto, options: MapMarkerDto): Promise<MapMarkerResponseDto[]> {
    const userIds: string[] = [auth.user.id];
    if (options.withPartners) {
      const partners = await this.partnerRepository.getAll(auth.user.id);
      const partnersIds = partners
        .filter((partner) => partner.sharedBy && partner.sharedWith && partner.sharedById != auth.user.id)
        .map((partner) => partner.sharedById);
      userIds.push(...partnersIds);
    }
    return this.assetRepository.getMapMarkers(userIds, options);
  }
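
  // Builds the "memory lane": assets taken on this day of the year in previous
  // years, across the user's own timeline and any partners shared into it,
  // grouped under an "N years since..." title per year.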
  async getMemoryLane(auth: AuthDto, dto: MemoryLaneDto): Promise<MemoryLaneResponseDto[]> {
    const currentYear = new Date().getFullYear();

    // get partner ids
    const userIds: string[] = [auth.user.id];
    const partners = await this.partnerRepository.getAll(auth.user.id);
    const partnersIds = partners
      .filter((partner) => partner.sharedBy && partner.inTimeline)
      .map((partner) => partner.sharedById);
    userIds.push(...partnersIds);

    const assets = await this.assetRepository.getByDayOfYear(userIds, dto);

    return _.chain(assets)
      .filter((asset) => asset.localDateTime.getFullYear() < currentYear)
      .map((asset) => {
        const years = currentYear - asset.localDateTime.getFullYear();

        return {
          title: `${years} year${years > 1 ? 's' : ''} since...`,
          asset: mapAsset(asset, { auth }),
        };
      })
      .groupBy((asset) => asset.title)
      .map((items, title) => ({ title, assets: items.map(({ asset }) => asset) }))
      .value();
  }
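
  // Authorization rules for time-bucket queries: an album id needs ALBUM_READ,
  // a user timeline needs TIMELINE_READ (plus ARCHIVE_READ unless archived assets
  // are explicitly excluded), and partner timelines cannot be combined with
  // archived/favorite/trashed filters.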
  private async timeBucketChecks(auth: AuthDto, dto: TimeBucketDto) {
    if (dto.albumId) {
      await this.access.requirePermission(auth, Permission.ALBUM_READ, [dto.albumId]);
    } else {
      dto.userId = dto.userId || auth.user.id;
    }

    if (dto.userId) {
      await this.access.requirePermission(auth, Permission.TIMELINE_READ, [dto.userId]);
      if (dto.isArchived !== false) {
        await this.access.requirePermission(auth, Permission.ARCHIVE_READ, [dto.userId]);
      }
    }

    if (dto.withPartners) {
      const requestedArchived = dto.isArchived === true || dto.isArchived === undefined;
      const requestedFavorite = dto.isFavorite === true || dto.isFavorite === false;
      const requestedTrash = dto.isTrashed === true;

      if (requestedArchived || requestedFavorite || requestedTrash) {
        throw new BadRequestException(
          'withPartners is only supported for non-archived, non-trashed, non-favorited assets',
        );
      }
    }
  }
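
  // Read path for the timeline: run the permission checks, expand the requested
  // user to include in-timeline partners when withPartners is set, then delegate
  // to the repository; getTimeBucket() additionally strips metadata for shared
  // links that do not expose EXIF.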
  async getTimeBuckets(auth: AuthDto, dto: TimeBucketDto): Promise<TimeBucketResponseDto[]> {
    await this.timeBucketChecks(auth, dto);
    const timeBucketOptions = await this.buildTimeBucketOptions(auth, dto);

    return this.assetRepository.getTimeBuckets(timeBucketOptions);
  }

  async getTimeBucket(
    auth: AuthDto,
    dto: TimeBucketAssetDto,
  ): Promise<AssetResponseDto[] | SanitizedAssetResponseDto[]> {
    await this.timeBucketChecks(auth, dto);
    const timeBucketOptions = await this.buildTimeBucketOptions(auth, dto);
    const assets = await this.assetRepository.getTimeBucket(dto.timeBucket, timeBucketOptions);
    return !auth.sharedLink || auth.sharedLink?.showExif
      ? assets.map((asset) => mapAsset(asset, { withStack: true, auth }))
      : assets.map((asset) => mapAsset(asset, { stripMetadata: true, auth }));
  }

  async buildTimeBucketOptions(auth: AuthDto, dto: TimeBucketDto): Promise<TimeBucketOptions> {
    const { userId, ...options } = dto;
    let userIds: string[] | undefined = undefined;

    if (userId) {
      userIds = [userId];

      if (dto.withPartners) {
        const partners = await this.partnerRepository.getAll(auth.user.id);
        const partnersIds = partners
          .filter((partner) => partner.sharedBy && partner.sharedWith && partner.inTimeline)
          .map((partner) => partner.sharedById);

        userIds.push(...partnersIds);
      }
    }

    return { ...options, userIds };
  }

  async getStatistics(auth: AuthDto, dto: AssetStatsDto) {
    const stats = await this.assetRepository.getStatistics(auth.user.id, dto);
    return mapStats(stats);
  }

  async getRandom(auth: AuthDto, count: number): Promise<AssetResponseDto[]> {
    const assets = await this.assetRepository.getRandom(auth.user.id, count);
    return assets.map((a) => mapAsset(a, { auth }));
  }

  async getUserAssetsByDeviceId(auth: AuthDto, deviceId: string) {
    return this.assetRepository.getAllByDeviceId(auth.user.id, deviceId);
  }

  async get(auth: AuthDto, id: string): Promise<AssetResponseDto | SanitizedAssetResponseDto> {
    await this.access.requirePermission(auth, Permission.ASSET_READ, id);

    const asset = await this.assetRepository.getById(id, {
      exifInfo: true,
      tags: true,
      sharedLinks: true,
      smartInfo: true,
      owner: true,
      faces: {
        person: true,
      },
      stack: {
        assets: {
          exifInfo: true,
        },
      },
    });

    if (!asset) {
      throw new BadRequestException('Asset not found');
    }

    if (auth.sharedLink && !auth.sharedLink.showExif) {
      return mapAsset(asset, { stripMetadata: true, withStack: true, auth });
    }

    const data = mapAsset(asset, { withStack: true, auth });

    if (auth.sharedLink) {
      delete data.owner;
    }

    if (data.ownerId !== auth.user.id || auth.sharedLink) {
      data.people = [];
    }

    return data;
  }

  async update(auth: AuthDto, id: string, dto: UpdateAssetDto): Promise<AssetResponseDto> {
    await this.access.requirePermission(auth, Permission.ASSET_UPDATE, id);

    const { description, dateTimeOriginal, latitude, longitude, ...rest } = dto;
    await this.updateMetadata({ id, description, dateTimeOriginal, latitude, longitude });

    await this.assetRepository.update({ id, ...rest });
    const asset = await this.assetRepository.getById(id, {
      exifInfo: true,
      owner: true,
      smartInfo: true,
      tags: true,
      faces: {
        person: true,
      },
    });
    if (!asset) {
      throw new BadRequestException('Asset not found');
    }
    return mapAsset(asset, { auth });
  }
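
  // Bulk update doubles as the stacking endpoint (see the TODO below): passing
  // stackParentId merges the selected assets (and their existing stacks) into the
  // parent's stack, while removeParent detaches them; donor stacks left with
  // fewer than two assets are deleted afterwards.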
  async updateAll(auth: AuthDto, dto: AssetBulkUpdateDto): Promise<void> {
    const { ids, removeParent, dateTimeOriginal, latitude, longitude, ...options } = dto;
    await this.access.requirePermission(auth, Permission.ASSET_UPDATE, ids);

    // TODO: refactor this logic into separate API calls POST /stack, PUT /stack, etc.
    const stackIdsToCheckForDelete: string[] = [];
    if (removeParent) {
      (options as Partial<AssetEntity>).stack = null;
      const assets = await this.assetRepository.getByIds(ids, { stack: true });
      stackIdsToCheckForDelete.push(...new Set(assets.filter((a) => !!a.stackId).map((a) => a.stackId!)));
      // The stack is cleared above; touch the updatedAt column of each unique
      // primary asset to indicate that one of its children was removed
      await this.assetRepository.updateAll(
        assets.filter((a) => !!a.stack?.primaryAssetId).map((a) => a.stack!.primaryAssetId!),
        { updatedAt: new Date() },
      );
    } else if (options.stackParentId) {
      // Create a new stack if the parent doesn't have one already; otherwise add to the existing stack
      await this.access.requirePermission(auth, Permission.ASSET_UPDATE, options.stackParentId);
      const primaryAsset = await this.assetRepository.getById(options.stackParentId, { stack: { assets: true } });
      if (!primaryAsset) {
        throw new BadRequestException('Asset not found for given stackParentId');
      }
      let stack = primaryAsset.stack;

      ids.push(options.stackParentId);
      const assets = await this.assetRepository.getByIds(ids, { stack: { assets: true } });
      stackIdsToCheckForDelete.push(
        ...new Set(assets.filter((a) => !!a.stackId && stack?.id !== a.stackId).map((a) => a.stackId!)),
      );
      const assetsWithChildren = assets.filter((a) => a.stack && a.stack.assets.length > 0);
      ids.push(...assetsWithChildren.flatMap((child) => child.stack!.assets.map((gChild) => gChild.id)));

      if (stack) {
        await this.assetStackRepository.update({
          id: stack.id,
          primaryAssetId: primaryAsset.id,
          assets: ids.map((id) => ({ id }) as AssetEntity),
        });
      } else {
        stack = await this.assetStackRepository.create({
          primaryAssetId: primaryAsset.id,
          assets: ids.map((id) => ({ id }) as AssetEntity),
        });
      }

      // Merge stacks
      options.stackParentId = undefined;
      (options as Partial<AssetEntity>).updatedAt = new Date();
    }

    for (const id of ids) {
      await this.updateMetadata({ id, dateTimeOriginal, latitude, longitude });
    }

    await this.assetRepository.updateAll(ids, options);
    const stacksToCheck = await Promise.all(
      stackIdsToCheckForDelete.map((id) => this.assetStackRepository.getById(id)),
    );
    const stacksToDelete = stacksToCheck
      .flatMap((stack) => (stack ? [stack] : []))
      .filter((stack) => stack.assets.length < 2);
    await Promise.all(stacksToDelete.map((as) => this.assetStackRepository.delete(as.id)));
    this.communicationRepository.send(ClientEvent.ASSET_STACK_UPDATE, auth.user.id, ids);
  }
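
  // Background job: queues permanent deletion for assets that have been in the
  // trash longer than the configured retention; with the trash feature disabled,
  // trashedDays is 0 and anything already trashed is queued immediately.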
  async handleAssetDeletionCheck(): Promise<JobStatus> {
    const config = await this.configCore.getConfig();
    const trashedDays = config.trash.enabled ? config.trash.days : 0;
    const trashedBefore = DateTime.now()
      .minus(Duration.fromObject({ days: trashedDays }))
      .toJSDate();
    const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) =>
      this.assetRepository.getAll(pagination, { trashedBefore }),
    );

    for await (const assets of assetPagination) {
      await this.jobRepository.queueAll(
        assets.map((asset) => ({ name: JobName.ASSET_DELETION, data: { id: asset.id } })),
      );
    }

    return JobStatus.SUCCESS;
  }

  async handleAssetDeletion(job: IAssetDeletionJob): Promise<JobStatus> {
    const { id, fromExternal } = job;

    const asset = await this.assetRepository.getById(id, {
      faces: {
        person: true,
      },
      library: true,
      stack: { assets: true },
      exifInfo: true,
    });

    if (!asset) {
      return JobStatus.FAILED;
    }

    // Ignore requests that are not from the external library job but are for an external asset
    if (!fromExternal && (!asset.library || asset.library.type === LibraryType.EXTERNAL)) {
      return JobStatus.SKIPPED;
    }

    // Replace the parent of the stack children with a new asset
    if (asset.stack?.primaryAssetId === id) {
      const stackAssetIds = asset.stack.assets.map((a) => a.id);
      if (stackAssetIds.length > 2) {
        const newPrimaryAssetId = stackAssetIds.find((a) => a !== id)!;
        await this.assetStackRepository.update({
          id: asset.stack.id,
          primaryAssetId: newPrimaryAssetId,
        });
      } else {
        await this.assetStackRepository.delete(asset.stack.id);
      }
    }

    await this.assetRepository.remove(asset);
    await this.userRepository.updateUsage(asset.ownerId, -(asset.exifInfo?.fileSizeInByte || 0));
    this.communicationRepository.send(ClientEvent.ASSET_DELETE, asset.ownerId, id);

    // TODO refactor this to use cascades
    if (asset.livePhotoVideoId) {
      await this.jobRepository.queue({ name: JobName.ASSET_DELETION, data: { id: asset.livePhotoVideoId } });
    }

    const files = [asset.webpPath, asset.resizePath, asset.encodedVideoPath, asset.sidecarPath];
    if (!fromExternal) {
      files.push(asset.originalPath);
    }

    if (!asset.isReadOnly) {
      await this.jobRepository.queue({ name: JobName.DELETE_FILES, data: { files } });
    }

    return JobStatus.SUCCESS;
  }
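
  // force deletes queue permanent ASSET_DELETION jobs right away; otherwise the
  // assets are soft-deleted into the trash and clients are notified of the move.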
  async deleteAll(auth: AuthDto, dto: AssetBulkDeleteDto): Promise<void> {
    const { ids, force } = dto;

    await this.access.requirePermission(auth, Permission.ASSET_DELETE, ids);

    if (force) {
      await this.jobRepository.queueAll(ids.map((id) => ({ name: JobName.ASSET_DELETION, data: { id } })));
    } else {
      await this.assetRepository.softDeleteAll(ids);
      this.communicationRepository.send(ClientEvent.ASSET_TRASH, auth.user.id, ids);
    }
  }

  async updateStackParent(auth: AuthDto, dto: UpdateStackParentDto): Promise<void> {
    const { oldParentId, newParentId } = dto;
    await this.access.requirePermission(auth, Permission.ASSET_READ, oldParentId);
    await this.access.requirePermission(auth, Permission.ASSET_UPDATE, newParentId);

    const childIds: string[] = [];
    const oldParent = await this.assetRepository.getById(oldParentId, {
      faces: {
        person: true,
      },
      library: true,
      stack: {
        assets: true,
      },
    });
    if (!oldParent?.stackId) {
      throw new Error('Asset not found or not in a stack');
    }

    // Get all children of the old parent
    childIds.push(oldParent.id, ...(oldParent.stack?.assets.map((a) => a.id) ?? []));

    await this.assetStackRepository.update({
      id: oldParent.stackId,
      primaryAssetId: newParentId,
    });

    this.communicationRepository.send(ClientEvent.ASSET_STACK_UPDATE, auth.user.id, [
      ...childIds,
      newParentId,
      oldParentId,
    ]);
    await this.assetRepository.updateAll([oldParentId, newParentId, ...childIds], { updatedAt: new Date() });
  }

  async run(auth: AuthDto, dto: AssetJobsDto) {
    await this.access.requirePermission(auth, Permission.ASSET_UPDATE, dto.assetIds);

    const jobs: JobItem[] = [];

    for (const id of dto.assetIds) {
      switch (dto.name) {
        case AssetJobName.REFRESH_METADATA: {
          jobs.push({ name: JobName.METADATA_EXTRACTION, data: { id } });
          break;
        }

        case AssetJobName.REGENERATE_THUMBNAIL: {
          jobs.push({ name: JobName.GENERATE_JPEG_THUMBNAIL, data: { id } });
          break;
        }

        case AssetJobName.TRANSCODE_VIDEO: {
          jobs.push({ name: JobName.VIDEO_CONVERSION, data: { id } });
          break;
        }
      }
    }

    await this.jobRepository.queueAll(jobs);
  }

  private async updateMetadata(dto: ISidecarWriteJob) {
    const { id, description, dateTimeOriginal, latitude, longitude } = dto;
    const writes = _.omitBy({ description, dateTimeOriginal, latitude, longitude }, _.isUndefined);
    if (Object.keys(writes).length > 0) {
      await this.assetRepository.upsertExif({ assetId: id, ...writes });
      await this.jobRepository.queue({ name: JobName.SIDECAR_WRITE, data: { id, ...writes } });
    }
  }
}
@@ -0,0 +1,85 @@
import { DatabaseAction, EntityType } from 'src/entities/audit.entity';
import { IAssetRepository } from 'src/interfaces/asset.repository';
import { IAuditRepository } from 'src/interfaces/audit.repository';
import { ICryptoRepository } from 'src/interfaces/crypto.repository';
import { JobStatus } from 'src/interfaces/job.repository';
import { IPersonRepository } from 'src/interfaces/person.repository';
import { IStorageRepository } from 'src/interfaces/storage.repository';
import { IUserRepository } from 'src/interfaces/user.repository';
import { AuditService } from 'src/services/audit.service';
import { auditStub } from 'test/fixtures/audit.stub';
import { authStub } from 'test/fixtures/auth.stub';
import { IAccessRepositoryMock, newAccessRepositoryMock } from 'test/repositories/access.repository.mock';
import { newAssetRepositoryMock } from 'test/repositories/asset.repository.mock';
import { newAuditRepositoryMock } from 'test/repositories/audit.repository.mock';
import { newCryptoRepositoryMock } from 'test/repositories/crypto.repository.mock';
import { newPersonRepositoryMock } from 'test/repositories/person.repository.mock';
import { newStorageRepositoryMock } from 'test/repositories/storage.repository.mock';
import { newUserRepositoryMock } from 'test/repositories/user.repository.mock';

describe(AuditService.name, () => {
  let sut: AuditService;
  let accessMock: IAccessRepositoryMock;
  let assetMock: jest.Mocked<IAssetRepository>;
  let auditMock: jest.Mocked<IAuditRepository>;
  let cryptoMock: jest.Mocked<ICryptoRepository>;
  let personMock: jest.Mocked<IPersonRepository>;
  let storageMock: jest.Mocked<IStorageRepository>;
  let userMock: jest.Mocked<IUserRepository>;

  beforeEach(() => {
    accessMock = newAccessRepositoryMock();
    assetMock = newAssetRepositoryMock();
    cryptoMock = newCryptoRepositoryMock();
    auditMock = newAuditRepositoryMock();
    personMock = newPersonRepositoryMock();
    storageMock = newStorageRepositoryMock();
    userMock = newUserRepositoryMock();
    sut = new AuditService(accessMock, assetMock, cryptoMock, personMock, auditMock, storageMock, userMock);
  });

  it('should work', () => {
    expect(sut).toBeDefined();
  });

  describe('handleCleanup', () => {
    it('should delete old audit entries', async () => {
      await expect(sut.handleCleanup()).resolves.toBe(JobStatus.SUCCESS);
      expect(auditMock.removeBefore).toHaveBeenCalledWith(expect.any(Date));
    });
  });

  describe('getDeletes', () => {
    it('should require full sync if the request is older than 100 days', async () => {
      auditMock.getAfter.mockResolvedValue([]);

      const date = new Date(2022, 0, 1);
      await expect(sut.getDeletes(authStub.admin, { after: date, entityType: EntityType.ASSET })).resolves.toEqual({
        needsFullSync: true,
        ids: [],
      });

      expect(auditMock.getAfter).toHaveBeenCalledWith(date, {
        action: DatabaseAction.DELETE,
        ownerId: authStub.admin.user.id,
        entityType: EntityType.ASSET,
      });
    });

    it('should get any new or updated assets and deleted ids', async () => {
      auditMock.getAfter.mockResolvedValue([auditStub.delete]);

      const date = new Date();
      await expect(sut.getDeletes(authStub.admin, { after: date, entityType: EntityType.ASSET })).resolves.toEqual({
        needsFullSync: false,
        ids: ['asset-deleted'],
      });

      expect(auditMock.getAfter).toHaveBeenCalledWith(date, {
        action: DatabaseAction.DELETE,
        ownerId: authStub.admin.user.id,
        entityType: EntityType.ASSET,
      });
    });
  });
});
@@ -0,0 +1,244 @@
import { BadRequestException, Inject, Injectable } from '@nestjs/common';
import { DateTime } from 'luxon';
import { resolve } from 'node:path';
import { AccessCore, Permission } from 'src/cores/access.core';
import { StorageCore, StorageFolder } from 'src/cores/storage.core';
import { AUDIT_LOG_MAX_DURATION } from 'src/domain/domain.constant';
import { JOBS_ASSET_PAGINATION_SIZE } from 'src/domain/job/job.constants';
import {
  AuditDeletesDto,
  AuditDeletesResponseDto,
  FileChecksumDto,
  FileChecksumResponseDto,
  FileReportItemDto,
  PathEntityType,
} from 'src/dtos/audit.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { DatabaseAction } from 'src/entities/audit.entity';
import { AssetPathType, PersonPathType, UserPathType } from 'src/entities/move.entity';
import { ImmichLogger } from 'src/infra/logger';
import { IAccessRepository } from 'src/interfaces/access.repository';
import { IAssetRepository } from 'src/interfaces/asset.repository';
import { IAuditRepository } from 'src/interfaces/audit.repository';
import { ICryptoRepository } from 'src/interfaces/crypto.repository';
import { JobStatus } from 'src/interfaces/job.repository';
import { IPersonRepository } from 'src/interfaces/person.repository';
import { IStorageRepository } from 'src/interfaces/storage.repository';
import { IUserRepository } from 'src/interfaces/user.repository';
import { usePagination } from 'src/utils';

@Injectable()
export class AuditService {
  private access: AccessCore;
  private logger = new ImmichLogger(AuditService.name);

  constructor(
    @Inject(IAccessRepository) accessRepository: IAccessRepository,
    @Inject(IAssetRepository) private assetRepository: IAssetRepository,
    @Inject(ICryptoRepository) private cryptoRepository: ICryptoRepository,
    @Inject(IPersonRepository) private personRepository: IPersonRepository,
    @Inject(IAuditRepository) private repository: IAuditRepository,
    @Inject(IStorageRepository) private storageRepository: IStorageRepository,
    @Inject(IUserRepository) private userRepository: IUserRepository,
  ) {
    this.access = AccessCore.create(accessRepository);
  }

  async handleCleanup(): Promise<JobStatus> {
    await this.repository.removeBefore(DateTime.now().minus(AUDIT_LOG_MAX_DURATION).toJSDate());
    return JobStatus.SUCCESS;
  }

  async getDeletes(auth: AuthDto, dto: AuditDeletesDto): Promise<AuditDeletesResponseDto> {
    const userId = dto.userId || auth.user.id;
    await this.access.requirePermission(auth, Permission.TIMELINE_READ, userId);

    const audits = await this.repository.getAfter(dto.after, {
      ownerId: userId,
      entityType: dto.entityType,
      action: DatabaseAction.DELETE,
    });

    const duration = DateTime.now().diff(DateTime.fromJSDate(dto.after));

    return {
      needsFullSync: duration > AUDIT_LOG_MAX_DURATION,
      ids: audits.map(({ entityId }) => entityId),
    };
  }

  async getChecksums(dto: FileChecksumDto) {
    const results: FileChecksumResponseDto[] = [];
    for (const filename of dto.filenames) {
      if (!StorageCore.isImmichPath(filename)) {
        throw new BadRequestException(
          `Could not get the checksum of ${filename} because the file isn't accessible by Immich`,
        );
      }

      const checksum = await this.cryptoRepository.hashFile(filename);
      results.push({ filename, checksum: checksum.toString('base64') });
    }
    return results;
  }

  async fixItems(items: FileReportItemDto[]) {
    for (const { entityId: id, pathType, pathValue } of items) {
      if (!StorageCore.isImmichPath(pathValue)) {
        throw new BadRequestException(
          `Could not fix item ${id} with path ${pathValue} because the file isn't accessible by Immich`,
        );
      }

      switch (pathType) {
        case AssetPathType.ENCODED_VIDEO: {
          await this.assetRepository.update({ id, encodedVideoPath: pathValue });
          break;
        }

        case AssetPathType.JPEG_THUMBNAIL: {
          await this.assetRepository.update({ id, resizePath: pathValue });
          break;
        }

        case AssetPathType.WEBP_THUMBNAIL: {
          await this.assetRepository.update({ id, webpPath: pathValue });
          break;
        }

        case AssetPathType.ORIGINAL: {
          await this.assetRepository.update({ id, originalPath: pathValue });
          break;
        }

        case AssetPathType.SIDECAR: {
          await this.assetRepository.update({ id, sidecarPath: pathValue });
          break;
        }

        case PersonPathType.FACE: {
          await this.personRepository.update({ id, thumbnailPath: pathValue });
          break;
        }

        case UserPathType.PROFILE: {
          await this.userRepository.update(id, { profileImagePath: pathValue });
          break;
        }
      }
    }
  }

  private fullPath(filename: string) {
    return resolve(filename);
  }
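
  // Cross-checks the database against the storage folders: "orphans" are records
  // whose files are missing on disk, "extras" are files on disk that no record
  // claims. External-library originals are ignored, since Immich does not manage
  // those files.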
  async getFileReport() {
    const hasFile = (items: Set<string>, filename: string) => items.has(filename) || items.has(this.fullPath(filename));
    const crawl = async (folder: StorageFolder) =>
      new Set(
        await this.storageRepository.crawl({
          includeHidden: true,
          pathsToCrawl: [StorageCore.getBaseFolder(folder)],
        }),
      );

    const uploadFiles = await crawl(StorageFolder.UPLOAD);
    const libraryFiles = await crawl(StorageFolder.LIBRARY);
    const thumbFiles = await crawl(StorageFolder.THUMBNAILS);
    const videoFiles = await crawl(StorageFolder.ENCODED_VIDEO);
    const profileFiles = await crawl(StorageFolder.PROFILE);
    const allFiles = new Set<string>();
    for (const list of [libraryFiles, thumbFiles, videoFiles, profileFiles, uploadFiles]) {
      for (const item of list) {
        allFiles.add(item);
      }
    }

    const track = (filename: string | null) => {
      if (!filename) {
        return;
      }
      allFiles.delete(filename);
      allFiles.delete(this.fullPath(filename));
    };

    this.logger.log(
      `Found ${libraryFiles.size} original files, ${thumbFiles.size} thumbnails, ${videoFiles.size} encoded videos, ${profileFiles.size} profile files`,
    );
    const pagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (options) =>
      this.assetRepository.getAll(options, { withDeleted: true, withArchived: true }),
    );

    let assetCount = 0;

    const orphans: FileReportItemDto[] = [];
    for await (const assets of pagination) {
      assetCount += assets.length;
      for (const { id, originalPath, resizePath, encodedVideoPath, webpPath, isExternal, checksum } of assets) {
        for (const file of [originalPath, resizePath, encodedVideoPath, webpPath]) {
          track(file);
        }

        const entity = { entityId: id, entityType: PathEntityType.ASSET, checksum: checksum.toString('base64') };
        if (
          originalPath &&
          !hasFile(libraryFiles, originalPath) &&
          !hasFile(uploadFiles, originalPath) &&
          // Android motion assets
          !hasFile(videoFiles, originalPath) &&
          // ignore external library assets
          !isExternal
        ) {
          orphans.push({ ...entity, pathType: AssetPathType.ORIGINAL, pathValue: originalPath });
        }
        if (resizePath && !hasFile(thumbFiles, resizePath)) {
          orphans.push({ ...entity, pathType: AssetPathType.JPEG_THUMBNAIL, pathValue: resizePath });
        }
        if (webpPath && !hasFile(thumbFiles, webpPath)) {
          orphans.push({ ...entity, pathType: AssetPathType.WEBP_THUMBNAIL, pathValue: webpPath });
        }
        if (encodedVideoPath && !hasFile(videoFiles, encodedVideoPath)) {
          orphans.push({ ...entity, pathType: AssetPathType.ENCODED_VIDEO, pathValue: encodedVideoPath });
        }
      }
    }

    const users = await this.userRepository.getList();
    for (const { id, profileImagePath } of users) {
      track(profileImagePath);

      const entity = { entityId: id, entityType: PathEntityType.USER };
      if (profileImagePath && !hasFile(profileFiles, profileImagePath)) {
        orphans.push({ ...entity, pathType: UserPathType.PROFILE, pathValue: profileImagePath });
      }
    }

    const personPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) =>
      this.personRepository.getAll(pagination),
    );
    for await (const people of personPagination) {
      for (const { id, thumbnailPath } of people) {
        track(thumbnailPath);
        const entity = { entityId: id, entityType: PathEntityType.PERSON };
        if (thumbnailPath && !hasFile(thumbFiles, thumbnailPath)) {
          orphans.push({ ...entity, pathType: PersonPathType.FACE, pathValue: thumbnailPath });
        }
      }

      this.logger.log(`Found ${assetCount} assets, ${users.length} users, ${people.length} people`);
    }

    const extras: string[] = [];
    for (const file of allFiles) {
      extras.push(file);
    }

    // send as absolute paths
    for (const orphan of orphans) {
      orphan.pathValue = this.fullPath(orphan.pathValue);
    }

    return { orphans, extras };
  }
}
@@ -0,0 +1,610 @@
import { BadRequestException, UnauthorizedException } from '@nestjs/common';
import { IncomingHttpHeaders } from 'node:http';
import { Issuer, generators } from 'openid-client';
import { Socket } from 'socket.io';
import { AuthType } from 'src/domain/auth/auth.constant';
import { AuthDto, SignUpDto } from 'src/dtos/auth.dto';
import { UserEntity } from 'src/entities/user.entity';
import { IKeyRepository } from 'src/interfaces/api-key.repository';
import { ICryptoRepository } from 'src/interfaces/crypto.repository';
import { ILibraryRepository } from 'src/interfaces/library.repository';
import { ISharedLinkRepository } from 'src/interfaces/shared-link.repository';
import { ISystemConfigRepository } from 'src/interfaces/system-config.repository';
import { IUserTokenRepository } from 'src/interfaces/user-token.repository';
import { IUserRepository } from 'src/interfaces/user.repository';
import { AuthService } from 'src/services/auth.service';
import { keyStub } from 'test/fixtures/api-key.stub';
import { authStub, loginResponseStub } from 'test/fixtures/auth.stub';
import { sharedLinkStub } from 'test/fixtures/shared-link.stub';
import { systemConfigStub } from 'test/fixtures/system-config.stub';
import { userTokenStub } from 'test/fixtures/user-token.stub';
import { userStub } from 'test/fixtures/user.stub';
import { IAccessRepositoryMock, newAccessRepositoryMock } from 'test/repositories/access.repository.mock';
import { newKeyRepositoryMock } from 'test/repositories/api-key.repository.mock';
import { newCryptoRepositoryMock } from 'test/repositories/crypto.repository.mock';
import { newLibraryRepositoryMock } from 'test/repositories/library.repository.mock';
import { newSharedLinkRepositoryMock } from 'test/repositories/shared-link.repository.mock';
import { newSystemConfigRepositoryMock } from 'test/repositories/system-config.repository.mock';
import { newUserTokenRepositoryMock } from 'test/repositories/user-token.repository.mock';
import { newUserRepositoryMock } from 'test/repositories/user.repository.mock';

// const token = Buffer.from('my-api-key', 'utf8').toString('base64');

const email = 'test@immich.com';
const sub = 'my-auth-user-sub';
const loginDetails = {
  isSecure: true,
  clientIp: '127.0.0.1',
  deviceOS: '',
  deviceType: '',
};

const fixtures = {
  login: {
    email,
    password: 'password',
  },
};

const oauthUserWithDefaultQuota = {
  email: email,
  name: ' ',
  oauthId: sub,
  quotaSizeInBytes: 1_073_741_824,
  storageLabel: null,
};

describe('AuthService', () => {
  let sut: AuthService;
  let accessMock: jest.Mocked<IAccessRepositoryMock>;
  let cryptoMock: jest.Mocked<ICryptoRepository>;
  let userMock: jest.Mocked<IUserRepository>;
  let libraryMock: jest.Mocked<ILibraryRepository>;
  let configMock: jest.Mocked<ISystemConfigRepository>;
  let userTokenMock: jest.Mocked<IUserTokenRepository>;
  let shareMock: jest.Mocked<ISharedLinkRepository>;
  let keyMock: jest.Mocked<IKeyRepository>;

  let callbackMock: jest.Mock;
  let userinfoMock: jest.Mock;

  beforeEach(() => {
    callbackMock = jest.fn().mockReturnValue({ access_token: 'access-token' });
    userinfoMock = jest.fn().mockResolvedValue({ sub, email });

    jest.spyOn(generators, 'state').mockReturnValue('state');
    jest.spyOn(Issuer, 'discover').mockResolvedValue({
      id_token_signing_alg_values_supported: ['RS256'],
      Client: jest.fn().mockResolvedValue({
        issuer: {
          metadata: {
            end_session_endpoint: 'http://end-session-endpoint',
          },
        },
        authorizationUrl: jest.fn().mockReturnValue('http://authorization-url'),
        callbackParams: jest.fn().mockReturnValue({ state: 'state' }),
        callback: callbackMock,
        userinfo: userinfoMock,
      }),
    } as any);

    accessMock = newAccessRepositoryMock();
    cryptoMock = newCryptoRepositoryMock();
    userMock = newUserRepositoryMock();
    libraryMock = newLibraryRepositoryMock();
    configMock = newSystemConfigRepositoryMock();
    userTokenMock = newUserTokenRepositoryMock();
    shareMock = newSharedLinkRepositoryMock();
    keyMock = newKeyRepositoryMock();

    sut = new AuthService(accessMock, cryptoMock, configMock, libraryMock, userMock, userTokenMock, shareMock, keyMock);
  });
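
  // The Issuer.discover spy above replaces the whole openid-client flow: discovery
  // resolves to a fake Client whose callback() returns a canned access token and
  // whose userinfo() returns { sub, email }, so the OAuth tests below never touch
  // the network.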
|
||||
it('should be defined', () => {
|
||||
expect(sut).toBeDefined();
|
||||
});
|
||||
|
||||
describe('login', () => {
|
||||
it('should throw an error if password login is disabled', async () => {
|
||||
configMock.load.mockResolvedValue(systemConfigStub.disabled);
|
||||
await expect(sut.login(fixtures.login, loginDetails)).rejects.toBeInstanceOf(UnauthorizedException);
|
||||
});
|
||||
|
||||
it('should check the user exists', async () => {
|
||||
userMock.getByEmail.mockResolvedValue(null);
|
||||
await expect(sut.login(fixtures.login, loginDetails)).rejects.toBeInstanceOf(UnauthorizedException);
|
||||
expect(userMock.getByEmail).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should check the user has a password', async () => {
|
||||
userMock.getByEmail.mockResolvedValue({} as UserEntity);
|
||||
await expect(sut.login(fixtures.login, loginDetails)).rejects.toBeInstanceOf(UnauthorizedException);
|
||||
expect(userMock.getByEmail).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should successfully log the user in', async () => {
|
||||
userMock.getByEmail.mockResolvedValue(userStub.user1);
|
||||
userTokenMock.create.mockResolvedValue(userTokenStub.userToken);
|
||||
await expect(sut.login(fixtures.login, loginDetails)).resolves.toEqual(loginResponseStub.user1password);
|
||||
expect(userMock.getByEmail).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should generate the cookie headers (insecure)', async () => {
|
||||
userMock.getByEmail.mockResolvedValue(userStub.user1);
|
||||
userTokenMock.create.mockResolvedValue(userTokenStub.userToken);
|
||||
await expect(
|
||||
sut.login(fixtures.login, {
|
||||
clientIp: '127.0.0.1',
|
||||
isSecure: false,
|
||||
deviceOS: '',
|
||||
deviceType: '',
|
||||
}),
|
||||
).resolves.toEqual(loginResponseStub.user1insecure);
|
||||
expect(userMock.getByEmail).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('changePassword', () => {
|
||||
it('should change the password', async () => {
|
||||
const auth = { user: { email: 'test@imimch.com' } } as AuthDto;
|
||||
const dto = { password: 'old-password', newPassword: 'new-password' };
|
||||
|
||||
userMock.getByEmail.mockResolvedValue({
|
||||
email: 'test@immich.com',
|
||||
password: 'hash-password',
|
||||
} as UserEntity);
|
||||
|
||||
await sut.changePassword(auth, dto);
|
||||
|
||||
expect(userMock.getByEmail).toHaveBeenCalledWith(auth.user.email, true);
|
||||
expect(cryptoMock.compareBcrypt).toHaveBeenCalledWith('old-password', 'hash-password');
|
||||
});
|
||||
|
||||
it('should throw when auth user email is not found', async () => {
|
||||
const auth = { user: { email: 'test@imimch.com' } } as AuthDto;
|
||||
const dto = { password: 'old-password', newPassword: 'new-password' };
|
||||
|
||||
userMock.getByEmail.mockResolvedValue(null);
|
||||
|
||||
await expect(sut.changePassword(auth, dto)).rejects.toBeInstanceOf(UnauthorizedException);
|
||||
});
|
||||
|
||||
it('should throw when password does not match existing password', async () => {
|
||||
const auth = { user: { email: 'test@imimch.com' } as UserEntity };
|
||||
const dto = { password: 'old-password', newPassword: 'new-password' };
|
||||
|
||||
cryptoMock.compareBcrypt.mockReturnValue(false);
|
||||
|
||||
userMock.getByEmail.mockResolvedValue({
|
||||
email: 'test@immich.com',
|
||||
password: 'hash-password',
|
||||
} as UserEntity);
|
||||
|
||||
await expect(sut.changePassword(auth, dto)).rejects.toBeInstanceOf(BadRequestException);
|
||||
});
|
||||
|
||||
it('should throw when user does not have a password', async () => {
|
||||
const auth = { user: { email: 'test@imimch.com' } } as AuthDto;
|
||||
const dto = { password: 'old-password', newPassword: 'new-password' };
|
||||
|
||||
userMock.getByEmail.mockResolvedValue({
|
||||
email: 'test@immich.com',
|
||||
password: '',
|
||||
} as UserEntity);
|
||||
|
||||
await expect(sut.changePassword(auth, dto)).rejects.toBeInstanceOf(BadRequestException);
|
||||
});
|
||||
});
|
||||
|
||||
describe('logout', () => {
|
||||
it('should return the end session endpoint', async () => {
|
||||
configMock.load.mockResolvedValue(systemConfigStub.enabled);
|
||||
const auth = { user: { id: '123' } } as AuthDto;
|
||||
await expect(sut.logout(auth, AuthType.OAUTH)).resolves.toEqual({
|
||||
successful: true,
|
||||
redirectUri: 'http://end-session-endpoint',
|
||||
});
|
||||
});
|
||||
|
||||
it('should return the default redirect', async () => {
|
||||
const auth = { user: { id: '123' } } as AuthDto;
|
||||
|
||||
await expect(sut.logout(auth, AuthType.PASSWORD)).resolves.toEqual({
|
||||
successful: true,
|
||||
redirectUri: '/auth/login?autoLaunch=0',
|
||||
});
|
||||
});
|
||||
|
||||
it('should delete the access token', async () => {
|
||||
const auth = { user: { id: '123' }, userToken: { id: 'token123' } } as AuthDto;
|
||||
|
||||
await expect(sut.logout(auth, AuthType.PASSWORD)).resolves.toEqual({
|
||||
successful: true,
|
||||
redirectUri: '/auth/login?autoLaunch=0',
|
||||
});
|
||||
|
||||
expect(userTokenMock.delete).toHaveBeenCalledWith('token123');
|
||||
});
|
||||
|
||||
it('should return the default redirect if auth type is OAUTH but oauth is not enabled', async () => {
|
||||
const auth = { user: { id: '123' } } as AuthDto;
|
||||
|
||||
await expect(sut.logout(auth, AuthType.OAUTH)).resolves.toEqual({
|
||||
successful: true,
|
||||
redirectUri: '/auth/login?autoLaunch=0',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('adminSignUp', () => {
|
||||
const dto: SignUpDto = { email: 'test@immich.com', password: 'password', name: 'immich admin' };
|
||||
|
||||
it('should only allow one admin', async () => {
|
||||
userMock.getAdmin.mockResolvedValue({} as UserEntity);
|
||||
await expect(sut.adminSignUp(dto)).rejects.toBeInstanceOf(BadRequestException);
|
||||
expect(userMock.getAdmin).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should sign up the admin', async () => {
|
||||
userMock.getAdmin.mockResolvedValue(null);
|
||||
userMock.create.mockResolvedValue({ ...dto, id: 'admin', createdAt: new Date('2021-01-01') } as UserEntity);
|
||||
await expect(sut.adminSignUp(dto)).resolves.toEqual({
|
||||
avatarColor: expect.any(String),
|
||||
id: 'admin',
|
||||
createdAt: new Date('2021-01-01'),
|
||||
email: 'test@immich.com',
|
||||
name: 'immich admin',
|
||||
});
|
||||
expect(userMock.getAdmin).toHaveBeenCalled();
|
||||
expect(userMock.create).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('validate - socket connections', () => {
|
||||
it('should throw token is not provided', async () => {
|
||||
await expect(sut.validate({}, {})).rejects.toBeInstanceOf(UnauthorizedException);
|
||||
});
|
||||
|
||||
it('should validate using authorization header', async () => {
|
||||
userMock.get.mockResolvedValue(userStub.user1);
|
||||
userTokenMock.getByToken.mockResolvedValue(userTokenStub.userToken);
|
||||
const client = { request: { headers: { authorization: 'Bearer auth_token' } } };
|
||||
await expect(sut.validate((client as Socket).request.headers, {})).resolves.toEqual({
|
||||
user: userStub.user1,
|
||||
userToken: userTokenStub.userToken,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('validate - shared key', () => {
|
||||
it('should not accept a non-existent key', async () => {
|
||||
shareMock.getByKey.mockResolvedValue(null);
|
||||
const headers: IncomingHttpHeaders = { 'x-immich-share-key': 'key' };
|
||||
await expect(sut.validate(headers, {})).rejects.toBeInstanceOf(UnauthorizedException);
|
||||
});
|
||||
|
||||
it('should not accept an expired key', async () => {
|
||||
shareMock.getByKey.mockResolvedValue(sharedLinkStub.expired);
|
||||
const headers: IncomingHttpHeaders = { 'x-immich-share-key': 'key' };
|
||||
await expect(sut.validate(headers, {})).rejects.toBeInstanceOf(UnauthorizedException);
|
||||
});
|
||||
|
||||
it('should not accept a key without a user', async () => {
|
||||
shareMock.getByKey.mockResolvedValue(sharedLinkStub.expired);
|
||||
userMock.get.mockResolvedValue(null);
|
||||
const headers: IncomingHttpHeaders = { 'x-immich-share-key': 'key' };
|
||||
await expect(sut.validate(headers, {})).rejects.toBeInstanceOf(UnauthorizedException);
|
||||
});
|
||||
|
||||
it('should accept a base64url key', async () => {
|
||||
shareMock.getByKey.mockResolvedValue(sharedLinkStub.valid);
|
||||
userMock.get.mockResolvedValue(userStub.admin);
|
||||
const headers: IncomingHttpHeaders = { 'x-immich-share-key': sharedLinkStub.valid.key.toString('base64url') };
|
||||
await expect(sut.validate(headers, {})).resolves.toEqual({
|
||||
user: userStub.admin,
|
||||
sharedLink: sharedLinkStub.valid,
|
||||
});
|
||||
expect(shareMock.getByKey).toHaveBeenCalledWith(sharedLinkStub.valid.key);
|
||||
});
    it('should accept a hex key', async () => {
      shareMock.getByKey.mockResolvedValue(sharedLinkStub.valid);
      userMock.get.mockResolvedValue(userStub.admin);
      const headers: IncomingHttpHeaders = { 'x-immich-share-key': sharedLinkStub.valid.key.toString('hex') };
      await expect(sut.validate(headers, {})).resolves.toEqual({
        user: userStub.admin,
        sharedLink: sharedLinkStub.valid,
      });
      expect(shareMock.getByKey).toHaveBeenCalledWith(sharedLinkStub.valid.key);
    });
  });

  describe('validate - user token', () => {
    it('should throw if no token is found', async () => {
      userTokenMock.getByToken.mockResolvedValue(null);
      const headers: IncomingHttpHeaders = { 'x-immich-user-token': 'auth_token' };
      await expect(sut.validate(headers, {})).rejects.toBeInstanceOf(UnauthorizedException);
    });

    it('should return an auth dto', async () => {
      userTokenMock.getByToken.mockResolvedValue(userTokenStub.userToken);
      const headers: IncomingHttpHeaders = { cookie: 'immich_access_token=auth_token' };
      await expect(sut.validate(headers, {})).resolves.toEqual({
        user: userStub.user1,
        userToken: userTokenStub.userToken,
      });
    });

    it('should update when access time exceeds an hour', async () => {
      userTokenMock.getByToken.mockResolvedValue(userTokenStub.inactiveToken);
      userTokenMock.save.mockResolvedValue(userTokenStub.userToken);
      const headers: IncomingHttpHeaders = { cookie: 'immich_access_token=auth_token' };
      await expect(sut.validate(headers, {})).resolves.toEqual({
        user: userStub.user1,
        userToken: userTokenStub.userToken,
      });
      expect(userTokenMock.save.mock.calls[0][0]).toMatchObject({
        id: 'not_active',
        token: 'auth_token',
        userId: 'user-id',
        createdAt: new Date('2021-01-01'),
        updatedAt: expect.any(Date),
        deviceOS: 'Android',
        deviceType: 'Mobile',
      });
    });
  });

  describe('validate - api key', () => {
    it('should throw an error if no api key is found', async () => {
      keyMock.getKey.mockResolvedValue(null);
      const headers: IncomingHttpHeaders = { 'x-api-key': 'auth_token' };
      await expect(sut.validate(headers, {})).rejects.toBeInstanceOf(UnauthorizedException);
      expect(keyMock.getKey).toHaveBeenCalledWith('auth_token (hashed)');
    });

    it('should return an auth dto', async () => {
      keyMock.getKey.mockResolvedValue(keyStub.admin);
      const headers: IncomingHttpHeaders = { 'x-api-key': 'auth_token' };
      await expect(sut.validate(headers, {})).resolves.toEqual({ user: userStub.admin, apiKey: keyStub.admin });
      expect(keyMock.getKey).toHaveBeenCalledWith('auth_token (hashed)');
    });
  });

  describe('getDevices', () => {
    it('should get the devices', async () => {
      userTokenMock.getAll.mockResolvedValue([userTokenStub.userToken, userTokenStub.inactiveToken]);
      await expect(sut.getDevices(authStub.user1)).resolves.toEqual([
        {
          createdAt: '2021-01-01T00:00:00.000Z',
          current: true,
          deviceOS: '',
          deviceType: '',
          id: 'token-id',
          updatedAt: expect.any(String),
        },
        {
          createdAt: '2021-01-01T00:00:00.000Z',
          current: false,
          deviceOS: 'Android',
          deviceType: 'Mobile',
          id: 'not_active',
          updatedAt: expect.any(String),
        },
      ]);

      expect(userTokenMock.getAll).toHaveBeenCalledWith(authStub.user1.user.id);
    });
  });

  describe('logoutDevices', () => {
    it('should logout all devices', async () => {
      userTokenMock.getAll.mockResolvedValue([userTokenStub.inactiveToken, userTokenStub.userToken]);

      await sut.logoutDevices(authStub.user1);

      expect(userTokenMock.getAll).toHaveBeenCalledWith(authStub.user1.user.id);
      expect(userTokenMock.delete).toHaveBeenCalledWith('not_active');
      expect(userTokenMock.delete).not.toHaveBeenCalledWith('token-id');
    });
  });

  describe('logoutDevice', () => {
    it('should logout the device', async () => {
      accessMock.authDevice.checkOwnerAccess.mockResolvedValue(new Set(['token-1']));

      await sut.logoutDevice(authStub.user1, 'token-1');

      expect(accessMock.authDevice.checkOwnerAccess).toHaveBeenCalledWith(authStub.user1.user.id, new Set(['token-1']));
      expect(userTokenMock.delete).toHaveBeenCalledWith('token-1');
    });
  });

  describe('getMobileRedirect', () => {
    it('should pass along the query params', () => {
      expect(sut.getMobileRedirect('http://immich.app?code=123&state=456')).toEqual('app.immich:/?code=123&state=456');
    });

    it('should work if called without query params', () => {
      expect(sut.getMobileRedirect('http://immich.app')).toEqual('app.immich:/?');
    });
  });
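
  // OAuth callback: covers auto-registration, linking an existing account, mobile redirect overrides, and quota claims.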
  describe('callback', () => {
    it('should throw an error if OAuth is not enabled', async () => {
      await expect(sut.callback({ url: '' }, loginDetails)).rejects.toBeInstanceOf(BadRequestException);
    });

    it('should not allow auto registering', async () => {
      configMock.load.mockResolvedValue(systemConfigStub.noAutoRegister);
      userMock.getByEmail.mockResolvedValue(null);
      await expect(sut.callback({ url: 'http://immich/auth/login?code=abc123' }, loginDetails)).rejects.toBeInstanceOf(
        BadRequestException,
      );
      expect(userMock.getByEmail).toHaveBeenCalledTimes(1);
    });

    it('should link an existing user', async () => {
      configMock.load.mockResolvedValue(systemConfigStub.noAutoRegister);
      userMock.getByEmail.mockResolvedValue(userStub.user1);
      userMock.update.mockResolvedValue(userStub.user1);
      userTokenMock.create.mockResolvedValue(userTokenStub.userToken);

      await expect(sut.callback({ url: 'http://immich/auth/login?code=abc123' }, loginDetails)).resolves.toEqual(
        loginResponseStub.user1oauth,
      );

      expect(userMock.getByEmail).toHaveBeenCalledTimes(1);
      expect(userMock.update).toHaveBeenCalledWith(userStub.user1.id, { oauthId: sub });
    });

    it('should allow auto registering by default', async () => {
      configMock.load.mockResolvedValue(systemConfigStub.enabled);
      userMock.getByEmail.mockResolvedValue(null);
      userMock.getAdmin.mockResolvedValue(userStub.user1);
      userMock.create.mockResolvedValue(userStub.user1);
      userTokenMock.create.mockResolvedValue(userTokenStub.userToken);

      await expect(sut.callback({ url: 'http://immich/auth/login?code=abc123' }, loginDetails)).resolves.toEqual(
        loginResponseStub.user1oauth,
      );

      expect(userMock.getByEmail).toHaveBeenCalledTimes(2); // second call is for domain check before create
      expect(userMock.create).toHaveBeenCalledTimes(1);
    });

    it('should use the mobile redirect override', async () => {
      configMock.load.mockResolvedValue(systemConfigStub.override);
      userMock.getByOAuthId.mockResolvedValue(userStub.user1);
      userTokenMock.create.mockResolvedValue(userTokenStub.userToken);

      await sut.callback({ url: `app.immich:/?code=abc123` }, loginDetails);

      expect(callbackMock).toHaveBeenCalledWith('http://mobile-redirect', { state: 'state' }, { state: 'state' });
    });

    it('should use the mobile redirect override for ios urls with multiple slashes', async () => {
      configMock.load.mockResolvedValue(systemConfigStub.override);
      userMock.getByOAuthId.mockResolvedValue(userStub.user1);
      userTokenMock.create.mockResolvedValue(userTokenStub.userToken);

      await sut.callback({ url: `app.immich:///?code=abc123` }, loginDetails);

      expect(callbackMock).toHaveBeenCalledWith('http://mobile-redirect', { state: 'state' }, { state: 'state' });
    });
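
    // immich_quota claim handling: invalid and negative values fall back to the server default, zero disables the quota.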
    it('should use the default quota', async () => {
      configMock.load.mockResolvedValue(systemConfigStub.withDefaultStorageQuota);
      userMock.getByEmail.mockResolvedValue(null);
      userMock.getAdmin.mockResolvedValue(userStub.user1);
      userMock.create.mockResolvedValue(userStub.user1);

      await expect(sut.callback({ url: 'http://immich/auth/login?code=abc123' }, loginDetails)).resolves.toEqual(
        loginResponseStub.user1oauth,
      );

      expect(userMock.create).toHaveBeenCalledWith(oauthUserWithDefaultQuota);
    });

    it('should ignore an invalid storage quota', async () => {
      configMock.load.mockResolvedValue(systemConfigStub.withDefaultStorageQuota);
      userMock.getByEmail.mockResolvedValue(null);
      userMock.getAdmin.mockResolvedValue(userStub.user1);
      userMock.create.mockResolvedValue(userStub.user1);
      userinfoMock.mockResolvedValue({ sub, email, immich_quota: 'abc' });

      await expect(sut.callback({ url: 'http://immich/auth/login?code=abc123' }, loginDetails)).resolves.toEqual(
        loginResponseStub.user1oauth,
      );

      expect(userMock.create).toHaveBeenCalledWith(oauthUserWithDefaultQuota);
    });

    it('should ignore a negative quota', async () => {
      configMock.load.mockResolvedValue(systemConfigStub.withDefaultStorageQuota);
      userMock.getByEmail.mockResolvedValue(null);
      userMock.getAdmin.mockResolvedValue(userStub.user1);
      userMock.create.mockResolvedValue(userStub.user1);
      userinfoMock.mockResolvedValue({ sub, email, immich_quota: -5 });

      await expect(sut.callback({ url: 'http://immich/auth/login?code=abc123' }, loginDetails)).resolves.toEqual(
        loginResponseStub.user1oauth,
      );

      expect(userMock.create).toHaveBeenCalledWith(oauthUserWithDefaultQuota);
    });

    it('should not set quota for 0 quota', async () => {
      configMock.load.mockResolvedValue(systemConfigStub.withDefaultStorageQuota);
      userMock.getByEmail.mockResolvedValue(null);
      userMock.getAdmin.mockResolvedValue(userStub.user1);
      userMock.create.mockResolvedValue(userStub.user1);
      userinfoMock.mockResolvedValue({ sub, email, immich_quota: 0 });

      await expect(sut.callback({ url: 'http://immich/auth/login?code=abc123' }, loginDetails)).resolves.toEqual(
        loginResponseStub.user1oauth,
      );

      expect(userMock.create).toHaveBeenCalledWith({
        email: email,
        name: ' ',
        oauthId: sub,
        quotaSizeInBytes: null,
        storageLabel: null,
      });
    });

    it('should use a valid storage quota', async () => {
      configMock.load.mockResolvedValue(systemConfigStub.withDefaultStorageQuota);
      userMock.getByEmail.mockResolvedValue(null);
      userMock.getAdmin.mockResolvedValue(userStub.user1);
      userMock.create.mockResolvedValue(userStub.user1);
      userinfoMock.mockResolvedValue({ sub, email, immich_quota: 5 });

      await expect(sut.callback({ url: 'http://immich/auth/login?code=abc123' }, loginDetails)).resolves.toEqual(
        loginResponseStub.user1oauth,
      );

      expect(userMock.create).toHaveBeenCalledWith({
        email: email,
        name: ' ',
        oauthId: sub,
        quotaSizeInBytes: 5_368_709_120,
        storageLabel: null,
      });
    });
  });

  describe('link', () => {
    it('should link an account', async () => {
      configMock.load.mockResolvedValue(systemConfigStub.enabled);
      userMock.update.mockResolvedValue(userStub.user1);

      await sut.link(authStub.user1, { url: 'http://immich/user-settings?code=abc123' });

      expect(userMock.update).toHaveBeenCalledWith(authStub.user1.user.id, { oauthId: sub });
    });

    it('should not link an already linked oauth.sub', async () => {
      configMock.load.mockResolvedValue(systemConfigStub.enabled);
      userMock.getByOAuthId.mockResolvedValue({ id: 'other-user' } as UserEntity);

      await expect(sut.link(authStub.user1, { url: 'http://immich/user-settings?code=abc123' })).rejects.toBeInstanceOf(
        BadRequestException,
      );

      expect(userMock.update).not.toHaveBeenCalled();
    });
  });

  describe('unlink', () => {
    it('should unlink an account', async () => {
      configMock.load.mockResolvedValue(systemConfigStub.enabled);
      userMock.update.mockResolvedValue(userStub.user1);

      await sut.unlink(authStub.user1);

      expect(userMock.update).toHaveBeenCalledWith(authStub.user1.user.id, { oauthId: '' });
    });
  });
});
@@ -0,0 +1,468 @@
import {
  BadRequestException,
  Inject,
  Injectable,
  InternalServerErrorException,
  UnauthorizedException,
} from '@nestjs/common';
import { isNumber, isString } from 'class-validator';
import cookieParser from 'cookie';
import { DateTime } from 'luxon';
import { IncomingHttpHeaders } from 'node:http';
import { ClientMetadata, Issuer, UserinfoResponse, custom, generators } from 'openid-client';
import { AccessCore, Permission } from 'src/cores/access.core';
import { SystemConfigCore } from 'src/cores/system-config.core';
import { UserCore } from 'src/cores/user.core';
import {
  AuthType,
  IMMICH_ACCESS_COOKIE,
  IMMICH_API_KEY_HEADER,
  IMMICH_AUTH_TYPE_COOKIE,
  IMMICH_IS_AUTHENTICATED,
  LOGIN_URL,
  MOBILE_REDIRECT,
} from 'src/domain/auth/auth.constant';
import {
  AuthDeviceResponseDto,
  AuthDto,
  ChangePasswordDto,
  LoginCredentialDto,
  LoginResponseDto,
  LogoutResponseDto,
  OAuthAuthorizeResponseDto,
  OAuthCallbackDto,
  OAuthConfigDto,
  SignUpDto,
  mapLoginResponse,
  mapUserToken,
} from 'src/dtos/auth.dto';
import { UserResponseDto, mapUser } from 'src/dtos/user.dto';
import { SystemConfig } from 'src/entities/system-config.entity';
import { UserEntity } from 'src/entities/user.entity';
import { ImmichLogger } from 'src/infra/logger';
import { IAccessRepository } from 'src/interfaces/access.repository';
import { IKeyRepository } from 'src/interfaces/api-key.repository';
import { ICryptoRepository } from 'src/interfaces/crypto.repository';
import { ILibraryRepository } from 'src/interfaces/library.repository';
import { ISharedLinkRepository } from 'src/interfaces/shared-link.repository';
import { ISystemConfigRepository } from 'src/interfaces/system-config.repository';
import { IUserTokenRepository } from 'src/interfaces/user-token.repository';
import { IUserRepository } from 'src/interfaces/user.repository';
import { HumanReadableSize } from 'src/utils';

export interface LoginDetails {
  isSecure: boolean;
  clientIp: string;
  deviceType: string;
  deviceOS: string;
}

interface LoginResponse {
  response: LoginResponseDto;
  cookie: string[];
}

interface OAuthProfile extends UserinfoResponse {
  email: string;
}

interface ClaimOptions<T> {
  key: string;
  default: T;
  isValid: (value: unknown) => boolean;
}

@Injectable()
export class AuthService {
  private access: AccessCore;
  private configCore: SystemConfigCore;
  private logger = new ImmichLogger(AuthService.name);
  private userCore: UserCore;

  constructor(
    @Inject(IAccessRepository) accessRepository: IAccessRepository,
    @Inject(ICryptoRepository) private cryptoRepository: ICryptoRepository,
    @Inject(ISystemConfigRepository) configRepository: ISystemConfigRepository,
    @Inject(ILibraryRepository) libraryRepository: ILibraryRepository,
    @Inject(IUserRepository) private userRepository: IUserRepository,
    @Inject(IUserTokenRepository) private userTokenRepository: IUserTokenRepository,
    @Inject(ISharedLinkRepository) private sharedLinkRepository: ISharedLinkRepository,
    @Inject(IKeyRepository) private keyRepository: IKeyRepository,
  ) {
    this.access = AccessCore.create(accessRepository);
    this.configCore = SystemConfigCore.create(configRepository);
    this.userCore = UserCore.create(cryptoRepository, libraryRepository, userRepository);

    custom.setHttpOptionsDefaults({ timeout: 30_000 });
  }

  async login(dto: LoginCredentialDto, details: LoginDetails): Promise<LoginResponse> {
    const config = await this.configCore.getConfig();
    if (!config.passwordLogin.enabled) {
      throw new UnauthorizedException('Password login has been disabled');
    }

    let user = await this.userRepository.getByEmail(dto.email, true);
    if (user) {
      const isAuthenticated = this.validatePassword(dto.password, user);
      if (!isAuthenticated) {
        user = null;
      }
    }

    if (!user) {
      this.logger.warn(`Failed login attempt for user ${dto.email} from ip address ${details.clientIp}`);
      throw new UnauthorizedException('Incorrect email or password');
    }

    return this.createLoginResponse(user, AuthType.PASSWORD, details);
  }

  async logout(auth: AuthDto, authType: AuthType): Promise<LogoutResponseDto> {
    if (auth.userToken) {
      await this.userTokenRepository.delete(auth.userToken.id);
    }

    return {
      successful: true,
      redirectUri: await this.getLogoutEndpoint(authType),
    };
  }

  async changePassword(auth: AuthDto, dto: ChangePasswordDto) {
    const { password, newPassword } = dto;
    const user = await this.userRepository.getByEmail(auth.user.email, true);
    if (!user) {
      throw new UnauthorizedException();
    }

    const valid = this.validatePassword(password, user);
    if (!valid) {
      throw new BadRequestException('Wrong password');
    }

    return this.userCore.updateUser(auth.user, auth.user.id, { password: newPassword });
  }

  async adminSignUp(dto: SignUpDto): Promise<UserResponseDto> {
    const adminUser = await this.userRepository.getAdmin();

    if (adminUser) {
      throw new BadRequestException('The server already has an admin');
    }

    const admin = await this.userCore.createUser({
      isAdmin: true,
      email: dto.email,
      name: dto.name,
      password: dto.password,
      storageLabel: 'admin',
    });

    return mapUser(admin);
  }

  async validate(headers: IncomingHttpHeaders, params: Record<string, string>): Promise<AuthDto> {
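    // Credentials can arrive in several forms; precedence is shared link key, then user token
    // (header, query param, bearer token, or cookie), then API key.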
    const shareKey = (headers['x-immich-share-key'] || params.key) as string;
    const userToken = (headers['x-immich-user-token'] ||
      params.userToken ||
      this.getBearerToken(headers) ||
      this.getCookieToken(headers)) as string;
    const apiKey = (headers[IMMICH_API_KEY_HEADER] || params.apiKey) as string;

    if (shareKey) {
      return this.validateSharedLink(shareKey);
    }

    if (userToken) {
      return this.validateUserToken(userToken);
    }

    if (apiKey) {
      return this.validateApiKey(apiKey);
    }

    throw new UnauthorizedException('Authentication required');
  }

  async getDevices(auth: AuthDto): Promise<AuthDeviceResponseDto[]> {
    const userTokens = await this.userTokenRepository.getAll(auth.user.id);
    return userTokens.map((userToken) => mapUserToken(userToken, auth.userToken?.id));
  }

  async logoutDevice(auth: AuthDto, id: string): Promise<void> {
    await this.access.requirePermission(auth, Permission.AUTH_DEVICE_DELETE, id);
    await this.userTokenRepository.delete(id);
  }

  async logoutDevices(auth: AuthDto): Promise<void> {
    const devices = await this.userTokenRepository.getAll(auth.user.id);
    for (const device of devices) {
      if (device.id === auth.userToken?.id) {
        continue;
      }
      await this.userTokenRepository.delete(device.id);
    }
  }

  getMobileRedirect(url: string) {
    return `${MOBILE_REDIRECT}?${url.split('?')[1] || ''}`;
  }

  async authorize(dto: OAuthConfigDto): Promise<OAuthAuthorizeResponseDto> {
    const config = await this.configCore.getConfig();
    if (!config.oauth.enabled) {
      throw new BadRequestException('OAuth is not enabled');
    }

    const client = await this.getOAuthClient(config);
    const url = client.authorizationUrl({
      redirect_uri: this.normalize(config, dto.redirectUri),
      scope: config.oauth.scope,
      state: generators.state(),
    });

    return { url };
  }

  async callback(
    dto: OAuthCallbackDto,
    loginDetails: LoginDetails,
  ): Promise<{ response: LoginResponseDto; cookie: string[] }> {
    const config = await this.configCore.getConfig();
    const profile = await this.getOAuthProfile(config, dto.url);
    this.logger.debug(`Logging in with OAuth: ${JSON.stringify(profile)}`);
    let user = await this.userRepository.getByOAuthId(profile.sub);

    // link existing user
    if (!user) {
      const emailUser = await this.userRepository.getByEmail(profile.email);
      if (emailUser) {
        user = await this.userRepository.update(emailUser.id, { oauthId: profile.sub });
      }
    }

    const { autoRegister, defaultStorageQuota, storageLabelClaim, storageQuotaClaim } = config.oauth;

    // register new user
    if (!user) {
      if (!autoRegister) {
        this.logger.warn(
          `Unable to register ${profile.email}. To enable, set OAuth Auto Register to true in admin settings.`,
        );
        throw new BadRequestException(`User does not exist and auto registering is disabled.`);
      }

      this.logger.log(`Registering new user: ${profile.email}/${profile.sub}`);
      this.logger.verbose(`OAuth Profile: ${JSON.stringify(profile)}`);

      const storageLabel = this.getClaim(profile, {
        key: storageLabelClaim,
        default: '',
        isValid: isString,
      });
      const storageQuota = this.getClaim(profile, {
        key: storageQuotaClaim,
        default: defaultStorageQuota,
        isValid: (value: unknown) => isNumber(value) && value >= 0,
      });

      const userName = profile.name ?? `${profile.given_name || ''} ${profile.family_name || ''}`;
      user = await this.userCore.createUser({
        name: userName,
        email: profile.email,
        oauthId: profile.sub,
        quotaSizeInBytes: storageQuota * HumanReadableSize.GiB || null,
        storageLabel: storageLabel || null,
      });
    }

    return this.createLoginResponse(user, AuthType.OAUTH, loginDetails);
  }

  async link(auth: AuthDto, dto: OAuthCallbackDto): Promise<UserResponseDto> {
    const config = await this.configCore.getConfig();
    const { sub: oauthId } = await this.getOAuthProfile(config, dto.url);
    const duplicate = await this.userRepository.getByOAuthId(oauthId);
    if (duplicate && duplicate.id !== auth.user.id) {
      this.logger.warn(`OAuth link account failed: sub is already linked to another user (${duplicate.email}).`);
      throw new BadRequestException('This OAuth account has already been linked to another user.');
    }
    return mapUser(await this.userRepository.update(auth.user.id, { oauthId }));
  }

  async unlink(auth: AuthDto): Promise<UserResponseDto> {
    return mapUser(await this.userRepository.update(auth.user.id, { oauthId: '' }));
  }

  private async getLogoutEndpoint(authType: AuthType): Promise<string> {
    if (authType !== AuthType.OAUTH) {
      return LOGIN_URL;
    }

    const config = await this.configCore.getConfig();
    if (!config.oauth.enabled) {
      return LOGIN_URL;
    }

    const client = await this.getOAuthClient(config);
    return client.issuer.metadata.end_session_endpoint || LOGIN_URL;
  }

  private async getOAuthProfile(config: SystemConfig, url: string): Promise<OAuthProfile> {
    const redirectUri = this.normalize(config, url.split('?')[0]);
    const client = await this.getOAuthClient(config);
    const params = client.callbackParams(url);
    try {
      const tokens = await client.callback(redirectUri, params, { state: params.state });
      return client.userinfo<OAuthProfile>(tokens.access_token || '');
    } catch (error: Error | any) {
      if (error.message.includes('unexpected JWT alg received')) {
        this.logger.warn(
          [
            'Algorithm mismatch. Make sure the signing algorithm is set correctly in the OAuth settings.',
            'Or, that you have specified a signing key in your OAuth provider.',
          ].join(' '),
        );
      }

      throw error;
    }
  }

  private async getOAuthClient(config: SystemConfig) {
    const { enabled, clientId, clientSecret, issuerUrl, signingAlgorithm } = config.oauth;

    if (!enabled) {
      throw new BadRequestException('OAuth2 is not enabled');
    }

    const metadata: ClientMetadata = {
      client_id: clientId,
      client_secret: clientSecret,
      response_types: ['code'],
    };

    try {
      const issuer = await Issuer.discover(issuerUrl);
      metadata.id_token_signed_response_alg = signingAlgorithm;

      return new issuer.Client(metadata);
    } catch (error: any | AggregateError) {
      this.logger.error(`Error in OAuth discovery: ${error}`, error?.stack, error?.errors);
      throw new InternalServerErrorException(`Error in OAuth discovery: ${error}`, { cause: error });
    }
  }

  private normalize(config: SystemConfig, redirectUri: string) {
    const isMobile = redirectUri.startsWith(MOBILE_REDIRECT);
    const { mobileRedirectUri, mobileOverrideEnabled } = config.oauth;
    if (isMobile && mobileOverrideEnabled && mobileRedirectUri) {
      return mobileRedirectUri;
    }
    return redirectUri;
  }

  private getBearerToken(headers: IncomingHttpHeaders): string | null {
    const [type, token] = (headers.authorization || '').split(' ');
    if (type.toLowerCase() === 'bearer') {
      return token;
    }

    return null;
  }

  private getCookieToken(headers: IncomingHttpHeaders): string | null {
    const cookies = cookieParser.parse(headers.cookie || '');
    return cookies[IMMICH_ACCESS_COOKIE] || null;
  }

  async validateSharedLink(key: string | string[]): Promise<AuthDto> {
    key = Array.isArray(key) ? key[0] : key;
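
    // A hex-encoded key is exactly 100 characters; any other length is treated as base64url.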
    const bytes = Buffer.from(key, key.length === 100 ? 'hex' : 'base64url');
    const sharedLink = await this.sharedLinkRepository.getByKey(bytes);
    if (sharedLink && (!sharedLink.expiresAt || new Date(sharedLink.expiresAt) > new Date())) {
      const user = sharedLink.user;
      if (user) {
        return { user, sharedLink };
      }
    }
    throw new UnauthorizedException('Invalid share key');
  }

  private async validateApiKey(key: string): Promise<AuthDto> {
    const hashedKey = this.cryptoRepository.hashSha256(key);
    const apiKey = await this.keyRepository.getKey(hashedKey);
    if (apiKey?.user) {
      return { user: apiKey.user, apiKey };
    }

    throw new UnauthorizedException('Invalid API key');
  }

  private validatePassword(inputPassword: string, user: UserEntity): boolean {
    if (!user || !user.password) {
      return false;
    }
    return this.cryptoRepository.compareBcrypt(inputPassword, user.password);
  }

  private async validateUserToken(tokenValue: string): Promise<AuthDto> {
    const hashedToken = this.cryptoRepository.hashSha256(tokenValue);
    let userToken = await this.userTokenRepository.getByToken(hashedToken);

    if (userToken?.user) {
      const now = DateTime.now();
      const updatedAt = DateTime.fromJSDate(userToken.updatedAt);
      const diff = now.diff(updatedAt, ['hours']);
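      // Sliding expiration: refresh updatedAt at most once an hour so a write is not issued on every request.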
      if (diff.hours > 1) {
        userToken = await this.userTokenRepository.save({ ...userToken, updatedAt: new Date() });
      }

      return { user: userToken.user, userToken };
    }

    throw new UnauthorizedException('Invalid user token');
  }

  private async createLoginResponse(user: UserEntity, authType: AuthType, loginDetails: LoginDetails) {
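    // Only a SHA-256 hash of the session key is persisted; the raw key is returned to the client exactly once.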
    const key = this.cryptoRepository.randomBytes(32).toString('base64').replaceAll(/\W/g, '');
    const token = this.cryptoRepository.hashSha256(key);

    await this.userTokenRepository.create({
      token,
      user,
      deviceOS: loginDetails.deviceOS,
      deviceType: loginDetails.deviceType,
    });

    const response = mapLoginResponse(user, key);
    const cookie = this.getCookies(response, authType, loginDetails);
    return { response, cookie };
  }

  private getCookies(loginResponse: LoginResponseDto, authType: AuthType, { isSecure }: LoginDetails) {
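    // 400 days appears to match the cap modern browsers (e.g. Chrome) place on cookie Max-Age.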
    const maxAge = 400 * 24 * 3600; // 400 days

    let authTypeCookie = '';
    let accessTokenCookie = '';
    let isAuthenticatedCookie = '';

    if (isSecure) {
      accessTokenCookie = `${IMMICH_ACCESS_COOKIE}=${loginResponse.accessToken}; HttpOnly; Secure; Path=/; Max-Age=${maxAge}; SameSite=Lax;`;
      authTypeCookie = `${IMMICH_AUTH_TYPE_COOKIE}=${authType}; HttpOnly; Secure; Path=/; Max-Age=${maxAge}; SameSite=Lax;`;
      isAuthenticatedCookie = `${IMMICH_IS_AUTHENTICATED}=true; Secure; Path=/; Max-Age=${maxAge}; SameSite=Lax;`;
    } else {
      accessTokenCookie = `${IMMICH_ACCESS_COOKIE}=${loginResponse.accessToken}; HttpOnly; Path=/; Max-Age=${maxAge}; SameSite=Lax;`;
      authTypeCookie = `${IMMICH_AUTH_TYPE_COOKIE}=${authType}; HttpOnly; Path=/; Max-Age=${maxAge}; SameSite=Lax;`;
      isAuthenticatedCookie = `${IMMICH_IS_AUTHENTICATED}=true; Path=/; Max-Age=${maxAge}; SameSite=Lax;`;
    }
    return [accessTokenCookie, authTypeCookie, isAuthenticatedCookie];
  }

  private getClaim<T>(profile: OAuthProfile, options: ClaimOptions<T>): T {
    const value = profile[options.key as keyof OAuthProfile];
    return options.isValid(value) ? (value as T) : options.default;
  }
}
@@ -0,0 +1,223 @@
import { Version, VersionType } from 'src/domain/domain.constant';
import { ImmichLogger } from 'src/infra/logger';
import { DatabaseExtension, IDatabaseRepository, VectorIndex } from 'src/interfaces/database.repository';
import { DatabaseService } from 'src/services/database.service';
import { newDatabaseRepositoryMock } from 'test/repositories/database.repository.mock';

describe(DatabaseService.name, () => {
  let sut: DatabaseService;
  let databaseMock: jest.Mocked<IDatabaseRepository>;

  beforeEach(() => {
    databaseMock = newDatabaseRepositoryMock();

    sut = new DatabaseService(databaseMock);
  });

  it('should work', () => {
    expect(sut).toBeDefined();
  });
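
  // The same init contract is exercised against both supported vector extensions (pgvecto.rs and pgvector).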
  describe.each([
    [{ vectorExt: DatabaseExtension.VECTORS, extName: 'pgvecto.rs', minVersion: new Version(0, 1, 1) }],
    [{ vectorExt: DatabaseExtension.VECTOR, extName: 'pgvector', minVersion: new Version(0, 5, 0) }],
  ] as const)('init', ({ vectorExt, extName, minVersion }) => {
    let fatalLog: jest.SpyInstance;
    let errorLog: jest.SpyInstance;
    let warnLog: jest.SpyInstance;

    beforeEach(() => {
      fatalLog = jest.spyOn(ImmichLogger.prototype, 'fatal');
      errorLog = jest.spyOn(ImmichLogger.prototype, 'error');
      warnLog = jest.spyOn(ImmichLogger.prototype, 'warn');
      databaseMock.getPreferredVectorExtension.mockReturnValue(vectorExt);
      databaseMock.getExtensionVersion.mockResolvedValue(minVersion);

      sut = new DatabaseService(databaseMock);

      sut.minVectorVersion = minVersion;
      sut.minVectorsVersion = minVersion;
      sut.vectorVersionPin = VersionType.MINOR;
      sut.vectorsVersionPin = VersionType.MINOR;
    });

    afterEach(() => {
      fatalLog.mockRestore();
      errorLog.mockRestore();
      warnLog.mockRestore();
    });

    it(`should resolve successfully if minimum supported PostgreSQL and ${extName} version are installed`, async () => {
      databaseMock.getPostgresVersion.mockResolvedValueOnce(new Version(14, 0, 0));

      await expect(sut.init()).resolves.toBeUndefined();

      expect(databaseMock.getPostgresVersion).toHaveBeenCalled();
      expect(databaseMock.createExtension).toHaveBeenCalledWith(vectorExt);
      expect(databaseMock.createExtension).toHaveBeenCalledTimes(1);
      expect(databaseMock.getExtensionVersion).toHaveBeenCalled();
      expect(databaseMock.runMigrations).toHaveBeenCalledTimes(1);
      expect(fatalLog).not.toHaveBeenCalled();
    });

    it('should throw an error if PostgreSQL version is below minimum supported version', async () => {
      databaseMock.getPostgresVersion.mockResolvedValueOnce(new Version(13, 0, 0));

      await expect(sut.init()).rejects.toThrow('PostgreSQL version is 13');

      expect(databaseMock.getPostgresVersion).toHaveBeenCalledTimes(1);
    });

    it(`should resolve successfully if minimum supported ${extName} version is installed`, async () => {
      await expect(sut.init()).resolves.toBeUndefined();

      expect(databaseMock.createExtension).toHaveBeenCalledWith(vectorExt);
      expect(databaseMock.createExtension).toHaveBeenCalledTimes(1);
      expect(databaseMock.runMigrations).toHaveBeenCalledTimes(1);
      expect(fatalLog).not.toHaveBeenCalled();
    });

    it(`should throw an error if ${extName} version is not installed even after createVectorExtension`, async () => {
      databaseMock.getExtensionVersion.mockResolvedValue(null);

      await expect(sut.init()).rejects.toThrow(`Unexpected: ${extName} extension is not installed.`);

      expect(databaseMock.createExtension).toHaveBeenCalledTimes(1);
      expect(databaseMock.runMigrations).not.toHaveBeenCalled();
    });

    it(`should throw an error if ${extName} version is below minimum supported version`, async () => {
      databaseMock.getExtensionVersion.mockResolvedValue(
        new Version(minVersion.major, minVersion.minor - 1, minVersion.patch),
      );

      await expect(sut.init()).rejects.toThrow(extName);

      expect(databaseMock.runMigrations).not.toHaveBeenCalled();
    });

    it.each([
      { type: VersionType.EQUAL, max: 'no', actual: 'patch' },
      { type: VersionType.PATCH, max: 'patch', actual: 'minor' },
      { type: VersionType.MINOR, max: 'minor', actual: 'major' },
    ] as const)(
      `should throw an error if $max upgrade from min version is allowed and ${extName} version is $actual`,
      async ({ type, actual }) => {
        const version = new Version(minVersion.major, minVersion.minor, minVersion.patch);
        version[actual] = minVersion[actual] + 1;
        databaseMock.getExtensionVersion.mockResolvedValue(version);
        if (vectorExt === DatabaseExtension.VECTOR) {
          sut.minVectorVersion = minVersion;
          sut.vectorVersionPin = type;
        } else {
          sut.minVectorsVersion = minVersion;
          sut.vectorsVersionPin = type;
        }

        await expect(sut.init()).rejects.toThrow(extName);

        expect(databaseMock.runMigrations).not.toHaveBeenCalled();
      },
    );

    it(`should throw an error if ${extName} version is a nightly`, async () => {
      databaseMock.getExtensionVersion.mockResolvedValue(new Version(0, 0, 0));

      await expect(sut.init()).rejects.toThrow(extName);

      expect(databaseMock.createExtension).toHaveBeenCalledTimes(1);
      expect(databaseMock.runMigrations).not.toHaveBeenCalled();
    });

    it(`should throw error if ${extName} extension could not be created`, async () => {
      databaseMock.createExtension.mockRejectedValue(new Error('Failed to create extension'));

      await expect(sut.init()).rejects.toThrow('Failed to create extension');

      expect(fatalLog).toHaveBeenCalledTimes(1);
      expect(databaseMock.createExtension).toHaveBeenCalledTimes(1);
      expect(databaseMock.runMigrations).not.toHaveBeenCalled();
    });

    it(`should update ${extName} if a newer version is available`, async () => {
      const version = new Version(minVersion.major, minVersion.minor + 1, minVersion.patch);
      databaseMock.getAvailableExtensionVersion.mockResolvedValue(version);

      await expect(sut.init()).resolves.toBeUndefined();

      expect(databaseMock.updateVectorExtension).toHaveBeenCalledWith(vectorExt, version);
      expect(databaseMock.updateVectorExtension).toHaveBeenCalledTimes(1);
      expect(databaseMock.runMigrations).toHaveBeenCalledTimes(1);
      expect(fatalLog).not.toHaveBeenCalled();
    });

    it(`should not update ${extName} if a newer version is higher than the maximum`, async () => {
      const version = new Version(minVersion.major + 1, minVersion.minor, minVersion.patch);
      databaseMock.getAvailableExtensionVersion.mockResolvedValue(version);

      await expect(sut.init()).resolves.toBeUndefined();

      expect(databaseMock.updateVectorExtension).not.toHaveBeenCalled();
      expect(databaseMock.runMigrations).toHaveBeenCalledTimes(1);
      expect(fatalLog).not.toHaveBeenCalled();
    });

    it(`should warn if attempted to update ${extName} and failed`, async () => {
      const version = new Version(minVersion.major, minVersion.minor, minVersion.patch + 1);
      databaseMock.getAvailableExtensionVersion.mockResolvedValue(version);
      databaseMock.updateVectorExtension.mockRejectedValue(new Error('Failed to update extension'));

      await expect(sut.init()).resolves.toBeUndefined();

      expect(warnLog).toHaveBeenCalledTimes(1);
      expect(warnLog.mock.calls[0][0]).toContain(extName);
      expect(errorLog).toHaveBeenCalledTimes(1);
      expect(fatalLog).not.toHaveBeenCalled();
      expect(databaseMock.updateVectorExtension).toHaveBeenCalledWith(vectorExt, version);
      expect(databaseMock.runMigrations).toHaveBeenCalledTimes(1);
    });

    it(`should warn if ${extName} update requires restart`, async () => {
      const version = new Version(minVersion.major, minVersion.minor, minVersion.patch + 1);
      databaseMock.getAvailableExtensionVersion.mockResolvedValue(version);
      databaseMock.updateVectorExtension.mockResolvedValue({ restartRequired: true });

      await expect(sut.init()).resolves.toBeUndefined();

      expect(warnLog).toHaveBeenCalledTimes(1);
      expect(warnLog.mock.calls[0][0]).toContain(extName);
      expect(databaseMock.updateVectorExtension).toHaveBeenCalledWith(vectorExt, version);
      expect(databaseMock.runMigrations).toHaveBeenCalledTimes(1);
      expect(fatalLog).not.toHaveBeenCalled();
    });

    it.each([{ index: VectorIndex.CLIP }, { index: VectorIndex.FACE }])(
      `should reindex $index if necessary`,
      async ({ index }) => {
        databaseMock.shouldReindex.mockImplementation((indexArg) => Promise.resolve(indexArg === index));

        await expect(sut.init()).resolves.toBeUndefined();

        expect(databaseMock.shouldReindex).toHaveBeenCalledWith(index);
        expect(databaseMock.shouldReindex).toHaveBeenCalledTimes(2);
        expect(databaseMock.reindex).toHaveBeenCalledWith(index);
        expect(databaseMock.reindex).toHaveBeenCalledTimes(1);
        expect(databaseMock.runMigrations).toHaveBeenCalledTimes(1);
        expect(fatalLog).not.toHaveBeenCalled();
      },
    );

    it.each([{ index: VectorIndex.CLIP }, { index: VectorIndex.FACE }])(
      `should not reindex $index if not necessary`,
      async () => {
        databaseMock.shouldReindex.mockResolvedValue(false);

        await expect(sut.init()).resolves.toBeUndefined();

        expect(databaseMock.shouldReindex).toHaveBeenCalledTimes(2);
        expect(databaseMock.reindex).not.toHaveBeenCalled();
        expect(databaseMock.runMigrations).toHaveBeenCalledTimes(1);
        expect(fatalLog).not.toHaveBeenCalled();
      },
    );
  });
});
@@ -0,0 +1,158 @@
import { Inject, Injectable } from '@nestjs/common';
import { Version, VersionType } from 'src/domain/domain.constant';
import { ImmichLogger } from 'src/infra/logger';
import {
  DatabaseExtension,
  DatabaseLock,
  IDatabaseRepository,
  VectorExtension,
  VectorIndex,
  extName,
} from 'src/interfaces/database.repository';

@Injectable()
export class DatabaseService {
  private logger = new ImmichLogger(DatabaseService.name);
  private vectorExt: VectorExtension;
  minPostgresVersion = 14;
  minVectorsVersion = new Version(0, 2, 0);
  vectorsVersionPin = VersionType.MINOR;
  minVectorVersion = new Version(0, 5, 0);
  vectorVersionPin = VersionType.MAJOR;

  constructor(@Inject(IDatabaseRepository) private databaseRepository: IDatabaseRepository) {
    this.vectorExt = this.databaseRepository.getPreferredVectorExtension();
  }

  async init() {
    await this.assertPostgresql();
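    // All extension setup and migrations run under the migrations lock so concurrent server instances do not race.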
    await this.databaseRepository.withLock(DatabaseLock.Migrations, async () => {
      await this.createVectorExtension();
      await this.updateVectorExtension();
      await this.assertVectorExtension();

      try {
        if (await this.databaseRepository.shouldReindex(VectorIndex.CLIP)) {
          await this.databaseRepository.reindex(VectorIndex.CLIP);
        }

        if (await this.databaseRepository.shouldReindex(VectorIndex.FACE)) {
          await this.databaseRepository.reindex(VectorIndex.FACE);
        }
      } catch (error) {
        this.logger.warn(
          'Could not run vector reindexing checks. If the extension was updated, please restart the Postgres instance.',
        );
        throw error;
      }

      await this.databaseRepository.runMigrations();
    });
  }

  private async assertPostgresql() {
    const { major } = await this.databaseRepository.getPostgresVersion();
    if (major < this.minPostgresVersion) {
      throw new Error(`
        The PostgreSQL version is ${major}, which is older than the minimum supported version ${this.minPostgresVersion}.
        Please upgrade to this version or later.`);
    }
  }

  private async createVectorExtension() {
    try {
      await this.databaseRepository.createExtension(this.vectorExt);
    } catch (error) {
      const otherExt =
        this.vectorExt === DatabaseExtension.VECTORS ? DatabaseExtension.VECTOR : DatabaseExtension.VECTORS;
      this.logger.fatal(`
        Failed to activate ${extName[this.vectorExt]} extension.
        Please ensure the Postgres instance has ${extName[this.vectorExt]} installed.

        If the Postgres instance already has ${extName[this.vectorExt]} installed, Immich may not have the necessary permissions to activate it.
        In this case, please run 'CREATE EXTENSION IF NOT EXISTS ${this.vectorExt}' manually as a superuser.
        See https://immich.app/docs/guides/database-queries for how to query the database.

        Alternatively, if your Postgres instance has ${extName[otherExt]}, you may use this instead by setting the environment variable 'DB_VECTOR_EXTENSION=${extName[otherExt]}'.
        Note that switching between the two extensions after a successful startup is not supported.
        The exception is if your version of Immich prior to upgrading was 1.90.2 or earlier.
        In this case, you may set either extension now, but you will not be able to switch to the other extension following a successful startup.
      `);
      throw error;
    }
  }

  private async updateVectorExtension() {
    const [version, availableVersion] = await Promise.all([
      this.databaseRepository.getExtensionVersion(this.vectorExt),
      this.databaseRepository.getAvailableExtensionVersion(this.vectorExt),
    ]);
    if (version == null) {
      throw new Error(`Unexpected: ${extName[this.vectorExt]} extension is not installed.`);
    }

    if (availableVersion == null) {
      return;
    }

    const maxVersion = this.vectorExt === DatabaseExtension.VECTOR ? this.vectorVersionPin : this.vectorsVersionPin;
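    // Skip when the available version is not newer, or when the release jump exceeds the pinned version type.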
    const isNewer = availableVersion.isNewerThan(version);
    if (isNewer == null || isNewer > maxVersion) {
      return;
    }

    try {
      this.logger.log(`Updating ${extName[this.vectorExt]} extension to ${availableVersion}`);
      const { restartRequired } = await this.databaseRepository.updateVectorExtension(this.vectorExt, availableVersion);
      if (restartRequired) {
        this.logger.warn(`
          The ${extName[this.vectorExt]} extension has been updated to ${availableVersion}.
          Please restart the Postgres instance to complete the update.`);
      }
    } catch (error) {
      this.logger.warn(`
        The ${extName[this.vectorExt]} extension version is ${version}, but ${availableVersion} is available.
        Immich attempted to update the extension, but failed to do so.
        This may be because Immich does not have the necessary permissions to update the extension.

        Please run 'ALTER EXTENSION ${this.vectorExt} UPDATE' manually as a superuser.
        See https://immich.app/docs/guides/database-queries for how to query the database.`);
      this.logger.error(error);
    }
  }

  private async assertVectorExtension() {
    const version = await this.databaseRepository.getExtensionVersion(this.vectorExt);
    if (version == null) {
      throw new Error(`Unexpected: The ${extName[this.vectorExt]} extension is not installed.`);
    }

    if (version.isEqual(new Version(0, 0, 0))) {
      throw new Error(`
        The ${extName[this.vectorExt]} extension version is ${version}, which means it is a nightly release.

        Please run 'DROP EXTENSION IF EXISTS ${this.vectorExt}' and switch to a release version.
        See https://immich.app/docs/guides/database-queries for how to query the database.`);
    }

    const minVersion = this.vectorExt === DatabaseExtension.VECTOR ? this.minVectorVersion : this.minVectorsVersion;
    const maxVersion = this.vectorExt === DatabaseExtension.VECTOR ? this.vectorVersionPin : this.vectorsVersionPin;

    if (version.isOlderThan(minVersion) || version.isNewerThan(minVersion) > maxVersion) {
      const allowedReleaseType = maxVersion === VersionType.MAJOR ? '' : ` ${VersionType[maxVersion].toLowerCase()}`;
      const releases =
        maxVersion === VersionType.EQUAL
          ? minVersion.toString()
          : `${minVersion} and later${allowedReleaseType} releases`;

      throw new Error(`
        The ${extName[this.vectorExt]} extension version is ${version}, but Immich only supports ${releases}.

        If the Postgres instance already has a compatible version installed, Immich may not have the necessary permissions to activate it.
        In this case, please run 'ALTER EXTENSION ${this.vectorExt} UPDATE' manually as a superuser.
        See https://immich.app/docs/guides/database-queries for how to query the database.

        Otherwise, please update the version of ${extName[this.vectorExt]} in the Postgres instance to a compatible version.`);
    }
  }
}
@@ -0,0 +1,246 @@
import { BadRequestException } from '@nestjs/common';
import { when } from 'jest-when';
import { DownloadResponseDto } from 'src/dtos/download.dto';
import { IAssetRepository } from 'src/interfaces/asset.repository';
import { IStorageRepository } from 'src/interfaces/storage.repository';
import { DownloadService } from 'src/services/download.service';
import { CacheControl, ImmichFileResponse } from 'src/utils';
import { assetStub } from 'test/fixtures/asset.stub';
import { authStub } from 'test/fixtures/auth.stub';
import { IAccessRepositoryMock, newAccessRepositoryMock } from 'test/repositories/access.repository.mock';
import { newAssetRepositoryMock } from 'test/repositories/asset.repository.mock';
import { newStorageRepositoryMock } from 'test/repositories/storage.repository.mock';
import { Readable } from 'typeorm/platform/PlatformTools.js';

const downloadResponse: DownloadResponseDto = {
  totalSize: 105_000,
  archives: [
    {
      assetIds: ['asset-id', 'asset-id'],
      size: 105_000,
    },
  ],
};

describe(DownloadService.name, () => {
  let sut: DownloadService;
  let accessMock: IAccessRepositoryMock;
  let assetMock: jest.Mocked<IAssetRepository>;
  let storageMock: jest.Mocked<IStorageRepository>;

  beforeEach(() => {
    accessMock = newAccessRepositoryMock();
    assetMock = newAssetRepositoryMock();
    storageMock = newStorageRepositoryMock();

    sut = new DownloadService(accessMock, assetMock, storageMock);
  });

  it('should work', () => {
    expect(sut).toBeDefined();
  });

  describe('downloadFile', () => {
    it('should require the asset.download permission', async () => {
      await expect(sut.downloadFile(authStub.admin, 'asset-1')).rejects.toBeInstanceOf(BadRequestException);

      expect(accessMock.asset.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['asset-1']));
      expect(accessMock.asset.checkAlbumAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['asset-1']));
      expect(accessMock.asset.checkPartnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['asset-1']));
    });

    it('should throw an error if the asset is not found', async () => {
      accessMock.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1']));
      assetMock.getByIds.mockResolvedValue([]);

      await expect(sut.downloadFile(authStub.admin, 'asset-1')).rejects.toBeInstanceOf(BadRequestException);

      expect(assetMock.getByIds).toHaveBeenCalledWith(['asset-1']);
    });

    it('should throw an error if the asset is offline', async () => {
      accessMock.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1']));
      assetMock.getByIds.mockResolvedValue([assetStub.offline]);

      await expect(sut.downloadFile(authStub.admin, 'asset-1')).rejects.toBeInstanceOf(BadRequestException);

      expect(assetMock.getByIds).toHaveBeenCalledWith(['asset-1']);
    });

    it('should download a file', async () => {
      accessMock.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1']));
      assetMock.getByIds.mockResolvedValue([assetStub.image]);

      await expect(sut.downloadFile(authStub.admin, 'asset-1')).resolves.toEqual(
        new ImmichFileResponse({
          path: '/original/path.jpg',
          contentType: 'image/jpeg',
          cacheControl: CacheControl.NONE,
        }),
      );
    });

    it('should download an archive', async () => {
      const archiveMock = {
        addFile: jest.fn(),
        finalize: jest.fn(),
        stream: new Readable(),
      };

      accessMock.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1', 'asset-2']));
      assetMock.getByIds.mockResolvedValue([
        { ...assetStub.noResizePath, id: 'asset-1' },
        { ...assetStub.noWebpPath, id: 'asset-2' },
      ]);
      storageMock.createZipStream.mockReturnValue(archiveMock);

      await expect(sut.downloadArchive(authStub.admin, { assetIds: ['asset-1', 'asset-2'] })).resolves.toEqual({
        stream: archiveMock.stream,
      });

      expect(archiveMock.addFile).toHaveBeenCalledTimes(2);
      expect(archiveMock.addFile).toHaveBeenNthCalledWith(1, 'upload/library/IMG_123.jpg', 'IMG_123.jpg');
      expect(archiveMock.addFile).toHaveBeenNthCalledWith(2, 'upload/library/IMG_456.jpg', 'IMG_456.jpg');
    });
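
    // Duplicate original filenames are disambiguated inside the archive as name+1.ext, name+2.ext, ...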
    it('should handle duplicate file names', async () => {
      const archiveMock = {
        addFile: jest.fn(),
        finalize: jest.fn(),
        stream: new Readable(),
      };

      accessMock.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1', 'asset-2']));
      assetMock.getByIds.mockResolvedValue([
        { ...assetStub.noResizePath, id: 'asset-1' },
        { ...assetStub.noResizePath, id: 'asset-2' },
      ]);
      storageMock.createZipStream.mockReturnValue(archiveMock);

      await expect(sut.downloadArchive(authStub.admin, { assetIds: ['asset-1', 'asset-2'] })).resolves.toEqual({
        stream: archiveMock.stream,
      });

      expect(archiveMock.addFile).toHaveBeenCalledTimes(2);
      expect(archiveMock.addFile).toHaveBeenNthCalledWith(1, 'upload/library/IMG_123.jpg', 'IMG_123.jpg');
      expect(archiveMock.addFile).toHaveBeenNthCalledWith(2, 'upload/library/IMG_123.jpg', 'IMG_123+1.jpg');
    });

    it('should be deterministic', async () => {
      const archiveMock = {
        addFile: jest.fn(),
        finalize: jest.fn(),
        stream: new Readable(),
      };

      accessMock.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1', 'asset-2']));
      assetMock.getByIds.mockResolvedValue([
        { ...assetStub.noResizePath, id: 'asset-2' },
        { ...assetStub.noResizePath, id: 'asset-1' },
      ]);
      storageMock.createZipStream.mockReturnValue(archiveMock);

      await expect(sut.downloadArchive(authStub.admin, { assetIds: ['asset-1', 'asset-2'] })).resolves.toEqual({
        stream: archiveMock.stream,
      });

      expect(archiveMock.addFile).toHaveBeenCalledTimes(2);
      expect(archiveMock.addFile).toHaveBeenNthCalledWith(1, 'upload/library/IMG_123.jpg', 'IMG_123.jpg');
      expect(archiveMock.addFile).toHaveBeenNthCalledWith(2, 'upload/library/IMG_123.jpg', 'IMG_123+1.jpg');
    });
  });

  describe('getDownloadInfo', () => {
    it('should throw an error for an invalid dto', async () => {
      await expect(sut.getDownloadInfo(authStub.admin, {})).rejects.toBeInstanceOf(BadRequestException);
    });

    it('should return a list of archives (assetIds)', async () => {
      accessMock.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1', 'asset-2']));
      assetMock.getByIds.mockResolvedValue([assetStub.image, assetStub.video]);

      const assetIds = ['asset-1', 'asset-2'];
      await expect(sut.getDownloadInfo(authStub.admin, { assetIds })).resolves.toEqual(downloadResponse);

      expect(assetMock.getByIds).toHaveBeenCalledWith(['asset-1', 'asset-2'], { exifInfo: true });
    });

    it('should return a list of archives (albumId)', async () => {
      accessMock.album.checkOwnerAccess.mockResolvedValue(new Set(['album-1']));
      assetMock.getByAlbumId.mockResolvedValue({
        items: [assetStub.image, assetStub.video],
        hasNextPage: false,
      });

      await expect(sut.getDownloadInfo(authStub.admin, { albumId: 'album-1' })).resolves.toEqual(downloadResponse);

      expect(accessMock.album.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['album-1']));
      expect(assetMock.getByAlbumId).toHaveBeenCalledWith({ take: 2500, skip: 0 }, 'album-1');
    });

    it('should return a list of archives (userId)', async () => {
      accessMock.library.checkOwnerAccess.mockResolvedValue(new Set([authStub.admin.user.id]));
      assetMock.getByUserId.mockResolvedValue({
        items: [assetStub.image, assetStub.video],
        hasNextPage: false,
      });

      await expect(sut.getDownloadInfo(authStub.admin, { userId: authStub.admin.user.id })).resolves.toEqual(
        downloadResponse,
      );

      expect(assetMock.getByUserId).toHaveBeenCalledWith({ take: 2500, skip: 0 }, authStub.admin.user.id, {
        isVisible: true,
      });
    });

    it('should split archives by size', async () => {
      accessMock.library.checkOwnerAccess.mockResolvedValue(new Set([authStub.admin.user.id]));

      assetMock.getByUserId.mockResolvedValue({
        items: [
          { ...assetStub.image, id: 'asset-1' },
          { ...assetStub.video, id: 'asset-2' },
          { ...assetStub.withLocation, id: 'asset-3' },
          { ...assetStub.noWebpPath, id: 'asset-4' },
        ],
        hasNextPage: false,
      });

      await expect(
        sut.getDownloadInfo(authStub.admin, {
          userId: authStub.admin.user.id,
          archiveSize: 30_000,
        }),
      ).resolves.toEqual({
        totalSize: 251_456,
        archives: [
          { assetIds: ['asset-1', 'asset-2'], size: 105_000 },
          { assetIds: ['asset-3', 'asset-4'], size: 146_456 },
        ],
      });
    });

    it('should include the video portion of a live photo', async () => {
      const assetIds = [assetStub.livePhotoStillAsset.id];

      accessMock.asset.checkOwnerAccess.mockResolvedValue(new Set(assetIds));
      when(assetMock.getByIds)
        .calledWith([assetStub.livePhotoStillAsset.id], { exifInfo: true })
        .mockResolvedValue([assetStub.livePhotoStillAsset]);
      when(assetMock.getByIds)
        .calledWith([assetStub.livePhotoMotionAsset.id], { exifInfo: true })
        .mockResolvedValue([assetStub.livePhotoMotionAsset]);

      await expect(sut.getDownloadInfo(authStub.admin, { assetIds })).resolves.toEqual({
        totalSize: 125_000,
        archives: [
          {
            assetIds: [assetStub.livePhotoStillAsset.id, assetStub.livePhotoMotionAsset.id],
            size: 125_000,
          },
        ],
      });
    });
  });
});
@@ -0,0 +1,139 @@
import { BadRequestException, Inject, Injectable } from '@nestjs/common';
import { parse } from 'node:path';
import { AccessCore, Permission } from 'src/cores/access.core';
import { mimeTypes } from 'src/domain/domain.constant';
import { AssetIdsDto } from 'src/dtos/asset.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { DownloadArchiveInfo, DownloadInfoDto, DownloadResponseDto } from 'src/dtos/download.dto';
import { AssetEntity } from 'src/entities/asset.entity';
import { IAccessRepository } from 'src/interfaces/access.repository';
import { IAssetRepository } from 'src/interfaces/asset.repository';
import { IStorageRepository, ImmichReadStream } from 'src/interfaces/storage.repository';
import { CacheControl, HumanReadableSize, ImmichFileResponse, usePagination } from 'src/utils';

@Injectable()
export class DownloadService {
  private access: AccessCore;

  constructor(
    @Inject(IAccessRepository) accessRepository: IAccessRepository,
    @Inject(IAssetRepository) private assetRepository: IAssetRepository,
    @Inject(IStorageRepository) private storageRepository: IStorageRepository,
  ) {
    this.access = AccessCore.create(accessRepository);
  }

  async downloadFile(auth: AuthDto, id: string): Promise<ImmichFileResponse> {
    await this.access.requirePermission(auth, Permission.ASSET_DOWNLOAD, id);

    const [asset] = await this.assetRepository.getByIds([id]);
    if (!asset) {
      throw new BadRequestException('Asset not found');
    }

    if (asset.isOffline) {
      throw new BadRequestException('Asset is offline');
    }

    return new ImmichFileResponse({
      path: asset.originalPath,
      contentType: mimeTypes.lookup(asset.originalPath),
      cacheControl: CacheControl.NONE,
    });
  }

  async getDownloadInfo(auth: AuthDto, dto: DownloadInfoDto): Promise<DownloadResponseDto> {
    const targetSize = dto.archiveSize || HumanReadableSize.GiB * 4;
    const archives: DownloadArchiveInfo[] = [];
    let archive: DownloadArchiveInfo = { size: 0, assetIds: [] };

    const assetPagination = await this.getDownloadAssets(auth, dto);
    for await (const assets of assetPagination) {
      // motion part of live photos
      const motionIds = assets.map((asset) => asset.livePhotoVideoId).filter<string>((id): id is string => !!id);
      if (motionIds.length > 0) {
        assets.push(...(await this.assetRepository.getByIds(motionIds, { exifInfo: true })));
      }
|
||||
|
||||
for (const asset of assets) {
|
||||
archive.size += Number(asset.exifInfo?.fileSizeInByte || 0);
|
||||
archive.assetIds.push(asset.id);
|
||||
|
||||
if (archive.size > targetSize) {
|
||||
archives.push(archive);
|
||||
archive = { size: 0, assetIds: [] };
|
||||
}
|
||||
}
|
||||
|
||||
if (archive.assetIds.length > 0) {
|
||||
archives.push(archive);
|
||||
}
|
||||
}
|
||||
|
||||
let totalSize = 0;
|
||||
for (const archive of archives) {
|
||||
totalSize += archive.size;
|
||||
}
|
||||
|
||||
return { totalSize, archives };
|
||||
}
|
||||
|
||||
async downloadArchive(auth: AuthDto, dto: AssetIdsDto): Promise<ImmichReadStream> {
|
||||
await this.access.requirePermission(auth, Permission.ASSET_DOWNLOAD, dto.assetIds);
|
||||
|
||||
const zip = this.storageRepository.createZipStream();
|
||||
const assets = await this.assetRepository.getByIds(dto.assetIds);
|
||||
const assetMap = new Map(assets.map((asset) => [asset.id, asset]));
|
||||
const paths: Record<string, number> = {};
|
||||
|
||||
for (const assetId of dto.assetIds) {
|
||||
const asset = assetMap.get(assetId);
|
||||
if (!asset) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const { originalPath, originalFileName } = asset;
|
||||
|
||||
let filename = originalFileName;
|
||||
const count = paths[filename] || 0;
|
||||
paths[filename] = count + 1;
|
||||
if (count !== 0) {
|
||||
const parsedFilename = parse(originalFileName);
|
||||
filename = `${parsedFilename.name}+${count}${parsedFilename.ext}`;
|
||||
}
|
||||
|
||||
zip.addFile(originalPath, filename);
|
||||
}
|
||||
|
||||
void zip.finalize();
|
||||
|
||||
return { stream: zip.stream };
|
||||
}
|
||||
|
||||
private async getDownloadAssets(auth: AuthDto, dto: DownloadInfoDto): Promise<AsyncGenerator<AssetEntity[]>> {
|
||||
const PAGINATION_SIZE = 2500;
|
||||
|
||||
if (dto.assetIds) {
|
||||
const assetIds = dto.assetIds;
|
||||
await this.access.requirePermission(auth, Permission.ASSET_DOWNLOAD, assetIds);
|
||||
const assets = await this.assetRepository.getByIds(assetIds, { exifInfo: true });
|
||||
return usePagination(PAGINATION_SIZE, () => ({ hasNextPage: false, items: assets }));
|
||||
}
|
||||
|
||||
if (dto.albumId) {
|
||||
const albumId = dto.albumId;
|
||||
await this.access.requirePermission(auth, Permission.ALBUM_DOWNLOAD, albumId);
|
||||
return usePagination(PAGINATION_SIZE, (pagination) => this.assetRepository.getByAlbumId(pagination, albumId));
|
||||
}
|
||||
|
||||
if (dto.userId) {
|
||||
const userId = dto.userId;
|
||||
await this.access.requirePermission(auth, Permission.TIMELINE_DOWNLOAD, userId);
|
||||
return usePagination(PAGINATION_SIZE, (pagination) =>
|
||||
this.assetRepository.getByUserId(pagination, userId, { isVisible: true }),
|
||||
);
|
||||
}
|
||||
|
||||
throw new BadRequestException('assetIds, albumId, or userId is required');
|
||||
}
|
||||
}
|
||||
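// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the diff above): the greedy chunking used by
// getDownloadInfo, shown in isolation. An archive is only closed *after* its
// running size exceeds the target, so a single archive can overshoot — which is
// why the spec's 30_000-byte target still yields a 105_000-byte first archive.
// The helper name and the Item shape are assumptions made for this demo.
type Item = { id: string; size: number };

function splitIntoArchives(items: Item[], targetSize: number): { assetIds: string[]; size: number }[] {
  const archives: { assetIds: string[]; size: number }[] = [];
  let current: { assetIds: string[]; size: number } = { assetIds: [], size: 0 };

  for (const { id, size } of items) {
    // Add first, check after: the archive that crosses the target keeps the
    // asset that pushed it over, then a fresh archive is started.
    current.assetIds.push(id);
    current.size += size;
    if (current.size > targetSize) {
      archives.push(current);
      current = { assetIds: [], size: 0 };
    }
  }

  // Flush the final, partially-filled archive, mirroring the service code.
  if (current.assetIds.length > 0) {
    archives.push(current);
  }

  return archives;
}
// ---------------------------------------------------------------------------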
@@ -0,0 +1,379 @@
import { BadRequestException } from '@nestjs/common';
import { FeatureFlag, SystemConfigCore } from 'src/cores/system-config.core';
import { JobCommand, JobName, QueueName } from 'src/domain/job/job.constants';
import { SystemConfig, SystemConfigKey } from 'src/entities/system-config.entity';
import { IAssetRepository } from 'src/interfaces/asset.repository';
import { ICommunicationRepository } from 'src/interfaces/communication.repository';
import { IJobRepository, JobHandler, JobItem, JobStatus } from 'src/interfaces/job.repository';
import { IPersonRepository } from 'src/interfaces/person.repository';
import { ISystemConfigRepository } from 'src/interfaces/system-config.repository';
import { JobService } from 'src/services/job.service';
import { assetStub } from 'test/fixtures/asset.stub';
import { newAssetRepositoryMock } from 'test/repositories/asset.repository.mock';
import { newCommunicationRepositoryMock } from 'test/repositories/communication.repository.mock';
import { newJobRepositoryMock } from 'test/repositories/job.repository.mock';
import { newPersonRepositoryMock } from 'test/repositories/person.repository.mock';
import { newSystemConfigRepositoryMock } from 'test/repositories/system-config.repository.mock';

const makeMockHandlers = (status: JobStatus) => {
  const mock = jest.fn().mockResolvedValue(status);
  return Object.fromEntries(Object.values(JobName).map((jobName) => [jobName, mock])) as unknown as Record<
    JobName,
    JobHandler
  >;
};

describe(JobService.name, () => {
  let sut: JobService;
  let assetMock: jest.Mocked<IAssetRepository>;
  let configMock: jest.Mocked<ISystemConfigRepository>;
  let communicationMock: jest.Mocked<ICommunicationRepository>;
  let jobMock: jest.Mocked<IJobRepository>;
  let personMock: jest.Mocked<IPersonRepository>;

  beforeEach(() => {
    assetMock = newAssetRepositoryMock();
    configMock = newSystemConfigRepositoryMock();
    communicationMock = newCommunicationRepositoryMock();
    jobMock = newJobRepositoryMock();
    personMock = newPersonRepositoryMock();
    sut = new JobService(assetMock, communicationMock, jobMock, configMock, personMock);
  });

  it('should work', () => {
    expect(sut).toBeDefined();
  });

  describe('handleNightlyJobs', () => {
    it('should run the scheduled jobs', async () => {
      await sut.handleNightlyJobs();

      expect(jobMock.queueAll).toHaveBeenCalledWith([
        { name: JobName.ASSET_DELETION_CHECK },
        { name: JobName.USER_DELETE_CHECK },
        { name: JobName.PERSON_CLEANUP },
        { name: JobName.QUEUE_GENERATE_THUMBNAILS, data: { force: false } },
        { name: JobName.CLEAN_OLD_AUDIT_LOGS },
        { name: JobName.USER_SYNC_USAGE },
        { name: JobName.QUEUE_FACIAL_RECOGNITION, data: { force: false } },
      ]);
    });
  });

  describe('getAllJobStatus', () => {
    it('should get all job statuses', async () => {
      jobMock.getJobCounts.mockResolvedValue({
        active: 1,
        completed: 1,
        failed: 1,
        delayed: 1,
        waiting: 1,
        paused: 1,
      });
      jobMock.getQueueStatus.mockResolvedValue({
        isActive: true,
        isPaused: true,
      });

      const expectedJobStatus = {
        jobCounts: {
          active: 1,
          completed: 1,
          delayed: 1,
          failed: 1,
          waiting: 1,
          paused: 1,
        },
        queueStatus: {
          isActive: true,
          isPaused: true,
        },
      };

      await expect(sut.getAllJobsStatus()).resolves.toEqual({
        [QueueName.BACKGROUND_TASK]: expectedJobStatus,
        [QueueName.SMART_SEARCH]: expectedJobStatus,
        [QueueName.METADATA_EXTRACTION]: expectedJobStatus,
        [QueueName.SEARCH]: expectedJobStatus,
        [QueueName.STORAGE_TEMPLATE_MIGRATION]: expectedJobStatus,
        [QueueName.MIGRATION]: expectedJobStatus,
        [QueueName.THUMBNAIL_GENERATION]: expectedJobStatus,
        [QueueName.VIDEO_CONVERSION]: expectedJobStatus,
        [QueueName.FACE_DETECTION]: expectedJobStatus,
        [QueueName.FACIAL_RECOGNITION]: expectedJobStatus,
        [QueueName.SIDECAR]: expectedJobStatus,
        [QueueName.LIBRARY]: expectedJobStatus,
      });
    });
  });

  describe('handleCommand', () => {
    it('should handle a pause command', async () => {
      await sut.handleCommand(QueueName.METADATA_EXTRACTION, { command: JobCommand.PAUSE, force: false });

      expect(jobMock.pause).toHaveBeenCalledWith(QueueName.METADATA_EXTRACTION);
    });

    it('should handle a resume command', async () => {
      await sut.handleCommand(QueueName.METADATA_EXTRACTION, { command: JobCommand.RESUME, force: false });

      expect(jobMock.resume).toHaveBeenCalledWith(QueueName.METADATA_EXTRACTION);
    });

    it('should handle an empty command', async () => {
      await sut.handleCommand(QueueName.METADATA_EXTRACTION, { command: JobCommand.EMPTY, force: false });

      expect(jobMock.empty).toHaveBeenCalledWith(QueueName.METADATA_EXTRACTION);
    });

    it('should not start a job that is already running', async () => {
      jobMock.getQueueStatus.mockResolvedValue({ isActive: true, isPaused: false });

      await expect(
        sut.handleCommand(QueueName.VIDEO_CONVERSION, { command: JobCommand.START, force: false }),
      ).rejects.toBeInstanceOf(BadRequestException);

      expect(jobMock.queue).not.toHaveBeenCalled();
      expect(jobMock.queueAll).not.toHaveBeenCalled();
    });

    it('should handle a start video conversion command', async () => {
      jobMock.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });

      await sut.handleCommand(QueueName.VIDEO_CONVERSION, { command: JobCommand.START, force: false });

      expect(jobMock.queue).toHaveBeenCalledWith({ name: JobName.QUEUE_VIDEO_CONVERSION, data: { force: false } });
    });

    it('should handle a start storage template migration command', async () => {
      jobMock.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });

      await sut.handleCommand(QueueName.STORAGE_TEMPLATE_MIGRATION, { command: JobCommand.START, force: false });

      expect(jobMock.queue).toHaveBeenCalledWith({ name: JobName.STORAGE_TEMPLATE_MIGRATION });
    });

    it('should handle a start smart search command', async () => {
      jobMock.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });

      await sut.handleCommand(QueueName.SMART_SEARCH, { command: JobCommand.START, force: false });

      expect(jobMock.queue).toHaveBeenCalledWith({ name: JobName.QUEUE_SMART_SEARCH, data: { force: false } });
    });

    it('should handle a start metadata extraction command', async () => {
      jobMock.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });

      await sut.handleCommand(QueueName.METADATA_EXTRACTION, { command: JobCommand.START, force: false });

      expect(jobMock.queue).toHaveBeenCalledWith({ name: JobName.QUEUE_METADATA_EXTRACTION, data: { force: false } });
    });

    it('should handle a start sidecar command', async () => {
      jobMock.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });

      await sut.handleCommand(QueueName.SIDECAR, { command: JobCommand.START, force: false });

      expect(jobMock.queue).toHaveBeenCalledWith({ name: JobName.QUEUE_SIDECAR, data: { force: false } });
    });

    it('should handle a start thumbnail generation command', async () => {
      jobMock.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });

      await sut.handleCommand(QueueName.THUMBNAIL_GENERATION, { command: JobCommand.START, force: false });

      expect(jobMock.queue).toHaveBeenCalledWith({ name: JobName.QUEUE_GENERATE_THUMBNAILS, data: { force: false } });
    });

    it('should handle a start face detection command', async () => {
      jobMock.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });

      await sut.handleCommand(QueueName.FACE_DETECTION, { command: JobCommand.START, force: false });

      expect(jobMock.queue).toHaveBeenCalledWith({ name: JobName.QUEUE_FACE_DETECTION, data: { force: false } });
    });

    it('should handle a start facial recognition command', async () => {
      jobMock.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });

      await sut.handleCommand(QueueName.FACIAL_RECOGNITION, { command: JobCommand.START, force: false });

      expect(jobMock.queue).toHaveBeenCalledWith({ name: JobName.QUEUE_FACIAL_RECOGNITION, data: { force: false } });
    });

    it('should throw a bad request when an invalid queue is used', async () => {
      jobMock.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });

      await expect(
        sut.handleCommand(QueueName.BACKGROUND_TASK, { command: JobCommand.START, force: false }),
      ).rejects.toBeInstanceOf(BadRequestException);

      expect(jobMock.queue).not.toHaveBeenCalled();
      expect(jobMock.queueAll).not.toHaveBeenCalled();
    });
  });

  describe('init', () => {
    it('should register a handler for each queue', async () => {
      await sut.init(makeMockHandlers(JobStatus.SUCCESS));
      expect(configMock.load).toHaveBeenCalled();
      expect(jobMock.addHandler).toHaveBeenCalledTimes(Object.keys(QueueName).length);
    });

    it('should subscribe to config changes', async () => {
      await sut.init(makeMockHandlers(JobStatus.FAILED));

      SystemConfigCore.create(newSystemConfigRepositoryMock(false)).config$.next({
        job: {
          [QueueName.BACKGROUND_TASK]: { concurrency: 10 },
          [QueueName.SMART_SEARCH]: { concurrency: 10 },
          [QueueName.METADATA_EXTRACTION]: { concurrency: 10 },
          [QueueName.FACE_DETECTION]: { concurrency: 10 },
          [QueueName.SEARCH]: { concurrency: 10 },
          [QueueName.SIDECAR]: { concurrency: 10 },
          [QueueName.LIBRARY]: { concurrency: 10 },
          [QueueName.MIGRATION]: { concurrency: 10 },
          [QueueName.THUMBNAIL_GENERATION]: { concurrency: 10 },
          [QueueName.VIDEO_CONVERSION]: { concurrency: 10 },
        },
      } as SystemConfig);

      expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.BACKGROUND_TASK, 10);
      expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.SMART_SEARCH, 10);
      expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.METADATA_EXTRACTION, 10);
      expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.FACE_DETECTION, 10);
      expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.SIDECAR, 10);
      expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.LIBRARY, 10);
      expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.MIGRATION, 10);
      expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.THUMBNAIL_GENERATION, 10);
      expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.VIDEO_CONVERSION, 10);
    });

    const tests: Array<{ item: JobItem; jobs: JobName[] }> = [
      {
        item: { name: JobName.SIDECAR_SYNC, data: { id: 'asset-1' } },
        jobs: [JobName.METADATA_EXTRACTION],
      },
      {
        item: { name: JobName.SIDECAR_DISCOVERY, data: { id: 'asset-1' } },
        jobs: [JobName.METADATA_EXTRACTION],
      },
      {
        item: { name: JobName.METADATA_EXTRACTION, data: { id: 'asset-1' } },
        jobs: [JobName.LINK_LIVE_PHOTOS],
      },
      {
        item: { name: JobName.LINK_LIVE_PHOTOS, data: { id: 'asset-1' } },
        jobs: [JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE],
      },
      {
        item: { name: JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE, data: { id: 'asset-1', source: 'upload' } },
        jobs: [JobName.GENERATE_JPEG_THUMBNAIL],
      },
      {
        item: { name: JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE, data: { id: 'asset-1' } },
        jobs: [],
      },
      {
        item: { name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id: 'asset-1' } },
        jobs: [],
      },
      {
        item: { name: JobName.GENERATE_JPEG_THUMBNAIL, data: { id: 'asset-1' } },
        jobs: [JobName.GENERATE_WEBP_THUMBNAIL, JobName.GENERATE_THUMBHASH_THUMBNAIL],
      },
      {
        item: { name: JobName.GENERATE_JPEG_THUMBNAIL, data: { id: 'asset-1', source: 'upload' } },
        jobs: [
          JobName.GENERATE_WEBP_THUMBNAIL,
          JobName.GENERATE_THUMBHASH_THUMBNAIL,
          JobName.SMART_SEARCH,
          JobName.FACE_DETECTION,
          JobName.VIDEO_CONVERSION,
        ],
      },
      {
        item: { name: JobName.GENERATE_JPEG_THUMBNAIL, data: { id: 'asset-live-image', source: 'upload' } },
        jobs: [
          JobName.GENERATE_WEBP_THUMBNAIL,
          JobName.GENERATE_THUMBHASH_THUMBNAIL,
          JobName.SMART_SEARCH,
          JobName.FACE_DETECTION,
          JobName.VIDEO_CONVERSION,
        ],
      },
      {
        item: { name: JobName.SMART_SEARCH, data: { id: 'asset-1' } },
        jobs: [],
      },
      {
        item: { name: JobName.FACE_DETECTION, data: { id: 'asset-1' } },
        jobs: [],
      },
      {
        item: { name: JobName.FACIAL_RECOGNITION, data: { id: 'asset-1' } },
        jobs: [],
      },
    ];

    for (const { item, jobs } of tests) {
      it(`should queue ${jobs.length} jobs when a ${item.name} job finishes successfully`, async () => {
        if (item.name === JobName.GENERATE_JPEG_THUMBNAIL && item.data.source === 'upload') {
          if (item.data.id === 'asset-live-image') {
            assetMock.getByIds.mockResolvedValue([assetStub.livePhotoStillAsset]);
          } else {
            assetMock.getByIds.mockResolvedValue([assetStub.livePhotoMotionAsset]);
          }
        }

        await sut.init(makeMockHandlers(JobStatus.SUCCESS));
        await jobMock.addHandler.mock.calls[0][2](item);

        if (jobs.length > 1) {
          expect(jobMock.queueAll).toHaveBeenCalledWith(
            jobs.map((jobName) => ({ name: jobName, data: expect.anything() })),
          );
        } else {
          expect(jobMock.queue).toHaveBeenCalledTimes(jobs.length);
          for (const jobName of jobs) {
            expect(jobMock.queue).toHaveBeenCalledWith({ name: jobName, data: expect.anything() });
          }
        }
      });

      it(`should not queue any jobs when ${item.name} fails`, async () => {
        await sut.init(makeMockHandlers(JobStatus.FAILED));
        await jobMock.addHandler.mock.calls[0][2](item);

        expect(jobMock.queueAll).not.toHaveBeenCalled();
      });
    }

    const featureTests: Array<{ queue: QueueName; feature: FeatureFlag; configKey: SystemConfigKey }> = [
      {
        queue: QueueName.SMART_SEARCH,
        feature: FeatureFlag.SMART_SEARCH,
        configKey: SystemConfigKey.MACHINE_LEARNING_CLIP_ENABLED,
      },
      {
        queue: QueueName.FACE_DETECTION,
        feature: FeatureFlag.FACIAL_RECOGNITION,
        configKey: SystemConfigKey.MACHINE_LEARNING_FACIAL_RECOGNITION_ENABLED,
      },
      {
        queue: QueueName.FACIAL_RECOGNITION,
        feature: FeatureFlag.FACIAL_RECOGNITION,
        configKey: SystemConfigKey.MACHINE_LEARNING_FACIAL_RECOGNITION_ENABLED,
      },
    ];

    for (const { queue, feature, configKey } of featureTests) {
      it(`should throw an error if attempting to queue ${queue} when ${feature} is disabled`, async () => {
        configMock.load.mockResolvedValue([{ key: configKey, value: false }]);
        jobMock.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });

        await expect(sut.handleCommand(queue, { command: JobCommand.START, force: false })).rejects.toThrow();
      });
    }
  });
});
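// Note (illustrative, not part of the diff above): makeMockHandlers maps every
// JobName to a single shared jest.fn() resolving to the given JobStatus, so the
// handler registered via jobMock.addHandler can be invoked with any JobItem in
// the table-driven tests above without wiring a real handler per job.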
@@ -0,0 +1,284 @@
import { BadRequestException, Inject, Injectable } from '@nestjs/common';
import { FeatureFlag, SystemConfigCore } from 'src/cores/system-config.core';
import { ConcurrentQueueName, JobCommand, JobName, QueueName } from 'src/domain/job/job.constants';
import { mapAsset } from 'src/dtos/asset-response.dto';
import { AllJobStatusResponseDto, JobCommandDto, JobStatusDto } from 'src/dtos/job.dto';
import { AssetType } from 'src/entities/asset.entity';
import { ImmichLogger } from 'src/infra/logger';
import { IAssetRepository } from 'src/interfaces/asset.repository';
import { ClientEvent, ICommunicationRepository } from 'src/interfaces/communication.repository';
import { IJobRepository, JobHandler, JobItem, JobStatus, QueueCleanType } from 'src/interfaces/job.repository';
import { IPersonRepository } from 'src/interfaces/person.repository';
import { ISystemConfigRepository } from 'src/interfaces/system-config.repository';

@Injectable()
export class JobService {
  private logger = new ImmichLogger(JobService.name);
  private configCore: SystemConfigCore;

  constructor(
    @Inject(IAssetRepository) private assetRepository: IAssetRepository,
    @Inject(ICommunicationRepository) private communicationRepository: ICommunicationRepository,
    @Inject(IJobRepository) private jobRepository: IJobRepository,
    @Inject(ISystemConfigRepository) configRepository: ISystemConfigRepository,
    @Inject(IPersonRepository) private personRepository: IPersonRepository,
  ) {
    this.configCore = SystemConfigCore.create(configRepository);
  }

  async handleCommand(queueName: QueueName, dto: JobCommandDto): Promise<JobStatusDto> {
    this.logger.debug(`Handling command: queue=${queueName},force=${dto.force}`);

    switch (dto.command) {
      case JobCommand.START: {
        await this.start(queueName, dto);
        break;
      }

      case JobCommand.PAUSE: {
        await this.jobRepository.pause(queueName);
        break;
      }

      case JobCommand.RESUME: {
        await this.jobRepository.resume(queueName);
        break;
      }

      case JobCommand.EMPTY: {
        await this.jobRepository.empty(queueName);
        break;
      }

      case JobCommand.CLEAR_FAILED: {
        const failedJobs = await this.jobRepository.clear(queueName, QueueCleanType.FAILED);
        this.logger.debug(`Cleared failed jobs: ${failedJobs}`);
        break;
      }
    }

    return this.getJobStatus(queueName);
  }

  async getJobStatus(queueName: QueueName): Promise<JobStatusDto> {
    const [jobCounts, queueStatus] = await Promise.all([
      this.jobRepository.getJobCounts(queueName),
      this.jobRepository.getQueueStatus(queueName),
    ]);

    return { jobCounts, queueStatus };
  }

  async getAllJobsStatus(): Promise<AllJobStatusResponseDto> {
    const response = new AllJobStatusResponseDto();
    for (const queueName of Object.values(QueueName)) {
      response[queueName] = await this.getJobStatus(queueName);
    }
    return response;
  }

  private async start(name: QueueName, { force }: JobCommandDto): Promise<void> {
    const { isActive } = await this.jobRepository.getQueueStatus(name);
    if (isActive) {
      throw new BadRequestException(`Job is already running`);
    }

    switch (name) {
      case QueueName.VIDEO_CONVERSION: {
        return this.jobRepository.queue({ name: JobName.QUEUE_VIDEO_CONVERSION, data: { force } });
      }

      case QueueName.STORAGE_TEMPLATE_MIGRATION: {
        return this.jobRepository.queue({ name: JobName.STORAGE_TEMPLATE_MIGRATION });
      }

      case QueueName.MIGRATION: {
        return this.jobRepository.queue({ name: JobName.QUEUE_MIGRATION });
      }

      case QueueName.SMART_SEARCH: {
        await this.configCore.requireFeature(FeatureFlag.SMART_SEARCH);
        return this.jobRepository.queue({ name: JobName.QUEUE_SMART_SEARCH, data: { force } });
      }

      case QueueName.METADATA_EXTRACTION: {
        return this.jobRepository.queue({ name: JobName.QUEUE_METADATA_EXTRACTION, data: { force } });
      }

      case QueueName.SIDECAR: {
        await this.configCore.requireFeature(FeatureFlag.SIDECAR);
        return this.jobRepository.queue({ name: JobName.QUEUE_SIDECAR, data: { force } });
      }

      case QueueName.THUMBNAIL_GENERATION: {
        return this.jobRepository.queue({ name: JobName.QUEUE_GENERATE_THUMBNAILS, data: { force } });
      }

      case QueueName.FACE_DETECTION: {
        await this.configCore.requireFeature(FeatureFlag.FACIAL_RECOGNITION);
        return this.jobRepository.queue({ name: JobName.QUEUE_FACE_DETECTION, data: { force } });
      }

      case QueueName.FACIAL_RECOGNITION: {
        await this.configCore.requireFeature(FeatureFlag.FACIAL_RECOGNITION);
        return this.jobRepository.queue({ name: JobName.QUEUE_FACIAL_RECOGNITION, data: { force } });
      }

      case QueueName.LIBRARY: {
        return this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_SCAN_ALL, data: { force } });
      }

      default: {
        throw new BadRequestException(`Invalid job name: ${name}`);
      }
    }
  }

  async init(jobHandlers: Record<JobName, JobHandler>) {
    const config = await this.configCore.getConfig();
    for (const queueName of Object.values(QueueName)) {
      let concurrency = 1;

      if (this.isConcurrentQueue(queueName)) {
        concurrency = config.job[queueName].concurrency;
      }

      this.logger.debug(`Registering ${queueName} with a concurrency of ${concurrency}`);
      this.jobRepository.addHandler(queueName, concurrency, async (item: JobItem): Promise<void> => {
        const { name, data } = item;

        try {
          const handler = jobHandlers[name];
          const status = await handler(data);
          if (status === JobStatus.SUCCESS || status === JobStatus.SKIPPED) {
            await this.onDone(item);
          }
        } catch (error: Error | any) {
          this.logger.error(`Unable to run job handler (${queueName}/${name}): ${error}`, error?.stack, data);
        }
      });
    }

    this.configCore.config$.subscribe((config) => {
      this.logger.debug(`Updating queue concurrency settings`);
      for (const queueName of Object.values(QueueName)) {
        let concurrency = 1;
        if (this.isConcurrentQueue(queueName)) {
          concurrency = config.job[queueName].concurrency;
        }
        this.logger.debug(`Setting ${queueName} concurrency to ${concurrency}`);
        this.jobRepository.setConcurrency(queueName, concurrency);
      }
    });
  }

  private isConcurrentQueue(name: QueueName): name is ConcurrentQueueName {
    return ![QueueName.FACIAL_RECOGNITION, QueueName.STORAGE_TEMPLATE_MIGRATION].includes(name);
  }

  async handleNightlyJobs() {
    await this.jobRepository.queueAll([
      { name: JobName.ASSET_DELETION_CHECK },
      { name: JobName.USER_DELETE_CHECK },
      { name: JobName.PERSON_CLEANUP },
      { name: JobName.QUEUE_GENERATE_THUMBNAILS, data: { force: false } },
      { name: JobName.CLEAN_OLD_AUDIT_LOGS },
      { name: JobName.USER_SYNC_USAGE },
      { name: JobName.QUEUE_FACIAL_RECOGNITION, data: { force: false } },
    ]);
  }

  /**
   * Queue follow up jobs
   */
  private async onDone(item: JobItem) {
    switch (item.name) {
      case JobName.SIDECAR_SYNC:
      case JobName.SIDECAR_DISCOVERY: {
        await this.jobRepository.queue({ name: JobName.METADATA_EXTRACTION, data: item.data });
        break;
      }

      case JobName.SIDECAR_WRITE: {
        await this.jobRepository.queue({
          name: JobName.METADATA_EXTRACTION,
          data: { id: item.data.id, source: 'sidecar-write' },
        });
        break;
      }

      case JobName.METADATA_EXTRACTION: {
        if (item.data.source === 'sidecar-write') {
          const [asset] = await this.assetRepository.getByIdsWithAllRelations([item.data.id]);
          if (asset) {
            this.communicationRepository.send(ClientEvent.ASSET_UPDATE, asset.ownerId, mapAsset(asset));
          }
        }
        await this.jobRepository.queue({ name: JobName.LINK_LIVE_PHOTOS, data: item.data });
        break;
      }

      case JobName.LINK_LIVE_PHOTOS: {
        await this.jobRepository.queue({ name: JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE, data: item.data });
        break;
      }

      case JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE: {
        if (item.data.source === 'upload') {
          await this.jobRepository.queue({ name: JobName.GENERATE_JPEG_THUMBNAIL, data: item.data });
        }
        break;
      }

      case JobName.GENERATE_PERSON_THUMBNAIL: {
        const { id } = item.data;
        const person = await this.personRepository.getById(id);
        if (person) {
          this.communicationRepository.send(ClientEvent.PERSON_THUMBNAIL, person.ownerId, person.id);
        }
        break;
      }

      case JobName.GENERATE_JPEG_THUMBNAIL: {
        const jobs: JobItem[] = [
          { name: JobName.GENERATE_WEBP_THUMBNAIL, data: item.data },
          { name: JobName.GENERATE_THUMBHASH_THUMBNAIL, data: item.data },
        ];

        if (item.data.source === 'upload') {
          jobs.push({ name: JobName.SMART_SEARCH, data: item.data }, { name: JobName.FACE_DETECTION, data: item.data });

          const [asset] = await this.assetRepository.getByIds([item.data.id]);
          if (asset) {
            if (asset.type === AssetType.VIDEO) {
              jobs.push({ name: JobName.VIDEO_CONVERSION, data: item.data });
            } else if (asset.livePhotoVideoId) {
              jobs.push({ name: JobName.VIDEO_CONVERSION, data: { id: asset.livePhotoVideoId } });
            }
          }
        }

        await this.jobRepository.queueAll(jobs);
        break;
      }

      case JobName.GENERATE_WEBP_THUMBNAIL: {
        if (item.data.source !== 'upload') {
          break;
        }

        const [asset] = await this.assetRepository.getByIdsWithAllRelations([item.data.id]);

        // Only live-photo motion part will be marked as not visible immediately on upload. Skip notifying clients
        if (asset && asset.isVisible) {
          this.communicationRepository.send(ClientEvent.UPLOAD_SUCCESS, asset.ownerId, mapAsset(asset));
        }
        break;
      }

      case JobName.USER_DELETION: {
        this.communicationRepository.broadcast(ClientEvent.USER_DELETE, item.data.id);
        break;
      }
    }
  }
}
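// Illustrative summary (derived from onDone above, not part of the diff): a
// successful job chains into follow-up work. For a fresh upload the happy path is:
//   SIDECAR_DISCOVERY -> METADATA_EXTRACTION -> LINK_LIVE_PHOTOS
//     -> STORAGE_TEMPLATE_MIGRATION_SINGLE -> GENERATE_JPEG_THUMBNAIL
//     -> { GENERATE_WEBP_THUMBNAIL, GENERATE_THUMBHASH_THUMBNAIL,
//          SMART_SEARCH, FACE_DETECTION, VIDEO_CONVERSION (videos / live photos) }
// Handlers that return JobStatus.FAILED (or throw) queue no follow-ups, which is
// exactly what the spec above asserts.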
File diff suppressed because it is too large
@@ -0,0 +1,702 @@
|
||||
import { BadRequestException, Inject, Injectable } from '@nestjs/common';
|
||||
import { OnEvent } from '@nestjs/event-emitter';
|
||||
import { Trie } from 'mnemonist';
|
||||
import { R_OK } from 'node:constants';
|
||||
import { EventEmitter } from 'node:events';
|
||||
import { Stats } from 'node:fs';
|
||||
import path, { basename, parse } from 'node:path';
|
||||
import picomatch from 'picomatch';
|
||||
import { StorageCore } from 'src/cores/storage.core';
|
||||
import { SystemConfigCore } from 'src/cores/system-config.core';
|
||||
import { mimeTypes } from 'src/domain/domain.constant';
|
||||
import { JOBS_ASSET_PAGINATION_SIZE, JobName } from 'src/domain/job/job.constants';
|
||||
import { IBaseJob, IEntityJob, ILibraryFileJob, ILibraryRefreshJob } from 'src/domain/job/job.interface';
|
||||
import {
|
||||
CreateLibraryDto,
|
||||
LibraryResponseDto,
|
||||
LibraryStatsResponseDto,
|
||||
ScanLibraryDto,
|
||||
SearchLibraryDto,
|
||||
UpdateLibraryDto,
|
||||
ValidateLibraryDto,
|
||||
ValidateLibraryImportPathResponseDto,
|
||||
ValidateLibraryResponseDto,
|
||||
mapLibrary,
|
||||
} from 'src/dtos/library.dto';
|
||||
import { AssetType } from 'src/entities/asset.entity';
|
||||
import { LibraryEntity, LibraryType } from 'src/entities/library.entity';
|
||||
import { ImmichLogger } from 'src/infra/logger';
|
||||
import { IAssetRepository, WithProperty } from 'src/interfaces/asset.repository';
|
||||
import { InternalEvent, InternalEventMap } from 'src/interfaces/communication.repository';
|
||||
import { ICryptoRepository } from 'src/interfaces/crypto.repository';
|
||||
import { DatabaseLock, IDatabaseRepository } from 'src/interfaces/database.repository';
|
||||
import { IJobRepository, JobStatus } from 'src/interfaces/job.repository';
|
||||
import { ILibraryRepository } from 'src/interfaces/library.repository';
|
||||
import { IStorageRepository, StorageEventType } from 'src/interfaces/storage.repository';
|
||||
import { ISystemConfigRepository } from 'src/interfaces/system-config.repository';
|
||||
import { handlePromiseError, usePagination } from 'src/utils';
|
||||
import { validateCronExpression } from 'src/validation';
|
||||
|
||||
const LIBRARY_SCAN_BATCH_SIZE = 5000;
|
||||
|
||||
@Injectable()
|
||||
export class LibraryService extends EventEmitter {
|
||||
readonly logger = new ImmichLogger(LibraryService.name);
|
||||
private configCore: SystemConfigCore;
|
||||
private watchLibraries = false;
|
||||
private watchLock = false;
|
||||
private watchers: Record<string, () => Promise<void>> = {};
|
||||
|
||||
constructor(
|
||||
@Inject(IAssetRepository) private assetRepository: IAssetRepository,
|
||||
@Inject(ISystemConfigRepository) configRepository: ISystemConfigRepository,
|
||||
@Inject(ICryptoRepository) private cryptoRepository: ICryptoRepository,
|
||||
@Inject(IJobRepository) private jobRepository: IJobRepository,
|
||||
@Inject(ILibraryRepository) private repository: ILibraryRepository,
|
||||
@Inject(IStorageRepository) private storageRepository: IStorageRepository,
|
||||
@Inject(IDatabaseRepository) private databaseRepository: IDatabaseRepository,
|
||||
) {
|
||||
super();
|
||||
this.configCore = SystemConfigCore.create(configRepository);
|
||||
}
|
||||
|
||||
async init() {
|
||||
const config = await this.configCore.getConfig();
|
||||
|
||||
const { watch, scan } = config.library;
|
||||
|
||||
// This ensures that library watching only occurs in one microservice
|
||||
// TODO: we could make the lock be per-library instead of global
|
||||
this.watchLock = await this.databaseRepository.tryLock(DatabaseLock.LibraryWatch);
|
||||
|
||||
this.watchLibraries = this.watchLock && watch.enabled;
|
||||
|
||||
this.jobRepository.addCronJob(
|
||||
'libraryScan',
|
||||
scan.cronExpression,
|
||||
() =>
|
||||
handlePromiseError(
|
||||
this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_SCAN_ALL, data: { force: false } }),
|
||||
this.logger,
|
||||
),
|
||||
scan.enabled,
|
||||
);
|
||||
|
||||
if (this.watchLibraries) {
|
||||
await this.watchAll();
|
||||
}
|
||||
|
||||
this.configCore.config$.subscribe(({ library }) => {
|
||||
this.jobRepository.updateCronJob('libraryScan', library.scan.cronExpression, library.scan.enabled);
|
||||
|
||||
if (library.watch.enabled !== this.watchLibraries) {
|
||||
// Watch configuration changed, update accordingly
|
||||
this.watchLibraries = library.watch.enabled;
|
||||
handlePromiseError(this.watchLibraries ? this.watchAll() : this.unwatchAll(), this.logger);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@OnEvent(InternalEvent.VALIDATE_CONFIG)
|
||||
validateConfig({ newConfig }: InternalEventMap[InternalEvent.VALIDATE_CONFIG]) {
|
||||
const { scan } = newConfig.library;
|
||||
if (!validateCronExpression(scan.cronExpression)) {
|
||||
throw new Error(`Invalid cron expression ${scan.cronExpression}`);
|
||||
}
|
||||
}
|
||||
|
||||
private async watch(id: string): Promise<boolean> {
|
||||
if (!this.watchLibraries) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const library = await this.findOrFail(id);
|
||||
|
||||
if (library.type !== LibraryType.EXTERNAL) {
|
||||
throw new BadRequestException('Can only watch external libraries');
|
||||
} else if (library.importPaths.length === 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
await this.unwatch(id);
|
||||
|
||||
this.logger.log(`Starting to watch library ${library.id} with import path(s) ${library.importPaths}`);
|
||||
|
||||
const matcher = picomatch(`**/*{${mimeTypes.getSupportedFileExtensions().join(',')}}`, {
|
||||
nocase: true,
|
||||
ignore: library.exclusionPatterns,
|
||||
});
|
||||
|
||||
let _resolve: () => void;
|
||||
const ready$ = new Promise<void>((resolve) => (_resolve = resolve));
|
||||
|
||||
this.watchers[id] = this.storageRepository.watch(
|
||||
library.importPaths,
|
||||
{
|
||||
usePolling: false,
|
||||
ignoreInitial: true,
|
||||
},
|
||||
{
|
||||
onReady: () => _resolve(),
|
||||
onAdd: (path) => {
|
||||
const handler = async () => {
|
||||
this.logger.debug(`File add event received for ${path} in library ${library.id}}`);
|
||||
if (matcher(path)) {
|
||||
await this.scanAssets(library.id, [path], library.ownerId, false);
|
||||
}
|
||||
this.emit(StorageEventType.ADD, path);
|
||||
};
|
||||
return handlePromiseError(handler(), this.logger);
|
||||
},
|
||||
onChange: (path) => {
|
||||
const handler = async () => {
|
||||
this.logger.debug(`Detected file change for ${path} in library ${library.id}`);
|
||||
if (matcher(path)) {
|
||||
// Note: if the changed file was not previously imported, it will be imported now.
|
||||
await this.scanAssets(library.id, [path], library.ownerId, false);
|
||||
}
|
||||
this.emit(StorageEventType.CHANGE, path);
|
||||
};
|
||||
return handlePromiseError(handler(), this.logger);
|
||||
},
|
||||
onUnlink: (path) => {
|
||||
const handler = async () => {
|
||||
this.logger.debug(`Detected deleted file at ${path} in library ${library.id}`);
|
||||
const asset = await this.assetRepository.getByLibraryIdAndOriginalPath(library.id, path);
|
||||
if (asset && matcher(path)) {
|
||||
await this.assetRepository.update({ id: asset.id, isOffline: true });
|
||||
}
|
||||
this.emit(StorageEventType.UNLINK, path);
|
||||
};
|
||||
return handlePromiseError(handler(), this.logger);
|
||||
},
|
||||
onError: (error) => {
|
||||
this.logger.error(`Library watcher for library ${library.id} encountered error: ${error}`);
|
||||
this.emit(StorageEventType.ERROR, error);
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
// Wait for the watcher to initialize before returning
|
||||
await ready$;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
async unwatch(id: string) {
|
||||
if (this.watchers[id]) {
|
||||
await this.watchers[id]();
|
||||
delete this.watchers[id];
|
||||
}
|
||||
}
|
||||
|
||||
async teardown() {
|
||||
await this.unwatchAll();
|
||||
}
|
||||
|
||||
private async unwatchAll() {
|
||||
if (!this.watchLock) {
|
||||
return false;
|
||||
}
|
||||
|
||||
for (const id in this.watchers) {
|
||||
await this.unwatch(id);
|
||||
}
|
||||
}
|
||||
|
||||
async watchAll() {
|
||||
if (!this.watchLock) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const libraries = await this.repository.getAll(false, LibraryType.EXTERNAL);
|
||||
|
||||
for (const library of libraries) {
|
||||
await this.watch(library.id);
|
||||
}
|
||||
}
|
||||
|
||||
async getStatistics(id: string): Promise<LibraryStatsResponseDto> {
|
||||
await this.findOrFail(id);
|
||||
return this.repository.getStatistics(id);
|
||||
}
|
||||
|
||||
async get(id: string): Promise<LibraryResponseDto> {
|
||||
const library = await this.findOrFail(id);
|
||||
return mapLibrary(library);
|
||||
}
|
||||
|
||||
async getAll(dto: SearchLibraryDto): Promise<LibraryResponseDto[]> {
|
||||
const libraries = await this.repository.getAll(false, dto.type);
|
||||
return libraries.map((library) => mapLibrary(library));
|
||||
}
|
||||
|
||||
async handleQueueCleanup(): Promise<JobStatus> {
|
||||
this.logger.debug('Cleaning up any pending library deletions');
|
||||
const pendingDeletion = await this.repository.getAllDeleted();
|
||||
await this.jobRepository.queueAll(
|
||||
pendingDeletion.map((libraryToDelete) => ({ name: JobName.LIBRARY_DELETE, data: { id: libraryToDelete.id } })),
|
||||
);
|
||||
return JobStatus.SUCCESS;
|
||||
}
|
||||
|
||||
async create(dto: CreateLibraryDto): Promise<LibraryResponseDto> {
|
||||
switch (dto.type) {
|
||||
case LibraryType.EXTERNAL: {
|
||||
if (!dto.name) {
|
||||
dto.name = 'New External Library';
|
||||
}
|
||||
break;
|
||||
}
|
||||
case LibraryType.UPLOAD: {
|
||||
if (!dto.name) {
|
||||
dto.name = 'New Upload Library';
|
||||
}
|
||||
if (dto.importPaths && dto.importPaths.length > 0) {
|
||||
throw new BadRequestException('Upload libraries cannot have import paths');
|
||||
}
|
||||
if (dto.exclusionPatterns && dto.exclusionPatterns.length > 0) {
|
||||
throw new BadRequestException('Upload libraries cannot have exclusion patterns');
|
||||
}
|
||||
if (dto.isWatched) {
|
||||
throw new BadRequestException('Upload libraries cannot be watched');
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
const library = await this.repository.create({
|
||||
ownerId: dto.ownerId,
|
||||
name: dto.name,
|
||||
type: dto.type,
|
||||
importPaths: dto.importPaths ?? [],
|
||||
exclusionPatterns: dto.exclusionPatterns ?? [],
|
||||
isVisible: dto.isVisible ?? true,
|
||||
});
|
||||
|
||||
this.logger.log(`Creating ${dto.type} library for ${dto.ownerId}}`);
|
||||
|
||||
if (dto.type === LibraryType.EXTERNAL) {
|
||||
await this.watch(library.id);
|
||||
}
|
||||
|
||||
return mapLibrary(library);
|
||||
}
|
||||
|
||||
private async scanAssets(libraryId: string, assetPaths: string[], ownerId: string, force = false) {
|
||||
this.logger.verbose(`Queuing refresh of ${assetPaths.length} asset(s)`);
|
||||
|
||||
// We perform this in batches to save on memory when performing large refreshes (greater than 1M assets)
|
||||
const batchSize = 5000;
|
||||
for (let i = 0; i < assetPaths.length; i += batchSize) {
|
||||
const batch = assetPaths.slice(i, i + batchSize);
|
||||
await this.jobRepository.queueAll(
|
||||
batch.map((assetPath) => ({
|
||||
name: JobName.LIBRARY_SCAN_ASSET,
|
||||
data: {
|
||||
id: libraryId,
|
||||
assetPath: assetPath,
|
||||
ownerId,
|
||||
force,
|
||||
},
|
||||
})),
|
||||
);
|
||||
}
|
||||
|
||||
this.logger.debug('Asset refresh queue completed');
|
||||
}
|
||||
|
||||
private async validateImportPath(importPath: string): Promise<ValidateLibraryImportPathResponseDto> {
|
||||
const validation = new ValidateLibraryImportPathResponseDto();
|
||||
validation.importPath = importPath;
|
||||
|
||||
if (StorageCore.isImmichPath(importPath)) {
|
||||
validation.message = 'Cannot use media upload folder for external libraries';
|
||||
return validation;
|
||||
}
|
||||
|
||||
try {
|
||||
const stat = await this.storageRepository.stat(importPath);
|
||||
if (!stat.isDirectory()) {
|
||||
validation.message = 'Not a directory';
|
||||
return validation;
|
||||
}
|
||||
} catch (error: any) {
|
||||
if (error.code === 'ENOENT') {
|
||||
validation.message = 'Path does not exist (ENOENT)';
|
||||
return validation;
|
||||
}
|
||||
validation.message = String(error);
|
||||
return validation;
|
||||
}
|
||||
|
||||
const access = await this.storageRepository.checkFileExists(importPath, R_OK);
|
||||
|
||||
if (!access) {
|
||||
validation.message = 'Lacking read permission for folder';
|
||||
return validation;
|
||||
}
|
||||
|
||||
validation.isValid = true;
|
||||
return validation;
|
||||
}
|
||||
|
||||
async validate(id: string, dto: ValidateLibraryDto): Promise<ValidateLibraryResponseDto> {
|
||||
const importPaths = await Promise.all(
|
||||
(dto.importPaths || []).map((importPath) => this.validateImportPath(importPath)),
|
||||
);
|
||||
return { importPaths };
|
||||
}
|
||||
|
||||
async update(id: string, dto: UpdateLibraryDto): Promise<LibraryResponseDto> {
|
||||
await this.findOrFail(id);
|
||||
const library = await this.repository.update({ id, ...dto });
|
||||
|
||||
if (dto.importPaths) {
|
||||
const validation = await this.validate(id, { importPaths: dto.importPaths });
|
||||
if (validation.importPaths) {
|
||||
for (const path of validation.importPaths) {
|
||||
if (!path.isValid) {
|
||||
throw new BadRequestException(`Invalid import path: ${path.message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (dto.importPaths || dto.exclusionPatterns) {
|
||||
// Re-watch library to use new paths and/or exclusion patterns
|
||||
await this.watch(id);
|
||||
}
|
||||
|
||||
return mapLibrary(library);
|
||||
}
|
||||
|
||||
async delete(id: string) {
|
||||
const library = await this.findOrFail(id);
|
||||
const uploadCount = await this.repository.getUploadLibraryCount(library.ownerId);
|
||||
if (library.type === LibraryType.UPLOAD && uploadCount <= 1) {
|
||||
throw new BadRequestException('Cannot delete the last upload library');
|
||||
}
|
||||
|
||||
if (this.watchLibraries) {
|
||||
await this.unwatch(id);
|
||||
}
|
||||
|
||||
await this.repository.softDelete(id);
|
||||
await this.jobRepository.queue({ name: JobName.LIBRARY_DELETE, data: { id } });
|
||||
}
|
||||
|
||||
async handleDeleteLibrary(job: IEntityJob): Promise<JobStatus> {
|
||||
const library = await this.repository.get(job.id, true);
|
||||
if (!library) {
|
||||
return JobStatus.FAILED;
|
||||
}
|
||||
|
||||
// TODO use pagination
|
||||
const assetIds = await this.repository.getAssetIds(job.id, true);
|
||||
this.logger.debug(`Will delete ${assetIds.length} asset(s) in library ${job.id}`);
|
||||
await this.jobRepository.queueAll(
|
||||
assetIds.map((assetId) => ({ name: JobName.ASSET_DELETION, data: { id: assetId, fromExternal: true } })),
|
||||
);
|
||||
|
||||
if (assetIds.length === 0) {
|
||||
this.logger.log(`Deleting library ${job.id}`);
|
||||
await this.repository.delete(job.id);
|
||||
}
|
||||
return JobStatus.SUCCESS;
|
||||
}
|
||||
|
||||
async handleAssetRefresh(job: ILibraryFileJob): Promise<JobStatus> {
|
||||
const assetPath = path.normalize(job.assetPath);
|
||||
|
||||
const existingAssetEntity = await this.assetRepository.getByLibraryIdAndOriginalPath(job.id, assetPath);
|
||||
|
||||
let stats: Stats;
|
||||
try {
|
||||
stats = await this.storageRepository.stat(assetPath);
|
||||
} catch (error: Error | any) {
|
||||
// Can't access file, probably offline
|
||||
if (existingAssetEntity) {
|
||||
// Mark asset as offline
|
||||
this.logger.debug(`Marking asset as offline: ${assetPath}`);
|
||||
|
||||
await this.assetRepository.update({ id: existingAssetEntity.id, isOffline: true });
|
||||
return JobStatus.SUCCESS;
|
||||
} else {
|
||||
// File can't be accessed and does not already exist in db
|
||||
throw new BadRequestException('Cannot access file', { cause: error });
|
||||
}
|
||||
}
|
||||
|
||||
let doImport = false;
|
||||
let doRefresh = false;
|
||||
|
||||
if (job.force) {
|
||||
doRefresh = true;
|
||||
}
|
||||
|
||||
if (!existingAssetEntity) {
|
||||
// This asset is new to us, read it from disk
|
||||
this.logger.debug(`Importing new asset: ${assetPath}`);
|
||||
doImport = true;
|
||||
} else if (stats.mtime.toISOString() !== existingAssetEntity.fileModifiedAt.toISOString()) {
|
||||
// File modification time has changed since last time we checked, re-read from disk
|
||||
this.logger.debug(
|
||||
`File modification time has changed, re-importing asset: ${assetPath}. Old mtime: ${existingAssetEntity.fileModifiedAt}. New mtime: ${stats.mtime}`,
|
||||
);
|
||||
doRefresh = true;
|
||||
} else if (!job.force && stats && !existingAssetEntity.isOffline) {
|
||||
// Asset exists on disk and in db and mtime has not changed. Also, we are not forcing refresn. Therefore, do nothing
|
||||
this.logger.debug(`Asset already exists in database and on disk, will not import: ${assetPath}`);
|
||||
}
|
||||
|
||||
if (stats && existingAssetEntity?.isOffline) {
|
||||
// File was previously offline but is now online
|
||||
this.logger.debug(`Marking previously-offline asset as online: ${assetPath}`);
|
||||
await this.assetRepository.update({ id: existingAssetEntity.id, isOffline: false });
|
||||
doRefresh = true;
|
||||
}
|
||||
|
||||
if (!doImport && !doRefresh) {
|
||||
// If we don't import, exit here
|
||||
return JobStatus.SKIPPED;
|
||||
}
|
||||
|
||||
let assetType: AssetType;
|
||||
|
||||
if (mimeTypes.isImage(assetPath)) {
|
||||
assetType = AssetType.IMAGE;
|
||||
} else if (mimeTypes.isVideo(assetPath)) {
|
||||
assetType = AssetType.VIDEO;
|
||||
} else {
|
||||
throw new BadRequestException(`Unsupported file type ${assetPath}`);
|
||||
}
|
||||
|
||||
// TODO: doesn't xmp replace the file extension? Will need investigation
|
||||
let sidecarPath: string | null = null;
|
||||
if (await this.storageRepository.checkFileExists(`${assetPath}.xmp`, R_OK)) {
|
||||
sidecarPath = `${assetPath}.xmp`;
|
||||
}
|
||||
|
||||
const deviceAssetId = `${basename(assetPath)}`.replaceAll(/\s+/g, '');
|
||||
|
||||
let assetId;
|
||||
if (doImport) {
|
||||
const library = await this.repository.get(job.id, true);
|
||||
if (library?.deletedAt) {
|
||||
this.logger.error('Cannot import asset into deleted library');
|
||||
return JobStatus.FAILED;
|
||||
}
|
||||
|
||||
const pathHash = this.cryptoRepository.hashSha1(`path:${assetPath}`);
|
||||
|
||||
// TODO: In wait of refactoring the domain asset service, this function is just manually written like this
|
||||
const addedAsset = await this.assetRepository.create({
|
||||
ownerId: job.ownerId,
|
||||
libraryId: job.id,
|
||||
checksum: pathHash,
|
||||
originalPath: assetPath,
|
||||
deviceAssetId: deviceAssetId,
|
||||
deviceId: 'Library Import',
|
||||
fileCreatedAt: stats.mtime,
|
||||
fileModifiedAt: stats.mtime,
|
||||
localDateTime: stats.mtime,
|
||||
type: assetType,
|
||||
originalFileName: parse(assetPath).base,
|
||||
sidecarPath,
|
||||
isReadOnly: true,
|
||||
isExternal: true,
|
||||
});
|
||||
assetId = addedAsset.id;
|
||||
} else if (doRefresh && existingAssetEntity) {
|
||||
assetId = existingAssetEntity.id;
|
||||
await this.assetRepository.updateAll([existingAssetEntity.id], {
|
||||
fileCreatedAt: stats.mtime,
|
||||
fileModifiedAt: stats.mtime,
|
||||
});
|
||||
} else {
|
||||
// Not importing and not refreshing, do nothing
|
||||
return JobStatus.SKIPPED;
|
||||
}
|
||||
|
||||
this.logger.debug(`Queuing metadata extraction for: ${assetPath}`);
|
||||
|
||||
await this.jobRepository.queue({ name: JobName.METADATA_EXTRACTION, data: { id: assetId, source: 'upload' } });
|
||||
|
||||
if (assetType === AssetType.VIDEO) {
|
||||
await this.jobRepository.queue({ name: JobName.VIDEO_CONVERSION, data: { id: assetId } });
|
||||
}
|
||||
|
||||
return JobStatus.SUCCESS;
|
||||
}
|
||||
|
||||
async queueScan(id: string, dto: ScanLibraryDto) {
|
||||
const library = await this.findOrFail(id);
|
||||
if (library.type !== LibraryType.EXTERNAL) {
|
||||
throw new BadRequestException('Can only refresh external libraries');
|
||||
}
|
||||
|
||||
await this.jobRepository.queue({
|
||||
name: JobName.LIBRARY_SCAN,
|
||||
data: {
|
||||
id,
|
||||
refreshModifiedFiles: dto.refreshModifiedFiles ?? false,
|
||||
refreshAllFiles: dto.refreshAllFiles ?? false,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async queueRemoveOffline(id: string) {
|
||||
this.logger.verbose(`Removing offline files from library: ${id}`);
|
||||
await this.jobRepository.queue({ name: JobName.LIBRARY_REMOVE_OFFLINE, data: { id } });
|
||||
}
|
||||
|
||||
async handleQueueAllScan(job: IBaseJob): Promise<JobStatus> {
|
||||
this.logger.debug(`Refreshing all external libraries: force=${job.force}`);
|
||||
|
||||
// Queue cleanup
|
||||
await this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_CLEANUP, data: {} });
|
||||
|
||||
// Queue all library refresh
|
||||
const libraries = await this.repository.getAll(true, LibraryType.EXTERNAL);
|
||||
await this.jobRepository.queueAll(
|
||||
libraries.map((library) => ({
|
||||
name: JobName.LIBRARY_SCAN,
|
||||
data: {
|
||||
id: library.id,
|
||||
refreshModifiedFiles: !job.force,
|
||||
refreshAllFiles: job.force ?? false,
|
||||
},
|
||||
})),
|
||||
);
|
||||
return JobStatus.SUCCESS;
|
||||
}
|
||||
|
||||
async handleOfflineRemoval(job: IEntityJob): Promise<JobStatus> {
|
||||
const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) =>
|
||||
this.assetRepository.getWith(pagination, WithProperty.IS_OFFLINE, job.id),
|
||||
);
|
||||
|
||||
for await (const assets of assetPagination) {
|
||||
this.logger.debug(`Removing ${assets.length} offline assets`);
|
||||
await this.jobRepository.queueAll(
|
||||
assets.map((asset) => ({ name: JobName.ASSET_DELETION, data: { id: asset.id, fromExternal: true } })),
|
||||
);
|
||||
}
|
||||
|
||||
return JobStatus.SUCCESS;
|
||||
}
|
||||
|
||||
  async handleQueueAssetRefresh(job: ILibraryRefreshJob): Promise<JobStatus> {
    const library = await this.repository.get(job.id);
    if (!library || library.type !== LibraryType.EXTERNAL) {
      this.logger.warn('Can only refresh external libraries');
      return JobStatus.FAILED;
    }

    this.logger.log(`Refreshing library: ${job.id}`);

    const crawledAssetPaths = await this.getPathTrie(library);
    this.logger.debug(`Found ${crawledAssetPaths.size} asset(s) when crawling import paths ${library.importPaths}`);

    const assetIdsToMarkOffline: string[] = [];
    const assetIdsToMarkOnline: string[] = [];
    const pagination = usePagination(LIBRARY_SCAN_BATCH_SIZE, (pagination) =>
      this.assetRepository.getLibraryAssetPaths(pagination, library.id),
    );

    this.logger.verbose(`Crawled asset paths paginated`);

    const shouldScanAll = job.refreshAllFiles || job.refreshModifiedFiles;
    for await (const page of pagination) {
      for (const asset of page) {
        const isOffline = !crawledAssetPaths.has(asset.originalPath);
        if (isOffline && !asset.isOffline) {
          assetIdsToMarkOffline.push(asset.id);
          this.logger.verbose(`Added to mark-offline list: ${asset.originalPath}`);
        }

        if (!isOffline && asset.isOffline) {
          assetIdsToMarkOnline.push(asset.id);
          this.logger.verbose(`Added to mark-online list: ${asset.originalPath}`);
        }

        if (!shouldScanAll) {
          crawledAssetPaths.delete(asset.originalPath);
        }
      }
    }

    this.logger.verbose(`Crawled assets have been checked for online/offline status`);

    if (assetIdsToMarkOffline.length > 0) {
      this.logger.debug(`Found ${assetIdsToMarkOffline.length} offline asset(s) previously marked as online`);
      await this.assetRepository.updateAll(assetIdsToMarkOffline, { isOffline: true });
    }

    if (assetIdsToMarkOnline.length > 0) {
      this.logger.debug(`Found ${assetIdsToMarkOnline.length} online asset(s) previously marked as offline`);
      await this.assetRepository.updateAll(assetIdsToMarkOnline, { isOffline: false });
    }

    if (crawledAssetPaths.size > 0) {
      if (!shouldScanAll) {
        this.logger.debug(`Will import ${crawledAssetPaths.size} new asset(s)`);
      }

      let batch: string[] = [];
      for (const assetPath of crawledAssetPaths) {
        batch.push(assetPath);

        if (batch.length >= LIBRARY_SCAN_BATCH_SIZE) {
          await this.scanAssets(job.id, batch, library.ownerId, job.refreshAllFiles ?? false);
          batch = [];
        }
      }

      if (batch.length > 0) {
        await this.scanAssets(job.id, batch, library.ownerId, job.refreshAllFiles ?? false);
      }
    }

    await this.repository.update({ id: job.id, refreshedAt: new Date() });

    return JobStatus.SUCCESS;
  }

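  // Validates each import path, then walks the valid ones (honoring exclusion
  // patterns) and collects every crawled file path into a trie.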
  private async getPathTrie(library: LibraryEntity): Promise<Trie<string>> {
    const pathValidation = await Promise.all(
      library.importPaths.map(async (importPath) => await this.validateImportPath(importPath)),
    );

    const validImportPaths = pathValidation
      .map((validation) => {
        if (!validation.isValid) {
          this.logger.error(`Skipping invalid import path: ${validation.importPath}. Reason: ${validation.message}`);
        }
        return validation;
      })
      .filter((validation) => validation.isValid)
      .map((validation) => validation.importPath);

    const generator = this.storageRepository.walk({
      pathsToCrawl: validImportPaths,
      exclusionPatterns: library.exclusionPatterns,
    });

    const trie = new Trie<string>();
    for await (const filePath of generator) {
      trie.add(filePath);
    }

    return trie;
  }

  private async findOrFail(id: string) {
    const library = await this.repository.get(id);
    if (!library) {
      throw new BadRequestException('Library not found');
    }
    return library;
  }
}
@@ -0,0 +1,514 @@
import { Inject, Injectable, UnsupportedMediaTypeException } from '@nestjs/common';
import { StorageCore, StorageFolder } from 'src/cores/storage.core';
import { SystemConfigCore } from 'src/cores/system-config.core';
import { JOBS_ASSET_PAGINATION_SIZE, JobName, QueueName } from 'src/domain/job/job.constants';
import { IBaseJob, IEntityJob } from 'src/domain/job/job.interface';
import {
  H264Config,
  HEVCConfig,
  NVENCConfig,
  QSVConfig,
  RKMPPConfig,
  ThumbnailConfig,
  VAAPIConfig,
  VP9Config,
} from 'src/domain/media/media.util';
import { SystemConfigFFmpegDto } from 'src/dtos/system-config-ffmpeg.dto';
import { AssetEntity, AssetType } from 'src/entities/asset.entity';
import { AssetPathType } from 'src/entities/move.entity';
import {
  AudioCodec,
  Colorspace,
  TranscodeHWAccel,
  TranscodePolicy,
  TranscodeTarget,
  VideoCodec,
} from 'src/entities/system-config.entity';
import { ImmichLogger } from 'src/infra/logger';
import { IAssetRepository, WithoutProperty } from 'src/interfaces/asset.repository';
import { ICryptoRepository } from 'src/interfaces/crypto.repository';
import { IJobRepository, JobItem, JobStatus } from 'src/interfaces/job.repository';
import {
  AudioStreamInfo,
  IMediaRepository,
  VideoCodecHWConfig,
  VideoStreamInfo,
} from 'src/interfaces/media.repository';
import { IMoveRepository } from 'src/interfaces/move.repository';
import { IPersonRepository } from 'src/interfaces/person.repository';
import { IStorageRepository } from 'src/interfaces/storage.repository';
import { ISystemConfigRepository } from 'src/interfaces/system-config.repository';
import { usePagination } from 'src/utils';

@Injectable()
export class MediaService {
  private logger = new ImmichLogger(MediaService.name);
  private configCore: SystemConfigCore;
  private storageCore: StorageCore;
  private hasOpenCL?: boolean = undefined;

  constructor(
    @Inject(IAssetRepository) private assetRepository: IAssetRepository,
    @Inject(IPersonRepository) private personRepository: IPersonRepository,
    @Inject(IJobRepository) private jobRepository: IJobRepository,
    @Inject(IMediaRepository) private mediaRepository: IMediaRepository,
    @Inject(IStorageRepository) private storageRepository: IStorageRepository,
    @Inject(ISystemConfigRepository) configRepository: ISystemConfigRepository,
    @Inject(IMoveRepository) moveRepository: IMoveRepository,
    @Inject(ICryptoRepository) cryptoRepository: ICryptoRepository,
  ) {
    this.configCore = SystemConfigCore.create(configRepository);
    this.storageCore = StorageCore.create(
      assetRepository,
      moveRepository,
      personRepository,
      cryptoRepository,
      configRepository,
      storageRepository,
    );
  }

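  // Queues thumbnail jobs for assets (all when forced, otherwise only those
  // missing one) and for people, assigning a random face asset when unset.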
  async handleQueueGenerateThumbnails({ force }: IBaseJob): Promise<JobStatus> {
    const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => {
      return force
        ? this.assetRepository.getAll(pagination)
        : this.assetRepository.getWithout(pagination, WithoutProperty.THUMBNAIL);
    });

    for await (const assets of assetPagination) {
      const jobs: JobItem[] = [];

      for (const asset of assets) {
        if (!asset.resizePath || force) {
          jobs.push({ name: JobName.GENERATE_JPEG_THUMBNAIL, data: { id: asset.id } });
          continue;
        }
        if (!asset.webpPath) {
          jobs.push({ name: JobName.GENERATE_WEBP_THUMBNAIL, data: { id: asset.id } });
        }
        if (!asset.thumbhash) {
          jobs.push({ name: JobName.GENERATE_THUMBHASH_THUMBNAIL, data: { id: asset.id } });
        }
      }

      await this.jobRepository.queueAll(jobs);
    }

    const jobs: JobItem[] = [];
    const personPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) =>
      this.personRepository.getAll(pagination, { where: force ? undefined : { thumbnailPath: '' } }),
    );

    for await (const people of personPagination) {
      for (const person of people) {
        if (!person.faceAssetId) {
          const face = await this.personRepository.getRandomFace(person.id);
          if (!face) {
            continue;
          }

          await this.personRepository.update({ id: person.id, faceAssetId: face.assetId });
        }

        jobs.push({ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id: person.id } });
      }
    }

    await this.jobRepository.queueAll(jobs);

    return JobStatus.SUCCESS;
  }

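  // Queues migration jobs for every asset and person; when this is the only
  // active migration job, empty thumbnail/encoded-video folders are pruned first.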
  async handleQueueMigration(): Promise<JobStatus> {
    const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) =>
      this.assetRepository.getAll(pagination),
    );

    const { active, waiting } = await this.jobRepository.getJobCounts(QueueName.MIGRATION);
    if (active === 1 && waiting === 0) {
      await this.storageCore.removeEmptyDirs(StorageFolder.THUMBNAILS);
      await this.storageCore.removeEmptyDirs(StorageFolder.ENCODED_VIDEO);
    }

    for await (const assets of assetPagination) {
      await this.jobRepository.queueAll(
        assets.map((asset) => ({ name: JobName.MIGRATE_ASSET, data: { id: asset.id } })),
      );
    }

    const personPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) =>
      this.personRepository.getAll(pagination),
    );

    for await (const people of personPagination) {
      await this.jobRepository.queueAll(
        people.map((person) => ({ name: JobName.MIGRATE_PERSON, data: { id: person.id } })),
      );
    }

    return JobStatus.SUCCESS;
  }

  async handleAssetMigration({ id }: IEntityJob): Promise<JobStatus> {
    const [asset] = await this.assetRepository.getByIds([id]);
    if (!asset) {
      return JobStatus.FAILED;
    }

    await this.storageCore.moveAssetFile(asset, AssetPathType.JPEG_THUMBNAIL);
    await this.storageCore.moveAssetFile(asset, AssetPathType.WEBP_THUMBNAIL);
    await this.storageCore.moveAssetFile(asset, AssetPathType.ENCODED_VIDEO);

    return JobStatus.SUCCESS;
  }

  async handleGenerateJpegThumbnail({ id }: IEntityJob): Promise<JobStatus> {
    const [asset] = await this.assetRepository.getByIds([id], { exifInfo: true });
    if (!asset) {
      return JobStatus.FAILED;
    }

    const resizePath = await this.generateThumbnail(asset, 'jpeg');
    await this.assetRepository.update({ id: asset.id, resizePath });
    return JobStatus.SUCCESS;
  }

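  // Renders a JPEG or WebP preview: images are resized directly, while videos
  // get a frame extracted via a thumbnail transcode; returns the output path.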
  private async generateThumbnail(asset: AssetEntity, format: 'jpeg' | 'webp') {
    const { thumbnail, ffmpeg } = await this.configCore.getConfig();
    const size = format === 'jpeg' ? thumbnail.jpegSize : thumbnail.webpSize;
    const path =
      format === 'jpeg' ? StorageCore.getLargeThumbnailPath(asset) : StorageCore.getSmallThumbnailPath(asset);
    this.storageCore.ensureFolders(path);

    switch (asset.type) {
      case AssetType.IMAGE: {
        const colorspace = this.isSRGB(asset) ? Colorspace.SRGB : thumbnail.colorspace;
        const thumbnailOptions = { format, size, colorspace, quality: thumbnail.quality };
        await this.mediaRepository.resize(asset.originalPath, path, thumbnailOptions);
        break;
      }

      case AssetType.VIDEO: {
        const { audioStreams, videoStreams } = await this.mediaRepository.probe(asset.originalPath);
        const mainVideoStream = this.getMainStream(videoStreams);
        if (!mainVideoStream) {
          this.logger.warn(`Skipped thumbnail generation for asset ${asset.id}: no video streams found`);
          return;
        }
        const mainAudioStream = this.getMainStream(audioStreams);
        const config = { ...ffmpeg, targetResolution: size.toString() };
        const options = new ThumbnailConfig(config).getOptions(TranscodeTarget.VIDEO, mainVideoStream, mainAudioStream);
        await this.mediaRepository.transcode(asset.originalPath, path, options);
        break;
      }

      default: {
        throw new UnsupportedMediaTypeException(`Unsupported asset type for thumbnail generation: ${asset.type}`);
      }
    }
    this.logger.log(
      `Successfully generated ${format.toUpperCase()} ${asset.type.toLowerCase()} thumbnail for asset ${asset.id}`,
    );
    return path;
  }

  async handleGenerateWebpThumbnail({ id }: IEntityJob): Promise<JobStatus> {
    const [asset] = await this.assetRepository.getByIds([id], { exifInfo: true });
    if (!asset) {
      return JobStatus.FAILED;
    }

    const webpPath = await this.generateThumbnail(asset, 'webp');
    await this.assetRepository.update({ id: asset.id, webpPath });
    return JobStatus.SUCCESS;
  }

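  // Computes a compact thumbhash placeholder from the existing JPEG thumbnail.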
  async handleGenerateThumbhashThumbnail({ id }: IEntityJob): Promise<JobStatus> {
    const [asset] = await this.assetRepository.getByIds([id]);
    if (!asset?.resizePath) {
      return JobStatus.FAILED;
    }

    const thumbhash = await this.mediaRepository.generateThumbhash(asset.resizePath);
    await this.assetRepository.update({ id: asset.id, thumbhash });

    return JobStatus.SUCCESS;
  }

  async handleQueueVideoConversion(job: IBaseJob): Promise<JobStatus> {
    const { force } = job;

    const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => {
      return force
        ? this.assetRepository.getAll(pagination, { type: AssetType.VIDEO })
        : this.assetRepository.getWithout(pagination, WithoutProperty.ENCODED_VIDEO);
    });

    for await (const assets of assetPagination) {
      await this.jobRepository.queueAll(
        assets.map((asset) => ({ name: JobName.VIDEO_CONVERSION, data: { id: asset.id } })),
      );
    }

    return JobStatus.SUCCESS;
  }

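  // Probes the video, decides what (if anything) to transcode, and encodes it;
  // on failure with hardware acceleration, retries once with it disabled.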
  async handleVideoConversion({ id }: IEntityJob): Promise<JobStatus> {
    const [asset] = await this.assetRepository.getByIds([id]);
    if (!asset || asset.type !== AssetType.VIDEO) {
      return JobStatus.FAILED;
    }

    const input = asset.originalPath;
    const output = StorageCore.getEncodedVideoPath(asset);
    this.storageCore.ensureFolders(output);

    const { videoStreams, audioStreams, format } = await this.mediaRepository.probe(input);
    const mainVideoStream = this.getMainStream(videoStreams);
    const mainAudioStream = this.getMainStream(audioStreams);
    const containerExtension = format.formatName;
    if (!mainVideoStream || !containerExtension) {
      return JobStatus.FAILED;
    }

    if (!mainVideoStream.height || !mainVideoStream.width) {
      this.logger.warn(`Skipped transcoding for asset ${asset.id}: video stream dimensions are unknown`);
      return JobStatus.FAILED;
    }

    const { ffmpeg: config } = await this.configCore.getConfig();

    const target = this.getTranscodeTarget(config, mainVideoStream, mainAudioStream);
    if (target === TranscodeTarget.NONE) {
      if (asset.encodedVideoPath) {
        this.logger.log(`Transcoded video exists for asset ${asset.id}, but is no longer required. Deleting...`);
        await this.jobRepository.queue({ name: JobName.DELETE_FILES, data: { files: [asset.encodedVideoPath] } });
        await this.assetRepository.update({ id: asset.id, encodedVideoPath: null });
      }

      return JobStatus.SKIPPED;
    }

    let transcodeOptions;
    try {
      transcodeOptions = await this.getCodecConfig(config).then((c) =>
        c.getOptions(target, mainVideoStream, mainAudioStream),
      );
    } catch (error) {
      this.logger.error(`An error occurred while configuring transcoding options: ${error}`);
      return JobStatus.FAILED;
    }

    this.logger.log(`Started encoding video ${asset.id} ${JSON.stringify(transcodeOptions)}`);
    try {
      await this.mediaRepository.transcode(input, output, transcodeOptions);
    } catch (error) {
      this.logger.error(error);
      if (config.accel !== TranscodeHWAccel.DISABLED) {
        this.logger.error(
          `Error occurred during transcoding. Retrying with ${config.accel.toUpperCase()} acceleration disabled.`,
        );
      }
      config.accel = TranscodeHWAccel.DISABLED;
      transcodeOptions = await this.getCodecConfig(config).then((c) =>
        c.getOptions(target, mainVideoStream, mainAudioStream),
      );
      await this.mediaRepository.transcode(input, output, transcodeOptions);
    }

    this.logger.log(`Successfully encoded ${asset.id}`);

    await this.assetRepository.update({ id: asset.id, encodedVideoPath: output });

    return JobStatus.SUCCESS;
  }

  private getMainStream<T extends VideoStreamInfo | AudioStreamInfo>(streams: T[]): T {
    return streams.sort((stream1, stream2) => stream2.frameCount - stream1.frameCount)[0];
  }

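  // Maps the audio/video transcode checks onto a single target: ALL, AUDIO,
  // VIDEO, or NONE.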
  private getTranscodeTarget(
    config: SystemConfigFFmpegDto,
    videoStream: VideoStreamInfo | null,
    audioStream: AudioStreamInfo | null,
  ): TranscodeTarget {
    if (videoStream == null && audioStream == null) {
      return TranscodeTarget.NONE;
    }

    const isAudioTranscodeRequired = this.isAudioTranscodeRequired(config, audioStream);
    const isVideoTranscodeRequired = this.isVideoTranscodeRequired(config, videoStream);

    if (isAudioTranscodeRequired && isVideoTranscodeRequired) {
      return TranscodeTarget.ALL;
    }

    if (isAudioTranscodeRequired) {
      return TranscodeTarget.AUDIO;
    }

    if (isVideoTranscodeRequired) {
      return TranscodeTarget.VIDEO;
    }

    return TranscodeTarget.NONE;
  }

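  // Audio needs re-encoding when the policy demands it or the stream's codec
  // is not in the accepted list.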
  private isAudioTranscodeRequired(ffmpegConfig: SystemConfigFFmpegDto, stream: AudioStreamInfo | null): boolean {
    if (stream == null) {
      return false;
    }

    switch (ffmpegConfig.transcode) {
      case TranscodePolicy.DISABLED: {
        return false;
      }
      case TranscodePolicy.ALL: {
        return true;
      }
      case TranscodePolicy.REQUIRED:
      case TranscodePolicy.OPTIMAL:
      case TranscodePolicy.BITRATE: {
        return !ffmpegConfig.acceptedAudioCodecs.includes(stream.codecName as AudioCodec);
      }
      default: {
        throw new Error(`Unsupported transcode policy: ${ffmpegConfig.transcode}`);
      }
    }
  }

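  // Video needs re-encoding for non-accepted codecs or HDR content, plus
  // (depending on policy) when it exceeds the target resolution or bitrate.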
  private isVideoTranscodeRequired(ffmpegConfig: SystemConfigFFmpegDto, stream: VideoStreamInfo | null): boolean {
    if (stream == null) {
      return false;
    }

    const scalingEnabled = ffmpegConfig.targetResolution !== 'original';
    const targetRes = Number.parseInt(ffmpegConfig.targetResolution);
    const isLargerThanTargetRes = scalingEnabled && Math.min(stream.height, stream.width) > targetRes;
    const isLargerThanTargetBitrate = stream.bitrate > this.parseBitrateToBps(ffmpegConfig.maxBitrate);

    const isTargetVideoCodec = ffmpegConfig.acceptedVideoCodecs.includes(stream.codecName as VideoCodec);
    const isRequired = !isTargetVideoCodec || stream.isHDR;

    switch (ffmpegConfig.transcode) {
      case TranscodePolicy.DISABLED: {
        return false;
      }
      case TranscodePolicy.ALL: {
        return true;
      }
      case TranscodePolicy.REQUIRED: {
        return isRequired;
      }
      case TranscodePolicy.OPTIMAL: {
        return isRequired || isLargerThanTargetRes;
      }
      case TranscodePolicy.BITRATE: {
        return isRequired || isLargerThanTargetBitrate;
      }
      default: {
        throw new Error(`Unsupported transcode policy: ${ffmpegConfig.transcode}`);
      }
    }
  }

  async getCodecConfig(config: SystemConfigFFmpegDto) {
    if (config.accel === TranscodeHWAccel.DISABLED) {
      return this.getSWCodecConfig(config);
    }
    return this.getHWCodecConfig(config);
  }

  private getSWCodecConfig(config: SystemConfigFFmpegDto) {
    switch (config.targetVideoCodec) {
      case VideoCodec.H264: {
        return new H264Config(config);
      }
      case VideoCodec.HEVC: {
        return new HEVCConfig(config);
      }
      case VideoCodec.VP9: {
        return new VP9Config(config);
      }
      default: {
        throw new UnsupportedMediaTypeException(`Codec '${config.targetVideoCodec}' is unsupported`);
      }
    }
  }

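  // Selects a hardware encoder config, reading /dev/dri for available devices
  // and, for RKMPP, probing once for a usable Mali OpenCL device.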
  private async getHWCodecConfig(config: SystemConfigFFmpegDto) {
    let handler: VideoCodecHWConfig;
    let devices: string[];
    switch (config.accel) {
      case TranscodeHWAccel.NVENC: {
        handler = new NVENCConfig(config);
        break;
      }
      case TranscodeHWAccel.QSV: {
        devices = await this.storageRepository.readdir('/dev/dri');
        handler = new QSVConfig(config, devices);
        break;
      }
      case TranscodeHWAccel.VAAPI: {
        devices = await this.storageRepository.readdir('/dev/dri');
        handler = new VAAPIConfig(config, devices);
        break;
      }
      case TranscodeHWAccel.RKMPP: {
        if (this.hasOpenCL === undefined) {
          try {
            const maliIcdStat = await this.storageRepository.stat('/etc/OpenCL/vendors/mali.icd');
            const maliDeviceStat = await this.storageRepository.stat('/dev/mali0');
            this.hasOpenCL = maliIcdStat.isFile() && maliDeviceStat.isCharacterDevice();
          } catch {
            this.logger.warn('OpenCL not available for transcoding, using CPU instead.');
            this.hasOpenCL = false;
          }
        }

        devices = await this.storageRepository.readdir('/dev/dri');
        handler = new RKMPPConfig(config, devices, this.hasOpenCL);
        break;
      }
      default: {
        throw new UnsupportedMediaTypeException(`${config.accel.toUpperCase()} acceleration is unsupported`);
      }
    }
    if (!handler.getSupportedCodecs().includes(config.targetVideoCodec)) {
      throw new UnsupportedMediaTypeException(
        `${config.accel.toUpperCase()} acceleration does not support codec '${config.targetVideoCodec.toUpperCase()}'. Supported codecs: ${handler.getSupportedCodecs()}`,
      );
    }

    return handler;
  }

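  // Treats an image as sRGB when its metadata says so, when it is 8-bit with
  // no colorspace/profile info, or when no relevant metadata exists at all.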
  isSRGB(asset: AssetEntity): boolean {
    const { colorspace, profileDescription, bitsPerSample } = asset.exifInfo ?? {};
    if (colorspace || profileDescription) {
      return [colorspace, profileDescription].some((s) => s?.toLowerCase().includes('srgb'));
    } else if (bitsPerSample) {
      // assume sRGB for 8-bit images with no color profile or colorspace metadata
      return bitsPerSample === 8;
    } else {
      // assume sRGB for images with no relevant metadata
      return true;
    }
  }

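  // Converts bitrate strings such as '4350k' or '2m' into bits per second.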
  parseBitrateToBps(bitrateString: string) {
    const bitrateValue = Number.parseInt(bitrateString);

    if (Number.isNaN(bitrateValue)) {
      return 0;
    }

    if (bitrateString.toLowerCase().endsWith('k')) {
      return bitrateValue * 1000; // Kilobits per second to bits per second
    } else if (bitrateString.toLowerCase().endsWith('m')) {
      return bitrateValue * 1_000_000; // Megabits per second to bits per second
    } else {
      return bitrateValue;
    }
  }
}
@@ -0,0 +1,783 @@
import { BinaryField } from 'exiftool-vendored';
import { when } from 'jest-when';
import { randomBytes } from 'node:crypto';
import { Stats } from 'node:fs';
import { constants } from 'node:fs/promises';
import { JobName } from 'src/domain/job/job.constants';
import { AssetType } from 'src/entities/asset.entity';
import { ExifEntity } from 'src/entities/exif.entity';
import { SystemConfigKey } from 'src/entities/system-config.entity';
import { IAlbumRepository } from 'src/interfaces/album.repository';
import { IAssetRepository, WithoutProperty } from 'src/interfaces/asset.repository';
import { ClientEvent, ICommunicationRepository } from 'src/interfaces/communication.repository';
import { ICryptoRepository } from 'src/interfaces/crypto.repository';
import { IDatabaseRepository } from 'src/interfaces/database.repository';
import { IJobRepository, JobStatus } from 'src/interfaces/job.repository';
import { IMediaRepository } from 'src/interfaces/media.repository';
import { IMetadataRepository, ImmichTags } from 'src/interfaces/metadata.repository';
import { IMoveRepository } from 'src/interfaces/move.repository';
import { IPersonRepository } from 'src/interfaces/person.repository';
import { IStorageRepository } from 'src/interfaces/storage.repository';
import { ISystemConfigRepository } from 'src/interfaces/system-config.repository';
import { MetadataService, Orientation } from 'src/services/metadata.service';
import { assetStub } from 'test/fixtures/asset.stub';
import { fileStub } from 'test/fixtures/file.stub';
import { probeStub } from 'test/fixtures/media.stub';
import { newAlbumRepositoryMock } from 'test/repositories/album.repository.mock';
import { newAssetRepositoryMock } from 'test/repositories/asset.repository.mock';
import { newCommunicationRepositoryMock } from 'test/repositories/communication.repository.mock';
import { newCryptoRepositoryMock } from 'test/repositories/crypto.repository.mock';
import { newDatabaseRepositoryMock } from 'test/repositories/database.repository.mock';
import { newJobRepositoryMock } from 'test/repositories/job.repository.mock';
import { newMediaRepositoryMock } from 'test/repositories/media.repository.mock';
import { newMetadataRepositoryMock } from 'test/repositories/metadata.repository.mock';
import { newMoveRepositoryMock } from 'test/repositories/move.repository.mock';
import { newPersonRepositoryMock } from 'test/repositories/person.repository.mock';
import { newStorageRepositoryMock } from 'test/repositories/storage.repository.mock';
import { newSystemConfigRepositoryMock } from 'test/repositories/system-config.repository.mock';

describe(MetadataService.name, () => {
  let albumMock: jest.Mocked<IAlbumRepository>;
  let assetMock: jest.Mocked<IAssetRepository>;
  let configMock: jest.Mocked<ISystemConfigRepository>;
  let cryptoRepository: jest.Mocked<ICryptoRepository>;
  let jobMock: jest.Mocked<IJobRepository>;
  let metadataMock: jest.Mocked<IMetadataRepository>;
  let moveMock: jest.Mocked<IMoveRepository>;
  let mediaMock: jest.Mocked<IMediaRepository>;
  let personMock: jest.Mocked<IPersonRepository>;
  let storageMock: jest.Mocked<IStorageRepository>;
  let communicationMock: jest.Mocked<ICommunicationRepository>;
  let databaseMock: jest.Mocked<IDatabaseRepository>;
  let sut: MetadataService;

  beforeEach(() => {
    albumMock = newAlbumRepositoryMock();
    assetMock = newAssetRepositoryMock();
    configMock = newSystemConfigRepositoryMock();
    cryptoRepository = newCryptoRepositoryMock();
    jobMock = newJobRepositoryMock();
    metadataMock = newMetadataRepositoryMock();
    moveMock = newMoveRepositoryMock();
    personMock = newPersonRepositoryMock();
    communicationMock = newCommunicationRepositoryMock();
    storageMock = newStorageRepositoryMock();
    mediaMock = newMediaRepositoryMock();
    databaseMock = newDatabaseRepositoryMock();

    sut = new MetadataService(
      albumMock,
      assetMock,
      communicationMock,
      cryptoRepository,
      databaseMock,
      jobMock,
      mediaMock,
      metadataMock,
      moveMock,
      personMock,
      storageMock,
      configMock,
    );
  });

  afterEach(async () => {
    await sut.teardown();
  });

  it('should be defined', () => {
    expect(sut).toBeDefined();
  });

  describe('init', () => {
    beforeEach(async () => {
      configMock.load.mockResolvedValue([{ key: SystemConfigKey.REVERSE_GEOCODING_ENABLED, value: true }]);

      await sut.init();
    });

    it('should return if reverse geocoding is disabled', async () => {
      configMock.load.mockResolvedValue([{ key: SystemConfigKey.REVERSE_GEOCODING_ENABLED, value: false }]);

      await sut.init();
      expect(jobMock.pause).toHaveBeenCalledTimes(1);
      expect(metadataMock.init).toHaveBeenCalledTimes(1);
      expect(jobMock.resume).toHaveBeenCalledTimes(1);
    });
  });

  describe('handleLivePhotoLinking', () => {
    it('should handle an asset that could not be found', async () => {
      await expect(sut.handleLivePhotoLinking({ id: assetStub.image.id })).resolves.toBe(JobStatus.FAILED);
      expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id], { exifInfo: true });
      expect(assetMock.findLivePhotoMatch).not.toHaveBeenCalled();
      expect(assetMock.update).not.toHaveBeenCalled();
      expect(albumMock.removeAsset).not.toHaveBeenCalled();
    });

    it('should handle an asset without exif info', async () => {
      assetMock.getByIds.mockResolvedValue([{ ...assetStub.image, exifInfo: undefined }]);

      await expect(sut.handleLivePhotoLinking({ id: assetStub.image.id })).resolves.toBe(JobStatus.FAILED);
      expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id], { exifInfo: true });
      expect(assetMock.findLivePhotoMatch).not.toHaveBeenCalled();
      expect(assetMock.update).not.toHaveBeenCalled();
      expect(albumMock.removeAsset).not.toHaveBeenCalled();
    });

    it('should handle livePhotoCID not set', async () => {
      assetMock.getByIds.mockResolvedValue([{ ...assetStub.image }]);

      await expect(sut.handleLivePhotoLinking({ id: assetStub.image.id })).resolves.toBe(JobStatus.SKIPPED);
      expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id], { exifInfo: true });
      expect(assetMock.findLivePhotoMatch).not.toHaveBeenCalled();
      expect(assetMock.update).not.toHaveBeenCalled();
      expect(albumMock.removeAsset).not.toHaveBeenCalled();
    });

    it('should handle not finding a match', async () => {
      assetMock.getByIds.mockResolvedValue([
        {
          ...assetStub.livePhotoMotionAsset,
          exifInfo: { livePhotoCID: assetStub.livePhotoStillAsset.id } as ExifEntity,
        },
      ]);

      await expect(sut.handleLivePhotoLinking({ id: assetStub.livePhotoMotionAsset.id })).resolves.toBe(
        JobStatus.SKIPPED,
      );
      expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.livePhotoMotionAsset.id], { exifInfo: true });
      expect(assetMock.findLivePhotoMatch).toHaveBeenCalledWith({
        livePhotoCID: assetStub.livePhotoStillAsset.id,
        ownerId: assetStub.livePhotoMotionAsset.ownerId,
        otherAssetId: assetStub.livePhotoMotionAsset.id,
        type: AssetType.IMAGE,
      });
      expect(assetMock.update).not.toHaveBeenCalled();
      expect(albumMock.removeAsset).not.toHaveBeenCalled();
    });

    it('should link photo and video', async () => {
      assetMock.getByIds.mockResolvedValue([
        {
          ...assetStub.livePhotoStillAsset,
          exifInfo: { livePhotoCID: assetStub.livePhotoMotionAsset.id } as ExifEntity,
        },
      ]);
      assetMock.findLivePhotoMatch.mockResolvedValue(assetStub.livePhotoMotionAsset);

      await expect(sut.handleLivePhotoLinking({ id: assetStub.livePhotoStillAsset.id })).resolves.toBe(
        JobStatus.SUCCESS,
      );
      expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.livePhotoStillAsset.id], { exifInfo: true });
      expect(assetMock.findLivePhotoMatch).toHaveBeenCalledWith({
        livePhotoCID: assetStub.livePhotoMotionAsset.id,
        ownerId: assetStub.livePhotoStillAsset.ownerId,
        otherAssetId: assetStub.livePhotoStillAsset.id,
        type: AssetType.VIDEO,
      });
      expect(assetMock.update).toHaveBeenCalledWith({
        id: assetStub.livePhotoStillAsset.id,
        livePhotoVideoId: assetStub.livePhotoMotionAsset.id,
      });
      expect(assetMock.update).toHaveBeenCalledWith({ id: assetStub.livePhotoMotionAsset.id, isVisible: false });
      expect(albumMock.removeAsset).toHaveBeenCalledWith(assetStub.livePhotoMotionAsset.id);
    });

    it('should notify clients on live photo link', async () => {
      assetMock.getByIds.mockResolvedValue([
        {
          ...assetStub.livePhotoStillAsset,
          exifInfo: { livePhotoCID: assetStub.livePhotoMotionAsset.id } as ExifEntity,
        },
      ]);
      assetMock.findLivePhotoMatch.mockResolvedValue(assetStub.livePhotoMotionAsset);

      await expect(sut.handleLivePhotoLinking({ id: assetStub.livePhotoStillAsset.id })).resolves.toBe(
        JobStatus.SUCCESS,
      );
      expect(communicationMock.send).toHaveBeenCalledWith(
        ClientEvent.ASSET_HIDDEN,
        assetStub.livePhotoMotionAsset.ownerId,
        assetStub.livePhotoMotionAsset.id,
      );
    });
  });

  describe('handleQueueMetadataExtraction', () => {
    it('should queue metadata extraction for all assets without exif values', async () => {
      assetMock.getWithout.mockResolvedValue({ items: [assetStub.image], hasNextPage: false });

      await expect(sut.handleQueueMetadataExtraction({ force: false })).resolves.toBe(JobStatus.SUCCESS);
      expect(assetMock.getWithout).toHaveBeenCalled();
      expect(jobMock.queueAll).toHaveBeenCalledWith([
        {
          name: JobName.METADATA_EXTRACTION,
          data: { id: assetStub.image.id },
        },
      ]);
    });

    it('should queue metadata extraction for all assets', async () => {
      assetMock.getAll.mockResolvedValue({ items: [assetStub.image], hasNextPage: false });

      await expect(sut.handleQueueMetadataExtraction({ force: true })).resolves.toBe(JobStatus.SUCCESS);
      expect(assetMock.getAll).toHaveBeenCalled();
      expect(jobMock.queueAll).toHaveBeenCalledWith([
        {
          name: JobName.METADATA_EXTRACTION,
          data: { id: assetStub.image.id },
        },
      ]);
    });
  });

  describe('handleMetadataExtraction', () => {
    beforeEach(() => {
      storageMock.stat.mockResolvedValue({ size: 123_456 } as Stats);
    });

    it('should handle an asset that could not be found', async () => {
      await expect(sut.handleMetadataExtraction({ id: assetStub.image.id })).resolves.toBe(JobStatus.FAILED);

      expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id]);
      expect(assetMock.upsertExif).not.toHaveBeenCalled();
      expect(assetMock.update).not.toHaveBeenCalled();
    });

    it('should handle a date in a sidecar file', async () => {
      const originalDate = new Date('2023-11-21T16:13:17.517Z');
      const sidecarDate = new Date('2022-01-01T00:00:00.000Z');
      assetMock.getByIds.mockResolvedValue([assetStub.sidecar]);
      when(metadataMock.readTags)
        .calledWith(assetStub.sidecar.originalPath)
        // higher priority tag
        .mockResolvedValue({ CreationDate: originalDate.toISOString() });
      when(metadataMock.readTags)
        .calledWith(assetStub.sidecar.sidecarPath as string)
        // lower priority tag, but in sidecar
        .mockResolvedValue({ CreateDate: sidecarDate.toISOString() });

      await sut.handleMetadataExtraction({ id: assetStub.image.id });
      expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.sidecar.id]);
      expect(assetMock.upsertExif).toHaveBeenCalledWith(expect.objectContaining({ dateTimeOriginal: sidecarDate }));
      expect(assetMock.update).toHaveBeenCalledWith({
        id: assetStub.image.id,
        duration: null,
        fileCreatedAt: sidecarDate,
        localDateTime: sidecarDate,
      });
    });

    it('should handle lists of numbers', async () => {
      assetMock.getByIds.mockResolvedValue([assetStub.image]);
      metadataMock.readTags.mockResolvedValue({ ISO: [160] as any });

      await sut.handleMetadataExtraction({ id: assetStub.image.id });
      expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id]);
      expect(assetMock.upsertExif).toHaveBeenCalledWith(expect.objectContaining({ iso: 160 }));
      expect(assetMock.update).toHaveBeenCalledWith({
        id: assetStub.image.id,
        duration: null,
        fileCreatedAt: assetStub.image.createdAt,
        localDateTime: new Date('2023-02-23T05:06:29.716Z'),
      });
    });

    it('should apply reverse geocoding', async () => {
      assetMock.getByIds.mockResolvedValue([assetStub.withLocation]);
      configMock.load.mockResolvedValue([{ key: SystemConfigKey.REVERSE_GEOCODING_ENABLED, value: true }]);
      metadataMock.reverseGeocode.mockResolvedValue({ city: 'City', state: 'State', country: 'Country' });
      metadataMock.readTags.mockResolvedValue({
        GPSLatitude: assetStub.withLocation.exifInfo!.latitude!,
        GPSLongitude: assetStub.withLocation.exifInfo!.longitude!,
      });

      await sut.handleMetadataExtraction({ id: assetStub.image.id });
      expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id]);
      expect(assetMock.upsertExif).toHaveBeenCalledWith(
        expect.objectContaining({ city: 'City', state: 'State', country: 'Country' }),
      );
      expect(assetMock.update).toHaveBeenCalledWith({
        id: assetStub.withLocation.id,
        duration: null,
        fileCreatedAt: assetStub.withLocation.createdAt,
        localDateTime: new Date('2023-02-22T05:06:29.716Z'),
      });
    });

    it('should discard latitude and longitude on null island', async () => {
      assetMock.getByIds.mockResolvedValue([assetStub.withLocation]);
      metadataMock.readTags.mockResolvedValue({
        GPSLatitude: 0,
        GPSLongitude: 0,
      });

      await sut.handleMetadataExtraction({ id: assetStub.image.id });
      expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id]);
      expect(assetMock.upsertExif).toHaveBeenCalledWith(expect.objectContaining({ latitude: null, longitude: null }));
    });

    it('should not apply motion photos if asset is video', async () => {
      assetMock.getByIds.mockResolvedValue([{ ...assetStub.livePhotoMotionAsset, isVisible: true }]);
      mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);

      await sut.handleMetadataExtraction({ id: assetStub.livePhotoMotionAsset.id });
      expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.livePhotoMotionAsset.id]);
      expect(storageMock.writeFile).not.toHaveBeenCalled();
      expect(jobMock.queue).not.toHaveBeenCalled();
      expect(jobMock.queueAll).not.toHaveBeenCalled();
      expect(assetMock.update).not.toHaveBeenCalledWith(
        expect.objectContaining({ assetType: AssetType.VIDEO, isVisible: false }),
      );
    });

    it('should extract the correct video orientation', async () => {
      assetMock.getByIds.mockResolvedValue([assetStub.video]);
      mediaMock.probe.mockResolvedValue(probeStub.videoStreamVertical2160p);
      metadataMock.readTags.mockResolvedValue(null);

      await sut.handleMetadataExtraction({ id: assetStub.video.id });

      expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.video.id]);
      expect(assetMock.upsertExif).toHaveBeenCalledWith(
        expect.objectContaining({ orientation: Orientation.Rotate270CW }),
      );
    });

    it('should extract the MotionPhotoVideo tag from Samsung HEIC motion photos', async () => {
      assetMock.getByIds.mockResolvedValue([{ ...assetStub.livePhotoStillAsset, livePhotoVideoId: null }]);
      metadataMock.readTags.mockResolvedValue({
        Directory: 'foo/bar/',
        MotionPhotoVideo: new BinaryField(0, ''),
        // The below two are included to ensure that the MotionPhotoVideo tag is extracted
        // instead of the EmbeddedVideoFile, since HEIC MotionPhotos include both
        EmbeddedVideoFile: new BinaryField(0, ''),
        EmbeddedVideoType: 'MotionPhoto_Data',
      });
      cryptoRepository.hashSha1.mockReturnValue(randomBytes(512));
      assetMock.getByChecksum.mockResolvedValue(null);
      assetMock.create.mockResolvedValue(assetStub.livePhotoMotionAsset);
      cryptoRepository.randomUUID.mockReturnValue(fileStub.livePhotoMotion.uuid);
      const video = randomBytes(512);
      metadataMock.extractBinaryTag.mockResolvedValue(video);

      await sut.handleMetadataExtraction({ id: assetStub.livePhotoStillAsset.id });
      expect(metadataMock.extractBinaryTag).toHaveBeenCalledWith(
        assetStub.livePhotoStillAsset.originalPath,
        'MotionPhotoVideo',
      );
      expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.livePhotoStillAsset.id]);
      expect(assetMock.create).toHaveBeenCalled(); // This could have arguments added
      expect(storageMock.writeFile).toHaveBeenCalledWith(assetStub.livePhotoMotionAsset.originalPath, video);
      expect(assetMock.update).toHaveBeenNthCalledWith(1, {
        id: assetStub.livePhotoStillAsset.id,
        livePhotoVideoId: fileStub.livePhotoMotion.uuid,
      });
    });

    it('should extract the EmbeddedVideo tag from Samsung JPEG motion photos', async () => {
      assetMock.getByIds.mockResolvedValue([{ ...assetStub.livePhotoStillAsset, livePhotoVideoId: null }]);
      metadataMock.readTags.mockResolvedValue({
        Directory: 'foo/bar/',
        EmbeddedVideoFile: new BinaryField(0, ''),
        EmbeddedVideoType: 'MotionPhoto_Data',
      });
      cryptoRepository.hashSha1.mockReturnValue(randomBytes(512));
      assetMock.getByChecksum.mockResolvedValue(null);
      assetMock.create.mockResolvedValue(assetStub.livePhotoMotionAsset);
      cryptoRepository.randomUUID.mockReturnValue(fileStub.livePhotoMotion.uuid);
      const video = randomBytes(512);
      metadataMock.extractBinaryTag.mockResolvedValue(video);

      await sut.handleMetadataExtraction({ id: assetStub.livePhotoStillAsset.id });
      expect(metadataMock.extractBinaryTag).toHaveBeenCalledWith(
        assetStub.livePhotoStillAsset.originalPath,
        'EmbeddedVideoFile',
      );
      expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.livePhotoStillAsset.id]);
      expect(assetMock.create).toHaveBeenCalled(); // This could have arguments added
      expect(storageMock.writeFile).toHaveBeenCalledWith(assetStub.livePhotoMotionAsset.originalPath, video);
      expect(assetMock.update).toHaveBeenNthCalledWith(1, {
        id: assetStub.livePhotoStillAsset.id,
        livePhotoVideoId: fileStub.livePhotoMotion.uuid,
      });
    });

    it('should extract the motion photo video from the XMP directory entry', async () => {
      assetMock.getByIds.mockResolvedValue([{ ...assetStub.livePhotoStillAsset, livePhotoVideoId: null }]);
      metadataMock.readTags.mockResolvedValue({
        Directory: 'foo/bar/',
        MotionPhoto: 1,
        MicroVideo: 1,
        MicroVideoOffset: 1,
      });
      cryptoRepository.hashSha1.mockReturnValue(randomBytes(512));
      assetMock.getByChecksum.mockResolvedValue(null);
      assetMock.create.mockResolvedValue(assetStub.livePhotoMotionAsset);
      cryptoRepository.randomUUID.mockReturnValue(fileStub.livePhotoMotion.uuid);
      const video = randomBytes(512);
      storageMock.readFile.mockResolvedValue(video);

      await sut.handleMetadataExtraction({ id: assetStub.livePhotoStillAsset.id });
      expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.livePhotoStillAsset.id]);
      expect(storageMock.readFile).toHaveBeenCalledWith(assetStub.livePhotoStillAsset.originalPath, expect.any(Object));
      expect(assetMock.create).toHaveBeenCalled(); // This could have arguments added
      expect(storageMock.writeFile).toHaveBeenCalledWith(assetStub.livePhotoMotionAsset.originalPath, video);
      expect(assetMock.update).toHaveBeenNthCalledWith(1, {
        id: assetStub.livePhotoStillAsset.id,
        livePhotoVideoId: fileStub.livePhotoMotion.uuid,
      });
    });

    it('should delete old motion photo video assets if they do not match what is extracted', async () => {
      assetMock.getByIds.mockResolvedValue([assetStub.livePhotoStillAsset]);
      metadataMock.readTags.mockResolvedValue({
        Directory: 'foo/bar/',
        MotionPhoto: 1,
        MicroVideo: 1,
        MicroVideoOffset: 1,
      });
      cryptoRepository.hashSha1.mockReturnValue(randomBytes(512));
      assetMock.getByChecksum.mockResolvedValue(null);
      assetMock.create.mockResolvedValue(assetStub.livePhotoMotionAsset);

      await sut.handleMetadataExtraction({ id: assetStub.livePhotoStillAsset.id });
      expect(jobMock.queue).toHaveBeenNthCalledWith(2, {
        name: JobName.ASSET_DELETION,
        data: { id: assetStub.livePhotoStillAsset.livePhotoVideoId },
      });
    });

    it('should not create a new motion photo video asset if the checksum of the extracted video matches an existing asset', async () => {
      assetMock.getByIds.mockResolvedValue([assetStub.livePhotoStillAsset]);
      metadataMock.readTags.mockResolvedValue({
        Directory: 'foo/bar/',
        MotionPhoto: 1,
        MicroVideo: 1,
        MicroVideoOffset: 1,
      });
      cryptoRepository.hashSha1.mockReturnValue(randomBytes(512));
      assetMock.getByChecksum.mockResolvedValue(assetStub.livePhotoMotionAsset);

      await sut.handleMetadataExtraction({ id: assetStub.livePhotoStillAsset.id });
      expect(assetMock.create).toHaveBeenCalledTimes(0);
      expect(storageMock.writeFile).toHaveBeenCalledTimes(0);
      // The still asset gets saved by handleMetadataExtraction, but not the video
      expect(assetMock.update).toHaveBeenCalledTimes(1);
      expect(jobMock.queue).toHaveBeenCalledTimes(0);
    });

    it('should save all metadata', async () => {
      const tags: ImmichTags = {
        BitsPerSample: 1,
        ComponentBitDepth: 1,
        ImagePixelDepth: '1',
        BitDepth: 1,
        ColorBitDepth: 1,
        ColorSpace: '1',
        DateTimeOriginal: new Date('1970-01-01').toISOString(),
        ExposureTime: '100ms',
        FocalLength: 20,
        ImageDescription: 'test description',
        ISO: 100,
        LensModel: 'test lens',
        MediaGroupUUID: 'livePhoto',
        Make: 'test-factory',
        Model: "'mockel'",
        ModifyDate: new Date('1970-01-01').toISOString(),
        Orientation: 0,
        ProfileDescription: 'extensive description',
        ProjectionType: 'equirectangular',
        tz: '+02:00',
      };
      assetMock.getByIds.mockResolvedValue([assetStub.image]);
      metadataMock.readTags.mockResolvedValue(tags);

      await sut.handleMetadataExtraction({ id: assetStub.image.id });
      expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id]);
      expect(assetMock.upsertExif).toHaveBeenCalledWith({
        assetId: assetStub.image.id,
        bitsPerSample: expect.any(Number),
        autoStackId: null,
        colorspace: tags.ColorSpace,
        dateTimeOriginal: new Date('1970-01-01'),
        description: tags.ImageDescription,
        exifImageHeight: null,
        exifImageWidth: null,
        exposureTime: tags.ExposureTime,
        fNumber: null,
        fileSizeInByte: 123_456,
        focalLength: tags.FocalLength,
        fps: null,
        iso: tags.ISO,
        latitude: null,
        lensModel: tags.LensModel,
        livePhotoCID: tags.MediaGroupUUID,
        longitude: null,
        make: tags.Make,
        model: tags.Model,
        modifyDate: expect.any(Date),
        orientation: tags.Orientation?.toString(),
        profileDescription: tags.ProfileDescription,
        projectionType: 'EQUIRECTANGULAR',
        timeZone: tags.tz,
      });
      expect(assetMock.update).toHaveBeenCalledWith({
        id: assetStub.image.id,
        duration: null,
        fileCreatedAt: new Date('1970-01-01'),
        localDateTime: new Date('1970-01-01'),
      });
    });

    it('should handle duration', async () => {
      assetMock.getByIds.mockResolvedValue([assetStub.image]);
      metadataMock.readTags.mockResolvedValue({ Duration: 6.21 });

      await sut.handleMetadataExtraction({ id: assetStub.image.id });

      expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id]);
      expect(assetMock.upsertExif).toHaveBeenCalled();
      expect(assetMock.update).toHaveBeenCalledWith(
        expect.objectContaining({
          id: assetStub.image.id,
          duration: '00:00:06.210',
        }),
      );
    });

    it('should handle duration in ISO time string', async () => {
      assetMock.getByIds.mockResolvedValue([assetStub.image]);
      metadataMock.readTags.mockResolvedValue({ Duration: '00:00:08.41' });

      await sut.handleMetadataExtraction({ id: assetStub.image.id });

      expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id]);
      expect(assetMock.upsertExif).toHaveBeenCalled();
      expect(assetMock.update).toHaveBeenCalledWith(
        expect.objectContaining({
          id: assetStub.image.id,
          duration: '00:00:08.410',
        }),
      );
    });

    it('should handle duration as an object without Scale', async () => {
      assetMock.getByIds.mockResolvedValue([assetStub.image]);
      metadataMock.readTags.mockResolvedValue({ Duration: { Value: 6.2 } });

      await sut.handleMetadataExtraction({ id: assetStub.image.id });

      expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id]);
      expect(assetMock.upsertExif).toHaveBeenCalled();
      expect(assetMock.update).toHaveBeenCalledWith(
        expect.objectContaining({
          id: assetStub.image.id,
          duration: '00:00:06.200',
        }),
      );
    });

    it('should handle duration with scale', async () => {
      assetMock.getByIds.mockResolvedValue([assetStub.image]);
      metadataMock.readTags.mockResolvedValue({ Duration: { Scale: 1.111_111_111_111_11e-5, Value: 558_720 } });

      await sut.handleMetadataExtraction({ id: assetStub.image.id });

      expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id]);
      expect(assetMock.upsertExif).toHaveBeenCalled();
      expect(assetMock.update).toHaveBeenCalledWith(
        expect.objectContaining({
          id: assetStub.image.id,
          duration: '00:00:06.207',
        }),
      );
    });
  });

  describe('handleQueueSidecar', () => {
    it('should queue assets with sidecar files', async () => {
      assetMock.getAll.mockResolvedValue({ items: [assetStub.sidecar], hasNextPage: false });

      await sut.handleQueueSidecar({ force: true });

      expect(assetMock.getAll).toHaveBeenCalledWith({ take: 1000, skip: 0 });
      expect(assetMock.getWithout).not.toHaveBeenCalled();
      expect(jobMock.queueAll).toHaveBeenCalledWith([
        {
          name: JobName.SIDECAR_SYNC,
          data: { id: assetStub.sidecar.id },
        },
      ]);
    });

    it('should queue assets without sidecar files', async () => {
      assetMock.getWithout.mockResolvedValue({ items: [assetStub.image], hasNextPage: false });

      await sut.handleQueueSidecar({ force: false });

      expect(assetMock.getWithout).toHaveBeenCalledWith({ take: 1000, skip: 0 }, WithoutProperty.SIDECAR);
      expect(assetMock.getAll).not.toHaveBeenCalled();
      expect(jobMock.queueAll).toHaveBeenCalledWith([
        {
          name: JobName.SIDECAR_DISCOVERY,
          data: { id: assetStub.image.id },
        },
      ]);
    });
  });

  describe('handleSidecarSync', () => {
    it('should do nothing if asset could not be found', async () => {
      assetMock.getByIds.mockResolvedValue([]);
      await expect(sut.handleSidecarSync({ id: assetStub.image.id })).resolves.toBe(JobStatus.FAILED);
      expect(assetMock.update).not.toHaveBeenCalled();
    });

    it('should do nothing if asset has no sidecar path', async () => {
      assetMock.getByIds.mockResolvedValue([assetStub.image]);
      await expect(sut.handleSidecarSync({ id: assetStub.image.id })).resolves.toBe(JobStatus.FAILED);
      expect(assetMock.update).not.toHaveBeenCalled();
    });

    it('should set sidecar path if exists (sidecar named photo.ext.xmp)', async () => {
      assetMock.getByIds.mockResolvedValue([assetStub.sidecar]);
      storageMock.checkFileExists.mockResolvedValue(true);

      await expect(sut.handleSidecarSync({ id: assetStub.sidecar.id })).resolves.toBe(JobStatus.SUCCESS);
      expect(storageMock.checkFileExists).toHaveBeenCalledWith(`${assetStub.sidecar.originalPath}.xmp`, constants.R_OK);
      expect(assetMock.update).toHaveBeenCalledWith({
        id: assetStub.sidecar.id,
        sidecarPath: assetStub.sidecar.sidecarPath,
      });
    });

    it('should set sidecar path if exists (sidecar named photo.xmp)', async () => {
      assetMock.getByIds.mockResolvedValue([assetStub.sidecarWithoutExt]);
      storageMock.checkFileExists.mockResolvedValueOnce(false);
      storageMock.checkFileExists.mockResolvedValueOnce(true);

      await expect(sut.handleSidecarSync({ id: assetStub.sidecarWithoutExt.id })).resolves.toBe(JobStatus.SUCCESS);
      expect(storageMock.checkFileExists).toHaveBeenNthCalledWith(
        2,
        assetStub.sidecarWithoutExt.sidecarPath,
        constants.R_OK,
      );
      expect(assetMock.update).toHaveBeenCalledWith({
        id: assetStub.sidecarWithoutExt.id,
        sidecarPath: assetStub.sidecarWithoutExt.sidecarPath,
      });
    });

    it('should set sidecar path if exists (two sidecars named photo.ext.xmp and photo.xmp, should pick photo.ext.xmp)', async () => {
      assetMock.getByIds.mockResolvedValue([assetStub.sidecar]);
      storageMock.checkFileExists.mockResolvedValueOnce(true);
      storageMock.checkFileExists.mockResolvedValueOnce(true);

      await expect(sut.handleSidecarSync({ id: assetStub.sidecar.id })).resolves.toBe(JobStatus.SUCCESS);
      expect(storageMock.checkFileExists).toHaveBeenNthCalledWith(1, assetStub.sidecar.sidecarPath, constants.R_OK);
      expect(storageMock.checkFileExists).toHaveBeenNthCalledWith(
        2,
        assetStub.sidecarWithoutExt.sidecarPath,
        constants.R_OK,
      );
      expect(assetMock.update).toHaveBeenCalledWith({
        id: assetStub.sidecar.id,
        sidecarPath: assetStub.sidecar.sidecarPath,
      });
    });

    it('should unset sidecar path if file does not exist anymore', async () => {
      assetMock.getByIds.mockResolvedValue([assetStub.sidecar]);
      storageMock.checkFileExists.mockResolvedValue(false);

      await expect(sut.handleSidecarSync({ id: assetStub.sidecar.id })).resolves.toBe(JobStatus.SUCCESS);
      expect(storageMock.checkFileExists).toHaveBeenCalledWith(`${assetStub.sidecar.originalPath}.xmp`, constants.R_OK);
      expect(assetMock.update).toHaveBeenCalledWith({
        id: assetStub.sidecar.id,
        sidecarPath: null,
      });
    });
  });

  describe('handleSidecarDiscovery', () => {
    it('should skip hidden assets', async () => {
      assetMock.getByIds.mockResolvedValue([assetStub.livePhotoMotionAsset]);
      await sut.handleSidecarDiscovery({ id: assetStub.livePhotoMotionAsset.id });
      expect(storageMock.checkFileExists).not.toHaveBeenCalled();
    });

    it('should skip assets with a sidecar path', async () => {
      assetMock.getByIds.mockResolvedValue([assetStub.sidecar]);
      await sut.handleSidecarDiscovery({ id: assetStub.sidecar.id });
      expect(storageMock.checkFileExists).not.toHaveBeenCalled();
    });

    it('should do nothing when a sidecar is not found', async () => {
      assetMock.getByIds.mockResolvedValue([assetStub.image]);
      storageMock.checkFileExists.mockResolvedValue(false);
      await sut.handleSidecarDiscovery({ id: assetStub.image.id });
      expect(assetMock.update).not.toHaveBeenCalled();
    });

    it('should update an image asset when a sidecar is found', async () => {
      assetMock.getByIds.mockResolvedValue([assetStub.image]);
      storageMock.checkFileExists.mockResolvedValue(true);
      await sut.handleSidecarDiscovery({ id: assetStub.image.id });
      expect(storageMock.checkFileExists).toHaveBeenCalledWith('/original/path.jpg.xmp', constants.R_OK);
      expect(assetMock.update).toHaveBeenCalledWith({
        id: assetStub.image.id,
        sidecarPath: '/original/path.jpg.xmp',
      });
    });

    it('should update a video asset when a sidecar is found', async () => {
      assetMock.getByIds.mockResolvedValue([assetStub.video]);
      storageMock.checkFileExists.mockResolvedValue(true);
      await sut.handleSidecarDiscovery({ id: assetStub.video.id });
      expect(storageMock.checkFileExists).toHaveBeenCalledWith('/original/path.ext.xmp', constants.R_OK);
      expect(assetMock.update).toHaveBeenCalledWith({
        id: assetStub.image.id,
        sidecarPath: '/original/path.ext.xmp',
      });
    });
  });

  describe('handleSidecarWrite', () => {
    it('should skip assets that do not exist anymore', async () => {
      assetMock.getByIds.mockResolvedValue([]);
      await expect(sut.handleSidecarWrite({ id: 'asset-123' })).resolves.toBe(JobStatus.FAILED);
      expect(metadataMock.writeTags).not.toHaveBeenCalled();
    });

    it('should skip jobs with no metadata', async () => {
      assetMock.getByIds.mockResolvedValue([assetStub.sidecar]);
      await expect(sut.handleSidecarWrite({ id: assetStub.sidecar.id })).resolves.toBe(JobStatus.SKIPPED);
      expect(metadataMock.writeTags).not.toHaveBeenCalled();
    });

    it('should write tags', async () => {
      const description = 'this is a description';
      const gps = 12;
      const date = '2023-11-22T04:56:12.196Z';

      assetMock.getByIds.mockResolvedValue([assetStub.sidecar]);
      await expect(
        sut.handleSidecarWrite({
          id: assetStub.sidecar.id,
          description,
          latitude: gps,
          longitude: gps,
          dateTimeOriginal: date,
        }),
      ).resolves.toBe(JobStatus.SUCCESS);
      expect(metadataMock.writeTags).toHaveBeenCalledWith(assetStub.sidecar.sidecarPath, {
        ImageDescription: description,
        CreationDate: date,
        GPSLatitude: gps,
        GPSLongitude: gps,
      });
    });
  });
});
@@ -0,0 +1,600 @@
import { Inject, Injectable } from '@nestjs/common';
import { ExifDateTime, Tags } from 'exiftool-vendored';
import { firstDateTime } from 'exiftool-vendored/dist/FirstDateTime';
import _ from 'lodash';
import { Duration } from 'luxon';
import { constants } from 'node:fs/promises';
import path from 'node:path';
import { Subscription } from 'rxjs';
import { StorageCore } from 'src/cores/storage.core';
import { FeatureFlag, SystemConfigCore } from 'src/cores/system-config.core';
import { JOBS_ASSET_PAGINATION_SIZE, JobName, QueueName } from 'src/domain/job/job.constants';
import { IBaseJob, IEntityJob, ISidecarWriteJob } from 'src/domain/job/job.interface';
import { AssetEntity, AssetType } from 'src/entities/asset.entity';
import { ExifEntity } from 'src/entities/exif.entity';
import { ImmichLogger } from 'src/infra/logger';
import { IAlbumRepository } from 'src/interfaces/album.repository';
import { IAssetRepository, WithoutProperty } from 'src/interfaces/asset.repository';
import { ClientEvent, ICommunicationRepository } from 'src/interfaces/communication.repository';
import { ICryptoRepository } from 'src/interfaces/crypto.repository';
import { DatabaseLock, IDatabaseRepository } from 'src/interfaces/database.repository';
import { IJobRepository, JobStatus } from 'src/interfaces/job.repository';
import { IMediaRepository } from 'src/interfaces/media.repository';
import { IMetadataRepository, ImmichTags } from 'src/interfaces/metadata.repository';
import { IMoveRepository } from 'src/interfaces/move.repository';
import { IPersonRepository } from 'src/interfaces/person.repository';
import { IStorageRepository } from 'src/interfaces/storage.repository';
import { ISystemConfigRepository } from 'src/interfaces/system-config.repository';
import { handlePromiseError, usePagination } from 'src/utils';

/** look for a date from these tags (in order) */
const EXIF_DATE_TAGS: Array<keyof Tags> = [
  'SubSecDateTimeOriginal',
  'DateTimeOriginal',
  'SubSecCreateDate',
  'CreationDate',
  'CreateDate',
  'SubSecMediaCreateDate',
  'MediaCreateDate',
  'DateTimeCreated',
];

interface DirectoryItem {
  Length?: number;
  Mime: string;
  Padding?: number;
  Semantic?: string;
}

interface DirectoryEntry {
  Item: DirectoryItem;
}

export enum Orientation {
  Horizontal = '1',
  MirrorHorizontal = '2',
  Rotate180 = '3',
  MirrorVertical = '4',
  MirrorHorizontalRotate270CW = '5',
  Rotate90CW = '6',
  MirrorHorizontalRotate90CW = '7',
  Rotate270CW = '8',
}

type ExifEntityWithoutGeocodeAndTypeOrm = Omit<
  ExifEntity,
  'city' | 'state' | 'country' | 'description' | 'exifTextSearchableColumn'
> & { dateTimeOriginal: Date };

const exifDate = (dt: ExifDateTime | string | undefined) => (dt instanceof ExifDateTime ? dt?.toDate() : null);
const tzOffset = (dt: ExifDateTime | string | undefined) => (dt instanceof ExifDateTime ? dt?.tzoffsetMinutes : null);

const validate = <T>(value: T): NonNullable<T> | null => {
  // handle lists of numbers
  if (Array.isArray(value)) {
    value = value[0];
  }

  if (typeof value === 'string') {
    // string means a failure to parse a number, throw out result
    return null;
  }

  if (typeof value === 'number' && (Number.isNaN(value) || !Number.isFinite(value))) {
    return null;
  }

  return value ?? null;
};
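// Illustrative examples (values assumed, not from the original source):
// validate(['4', '5']) -> null (the first element is a string, i.e. a failed numeric parse),
// validate([4, 5]) -> 4, validate(Number.NaN) -> null, validate(undefined) -> null.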

@Injectable()
export class MetadataService {
  private logger = new ImmichLogger(MetadataService.name);
  private storageCore: StorageCore;
  private configCore: SystemConfigCore;
  private subscription: Subscription | null = null;

  constructor(
    @Inject(IAlbumRepository) private albumRepository: IAlbumRepository,
    @Inject(IAssetRepository) private assetRepository: IAssetRepository,
    @Inject(ICommunicationRepository) private communicationRepository: ICommunicationRepository,
    @Inject(ICryptoRepository) private cryptoRepository: ICryptoRepository,
    @Inject(IDatabaseRepository) private databaseRepository: IDatabaseRepository,
    @Inject(IJobRepository) private jobRepository: IJobRepository,
    @Inject(IMediaRepository) private mediaRepository: IMediaRepository,
    @Inject(IMetadataRepository) private repository: IMetadataRepository,
    @Inject(IMoveRepository) moveRepository: IMoveRepository,
    @Inject(IPersonRepository) personRepository: IPersonRepository,
    @Inject(IStorageRepository) private storageRepository: IStorageRepository,
    @Inject(ISystemConfigRepository) configRepository: ISystemConfigRepository,
  ) {
    this.configCore = SystemConfigCore.create(configRepository);
    this.storageCore = StorageCore.create(
      assetRepository,
      moveRepository,
      personRepository,
      cryptoRepository,
      configRepository,
      storageRepository,
    );
  }

  async init() {
    if (!this.subscription) {
      this.subscription = this.configCore.config$.subscribe(() => handlePromiseError(this.init(), this.logger));
    }

    const { reverseGeocoding } = await this.configCore.getConfig();
    const { enabled } = reverseGeocoding;

    if (!enabled) {
      return;
    }

    try {
      await this.jobRepository.pause(QueueName.METADATA_EXTRACTION);
      await this.databaseRepository.withLock(DatabaseLock.GeodataImport, () => this.repository.init());
      await this.jobRepository.resume(QueueName.METADATA_EXTRACTION);

      this.logger.log(`Initialized local reverse geocoder`);
    } catch (error: Error | any) {
      this.logger.error(`Unable to initialize reverse geocoding: ${error}`, error?.stack);
    }
  }

  async teardown() {
    this.subscription?.unsubscribe();
    await this.repository.teardown();
  }

  async handleLivePhotoLinking(job: IEntityJob): Promise<JobStatus> {
    const { id } = job;
    const [asset] = await this.assetRepository.getByIds([id], { exifInfo: true });
    if (!asset?.exifInfo) {
      return JobStatus.FAILED;
    }

    if (!asset.exifInfo.livePhotoCID) {
      return JobStatus.SKIPPED;
    }

    const otherType = asset.type === AssetType.VIDEO ? AssetType.IMAGE : AssetType.VIDEO;
    const match = await this.assetRepository.findLivePhotoMatch({
      livePhotoCID: asset.exifInfo.livePhotoCID,
      ownerId: asset.ownerId,
      otherAssetId: asset.id,
      type: otherType,
    });

    if (!match) {
      return JobStatus.SKIPPED;
    }

    const [photoAsset, motionAsset] = asset.type === AssetType.IMAGE ? [asset, match] : [match, asset];

    await this.assetRepository.update({ id: photoAsset.id, livePhotoVideoId: motionAsset.id });
    await this.assetRepository.update({ id: motionAsset.id, isVisible: false });
    await this.albumRepository.removeAsset(motionAsset.id);

    // Notify clients to hide the linked live photo asset
    this.communicationRepository.send(ClientEvent.ASSET_HIDDEN, motionAsset.ownerId, motionAsset.id);

    return JobStatus.SUCCESS;
  }

  async handleQueueMetadataExtraction(job: IBaseJob): Promise<JobStatus> {
    const { force } = job;
    const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => {
      return force
        ? this.assetRepository.getAll(pagination)
        : this.assetRepository.getWithout(pagination, WithoutProperty.EXIF);
    });

    for await (const assets of assetPagination) {
      await this.jobRepository.queueAll(
        assets.map((asset) => ({ name: JobName.METADATA_EXTRACTION, data: { id: asset.id } })),
      );
    }

    return JobStatus.SUCCESS;
  }

  async handleMetadataExtraction({ id }: IEntityJob): Promise<JobStatus> {
    const [asset] = await this.assetRepository.getByIds([id]);
    if (!asset) {
      return JobStatus.FAILED;
    }

    const { exifData, tags } = await this.exifData(asset);

    if (asset.type === AssetType.VIDEO) {
      const { videoStreams } = await this.mediaRepository.probe(asset.originalPath);

      if (videoStreams[0]) {
        switch (videoStreams[0].rotation) {
          case -90: {
            exifData.orientation = Orientation.Rotate90CW;
            break;
          }
          case 0: {
            exifData.orientation = Orientation.Horizontal;
            break;
          }
          case 90: {
            exifData.orientation = Orientation.Rotate270CW;
            break;
          }
          case 180: {
            exifData.orientation = Orientation.Rotate180;
            break;
          }
        }
      }
    }

    await this.applyMotionPhotos(asset, tags);
    await this.applyReverseGeocoding(asset, exifData);
    await this.assetRepository.upsertExif(exifData);

    const dateTimeOriginal = exifData.dateTimeOriginal;
    let localDateTime = dateTimeOriginal ?? undefined;

    const timeZoneOffset = tzOffset(firstDateTime(tags as Tags)) ?? 0;

    if (dateTimeOriginal && timeZoneOffset) {
      localDateTime = new Date(dateTimeOriginal.getTime() + timeZoneOffset * 60_000);
    }
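    // Illustrative example (values assumed): with dateTimeOriginal at 2023-01-01T00:00:00Z and an
    // EXIF offset of +120 minutes, localDateTime becomes 2023-01-01T02:00:00Z, i.e. the wall-clock
    // capture time re-encoded as a UTC instant.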
    await this.assetRepository.update({
      id: asset.id,
      duration: tags.Duration ? this.getDuration(tags.Duration) : null,
      localDateTime,
      fileCreatedAt: exifData.dateTimeOriginal ?? undefined,
    });

    await this.assetRepository.upsertJobStatus({
      assetId: asset.id,
      metadataExtractedAt: new Date(),
    });

    return JobStatus.SUCCESS;
  }

  async handleQueueSidecar(job: IBaseJob): Promise<JobStatus> {
    const { force } = job;
    const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => {
      return force
        ? this.assetRepository.getAll(pagination)
        : this.assetRepository.getWithout(pagination, WithoutProperty.SIDECAR);
    });

    for await (const assets of assetPagination) {
      await this.jobRepository.queueAll(
        assets.map((asset) => ({
          name: force ? JobName.SIDECAR_SYNC : JobName.SIDECAR_DISCOVERY,
          data: { id: asset.id },
        })),
      );
    }

    return JobStatus.SUCCESS;
  }

  handleSidecarSync({ id }: IEntityJob): Promise<JobStatus> {
    return this.processSidecar(id, true);
  }

  handleSidecarDiscovery({ id }: IEntityJob): Promise<JobStatus> {
    return this.processSidecar(id, false);
  }

  async handleSidecarWrite(job: ISidecarWriteJob): Promise<JobStatus> {
    const { id, description, dateTimeOriginal, latitude, longitude } = job;
    const [asset] = await this.assetRepository.getByIds([id]);
    if (!asset) {
      return JobStatus.FAILED;
    }

    const sidecarPath = asset.sidecarPath || `${asset.originalPath}.xmp`;
    const exif = _.omitBy<Tags>(
      {
        ImageDescription: description,
        CreationDate: dateTimeOriginal,
        GPSLatitude: latitude,
        GPSLongitude: longitude,
      },
      _.isUndefined,
    );

    if (Object.keys(exif).length === 0) {
      return JobStatus.SKIPPED;
    }

    await this.repository.writeTags(sidecarPath, exif);

    if (!asset.sidecarPath) {
      await this.assetRepository.update({ id, sidecarPath });
    }

    return JobStatus.SUCCESS;
  }

  private async applyReverseGeocoding(asset: AssetEntity, exifData: ExifEntityWithoutGeocodeAndTypeOrm) {
    const { latitude, longitude } = exifData;
    if (!(await this.configCore.hasFeature(FeatureFlag.REVERSE_GEOCODING)) || !longitude || !latitude) {
      return;
    }

    try {
      const reverseGeocode = await this.repository.reverseGeocode({ latitude, longitude });
      if (!reverseGeocode) {
        return;
      }
      Object.assign(exifData, reverseGeocode);
    } catch (error: Error | any) {
      this.logger.warn(
        `Unable to run reverse geocoding due to ${error} for asset ${asset.id} at ${asset.originalPath}`,
        error?.stack,
      );
    }
  }

  private async applyMotionPhotos(asset: AssetEntity, tags: ImmichTags) {
    if (asset.type !== AssetType.IMAGE) {
      return;
    }

    const rawDirectory = tags.Directory;
    const isMotionPhoto = tags.MotionPhoto;
    const isMicroVideo = tags.MicroVideo;
    const videoOffset = tags.MicroVideoOffset;
    const hasMotionPhotoVideo = tags.MotionPhotoVideo;
    const hasEmbeddedVideoFile = tags.EmbeddedVideoType === 'MotionPhoto_Data' && tags.EmbeddedVideoFile;
    const directory = Array.isArray(rawDirectory) ? (rawDirectory as DirectoryEntry[]) : null;

    let length = 0;
    let padding = 0;

    if (isMotionPhoto && directory) {
      for (const entry of directory) {
        if (entry.Item.Semantic == 'MotionPhoto') {
          length = entry.Item.Length ?? 0;
          padding = entry.Item.Padding ?? 0;
          break;
        }
      }
    }

    if (isMicroVideo && typeof videoOffset === 'number') {
      length = videoOffset;
    }

    if (!length && !hasEmbeddedVideoFile && !hasMotionPhotoVideo) {
      return;
    }

    this.logger.debug(`Starting motion photo video extraction (${asset.id})`);

    try {
      const stat = await this.storageRepository.stat(asset.originalPath);
      const position = stat.size - length - padding;
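      // The embedded video is appended to the end of the image file, so it starts at
      // (file size - video length - trailing padding).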
      let video: Buffer;
      // Samsung MotionPhoto video extraction
      // HEIC-encoded
      if (hasMotionPhotoVideo) {
        video = await this.repository.extractBinaryTag(asset.originalPath, 'MotionPhotoVideo');
      }
      // JPEG-encoded; HEIC also contains these tags, so this conditional must come second
      else if (hasEmbeddedVideoFile) {
        video = await this.repository.extractBinaryTag(asset.originalPath, 'EmbeddedVideoFile');
      }
      // Default video extraction
      else {
        video = await this.storageRepository.readFile(asset.originalPath, {
          buffer: Buffer.alloc(length),
          position,
          length,
        });
      }
      const checksum = this.cryptoRepository.hashSha1(video);

      let motionAsset = await this.assetRepository.getByChecksum(asset.ownerId, checksum);
      if (motionAsset) {
        this.logger.debug(
          `Asset ${asset.id}'s motion photo video with checksum ${checksum.toString(
            'base64',
          )} already exists in the repository`,
        );
      } else {
        // We create a UUID in advance so that each extracted video can have a unique filename
        // (allowing us to delete old ones if necessary)
        const motionAssetId = this.cryptoRepository.randomUUID();
        const motionPath = StorageCore.getAndroidMotionPath(asset, motionAssetId);
        const createdAt = asset.fileCreatedAt ?? asset.createdAt;
        motionAsset = await this.assetRepository.create({
          id: motionAssetId,
          libraryId: asset.libraryId,
          type: AssetType.VIDEO,
          fileCreatedAt: createdAt,
          fileModifiedAt: asset.fileModifiedAt,
          localDateTime: createdAt,
          checksum,
          ownerId: asset.ownerId,
          originalPath: motionPath,
          originalFileName: asset.originalFileName,
          isVisible: false,
          isReadOnly: false,
          deviceAssetId: 'NONE',
          deviceId: 'NONE',
        });

        this.storageCore.ensureFolders(motionPath);
        await this.storageRepository.writeFile(motionAsset.originalPath, video);
        await this.jobRepository.queue({ name: JobName.METADATA_EXTRACTION, data: { id: motionAsset.id } });
        await this.assetRepository.update({ id: asset.id, livePhotoVideoId: motionAsset.id });

        // If the asset already had an associated livePhotoVideo, delete it, because
        // its checksum doesn't match the checksum of the motionAsset we just extracted
        // (if it did, getByChecksum() would've returned non-null)
        if (asset.livePhotoVideoId) {
          await this.jobRepository.queue({ name: JobName.ASSET_DELETION, data: { id: asset.livePhotoVideoId } });
          this.logger.log(`Removed old motion photo video asset (${asset.livePhotoVideoId})`);
        }
      }

      this.logger.debug(`Finished motion photo video extraction (${asset.id})`);
    } catch (error: Error | any) {
      this.logger.error(`Failed to extract live photo ${asset.originalPath}: ${error}`, error?.stack);
    }
  }

  private async exifData(
    asset: AssetEntity,
  ): Promise<{ exifData: ExifEntityWithoutGeocodeAndTypeOrm; tags: ImmichTags }> {
    const stats = await this.storageRepository.stat(asset.originalPath);
    const mediaTags = await this.repository.readTags(asset.originalPath);
    const sidecarTags = asset.sidecarPath ? await this.repository.readTags(asset.sidecarPath) : null;

    // ensure date from sidecar is used if present
    const hasDateOverride = !!this.getDateTimeOriginal(sidecarTags);
    if (mediaTags && hasDateOverride) {
      for (const tag of EXIF_DATE_TAGS) {
        delete mediaTags[tag];
      }
    }

    const tags = { ...mediaTags, ...sidecarTags };

    this.logger.verbose('Exif Tags', tags);

    const exifData = {
      // altitude: tags.GPSAltitude ?? null,
      assetId: asset.id,
      bitsPerSample: this.getBitsPerSample(tags),
      colorspace: tags.ColorSpace ?? null,
      dateTimeOriginal: this.getDateTimeOriginal(tags) ?? asset.fileCreatedAt,
      description: (tags.ImageDescription || tags.Description) ?? '',
      exifImageHeight: validate(tags.ImageHeight),
      exifImageWidth: validate(tags.ImageWidth),
      exposureTime: tags.ExposureTime ?? null,
      fileSizeInByte: stats.size,
      fNumber: validate(tags.FNumber),
      focalLength: validate(tags.FocalLength),
      fps: validate(Number.parseFloat(tags.VideoFrameRate!)),
      iso: validate(tags.ISO),
      latitude: validate(tags.GPSLatitude),
      lensModel: tags.LensModel ?? null,
      livePhotoCID: (tags.ContentIdentifier || tags.MediaGroupUUID) ?? null,
      autoStackId: this.getAutoStackId(tags),
      longitude: validate(tags.GPSLongitude),
      make: tags.Make ?? null,
      model: tags.Model ?? null,
      modifyDate: exifDate(tags.ModifyDate) ?? asset.fileModifiedAt,
      orientation: validate(tags.Orientation)?.toString() ?? null,
      profileDescription: tags.ProfileDescription || null,
      projectionType: tags.ProjectionType ? String(tags.ProjectionType).toUpperCase() : null,
      timeZone: tags.tz ?? null,
    };

    if (exifData.latitude === 0 && exifData.longitude === 0) {
      this.logger.warn('Exif data has latitude and longitude of 0, setting to null');
      exifData.latitude = null;
      exifData.longitude = null;
    }

    return { exifData, tags };
  }

  private getAutoStackId(tags: ImmichTags | null): string | null {
    if (!tags) {
      return null;
    }
    return tags.BurstID ?? tags.BurstUUID ?? tags.CameraBurstID ?? tags.MediaUniqueID ?? null;
  }

  private getDateTimeOriginal(tags: ImmichTags | Tags | null) {
    if (!tags) {
      return null;
    }
    return exifDate(firstDateTime(tags as Tags, EXIF_DATE_TAGS));
  }

  private getBitsPerSample(tags: ImmichTags): number | null {
    const bitDepthTags = [
      tags.BitsPerSample,
      tags.ComponentBitDepth,
      tags.ImagePixelDepth,
      tags.BitDepth,
      tags.ColorBitDepth,
      // `numericTags` doesn't parse values like '12 12 12'
    ].map((tag) => (typeof tag === 'string' ? Number.parseInt(tag) : tag));

    let bitsPerSample = bitDepthTags.find((tag) => typeof tag === 'number' && !Number.isNaN(tag)) ?? null;
    if (bitsPerSample && bitsPerSample >= 24 && bitsPerSample % 3 === 0) {
      bitsPerSample /= 3; // converts per-pixel bit depth to per-channel
    }

    return bitsPerSample;
  }
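  // Illustrative examples (tag values assumed): a BitsPerSample of '12 12 12' parses to 12 via
  // Number.parseInt; a per-pixel depth of 48 (>= 24 and divisible by 3) is converted to 16 bits
  // per channel.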

  private getDuration(seconds?: ImmichTags['Duration']): string {
    let _seconds = seconds as number;

    if (typeof seconds === 'object') {
      _seconds = seconds.Value * (seconds?.Scale || 1);
    } else if (typeof seconds === 'string') {
      _seconds = Duration.fromISOTime(seconds).as('seconds');
    }

    return Duration.fromObject({ seconds: _seconds }).toFormat('hh:mm:ss.SSS');
  }
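  // Illustrative examples (inputs assumed): getDuration(90.5) -> '00:01:30.500',
  // getDuration('00:02:05') -> '00:02:05.000', and a tag object { Value: 3, Scale: 0.5 }
  // -> '00:00:01.500'.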

  private async processSidecar(id: string, isSync: boolean): Promise<JobStatus> {
    const [asset] = await this.assetRepository.getByIds([id]);

    if (!asset) {
      return JobStatus.FAILED;
    }

    if (isSync && !asset.sidecarPath) {
      return JobStatus.FAILED;
    }

    if (!isSync && (!asset.isVisible || asset.sidecarPath)) {
      return JobStatus.FAILED;
    }

    // XMP sidecars can come in two filename formats. For a photo named photo.ext, the filenames are photo.ext.xmp and photo.xmp
    const assetPath = path.parse(asset.originalPath);
    const assetPathWithoutExt = path.join(assetPath.dir, assetPath.name);
    const sidecarPathWithoutExt = `${assetPathWithoutExt}.xmp`;
    const sidecarPathWithExt = `${asset.originalPath}.xmp`;
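    // e.g. an asset at /photos/IMG_0001.jpg (hypothetical path) yields the candidates
    // /photos/IMG_0001.jpg.xmp and /photos/IMG_0001.xmp.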

    const [sidecarPathWithExtExists, sidecarPathWithoutExtExists] = await Promise.all([
      this.storageRepository.checkFileExists(sidecarPathWithExt, constants.R_OK),
      this.storageRepository.checkFileExists(sidecarPathWithoutExt, constants.R_OK),
    ]);

    let sidecarPath = null;
    if (sidecarPathWithExtExists) {
      sidecarPath = sidecarPathWithExt;
    } else if (sidecarPathWithoutExtExists) {
      sidecarPath = sidecarPathWithoutExt;
    }

    if (sidecarPath) {
      await this.assetRepository.update({ id: asset.id, sidecarPath });
      return JobStatus.SUCCESS;
    }

    if (!isSync) {
      return JobStatus.FAILED;
    }

    this.logger.debug(
      `Sidecar file was not found. Checked paths '${sidecarPathWithExt}' and '${sidecarPathWithoutExt}'. Removing sidecarPath for asset ${asset.id}`,
    );
    await this.assetRepository.update({ id: asset.id, sidecarPath: null });

    return JobStatus.SUCCESS;
  }
}
@@ -0,0 +1,117 @@
import { BadRequestException } from '@nestjs/common';
import { PartnerResponseDto } from 'src/dtos/partner.dto';
import { UserAvatarColor } from 'src/entities/user.entity';
import { IPartnerRepository, PartnerDirection } from 'src/interfaces/partner.repository';
import { PartnerService } from 'src/services/partner.service';
import { authStub } from 'test/fixtures/auth.stub';
import { partnerStub } from 'test/fixtures/partner.stub';
import { IAccessRepositoryMock, newAccessRepositoryMock } from 'test/repositories/access.repository.mock';
import { newPartnerRepositoryMock } from 'test/repositories/partner.repository.mock';

const responseDto = {
  admin: <PartnerResponseDto>{
    email: 'admin@test.com',
    name: 'admin_name',
    id: 'admin_id',
    isAdmin: true,
    oauthId: '',
    profileImagePath: '',
    shouldChangePassword: false,
    storageLabel: 'admin',
    createdAt: new Date('2021-01-01'),
    deletedAt: null,
    updatedAt: new Date('2021-01-01'),
    memoriesEnabled: true,
    avatarColor: UserAvatarColor.PRIMARY,
    quotaSizeInBytes: null,
    inTimeline: true,
    quotaUsageInBytes: 0,
  },
  user1: <PartnerResponseDto>{
    email: 'immich@test.com',
    name: 'immich_name',
    id: 'user-id',
    isAdmin: false,
    oauthId: '',
    profileImagePath: '',
    shouldChangePassword: false,
    storageLabel: null,
    createdAt: new Date('2021-01-01'),
    deletedAt: null,
    updatedAt: new Date('2021-01-01'),
    memoriesEnabled: true,
    avatarColor: UserAvatarColor.PRIMARY,
    inTimeline: true,
    quotaSizeInBytes: null,
    quotaUsageInBytes: 0,
  },
};

describe(PartnerService.name, () => {
  let sut: PartnerService;
  let partnerMock: jest.Mocked<IPartnerRepository>;
  let accessMock: IAccessRepositoryMock;

  beforeEach(() => {
    partnerMock = newPartnerRepositoryMock();
    accessMock = newAccessRepositoryMock();

    sut = new PartnerService(partnerMock, accessMock);
  });

  it('should work', () => {
    expect(sut).toBeDefined();
  });

  describe('getAll', () => {
    it("should return a list of partners with whom I've shared my library", async () => {
      partnerMock.getAll.mockResolvedValue([partnerStub.adminToUser1, partnerStub.user1ToAdmin1]);
      await expect(sut.getAll(authStub.user1, PartnerDirection.SharedBy)).resolves.toEqual([responseDto.admin]);
      expect(partnerMock.getAll).toHaveBeenCalledWith(authStub.user1.user.id);
    });

    it('should return a list of partners who have shared their libraries with me', async () => {
      partnerMock.getAll.mockResolvedValue([partnerStub.adminToUser1, partnerStub.user1ToAdmin1]);
      await expect(sut.getAll(authStub.user1, PartnerDirection.SharedWith)).resolves.toEqual([responseDto.admin]);
      expect(partnerMock.getAll).toHaveBeenCalledWith(authStub.user1.user.id);
    });
  });

  describe('create', () => {
    it('should create a new partner', async () => {
      partnerMock.get.mockResolvedValue(null);
      partnerMock.create.mockResolvedValue(partnerStub.adminToUser1);

      await expect(sut.create(authStub.admin, authStub.user1.user.id)).resolves.toEqual(responseDto.user1);

      expect(partnerMock.create).toHaveBeenCalledWith({
        sharedById: authStub.admin.user.id,
        sharedWithId: authStub.user1.user.id,
      });
    });

    it('should throw an error when the partner already exists', async () => {
      partnerMock.get.mockResolvedValue(partnerStub.adminToUser1);

      await expect(sut.create(authStub.admin, authStub.user1.user.id)).rejects.toBeInstanceOf(BadRequestException);

      expect(partnerMock.create).not.toHaveBeenCalled();
    });
  });

  describe('remove', () => {
    it('should remove a partner', async () => {
      partnerMock.get.mockResolvedValue(partnerStub.adminToUser1);

      await sut.remove(authStub.admin, authStub.user1.user.id);

      expect(partnerMock.remove).toHaveBeenCalledWith(partnerStub.adminToUser1);
    });

    it('should throw an error when the partner does not exist', async () => {
      partnerMock.get.mockResolvedValue(null);

      await expect(sut.remove(authStub.admin, authStub.user1.user.id)).rejects.toBeInstanceOf(BadRequestException);

      expect(partnerMock.remove).not.toHaveBeenCalled();
    });
  });
});
@@ -0,0 +1,68 @@
import { BadRequestException, Inject, Injectable } from '@nestjs/common';
import { AccessCore, Permission } from 'src/cores/access.core';
import { AuthDto } from 'src/dtos/auth.dto';
import { PartnerResponseDto, UpdatePartnerDto } from 'src/dtos/partner.dto';
import { mapUser } from 'src/dtos/user.dto';
import { PartnerEntity } from 'src/entities/partner.entity';
import { IAccessRepository } from 'src/interfaces/access.repository';
import { IPartnerRepository, PartnerDirection, PartnerIds } from 'src/interfaces/partner.repository';

@Injectable()
export class PartnerService {
  private access: AccessCore;
  constructor(
    @Inject(IPartnerRepository) private repository: IPartnerRepository,
    @Inject(IAccessRepository) accessRepository: IAccessRepository,
  ) {
    this.access = AccessCore.create(accessRepository);
  }

  async create(auth: AuthDto, sharedWithId: string): Promise<PartnerResponseDto> {
    const partnerId: PartnerIds = { sharedById: auth.user.id, sharedWithId };
    const exists = await this.repository.get(partnerId);
    if (exists) {
      throw new BadRequestException(`Partner already exists`);
    }

    const partner = await this.repository.create(partnerId);
    return this.mapToPartnerEntity(partner, PartnerDirection.SharedBy);
  }

  async remove(auth: AuthDto, sharedWithId: string): Promise<void> {
    const partnerId: PartnerIds = { sharedById: auth.user.id, sharedWithId };
    const partner = await this.repository.get(partnerId);
    if (!partner) {
      throw new BadRequestException('Partner not found');
    }

    await this.repository.remove(partner);
  }

  async getAll(auth: AuthDto, direction: PartnerDirection): Promise<PartnerResponseDto[]> {
    const partners = await this.repository.getAll(auth.user.id);
    const key = direction === PartnerDirection.SharedBy ? 'sharedById' : 'sharedWithId';
    return partners
      .filter((partner) => partner.sharedBy && partner.sharedWith) // Filter out soft deleted users
      .filter((partner) => partner[key] === auth.user.id)
      .map((partner) => this.mapToPartnerEntity(partner, direction));
  }

  async update(auth: AuthDto, sharedById: string, dto: UpdatePartnerDto): Promise<PartnerResponseDto> {
    await this.access.requirePermission(auth, Permission.PARTNER_UPDATE, sharedById);
    const partnerId: PartnerIds = { sharedById, sharedWithId: auth.user.id };

    const entity = await this.repository.update({ ...partnerId, inTimeline: dto.inTimeline });
    return this.mapToPartnerEntity(entity, PartnerDirection.SharedWith);
  }

  private mapToPartnerEntity(partner: PartnerEntity, direction: PartnerDirection): PartnerResponseDto {
    // take the opposite side of the relationship so we return the non-me user of the partnership
    const user = mapUser(
      direction === PartnerDirection.SharedBy ? partner.sharedWith : partner.sharedBy,
    ) as PartnerResponseDto;

    user.inTimeline = partner.inTimeline;

    return user;
  }
}
File diff suppressed because it is too large
@@ -0,0 +1,596 @@
import { BadRequestException, Inject, Injectable, NotFoundException } from '@nestjs/common';
import { AccessCore, Permission } from 'src/cores/access.core';
import { StorageCore } from 'src/cores/storage.core';
import { SystemConfigCore } from 'src/cores/system-config.core';
import { mimeTypes } from 'src/domain/domain.constant';
import { JOBS_ASSET_PAGINATION_SIZE, JobName, QueueName } from 'src/domain/job/job.constants';
import { IBaseJob, IDeferrableJob, IEntityJob } from 'src/domain/job/job.interface';
import { FACE_THUMBNAIL_SIZE } from 'src/domain/media/media.constant';
import { BulkIdErrorReason, BulkIdResponseDto } from 'src/dtos/asset-ids.response.dto';
import { AssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import {
  AssetFaceResponseDto,
  AssetFaceUpdateDto,
  FaceDto,
  MergePersonDto,
  PeopleResponseDto,
  PeopleUpdateDto,
  PersonCreateDto,
  PersonResponseDto,
  PersonSearchDto,
  PersonStatisticsResponseDto,
  PersonUpdateDto,
  mapFaces,
  mapPerson,
} from 'src/dtos/person.dto';
import { PersonPathType } from 'src/entities/move.entity';
import { PersonEntity } from 'src/entities/person.entity';
import { ImmichLogger } from 'src/infra/logger';
import { IAccessRepository } from 'src/interfaces/access.repository';
import { IAssetRepository, WithoutProperty } from 'src/interfaces/asset.repository';
import { ICryptoRepository } from 'src/interfaces/crypto.repository';
import { IJobRepository, JobItem, JobStatus } from 'src/interfaces/job.repository';
import { IMachineLearningRepository } from 'src/interfaces/machine-learning.repository';
import { CropOptions, IMediaRepository } from 'src/interfaces/media.repository';
import { IMoveRepository } from 'src/interfaces/move.repository';
import { IPersonRepository, UpdateFacesData } from 'src/interfaces/person.repository';
import { ISearchRepository } from 'src/interfaces/search.repository';
import { IStorageRepository } from 'src/interfaces/storage.repository';
import { ISystemConfigRepository } from 'src/interfaces/system-config.repository';
import { CacheControl, ImmichFileResponse, usePagination } from 'src/utils';
import { IsNull } from 'typeorm';

@Injectable()
export class PersonService {
  private access: AccessCore;
  private configCore: SystemConfigCore;
  private storageCore: StorageCore;
  readonly logger = new ImmichLogger(PersonService.name);

  constructor(
    @Inject(IAccessRepository) accessRepository: IAccessRepository,
    @Inject(IAssetRepository) private assetRepository: IAssetRepository,
    @Inject(IMachineLearningRepository) private machineLearningRepository: IMachineLearningRepository,
    @Inject(IMoveRepository) moveRepository: IMoveRepository,
    @Inject(IMediaRepository) private mediaRepository: IMediaRepository,
    @Inject(IPersonRepository) private repository: IPersonRepository,
    @Inject(ISystemConfigRepository) configRepository: ISystemConfigRepository,
    @Inject(IStorageRepository) private storageRepository: IStorageRepository,
    @Inject(IJobRepository) private jobRepository: IJobRepository,
    @Inject(ISearchRepository) private smartInfoRepository: ISearchRepository,
    @Inject(ICryptoRepository) cryptoRepository: ICryptoRepository,
  ) {
    this.access = AccessCore.create(accessRepository);
    this.configCore = SystemConfigCore.create(configRepository);
    this.storageCore = StorageCore.create(
      assetRepository,
      moveRepository,
      repository,
      cryptoRepository,
      configRepository,
      storageRepository,
    );
  }

  async getAll(auth: AuthDto, dto: PersonSearchDto): Promise<PeopleResponseDto> {
    const { machineLearning } = await this.configCore.getConfig();
    const people = await this.repository.getAllForUser(auth.user.id, {
      minimumFaceCount: machineLearning.facialRecognition.minFaces,
      withHidden: dto.withHidden || false,
    });
    const { total, hidden } = await this.repository.getNumberOfPeople(auth.user.id);

    return {
      people: people.map((person) => mapPerson(person)),
      total,
      hidden,
    };
  }

  async reassignFaces(auth: AuthDto, personId: string, dto: AssetFaceUpdateDto): Promise<PersonResponseDto[]> {
    await this.access.requirePermission(auth, Permission.PERSON_WRITE, personId);
    const person = await this.findOrFail(personId);
    const result: PersonResponseDto[] = [];
    const changeFeaturePhoto: string[] = [];
    for (const data of dto.data) {
      const faces = await this.repository.getFacesByIds([{ personId: data.personId, assetId: data.assetId }]);

      for (const face of faces) {
        await this.access.requirePermission(auth, Permission.PERSON_CREATE, face.id);
        if (person.faceAssetId === null) {
          changeFeaturePhoto.push(person.id);
        }
        if (face.person && face.person.faceAssetId === face.id) {
          changeFeaturePhoto.push(face.person.id);
        }

        await this.repository.reassignFace(face.id, personId);
      }

      result.push(person);
    }
    if (changeFeaturePhoto.length > 0) {
      // Remove duplicates
      await this.createNewFeaturePhoto([...new Set(changeFeaturePhoto)]);
    }
    return result;
  }

  async reassignFacesById(auth: AuthDto, personId: string, dto: FaceDto): Promise<PersonResponseDto> {
    await this.access.requirePermission(auth, Permission.PERSON_WRITE, personId);

    await this.access.requirePermission(auth, Permission.PERSON_CREATE, dto.id);
    const face = await this.repository.getFaceById(dto.id);
    const person = await this.findOrFail(personId);

    await this.repository.reassignFace(face.id, personId);
    if (person.faceAssetId === null) {
      await this.createNewFeaturePhoto([person.id]);
    }
    if (face.person && face.person.faceAssetId === face.id) {
      await this.createNewFeaturePhoto([face.person.id]);
    }

    return await this.findOrFail(personId).then(mapPerson);
  }

  async getFacesById(auth: AuthDto, dto: FaceDto): Promise<AssetFaceResponseDto[]> {
    await this.access.requirePermission(auth, Permission.ASSET_READ, dto.id);
    const faces = await this.repository.getFaces(dto.id);
    return faces.map((asset) => mapFaces(asset, auth));
  }

  async createNewFeaturePhoto(changeFeaturePhoto: string[]) {
    this.logger.debug(
      `Changing feature photos for ${changeFeaturePhoto.length} ${changeFeaturePhoto.length > 1 ? 'people' : 'person'}`,
    );

    const jobs: JobItem[] = [];
    for (const personId of changeFeaturePhoto) {
      const assetFace = await this.repository.getRandomFace(personId);

      if (assetFace !== null) {
        await this.repository.update({
          id: personId,
          faceAssetId: assetFace.id,
        });
        jobs.push({ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id: personId } });
      }
    }

    await this.jobRepository.queueAll(jobs);
  }

  async getById(auth: AuthDto, id: string): Promise<PersonResponseDto> {
    await this.access.requirePermission(auth, Permission.PERSON_READ, id);
    return this.findOrFail(id).then(mapPerson);
  }

  async getStatistics(auth: AuthDto, id: string): Promise<PersonStatisticsResponseDto> {
    await this.access.requirePermission(auth, Permission.PERSON_READ, id);
    return this.repository.getStatistics(id);
  }

  async getThumbnail(auth: AuthDto, id: string): Promise<ImmichFileResponse> {
    await this.access.requirePermission(auth, Permission.PERSON_READ, id);
    const person = await this.repository.getById(id);
    if (!person || !person.thumbnailPath) {
      throw new NotFoundException();
    }

    return new ImmichFileResponse({
      path: person.thumbnailPath,
      contentType: mimeTypes.lookup(person.thumbnailPath),
      cacheControl: CacheControl.PRIVATE_WITHOUT_CACHE,
    });
  }

  async getAssets(auth: AuthDto, id: string): Promise<AssetResponseDto[]> {
    await this.access.requirePermission(auth, Permission.PERSON_READ, id);
    const assets = await this.repository.getAssets(id);
    return assets.map((asset) => mapAsset(asset));
  }

  create(auth: AuthDto, dto: PersonCreateDto): Promise<PersonResponseDto> {
    return this.repository.create({
      ownerId: auth.user.id,
      name: dto.name,
      birthDate: dto.birthDate,
      isHidden: dto.isHidden,
    });
  }

  async update(auth: AuthDto, id: string, dto: PersonUpdateDto): Promise<PersonResponseDto> {
    await this.access.requirePermission(auth, Permission.PERSON_WRITE, id);

    const { name, birthDate, isHidden, featureFaceAssetId: assetId } = dto;
    // TODO: set by faceId directly
    let faceId: string | undefined = undefined;
    if (assetId) {
      await this.access.requirePermission(auth, Permission.ASSET_READ, assetId);
      const [face] = await this.repository.getFacesByIds([{ personId: id, assetId }]);
      if (!face) {
        throw new BadRequestException('Invalid assetId for feature face');
      }

      faceId = face.id;
    }

    const person = await this.repository.update({ id, faceAssetId: faceId, name, birthDate, isHidden });

    if (assetId) {
      await this.jobRepository.queue({ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id } });
    }

    return mapPerson(person);
  }

  async updateAll(auth: AuthDto, dto: PeopleUpdateDto): Promise<BulkIdResponseDto[]> {
    const results: BulkIdResponseDto[] = [];
    for (const person of dto.people) {
      try {
        await this.update(auth, person.id, {
          isHidden: person.isHidden,
          name: person.name,
          birthDate: person.birthDate,
          featureFaceAssetId: person.featureFaceAssetId,
        });
        results.push({ id: person.id, success: true });
      } catch (error: Error | any) {
        this.logger.error(`Unable to update ${person.id} : ${error}`, error?.stack);
        results.push({ id: person.id, success: false, error: BulkIdErrorReason.UNKNOWN });
      }
    }
    return results;
  }

  private async delete(people: PersonEntity[]) {
    await Promise.all(people.map((person) => this.storageRepository.unlink(person.thumbnailPath)));
    await this.repository.delete(people);
    this.logger.debug(`Deleted ${people.length} people`);
  }

  private async deleteAllPeople() {
    const personPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) =>
      this.repository.getAll({ ...pagination, skip: 0 }),
    );

    for await (const people of personPagination) {
      await this.delete(people); // deletes thumbnails too
    }
  }

  async handlePersonCleanup(): Promise<JobStatus> {
    const people = await this.repository.getAllWithoutFaces();
    await this.delete(people);
    return JobStatus.SUCCESS;
  }

  async handleQueueDetectFaces({ force }: IBaseJob): Promise<JobStatus> {
    const { machineLearning } = await this.configCore.getConfig();
    if (!machineLearning.enabled || !machineLearning.facialRecognition.enabled) {
      return JobStatus.SKIPPED;
    }

    if (force) {
      await this.deleteAllPeople();
      await this.repository.deleteAllFaces();
    }

    const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => {
      return force
        ? this.assetRepository.getAll(pagination, { orderDirection: 'DESC', withFaces: true })
        : this.assetRepository.getWithout(pagination, WithoutProperty.FACES);
    });

    for await (const assets of assetPagination) {
      await this.jobRepository.queueAll(
        assets.map((asset) => ({ name: JobName.FACE_DETECTION, data: { id: asset.id } })),
      );
    }

    return JobStatus.SUCCESS;
  }

  async handleDetectFaces({ id }: IEntityJob): Promise<JobStatus> {
    const { machineLearning } = await this.configCore.getConfig();
    if (!machineLearning.enabled || !machineLearning.facialRecognition.enabled) {
      return JobStatus.SKIPPED;
    }

    const relations = {
      exifInfo: true,
      faces: {
        person: false,
      },
    };
    const [asset] = await this.assetRepository.getByIds([id], relations);
    if (!asset || !asset.resizePath || asset.faces?.length > 0) {
      return JobStatus.FAILED;
    }

    const faces = await this.machineLearningRepository.detectFaces(
      machineLearning.url,
      { imagePath: asset.resizePath },
      machineLearning.facialRecognition,
    );

    this.logger.debug(`${faces.length} faces detected in ${asset.resizePath}`);
    this.logger.verbose(faces.map((face) => ({ ...face, embedding: `vector(${face.embedding.length})` })));

    if (faces.length > 0) {
      await this.jobRepository.queue({ name: JobName.QUEUE_FACIAL_RECOGNITION, data: { force: false } });

      const mappedFaces = faces.map((face) => ({
        assetId: asset.id,
        embedding: face.embedding,
        imageHeight: face.imageHeight,
        imageWidth: face.imageWidth,
        boundingBoxX1: face.boundingBox.x1,
        boundingBoxX2: face.boundingBox.x2,
        boundingBoxY1: face.boundingBox.y1,
        boundingBoxY2: face.boundingBox.y2,
      }));

      const faceIds = await this.repository.createFaces(mappedFaces);
      await this.jobRepository.queueAll(faceIds.map((id) => ({ name: JobName.FACIAL_RECOGNITION, data: { id } })));
    }

    await this.assetRepository.upsertJobStatus({
      assetId: asset.id,
      facesRecognizedAt: new Date(),
    });

    return JobStatus.SUCCESS;
  }

  async handleQueueRecognizeFaces({ force }: IBaseJob): Promise<JobStatus> {
    const { machineLearning } = await this.configCore.getConfig();
    if (!machineLearning.enabled || !machineLearning.facialRecognition.enabled) {
      return JobStatus.SKIPPED;
    }

    await this.jobRepository.waitForQueueCompletion(QueueName.THUMBNAIL_GENERATION, QueueName.FACE_DETECTION);
    const { waiting } = await this.jobRepository.getJobCounts(QueueName.FACIAL_RECOGNITION);

    if (force) {
      await this.deleteAllPeople();
    } else if (waiting) {
      this.logger.debug(
        `Skipping facial recognition queueing because ${waiting} job${waiting > 1 ? 's are' : ' is'} already queued`,
      );
      return JobStatus.SKIPPED;
    }

    const facePagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) =>
      this.repository.getAllFaces(pagination, { where: force ? undefined : { personId: IsNull() } }),
    );

    for await (const page of facePagination) {
      await this.jobRepository.queueAll(
        page.map((face) => ({ name: JobName.FACIAL_RECOGNITION, data: { id: face.id, deferred: false } })),
      );
    }

    return JobStatus.SUCCESS;
  }

  async handleRecognizeFaces({ id, deferred }: IDeferrableJob): Promise<JobStatus> {
    const { machineLearning } = await this.configCore.getConfig();
    if (!machineLearning.enabled || !machineLearning.facialRecognition.enabled) {
      return JobStatus.SKIPPED;
    }

    const face = await this.repository.getFaceByIdWithAssets(
      id,
      { person: true, asset: true },
      { id: true, personId: true, embedding: true },
    );
    if (!face || !face.asset) {
      this.logger.warn(`Face ${id} not found`);
      return JobStatus.FAILED;
    }

    if (face.personId) {
      this.logger.debug(`Face ${id} already has a person assigned`);
      return JobStatus.SKIPPED;
    }

    const matches = await this.smartInfoRepository.searchFaces({
      userIds: [face.asset.ownerId],
      embedding: face.embedding,
      maxDistance: machineLearning.facialRecognition.maxDistance,
      numResults: machineLearning.facialRecognition.minFaces,
    });

    // `matches` also includes the face itself
    if (machineLearning.facialRecognition.minFaces > 1 && matches.length <= 1) {
      this.logger.debug(`Face ${id} only matched the face itself, skipping`);
      return JobStatus.SKIPPED;
    }

    this.logger.debug(`Face ${id} has ${matches.length} matches`);

    const isCore = matches.length >= machineLearning.facialRecognition.minFaces;
    if (!isCore && !deferred) {
      this.logger.debug(`Deferring non-core face ${id} for later processing`);
      await this.jobRepository.queue({ name: JobName.FACIAL_RECOGNITION, data: { id, deferred: true } });
      return JobStatus.SKIPPED;
    }

    let personId = matches.find((match) => match.face.personId)?.face.personId;
    if (!personId) {
      const matchWithPerson = await this.smartInfoRepository.searchFaces({
        userIds: [face.asset.ownerId],
        embedding: face.embedding,
        maxDistance: machineLearning.facialRecognition.maxDistance,
        numResults: 1,
        hasPerson: true,
      });

      if (matchWithPerson.length > 0) {
        personId = matchWithPerson[0].face.personId;
      }
    }

    if (isCore && !personId) {
      this.logger.log(`Creating new person for face ${id}`);
      const newPerson = await this.repository.create({ ownerId: face.asset.ownerId, faceAssetId: face.id });
      await this.jobRepository.queue({ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id: newPerson.id } });
      personId = newPerson.id;
    }

    if (personId) {
      this.logger.debug(`Assigning face ${id} to person ${personId}`);
      await this.repository.reassignFaces({ faceIds: [id], newPersonId: personId });
    }

    return JobStatus.SUCCESS;
  }

  async handlePersonMigration({ id }: IEntityJob): Promise<JobStatus> {
    const person = await this.repository.getById(id);
    if (!person) {
      return JobStatus.FAILED;
    }

    await this.storageCore.movePersonFile(person, PersonPathType.FACE);

    return JobStatus.SUCCESS;
  }

  async handleGeneratePersonThumbnail(data: IEntityJob): Promise<JobStatus> {
    const { machineLearning, thumbnail } = await this.configCore.getConfig();
    if (!machineLearning.enabled || !machineLearning.facialRecognition.enabled) {
      return JobStatus.SKIPPED;
    }

    const person = await this.repository.getById(data.id);
    if (!person?.faceAssetId) {
      return JobStatus.FAILED;
    }

    const face = await this.repository.getFaceByIdWithAssets(person.faceAssetId);
    if (face === null) {
      return JobStatus.FAILED;
    }

    const {
      assetId,
      boundingBoxX1: x1,
      boundingBoxX2: x2,
      boundingBoxY1: y1,
      boundingBoxY2: y2,
      imageWidth,
      imageHeight,
    } = face;

    const [asset] = await this.assetRepository.getByIds([assetId]);
    if (!asset?.resizePath) {
      return JobStatus.FAILED;
    }
    this.logger.verbose(`Cropping face for person: ${person.id}`);
    const thumbnailPath = StorageCore.getPersonThumbnailPath(person);
    this.storageCore.ensureFolders(thumbnailPath);

    const halfWidth = (x2 - x1) / 2;
    const halfHeight = (y2 - y1) / 2;

    const middleX = Math.round(x1 + halfWidth);
    const middleY = Math.round(y1 + halfHeight);

    // zoom out 10%
    const targetHalfSize = Math.floor(Math.max(halfWidth, halfHeight) * 1.1);

    // get the longest distance from the center of the image without overflowing
    const newHalfSize = Math.min(
      middleX - Math.max(0, middleX - targetHalfSize),
      middleY - Math.max(0, middleY - targetHalfSize),
      Math.min(imageWidth - 1, middleX + targetHalfSize) - middleX,
      Math.min(imageHeight - 1, middleY + targetHalfSize) - middleY,
    );

    const cropOptions: CropOptions = {
      left: middleX - newHalfSize,
      top: middleY - newHalfSize,
      width: newHalfSize * 2,
      height: newHalfSize * 2,
    };
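    // Worked example (face box values assumed): for a face at (x1=100, y1=120, x2=300, y2=280)
    // in a 1000x800 image, halfWidth=100, halfHeight=80, middle=(200, 200),
    // targetHalfSize=floor(100 * 1.1)=110, and newHalfSize=min(110, 110, 110, 110)=110,
    // giving a 220x220 crop at left=90, top=90.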

    const croppedOutput = await this.mediaRepository.crop(asset.resizePath, cropOptions);
    const thumbnailOptions = {
      format: 'jpeg',
      size: FACE_THUMBNAIL_SIZE,
      colorspace: thumbnail.colorspace,
      quality: thumbnail.quality,
    } as const;

    await this.mediaRepository.resize(croppedOutput, thumbnailPath, thumbnailOptions);
    await this.repository.update({ id: person.id, thumbnailPath });

    return JobStatus.SUCCESS;
  }

  async mergePerson(auth: AuthDto, id: string, dto: MergePersonDto): Promise<BulkIdResponseDto[]> {
    const mergeIds = dto.ids;
    await this.access.requirePermission(auth, Permission.PERSON_WRITE, id);
    let primaryPerson = await this.findOrFail(id);
    const primaryName = primaryPerson.name || primaryPerson.id;

    const results: BulkIdResponseDto[] = [];

    const allowedIds = await this.access.checkAccess(auth, Permission.PERSON_MERGE, mergeIds);

    for (const mergeId of mergeIds) {
      const hasAccess = allowedIds.has(mergeId);
      if (!hasAccess) {
        results.push({ id: mergeId, success: false, error: BulkIdErrorReason.NO_PERMISSION });
        continue;
      }

      try {
        const mergePerson = await this.repository.getById(mergeId);
        if (!mergePerson) {
          results.push({ id: mergeId, success: false, error: BulkIdErrorReason.NOT_FOUND });
          continue;
        }

        const update: Partial<PersonEntity> = {};
        if (!primaryPerson.name && mergePerson.name) {
          update.name = mergePerson.name;
        }

        if (!primaryPerson.birthDate && mergePerson.birthDate) {
          update.birthDate = mergePerson.birthDate;
        }

        if (Object.keys(update).length > 0) {
          primaryPerson = await this.repository.update({ id: primaryPerson.id, ...update });
        }

        const mergeName = mergePerson.name || mergePerson.id;
        const mergeData: UpdateFacesData = { oldPersonId: mergeId, newPersonId: id };
        this.logger.log(`Merging ${mergeName} into ${primaryName}`);

        await this.repository.reassignFaces(mergeData);
        await this.delete([mergePerson]);

        this.logger.log(`Merged ${mergeName} into ${primaryName}`);
        results.push({ id: mergeId, success: true });
      } catch (error: Error | any) {
        this.logger.error(`Unable to merge ${mergeId} into ${id}: ${error}`, error?.stack);
        results.push({ id: mergeId, success: false, error: BulkIdErrorReason.UNKNOWN });
      }
    }
    return results;
  }

  private async findOrFail(id: string) {
    const person = await this.repository.getById(id);
    if (!person) {
      throw new BadRequestException('Person not found');
    }
    return person;
  }
}
@@ -0,0 +1,201 @@
import { mapAsset } from 'src/dtos/asset-response.dto';
import { SearchDto } from 'src/dtos/search.dto';
import { SystemConfigKey } from 'src/entities/system-config.entity';
import { IAssetRepository } from 'src/interfaces/asset.repository';
import { IMachineLearningRepository } from 'src/interfaces/machine-learning.repository';
import { IMetadataRepository } from 'src/interfaces/metadata.repository';
import { IPartnerRepository } from 'src/interfaces/partner.repository';
import { IPersonRepository } from 'src/interfaces/person.repository';
import { ISearchRepository } from 'src/interfaces/search.repository';
import { ISystemConfigRepository } from 'src/interfaces/system-config.repository';
import { SearchService } from 'src/services/search.service';
import { assetStub } from 'test/fixtures/asset.stub';
import { authStub } from 'test/fixtures/auth.stub';
import { personStub } from 'test/fixtures/person.stub';
import { newAssetRepositoryMock } from 'test/repositories/asset.repository.mock';
import { newMachineLearningRepositoryMock } from 'test/repositories/machine-learning.repository.mock';
import { newMetadataRepositoryMock } from 'test/repositories/metadata.repository.mock';
import { newPartnerRepositoryMock } from 'test/repositories/partner.repository.mock';
import { newPersonRepositoryMock } from 'test/repositories/person.repository.mock';
import { newSearchRepositoryMock } from 'test/repositories/search.repository.mock';
import { newSystemConfigRepositoryMock } from 'test/repositories/system-config.repository.mock';

jest.useFakeTimers();

describe(SearchService.name, () => {
  let sut: SearchService;
  let assetMock: jest.Mocked<IAssetRepository>;
  let configMock: jest.Mocked<ISystemConfigRepository>;
  let machineMock: jest.Mocked<IMachineLearningRepository>;
  let personMock: jest.Mocked<IPersonRepository>;
  let searchMock: jest.Mocked<ISearchRepository>;
  let partnerMock: jest.Mocked<IPartnerRepository>;
  let metadataMock: jest.Mocked<IMetadataRepository>;

  beforeEach(() => {
    assetMock = newAssetRepositoryMock();
    configMock = newSystemConfigRepositoryMock();
    machineMock = newMachineLearningRepositoryMock();
    personMock = newPersonRepositoryMock();
    searchMock = newSearchRepositoryMock();
    partnerMock = newPartnerRepositoryMock();
    metadataMock = newMetadataRepositoryMock();

    sut = new SearchService(configMock, machineMock, personMock, searchMock, assetMock, partnerMock, metadataMock);
  });

  it('should work', () => {
    expect(sut).toBeDefined();
  });

  describe('searchPerson', () => {
    it('should pass options to search', async () => {
      const { name } = personStub.withName;

      await sut.searchPerson(authStub.user1, { name, withHidden: false });

      expect(personMock.getByName).toHaveBeenCalledWith(authStub.user1.user.id, name, { withHidden: false });

      await sut.searchPerson(authStub.user1, { name, withHidden: true });

      expect(personMock.getByName).toHaveBeenCalledWith(authStub.user1.user.id, name, { withHidden: true });
    });
  });

  describe('getExploreData', () => {
    it('should get assets by city and tag', async () => {
      assetMock.getAssetIdByCity.mockResolvedValueOnce({
        fieldName: 'exifInfo.city',
        items: [{ value: 'Paris', data: assetStub.image.id }],
      });
      assetMock.getAssetIdByTag.mockResolvedValueOnce({
        fieldName: 'smartInfo.tags',
        items: [{ value: 'train', data: assetStub.imageFrom2015.id }],
      });
      assetMock.getByIdsWithAllRelations.mockResolvedValueOnce([assetStub.image, assetStub.imageFrom2015]);
      const expectedResponse = [
        { fieldName: 'exifInfo.city', items: [{ value: 'Paris', data: mapAsset(assetStub.image) }] },
        { fieldName: 'smartInfo.tags', items: [{ value: 'train', data: mapAsset(assetStub.imageFrom2015) }] },
      ];

      const result = await sut.getExploreData(authStub.user1);

      expect(result).toEqual(expectedResponse);
    });
  });

  describe('search', () => {
    it('should throw an error if query is missing', async () => {
      await expect(sut.search(authStub.user1, { q: '' })).rejects.toThrow('Missing query');
    });

    it('should search by metadata if `clip` option is false', async () => {
      const dto: SearchDto = { q: 'test query', clip: false };
      assetMock.searchMetadata.mockResolvedValueOnce([assetStub.image]);
      partnerMock.getAll.mockResolvedValueOnce([]);
      const expectedResponse = {
        albums: {
          total: 0,
          count: 0,
          items: [],
          facets: [],
        },
        assets: {
          total: 1,
          count: 1,
          items: [mapAsset(assetStub.image)],
          facets: [],
          nextPage: null,
        },
      };

      const result = await sut.search(authStub.user1, dto);

      expect(result).toEqual(expectedResponse);
      expect(assetMock.searchMetadata).toHaveBeenCalledWith(dto.q, [authStub.user1.user.id], { numResults: 250 });
      expect(searchMock.searchSmart).not.toHaveBeenCalled();
    });

    it('should search archived photos if `withArchived` option is true', async () => {
      const dto: SearchDto = { q: 'test query', clip: true, withArchived: true };
      const embedding = [1, 2, 3];
      searchMock.searchSmart.mockResolvedValueOnce({ items: [assetStub.image], hasNextPage: false });
      machineMock.encodeText.mockResolvedValueOnce(embedding);
      partnerMock.getAll.mockResolvedValueOnce([]);
      const expectedResponse = {
        albums: {
          total: 0,
          count: 0,
          items: [],
          facets: [],
        },
        assets: {
          total: 1,
          count: 1,
          items: [mapAsset(assetStub.image)],
          facets: [],
          nextPage: null,
        },
      };

      const result = await sut.search(authStub.user1, dto);

      expect(result).toEqual(expectedResponse);
      expect(searchMock.searchSmart).toHaveBeenCalledWith(
        { page: 1, size: 100 },
        {
          userIds: [authStub.user1.user.id],
          embedding,
          withArchived: true,
        },
      );
      expect(assetMock.searchMetadata).not.toHaveBeenCalled();
    });

    it('should search by CLIP if `clip` option is true', async () => {
      const dto: SearchDto = { q: 'test query', clip: true };
      const embedding = [1, 2, 3];
      searchMock.searchSmart.mockResolvedValueOnce({ items: [assetStub.image], hasNextPage: false });
      machineMock.encodeText.mockResolvedValueOnce(embedding);
      partnerMock.getAll.mockResolvedValueOnce([]);
      const expectedResponse = {
        albums: {
          total: 0,
          count: 0,
          items: [],
          facets: [],
        },
        assets: {
          total: 1,
          count: 1,
          items: [mapAsset(assetStub.image)],
          facets: [],
          nextPage: null,
        },
      };

      const result = await sut.search(authStub.user1, dto);

      expect(result).toEqual(expectedResponse);
      expect(searchMock.searchSmart).toHaveBeenCalledWith(
        { page: 1, size: 100 },
        {
          userIds: [authStub.user1.user.id],
          embedding,
          withArchived: false,
        },
      );
      expect(assetMock.searchMetadata).not.toHaveBeenCalled();
    });

    it.each([
      { key: SystemConfigKey.MACHINE_LEARNING_ENABLED },
      { key: SystemConfigKey.MACHINE_LEARNING_CLIP_ENABLED },
    ])('should throw an error if clip is requested but disabled', async ({ key }) => {
      const dto: SearchDto = { q: 'test query', clip: true };
      configMock.load.mockResolvedValue([{ key, value: false }]);

      await expect(sut.search(authStub.user1, dto)).rejects.toThrow('Smart search is not enabled');
    });
  });
});
@@ -0,0 +1,218 @@
import { Inject, Injectable } from '@nestjs/common';
import { FeatureFlag, SystemConfigCore } from 'src/cores/system-config.core';
import { AssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { PersonResponseDto } from 'src/dtos/person.dto';
import {
  MetadataSearchDto,
  PlacesResponseDto,
  SearchDto,
  SearchPeopleDto,
  SearchPlacesDto,
  SearchResponseDto,
  SearchSuggestionRequestDto,
  SearchSuggestionType,
  SmartSearchDto,
  mapPlaces,
} from 'src/dtos/search.dto';
import { AssetOrder } from 'src/entities/album.entity';
import { AssetEntity } from 'src/entities/asset.entity';
import { IAssetRepository } from 'src/interfaces/asset.repository';
import { IMachineLearningRepository } from 'src/interfaces/machine-learning.repository';
import { IMetadataRepository } from 'src/interfaces/metadata.repository';
import { IPartnerRepository } from 'src/interfaces/partner.repository';
import { IPersonRepository } from 'src/interfaces/person.repository';
import { ISearchRepository, SearchExploreItem, SearchStrategy } from 'src/interfaces/search.repository';
import { ISystemConfigRepository } from 'src/interfaces/system-config.repository';

@Injectable()
export class SearchService {
  private configCore: SystemConfigCore;

  constructor(
    @Inject(ISystemConfigRepository) configRepository: ISystemConfigRepository,
    @Inject(IMachineLearningRepository) private machineLearning: IMachineLearningRepository,
    @Inject(IPersonRepository) private personRepository: IPersonRepository,
    @Inject(ISearchRepository) private searchRepository: ISearchRepository,
    @Inject(IAssetRepository) private assetRepository: IAssetRepository,
    @Inject(IPartnerRepository) private partnerRepository: IPartnerRepository,
    @Inject(IMetadataRepository) private metadataRepository: IMetadataRepository,
  ) {
    this.configCore = SystemConfigCore.create(configRepository);
  }

  async searchPerson(auth: AuthDto, dto: SearchPeopleDto): Promise<PersonResponseDto[]> {
    return this.personRepository.getByName(auth.user.id, dto.name, { withHidden: dto.withHidden });
  }

  async searchPlaces(dto: SearchPlacesDto): Promise<PlacesResponseDto[]> {
    const places = await this.searchRepository.searchPlaces(dto.name);
    return places.map((place) => mapPlaces(place));
  }

  async getExploreData(auth: AuthDto): Promise<SearchExploreItem<AssetResponseDto>[]> {
    await this.configCore.requireFeature(FeatureFlag.SEARCH);
    const options = { maxFields: 12, minAssetsPerField: 5 };
    const results = await Promise.all([
      this.assetRepository.getAssetIdByCity(auth.user.id, options),
      this.assetRepository.getAssetIdByTag(auth.user.id, options),
    ]);
    const assetIds = new Set<string>(results.flatMap((field) => field.items.map((item) => item.data)));
    const assets = await this.assetRepository.getByIdsWithAllRelations([...assetIds]);
    const assetMap = new Map<string, AssetResponseDto>(assets.map((asset) => [asset.id, mapAsset(asset)]));

    return results.map(({ fieldName, items }) => ({
      fieldName,
      items: items.map(({ value, data }) => ({ value, data: assetMap.get(data) as AssetResponseDto })),
    }));
  }

  async searchMetadata(auth: AuthDto, dto: MetadataSearchDto): Promise<SearchResponseDto> {
    let checksum: Buffer | undefined;
    const userIds = await this.getUserIdsToSearch(auth);

    if (dto.checksum) {
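      // A 20-byte (SHA-1-sized) digest is 28 characters in base64 (including padding) and 40 in hex,
      // so the string length tells us which encoding the client used.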
      const encoding = dto.checksum.length === 28 ? 'base64' : 'hex';
      checksum = Buffer.from(dto.checksum, encoding);
    }

    const page = dto.page ?? 1;
    const size = dto.size || 250;
    const enumToOrder = { [AssetOrder.ASC]: 'ASC', [AssetOrder.DESC]: 'DESC' } as const;
    const { hasNextPage, items } = await this.searchRepository.searchMetadata(
      { page, size },
      {
        ...dto,
        checksum,
        userIds,
        orderDirection: dto.order ? enumToOrder[dto.order] : 'DESC',
      },
    );

    return this.mapResponse(items, hasNextPage ? (page + 1).toString() : null);
  }

  async searchSmart(auth: AuthDto, dto: SmartSearchDto): Promise<SearchResponseDto> {
    await this.configCore.requireFeature(FeatureFlag.SMART_SEARCH);
    const { machineLearning } = await this.configCore.getConfig();
    const userIds = await this.getUserIdsToSearch(auth);

    const embedding = await this.machineLearning.encodeText(
      machineLearning.url,
      { text: dto.query },
      machineLearning.clip,
    );

    const page = dto.page ?? 1;
    const size = dto.size || 100;
    const { hasNextPage, items } = await this.searchRepository.searchSmart(
      { page, size },
      { ...dto, userIds, embedding },
    );

    return this.mapResponse(items, hasNextPage ? (page + 1).toString() : null);
  }

  async getAssetsByCity(auth: AuthDto): Promise<AssetResponseDto[]> {
    const userIds = await this.getUserIdsToSearch(auth);
    const assets = await this.searchRepository.getAssetsByCity(userIds);
    return assets.map((asset) => mapAsset(asset));
  }

  getSearchSuggestions(auth: AuthDto, dto: SearchSuggestionRequestDto): Promise<string[]> {
    switch (dto.type) {
      case SearchSuggestionType.COUNTRY: {
        return this.metadataRepository.getCountries(auth.user.id);
      }
      case SearchSuggestionType.STATE: {
        return this.metadataRepository.getStates(auth.user.id, dto.country);
      }
      case SearchSuggestionType.CITY: {
        return this.metadataRepository.getCities(auth.user.id, dto.country, dto.state);
      }
      case SearchSuggestionType.CAMERA_MAKE: {
        return this.metadataRepository.getCameraMakes(auth.user.id, dto.model);
      }
      case SearchSuggestionType.CAMERA_MODEL: {
        return this.metadataRepository.getCameraModels(auth.user.id, dto.make);
      }
    }
  }

  // TODO: remove after implementing new search filters
  /** @deprecated */
  async search(auth: AuthDto, dto: SearchDto): Promise<SearchResponseDto> {
    await this.configCore.requireFeature(FeatureFlag.SEARCH);
    const { machineLearning } = await this.configCore.getConfig();
    const query = dto.q || dto.query;
    if (!query) {
      throw new Error('Missing query');
    }

    let strategy = SearchStrategy.TEXT;
    if (dto.smart || dto.clip) {
      await this.configCore.requireFeature(FeatureFlag.SMART_SEARCH);
      strategy = SearchStrategy.SMART;
    }

    const userIds = await this.getUserIdsToSearch(auth);
    const page = dto.page ?? 1;

    let nextPage: string | null = null;
    let assets: AssetEntity[] = [];
    switch (strategy) {
      case SearchStrategy.SMART: {
        const embedding = await this.machineLearning.encodeText(
          machineLearning.url,
          { text: query },
          machineLearning.clip,
        );

        const { hasNextPage, items } = await this.searchRepository.searchSmart(
          { page, size: dto.size || 100 },
          {
            userIds,
            embedding,
            withArchived: !!dto.withArchived,
          },
        );
        if (hasNextPage) {
          nextPage = (page + 1).toString();
        }
        assets = items;
        break;
      }
      case SearchStrategy.TEXT: {
        assets = await this.assetRepository.searchMetadata(query, userIds, { numResults: dto.size || 250 });
        break;
      }
      default: {
        break;
      }
    }

    return this.mapResponse(assets, nextPage);
  }

  private async getUserIdsToSearch(auth: AuthDto): Promise<string[]> {
    const userIds: string[] = [auth.user.id];
    const partners = await this.partnerRepository.getAll(auth.user.id);
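    // Only include partners who share their library with this user and are enabled in the timeline.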
    const partnersIds = partners
      .filter((partner) => partner.sharedBy && partner.inTimeline)
      .map((partner) => partner.sharedById);
    userIds.push(...partnersIds);
    return userIds;
  }

  private mapResponse(assets: AssetEntity[], nextPage: string | null): SearchResponseDto {
    return {
      albums: { total: 0, count: 0, items: [], facets: [] },
      assets: {
        total: assets.length,
        count: assets.length,
        items: assets.map((asset) => mapAsset(asset)),
        facets: [],
        nextPage,
      },
    };
  }
}
@@ -0,0 +1,275 @@
import { serverVersion } from 'src/domain/domain.constant';
import { SystemMetadataKey } from 'src/entities/system-metadata.entity';
import { ICommunicationRepository } from 'src/interfaces/communication.repository';
import { IServerInfoRepository } from 'src/interfaces/server-info.repository';
import { IStorageRepository } from 'src/interfaces/storage.repository';
import { ISystemConfigRepository } from 'src/interfaces/system-config.repository';
import { ISystemMetadataRepository } from 'src/interfaces/system-metadata.repository';
import { IUserRepository } from 'src/interfaces/user.repository';
import { ServerInfoService } from 'src/services/server-info.service';
import { newCommunicationRepositoryMock } from 'test/repositories/communication.repository.mock';
import { newStorageRepositoryMock } from 'test/repositories/storage.repository.mock';
import { newSystemConfigRepositoryMock } from 'test/repositories/system-config.repository.mock';
import { newServerInfoRepositoryMock } from 'test/repositories/system-info.repository.mock';
import { newSystemMetadataRepositoryMock } from 'test/repositories/system-metadata.repository.mock';
import { newUserRepositoryMock } from 'test/repositories/user.repository.mock';

describe(ServerInfoService.name, () => {
  let sut: ServerInfoService;
  let communicationMock: jest.Mocked<ICommunicationRepository>;
  let configMock: jest.Mocked<ISystemConfigRepository>;
  let serverInfoMock: jest.Mocked<IServerInfoRepository>;
  let storageMock: jest.Mocked<IStorageRepository>;
  let userMock: jest.Mocked<IUserRepository>;
  let systemMetadataMock: jest.Mocked<ISystemMetadataRepository>;

  beforeEach(() => {
    configMock = newSystemConfigRepositoryMock();
    communicationMock = newCommunicationRepositoryMock();
    serverInfoMock = newServerInfoRepositoryMock();
    storageMock = newStorageRepositoryMock();
    userMock = newUserRepositoryMock();
    systemMetadataMock = newSystemMetadataRepositoryMock();

    sut = new ServerInfoService(
      communicationMock,
      configMock,
      userMock,
      serverInfoMock,
      storageMock,
      systemMetadataMock,
    );
  });

  it('should work', () => {
    expect(sut).toBeDefined();
  });

  describe('getInfo', () => {
    it('should return the disk space as B', async () => {
      storageMock.checkDiskUsage.mockResolvedValue({ free: 200, available: 300, total: 500 });

      await expect(sut.getInfo()).resolves.toEqual({
        diskAvailable: '300 B',
        diskAvailableRaw: 300,
        diskSize: '500 B',
        diskSizeRaw: 500,
        diskUsagePercentage: 60,
        diskUse: '300 B',
        diskUseRaw: 300,
      });

      expect(storageMock.checkDiskUsage).toHaveBeenCalledWith('upload/library');
    });

    it('should return the disk space as KiB', async () => {
      storageMock.checkDiskUsage.mockResolvedValue({ free: 200_000, available: 300_000, total: 500_000 });

      await expect(sut.getInfo()).resolves.toEqual({
        diskAvailable: '293.0 KiB',
        diskAvailableRaw: 300_000,
        diskSize: '488.3 KiB',
        diskSizeRaw: 500_000,
        diskUsagePercentage: 60,
        diskUse: '293.0 KiB',
        diskUseRaw: 300_000,
      });

      expect(storageMock.checkDiskUsage).toHaveBeenCalledWith('upload/library');
    });

    it('should return the disk space as MiB', async () => {
      storageMock.checkDiskUsage.mockResolvedValue({ free: 200_000_000, available: 300_000_000, total: 500_000_000 });

      await expect(sut.getInfo()).resolves.toEqual({
        diskAvailable: '286.1 MiB',
        diskAvailableRaw: 300_000_000,
        diskSize: '476.8 MiB',
        diskSizeRaw: 500_000_000,
        diskUsagePercentage: 60,
        diskUse: '286.1 MiB',
        diskUseRaw: 300_000_000,
      });

      expect(storageMock.checkDiskUsage).toHaveBeenCalledWith('upload/library');
    });

    it('should return the disk space as GiB', async () => {
      storageMock.checkDiskUsage.mockResolvedValue({
        free: 200_000_000_000,
        available: 300_000_000_000,
        total: 500_000_000_000,
      });

      await expect(sut.getInfo()).resolves.toEqual({
        diskAvailable: '279.4 GiB',
        diskAvailableRaw: 300_000_000_000,
        diskSize: '465.7 GiB',
        diskSizeRaw: 500_000_000_000,
        diskUsagePercentage: 60,
        diskUse: '279.4 GiB',
        diskUseRaw: 300_000_000_000,
      });

      expect(storageMock.checkDiskUsage).toHaveBeenCalledWith('upload/library');
    });

    it('should return the disk space as TiB', async () => {
      storageMock.checkDiskUsage.mockResolvedValue({
        free: 200_000_000_000_000,
        available: 300_000_000_000_000,
        total: 500_000_000_000_000,
      });

      await expect(sut.getInfo()).resolves.toEqual({
        diskAvailable: '272.8 TiB',
        diskAvailableRaw: 300_000_000_000_000,
        diskSize: '454.7 TiB',
        diskSizeRaw: 500_000_000_000_000,
        diskUsagePercentage: 60,
        diskUse: '272.8 TiB',
        diskUseRaw: 300_000_000_000_000,
      });

      expect(storageMock.checkDiskUsage).toHaveBeenCalledWith('upload/library');
    });

    it('should return the disk space as PiB', async () => {
      storageMock.checkDiskUsage.mockResolvedValue({
        free: 200_000_000_000_000_000,
        available: 300_000_000_000_000_000,
        total: 500_000_000_000_000_000,
      });

      await expect(sut.getInfo()).resolves.toEqual({
        diskAvailable: '266.5 PiB',
        diskAvailableRaw: 300_000_000_000_000_000,
        diskSize: '444.1 PiB',
        diskSizeRaw: 500_000_000_000_000_000,
        diskUsagePercentage: 60,
        diskUse: '266.5 PiB',
        diskUseRaw: 300_000_000_000_000_000,
      });

      expect(storageMock.checkDiskUsage).toHaveBeenCalledWith('upload/library');
    });
  });

  describe('ping', () => {
    it('should respond with pong', () => {
      expect(sut.ping()).toEqual({ res: 'pong' });
    });
  });

  describe('getVersion', () => {
    it('should respond with the server version', () => {
      expect(sut.getVersion()).toEqual(serverVersion);
    });
  });

  describe('getFeatures', () => {
    it('should respond with the server features', async () => {
      await expect(sut.getFeatures()).resolves.toEqual({
        smartSearch: true,
        facialRecognition: true,
        map: true,
        reverseGeocoding: true,
        oauth: false,
        oauthAutoLaunch: false,
        passwordLogin: true,
        search: true,
        sidecar: true,
        configFile: false,
        trash: true,
      });
      expect(configMock.load).toHaveBeenCalled();
    });
  });

  describe('getConfig', () => {
    it('should respond with the server configuration', async () => {
      await expect(sut.getConfig()).resolves.toEqual({
        loginPageMessage: '',
        oauthButtonText: 'Login with OAuth',
        trashDays: 30,
        userDeleteDelay: 7,
        isInitialized: undefined,
        isOnboarded: false,
        externalDomain: '',
      });
      expect(configMock.load).toHaveBeenCalled();
    });
  });

  describe('setAdminOnboarding', () => {
    it('should set admin onboarding to true', async () => {
      await sut.setAdminOnboarding();
      expect(systemMetadataMock.set).toHaveBeenCalledWith(SystemMetadataKey.ADMIN_ONBOARDING, { isOnboarded: true });
    });
  });

  describe('getStats', () => {
    it('should total up usage by user', async () => {
      userMock.getUserStats.mockResolvedValue([
        {
          userId: 'user1',
          userName: '1 User',
          photos: 10,
          videos: 11,
          usage: 12_345,
          quotaSizeInBytes: 0,
        },
        {
          userId: 'user2',
          userName: '2 User',
          photos: 10,
          videos: 20,
          usage: 123_456,
          quotaSizeInBytes: 0,
        },
        {
          userId: 'user3',
          userName: '3 User',
          photos: 100,
          videos: 0,
          usage: 987_654,
          quotaSizeInBytes: 0,
        },
      ]);

      await expect(sut.getStatistics()).resolves.toEqual({
        photos: 120,
        videos: 31,
        usage: 1_123_455,
        usageByUser: [
          {
            photos: 10,
            quotaSizeInBytes: 0,
            usage: 12_345,
            userName: '1 User',
            userId: 'user1',
            videos: 11,
          },
          {
            photos: 10,
            quotaSizeInBytes: 0,
            usage: 123_456,
            userName: '2 User',
            userId: 'user2',
            videos: 20,
          },
          {
            photos: 100,
            quotaSizeInBytes: 0,
            usage: 987_654,
            userName: '3 User',
            userId: 'user3',
            videos: 0,
          },
        ],
      });

      expect(userMock.getUserStats).toHaveBeenCalled();
    });
  });
});
@@ -0,0 +1,188 @@
import { Inject, Injectable } from '@nestjs/common';
import { DateTime } from 'luxon';
import { StorageCore, StorageFolder } from 'src/cores/storage.core';
import { SystemConfigCore } from 'src/cores/system-config.core';
import { Version, isDev, mimeTypes, serverVersion } from 'src/domain/domain.constant';
import {
  ServerConfigDto,
  ServerFeaturesDto,
  ServerInfoResponseDto,
  ServerMediaTypesResponseDto,
  ServerPingResponse,
  ServerStatsResponseDto,
  UsageByUserDto,
} from 'src/dtos/server-info.dto';
import { SystemMetadataKey } from 'src/entities/system-metadata.entity';
import { ImmichLogger } from 'src/infra/logger';
import { ClientEvent, ICommunicationRepository } from 'src/interfaces/communication.repository';
import { IServerInfoRepository } from 'src/interfaces/server-info.repository';
import { IStorageRepository } from 'src/interfaces/storage.repository';
import { ISystemConfigRepository } from 'src/interfaces/system-config.repository';
import { ISystemMetadataRepository } from 'src/interfaces/system-metadata.repository';
import { IUserRepository, UserStatsQueryResponse } from 'src/interfaces/user.repository';
import { asHumanReadable } from 'src/utils';

@Injectable()
export class ServerInfoService {
  private logger = new ImmichLogger(ServerInfoService.name);
  private configCore: SystemConfigCore;
  private releaseVersion = serverVersion;
  private releaseVersionCheckedAt: DateTime | null = null;

  constructor(
    @Inject(ICommunicationRepository) private communicationRepository: ICommunicationRepository,
    @Inject(ISystemConfigRepository) configRepository: ISystemConfigRepository,
    @Inject(IUserRepository) private userRepository: IUserRepository,
    @Inject(IServerInfoRepository) private repository: IServerInfoRepository,
    @Inject(IStorageRepository) private storageRepository: IStorageRepository,
    @Inject(ISystemMetadataRepository) private readonly systemMetadataRepository: ISystemMetadataRepository,
  ) {
    this.configCore = SystemConfigCore.create(configRepository);
    this.communicationRepository.on('connect', (userId) => this.handleConnect(userId));
  }

  async init(): Promise<void> {
    await this.handleVersionCheck();

    const featureFlags = await this.getFeatures();
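    // Config-file deployments skip the interactive onboarding flow, so mark it complete up front.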
    if (featureFlags.configFile) {
      await this.setAdminOnboarding();
    }
  }

  async getInfo(): Promise<ServerInfoResponseDto> {
    const libraryBase = StorageCore.getBaseFolder(StorageFolder.LIBRARY);
    const diskInfo = await this.storageRepository.checkDiskUsage(libraryBase);

    const usagePercentage = (((diskInfo.total - diskInfo.free) / diskInfo.total) * 100).toFixed(2);

    const serverInfo = new ServerInfoResponseDto();
    serverInfo.diskAvailable = asHumanReadable(diskInfo.available);
    serverInfo.diskSize = asHumanReadable(diskInfo.total);
    serverInfo.diskUse = asHumanReadable(diskInfo.total - diskInfo.free);
    serverInfo.diskAvailableRaw = diskInfo.available;
    serverInfo.diskSizeRaw = diskInfo.total;
    serverInfo.diskUseRaw = diskInfo.total - diskInfo.free;
    serverInfo.diskUsagePercentage = Number.parseFloat(usagePercentage);
    return serverInfo;
  }

  ping(): ServerPingResponse {
    return { res: 'pong' };
  }

  getVersion() {
    return serverVersion;
  }

  getFeatures(): Promise<ServerFeaturesDto> {
    return this.configCore.getFeatures();
  }

  async getTheme() {
    const { theme } = await this.configCore.getConfig();
    return theme;
  }

  async getConfig(): Promise<ServerConfigDto> {
    const config = await this.configCore.getConfig();
    const isInitialized = await this.userRepository.hasAdmin();
    const onboarding = await this.systemMetadataRepository.get(SystemMetadataKey.ADMIN_ONBOARDING);

    return {
      loginPageMessage: config.server.loginPageMessage,
      trashDays: config.trash.days,
      userDeleteDelay: config.user.deleteDelay,
      oauthButtonText: config.oauth.buttonText,
      isInitialized,
      isOnboarded: onboarding?.isOnboarded || false,
      externalDomain: config.server.externalDomain,
    };
  }

  setAdminOnboarding(): Promise<void> {
    return this.systemMetadataRepository.set(SystemMetadataKey.ADMIN_ONBOARDING, { isOnboarded: true });
  }

  async getStatistics(): Promise<ServerStatsResponseDto> {
    const userStats: UserStatsQueryResponse[] = await this.userRepository.getUserStats();
    const serverStats = new ServerStatsResponseDto();

    for (const user of userStats) {
      const usage = new UsageByUserDto();
      usage.userId = user.userId;
      usage.userName = user.userName;
      usage.photos = user.photos;
      usage.videos = user.videos;
      usage.usage = user.usage;
      usage.quotaSizeInBytes = user.quotaSizeInBytes;

      serverStats.photos += usage.photos;
      serverStats.videos += usage.videos;
      serverStats.usage += usage.usage;
      serverStats.usageByUser.push(usage);
    }

    return serverStats;
  }

  getSupportedMediaTypes(): ServerMediaTypesResponseDto {
    return {
      video: Object.keys(mimeTypes.video),
      image: Object.keys(mimeTypes.image),
      sidecar: Object.keys(mimeTypes.sidecar),
    };
  }

  async handleVersionCheck(): Promise<boolean> {
    try {
      if (isDev) {
        return true;
      }

      const { newVersionCheck } = await this.configCore.getConfig();
      if (!newVersionCheck.enabled) {
        return true;
      }

      // check once per hour (max)
      if (this.releaseVersionCheckedAt && DateTime.now().diff(this.releaseVersionCheckedAt).as('minutes') < 60) {
        return true;
      }

      const githubRelease = await this.repository.getGitHubRelease();
      const githubVersion = Version.fromString(githubRelease.tag_name);
      const publishedAt = new Date(githubRelease.published_at);
      this.releaseVersion = githubVersion;
      this.releaseVersionCheckedAt = DateTime.now();

      if (githubVersion.isNewerThan(serverVersion)) {
        this.logger.log(`Found ${githubVersion.toString()}, released at ${publishedAt.toLocaleString()}`);
        this.newReleaseNotification();
      }
    } catch (error: Error | any) {
      this.logger.warn(`Unable to run version check: ${error}`, error?.stack);
    }

    return true;
  }

  private handleConnect(userId: string) {
    this.communicationRepository.send(ClientEvent.SERVER_VERSION, userId, serverVersion);
    this.newReleaseNotification(userId);
  }

  private newReleaseNotification(userId?: string) {
    const event = ClientEvent.NEW_RELEASE;
    const payload = {
      isAvailable: this.releaseVersion.isNewerThan(serverVersion),
      checkedAt: this.releaseVersionCheckedAt,
      serverVersion,
      releaseVersion: this.releaseVersion,
    };

    userId
      ? this.communicationRepository.send(event, userId, payload)
      : this.communicationRepository.broadcast(event, payload);
  }
}
@@ -0,0 +1,279 @@
import { BadRequestException, ForbiddenException, UnauthorizedException } from '@nestjs/common';
import _ from 'lodash';
import { AssetIdErrorReason } from 'src/dtos/asset-ids.response.dto';
import { SharedLinkType } from 'src/entities/shared-link.entity';
import { ICryptoRepository } from 'src/interfaces/crypto.repository';
import { ISharedLinkRepository } from 'src/interfaces/shared-link.repository';
import { SharedLinkService } from 'src/services/shared-link.service';
import { albumStub } from 'test/fixtures/album.stub';
import { assetStub } from 'test/fixtures/asset.stub';
import { authStub } from 'test/fixtures/auth.stub';
import { sharedLinkResponseStub, sharedLinkStub } from 'test/fixtures/shared-link.stub';
import { IAccessRepositoryMock, newAccessRepositoryMock } from 'test/repositories/access.repository.mock';
import { newCryptoRepositoryMock } from 'test/repositories/crypto.repository.mock';
import { newSharedLinkRepositoryMock } from 'test/repositories/shared-link.repository.mock';

describe(SharedLinkService.name, () => {
  let sut: SharedLinkService;
  let accessMock: IAccessRepositoryMock;
  let cryptoMock: jest.Mocked<ICryptoRepository>;
  let shareMock: jest.Mocked<ISharedLinkRepository>;

  beforeEach(() => {
    accessMock = newAccessRepositoryMock();
    cryptoMock = newCryptoRepositoryMock();
    shareMock = newSharedLinkRepositoryMock();

    sut = new SharedLinkService(accessMock, cryptoMock, shareMock);
  });

  it('should work', () => {
    expect(sut).toBeDefined();
  });

  describe('getAll', () => {
    it('should return all shared links for a user', async () => {
      shareMock.getAll.mockResolvedValue([sharedLinkStub.expired, sharedLinkStub.valid]);
      await expect(sut.getAll(authStub.user1)).resolves.toEqual([
        sharedLinkResponseStub.expired,
        sharedLinkResponseStub.valid,
      ]);
      expect(shareMock.getAll).toHaveBeenCalledWith(authStub.user1.user.id);
    });
  });

  describe('getMine', () => {
    it('should only work for a public user', async () => {
      await expect(sut.getMine(authStub.admin, {})).rejects.toBeInstanceOf(ForbiddenException);
      expect(shareMock.get).not.toHaveBeenCalled();
    });

    it('should return the shared link for the public user', async () => {
      const authDto = authStub.adminSharedLink;
      shareMock.get.mockResolvedValue(sharedLinkStub.valid);
      await expect(sut.getMine(authDto, {})).resolves.toEqual(sharedLinkResponseStub.valid);
      expect(shareMock.get).toHaveBeenCalledWith(authDto.user.id, authDto.sharedLink?.id);
    });

    it('should not return metadata', async () => {
      const authDto = authStub.adminSharedLinkNoExif;
      shareMock.get.mockResolvedValue(sharedLinkStub.readonlyNoExif);
      await expect(sut.getMine(authDto, {})).resolves.toEqual(sharedLinkResponseStub.readonlyNoMetadata);
      expect(shareMock.get).toHaveBeenCalledWith(authDto.user.id, authDto.sharedLink?.id);
    });

    it('should throw an error for a password protected shared link', async () => {
      const authDto = authStub.adminSharedLink;
      shareMock.get.mockResolvedValue(sharedLinkStub.passwordRequired);
      await expect(sut.getMine(authDto, {})).rejects.toBeInstanceOf(UnauthorizedException);
      expect(shareMock.get).toHaveBeenCalledWith(authDto.user.id, authDto.sharedLink?.id);
    });
  });

  describe('get', () => {
    it('should throw an error for an invalid shared link', async () => {
      shareMock.get.mockResolvedValue(null);
      await expect(sut.get(authStub.user1, 'missing-id')).rejects.toBeInstanceOf(BadRequestException);
      expect(shareMock.get).toHaveBeenCalledWith(authStub.user1.user.id, 'missing-id');
      expect(shareMock.update).not.toHaveBeenCalled();
    });

    it('should get a shared link by id', async () => {
      shareMock.get.mockResolvedValue(sharedLinkStub.valid);
      await expect(sut.get(authStub.user1, sharedLinkStub.valid.id)).resolves.toEqual(sharedLinkResponseStub.valid);
      expect(shareMock.get).toHaveBeenCalledWith(authStub.user1.user.id, sharedLinkStub.valid.id);
    });
  });

  describe('create', () => {
    it('should not allow an album shared link without an albumId', async () => {
      await expect(sut.create(authStub.admin, { type: SharedLinkType.ALBUM, assetIds: [] })).rejects.toBeInstanceOf(
        BadRequestException,
      );
    });

    it('should not allow non-owners to create album shared links', async () => {
      await expect(
        sut.create(authStub.admin, { type: SharedLinkType.ALBUM, assetIds: [], albumId: 'album-1' }),
      ).rejects.toBeInstanceOf(BadRequestException);
    });

    it('should not allow individual shared links with no assets', async () => {
      await expect(
        sut.create(authStub.admin, { type: SharedLinkType.INDIVIDUAL, assetIds: [] }),
      ).rejects.toBeInstanceOf(BadRequestException);
    });

    it('should require asset ownership to make an individual shared link', async () => {
      await expect(
        sut.create(authStub.admin, { type: SharedLinkType.INDIVIDUAL, assetIds: ['asset-1'] }),
      ).rejects.toBeInstanceOf(BadRequestException);
    });

    it('should create an album shared link', async () => {
      accessMock.album.checkOwnerAccess.mockResolvedValue(new Set([albumStub.oneAsset.id]));
      shareMock.create.mockResolvedValue(sharedLinkStub.valid);

      await sut.create(authStub.admin, { type: SharedLinkType.ALBUM, albumId: albumStub.oneAsset.id });

      expect(accessMock.album.checkOwnerAccess).toHaveBeenCalledWith(
        authStub.admin.user.id,
        new Set([albumStub.oneAsset.id]),
      );
      expect(shareMock.create).toHaveBeenCalledWith({
        type: SharedLinkType.ALBUM,
        userId: authStub.admin.user.id,
        albumId: albumStub.oneAsset.id,
        allowDownload: true,
        allowUpload: true,
        assets: [],
        description: null,
        expiresAt: null,
        showExif: true,
        key: Buffer.from('random-bytes', 'utf8'),
      });
    });

    it('should create an individual shared link', async () => {
      accessMock.asset.checkOwnerAccess.mockResolvedValue(new Set([assetStub.image.id]));
      shareMock.create.mockResolvedValue(sharedLinkStub.individual);

      await sut.create(authStub.admin, {
        type: SharedLinkType.INDIVIDUAL,
        assetIds: [assetStub.image.id],
        showMetadata: true,
        allowDownload: true,
        allowUpload: true,
      });

      expect(accessMock.asset.checkOwnerAccess).toHaveBeenCalledWith(
        authStub.admin.user.id,
        new Set([assetStub.image.id]),
      );
      expect(shareMock.create).toHaveBeenCalledWith({
        type: SharedLinkType.INDIVIDUAL,
        userId: authStub.admin.user.id,
        albumId: null,
        allowDownload: true,
        allowUpload: true,
        assets: [{ id: assetStub.image.id }],
        description: null,
        expiresAt: null,
        showExif: true,
        key: Buffer.from('random-bytes', 'utf8'),
      });
    });
  });

  describe('update', () => {
    it('should throw an error for an invalid shared link', async () => {
      shareMock.get.mockResolvedValue(null);
      await expect(sut.update(authStub.user1, 'missing-id', {})).rejects.toBeInstanceOf(BadRequestException);
      expect(shareMock.get).toHaveBeenCalledWith(authStub.user1.user.id, 'missing-id');
      expect(shareMock.update).not.toHaveBeenCalled();
    });

    it('should update a shared link', async () => {
      shareMock.get.mockResolvedValue(sharedLinkStub.valid);
      shareMock.update.mockResolvedValue(sharedLinkStub.valid);
      await sut.update(authStub.user1, sharedLinkStub.valid.id, { allowDownload: false });
      expect(shareMock.get).toHaveBeenCalledWith(authStub.user1.user.id, sharedLinkStub.valid.id);
      expect(shareMock.update).toHaveBeenCalledWith({
        id: sharedLinkStub.valid.id,
        userId: authStub.user1.user.id,
        allowDownload: false,
      });
    });
  });

  describe('remove', () => {
    it('should throw an error for an invalid shared link', async () => {
      shareMock.get.mockResolvedValue(null);
      await expect(sut.remove(authStub.user1, 'missing-id')).rejects.toBeInstanceOf(BadRequestException);
      expect(shareMock.get).toHaveBeenCalledWith(authStub.user1.user.id, 'missing-id');
      expect(shareMock.update).not.toHaveBeenCalled();
    });

    it('should remove a key', async () => {
      shareMock.get.mockResolvedValue(sharedLinkStub.valid);
      await sut.remove(authStub.user1, sharedLinkStub.valid.id);
      expect(shareMock.get).toHaveBeenCalledWith(authStub.user1.user.id, sharedLinkStub.valid.id);
      expect(shareMock.remove).toHaveBeenCalledWith(sharedLinkStub.valid);
    });
  });

  describe('addAssets', () => {
    it('should not work on album shared links', async () => {
      shareMock.get.mockResolvedValue(sharedLinkStub.valid);
      await expect(sut.addAssets(authStub.admin, 'link-1', { assetIds: ['asset-1'] })).rejects.toBeInstanceOf(
        BadRequestException,
      );
    });

    it('should add assets to a shared link', async () => {
      shareMock.get.mockResolvedValue(_.cloneDeep(sharedLinkStub.individual));
      shareMock.create.mockResolvedValue(sharedLinkStub.individual);
      accessMock.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-3']));

      await expect(
        sut.addAssets(authStub.admin, 'link-1', { assetIds: [assetStub.image.id, 'asset-2', 'asset-3'] }),
      ).resolves.toEqual([
        { assetId: assetStub.image.id, success: false, error: AssetIdErrorReason.DUPLICATE },
        { assetId: 'asset-2', success: false, error: AssetIdErrorReason.NO_PERMISSION },
        { assetId: 'asset-3', success: true },
      ]);

      expect(accessMock.asset.checkOwnerAccess).toHaveBeenCalledTimes(1);
      expect(shareMock.update).toHaveBeenCalledWith({
        ...sharedLinkStub.individual,
        assets: [assetStub.image, { id: 'asset-3' }],
      });
    });
  });

  describe('removeAssets', () => {
    it('should not work on album shared links', async () => {
      shareMock.get.mockResolvedValue(sharedLinkStub.valid);
      await expect(sut.removeAssets(authStub.admin, 'link-1', { assetIds: ['asset-1'] })).rejects.toBeInstanceOf(
        BadRequestException,
      );
    });

    it('should remove assets from a shared link', async () => {
      shareMock.get.mockResolvedValue(_.cloneDeep(sharedLinkStub.individual));
      shareMock.create.mockResolvedValue(sharedLinkStub.individual);

      await expect(
        sut.removeAssets(authStub.admin, 'link-1', { assetIds: [assetStub.image.id, 'asset-2'] }),
      ).resolves.toEqual([
        { assetId: assetStub.image.id, success: true },
        { assetId: 'asset-2', success: false, error: AssetIdErrorReason.NOT_FOUND },
      ]);

      expect(shareMock.update).toHaveBeenCalledWith({ ...sharedLinkStub.individual, assets: [] });
    });
  });

  describe('getMetadataTags', () => {
    it('should return null when auth is not a shared link', async () => {
      await expect(sut.getMetadataTags(authStub.admin)).resolves.toBe(null);
      expect(shareMock.get).not.toHaveBeenCalled();
    });

    it('should return null when shared link has a password', async () => {
      await expect(sut.getMetadataTags(authStub.passwordSharedLink)).resolves.toBe(null);
      expect(shareMock.get).not.toHaveBeenCalled();
    });

    it('should return metadata tags', async () => {
      shareMock.get.mockResolvedValue(sharedLinkStub.individual);
      await expect(sut.getMetadataTags(authStub.adminSharedLink)).resolves.toEqual({
        description: '1 shared photos & videos',
        imageUrl:
          '/api/asset/thumbnail/asset-id?key=LCtkaJX4R1O_9D-2lq0STzsPryoL1UdAbyb6Sna1xxmQCSuqU2J1ZUsqt6GR-yGm1s0',
        title: 'Public Share',
      });
      expect(shareMock.get).toHaveBeenCalled();
    });
  });
});
@@ -0,0 +1,215 @@
import { BadRequestException, ForbiddenException, Inject, Injectable, UnauthorizedException } from '@nestjs/common';
import { AccessCore, Permission } from 'src/cores/access.core';
import { AssetIdErrorReason, AssetIdsResponseDto } from 'src/dtos/asset-ids.response.dto';
import { AssetIdsDto } from 'src/dtos/asset.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import {
  SharedLinkCreateDto,
  SharedLinkEditDto,
  SharedLinkPasswordDto,
  SharedLinkResponseDto,
  mapSharedLink,
  mapSharedLinkWithoutMetadata,
} from 'src/dtos/shared-link.dto';
import { AssetEntity } from 'src/entities/asset.entity';
import { SharedLinkEntity, SharedLinkType } from 'src/entities/shared-link.entity';
import { IAccessRepository } from 'src/interfaces/access.repository';
import { ICryptoRepository } from 'src/interfaces/crypto.repository';
import { ISharedLinkRepository } from 'src/interfaces/shared-link.repository';
import { OpenGraphTags } from 'src/utils';

@Injectable()
export class SharedLinkService {
  private access: AccessCore;

  constructor(
    @Inject(IAccessRepository) accessRepository: IAccessRepository,
    @Inject(ICryptoRepository) private cryptoRepository: ICryptoRepository,
    @Inject(ISharedLinkRepository) private repository: ISharedLinkRepository,
  ) {
    this.access = AccessCore.create(accessRepository);
  }

  getAll(auth: AuthDto): Promise<SharedLinkResponseDto[]> {
    return this.repository.getAll(auth.user.id).then((links) => links.map((link) => mapSharedLink(link)));
  }

  async getMine(auth: AuthDto, dto: SharedLinkPasswordDto): Promise<SharedLinkResponseDto> {
    if (!auth.sharedLink) {
      throw new ForbiddenException();
    }

    const sharedLink = await this.findOrFail(auth.user.id, auth.sharedLink.id);
    const response = this.mapToSharedLink(sharedLink, { withExif: sharedLink.showExif });
    if (sharedLink.password) {
      response.token = this.validateAndRefreshToken(sharedLink, dto);
    }

    return response;
  }

  async get(auth: AuthDto, id: string): Promise<SharedLinkResponseDto> {
    const sharedLink = await this.findOrFail(auth.user.id, id);
    return this.mapToSharedLink(sharedLink, { withExif: true });
  }

  async create(auth: AuthDto, dto: SharedLinkCreateDto): Promise<SharedLinkResponseDto> {
    switch (dto.type) {
      case SharedLinkType.ALBUM: {
        if (!dto.albumId) {
          throw new BadRequestException('Invalid albumId');
        }
        await this.access.requirePermission(auth, Permission.ALBUM_SHARE, dto.albumId);
        break;
      }

      case SharedLinkType.INDIVIDUAL: {
        if (!dto.assetIds || dto.assetIds.length === 0) {
          throw new BadRequestException('Invalid assetIds');
        }

        await this.access.requirePermission(auth, Permission.ASSET_SHARE, dto.assetIds);

        break;
      }
    }

    const sharedLink = await this.repository.create({
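      // 50 random bytes; rendered as base64url when embedded in public share URLs.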
      key: this.cryptoRepository.randomBytes(50),
      userId: auth.user.id,
      type: dto.type,
      albumId: dto.albumId || null,
      assets: (dto.assetIds || []).map((id) => ({ id }) as AssetEntity),
      description: dto.description || null,
      password: dto.password,
      expiresAt: dto.expiresAt || null,
      allowUpload: dto.allowUpload ?? true,
      allowDownload: dto.allowDownload ?? true,
      showExif: dto.showMetadata ?? true,
    });

    return this.mapToSharedLink(sharedLink, { withExif: true });
  }

  async update(auth: AuthDto, id: string, dto: SharedLinkEditDto) {
    await this.findOrFail(auth.user.id, id);
    const sharedLink = await this.repository.update({
      id,
      userId: auth.user.id,
      description: dto.description,
      password: dto.password,
      expiresAt: dto.changeExpiryTime && !dto.expiresAt ? null : dto.expiresAt,
      allowUpload: dto.allowUpload,
      allowDownload: dto.allowDownload,
      showExif: dto.showMetadata,
    });
    return this.mapToSharedLink(sharedLink, { withExif: true });
  }

  async remove(auth: AuthDto, id: string): Promise<void> {
    const sharedLink = await this.findOrFail(auth.user.id, id);
    await this.repository.remove(sharedLink);
  }

  // TODO: replace `userId` with permissions and access control checks
  private async findOrFail(userId: string, id: string) {
    const sharedLink = await this.repository.get(userId, id);
    if (!sharedLink) {
      throw new BadRequestException('Shared link not found');
    }
    return sharedLink;
  }

  async addAssets(auth: AuthDto, id: string, dto: AssetIdsDto): Promise<AssetIdsResponseDto[]> {
    const sharedLink = await this.findOrFail(auth.user.id, id);

    if (sharedLink.type !== SharedLinkType.INDIVIDUAL) {
      throw new BadRequestException('Invalid shared link type');
    }

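    // Only check share permission for assets that are not already attached to the link.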
    const existingAssetIds = new Set(sharedLink.assets.map((asset) => asset.id));
    const notPresentAssetIds = dto.assetIds.filter((assetId) => !existingAssetIds.has(assetId));
    const allowedAssetIds = await this.access.checkAccess(auth, Permission.ASSET_SHARE, notPresentAssetIds);

    const results: AssetIdsResponseDto[] = [];
    for (const assetId of dto.assetIds) {
      const hasAsset = existingAssetIds.has(assetId);
      if (hasAsset) {
        results.push({ assetId, success: false, error: AssetIdErrorReason.DUPLICATE });
        continue;
      }

      const hasAccess = allowedAssetIds.has(assetId);
      if (!hasAccess) {
        results.push({ assetId, success: false, error: AssetIdErrorReason.NO_PERMISSION });
        continue;
      }

      results.push({ assetId, success: true });
      sharedLink.assets.push({ id: assetId } as AssetEntity);
    }

    await this.repository.update(sharedLink);

    return results;
  }

  async removeAssets(auth: AuthDto, id: string, dto: AssetIdsDto): Promise<AssetIdsResponseDto[]> {
    const sharedLink = await this.findOrFail(auth.user.id, id);

    if (sharedLink.type !== SharedLinkType.INDIVIDUAL) {
      throw new BadRequestException('Invalid shared link type');
    }

    const results: AssetIdsResponseDto[] = [];
    for (const assetId of dto.assetIds) {
      const hasAsset = sharedLink.assets.find((asset) => asset.id === assetId);
      if (!hasAsset) {
        results.push({ assetId, success: false, error: AssetIdErrorReason.NOT_FOUND });
        continue;
      }

      results.push({ assetId, success: true });
      sharedLink.assets = sharedLink.assets.filter((asset) => asset.id !== assetId);
    }

    await this.repository.update(sharedLink);

    return results;
  }

  async getMetadataTags(auth: AuthDto): Promise<null | OpenGraphTags> {
    if (!auth.sharedLink || auth.sharedLink.password) {
      return null;
    }

    const sharedLink = await this.findOrFail(auth.sharedLink.userId, auth.sharedLink.id);
    const assetId = sharedLink.album?.albumThumbnailAssetId || sharedLink.assets[0]?.id;
    // Fall back to the album's asset count for album links, which have no directly attached assets.
    const assetCount = sharedLink.assets.length || sharedLink.album?.assets.length || 0;

    return {
      title: sharedLink.album ? sharedLink.album.albumName : 'Public Share',
      description: sharedLink.description || `${assetCount} shared photos & videos`,
      imageUrl: assetId
        ? `/api/asset/thumbnail/${assetId}?key=${sharedLink.key.toString('base64url')}`
        : '/feature-panel.png',
    };
  }

  private mapToSharedLink(sharedLink: SharedLinkEntity, { withExif }: { withExif: boolean }) {
    return withExif ? mapSharedLink(sharedLink) : mapSharedLinkWithoutMetadata(sharedLink);
  }

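  /**
   * The token is a deterministic hash of the link id and password. A request is accepted if it
   * presents either the correct password or a previously issued token; the return value is the
   * comma-separated token list with the current token appended, for the client to reuse later.
   */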
  private validateAndRefreshToken(sharedLink: SharedLinkEntity, dto: SharedLinkPasswordDto): string {
    const token = this.cryptoRepository.hashSha256(`${sharedLink.id}-${sharedLink.password}`);
    const sharedLinkTokens = dto.token?.split(',') || [];
    if (sharedLink.password !== dto.password && !sharedLinkTokens.includes(token)) {
      throw new UnauthorizedException('Invalid password');
    }

    if (!sharedLinkTokens.includes(token)) {
      sharedLinkTokens.push(token);
    }
    return sharedLinkTokens.join(',');
  }
}
@@ -0,0 +1,143 @@
import { JobName } from 'src/domain/job/job.constants';
import { cleanModelName, getCLIPModelInfo } from 'src/domain/smart-info/smart-info.constant';
import { AssetEntity } from 'src/entities/asset.entity';
import { SystemConfigKey } from 'src/entities/system-config.entity';
import { IAssetRepository, WithoutProperty } from 'src/interfaces/asset.repository';
import { IDatabaseRepository } from 'src/interfaces/database.repository';
import { IJobRepository } from 'src/interfaces/job.repository';
import { IMachineLearningRepository } from 'src/interfaces/machine-learning.repository';
import { ISearchRepository } from 'src/interfaces/search.repository';
import { ISystemConfigRepository } from 'src/interfaces/system-config.repository';
import { SmartInfoService } from 'src/services/smart-info.service';
import { assetStub } from 'test/fixtures/asset.stub';
import { newAssetRepositoryMock } from 'test/repositories/asset.repository.mock';
import { newDatabaseRepositoryMock } from 'test/repositories/database.repository.mock';
import { newJobRepositoryMock } from 'test/repositories/job.repository.mock';
import { newMachineLearningRepositoryMock } from 'test/repositories/machine-learning.repository.mock';
import { newSearchRepositoryMock } from 'test/repositories/search.repository.mock';
import { newSystemConfigRepositoryMock } from 'test/repositories/system-config.repository.mock';

const asset = {
  id: 'asset-1',
  resizePath: 'path/to/resize.ext',
} as AssetEntity;

describe(SmartInfoService.name, () => {
  let sut: SmartInfoService;
  let assetMock: jest.Mocked<IAssetRepository>;
  let configMock: jest.Mocked<ISystemConfigRepository>;
  let jobMock: jest.Mocked<IJobRepository>;
  let searchMock: jest.Mocked<ISearchRepository>;
  let machineMock: jest.Mocked<IMachineLearningRepository>;
  let databaseMock: jest.Mocked<IDatabaseRepository>;

  beforeEach(() => {
    assetMock = newAssetRepositoryMock();
    configMock = newSystemConfigRepositoryMock();
    searchMock = newSearchRepositoryMock();
    jobMock = newJobRepositoryMock();
    machineMock = newMachineLearningRepositoryMock();
    databaseMock = newDatabaseRepositoryMock();
    sut = new SmartInfoService(assetMock, databaseMock, jobMock, machineMock, searchMock, configMock);

    assetMock.getByIds.mockResolvedValue([asset]);
  });

  it('should work', () => {
    expect(sut).toBeDefined();
  });

  describe('handleQueueEncodeClip', () => {
    it('should do nothing if machine learning is disabled', async () => {
      configMock.load.mockResolvedValue([{ key: SystemConfigKey.MACHINE_LEARNING_ENABLED, value: false }]);

      await sut.handleQueueEncodeClip({});

      expect(assetMock.getAll).not.toHaveBeenCalled();
      expect(assetMock.getWithout).not.toHaveBeenCalled();
    });

    it('should queue the assets without clip embeddings', async () => {
      assetMock.getWithout.mockResolvedValue({
        items: [assetStub.image],
        hasNextPage: false,
      });

      await sut.handleQueueEncodeClip({ force: false });

      expect(jobMock.queueAll).toHaveBeenCalledWith([{ name: JobName.SMART_SEARCH, data: { id: assetStub.image.id } }]);
      expect(assetMock.getWithout).toHaveBeenCalledWith({ skip: 0, take: 1000 }, WithoutProperty.SMART_SEARCH);
      expect(searchMock.deleteAllSearchEmbeddings).not.toHaveBeenCalled();
    });

    it('should queue all the assets', async () => {
      assetMock.getAll.mockResolvedValue({
        items: [assetStub.image],
        hasNextPage: false,
      });

      await sut.handleQueueEncodeClip({ force: true });

      expect(jobMock.queueAll).toHaveBeenCalledWith([{ name: JobName.SMART_SEARCH, data: { id: assetStub.image.id } }]);
      expect(assetMock.getAll).toHaveBeenCalled();
      expect(searchMock.deleteAllSearchEmbeddings).toHaveBeenCalled();
    });
  });

  describe('handleEncodeClip', () => {
    it('should do nothing if machine learning is disabled', async () => {
      configMock.load.mockResolvedValue([{ key: SystemConfigKey.MACHINE_LEARNING_ENABLED, value: false }]);

      await sut.handleEncodeClip({ id: '123' });

      expect(assetMock.getByIds).not.toHaveBeenCalled();
      expect(machineMock.encodeImage).not.toHaveBeenCalled();
    });

    it('should skip assets without a resize path', async () => {
      const asset = { resizePath: '' } as AssetEntity;
      assetMock.getByIds.mockResolvedValue([asset]);

      await sut.handleEncodeClip({ id: asset.id });

      expect(searchMock.upsert).not.toHaveBeenCalled();
      expect(machineMock.encodeImage).not.toHaveBeenCalled();
    });

    it('should save the returned objects', async () => {
      searchMock.upsert.mockResolvedValue();
      machineMock.encodeImage.mockResolvedValue([0.01, 0.02, 0.03]);

      await sut.handleEncodeClip({ id: asset.id });

      expect(machineMock.encodeImage).toHaveBeenCalledWith(
        'http://immich-machine-learning:3003',
        { imagePath: 'path/to/resize.ext' },
        { enabled: true, modelName: 'ViT-B-32__openai' },
      );
      expect(searchMock.upsert).toHaveBeenCalledWith(
        {
          assetId: 'asset-1',
        },
        [0.01, 0.02, 0.03],
      );
    });
  });

  describe('cleanModelName', () => {
    it('should clean name', () => {
      expect(cleanModelName('ViT-B-32::openai')).toEqual('ViT-B-32__openai');
      expect(cleanModelName('M-CLIP/XLM-Roberta-Large-Vit-L-14')).toEqual('XLM-Roberta-Large-Vit-L-14');
    });
  });

  describe('getCLIPModelInfo', () => {
    it('should return the model info', () => {
      expect(getCLIPModelInfo('ViT-B-32__openai')).toEqual({ dimSize: 512 });
    });

    it('should throw an error if the model is not present', () => {
      expect(() => getCLIPModelInfo('test-model')).toThrow('Unknown CLIP model: test-model');
    });
  });
});
@@ -0,0 +1,99 @@
import { Inject, Injectable } from '@nestjs/common';
import { SystemConfigCore } from 'src/cores/system-config.core';
import { JOBS_ASSET_PAGINATION_SIZE, JobName, QueueName } from 'src/domain/job/job.constants';
import { IBaseJob, IEntityJob } from 'src/domain/job/job.interface';
import { ImmichLogger } from 'src/infra/logger';
import { IAssetRepository, WithoutProperty } from 'src/interfaces/asset.repository';
import { DatabaseLock, IDatabaseRepository } from 'src/interfaces/database.repository';
import { IJobRepository, JobStatus } from 'src/interfaces/job.repository';
import { IMachineLearningRepository } from 'src/interfaces/machine-learning.repository';
import { ISearchRepository } from 'src/interfaces/search.repository';
import { ISystemConfigRepository } from 'src/interfaces/system-config.repository';
import { usePagination } from 'src/utils';

@Injectable()
export class SmartInfoService {
  private configCore: SystemConfigCore;
  private logger = new ImmichLogger(SmartInfoService.name);

  constructor(
    @Inject(IAssetRepository) private assetRepository: IAssetRepository,
    @Inject(IDatabaseRepository) private databaseRepository: IDatabaseRepository,
    @Inject(IJobRepository) private jobRepository: IJobRepository,
    @Inject(IMachineLearningRepository) private machineLearning: IMachineLearningRepository,
    @Inject(ISearchRepository) private repository: ISearchRepository,
    @Inject(ISystemConfigRepository) configRepository: ISystemConfigRepository,
  ) {
    this.configCore = SystemConfigCore.create(configRepository);
  }

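  // Pause smart-search ingest and drain in-flight jobs before (re)initializing the search
  // repository for the configured CLIP model, guarded by the CLIPDimSize database lock.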
  async init() {
    await this.jobRepository.pause(QueueName.SMART_SEARCH);

    await this.jobRepository.waitForQueueCompletion(QueueName.SMART_SEARCH);

    const { machineLearning } = await this.configCore.getConfig();

    await this.databaseRepository.withLock(DatabaseLock.CLIPDimSize, () =>
      this.repository.init(machineLearning.clip.modelName),
    );

    await this.jobRepository.resume(QueueName.SMART_SEARCH);
  }

  async handleQueueEncodeClip({ force }: IBaseJob): Promise<JobStatus> {
    const { machineLearning } = await this.configCore.getConfig();
    if (!machineLearning.enabled || !machineLearning.clip.enabled) {
      return JobStatus.SKIPPED;
    }

    if (force) {
      await this.repository.deleteAllSearchEmbeddings();
    }

    const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => {
      return force
        ? this.assetRepository.getAll(pagination)
        : this.assetRepository.getWithout(pagination, WithoutProperty.SMART_SEARCH);
    });

    for await (const assets of assetPagination) {
      await this.jobRepository.queueAll(
        assets.map((asset) => ({ name: JobName.SMART_SEARCH, data: { id: asset.id } })),
      );
    }

    return JobStatus.SUCCESS;
  }

  async handleEncodeClip({ id }: IEntityJob): Promise<JobStatus> {
    const { machineLearning } = await this.configCore.getConfig();
    if (!machineLearning.enabled || !machineLearning.clip.enabled) {
      return JobStatus.SKIPPED;
    }

    const [asset] = await this.assetRepository.getByIds([id]);
    if (!asset) {
      return JobStatus.FAILED;
    }

    if (!asset.resizePath) {
      return JobStatus.FAILED;
    }

    const clipEmbedding = await this.machineLearning.encodeImage(
      machineLearning.url,
      { imagePath: asset.resizePath },
      machineLearning.clip,
    );

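    // init() may hold the CLIPDimSize lock while re-initializing embeddings for a new
    // model; wait for it to clear before upserting.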
    if (this.databaseRepository.isBusy(DatabaseLock.CLIPDimSize)) {
      this.logger.verbose(`Waiting for CLIP dimension size to be updated`);
      await this.databaseRepository.wait(DatabaseLock.CLIPDimSize);
    }

    await this.repository.upsert({ assetId: asset.id }, clipEmbedding);

    return JobStatus.SUCCESS;
  }
}
@@ -0,0 +1,652 @@
import { when } from 'jest-when';
import { Stats } from 'node:fs';
import { SystemConfigCore, defaults } from 'src/cores/system-config.core';
import { AssetPathType } from 'src/entities/move.entity';
import { SystemConfig, SystemConfigKey } from 'src/entities/system-config.entity';
import { IAlbumRepository } from 'src/interfaces/album.repository';
import { IAssetRepository } from 'src/interfaces/asset.repository';
import { ICryptoRepository } from 'src/interfaces/crypto.repository';
import { IDatabaseRepository } from 'src/interfaces/database.repository';
import { JobStatus } from 'src/interfaces/job.repository';
import { IMoveRepository } from 'src/interfaces/move.repository';
import { IPersonRepository } from 'src/interfaces/person.repository';
import { IStorageRepository } from 'src/interfaces/storage.repository';
import { ISystemConfigRepository } from 'src/interfaces/system-config.repository';
import { IUserRepository } from 'src/interfaces/user.repository';
import { StorageTemplateService } from 'src/services/storage-template.service';
import { assetStub } from 'test/fixtures/asset.stub';
import { userStub } from 'test/fixtures/user.stub';
import { newAlbumRepositoryMock } from 'test/repositories/album.repository.mock';
import { newAssetRepositoryMock } from 'test/repositories/asset.repository.mock';
import { newCryptoRepositoryMock } from 'test/repositories/crypto.repository.mock';
import { newDatabaseRepositoryMock } from 'test/repositories/database.repository.mock';
import { newMoveRepositoryMock } from 'test/repositories/move.repository.mock';
import { newPersonRepositoryMock } from 'test/repositories/person.repository.mock';
import { newStorageRepositoryMock } from 'test/repositories/storage.repository.mock';
import { newSystemConfigRepositoryMock } from 'test/repositories/system-config.repository.mock';
import { newUserRepositoryMock } from 'test/repositories/user.repository.mock';

describe(StorageTemplateService.name, () => {
  let sut: StorageTemplateService;
  let albumMock: jest.Mocked<IAlbumRepository>;
  let assetMock: jest.Mocked<IAssetRepository>;
  let configMock: jest.Mocked<ISystemConfigRepository>;
  let moveMock: jest.Mocked<IMoveRepository>;
  let personMock: jest.Mocked<IPersonRepository>;
  let storageMock: jest.Mocked<IStorageRepository>;
  let userMock: jest.Mocked<IUserRepository>;
  let cryptoMock: jest.Mocked<ICryptoRepository>;
  let databaseMock: jest.Mocked<IDatabaseRepository>;

  it('should work', () => {
    expect(sut).toBeDefined();
  });

  beforeEach(() => {
    configMock = newSystemConfigRepositoryMock();
    assetMock = newAssetRepositoryMock();
    albumMock = newAlbumRepositoryMock();
    moveMock = newMoveRepositoryMock();
    personMock = newPersonRepositoryMock();
    storageMock = newStorageRepositoryMock();
    userMock = newUserRepositoryMock();
    cryptoMock = newCryptoRepositoryMock();
    databaseMock = newDatabaseRepositoryMock();

    configMock.load.mockResolvedValue([{ key: SystemConfigKey.STORAGE_TEMPLATE_ENABLED, value: true }]);

    sut = new StorageTemplateService(
      albumMock,
      assetMock,
      configMock,
      moveMock,
      personMock,
      storageMock,
      userMock,
      cryptoMock,
      databaseMock,
    );

    SystemConfigCore.create(configMock).config$.next(defaults);
  });

  describe('validate', () => {
    it('should allow valid templates', () => {
      expect(() =>
        sut.validate({
          newConfig: {
            storageTemplate: {
              template:
                '{{y}}{{M}}{{W}}{{d}}{{h}}{{m}}{{s}}{{filename}}{{ext}}{{filetype}}{{filetypefull}}{{assetId}}{{album}}',
            },
          } as SystemConfig,
          oldConfig: {} as SystemConfig,
        }),
      ).not.toThrow();
    });

    it('should fail for an invalid template', () => {
      expect(() =>
        sut.validate({
          newConfig: {
            storageTemplate: {
              template: '{{foo}}',
            },
          } as SystemConfig,
          oldConfig: {} as SystemConfig,
        }),
      ).toThrow(/Invalid storage template.*/);
    });
  });

  describe('handleMigrationSingle', () => {
    it('should skip when storage template is disabled', async () => {
      configMock.load.mockResolvedValue([{ key: SystemConfigKey.STORAGE_TEMPLATE_ENABLED, value: false }]);
      await expect(sut.handleMigrationSingle({ id: assetStub.image.id })).resolves.toBe(JobStatus.SKIPPED);
      expect(assetMock.getByIds).not.toHaveBeenCalled();
      expect(storageMock.checkFileExists).not.toHaveBeenCalled();
      expect(storageMock.rename).not.toHaveBeenCalled();
      expect(storageMock.copyFile).not.toHaveBeenCalled();
      expect(assetMock.update).not.toHaveBeenCalled();
      expect(moveMock.create).not.toHaveBeenCalled();
      expect(moveMock.update).not.toHaveBeenCalled();
      expect(storageMock.stat).not.toHaveBeenCalled();
    });

    it('should migrate single moving picture', async () => {
      userMock.get.mockResolvedValue(userStub.user1);
      const newMotionPicturePath = `upload/library/${userStub.user1.id}/2022/2022-06-19/${assetStub.livePhotoStillAsset.id}.mp4`;
      const newStillPicturePath = `upload/library/${userStub.user1.id}/2022/2022-06-19/${assetStub.livePhotoStillAsset.id}.jpeg`;

      when(assetMock.getByIds)
        .calledWith([assetStub.livePhotoStillAsset.id], { exifInfo: true })
        .mockResolvedValue([assetStub.livePhotoStillAsset]);

      when(assetMock.getByIds)
        .calledWith([assetStub.livePhotoMotionAsset.id], { exifInfo: true })
        .mockResolvedValue([assetStub.livePhotoMotionAsset]);

      when(moveMock.create)
        .calledWith({
          entityId: assetStub.livePhotoStillAsset.id,
          pathType: AssetPathType.ORIGINAL,
          oldPath: assetStub.livePhotoStillAsset.originalPath,
          newPath: newStillPicturePath,
        })
        .mockResolvedValue({
          id: '123',
          entityId: assetStub.livePhotoStillAsset.id,
          pathType: AssetPathType.ORIGINAL,
          oldPath: assetStub.livePhotoStillAsset.originalPath,
          newPath: newStillPicturePath,
        });

      when(moveMock.create)
        .calledWith({
          entityId: assetStub.livePhotoMotionAsset.id,
          pathType: AssetPathType.ORIGINAL,
          oldPath: assetStub.livePhotoMotionAsset.originalPath,
          newPath: newMotionPicturePath,
        })
        .mockResolvedValue({
          id: '124',
          entityId: assetStub.livePhotoMotionAsset.id,
          pathType: AssetPathType.ORIGINAL,
          oldPath: assetStub.livePhotoMotionAsset.originalPath,
          newPath: newMotionPicturePath,
        });

      await expect(sut.handleMigrationSingle({ id: assetStub.livePhotoStillAsset.id })).resolves.toBe(
        JobStatus.SUCCESS,
      );

      expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.livePhotoStillAsset.id], { exifInfo: true });
      expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.livePhotoMotionAsset.id], { exifInfo: true });
      expect(storageMock.checkFileExists).toHaveBeenCalledTimes(2);
      expect(assetMock.update).toHaveBeenCalledWith({
        id: assetStub.livePhotoStillAsset.id,
        originalPath: newStillPicturePath,
      });
      expect(assetMock.update).toHaveBeenCalledWith({
        id: assetStub.livePhotoMotionAsset.id,
        originalPath: newMotionPicturePath,
      });
    });

    it('should migrate previously failed move from original path when it still exists', async () => {
      userMock.get.mockResolvedValue(userStub.user1);
      const previousFailedNewPath = `upload/library/${userStub.user1.id}/2023/Feb/${assetStub.image.id}.jpg`;
      const newPath = `upload/library/${userStub.user1.id}/2023/2023-02-23/${assetStub.image.id}.jpg`;

      when(storageMock.checkFileExists).calledWith(assetStub.image.originalPath).mockResolvedValue(true);
      when(storageMock.checkFileExists).calledWith(previousFailedNewPath).mockResolvedValue(false);

      when(moveMock.getByEntity).calledWith(assetStub.image.id, AssetPathType.ORIGINAL).mockResolvedValue({
        id: '123',
        entityId: assetStub.image.id,
        pathType: AssetPathType.ORIGINAL,
        oldPath: assetStub.image.originalPath,
        newPath: previousFailedNewPath,
      });

      when(assetMock.getByIds)
        .calledWith([assetStub.image.id], { exifInfo: true })
        .mockResolvedValue([assetStub.image]);

      when(moveMock.update)
        .calledWith({
          id: '123',
          oldPath: assetStub.image.originalPath,
          newPath,
        })
        .mockResolvedValue({
          id: '123',
          entityId: assetStub.image.id,
          pathType: AssetPathType.ORIGINAL,
          oldPath: assetStub.image.originalPath,
          newPath,
        });

      await expect(sut.handleMigrationSingle({ id: assetStub.image.id })).resolves.toBe(JobStatus.SUCCESS);

      expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id], { exifInfo: true });
      expect(storageMock.checkFileExists).toHaveBeenCalledTimes(3);
      expect(storageMock.rename).toHaveBeenCalledWith(assetStub.image.originalPath, newPath);
      expect(moveMock.update).toHaveBeenCalledWith({
        id: '123',
        oldPath: assetStub.image.originalPath,
        newPath,
      });
      expect(assetMock.update).toHaveBeenCalledWith({
        id: assetStub.image.id,
        originalPath: newPath,
      });
    });

    it('should migrate previously failed move from previous new path when old path no longer exists, should validate file size still matches before moving', async () => {
      userMock.get.mockResolvedValue(userStub.user1);
      const previousFailedNewPath = `upload/library/${userStub.user1.id}/2023/Feb/${assetStub.image.id}.jpg`;
      const newPath = `upload/library/${userStub.user1.id}/2023/2023-02-23/${assetStub.image.id}.jpg`;

      when(storageMock.checkFileExists).calledWith(assetStub.image.originalPath).mockResolvedValue(false);
      when(storageMock.checkFileExists).calledWith(previousFailedNewPath).mockResolvedValue(true);
      when(storageMock.stat)
        .calledWith(previousFailedNewPath)
        .mockResolvedValue({ size: 5000 } as Stats);
      when(cryptoMock.hashFile).calledWith(previousFailedNewPath).mockResolvedValue(assetStub.image.checksum);

      when(moveMock.getByEntity).calledWith(assetStub.image.id, AssetPathType.ORIGINAL).mockResolvedValue({
        id: '123',
        entityId: assetStub.image.id,
        pathType: AssetPathType.ORIGINAL,
        oldPath: assetStub.image.originalPath,
        newPath: previousFailedNewPath,
      });

      when(assetMock.getByIds)
        .calledWith([assetStub.image.id], { exifInfo: true })
        .mockResolvedValue([assetStub.image]);

      when(moveMock.update)
        .calledWith({
          id: '123',
          oldPath: previousFailedNewPath,
          newPath,
        })
        .mockResolvedValue({
          id: '123',
          entityId: assetStub.image.id,
          pathType: AssetPathType.ORIGINAL,
          oldPath: previousFailedNewPath,
          newPath,
        });

      await expect(sut.handleMigrationSingle({ id: assetStub.image.id })).resolves.toBe(JobStatus.SUCCESS);

      expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id], { exifInfo: true });
      expect(storageMock.checkFileExists).toHaveBeenCalledTimes(3);
      expect(storageMock.stat).toHaveBeenCalledWith(previousFailedNewPath);
      expect(storageMock.rename).toHaveBeenCalledWith(previousFailedNewPath, newPath);
      expect(storageMock.copyFile).not.toHaveBeenCalled();
      expect(moveMock.update).toHaveBeenCalledWith({
        id: '123',
        oldPath: previousFailedNewPath,
        newPath,
      });
      expect(assetMock.update).toHaveBeenCalledWith({
        id: assetStub.image.id,
        originalPath: newPath,
      });
    });

    it('should fail move if copying and hash of asset and the new file do not match', async () => {
      userMock.get.mockResolvedValue(userStub.user1);
      const newPath = `upload/library/${userStub.user1.id}/2023/2023-02-23/${assetStub.image.id}.jpg`;

      when(storageMock.rename).calledWith(assetStub.image.originalPath, newPath).mockRejectedValue({ code: 'EXDEV' });
      when(storageMock.stat)
        .calledWith(newPath)
        .mockResolvedValue({ size: 5000 } as Stats);
      when(cryptoMock.hashFile).calledWith(newPath).mockResolvedValue(Buffer.from('different-hash', 'utf8'));

      when(assetMock.getByIds)
        .calledWith([assetStub.image.id], { exifInfo: true })
        .mockResolvedValue([assetStub.image]);

      when(moveMock.create)
        .calledWith({
          entityId: assetStub.image.id,
          pathType: AssetPathType.ORIGINAL,
          oldPath: assetStub.image.originalPath,
          newPath: newPath,
        })
        .mockResolvedValue({
          id: '123',
          entityId: assetStub.image.id,
          pathType: AssetPathType.ORIGINAL,
          oldPath: assetStub.image.originalPath,
          newPath,
        });

      await expect(sut.handleMigrationSingle({ id: assetStub.image.id })).resolves.toBe(JobStatus.SUCCESS);

      expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id], { exifInfo: true });
      expect(storageMock.checkFileExists).toHaveBeenCalledTimes(1);
      expect(storageMock.stat).toHaveBeenCalledWith(newPath);
      expect(moveMock.create).toHaveBeenCalledWith({
        entityId: assetStub.image.id,
        pathType: AssetPathType.ORIGINAL,
        oldPath: assetStub.image.originalPath,
        newPath: newPath,
      });
      expect(storageMock.rename).toHaveBeenCalledWith(assetStub.image.originalPath, newPath);
      expect(storageMock.copyFile).toHaveBeenCalledWith(assetStub.image.originalPath, newPath);
      expect(storageMock.unlink).toHaveBeenCalledWith(newPath);
      expect(storageMock.unlink).toHaveBeenCalledTimes(1);
      expect(assetMock.update).not.toHaveBeenCalled();
    });

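    // Parameterized recovery-validation cases: the first row fails the file size check,
    // the second fails the checksum check, so no move is performed either way.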
    it.each`
      failedPathChecksum                     | failedPathSize                              | reason
      ${assetStub.image.checksum}            | ${500}                                      | ${'file size'}
      ${Buffer.from('bad checksum', 'utf8')} | ${assetStub.image.exifInfo?.fileSizeInByte} | ${'checksum'}
    `(
      'should fail to migrate previously failed move from previous new path when old path no longer exists if $reason validation fails',
      async ({ failedPathChecksum, failedPathSize }) => {
        userMock.get.mockResolvedValue(userStub.user1);
        const previousFailedNewPath = `upload/library/${userStub.user1.id}/2023/Feb/${assetStub.image.id}.jpg`;
        const newPath = `upload/library/${userStub.user1.id}/2023/2023-02-23/${assetStub.image.id}.jpg`;

        when(storageMock.checkFileExists).calledWith(assetStub.image.originalPath).mockResolvedValue(false);
        when(storageMock.checkFileExists).calledWith(previousFailedNewPath).mockResolvedValue(true);
        when(storageMock.stat)
          .calledWith(previousFailedNewPath)
          .mockResolvedValue({ size: failedPathSize } as Stats);
        when(cryptoMock.hashFile).calledWith(previousFailedNewPath).mockResolvedValue(failedPathChecksum);

        when(moveMock.getByEntity).calledWith(assetStub.image.id, AssetPathType.ORIGINAL).mockResolvedValue({
          id: '123',
          entityId: assetStub.image.id,
          pathType: AssetPathType.ORIGINAL,
          oldPath: assetStub.image.originalPath,
          newPath: previousFailedNewPath,
        });

        when(assetMock.getByIds)
          .calledWith([assetStub.image.id], { exifInfo: true })
          .mockResolvedValue([assetStub.image]);

        when(moveMock.update)
          .calledWith({
            id: '123',
            oldPath: previousFailedNewPath,
            newPath,
          })
          .mockResolvedValue({
            id: '123',
            entityId: assetStub.image.id,
            pathType: AssetPathType.ORIGINAL,
            oldPath: previousFailedNewPath,
            newPath,
          });

        await expect(sut.handleMigrationSingle({ id: assetStub.image.id })).resolves.toBe(JobStatus.SUCCESS);

        expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id], { exifInfo: true });
        expect(storageMock.checkFileExists).toHaveBeenCalledTimes(3);
        expect(storageMock.stat).toHaveBeenCalledWith(previousFailedNewPath);
        expect(storageMock.rename).not.toHaveBeenCalled();
        expect(storageMock.copyFile).not.toHaveBeenCalled();
        expect(moveMock.update).not.toHaveBeenCalled();
        expect(assetMock.update).not.toHaveBeenCalled();
      },
    );
  });

  describe('handle template migration', () => {
    it('should handle no assets', async () => {
      assetMock.getAll.mockResolvedValue({
        items: [],
        hasNextPage: false,
      });
      userMock.getList.mockResolvedValue([]);

      await sut.handleMigration();

      expect(assetMock.getAll).toHaveBeenCalled();
    });

    it('should handle an asset with a duplicate destination', async () => {
      assetMock.getAll.mockResolvedValue({
        items: [assetStub.image],
        hasNextPage: false,
      });
      userMock.getList.mockResolvedValue([userStub.user1]);
      moveMock.create.mockResolvedValue({
        id: '123',
        entityId: assetStub.image.id,
        pathType: AssetPathType.ORIGINAL,
        oldPath: assetStub.image.originalPath,
        newPath: 'upload/library/user-id/2023/2023-02-23/asset-id+1.jpg',
      });

      when(storageMock.checkFileExists)
        .calledWith('upload/library/user-id/2023/2023-02-23/asset-id.jpg')
        .mockResolvedValue(true);

      when(storageMock.checkFileExists)
        .calledWith('upload/library/user-id/2023/2023-02-23/asset-id+1.jpg')
        .mockResolvedValue(false);

      await sut.handleMigration();

      expect(assetMock.getAll).toHaveBeenCalled();
      expect(storageMock.checkFileExists).toHaveBeenCalledTimes(2);
      expect(assetMock.update).toHaveBeenCalledWith({
        id: assetStub.image.id,
        originalPath: 'upload/library/user-id/2023/2023-02-23/asset-id+1.jpg',
      });
      expect(userMock.getList).toHaveBeenCalled();
    });

    it('should skip when an asset already matches the template', async () => {
      assetMock.getAll.mockResolvedValue({
        items: [
          {
            ...assetStub.image,
            originalPath: 'upload/library/user-id/2023/2023-02-23/asset-id.jpg',
          },
        ],
        hasNextPage: false,
      });
      userMock.getList.mockResolvedValue([userStub.user1]);

      await sut.handleMigration();

      expect(assetMock.getAll).toHaveBeenCalled();
      expect(storageMock.rename).not.toHaveBeenCalled();
      expect(storageMock.copyFile).not.toHaveBeenCalled();
      expect(storageMock.checkFileExists).not.toHaveBeenCalledTimes(2);
      expect(assetMock.update).not.toHaveBeenCalled();
    });

    it('should skip when an asset is probably a duplicate', async () => {
      assetMock.getAll.mockResolvedValue({
        items: [
          {
            ...assetStub.image,
            originalPath: 'upload/library/user-id/2023/2023-02-23/asset-id+1.jpg',
          },
        ],
        hasNextPage: false,
      });
      userMock.getList.mockResolvedValue([userStub.user1]);

      await sut.handleMigration();

      expect(assetMock.getAll).toHaveBeenCalled();
      expect(storageMock.rename).not.toHaveBeenCalled();
      expect(storageMock.copyFile).not.toHaveBeenCalled();
      expect(storageMock.checkFileExists).not.toHaveBeenCalledTimes(2);
      expect(assetMock.update).not.toHaveBeenCalled();
    });

    it('should move an asset', async () => {
      assetMock.getAll.mockResolvedValue({
        items: [assetStub.image],
        hasNextPage: false,
      });
      userMock.getList.mockResolvedValue([userStub.user1]);
      moveMock.create.mockResolvedValue({
        id: '123',
        entityId: assetStub.image.id,
        pathType: AssetPathType.ORIGINAL,
        oldPath: assetStub.image.originalPath,
        newPath: 'upload/library/user-id/2023/2023-02-23/asset-id.jpg',
      });

      await sut.handleMigration();

      expect(assetMock.getAll).toHaveBeenCalled();
      expect(storageMock.rename).toHaveBeenCalledWith(
        '/original/path.jpg',
        'upload/library/user-id/2023/2023-02-23/asset-id.jpg',
      );
      expect(assetMock.update).toHaveBeenCalledWith({
        id: assetStub.image.id,
        originalPath: 'upload/library/user-id/2023/2023-02-23/asset-id.jpg',
      });
    });

    it('should use the user storage label', async () => {
      assetMock.getAll.mockResolvedValue({
        items: [assetStub.image],
        hasNextPage: false,
      });
      userMock.getList.mockResolvedValue([userStub.storageLabel]);
      moveMock.create.mockResolvedValue({
        id: '123',
        entityId: assetStub.image.id,
        pathType: AssetPathType.ORIGINAL,
        oldPath: assetStub.image.originalPath,
        newPath: 'upload/library/user-id/2023/2023-02-23/asset-id.jpg',
      });

      await sut.handleMigration();

      expect(assetMock.getAll).toHaveBeenCalled();
      expect(storageMock.rename).toHaveBeenCalledWith(
        '/original/path.jpg',
        'upload/library/label-1/2023/2023-02-23/asset-id.jpg',
      );
      expect(assetMock.update).toHaveBeenCalledWith({
        id: assetStub.image.id,
        originalPath: 'upload/library/label-1/2023/2023-02-23/asset-id.jpg',
      });
    });

    it('should copy the file if rename fails due to EXDEV (rename across filesystems)', async () => {
      const newPath = 'upload/library/user-id/2023/2023-02-23/asset-id.jpg';
      assetMock.getAll.mockResolvedValue({
        items: [assetStub.image],
        hasNextPage: false,
      });
      storageMock.rename.mockRejectedValue({ code: 'EXDEV' });
      userMock.getList.mockResolvedValue([userStub.user1]);
      moveMock.create.mockResolvedValue({
        id: '123',
        entityId: assetStub.image.id,
        pathType: AssetPathType.ORIGINAL,
        oldPath: assetStub.image.originalPath,
        newPath,
      });
      when(storageMock.stat)
        .calledWith(newPath)
        .mockResolvedValue({
          size: 5000,
        } as Stats);
      when(storageMock.stat)
        .calledWith(assetStub.image.originalPath)
        .mockResolvedValue({
          atime: new Date(),
          mtime: new Date(),
        } as Stats);
      when(cryptoMock.hashFile).calledWith(newPath).mockResolvedValue(assetStub.image.checksum);

      await sut.handleMigration();

      expect(assetMock.getAll).toHaveBeenCalled();
      expect(storageMock.rename).toHaveBeenCalledWith('/original/path.jpg', newPath);
      expect(storageMock.copyFile).toHaveBeenCalledWith('/original/path.jpg', newPath);
      expect(storageMock.stat).toHaveBeenCalledWith(newPath);
      expect(storageMock.stat).toHaveBeenCalledWith(assetStub.image.originalPath);
      expect(storageMock.utimes).toHaveBeenCalledWith(newPath, expect.any(Date), expect.any(Date));
      expect(storageMock.unlink).toHaveBeenCalledWith(assetStub.image.originalPath);
      expect(storageMock.unlink).toHaveBeenCalledTimes(1);
      expect(assetMock.update).toHaveBeenCalledWith({
        id: assetStub.image.id,
        originalPath: newPath,
      });
    });

    it('should not update the database if the move fails due to incorrect newPath filesize', async () => {
      assetMock.getAll.mockResolvedValue({
        items: [assetStub.image],
        hasNextPage: false,
      });
      storageMock.rename.mockRejectedValue({ code: 'EXDEV' });
      userMock.getList.mockResolvedValue([userStub.user1]);
      moveMock.create.mockResolvedValue({
        id: '123',
        entityId: assetStub.image.id,
        pathType: AssetPathType.ORIGINAL,
        oldPath: assetStub.image.originalPath,
        newPath: 'upload/library/user-id/2023/2023-02-23/asset-id.jpg',
      });
      when(storageMock.stat)
        .calledWith('upload/library/user-id/2023/2023-02-23/asset-id.jpg')
        .mockResolvedValue({
          size: 100,
        } as Stats);

      await sut.handleMigration();

      expect(assetMock.getAll).toHaveBeenCalled();
      expect(storageMock.rename).toHaveBeenCalledWith(
        '/original/path.jpg',
        'upload/library/user-id/2023/2023-02-23/asset-id.jpg',
      );
      expect(storageMock.copyFile).toHaveBeenCalledWith(
        '/original/path.jpg',
        'upload/library/user-id/2023/2023-02-23/asset-id.jpg',
      );
      expect(storageMock.stat).toHaveBeenCalledWith('upload/library/user-id/2023/2023-02-23/asset-id.jpg');
      expect(assetMock.update).not.toHaveBeenCalled();
    });

    it('should not update the database if the move fails', async () => {
      assetMock.getAll.mockResolvedValue({
        items: [assetStub.image],
        hasNextPage: false,
      });
      storageMock.rename.mockRejectedValue(new Error('Read only system'));
      storageMock.copyFile.mockRejectedValue(new Error('Read only system'));
      moveMock.create.mockResolvedValue({
        id: 'move-123',
        entityId: '123',
        pathType: AssetPathType.ORIGINAL,
        oldPath: assetStub.image.originalPath,
        newPath: '',
      });
      userMock.getList.mockResolvedValue([userStub.user1]);

      await sut.handleMigration();

      expect(assetMock.getAll).toHaveBeenCalled();
      expect(storageMock.rename).toHaveBeenCalledWith(
        '/original/path.jpg',
        'upload/library/user-id/2023/2023-02-23/asset-id.jpg',
      );
      expect(assetMock.update).not.toHaveBeenCalled();
    });

    it('should not move read-only asset', async () => {
      assetMock.getAll.mockResolvedValue({
        items: [
          {
            ...assetStub.image,
            originalPath: 'upload/library/user-id/2023/2023-02-23/asset-id+1.jpg',
            isReadOnly: true,
          },
        ],
        hasNextPage: false,
      });
      userMock.getList.mockResolvedValue([userStub.user1]);

      await sut.handleMigration();

      expect(assetMock.getAll).toHaveBeenCalled();
      expect(storageMock.rename).not.toHaveBeenCalled();
      expect(storageMock.copyFile).not.toHaveBeenCalled();
      expect(assetMock.update).not.toHaveBeenCalled();
    });
  });
});
@@ -0,0 +1,332 @@
import { Inject, Injectable } from '@nestjs/common';
import { OnEvent } from '@nestjs/event-emitter';
import handlebar from 'handlebars';
import { DateTime } from 'luxon';
import path from 'node:path';
import sanitize from 'sanitize-filename';
import { StorageCore, StorageFolder } from 'src/cores/storage.core';
import { SystemConfigCore } from 'src/cores/system-config.core';
import { JOBS_ASSET_PAGINATION_SIZE } from 'src/domain/job/job.constants';
import { IEntityJob } from 'src/domain/job/job.interface';
import {
  supportedDayTokens,
  supportedHourTokens,
  supportedMinuteTokens,
  supportedMonthTokens,
  supportedSecondTokens,
  supportedWeekTokens,
  supportedYearTokens,
} from 'src/domain/system-config/system-config.constants';
import { AssetEntity, AssetType } from 'src/entities/asset.entity';
import { AssetPathType } from 'src/entities/move.entity';
import { SystemConfig } from 'src/entities/system-config.entity';
import { ImmichLogger } from 'src/infra/logger';
import { IAlbumRepository } from 'src/interfaces/album.repository';
import { IAssetRepository } from 'src/interfaces/asset.repository';
import { InternalEvent, InternalEventMap } from 'src/interfaces/communication.repository';
import { ICryptoRepository } from 'src/interfaces/crypto.repository';
import { DatabaseLock, IDatabaseRepository } from 'src/interfaces/database.repository';
import { JobStatus } from 'src/interfaces/job.repository';
import { IMoveRepository } from 'src/interfaces/move.repository';
import { IPersonRepository } from 'src/interfaces/person.repository';
import { IStorageRepository } from 'src/interfaces/storage.repository';
import { ISystemConfigRepository } from 'src/interfaces/system-config.repository';
import { IUserRepository } from 'src/interfaces/user.repository';
import { getLivePhotoMotionFilename, usePagination } from 'src/utils';

export interface MoveAssetMetadata {
  storageLabel: string | null;
  filename: string;
}

interface RenderMetadata {
  asset: AssetEntity;
  filename: string;
  extension: string;
  albumName: string | null;
}

@Injectable()
export class StorageTemplateService {
  private logger = new ImmichLogger(StorageTemplateService.name);
  private configCore: SystemConfigCore;
  private storageCore: StorageCore;
  private _template: {
    compiled: HandlebarsTemplateDelegate<any>;
    raw: string;
    needsAlbum: boolean;
  } | null = null;

  private get template() {
    if (!this._template) {
      throw new Error('Template not initialized');
    }
    return this._template;
  }

  constructor(
    @Inject(IAlbumRepository) private albumRepository: IAlbumRepository,
    @Inject(IAssetRepository) private assetRepository: IAssetRepository,
    @Inject(ISystemConfigRepository) configRepository: ISystemConfigRepository,
    @Inject(IMoveRepository) moveRepository: IMoveRepository,
    @Inject(IPersonRepository) personRepository: IPersonRepository,
    @Inject(IStorageRepository) private storageRepository: IStorageRepository,
    @Inject(IUserRepository) private userRepository: IUserRepository,
    @Inject(ICryptoRepository) cryptoRepository: ICryptoRepository,
    @Inject(IDatabaseRepository) private databaseRepository: IDatabaseRepository,
  ) {
    this.configCore = SystemConfigCore.create(configRepository);
    this.configCore.config$.subscribe((config) => this.onConfig(config));
    this.storageCore = StorageCore.create(
      assetRepository,
      moveRepository,
      personRepository,
      cryptoRepository,
      configRepository,
      storageRepository,
    );
  }

  @OnEvent(InternalEvent.VALIDATE_CONFIG)
  validate({ newConfig }: InternalEventMap[InternalEvent.VALIDATE_CONFIG]) {
    try {
      const { compiled } = this.compile(newConfig.storageTemplate.template);
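      // Render once against a dummy asset so unknown tokens fail fast (the template is
      // compiled in strict mode).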
      this.render(compiled, {
        asset: {
          fileCreatedAt: new Date(),
          originalPath: '/upload/test/IMG_123.jpg',
          type: AssetType.IMAGE,
          id: 'd587e44b-f8c0-4832-9ba3-43268bbf5d4e',
        } as AssetEntity,
        filename: 'IMG_123',
        extension: 'jpg',
        albumName: 'album',
      });
    } catch (error) {
      this.logger.warn(`Storage template validation failed: ${JSON.stringify(error)}`);
      throw new Error(`Invalid storage template: ${error}`);
    }
  }

  async handleMigrationSingle({ id }: IEntityJob): Promise<JobStatus> {
    const config = await this.configCore.getConfig();
    const storageTemplateEnabled = config.storageTemplate.enabled;
    if (!storageTemplateEnabled) {
      return JobStatus.SKIPPED;
    }

    const [asset] = await this.assetRepository.getByIds([id], { exifInfo: true });
    if (!asset) {
      return JobStatus.FAILED;
    }

    const user = await this.userRepository.get(asset.ownerId, {});
    const storageLabel = user?.storageLabel || null;
    const filename = asset.originalFileName || asset.id;
    await this.moveAsset(asset, { storageLabel, filename });

    // move motion part of live photo
    if (asset.livePhotoVideoId) {
      const [livePhotoVideo] = await this.assetRepository.getByIds([asset.livePhotoVideoId], { exifInfo: true });
      if (!livePhotoVideo) {
        return JobStatus.FAILED;
      }
      const motionFilename = getLivePhotoMotionFilename(filename, livePhotoVideo.originalPath);
      await this.moveAsset(livePhotoVideo, { storageLabel, filename: motionFilename });
    }
    return JobStatus.SUCCESS;
  }

  async handleMigration(): Promise<JobStatus> {
    this.logger.log('Starting storage template migration');
    const { storageTemplate } = await this.configCore.getConfig();
    const { enabled } = storageTemplate;
    if (!enabled) {
      this.logger.log('Storage template migration disabled, skipping');
      return JobStatus.SKIPPED;
    }
    const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) =>
      this.assetRepository.getAll(pagination, { withExif: true }),
    );
    const users = await this.userRepository.getList();

    for await (const assets of assetPagination) {
      for (const asset of assets) {
        const user = users.find((user) => user.id === asset.ownerId);
        const storageLabel = user?.storageLabel || null;
        const filename = asset.originalFileName || asset.id;
        await this.moveAsset(asset, { storageLabel, filename });
      }
    }

    this.logger.debug('Cleaning up empty directories...');
    const libraryFolder = StorageCore.getBaseFolder(StorageFolder.LIBRARY);
    await this.storageRepository.removeEmptyDirs(libraryFolder);

    this.logger.log('Finished storage template migration');

    return JobStatus.SUCCESS;
  }

  async moveAsset(asset: AssetEntity, metadata: MoveAssetMetadata) {
    if (asset.isReadOnly || asset.isExternal || StorageCore.isAndroidMotionPath(asset.originalPath)) {
      // External assets are not affected by storage template
      // TODO: shouldn't this only apply to external assets?
      return;
    }

    return this.databaseRepository.withLock(DatabaseLock.StorageTemplateMigration, async () => {
      const { id, sidecarPath, originalPath, exifInfo, checksum } = asset;
      const oldPath = originalPath;
      const newPath = await this.getTemplatePath(asset, metadata);

      if (!exifInfo || !exifInfo.fileSizeInByte) {
        this.logger.error(`Asset ${id} missing exif info, skipping storage template migration`);
        return;
      }

      try {
        await this.storageCore.moveFile({
          entityId: id,
          pathType: AssetPathType.ORIGINAL,
          oldPath,
          newPath,
          assetInfo: { sizeInBytes: exifInfo.fileSizeInByte, checksum },
        });
        if (sidecarPath) {
          await this.storageCore.moveFile({
            entityId: id,
            pathType: AssetPathType.SIDECAR,
            oldPath: sidecarPath,
            newPath: `${newPath}.xmp`,
          });
        }
      } catch (error: any) {
        this.logger.error(`Problem applying storage template`, error?.stack, { id, oldPath, newPath });
      }
    });
  }

  private async getTemplatePath(asset: AssetEntity, metadata: MoveAssetMetadata): Promise<string> {
    const { storageLabel, filename } = metadata;

    try {
      const source = asset.originalPath;
      const extension = path.extname(source).split('.').pop() as string;
      const sanitized = sanitize(path.basename(filename, `.${extension}`));
      const rootPath = StorageCore.getLibraryFolder({ id: asset.ownerId, storageLabel });

      let albumName = null;
      if (this.template.needsAlbum) {
        const albums = await this.albumRepository.getByAssetId(asset.ownerId, asset.id);
        albumName = albums?.[0]?.albumName || null;
      }

      const storagePath = this.render(this.template.compiled, {
        asset,
        filename: sanitized,
        extension: extension,
        albumName,
      });
      const fullPath = path.normalize(path.join(rootPath, storagePath));
      let destination = `${fullPath}.${extension}`;

      if (!fullPath.startsWith(rootPath)) {
        this.logger.warn(`Skipped attempt to access an invalid path: ${fullPath}. Path should start with ${rootPath}`);
        return source;
      }

      if (source === destination) {
        return source;
      }

      /**
       * In case of migrating duplicate filename to a new path, we need to check if it is already migrated
       * Due to the mechanism of appending +1, +2, +3, etc to the filename
       *
       * Example:
       * Source = upload/abc/def/FullSizeRender+7.heic
       * Expected Destination = upload/abc/def/FullSizeRender.heic
       *
       * The file is already at the correct location, but since there are other FullSizeRender.heic files in the
       * destination, it was renamed to FullSizeRender+7.heic.
       *
       * The lines below will be used to check if the differences between the source and destination is only the
       * +7 suffix, and if so, it will be considered as already migrated.
       */
      if (source.startsWith(fullPath) && source.endsWith(`.${extension}`)) {
        const diff = source.replace(fullPath, '').replace(`.${extension}`, '');
        const hasDuplicationAnnotation = /^\+\d+$/.test(diff);
        if (hasDuplicationAnnotation) {
          return source;
        }
      }

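      // Probe for a free filename, appending +1, +2, ... until nothing exists at the destination.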
      let duplicateCount = 0;

      while (true) {
        const exists = await this.storageRepository.checkFileExists(destination);
        if (!exists) {
          break;
        }

        duplicateCount++;
        destination = `${fullPath}+${duplicateCount}.${extension}`;
      }

      return destination;
    } catch (error: any) {
      this.logger.error(`Unable to get template path for ${filename}`, error);
      return asset.originalPath;
    }
  }

  private onConfig(config: SystemConfig) {
    const template = config.storageTemplate.template;
    if (!this._template || template !== this.template.raw) {
      this.logger.debug(`Compiling new storage template: ${template}`);
      this._template = this.compile(template);
    }
  }

  private compile(template: string) {
    return {
      raw: template,
      compiled: handlebar.compile(template, { knownHelpers: undefined, strict: true }),
      needsAlbum: template.includes('{{album}}'),
    };
  }

  private render(template: HandlebarsTemplateDelegate<any>, options: RenderMetadata) {
    const { filename, extension, asset, albumName } = options;
    const substitutions: Record<string, string> = {
      filename,
      ext: extension,
      filetype: asset.type == AssetType.IMAGE ? 'IMG' : 'VID',
      filetypefull: asset.type == AssetType.IMAGE ? 'IMAGE' : 'VIDEO',
      assetId: asset.id,
      // just throw into the root if it doesn't belong to an album
      album: (albumName && sanitize(albumName.replaceAll(/\.+/g, ''))) || '.',
    };

    const systemTimeZone = Intl.DateTimeFormat().resolvedOptions().timeZone;
    const zone = asset.exifInfo?.timeZone || systemTimeZone;
    const dt = DateTime.fromJSDate(asset.fileCreatedAt, { zone });

    const dateTokens = [
      ...supportedYearTokens,
      ...supportedMonthTokens,
      ...supportedWeekTokens,
      ...supportedDayTokens,
      ...supportedHourTokens,
      ...supportedMinuteTokens,
      ...supportedSecondTokens,
    ];

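    // Pre-format every supported date token from the asset's creation time in its zone.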
    for (const token of dateTokens) {
      substitutions[token] = dt.toFormat(token);
    }

    return template(substitutions);
  }
}
@@ -0,0 +1,46 @@
import { IStorageRepository } from 'src/interfaces/storage.repository';
import { StorageService } from 'src/services/storage.service';
import { newStorageRepositoryMock } from 'test/repositories/storage.repository.mock';

describe(StorageService.name, () => {
  let sut: StorageService;
  let storageMock: jest.Mocked<IStorageRepository>;

  beforeEach(() => {
    storageMock = newStorageRepositoryMock();
    sut = new StorageService(storageMock);
  });

  it('should work', () => {
    expect(sut).toBeDefined();
  });

  describe('init', () => {
    it('should create the library folder on initialization', () => {
      sut.init();
      expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/library');
    });
  });

  describe('handleDeleteFiles', () => {
    it('should handle null values', async () => {
      await sut.handleDeleteFiles({ files: [undefined, null] });

      expect(storageMock.unlink).not.toHaveBeenCalled();
    });

    it('should handle an error removing a file', async () => {
      storageMock.unlink.mockRejectedValue(new Error('something-went-wrong'));

      await sut.handleDeleteFiles({ files: ['path/to/something'] });

      expect(storageMock.unlink).toHaveBeenCalledWith('path/to/something');
    });

    it('should remove the file', async () => {
      await sut.handleDeleteFiles({ files: ['path/to/something'] });

      expect(storageMock.unlink).toHaveBeenCalledWith('path/to/something');
    });
  });
});
@@ -0,0 +1,37 @@
import { Inject, Injectable } from '@nestjs/common';
import { StorageCore, StorageFolder } from 'src/cores/storage.core';
import { IDeleteFilesJob } from 'src/domain/job/job.interface';
import { ImmichLogger } from 'src/infra/logger';
import { JobStatus } from 'src/interfaces/job.repository';
import { IStorageRepository } from 'src/interfaces/storage.repository';

@Injectable()
export class StorageService {
  private logger = new ImmichLogger(StorageService.name);

  constructor(@Inject(IStorageRepository) private storageRepository: IStorageRepository) {}

  init() {
    const libraryBase = StorageCore.getBaseFolder(StorageFolder.LIBRARY);
    this.storageRepository.mkdirSync(libraryBase);
  }

  async handleDeleteFiles(job: IDeleteFilesJob) {
    const { files } = job;

    // TODO: one job per file
    for (const file of files) {
      if (!file) {
        continue;
      }

      try {
        await this.storageRepository.unlink(file);
      } catch (error: any) {
        this.logger.warn('Unable to remove file from disk', error);
      }
    }

    return JobStatus.SUCCESS;
  }
}
@@ -0,0 +1,364 @@
import { BadRequestException } from '@nestjs/common';
import { defaults } from 'src/cores/system-config.core';
import { QueueName } from 'src/domain/job/job.constants';
import {
  AudioCodec,
  CQMode,
  Colorspace,
  LogLevel,
  SystemConfig,
  SystemConfigEntity,
  SystemConfigKey,
  ToneMapping,
  TranscodeHWAccel,
  TranscodePolicy,
  VideoCodec,
} from 'src/entities/system-config.entity';
import { ImmichLogger } from 'src/infra/logger';
import { ICommunicationRepository, ServerEvent } from 'src/interfaces/communication.repository';
import { ISearchRepository } from 'src/interfaces/search.repository';
import { ISystemConfigRepository } from 'src/interfaces/system-config.repository';
import { SystemConfigService } from 'src/services/system-config.service';
import { newCommunicationRepositoryMock } from 'test/repositories/communication.repository.mock';
import { newSystemConfigRepositoryMock } from 'test/repositories/system-config.repository.mock';

const updates: SystemConfigEntity[] = [
  { key: SystemConfigKey.FFMPEG_CRF, value: 30 },
  { key: SystemConfigKey.OAUTH_AUTO_LAUNCH, value: true },
  { key: SystemConfigKey.TRASH_DAYS, value: 10 },
  { key: SystemConfigKey.USER_DELETE_DELAY, value: 15 },
];

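// defaults with the four overrides above applied (ffmpeg.crf, oauth.autoLaunch, trash.days, user.deleteDelay)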
const updatedConfig = Object.freeze<SystemConfig>({
|
||||
job: {
|
||||
[QueueName.BACKGROUND_TASK]: { concurrency: 5 },
|
||||
[QueueName.SMART_SEARCH]: { concurrency: 2 },
|
||||
[QueueName.METADATA_EXTRACTION]: { concurrency: 5 },
|
||||
[QueueName.FACE_DETECTION]: { concurrency: 2 },
|
||||
[QueueName.SEARCH]: { concurrency: 5 },
|
||||
[QueueName.SIDECAR]: { concurrency: 5 },
|
||||
[QueueName.LIBRARY]: { concurrency: 5 },
|
||||
[QueueName.MIGRATION]: { concurrency: 5 },
|
||||
[QueueName.THUMBNAIL_GENERATION]: { concurrency: 5 },
|
||||
[QueueName.VIDEO_CONVERSION]: { concurrency: 1 },
|
||||
},
|
||||
ffmpeg: {
|
||||
crf: 30,
|
||||
threads: 0,
|
||||
preset: 'ultrafast',
|
||||
targetAudioCodec: AudioCodec.AAC,
|
||||
acceptedAudioCodecs: [AudioCodec.AAC, AudioCodec.MP3, AudioCodec.LIBOPUS],
|
||||
targetResolution: '720',
|
||||
targetVideoCodec: VideoCodec.H264,
|
||||
acceptedVideoCodecs: [VideoCodec.H264],
|
||||
maxBitrate: '0',
|
||||
bframes: -1,
|
||||
refs: 0,
|
||||
gopSize: 0,
|
||||
npl: 0,
|
||||
temporalAQ: false,
|
||||
cqMode: CQMode.AUTO,
|
||||
twoPass: false,
|
||||
preferredHwDevice: 'auto',
|
||||
transcode: TranscodePolicy.REQUIRED,
|
||||
accel: TranscodeHWAccel.DISABLED,
|
||||
tonemap: ToneMapping.HABLE,
|
||||
},
|
||||
logging: {
|
||||
enabled: true,
|
||||
level: LogLevel.LOG,
|
||||
},
|
||||
machineLearning: {
|
||||
enabled: true,
|
||||
url: 'http://immich-machine-learning:3003',
|
||||
clip: {
|
||||
enabled: true,
|
||||
modelName: 'ViT-B-32__openai',
|
||||
},
|
||||
facialRecognition: {
|
||||
enabled: true,
|
||||
modelName: 'buffalo_l',
|
||||
minScore: 0.7,
|
||||
maxDistance: 0.5,
|
||||
minFaces: 3,
|
||||
},
|
||||
},
|
||||
map: {
|
||||
enabled: true,
|
||||
lightStyle: '',
|
||||
darkStyle: '',
|
||||
},
|
||||
reverseGeocoding: {
|
||||
enabled: true,
|
||||
},
|
||||
oauth: {
|
||||
autoLaunch: true,
|
||||
autoRegister: true,
|
||||
buttonText: 'Login with OAuth',
|
||||
clientId: '',
|
||||
clientSecret: '',
|
||||
defaultStorageQuota: 0,
|
||||
enabled: false,
|
||||
issuerUrl: '',
|
||||
mobileOverrideEnabled: false,
|
||||
mobileRedirectUri: '',
|
||||
scope: 'openid email profile',
|
||||
signingAlgorithm: 'RS256',
|
||||
storageLabelClaim: 'preferred_username',
|
||||
storageQuotaClaim: 'immich_quota',
|
||||
},
|
||||
passwordLogin: {
|
||||
enabled: true,
|
||||
},
|
||||
server: {
|
||||
externalDomain: '',
|
||||
loginPageMessage: '',
|
||||
},
|
||||
storageTemplate: {
|
||||
enabled: false,
|
||||
hashVerificationEnabled: true,
|
||||
template: '{{y}}/{{y}}-{{MM}}-{{dd}}/{{filename}}',
|
||||
},
|
||||
thumbnail: {
|
||||
webpSize: 250,
|
||||
jpegSize: 1440,
|
||||
quality: 80,
|
||||
colorspace: Colorspace.P3,
|
||||
},
|
||||
newVersionCheck: {
|
||||
enabled: true,
|
||||
},
|
||||
trash: {
|
||||
enabled: true,
|
||||
days: 10,
|
||||
},
|
||||
theme: {
|
||||
customCss: '',
|
||||
},
|
||||
library: {
|
||||
scan: {
|
||||
enabled: true,
|
||||
cronExpression: '0 0 * * *',
|
||||
},
|
||||
watch: {
|
||||
enabled: false,
|
||||
},
|
||||
},
|
||||
user: {
|
||||
deleteDelay: 15,
|
||||
},
|
||||
});
|
||||
|
||||
describe(SystemConfigService.name, () => {
|
||||
let sut: SystemConfigService;
|
||||
let configMock: jest.Mocked<ISystemConfigRepository>;
|
||||
let communicationMock: jest.Mocked<ICommunicationRepository>;
|
||||
let smartInfoMock: jest.Mocked<ISearchRepository>;
|
||||
|
||||
beforeEach(() => {
|
||||
delete process.env.IMMICH_CONFIG_FILE;
|
||||
configMock = newSystemConfigRepositoryMock();
|
||||
communicationMock = newCommunicationRepositoryMock();
|
||||
sut = new SystemConfigService(configMock, communicationMock, smartInfoMock);
|
||||
});
|
||||
|
||||
  it('should work', () => {
    expect(sut).toBeDefined();
  });

  describe('getDefaults', () => {
    it('should return the default config', () => {
      configMock.load.mockResolvedValue(updates);

      expect(sut.getDefaults()).toEqual(defaults);
      expect(configMock.load).not.toHaveBeenCalled();
    });
  });

  describe('getConfig', () => {
    let warnLog: jest.SpyInstance;

    beforeEach(() => {
      warnLog = jest.spyOn(ImmichLogger.prototype, 'warn');
    });

    afterEach(() => {
      warnLog.mockRestore();
    });

    it('should return the default config', async () => {
      configMock.load.mockResolvedValue([]);

      await expect(sut.getConfig()).resolves.toEqual(defaults);
    });

    it('should merge the overrides', async () => {
      configMock.load.mockResolvedValue([
        { key: SystemConfigKey.FFMPEG_CRF, value: 30 },
        { key: SystemConfigKey.OAUTH_AUTO_LAUNCH, value: true },
        { key: SystemConfigKey.TRASH_DAYS, value: 10 },
        { key: SystemConfigKey.USER_DELETE_DELAY, value: 15 },
      ]);

      await expect(sut.getConfig()).resolves.toEqual(updatedConfig);
    });

    it('should load the config from a json file', async () => {
      process.env.IMMICH_CONFIG_FILE = 'immich-config.json';
      const partialConfig = {
        ffmpeg: { crf: 30 },
        oauth: { autoLaunch: true },
        trash: { days: 10 },
        user: { deleteDelay: 15 },
      };
      configMock.readFile.mockResolvedValue(JSON.stringify(partialConfig));

      await expect(sut.getConfig()).resolves.toEqual(updatedConfig);

      expect(configMock.readFile).toHaveBeenCalledWith('immich-config.json');
    });

    it('should load the config from a yaml file', async () => {
      process.env.IMMICH_CONFIG_FILE = 'immich-config.yaml';
      const partialConfig = `
        ffmpeg:
          crf: 30
        oauth:
          autoLaunch: true
        trash:
          days: 10
        user:
          deleteDelay: 15
      `;
      configMock.readFile.mockResolvedValue(partialConfig);

      await expect(sut.getConfig()).resolves.toEqual(updatedConfig);

      expect(configMock.readFile).toHaveBeenCalledWith('immich-config.yaml');
    });

    it('should accept an empty configuration file', async () => {
      process.env.IMMICH_CONFIG_FILE = 'immich-config.json';
      configMock.readFile.mockResolvedValue(JSON.stringify({}));

      await expect(sut.getConfig()).resolves.toEqual(defaults);

      expect(configMock.readFile).toHaveBeenCalledWith('immich-config.json');
    });

    it('should allow underscores in the machine learning url', async () => {
      process.env.IMMICH_CONFIG_FILE = 'immich-config.json';
      const partialConfig = { machineLearning: { url: 'immich_machine_learning' } };
      configMock.readFile.mockResolvedValue(JSON.stringify(partialConfig));

      const config = await sut.getConfig();
      expect(config.machineLearning.url).toEqual('immich_machine_learning');
    });

    it('should warn for unknown options in yaml', async () => {
      process.env.IMMICH_CONFIG_FILE = 'immich-config.yaml';
      const partialConfig = `
        unknownOption: true
      `;
      configMock.readFile.mockResolvedValue(partialConfig);

      await sut.getConfig();
      expect(warnLog).toHaveBeenCalled();
    });

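    // table-driven cases: invalid values should make getConfig reject, unknown keys should only log a warning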
    const tests = [
      { should: 'validate numbers', config: { ffmpeg: { crf: 'not-a-number' } } },
      { should: 'validate booleans', config: { oauth: { enabled: 'invalid' } } },
      { should: 'validate enums', config: { ffmpeg: { transcode: 'unknown' } } },
      { should: 'validate required oauth fields', config: { oauth: { enabled: true } } },
      { should: 'warn for top level unknown options', warn: true, config: { unknownOption: true } },
      { should: 'warn for nested unknown options', warn: true, config: { ffmpeg: { unknownOption: true } } },
    ];

    for (const test of tests) {
      it(`should ${test.should}`, async () => {
        process.env.IMMICH_CONFIG_FILE = 'immich-config.json';
        configMock.readFile.mockResolvedValue(JSON.stringify(test.config));

        if (test.warn) {
          await sut.getConfig();
          expect(warnLog).toHaveBeenCalled();
        } else {
          await expect(sut.getConfig()).rejects.toBeInstanceOf(Error);
        }
      });
    }
  });

  describe('getStorageTemplateOptions', () => {
    it('should send back the datetime variables', () => {
      expect(sut.getStorageTemplateOptions()).toEqual({
        dayOptions: ['d', 'dd'],
        hourOptions: ['h', 'hh', 'H', 'HH'],
        minuteOptions: ['m', 'mm'],
        monthOptions: ['M', 'MM', 'MMM', 'MMMM'],
        presetOptions: [
          '{{y}}/{{y}}-{{MM}}-{{dd}}/{{filename}}',
          '{{y}}/{{MM}}-{{dd}}/{{filename}}',
          '{{y}}/{{MMMM}}-{{dd}}/{{filename}}',
          '{{y}}/{{MM}}/{{filename}}',
          '{{y}}/{{MMM}}/{{filename}}',
          '{{y}}/{{MMMM}}/{{filename}}',
          '{{y}}/{{MM}}/{{dd}}/{{filename}}',
          '{{y}}/{{MMMM}}/{{dd}}/{{filename}}',
          '{{y}}/{{y}}-{{MM}}/{{y}}-{{MM}}-{{dd}}/{{filename}}',
          '{{y}}-{{MM}}-{{dd}}/{{filename}}',
          '{{y}}-{{MMM}}-{{dd}}/{{filename}}',
          '{{y}}-{{MMMM}}-{{dd}}/{{filename}}',
          '{{y}}/{{y}}-{{MM}}/{{filename}}',
          '{{y}}/{{y}}-{{WW}}/{{filename}}',
          '{{y}}/{{y}}-{{MM}}-{{dd}}/{{assetId}}',
          '{{y}}/{{y}}-{{MM}}/{{assetId}}',
          '{{y}}/{{y}}-{{WW}}/{{assetId}}',
          '{{album}}/{{filename}}',
        ],
        secondOptions: ['s', 'ss', 'SSS'],
        weekOptions: ['W', 'WW'],
        yearOptions: ['y', 'yy'],
      });
    });
  });

  describe('updateConfig', () => {
    it('should update the config and emit client and server events', async () => {
      configMock.load.mockResolvedValue(updates);

      await expect(sut.updateConfig(updatedConfig)).resolves.toEqual(updatedConfig);

      expect(communicationMock.broadcast).toHaveBeenCalled();
      expect(communicationMock.sendServerEvent).toHaveBeenCalledWith(ServerEvent.CONFIG_UPDATE);
      expect(configMock.saveAll).toHaveBeenCalledWith(updates);
    });

    it('should throw an error if a config file is in use', async () => {
      process.env.IMMICH_CONFIG_FILE = 'immich-config.json';
      configMock.readFile.mockResolvedValue(JSON.stringify({}));
      await expect(sut.updateConfig(defaults)).rejects.toBeInstanceOf(BadRequestException);
      expect(configMock.saveAll).not.toHaveBeenCalled();
    });
  });

  describe('refreshConfig', () => {
    it('should notify the subscribers', async () => {
      const changeMock = jest.fn();
      const subscription = sut.config$.subscribe(changeMock);

      await sut.refreshConfig();

      expect(changeMock).toHaveBeenCalledWith(defaults);

      subscription.unsubscribe();
    });
  });

  describe('getCustomCss', () => {
    it('should return the default theme', async () => {
      await expect(sut.getCustomCss()).resolves.toEqual(defaults.theme.customCss);
    });
  });
});
@@ -0,0 +1,145 @@
import { BadRequestException, Inject, Injectable } from '@nestjs/common';
import { OnEvent } from '@nestjs/event-emitter';
import { instanceToPlain } from 'class-transformer';
import _ from 'lodash';
import { SystemConfigCore } from 'src/cores/system-config.core';
import {
  supportedDayTokens,
  supportedHourTokens,
  supportedMinuteTokens,
  supportedMonthTokens,
  supportedPresetTokens,
  supportedSecondTokens,
  supportedWeekTokens,
  supportedYearTokens,
} from 'src/domain/system-config/system-config.constants';
import { SystemConfigTemplateStorageOptionDto } from 'src/dtos/system-config-storage-template.dto';
import { SystemConfigDto, mapConfig } from 'src/dtos/system-config.dto';
import { LogLevel, SystemConfig } from 'src/entities/system-config.entity';
import { ImmichLogger } from 'src/infra/logger';
import {
  ClientEvent,
  ICommunicationRepository,
  InternalEvent,
  InternalEventMap,
  ServerEvent,
} from 'src/interfaces/communication.repository';
import { ISearchRepository } from 'src/interfaces/search.repository';
import { ISystemConfigRepository } from 'src/interfaces/system-config.repository';

@Injectable()
export class SystemConfigService {
  private logger = new ImmichLogger(SystemConfigService.name);
  private core: SystemConfigCore;

  constructor(
    @Inject(ISystemConfigRepository) private repository: ISystemConfigRepository,
    @Inject(ICommunicationRepository) private communicationRepository: ICommunicationRepository,
    @Inject(ISearchRepository) private smartInfoRepository: ISearchRepository,
  ) {
    this.core = SystemConfigCore.create(repository);
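    // refresh this instance whenever another server process saves new settings (see handleConfigUpdate below)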
    this.communicationRepository.on(ServerEvent.CONFIG_UPDATE, () => this.handleConfigUpdate());
    this.core.config$.subscribe((config) => this.setLogLevel(config));
  }

  async init() {
    const config = await this.core.getConfig();
    this.config$.next(config);
  }

  get config$() {
    return this.core.config$;
  }

  async getConfig(): Promise<SystemConfigDto> {
    const config = await this.core.getConfig();
    return mapConfig(config);
  }

  getDefaults(): SystemConfigDto {
    const config = this.core.getDefaults();
    return mapConfig(config);
  }

  @OnEvent(InternalEvent.VALIDATE_CONFIG)
  validateConfig({ newConfig, oldConfig }: InternalEventMap[InternalEvent.VALIDATE_CONFIG]) {
    if (!_.isEqual(instanceToPlain(newConfig.logging), oldConfig.logging) && this.getEnvLogLevel()) {
      throw new Error('Logging cannot be changed while the environment variable LOG_LEVEL is set.');
    }
  }

  async updateConfig(dto: SystemConfigDto): Promise<SystemConfigDto> {
    const oldConfig = await this.core.getConfig();

    try {
      await this.communicationRepository.emitAsync(InternalEvent.VALIDATE_CONFIG, { newConfig: dto, oldConfig });
    } catch (error) {
      this.logger.warn(`Unable to save system config due to a validation error: ${error}`);
      throw new BadRequestException(error instanceof Error ? error.message : error);
    }

    const newConfig = await this.core.updateConfig(dto);

    this.communicationRepository.broadcast(ClientEvent.CONFIG_UPDATE, {});
    this.communicationRepository.sendServerEvent(ServerEvent.CONFIG_UPDATE);

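    // re-initialize the smart-search index whenever the CLIP model changes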
    if (oldConfig.machineLearning.clip.modelName !== newConfig.machineLearning.clip.modelName) {
      await this.smartInfoRepository.init(newConfig.machineLearning.clip.modelName);
    }
    return mapConfig(newConfig);
  }

  // this is only used by the cli on config change, and it's not actually needed anymore
  async refreshConfig() {
    this.communicationRepository.sendServerEvent(ServerEvent.CONFIG_UPDATE);
    await this.core.refreshConfig();
    return true;
  }

  getStorageTemplateOptions(): SystemConfigTemplateStorageOptionDto {
    const options = new SystemConfigTemplateStorageOptionDto();

    options.dayOptions = supportedDayTokens;
    options.weekOptions = supportedWeekTokens;
    options.monthOptions = supportedMonthTokens;
    options.yearOptions = supportedYearTokens;
    options.hourOptions = supportedHourTokens;
    options.secondOptions = supportedSecondTokens;
    options.minuteOptions = supportedMinuteTokens;
    options.presetOptions = supportedPresetTokens;

    return options;
  }

  async getMapStyle(theme: 'light' | 'dark') {
    const { map } = await this.getConfig();
    const styleUrl = theme === 'dark' ? map.darkStyle : map.lightStyle;

    if (styleUrl) {
      return this.repository.fetchStyle(styleUrl);
    }

    return JSON.parse(await this.repository.readFile(`./resources/style-${theme}.json`));
  }

  async getCustomCss(): Promise<string> {
    const { theme } = await this.core.getConfig();
    return theme.customCss;
  }

  private async handleConfigUpdate() {
    await this.core.refreshConfig();
  }

  private setLogLevel({ logging }: SystemConfig) {
    const envLevel = this.getEnvLogLevel();
    const configLevel = logging.enabled ? logging.level : false;
    const level = envLevel ?? configLevel;
    ImmichLogger.setLogLevel(level);
    this.logger.log(`LogLevel=${level} ${envLevel ? '(set via LOG_LEVEL)' : '(set via system config)'}`);
  }

  private getEnvLogLevel() {
    return process.env.LOG_LEVEL as LogLevel;
  }
}
@@ -0,0 +1,181 @@
import { BadRequestException } from '@nestjs/common';
import { when } from 'jest-when';
import { AssetIdErrorReason } from 'src/dtos/asset-ids.response.dto';
import { TagType } from 'src/entities/tag.entity';
import { ITagRepository } from 'src/interfaces/tag.repository';
import { TagService } from 'src/services/tag.service';
import { assetStub } from 'test/fixtures/asset.stub';
import { authStub } from 'test/fixtures/auth.stub';
import { tagResponseStub, tagStub } from 'test/fixtures/tag.stub';
import { newTagRepositoryMock } from 'test/repositories/tag.repository.mock';

describe(TagService.name, () => {
  let sut: TagService;
  let tagMock: jest.Mocked<ITagRepository>;

  beforeEach(() => {
    tagMock = newTagRepositoryMock();
    sut = new TagService(tagMock);
  });

  it('should work', () => {
    expect(sut).toBeDefined();
  });

  describe('getAll', () => {
    it('should return all tags for a user', async () => {
      tagMock.getAll.mockResolvedValue([tagStub.tag1]);
      await expect(sut.getAll(authStub.admin)).resolves.toEqual([tagResponseStub.tag1]);
      expect(tagMock.getAll).toHaveBeenCalledWith(authStub.admin.user.id);
    });
  });

  describe('getById', () => {
    it('should throw an error for an invalid id', async () => {
      tagMock.getById.mockResolvedValue(null);
      await expect(sut.getById(authStub.admin, 'tag-1')).rejects.toBeInstanceOf(BadRequestException);
      expect(tagMock.getById).toHaveBeenCalledWith(authStub.admin.user.id, 'tag-1');
    });

    it('should return a tag for a user', async () => {
      tagMock.getById.mockResolvedValue(tagStub.tag1);
      await expect(sut.getById(authStub.admin, 'tag-1')).resolves.toEqual(tagResponseStub.tag1);
      expect(tagMock.getById).toHaveBeenCalledWith(authStub.admin.user.id, 'tag-1');
    });
  });

  describe('create', () => {
    it('should throw an error for a duplicate tag', async () => {
      tagMock.hasName.mockResolvedValue(true);
      await expect(sut.create(authStub.admin, { name: 'tag-1', type: TagType.CUSTOM })).rejects.toBeInstanceOf(
        BadRequestException,
      );
      expect(tagMock.hasName).toHaveBeenCalledWith(authStub.admin.user.id, 'tag-1');
      expect(tagMock.create).not.toHaveBeenCalled();
    });

    it('should create a new tag', async () => {
      tagMock.create.mockResolvedValue(tagStub.tag1);
      await expect(sut.create(authStub.admin, { name: 'tag-1', type: TagType.CUSTOM })).resolves.toEqual(
        tagResponseStub.tag1,
      );
      expect(tagMock.create).toHaveBeenCalledWith({
        userId: authStub.admin.user.id,
        name: 'tag-1',
        type: TagType.CUSTOM,
      });
    });
  });

  describe('update', () => {
    it('should throw an error for an invalid id', async () => {
      tagMock.getById.mockResolvedValue(null);
      await expect(sut.update(authStub.admin, 'tag-1', { name: 'tag-2' })).rejects.toBeInstanceOf(BadRequestException);
      expect(tagMock.getById).toHaveBeenCalledWith(authStub.admin.user.id, 'tag-1');
      expect(tagMock.remove).not.toHaveBeenCalled();
    });

    it('should update a tag', async () => {
      tagMock.getById.mockResolvedValue(tagStub.tag1);
      tagMock.update.mockResolvedValue(tagStub.tag1);
      await expect(sut.update(authStub.admin, 'tag-1', { name: 'tag-2' })).resolves.toEqual(tagResponseStub.tag1);
      expect(tagMock.getById).toHaveBeenCalledWith(authStub.admin.user.id, 'tag-1');
      expect(tagMock.update).toHaveBeenCalledWith({ id: 'tag-1', name: 'tag-2' });
    });
  });

  describe('remove', () => {
    it('should throw an error for an invalid id', async () => {
      tagMock.getById.mockResolvedValue(null);
      await expect(sut.remove(authStub.admin, 'tag-1')).rejects.toBeInstanceOf(BadRequestException);
      expect(tagMock.getById).toHaveBeenCalledWith(authStub.admin.user.id, 'tag-1');
      expect(tagMock.remove).not.toHaveBeenCalled();
    });

    it('should remove a tag', async () => {
      tagMock.getById.mockResolvedValue(tagStub.tag1);
      await sut.remove(authStub.admin, 'tag-1');
      expect(tagMock.getById).toHaveBeenCalledWith(authStub.admin.user.id, 'tag-1');
      expect(tagMock.remove).toHaveBeenCalledWith(tagStub.tag1);
    });
  });

  describe('getAssets', () => {
    it('should throw an error for an invalid id', async () => {
      tagMock.getById.mockResolvedValue(null);
      await expect(sut.getAssets(authStub.admin, 'tag-1')).rejects.toBeInstanceOf(BadRequestException);
      expect(tagMock.getById).toHaveBeenCalledWith(authStub.admin.user.id, 'tag-1');
      expect(tagMock.getAssets).not.toHaveBeenCalled();
    });

    it('should get the assets for a tag', async () => {
      tagMock.getById.mockResolvedValue(tagStub.tag1);
      tagMock.getAssets.mockResolvedValue([assetStub.image]);
      await sut.getAssets(authStub.admin, 'tag-1');
      expect(tagMock.getById).toHaveBeenCalledWith(authStub.admin.user.id, 'tag-1');
      expect(tagMock.getAssets).toHaveBeenCalledWith(authStub.admin.user.id, 'tag-1');
    });
  });

  describe('addAssets', () => {
    it('should throw an error for an invalid id', async () => {
      tagMock.getById.mockResolvedValue(null);
      await expect(sut.addAssets(authStub.admin, 'tag-1', { assetIds: ['asset-1'] })).rejects.toBeInstanceOf(
        BadRequestException,
      );
      expect(tagMock.getById).toHaveBeenCalledWith(authStub.admin.user.id, 'tag-1');
      expect(tagMock.addAssets).not.toHaveBeenCalled();
    });

    it('should reject duplicate asset ids and accept new ones', async () => {
      tagMock.getById.mockResolvedValue(tagStub.tag1);

      when(tagMock.hasAsset).calledWith(authStub.admin.user.id, 'tag-1', 'asset-1').mockResolvedValue(true);
      when(tagMock.hasAsset).calledWith(authStub.admin.user.id, 'tag-1', 'asset-2').mockResolvedValue(false);

      await expect(
        sut.addAssets(authStub.admin, 'tag-1', {
          assetIds: ['asset-1', 'asset-2'],
        }),
      ).resolves.toEqual([
        { assetId: 'asset-1', success: false, error: AssetIdErrorReason.DUPLICATE },
        { assetId: 'asset-2', success: true },
      ]);

      expect(tagMock.getById).toHaveBeenCalledWith(authStub.admin.user.id, 'tag-1');
      expect(tagMock.hasAsset).toHaveBeenCalledTimes(2);
      expect(tagMock.addAssets).toHaveBeenCalledWith(authStub.admin.user.id, 'tag-1', ['asset-2']);
    });
  });

  describe('removeAssets', () => {
    it('should throw an error for an invalid id', async () => {
      tagMock.getById.mockResolvedValue(null);
      await expect(sut.removeAssets(authStub.admin, 'tag-1', { assetIds: ['asset-1'] })).rejects.toBeInstanceOf(
        BadRequestException,
      );
      expect(tagMock.getById).toHaveBeenCalledWith(authStub.admin.user.id, 'tag-1');
      expect(tagMock.removeAssets).not.toHaveBeenCalled();
    });

    it('should accept ids that are tagged and reject the rest', async () => {
      tagMock.getById.mockResolvedValue(tagStub.tag1);

      when(tagMock.hasAsset).calledWith(authStub.admin.user.id, 'tag-1', 'asset-1').mockResolvedValue(true);
      when(tagMock.hasAsset).calledWith(authStub.admin.user.id, 'tag-1', 'asset-2').mockResolvedValue(false);

      await expect(
        sut.removeAssets(authStub.admin, 'tag-1', {
          assetIds: ['asset-1', 'asset-2'],
        }),
      ).resolves.toEqual([
        { assetId: 'asset-1', success: true },
        { assetId: 'asset-2', success: false, error: AssetIdErrorReason.NOT_FOUND },
      ]);

      expect(tagMock.getById).toHaveBeenCalledWith(authStub.admin.user.id, 'tag-1');
      expect(tagMock.hasAsset).toHaveBeenCalledTimes(2);
      expect(tagMock.removeAssets).toHaveBeenCalledWith(authStub.admin.user.id, 'tag-1', ['asset-1']);
    });
  });
});
@@ -0,0 +1,105 @@
import { BadRequestException, Inject, Injectable } from '@nestjs/common';
import { AssetIdErrorReason, AssetIdsResponseDto } from 'src/dtos/asset-ids.response.dto';
import { AssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
import { AssetIdsDto } from 'src/dtos/asset.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { CreateTagDto, TagResponseDto, UpdateTagDto, mapTag } from 'src/dtos/tag.dto';
import { ITagRepository } from 'src/interfaces/tag.repository';

@Injectable()
export class TagService {
  constructor(@Inject(ITagRepository) private repository: ITagRepository) {}

  getAll(auth: AuthDto) {
    return this.repository.getAll(auth.user.id).then((tags) => tags.map((tag) => mapTag(tag)));
  }

  async getById(auth: AuthDto, id: string): Promise<TagResponseDto> {
    const tag = await this.findOrFail(auth, id);
    return mapTag(tag);
  }

  async create(auth: AuthDto, dto: CreateTagDto) {
    const duplicate = await this.repository.hasName(auth.user.id, dto.name);
    if (duplicate) {
      throw new BadRequestException(`A tag with that name already exists`);
    }

    const tag = await this.repository.create({
      userId: auth.user.id,
      name: dto.name,
      type: dto.type,
    });

    return mapTag(tag);
  }

  async update(auth: AuthDto, id: string, dto: UpdateTagDto): Promise<TagResponseDto> {
    await this.findOrFail(auth, id);
    const tag = await this.repository.update({ id, name: dto.name });
    return mapTag(tag);
  }

  async remove(auth: AuthDto, id: string): Promise<void> {
    const tag = await this.findOrFail(auth, id);
    await this.repository.remove(tag);
  }

  async getAssets(auth: AuthDto, id: string): Promise<AssetResponseDto[]> {
    await this.findOrFail(auth, id);
    const assets = await this.repository.getAssets(auth.user.id, id);
    return assets.map((asset) => mapAsset(asset));
  }

  async addAssets(auth: AuthDto, id: string, dto: AssetIdsDto): Promise<AssetIdsResponseDto[]> {
    await this.findOrFail(auth, id);

    const results: AssetIdsResponseDto[] = [];
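    // classify each id first: already-tagged assets are reported as duplicates, the rest succeed and are attached below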
    for (const assetId of dto.assetIds) {
      const hasAsset = await this.repository.hasAsset(auth.user.id, id, assetId);
      if (hasAsset) {
        results.push({ assetId, success: false, error: AssetIdErrorReason.DUPLICATE });
      } else {
        results.push({ assetId, success: true });
      }
    }

    await this.repository.addAssets(
      auth.user.id,
      id,
      results.filter((result) => result.success).map((result) => result.assetId),
    );

    return results;
  }

  async removeAssets(auth: AuthDto, id: string, dto: AssetIdsDto): Promise<AssetIdsResponseDto[]> {
    await this.findOrFail(auth, id);

    const results: AssetIdsResponseDto[] = [];
    for (const assetId of dto.assetIds) {
      const hasAsset = await this.repository.hasAsset(auth.user.id, id, assetId);
      if (hasAsset) {
        results.push({ assetId, success: true });
      } else {
        results.push({ assetId, success: false, error: AssetIdErrorReason.NOT_FOUND });
      }
    }

    await this.repository.removeAssets(
      auth.user.id,
      id,
      results.filter((result) => result.success).map((result) => result.assetId),
    );

    return results;
  }

  private async findOrFail(auth: AuthDto, id: string) {
    const tag = await this.repository.getById(auth.user.id, id);
    if (!tag) {
      throw new BadRequestException('Tag not found');
    }
    return tag;
  }
}
@@ -0,0 +1,86 @@
import { BadRequestException } from '@nestjs/common';
import { JobName } from 'src/domain/job/job.constants';
import { IAssetRepository } from 'src/interfaces/asset.repository';
import { ClientEvent, ICommunicationRepository } from 'src/interfaces/communication.repository';
import { IJobRepository } from 'src/interfaces/job.repository';
import { TrashService } from 'src/services/trash.service';
import { assetStub } from 'test/fixtures/asset.stub';
import { authStub } from 'test/fixtures/auth.stub';
import { IAccessRepositoryMock, newAccessRepositoryMock } from 'test/repositories/access.repository.mock';
import { newAssetRepositoryMock } from 'test/repositories/asset.repository.mock';
import { newCommunicationRepositoryMock } from 'test/repositories/communication.repository.mock';
import { newJobRepositoryMock } from 'test/repositories/job.repository.mock';

describe(TrashService.name, () => {
  let sut: TrashService;
  let accessMock: IAccessRepositoryMock;
  let assetMock: jest.Mocked<IAssetRepository>;
  let jobMock: jest.Mocked<IJobRepository>;
  let communicationMock: jest.Mocked<ICommunicationRepository>;

  beforeEach(() => {
    accessMock = newAccessRepositoryMock();
    assetMock = newAssetRepositoryMock();
    communicationMock = newCommunicationRepositoryMock();
    jobMock = newJobRepositoryMock();

    sut = new TrashService(accessMock, assetMock, jobMock, communicationMock);
  });

  it('should work', () => {
    expect(sut).toBeDefined();
  });

  describe('restoreAssets', () => {
    it('should require asset restore access for all ids', async () => {
      await expect(
        sut.restoreAssets(authStub.user1, {
          ids: ['asset-1'],
        }),
      ).rejects.toBeInstanceOf(BadRequestException);
    });

    it('should restore a batch of assets', async () => {
      accessMock.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset1', 'asset2']));

      await sut.restoreAssets(authStub.user1, { ids: ['asset1', 'asset2'] });

      expect(assetMock.restoreAll).toHaveBeenCalledWith(['asset1', 'asset2']);
      expect(jobMock.queue.mock.calls).toEqual([]);
    });
  });

  describe('restore', () => {
    it('should handle an empty trash', async () => {
      assetMock.getByUserId.mockResolvedValue({ items: [], hasNextPage: false });
      await expect(sut.restore(authStub.user1)).resolves.toBeUndefined();
      expect(assetMock.restoreAll).not.toHaveBeenCalled();
      expect(communicationMock.send).not.toHaveBeenCalled();
    });

    it('should restore and notify', async () => {
      assetMock.getByUserId.mockResolvedValue({ items: [assetStub.image], hasNextPage: false });
      await expect(sut.restore(authStub.user1)).resolves.toBeUndefined();
      expect(assetMock.restoreAll).toHaveBeenCalledWith([assetStub.image.id]);
      expect(communicationMock.send).toHaveBeenCalledWith(ClientEvent.ASSET_RESTORE, authStub.user1.user.id, [
        assetStub.image.id,
      ]);
    });
  });

  describe('empty', () => {
    it('should handle an empty trash', async () => {
      assetMock.getByUserId.mockResolvedValue({ items: [], hasNextPage: false });
      await expect(sut.empty(authStub.user1)).resolves.toBeUndefined();
      expect(jobMock.queueAll).toHaveBeenCalledWith([]);
    });

    it('should empty the trash', async () => {
      assetMock.getByUserId.mockResolvedValue({ items: [assetStub.image], hasNextPage: false });
      await expect(sut.empty(authStub.user1)).resolves.toBeUndefined();
      expect(jobMock.queueAll).toHaveBeenCalledWith([
        { name: JobName.ASSET_DELETION, data: { id: assetStub.image.id } },
      ]);
    });
  });
});
@@ -0,0 +1,66 @@
import { Inject, Injectable } from '@nestjs/common';
import { DateTime } from 'luxon';
import { AccessCore, Permission } from 'src/cores/access.core';
import { JOBS_ASSET_PAGINATION_SIZE, JobName } from 'src/domain/job/job.constants';
import { BulkIdsDto } from 'src/dtos/asset-ids.response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { IAccessRepository } from 'src/interfaces/access.repository';
import { IAssetRepository } from 'src/interfaces/asset.repository';
import { ClientEvent, ICommunicationRepository } from 'src/interfaces/communication.repository';
import { IJobRepository } from 'src/interfaces/job.repository';
import { usePagination } from 'src/utils';

@Injectable()
export class TrashService {
  private access: AccessCore;

  constructor(
    @Inject(IAccessRepository) accessRepository: IAccessRepository,
    @Inject(IAssetRepository) private assetRepository: IAssetRepository,
    @Inject(IJobRepository) private jobRepository: IJobRepository,
    @Inject(ICommunicationRepository) private communicationRepository: ICommunicationRepository,
  ) {
    this.access = AccessCore.create(accessRepository);
  }

  async restoreAssets(auth: AuthDto, dto: BulkIdsDto): Promise<void> {
    const { ids } = dto;
    await this.access.requirePermission(auth, Permission.ASSET_RESTORE, ids);
    await this.restoreAndSend(auth, ids);
  }

  async restore(auth: AuthDto): Promise<void> {
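    // page through every asset in the user's trash and restore each batch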
    const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) =>
      this.assetRepository.getByUserId(pagination, auth.user.id, {
        trashedBefore: DateTime.now().toJSDate(),
      }),
    );

    for await (const assets of assetPagination) {
      const ids = assets.map((a) => a.id);
      await this.restoreAndSend(auth, ids);
    }
  }

  async empty(auth: AuthDto): Promise<void> {
    const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) =>
      this.assetRepository.getByUserId(pagination, auth.user.id, {
        trashedBefore: DateTime.now().toJSDate(),
      }),
    );

    for await (const assets of assetPagination) {
      await this.jobRepository.queueAll(
        assets.map((asset) => ({ name: JobName.ASSET_DELETION, data: { id: asset.id } })),
      );
    }
  }

  private async restoreAndSend(auth: AuthDto, ids: string[]) {
    if (ids.length === 0) {
      return;
    }

    await this.assetRepository.restoreAll(ids);
    this.communicationRepository.send(ClientEvent.ASSET_RESTORE, auth.user.id, ids);
  }
}
@@ -0,0 +1,561 @@
import {
  BadRequestException,
  ForbiddenException,
  InternalServerErrorException,
  NotFoundException,
} from '@nestjs/common';
import { when } from 'jest-when';
import { JobName } from 'src/domain/job/job.constants';
import { UpdateUserDto, mapUser } from 'src/dtos/user.dto';
import { UserEntity, UserStatus } from 'src/entities/user.entity';
import { IAlbumRepository } from 'src/interfaces/album.repository';
import { ICryptoRepository } from 'src/interfaces/crypto.repository';
import { IJobRepository } from 'src/interfaces/job.repository';
import { ILibraryRepository } from 'src/interfaces/library.repository';
import { IStorageRepository } from 'src/interfaces/storage.repository';
import { ISystemConfigRepository } from 'src/interfaces/system-config.repository';
import { IUserRepository } from 'src/interfaces/user.repository';
import { UserService } from 'src/services/user.service';
import { CacheControl, ImmichFileResponse } from 'src/utils';
import { authStub } from 'test/fixtures/auth.stub';
import { systemConfigStub } from 'test/fixtures/system-config.stub';
import { userStub } from 'test/fixtures/user.stub';
import { newAlbumRepositoryMock } from 'test/repositories/album.repository.mock';
import { newCryptoRepositoryMock } from 'test/repositories/crypto.repository.mock';
import { newJobRepositoryMock } from 'test/repositories/job.repository.mock';
import { newLibraryRepositoryMock } from 'test/repositories/library.repository.mock';
import { newStorageRepositoryMock } from 'test/repositories/storage.repository.mock';
import { newSystemConfigRepositoryMock } from 'test/repositories/system-config.repository.mock';
import { newUserRepositoryMock } from 'test/repositories/user.repository.mock';

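// builds a deletedAt timestamp the given number of days in the past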
const makeDeletedAt = (daysAgo: number) => {
  const deletedAt = new Date();
  deletedAt.setDate(deletedAt.getDate() - daysAgo);
  return deletedAt;
};

describe(UserService.name, () => {
  let sut: UserService;
  let userMock: jest.Mocked<IUserRepository>;
  let cryptoRepositoryMock: jest.Mocked<ICryptoRepository>;

  let albumMock: jest.Mocked<IAlbumRepository>;
  let jobMock: jest.Mocked<IJobRepository>;
  let libraryMock: jest.Mocked<ILibraryRepository>;
  let storageMock: jest.Mocked<IStorageRepository>;
  let configMock: jest.Mocked<ISystemConfigRepository>;

  beforeEach(() => {
    albumMock = newAlbumRepositoryMock();
    configMock = newSystemConfigRepositoryMock();
    cryptoRepositoryMock = newCryptoRepositoryMock();
    jobMock = newJobRepositoryMock();
    libraryMock = newLibraryRepositoryMock();
    storageMock = newStorageRepositoryMock();
    userMock = newUserRepositoryMock();

    sut = new UserService(albumMock, cryptoRepositoryMock, jobMock, libraryMock, storageMock, configMock, userMock);

    when(userMock.get).calledWith(authStub.admin.user.id, {}).mockResolvedValue(userStub.admin);
    when(userMock.get).calledWith(authStub.admin.user.id, { withDeleted: true }).mockResolvedValue(userStub.admin);
    when(userMock.get).calledWith(authStub.user1.user.id, {}).mockResolvedValue(userStub.user1);
    when(userMock.get).calledWith(authStub.user1.user.id, { withDeleted: true }).mockResolvedValue(userStub.user1);
  });

  describe('getAll', () => {
    it('should get all users', async () => {
      userMock.getList.mockResolvedValue([userStub.admin]);
      await expect(sut.getAll(authStub.admin, false)).resolves.toEqual([
        expect.objectContaining({
          id: authStub.admin.user.id,
          email: authStub.admin.user.email,
        }),
      ]);
      expect(userMock.getList).toHaveBeenCalledWith({ withDeleted: true });
    });
  });

  describe('get', () => {
    it('should get a user by id', async () => {
      userMock.get.mockResolvedValue(userStub.admin);
      await sut.get(authStub.admin.user.id);
      expect(userMock.get).toHaveBeenCalledWith(authStub.admin.user.id, { withDeleted: false });
    });

    it('should throw an error if a user is not found', async () => {
      userMock.get.mockResolvedValue(null);
      await expect(sut.get(authStub.admin.user.id)).rejects.toBeInstanceOf(NotFoundException);
      expect(userMock.get).toHaveBeenCalledWith(authStub.admin.user.id, { withDeleted: false });
    });
  });

  describe('getMe', () => {
    it("should get the auth user's info", async () => {
      userMock.get.mockResolvedValue(userStub.admin);
      await sut.getMe(authStub.admin);
      expect(userMock.get).toHaveBeenCalledWith(authStub.admin.user.id, {});
    });

    it('should throw an error if a user is not found', async () => {
      userMock.get.mockResolvedValue(null);
      await expect(sut.getMe(authStub.admin)).rejects.toBeInstanceOf(BadRequestException);
      expect(userMock.get).toHaveBeenCalledWith(authStub.admin.user.id, {});
    });
  });

  describe('update', () => {
    it('should update user', async () => {
      const update: UpdateUserDto = {
        id: userStub.user1.id,
        shouldChangePassword: true,
        email: 'immich@test.com',
        storageLabel: 'storage_label',
      };
      userMock.getByEmail.mockResolvedValue(null);
      userMock.getByStorageLabel.mockResolvedValue(null);
      userMock.update.mockResolvedValue(userStub.user1);

      await sut.update({ user: { ...authStub.user1.user, isAdmin: true } }, update);

      expect(userMock.getByEmail).toHaveBeenCalledWith(update.email);
      expect(userMock.getByStorageLabel).toHaveBeenCalledWith(update.storageLabel);
    });

    it('should not set an empty string for storage label', async () => {
      userMock.update.mockResolvedValue(userStub.user1);
      await sut.update(authStub.admin, { id: userStub.user1.id, storageLabel: '' });
      expect(userMock.update).toHaveBeenCalledWith(userStub.user1.id, {
        id: userStub.user1.id,
        storageLabel: null,
      });
    });

    it('should omit a storage label set by non-admin users', async () => {
      userMock.update.mockResolvedValue(userStub.user1);
      await sut.update({ user: userStub.user1 }, { id: userStub.user1.id, storageLabel: 'admin' });
      expect(userMock.update).toHaveBeenCalledWith(userStub.user1.id, { id: userStub.user1.id });
    });

    it('should only let a user update their own information', async () => {
      when(userMock.get)
        .calledWith('not_immich_auth_user_id', {})
        .mockResolvedValueOnce({
          ...userStub.user1,
          id: 'not_immich_auth_user_id',
        });

      const result = sut.update(
        { user: userStub.user1 },
        {
          id: 'not_immich_auth_user_id',
          password: 'I take over your account now',
        },
      );
      await expect(result).rejects.toBeInstanceOf(ForbiddenException);
    });

    it('should let a user change their email', async () => {
      const dto = { id: userStub.user1.id, email: 'updated@test.com' };

      userMock.get.mockResolvedValue(userStub.user1);
      userMock.update.mockResolvedValue(userStub.user1);

      await sut.update({ user: userStub.user1 }, dto);

      expect(userMock.update).toHaveBeenCalledWith(userStub.user1.id, {
        id: 'user-id',
        email: 'updated@test.com',
      });
    });

    it('should not let a user change their email to one already in use', async () => {
      const dto = { id: userStub.user1.id, email: 'updated@test.com' };

      userMock.get.mockResolvedValue(userStub.user1);
      userMock.getByEmail.mockResolvedValue(userStub.admin);

      await expect(sut.update({ user: userStub.user1 }, dto)).rejects.toBeInstanceOf(BadRequestException);

      expect(userMock.update).not.toHaveBeenCalled();
    });

    it('should not let the admin change the storage label to one already in use', async () => {
      const dto = { id: userStub.user1.id, storageLabel: 'admin' };

      userMock.get.mockResolvedValue(userStub.user1);
      userMock.getByStorageLabel.mockResolvedValue(userStub.admin);

      await expect(sut.update(authStub.admin, dto)).rejects.toBeInstanceOf(BadRequestException);

      expect(userMock.update).not.toHaveBeenCalled();
    });

    it("should let the admin update any user's information", async () => {
      const update: UpdateUserDto = {
        id: userStub.user1.id,
        shouldChangePassword: true,
      };

      when(userMock.update).calledWith(userStub.user1.id, update).mockResolvedValueOnce(userStub.user1);
      await sut.update(authStub.admin, update);
      expect(userMock.update).toHaveBeenCalledWith(userStub.user1.id, {
        id: 'user-id',
        shouldChangePassword: true,
      });
    });

    it('should throw an error when updating a user that does not exist', async () => {
      when(userMock.get).calledWith(userStub.user1.id, {}).mockResolvedValueOnce(null);

      const result = sut.update(authStub.admin, {
        id: userStub.user1.id,
        shouldChangePassword: true,
      });

      await expect(result).rejects.toBeInstanceOf(BadRequestException);
    });

    it('should let the admin update themselves', async () => {
      const dto = { id: userStub.admin.id, shouldChangePassword: true, isAdmin: true };

      when(userMock.update).calledWith(userStub.admin.id, dto).mockResolvedValueOnce(userStub.admin);

      await sut.update(authStub.admin, dto);

      expect(userMock.update).toHaveBeenCalledWith(userStub.admin.id, dto);
    });

    it('should not let another user become an admin', async () => {
      const dto = { id: userStub.user1.id, shouldChangePassword: true, isAdmin: true };

      when(userMock.get).calledWith(userStub.user1.id, {}).mockResolvedValueOnce(userStub.user1);

      await expect(sut.update(authStub.admin, dto)).rejects.toBeInstanceOf(BadRequestException);
    });
  });

  describe('restore', () => {
    it('should throw an error if the user could not be found', async () => {
      when(userMock.get).calledWith(userStub.admin.id, { withDeleted: true }).mockResolvedValue(null);
      await expect(sut.restore(authStub.admin, userStub.admin.id)).rejects.toThrowError(BadRequestException);
      expect(userMock.update).not.toHaveBeenCalled();
    });

    it('should restore a user', async () => {
      userMock.get.mockResolvedValue(userStub.user1);
      userMock.update.mockResolvedValue(userStub.user1);
      await expect(sut.restore(authStub.admin, userStub.user1.id)).resolves.toEqual(mapUser(userStub.user1));
      expect(userMock.update).toHaveBeenCalledWith(userStub.user1.id, { status: UserStatus.ACTIVE, deletedAt: null });
    });
  });

  describe('delete', () => {
    it('should throw an error if the user could not be found', async () => {
      userMock.get.mockResolvedValue(null);

      await expect(sut.delete(authStub.admin, userStub.admin.id, {})).rejects.toThrowError(BadRequestException);
      expect(userMock.delete).not.toHaveBeenCalled();
    });

    it('should not allow deleting the admin user', async () => {
      await expect(sut.delete(authStub.admin, userStub.admin.id, {})).rejects.toBeInstanceOf(ForbiddenException);
    });

    it('should require the auth user be an admin', async () => {
      await expect(sut.delete(authStub.user1, authStub.admin.user.id, {})).rejects.toBeInstanceOf(ForbiddenException);

      expect(userMock.delete).not.toHaveBeenCalled();
    });

    it('should delete user', async () => {
      userMock.get.mockResolvedValue(userStub.user1);
      userMock.update.mockResolvedValue(userStub.user1);

      await expect(sut.delete(authStub.admin, userStub.user1.id, {})).resolves.toEqual(mapUser(userStub.user1));
      expect(userMock.update).toHaveBeenCalledWith(userStub.user1.id, {
        status: UserStatus.DELETED,
        deletedAt: expect.any(Date),
      });
    });

    it('should force delete user', async () => {
      userMock.get.mockResolvedValue(userStub.user1);
      userMock.update.mockResolvedValue(userStub.user1);

      await expect(sut.delete(authStub.admin, userStub.user1.id, { force: true })).resolves.toEqual(
        mapUser(userStub.user1),
      );

      expect(userMock.update).toHaveBeenCalledWith(userStub.user1.id, {
        status: UserStatus.REMOVING,
        deletedAt: expect.any(Date),
      });
      expect(jobMock.queue).toHaveBeenCalledWith({
        name: JobName.USER_DELETION,
        data: { id: userStub.user1.id, force: true },
      });
    });
  });

  describe('create', () => {
    it('should not create a user if there is no local admin account', async () => {
      when(userMock.getAdmin).calledWith().mockResolvedValueOnce(null);

      await expect(
        sut.create({
          email: 'john_smith@email.com',
          name: 'John Smith',
          password: 'password',
        }),
      ).rejects.toBeInstanceOf(BadRequestException);
    });

    it('should create user', async () => {
      userMock.getAdmin.mockResolvedValue(userStub.admin);
      userMock.create.mockResolvedValue(userStub.user1);

      await expect(
        sut.create({
          email: userStub.user1.email,
          name: userStub.user1.name,
          password: 'password',
          storageLabel: 'label',
        }),
      ).resolves.toEqual(mapUser(userStub.user1));

      expect(userMock.getAdmin).toHaveBeenCalled();
      expect(userMock.create).toHaveBeenCalledWith({
        email: userStub.user1.email,
        name: userStub.user1.name,
        storageLabel: 'label',
        password: expect.anything(),
      });
    });
  });

  describe('createProfileImage', () => {
    it('should throw an error if the user does not exist', async () => {
      const file = { path: '/profile/path' } as Express.Multer.File;
      userMock.update.mockResolvedValue({ ...userStub.admin, profileImagePath: file.path });

      await expect(sut.createProfileImage(authStub.admin, file)).rejects.toThrowError(BadRequestException);
    });

    it('should throw an error if the user profile could not be updated with the new image', async () => {
      const file = { path: '/profile/path' } as Express.Multer.File;
      userMock.get.mockResolvedValue(userStub.profilePath);
      userMock.update.mockRejectedValue(new InternalServerErrorException('mocked error'));

      await expect(sut.createProfileImage(authStub.admin, file)).rejects.toThrowError(InternalServerErrorException);
    });

    it('should delete the previous profile image', async () => {
      const file = { path: '/profile/path' } as Express.Multer.File;
      userMock.get.mockResolvedValue(userStub.profilePath);
      const files = [userStub.profilePath.profileImagePath];
      userMock.update.mockResolvedValue({ ...userStub.admin, profileImagePath: file.path });

      await sut.createProfileImage(authStub.admin, file);
      expect(jobMock.queue.mock.calls).toEqual([[{ name: JobName.DELETE_FILES, data: { files } }]]);
    });

    it('should not delete the profile image if it has not been set', async () => {
      const file = { path: '/profile/path' } as Express.Multer.File;
      userMock.get.mockResolvedValue(userStub.admin);
      userMock.update.mockResolvedValue({ ...userStub.admin, profileImagePath: file.path });

      await sut.createProfileImage(authStub.admin, file);
      expect(jobMock.queue).not.toHaveBeenCalled();
      expect(jobMock.queueAll).not.toHaveBeenCalled();
    });
  });

  describe('deleteProfileImage', () => {
    it('should throw an error if the user has no profile image', async () => {
      userMock.get.mockResolvedValue(userStub.admin);

      await expect(sut.deleteProfileImage(authStub.admin)).rejects.toBeInstanceOf(BadRequestException);
      expect(jobMock.queue).not.toHaveBeenCalled();
      expect(jobMock.queueAll).not.toHaveBeenCalled();
    });

    it('should delete the profile image if the user has one', async () => {
      userMock.get.mockResolvedValue(userStub.profilePath);
      const files = [userStub.profilePath.profileImagePath];

      await sut.deleteProfileImage(authStub.admin);
      expect(jobMock.queue.mock.calls).toEqual([[{ name: JobName.DELETE_FILES, data: { files } }]]);
    });
  });

  describe('getUserProfileImage', () => {
    it('should throw an error if the user does not exist', async () => {
      userMock.get.mockResolvedValue(null);

      await expect(sut.getProfileImage(userStub.admin.id)).rejects.toBeInstanceOf(BadRequestException);

      expect(userMock.get).toHaveBeenCalledWith(userStub.admin.id, {});
    });

    it('should throw an error if the user does not have a picture', async () => {
      userMock.get.mockResolvedValue(userStub.admin);

      await expect(sut.getProfileImage(userStub.admin.id)).rejects.toBeInstanceOf(NotFoundException);

      expect(userMock.get).toHaveBeenCalledWith(userStub.admin.id, {});
    });

    it('should return the profile picture', async () => {
      userMock.get.mockResolvedValue(userStub.profilePath);

      await expect(sut.getProfileImage(userStub.profilePath.id)).resolves.toEqual(
        new ImmichFileResponse({
          path: '/path/to/profile.jpg',
          contentType: 'image/jpeg',
          cacheControl: CacheControl.NONE,
        }),
      );

      expect(userMock.get).toHaveBeenCalledWith(userStub.profilePath.id, {});
    });
  });

  describe('resetAdminPassword', () => {
    it('should only work when there is an admin account', async () => {
      userMock.getAdmin.mockResolvedValue(null);
      const ask = jest.fn().mockResolvedValue('new-password');

      await expect(sut.resetAdminPassword(ask)).rejects.toBeInstanceOf(BadRequestException);

      expect(ask).not.toHaveBeenCalled();
    });

    it('should default to a random password', async () => {
      userMock.getAdmin.mockResolvedValue(userStub.admin);
      const ask = jest.fn().mockImplementation(() => {});

      const response = await sut.resetAdminPassword(ask);

      const [id, update] = userMock.update.mock.calls[0];

      expect(response.provided).toBe(false);
      expect(ask).toHaveBeenCalled();
      expect(id).toEqual(userStub.admin.id);
      expect(update.password).toBeDefined();
    });

    it('should use the supplied password', async () => {
      userMock.getAdmin.mockResolvedValue(userStub.admin);
      const ask = jest.fn().mockResolvedValue('new-password');

      const response = await sut.resetAdminPassword(ask);

      const [id, update] = userMock.update.mock.calls[0];

      expect(response.provided).toBe(true);
      expect(ask).toHaveBeenCalled();
      expect(id).toEqual(userStub.admin.id);
      expect(update.password).toBeDefined();
    });
  });

  describe('handleQueueUserDelete', () => {
    it('should skip users not ready for deletion', async () => {
      userMock.getDeletedUsers.mockResolvedValue([
        {},
        { deletedAt: undefined },
        { deletedAt: null },
        { deletedAt: makeDeletedAt(5) },
      ] as UserEntity[]);

      await sut.handleUserDeleteCheck();

      expect(userMock.getDeletedUsers).toHaveBeenCalled();
      expect(jobMock.queue).not.toHaveBeenCalled();
      expect(jobMock.queueAll).toHaveBeenCalledWith([]);
    });

    it('should skip users not ready for deletion - deleteDelay30', async () => {
      configMock.load.mockResolvedValue(systemConfigStub.deleteDelay30);
      userMock.getDeletedUsers.mockResolvedValue([
        {},
        { deletedAt: undefined },
        { deletedAt: null },
        { deletedAt: makeDeletedAt(15) },
      ] as UserEntity[]);

      await sut.handleUserDeleteCheck();

      expect(userMock.getDeletedUsers).toHaveBeenCalled();
      expect(jobMock.queue).not.toHaveBeenCalled();
      expect(jobMock.queueAll).toHaveBeenCalledWith([]);
    });

    it('should queue user ready for deletion', async () => {
      const user = { id: 'deleted-user', deletedAt: makeDeletedAt(10) };
      userMock.getDeletedUsers.mockResolvedValue([user] as UserEntity[]);

      await sut.handleUserDeleteCheck();

      expect(userMock.getDeletedUsers).toHaveBeenCalled();
      expect(jobMock.queueAll).toHaveBeenCalledWith([{ name: JobName.USER_DELETION, data: { id: user.id } }]);
    });

    it('should queue user ready for deletion - deleteDelay30', async () => {
      const user = { id: 'deleted-user', deletedAt: makeDeletedAt(31) };
      userMock.getDeletedUsers.mockResolvedValue([user] as UserEntity[]);

      await sut.handleUserDeleteCheck();

      expect(userMock.getDeletedUsers).toHaveBeenCalled();
      expect(jobMock.queueAll).toHaveBeenCalledWith([{ name: JobName.USER_DELETION, data: { id: user.id } }]);
    });
  });

  describe('handleUserDelete', () => {
    it('should skip users not ready for deletion', async () => {
      const user = { id: 'user-1', deletedAt: makeDeletedAt(5) } as UserEntity;
      userMock.get.mockResolvedValue(user);

      await sut.handleUserDelete({ id: user.id });

      expect(storageMock.unlinkDir).not.toHaveBeenCalled();
      expect(userMock.delete).not.toHaveBeenCalled();
    });

    it('should delete the user and associated assets', async () => {
      const user = { id: 'deleted-user', deletedAt: makeDeletedAt(10) } as UserEntity;
      userMock.get.mockResolvedValue(user);

      await sut.handleUserDelete({ id: user.id });

      const options = { force: true, recursive: true };

      expect(storageMock.unlinkDir).toHaveBeenCalledWith('upload/library/deleted-user', options);
      expect(storageMock.unlinkDir).toHaveBeenCalledWith('upload/upload/deleted-user', options);
      expect(storageMock.unlinkDir).toHaveBeenCalledWith('upload/profile/deleted-user', options);
      expect(storageMock.unlinkDir).toHaveBeenCalledWith('upload/thumbs/deleted-user', options);
      expect(storageMock.unlinkDir).toHaveBeenCalledWith('upload/encoded-video/deleted-user', options);
      expect(albumMock.deleteAll).toHaveBeenCalledWith(user.id);
      expect(userMock.delete).toHaveBeenCalledWith(user, true);
    });

    it('should delete the library path for a storage label', async () => {
      const user = { id: 'deleted-user', deletedAt: makeDeletedAt(10), storageLabel: 'admin' } as UserEntity;
      userMock.get.mockResolvedValue(user);

      await sut.handleUserDelete({ id: user.id });

      const options = { force: true, recursive: true };

      expect(storageMock.unlinkDir).toHaveBeenCalledWith('upload/library/admin', options);
    });
  });

  describe('handleUserSyncUsage', () => {
    it('should sync usage', async () => {
      await sut.handleUserSyncUsage();
      expect(userMock.syncUsage).toHaveBeenCalledTimes(1);
    });
  });
});
@@ -0,0 +1,212 @@
|
||||
import { BadRequestException, ForbiddenException, Inject, Injectable, NotFoundException } from '@nestjs/common';
|
||||
import { DateTime } from 'luxon';
|
||||
import { randomBytes } from 'node:crypto';
|
||||
import { StorageCore, StorageFolder } from 'src/cores/storage.core';
|
||||
import { SystemConfigCore } from 'src/cores/system-config.core';
|
||||
import { UserCore } from 'src/cores/user.core';
|
||||
import { JobName } from 'src/domain/job/job.constants';
|
||||
import { IEntityJob } from 'src/domain/job/job.interface';
|
||||
import { AuthDto } from 'src/dtos/auth.dto';
|
||||
import { CreateProfileImageResponseDto, mapCreateProfileImageResponse } from 'src/dtos/user-profile.dto';
|
||||
import { CreateUserDto, DeleteUserDto, UpdateUserDto, UserResponseDto, mapUser } from 'src/dtos/user.dto';
|
||||
import { UserEntity, UserStatus } from 'src/entities/user.entity';
|
||||
import { ImmichLogger } from 'src/infra/logger';
|
||||
import { IAlbumRepository } from 'src/interfaces/album.repository';
|
||||
import { ICryptoRepository } from 'src/interfaces/crypto.repository';
|
||||
import { IJobRepository, JobStatus } from 'src/interfaces/job.repository';
|
||||
import { ILibraryRepository } from 'src/interfaces/library.repository';
|
||||
import { IStorageRepository } from 'src/interfaces/storage.repository';
|
||||
import { ISystemConfigRepository } from 'src/interfaces/system-config.repository';
|
||||
import { IUserRepository, UserFindOptions } from 'src/interfaces/user.repository';
|
||||
import { CacheControl, ImmichFileResponse } from 'src/utils';
|
||||
|
||||
@Injectable()
export class UserService {
  private configCore: SystemConfigCore;
  private logger = new ImmichLogger(UserService.name);
  private userCore: UserCore;

  constructor(
    @Inject(IAlbumRepository) private albumRepository: IAlbumRepository,
    @Inject(ICryptoRepository) cryptoRepository: ICryptoRepository,
    @Inject(IJobRepository) private jobRepository: IJobRepository,
    @Inject(ILibraryRepository) libraryRepository: ILibraryRepository,
    @Inject(IStorageRepository) private storageRepository: IStorageRepository,
    @Inject(ISystemConfigRepository) configRepository: ISystemConfigRepository,
    @Inject(IUserRepository) private userRepository: IUserRepository,
  ) {
    this.userCore = UserCore.create(cryptoRepository, libraryRepository, userRepository);
    this.configCore = SystemConfigCore.create(configRepository);
  }

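  // Note the inversion: isAll=true lists only active users, while
  // isAll=false includes soft-deleted accounts as well.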
  async getAll(auth: AuthDto, isAll: boolean): Promise<UserResponseDto[]> {
    const users = await this.userRepository.getList({ withDeleted: !isAll });
    return users.map((user) => mapUser(user));
  }

  async get(userId: string): Promise<UserResponseDto> {
    const user = await this.userRepository.get(userId, { withDeleted: false });
    if (!user) {
      throw new NotFoundException('User not found');
    }

    return mapUser(user);
  }

  getMe(auth: AuthDto): Promise<UserResponseDto> {
    return this.findOrFail(auth.user.id, {}).then(mapUser);
  }

  create(createUserDto: CreateUserDto): Promise<UserResponseDto> {
    return this.userCore.createUser(createUserDto).then(mapUser);
  }

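  // When the quota is being changed, re-sync the stored usage counter first so
  // the new quota is evaluated against up-to-date numbers.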
  async update(auth: AuthDto, dto: UpdateUserDto): Promise<UserResponseDto> {
    const user = await this.findOrFail(dto.id, {});

    if (dto.quotaSizeInBytes && user.quotaSizeInBytes !== dto.quotaSizeInBytes) {
      await this.userRepository.syncUsage(dto.id);
    }

    return this.userCore.updateUser(auth.user, dto.id, dto).then(mapUser);
  }

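  // Soft-deletes a non-admin user and their albums. A forced delete marks the
  // user as REMOVING and queues the USER_DELETION job immediately; otherwise
  // the user stays DELETED until handleUserDeleteCheck picks them up.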
  async delete(auth: AuthDto, id: string, dto: DeleteUserDto): Promise<UserResponseDto> {
    const { force } = dto;
    const { isAdmin } = await this.findOrFail(id, {});
    if (isAdmin) {
      throw new ForbiddenException('Cannot delete admin user');
    }

    await this.albumRepository.softDeleteAll(id);

    const status = force ? UserStatus.REMOVING : UserStatus.DELETED;
    const user = await this.userRepository.update(id, { status, deletedAt: new Date() });

    if (force) {
      await this.jobRepository.queue({ name: JobName.USER_DELETION, data: { id: user.id, force } });
    }

    return mapUser(user);
  }

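  // Reverses a pending (soft) deletion and restores the user's albums.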
  async restore(auth: AuthDto, id: string): Promise<UserResponseDto> {
    await this.findOrFail(id, { withDeleted: true });
    await this.albumRepository.restoreAll(id);
    return this.userRepository.update(id, { deletedAt: null, status: UserStatus.ACTIVE }).then(mapUser);
  }

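  // Stores the uploaded image path on the user and queues the previous image
  // file, if any, for deletion.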
  async createProfileImage(auth: AuthDto, fileInfo: Express.Multer.File): Promise<CreateProfileImageResponseDto> {
    const { profileImagePath: oldPath } = await this.findOrFail(auth.user.id, { withDeleted: false });
    const updatedUser = await this.userRepository.update(auth.user.id, { profileImagePath: fileInfo.path });
    if (oldPath !== '') {
      await this.jobRepository.queue({ name: JobName.DELETE_FILES, data: { files: [oldPath] } });
    }
    return mapCreateProfileImageResponse(updatedUser.id, updatedUser.profileImagePath);
  }

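  // Clears the profile image path and queues the underlying file for deletion.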
  async deleteProfileImage(auth: AuthDto): Promise<void> {
    const user = await this.findOrFail(auth.user.id, { withDeleted: false });
    if (user.profileImagePath === '') {
      throw new BadRequestException("Can't delete a missing profile image");
    }
    await this.userRepository.update(auth.user.id, { profileImagePath: '' });
    await this.jobRepository.queue({ name: JobName.DELETE_FILES, data: { files: [user.profileImagePath] } });
  }

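  // Serves the profile image from disk with CacheControl.NONE, so clients do
  // not cache a stale image after it is replaced.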
  async getProfileImage(id: string): Promise<ImmichFileResponse> {
    const user = await this.findOrFail(id, {});
    if (!user.profileImagePath) {
      throw new NotFoundException('User does not have a profile image');
    }

    return new ImmichFileResponse({
      path: user.profileImagePath,
      contentType: 'image/jpeg',
      cacheControl: CacheControl.NONE,
    });
  }

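  // Resets the admin password to the value supplied by `ask`, falling back to
  // a randomly generated 24-byte string when none is provided.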
  async resetAdminPassword(ask: (admin: UserResponseDto) => Promise<string | undefined>) {
    const admin = await this.userRepository.getAdmin();
    if (!admin) {
      throw new BadRequestException('Admin account does not exist');
    }

    const providedPassword = await ask(mapUser(admin));
    const password = providedPassword || randomBytes(24).toString('base64').replaceAll(/\W/g, '');

    await this.userCore.updateUser(admin, admin.id, { password });

    return { admin, password, provided: !!providedPassword };
  }

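  // Job handler: re-syncs the stored per-user storage usage counters.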
  async handleUserSyncUsage(): Promise<JobStatus> {
    await this.userRepository.syncUsage();
    return JobStatus.SUCCESS;
  }

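  // Job handler: queues a USER_DELETION job for every soft-deleted user whose
  // grace period (config.user.deleteDelay, in days) has elapsed.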
  async handleUserDeleteCheck(): Promise<JobStatus> {
    const users = await this.userRepository.getDeletedUsers();
    const config = await this.configCore.getConfig();
    await this.jobRepository.queueAll(
      users.flatMap((user) =>
        this.isReadyForDeletion(user, config.user.deleteDelay)
          ? [{ name: JobName.USER_DELETION, data: { id: user.id } }]
          : [],
      ),
    );
    return JobStatus.SUCCESS;
  }

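  // Job handler: permanently removes a user's folders from disk, then deletes
  // their albums and user row from the database.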
  async handleUserDelete({ id, force }: IEntityJob): Promise<JobStatus> {
    const config = await this.configCore.getConfig();
    const user = await this.userRepository.get(id, { withDeleted: true });
    if (!user) {
      return JobStatus.FAILED;
    }

    // Extra guard: even when the job was queued, never hard-delete a user who
    // is still inside the grace period unless the deletion was forced.
    if (!force && !this.isReadyForDeletion(user, config.user.deleteDelay)) {
      this.logger.warn(`Skipped user that was not ready for deletion: id=${id}`);
      return JobStatus.SKIPPED;
    }

    this.logger.log(`Deleting user: ${user.id}`);

    const folders = [
      StorageCore.getLibraryFolder(user),
      StorageCore.getFolderLocation(StorageFolder.UPLOAD, user.id),
      StorageCore.getFolderLocation(StorageFolder.PROFILE, user.id),
      StorageCore.getFolderLocation(StorageFolder.THUMBNAILS, user.id),
      StorageCore.getFolderLocation(StorageFolder.ENCODED_VIDEO, user.id),
    ];

    for (const folder of folders) {
      this.logger.warn(`Removing user from filesystem: ${folder}`);
      await this.storageRepository.unlinkDir(folder, { recursive: true, force: true });
    }

    this.logger.warn(`Removing user from database: ${user.id}`);
    await this.albumRepository.deleteAll(user.id);
    await this.userRepository.delete(user, true);

    return JobStatus.SUCCESS;
  }

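  // A user is ready for permanent deletion once deleteDelay days have passed
  // since their deletedAt timestamp.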
  private isReadyForDeletion(user: UserEntity, deleteDelay: number): boolean {
    if (!user.deletedAt) {
      return false;
    }

    return DateTime.now().minus({ days: deleteDelay }) > DateTime.fromJSDate(user.deletedAt);
  }

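  // Looks up a user and throws a BadRequestException when no match is found.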
  private async findOrFail(id: string, options: UserFindOptions) {
    const user = await this.userRepository.get(id, options);
    if (!user) {
      throw new BadRequestException('User not found');
    }
    return user;
  }
}