Merge branch 'lighter_buckets_web' into lighter_buckets_server

Min Idzelis
2025-04-29 01:58:00 +00:00
328 changed files with 6090 additions and 2169 deletions

View File

@@ -44,7 +44,7 @@ const imports = [
BullModule.registerQueue(...bull.queues),
ClsModule.forRoot(cls.config),
OpenTelemetryModule.forRoot(otel),
KyselyModule.forRoot(getKyselyConfig(database.config.kysely)),
KyselyModule.forRoot(getKyselyConfig(database.config)),
];
class BaseModule implements OnModuleInit, OnModuleDestroy {
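Note: database.config no longer nests separate kysely and typeorm configs; getKyselyConfig now receives the connection parameters directly. Based on how config.repository.ts builds this value further down in the commit, the union it receives is presumably shaped roughly like the sketch below. The DatabaseConnectionParams name comes from the existing src/types import; the rest is illustrative.

// Sketch only: the two variants that getKyselyConfig / asPostgresConnectionConfig
// are expected to handle (field names mirror the config.repository.ts changes below).
type DatabaseConnectionURL = { connectionType: 'url'; url: string };

type DatabaseConnectionParts = {
  connectionType: 'parts';
  host: string;
  port: number;
  username: string;
  password: string;
  database: string;
};

export type DatabaseConnectionParams = DatabaseConnectionURL | DatabaseConnectionParts;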

View File

@@ -10,7 +10,7 @@ import { DatabaseRepository } from 'src/repositories/database.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import 'src/schema';
import { schemaDiff, schemaFromCode, schemaFromDatabase } from 'src/sql-tools';
import { getKyselyConfig } from 'src/utils/database';
import { asPostgresConnectionConfig, getKyselyConfig } from 'src/utils/database';
const main = async () => {
const command = process.argv[2];
@@ -56,7 +56,7 @@ const main = async () => {
const getDatabaseClient = () => {
const configRepository = new ConfigRepository();
const { database } = configRepository.getEnv();
return new Kysely<any>(getKyselyConfig(database.config.kysely));
return new Kysely<any>(getKyselyConfig(database.config));
};
const runQuery = async (query: string) => {
@@ -105,7 +105,7 @@ const create = (path: string, up: string[], down: string[]) => {
const compare = async () => {
const configRepository = new ConfigRepository();
const { database } = configRepository.getEnv();
const db = postgres(database.config.kysely);
const db = postgres(asPostgresConnectionConfig(database.config));
const source = schemaFromCode();
const target = await schemaFromDatabase(db, {});
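The compare path now converts the shared connection params into a postgres.js option bag via asPostgresConnectionConfig. A minimal sketch of what that helper likely does, assuming it absorbs the pg-connection-string parsing that previously lived in config.repository.ts; the exact field mapping and ssl handling are assumptions and are elided here.

import { parse } from 'pg-connection-string';
import { DatabaseConnectionParams } from 'src/types';

// Hypothetical sketch only: normalize either variant to the options that
// postgres()/Kysely expect; ssl validation (isValidSsl) is omitted.
export const asPostgresConnectionConfig = (params: DatabaseConnectionParams) => {
  if (params.connectionType === 'parts') {
    const { host, port, username, password, database } = params;
    return { host, port, username, password, database };
  }

  const { host, port, user, password, database } = parse(params.url);
  return {
    host: host ?? undefined,
    port: port ? Number(port) : undefined,
    username: user ?? undefined,
    password: password ?? undefined,
    database: database ?? undefined,
  };
};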

View File

@@ -78,7 +78,7 @@ class SqlGenerator {
const moduleFixture = await Test.createTestingModule({
imports: [
KyselyModule.forRoot({
...getKyselyConfig(database.config.kysely),
...getKyselyConfig(database.config),
log: (event) => {
if (event.level === 'query') {
this.sqlLogger.logQuery(event.query.sql);

View File

@@ -14,6 +14,7 @@ import { LibraryController } from 'src/controllers/library.controller';
import { MapController } from 'src/controllers/map.controller';
import { MemoryController } from 'src/controllers/memory.controller';
import { NotificationAdminController } from 'src/controllers/notification-admin.controller';
import { NotificationController } from 'src/controllers/notification.controller';
import { OAuthController } from 'src/controllers/oauth.controller';
import { PartnerController } from 'src/controllers/partner.controller';
import { PersonController } from 'src/controllers/person.controller';
@@ -47,6 +48,7 @@ export const controllers = [
LibraryController,
MapController,
MemoryController,
NotificationController,
NotificationAdminController,
OAuthController,
PartnerController,

View File

@@ -1,16 +1,28 @@
import { Body, Controller, HttpCode, HttpStatus, Param, Post } from '@nestjs/common';
import { ApiTags } from '@nestjs/swagger';
import { AuthDto } from 'src/dtos/auth.dto';
import { TemplateDto, TemplateResponseDto, TestEmailResponseDto } from 'src/dtos/notification.dto';
import {
NotificationCreateDto,
NotificationDto,
TemplateDto,
TemplateResponseDto,
TestEmailResponseDto,
} from 'src/dtos/notification.dto';
import { SystemConfigSmtpDto } from 'src/dtos/system-config.dto';
import { Auth, Authenticated } from 'src/middleware/auth.guard';
import { EmailTemplate } from 'src/repositories/email.repository';
import { NotificationService } from 'src/services/notification.service';
import { NotificationAdminService } from 'src/services/notification-admin.service';
@ApiTags('Notifications (Admin)')
@Controller('notifications/admin')
@Controller('admin/notifications')
export class NotificationAdminController {
constructor(private service: NotificationService) {}
constructor(private service: NotificationAdminService) {}
@Post()
@Authenticated({ admin: true })
createNotification(@Auth() auth: AuthDto, @Body() dto: NotificationCreateDto): Promise<NotificationDto> {
return this.service.create(auth, dto);
}
@Post('test-email')
@HttpCode(HttpStatus.OK)

View File

@@ -0,0 +1,60 @@
import { Body, Controller, Delete, Get, Param, Put, Query } from '@nestjs/common';
import { ApiTags } from '@nestjs/swagger';
import { AuthDto } from 'src/dtos/auth.dto';
import {
NotificationDeleteAllDto,
NotificationDto,
NotificationSearchDto,
NotificationUpdateAllDto,
NotificationUpdateDto,
} from 'src/dtos/notification.dto';
import { Permission } from 'src/enum';
import { Auth, Authenticated } from 'src/middleware/auth.guard';
import { NotificationService } from 'src/services/notification.service';
import { UUIDParamDto } from 'src/validation';
@ApiTags('Notifications')
@Controller('notifications')
export class NotificationController {
constructor(private service: NotificationService) {}
@Get()
@Authenticated({ permission: Permission.NOTIFICATION_READ })
getNotifications(@Auth() auth: AuthDto, @Query() dto: NotificationSearchDto): Promise<NotificationDto[]> {
return this.service.search(auth, dto);
}
@Put()
@Authenticated({ permission: Permission.NOTIFICATION_UPDATE })
updateNotifications(@Auth() auth: AuthDto, @Body() dto: NotificationUpdateAllDto): Promise<void> {
return this.service.updateAll(auth, dto);
}
@Delete()
@Authenticated({ permission: Permission.NOTIFICATION_DELETE })
deleteNotifications(@Auth() auth: AuthDto, @Body() dto: NotificationDeleteAllDto): Promise<void> {
return this.service.deleteAll(auth, dto);
}
@Get(':id')
@Authenticated({ permission: Permission.NOTIFICATION_READ })
getNotification(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise<NotificationDto> {
return this.service.get(auth, id);
}
@Put(':id')
@Authenticated({ permission: Permission.NOTIFICATION_UPDATE })
updateNotification(
@Auth() auth: AuthDto,
@Param() { id }: UUIDParamDto,
@Body() dto: NotificationUpdateDto,
): Promise<NotificationDto> {
return this.service.update(auth, id, dto);
}
@Delete(':id')
@Authenticated({ permission: Permission.NOTIFICATION_DELETE })
deleteNotification(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise<void> {
return this.service.delete(auth, id);
}
}
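For reference, a rough client-side sketch of the routes this controller (and the admin create route above) exposes. The base URL, /api prefix, and x-api-key header follow the usual Immich conventions but are assumptions here; values are illustrative.

// Illustrative only: list unread notifications, then mark the first one as read.
const base = 'http://localhost:2283/api';
const headers = { 'x-api-key': process.env.IMMICH_API_KEY ?? '', 'Content-Type': 'application/json' };

const unread = await fetch(`${base}/notifications?unread=true`, { headers }).then((res) => res.json());

if (unread.length > 0) {
  await fetch(`${base}/notifications/${unread[0].id}`, {
    method: 'PUT',
    headers,
    body: JSON.stringify({ readAt: new Date().toISOString() }),
  });
}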

View File

@@ -90,7 +90,7 @@ export class StorageCore {
return StorageCore.getNestedPath(StorageFolder.THUMBNAILS, person.ownerId, `${person.id}.jpeg`);
}
static getImagePath(asset: ThumbnailPathEntity, type: GeneratedImageType, format: ImageFormat) {
static getImagePath(asset: ThumbnailPathEntity, type: GeneratedImageType, format: 'jpeg' | 'webp') {
return StorageCore.getNestedPath(StorageFolder.THUMBNAILS, asset.ownerId, `${asset.id}-${type}.${format}`);
}

View File

@@ -9,6 +9,7 @@ import {
Permission,
SharedLinkType,
SourceType,
UserAvatarColor,
UserStatus,
} from 'src/enum';
import { OnThisDayData, UserMetadataItem } from 'src/types';
@@ -122,6 +123,7 @@ export type User = {
id: string;
name: string;
email: string;
avatarColor: UserAvatarColor | null;
profileImagePath: string;
profileChangedAt: Date;
};
@@ -264,7 +266,15 @@ export type AssetFace = {
person?: Person | null;
};
const userColumns = ['id', 'name', 'email', 'profileImagePath', 'profileChangedAt'] as const;
const userColumns = ['id', 'name', 'email', 'avatarColor', 'profileImagePath', 'profileChangedAt'] as const;
const userWithPrefixColumns = [
'users.id',
'users.name',
'users.email',
'users.avatarColor',
'users.profileImagePath',
'users.profileChangedAt',
] as const;
export const columns = {
asset: [
@@ -306,7 +316,7 @@ export const columns = {
'shared_links.password',
],
user: userColumns,
userWithPrefix: ['users.id', 'users.name', 'users.email', 'users.profileImagePath', 'users.profileChangedAt'],
userWithPrefix: userWithPrefixColumns,
userAdmin: [
...userColumns,
'createdAt',
@@ -323,6 +333,7 @@ export const columns = {
],
tag: ['tags.id', 'tags.value', 'tags.createdAt', 'tags.updatedAt', 'tags.color', 'tags.parentId'],
apiKey: ['id', 'name', 'userId', 'createdAt', 'updatedAt', 'permissions'],
notification: ['id', 'createdAt', 'level', 'type', 'title', 'description', 'data', 'readAt'],
syncAsset: [
'id',
'ownerId',

server/src/db.d.ts vendored
View File

@@ -11,6 +11,8 @@ import {
AssetStatus,
AssetType,
MemoryType,
NotificationLevel,
NotificationType,
Permission,
SharedLinkType,
SourceType,
@@ -263,6 +265,21 @@ export interface Memories {
updateId: Generated<string>;
}
export interface Notifications {
id: Generated<string>;
createdAt: Generated<Timestamp>;
updatedAt: Generated<Timestamp>;
deletedAt: Timestamp | null;
updateId: Generated<string>;
userId: string;
level: Generated<NotificationLevel>;
type: NotificationType;
title: string;
description: string | null;
data: any | null;
readAt: Timestamp | null;
}
export interface MemoriesAssetsAssets {
assetsId: string;
memoriesId: string;
@@ -463,6 +480,7 @@ export interface DB {
memories: Memories;
memories_assets_assets: MemoriesAssetsAssets;
migrations: Migrations;
notifications: Notifications;
move_history: MoveHistory;
naturalearth_countries: NaturalearthCountries;
partners_audit: PartnersAudit;

View File

@@ -1,4 +1,7 @@
import { IsString } from 'class-validator';
import { ApiProperty } from '@nestjs/swagger';
import { IsEnum, IsString } from 'class-validator';
import { NotificationLevel, NotificationType } from 'src/enum';
import { Optional, ValidateBoolean, ValidateDate, ValidateUUID } from 'src/validation';
export class TestEmailResponseDto {
messageId!: string;
@@ -11,3 +14,106 @@ export class TemplateDto {
@IsString()
template!: string;
}
export class NotificationDto {
id!: string;
@ValidateDate()
createdAt!: Date;
@ApiProperty({ enum: NotificationLevel, enumName: 'NotificationLevel' })
level!: NotificationLevel;
@ApiProperty({ enum: NotificationType, enumName: 'NotificationType' })
type!: NotificationType;
title!: string;
description?: string;
data?: any;
readAt?: Date;
}
export class NotificationSearchDto {
@Optional()
@ValidateUUID({ optional: true })
id?: string;
@IsEnum(NotificationLevel)
@Optional()
@ApiProperty({ enum: NotificationLevel, enumName: 'NotificationLevel' })
level?: NotificationLevel;
@IsEnum(NotificationType)
@Optional()
@ApiProperty({ enum: NotificationType, enumName: 'NotificationType' })
type?: NotificationType;
@ValidateBoolean({ optional: true })
unread?: boolean;
}
export class NotificationCreateDto {
@Optional()
@IsEnum(NotificationLevel)
@ApiProperty({ enum: NotificationLevel, enumName: 'NotificationLevel' })
level?: NotificationLevel;
@IsEnum(NotificationType)
@Optional()
@ApiProperty({ enum: NotificationType, enumName: 'NotificationType' })
type?: NotificationType;
@IsString()
title!: string;
@IsString()
@Optional({ nullable: true })
description?: string | null;
@Optional({ nullable: true })
data?: any;
@ValidateDate({ optional: true, nullable: true })
readAt?: Date | null;
@ValidateUUID()
userId!: string;
}
export class NotificationUpdateDto {
@ValidateDate({ optional: true, nullable: true })
readAt?: Date | null;
}
export class NotificationUpdateAllDto {
@ValidateUUID({ each: true, optional: true })
ids!: string[];
@ValidateDate({ optional: true, nullable: true })
readAt?: Date | null;
}
export class NotificationDeleteAllDto {
@ValidateUUID({ each: true })
ids!: string[];
}
export type MapNotification = {
id: string;
createdAt: Date;
updateId?: string;
level: NotificationLevel;
type: NotificationType;
data: any | null;
title: string;
description: string | null;
readAt: Date | null;
};
export const mapNotification = (notification: MapNotification): NotificationDto => {
return {
id: notification.id,
createdAt: notification.createdAt,
level: notification.level,
type: notification.type,
title: notification.title,
description: notification.description ?? undefined,
data: notification.data ?? undefined,
readAt: notification.readAt ?? undefined,
};
};
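A worked example of the null-to-undefined mapping above: optional fields that are null on the database row simply drop out of the serialized response. Values below are illustrative.

// Given a row like this...
const row = {
  id: 'some-uuid',
  createdAt: new Date('2025-04-29T01:58:00Z'),
  level: NotificationLevel.Info,
  type: NotificationType.SystemMessage,
  title: 'Welcome',
  description: null,
  data: null,
  readAt: null,
};
// ...mapNotification(row) keeps { id, createdAt, level, type, title };
// description, data, and readAt become undefined and are omitted from the JSON payload.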

View File

@@ -137,11 +137,6 @@ export class UserPreferencesUpdateDto {
purchase?: PurchaseUpdate;
}
class AvatarResponse {
@ApiProperty({ enumName: 'UserAvatarColor', enum: UserAvatarColor })
color!: UserAvatarColor;
}
class RatingsResponse {
enabled: boolean = false;
}
@@ -195,7 +190,6 @@ export class UserPreferencesResponseDto implements UserPreferences {
ratings!: RatingsResponse;
sharedLinks!: SharedLinksResponse;
tags!: TagsResponse;
avatar!: AvatarResponse;
emailNotifications!: EmailNotificationsResponse;
download!: DownloadResponse;
purchase!: PurchaseResponse;

View File

@@ -1,10 +1,9 @@
import { ApiProperty } from '@nestjs/swagger';
import { Transform } from 'class-transformer';
import { IsBoolean, IsEmail, IsNotEmpty, IsNumber, IsString, Min } from 'class-validator';
import { IsBoolean, IsEmail, IsEnum, IsNotEmpty, IsNumber, IsString, Min } from 'class-validator';
import { User, UserAdmin } from 'src/database';
import { UserAvatarColor, UserMetadataKey, UserStatus } from 'src/enum';
import { UserMetadataItem } from 'src/types';
import { getPreferences } from 'src/utils/preferences';
import { Optional, ValidateBoolean, toEmail, toSanitized } from 'src/validation';
export class UserUpdateMeDto {
@@ -23,6 +22,11 @@ export class UserUpdateMeDto {
@IsString()
@IsNotEmpty()
name?: string;
@Optional({ nullable: true })
@IsEnum(UserAvatarColor)
@ApiProperty({ enumName: 'UserAvatarColor', enum: UserAvatarColor })
avatarColor?: UserAvatarColor | null;
}
export class UserResponseDto {
@@ -41,13 +45,21 @@ export class UserLicense {
activatedAt!: Date;
}
const emailToAvatarColor = (email: string): UserAvatarColor => {
const values = Object.values(UserAvatarColor);
const randomIndex = Math.floor(
[...email].map((letter) => letter.codePointAt(0) ?? 0).reduce((a, b) => a + b, 0) % values.length,
);
return values[randomIndex];
};
export const mapUser = (entity: User | UserAdmin): UserResponseDto => {
return {
id: entity.id,
email: entity.email,
name: entity.name,
profileImagePath: entity.profileImagePath,
avatarColor: getPreferences(entity.email, (entity as UserAdmin).metadata || []).avatar.color,
avatarColor: entity.avatarColor ?? emailToAvatarColor(entity.email),
profileChangedAt: entity.profileChangedAt,
};
};
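The fallback color is a deterministic hash of the email, so a user without a stored avatarColor keeps getting the same color on every request. A quick illustration (the email value is hypothetical):

// Sum of code points, then modulo the number of UserAvatarColor values.
const email = 'demo@immich.app';
const sum = [...email].map((letter) => letter.codePointAt(0) ?? 0).reduce((a, b) => a + b, 0);
const colors = Object.values(UserAvatarColor);
const fallback = colors[Math.floor(sum % colors.length)]; // same email -> same color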
@@ -69,6 +81,11 @@ export class UserAdminCreateDto {
@IsString()
name!: string;
@Optional({ nullable: true })
@IsEnum(UserAvatarColor)
@ApiProperty({ enumName: 'UserAvatarColor', enum: UserAvatarColor })
avatarColor?: UserAvatarColor | null;
@Optional({ nullable: true })
@IsString()
@Transform(toSanitized)
@@ -104,6 +121,11 @@ export class UserAdminUpdateDto {
@IsNotEmpty()
name?: string;
@Optional({ nullable: true })
@IsEnum(UserAvatarColor)
@ApiProperty({ enumName: 'UserAvatarColor', enum: UserAvatarColor })
avatarColor?: UserAvatarColor | null;
@Optional({ nullable: true })
@IsString()
@Transform(toSanitized)

View File

@@ -126,6 +126,11 @@ export enum Permission {
MEMORY_UPDATE = 'memory.update',
MEMORY_DELETE = 'memory.delete',
NOTIFICATION_CREATE = 'notification.create',
NOTIFICATION_READ = 'notification.read',
NOTIFICATION_UPDATE = 'notification.update',
NOTIFICATION_DELETE = 'notification.delete',
PARTNER_CREATE = 'partner.create',
PARTNER_READ = 'partner.read',
PARTNER_UPDATE = 'partner.update',
@@ -332,6 +337,11 @@ export enum ImageFormat {
WEBP = 'webp',
}
export enum RawExtractedFormat {
JPEG = 'jpeg',
JXL = 'jxl',
}
export enum LogLevel {
VERBOSE = 'verbose',
DEBUG = 'debug',
@@ -515,6 +525,7 @@ export enum JobName {
NOTIFY_SIGNUP = 'notify-signup',
NOTIFY_ALBUM_INVITE = 'notify-album-invite',
NOTIFY_ALBUM_UPDATE = 'notify-album-update',
NOTIFICATIONS_CLEANUP = 'notifications-cleanup',
SEND_EMAIL = 'notification-send-email',
// Version check
@@ -580,3 +591,17 @@ export enum SyncEntityType {
PartnerAssetDeleteV1 = 'PartnerAssetDeleteV1',
PartnerAssetExifV1 = 'PartnerAssetExifV1',
}
export enum NotificationLevel {
Success = 'success',
Error = 'error',
Warning = 'warning',
Info = 'info',
}
export enum NotificationType {
JobFailed = 'JobFailed',
BackupFailed = 'BackupFailed',
SystemMessage = 'SystemMessage',
Custom = 'Custom',
}

View File

@@ -157,6 +157,15 @@ where
and "memories"."ownerId" = $2
and "memories"."deletedAt" is null
-- AccessRepository.notification.checkOwnerAccess
select
"notifications"."id"
from
"notifications"
where
"notifications"."id" in ($1)
and "notifications"."userId" = $2
-- AccessRepository.person.checkOwnerAccess
select
"person"."id"

View File

@@ -13,6 +13,7 @@ from
"users"."id",
"users"."name",
"users"."email",
"users"."avatarColor",
"users"."profileImagePath",
"users"."profileChangedAt"
from
@@ -44,6 +45,7 @@ returning
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from

View File

@@ -12,6 +12,7 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from
@@ -36,6 +37,7 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from
@@ -100,6 +102,7 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from
@@ -124,6 +127,7 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from
@@ -191,6 +195,7 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from
@@ -215,6 +220,7 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from
@@ -269,6 +275,7 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from
@@ -292,6 +299,7 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from
@@ -353,6 +361,7 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from

View File

@@ -259,6 +259,130 @@ from
where
"assets"."id" = $2
-- AssetJobRepository.getForSyncAssets
select
"assets"."id",
"assets"."isOffline",
"assets"."libraryId",
"assets"."originalPath",
"assets"."status",
"assets"."fileModifiedAt"
from
"assets"
where
"assets"."id" = any ($1::uuid[])
-- AssetJobRepository.getForAssetDeletion
select
"assets"."id",
"assets"."isVisible",
"assets"."libraryId",
"assets"."ownerId",
"assets"."livePhotoVideoId",
"assets"."sidecarPath",
"assets"."encodedVideoPath",
"assets"."originalPath",
to_json("exif") as "exifInfo",
(
select
coalesce(json_agg(agg), '[]')
from
(
select
"asset_faces".*,
"person" as "person"
from
"asset_faces"
left join lateral (
select
"person".*
from
"person"
where
"asset_faces"."personId" = "person"."id"
) as "person" on true
where
"asset_faces"."assetId" = "assets"."id"
and "asset_faces"."deletedAt" is null
) as agg
) as "faces",
(
select
coalesce(json_agg(agg), '[]')
from
(
select
"asset_files"."id",
"asset_files"."path",
"asset_files"."type"
from
"asset_files"
where
"asset_files"."assetId" = "assets"."id"
) as agg
) as "files",
to_json("stacked_assets") as "stack"
from
"assets"
left join "exif" on "assets"."id" = "exif"."assetId"
left join "asset_stack" on "asset_stack"."id" = "assets"."stackId"
left join lateral (
select
"asset_stack"."id",
"asset_stack"."primaryAssetId",
array_agg("stacked") as "assets"
from
"assets" as "stacked"
where
"stacked"."deletedAt" is not null
and "stacked"."isArchived" = $1
and "stacked"."stackId" = "asset_stack"."id"
group by
"asset_stack"."id"
) as "stacked_assets" on "asset_stack"."id" is not null
where
"assets"."id" = $2
-- AssetJobRepository.streamForVideoConversion
select
"assets"."id"
from
"assets"
where
"assets"."type" = $1
and (
"assets"."encodedVideoPath" is null
or "assets"."encodedVideoPath" = $2
)
and "assets"."isVisible" = $3
and "assets"."deletedAt" is null
-- AssetJobRepository.getForVideoConversion
select
"assets"."id",
"assets"."ownerId",
"assets"."originalPath",
"assets"."encodedVideoPath"
from
"assets"
where
"assets"."id" = $1
and "assets"."type" = $2
-- AssetJobRepository.streamForMetadataExtraction
select
"assets"."id"
from
"assets"
left join "asset_job_status" on "asset_job_status"."assetId" = "assets"."id"
where
(
"asset_job_status"."metadataExtractedAt" is null
or "asset_job_status"."assetId" is null
)
and "assets"."isVisible" = $1
and "assets"."deletedAt" is null
-- AssetJobRepository.getForStorageTemplateJob
select
"assets"."id",

View File

@@ -0,0 +1,58 @@
-- NOTE: This file is auto generated by ./sql-generator
-- NotificationRepository.cleanup
delete from "notifications"
where
(
(
"deletedAt" is not null
and "deletedAt" < $1
)
or (
"readAt" > $2
and "createdAt" < $3
)
or (
"readAt" = $4
and "createdAt" < $5
)
)
-- NotificationRepository.search
select
"id",
"createdAt",
"level",
"type",
"title",
"description",
"data",
"readAt"
from
"notifications"
where
"userId" = $1
and "deletedAt" is null
order by
"createdAt" desc
-- NotificationRepository.search (unread)
select
"id",
"createdAt",
"level",
"type",
"title",
"description",
"data",
"readAt"
from
"notifications"
where
(
"userId" = $1
and "readAt" is null
)
and "deletedAt" is null
order by
"createdAt" desc

View File

@@ -12,6 +12,7 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from
@@ -29,6 +30,7 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from
@@ -61,6 +63,7 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from
@@ -78,6 +81,7 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from
@@ -112,6 +116,7 @@ returning
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from
@@ -129,6 +134,7 @@ returning
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from
@@ -156,6 +162,7 @@ returning
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from
@@ -173,6 +180,7 @@ returning
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt"
from

View File

@@ -5,6 +5,7 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt",
"createdAt",
@@ -43,6 +44,7 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt",
"createdAt",
@@ -90,6 +92,7 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt",
"createdAt",
@@ -128,6 +131,7 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt",
"createdAt",
@@ -152,6 +156,7 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt",
"createdAt",
@@ -198,6 +203,7 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt",
"createdAt",
@@ -235,6 +241,7 @@ select
"id",
"name",
"email",
"avatarColor",
"profileImagePath",
"profileChangedAt",
"createdAt",

View File

@@ -279,6 +279,26 @@ class AuthDeviceAccess {
}
}
class NotificationAccess {
constructor(private db: Kysely<DB>) {}
@GenerateSql({ params: [DummyValue.UUID, DummyValue.UUID_SET] })
@ChunkedSet({ paramIndex: 1 })
async checkOwnerAccess(userId: string, notificationIds: Set<string>) {
if (notificationIds.size === 0) {
return new Set<string>();
}
return this.db
.selectFrom('notifications')
.select('notifications.id')
.where('notifications.id', 'in', [...notificationIds])
.where('notifications.userId', '=', userId)
.execute()
.then((stacks) => new Set(stacks.map((stack) => stack.id)));
}
}
class StackAccess {
constructor(private db: Kysely<DB>) {}
@@ -426,6 +446,7 @@ export class AccessRepository {
asset: AssetAccess;
authDevice: AuthDeviceAccess;
memory: MemoryAccess;
notification: NotificationAccess;
person: PersonAccess;
partner: PartnerAccess;
stack: StackAccess;
@@ -438,6 +459,7 @@ export class AccessRepository {
this.asset = new AssetAccess(db);
this.authDevice = new AuthDeviceAccess(db);
this.memory = new MemoryAccess(db);
this.notification = new NotificationAccess(db);
this.person = new PersonAccess(db);
this.partner = new PartnerAccess(db);
this.stack = new StackAccess(db);
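A hedged sketch of how a service method would combine this access class with the new Permission.NOTIFICATION_* values. requireAccess is assumed to be the existing BaseService helper that fans out to AccessRepository; the call shape is an assumption.

// Illustrative only: ownership check before updating a notification.
async update(auth: AuthDto, id: string, dto: NotificationUpdateDto): Promise<NotificationDto> {
  await this.requireAccess({ auth, permission: Permission.NOTIFICATION_UPDATE, ids: [id] });
  const item = await this.notificationRepository.update(id, { readAt: dto.readAt });
  return mapNotification(item);
}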

View File

@@ -2,12 +2,21 @@ import { Injectable } from '@nestjs/common';
import { Kysely } from 'kysely';
import { jsonArrayFrom } from 'kysely/helpers/postgres';
import { InjectKysely } from 'nestjs-kysely';
import { columns } from 'src/database';
import { Asset, columns } from 'src/database';
import { DB } from 'src/db';
import { DummyValue, GenerateSql } from 'src/decorators';
import { AssetFileType } from 'src/enum';
import { AssetFileType, AssetType } from 'src/enum';
import { StorageAsset } from 'src/types';
import { anyUuid, asUuid, withExifInner, withFaces, withFiles } from 'src/utils/database';
import {
anyUuid,
asUuid,
toJson,
withExif,
withExifInner,
withFaces,
withFacesAndPeople,
withFiles,
} from 'src/utils/database';
@Injectable()
export class AssetJobRepository {
@@ -148,6 +157,7 @@ export class AssetJobRepository {
.executeTakeFirst();
}
@GenerateSql({ params: [[DummyValue.UUID]] })
getForSyncAssets(ids: string[]) {
return this.db
.selectFrom('assets')
@@ -163,6 +173,84 @@ export class AssetJobRepository {
.execute();
}
@GenerateSql({ params: [DummyValue.UUID] })
getForAssetDeletion(id: string) {
return this.db
.selectFrom('assets')
.select([
'assets.id',
'assets.isVisible',
'assets.libraryId',
'assets.ownerId',
'assets.livePhotoVideoId',
'assets.sidecarPath',
'assets.encodedVideoPath',
'assets.originalPath',
])
.$call(withExif)
.select(withFacesAndPeople)
.select(withFiles)
.leftJoin('asset_stack', 'asset_stack.id', 'assets.stackId')
.leftJoinLateral(
(eb) =>
eb
.selectFrom('assets as stacked')
.select(['asset_stack.id', 'asset_stack.primaryAssetId'])
.select((eb) => eb.fn<Asset[]>('array_agg', [eb.table('stacked')]).as('assets'))
.where('stacked.deletedAt', 'is not', null)
.where('stacked.isArchived', '=', false)
.whereRef('stacked.stackId', '=', 'asset_stack.id')
.groupBy('asset_stack.id')
.as('stacked_assets'),
(join) => join.on('asset_stack.id', 'is not', null),
)
.select((eb) => toJson(eb, 'stacked_assets').as('stack'))
.where('assets.id', '=', id)
.executeTakeFirst();
}
@GenerateSql({ params: [], stream: true })
streamForVideoConversion(force?: boolean) {
return this.db
.selectFrom('assets')
.select(['assets.id'])
.where('assets.type', '=', AssetType.VIDEO)
.$if(!force, (qb) =>
qb
.where((eb) => eb.or([eb('assets.encodedVideoPath', 'is', null), eb('assets.encodedVideoPath', '=', '')]))
.where('assets.isVisible', '=', true),
)
.where('assets.deletedAt', 'is', null)
.stream();
}
@GenerateSql({ params: [DummyValue.UUID] })
getForVideoConversion(id: string) {
return this.db
.selectFrom('assets')
.select(['assets.id', 'assets.ownerId', 'assets.originalPath', 'assets.encodedVideoPath'])
.where('assets.id', '=', id)
.where('assets.type', '=', AssetType.VIDEO)
.executeTakeFirst();
}
@GenerateSql({ params: [], stream: true })
streamForMetadataExtraction(force?: boolean) {
return this.db
.selectFrom('assets')
.select(['assets.id'])
.$if(!force, (qb) =>
qb
.leftJoin('asset_job_status', 'asset_job_status.assetId', 'assets.id')
.where((eb) =>
eb.or([eb('asset_job_status.metadataExtractedAt', 'is', null), eb('asset_job_status.assetId', 'is', null)]),
)
.where('assets.isVisible', '=', true),
)
.where('assets.deletedAt', 'is', null)
.stream();
}
private storageTemplateAssetQuery() {
return this.db
.selectFrom('assets')

View File

@@ -80,21 +80,12 @@ describe('getEnv', () => {
const { database } = getEnv();
expect(database).toEqual({
config: {
kysely: expect.objectContaining({
host: 'database',
port: 5432,
database: 'immich',
username: 'postgres',
password: 'postgres',
}),
typeorm: expect.objectContaining({
type: 'postgres',
host: 'database',
port: 5432,
database: 'immich',
username: 'postgres',
password: 'postgres',
}),
connectionType: 'parts',
host: 'database',
port: 5432,
database: 'immich',
username: 'postgres',
password: 'postgres',
},
skipMigrations: false,
vectorExtension: 'vectors',
@@ -110,88 +101,9 @@ describe('getEnv', () => {
it('should use DB_URL', () => {
process.env.DB_URL = 'postgres://postgres1:postgres2@database1:54320/immich';
const { database } = getEnv();
expect(database.config.kysely).toMatchObject({
host: 'database1',
password: 'postgres2',
user: 'postgres1',
port: 54_320,
database: 'immich',
});
});
it('should handle sslmode=require', () => {
process.env.DB_URL = 'postgres://postgres1:postgres2@database1:54320/immich?sslmode=require';
const { database } = getEnv();
expect(database.config.kysely).toMatchObject({ ssl: {} });
});
it('should handle sslmode=prefer', () => {
process.env.DB_URL = 'postgres://postgres1:postgres2@database1:54320/immich?sslmode=prefer';
const { database } = getEnv();
expect(database.config.kysely).toMatchObject({ ssl: {} });
});
it('should handle sslmode=verify-ca', () => {
process.env.DB_URL = 'postgres://postgres1:postgres2@database1:54320/immich?sslmode=verify-ca';
const { database } = getEnv();
expect(database.config.kysely).toMatchObject({ ssl: {} });
});
it('should handle sslmode=verify-full', () => {
process.env.DB_URL = 'postgres://postgres1:postgres2@database1:54320/immich?sslmode=verify-full';
const { database } = getEnv();
expect(database.config.kysely).toMatchObject({ ssl: {} });
});
it('should handle sslmode=no-verify', () => {
process.env.DB_URL = 'postgres://postgres1:postgres2@database1:54320/immich?sslmode=no-verify';
const { database } = getEnv();
expect(database.config.kysely).toMatchObject({ ssl: { rejectUnauthorized: false } });
});
it('should handle ssl=true', () => {
process.env.DB_URL = 'postgres://postgres1:postgres2@database1:54320/immich?ssl=true';
const { database } = getEnv();
expect(database.config.kysely).toMatchObject({ ssl: true });
});
it('should reject invalid ssl', () => {
process.env.DB_URL = 'postgres://postgres1:postgres2@database1:54320/immich?ssl=invalid';
expect(() => getEnv()).toThrowError('Invalid ssl option: invalid');
});
it('should handle socket: URLs', () => {
process.env.DB_URL = 'socket:/run/postgresql?db=database1';
const { database } = getEnv();
expect(database.config.kysely).toMatchObject({
host: '/run/postgresql',
database: 'database1',
});
});
it('should handle sockets in postgres: URLs', () => {
process.env.DB_URL = 'postgres:///database2?host=/path/to/socket';
const { database } = getEnv();
expect(database.config.kysely).toMatchObject({
host: '/path/to/socket',
database: 'database2',
expect(database.config).toMatchObject({
connectionType: 'url',
url: 'postgres://postgres1:postgres2@database1:54320/immich',
});
});
});

View File

@@ -7,8 +7,7 @@ import { Request, Response } from 'express';
import { RedisOptions } from 'ioredis';
import { CLS_ID, ClsModuleOptions } from 'nestjs-cls';
import { OpenTelemetryModuleOptions } from 'nestjs-otel/lib/interfaces';
import { join, resolve } from 'node:path';
import { parse } from 'pg-connection-string';
import { join } from 'node:path';
import { citiesFile, excludePaths, IWorker } from 'src/constants';
import { Telemetry } from 'src/decorators';
import { EnvDto } from 'src/dtos/env.dto';
@@ -22,9 +21,7 @@ import {
QueueName,
} from 'src/enum';
import { DatabaseConnectionParams, VectorExtension } from 'src/types';
import { isValidSsl, PostgresConnectionConfig } from 'src/utils/database';
import { setDifference } from 'src/utils/set';
import { PostgresConnectionOptions } from 'typeorm/driver/postgres/PostgresConnectionOptions.js';
export interface EnvData {
host?: string;
@@ -59,7 +56,7 @@ export interface EnvData {
};
database: {
config: { typeorm: PostgresConnectionOptions & DatabaseConnectionParams; kysely: PostgresConnectionConfig };
config: DatabaseConnectionParams;
skipMigrations: boolean;
vectorExtension: VectorExtension;
};
@@ -152,14 +149,10 @@ const getEnv = (): EnvData => {
const isProd = environment === ImmichEnvironment.PRODUCTION;
const buildFolder = dto.IMMICH_BUILD_DATA || '/build';
const folders = {
// eslint-disable-next-line unicorn/prefer-module
dist: resolve(`${__dirname}/..`),
geodata: join(buildFolder, 'geodata'),
web: join(buildFolder, 'www'),
};
const databaseUrl = dto.DB_URL;
let redisConfig = {
host: dto.REDIS_HOSTNAME || 'redis',
port: dto.REDIS_PORT || 6379,
@@ -191,30 +184,16 @@ const getEnv = (): EnvData => {
}
}
const parts = {
connectionType: 'parts',
host: dto.DB_HOSTNAME || 'database',
port: dto.DB_PORT || 5432,
username: dto.DB_USERNAME || 'postgres',
password: dto.DB_PASSWORD || 'postgres',
database: dto.DB_DATABASE_NAME || 'immich',
} as const;
let parsedOptions: PostgresConnectionConfig = parts;
if (dto.DB_URL) {
const parsed = parse(dto.DB_URL);
if (!isValidSsl(parsed.ssl)) {
throw new Error(`Invalid ssl option: ${parsed.ssl}`);
}
parsedOptions = {
...parsed,
ssl: parsed.ssl,
host: parsed.host ?? undefined,
port: parsed.port ? Number(parsed.port) : undefined,
database: parsed.database ?? undefined,
};
}
const databaseConnection: DatabaseConnectionParams = dto.DB_URL
? { connectionType: 'url', url: dto.DB_URL }
: {
connectionType: 'parts',
host: dto.DB_HOSTNAME || 'database',
port: dto.DB_PORT || 5432,
username: dto.DB_USERNAME || 'postgres',
password: dto.DB_PASSWORD || 'postgres',
database: dto.DB_DATABASE_NAME || 'immich',
};
return {
host: dto.IMMICH_HOST,
@@ -269,21 +248,7 @@ const getEnv = (): EnvData => {
},
database: {
config: {
typeorm: {
type: 'postgres',
entities: [],
migrations: [`${folders.dist}/migrations` + '/*.{js,ts}'],
subscribers: [],
migrationsRun: false,
synchronize: false,
connectTimeoutMS: 10_000, // 10 seconds
parseInt8: true,
...(databaseUrl ? { connectionType: 'url', url: databaseUrl } : parts),
},
kysely: parsedOptions,
},
config: databaseConnection,
skipMigrations: dto.DB_SKIP_MIGRATIONS ?? false,
vectorExtension: dto.DB_VECTOR_EXTENSION === 'pgvector' ? DatabaseExtension.VECTOR : DatabaseExtension.VECTORS,
},
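In other words, the same environment now resolves to one of two shapes (defaults as shown above; the URL value is illustrative):

// DB_URL=postgres://postgres:postgres@database:5432/immich
//   -> { connectionType: 'url', url: 'postgres://postgres:postgres@database:5432/immich' }
//
// DB_HOSTNAME / DB_PORT / DB_USERNAME / DB_PASSWORD / DB_DATABASE_NAME (or their defaults)
//   -> { connectionType: 'parts', host: 'database', port: 5432, username: 'postgres',
//        password: 'postgres', database: 'immich' }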

View File

@@ -3,7 +3,7 @@ import AsyncLock from 'async-lock';
import { FileMigrationProvider, Kysely, Migrator, sql, Transaction } from 'kysely';
import { InjectKysely } from 'nestjs-kysely';
import { readdir } from 'node:fs/promises';
import { join } from 'node:path';
import { join, resolve } from 'node:path';
import semver from 'semver';
import { EXTENSION_NAMES, POSTGRES_VERSION_RANGE, VECTOR_VERSION_RANGE, VECTORS_VERSION_RANGE } from 'src/constants';
import { DB } from 'src/db';
@@ -205,8 +205,29 @@ export class DatabaseRepository {
const { rows } = await tableExists.execute(this.db);
const hasTypeOrmMigrations = !!rows[0]?.result;
if (hasTypeOrmMigrations) {
// eslint-disable-next-line unicorn/prefer-module
const dist = resolve(`${__dirname}/..`);
this.logger.debug('Running typeorm migrations');
const dataSource = new DataSource(database.config.typeorm);
const dataSource = new DataSource({
type: 'postgres',
entities: [],
subscribers: [],
migrations: [`${dist}/migrations` + '/*.{js,ts}'],
migrationsRun: false,
synchronize: false,
connectTimeoutMS: 10_000, // 10 seconds
parseInt8: true,
...(database.config.connectionType === 'url'
? { url: database.config.url }
: {
host: database.config.host,
port: database.config.port,
username: database.config.username,
password: database.config.password,
database: database.config.database,
}),
});
await dataSource.initialize();
await dataSource.runMigrations(options);
await dataSource.destroy();

View File

@@ -14,6 +14,7 @@ import { SystemConfig } from 'src/config';
import { EventConfig } from 'src/decorators';
import { AssetResponseDto } from 'src/dtos/asset-response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { NotificationDto } from 'src/dtos/notification.dto';
import { ReleaseNotification, ServerVersionResponseDto } from 'src/dtos/server.dto';
import { ImmichWorker, MetadataKey, QueueName } from 'src/enum';
import { ConfigRepository } from 'src/repositories/config.repository';
@@ -64,6 +65,7 @@ type EventMap = {
'assets.restore': [{ assetIds: string[]; userId: string }];
'job.start': [QueueName, JobItem];
'job.failed': [{ job: JobItem; error: Error | any }];
// session events
'session.delete': [{ sessionId: string }];
@@ -104,6 +106,7 @@ export interface ClientEventMap {
on_server_version: [ServerVersionResponseDto];
on_config_update: [];
on_new_release: [ReleaseNotification];
on_notification: [NotificationDto];
on_session_delete: [string];
}
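With on_notification in the client event map, the web and mobile apps can react to persisted notifications in real time. A minimal socket.io-style listener as a sketch; the server URL, path, and cookie-based auth wiring are assumptions, not shown in this diff.

import { io } from 'socket.io-client';
import { NotificationDto } from 'src/dtos/notification.dto';

// Illustrative client-side subscriber.
const socket = io('http://localhost:2283', { withCredentials: true });

socket.on('on_notification', (notification: NotificationDto) => {
  console.log(`[${notification.level}] ${notification.title}`, notification.description ?? '');
});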

View File

@@ -22,6 +22,7 @@ import { MediaRepository } from 'src/repositories/media.repository';
import { MemoryRepository } from 'src/repositories/memory.repository';
import { MetadataRepository } from 'src/repositories/metadata.repository';
import { MoveRepository } from 'src/repositories/move.repository';
import { NotificationRepository } from 'src/repositories/notification.repository';
import { OAuthRepository } from 'src/repositories/oauth.repository';
import { PartnerRepository } from 'src/repositories/partner.repository';
import { PersonRepository } from 'src/repositories/person.repository';
@@ -55,6 +56,7 @@ export const repositories = [
CryptoRepository,
DatabaseRepository,
DownloadRepository,
EmailRepository,
EventRepository,
JobRepository,
LibraryRepository,
@@ -65,7 +67,7 @@ export const repositories = [
MemoryRepository,
MetadataRepository,
MoveRepository,
EmailRepository,
NotificationRepository,
OAuthRepository,
PartnerRepository,
PersonRepository,

View File

@@ -7,7 +7,7 @@ import { Writable } from 'node:stream';
import sharp from 'sharp';
import { ORIENTATION_TO_SHARP_ROTATION } from 'src/constants';
import { Exif } from 'src/database';
import { Colorspace, LogLevel } from 'src/enum';
import { Colorspace, LogLevel, RawExtractedFormat } from 'src/enum';
import { LoggingRepository } from 'src/repositories/logging.repository';
import {
DecodeToBufferOptions,
@@ -36,34 +36,51 @@ type ProgressEvent = {
percent?: number;
};
export type ExtractResult = {
buffer: Buffer;
format: RawExtractedFormat;
};
@Injectable()
export class MediaRepository {
constructor(private logger: LoggingRepository) {
this.logger.setContext(MediaRepository.name);
}
async extract(input: string, output: string): Promise<boolean> {
/**
*
* @param input file path to the input image
* @returns ExtractResult if succeeded, or null if failed
*/
async extract(input: string): Promise<ExtractResult | null> {
try {
// remove existing output file if it exists
// as exiftool-vendored does not support overwriting via "-w!" flag
// and throws "1 files could not be read" error when the output file exists
await fs.unlink(output).catch(() => null);
await exiftool.extractBinaryTag('JpgFromRaw2', input, output);
} catch {
try {
this.logger.debug('Extracting JPEG from RAW image:', input);
await exiftool.extractJpgFromRaw(input, output);
} catch (error: any) {
this.logger.debug('Could not extract JPEG from image, trying preview', error.message);
try {
await exiftool.extractPreview(input, output);
} catch (error: any) {
this.logger.debug('Could not extract preview from image', error.message);
return false;
}
}
const buffer = await exiftool.extractBinaryTagToBuffer('JpgFromRaw2', input);
return { buffer, format: RawExtractedFormat.JPEG };
} catch (error: any) {
this.logger.debug('Could not extract JpgFromRaw2 buffer from image, trying JPEG from RAW next', error.message);
}
try {
const buffer = await exiftool.extractBinaryTagToBuffer('JpgFromRaw', input);
return { buffer, format: RawExtractedFormat.JPEG };
} catch (error: any) {
this.logger.debug('Could not extract JPEG buffer from image, trying PreviewJXL next', error.message);
}
try {
const buffer = await exiftool.extractBinaryTagToBuffer('PreviewJXL', input);
return { buffer, format: RawExtractedFormat.JXL };
} catch (error: any) {
this.logger.debug('Could not extract PreviewJXL buffer from image, trying PreviewImage next', error.message);
}
try {
const buffer = await exiftool.extractBinaryTagToBuffer('PreviewImage', input);
return { buffer, format: RawExtractedFormat.JPEG };
} catch (error: any) {
this.logger.debug('Could not extract preview buffer from image', error.message);
return null;
}
return true;
}
async writeExif(tags: Partial<Exif>, output: string): Promise<boolean> {
@@ -104,7 +121,7 @@ export class MediaRepository {
}
}
decodeImage(input: string, options: DecodeToBufferOptions) {
decodeImage(input: string | Buffer, options: DecodeToBufferOptions) {
return this.getImageDecodingPipeline(input, options).raw().toBuffer({ resolveWithObject: true });
}
@@ -235,7 +252,7 @@ export class MediaRepository {
});
}
async getImageDimensions(input: string): Promise<ImageDimensions> {
async getImageDimensions(input: string | Buffer): Promise<ImageDimensions> {
const { width = 0, height = 0 } = await sharp(input).metadata();
return { width, height };
}
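Since extract() now returns an in-memory buffer (or null) and decodeImage/getImageDimensions accept string | Buffer, a caller can skip the temporary extracted file entirely. A rough sketch of the composition; asset and decodeOptions are placeholders, and this is not the exact media.service code.

// Illustrative: fall back to the original path when no embedded preview exists.
const extracted = await this.mediaRepository.extract(asset.originalPath);
const input = extracted ? extracted.buffer : asset.originalPath;

const { info } = await this.mediaRepository.decodeImage(input, decodeOptions);
// When a preview was found, extracted.format (RawExtractedFormat.JPEG or JXL)
// tells the caller how the embedded image is encoded, e.g. whether it can be reused as-is.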

View File

@@ -0,0 +1,103 @@
import { Insertable, Kysely, Updateable } from 'kysely';
import { DateTime } from 'luxon';
import { InjectKysely } from 'nestjs-kysely';
import { columns } from 'src/database';
import { DB, Notifications } from 'src/db';
import { DummyValue, GenerateSql } from 'src/decorators';
import { NotificationSearchDto } from 'src/dtos/notification.dto';
export class NotificationRepository {
constructor(@InjectKysely() private db: Kysely<DB>) {}
@GenerateSql({ params: [DummyValue.UUID] })
cleanup() {
return this.db
.deleteFrom('notifications')
.where((eb) =>
eb.or([
// remove soft-deleted notifications
eb.and([eb('deletedAt', 'is not', null), eb('deletedAt', '<', DateTime.now().minus({ days: 3 }).toJSDate())]),
// remove old, read notifications
eb.and([
// keep recently read messages around for a few days
eb('readAt', '>', DateTime.now().minus({ days: 2 }).toJSDate()),
eb('createdAt', '<', DateTime.now().minus({ days: 15 }).toJSDate()),
]),
eb.and([
// remove super old, unread notifications
eb('readAt', '=', null),
eb('createdAt', '<', DateTime.now().minus({ days: 30 }).toJSDate()),
]),
]),
)
.execute();
}
@GenerateSql({ params: [DummyValue.UUID, {}] }, { name: 'unread', params: [DummyValue.UUID, { unread: true }] })
search(userId: string, dto: NotificationSearchDto) {
return this.db
.selectFrom('notifications')
.select(columns.notification)
.where((qb) =>
qb.and({
userId,
id: dto.id,
level: dto.level,
type: dto.type,
readAt: dto.unread ? null : undefined,
}),
)
.where('deletedAt', 'is', null)
.orderBy('createdAt', 'desc')
.execute();
}
create(notification: Insertable<Notifications>) {
return this.db
.insertInto('notifications')
.values(notification)
.returning(columns.notification)
.executeTakeFirstOrThrow();
}
get(id: string) {
return this.db
.selectFrom('notifications')
.select(columns.notification)
.where('id', '=', id)
.where('deletedAt', 'is not', null)
.executeTakeFirst();
}
update(id: string, notification: Updateable<Notifications>) {
return this.db
.updateTable('notifications')
.set(notification)
.where('deletedAt', 'is', null)
.where('id', '=', id)
.returning(columns.notification)
.executeTakeFirstOrThrow();
}
async updateAll(ids: string[], notification: Updateable<Notifications>) {
await this.db.updateTable('notifications').set(notification).where('id', 'in', ids).execute();
}
async delete(id: string) {
await this.db
.updateTable('notifications')
.set({ deletedAt: DateTime.now().toJSDate() })
.where('id', '=', id)
.execute();
}
async deleteAll(ids: string[]) {
await this.db
.updateTable('notifications')
.set({ deletedAt: DateTime.now().toJSDate() })
.where('id', 'in', ids)
.execute();
}
}
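A hedged sketch of how NotificationService.search (called from the controller earlier in this commit) would likely delegate to this repository. columns.notification matches the MapNotification shape, so the rows can be fed straight into mapNotification; the surrounding service class is not shown in this diff.

// Illustrative only.
async search(auth: AuthDto, dto: NotificationSearchDto): Promise<NotificationDto[]> {
  const items = await this.notificationRepository.search(auth.user.id, dto);
  return items.map((item) => mapNotification(item));
}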

View File

@@ -28,6 +28,7 @@ import { MemoryTable } from 'src/schema/tables/memory.table';
import { MemoryAssetTable } from 'src/schema/tables/memory_asset.table';
import { MoveTable } from 'src/schema/tables/move.table';
import { NaturalEarthCountriesTable } from 'src/schema/tables/natural-earth-countries.table';
import { NotificationTable } from 'src/schema/tables/notification.table';
import { PartnerAuditTable } from 'src/schema/tables/partner-audit.table';
import { PartnerTable } from 'src/schema/tables/partner.table';
import { PersonTable } from 'src/schema/tables/person.table';
@@ -76,6 +77,7 @@ export class ImmichDatabase {
MemoryTable,
MoveTable,
NaturalEarthCountriesTable,
NotificationTable,
PartnerAuditTable,
PartnerTable,
PersonTable,

View File

@@ -0,0 +1,22 @@
import { Kysely, sql } from 'kysely';
export async function up(db: Kysely<any>): Promise<void> {
await sql`CREATE TABLE "notifications" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "deletedAt" timestamp with time zone, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7(), "userId" uuid, "level" character varying NOT NULL DEFAULT 'info', "type" character varying NOT NULL DEFAULT 'info', "data" jsonb, "title" character varying NOT NULL, "description" text, "readAt" timestamp with time zone);`.execute(db);
await sql`ALTER TABLE "notifications" ADD CONSTRAINT "PK_6a72c3c0f683f6462415e653c3a" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "notifications" ADD CONSTRAINT "FK_692a909ee0fa9383e7859f9b406" FOREIGN KEY ("userId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
await sql`CREATE INDEX "IDX_notifications_update_id" ON "notifications" ("updateId")`.execute(db);
await sql`CREATE INDEX "IDX_692a909ee0fa9383e7859f9b40" ON "notifications" ("userId")`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "notifications_updated_at"
BEFORE UPDATE ON "notifications"
FOR EACH ROW
EXECUTE FUNCTION updated_at();`.execute(db);
}
export async function down(db: Kysely<any>): Promise<void> {
await sql`DROP TRIGGER "notifications_updated_at" ON "notifications";`.execute(db);
await sql`DROP INDEX "IDX_notifications_update_id";`.execute(db);
await sql`DROP INDEX "IDX_692a909ee0fa9383e7859f9b40";`.execute(db);
await sql`ALTER TABLE "notifications" DROP CONSTRAINT "PK_6a72c3c0f683f6462415e653c3a";`.execute(db);
await sql`ALTER TABLE "notifications" DROP CONSTRAINT "FK_692a909ee0fa9383e7859f9b406";`.execute(db);
await sql`DROP TABLE "notifications";`.execute(db);
}

View File

@@ -0,0 +1,14 @@
import { Kysely, sql } from 'kysely';
export async function up(db: Kysely<any>): Promise<void> {
await sql`ALTER TABLE "users" ADD "avatarColor" character varying;`.execute(db);
await sql`
UPDATE "users"
SET "avatarColor" = "user_metadata"."value"->'avatar'->>'color'
FROM "user_metadata"
WHERE "users"."id" = "user_metadata"."userId" AND "user_metadata"."key" = 'preferences';`.execute(db);
}
export async function down(db: Kysely<any>): Promise<void> {
await sql`ALTER TABLE "users" DROP COLUMN "avatarColor";`.execute(db);
}

View File

@@ -0,0 +1,52 @@
import { UpdatedAtTrigger, UpdateIdColumn } from 'src/decorators';
import { NotificationLevel, NotificationType } from 'src/enum';
import { UserTable } from 'src/schema/tables/user.table';
import {
Column,
CreateDateColumn,
DeleteDateColumn,
ForeignKeyColumn,
PrimaryGeneratedColumn,
Table,
UpdateDateColumn,
} from 'src/sql-tools';
@Table('notifications')
@UpdatedAtTrigger('notifications_updated_at')
export class NotificationTable {
@PrimaryGeneratedColumn()
id!: string;
@CreateDateColumn()
createdAt!: Date;
@UpdateDateColumn()
updatedAt!: Date;
@DeleteDateColumn()
deletedAt?: Date;
@UpdateIdColumn({ indexName: 'IDX_notifications_update_id' })
updateId?: string;
@ForeignKeyColumn(() => UserTable, { onDelete: 'CASCADE', onUpdate: 'CASCADE', nullable: true })
userId!: string;
@Column({ default: NotificationLevel.Info })
level!: NotificationLevel;
@Column({ default: NotificationLevel.Info })
type!: NotificationType;
@Column({ type: 'jsonb', nullable: true })
data!: any | null;
@Column()
title!: string;
@Column({ type: 'text', nullable: true })
description!: string;
@Column({ type: 'timestamp with time zone', nullable: true })
readAt?: Date | null;
}

View File

@@ -1,6 +1,6 @@
import { ColumnType } from 'kysely';
import { UpdatedAtTrigger, UpdateIdColumn } from 'src/decorators';
import { UserStatus } from 'src/enum';
import { UserAvatarColor, UserStatus } from 'src/enum';
import { users_delete_audit } from 'src/schema/functions';
import {
AfterDeleteTrigger,
@@ -49,6 +49,9 @@ export class UserTable {
@Column({ type: 'boolean', default: true })
shouldChangePassword!: Generated<boolean>;
@Column({ default: null })
avatarColor!: UserAvatarColor | null;
@DeleteDateColumn()
deletedAt!: Timestamp | null;

View File

@@ -565,7 +565,7 @@ describe(AssetService.name, () => {
it('should remove faces', async () => {
const assetWithFace = { ...assetStub.image, faces: [faceStub.face1, faceStub.mergeFace1] };
mocks.asset.getById.mockResolvedValue(assetWithFace);
mocks.assetJob.getForAssetDeletion.mockResolvedValue(assetWithFace);
await sut.handleAssetDeletion({ id: assetWithFace.id, deleteOnDisk: true });
@@ -592,7 +592,7 @@ describe(AssetService.name, () => {
it('should update stack primary asset if deleted asset was primary asset in a stack', async () => {
mocks.stack.update.mockResolvedValue(factory.stack() as any);
mocks.asset.getById.mockResolvedValue(assetStub.primaryImage);
mocks.assetJob.getForAssetDeletion.mockResolvedValue(assetStub.primaryImage);
await sut.handleAssetDeletion({ id: assetStub.primaryImage.id, deleteOnDisk: true });
@@ -604,7 +604,7 @@ describe(AssetService.name, () => {
it('should delete the entire stack if deleted asset was the primary asset and the stack would only contain one asset afterwards', async () => {
mocks.stack.delete.mockResolvedValue();
mocks.asset.getById.mockResolvedValue({
mocks.assetJob.getForAssetDeletion.mockResolvedValue({
...assetStub.primaryImage,
stack: { ...assetStub.primaryImage.stack, assets: assetStub.primaryImage.stack!.assets.slice(0, 2) },
});
@@ -615,7 +615,7 @@ describe(AssetService.name, () => {
});
it('should delete a live photo', async () => {
mocks.asset.getById.mockResolvedValue(assetStub.livePhotoStillAsset);
mocks.assetJob.getForAssetDeletion.mockResolvedValue(assetStub.livePhotoStillAsset as any);
mocks.asset.getLivePhotoCount.mockResolvedValue(0);
await sut.handleAssetDeletion({
@@ -653,7 +653,7 @@ describe(AssetService.name, () => {
it('should not delete a live motion part if it is being used by another asset', async () => {
mocks.asset.getLivePhotoCount.mockResolvedValue(2);
mocks.asset.getById.mockResolvedValue(assetStub.livePhotoStillAsset);
mocks.assetJob.getForAssetDeletion.mockResolvedValue(assetStub.livePhotoStillAsset as any);
await sut.handleAssetDeletion({
id: assetStub.livePhotoStillAsset.id,
@@ -680,12 +680,13 @@ describe(AssetService.name, () => {
});
it('should update usage', async () => {
mocks.asset.getById.mockResolvedValue(assetStub.image);
mocks.assetJob.getForAssetDeletion.mockResolvedValue(assetStub.image);
await sut.handleAssetDeletion({ id: assetStub.image.id, deleteOnDisk: true });
expect(mocks.user.updateUsage).toHaveBeenCalledWith(assetStub.image.ownerId, -5000);
});
it('should fail if asset could not be found', async () => {
mocks.assetJob.getForAssetDeletion.mockResolvedValue(void 0);
await expect(sut.handleAssetDeletion({ id: assetStub.image.id, deleteOnDisk: true })).resolves.toBe(
JobStatus.FAILED,
);

View File

@@ -189,13 +189,7 @@ export class AssetService extends BaseService {
async handleAssetDeletion(job: JobOf<JobName.ASSET_DELETION>): Promise<JobStatus> {
const { id, deleteOnDisk } = job;
const asset = await this.assetRepository.getById(id, {
faces: { person: true },
library: true,
stack: { assets: true },
exifInfo: true,
files: true,
});
const asset = await this.assetJobRepository.getForAssetDeletion(id);
if (!asset) {
return JobStatus.FAILED;

View File

@@ -142,52 +142,55 @@ describe(BackupService.name, () => {
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
mocks.storage.createWriteStream.mockReturnValue(new PassThrough());
});
it('should run a database backup successfully', async () => {
const result = await sut.handleBackupDatabase();
expect(result).toBe(JobStatus.SUCCESS);
expect(mocks.storage.createWriteStream).toHaveBeenCalled();
});
it('should rename file on success', async () => {
const result = await sut.handleBackupDatabase();
expect(result).toBe(JobStatus.SUCCESS);
expect(mocks.storage.rename).toHaveBeenCalled();
});
it('should fail if pg_dumpall fails', async () => {
mocks.process.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
const result = await sut.handleBackupDatabase();
expect(result).toBe(JobStatus.FAILED);
await expect(sut.handleBackupDatabase()).rejects.toThrow('Backup failed with code 1');
});
it('should not rename file if pgdump fails and gzip succeeds', async () => {
mocks.process.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
const result = await sut.handleBackupDatabase();
expect(result).toBe(JobStatus.FAILED);
await expect(sut.handleBackupDatabase()).rejects.toThrow('Backup failed with code 1');
expect(mocks.storage.rename).not.toHaveBeenCalled();
});
it('should fail if gzip fails', async () => {
mocks.process.spawn.mockReturnValueOnce(mockSpawn(0, 'data', ''));
mocks.process.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
const result = await sut.handleBackupDatabase();
expect(result).toBe(JobStatus.FAILED);
await expect(sut.handleBackupDatabase()).rejects.toThrow('Gzip failed with code 1');
});
it('should fail if write stream fails', async () => {
mocks.storage.createWriteStream.mockImplementation(() => {
throw new Error('error');
});
const result = await sut.handleBackupDatabase();
expect(result).toBe(JobStatus.FAILED);
await expect(sut.handleBackupDatabase()).rejects.toThrow('error');
});
it('should fail if rename fails', async () => {
mocks.storage.rename.mockRejectedValue(new Error('error'));
const result = await sut.handleBackupDatabase();
expect(result).toBe(JobStatus.FAILED);
await expect(sut.handleBackupDatabase()).rejects.toThrow('error');
});
it('should ignore unlink failing and still return failed job status', async () => {
mocks.process.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
mocks.storage.unlink.mockRejectedValue(new Error('error'));
const result = await sut.handleBackupDatabase();
await expect(sut.handleBackupDatabase()).rejects.toThrow('Backup failed with code 1');
expect(mocks.storage.unlink).toHaveBeenCalled();
expect(result).toBe(JobStatus.FAILED);
});
it.each`
postgresVersion | expectedVersion
${'14.10'} | ${14}

View File

@@ -70,7 +70,7 @@ export class BackupService extends BaseService {
async handleBackupDatabase(): Promise<JobStatus> {
this.logger.debug(`Database Backup Started`);
const { database } = this.configRepository.getEnv();
const config = database.config.typeorm;
const config = database.config;
const isUrlConnection = config.connectionType === 'url';
@@ -174,7 +174,7 @@ export class BackupService extends BaseService {
await this.storageRepository
.unlink(backupFilePath)
.catch((error) => this.logger.error('Failed to delete failed backup file', error));
return JobStatus.FAILED;
throw error;
}
this.logger.log(`Database Backup Success`);

View File

@@ -29,6 +29,7 @@ import { MediaRepository } from 'src/repositories/media.repository';
import { MemoryRepository } from 'src/repositories/memory.repository';
import { MetadataRepository } from 'src/repositories/metadata.repository';
import { MoveRepository } from 'src/repositories/move.repository';
import { NotificationRepository } from 'src/repositories/notification.repository';
import { OAuthRepository } from 'src/repositories/oauth.repository';
import { PartnerRepository } from 'src/repositories/partner.repository';
import { PersonRepository } from 'src/repositories/person.repository';
@@ -80,6 +81,7 @@ export class BaseService {
protected memoryRepository: MemoryRepository,
protected metadataRepository: MetadataRepository,
protected moveRepository: MoveRepository,
protected notificationRepository: NotificationRepository,
protected oauthRepository: OAuthRepository,
protected partnerRepository: PartnerRepository,
protected personRepository: PersonRepository,

View File

@@ -53,22 +53,12 @@ describe(DatabaseService.name, () => {
mockEnvData({
database: {
config: {
kysely: {
host: 'database',
port: 5432,
user: 'postgres',
password: 'postgres',
database: 'immich',
},
typeorm: {
connectionType: 'parts',
type: 'postgres',
host: 'database',
port: 5432,
username: 'postgres',
password: 'postgres',
database: 'immich',
},
connectionType: 'parts',
host: 'database',
port: 5432,
username: 'postgres',
password: 'postgres',
database: 'immich',
},
skipMigrations: false,
vectorExtension: extension,
@@ -292,22 +282,12 @@ describe(DatabaseService.name, () => {
mockEnvData({
database: {
config: {
kysely: {
host: 'database',
port: 5432,
user: 'postgres',
password: 'postgres',
database: 'immich',
},
typeorm: {
connectionType: 'parts',
type: 'postgres',
host: 'database',
port: 5432,
username: 'postgres',
password: 'postgres',
database: 'immich',
},
connectionType: 'parts',
host: 'database',
port: 5432,
username: 'postgres',
password: 'postgres',
database: 'immich',
},
skipMigrations: true,
vectorExtension: DatabaseExtension.VECTORS,
@@ -325,22 +305,12 @@ describe(DatabaseService.name, () => {
mockEnvData({
database: {
config: {
kysely: {
host: 'database',
port: 5432,
user: 'postgres',
password: 'postgres',
database: 'immich',
},
typeorm: {
connectionType: 'parts',
type: 'postgres',
host: 'database',
port: 5432,
username: 'postgres',
password: 'postgres',
database: 'immich',
},
connectionType: 'parts',
host: 'database',
port: 5432,
username: 'postgres',
password: 'postgres',
database: 'immich',
},
skipMigrations: true,
vectorExtension: DatabaseExtension.VECTOR,
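The spec hunks above flatten the nested kysely/typeorm config into a single connection object, with helpers such as getKyselyConfig and asPostgresConnectionConfig deriving driver-specific shapes from it. A minimal sketch of that kind of mapping, using illustrative type and function names rather than Immich's real ones:

// minimal sketch, assuming an illustrative FlatDbConfig shape
interface FlatDbConfig {
  connectionType: 'parts';
  host: string;
  port: number;
  username: string;
  password: string;
  database: string;
}

function toPostgresJsOptions(config: FlatDbConfig) {
  // the postgres.js driver expects `user`, while the flattened config stores `username`
  return {
    host: config.host,
    port: config.port,
    user: config.username,
    password: config.password,
    database: config.database,
  };
}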

View File

@@ -33,7 +33,7 @@ export class DownloadService extends BaseService {
const targetSize = dto.archiveSize || HumanReadableSize.GiB * 4;
const metadata = await this.userRepository.getMetadata(auth.user.id);
const preferences = getPreferences(auth.user.email, metadata);
const preferences = getPreferences(metadata);
const motionIds = new Set<string>();
const archives: DownloadArchiveInfo[] = [];
let archive: DownloadArchiveInfo = { size: 0, assetIds: [] };

View File

@@ -17,6 +17,7 @@ import { MapService } from 'src/services/map.service';
import { MediaService } from 'src/services/media.service';
import { MemoryService } from 'src/services/memory.service';
import { MetadataService } from 'src/services/metadata.service';
import { NotificationAdminService } from 'src/services/notification-admin.service';
import { NotificationService } from 'src/services/notification.service';
import { PartnerService } from 'src/services/partner.service';
import { PersonService } from 'src/services/person.service';
@@ -60,6 +61,7 @@ export const services = [
MemoryService,
MetadataService,
NotificationService,
NotificationAdminService,
PartnerService,
PersonService,
SearchService,

View File

@@ -215,11 +215,7 @@ export class JobService extends BaseService {
await this.onDone(job);
}
} catch (error: Error | any) {
this.logger.error(
`Unable to run job handler (${queueName}/${job.name}): ${error}`,
error?.stack,
JSON.stringify(job.data),
);
await this.eventRepository.emit('job.failed', { job, error });
} finally {
this.telemetryRepository.jobs.addToGauge(queueMetric, -1);
}
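This hunk replaces inline error logging with a 'job.failed' event, so one subscriber decides how failures are logged or notified. A minimal sketch of that pattern using Node's EventEmitter and invented payload names, not Immich's event repository:

// minimal sketch, assuming a plain EventEmitter and an illustrative payload shape
import { EventEmitter } from 'node:events';

type JobFailedPayload = { job: { name: string; data: unknown }; error: unknown };

const events = new EventEmitter();

// one place decides how failures are handled
events.on('job.failed', ({ job, error }: JobFailedPayload) => {
  console.error(`Unable to run job handler (${job.name}):`, error, JSON.stringify(job.data));
});

// call sites just emit and move on
events.emit('job.failed', { job: { name: 'thumbnail', data: { id: 'asset-1' } }, error: new Error('boom') });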

View File

@@ -1,7 +1,6 @@
import { OutputInfo } from 'sharp';
import { SystemConfig } from 'src/config';
import { Exif } from 'src/database';
import { AssetMediaSize } from 'src/dtos/asset-media.dto';
import {
AssetFileType,
AssetPathType,
@@ -11,11 +10,11 @@ import {
ImageFormat,
JobName,
JobStatus,
RawExtractedFormat,
TranscodeHWAccel,
TranscodePolicy,
VideoCodec,
} from 'src/enum';
import { WithoutProperty } from 'src/repositories/asset.repository';
import { MediaService } from 'src/services/media.service';
import { JobCounts, RawImageInfo } from 'src/types';
import { assetStub } from 'test/fixtures/asset.stub';
@@ -232,17 +231,19 @@ describe(MediaService.name, () => {
describe('handleGenerateThumbnails', () => {
let rawBuffer: Buffer;
let fullsizeBuffer: Buffer;
let extractedBuffer: Buffer;
let rawInfo: RawImageInfo;
beforeEach(() => {
fullsizeBuffer = Buffer.from('embedded image data');
rawBuffer = Buffer.from('image data');
rawBuffer = Buffer.from('raw image data');
extractedBuffer = Buffer.from('embedded image file');
rawInfo = { width: 100, height: 100, channels: 3 };
mocks.media.decodeImage.mockImplementation((path) =>
mocks.media.decodeImage.mockImplementation((input) =>
Promise.resolve(
path.includes(AssetMediaSize.FULLSIZE)
? { data: fullsizeBuffer, info: rawInfo as OutputInfo }
: { data: rawBuffer, info: rawInfo as OutputInfo },
typeof input === 'string'
? { data: rawBuffer, info: rawInfo as OutputInfo } // string implies original file
: { data: fullsizeBuffer, info: rawInfo as OutputInfo }, // buffer implies embedded image extracted
),
);
});
@@ -585,16 +586,15 @@ describe(MediaService.name, () => {
});
it('should extract embedded image if enabled and available', async () => {
mocks.media.extract.mockResolvedValue(true);
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.JPEG });
mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
mocks.systemMetadata.get.mockResolvedValue({ image: { extractEmbedded: true } });
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageDng);
await sut.handleGenerateThumbnails({ id: assetStub.image.id });
const convertedPath = mocks.media.extract.mock.lastCall?.[1].toString();
expect(mocks.media.decodeImage).toHaveBeenCalledOnce();
expect(mocks.media.decodeImage).toHaveBeenCalledWith(convertedPath, {
expect(mocks.media.decodeImage).toHaveBeenCalledWith(extractedBuffer, {
colorspace: Colorspace.P3,
processInvalidImages: false,
size: 1440,
@@ -602,16 +602,13 @@ describe(MediaService.name, () => {
});
it('should resize original image if embedded image is too small', async () => {
mocks.media.extract.mockResolvedValue(true);
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.JPEG });
mocks.media.getImageDimensions.mockResolvedValue({ width: 1000, height: 1000 });
mocks.systemMetadata.get.mockResolvedValue({ image: { extractEmbedded: true } });
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageDng);
await sut.handleGenerateThumbnails({ id: assetStub.image.id });
const extractedPath = mocks.media.extract.mock.lastCall?.[1].toString();
expect(extractedPath).toMatch(/-fullsize\.jpeg$/);
expect(mocks.media.decodeImage).toHaveBeenCalledWith(assetStub.imageDng.originalPath, {
colorspace: Colorspace.P3,
processInvalidImages: false,
@@ -666,38 +663,40 @@ describe(MediaService.name, () => {
expect(mocks.media.generateThumbnail).toHaveBeenCalledTimes(2);
expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
rawBuffer,
expect.objectContaining({ processInvalidImages: true }),
expect.objectContaining({ processInvalidImages: false }),
'upload/thumbs/user-id/as/se/asset-id-preview.jpeg',
);
expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
rawBuffer,
expect.objectContaining({ processInvalidImages: true }),
expect.objectContaining({ processInvalidImages: false }),
'upload/thumbs/user-id/as/se/asset-id-thumbnail.webp',
);
expect(mocks.media.generateThumbhash).toHaveBeenCalledOnce();
expect(mocks.media.generateThumbhash).toHaveBeenCalledWith(
rawBuffer,
expect.objectContaining({ processInvalidImages: true }),
expect.objectContaining({ processInvalidImages: false }),
);
expect(mocks.media.getImageDimensions).not.toHaveBeenCalled();
vi.unstubAllEnvs();
});
it('should generate full-size preview using embedded JPEG from RAW images when extractEmbedded is true', async () => {
mocks.systemMetadata.get.mockResolvedValue({ image: { fullsize: { enabled: true }, extractEmbedded: true } });
mocks.media.extract.mockResolvedValue(true);
it('should extract full-size JPEG preview from RAW', async () => {
mocks.systemMetadata.get.mockResolvedValue({
image: { fullsize: { enabled: true, format: ImageFormat.WEBP }, extractEmbedded: true },
});
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.JPEG });
mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageDng);
await sut.handleGenerateThumbnails({ id: assetStub.image.id });
const extractedPath = mocks.media.extract.mock.lastCall?.[1].toString();
expect(mocks.media.decodeImage).toHaveBeenCalledOnce();
expect(mocks.media.decodeImage).toHaveBeenCalledWith(extractedPath, {
expect(mocks.media.decodeImage).toHaveBeenCalledWith(extractedBuffer, {
colorspace: Colorspace.P3,
processInvalidImages: false,
size: 1440, // capped to preview size as fullsize conversion is skipped
});
expect(mocks.media.generateThumbnail).toHaveBeenCalledTimes(2);
@@ -715,9 +714,51 @@ describe(MediaService.name, () => {
);
});
it('should convert full-size WEBP preview from JXL preview of RAW', async () => {
mocks.systemMetadata.get.mockResolvedValue({
image: { fullsize: { enabled: true, format: ImageFormat.WEBP }, extractEmbedded: true },
});
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.JXL });
mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageDng);
await sut.handleGenerateThumbnails({ id: assetStub.image.id });
expect(mocks.media.decodeImage).toHaveBeenCalledOnce();
expect(mocks.media.decodeImage).toHaveBeenCalledWith(extractedBuffer, {
colorspace: Colorspace.P3,
processInvalidImages: false,
});
expect(mocks.media.generateThumbnail).toHaveBeenCalledTimes(3);
expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
fullsizeBuffer,
{
colorspace: Colorspace.P3,
format: ImageFormat.WEBP,
quality: 80,
processInvalidImages: false,
raw: rawInfo,
},
'upload/thumbs/user-id/as/se/asset-id-fullsize.webp',
);
expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
fullsizeBuffer,
{
colorspace: Colorspace.P3,
format: ImageFormat.JPEG,
size: 1440,
quality: 80,
processInvalidImages: false,
raw: rawInfo,
},
'upload/thumbs/user-id/as/se/asset-id-preview.jpeg',
);
});
it('should generate full-size preview directly from RAW images when extractEmbedded is false', async () => {
mocks.systemMetadata.get.mockResolvedValue({ image: { fullsize: { enabled: true }, extractEmbedded: false } });
mocks.media.extract.mockResolvedValue(true);
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.JPEG });
mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageDng);
@@ -757,7 +798,7 @@ describe(MediaService.name, () => {
it('should generate full-size preview from non-web-friendly images', async () => {
mocks.systemMetadata.get.mockResolvedValue({ image: { fullsize: { enabled: true } } });
mocks.media.extract.mockResolvedValue(true);
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.JPEG });
mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
// HEIF/HIF image taken by cameras are not web-friendly, only has limited support on Safari.
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageHif);
@@ -786,7 +827,7 @@ describe(MediaService.name, () => {
it('should skip generating full-size preview for web-friendly images', async () => {
mocks.systemMetadata.get.mockResolvedValue({ image: { fullsize: { enabled: true } } });
mocks.media.extract.mockResolvedValue(true);
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.JPEG });
mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.image);
@@ -811,7 +852,7 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
image: { fullsize: { enabled: true, format: ImageFormat.WEBP, quality: 90 } },
});
mocks.media.extract.mockResolvedValue(true);
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.JPEG });
mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
// HEIF/HIF image taken by cameras are not web-friendly, only has limited support on Safari.
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageHif);
@@ -841,16 +882,12 @@ describe(MediaService.name, () => {
describe('handleQueueVideoConversion', () => {
it('should queue all video assets', async () => {
mocks.asset.getAll.mockResolvedValue({
items: [assetStub.video],
hasNextPage: false,
});
mocks.assetJob.streamForVideoConversion.mockReturnValue(makeStream([assetStub.video]));
mocks.person.getAll.mockReturnValue(makeStream());
await sut.handleQueueVideoConversion({ force: true });
expect(mocks.asset.getAll).toHaveBeenCalledWith({ skip: 0, take: 1000 }, { type: AssetType.VIDEO });
expect(mocks.asset.getWithout).not.toHaveBeenCalled();
expect(mocks.assetJob.streamForVideoConversion).toHaveBeenCalledWith(true);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.VIDEO_CONVERSION,
@@ -860,15 +897,11 @@ describe(MediaService.name, () => {
});
it('should queue all video assets without encoded videos', async () => {
mocks.asset.getWithout.mockResolvedValue({
items: [assetStub.video],
hasNextPage: false,
});
mocks.assetJob.streamForVideoConversion.mockReturnValue(makeStream([assetStub.video]));
await sut.handleQueueVideoConversion({});
expect(mocks.asset.getAll).not.toHaveBeenCalled();
expect(mocks.asset.getWithout).toHaveBeenCalledWith({ skip: 0, take: 1000 }, WithoutProperty.ENCODED_VIDEO);
expect(mocks.assetJob.streamForVideoConversion).toHaveBeenCalledWith(void 0);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.VIDEO_CONVERSION,
@@ -880,26 +913,18 @@ describe(MediaService.name, () => {
describe('handleVideoConversion', () => {
beforeEach(() => {
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
mocks.assetJob.getForVideoConversion.mockResolvedValue(assetStub.video);
sut.videoInterfaces = { dri: ['renderD128'], mali: true };
});
it('should skip transcoding if asset not found', async () => {
mocks.asset.getByIds.mockResolvedValue([]);
mocks.assetJob.getForVideoConversion.mockResolvedValue(void 0);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.probe).not.toHaveBeenCalled();
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
it('should skip transcoding if non-video asset', async () => {
mocks.asset.getByIds.mockResolvedValue([assetStub.image]);
await sut.handleVideoConversion({ id: assetStub.image.id });
expect(mocks.media.probe).not.toHaveBeenCalled();
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
it('should transcode the longest stream', async () => {
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
mocks.logger.isLevelEnabled.mockReturnValue(false);
mocks.media.probe.mockResolvedValue(probeStub.multipleVideoStreams);
@@ -921,14 +946,12 @@ describe(MediaService.name, () => {
it('should skip a video without any streams', async () => {
mocks.media.probe.mockResolvedValue(probeStub.noVideoStreams);
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
it('should skip a video without any height', async () => {
mocks.media.probe.mockResolvedValue(probeStub.noHeight);
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
@@ -936,7 +959,6 @@ describe(MediaService.name, () => {
it('should throw an error if an unknown transcode policy is configured', async () => {
mocks.media.probe.mockResolvedValue(probeStub.noAudioStreams);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: 'foo' } } as never as SystemConfig);
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await expect(sut.handleVideoConversion({ id: assetStub.video.id })).rejects.toThrowError();
expect(mocks.media.transcode).not.toHaveBeenCalled();
@@ -947,7 +969,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { transcode: TranscodePolicy.ALL, accel: TranscodeHWAccel.DISABLED },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
mocks.media.transcode.mockRejectedValue(new Error('Error transcoding video'));
await expect(sut.handleVideoConversion({ id: assetStub.video.id })).resolves.toBe(JobStatus.FAILED);
@@ -957,7 +978,6 @@ describe(MediaService.name, () => {
it('should transcode when set to all', async () => {
mocks.media.probe.mockResolvedValue(probeStub.multipleVideoStreams);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.ALL } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1035,7 +1055,6 @@ describe(MediaService.name, () => {
it('should scale horizontally when video is horizontal', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.OPTIMAL } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1051,7 +1070,6 @@ describe(MediaService.name, () => {
it('should scale vertically when video is vertical', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamVertical2160p);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.OPTIMAL } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1069,7 +1087,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { transcode: TranscodePolicy.ALL, targetResolution: 'original' },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1087,7 +1104,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { transcode: TranscodePolicy.ALL, targetResolution: 'original' },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1105,7 +1121,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { targetVideoCodec: VideoCodec.HEVC, acceptedAudioCodecs: [AudioCodec.AAC] },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1127,7 +1142,6 @@ describe(MediaService.name, () => {
acceptedAudioCodecs: [AudioCodec.AAC],
},
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1149,7 +1163,6 @@ describe(MediaService.name, () => {
acceptedAudioCodecs: [AudioCodec.AAC],
},
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1165,7 +1178,6 @@ describe(MediaService.name, () => {
it('should copy audio stream when audio matches target', async () => {
mocks.media.probe.mockResolvedValue(probeStub.audioStreamAac);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.OPTIMAL } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1180,7 +1192,6 @@ describe(MediaService.name, () => {
it('should remux when input is not an accepted container', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamAvi);
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1204,7 +1215,6 @@ describe(MediaService.name, () => {
it('should not transcode if transcoding is disabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.DISABLED } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
@@ -1212,7 +1222,6 @@ describe(MediaService.name, () => {
it('should not remux when input is not an accepted container and transcoding is disabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.DISABLED } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
@@ -1220,7 +1229,6 @@ describe(MediaService.name, () => {
it('should not transcode if target codec is invalid', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: 'invalid' as any } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
@@ -1229,7 +1237,7 @@ describe(MediaService.name, () => {
const asset = assetStub.hasEncodedVideo;
mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.DISABLED } });
mocks.asset.getByIds.mockResolvedValue([asset]);
mocks.assetJob.getForVideoConversion.mockResolvedValue(asset);
await sut.handleVideoConversion({ id: asset.id });
@@ -1243,7 +1251,6 @@ describe(MediaService.name, () => {
it('should set max bitrate if above 0', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { maxBitrate: '4500k' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1259,7 +1266,6 @@ describe(MediaService.name, () => {
it('should default max bitrate to kbps if no unit is provided', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { maxBitrate: '4500' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1275,7 +1281,6 @@ describe(MediaService.name, () => {
it('should transcode in two passes for h264/h265 when enabled and max bitrate is above 0', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { twoPass: true, maxBitrate: '4500k' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1291,7 +1296,6 @@ describe(MediaService.name, () => {
it('should fallback to one pass for h264/h265 if two-pass is enabled but no max bitrate is set', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { twoPass: true } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1313,7 +1317,6 @@ describe(MediaService.name, () => {
targetVideoCodec: VideoCodec.VP9,
},
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1335,7 +1338,6 @@ describe(MediaService.name, () => {
targetVideoCodec: VideoCodec.VP9,
},
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1351,7 +1353,6 @@ describe(MediaService.name, () => {
it('should configure preset for vp9', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.VP9, preset: 'slow' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1367,7 +1368,6 @@ describe(MediaService.name, () => {
it('should not configure preset for vp9 if invalid', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { preset: 'invalid', targetVideoCodec: VideoCodec.VP9 } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1383,7 +1383,6 @@ describe(MediaService.name, () => {
it('should configure threads if above 0', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.VP9, threads: 2 } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1399,7 +1398,6 @@ describe(MediaService.name, () => {
it('should disable thread pooling for h264 if thread limit is 1', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { threads: 1 } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1415,7 +1413,6 @@ describe(MediaService.name, () => {
it('should omit thread flags for h264 if thread limit is at or below 0', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { threads: 0 } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1431,7 +1428,6 @@ describe(MediaService.name, () => {
it('should disable thread pooling for hevc if thread limit is 1', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamVp9);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { threads: 1, targetVideoCodec: VideoCodec.HEVC } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1447,7 +1443,6 @@ describe(MediaService.name, () => {
it('should omit thread flags for hevc if thread limit is at or below 0', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamVp9);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { threads: 0, targetVideoCodec: VideoCodec.HEVC } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1463,7 +1458,6 @@ describe(MediaService.name, () => {
it('should use av1 if specified', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamVp9);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.AV1 } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1489,7 +1483,6 @@ describe(MediaService.name, () => {
it('should map `veryslow` preset to 4 for av1', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamVp9);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.AV1, preset: 'veryslow' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1505,7 +1498,6 @@ describe(MediaService.name, () => {
it('should set max bitrate for av1 if specified', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamVp9);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.AV1, maxBitrate: '2M' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1521,7 +1513,6 @@ describe(MediaService.name, () => {
it('should set threads for av1 if specified', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamVp9);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { targetVideoCodec: VideoCodec.AV1, threads: 4 } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1539,7 +1530,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { targetVideoCodec: VideoCodec.AV1, threads: 4, maxBitrate: '2M' },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1561,7 +1551,6 @@ describe(MediaService.name, () => {
targetResolution: '1080p',
},
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
@@ -1571,7 +1560,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.NVENC, targetVideoCodec: VideoCodec.VP9 },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await expect(sut.handleVideoConversion({ id: assetStub.video.id })).rejects.toThrowError();
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
@@ -1579,7 +1567,6 @@ describe(MediaService.name, () => {
it('should fail if hwaccel option is invalid', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: 'invalid' as any } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await expect(sut.handleVideoConversion({ id: assetStub.video.id })).rejects.toThrowError();
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
@@ -1587,7 +1574,6 @@ describe(MediaService.name, () => {
it('should set options for nvenc', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.NVENC } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1625,7 +1611,6 @@ describe(MediaService.name, () => {
twoPass: true,
},
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1641,7 +1626,6 @@ describe(MediaService.name, () => {
it('should set vbr options for nvenc when max bitrate is enabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.NVENC, maxBitrate: '10000k' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1657,7 +1641,6 @@ describe(MediaService.name, () => {
it('should set cq options for nvenc when max bitrate is disabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.NVENC, maxBitrate: '10000k' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1673,7 +1656,6 @@ describe(MediaService.name, () => {
it('should omit preset for nvenc if invalid', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.NVENC, preset: 'invalid' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1689,7 +1671,6 @@ describe(MediaService.name, () => {
it('should ignore two pass for nvenc if max bitrate is disabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.NVENC } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1707,7 +1688,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.NVENC, accelDecode: true },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1730,7 +1710,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.NVENC, accelDecode: true },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1752,7 +1731,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.NVENC, accelDecode: true },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1768,7 +1746,6 @@ describe(MediaService.name, () => {
it('should set options for qsv', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.QSV, maxBitrate: '10000k' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1809,7 +1786,6 @@ describe(MediaService.name, () => {
preferredHwDevice: '/dev/dri/renderD128',
},
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1828,7 +1804,6 @@ describe(MediaService.name, () => {
it('should omit preset for qsv if invalid', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.QSV, preset: 'invalid' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1849,7 +1824,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.QSV, targetVideoCodec: VideoCodec.VP9 },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1869,7 +1843,6 @@ describe(MediaService.name, () => {
sut.videoInterfaces = { dri: [], mali: false };
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.QSV } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await expect(sut.handleVideoConversion({ id: assetStub.video.id })).rejects.toThrowError();
@@ -1880,7 +1853,6 @@ describe(MediaService.name, () => {
sut.videoInterfaces = { dri: ['card1', 'renderD129', 'card0', 'renderD128'], mali: false };
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.QSV } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -1901,7 +1873,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.QSV, accelDecode: true },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
@@ -1928,7 +1899,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.QSV, accelDecode: true },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
@@ -1958,7 +1928,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.QSV, accelDecode: true, preferredHwDevice: 'renderD129' },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@@ -1977,7 +1946,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.QSV, accelDecode: true },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
@@ -2000,7 +1968,6 @@ describe(MediaService.name, () => {
it('should set options for vaapi', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -2031,7 +1998,6 @@ describe(MediaService.name, () => {
it('should set vbr options for vaapi when max bitrate is enabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI, maxBitrate: '10000k' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -2056,7 +2022,6 @@ describe(MediaService.name, () => {
it('should set cq options for vaapi when max bitrate is disabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -2081,7 +2046,6 @@ describe(MediaService.name, () => {
it('should omit preset for vaapi if invalid', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI, preset: 'invalid' } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -2101,7 +2065,6 @@ describe(MediaService.name, () => {
sut.videoInterfaces = { dri: ['card1', 'renderD129', 'card0', 'renderD128'], mali: false };
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -2123,7 +2086,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.VAAPI, preferredHwDevice: '/dev/dri/renderD128' },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -2144,7 +2106,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
@@ -2170,7 +2131,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
@@ -2194,7 +2154,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
@@ -2215,7 +2174,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true, preferredHwDevice: 'renderD129' },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
@@ -2232,7 +2190,6 @@ describe(MediaService.name, () => {
it('should fallback to hw encoding and sw decoding if hw transcoding fails and hw decoding is enabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
mocks.media.transcode.mockRejectedValueOnce(new Error('error'));
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledTimes(2);
@@ -2253,7 +2210,6 @@ describe(MediaService.name, () => {
it('should fallback to sw decoding if fallback to sw decoding + hw encoding fails', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
mocks.media.transcode.mockRejectedValueOnce(new Error('error'));
mocks.media.transcode.mockRejectedValueOnce(new Error('error'));
await sut.handleVideoConversion({ id: assetStub.video.id });
@@ -2272,7 +2228,6 @@ describe(MediaService.name, () => {
it('should fallback to sw transcoding if hw transcoding fails and hw decoding is disabled', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
mocks.media.transcode.mockRejectedValueOnce(new Error('error'));
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledTimes(2);
@@ -2291,7 +2246,6 @@ describe(MediaService.name, () => {
sut.videoInterfaces = { dri: [], mali: true };
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await expect(sut.handleVideoConversion({ id: assetStub.video.id })).rejects.toThrowError();
expect(mocks.media.transcode).not.toHaveBeenCalled();
});
@@ -2299,7 +2253,6 @@ describe(MediaService.name, () => {
it('should set options for rkmpp', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: true } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -2340,7 +2293,6 @@ describe(MediaService.name, () => {
targetVideoCodec: VideoCodec.HEVC,
},
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -2358,7 +2310,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: true, crf: 30, maxBitrate: '0' },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -2376,7 +2327,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: true, crf: 30, maxBitrate: '0' },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -2399,7 +2349,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: true, crf: 30, maxBitrate: '0' },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -2419,7 +2368,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: false, crf: 30, maxBitrate: '0' },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -2442,7 +2390,6 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: true, crf: 30, maxBitrate: '0' },
});
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -2462,7 +2409,6 @@ describe(MediaService.name, () => {
it('should tonemap when policy is required and video is hdr', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamHDR);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.REQUIRED } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -2482,7 +2428,6 @@ describe(MediaService.name, () => {
it('should tonemap when policy is optimal and video is hdr', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamHDR);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.OPTIMAL } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -2502,7 +2447,6 @@ describe(MediaService.name, () => {
it('should transcode when policy is required and video is not yuv420p', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStream10Bit);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.REQUIRED } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -2518,7 +2462,6 @@ describe(MediaService.name, () => {
it('should convert to yuv420p when scaling without tone-mapping', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStream4K10Bit);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { transcode: TranscodePolicy.REQUIRED } });
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
@@ -2534,7 +2477,6 @@ describe(MediaService.name, () => {
it('should count frames for progress when log level is debug', async () => {
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.logger.isLevelEnabled.mockReturnValue(true);
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
@@ -2557,7 +2499,6 @@ describe(MediaService.name, () => {
it('should not count frames for progress when log level is not debug', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p);
mocks.logger.isLevelEnabled.mockReturnValue(false);
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mocks.media.probe).toHaveBeenCalledWith(assetStub.video.originalPath, { countFrames: false });
@@ -2582,48 +2523,39 @@ describe(MediaService.name, () => {
describe('isSRGB', () => {
it('should return true for srgb colorspace', () => {
const asset = { ...assetStub.image, exifInfo: { colorspace: 'sRGB' } as Exif };
expect(sut.isSRGB(asset)).toEqual(true);
expect(sut.isSRGB({ colorspace: 'sRGB' } as Exif)).toEqual(true);
});
it('should return true for srgb profile description', () => {
const asset = { ...assetStub.image, exifInfo: { profileDescription: 'sRGB v1.31' } as Exif };
expect(sut.isSRGB(asset)).toEqual(true);
expect(sut.isSRGB({ profileDescription: 'sRGB v1.31' } as Exif)).toEqual(true);
});
it('should return true for 8-bit image with no colorspace metadata', () => {
const asset = { ...assetStub.image, exifInfo: { bitsPerSample: 8 } as Exif };
expect(sut.isSRGB(asset)).toEqual(true);
expect(sut.isSRGB({ bitsPerSample: 8 } as Exif)).toEqual(true);
});
it('should return true for image with no colorspace or bit depth metadata', () => {
const asset = { ...assetStub.image, exifInfo: {} as Exif };
expect(sut.isSRGB(asset)).toEqual(true);
expect(sut.isSRGB({} as Exif)).toEqual(true);
});
it('should return false for non-srgb colorspace', () => {
const asset = { ...assetStub.image, exifInfo: { colorspace: 'Adobe RGB' } as Exif };
expect(sut.isSRGB(asset)).toEqual(false);
expect(sut.isSRGB({ colorspace: 'Adobe RGB' } as Exif)).toEqual(false);
});
it('should return false for non-srgb profile description', () => {
const asset = { ...assetStub.image, exifInfo: { profileDescription: 'sP3C' } as Exif };
expect(sut.isSRGB(asset)).toEqual(false);
expect(sut.isSRGB({ profileDescription: 'sP3C' } as Exif)).toEqual(false);
});
it('should return false for 16-bit image with no colorspace metadata', () => {
const asset = { ...assetStub.image, exifInfo: { bitsPerSample: 16 } as Exif };
expect(sut.isSRGB(asset)).toEqual(false);
expect(sut.isSRGB({ bitsPerSample: 16 } as Exif)).toEqual(false);
});
it('should return true for 16-bit image with sRGB colorspace', () => {
const asset = { ...assetStub.image, exifInfo: { colorspace: 'sRGB', bitsPerSample: 16 } as Exif };
expect(sut.isSRGB(asset)).toEqual(true);
expect(sut.isSRGB({ colorspace: 'sRGB', bitsPerSample: 16 } as Exif)).toEqual(true);
});
it('should return true for 16-bit image with sRGB profile', () => {
const asset = { ...assetStub.image, exifInfo: { profileDescription: 'sRGB', bitsPerSample: 16 } as Exif };
expect(sut.isSRGB(asset)).toEqual(true);
expect(sut.isSRGB({ profileDescription: 'sRGB', bitsPerSample: 16 } as Exif)).toEqual(true);
});
});
});

View File

@@ -10,11 +10,11 @@ import {
AssetType,
AudioCodec,
Colorspace,
ImageFormat,
JobName,
JobStatus,
LogLevel,
QueueName,
RawExtractedFormat,
StorageFolder,
TranscodeHWAccel,
TranscodePolicy,
@@ -22,12 +22,11 @@ import {
VideoCodec,
VideoContainer,
} from 'src/enum';
import { UpsertFileOptions, WithoutProperty } from 'src/repositories/asset.repository';
import { UpsertFileOptions } from 'src/repositories/asset.repository';
import { BaseService } from 'src/services/base.service';
import {
AudioStreamInfo,
DecodeToBufferOptions,
GenerateThumbnailOptions,
JobItem,
JobOf,
VideoFormat,
@@ -213,6 +212,29 @@ export class MediaService extends BaseService {
return JobStatus.SUCCESS;
}
private async extractImage(originalPath: string, minSize: number) {
let extracted = await this.mediaRepository.extract(originalPath);
if (extracted && !(await this.shouldUseExtractedImage(extracted.buffer, minSize))) {
extracted = null;
}
return extracted;
}
private async decodeImage(thumbSource: string | Buffer, exifInfo: Exif, targetSize?: number) {
const { image } = await this.getConfig({ withCache: true });
const colorspace = this.isSRGB(exifInfo) ? Colorspace.SRGB : image.colorspace;
const decodeOptions: DecodeToBufferOptions = {
colorspace,
processInvalidImages: process.env.IMMICH_PROCESS_INVALID_IMAGES === 'true',
size: targetSize,
orientation: exifInfo.orientation ? Number(exifInfo.orientation) : undefined,
};
const { info, data } = await this.mediaRepository.decodeImage(thumbSource, decodeOptions);
return { info, data, colorspace };
}
private async generateImageThumbnails(asset: {
id: string;
ownerId: string;
@@ -225,68 +247,48 @@ export class MediaService extends BaseService {
const thumbnailPath = StorageCore.getImagePath(asset, AssetPathType.THUMBNAIL, image.thumbnail.format);
this.storageCore.ensureFolders(previewPath);
const processInvalidImages = process.env.IMMICH_PROCESS_INVALID_IMAGES === 'true';
const colorspace = this.isSRGB(asset) ? Colorspace.SRGB : image.colorspace;
// Handle embedded preview extraction for RAW files
const extractEmbedded = image.extractEmbedded && mimeTypes.isRaw(asset.originalFileName);
const extracted = extractEmbedded ? await this.extractImage(asset.originalPath, image.preview.size) : null;
const generateFullsize = image.fullsize.enabled && !mimeTypes.isWebSupportedImage(asset.originalPath);
const convertFullsize = generateFullsize && (!extracted || !mimeTypes.isWebSupportedImage(`.${extracted.format}`));
// prevents this extra "enabled" from leaking into fullsizeOptions later
const { enabled: imageFullsizeEnabled, ...imageFullsizeConfig } = image.fullsize;
const { info, data, colorspace } = await this.decodeImage(
extracted ? extracted.buffer : asset.originalPath,
asset.exifInfo,
convertFullsize ? undefined : image.preview.size,
);
const shouldConvertFullsize = imageFullsizeEnabled && !mimeTypes.isWebSupportedImage(asset.originalFileName);
const shouldExtractEmbedded = image.extractEmbedded && mimeTypes.isRaw(asset.originalFileName);
const decodeOptions: DecodeToBufferOptions = { colorspace, processInvalidImages, size: image.preview.size };
let useExtracted = false;
let decodeInputPath: string = asset.originalPath;
// Converted or extracted image from non-web-supported formats (e.g. RAW)
let fullsizePath: string | undefined;
if (shouldConvertFullsize) {
// unset size to decode fullsize image
decodeOptions.size = undefined;
fullsizePath = StorageCore.getImagePath(asset, AssetPathType.FULLSIZE, image.fullsize.format);
}
if (shouldExtractEmbedded) {
// For RAW files, try extracting embedded preview first
// Assume extracted image from RAW always in JPEG format, as implied from the `jpgFromRaw` tag name
const extractedPath = StorageCore.getImagePath(asset, AssetPathType.FULLSIZE, ImageFormat.JPEG);
const didExtract = await this.mediaRepository.extract(asset.originalPath, extractedPath);
useExtracted = didExtract && (await this.shouldUseExtractedImage(extractedPath, image.preview.size));
if (useExtracted) {
if (shouldConvertFullsize) {
// skip re-encoding and directly use extracted as fullsize preview
// as usually the extracted image is already heavily compressed, no point doing lossy conversion again
fullsizePath = extractedPath;
}
// use this as origin of preview and thumbnail
decodeInputPath = extractedPath;
if (asset.exifInfo) {
// write essential orientation and colorspace EXIF for correct fullsize preview and subsequent processing
const exif = { orientation: asset.exifInfo.orientation, colorspace: asset.exifInfo.colorspace };
await this.mediaRepository.writeExif(exif, extractedPath);
}
}
}
const { info, data } = await this.mediaRepository.decodeImage(decodeInputPath, decodeOptions);
const thumbnailOptions = { colorspace, processInvalidImages, raw: info };
// generate final images
const thumbnailOptions = { colorspace, processInvalidImages: false, raw: info };
const promises = [
this.mediaRepository.generateThumbhash(data, thumbnailOptions),
this.mediaRepository.generateThumbnail(data, { ...image.thumbnail, ...thumbnailOptions }, thumbnailPath),
this.mediaRepository.generateThumbnail(data, { ...image.preview, ...thumbnailOptions }, previewPath),
];
// did not extract a usable image from RAW
if (fullsizePath && !useExtracted) {
const fullsizeOptions: GenerateThumbnailOptions = {
...imageFullsizeConfig,
...thumbnailOptions,
size: undefined,
};
let fullsizePath: string | undefined;
if (convertFullsize) {
// convert a new fullsize image from the same source as the thumbnail
fullsizePath = StorageCore.getImagePath(asset, AssetPathType.FULLSIZE, image.fullsize.format);
const fullsizeOptions = { format: image.fullsize.format, quality: image.fullsize.quality, ...thumbnailOptions };
promises.push(this.mediaRepository.generateThumbnail(data, fullsizeOptions, fullsizePath));
} else if (generateFullsize && extracted && extracted.format === RawExtractedFormat.JPEG) {
fullsizePath = StorageCore.getImagePath(asset, AssetPathType.FULLSIZE, extracted.format);
this.storageCore.ensureFolders(fullsizePath);
// Write the buffer to disk with essential EXIF data
await this.storageRepository.createOrOverwriteFile(fullsizePath, extracted.buffer);
await this.mediaRepository.writeExif(
{
orientation: asset.exifInfo.orientation,
colorspace: asset.exifInfo.colorspace,
},
fullsizePath,
);
}
const outputs = await Promise.all(promises);
return { previewPath, thumbnailPath, fullsizePath, thumbhash: outputs[0] as Buffer };
@@ -330,25 +332,25 @@ export class MediaService extends BaseService {
async handleQueueVideoConversion(job: JobOf<JobName.QUEUE_VIDEO_CONVERSION>): Promise<JobStatus> {
const { force } = job;
const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => {
return force
? this.assetRepository.getAll(pagination, { type: AssetType.VIDEO })
: this.assetRepository.getWithout(pagination, WithoutProperty.ENCODED_VIDEO);
});
let queue: { name: JobName.VIDEO_CONVERSION; data: { id: string } }[] = [];
for await (const asset of this.assetJobRepository.streamForVideoConversion(force)) {
queue.push({ name: JobName.VIDEO_CONVERSION, data: { id: asset.id } });
for await (const assets of assetPagination) {
await this.jobRepository.queueAll(
assets.map((asset) => ({ name: JobName.VIDEO_CONVERSION, data: { id: asset.id } })),
);
if (queue.length >= JOBS_ASSET_PAGINATION_SIZE) {
await this.jobRepository.queueAll(queue);
queue = [];
}
}
await this.jobRepository.queueAll(queue);
return JobStatus.SUCCESS;
}
@OnJob({ name: JobName.VIDEO_CONVERSION, queue: QueueName.VIDEO_CONVERSION })
async handleVideoConversion({ id }: JobOf<JobName.VIDEO_CONVERSION>): Promise<JobStatus> {
const [asset] = await this.assetRepository.getByIds([id]);
if (!asset || asset.type !== AssetType.VIDEO) {
const asset = await this.assetJobRepository.getForVideoConversion(id);
if (!asset) {
return JobStatus.FAILED;
}
@@ -521,8 +523,7 @@ export class MediaService extends BaseService {
return name !== VideoContainer.MP4 && !ffmpegConfig.acceptedContainers.includes(name);
}
isSRGB(asset: { exifInfo: Exif }): boolean {
const { colorspace, profileDescription, bitsPerSample } = asset.exifInfo;
isSRGB({ colorspace, profileDescription, bitsPerSample }: Exif): boolean {
if (colorspace || profileDescription) {
return [colorspace, profileDescription].some((s) => s?.toLowerCase().includes('srgb'));
} else if (bitsPerSample) {
@@ -550,10 +551,9 @@ export class MediaService extends BaseService {
}
}
private async shouldUseExtractedImage(extractedPath: string, targetSize: number) {
const { width, height } = await this.mediaRepository.getImageDimensions(extractedPath);
private async shouldUseExtractedImage(extractedPathOrBuffer: string | Buffer, targetSize: number) {
const { width, height } = await this.mediaRepository.getImageDimensions(extractedPathOrBuffer);
const extractedSize = Math.min(width, height);
return extractedSize >= targetSize;
}
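For orientation, the `shouldUseExtractedImage` helper now accepts either a file path or an in-memory buffer, so the embedded RAW preview can be size-checked before anything is written to disk. A minimal sketch of a call site under that assumption; the `extracted` value mirrors the `extractImage` result used above, and the surrounding wiring is assumed rather than copied from the source.
// Sketch only, inside an async service method: compare the embedded preview's
// smaller edge against the configured preview size without touching the filesystem.
const extracted = await this.extractImage(asset.originalPath, image.preview.size);
const useExtracted =
  extracted !== null && (await this.shouldUseExtractedImage(extracted.buffer, image.preview.size));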

View File

@@ -14,7 +14,7 @@ import { probeStub } from 'test/fixtures/media.stub';
import { personStub } from 'test/fixtures/person.stub';
import { tagStub } from 'test/fixtures/tag.stub';
import { factory } from 'test/small.factory';
import { newTestService, ServiceMocks } from 'test/utils';
import { makeStream, newTestService, ServiceMocks } from 'test/utils';
const makeFaceTags = (face: Partial<{ Name: string }> = {}) => ({
RegionInfo: {
@@ -104,10 +104,10 @@ describe(MetadataService.name, () => {
describe('handleQueueMetadataExtraction', () => {
it('should queue metadata extraction for all assets without exif values', async () => {
mocks.asset.getWithout.mockResolvedValue({ items: [assetStub.image], hasNextPage: false });
mocks.assetJob.streamForMetadataExtraction.mockReturnValue(makeStream([assetStub.image]));
await expect(sut.handleQueueMetadataExtraction({ force: false })).resolves.toBe(JobStatus.SUCCESS);
expect(mocks.asset.getWithout).toHaveBeenCalled();
expect(mocks.assetJob.streamForMetadataExtraction).toHaveBeenCalledWith(false);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.METADATA_EXTRACTION,
@@ -117,10 +117,10 @@ describe(MetadataService.name, () => {
});
it('should queue metadata extraction for all assets', async () => {
mocks.asset.getAll.mockResolvedValue({ items: [assetStub.image], hasNextPage: false });
mocks.assetJob.streamForMetadataExtraction.mockReturnValue(makeStream([assetStub.image]));
await expect(sut.handleQueueMetadataExtraction({ force: true })).resolves.toBe(JobStatus.SUCCESS);
expect(mocks.asset.getAll).toHaveBeenCalled();
expect(mocks.assetJob.streamForMetadataExtraction).toHaveBeenCalledWith(true);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.METADATA_EXTRACTION,

View File

@@ -168,18 +168,18 @@ export class MetadataService extends BaseService {
@OnJob({ name: JobName.QUEUE_METADATA_EXTRACTION, queue: QueueName.METADATA_EXTRACTION })
async handleQueueMetadataExtraction(job: JobOf<JobName.QUEUE_METADATA_EXTRACTION>): Promise<JobStatus> {
const { force } = job;
const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => {
return force
? this.assetRepository.getAll(pagination)
: this.assetRepository.getWithout(pagination, WithoutProperty.EXIF);
});
for await (const assets of assetPagination) {
await this.jobRepository.queueAll(
assets.map((asset) => ({ name: JobName.METADATA_EXTRACTION, data: { id: asset.id } })),
);
let queue: { name: JobName.METADATA_EXTRACTION; data: { id: string } }[] = [];
for await (const asset of this.assetJobRepository.streamForMetadataExtraction(force)) {
queue.push({ name: JobName.METADATA_EXTRACTION, data: { id: asset.id } });
if (queue.length >= JOBS_ASSET_PAGINATION_SIZE) {
await this.jobRepository.queueAll(queue);
queue = [];
}
}
await this.jobRepository.queueAll(queue);
return JobStatus.SUCCESS;
}
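Both queue handlers above now share the same stream-and-batch shape; a standalone sketch of that pattern, assuming only an async iterable of `{ id }` rows and a `queueAll` function like the repository method used here:
// Sketch: drain an async stream into fixed-size job batches so memory stays bounded.
async function queueInBatches<T extends { id: string }>(
  stream: AsyncIterable<T>,
  queueAll: (jobs: { name: string; data: { id: string } }[]) => Promise<void>,
  name: string,
  batchSize: number,
): Promise<void> {
  let queue: { name: string; data: { id: string } }[] = [];
  for await (const item of stream) {
    queue.push({ name, data: { id: item.id } });
    if (queue.length >= batchSize) {
      await queueAll(queue);
      queue = [];
    }
  }
  // flush the final partial batch (may be empty)
  await queueAll(queue);
}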

View File

@@ -0,0 +1,111 @@
import { defaults, SystemConfig } from 'src/config';
import { EmailTemplate } from 'src/repositories/email.repository';
import { NotificationService } from 'src/services/notification.service';
import { userStub } from 'test/fixtures/user.stub';
import { newTestService, ServiceMocks } from 'test/utils';
const smtpTransport = Object.freeze<SystemConfig>({
...defaults,
notifications: {
smtp: {
...defaults.notifications.smtp,
enabled: true,
transport: {
ignoreCert: false,
host: 'localhost',
port: 587,
username: 'test',
password: 'test',
},
},
},
});
describe(NotificationService.name, () => {
let sut: NotificationService;
let mocks: ServiceMocks;
beforeEach(() => {
({ sut, mocks } = newTestService(NotificationService));
});
it('should work', () => {
expect(sut).toBeDefined();
});
describe('sendTestEmail', () => {
it('should throw error if user could not be found', async () => {
await expect(sut.sendTestEmail('', smtpTransport.notifications.smtp)).rejects.toThrow('User not found');
});
it('should throw error if smtp validation fails', async () => {
mocks.user.get.mockResolvedValue(userStub.admin);
mocks.email.verifySmtp.mockRejectedValue('');
await expect(sut.sendTestEmail('', smtpTransport.notifications.smtp)).rejects.toThrow(
'Failed to verify SMTP configuration',
);
});
it('should send email to default domain', async () => {
mocks.user.get.mockResolvedValue(userStub.admin);
mocks.email.verifySmtp.mockResolvedValue(true);
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.email.sendEmail.mockResolvedValue({ messageId: 'message-1', response: '' });
await expect(sut.sendTestEmail('', smtpTransport.notifications.smtp)).resolves.not.toThrow();
expect(mocks.email.renderEmail).toHaveBeenCalledWith({
template: EmailTemplate.TEST_EMAIL,
data: { baseUrl: 'https://my.immich.app', displayName: userStub.admin.name },
});
expect(mocks.email.sendEmail).toHaveBeenCalledWith(
expect.objectContaining({
subject: 'Test email from Immich',
smtp: smtpTransport.notifications.smtp.transport,
}),
);
});
it('should send email to external domain', async () => {
mocks.user.get.mockResolvedValue(userStub.admin);
mocks.email.verifySmtp.mockResolvedValue(true);
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.systemMetadata.get.mockResolvedValue({ server: { externalDomain: 'https://demo.immich.app' } });
mocks.email.sendEmail.mockResolvedValue({ messageId: 'message-1', response: '' });
await expect(sut.sendTestEmail('', smtpTransport.notifications.smtp)).resolves.not.toThrow();
expect(mocks.email.renderEmail).toHaveBeenCalledWith({
template: EmailTemplate.TEST_EMAIL,
data: { baseUrl: 'https://demo.immich.app', displayName: userStub.admin.name },
});
expect(mocks.email.sendEmail).toHaveBeenCalledWith(
expect.objectContaining({
subject: 'Test email from Immich',
smtp: smtpTransport.notifications.smtp.transport,
}),
);
});
it('should send email with replyTo', async () => {
mocks.user.get.mockResolvedValue(userStub.admin);
mocks.email.verifySmtp.mockResolvedValue(true);
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.email.sendEmail.mockResolvedValue({ messageId: 'message-1', response: '' });
await expect(
sut.sendTestEmail('', { ...smtpTransport.notifications.smtp, replyTo: 'demo@immich.app' }),
).resolves.not.toThrow();
expect(mocks.email.renderEmail).toHaveBeenCalledWith({
template: EmailTemplate.TEST_EMAIL,
data: { baseUrl: 'https://my.immich.app', displayName: userStub.admin.name },
});
expect(mocks.email.sendEmail).toHaveBeenCalledWith(
expect.objectContaining({
subject: 'Test email from Immich',
smtp: smtpTransport.notifications.smtp.transport,
replyTo: 'demo@immich.app',
}),
);
});
});
});

View File

@@ -0,0 +1,120 @@
import { BadRequestException, Injectable } from '@nestjs/common';
import { AuthDto } from 'src/dtos/auth.dto';
import { mapNotification, NotificationCreateDto } from 'src/dtos/notification.dto';
import { SystemConfigSmtpDto } from 'src/dtos/system-config.dto';
import { NotificationLevel, NotificationType } from 'src/enum';
import { EmailTemplate } from 'src/repositories/email.repository';
import { BaseService } from 'src/services/base.service';
import { getExternalDomain } from 'src/utils/misc';
@Injectable()
export class NotificationAdminService extends BaseService {
async create(auth: AuthDto, dto: NotificationCreateDto) {
const item = await this.notificationRepository.create({
userId: dto.userId,
type: dto.type ?? NotificationType.Custom,
level: dto.level ?? NotificationLevel.Info,
title: dto.title,
description: dto.description,
data: dto.data,
});
return mapNotification(item);
}
async sendTestEmail(id: string, dto: SystemConfigSmtpDto, tempTemplate?: string) {
const user = await this.userRepository.get(id, { withDeleted: false });
if (!user) {
throw new Error('User not found');
}
try {
await this.emailRepository.verifySmtp(dto.transport);
} catch (error) {
throw new BadRequestException('Failed to verify SMTP configuration', { cause: error });
}
const { server } = await this.getConfig({ withCache: false });
const { html, text } = await this.emailRepository.renderEmail({
template: EmailTemplate.TEST_EMAIL,
data: {
baseUrl: getExternalDomain(server),
displayName: user.name,
},
customTemplate: tempTemplate!,
});
const { messageId } = await this.emailRepository.sendEmail({
to: user.email,
subject: 'Test email from Immich',
html,
text,
from: dto.from,
replyTo: dto.replyTo || dto.from,
smtp: dto.transport,
});
return { messageId };
}
async getTemplate(name: EmailTemplate, customTemplate: string) {
const { server, templates } = await this.getConfig({ withCache: false });
let templateResponse = '';
switch (name) {
case EmailTemplate.WELCOME: {
const { html: _welcomeHtml } = await this.emailRepository.renderEmail({
template: EmailTemplate.WELCOME,
data: {
baseUrl: getExternalDomain(server),
displayName: 'John Doe',
username: 'john@doe.com',
password: 'thisIsAPassword123',
},
customTemplate: customTemplate || templates.email.welcomeTemplate,
});
templateResponse = _welcomeHtml;
break;
}
case EmailTemplate.ALBUM_UPDATE: {
const { html: _updateAlbumHtml } = await this.emailRepository.renderEmail({
template: EmailTemplate.ALBUM_UPDATE,
data: {
baseUrl: getExternalDomain(server),
albumId: '1',
albumName: 'Favorite Photos',
recipientName: 'Jane Doe',
cid: undefined,
},
customTemplate: customTemplate || templates.email.albumInviteTemplate,
});
templateResponse = _updateAlbumHtml;
break;
}
case EmailTemplate.ALBUM_INVITE: {
const { html } = await this.emailRepository.renderEmail({
template: EmailTemplate.ALBUM_INVITE,
data: {
baseUrl: getExternalDomain(server),
albumId: '1',
albumName: "John Doe's Favorites",
senderName: 'John Doe',
recipientName: 'Jane Doe',
cid: undefined,
},
customTemplate: customTemplate || templates.email.albumInviteTemplate,
});
templateResponse = html;
break;
}
default: {
templateResponse = '';
break;
}
}
return { name, html: templateResponse };
}
}
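For context, a hedged example of exercising the new `create` method; the DTO values below are illustrative and not taken from the diff, while `NotificationType` and `NotificationLevel` come from src/enum as imported above.
// Illustrative call only, inside an async context; the service returns the mapped NotificationDto.
const notification = await notificationAdminService.create(auth, {
  userId: auth.user.id,
  type: NotificationType.Custom,
  level: NotificationLevel.Info,
  title: 'Backup finished',
  description: 'The nightly database backup completed successfully',
});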

View File

@@ -3,7 +3,6 @@ import { defaults, SystemConfig } from 'src/config';
import { AlbumUser } from 'src/database';
import { SystemConfigDto } from 'src/dtos/system-config.dto';
import { AssetFileType, JobName, JobStatus, UserMetadataKey } from 'src/enum';
import { EmailTemplate } from 'src/repositories/email.repository';
import { NotificationService } from 'src/services/notification.service';
import { INotifyAlbumUpdateJob } from 'src/types';
import { albumStub } from 'test/fixtures/album.stub';
@@ -241,82 +240,6 @@ describe(NotificationService.name, () => {
});
});
describe('sendTestEmail', () => {
it('should throw error if user could not be found', async () => {
await expect(sut.sendTestEmail('', configs.smtpTransport.notifications.smtp)).rejects.toThrow('User not found');
});
it('should throw error if smtp validation fails', async () => {
mocks.user.get.mockResolvedValue(userStub.admin);
mocks.email.verifySmtp.mockRejectedValue('');
await expect(sut.sendTestEmail('', configs.smtpTransport.notifications.smtp)).rejects.toThrow(
'Failed to verify SMTP configuration',
);
});
it('should send email to default domain', async () => {
mocks.user.get.mockResolvedValue(userStub.admin);
mocks.email.verifySmtp.mockResolvedValue(true);
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.email.sendEmail.mockResolvedValue({ messageId: 'message-1', response: '' });
await expect(sut.sendTestEmail('', configs.smtpTransport.notifications.smtp)).resolves.not.toThrow();
expect(mocks.email.renderEmail).toHaveBeenCalledWith({
template: EmailTemplate.TEST_EMAIL,
data: { baseUrl: 'https://my.immich.app', displayName: userStub.admin.name },
});
expect(mocks.email.sendEmail).toHaveBeenCalledWith(
expect.objectContaining({
subject: 'Test email from Immich',
smtp: configs.smtpTransport.notifications.smtp.transport,
}),
);
});
it('should send email to external domain', async () => {
mocks.user.get.mockResolvedValue(userStub.admin);
mocks.email.verifySmtp.mockResolvedValue(true);
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.systemMetadata.get.mockResolvedValue({ server: { externalDomain: 'https://demo.immich.app' } });
mocks.email.sendEmail.mockResolvedValue({ messageId: 'message-1', response: '' });
await expect(sut.sendTestEmail('', configs.smtpTransport.notifications.smtp)).resolves.not.toThrow();
expect(mocks.email.renderEmail).toHaveBeenCalledWith({
template: EmailTemplate.TEST_EMAIL,
data: { baseUrl: 'https://demo.immich.app', displayName: userStub.admin.name },
});
expect(mocks.email.sendEmail).toHaveBeenCalledWith(
expect.objectContaining({
subject: 'Test email from Immich',
smtp: configs.smtpTransport.notifications.smtp.transport,
}),
);
});
it('should send email with replyTo', async () => {
mocks.user.get.mockResolvedValue(userStub.admin);
mocks.email.verifySmtp.mockResolvedValue(true);
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.email.sendEmail.mockResolvedValue({ messageId: 'message-1', response: '' });
await expect(
sut.sendTestEmail('', { ...configs.smtpTransport.notifications.smtp, replyTo: 'demo@immich.app' }),
).resolves.not.toThrow();
expect(mocks.email.renderEmail).toHaveBeenCalledWith({
template: EmailTemplate.TEST_EMAIL,
data: { baseUrl: 'https://my.immich.app', displayName: userStub.admin.name },
});
expect(mocks.email.sendEmail).toHaveBeenCalledWith(
expect.objectContaining({
subject: 'Test email from Immich',
smtp: configs.smtpTransport.notifications.smtp.transport,
replyTo: 'demo@immich.app',
}),
);
});
});
describe('handleUserSignup', () => {
it('should skip if user could not be found', async () => {
await expect(sut.handleUserSignup({ id: '' })).resolves.toBe(JobStatus.SKIPPED);

View File

@@ -1,7 +1,24 @@
import { BadRequestException, Injectable } from '@nestjs/common';
import { OnEvent, OnJob } from 'src/decorators';
import { AuthDto } from 'src/dtos/auth.dto';
import {
mapNotification,
NotificationDeleteAllDto,
NotificationDto,
NotificationSearchDto,
NotificationUpdateAllDto,
NotificationUpdateDto,
} from 'src/dtos/notification.dto';
import { SystemConfigSmtpDto } from 'src/dtos/system-config.dto';
import { AssetFileType, JobName, JobStatus, QueueName } from 'src/enum';
import {
AssetFileType,
JobName,
JobStatus,
NotificationLevel,
NotificationType,
Permission,
QueueName,
} from 'src/enum';
import { EmailTemplate } from 'src/repositories/email.repository';
import { ArgOf } from 'src/repositories/event.repository';
import { BaseService } from 'src/services/base.service';
@@ -15,6 +32,80 @@ import { getPreferences } from 'src/utils/preferences';
export class NotificationService extends BaseService {
private static albumUpdateEmailDelayMs = 300_000;
async search(auth: AuthDto, dto: NotificationSearchDto): Promise<NotificationDto[]> {
const items = await this.notificationRepository.search(auth.user.id, dto);
return items.map((item) => mapNotification(item));
}
async updateAll(auth: AuthDto, dto: NotificationUpdateAllDto) {
await this.requireAccess({ auth, ids: dto.ids, permission: Permission.NOTIFICATION_UPDATE });
await this.notificationRepository.updateAll(dto.ids, {
readAt: dto.readAt,
});
}
async deleteAll(auth: AuthDto, dto: NotificationDeleteAllDto) {
await this.requireAccess({ auth, ids: dto.ids, permission: Permission.NOTIFICATION_DELETE });
await this.notificationRepository.deleteAll(dto.ids);
}
async get(auth: AuthDto, id: string) {
await this.requireAccess({ auth, ids: [id], permission: Permission.NOTIFICATION_READ });
const item = await this.notificationRepository.get(id);
if (!item) {
throw new BadRequestException('Notification not found');
}
return mapNotification(item);
}
async update(auth: AuthDto, id: string, dto: NotificationUpdateDto) {
await this.requireAccess({ auth, ids: [id], permission: Permission.NOTIFICATION_UPDATE });
const item = await this.notificationRepository.update(id, {
readAt: dto.readAt,
});
return mapNotification(item);
}
async delete(auth: AuthDto, id: string) {
await this.requireAccess({ auth, ids: [id], permission: Permission.NOTIFICATION_DELETE });
await this.notificationRepository.delete(id);
}
@OnJob({ name: JobName.NOTIFICATIONS_CLEANUP, queue: QueueName.BACKGROUND_TASK })
async onNotificationsCleanup() {
await this.notificationRepository.cleanup();
}
@OnEvent({ name: 'job.failed' })
async onJobFailed({ job, error }: ArgOf<'job.failed'>) {
const admin = await this.userRepository.getAdmin();
if (!admin) {
return;
}
this.logger.error(`Unable to run job handler (${job.name}): ${error}`, error?.stack, JSON.stringify(job.data));
switch (job.name) {
case JobName.BACKUP_DATABASE: {
const errorMessage = error instanceof Error ? error.message : error;
const item = await this.notificationRepository.create({
userId: admin.id,
type: NotificationType.JobFailed,
level: NotificationLevel.Error,
title: 'Job Failed',
description: `Job ${job.name} failed with error: ${errorMessage}`,
});
this.eventRepository.clientSend('on_notification', admin.id, mapNotification(item));
break;
}
default: {
return;
}
}
}
@OnEvent({ name: 'config.update' })
onConfigUpdate({ oldConfig, newConfig }: ArgOf<'config.update'>) {
this.eventRepository.clientBroadcast('on_config_update');
@@ -271,7 +362,7 @@ export class NotificationService extends BaseService {
return JobStatus.SKIPPED;
}
const { emailNotifications } = getPreferences(recipient.email, recipient.metadata);
const { emailNotifications } = getPreferences(recipient.metadata);
if (!emailNotifications.enabled || !emailNotifications.albumInvite) {
return JobStatus.SKIPPED;
@@ -333,7 +424,7 @@ export class NotificationService extends BaseService {
continue;
}
const { emailNotifications } = getPreferences(user.email, user.metadata);
const { emailNotifications } = getPreferences(user.metadata);
if (!emailNotifications.enabled || !emailNotifications.albumUpdate) {
continue;

View File

@@ -106,21 +106,19 @@ export class UserAdminService extends BaseService {
}
async getPreferences(auth: AuthDto, id: string): Promise<UserPreferencesResponseDto> {
const { email } = await this.findOrFail(id, { withDeleted: true });
await this.findOrFail(id, { withDeleted: true });
const metadata = await this.userRepository.getMetadata(id);
const preferences = getPreferences(email, metadata);
return mapPreferences(preferences);
return mapPreferences(getPreferences(metadata));
}
async updatePreferences(auth: AuthDto, id: string, dto: UserPreferencesUpdateDto) {
const { email } = await this.findOrFail(id, { withDeleted: false });
await this.findOrFail(id, { withDeleted: false });
const metadata = await this.userRepository.getMetadata(id);
const preferences = getPreferences(email, metadata);
const newPreferences = mergePreferences(preferences, dto);
const newPreferences = mergePreferences(getPreferences(metadata), dto);
await this.userRepository.upsertMetadata(id, {
key: UserMetadataKey.PREFERENCES,
value: getPreferencesPartial({ email }, newPreferences),
value: getPreferencesPartial(newPreferences),
});
return mapPreferences(newPreferences);

View File

@@ -53,6 +53,7 @@ export class UserService extends BaseService {
const update: Updateable<UserTable> = {
email: dto.email,
name: dto.name,
avatarColor: dto.avatarColor,
};
if (dto.password) {
@@ -68,18 +69,16 @@ export class UserService extends BaseService {
async getMyPreferences(auth: AuthDto): Promise<UserPreferencesResponseDto> {
const metadata = await this.userRepository.getMetadata(auth.user.id);
const preferences = getPreferences(auth.user.email, metadata);
return mapPreferences(preferences);
return mapPreferences(getPreferences(metadata));
}
async updateMyPreferences(auth: AuthDto, dto: UserPreferencesUpdateDto) {
const metadata = await this.userRepository.getMetadata(auth.user.id);
const current = getPreferences(auth.user.email, metadata);
const updated = mergePreferences(current, dto);
const updated = mergePreferences(getPreferences(metadata), dto);
await this.userRepository.upsertMetadata(auth.user.id, {
key: UserMetadataKey.PREFERENCES,
value: getPreferencesPartial(auth.user, updated),
value: getPreferencesPartial(updated),
});
return mapPreferences(updated);

View File

@@ -11,7 +11,6 @@ import {
SyncEntityType,
SystemMetadataKey,
TranscodeTarget,
UserAvatarColor,
UserMetadataKey,
VideoCodec,
} from 'src/enum';
@@ -298,6 +297,10 @@ export type JobItem =
// Metadata Extraction
| { name: JobName.QUEUE_METADATA_EXTRACTION; data: IBaseJob }
| { name: JobName.METADATA_EXTRACTION; data: IEntityJob }
// Notifications
| { name: JobName.NOTIFICATIONS_CLEANUP; data?: IBaseJob }
// Sidecar Scanning
| { name: JobName.QUEUE_SIDECAR; data: IBaseJob }
| { name: JobName.SIDECAR_DISCOVERY; data: IEntityJob }
@@ -486,9 +489,6 @@ export interface UserPreferences {
enabled: boolean;
sidebarWeb: boolean;
};
avatar: {
color: UserAvatarColor;
};
emailNotifications: {
enabled: boolean;
albumInvite: boolean;

View File

@@ -221,6 +221,12 @@ const checkOtherAccess = async (access: AccessRepository, request: OtherAccessRe
return access.person.checkFaceOwnerAccess(auth.user.id, ids);
}
case Permission.NOTIFICATION_READ:
case Permission.NOTIFICATION_UPDATE:
case Permission.NOTIFICATION_DELETE: {
return access.notification.checkOwnerAccess(auth.user.id, ids);
}
case Permission.TAG_ASSET:
case Permission.TAG_READ:
case Permission.TAG_UPDATE:

View File

@@ -0,0 +1,83 @@
import { asPostgresConnectionConfig } from 'src/utils/database';
describe('database utils', () => {
describe('asPostgresConnectionConfig', () => {
it('should handle sslmode=require', () => {
expect(
asPostgresConnectionConfig({
connectionType: 'url',
url: 'postgres://postgres1:postgres2@database1:54320/immich?sslmode=require',
}),
).toMatchObject({ ssl: {} });
});
it('should handle sslmode=prefer', () => {
expect(
asPostgresConnectionConfig({
connectionType: 'url',
url: 'postgres://postgres1:postgres2@database1:54320/immich?sslmode=prefer',
}),
).toMatchObject({ ssl: {} });
});
it('should handle sslmode=verify-ca', () => {
expect(
asPostgresConnectionConfig({
connectionType: 'url',
url: 'postgres://postgres1:postgres2@database1:54320/immich?sslmode=verify-ca',
}),
).toMatchObject({ ssl: {} });
});
it('should handle sslmode=verify-full', () => {
expect(
asPostgresConnectionConfig({
connectionType: 'url',
url: 'postgres://postgres1:postgres2@database1:54320/immich?sslmode=verify-full',
}),
).toMatchObject({ ssl: {} });
});
it('should handle sslmode=no-verify', () => {
expect(
asPostgresConnectionConfig({
connectionType: 'url',
url: 'postgres://postgres1:postgres2@database1:54320/immich?sslmode=no-verify',
}),
).toMatchObject({ ssl: { rejectUnauthorized: false } });
});
it('should handle ssl=true', () => {
expect(
asPostgresConnectionConfig({
connectionType: 'url',
url: 'postgres://postgres1:postgres2@database1:54320/immich?ssl=true',
}),
).toMatchObject({ ssl: true });
});
it('should reject invalid ssl', () => {
expect(() =>
asPostgresConnectionConfig({
connectionType: 'url',
url: 'postgres://postgres1:postgres2@database1:54320/immich?ssl=invalid',
}),
).toThrowError('Invalid ssl option');
});
it('should handle socket: URLs', () => {
expect(
asPostgresConnectionConfig({ connectionType: 'url', url: 'socket:/run/postgresql?db=database1' }),
).toMatchObject({ host: '/run/postgresql', database: 'database1' });
});
it('should handle sockets in postgres: URLs', () => {
expect(
asPostgresConnectionConfig({ connectionType: 'url', url: 'postgres:///database2?host=/path/to/socket' }),
).toMatchObject({
host: '/path/to/socket',
database: 'database2',
});
});
});
});

View File

@@ -14,33 +14,57 @@ import {
} from 'kysely';
import { PostgresJSDialect } from 'kysely-postgres-js';
import { jsonArrayFrom, jsonObjectFrom } from 'kysely/helpers/postgres';
import { parse } from 'pg-connection-string';
import postgres, { Notice } from 'postgres';
import { columns, Exif, Person } from 'src/database';
import { DB } from 'src/db';
import { AssetFileType } from 'src/enum';
import { TimeBucketSize } from 'src/repositories/asset.repository';
import { AssetSearchBuilderOptions } from 'src/repositories/search.repository';
import { DatabaseConnectionParams } from 'src/types';
type Ssl = 'require' | 'allow' | 'prefer' | 'verify-full' | boolean | object;
export type PostgresConnectionConfig = {
host?: string;
password?: string;
user?: string;
port?: number;
database?: string;
max?: number;
client_encoding?: string;
ssl?: Ssl;
application_name?: string;
fallback_application_name?: string;
options?: string;
};
export const isValidSsl = (ssl?: string | boolean | object): ssl is Ssl =>
const isValidSsl = (ssl?: string | boolean | object): ssl is Ssl =>
typeof ssl !== 'string' || ssl === 'require' || ssl === 'allow' || ssl === 'prefer' || ssl === 'verify-full';
export const getKyselyConfig = (options: PostgresConnectionConfig): KyselyConfig => {
export const asPostgresConnectionConfig = (params: DatabaseConnectionParams) => {
if (params.connectionType === 'parts') {
return {
host: params.host,
port: params.port,
username: params.username,
password: params.password,
database: params.database,
ssl: undefined,
};
}
const { host, port, user, password, database, ...rest } = parse(params.url);
let ssl: Ssl | undefined;
if (rest.ssl) {
if (!isValidSsl(rest.ssl)) {
throw new Error(`Invalid ssl option: ${rest.ssl}`);
}
ssl = rest.ssl;
}
return {
host: host ?? undefined,
port: port ? Number(port) : undefined,
username: user,
password,
database: database ?? undefined,
ssl,
};
};
export const getKyselyConfig = (
params: DatabaseConnectionParams,
options: Partial<postgres.Options<Record<string, postgres.PostgresType>>> = {},
): KyselyConfig => {
const config = asPostgresConnectionConfig(params);
return {
dialect: new PostgresJSDialect({
postgres: postgres({
@@ -67,6 +91,12 @@ export const getKyselyConfig = (options: PostgresConnectionConfig): KyselyConfig
connection: {
TimeZone: 'UTC',
},
host: config.host,
port: config.port,
username: config.username,
password: config.password,
database: config.database,
ssl: config.ssl,
...options,
}),
}),
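A minimal usage sketch of the reworked helpers, assuming a URL-style `DatabaseConnectionParams` like the ones exercised in the spec above; the connection string and the `max` pool option are examples only.
// Parse the parameters once, then reuse them for both raw postgres() clients and the Kysely dialect.
const params = { connectionType: 'url' as const, url: 'postgres://immich:immich@localhost:5432/immich?sslmode=prefer' };
const connection = asPostgresConnectionConfig(params); // { host, port, username, password, database, ssl }
const db = new Kysely<DB>(getKyselyConfig(params, { max: 10 })); // extra postgres.js options are merged in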

View File

@@ -34,45 +34,40 @@ const raw: Record<string, string[]> = {
'.x3f': ['image/x3f', 'image/x-sigma-x3f'],
};
/**
* list of supported image extensions from https://developer.mozilla.org/en-US/docs/Web/Media/Formats/Image_types excluding svg
* @TODO share with the client
* @see {@link web/src/lib/utils/asset-utils.ts#L329}
**/
const webSupportedImage = {
'.avif': ['image/avif'],
'.gif': ['image/gif'],
'.jpeg': ['image/jpeg'],
'.jpg': ['image/jpeg'],
'.png': ['image/png', 'image/apng'],
'.webp': ['image/webp'],
};
const image: Record<string, string[]> = {
...raw,
'.avif': ['image/avif'],
...webSupportedImage,
'.bmp': ['image/bmp'],
'.gif': ['image/gif'],
'.heic': ['image/heic'],
'.heif': ['image/heif'],
'.hif': ['image/hif'],
'.insp': ['image/jpeg'],
'.jp2': ['image/jp2'],
'.jpe': ['image/jpeg'],
'.jpeg': ['image/jpeg'],
'.jpg': ['image/jpeg'],
'.jxl': ['image/jxl'],
'.png': ['image/png'],
'.svg': ['image/svg'],
'.tif': ['image/tiff'],
'.tiff': ['image/tiff'],
'.webp': ['image/webp'],
};
const extensionOverrides: Record<string, string> = {
'image/jpeg': '.jpg',
};
/**
* list of supported image extensions from https://developer.mozilla.org/en-US/docs/Web/Media/Formats/Image_types excluding svg
* @TODO share with the client
* @see {@link web/src/lib/utils/asset-utils.ts#L329}
**/
const webSupportedImageMimeTypes = new Set([
'image/apng',
'image/avif',
'image/gif',
'image/jpeg',
'image/png',
'image/webp',
]);
const profileExtensions = new Set(['.avif', '.dng', '.heic', '.heif', '.jpeg', '.jpg', '.png', '.webp', '.svg']);
const profile: Record<string, string[]> = Object.fromEntries(
Object.entries(image).filter(([key]) => profileExtensions.has(key)),
@@ -123,7 +118,7 @@ export const mimeTypes = {
isAsset: (filename: string) => isType(filename, image) || isType(filename, video),
isImage: (filename: string) => isType(filename, image),
isWebSupportedImage: (filename: string) => webSupportedImageMimeTypes.has(lookup(filename)),
isWebSupportedImage: (filename: string) => isType(filename, webSupportedImage),
isProfile: (filename: string) => isType(filename, profile),
isSidecar: (filename: string) => isType(filename, sidecar),
isVideo: (filename: string) => isType(filename, video),
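The check is now driven by the `webSupportedImage` extension map instead of a MIME lookup; a short sketch of the expected behaviour with made-up file names:
mimeTypes.isWebSupportedImage('photo.jpg');  // true  - listed in webSupportedImage
mimeTypes.isWebSupportedImage('photo.avif'); // true
mimeTypes.isWebSupportedImage('photo.heic'); // false - served via a generated preview instead
mimeTypes.isWebSupportedImage('photo.cr2');  // false - RAW, handled by extraction/conversion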

View File

@@ -1,16 +1,11 @@
import _ from 'lodash';
import { UserPreferencesUpdateDto } from 'src/dtos/user-preferences.dto';
import { UserAvatarColor, UserMetadataKey } from 'src/enum';
import { UserMetadataKey } from 'src/enum';
import { DeepPartial, UserMetadataItem, UserPreferences } from 'src/types';
import { HumanReadableSize } from 'src/utils/bytes';
import { getKeysDeep } from 'src/utils/misc';
const getDefaultPreferences = (user: { email: string }): UserPreferences => {
const values = Object.values(UserAvatarColor);
const randomIndex = Math.floor(
[...user.email].map((letter) => letter.codePointAt(0) ?? 0).reduce((a, b) => a + b, 0) % values.length,
);
const getDefaultPreferences = (): UserPreferences => {
return {
folders: {
enabled: false,
@@ -34,9 +29,6 @@ const getDefaultPreferences = (user: { email: string }): UserPreferences => {
enabled: false,
sidebarWeb: false,
},
avatar: {
color: values[randomIndex],
},
emailNotifications: {
enabled: true,
albumInvite: true,
@@ -53,8 +45,8 @@ const getDefaultPreferences = (user: { email: string }): UserPreferences => {
};
};
export const getPreferences = (email: string, metadata: UserMetadataItem[]): UserPreferences => {
const preferences = getDefaultPreferences({ email });
export const getPreferences = (metadata: UserMetadataItem[]): UserPreferences => {
const preferences = getDefaultPreferences();
const item = metadata.find(({ key }) => key === UserMetadataKey.PREFERENCES);
const partial = item?.value || {};
for (const property of getKeysDeep(partial)) {
@@ -64,8 +56,8 @@ export const getPreferences = (email: string, metadata: UserMetadataItem[]): Use
return preferences;
};
export const getPreferencesPartial = (user: { email: string }, newPreferences: UserPreferences) => {
const defaultPreferences = getDefaultPreferences(user);
export const getPreferencesPartial = (newPreferences: UserPreferences) => {
const defaultPreferences = getDefaultPreferences();
const partial: DeepPartial<UserPreferences> = {};
for (const property of getKeysDeep(defaultPreferences)) {
const newValue = _.get(newPreferences, property);
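With the email argument removed, reading and persisting preferences is a pure function of the stored metadata; a minimal sketch mirroring the call sites changed above (the repository and `dto` names are assumptions taken from those hunks):
// Sketch, inside an async method: load, merge, and persist only the values that differ from the defaults.
const metadata = await userRepository.getMetadata(userId);
const updated = mergePreferences(getPreferences(metadata), dto);
await userRepository.upsertMetadata(userId, {
  key: UserMetadataKey.PREFERENCES,
  value: getPreferencesPartial(updated),
});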

View File

@@ -1,6 +1,7 @@
import { NestFactory } from '@nestjs/core';
import { NestExpressApplication } from '@nestjs/platform-express';
import { json } from 'body-parser';
import compression from 'compression';
import cookieParser from 'cookie-parser';
import { existsSync } from 'node:fs';
import sirv from 'sirv';
@@ -59,6 +60,7 @@ async function bootstrap() {
);
}
app.use(app.get(ApiService).ssr(excludePaths));
app.use(compression());
const server = await (host ? app.listen(port, host) : app.listen(port));
server.requestTimeout = 24 * 60 * 60 * 1000;