Merge remote-tracking branch 'origin/main' into timeline_events

This commit is contained in:
Min Idzelis
2025-06-15 02:58:04 +00:00
209 changed files with 7000 additions and 6519 deletions
+67
View File
@@ -0,0 +1,67 @@
import { Command, CommandRunner, InquirerService, Question, QuestionSet } from 'nest-commander';
import { CliService } from 'src/services/cli.service';
/**
 * Builds an email prompt bound to the given inquirer instance.
 * The returned function runs the 'prompt-email' question set and
 * resolves with the email address the operator typed.
 */
const prompt = (inquirer: InquirerService) => {
  return async function ask(): Promise<string> {
    const { email } = await inquirer.ask<{ email: string }>('prompt-email', {});
    return email;
  };
};
@Command({
  name: 'grant-admin',
  description: 'Grant admin privileges to a user (by email)',
})
export class GrantAdminCommand extends CommandRunner {
  constructor(
    private service: CliService,
    private inquirer: InquirerService,
  ) {
    super();
  }

  /** Prompts for an email address, then grants that user admin rights. */
  async run(): Promise<void> {
    try {
      const ask = prompt(this.inquirer);
      const email = await ask();
      await this.service.grantAdminAccess(email);
      console.debug('Admin access has been granted to', email);
    } catch (error) {
      // Surface the raw error for debugging, then a human-readable summary.
      console.error(error);
      console.error('Unable to grant admin access to user');
    }
  }
}
@Command({
  name: 'revoke-admin',
  description: 'Revoke admin privileges from a user (by email)',
})
export class RevokeAdminCommand extends CommandRunner {
  constructor(
    private service: CliService,
    private inquirer: InquirerService,
  ) {
    super();
  }

  /** Prompts for an email address, then removes that user's admin rights. */
  async run(): Promise<void> {
    try {
      const ask = prompt(this.inquirer);
      const email = await ask();
      await this.service.revokeAdminAccess(email);
      console.debug('Admin access has been revoked from', email);
    } catch (error) {
      // Surface the raw error for debugging, then a human-readable summary.
      console.error(error);
      console.error('Unable to revoke admin access from user');
    }
  }
}
/** Question set that reads a user email address from the terminal. */
@QuestionSet({ name: 'prompt-email' })
export class PromptEmailQuestion {
  @Question({
    message: 'Please enter the user email: ',
    name: 'email',
  })
  parseEmail(input: string) {
    // Pass the raw input through unchanged; the command decides what to do with it.
    return input;
  }
}
+4
View File
@@ -1,3 +1,4 @@
import { GrantAdminCommand, PromptEmailQuestion, RevokeAdminCommand } from 'src/commands/grant-admin';
import { ListUsersCommand } from 'src/commands/list-users.command';
import { DisableOAuthLogin, EnableOAuthLogin } from 'src/commands/oauth-login';
import { DisablePasswordLoginCommand, EnablePasswordLoginCommand } from 'src/commands/password-login';
@@ -7,10 +8,13 @@ import { VersionCommand } from 'src/commands/version.command';
export const commands = [
ResetAdminPasswordCommand,
PromptPasswordQuestions,
PromptEmailQuestion,
EnablePasswordLoginCommand,
DisablePasswordLoginCommand,
EnableOAuthLogin,
DisableOAuthLogin,
ListUsersCommand,
VersionCommand,
GrantAdminCommand,
RevokeAdminCommand,
];
+2 -2
View File
@@ -89,7 +89,7 @@ export interface SystemConfig {
buttonText: string;
clientId: string;
clientSecret: string;
defaultStorageQuota: number;
defaultStorageQuota: number | null;
enabled: boolean;
issuerUrl: string;
mobileOverrideEnabled: boolean;
@@ -253,7 +253,7 @@ export const defaults = Object.freeze<SystemConfig>({
buttonText: 'Login with OAuth',
clientId: '',
clientSecret: '',
defaultStorageQuota: 0,
defaultStorageQuota: null,
enabled: false,
issuerUrl: '',
mobileOverrideEnabled: false,
+15 -1
View File
@@ -1,9 +1,11 @@
import { Controller, Get } from '@nestjs/common';
import { Body, Controller, Delete, Get, Param } from '@nestjs/common';
import { ApiTags } from '@nestjs/swagger';
import { BulkIdsDto } from 'src/dtos/asset-ids.response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { DuplicateResponseDto } from 'src/dtos/duplicate.dto';
import { Auth, Authenticated } from 'src/middleware/auth.guard';
import { DuplicateService } from 'src/services/duplicate.service';
import { UUIDParamDto } from 'src/validation';
@ApiTags('Duplicates')
@Controller('duplicates')
@@ -15,4 +17,16 @@ export class DuplicateController {
getAssetDuplicates(@Auth() auth: AuthDto): Promise<DuplicateResponseDto[]> {
return this.service.getDuplicates(auth);
}
// DELETE /duplicates — bulk-dismiss: clears the duplicate flag for every
// duplicate group id in the request body.
@Delete()
@Authenticated()
deleteDuplicates(@Auth() auth: AuthDto, @Body() dto: BulkIdsDto): Promise<void> {
return this.service.deleteAll(auth, dto);
}
// DELETE /duplicates/:id — dismisses a single duplicate group.
@Delete(':id')
@Authenticated()
deleteDuplicate(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise<void> {
return this.service.delete(auth, id);
}
}
+4
View File
@@ -46,6 +46,10 @@ class AssetMediaBase {
@IsString()
duration?: string;
@Optional()
@IsString()
filename?: string;
// The properties below are added to correctly generate the API docs
// and client SDKs. Validation should be handled in the controller.
@ApiProperty({ type: 'string', format: 'binary' })
-8
View File
@@ -1,14 +1,6 @@
import { IsNotEmpty } from 'class-validator';
import { AssetResponseDto } from 'src/dtos/asset-response.dto';
import { ValidateUUID } from 'src/validation';
export class DuplicateResponseDto {
duplicateId!: string;
assets!: AssetResponseDto[];
}
export class ResolveDuplicatesDto {
@IsNotEmpty()
@ValidateUUID({ each: true })
assetIds!: string[];
}
+3 -1
View File
@@ -360,7 +360,9 @@ class SystemConfigOAuthDto {
@IsNumber()
@Min(0)
defaultStorageQuota!: number;
@Optional({ nullable: true })
@ApiProperty({ type: 'integer', format: 'int64' })
defaultStorageQuota!: number | null;
@ValidateBoolean()
enabled!: boolean;
+8
View File
@@ -106,6 +106,10 @@ export class UserAdminCreateDto {
@Optional()
@IsBoolean()
notify?: boolean;
@Optional()
@IsBoolean()
isAdmin?: boolean;
}
export class UserAdminUpdateDto {
@@ -145,6 +149,10 @@ export class UserAdminUpdateDto {
@Min(0)
@ApiProperty({ type: 'integer', format: 'int64' })
quotaSizeInBytes?: number | null;
@Optional()
@IsBoolean()
isAdmin?: boolean;
}
export class UserAdminDeleteDto {
+8 -3
View File
@@ -23,9 +23,9 @@ from
) as "dummy"
) as "user" on true
left join "assets" on "assets"."id" = "activity"."assetId"
and "assets"."deletedAt" is null
where
"activity"."albumId" = $1
and "assets"."deletedAt" is null
order by
"activity"."createdAt" asc
@@ -78,5 +78,10 @@ from
where
"activity"."assetId" = $3
and "activity"."albumId" = $4
and "assets"."deletedAt" is null
and "assets"."visibility" != 'locked'
and (
(
"assets"."deletedAt" is null
and "assets"."visibility" != 'locked'
)
or "assets"."id" is null
)
-70
View File
@@ -185,16 +185,6 @@ set
where
"id" = any ($2::uuid[])
-- AssetRepository.updateDuplicates
update "assets"
set
"duplicateId" = $1
where
(
"duplicateId" = any ($2::uuid[])
or "id" = any ($3::uuid[])
)
-- AssetRepository.getByChecksum
select
"assets".*
@@ -349,66 +339,6 @@ select
from
"agg"
-- AssetRepository.getDuplicates
with
"duplicates" as (
select
"assets"."duplicateId",
json_agg(
"asset"
order by
"assets"."localDateTime" asc
) as "assets"
from
"assets"
left join lateral (
select
"assets".*,
"exif" as "exifInfo"
from
"exif"
where
"exif"."assetId" = "assets"."id"
) as "asset" on true
where
"assets"."visibility" in ('archive', 'timeline')
and "assets"."ownerId" = $1::uuid
and "assets"."duplicateId" is not null
and "assets"."deletedAt" is null
and "assets"."stackId" is null
group by
"assets"."duplicateId"
),
"unique" as (
select
"duplicateId"
from
"duplicates"
where
json_array_length("assets") = $2
),
"removed_unique" as (
update "assets"
set
"duplicateId" = $3
from
"unique"
where
"assets"."duplicateId" = "unique"."duplicateId"
)
select
*
from
"duplicates"
where
not exists (
select
from
"unique"
where
"unique"."duplicateId" = "duplicates"."duplicateId"
)
-- AssetRepository.getAssetIdByCity
with
"cities" as (
+119
View File
@@ -0,0 +1,119 @@
-- NOTE: This file is auto generated by ./sql-generator
-- DuplicateRepository.getAll
with
"duplicates" as (
select
"assets"."duplicateId",
json_agg(
"asset"
order by
"assets"."localDateTime" asc
) as "assets"
from
"assets"
left join lateral (
select
"assets".*,
"exif" as "exifInfo"
from
"exif"
where
"exif"."assetId" = "assets"."id"
) as "asset" on true
where
"assets"."visibility" in ('archive', 'timeline')
and "assets"."ownerId" = $1::uuid
and "assets"."duplicateId" is not null
and "assets"."deletedAt" is null
and "assets"."stackId" is null
group by
"assets"."duplicateId"
),
"unique" as (
select
"duplicateId"
from
"duplicates"
where
json_array_length("assets") = $2
),
"removed_unique" as (
update "assets"
set
"duplicateId" = $3
from
"unique"
where
"assets"."duplicateId" = "unique"."duplicateId"
)
select
*
from
"duplicates"
where
not exists (
select
from
"unique"
where
"unique"."duplicateId" = "duplicates"."duplicateId"
)
-- DuplicateRepository.delete
update "assets"
set
"duplicateId" = $1
where
"ownerId" = $2
and "duplicateId" = $3
-- DuplicateRepository.deleteAll
update "assets"
set
"duplicateId" = $1
where
"ownerId" = $2
and "duplicateId" in ($3)
-- DuplicateRepository.search
begin
set
local vchordrq.probes = 1
with
"cte" as (
select
"assets"."id" as "assetId",
"assets"."duplicateId",
smart_search.embedding <=> $1 as "distance"
from
"assets"
inner join "smart_search" on "assets"."id" = "smart_search"."assetId"
where
"assets"."visibility" in ('archive', 'timeline')
and "assets"."ownerId" = any ($2::uuid[])
and "assets"."deletedAt" is null
and "assets"."type" = $3
and "assets"."id" != $4::uuid
and "assets"."stackId" is null
order by
"distance"
limit
$5
)
select
*
from
"cte"
where
"cte"."distance" <= $6
commit
-- DuplicateRepository.merge
update "assets"
set
where
(
"duplicateId" = any ($1::uuid[])
or "id" = any ($2::uuid[])
)
-33
View File
@@ -102,39 +102,6 @@ offset
$8
commit
-- SearchRepository.searchDuplicates
begin
set
local vchordrq.probes = 1
with
"cte" as (
select
"assets"."id" as "assetId",
"assets"."duplicateId",
smart_search.embedding <=> $1 as "distance"
from
"assets"
inner join "smart_search" on "assets"."id" = "smart_search"."assetId"
where
"assets"."visibility" in ('archive', 'timeline')
and "assets"."ownerId" = any ($2::uuid[])
and "assets"."deletedAt" is null
and "assets"."type" = $3
and "assets"."id" != $4::uuid
and "assets"."stackId" is null
order by
"distance"
limit
$5
)
select
*
from
"cte"
where
"cte"."distance" <= $6
commit
-- SearchRepository.searchFaces
begin
set
@@ -36,12 +36,13 @@ export class ActivityRepository {
(join) => join.onTrue(),
)
.select((eb) => eb.fn.toJson('user').as('user'))
.leftJoin('assets', (join) => join.onRef('assets.id', '=', 'activity.assetId').on('assets.deletedAt', 'is', null))
.leftJoin('assets', 'assets.id', 'activity.assetId')
.$if(!!userId, (qb) => qb.where('activity.userId', '=', userId!))
.$if(assetId === null, (qb) => qb.where('assetId', 'is', null))
.$if(!!assetId, (qb) => qb.where('activity.assetId', '=', assetId!))
.$if(!!albumId, (qb) => qb.where('activity.albumId', '=', albumId!))
.$if(isLiked !== undefined, (qb) => qb.where('activity.isLiked', '=', isLiked!))
.where('assets.deletedAt', 'is', null)
.orderBy('activity.createdAt', 'asc')
.execute();
}
@@ -84,8 +85,12 @@ export class ActivityRepository {
.leftJoin('assets', 'assets.id', 'activity.assetId')
.$if(!!assetId, (qb) => qb.where('activity.assetId', '=', assetId!))
.where('activity.albumId', '=', albumId)
.where('assets.deletedAt', 'is', null)
.where('assets.visibility', '!=', sql.lit(AssetVisibility.LOCKED))
.where(({ or, and, eb }) =>
or([
and([eb('assets.deletedAt', 'is', null), eb('assets.visibility', '!=', sql.lit(AssetVisibility.LOCKED))]),
eb('assets.id', 'is', null),
]),
)
.executeTakeFirstOrThrow();
return result;
+8 -100
View File
@@ -5,7 +5,6 @@ import { InjectKysely } from 'nestjs-kysely';
import { Stack } from 'src/database';
import { AssetFiles, AssetJobStatus, Assets, DB, Exif } from 'src/db';
import { Chunked, ChunkedArray, DummyValue, GenerateSql } from 'src/decorators';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { AssetFileType, AssetOrder, AssetStatus, AssetType, AssetVisibility } from 'src/enum';
import {
anyUuid,
@@ -29,13 +28,13 @@ import { globToSqlPattern } from 'src/utils/misc';
export type AssetStats = Record<AssetType, number>;
export interface AssetStatsOptions {
interface AssetStatsOptions {
isFavorite?: boolean;
isTrashed?: boolean;
visibility?: AssetVisibility;
}
export interface LivePhotoSearchOptions {
interface LivePhotoSearchOptions {
ownerId: string;
libraryId?: string | null;
livePhotoCID: string;
@@ -43,16 +42,12 @@ export interface LivePhotoSearchOptions {
type: AssetType;
}
export enum WithProperty {
SIDECAR = 'sidecar',
}
export enum TimeBucketSize {
DAY = 'DAY',
MONTH = 'MONTH',
}
export interface AssetBuilderOptions {
interface AssetBuilderOptions {
isFavorite?: boolean;
isTrashed?: boolean;
isDuplicate?: boolean;
@@ -81,43 +76,31 @@ export interface MonthDay {
month: number;
}
export interface AssetExploreFieldOptions {
interface AssetExploreFieldOptions {
maxFields: number;
minAssetsPerField: number;
}
export interface AssetFullSyncOptions {
interface AssetFullSyncOptions {
ownerId: string;
lastId?: string;
updatedUntil: Date;
limit: number;
}
export interface AssetDeltaSyncOptions {
interface AssetDeltaSyncOptions {
userIds: string[];
updatedAfter: Date;
limit: number;
}
export interface AssetUpdateDuplicateOptions {
targetDuplicateId: string | null;
assetIds: string[];
duplicateIds: string[];
}
export interface UpsertFileOptions {
assetId: string;
type: AssetFileType;
path: string;
}
export interface AssetGetByChecksumOptions {
interface AssetGetByChecksumOptions {
ownerId: string;
checksum: Buffer;
libraryId?: string;
}
export interface GetByIdsRelations {
interface GetByIdsRelations {
exifInfo?: boolean;
faces?: { person?: boolean; withDeleted?: boolean };
files?: boolean;
@@ -128,16 +111,6 @@ export interface GetByIdsRelations {
tags?: boolean;
}
export interface DuplicateGroup {
duplicateId: string;
assets: MapAsset[];
}
export interface DayOfYearAssets {
yearsAgo: number;
assets: MapAsset[];
}
@Injectable()
export class AssetRepository {
constructor(@InjectKysely() private db: Kysely<DB>) {}
@@ -418,19 +391,6 @@ export class AssetRepository {
await this.db.updateTable('assets').set(options).where('libraryId', '=', asUuid(libraryId)).execute();
}
@GenerateSql({
params: [{ targetDuplicateId: DummyValue.UUID, duplicateIds: [DummyValue.UUID], assetIds: [DummyValue.UUID] }],
})
async updateDuplicates(options: AssetUpdateDuplicateOptions): Promise<void> {
await this.db
.updateTable('assets')
.set({ duplicateId: options.targetDuplicateId })
.where((eb) =>
eb.or([eb('duplicateId', '=', anyUuid(options.duplicateIds)), eb('id', '=', anyUuid(options.assetIds))]),
)
.execute();
}
async update(asset: Updateable<Assets> & { id: string }) {
const value = omitBy(asset, isUndefined);
delete value.id;
@@ -694,58 +654,6 @@ export class AssetRepository {
return query.executeTakeFirstOrThrow();
}
@GenerateSql({ params: [DummyValue.UUID] })
getDuplicates(userId: string) {
return (
this.db
.with('duplicates', (qb) =>
qb
.selectFrom('assets')
.$call(withDefaultVisibility)
.leftJoinLateral(
(qb) =>
qb
.selectFrom('exif')
.selectAll('assets')
.select((eb) => eb.table('exif').as('exifInfo'))
.whereRef('exif.assetId', '=', 'assets.id')
.as('asset'),
(join) => join.onTrue(),
)
.select('assets.duplicateId')
.select((eb) =>
eb.fn.jsonAgg('asset').orderBy('assets.localDateTime', 'asc').$castTo<MapAsset[]>().as('assets'),
)
.where('assets.ownerId', '=', asUuid(userId))
.where('assets.duplicateId', 'is not', null)
.$narrowType<{ duplicateId: NotNull }>()
.where('assets.deletedAt', 'is', null)
.where('assets.stackId', 'is', null)
.groupBy('assets.duplicateId'),
)
.with('unique', (qb) =>
qb
.selectFrom('duplicates')
.select('duplicateId')
.where((eb) => eb(eb.fn('json_array_length', ['assets']), '=', 1)),
)
.with('removed_unique', (qb) =>
qb
.updateTable('assets')
.set({ duplicateId: null })
.from('unique')
.whereRef('assets.duplicateId', '=', 'unique.duplicateId'),
)
.selectFrom('duplicates')
.selectAll()
// TODO: compare with filtering by json_array_length > 1
.where(({ not, exists }) =>
not(exists((eb) => eb.selectFrom('unique').whereRef('unique.duplicateId', '=', 'duplicates.duplicateId'))),
)
.execute()
);
}
@GenerateSql({ params: [DummyValue.UUID, { minAssetsPerField: 5, maxFields: 12 }] })
async getAssetIdByCity(ownerId: string, { minAssetsPerField, maxFields }: AssetExploreFieldOptions) {
const items = await this.db
@@ -0,0 +1,158 @@
import { Injectable } from '@nestjs/common';
import { Kysely, NotNull, sql } from 'kysely';
import { InjectKysely } from 'nestjs-kysely';
import { DB } from 'src/db';
import { Chunked, DummyValue, GenerateSql } from 'src/decorators';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { AssetType, VectorIndex } from 'src/enum';
import { probes } from 'src/repositories/database.repository';
import { anyUuid, asUuid, withDefaultVisibility } from 'src/utils/database';
// Parameters for the embedding-similarity search used by duplicate detection.
interface DuplicateSearch {
  assetId: string; // asset being checked; excluded from the results
  embedding: string; // serialized vector compared via the `<=>` distance operator
  maxDistance: number; // distance cutoff; only rows with distance <= this are returned
  type: AssetType;
  userIds: string[];
}

// Options for merging assets and existing groups into one duplicate group.
interface DuplicateMergeOptions {
  targetId: string | null; // duplicateId to assign; null clears the flag
  assetIds: string[]; // individual assets to pull into the group
  sourceIds: string[]; // existing duplicateIds whose members are absorbed
}
@Injectable()
export class DuplicateRepository {
  constructor(@InjectKysely() private db: Kysely<DB>) {}

  /**
   * Returns every duplicate group (duplicateId + member assets, each with exif)
   * owned by the user. As a side effect, groups that have collapsed to a single
   * member are dissolved (their duplicateId cleared via the `removed_unique` CTE)
   * and excluded from the result.
   */
  @GenerateSql({ params: [DummyValue.UUID] })
  getAll(userId: string) {
    return (
      this.db
        .with('duplicates', (qb) =>
          qb
            .selectFrom('assets')
            .$call(withDefaultVisibility)
            .leftJoinLateral(
              (qb) =>
                qb
                  .selectFrom('exif')
                  .selectAll('assets')
                  .select((eb) => eb.table('exif').as('exifInfo'))
                  .whereRef('exif.assetId', '=', 'assets.id')
                  .as('asset'),
              (join) => join.onTrue(),
            )
            .select('assets.duplicateId')
            .select((eb) =>
              eb.fn.jsonAgg('asset').orderBy('assets.localDateTime', 'asc').$castTo<MapAsset[]>().as('assets'),
            )
            .where('assets.ownerId', '=', asUuid(userId))
            .where('assets.duplicateId', 'is not', null)
            .$narrowType<{ duplicateId: NotNull }>()
            .where('assets.deletedAt', 'is', null)
            .where('assets.stackId', 'is', null)
            .groupBy('assets.duplicateId'),
        )
        .with('unique', (qb) =>
          qb
            .selectFrom('duplicates')
            .select('duplicateId')
            .where((eb) => eb(eb.fn('json_array_length', ['assets']), '=', 1)),
        )
        .with('removed_unique', (qb) =>
          qb
            .updateTable('assets')
            .set({ duplicateId: null })
            .from('unique')
            .whereRef('assets.duplicateId', '=', 'unique.duplicateId'),
        )
        .selectFrom('duplicates')
        .selectAll()
        // TODO: compare with filtering by json_array_length > 1
        .where(({ not, exists }) =>
          not(exists((eb) => eb.selectFrom('unique').whereRef('unique.duplicateId', '=', 'duplicates.duplicateId'))),
        )
        .execute()
    );
  }

  /** Dismisses one duplicate group by clearing duplicateId on the user's assets in it. */
  @GenerateSql({ params: [DummyValue.UUID, DummyValue.UUID] })
  async delete(userId: string, id: string): Promise<void> {
    await this.db
      .updateTable('assets')
      .set({ duplicateId: null })
      .where('ownerId', '=', userId)
      .where('duplicateId', '=', id)
      .execute();
  }

  /** Dismisses multiple duplicate groups at once; ids are chunked to bound query size. */
  @GenerateSql({ params: [DummyValue.UUID, [DummyValue.UUID]] })
  @Chunked({ paramIndex: 1 })
  async deleteAll(userId: string, ids: string[]): Promise<void> {
    // An empty IN () list is invalid SQL — nothing to do anyway.
    if (ids.length === 0) {
      return;
    }
    await this.db
      .updateTable('assets')
      .set({ duplicateId: null })
      .where('ownerId', '=', userId)
      .where('duplicateId', 'in', ids)
      .execute();
  }

  /**
   * Finds assets whose embedding is within maxDistance of the given one,
   * scoped to the given owners/type and excluding the probe asset itself.
   * Runs in a transaction so the vchordrq.probes setting stays local.
   */
  @GenerateSql({
    params: [
      {
        assetId: DummyValue.UUID,
        embedding: DummyValue.VECTOR,
        maxDistance: 0.6,
        type: AssetType.IMAGE,
        userIds: [DummyValue.UUID],
      },
    ],
  })
  search({ assetId, embedding, maxDistance, type, userIds }: DuplicateSearch) {
    return this.db.transaction().execute(async (trx) => {
      await sql`set local vchordrq.probes = ${sql.lit(probes[VectorIndex.CLIP])}`.execute(trx);
      return await trx
        .with('cte', (qb) =>
          qb
            .selectFrom('assets')
            .$call(withDefaultVisibility)
            .select([
              'assets.id as assetId',
              'assets.duplicateId',
              sql<number>`smart_search.embedding <=> ${embedding}`.as('distance'),
            ])
            .innerJoin('smart_search', 'assets.id', 'smart_search.assetId')
            .where('assets.ownerId', '=', anyUuid(userIds))
            .where('assets.deletedAt', 'is', null)
            .where('assets.type', '=', type)
            .where('assets.id', '!=', asUuid(assetId))
            .where('assets.stackId', 'is', null)
            .orderBy('distance')
            .limit(64),
        )
        .selectFrom('cte')
        .selectAll()
        .where('cte.distance', '<=', maxDistance as number)
        .execute();
    });
  }

  /**
   * Merges assets and existing groups into one duplicate group: assets matched
   * by id (assetIds) or by current group (sourceIds) get duplicateId = targetId.
   * Pass targetId = null to clear the flag instead.
   */
  @GenerateSql({
    // Keys must match DuplicateMergeOptions — the previous dummy params used the
    // old option names (targetDuplicateId/duplicateIds), so the generated SQL had
    // an empty SET clause.
    params: [{ targetId: DummyValue.UUID, sourceIds: [DummyValue.UUID], assetIds: [DummyValue.UUID] }],
  })
  async merge(options: DuplicateMergeOptions): Promise<void> {
    await this.db
      .updateTable('assets')
      .set({ duplicateId: options.targetId })
      .where((eb) =>
        eb.or([eb('duplicateId', '=', anyUuid(options.sourceIds)), eb('id', '=', anyUuid(options.assetIds))]),
      )
      .execute();
  }
}
+2
View File
@@ -11,6 +11,7 @@ import { CronRepository } from 'src/repositories/cron.repository';
import { CryptoRepository } from 'src/repositories/crypto.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { DownloadRepository } from 'src/repositories/download.repository';
import { DuplicateRepository } from 'src/repositories/duplicate.repository';
import { EmailRepository } from 'src/repositories/email.repository';
import { EventRepository } from 'src/repositories/event.repository';
import { JobRepository } from 'src/repositories/job.repository';
@@ -56,6 +57,7 @@ export const repositories = [
CryptoRepository,
DatabaseRepository,
DownloadRepository,
DuplicateRepository,
EmailRepository,
EventRepository,
JobRepository,
+1 -2
View File
@@ -179,9 +179,8 @@ export class PersonRepository {
)
.$if(!options?.closestFaceAssetId, (qb) =>
qb
.orderBy(sql`NULLIF(person.name, '') is null`, 'asc')
.orderBy((eb) => eb.fn.count('asset_faces.assetId'), 'desc')
.orderBy(sql`NULLIF(person.name, '')`, sql`asc nulls last`)
.orderBy(sql`NULLIF(person.name, '') asc nulls last`)
.orderBy('person.createdAt'),
)
.$if(!options?.withHidden, (qb) => qb.where('person.isHidden', '=', false))
+1 -49
View File
@@ -7,7 +7,7 @@ import { DummyValue, GenerateSql } from 'src/decorators';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { AssetStatus, AssetType, AssetVisibility, VectorIndex } from 'src/enum';
import { probes } from 'src/repositories/database.repository';
import { anyUuid, asUuid, searchAssetBuilder, withDefaultVisibility } from 'src/utils/database';
import { anyUuid, searchAssetBuilder } from 'src/utils/database';
import { paginationHelper } from 'src/utils/pagination';
import { isValidInteger } from 'src/validation';
@@ -135,14 +135,6 @@ export interface FaceEmbeddingSearch extends SearchEmbeddingOptions {
minBirthDate?: Date | null;
}
export interface AssetDuplicateSearch {
assetId: string;
embedding: string;
maxDistance: number;
type: AssetType;
userIds: string[];
}
export interface FaceSearchResult {
distance: number;
id: string;
@@ -275,46 +267,6 @@ export class SearchRepository {
});
}
@GenerateSql({
params: [
{
assetId: DummyValue.UUID,
embedding: DummyValue.VECTOR,
maxDistance: 0.6,
type: AssetType.IMAGE,
userIds: [DummyValue.UUID],
},
],
})
searchDuplicates({ assetId, embedding, maxDistance, type, userIds }: AssetDuplicateSearch) {
return this.db.transaction().execute(async (trx) => {
await sql`set local vchordrq.probes = ${sql.lit(probes[VectorIndex.CLIP])}`.execute(trx);
return await trx
.with('cte', (qb) =>
qb
.selectFrom('assets')
.$call(withDefaultVisibility)
.select([
'assets.id as assetId',
'assets.duplicateId',
sql<number>`smart_search.embedding <=> ${embedding}`.as('distance'),
])
.innerJoin('smart_search', 'assets.id', 'smart_search.assetId')
.where('assets.ownerId', '=', anyUuid(userIds))
.where('assets.deletedAt', 'is', null)
.where('assets.type', '=', type)
.where('assets.id', '!=', asUuid(assetId))
.where('assets.stackId', 'is', null)
.orderBy('distance')
.limit(64),
)
.selectFrom('cte')
.selectAll()
.where('cte.distance', '<=', maxDistance as number)
.execute();
});
}
@GenerateSql({
params: [
{
+1 -1
View File
@@ -418,7 +418,7 @@ export class AssetMediaService extends BaseService {
duration: dto.duration || null,
visibility: dto.visibility ?? AssetVisibility.TIMELINE,
livePhotoVideoId: dto.livePhotoVideoId,
originalFileName: file.originalName,
originalFileName: dto.filename || file.originalName,
sidecarPath: sidecarFile?.originalPath,
});
+2 -2
View File
@@ -704,7 +704,7 @@ describe(AuthService.name, () => {
expect(mocks.user.create).toHaveBeenCalledWith(expect.objectContaining({ quotaSizeInBytes: 1_073_741_824 }));
});
it('should not set quota for 0 quota', async () => {
it('should set quota for 0 quota', async () => {
const user = factory.userAdmin({ oauthId: 'oauth-id' });
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.oauthWithStorageQuota);
@@ -726,7 +726,7 @@ describe(AuthService.name, () => {
email: user.email,
name: ' ',
oauthId: user.oauthId,
quotaSizeInBytes: null,
quotaSizeInBytes: 0,
storageLabel: null,
});
});
+1 -1
View File
@@ -300,7 +300,7 @@ export class AuthService extends BaseService {
name: userName,
email: profile.email,
oauthId: profile.sub,
quotaSizeInBytes: storageQuota * HumanReadableSize.GiB || null,
quotaSizeInBytes: storageQuota === null ? null : storageQuota * HumanReadableSize.GiB,
storageLabel: storageLabel || null,
});
}
+2
View File
@@ -18,6 +18,7 @@ import { CronRepository } from 'src/repositories/cron.repository';
import { CryptoRepository } from 'src/repositories/crypto.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { DownloadRepository } from 'src/repositories/download.repository';
import { DuplicateRepository } from 'src/repositories/duplicate.repository';
import { EmailRepository } from 'src/repositories/email.repository';
import { EventRepository } from 'src/repositories/event.repository';
import { JobRepository } from 'src/repositories/job.repository';
@@ -71,6 +72,7 @@ export class BaseService {
protected cryptoRepository: CryptoRepository,
protected databaseRepository: DatabaseRepository,
protected downloadRepository: DownloadRepository,
protected duplicateRepository: DuplicateRepository,
protected emailRepository: EmailRepository,
protected eventRepository: EventRepository,
protected jobRepository: JobRepository,
+18
View File
@@ -37,6 +37,24 @@ export class CliService extends BaseService {
await this.updateConfig(config);
}
/**
 * Promotes the user with the given email to admin.
 * @throws Error if no user with that email exists.
 */
async grantAdminAccess(email: string): Promise<void> {
const user = await this.userRepository.getByEmail(email);
if (!user) {
throw new Error('User does not exist');
}
await this.userRepository.update(user.id, { isAdmin: true });
}
/**
 * Removes admin privileges from the user with the given email.
 * @throws Error if no user with that email exists.
 */
async revokeAdminAccess(email: string): Promise<void> {
const user = await this.userRepository.getByEmail(email);
if (!user) {
throw new Error('User does not exist');
}
await this.userRepository.update(user.id, { isAdmin: false });
}
async disableOAuthLogin(): Promise<void> {
const config = await this.getConfig({ withCache: false });
config.oauth.enabled = false;
+14 -12
View File
@@ -38,7 +38,7 @@ describe(SearchService.name, () => {
describe('getDuplicates', () => {
it('should get duplicates', async () => {
mocks.asset.getDuplicates.mockResolvedValue([
mocks.duplicateRepository.getAll.mockResolvedValue([
{
duplicateId: 'duplicate-id',
assets: [assetStub.image, assetStub.image],
@@ -218,25 +218,26 @@ describe(SearchService.name, () => {
it('should search for duplicates and update asset with duplicateId', async () => {
mocks.assetJob.getForSearchDuplicatesJob.mockResolvedValue(hasEmbedding);
mocks.search.searchDuplicates.mockResolvedValue([
mocks.duplicateRepository.search.mockResolvedValue([
{ assetId: assetStub.image.id, distance: 0.01, duplicateId: null },
]);
mocks.duplicateRepository.merge.mockResolvedValue();
const expectedAssetIds = [assetStub.image.id, hasEmbedding.id];
const result = await sut.handleSearchDuplicates({ id: hasEmbedding.id });
expect(result).toBe(JobStatus.SUCCESS);
expect(mocks.search.searchDuplicates).toHaveBeenCalledWith({
expect(mocks.duplicateRepository.search).toHaveBeenCalledWith({
assetId: hasEmbedding.id,
embedding: hasEmbedding.embedding,
maxDistance: 0.01,
type: hasEmbedding.type,
userIds: [hasEmbedding.ownerId],
});
expect(mocks.asset.updateDuplicates).toHaveBeenCalledWith({
expect(mocks.duplicateRepository.merge).toHaveBeenCalledWith({
assetIds: expectedAssetIds,
targetDuplicateId: expect.any(String),
duplicateIds: [],
targetId: expect.any(String),
sourceIds: [],
});
expect(mocks.asset.upsertJobStatus).toHaveBeenCalledWith(
...expectedAssetIds.map((assetId) => ({ assetId, duplicatesDetectedAt: expect.any(Date) })),
@@ -246,23 +247,24 @@ describe(SearchService.name, () => {
it('should use existing duplicate ID among matched duplicates', async () => {
const duplicateId = hasDupe.duplicateId;
mocks.assetJob.getForSearchDuplicatesJob.mockResolvedValue(hasEmbedding);
mocks.search.searchDuplicates.mockResolvedValue([{ assetId: hasDupe.id, distance: 0.01, duplicateId }]);
mocks.duplicateRepository.search.mockResolvedValue([{ assetId: hasDupe.id, distance: 0.01, duplicateId }]);
mocks.duplicateRepository.merge.mockResolvedValue();
const expectedAssetIds = [hasEmbedding.id];
const result = await sut.handleSearchDuplicates({ id: hasEmbedding.id });
expect(result).toBe(JobStatus.SUCCESS);
expect(mocks.search.searchDuplicates).toHaveBeenCalledWith({
expect(mocks.duplicateRepository.search).toHaveBeenCalledWith({
assetId: hasEmbedding.id,
embedding: hasEmbedding.embedding,
maxDistance: 0.01,
type: hasEmbedding.type,
userIds: [hasEmbedding.ownerId],
});
expect(mocks.asset.updateDuplicates).toHaveBeenCalledWith({
expect(mocks.duplicateRepository.merge).toHaveBeenCalledWith({
assetIds: expectedAssetIds,
targetDuplicateId: duplicateId,
duplicateIds: [],
targetId: duplicateId,
sourceIds: [],
});
expect(mocks.asset.upsertJobStatus).toHaveBeenCalledWith(
...expectedAssetIds.map((assetId) => ({ assetId, duplicatesDetectedAt: expect.any(Date) })),
@@ -271,7 +273,7 @@ describe(SearchService.name, () => {
it('should remove duplicateId if no duplicates found and asset has duplicateId', async () => {
mocks.assetJob.getForSearchDuplicatesJob.mockResolvedValue(hasDupe);
mocks.search.searchDuplicates.mockResolvedValue([]);
mocks.duplicateRepository.search.mockResolvedValue([]);
const result = await sut.handleSearchDuplicates({ id: hasDupe.id });
+21 -3
View File
@@ -1,6 +1,7 @@
import { Injectable } from '@nestjs/common';
import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { OnJob } from 'src/decorators';
import { BulkIdsDto } from 'src/dtos/asset-ids.response.dto';
import { mapAsset } from 'src/dtos/asset-response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { DuplicateResponseDto } from 'src/dtos/duplicate.dto';
@@ -13,13 +14,21 @@ import { isDuplicateDetectionEnabled } from 'src/utils/misc';
@Injectable()
export class DuplicateService extends BaseService {
async getDuplicates(auth: AuthDto): Promise<DuplicateResponseDto[]> {
const duplicates = await this.assetRepository.getDuplicates(auth.user.id);
const duplicates = await this.duplicateRepository.getAll(auth.user.id);
return duplicates.map(({ duplicateId, assets }) => ({
duplicateId,
assets: assets.map((asset) => mapAsset(asset, { auth })),
}));
}
async delete(auth: AuthDto, id: string): Promise<void> {
await this.duplicateRepository.delete(auth.user.id, id);
}
async deleteAll(auth: AuthDto, dto: BulkIdsDto) {
await this.duplicateRepository.deleteAll(auth.user.id, dto.ids);
}
@OnJob({ name: JobName.QUEUE_DUPLICATE_DETECTION, queue: QueueName.DUPLICATE_DETECTION })
async handleQueueSearchDuplicates({ force }: JobOf<JobName.QUEUE_DUPLICATE_DETECTION>): Promise<JobStatus> {
const { machineLearning } = await this.getConfig({ withCache: false });
@@ -69,12 +78,17 @@ export class DuplicateService extends BaseService {
return JobStatus.SKIPPED;
}
if (asset.visibility === AssetVisibility.LOCKED) {
this.logger.debug(`Asset ${id} is locked, skipping`);
return JobStatus.SKIPPED;
}
if (!asset.embedding) {
this.logger.debug(`Asset ${id} is missing embedding`);
return JobStatus.FAILED;
}
const duplicateAssets = await this.searchRepository.searchDuplicates({
const duplicateAssets = await this.duplicateRepository.search({
assetId: asset.id,
embedding: asset.embedding,
maxDistance: machineLearning.duplicateDetection.maxDistance,
@@ -117,7 +131,11 @@ export class DuplicateService extends BaseService {
.map((duplicate) => duplicate.assetId);
assetIdsToUpdate.push(asset.id);
await this.assetRepository.updateDuplicates({ targetDuplicateId, assetIds: assetIdsToUpdate, duplicateIds });
await this.duplicateRepository.merge({
targetId: targetDuplicateId,
assetIds: assetIdsToUpdate,
sourceIds: duplicateIds,
});
return assetIdsToUpdate;
}
}
+5 -1
View File
@@ -24,7 +24,6 @@ import {
VideoCodec,
VideoContainer,
} from 'src/enum';
import { UpsertFileOptions } from 'src/repositories/asset.repository';
import { BoundingBox } from 'src/repositories/machine-learning.repository';
import { BaseService } from 'src/services/base.service';
import {
@@ -42,6 +41,11 @@ import { getAssetFiles } from 'src/utils/asset.util';
import { BaseConfig, ThumbnailConfig } from 'src/utils/media';
import { mimeTypes } from 'src/utils/mime-types';
import { clamp, isFaceImportEnabled, isFacialRecognitionEnabled } from 'src/utils/misc';
/**
 * Options for inserting or updating a derived file record for an asset
 * (moved here from asset.repository after the import at the top of this
 * file was removed).
 */
interface UpsertFileOptions {
  // Id of the asset the file belongs to.
  assetId: string;
  // Kind of derived file (AssetFileType is a project enum — e.g. thumbnail/preview; confirm against enum).
  type: AssetFileType;
  // Filesystem path where the file is stored.
  path: string;
}
@Injectable()
export class MediaService extends BaseService {
@@ -112,7 +112,7 @@ const updatedConfig = Object.freeze<SystemConfig>({
buttonText: 'Login with OAuth',
clientId: '',
clientSecret: '',
defaultStorageQuota: 0,
defaultStorageQuota: null,
enabled: false,
issuerUrl: '',
mobileOverrideEnabled: false,
@@ -345,6 +345,11 @@ describe(SystemConfigService.name, () => {
{ should: 'with a trailing slash', externalDomain: 'https://demo.immich.app/' },
{ should: 'without a trailing slash', externalDomain: 'https://demo.immich.app' },
{ should: 'with a port', externalDomain: 'https://demo.immich.app:42', result: 'https://demo.immich.app:42' },
{
should: 'with basic auth',
externalDomain: 'https://user:password@example.com:123',
result: 'https://user:password@example.com:123',
},
];
for (const { should, externalDomain, result } of externalDomainTests) {
@@ -4,6 +4,7 @@ import { JobName, UserStatus } from 'src/enum';
import { UserAdminService } from 'src/services/user-admin.service';
import { authStub } from 'test/fixtures/auth.stub';
import { userStub } from 'test/fixtures/user.stub';
import { factory } from 'test/small.factory';
import { newTestService, ServiceMocks } from 'test/utils';
import { describe } from 'vitest';
@@ -116,7 +117,7 @@ describe(UserAdminService.name, () => {
// Deleting an unknown user id must reject with BadRequestException and must
// not reach the repository delete call.
it('should throw error if user could not be found', async () => {
  mocks.user.get.mockResolvedValue(void 0);

  // NOTE(review): diff residue left both the old (userStub.admin.id) and new
  // ('not-found') assertions here; the post-image keeps the literal id.
  await expect(sut.delete(authStub.admin, 'not-found', {})).rejects.toThrowError(BadRequestException);

  expect(mocks.user.delete).not.toHaveBeenCalled();
});
@@ -124,8 +125,11 @@ describe(UserAdminService.name, () => {
await expect(sut.delete(authStub.admin, userStub.admin.id, {})).rejects.toBeInstanceOf(ForbiddenException);
});
it('should require the auth user be an admin', async () => {
await expect(sut.delete(authStub.user1, authStub.admin.user.id, {})).rejects.toBeInstanceOf(ForbiddenException);
it('should not allow deleting own account', async () => {
const user = factory.userAdmin({ isAdmin: false });
const auth = factory.auth({ user });
mocks.user.get.mockResolvedValue(user);
await expect(sut.delete(auth, user.id, {})).rejects.toBeInstanceOf(ForbiddenException);
expect(mocks.user.delete).not.toHaveBeenCalled();
});
+7 -3
View File
@@ -52,6 +52,10 @@ export class UserAdminService extends BaseService {
async update(auth: AuthDto, id: string, dto: UserAdminUpdateDto): Promise<UserAdminResponseDto> {
const user = await this.findOrFail(id, {});
if (dto.isAdmin !== undefined && dto.isAdmin !== auth.user.isAdmin && auth.user.id === id) {
throw new BadRequestException('Admin status can only be changed by another admin');
}
if (dto.quotaSizeInBytes && user.quotaSizeInBytes !== dto.quotaSizeInBytes) {
await this.userRepository.syncUsage(id);
}
@@ -89,9 +93,9 @@ export class UserAdminService extends BaseService {
async delete(auth: AuthDto, id: string, dto: UserAdminDeleteDto): Promise<UserAdminResponseDto> {
const { force } = dto;
const { isAdmin } = await this.findOrFail(id, {});
if (isAdmin) {
throw new ForbiddenException('Cannot delete admin user');
await this.findOrFail(id, {});
if (auth.user.id === id) {
throw new ForbiddenException('Cannot delete your own account');
}
await this.albumRepository.softDeleteAll(id);
+8 -1
View File
@@ -116,7 +116,14 @@ const buildConfig = async (repos: RepoDeps) => {
const config = instanceToPlain(instance) as SystemConfig;
if (config.server.externalDomain.length > 0) {
config.server.externalDomain = new URL(config.server.externalDomain).origin;
const domain = new URL(config.server.externalDomain);
let externalDomain = domain.origin;
if (domain.password && domain.username) {
externalDomain = `${domain.protocol}//${domain.username}:${domain.password}@${domain.host}`;
}
config.server.externalDomain = externalDomain;
}
if (!config.ffmpeg.acceptedVideoCodecs.includes(config.ffmpeg.targetVideoCodec)) {