Compare commits


2 Commits

Author | SHA1 | Message | Date
Alex | 091ecaeeaf | Merge branch 'main' into fix/notification-from-native | 2025-10-05 09:39:17 -05:00
shenlong-tanwen | 6fc1bca8c2 | fix: show notification from native | 2025-10-01 05:29:23 +05:30
33 changed files with 224 additions and 401 deletions
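The notification commit removes the Flutter-to-native showNotification channel; instead, the app saves the localized notification title and body ahead of time, and the Android worker reads the stored values and posts the notification itself before the background Flutter engine starts. A minimal, framework-free sketch of that flow follows; the in-memory map stands in for Android SharedPreferences, and only the method names (updateNotificationConfig, getNotificationConfig, saveNotificationMessage) are taken from the diffs below, everything else is illustrative.

```kotlin
// Illustrative sketch only: an in-memory map replaces SharedPreferences so the
// example runs standalone; method names mirror the Pigeon/preferences API in the diff.
class NotificationPrefs {
    private val store = mutableMapOf<String, String>()

    // Called from the foreground host API when Flutter pushes the localized strings
    // (BackgroundWorkerFgHostApi.saveNotificationMessage in the diff).
    fun updateNotificationConfig(title: String, message: String) {
        store["notificationTitle"] = title
        store["notificationMessage"] = message
    }

    // Called by the background worker before the Flutter engine is initialized.
    fun getNotificationConfig(): Pair<String, String> =
        Pair(
            store["notificationTitle"] ?: "Uploading media",
            store["notificationMessage"] ?: "Checking for new assets…",
        )
}

fun main() {
    val prefs = NotificationPrefs()

    // 1. The foreground app saves the translated title/body once, on startup.
    prefs.updateNotificationConfig("Uploading media", "Backing up your photos")

    // 2. At worker startup the stored pair is read and the notification is shown
    //    natively, instead of waiting for a showNotification call from the
    //    background Flutter engine (the channel removed in this change).
    val (title, body) = prefs.getNotificationConfig()
    println("notify: $title - $body")
}
```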

View File

@@ -1006,7 +1006,7 @@ describe('/libraries', () => {
rmSync(`${testAssetDir}/temp/xmp`, { recursive: true, force: true });
});
it('should switch from using file metadata to file.ext.xmp metadata when asset refreshes', async () => {
it('should switch from using file metadata to file.xmp metadata when asset refreshes', async () => {
const library = await utils.createLibrary(admin.accessToken, {
ownerId: admin.userId,
importPaths: [`${testAssetDirInternal}/temp/xmp`],

View File

@@ -136,6 +136,7 @@ private open class BackgroundWorkerPigeonCodec : StandardMessageCodec() {
/** Generated interface from Pigeon that represents a handler of messages from Flutter. */
interface BackgroundWorkerFgHostApi {
fun enable()
fun saveNotificationMessage(title: String, body: String)
fun configure(settings: BackgroundWorkerSettings)
fun disable()
@@ -164,6 +165,25 @@ interface BackgroundWorkerFgHostApi {
channel.setMessageHandler(null)
}
}
run {
val channel = BasicMessageChannel<Any?>(binaryMessenger, "dev.flutter.pigeon.immich_mobile.BackgroundWorkerFgHostApi.saveNotificationMessage$separatedMessageChannelSuffix", codec)
if (api != null) {
channel.setMessageHandler { message, reply ->
val args = message as List<Any?>
val titleArg = args[0] as String
val bodyArg = args[1] as String
val wrapped: List<Any?> = try {
api.saveNotificationMessage(titleArg, bodyArg)
listOf(null)
} catch (exception: Throwable) {
BackgroundWorkerPigeonUtils.wrapError(exception)
}
reply.reply(wrapped)
}
} else {
channel.setMessageHandler(null)
}
}
run {
val channel = BasicMessageChannel<Any?>(binaryMessenger, "dev.flutter.pigeon.immich_mobile.BackgroundWorkerFgHostApi.configure$separatedMessageChannelSuffix", codec)
if (api != null) {
@@ -204,7 +224,6 @@ interface BackgroundWorkerFgHostApi {
/** Generated interface from Pigeon that represents a handler of messages from Flutter. */
interface BackgroundWorkerBgHostApi {
fun onInitialized()
fun showNotification(title: String, content: String)
fun close()
companion object {
@@ -232,25 +251,6 @@ interface BackgroundWorkerBgHostApi {
channel.setMessageHandler(null)
}
}
run {
val channel = BasicMessageChannel<Any?>(binaryMessenger, "dev.flutter.pigeon.immich_mobile.BackgroundWorkerBgHostApi.showNotification$separatedMessageChannelSuffix", codec)
if (api != null) {
channel.setMessageHandler { message, reply ->
val args = message as List<Any?>
val titleArg = args[0] as String
val contentArg = args[1] as String
val wrapped: List<Any?> = try {
api.showNotification(titleArg, contentArg)
listOf(null)
} catch (exception: Throwable) {
BackgroundWorkerPigeonUtils.wrapError(exception)
}
reply.reply(wrapped)
}
} else {
channel.setMessageHandler(null)
}
}
run {
val channel = BasicMessageChannel<Any?>(binaryMessenger, "dev.flutter.pigeon.immich_mobile.BackgroundWorkerBgHostApi.close$separatedMessageChannelSuffix", codec)
if (api != null) {

View File

@@ -73,6 +73,8 @@ class BackgroundWorker(context: Context, params: WorkerParameters) :
NotificationManager.IMPORTANCE_LOW
)
notificationManager.createNotificationChannel(notificationChannel)
val notificationConfig = BackgroundWorkerPreferences(ctx).getNotificationConfig()
showNotification(notificationConfig.first, notificationConfig.second)
loader.ensureInitializationCompleteAsync(ctx, null, Handler(Looper.getMainLooper())) {
engine = FlutterEngine(ctx)
@@ -109,7 +111,7 @@ class BackgroundWorker(context: Context, params: WorkerParameters) :
}
// TODO: Move this to a separate NotificationManager class
override fun showNotification(title: String, content: String) {
private fun showNotification(title: String, content: String) {
val notification = NotificationCompat.Builder(applicationContext, NOTIFICATION_CHANNEL_ID)
.setSmallIcon(R.drawable.notification_icon)
.setOnlyAlertOnce(true)

View File

@@ -20,6 +20,10 @@ class BackgroundWorkerApiImpl(context: Context) : BackgroundWorkerFgHostApi {
enqueueMediaObserver(ctx)
}
override fun saveNotificationMessage(title: String, body: String) {
BackgroundWorkerPreferences(ctx).updateNotificationConfig(title, body)
}
override fun configure(settings: BackgroundWorkerSettings) {
BackgroundWorkerPreferences(ctx).updateSettings(settings)
enqueueMediaObserver(ctx)

View File

@@ -10,9 +10,13 @@ class BackgroundWorkerPreferences(private val ctx: Context) {
private const val SHARED_PREF_MIN_DELAY_KEY = "BackgroundWorker::minDelaySeconds"
private const val SHARED_PREF_REQUIRE_CHARGING_KEY = "BackgroundWorker::requireCharging"
private const val SHARED_PREF_LOCK_KEY = "BackgroundWorker::isLocked"
private const val SHARED_PREF_NOTIF_TITLE_KEY = "BackgroundWorker::notificationTitle"
private const val SHARED_PREF_NOTIF_MSG_KEY = "BackgroundWorker::notificationMessage"
private const val DEFAULT_MIN_DELAY_SECONDS = 30L
private const val DEFAULT_REQUIRE_CHARGING = false
private const val DEFAULT_NOTIF_TITLE = "Uploading media"
private const val DEFAULT_NOTIF_MSG = "Checking for new assets…"
}
private val sp: SharedPreferences by lazy {
@@ -38,6 +42,20 @@ class BackgroundWorkerPreferences(private val ctx: Context) {
)
}
fun updateNotificationConfig(title: String, message: String) {
sp.edit {
putString(SHARED_PREF_NOTIF_TITLE_KEY, title)
putString(SHARED_PREF_NOTIF_MSG_KEY, message)
}
}
fun getNotificationConfig(): Pair<String, String> {
val title =
sp.getString(SHARED_PREF_NOTIF_TITLE_KEY, DEFAULT_NOTIF_TITLE) ?: DEFAULT_NOTIF_TITLE
val message = sp.getString(SHARED_PREF_NOTIF_MSG_KEY, DEFAULT_NOTIF_MSG) ?: DEFAULT_NOTIF_MSG
return Pair(title, message)
}
fun setLocked(paused: Boolean) {
sp.edit {
putBoolean(SHARED_PREF_LOCK_KEY, paused)

View File

@@ -182,6 +182,7 @@ class BackgroundWorkerPigeonCodec: FlutterStandardMessageCodec, @unchecked Senda
/// Generated protocol from Pigeon that represents a handler of messages from Flutter.
protocol BackgroundWorkerFgHostApi {
func enable() throws
func saveNotificationMessage(title: String, body: String) throws
func configure(settings: BackgroundWorkerSettings) throws
func disable() throws
}
@@ -205,6 +206,22 @@ class BackgroundWorkerFgHostApiSetup {
} else {
enableChannel.setMessageHandler(nil)
}
let saveNotificationMessageChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.BackgroundWorkerFgHostApi.saveNotificationMessage\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
saveNotificationMessageChannel.setMessageHandler { message, reply in
let args = message as! [Any?]
let titleArg = args[0] as! String
let bodyArg = args[1] as! String
do {
try api.saveNotificationMessage(title: titleArg, body: bodyArg)
reply(wrapResult(nil))
} catch {
reply(wrapError(error))
}
}
} else {
saveNotificationMessageChannel.setMessageHandler(nil)
}
let configureChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.BackgroundWorkerFgHostApi.configure\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
configureChannel.setMessageHandler { message, reply in
@@ -238,7 +255,6 @@ class BackgroundWorkerFgHostApiSetup {
/// Generated protocol from Pigeon that represents a handler of messages from Flutter.
protocol BackgroundWorkerBgHostApi {
func onInitialized() throws
func showNotification(title: String, content: String) throws
func close() throws
}
@@ -261,22 +277,6 @@ class BackgroundWorkerBgHostApiSetup {
} else {
onInitializedChannel.setMessageHandler(nil)
}
let showNotificationChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.BackgroundWorkerBgHostApi.showNotification\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
showNotificationChannel.setMessageHandler { message, reply in
let args = message as! [Any?]
let titleArg = args[0] as! String
let contentArg = args[1] as! String
do {
try api.showNotification(title: titleArg, content: contentArg)
reply(wrapResult(nil))
} catch {
reply(wrapError(error))
}
}
} else {
showNotificationChannel.setMessageHandler(nil)
}
let closeChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.BackgroundWorkerBgHostApi.close\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
closeChannel.setMessageHandler { _, reply in

View File

@@ -119,10 +119,6 @@ class BackgroundWorker: BackgroundWorkerBgHostApi {
})
}
func showNotification(title: String, content: String) throws {
// No-op on iOS for the time being
}
/**
* Cancels the currently running background task, either due to timeout or external request.
* Sends a cancel signal to the Flutter side and sets up a fallback timer to ensure

View File

@@ -12,6 +12,10 @@ class BackgroundWorkerApiImpl: BackgroundWorkerFgHostApi {
// Android only
}
func saveNotificationMessage(title: String, body: String) throws {
// Android only
}
func disable() throws {
BGTaskScheduler.shared.cancel(taskRequestWithIdentifier: BackgroundWorkerApiImpl.refreshTaskID);
BGTaskScheduler.shared.cancel(taskRequestWithIdentifier: BackgroundWorkerApiImpl.processingTaskID);

View File

@@ -11,8 +11,6 @@ import 'package:immich_mobile/domain/services/log.service.dart';
import 'package:immich_mobile/entities/store.entity.dart';
import 'package:immich_mobile/extensions/network_capability_extensions.dart';
import 'package:immich_mobile/extensions/platform_extensions.dart';
import 'package:immich_mobile/extensions/translate_extensions.dart';
import 'package:immich_mobile/generated/intl_keys.g.dart';
import 'package:immich_mobile/infrastructure/repositories/db.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/logger_db.repository.dart';
import 'package:immich_mobile/platform/background_worker_api.g.dart';
@@ -44,6 +42,9 @@ class BackgroundWorkerFgService {
// TODO: Move this call to native side once old timeline is removed
Future<void> enable() => _foregroundHostApi.enable();
Future<void> saveNotificationMessage(String title, String body) =>
_foregroundHostApi.saveNotificationMessage(title, body);
Future<void> configure({int? minimumDelaySeconds, bool? requireCharging}) => _foregroundHostApi.configure(
BackgroundWorkerSettings(
minimumDelaySeconds:
@@ -112,13 +113,6 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
configureFileDownloaderNotifications();
if (Platform.isAndroid) {
await _backgroundHostApi.showNotification(
IntlKeys.uploading_media.t(),
IntlKeys.backup_background_service_default_notification.t(),
);
}
// Notify the host that the background worker service has been initialized and is ready to use
_backgroundHostApi.onInitialized();
} catch (error, stack) {

View File

@@ -15,7 +15,9 @@ import 'package:immich_mobile/constants/locales.dart';
import 'package:immich_mobile/domain/services/background_worker.service.dart';
import 'package:immich_mobile/entities/store.entity.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/extensions/translate_extensions.dart';
import 'package:immich_mobile/generated/codegen_loader.g.dart';
import 'package:immich_mobile/generated/intl_keys.g.dart';
import 'package:immich_mobile/platform/background_worker_lock_api.g.dart';
import 'package:immich_mobile/providers/app_life_cycle.provider.dart';
import 'package:immich_mobile/providers/asset_viewer/share_intent_upload.provider.dart';
@@ -210,6 +212,14 @@ class ImmichAppState extends ConsumerState<ImmichApp> with WidgetsBindingObserve
if (Store.isBetaTimelineEnabled) {
ref.read(backgroundServiceProvider).disableService();
ref.read(backgroundWorkerFgServiceProvider).enable();
if (Platform.isAndroid) {
ref
.read(backgroundWorkerFgServiceProvider)
.saveNotificationMessage(
IntlKeys.uploading_media.t(),
IntlKeys.backup_background_service_default_notification.t(),
);
}
} else {
ref.read(backgroundWorkerFgServiceProvider).disable();
ref.read(backgroundServiceProvider).resumeServiceIfEnabled();

View File

@@ -138,6 +138,29 @@ class BackgroundWorkerFgHostApi {
}
}
Future<void> saveNotificationMessage(String title, String body) async {
final String pigeonVar_channelName =
'dev.flutter.pigeon.immich_mobile.BackgroundWorkerFgHostApi.saveNotificationMessage$pigeonVar_messageChannelSuffix';
final BasicMessageChannel<Object?> pigeonVar_channel = BasicMessageChannel<Object?>(
pigeonVar_channelName,
pigeonChannelCodec,
binaryMessenger: pigeonVar_binaryMessenger,
);
final Future<Object?> pigeonVar_sendFuture = pigeonVar_channel.send(<Object?>[title, body]);
final List<Object?>? pigeonVar_replyList = await pigeonVar_sendFuture as List<Object?>?;
if (pigeonVar_replyList == null) {
throw _createConnectionError(pigeonVar_channelName);
} else if (pigeonVar_replyList.length > 1) {
throw PlatformException(
code: pigeonVar_replyList[0]! as String,
message: pigeonVar_replyList[1] as String?,
details: pigeonVar_replyList[2],
);
} else {
return;
}
}
Future<void> configure(BackgroundWorkerSettings settings) async {
final String pigeonVar_channelName =
'dev.flutter.pigeon.immich_mobile.BackgroundWorkerFgHostApi.configure$pigeonVar_messageChannelSuffix';
@@ -221,29 +244,6 @@ class BackgroundWorkerBgHostApi {
}
}
Future<void> showNotification(String title, String content) async {
final String pigeonVar_channelName =
'dev.flutter.pigeon.immich_mobile.BackgroundWorkerBgHostApi.showNotification$pigeonVar_messageChannelSuffix';
final BasicMessageChannel<Object?> pigeonVar_channel = BasicMessageChannel<Object?>(
pigeonVar_channelName,
pigeonChannelCodec,
binaryMessenger: pigeonVar_binaryMessenger,
);
final Future<Object?> pigeonVar_sendFuture = pigeonVar_channel.send(<Object?>[title, content]);
final List<Object?>? pigeonVar_replyList = await pigeonVar_sendFuture as List<Object?>?;
if (pigeonVar_replyList == null) {
throw _createConnectionError(pigeonVar_channelName);
} else if (pigeonVar_replyList.length > 1) {
throw PlatformException(
code: pigeonVar_replyList[0]! as String,
message: pigeonVar_replyList[1] as String?,
details: pigeonVar_replyList[2],
);
} else {
return;
}
}
Future<void> close() async {
final String pigeonVar_channelName =
'dev.flutter.pigeon.immich_mobile.BackgroundWorkerBgHostApi.close$pigeonVar_messageChannelSuffix';

View File

@@ -22,6 +22,8 @@ class BackgroundWorkerSettings {
abstract class BackgroundWorkerFgHostApi {
void enable();
void saveNotificationMessage(String title, String body);
void configure(BackgroundWorkerSettings settings);
void disable();
@@ -33,8 +35,6 @@ abstract class BackgroundWorkerBgHostApi {
// required platform channels to notify the native side to start the background upload
void onInitialized();
void showNotification(String title, String content);
// Called from the background flutter engine to request the native side to cleanup
void close();
}

View File

@@ -305,7 +305,7 @@ export class StorageCore {
return this.assetRepository.update({ id, encodedVideoPath: newPath });
}
case AssetPathType.Sidecar: {
return this.assetRepository.upsertFile({ assetId: id, type: AssetFileType.Sidecar, path: newPath });
return this.assetRepository.update({ id, sidecarPath: newPath });
}
case PersonPathType.Face: {
return this.personRepository.update({ id, thumbnailPath: newPath });

View File

@@ -117,6 +117,7 @@ export type Asset = {
originalFileName: string;
originalPath: string;
ownerId: string;
sidecarPath: string | null;
type: AssetType;
};
@@ -301,6 +302,7 @@ export const columns = {
'asset.originalFileName',
'asset.originalPath',
'asset.ownerId',
'asset.sidecarPath',
'asset.type',
],
assetFiles: ['asset_file.id', 'asset_file.path', 'asset_file.type'],

View File

@@ -124,6 +124,7 @@ export type MapAsset = {
originalPath: string;
owner?: User | null;
ownerId: string;
sidecarPath: string | null;
stack?: Stack | null;
stackId: string | null;
tags?: Tag[];

View File

@@ -43,7 +43,6 @@ export enum AssetFileType {
FullSize = 'fullsize',
Preview = 'preview',
Thumbnail = 'thumbnail',
Sidecar = 'sidecar',
}
export enum AlbumUserRole {

View File

@@ -20,23 +20,8 @@ limit
-- AssetJobRepository.getForSidecarWriteJob
select
"id",
"sidecarPath",
"originalPath",
(
select
coalesce(json_agg(agg), '[]')
from
(
select
"asset_file"."id",
"asset_file"."path",
"asset_file"."type"
from
"asset_file"
where
"asset_file"."assetId" = "asset"."id"
and "asset_file"."type" = $1
) as agg
) as "files",
(
select
coalesce(json_agg(agg), '[]')
@@ -54,36 +39,21 @@ select
from
"asset"
where
"asset"."id" = $2::uuid
"asset"."id" = $1::uuid
limit
$3
$2
-- AssetJobRepository.getForSidecarCheckJob
select
"id",
"originalPath",
(
select
coalesce(json_agg(agg), '[]')
from
(
select
"asset_file"."id",
"asset_file"."path",
"asset_file"."type"
from
"asset_file"
where
"asset_file"."assetId" = "asset"."id"
and "asset_file"."type" = $1
) as agg
) as "files"
"sidecarPath",
"originalPath"
from
"asset"
where
"asset"."id" = $2::uuid
"asset"."id" = $1::uuid
limit
$3
$2
-- AssetJobRepository.streamForThumbnailJob
select
@@ -188,6 +158,7 @@ select
"asset"."originalFileName",
"asset"."originalPath",
"asset"."ownerId",
"asset"."sidecarPath",
"asset"."type",
(
select
@@ -202,27 +173,11 @@ select
"asset_face"."assetId" = "asset"."id"
and "asset_face"."deletedAt" is null
) as agg
) as "faces",
(
select
coalesce(json_agg(agg), '[]')
from
(
select
"asset_file"."id",
"asset_file"."path",
"asset_file"."type"
from
"asset_file"
where
"asset_file"."assetId" = "asset"."id"
and "asset_file"."type" = $1
) as agg
) as "files"
) as "faces"
from
"asset"
where
"asset"."id" = $2
"asset"."id" = $1
-- AssetJobRepository.getAlbumThumbnailFiles
select
@@ -350,6 +305,7 @@ select
"asset"."libraryId",
"asset"."ownerId",
"asset"."livePhotoVideoId",
"asset"."sidecarPath",
"asset"."encodedVideoPath",
"asset"."originalPath",
to_json("asset_exif") as "exifInfo",
@@ -460,28 +416,18 @@ select
"asset"."checksum",
"asset"."originalPath",
"asset"."isExternal",
"asset"."sidecarPath",
"asset"."originalFileName",
"asset"."livePhotoVideoId",
"asset"."fileCreatedAt",
"asset_exif"."timeZone",
"asset_exif"."fileSizeInByte",
(
select
"asset_file"."path"
from
"asset_file"
where
"asset_file"."assetId" = "asset"."id"
and "asset_file"."type" = $1
limit
$2
) as "sidecarPath"
"asset_exif"."fileSizeInByte"
from
"asset"
inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
where
"asset"."deletedAt" is null
and "asset"."id" = $3
and "asset"."id" = $1
-- AssetJobRepository.streamForStorageTemplateJob
select
@@ -491,22 +437,12 @@ select
"asset"."checksum",
"asset"."originalPath",
"asset"."isExternal",
"asset"."sidecarPath",
"asset"."originalFileName",
"asset"."livePhotoVideoId",
"asset"."fileCreatedAt",
"asset_exif"."timeZone",
"asset_exif"."fileSizeInByte",
(
select
"asset_file"."path"
from
"asset_file"
where
"asset_file"."assetId" = "asset"."id"
and "asset_file"."type" = $1
limit
$2
) as "sidecarPath"
"asset_exif"."fileSizeInByte"
from
"asset"
inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
@@ -528,15 +464,11 @@ select
from
"asset"
where
not exists (
select
"asset_file"."id"
from
"asset_file"
where
"asset_file"."assetId" = "asset"."id"
and "asset_file"."type" = $1
(
"asset"."sidecarPath" = $1
or "asset"."sidecarPath" is null
)
and "asset"."visibility" != $2
-- AssetJobRepository.streamForDetectFacesJob
select

View File

@@ -39,8 +39,7 @@ export class AssetJobRepository {
return this.db
.selectFrom('asset')
.where('asset.id', '=', asUuid(id))
.select(['id', 'originalPath'])
.select((eb) => withFiles(eb, AssetFileType.Sidecar))
.select(['id', 'sidecarPath', 'originalPath'])
.select((eb) =>
jsonArrayFrom(
eb
@@ -59,8 +58,7 @@ export class AssetJobRepository {
return this.db
.selectFrom('asset')
.where('asset.id', '=', asUuid(id))
.select(['id', 'originalPath'])
.select((eb) => withFiles(eb, AssetFileType.Sidecar))
.select(['id', 'sidecarPath', 'originalPath'])
.limit(1)
.executeTakeFirst();
}
@@ -123,7 +121,6 @@ export class AssetJobRepository {
.selectFrom('asset')
.select(columns.asset)
.select(withFaces)
.select((eb) => withFiles(eb, AssetFileType.Sidecar))
.where('asset.id', '=', id)
.executeTakeFirst();
}
@@ -221,6 +218,7 @@ export class AssetJobRepository {
'asset.libraryId',
'asset.ownerId',
'asset.livePhotoVideoId',
'asset.sidecarPath',
'asset.encodedVideoPath',
'asset.originalPath',
])
@@ -298,19 +296,12 @@ export class AssetJobRepository {
'asset.checksum',
'asset.originalPath',
'asset.isExternal',
'asset.sidecarPath',
'asset.originalFileName',
'asset.livePhotoVideoId',
'asset.fileCreatedAt',
'asset_exif.timeZone',
'asset_exif.fileSizeInByte',
(eb) =>
eb
.selectFrom('asset_file')
.select('asset_file.path')
.whereRef('asset_file.assetId', '=', 'asset.id')
.where('asset_file.type', '=', AssetFileType.Sidecar)
.limit(1)
.as('sidecarPath'),
])
.where('asset.deletedAt', 'is', null);
}
@@ -342,18 +333,9 @@ export class AssetJobRepository {
.selectFrom('asset')
.select(['asset.id'])
.$if(!force, (qb) =>
qb.where((eb) =>
eb.not(
eb.exists(
eb
.selectFrom('asset_file')
.select('asset_file.id')
.whereRef('asset_file.assetId', '=', 'asset.id')
.where('asset_file.type', '=', AssetFileType.Sidecar),
),
),
),
qb.where((eb) => eb.or([eb('asset.sidecarPath', '=', ''), eb('asset.sidecarPath', 'is', null)])),
)
.where('asset.visibility', '!=', AssetVisibility.Hidden)
.stream();
}

View File

@@ -840,14 +840,6 @@ export class AssetRepository {
.execute();
}
async deleteFile(file: Pick<Selectable<AssetFileTable>, 'assetId' | 'type'>): Promise<void> {
await this.db
.deleteFrom('asset_file')
.where('assetId', '=', asUuid(file.assetId))
.where('type', '=', file.type)
.execute();
}
async deleteFiles(files: Pick<Selectable<AssetFileTable>, 'id'>[]): Promise<void> {
if (files.length === 0) {
return;

View File

@@ -414,6 +414,7 @@ export class DatabaseRepository {
.set((eb) => ({
originalPath: eb.fn('REGEXP_REPLACE', ['originalPath', source, target]),
encodedVideoPath: eb.fn('REGEXP_REPLACE', ['encodedVideoPath', source, target]),
sidecarPath: eb.fn('REGEXP_REPLACE', ['sidecarPath', source, target]),
}))
.execute();

View File

@@ -1,24 +0,0 @@
import { Kysely, sql } from 'kysely';
export async function up(db: Kysely<any>): Promise<void> {
await sql`INSERT INTO asset_file ("assetId", path, type)
SELECT
id, "sidecarPath", 'sidecar'
FROM asset
WHERE "sidecarPath" IS NOT NULL;`.execute(db);
await sql`ALTER TABLE "asset" DROP COLUMN "sidecarPath";`.execute(db);
}
export async function down(db: Kysely<any>): Promise<void> {
await sql`ALTER TABLE "asset" ADD "sidecarPath" character varying;`.execute(db);
await sql`
UPDATE asset
SET "sidecarPath" = asset_file.path
FROM asset_file
WHERE asset.id = asset_file."assetId";
`.execute(db);
await sql`DELETE FROM asset_file WHERE type = 'sidecar';`.execute(db);
}

View File

@@ -105,6 +105,9 @@ export class AssetTable {
@Column({ index: true })
originalFileName!: string;
@Column({ nullable: true })
sidecarPath!: string | null;
@Column({ type: 'bytea', nullable: true })
thumbhash!: Buffer | null;

View File

@@ -173,6 +173,7 @@ const assetEntity = Object.freeze({
longitude: 10.703_075,
},
livePhotoVideoId: null,
sidecarPath: null,
} as MapAsset);
const existingAsset = Object.freeze({
@@ -710,22 +711,18 @@ describe(AssetMediaService.name, () => {
expect(mocks.asset.update).toHaveBeenCalledWith(
expect.objectContaining({
id: existingAsset.id,
sidecarPath: null,
originalFileName: 'photo1.jpeg',
originalPath: 'fake_path/photo1.jpeg',
}),
);
expect(mocks.asset.create).toHaveBeenCalledWith(
expect.objectContaining({
sidecarPath: null,
originalFileName: 'existing-filename.jpeg',
originalPath: 'fake_path/asset_1.jpeg',
}),
);
expect(mocks.asset.deleteFile).toHaveBeenCalledWith(
expect.objectContaining({
assetId: existingAsset.id,
type: AssetFileType.Sidecar,
}),
);
expect(mocks.asset.updateAll).toHaveBeenCalledWith([copiedAsset.id], {
deletedAt: expect.any(Date),
@@ -762,13 +759,6 @@ describe(AssetMediaService.name, () => {
deletedAt: expect.any(Date),
status: AssetStatus.Trashed,
});
expect(mocks.asset.upsertFile).toHaveBeenCalledWith(
expect.objectContaining({
assetId: existingAsset.id,
path: sidecarFile.originalPath,
type: AssetFileType.Sidecar,
}),
);
expect(mocks.user.updateUsage).toHaveBeenCalledWith(authStub.user1.user.id, updatedFile.size);
expect(mocks.storage.utimes).toHaveBeenCalledWith(
updatedFile.originalPath,
@@ -798,12 +788,6 @@ describe(AssetMediaService.name, () => {
deletedAt: expect.any(Date),
status: AssetStatus.Trashed,
});
expect(mocks.asset.deleteFile).toHaveBeenCalledWith(
expect.objectContaining({
assetId: existingAsset.id,
type: AssetFileType.Sidecar,
}),
);
expect(mocks.user.updateUsage).toHaveBeenCalledWith(authStub.user1.user.id, updatedFile.size);
expect(mocks.storage.utimes).toHaveBeenCalledWith(
updatedFile.originalPath,
@@ -833,9 +817,6 @@ describe(AssetMediaService.name, () => {
expect(mocks.asset.create).not.toHaveBeenCalled();
expect(mocks.asset.updateAll).not.toHaveBeenCalled();
expect(mocks.asset.upsertFile).not.toHaveBeenCalled();
expect(mocks.asset.deleteFile).not.toHaveBeenCalled();
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.FileDelete,
data: { files: [updatedFile.originalPath, undefined] },

View File

@@ -21,16 +21,7 @@ import {
UploadFieldName,
} from 'src/dtos/asset-media.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import {
AssetFileType,
AssetStatus,
AssetType,
AssetVisibility,
CacheControl,
JobName,
Permission,
StorageFolder,
} from 'src/enum';
import { AssetStatus, AssetType, AssetVisibility, CacheControl, JobName, Permission, StorageFolder } from 'src/enum';
import { AuthRequest } from 'src/middleware/auth.guard';
import { BaseService } from 'src/services/base.service';
import { UploadFile, UploadRequest } from 'src/types';
@@ -363,12 +354,9 @@ export class AssetMediaService extends BaseService {
duration: dto.duration || null,
livePhotoVideoId: null,
sidecarPath: sidecarPath || null,
});
await (sidecarPath
? this.assetRepository.upsertFile({ assetId, path: sidecarPath, type: AssetFileType.Sidecar })
: this.assetRepository.deleteFile({ assetId, type: AssetFileType.Sidecar }));
await this.storageRepository.utimes(file.originalPath, new Date(), new Date(dto.fileModifiedAt));
await this.assetRepository.upsertExif({ assetId, fileSizeInByte: file.size });
await this.jobRepository.queue({
@@ -396,6 +384,7 @@ export class AssetMediaService extends BaseService {
localDateTime: asset.localDateTime,
fileModifiedAt: asset.fileModifiedAt,
livePhotoVideoId: asset.livePhotoVideoId,
sidecarPath: asset.sidecarPath,
});
const { size } = await this.storageRepository.stat(created.originalPath);
@@ -425,6 +414,7 @@ export class AssetMediaService extends BaseService {
visibility: dto.visibility ?? AssetVisibility.Timeline,
livePhotoVideoId: dto.livePhotoVideoId,
originalFileName: dto.filename || file.originalName,
sidecarPath: sidecarFile?.originalPath,
});
if (dto.metadata) {
@@ -432,11 +422,6 @@ export class AssetMediaService extends BaseService {
}
if (sidecarFile) {
await this.assetRepository.upsertFile({
assetId: asset.id,
path: sidecarFile.originalPath,
type: AssetFileType.Sidecar,
});
await this.storageRepository.utimes(sidecarFile.originalPath, new Date(), new Date(dto.fileModifiedAt));
}
await this.storageRepository.utimes(file.originalPath, new Date(), new Date(dto.fileModifiedAt));

View File

@@ -585,8 +585,8 @@ describe(AssetService.name, () => {
'/uploads/user-id/webp/path.ext',
'/uploads/user-id/thumbs/path.jpg',
'/uploads/user-id/fullsize/path.webp',
assetWithFace.encodedVideoPath, // this value is null
undefined, // no sidecar path
assetWithFace.encodedVideoPath,
assetWithFace.sidecarPath,
assetWithFace.originalPath,
],
},

View File

@@ -258,11 +258,11 @@ export class AssetService extends BaseService {
}
}
const { fullsizeFile, previewFile, thumbnailFile, sidecarFile } = getAssetFiles(asset.files ?? []);
const { fullsizeFile, previewFile, thumbnailFile } = getAssetFiles(asset.files ?? []);
const files = [thumbnailFile?.path, previewFile?.path, fullsizeFile?.path, asset.encodedVideoPath];
if (deleteOnDisk) {
files.push(sidecarFile?.path, asset.originalPath);
files.push(asset.sidecarPath, asset.originalPath);
}
await this.jobRepository.queue({ name: JobName.FileDelete, data: { files } });

View File

@@ -3,16 +3,7 @@ import { randomBytes } from 'node:crypto';
import { Stats } from 'node:fs';
import { defaults } from 'src/config';
import { MapAsset } from 'src/dtos/asset-response.dto';
import {
AssetFileType,
AssetType,
AssetVisibility,
ExifOrientation,
ImmichWorker,
JobName,
JobStatus,
SourceType,
} from 'src/enum';
import { AssetType, AssetVisibility, ExifOrientation, ImmichWorker, JobName, JobStatus, SourceType } from 'src/enum';
import { ImmichTags } from 'src/repositories/metadata.repository';
import { firstDateTime, MetadataService } from 'src/services/metadata.service';
import { assetStub } from 'test/fixtures/asset.stub';
@@ -23,24 +14,17 @@ import { tagStub } from 'test/fixtures/tag.stub';
import { factory } from 'test/small.factory';
import { makeStream, newTestService, ServiceMocks } from 'test/utils';
function removeNonSidecarFiles(asset: any) {
return {
...asset,
files: asset.files.filter((file: any) => file.type === AssetFileType.Sidecar),
};
}
const forSidecarJob = (
asset: {
id?: string;
originalPath?: string;
files?: { id: string; type: AssetFileType; path: string }[];
sidecarPath?: string | null;
} = {},
) => {
return {
id: factory.uuid(),
originalPath: '/path/to/IMG_123.jpg',
files: [],
sidecarPath: null,
...asset,
};
};
@@ -181,7 +165,7 @@ describe(MetadataService.name, () => {
it('should handle a date in a sidecar file', async () => {
const originalDate = new Date('2023-11-21T16:13:17.517Z');
const sidecarDate = new Date('2022-01-01T00:00:00.000Z');
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.sidecar));
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.sidecar);
mockReadTags({ CreationDate: originalDate.toISOString() }, { CreationDate: sidecarDate.toISOString() });
await sut.handleMetadataExtraction({ id: assetStub.image.id });
@@ -200,7 +184,7 @@ describe(MetadataService.name, () => {
it('should take the file modification date when missing exif and earlier than creation date', async () => {
const fileCreatedAt = new Date('2022-01-01T00:00:00.000Z');
const fileModifiedAt = new Date('2021-01-01T00:00:00.000Z');
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mocks.storage.stat.mockResolvedValue({
size: 123_456,
mtime: fileModifiedAt,
@@ -226,7 +210,7 @@ describe(MetadataService.name, () => {
it('should take the file creation date when missing exif and earlier than modification date', async () => {
const fileCreatedAt = new Date('2021-01-01T00:00:00.000Z');
const fileModifiedAt = new Date('2022-01-01T00:00:00.000Z');
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mocks.storage.stat.mockResolvedValue({
size: 123_456,
mtime: fileModifiedAt,
@@ -249,7 +233,7 @@ describe(MetadataService.name, () => {
it('should account for the server being in a non-UTC timezone', async () => {
process.env.TZ = 'America/Los_Angeles';
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.sidecar));
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.sidecar);
mockReadTags({ DateTimeOriginal: '2022:01:01 00:00:00' });
await sut.handleMetadataExtraction({ id: assetStub.image.id });
@@ -267,7 +251,7 @@ describe(MetadataService.name, () => {
});
it('should handle lists of numbers', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mocks.storage.stat.mockResolvedValue({
size: 123_456,
mtime: assetStub.image.fileModifiedAt,
@@ -320,7 +304,7 @@ describe(MetadataService.name, () => {
});
it('should apply reverse geocoding', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.withLocation));
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.withLocation);
mocks.systemMetadata.get.mockResolvedValue({ reverseGeocoding: { enabled: true } });
mocks.map.reverseGeocode.mockResolvedValue({ city: 'City', state: 'State', country: 'Country' });
mocks.storage.stat.mockResolvedValue({
@@ -349,7 +333,7 @@ describe(MetadataService.name, () => {
});
it('should discard latitude and longitude on null island', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.withLocation));
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.withLocation);
mockReadTags({
GPSLatitude: 0,
GPSLongitude: 0,
@@ -361,7 +345,7 @@ describe(MetadataService.name, () => {
});
it('should extract tags from TagsList', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mockReadTags({ TagsList: ['Parent'] });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
@@ -371,7 +355,7 @@ describe(MetadataService.name, () => {
});
it('should extract hierarchy from TagsList', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mockReadTags({ TagsList: ['Parent/Child'] });
mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.parentUpsert);
mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.childUpsert);
@@ -391,7 +375,7 @@ describe(MetadataService.name, () => {
});
it('should extract tags from Keywords as a string', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mockReadTags({ Keywords: 'Parent' });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
@@ -401,7 +385,7 @@ describe(MetadataService.name, () => {
});
it('should extract tags from Keywords as a list', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mockReadTags({ Keywords: ['Parent'] });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
@@ -411,7 +395,7 @@ describe(MetadataService.name, () => {
});
it('should extract tags from Keywords as a list with a number', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mockReadTags({ Keywords: ['Parent', 2024] });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
@@ -422,7 +406,7 @@ describe(MetadataService.name, () => {
});
it('should extract hierarchal tags from Keywords', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mockReadTags({ Keywords: 'Parent/Child' });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
@@ -441,7 +425,7 @@ describe(MetadataService.name, () => {
});
it('should ignore Keywords when TagsList is present', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mockReadTags({ Keywords: 'Child', TagsList: ['Parent/Child'] });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
@@ -460,7 +444,7 @@ describe(MetadataService.name, () => {
});
it('should extract hierarchy from HierarchicalSubject', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mockReadTags({ HierarchicalSubject: ['Parent|Child', 'TagA'] });
mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.parentUpsert);
mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.childUpsert);
@@ -481,7 +465,7 @@ describe(MetadataService.name, () => {
});
it('should extract tags from HierarchicalSubject as a list with a number', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mockReadTags({ HierarchicalSubject: ['Parent', 2024] });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
@@ -1518,25 +1502,18 @@ describe(MetadataService.name, () => {
});
it('should detect a new sidecar at .jpg.xmp', async () => {
const asset = forSidecarJob({ originalPath: '/path/to/IMG_123.jpg', files: [] });
const asset = forSidecarJob({ originalPath: '/path/to/IMG_123.jpg' });
mocks.assetJob.getForSidecarCheckJob.mockResolvedValue(asset);
mocks.storage.checkFileExists.mockResolvedValueOnce(true);
await expect(sut.handleSidecarCheck({ id: asset.id })).resolves.toBe(JobStatus.Success);
expect(mocks.asset.upsertFile).toHaveBeenCalledWith({
assetId: asset.id,
type: AssetFileType.Sidecar,
path: '/path/to/IMG_123.jpg.xmp',
});
expect(mocks.asset.update).toHaveBeenCalledWith({ id: asset.id, sidecarPath: `/path/to/IMG_123.jpg.xmp` });
});
it('should detect a new sidecar at .xmp', async () => {
const asset = forSidecarJob({
originalPath: '/path/to/IMG_123.jpg',
files: [],
});
const asset = forSidecarJob({ originalPath: '/path/to/IMG_123.jpg' });
mocks.assetJob.getForSidecarCheckJob.mockResolvedValue(asset);
mocks.storage.checkFileExists.mockResolvedValueOnce(false);
@@ -1544,44 +1521,33 @@ describe(MetadataService.name, () => {
await expect(sut.handleSidecarCheck({ id: asset.id })).resolves.toBe(JobStatus.Success);
expect(mocks.asset.upsertFile).toHaveBeenCalledWith({
assetId: asset.id,
type: AssetFileType.Sidecar,
path: '/path/to/IMG_123.xmp',
});
expect(mocks.asset.update).toHaveBeenCalledWith({ id: asset.id, sidecarPath: '/path/to/IMG_123.xmp' });
});
it('should unset sidecar path if file no longer exist', async () => {
const asset = forSidecarJob({
originalPath: '/path/to/IMG_123.jpg',
files: [{ id: 'sidecar', path: '/path/to/IMG_123.jpg.xmp', type: AssetFileType.Sidecar }],
});
it('should unset sidecar path if file does not exist anymore', async () => {
const asset = forSidecarJob({ originalPath: '/path/to/IMG_123.jpg', sidecarPath: '/path/to/IMG_123.jpg.xmp' });
mocks.assetJob.getForSidecarCheckJob.mockResolvedValue(asset);
mocks.storage.checkFileExists.mockResolvedValue(false);
await expect(sut.handleSidecarCheck({ id: asset.id })).resolves.toBe(JobStatus.Success);
expect(mocks.asset.deleteFile).toHaveBeenCalledWith({ assetId: asset.id, type: AssetFileType.Sidecar });
expect(mocks.asset.update).toHaveBeenCalledWith({ id: asset.id, sidecarPath: null });
});
it('should do nothing if the sidecar file still exists', async () => {
const asset = forSidecarJob({
originalPath: '/path/to/IMG_123.jpg',
files: [{ id: 'sidecar', path: '/path/to/IMG_123.jpg.xmp', type: AssetFileType.Sidecar }],
});
const asset = forSidecarJob({ originalPath: '/path/to/IMG_123.jpg', sidecarPath: '/path/to/IMG_123.jpg' });
mocks.assetJob.getForSidecarCheckJob.mockResolvedValue(asset);
mocks.storage.checkFileExists.mockResolvedValueOnce(true);
await expect(sut.handleSidecarCheck({ id: asset.id })).resolves.toBe(JobStatus.Skipped);
expect(mocks.asset.upsertFile).not.toHaveBeenCalled();
expect(mocks.asset.deleteFile).not.toHaveBeenCalled();
expect(mocks.asset.update).not.toHaveBeenCalled();
});
});
describe('handleSidecarWrite', () => {
it('should skip assets that no longer exist', async () => {
it('should skip assets that do not exist anymore', async () => {
mocks.assetJob.getForSidecarWriteJob.mockResolvedValue(void 0);
await expect(sut.handleSidecarWrite({ id: 'asset-123' })).resolves.toBe(JobStatus.Failed);
expect(mocks.metadata.writeTags).not.toHaveBeenCalled();

View File

@@ -8,10 +8,9 @@ import { constants } from 'node:fs/promises';
import { join, parse } from 'node:path';
import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { StorageCore } from 'src/cores/storage.core';
import { Asset, AssetFace, AssetFile } from 'src/database';
import { Asset, AssetFace } from 'src/database';
import { OnEvent, OnJob } from 'src/decorators';
import {
AssetFileType,
AssetType,
AssetVisibility,
DatabaseLock,
@@ -361,21 +360,17 @@ export class MetadataService extends BaseService {
break;
}
const existingSidecar = asset.files ? asset.files.find((file) => file.type === AssetFileType.Sidecar) : null;
const isChanged = sidecarPath !== existingSidecar?.path;
const isChanged = sidecarPath !== asset.sidecarPath;
this.logger.debug(
`Sidecar check found old=${existingSidecar?.path}, new=${sidecarPath} will ${isChanged ? 'update' : 'do nothing for'} asset ${asset.id}: ${asset.originalPath}`,
`Sidecar check found old=${asset.sidecarPath}, new=${sidecarPath} will ${isChanged ? 'update' : 'do nothing for'} asset ${asset.id}: ${asset.originalPath}`,
);
if (!isChanged) {
return JobStatus.Skipped;
}
await (sidecarPath === null
? this.assetRepository.deleteFile({ assetId: asset.id, type: AssetFileType.Sidecar })
: this.assetRepository.upsertFile({ assetId: asset.id, type: AssetFileType.Sidecar, path: sidecarPath }));
await this.assetRepository.update({ id: asset.id, sidecarPath });
return JobStatus.Success;
}
@@ -400,7 +395,7 @@ export class MetadataService extends BaseService {
const tagsList = (asset.tags || []).map((tag) => tag.value);
const sidecarPath = asset.files[0]?.path || `${asset.originalPath}.xmp`;
const sidecarPath = asset.sidecarPath || `${asset.originalPath}.xmp`;
const exif = _.omitBy(
<Tags>{
Description: description,
@@ -420,20 +415,18 @@ export class MetadataService extends BaseService {
await this.metadataRepository.writeTags(sidecarPath, exif);
if (asset.files.length === 0) {
await this.assetRepository.upsertFile({ assetId: id, type: AssetFileType.Sidecar, path: sidecarPath });
if (!asset.sidecarPath) {
await this.assetRepository.update({ id, sidecarPath });
}
return JobStatus.Success;
}
private getSidecarCandidates({ files, originalPath }: { files: AssetFile[] | null; originalPath: string }) {
private getSidecarCandidates({ sidecarPath, originalPath }: { sidecarPath: string | null; originalPath: string }) {
const candidates: string[] = [];
const existingSidecar = files?.find((file) => file.type === AssetFileType.Sidecar);
if (existingSidecar) {
candidates.push(existingSidecar.path);
if (sidecarPath) {
candidates.push(sidecarPath);
}
const assetPath = parse(originalPath);
@@ -461,17 +454,13 @@ export class MetadataService extends BaseService {
return { width, height };
}
private getExifTags(asset: { originalPath: string; files: AssetFile[]; type: AssetType }): Promise<ImmichTags> {
if (asset.type === AssetType.Image) {
let hasSidecar = false;
if (asset.files && asset.files.length > 0) {
hasSidecar = asset.files.some((file) => file.type === AssetFileType.Sidecar);
}
if (!hasSidecar) {
return this.metadataRepository.readTags(asset.originalPath);
}
private getExifTags(asset: {
originalPath: string;
sidecarPath: string | null;
type: AssetType;
}): Promise<ImmichTags> {
if (!asset.sidecarPath && asset.type === AssetType.Image) {
return this.metadataRepository.readTags(asset.originalPath);
}
return this.mergeExifTags(asset);
@@ -479,16 +468,12 @@ export class MetadataService extends BaseService {
private async mergeExifTags(asset: {
originalPath: string;
files: AssetFile[];
sidecarPath: string | null;
type: AssetType;
}): Promise<ImmichTags> {
if (asset.files && asset.files.length > 1) {
throw new Error(`Asset ${asset.originalPath} has multiple sidecar files`);
}
const [mediaTags, sidecarTags, videoTags] = await Promise.all([
this.metadataRepository.readTags(asset.originalPath),
asset.files && asset.files.length > 0 ? this.metadataRepository.readTags(asset.files[0].path) : null,
asset.sidecarPath ? this.metadataRepository.readTags(asset.sidecarPath) : null,
asset.type === AssetType.Video ? this.getVideoTags(asset.originalPath) : null,
]);

View File

@@ -450,7 +450,7 @@ export type StorageAsset = {
fileCreatedAt: Date;
originalPath: string;
originalFileName: string;
sidecarPath?: string | null;
sidecarPath: string | null;
fileSizeInByte: number | null;
};

View File

@@ -21,7 +21,6 @@ export const getAssetFiles = (files: AssetFile[]) => ({
fullsizeFile: getAssetFile(files, AssetFileType.FullSize),
previewFile: getAssetFile(files, AssetFileType.Preview),
thumbnailFile: getAssetFile(files, AssetFileType.Thumbnail),
sidecarFile: getAssetFile(files, AssetFileType.Sidecar),
});
export const addAssets = async (

View File

@@ -24,18 +24,6 @@ const fullsizeFile: AssetFile = {
path: '/uploads/user-id/fullsize/path.webp',
};
const sidecarFileWithExt: AssetFile = {
id: 'sidecar-with-ext',
type: AssetFileType.Sidecar,
path: '/original/path.ext.xmp',
};
const sidecarFileWithoutExt: AssetFile = {
id: 'sidecar-without-ext',
type: AssetFileType.Sidecar,
path: '/original/path.xmp',
};
const files: AssetFile[] = [fullsizeFile, previewFile, thumbnailFile];
export const stackStub = (stackId: string, assets: (MapAsset & { exifInfo: Exif })[]) => {
@@ -63,6 +51,7 @@ export const assetStub = {
fileCreatedAt: new Date('2022-06-19T23:41:36.910Z'),
originalPath: '/original/path.jpg',
originalFileName: 'IMG_123.jpg',
sidecarPath: null,
fileSizeInByte: 12_345,
...asset,
}),
@@ -92,6 +81,7 @@ export const assetStub = {
sharedLinks: [],
faces: [],
exifInfo: {} as Exif,
sidecarPath: null,
deletedAt: null,
isExternal: false,
duplicateId: null,
@@ -127,6 +117,7 @@ export const assetStub = {
sharedLinks: [],
originalFileName: 'IMG_456.jpg',
faces: [],
sidecarPath: null,
isExternal: false,
exifInfo: {
fileSizeInByte: 123_000,
@@ -166,6 +157,7 @@ export const assetStub = {
sharedLinks: [],
originalFileName: 'asset-id.ext',
faces: [],
sidecarPath: null,
deletedAt: null,
duplicateId: null,
isOffline: false,
@@ -202,6 +194,7 @@ export const assetStub = {
originalFileName: 'asset-id.jpg',
faces: [],
deletedAt: null,
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
exifImageHeight: 1000,
@@ -250,6 +243,7 @@ export const assetStub = {
originalFileName: 'asset-id.jpg',
faces: [],
deletedAt: null,
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
exifImageHeight: 3840,
@@ -291,6 +285,7 @@ export const assetStub = {
sharedLinks: [],
originalFileName: 'asset-id.jpg',
faces: [],
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
exifImageHeight: 3840,
@@ -333,6 +328,7 @@ export const assetStub = {
sharedLinks: [],
originalFileName: 'asset-id.jpg',
faces: [],
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
exifImageHeight: 3840,
@@ -371,6 +367,7 @@ export const assetStub = {
originalFileName: 'asset-id.jpg',
faces: [],
deletedAt: null,
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
exifImageHeight: 3840,
@@ -412,6 +409,7 @@ export const assetStub = {
originalFileName: 'asset-id.jpg',
faces: [],
deletedAt: null,
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
} as Exif,
@@ -450,6 +448,7 @@ export const assetStub = {
sharedLinks: [],
originalFileName: 'asset-id.ext',
faces: [],
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
} as Exif,
@@ -491,6 +490,7 @@ export const assetStub = {
sharedLinks: [],
originalFileName: 'asset-id.ext',
faces: [],
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
} as Exif,
@@ -526,6 +526,7 @@ export const assetStub = {
livePhotoVideoId: null,
sharedLinks: [],
faces: [],
sidecarPath: null,
exifInfo: {
fileSizeInByte: 100_000,
exifImageHeight: 2160,
@@ -572,7 +573,7 @@ export const assetStub = {
files,
faces: [] as AssetFace[],
visibility: AssetVisibility.Timeline,
} as MapAsset & { faces: AssetFace[]; files: AssetFile[] }),
} as MapAsset & { faces: AssetFace[] }),
livePhotoWithOriginalFileName: Object.freeze({
id: 'live-photo-still-asset',
@@ -591,7 +592,7 @@ export const assetStub = {
libraryId: null,
faces: [] as AssetFace[],
visibility: AssetVisibility.Timeline,
} as MapAsset & { faces: AssetFace[]; files: AssetFile[] }),
} as MapAsset & { faces: AssetFace[] }),
withLocation: Object.freeze({
id: 'asset-with-favorite-id',
@@ -604,6 +605,7 @@ export const assetStub = {
deviceId: 'device-id',
checksum: Buffer.from('file hash', 'utf8'),
originalPath: '/original/path.ext',
sidecarPath: null,
type: AssetType.Image,
files: [previewFile],
thumbhash: null,
@@ -650,7 +652,7 @@ export const assetStub = {
thumbhash: null,
checksum: Buffer.from('file hash', 'utf8'),
type: AssetType.Image,
files: [previewFile, sidecarFileWithExt],
files: [previewFile],
encodedVideoPath: null,
createdAt: new Date('2023-02-23T05:06:29.716Z'),
updatedAt: new Date('2023-02-23T05:06:29.716Z'),
@@ -663,6 +665,7 @@ export const assetStub = {
sharedLinks: [],
originalFileName: 'asset-id.ext',
faces: [],
sidecarPath: '/original/path.ext.xmp',
deletedAt: null,
duplicateId: null,
isOffline: false,
@@ -685,7 +688,7 @@ export const assetStub = {
thumbhash: null,
checksum: Buffer.from('file hash', 'utf8'),
type: AssetType.Image,
files: [previewFile, sidecarFileWithoutExt],
files: [previewFile],
encodedVideoPath: null,
createdAt: new Date('2023-02-23T05:06:29.716Z'),
updatedAt: new Date('2023-02-23T05:06:29.716Z'),
@@ -698,6 +701,7 @@ export const assetStub = {
sharedLinks: [],
originalFileName: 'asset-id.ext',
faces: [],
sidecarPath: '/original/path.xmp',
deletedAt: null,
duplicateId: null,
isOffline: false,
@@ -730,6 +734,7 @@ export const assetStub = {
livePhotoVideoId: null,
sharedLinks: [],
faces: [],
sidecarPath: null,
exifInfo: {
fileSizeInByte: 100_000,
} as Exif,
@@ -771,6 +776,7 @@ export const assetStub = {
originalFileName: 'photo.jpg',
faces: [],
deletedAt: null,
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
} as Exif,
@@ -806,6 +812,7 @@ export const assetStub = {
originalFileName: 'asset-id.dng',
faces: [],
deletedAt: null,
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
profileDescription: 'Adobe RGB',
@@ -846,6 +853,7 @@ export const assetStub = {
originalFileName: 'asset-id.hif',
faces: [],
deletedAt: null,
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
profileDescription: 'Adobe RGB',

View File

@@ -36,7 +36,6 @@ export const newAssetRepositoryMock = (): Mocked<RepositoryInterface<AssetReposi
getChangedDeltaSync: vitest.fn(),
upsertFile: vitest.fn(),
upsertFiles: vitest.fn(),
deleteFile: vitest.fn(),
deleteFiles: vitest.fn(),
detectOfflineExternalAssets: vitest.fn(),
filterNewExternalAssetPaths: vitest.fn(),

View File

@@ -14,16 +14,7 @@ import {
} from 'src/database';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import {
AssetFileType,
AssetStatus,
AssetType,
AssetVisibility,
MemoryType,
Permission,
UserMetadataKey,
UserStatus,
} from 'src/enum';
import { AssetStatus, AssetType, AssetVisibility, MemoryType, Permission, UserMetadataKey, UserStatus } from 'src/enum';
import { OnThisDayData, UserMetadataItem } from 'src/types';
import { v4, v7 } from 'uuid';
@@ -314,13 +305,6 @@ const assetSidecarWriteFactory = (asset: Partial<SidecarWriteAsset> = {}) => ({
sidecarPath: '/path/to/original-path.jpg.xmp',
originalPath: '/path/to/original-path.jpg.xmp',
tags: [],
files: [
{
id: newUuid(),
path: '/path/to/original-path.jpg.xmp',
type: AssetFileType.Sidecar,
},
],
...asset,
});