Compare commits

...

7 Commits

Author SHA1 Message Date
midzelis
9b881cbf1a feat: replace makefile with dev script 2025-09-03 11:39:21 +00:00
renovate[bot]
183a285584 chore(deps): update base-image to v202509021104 (major) (#21513)
chore(deps): update base-image to v202509021104

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-09-02 21:20:21 -05:00
Alex
5ce946bb5b fix: null check (#21536) 2025-09-02 19:21:41 -05:00
shenlong
674faf2e57 fix: local sync task never runs on iOS (#21491)
* fix: local sync task never runs on iOS

* chore: rename ios register method

* refactor from using dart callback to dart entrypoint + more logs

* check if file exists before hashing

* reschedule local sync task

* chore: rename background worker logger

* refactor: move file exists check inside repo

---------

Co-authored-by: shenlong-tanwen <139912620+shalong-tanwen@users.noreply.github.com>
2025-09-03 02:05:58 +05:30
Alex
4f7702c6bf fix: iOS portrait photo saved as jpg extension (#21388)
remove bad merged settings

remove console log
2025-09-02 14:26:12 -05:00
bo0tzz
28edf5664d fix: set specific AssetUpload permission on checkBulkUpload endpoint (#21470)
* fix: set specific AssetUpload permission on checkBulkUpload endpoint

Fixes #21456

* fix: make open-api
2025-09-02 14:21:14 -05:00
shenlong
ec2f94cae8 fix: handle datetime outside the valid range supported by dart (#21526)
* fix: handle datetime outside the valid range supported by dart

* add tests for tryFromSecondsSinceEpoch

---------

Co-authored-by: shenlong-tanwen <139912620+shalong-tanwen@users.noreply.github.com>
2025-09-02 14:20:49 -05:00
29 changed files with 643 additions and 233 deletions

1
.prettierignore Normal file
View File

@@ -0,0 +1 @@
dev.ts

228
dev.ts Executable file
View File

@@ -0,0 +1,228 @@
#!/bin/sh
':' //; exec node --disable-warning=ExperimentalWarning --experimental-strip-types "$0" "$@"
':' /*
@echo off
node "%~dpnx0" %*
exit /b %errorlevel%
*/
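// Polyglot launcher note: under a POSIX shell, ':' is a no-op builtin, so the line after the
// shebang simply re-execs this file with node (with type stripping enabled); Node parses that
// same line as a string literal followed by a line comment. The ':' /* ... */ block above
// appears to be the equivalent wrapper for cmd.exe, which Node treats as a block comment.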
import { execSync, type ExecSyncOptions, spawn } from 'node:child_process';
import { Dir, Dirent, existsSync, mkdirSync, opendirSync, readFileSync, rmSync } from 'node:fs';
import { platform } from 'node:os';
import { join, resolve } from 'node:path';
import { parseArgs } from 'node:util';
// Utilities
const tryRun = <T>(fn: () => T, onSuccess?: (result: T) => void, onError?: (e: unknown) => void, onFinally?: (result: T | undefined) => void): T | void => {
let result: T | undefined = undefined;
try {
result = fn();
onSuccess?.(result);
return result;
} catch (e: unknown) {
onError?.(e);
} finally {
onFinally?.(result);
}
};
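// Illustrative use of tryRun (mirrors the ./docker/.env read further down): run a throwing
// operation and fall back to undefined instead of crashing.
//   const uploadLocation = tryRun(
//     () => readFileSync('./docker/.env', 'utf-8').match(/^UPLOAD_LOCATION=(.+)$/m)?.[1]?.trim(),
//     undefined,
//     e => err('Could not read ./docker/.env:', e),
//   );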
const FALSE = () => false;
const exit0 = () => process.exit(0);
const exit1 = () => process.exit(1);
const log = (msg: string) => { console.log(msg); return msg; };
const err = (msg: string, e?: unknown) => { console.log(msg, e); return undefined; };
const errExit = (msg: string, e?: unknown) => () => { console.log(msg, e); exit1(); };
const exec = (cmd: string, opts: ExecSyncOptions = { stdio: 'inherit' }) => execSync(cmd, opts);
const isWSL = () => platform() === 'linux' &&
tryRun(() => readFileSync('/proc/version', 'utf-8').toLowerCase().includes('microsoft'), undefined, FALSE);
const isWindows = () => platform() === 'win32';
const supportsChown = () => !isWindows() || isWSL();
const onExit = (handler: () => void) => {
['SIGINT', 'SIGTERM'].forEach(sig => process.on(sig, () => { handler(); exit0(); }));
if (isWindows()) process.on('SIGBREAK', () => { handler(); exit0(); });
};
// Directory operations
const mkdirs = (dirs: string[]) => dirs.forEach(dir =>
tryRun(
() => mkdirSync(dir, { recursive: true }),
() => log(`Created directory: ${dir}`),
e => err(`Error creating directory ${dir}:`, e)
));
const chown = (dirs: string[], uid: string, gid: string) => {
if (!supportsChown()) {
log('Skipping ownership changes on Windows (not supported outside WSL)');
return;
}
for (const dir of dirs) {
tryRun(
() => exec(`chown -R ${uid}:${gid} "${dir}"`),
undefined,
errExit(`Permission denied when changing owner of volumes. Try running 'sudo ./dev.ts prepare-volumes' first.`)
);
}
};
const findAndRemove = (path: string, target: string) => {
if (!existsSync(path)) return;
const removeLoop = (dir: Dir) => {
let dirent: Dirent | null;
while ((dirent = dir.readSync()) !== null) {
if (!dirent.isDirectory()) continue;
const itemPath = join(path, dirent.name);
if (dirent.name === target) {
log(` Removing: ${itemPath}`);
rmSync(itemPath, { recursive: true, force: true });
} else {
findAndRemove(itemPath, target);
}
}
};
tryRun(() => opendirSync(path), removeLoop, errExit(`Error opening directory ${path}`), (dir) => dir?.closeSync());
};
// Docker DSL
const docker = {
compose: (file: string) => ({
up: (opts?: string[]) => spawn('docker', ['compose', '-f', file, 'up', ...(opts || [])], {
stdio: 'inherit',
env: { ...process.env, COMPOSE_BAKE: 'true' },
shell: true
}),
down: () => tryRun(() => exec(`docker compose -f ${file} down --remove-orphans`))
}),
isAvailable: () => !!tryRun(() => exec('docker --version', { stdio: 'ignore' }), undefined, FALSE)
};
// Environment configuration
const envConfig = {
volumeDirs: [
'./.pnpm-store', './web/.svelte-kit', './web/node_modules', './web/coverage',
'./e2e/node_modules', './docs/node_modules', './server/node_modules',
'./open-api/typescript-sdk/node_modules', './.github/node_modules',
'./node_modules', './cli/node_modules'
],
cleanDirs: ['node_modules', 'dist', 'build', '.svelte-kit', 'coverage', '.pnpm-store'],
composeFiles: {
dev: './docker/docker-compose.dev.yml',
e2e: './e2e/docker-compose.yml',
prod: './docker/docker-compose.prod.yml'
},
getEnv: () => ({
uid: process.env.UID || '1000',
gid: process.env.GID || '1000'
})
};
// Commands
const commands = {
'prepare-volumes': () => {
log('Preparing volumes...');
const { uid, gid } = envConfig.getEnv();
mkdirs(envConfig.volumeDirs);
chown(envConfig.volumeDirs, uid, gid);
// Handle UPLOAD_LOCATION
const uploadLocation = tryRun(() => {
const content = readFileSync('./docker/.env', 'utf-8');
const match = content.match(/^UPLOAD_LOCATION=(.+)$/m);
return match?.[1]?.trim();
});
if (uploadLocation) {
const targetPath = resolve('docker', uploadLocation);
mkdirs([targetPath]);
if (supportsChown()) {
tryRun(
() => {
// First chown the uploadLocation directory itself
exec(`chown ${uid}:${gid} "${targetPath}"`);
// Then chown all contents except postgres folder (using -prune to skip it entirely)
exec(`find "${targetPath}" -mindepth 1 -name postgres -prune -o -exec chown ${uid}:${gid} {} +`);
},
undefined,
errExit(`Permission denied when changing owner of volumes. Try running 'sudo ./dev.ts prepare-volumes' first.`)
);
} else {
log('Skipping ownership changes on Windows (not supported outside WSL)');
}
}
log('Volume preparation completed.');
},
clean: () => {
log('Starting clean process...');
envConfig.cleanDirs.forEach(dir => {
log(`Removing ${dir} directories...`);
findAndRemove('.', dir);
});
docker.isAvailable() &&
log('Stopping and removing Docker containers...') &&
docker.compose(envConfig.composeFiles.dev).down();
log('Clean process completed.');
},
down: (opts: { e2e?: boolean; prod?: boolean }) => {
const type = opts.prod ? 'prod' : opts.e2e ? 'e2e' : 'dev';
const file = envConfig.composeFiles[type];
log(`\nStopping ${type} environment...`);
docker.compose(file).down();
},
up: (opts: { e2e?: boolean; prod?: boolean }) => {
commands['prepare-volumes']();
const type = opts.prod ? 'prod' : opts.e2e ? 'e2e' : 'dev';
const file = envConfig.composeFiles[type];
const args = opts.prod ? ['--build', '-V', '--remove-orphans'] : ['--remove-orphans'];
onExit(() => commands.down(opts));
log(`Starting ${type} environment...`);
const proc = docker.compose(file).up(args);
proc.on('error', errExit('Failed to start docker compose:'));
proc.on('exit', (code: number) => { commands.down(opts); code ? exit1() : exit0(); });
}
};
// Main
const { positionals, values } = parseArgs({
args: process.argv.slice(2),
allowPositionals: true,
options: {
e2e: { type: 'boolean', default: false },
prod: { type: 'boolean', default: false }
}
});
const command = positionals[0];
const handler = commands[command as keyof typeof commands];
if (!handler) {
log('Usage: ./dev.ts [clean|prepare-volumes|up [--e2e] [--prod]|down [--e2e] [--prod]]');
exit1();
}
handler(values);
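Typical usage, going by the commands defined above: run `sudo ./dev.ts prepare-volumes` once so the bind-mounted directories exist with the right owner, then `./dev.ts up` (add `--prod` or `--e2e` to pick the other compose files) to start the stack and `./dev.ts down` to stop it; `./dev.ts clean` removes node_modules/build artifacts across the repo and, if Docker is available, shuts down the dev containers.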

View File

@@ -18,6 +18,7 @@ services:
container_name: immich_server
command: ['immich-dev']
image: immich-server-dev:latest
pull_policy: never
# extends:
# file: hwaccel.transcoding.yml
# service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
@@ -80,6 +81,7 @@ services:
immich-web:
container_name: immich_web
image: immich-web-dev:latest
pull_policy: never
# Needed for rootless docker setup, see https://github.com/moby/moby/issues/45919
# user: 0:0
user: '${UID:-1000}:${GID:-1000}'
@@ -120,6 +122,7 @@ services:
immich-machine-learning:
container_name: immich_machine_learning
image: immich-machine-learning-dev:latest
pull_policy: never
# extends:
# file: hwaccel.ml.yml
# service: cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference
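The added `pull_policy: never` entries tell Compose not to try pulling `immich-server-dev:latest` and the other dev-tagged images from a registry; those tags are produced by the local build, so Compose should always use the local images.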

View File

@@ -62,7 +62,7 @@ private open class BackgroundWorkerPigeonCodec : StandardMessageCodec() {
/** Generated interface from Pigeon that represents a handler of messages from Flutter. */
interface BackgroundWorkerFgHostApi {
fun enableSyncWorker()
fun enableUploadWorker(callbackHandle: Long)
fun enableUploadWorker()
fun disableUploadWorker()
companion object {
@@ -93,11 +93,9 @@ interface BackgroundWorkerFgHostApi {
run {
val channel = BasicMessageChannel<Any?>(binaryMessenger, "dev.flutter.pigeon.immich_mobile.BackgroundWorkerFgHostApi.enableUploadWorker$separatedMessageChannelSuffix", codec)
if (api != null) {
channel.setMessageHandler { message, reply ->
val args = message as List<Any?>
val callbackHandleArg = args[0] as Long
channel.setMessageHandler { _, reply ->
val wrapped: List<Any?> = try {
api.enableUploadWorker(callbackHandleArg)
api.enableUploadWorker()
listOf(null)
} catch (exception: Throwable) {
BackgroundWorkerPigeonUtils.wrapError(exception)
@@ -130,6 +128,7 @@ interface BackgroundWorkerFgHostApi {
/** Generated interface from Pigeon that represents a handler of messages from Flutter. */
interface BackgroundWorkerBgHostApi {
fun onInitialized()
fun close()
companion object {
/** The codec used by BackgroundWorkerBgHostApi. */
@@ -156,6 +155,22 @@ interface BackgroundWorkerBgHostApi {
channel.setMessageHandler(null)
}
}
run {
val channel = BasicMessageChannel<Any?>(binaryMessenger, "dev.flutter.pigeon.immich_mobile.BackgroundWorkerBgHostApi.close$separatedMessageChannelSuffix", codec)
if (api != null) {
channel.setMessageHandler { _, reply ->
val wrapped: List<Any?> = try {
api.close()
listOf(null)
} catch (exception: Throwable) {
BackgroundWorkerPigeonUtils.wrapError(exception)
}
reply.reply(wrapped)
}
} else {
channel.setMessageHandler(null)
}
}
}
}
}

View File

@@ -11,9 +11,8 @@ import com.google.common.util.concurrent.ListenableFuture
import com.google.common.util.concurrent.SettableFuture
import io.flutter.FlutterInjector
import io.flutter.embedding.engine.FlutterEngine
import io.flutter.embedding.engine.dart.DartExecutor.DartCallback
import io.flutter.embedding.engine.dart.DartExecutor
import io.flutter.embedding.engine.loader.FlutterLoader
import io.flutter.view.FlutterCallbackInformation
private const val TAG = "BackgroundWorker"
@@ -58,25 +57,6 @@ class BackgroundWorker(context: Context, params: WorkerParameters) :
loader.ensureInitializationCompleteAsync(ctx, null, Handler(Looper.getMainLooper())) {
engine = FlutterEngine(ctx)
// Retrieve the callback handle stored by the main Flutter app
// This handle points to the Flutter function that should be executed in the background
val callbackHandle =
ctx.getSharedPreferences(BackgroundWorkerApiImpl.SHARED_PREF_NAME, Context.MODE_PRIVATE)
.getLong(BackgroundWorkerApiImpl.SHARED_PREF_CALLBACK_HANDLE, 0L)
if (callbackHandle == 0L) {
// Without a valid callback handle, we cannot start the Flutter background execution
complete(Result.failure())
return@ensureInitializationCompleteAsync
}
// Start the Flutter engine with the specified callback as the entry point
val callback = FlutterCallbackInformation.lookupCallbackInformation(callbackHandle)
if (callback == null) {
complete(Result.failure())
return@ensureInitializationCompleteAsync
}
// Register custom plugins
MainActivity.registerPlugins(ctx, engine!!)
flutterApi =
@@ -86,8 +66,12 @@ class BackgroundWorker(context: Context, params: WorkerParameters) :
api = this
)
engine!!.dartExecutor.executeDartCallback(
DartCallback(ctx.assets, loader.findAppBundlePath(), callback)
engine!!.dartExecutor.executeDartEntrypoint(
DartExecutor.DartEntrypoint(
loader.findAppBundlePath(),
"package:immich_mobile/domain/services/background_worker.service.dart",
"backgroundSyncNativeEntrypoint"
)
)
}
@@ -109,14 +93,7 @@ class BackgroundWorker(context: Context, params: WorkerParameters) :
}
}
/**
* Called when the system has to stop this worker because constraints are
* no longer met or the system needs resources for more important tasks
* This is also called when the worker has been explicitly cancelled or replaced
*/
override fun onStopped() {
Log.d(TAG, "About to stop BackupWorker")
override fun close() {
if (isComplete) {
return
}
@@ -134,6 +111,16 @@ class BackgroundWorker(context: Context, params: WorkerParameters) :
}, 5000)
}
/**
* Called when the system has to stop this worker because constraints are
* no longer met or the system needs resources for more important tasks
* This is also called when the worker has been explicitly cancelled or replaced
*/
override fun onStopped() {
Log.d(TAG, "About to stop BackupWorker")
close()
}
private fun handleHostResult(result: kotlin.Result<Unit>) {
if (isComplete) {
return

View File

@@ -21,9 +21,8 @@ class BackgroundWorkerApiImpl(context: Context) : BackgroundWorkerFgHostApi {
Log.i(TAG, "Scheduled media observer")
}
override fun enableUploadWorker(callbackHandle: Long) {
override fun enableUploadWorker() {
updateUploadEnabled(ctx, true)
updateCallbackHandle(ctx, callbackHandle)
Log.i(TAG, "Scheduled background upload tasks")
}
@@ -41,7 +40,6 @@ class BackgroundWorkerApiImpl(context: Context) : BackgroundWorkerFgHostApi {
const val SHARED_PREF_NAME = "Immich::Background"
const val SHARED_PREF_BACKUP_ENABLED = "Background::backup::enabled"
const val SHARED_PREF_CALLBACK_HANDLE = "Background::backup::callbackHandle"
private fun updateUploadEnabled(context: Context, enabled: Boolean) {
context.getSharedPreferences(SHARED_PREF_NAME, Context.MODE_PRIVATE).edit {
@@ -49,12 +47,6 @@ class BackgroundWorkerApiImpl(context: Context) : BackgroundWorkerFgHostApi {
}
}
private fun updateCallbackHandle(context: Context, callbackHandle: Long) {
context.getSharedPreferences(SHARED_PREF_NAME, Context.MODE_PRIVATE).edit {
putLong(SHARED_PREF_CALLBACK_HANDLE, callbackHandle)
}
}
fun enqueueMediaObserver(ctx: Context) {
val constraints = Constraints.Builder()
.addContentUriTrigger(MediaStore.Images.Media.INTERNAL_CONTENT_URI, true)

View File

@@ -24,7 +24,7 @@ import UIKit
BackgroundServicePlugin.register(with: self.registrar(forPlugin: "BackgroundServicePlugin")!)
BackgroundServicePlugin.registerBackgroundProcessing()
BackgroundWorkerApiImpl.registerBackgroundProcessing()
BackgroundWorkerApiImpl.registerBackgroundWorkers()
BackgroundServicePlugin.setPluginRegistrantCallback { registry in
if !registry.hasPlugin("org.cocoapods.path-provider-foundation") {

View File

@@ -74,7 +74,7 @@ class BackgroundWorkerPigeonCodec: FlutterStandardMessageCodec, @unchecked Senda
/// Generated protocol from Pigeon that represents a handler of messages from Flutter.
protocol BackgroundWorkerFgHostApi {
func enableSyncWorker() throws
func enableUploadWorker(callbackHandle: Int64) throws
func enableUploadWorker() throws
func disableUploadWorker() throws
}
@@ -99,11 +99,9 @@ class BackgroundWorkerFgHostApiSetup {
}
let enableUploadWorkerChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.BackgroundWorkerFgHostApi.enableUploadWorker\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
enableUploadWorkerChannel.setMessageHandler { message, reply in
let args = message as! [Any?]
let callbackHandleArg = args[0] as! Int64
enableUploadWorkerChannel.setMessageHandler { _, reply in
do {
try api.enableUploadWorker(callbackHandle: callbackHandleArg)
try api.enableUploadWorker()
reply(wrapResult(nil))
} catch {
reply(wrapError(error))
@@ -130,6 +128,7 @@ class BackgroundWorkerFgHostApiSetup {
/// Generated protocol from Pigeon that represents a handler of messages from Flutter.
protocol BackgroundWorkerBgHostApi {
func onInitialized() throws
func close() throws
}
/// Generated setup class from Pigeon to handle messages through the `binaryMessenger`.
@@ -151,6 +150,19 @@ class BackgroundWorkerBgHostApiSetup {
} else {
onInitializedChannel.setMessageHandler(nil)
}
let closeChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.BackgroundWorkerBgHostApi.close\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
if let api = api {
closeChannel.setMessageHandler { _, reply in
do {
try api.close()
reply(wrapResult(nil))
} catch {
reply(wrapError(error))
}
}
} else {
closeChannel.setMessageHandler(nil)
}
}
}
/// Generated protocol from Pigeon that represents Flutter messages that can be called from Swift.

View File

@@ -86,28 +86,10 @@ class BackgroundWorker: BackgroundWorkerBgHostApi {
* starts the engine, and sets up a timeout timer if specified.
*/
func run() {
// Retrieve the callback handle stored by the main Flutter app
// This handle points to the Flutter function that should be executed in the background
let callbackHandle = Int64(UserDefaults.standard.string(
forKey: BackgroundWorkerApiImpl.backgroundUploadCallbackHandleKey) ?? "0") ?? 0
if callbackHandle == 0 {
// Without a valid callback handle, we cannot start the Flutter background execution
complete(success: false)
return
}
// Use the callback handle to retrieve the actual Flutter callback information
guard let callback = FlutterCallbackCache.lookupCallbackInformation(callbackHandle) else {
// The callback handle is invalid or the callback was not found
complete(success: false)
return
}
// Start the Flutter engine with the specified callback as the entry point
let isRunning = engine.run(
withEntrypoint: callback.callbackName,
libraryURI: callback.callbackLibraryPath
withEntrypoint: "backgroundSyncNativeEntrypoint",
libraryURI: "package:immich_mobile/domain/services/background_worker.service.dart"
)
// Verify that the Flutter engine started successfully
@@ -127,7 +109,7 @@ class BackgroundWorker: BackgroundWorkerBgHostApi {
if maxSeconds != nil {
// Schedule a timer to cancel the task after the specified timeout period
Timer.scheduledTimer(withTimeInterval: TimeInterval(maxSeconds!), repeats: false) { _ in
self.cancel()
self.close()
}
}
}
@@ -156,7 +138,7 @@ class BackgroundWorker: BackgroundWorkerBgHostApi {
* Sends a cancel signal to the Flutter side and sets up a fallback timer to ensure
* the completion handler is eventually called even if Flutter doesn't respond.
*/
func cancel() {
func close() {
if isComplete {
return
}
@@ -182,7 +164,7 @@ class BackgroundWorker: BackgroundWorkerBgHostApi {
private func handleHostResult(result: Result<Void, PigeonError>) {
switch result {
case .success(): self.complete(success: true)
case .failure(_): self.cancel()
case .failure(_): self.close()
}
}

View File

@@ -6,10 +6,8 @@ class BackgroundWorkerApiImpl: BackgroundWorkerFgHostApi {
print("BackgroundUploadImpl:enableSyncWorker Local Sync worker scheduled")
}
func enableUploadWorker(callbackHandle: Int64) throws {
func enableUploadWorker() throws {
BackgroundWorkerApiImpl.updateUploadEnabled(true)
// Store the callback handle for later use when starting background Flutter isolates
BackgroundWorkerApiImpl.updateUploadCallbackHandle(callbackHandle)
BackgroundWorkerApiImpl.scheduleRefreshUpload()
BackgroundWorkerApiImpl.scheduleProcessingUpload()
@@ -23,7 +21,6 @@ class BackgroundWorkerApiImpl: BackgroundWorkerFgHostApi {
}
public static let backgroundUploadEnabledKey = "immich:background:backup:enabled"
public static let backgroundUploadCallbackHandleKey = "immich:background:backup:callbackHandle"
private static let localSyncTaskID = "app.alextran.immich.background.localSync"
private static let refreshUploadTaskID = "app.alextran.immich.background.refreshUpload"
@@ -33,17 +30,13 @@ class BackgroundWorkerApiImpl: BackgroundWorkerFgHostApi {
return UserDefaults.standard.set(isEnabled, forKey: BackgroundWorkerApiImpl.backgroundUploadEnabledKey)
}
private static func updateUploadCallbackHandle(_ callbackHandle: Int64) {
return UserDefaults.standard.set(String(callbackHandle), forKey: BackgroundWorkerApiImpl.backgroundUploadCallbackHandleKey)
}
private static func cancelUploadTasks() {
BackgroundWorkerApiImpl.updateUploadEnabled(false)
BGTaskScheduler.shared.cancel(taskRequestWithIdentifier: refreshUploadTaskID);
BGTaskScheduler.shared.cancel(taskRequestWithIdentifier: processingUploadTaskID);
}
public static func registerBackgroundProcessing() {
public static func registerBackgroundWorkers() {
BGTaskScheduler.shared.register(
forTaskWithIdentifier: processingUploadTaskID, using: nil) { task in
if task is BGProcessingTask {
@@ -102,9 +95,22 @@ class BackgroundWorkerApiImpl: BackgroundWorkerFgHostApi {
}
private static func handleBackgroundRefresh(task: BGAppRefreshTask, taskType: BackgroundTaskType) {
scheduleRefreshUpload()
// Restrict the refresh task to run only for a maximum of 20 seconds
runBackgroundWorker(task: task, taskType: taskType, maxSeconds: 20)
let maxSeconds: Int?
switch taskType {
case .localSync:
maxSeconds = 15
scheduleLocalSync()
case .refreshUpload:
maxSeconds = 20
scheduleRefreshUpload()
case .processingUpload:
print("Unexpected background refresh task encountered")
return;
}
// Restrict the refresh task to run only for a maximum of (maxSeconds) seconds
runBackgroundWorker(task: task, taskType: taskType, maxSeconds: maxSeconds)
}
private static func handleBackgroundProcessing(task: BGProcessingTask) {
@@ -134,7 +140,7 @@ class BackgroundWorkerApiImpl: BackgroundWorkerFgHostApi {
task.expirationHandler = {
DispatchQueue.main.async {
backgroundWorker.cancel()
backgroundWorker.close()
}
isSuccess = false

View File

@@ -14,6 +14,7 @@ import 'package:immich_mobile/providers/backup/drift_backup.provider.dart';
import 'package:immich_mobile/providers/db.provider.dart';
import 'package:immich_mobile/providers/infrastructure/db.provider.dart';
import 'package:immich_mobile/providers/user.provider.dart';
import 'package:immich_mobile/repositories/file_media.repository.dart';
import 'package:immich_mobile/services/app_settings.service.dart';
import 'package:immich_mobile/services/auth.service.dart';
import 'package:immich_mobile/services/localization.service.dart';
@@ -31,9 +32,7 @@ class BackgroundWorkerFgService {
// TODO: Move this call to native side once old timeline is removed
Future<void> enableSyncService() => _foregroundHostApi.enableSyncWorker();
Future<void> enableUploadService() => _foregroundHostApi.enableUploadWorker(
PluginUtilities.getCallbackHandle(_backgroundSyncNativeEntrypoint)!.toRawHandle(),
);
Future<void> enableUploadService() => _foregroundHostApi.enableUploadWorker();
Future<void> disableUploadService() => _foregroundHostApi.disableUploadWorker();
}
@@ -44,7 +43,7 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
final Drift _drift;
final DriftLogger _driftLogger;
final BackgroundWorkerBgHostApi _backgroundHostApi;
final Logger _logger = Logger('BackgroundUploadBgService');
final Logger _logger = Logger('BackgroundWorkerBgService');
bool _isCleanedUp = false;
@@ -66,37 +65,50 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
bool get _isBackupEnabled => _ref.read(appSettingsServiceProvider).getSetting(AppSettingsEnum.enableBackup);
Future<void> init() async {
await loadTranslations();
HttpSSLOptions.apply(applyNative: false);
await _ref.read(authServiceProvider).setOpenApiServiceEndpoint();
try {
await loadTranslations();
HttpSSLOptions.apply(applyNative: false);
await _ref.read(authServiceProvider).setOpenApiServiceEndpoint();
// Initialize the file downloader
await FileDownloader().configure(
globalConfig: [
// maxConcurrent: 6, maxConcurrentByHost(server):6, maxConcurrentByGroup: 3
(Config.holdingQueue, (6, 6, 3)),
// On Android, if files are larger than 256MB, run in foreground service
(Config.runInForegroundIfFileLargerThan, 256),
],
);
await FileDownloader().trackTasksInGroup(kDownloadGroupLivePhoto, markDownloadedComplete: false);
await FileDownloader().trackTasks();
configureFileDownloaderNotifications();
// Initialize the file downloader
await FileDownloader().configure(
globalConfig: [
// maxConcurrent: 6, maxConcurrentByHost(server):6, maxConcurrentByGroup: 3
(Config.holdingQueue, (6, 6, 3)),
// On Android, if files are larger than 256MB, run in foreground service
(Config.runInForegroundIfFileLargerThan, 256),
],
);
await FileDownloader().trackTasksInGroup(kDownloadGroupLivePhoto, markDownloadedComplete: false);
await FileDownloader().trackTasks();
configureFileDownloaderNotifications();
// Notify the host that the background upload service has been initialized and is ready to use
await _backgroundHostApi.onInitialized();
await _ref.read(fileMediaRepositoryProvider).enableBackgroundAccess();
// Notify the host that the background worker service has been initialized and is ready to use
_backgroundHostApi.onInitialized();
} catch (error, stack) {
_logger.severe("Failed to initialize background worker", error, stack);
_backgroundHostApi.close();
}
}
@override
Future<void> onLocalSync(int? maxSeconds) async {
_logger.info('Local background syncing started');
final sw = Stopwatch()..start();
try {
_logger.info('Local background syncing started');
final sw = Stopwatch()..start();
final timeout = maxSeconds != null ? Duration(seconds: maxSeconds) : null;
await _syncAssets(hashTimeout: timeout, syncRemote: false);
final timeout = maxSeconds != null ? Duration(seconds: maxSeconds) : null;
await _syncAssets(hashTimeout: timeout, syncRemote: false);
sw.stop();
_logger.info("Local sync completed in ${sw.elapsed.inSeconds}s");
sw.stop();
_logger.info("Local sync completed in ${sw.elapsed.inSeconds}s");
} catch (error, stack) {
_logger.severe("Failed to complete local sync", error, stack);
} finally {
await _cleanup();
}
}
/* We do the following on Android upload
@@ -107,16 +119,20 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
*/
@override
Future<void> onAndroidUpload() async {
_logger.info('Android background processing started');
final sw = Stopwatch()..start();
try {
_logger.info('Android background processing started');
final sw = Stopwatch()..start();
await _syncAssets(hashTimeout: Duration(minutes: _isBackupEnabled ? 3 : 6));
await _handleBackup(processBulk: false);
await _syncAssets(hashTimeout: Duration(minutes: _isBackupEnabled ? 3 : 6));
await _handleBackup(processBulk: false);
await _cleanup();
sw.stop();
_logger.info("Android background processing completed in ${sw.elapsed.inSeconds}s");
sw.stop();
_logger.info("Android background processing completed in ${sw.elapsed.inSeconds}s");
} catch (error, stack) {
_logger.severe("Failed to complete Android background processing", error, stack);
} finally {
await _cleanup();
}
}
/* We do the following on background upload
@@ -129,29 +145,37 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
*/
@override
Future<void> onIosUpload(bool isRefresh, int? maxSeconds) async {
_logger.info('iOS background upload started with maxSeconds: ${maxSeconds}s');
final sw = Stopwatch()..start();
try {
_logger.info('iOS background upload started with maxSeconds: ${maxSeconds}s');
final sw = Stopwatch()..start();
final timeout = isRefresh ? const Duration(seconds: 5) : Duration(minutes: _isBackupEnabled ? 3 : 6);
await _syncAssets(hashTimeout: timeout);
final timeout = isRefresh ? const Duration(seconds: 5) : Duration(minutes: _isBackupEnabled ? 3 : 6);
await _syncAssets(hashTimeout: timeout);
final backupFuture = _handleBackup();
if (maxSeconds != null) {
await backupFuture.timeout(Duration(seconds: maxSeconds - 1), onTimeout: () {});
} else {
await backupFuture;
final backupFuture = _handleBackup();
if (maxSeconds != null) {
await backupFuture.timeout(Duration(seconds: maxSeconds - 1), onTimeout: () {});
} else {
await backupFuture;
}
sw.stop();
_logger.info("iOS background upload completed in ${sw.elapsed.inSeconds}s");
} catch (error, stack) {
_logger.severe("Failed to complete iOS background upload", error, stack);
} finally {
await _cleanup();
}
await _cleanup();
sw.stop();
_logger.info("iOS background upload completed in ${sw.elapsed.inSeconds}s");
}
@override
Future<void> cancel() async {
_logger.warning("Background upload cancelled");
await _cleanup();
_logger.warning("Background worker cancelled");
try {
await _cleanup();
} catch (error, stack) {
debugPrint('Failed to cleanup background worker: $error with stack: $stack');
}
}
Future<void> _cleanup() async {
@@ -159,13 +183,21 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
return;
}
_isCleanedUp = true;
await _ref.read(backgroundSyncProvider).cancel();
await _ref.read(backgroundSyncProvider).cancelLocal();
await _isar.close();
await _drift.close();
await _driftLogger.close();
_ref.dispose();
try {
_isCleanedUp = true;
_logger.info("Cleaning up background worker");
await _ref.read(backgroundSyncProvider).cancel();
await _ref.read(backgroundSyncProvider).cancelLocal();
if (_isar.isOpen) {
await _isar.close();
}
await _drift.close();
await _driftLogger.close();
_ref.dispose();
debugPrint("Background worker cleaned up");
} catch (error, stack) {
debugPrint('Failed to cleanup background worker: $error with stack: $stack');
}
}
Future<void> _handleBackup({bool processBulk = true}) async {
@@ -221,8 +253,10 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
}
}
/// Native entry invoked from the background worker. If renaming or moving this to a different
/// library, make sure to update the entry points and URI in native workers as well
@pragma('vm:entry-point')
Future<void> _backgroundSyncNativeEntrypoint() async {
Future<void> backgroundSyncNativeEntrypoint() async {
WidgetsFlutterBinding.ensureInitialized();
DartPluginRegistrant.ensureInitialized();
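Taken together with the Kotlin and Swift changes above, the background workers no longer persist a Dart callback handle and look it up at launch; both platforms now start the background engine directly at the `backgroundSyncNativeEntrypoint` entrypoint in `package:immich_mobile/domain/services/background_worker.service.dart`, which is why the function loses its leading underscore while keeping the `vm:entry-point` pragma.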

View File

@@ -6,6 +6,7 @@ import 'package:immich_mobile/domain/models/album/local_album.model.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/infrastructure/repositories/local_album.repository.dart';
import 'package:immich_mobile/platform/native_sync_api.g.dart';
import 'package:immich_mobile/utils/datetime_helpers.dart';
import 'package:immich_mobile/utils/diff.dart';
import 'package:logging/logging.dart';
import 'package:platform/platform.dart';
@@ -285,7 +286,7 @@ extension on Iterable<PlatformAlbum> {
(e) => LocalAlbum(
id: e.id,
name: e.name,
updatedAt: e.updatedAt == null ? DateTime.now() : DateTime.fromMillisecondsSinceEpoch(e.updatedAt! * 1000),
updatedAt: tryFromSecondsSinceEpoch(e.updatedAt) ?? DateTime.now(),
assetCount: e.assetCount,
),
).toList();
@@ -300,8 +301,8 @@ extension on Iterable<PlatformAsset> {
name: e.name,
checksum: null,
type: AssetType.values.elementAtOrNull(e.type) ?? AssetType.other,
createdAt: e.createdAt == null ? DateTime.now() : DateTime.fromMillisecondsSinceEpoch(e.createdAt! * 1000),
updatedAt: e.updatedAt == null ? DateTime.now() : DateTime.fromMillisecondsSinceEpoch(e.updatedAt! * 1000),
createdAt: tryFromSecondsSinceEpoch(e.createdAt) ?? DateTime.now(),
updatedAt: tryFromSecondsSinceEpoch(e.updatedAt) ?? DateTime.now(),
width: e.width,
height: e.height,
durationInSeconds: e.durationInSeconds,

View File

@@ -16,6 +16,13 @@ class StorageRepository {
file = await entity?.originFile;
if (file == null) {
log.warning("Cannot get file for asset $assetId");
return null;
}
final exists = await file.exists();
if (!exists) {
log.warning("File for asset $assetId does not exist");
return null;
}
} catch (error, stackTrace) {
log.warning("Error getting file for asset $assetId", error, stackTrace);
@@ -34,6 +41,13 @@ class StorageRepository {
log.warning(
"Cannot get motion file for asset ${asset.id}, name: ${asset.name}, created on: ${asset.createdAt}",
);
return null;
}
final exists = await file.exists();
if (!exists) {
log.warning("Motion file for asset ${asset.id} does not exist");
return null;
}
} catch (error, stackTrace) {
log.warning(

View File

@@ -206,14 +206,14 @@ class ImmichAppState extends ConsumerState<ImmichApp> with WidgetsBindingObserve
WidgetsBinding.instance.addPostFrameCallback((_) {
// needs to be delayed so that EasyLocalization is working
if (Store.isBetaTimelineEnabled) {
ref.read(backgroundServiceProvider).disableService();
ref.read(driftBackgroundUploadFgService).enableSyncService();
if (ref.read(appSettingsServiceProvider).getSetting(AppSettingsEnum.enableBackup)) {
ref.read(backgroundServiceProvider).disableService();
ref.read(driftBackgroundUploadFgService).enableUploadService();
}
} else {
ref.read(backgroundServiceProvider).resumeServiceIfEnabled();
ref.read(driftBackgroundUploadFgService).disableUploadService();
ref.read(backgroundServiceProvider).resumeServiceIfEnabled();
}
});

View File

@@ -82,7 +82,7 @@ class BackgroundWorkerFgHostApi {
}
}
Future<void> enableUploadWorker(int callbackHandle) async {
Future<void> enableUploadWorker() async {
final String pigeonVar_channelName =
'dev.flutter.pigeon.immich_mobile.BackgroundWorkerFgHostApi.enableUploadWorker$pigeonVar_messageChannelSuffix';
final BasicMessageChannel<Object?> pigeonVar_channel = BasicMessageChannel<Object?>(
@@ -90,7 +90,7 @@ class BackgroundWorkerFgHostApi {
pigeonChannelCodec,
binaryMessenger: pigeonVar_binaryMessenger,
);
final Future<Object?> pigeonVar_sendFuture = pigeonVar_channel.send(<Object?>[callbackHandle]);
final Future<Object?> pigeonVar_sendFuture = pigeonVar_channel.send(null);
final List<Object?>? pigeonVar_replyList = await pigeonVar_sendFuture as List<Object?>?;
if (pigeonVar_replyList == null) {
throw _createConnectionError(pigeonVar_channelName);
@@ -164,6 +164,29 @@ class BackgroundWorkerBgHostApi {
return;
}
}
Future<void> close() async {
final String pigeonVar_channelName =
'dev.flutter.pigeon.immich_mobile.BackgroundWorkerBgHostApi.close$pigeonVar_messageChannelSuffix';
final BasicMessageChannel<Object?> pigeonVar_channel = BasicMessageChannel<Object?>(
pigeonVar_channelName,
pigeonChannelCodec,
binaryMessenger: pigeonVar_binaryMessenger,
);
final Future<Object?> pigeonVar_sendFuture = pigeonVar_channel.send(null);
final List<Object?>? pigeonVar_replyList = await pigeonVar_sendFuture as List<Object?>?;
if (pigeonVar_replyList == null) {
throw _createConnectionError(pigeonVar_channelName);
} else if (pigeonVar_replyList.length > 1) {
throw PlatformException(
code: pigeonVar_replyList[0]! as String,
message: pigeonVar_replyList[1] as String?,
details: pigeonVar_replyList[2],
);
} else {
return;
}
}
}
abstract class BackgroundWorkerFlutterApi {

View File

@@ -0,0 +1,19 @@
const int _maxMillisecondsSinceEpoch = 8640000000000000; // 275760-09-13
const int _minMillisecondsSinceEpoch = -62135596800000; // 0001-01-01
DateTime? tryFromSecondsSinceEpoch(int? secondsSinceEpoch) {
if (secondsSinceEpoch == null) {
return null;
}
final milliSeconds = secondsSinceEpoch * 1000;
if (milliSeconds < _minMillisecondsSinceEpoch || milliSeconds > _maxMillisecondsSinceEpoch) {
return null;
}
try {
return DateTime.fromMillisecondsSinceEpoch(milliSeconds);
} catch (e) {
return null;
}
}
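A quick check on the constants: Dart's `DateTime` documents a supported range of ±100,000,000 days around the Unix epoch, and 100,000,000 × 86,400,000 ms/day = 8,640,000,000,000,000 ms, which matches `_maxMillisecondsSinceEpoch`; the lower bound used here (-62,135,596,800,000 ms, i.e. 0001-01-01) is noticeably tighter than Dart's own minimum.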

View File

@@ -18,7 +18,7 @@ class AssetsApi {
/// checkBulkUpload
///
/// Checks if assets exist by checksums
/// Checks if assets exist by checksums. This endpoint requires the `asset.upload` permission.
///
/// Note: This method returns the HTTP [Response].
///
@@ -52,7 +52,7 @@ class AssetsApi {
/// checkBulkUpload
///
/// Checks if assets exist by checksums
/// Checks if assets exist by checksums. This endpoint requires the `asset.upload` permission.
///
/// Parameters:
///

View File

@@ -15,8 +15,7 @@ import 'package:pigeon/pigeon.dart';
abstract class BackgroundWorkerFgHostApi {
void enableSyncWorker();
// Enables the background upload service with the given callback handle
void enableUploadWorker(int callbackHandle);
void enableUploadWorker();
// Disables the background upload service
void disableUploadWorker();
@@ -27,6 +26,8 @@ abstract class BackgroundWorkerBgHostApi {
// Called from the background flutter engine when it has bootstrapped and established the
// required platform channels to notify the native side to start the background upload
void onInitialized();
void close();
}
@FlutterApi()

View File

@@ -0,0 +1,58 @@
import 'package:flutter_test/flutter_test.dart';
import 'package:immich_mobile/utils/datetime_helpers.dart';
void main() {
group('tryFromSecondsSinceEpoch', () {
test('returns null for null input', () {
final result = tryFromSecondsSinceEpoch(null);
expect(result, isNull);
});
test('returns null for value below minimum allowed range', () {
// _minMillisecondsSinceEpoch = -62135596800000
final seconds = -62135596800000 ~/ 1000 - 1; // One second before min allowed
final result = tryFromSecondsSinceEpoch(seconds);
expect(result, isNull);
});
test('returns null for value above maximum allowed range', () {
// _maxMillisecondsSinceEpoch = 8640000000000000
final seconds = 8640000000000000 ~/ 1000 + 1; // One second after max allowed
final result = tryFromSecondsSinceEpoch(seconds);
expect(result, isNull);
});
test('returns correct DateTime for minimum allowed value', () {
final seconds = -62135596800000 ~/ 1000; // Minimum allowed timestamp
final result = tryFromSecondsSinceEpoch(seconds);
expect(result, DateTime.fromMillisecondsSinceEpoch(-62135596800000));
});
test('returns correct DateTime for maximum allowed value', () {
final seconds = 8640000000000000 ~/ 1000; // Maximum allowed timestamp
final result = tryFromSecondsSinceEpoch(seconds);
expect(result, DateTime.fromMillisecondsSinceEpoch(8640000000000000));
});
test('returns correct DateTime for negative timestamp', () {
final seconds = -1577836800; // Dec 31, 1919 (pre-epoch)
final result = tryFromSecondsSinceEpoch(seconds);
expect(result, DateTime.fromMillisecondsSinceEpoch(-1577836800 * 1000));
});
test('returns correct DateTime for zero timestamp', () {
final seconds = 0; // Jan 1, 1970 (epoch)
final result = tryFromSecondsSinceEpoch(seconds);
expect(result, DateTime.fromMillisecondsSinceEpoch(0));
});
test('returns correct DateTime for recent timestamp', () {
final now = DateTime.now();
final seconds = now.millisecondsSinceEpoch ~/ 1000;
final result = tryFromSecondsSinceEpoch(seconds);
expect(result?.year, now.year);
expect(result?.month, now.month);
expect(result?.day, now.day);
});
});
}

View File

@@ -1855,7 +1855,7 @@
},
"/assets/bulk-upload-check": {
"post": {
"description": "Checks if assets exist by checksums",
"description": "Checks if assets exist by checksums. This endpoint requires the `asset.upload` permission.",
"operationId": "checkBulkUpload",
"parameters": [],
"requestBody": {
@@ -1894,7 +1894,8 @@
"summary": "checkBulkUpload",
"tags": [
"Assets"
]
],
"x-immich-permission": "asset.upload"
}
},
"/assets/device/{deviceId}": {

View File

@@ -6,5 +6,12 @@
"packageManager": "pnpm@10.14.0+sha512.ad27a79641b49c3e481a16a805baa71817a04bbe06a38d17e60e2eaee83f6a146c6a688125f5792e48dd5ba30e7da52a5cda4c3992b9ccf333f9ce223af84748",
"engines": {
"pnpm": ">=10.0.0"
},
"type": "module",
"scripts": {
"dev": "node --experimental-strip-types dev.ts"
},
"devDependencies": {
"@types/node": "22.18.0"
}
}

109
pnpm-lock.yaml generated
View File

@@ -15,7 +15,11 @@ pnpmfileChecksum: sha256-AG/qwrPNpmy9q60PZwCpecoYVptglTHgH+N6RKQHOM0=
importers:
.: {}
.:
devDependencies:
'@types/node':
specifier: 22.18.0
version: 22.18.0
.github:
devDependencies:
@@ -4658,6 +4662,9 @@ packages:
'@types/node@22.17.2':
resolution: {integrity: sha512-gL6z5N9Jm9mhY+U2KXZpteb+09zyffliRkZyZOHODGATyC5B1Jt/7TzuuiLkFsSUMLbS1OLmlj/E+/3KF4Q/4w==}
'@types/node@22.18.0':
resolution: {integrity: sha512-m5ObIqwsUp6BZzyiy4RdZpzWGub9bqLJMvZDD0QMXhxjqMHMENlj+SqF5QxoUwaQNFe+8kz8XM8ZQhqkQPTgMQ==}
'@types/node@24.3.0':
resolution: {integrity: sha512-aPTXCrfwnDLj4VvXrm+UUCQjNEvJgNA8s5F1cvwQU+3KNltTOkBm1j30uNLyqqPNe7gE3KFzImYoZEfLhp4Yow==}
@@ -14530,7 +14537,7 @@ snapshots:
'@jest/schemas': 29.6.3
'@types/istanbul-lib-coverage': 2.0.6
'@types/istanbul-reports': 3.0.4
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/yargs': 17.0.33
chalk: 4.1.2
@@ -16349,7 +16356,7 @@ snapshots:
'@types/accepts@1.3.7':
dependencies:
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/archiver@6.0.3':
dependencies:
@@ -16363,22 +16370,22 @@ snapshots:
'@types/bcrypt@6.0.0':
dependencies:
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/body-parser@1.19.6':
dependencies:
'@types/connect': 3.4.38
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/bonjour@3.5.13':
dependencies:
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/braces@3.0.5': {}
'@types/bunyan@1.8.11':
dependencies:
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/byte-size@8.1.2': {}
@@ -16397,21 +16404,21 @@ snapshots:
'@types/cli-progress@3.11.6':
dependencies:
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/compression@1.8.1':
dependencies:
'@types/express': 5.0.3
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/connect-history-api-fallback@1.5.4':
dependencies:
'@types/express-serve-static-core': 5.0.6
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/connect@3.4.38':
dependencies:
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/content-disposition@0.5.9': {}
@@ -16428,11 +16435,11 @@ snapshots:
'@types/connect': 3.4.38
'@types/express': 5.0.3
'@types/keygrip': 1.0.6
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/cors@2.8.19':
dependencies:
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/debug@4.1.12':
dependencies:
@@ -16442,13 +16449,13 @@ snapshots:
'@types/docker-modem@3.0.6':
dependencies:
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/ssh2': 1.15.5
'@types/dockerode@3.3.42':
dependencies:
'@types/docker-modem': 3.0.6
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/ssh2': 1.15.5
'@types/dom-to-image@2.6.7': {}
@@ -16471,14 +16478,14 @@ snapshots:
'@types/express-serve-static-core@4.19.6':
dependencies:
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/qs': 6.14.0
'@types/range-parser': 1.2.7
'@types/send': 0.17.5
'@types/express-serve-static-core@5.0.6':
dependencies:
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/qs': 6.14.0
'@types/range-parser': 1.2.7
'@types/send': 0.17.5
@@ -16504,7 +16511,7 @@ snapshots:
'@types/fluent-ffmpeg@2.1.27':
dependencies:
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/geojson-vt@3.2.5':
dependencies:
@@ -16541,7 +16548,7 @@ snapshots:
'@types/http-proxy@1.17.16':
dependencies:
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/inquirer@8.2.11':
dependencies:
@@ -16579,7 +16586,7 @@ snapshots:
'@types/http-errors': 2.0.5
'@types/keygrip': 1.0.6
'@types/koa-compose': 3.2.8
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/leaflet@1.9.19':
dependencies:
@@ -16605,7 +16612,7 @@ snapshots:
'@types/memcached@2.2.10':
dependencies:
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/methods@1.1.4': {}
@@ -16617,7 +16624,7 @@ snapshots:
'@types/mock-fs@4.13.4':
dependencies:
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/ms@2.1.0': {}
@@ -16627,16 +16634,16 @@ snapshots:
'@types/mysql@2.15.27':
dependencies:
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/node-fetch@2.6.12':
dependencies:
'@types/node': 22.17.2
'@types/node': 22.18.0
form-data: 4.0.3
'@types/node-forge@1.3.11':
dependencies:
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/node@17.0.45': {}
@@ -16656,6 +16663,10 @@ snapshots:
dependencies:
undici-types: 6.21.0
'@types/node@22.18.0':
dependencies:
undici-types: 6.21.0
'@types/node@24.3.0':
dependencies:
undici-types: 7.10.0
@@ -16663,17 +16674,17 @@ snapshots:
'@types/nodemailer@6.4.17':
dependencies:
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/oidc-provider@9.1.2':
dependencies:
'@types/keygrip': 1.0.6
'@types/koa': 3.0.0
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/oracledb@6.5.2':
dependencies:
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/parse5@5.0.3': {}
@@ -16683,13 +16694,13 @@ snapshots:
'@types/pg@8.15.4':
dependencies:
'@types/node': 22.17.2
'@types/node': 22.18.0
pg-protocol: 1.10.3
pg-types: 2.2.0
'@types/pg@8.15.5':
dependencies:
'@types/node': 22.17.2
'@types/node': 22.18.0
pg-protocol: 1.10.3
pg-types: 2.2.0
@@ -16697,13 +16708,13 @@ snapshots:
'@types/pngjs@6.0.5':
dependencies:
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/prismjs@1.26.5': {}
'@types/qrcode@1.5.5':
dependencies:
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/qs@6.14.0': {}
@@ -16743,7 +16754,7 @@ snapshots:
'@types/readdir-glob@1.1.5':
dependencies:
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/retry@0.12.0': {}
@@ -16753,14 +16764,14 @@ snapshots:
'@types/sax@1.2.7':
dependencies:
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/semver@7.7.0': {}
'@types/send@0.17.5':
dependencies:
'@types/mime': 1.3.5
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/serve-index@1.9.4':
dependencies:
@@ -16769,20 +16780,20 @@ snapshots:
'@types/serve-static@1.15.8':
dependencies:
'@types/http-errors': 2.0.5
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/send': 0.17.5
'@types/sockjs@0.3.36':
dependencies:
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/ssh2-streams@0.1.12':
dependencies:
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/ssh2@0.5.52':
dependencies:
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/ssh2-streams': 0.1.12
'@types/ssh2@1.15.5':
@@ -16793,7 +16804,7 @@ snapshots:
dependencies:
'@types/cookiejar': 2.1.5
'@types/methods': 1.1.4
'@types/node': 22.17.2
'@types/node': 22.18.0
form-data: 4.0.3
'@types/supercluster@7.1.3':
@@ -16807,11 +16818,11 @@ snapshots:
'@types/tedious@4.0.14':
dependencies:
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/through@0.0.33':
dependencies:
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/ua-parser-js@0.7.39': {}
@@ -16825,7 +16836,7 @@ snapshots:
'@types/ws@8.18.1':
dependencies:
'@types/node': 22.17.2
'@types/node': 22.18.0
'@types/yargs-parser@21.0.3': {}
@@ -18915,7 +18926,7 @@ snapshots:
engine.io@6.6.4:
dependencies:
'@types/cors': 2.8.19
'@types/node': 22.17.2
'@types/node': 22.18.0
accepts: 1.3.8
base64id: 2.0.0
cookie: 0.7.2
@@ -19339,7 +19350,7 @@ snapshots:
eval@0.1.8:
dependencies:
'@types/node': 22.17.2
'@types/node': 22.18.0
require-like: 0.1.2
event-emitter@0.3.5:
@@ -20591,7 +20602,7 @@ snapshots:
jest-util@29.7.0:
dependencies:
'@jest/types': 29.6.3
'@types/node': 22.17.2
'@types/node': 22.18.0
chalk: 4.1.2
ci-info: 3.9.0
graceful-fs: 4.2.11
@@ -20599,13 +20610,13 @@ snapshots:
jest-worker@27.5.1:
dependencies:
'@types/node': 22.17.2
'@types/node': 22.18.0
merge-stream: 2.0.0
supports-color: 8.1.1
jest-worker@29.7.0:
dependencies:
'@types/node': 22.17.2
'@types/node': 22.18.0
jest-util: 29.7.0
merge-stream: 2.0.0
supports-color: 8.1.1
@@ -23098,7 +23109,7 @@ snapshots:
'@protobufjs/path': 1.1.2
'@protobufjs/pool': 1.1.0
'@protobufjs/utf8': 1.1.0
'@types/node': 22.17.2
'@types/node': 22.18.0
long: 5.3.2
protocol-buffers-schema@3.6.0: {}

View File

@@ -1,5 +1,5 @@
# dev build
FROM ghcr.io/immich-app/base-server-dev:202508191104@sha256:0608857ef682099c458f0fb319afdcaf09462bbb5670b6dcd3642029f12eee1c AS dev
FROM ghcr.io/immich-app/base-server-dev:202509021104@sha256:47d38c94775332000a93fbbeca1c796687b2d2919e3c75b6e26ab8a65d1864f3 AS dev
ENV COREPACK_ENABLE_DOWNLOAD_PROMPT=0 \
CI=1 \
@@ -77,7 +77,7 @@ RUN apt-get update \
RUN dart --disable-analytics
# production-builder-base image
FROM ghcr.io/immich-app/base-server-dev:202508191104@sha256:0608857ef682099c458f0fb319afdcaf09462bbb5670b6dcd3642029f12eee1c AS prod-builder-base
FROM ghcr.io/immich-app/base-server-dev:202509021104@sha256:47d38c94775332000a93fbbeca1c796687b2d2919e3c75b6e26ab8a65d1864f3 AS prod-builder-base
ENV COREPACK_ENABLE_DOWNLOAD_PROMPT=0 \
CI=1 \
COREPACK_HOME=/tmp
@@ -115,7 +115,7 @@ RUN pnpm --filter @immich/sdk --filter @immich/cli --frozen-lockfile install &&
pnpm --filter @immich/cli --prod --no-optional deploy /output/cli-pruned
# prod base image
FROM ghcr.io/immich-app/base-server-prod:202508191104@sha256:4cce4119f5555fce5e383b681e4feea31956ceadb94cafcbcbbae2c7b94a1b62
FROM ghcr.io/immich-app/base-server-prod:202509021104@sha256:84f3727cff75c623f79236cdd9a2b72c84f7665057f474851016f702c67157af
WORKDIR /usr/src/app
ENV NODE_ENV=production \

View File

@@ -188,7 +188,7 @@ export class AssetMediaController {
* Checks if assets exist by checksums
*/
@Post('bulk-upload-check')
@Authenticated()
@Authenticated({ permission: Permission.AssetUpload })
@ApiOperation({
summary: 'checkBulkUpload',
description: 'Checks if assets exist by checksums',
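This replaces the generic `@Authenticated()` check on `/assets/bulk-upload-check` with an explicit `asset.upload` permission requirement, matching the "requires the `asset.upload` permission" note added to the regenerated OpenAPI spec and Dart SDK earlier in this diff.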

View File

@@ -12,7 +12,7 @@ import { AuthRequest } from 'src/middleware/auth.guard';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { AssetMediaService } from 'src/services/asset-media.service';
import { ImmichFile, UploadFile, UploadFiles } from 'src/types';
import { asRequest, mapToUploadFile } from 'src/utils/asset.util';
import { asUploadRequest, mapToUploadFile } from 'src/utils/asset.util';
export function getFile(files: UploadFiles, property: 'assetData' | 'sidecarData') {
const file = files[property]?.[0];
@@ -99,18 +99,21 @@ export class FileUploadInterceptor implements NestInterceptor {
}
private fileFilter(request: AuthRequest, file: Express.Multer.File, callback: multer.FileFilterCallback) {
return callbackify(() => this.assetService.canUploadFile(asRequest(request, file)), callback);
return callbackify(() => this.assetService.canUploadFile(asUploadRequest(request, file)), callback);
}
private filename(request: AuthRequest, file: Express.Multer.File, callback: DiskStorageCallback) {
return callbackify(
() => this.assetService.getUploadFilename(asRequest(request, file)),
() => this.assetService.getUploadFilename(asUploadRequest(request, file)),
callback as Callback<string>,
);
}
private destination(request: AuthRequest, file: Express.Multer.File, callback: DiskStorageCallback) {
return callbackify(() => this.assetService.getUploadFolder(asRequest(request, file)), callback as Callback<string>);
return callbackify(
() => this.assetService.getUploadFolder(asUploadRequest(request, file)),
callback as Callback<string>,
);
}
private handleFile(request: AuthRequest, file: Express.Multer.File, callback: Callback<Partial<ImmichFile>>) {

View File

@@ -25,6 +25,7 @@ const file1 = Buffer.from('d2947b871a706081be194569951b7db246907957', 'hex');
const uploadFile = {
nullAuth: {
auth: null,
body: {},
fieldName: UploadFieldName.ASSET_DATA,
file: {
uuid: 'random-uuid',
@@ -37,6 +38,7 @@ const uploadFile = {
filename: (fieldName: UploadFieldName, filename: string) => {
return {
auth: authStub.admin,
body: {},
fieldName,
file: {
uuid: 'random-uuid',
@@ -897,7 +899,10 @@ describe(AssetMediaService.name, () => {
describe('onUploadError', () => {
it('should queue a job to delete the uploaded file', async () => {
const request = { user: authStub.user1 } as AuthRequest;
const request = {
body: {},
user: authStub.user1,
} as AuthRequest;
const file = {
fieldname: UploadFieldName.ASSET_DATA,

View File

@@ -24,20 +24,14 @@ import { AuthDto } from 'src/dtos/auth.dto';
import { AssetStatus, AssetType, AssetVisibility, CacheControl, JobName, Permission, StorageFolder } from 'src/enum';
import { AuthRequest } from 'src/middleware/auth.guard';
import { BaseService } from 'src/services/base.service';
import { UploadFile } from 'src/types';
import { UploadFile, UploadRequest } from 'src/types';
import { requireUploadAccess } from 'src/utils/access';
import { asRequest, getAssetFiles, onBeforeLink } from 'src/utils/asset.util';
import { asUploadRequest, getAssetFiles, onBeforeLink } from 'src/utils/asset.util';
import { isAssetChecksumConstraint } from 'src/utils/database';
import { getFilenameExtension, getFileNameWithoutExtension, ImmichFileResponse } from 'src/utils/file';
import { mimeTypes } from 'src/utils/mime-types';
import { fromChecksum } from 'src/utils/request';
interface UploadRequest {
auth: AuthDto | null;
fieldName: UploadFieldName;
file: UploadFile;
}
export interface AssetMediaRedirectResponse {
targetSize: AssetMediaSize | 'original';
}
@@ -89,15 +83,15 @@ export class AssetMediaService extends BaseService {
throw new BadRequestException(`Unsupported file type ${filename}`);
}
getUploadFilename({ auth, fieldName, file }: UploadRequest): string {
getUploadFilename({ auth, fieldName, file, body }: UploadRequest): string {
requireUploadAccess(auth);
const originalExtension = extname(file.originalName);
const extension = extname(body.filename || file.originalName);
const lookup = {
[UploadFieldName.ASSET_DATA]: originalExtension,
[UploadFieldName.ASSET_DATA]: extension,
[UploadFieldName.SIDECAR_DATA]: '.xmp',
[UploadFieldName.PROFILE_DATA]: originalExtension,
[UploadFieldName.PROFILE_DATA]: extension,
};
return sanitize(`${file.uuid}${lookup[fieldName]}`);
@@ -117,8 +111,8 @@ export class AssetMediaService extends BaseService {
}
async onUploadError(request: AuthRequest, file: Express.Multer.File) {
const uploadFilename = this.getUploadFilename(asRequest(request, file));
const uploadFolder = this.getUploadFolder(asRequest(request, file));
const uploadFilename = this.getUploadFilename(asUploadRequest(request, file));
const uploadFolder = this.getUploadFolder(asUploadRequest(request, file));
const uploadPath = `${uploadFolder}/${uploadFilename}`;
await this.jobRepository.queue({ name: JobName.FileDelete, data: { files: [uploadPath] } });
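This appears to be the core of the "fix: iOS portrait photo saved as jpg extension" change: the upload filename's extension is now taken from the client-supplied `body.filename` when present, rather than from the multipart part's original name. A minimal sketch of the selection logic, with hypothetical input values:

import { extname } from 'node:path';

// hypothetical request values, for illustration only
const body = { filename: 'IMG_0001.heic' }; // filename field sent alongside the upload
const file = { uuid: 'random-uuid', originalName: 'blob.jpg' }; // name reported by the multipart part

// mirrors the extension selection in getUploadFilename: prefer the body filename's extension when provided
const extension = extname(body.filename || file.originalName); // '.heic', not '.jpg'
const uploadFilename = `${file.uuid}${extension}`; // the real method additionally runs this through sanitize()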

View File

@@ -1,5 +1,7 @@
import { SystemConfig } from 'src/config';
import { VECTOR_EXTENSIONS } from 'src/constants';
import { UploadFieldName } from 'src/dtos/asset-media.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import {
AssetMetadataKey,
AssetOrder,
@@ -408,6 +410,16 @@ export interface UploadFile {
size: number;
}
export type UploadRequest = {
auth: AuthDto | null;
fieldName: UploadFieldName;
file: UploadFile;
body: {
filename?: string;
[key: string]: unknown;
};
};
export interface UploadFiles {
assetData: ImmichFile[];
sidecarData: ImmichFile[];

View File

@@ -10,7 +10,7 @@ import { AccessRepository } from 'src/repositories/access.repository';
import { AssetRepository } from 'src/repositories/asset.repository';
import { EventRepository } from 'src/repositories/event.repository';
import { PartnerRepository } from 'src/repositories/partner.repository';
import { IBulkAsset, ImmichFile, UploadFile } from 'src/types';
import { IBulkAsset, ImmichFile, UploadFile, UploadRequest } from 'src/types';
import { checkAccess } from 'src/utils/access';
export const getAssetFile = (files: AssetFile[], type: AssetFileType | GeneratedImageType) => {
@@ -190,9 +190,10 @@ export function mapToUploadFile(file: ImmichFile): UploadFile {
};
}
export const asRequest = (request: AuthRequest, file: Express.Multer.File) => {
export const asUploadRequest = (request: AuthRequest, file: Express.Multer.File): UploadRequest => {
return {
auth: request.user || null,
body: request.body,
fieldName: file.fieldname as UploadFieldName,
file: mapToUploadFile(file as ImmichFile),
};