Compare commits

...

20 Commits

Author SHA1 Message Date
mertalev
b84216180c update sync 2025-09-20 10:41:45 -04:00
Alex
e1c6813ee0 Merge branch 'main' of github.com:immich-app/immich into feat/mobile-platform-clients 2025-09-20 08:18:22 -05:00
Jason Rasmussen
de57fecb69 fix(web): copy to clipboard on safari (#22217) 2025-09-19 17:44:18 -04:00
renovate[bot]
1e0b4fac04 fix(deps): update typescript-projects (#21510)
* fix(deps): update typescript-projects

* chore: downgrade dependencies

* chore: downgrade svelte-gestures

* fix: svelte/no-navigation-without-resolve

* fix: dumb test

---------

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Zack Pollard <zack@futo.org>
Co-authored-by: Daniel Dietzler <mail@ddietzler.dev>
Co-authored-by: Jason Rasmussen <jason@rasm.me>
2025-09-19 12:29:01 -04:00
Jason Rasmussen
34339ea69f fix(web): show danger/warning when taken dates overlap (#22213) 2025-09-19 12:20:09 -04:00
Jason Rasmussen
6da039780e fix: automatically remove leading/trailing whitespace from search que… (#22214)
fix: automatically remove leading/trailing whitespace from search queries
2025-09-19 12:19:26 -04:00
Jason Rasmussen
3f2e0780d5 feat: availability checks (#22185) 2025-09-19 12:18:42 -04:00
mertalev
5054719f43 disable disk cache by default 2025-09-19 09:23:32 -04:00
Alex
3423cf90bc Merge branch 'main' into feat/mobile-platform-clients 2025-09-18 23:58:49 -05:00
mertalev
f406ba1e6c add back client parameter for testing 2025-09-18 20:31:29 -04:00
mertalev
df186cc326 unrelated change 2025-09-18 20:18:07 -04:00
mertalev
11ebbe51a1 don't close client 2025-09-18 20:16:48 -04:00
mertalev
52fbf6fbc7 update other usages 2025-09-18 20:16:38 -04:00
mertalev
cf7a3a91c2 move to bootstrap 2025-09-18 20:13:37 -04:00
mertalev
dc73a860cc set defaults 2025-09-18 20:13:37 -04:00
mertalev
88b6da5e0a init before app launch 2025-09-18 20:13:37 -04:00
mertalev
740c50122e custom user agent 2025-09-18 20:13:37 -04:00
mertalev
9836392fbe fix hot reload 2025-09-18 20:13:37 -04:00
mertalev
b1f3051608 uppercase http method 2025-09-18 20:13:37 -04:00
mertalev
87e1539912 platform clients 2025-09-18 20:13:37 -04:00
63 changed files with 1919 additions and 1824 deletions

View File

@@ -169,8 +169,6 @@ Redis (Sentinel) URL example JSON before encoding:
| `MACHINE_LEARNING_ANN_TUNING_LEVEL` | ARM-NN GPU tuning level (1: rapid, 2: normal, 3: exhaustive) | `2` | machine learning |
| `MACHINE_LEARNING_DEVICE_IDS`<sup>\*4</sup> | Device IDs to use in multi-GPU environments | `0` | machine learning |
| `MACHINE_LEARNING_MAX_BATCH_SIZE__FACIAL_RECOGNITION` | Set the maximum number of faces that will be processed at once by the facial recognition model | None (`1` if using OpenVINO) | machine learning |
| `MACHINE_LEARNING_PING_TIMEOUT` | How long (ms) to wait for a PING response when checking if an ML server is available | `2000` | server |
| `MACHINE_LEARNING_AVAILABILITY_BACKOFF_TIME` | How long to ignore ML servers that are offline before trying again | `30000` | server |
| `MACHINE_LEARNING_RKNN` | Enable RKNN hardware acceleration if supported | `True` | machine learning |
| `MACHINE_LEARNING_RKNN_THREADS` | How many threads of the RKNN runtime should be spun up while inferencing. | `1` | machine learning |
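
The `MACHINE_LEARNING_PING_TIMEOUT` and `MACHINE_LEARNING_AVAILABILITY_BACKOFF_TIME` rows are the ones dropped here: their constants are deleted in the server constants hunk further down, and the ping timeout and check cadence move into the runtime config instead. A condensed sketch of the new defaults, mirroring the SystemConfig defaults hunk later in this compare (only the timeout keeps an env-var fallback, under the variable name used in that hunk):

// Sketch of the new machineLearning.availabilityChecks defaults (see the SystemConfig defaults hunk below).
const availabilityChecks = {
  enabled: true,                                                             // checks are on by default
  timeout: Number(process.env.IMMICH_MACHINE_LEARNING_PING_TIMEOUT) || 2000, // ms to wait for a /ping response
  interval: 30_000,                                                          // ms between background checks
};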

View File

@@ -123,6 +123,13 @@
"logging_enable_description": "Enable logging",
"logging_level_description": "When enabled, what log level to use.",
"logging_settings": "Logging",
"machine_learning_availability_checks": "Availability checks",
"machine_learning_availability_checks_description": "Automatically detect and prefer available machine learning servers",
"machine_learning_availability_checks_enabled": "Enable availability checks",
"machine_learning_availability_checks_interval": "Check interval",
"machine_learning_availability_checks_interval_description": "Interval in milliseconds between availability checks",
"machine_learning_availability_checks_timeout": "Request timeout",
"machine_learning_availability_checks_timeout_description": "Timeout in milliseconds for availability checks",
"machine_learning_clip_model": "CLIP model",
"machine_learning_clip_model_description": "The name of a CLIP model listed <link>here</link>. Note that you must re-run the 'Smart Search' job for all images upon changing a model.",
"machine_learning_duplicate_detection": "Duplicate Detection",
@@ -913,6 +920,7 @@
"cant_get_number_of_comments": "Can't get number of comments",
"cant_search_people": "Can't search people",
"cant_search_places": "Can't search places",
"clipboard_unsupported_mime_type": "The system clipboard does not support copying this type of content: {mimeType}",
"error_adding_assets_to_album": "Error adding assets to album",
"error_adding_users_to_album": "Error adding users to album",
"error_deleting_shared_user": "Error deleting shared user",
@@ -1916,6 +1924,7 @@
"stacktrace": "Stacktrace",
"start": "Start",
"start_date": "Start date",
"start_date_before_end_date": "Start date must be before end date",
"state": "State",
"status": "Status",
"stop_casting": "Stop casting",

View File

@@ -1,7 +1,7 @@
[tools]
node = "22.19.0"
flutter = "3.35.4"
pnpm = "10.14.0"
pnpm = "10.15.1"
dart = "3.8.2"
[tools."github:CQLabs/homebrew-dcm"]

View File

@@ -6,6 +6,9 @@ PODS:
- FlutterMacOS
- connectivity_plus (0.0.1):
- Flutter
- cupertino_http (0.0.1):
- Flutter
- FlutterMacOS
- device_info_plus (0.0.1):
- Flutter
- DKImagePickerController/Core (4.3.9):
@@ -77,6 +80,8 @@ PODS:
- Flutter
- network_info_plus (0.0.1):
- Flutter
- objective_c (0.0.1):
- Flutter
- package_info_plus (0.4.5):
- Flutter
- path_provider_foundation (0.0.1):
@@ -136,6 +141,7 @@ DEPENDENCIES:
- background_downloader (from `.symlinks/plugins/background_downloader/ios`)
- bonsoir_darwin (from `.symlinks/plugins/bonsoir_darwin/darwin`)
- connectivity_plus (from `.symlinks/plugins/connectivity_plus/ios`)
- cupertino_http (from `.symlinks/plugins/cupertino_http/darwin`)
- device_info_plus (from `.symlinks/plugins/device_info_plus/ios`)
- file_picker (from `.symlinks/plugins/file_picker/ios`)
- Flutter (from `Flutter`)
@@ -154,6 +160,7 @@ DEPENDENCIES:
- maplibre_gl (from `.symlinks/plugins/maplibre_gl/ios`)
- native_video_player (from `.symlinks/plugins/native_video_player/ios`)
- network_info_plus (from `.symlinks/plugins/network_info_plus/ios`)
- objective_c (from `.symlinks/plugins/objective_c/ios`)
- package_info_plus (from `.symlinks/plugins/package_info_plus/ios`)
- path_provider_foundation (from `.symlinks/plugins/path_provider_foundation/darwin`)
- permission_handler_apple (from `.symlinks/plugins/permission_handler_apple/ios`)
@@ -184,6 +191,8 @@ EXTERNAL SOURCES:
:path: ".symlinks/plugins/bonsoir_darwin/darwin"
connectivity_plus:
:path: ".symlinks/plugins/connectivity_plus/ios"
cupertino_http:
:path: ".symlinks/plugins/cupertino_http/darwin"
device_info_plus:
:path: ".symlinks/plugins/device_info_plus/ios"
file_picker:
@@ -220,6 +229,8 @@ EXTERNAL SOURCES:
:path: ".symlinks/plugins/native_video_player/ios"
network_info_plus:
:path: ".symlinks/plugins/network_info_plus/ios"
objective_c:
:path: ".symlinks/plugins/objective_c/ios"
package_info_plus:
:path: ".symlinks/plugins/package_info_plus/ios"
path_provider_foundation:
@@ -249,6 +260,7 @@ SPEC CHECKSUMS:
background_downloader: 50e91d979067b82081aba359d7d916b3ba5fadad
bonsoir_darwin: 29c7ccf356646118844721f36e1de4b61f6cbd0e
connectivity_plus: cb623214f4e1f6ef8fe7403d580fdad517d2f7dd
cupertino_http: 94ac07f5ff090b8effa6c5e2c47871d48ab7c86c
device_info_plus: 21fcca2080fbcd348be798aa36c3e5ed849eefbe
DKImagePickerController: 946cec48c7873164274ecc4624d19e3da4c1ef3c
DKPhotoGallery: b3834fecb755ee09a593d7c9e389d8b5d6deed60
@@ -270,6 +282,7 @@ SPEC CHECKSUMS:
maplibre_gl: 3c924e44725147b03dda33430ad216005b40555f
native_video_player: b65c58951ede2f93d103a25366bdebca95081265
network_info_plus: cf61925ab5205dce05a4f0895989afdb6aade5fc
objective_c: 89e720c30d716b036faf9c9684022048eee1eee2
package_info_plus: af8e2ca6888548050f16fa2f1938db7b5a5df499
path_provider_foundation: 080d55be775b7414fd5a5ef3ac137b97b097e564
permission_handler_apple: 4ed2196e43d0651e8ff7ca3483a069d469701f2d

View File

@@ -3,7 +3,6 @@ import 'dart:io';
import 'dart:ui';
import 'package:background_downloader/background_downloader.dart';
import 'package:cancellation_token_http/http.dart';
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/constants/constants.dart';
@@ -63,7 +62,7 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
final Drift _drift;
final DriftLogger _driftLogger;
final BackgroundWorkerBgHostApi _backgroundHostApi;
final CancellationToken _cancellationToken = CancellationToken();
final Completer _cancellationToken = Completer();
final Logger _logger = Logger('BackgroundWorkerBgService');
bool _isCleanedUp = false;
@@ -188,7 +187,7 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
_isCleanedUp = true;
_ref.dispose();
_cancellationToken.cancel();
_cancellationToken.complete();
_logger.info("Cleaning up background worker");
final cleanupFutures = [
workerManager.dispose().catchError((_) async {

View File

@@ -1,15 +1,16 @@
import 'dart:async';
import 'dart:ffi';
import 'dart:io';
import 'dart:ui' as ui;
import 'package:cronet_http/cronet_http.dart';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:ffi/ffi.dart';
import 'package:http/http.dart' as http;
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/providers/image/cache/remote_image_cache_manager.dart';
import 'package:immich_mobile/presentation/widgets/timeline/constants.dart';
import 'package:immich_mobile/providers/infrastructure/platform.provider.dart';
import 'package:logging/logging.dart';
import 'package:immich_mobile/infrastructure/repositories/network.repository.dart';
part 'local_image_request.dart';
part 'thumbhash_image_request.dart';

View File

@@ -1,14 +1,18 @@
part of 'image_request.dart';
class RemoteImageRequest extends ImageRequest {
static final log = Logger('RemoteImageRequest');
static final client = HttpClient()..maxConnectionsPerHost = 16;
final RemoteCacheManager? cacheManager;
static final _client = const NetworkRepository().getHttpClient(
'thumbnails',
diskCapacity: kThumbnailDiskCacheSize,
memoryCapacity: 0,
maxConnections: 16,
cacheMode: CacheMode.disk,
);
final String uri;
final Map<String, String> headers;
HttpClientRequest? _request;
final abortTrigger = Completer<void>();
RemoteImageRequest({required this.uri, required this.headers, this.cacheManager});
RemoteImageRequest({required this.uri, required this.headers});
@override
Future<ImageInfo?> load(ImageDecoderCallback decode, {double scale = 1.0}) async {
@@ -16,15 +20,8 @@ class RemoteImageRequest extends ImageRequest {
return null;
}
// TODO: the cache manager makes everything sequential with its DB calls and its operations cannot be cancelled,
// so it ends up being a bottleneck. We only prefer fetching from it when it can skip the DB call.
final cachedFileImage = await _loadCachedFile(uri, decode, scale, inMemoryOnly: true);
if (cachedFileImage != null) {
return cachedFileImage;
}
try {
final buffer = await _downloadImage(uri);
final buffer = await _downloadImage();
if (buffer == null) {
return null;
}
@@ -35,57 +32,41 @@ class RemoteImageRequest extends ImageRequest {
return null;
}
final cachedFileImage = await _loadCachedFile(uri, decode, scale, inMemoryOnly: false);
if (cachedFileImage != null) {
return cachedFileImage;
}
rethrow;
} finally {
_request = null;
}
}
Future<ImmutableBuffer?> _downloadImage(String url) async {
Future<ImmutableBuffer?> _downloadImage() async {
if (_isCancelled) {
return null;
}
final request = _request = await client.getUrl(Uri.parse(url));
if (_isCancelled) {
request.abort();
return _request = null;
}
for (final entry in headers.entries) {
request.headers.set(entry.key, entry.value);
}
final response = await request.close();
final req = http.AbortableRequest('GET', Uri.parse(uri), abortTrigger: abortTrigger.future);
req.headers.addAll(headers);
final res = await _client.send(req);
if (_isCancelled) {
_onCancelled();
return null;
}
final cacheManager = this.cacheManager;
final streamController = StreamController<List<int>>(sync: true);
final Stream<List<int>> stream;
cacheManager?.putStreamedFile(url, streamController.stream);
stream = response.map((chunk) {
if (res.statusCode != 200) {
throw Exception('Failed to download $uri: ${res.statusCode}');
}
final stream = res.stream.map((chunk) {
if (_isCancelled) {
throw StateError('Cancelled request');
}
if (cacheManager != null) {
streamController.add(chunk);
}
return chunk;
});
try {
final Uint8List bytes = await _downloadBytes(stream, response.contentLength);
streamController.close();
final Uint8List bytes = await _downloadBytes(stream, res.contentLength ?? -1);
if (_isCancelled) {
return null;
}
return await ImmutableBuffer.fromUint8List(bytes);
} catch (e) {
streamController.addError(e);
streamController.close();
if (_isCancelled) {
return null;
}
@@ -122,40 +103,6 @@ class RemoteImageRequest extends ImageRequest {
return bytes;
}
Future<ImageInfo?> _loadCachedFile(
String url,
ImageDecoderCallback decode,
double scale, {
required bool inMemoryOnly,
}) async {
final cacheManager = this.cacheManager;
if (_isCancelled || cacheManager == null) {
return null;
}
final file = await (inMemoryOnly ? cacheManager.getFileFromMemory(url) : cacheManager.getFileFromCache(url));
if (_isCancelled || file == null) {
return null;
}
try {
final buffer = await ImmutableBuffer.fromFilePath(file.file.path);
return await _decodeBuffer(buffer, decode, scale);
} catch (e) {
log.severe('Failed to decode cached image', e);
_evictFile(url);
return null;
}
}
Future<void> _evictFile(String url) async {
try {
await cacheManager?.removeFile(url);
} catch (e) {
log.severe('Failed to remove cached image', e);
}
}
Future<ImageInfo?> _decodeBuffer(ImmutableBuffer buffer, ImageDecoderCallback decode, scale) async {
if (_isCancelled) {
buffer.dispose();
@@ -173,7 +120,6 @@ class RemoteImageRequest extends ImageRequest {
@override
void _onCancelled() {
_request?.abort();
_request = null;
abortTrigger.complete();
}
}

View File

@@ -0,0 +1,67 @@
import 'dart:io';
import 'package:cronet_http/cronet_http.dart';
import 'package:cupertino_http/cupertino_http.dart';
import 'package:http/http.dart' as http;
import 'package:immich_mobile/utils/user_agent.dart';
import 'package:path_provider/path_provider.dart';
class NetworkRepository {
static late Directory _cachePath;
static late String _userAgent;
static final _clients = <String, http.Client>{};
static Future<void> init() {
return (
getTemporaryDirectory().then((cachePath) => _cachePath = cachePath),
getUserAgentString().then((userAgent) => _userAgent = userAgent),
).wait;
}
static void reset() {
Future.microtask(init);
for (final client in _clients.values) {
client.close();
}
_clients.clear();
}
const NetworkRepository();
/// Note: when disk caching is enabled, only one client may use a given directory at a time.
/// Different isolates or engines must use different directories.
http.Client getHttpClient(
String directoryName, {
CacheMode cacheMode = CacheMode.memory,
int diskCapacity = 0,
int maxConnections = 6,
int memoryCapacity = 10 << 20,
}) {
final cachedClient = _clients[directoryName];
if (cachedClient != null) {
return cachedClient;
}
final directory = Directory('${_cachePath.path}/$directoryName');
directory.createSync(recursive: true);
if (Platform.isAndroid) {
final engine = CronetEngine.build(
cacheMode: cacheMode,
cacheMaxSize: diskCapacity,
storagePath: directory.path,
userAgent: _userAgent,
);
return _clients[directoryName] = CronetClient.fromCronetEngine(engine, closeEngine: true);
}
final config = URLSessionConfiguration.defaultSessionConfiguration()
..httpMaximumConnectionsPerHost = maxConnections
..cache = URLCache.withCapacity(
diskCapacity: diskCapacity,
memoryCapacity: memoryCapacity,
directory: directory.uri,
)
..httpAdditionalHeaders = {'User-Agent': _userAgent};
return _clients[directoryName] = CupertinoClient.fromSessionConfiguration(config);
}
}

View File

@@ -6,11 +6,13 @@ import 'package:immich_mobile/constants/constants.dart';
import 'package:immich_mobile/domain/models/store.model.dart';
import 'package:immich_mobile/domain/models/sync_event.model.dart';
import 'package:immich_mobile/entities/store.entity.dart';
import 'package:immich_mobile/infrastructure/repositories/network.repository.dart';
import 'package:immich_mobile/services/api.service.dart';
import 'package:logging/logging.dart';
import 'package:openapi/api.dart';
class SyncApiRepository {
static final _client = const NetworkRepository().getHttpClient('api');
final Logger _logger = Logger('SyncApiRepository');
final ApiService _api;
SyncApiRepository(this._api);
@@ -26,7 +28,7 @@ class SyncApiRepository {
http.Client? httpClient,
}) async {
final stopwatch = Stopwatch()..start();
final client = httpClient ?? http.Client();
final client = httpClient ?? _client;
final endpoint = "${_api.apiClient.basePath}/sync/stream";
final headers = {'Content-Type': 'application/json', 'Accept': 'application/jsonlines+json'};
@@ -112,8 +114,6 @@ class SyncApiRepository {
} catch (error, stack) {
_logger.severe("Error processing stream", error, stack);
return Future.error(error, stack);
} finally {
client.close();
}
stopwatch.stop();
_logger.info("Remote Sync completed in ${stopwatch.elapsed.inMilliseconds}ms");

View File

@@ -15,6 +15,7 @@ import 'package:immich_mobile/constants/locales.dart';
import 'package:immich_mobile/entities/store.entity.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/generated/codegen_loader.g.dart';
import 'package:immich_mobile/infrastructure/repositories/network.repository.dart';
import 'package:immich_mobile/providers/app_life_cycle.provider.dart';
import 'package:immich_mobile/providers/asset_viewer/share_intent_upload.provider.dart';
import 'package:immich_mobile/providers/db.provider.dart';
@@ -222,6 +223,14 @@ class ImmichAppState extends ConsumerState<ImmichApp> with WidgetsBindingObserve
super.dispose();
}
@override
void reassemble() {
if (kDebugMode) {
NetworkRepository.reset();
}
super.reassemble();
}
@override
Widget build(BuildContext context) {
final router = ref.watch(appRouterProvider);

View File

@@ -7,13 +7,11 @@ import 'package:immich_mobile/domain/services/setting.service.dart';
import 'package:immich_mobile/infrastructure/loaders/image_request.dart';
import 'package:immich_mobile/presentation/widgets/images/image_provider.dart';
import 'package:immich_mobile/presentation/widgets/images/one_frame_multi_image_stream_completer.dart';
import 'package:immich_mobile/providers/image/cache/remote_image_cache_manager.dart';
import 'package:immich_mobile/services/api.service.dart';
import 'package:immich_mobile/utils/image_url_builder.dart';
class RemoteThumbProvider extends CancellableImageProvider<RemoteThumbProvider>
with CancellableImageProviderMixin<RemoteThumbProvider> {
static final cacheManager = RemoteThumbnailCacheManager();
final String assetId;
RemoteThumbProvider({required this.assetId});
@@ -39,7 +37,6 @@ class RemoteThumbProvider extends CancellableImageProvider<RemoteThumbProvider>
final request = this.request = RemoteImageRequest(
uri: getThumbnailUrlForRemoteId(key.assetId),
headers: ApiService.getRequestHeaders(),
cacheManager: cacheManager,
);
return loadRequest(request, decode);
}
@@ -60,7 +57,6 @@ class RemoteThumbProvider extends CancellableImageProvider<RemoteThumbProvider>
class RemoteFullImageProvider extends CancellableImageProvider<RemoteFullImageProvider>
with CancellableImageProviderMixin<RemoteFullImageProvider> {
static final cacheManager = RemoteThumbnailCacheManager();
final String assetId;
RemoteFullImageProvider({required this.assetId});
@@ -92,11 +88,7 @@ class RemoteFullImageProvider extends CancellableImageProvider<RemoteFullImagePr
}
final headers = ApiService.getRequestHeaders();
final request = this.request = RemoteImageRequest(
uri: getPreviewUrlForRemoteId(key.assetId),
headers: headers,
cacheManager: cacheManager,
);
final request = this.request = RemoteImageRequest(uri: getPreviewUrlForRemoteId(key.assetId), headers: headers);
yield* loadRequest(request, decode);
if (isCancelled) {

View File

@@ -2,9 +2,11 @@ import 'dart:ui';
const double kTimelineHeaderExtent = 80.0;
const Size kTimelineFixedTileExtent = Size.square(256);
const Size kThumbnailResolution = Size.square(320); // TODO: make the resolution vary based on actual tile size
const double kTimelineSpacing = 2.0;
const int kTimelineColumnCount = 3;
const Duration kTimelineScrubberFadeInDuration = Duration(milliseconds: 300);
const Duration kTimelineScrubberFadeOutDuration = Duration(milliseconds: 800);
const Size kThumbnailResolution = Size.square(320); // TODO: make the resolution vary based on actual tile size
const kThumbnailDiskCacheSize = 1024 << 20; // 1GiB

View File

@@ -1,148 +1,25 @@
import 'package:flutter_cache_manager/flutter_cache_manager.dart';
// ignore: implementation_imports
import 'package:flutter_cache_manager/src/cache_store.dart';
import 'package:logging/logging.dart';
import 'package:uuid/uuid.dart';
abstract class RemoteCacheManager extends CacheManager {
static final _log = Logger('RemoteCacheManager');
RemoteCacheManager.custom(super.config, CacheStore store)
// Unfortunately, CacheStore is not a public API
// ignore: invalid_use_of_visible_for_testing_member
: super.custom(cacheStore: store);
Future<void> putStreamedFile(
String url,
Stream<List<int>> source, {
String? key,
String? eTag,
Duration maxAge = const Duration(days: 30),
String fileExtension = 'file',
});
// Unlike `putFileStream`, this method handles request cancellation,
// does not make a (slow) DB call checking if the file is already cached,
// does not synchronously check if a file exists,
// and deletes the file on cancellation without making these checks again.
Future<void> putStreamedFileToStore(
CacheStore store,
String url,
Stream<List<int>> source, {
String? key,
String? eTag,
Duration maxAge = const Duration(days: 30),
String fileExtension = 'file',
}) async {
final path = '${const Uuid().v1()}.$fileExtension';
final file = await store.fileSystem.createFile(path);
final sink = file.openWrite();
try {
await source.listen(sink.add, cancelOnError: true).asFuture();
} catch (e) {
try {
await sink.close();
await file.delete();
} catch (e) {
_log.severe('Failed to delete incomplete cache file: $e');
}
return;
}
try {
await sink.flush();
await sink.close();
} catch (e) {
try {
await file.delete();
} catch (e) {
_log.severe('Failed to delete incomplete cache file: $e');
}
return;
}
final cacheObject = CacheObject(
url,
key: key,
relativePath: path,
validTill: DateTime.now().add(maxAge),
eTag: eTag,
);
try {
await store.putFile(cacheObject);
} catch (e) {
try {
await file.delete();
} catch (e) {
_log.severe('Failed to delete untracked cache file: $e');
}
}
}
}
class RemoteImageCacheManager extends RemoteCacheManager {
class RemoteImageCacheManager extends CacheManager {
static const key = 'remoteImageCacheKey';
static final RemoteImageCacheManager _instance = RemoteImageCacheManager._();
static final _config = Config(key, maxNrOfCacheObjects: 500, stalePeriod: const Duration(days: 30));
static final _store = CacheStore(_config);
factory RemoteImageCacheManager() {
return _instance;
}
RemoteImageCacheManager._() : super.custom(_config, _store);
@override
Future<void> putStreamedFile(
String url,
Stream<List<int>> source, {
String? key,
String? eTag,
Duration maxAge = const Duration(days: 30),
String fileExtension = 'file',
}) {
return putStreamedFileToStore(
_store,
url,
source,
key: key,
eTag: eTag,
maxAge: maxAge,
fileExtension: fileExtension,
);
}
RemoteImageCacheManager._() : super(_config);
}
/// The cache manager for full size images [ImmichRemoteImageProvider]
class RemoteThumbnailCacheManager extends RemoteCacheManager {
class RemoteThumbnailCacheManager extends CacheManager {
static const key = 'remoteThumbnailCacheKey';
static final RemoteThumbnailCacheManager _instance = RemoteThumbnailCacheManager._();
static final _config = Config(key, maxNrOfCacheObjects: 5000, stalePeriod: const Duration(days: 30));
static final _store = CacheStore(_config);
factory RemoteThumbnailCacheManager() {
return _instance;
}
RemoteThumbnailCacheManager._() : super.custom(_config, _store);
@override
Future<void> putStreamedFile(
String url,
Stream<List<int>> source, {
String? key,
String? eTag,
Duration maxAge = const Duration(days: 30),
String fileExtension = 'file',
}) {
return putStreamedFileToStore(
_store,
url,
source,
key: key,
eTag: eTag,
maxAge: maxAge,
fileExtension: fileExtension,
);
}
RemoteThumbnailCacheManager._() : super(_config);
}

View File

@@ -1,12 +1,14 @@
import 'dart:async';
import 'dart:convert';
import 'dart:io';
import 'package:background_downloader/background_downloader.dart';
import 'package:cancellation_token_http/http.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:http/http.dart';
import 'package:immich_mobile/constants/constants.dart';
import 'package:immich_mobile/domain/models/store.model.dart';
import 'package:immich_mobile/entities/store.entity.dart';
import 'package:immich_mobile/infrastructure/repositories/network.repository.dart';
import 'package:logging/logging.dart';
import 'package:immich_mobile/utils/debug_print.dart';
@@ -20,6 +22,8 @@ class UploadTaskWithFile {
final uploadRepositoryProvider = Provider((ref) => UploadRepository());
class UploadRepository {
static final _client = const NetworkRepository().getHttpClient('upload');
void Function(TaskStatusUpdate)? onUploadStatus;
void Function(TaskProgressUpdate)? onTaskProgress;
@@ -92,13 +96,12 @@ class UploadRepository {
);
}
Future<void> backupWithDartClient(Iterable<UploadTaskWithFile> tasks, CancellationToken cancelToken) async {
final httpClient = Client();
Future<void> backupWithDartClient(Iterable<UploadTaskWithFile> tasks, Completer cancelToken) async {
final String savedEndpoint = Store.get(StoreKey.serverEndpoint);
Logger logger = Logger('UploadRepository');
for (final candidate in tasks) {
if (cancelToken.isCancelled) {
if (cancelToken.isCompleted) {
logger.warning("Backup was cancelled by the user");
break;
}
@@ -112,13 +115,17 @@ class UploadRepository {
filename: candidate.task.filename,
);
final baseRequest = MultipartRequest('POST', Uri.parse('$savedEndpoint/assets'));
final baseRequest = AbortableMultipartRequest(
'POST',
Uri.parse('$savedEndpoint/assets'),
abortTrigger: cancelToken.future,
)..headers['Accept'] = 'application/json';
baseRequest.headers.addAll(candidate.task.headers);
baseRequest.fields.addAll(candidate.task.fields);
baseRequest.files.add(assetRawUploadData);
final response = await httpClient.send(baseRequest, cancellationToken: cancelToken);
final response = await _client.send(baseRequest);
final responseBody = jsonDecode(await response.stream.bytesToString());
@@ -131,7 +138,7 @@ class UploadRepository {
continue;
}
} on CancelledException {
} on RequestAbortedException {
logger.warning("Backup was cancelled by the user");
break;
} catch (error, stackTrace) {

View File

@@ -3,9 +3,9 @@ import 'dart:convert';
import 'dart:io';
import 'package:device_info_plus/device_info_plus.dart';
import 'package:http/http.dart';
import 'package:immich_mobile/domain/models/store.model.dart';
import 'package:immich_mobile/entities/store.entity.dart';
import 'package:immich_mobile/infrastructure/repositories/network.repository.dart';
import 'package:immich_mobile/utils/url_helper.dart';
import 'package:logging/logging.dart';
import 'package:openapi/api.dart';
@@ -13,6 +13,7 @@ import 'package:immich_mobile/utils/user_agent.dart';
import 'package:immich_mobile/utils/debug_print.dart';
class ApiService implements Authentication {
static final _client = const NetworkRepository().getHttpClient('api');
late ApiClient _apiClient;
late UsersApi usersApi;
@@ -50,6 +51,7 @@ class ApiService implements Authentication {
setEndpoint(String endpoint) {
_apiClient = ApiClient(basePath: endpoint, authentication: this);
_apiClient.client = _client;
_setUserAgentHeader();
if (_accessToken != null) {
setAccessToken(_accessToken!);
@@ -134,13 +136,11 @@ class ApiService implements Authentication {
}
Future<String> _getWellKnownEndpoint(String baseUrl) async {
final Client client = Client();
try {
var headers = {"Accept": "application/json"};
headers.addAll(getRequestHeaders());
final res = await client
final res = await _client
.get(Uri.parse("$baseUrl/.well-known/immich"), headers: headers)
.timeout(const Duration(seconds: 5));

View File

@@ -3,7 +3,6 @@ import 'dart:convert';
import 'dart:io';
import 'package:background_downloader/background_downloader.dart';
import 'package:cancellation_token_http/http.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/constants/constants.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
@@ -158,7 +157,7 @@ class UploadService {
}
}
Future<void> startBackupWithHttpClient(String userId, bool hasWifi, CancellationToken token) async {
Future<void> startBackupWithHttpClient(String userId, bool hasWifi, Completer token) async {
await _storageRepository.clearCache();
shouldAbortQueuingTasks = false;
@@ -170,7 +169,7 @@ class UploadService {
const batchSize = 100;
for (int i = 0; i < candidates.length; i += batchSize) {
if (shouldAbortQueuingTasks || token.isCancelled) {
if (shouldAbortQueuingTasks || token.isCompleted) {
break;
}

View File

@@ -21,6 +21,7 @@ import 'package:immich_mobile/infrastructure/entities/user.entity.dart';
import 'package:immich_mobile/infrastructure/repositories/db.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/log.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/logger_db.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/network.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/store.repository.dart';
import 'package:isar/isar.dart';
import 'package:path_provider/path_provider.dart';
@@ -106,5 +107,7 @@ abstract final class Bootstrap {
storeRepository: storeRepo,
shouldBuffer: shouldBufferLogs,
);
await NetworkRepository.init();
}
}

View File

@@ -393,6 +393,7 @@ Class | Method | HTTP request | Description
- [LoginCredentialDto](doc//LoginCredentialDto.md)
- [LoginResponseDto](doc//LoginResponseDto.md)
- [LogoutResponseDto](doc//LogoutResponseDto.md)
- [MachineLearningAvailabilityChecksDto](doc//MachineLearningAvailabilityChecksDto.md)
- [ManualJobName](doc//ManualJobName.md)
- [MapMarkerResponseDto](doc//MapMarkerResponseDto.md)
- [MapReverseGeocodeResponseDto](doc//MapReverseGeocodeResponseDto.md)

View File

@@ -164,6 +164,7 @@ part 'model/log_level.dart';
part 'model/login_credential_dto.dart';
part 'model/login_response_dto.dart';
part 'model/logout_response_dto.dart';
part 'model/machine_learning_availability_checks_dto.dart';
part 'model/manual_job_name.dart';
part 'model/map_marker_response_dto.dart';
part 'model/map_reverse_geocode_response_dto.dart';

View File

@@ -382,6 +382,8 @@ class ApiClient {
return LoginResponseDto.fromJson(value);
case 'LogoutResponseDto':
return LogoutResponseDto.fromJson(value);
case 'MachineLearningAvailabilityChecksDto':
return MachineLearningAvailabilityChecksDto.fromJson(value);
case 'ManualJobName':
return ManualJobNameTypeTransformer().decode(value);
case 'MapMarkerResponseDto':

View File

@@ -0,0 +1,115 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class MachineLearningAvailabilityChecksDto {
/// Returns a new [MachineLearningAvailabilityChecksDto] instance.
MachineLearningAvailabilityChecksDto({
required this.enabled,
required this.interval,
required this.timeout,
});
bool enabled;
num interval;
num timeout;
@override
bool operator ==(Object other) => identical(this, other) || other is MachineLearningAvailabilityChecksDto &&
other.enabled == enabled &&
other.interval == interval &&
other.timeout == timeout;
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(enabled.hashCode) +
(interval.hashCode) +
(timeout.hashCode);
@override
String toString() => 'MachineLearningAvailabilityChecksDto[enabled=$enabled, interval=$interval, timeout=$timeout]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'enabled'] = this.enabled;
json[r'interval'] = this.interval;
json[r'timeout'] = this.timeout;
return json;
}
/// Returns a new [MachineLearningAvailabilityChecksDto] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static MachineLearningAvailabilityChecksDto? fromJson(dynamic value) {
upgradeDto(value, "MachineLearningAvailabilityChecksDto");
if (value is Map) {
final json = value.cast<String, dynamic>();
return MachineLearningAvailabilityChecksDto(
enabled: mapValueOfType<bool>(json, r'enabled')!,
interval: num.parse('${json[r'interval']}'),
timeout: num.parse('${json[r'timeout']}'),
);
}
return null;
}
static List<MachineLearningAvailabilityChecksDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <MachineLearningAvailabilityChecksDto>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = MachineLearningAvailabilityChecksDto.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, MachineLearningAvailabilityChecksDto> mapFromJson(dynamic json) {
final map = <String, MachineLearningAvailabilityChecksDto>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = MachineLearningAvailabilityChecksDto.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of MachineLearningAvailabilityChecksDto-objects as value to a dart map
static Map<String, List<MachineLearningAvailabilityChecksDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<MachineLearningAvailabilityChecksDto>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = MachineLearningAvailabilityChecksDto.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'enabled',
'interval',
'timeout',
};
}

View File

@@ -13,14 +13,16 @@ part of openapi.api;
class SystemConfigMachineLearningDto {
/// Returns a new [SystemConfigMachineLearningDto] instance.
SystemConfigMachineLearningDto({
required this.availabilityChecks,
required this.clip,
required this.duplicateDetection,
required this.enabled,
required this.facialRecognition,
this.url,
this.urls = const [],
});
MachineLearningAvailabilityChecksDto availabilityChecks;
CLIPConfig clip;
DuplicateDetectionConfig duplicateDetection;
@@ -29,50 +31,37 @@ class SystemConfigMachineLearningDto {
FacialRecognitionConfig facialRecognition;
/// This property was deprecated in v1.122.0
///
/// Please note: This property should have been non-nullable! Since the specification file
/// does not include a default value (using the "default:" property), however, the generated
/// source code must fall back to having a nullable type.
/// Consider adding a "default:" property in the specification file to hide this note.
///
String? url;
List<String> urls;
@override
bool operator ==(Object other) => identical(this, other) || other is SystemConfigMachineLearningDto &&
other.availabilityChecks == availabilityChecks &&
other.clip == clip &&
other.duplicateDetection == duplicateDetection &&
other.enabled == enabled &&
other.facialRecognition == facialRecognition &&
other.url == url &&
_deepEquality.equals(other.urls, urls);
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(availabilityChecks.hashCode) +
(clip.hashCode) +
(duplicateDetection.hashCode) +
(enabled.hashCode) +
(facialRecognition.hashCode) +
(url == null ? 0 : url!.hashCode) +
(urls.hashCode);
@override
String toString() => 'SystemConfigMachineLearningDto[clip=$clip, duplicateDetection=$duplicateDetection, enabled=$enabled, facialRecognition=$facialRecognition, url=$url, urls=$urls]';
String toString() => 'SystemConfigMachineLearningDto[availabilityChecks=$availabilityChecks, clip=$clip, duplicateDetection=$duplicateDetection, enabled=$enabled, facialRecognition=$facialRecognition, urls=$urls]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'availabilityChecks'] = this.availabilityChecks;
json[r'clip'] = this.clip;
json[r'duplicateDetection'] = this.duplicateDetection;
json[r'enabled'] = this.enabled;
json[r'facialRecognition'] = this.facialRecognition;
if (this.url != null) {
json[r'url'] = this.url;
} else {
// json[r'url'] = null;
}
json[r'urls'] = this.urls;
return json;
}
@@ -86,11 +75,11 @@ class SystemConfigMachineLearningDto {
final json = value.cast<String, dynamic>();
return SystemConfigMachineLearningDto(
availabilityChecks: MachineLearningAvailabilityChecksDto.fromJson(json[r'availabilityChecks'])!,
clip: CLIPConfig.fromJson(json[r'clip'])!,
duplicateDetection: DuplicateDetectionConfig.fromJson(json[r'duplicateDetection'])!,
enabled: mapValueOfType<bool>(json, r'enabled')!,
facialRecognition: FacialRecognitionConfig.fromJson(json[r'facialRecognition'])!,
url: mapValueOfType<String>(json, r'url'),
urls: json[r'urls'] is Iterable
? (json[r'urls'] as Iterable).cast<String>().toList(growable: false)
: const [],
@@ -141,6 +130,7 @@ class SystemConfigMachineLearningDto {
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'availabilityChecks',
'clip',
'duplicateDetection',
'enabled',

View File

@@ -337,6 +337,14 @@ packages:
url: "https://pub.dev"
source: hosted
version: "3.1.2"
cronet_http:
dependency: "direct main"
description:
name: cronet_http
sha256: "1b99ad5ae81aa9d2f12900e5f17d3681f3828629bb7f7fe7ad88076a34209840"
url: "https://pub.dev"
source: hosted
version: "1.5.0"
crop_image:
dependency: "direct main"
description:
@@ -369,6 +377,14 @@ packages:
url: "https://pub.dev"
source: hosted
version: "1.0.2"
cupertino_http:
dependency: "direct main"
description:
name: cupertino_http
sha256: "72187f715837290a63479a5b0ae709f4fedad0ed6bd0441c275eceaa02d5abae"
url: "https://pub.dev"
source: hosted
version: "2.3.0"
custom_lint:
dependency: "direct dev"
description:
@@ -899,10 +915,10 @@ packages:
dependency: "direct main"
description:
name: http
sha256: fe7ab022b76f3034adc518fb6ea04a82387620e19977665ea18d30a1cf43442f
sha256: bb2ce4590bc2667c96f318d68cac1b5a7987ec819351d32b1c987239a815e007
url: "https://pub.dev"
source: hosted
version: "1.3.0"
version: "1.5.0"
http_multi_server:
dependency: transitive
description:
@@ -919,6 +935,14 @@ packages:
url: "https://pub.dev"
source: hosted
version: "4.1.2"
http_profile:
dependency: transitive
description:
name: http_profile
sha256: "7e679e355b09aaee2ab5010915c932cce3f2d1c11c3b2dc177891687014ffa78"
url: "https://pub.dev"
source: hosted
version: "0.1.0"
image:
dependency: transitive
description:
@@ -1044,6 +1068,14 @@ packages:
url: "https://github.com/immich-app/isar"
source: git
version: "3.1.8"
jni:
dependency: transitive
description:
name: jni
sha256: d2c361082d554d4593c3012e26f6b188f902acd291330f13d6427641a92b3da1
url: "https://pub.dev"
source: hosted
version: "0.14.2"
js:
dependency: transitive
description:
@@ -1237,6 +1269,14 @@ packages:
url: "https://pub.dev"
source: hosted
version: "0.5.0"
objective_c:
dependency: transitive
description:
name: objective_c
sha256: "9f034ba1eeca53ddb339bc8f4813cb07336a849cd735559b60cdc068ecce2dc7"
url: "https://pub.dev"
source: hosted
version: "7.1.0"
octo_image:
dependency: "direct main"
description:

View File

@@ -89,6 +89,8 @@ dependencies:
# DB
drift: ^2.23.1
drift_flutter: ^0.2.4
cronet_http: ^1.5.0
cupertino_http: ^2.3.0
dev_dependencies:
flutter_test:

View File

@@ -118,7 +118,6 @@ void main() {
expect(onDataCallCount, 1);
expect(abortWasCalledInCallback, isTrue);
expect(receivedEventsBatch1.length, testBatchSize);
verify(() => mockHttpClient.close()).called(1);
});
test('streamChanges does not process remaining lines in finally block if aborted', () async {
@@ -159,7 +158,6 @@ void main() {
expect(onDataCallCount, 1);
expect(abortWasCalledInCallback, isTrue);
verify(() => mockHttpClient.close()).called(1);
});
test('streamChanges processes remaining lines in finally block if not aborted', () async {
@@ -204,7 +202,6 @@ void main() {
expect(onDataCallCount, 2);
expect(receivedEventsBatch1.length, testBatchSize);
expect(receivedEventsBatch2.length, 1);
verify(() => mockHttpClient.close()).called(1);
});
test('streamChanges handles stream error gracefully', () async {
@@ -229,7 +226,6 @@ void main() {
await expectLater(streamChangesFuture, throwsA(streamError));
expect(onDataCallCount, 0);
verify(() => mockHttpClient.close()).called(1);
});
test('streamChanges throws ApiException on non-200 status code', () async {
@@ -257,6 +253,5 @@ void main() {
);
expect(onDataCallCount, 0);
verify(() => mockHttpClient.close()).called(1);
});
}

View File

@@ -12259,6 +12259,25 @@
],
"type": "object"
},
"MachineLearningAvailabilityChecksDto": {
"properties": {
"enabled": {
"type": "boolean"
},
"interval": {
"type": "number"
},
"timeout": {
"type": "number"
}
},
"required": [
"enabled",
"interval",
"timeout"
],
"type": "object"
},
"ManualJobName": {
"enum": [
"person-cleanup",
@@ -16395,6 +16414,9 @@
},
"SystemConfigMachineLearningDto": {
"properties": {
"availabilityChecks": {
"$ref": "#/components/schemas/MachineLearningAvailabilityChecksDto"
},
"clip": {
"$ref": "#/components/schemas/CLIPConfig"
},
@@ -16407,11 +16429,6 @@
"facialRecognition": {
"$ref": "#/components/schemas/FacialRecognitionConfig"
},
"url": {
"deprecated": true,
"description": "This property was deprecated in v1.122.0",
"type": "string"
},
"urls": {
"format": "uri",
"items": {
@@ -16423,6 +16440,7 @@
}
},
"required": [
"availabilityChecks",
"clip",
"duplicateDetection",
"enabled",

View File

@@ -1383,6 +1383,11 @@ export type SystemConfigLoggingDto = {
enabled: boolean;
level: LogLevel;
};
export type MachineLearningAvailabilityChecksDto = {
enabled: boolean;
interval: number;
timeout: number;
};
export type ClipConfig = {
enabled: boolean;
modelName: string;
@@ -1399,12 +1404,11 @@ export type FacialRecognitionConfig = {
modelName: string;
};
export type SystemConfigMachineLearningDto = {
availabilityChecks: MachineLearningAvailabilityChecksDto;
clip: ClipConfig;
duplicateDetection: DuplicateDetectionConfig;
enabled: boolean;
facialRecognition: FacialRecognitionConfig;
/** This property was deprecated in v1.122.0 */
url?: string;
urls: string[];
};
export type SystemConfigMapDto = {

View File

@@ -3,7 +3,7 @@
"version": "0.0.1",
"description": "Monorepo for Immich",
"private": true,
"packageManager": "pnpm@10.14.0+sha512.ad27a79641b49c3e481a16a805baa71817a04bbe06a38d17e60e2eaee83f6a146c6a688125f5792e48dd5ba30e7da52a5cda4c3992b9ccf333f9ce223af84748",
"packageManager": "pnpm@10.15.1+sha512.34e538c329b5553014ca8e8f4535997f96180a1d0f614339357449935350d924e22f8614682191264ec33d1462ac21561aff97f6bb18065351c162c7e8f6de67",
"engines": {
"pnpm": ">=10.0.0"
}

pnpm-lock.yaml generated (2602 changes)

File diff suppressed because it is too large

View File

@@ -44,14 +44,14 @@
"@nestjs/websockets": "^11.0.4",
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/context-async-hooks": "^2.0.0",
"@opentelemetry/exporter-prometheus": "^0.203.0",
"@opentelemetry/instrumentation-http": "^0.203.0",
"@opentelemetry/instrumentation-ioredis": "^0.51.0",
"@opentelemetry/instrumentation-nestjs-core": "^0.49.0",
"@opentelemetry/instrumentation-pg": "^0.56.0",
"@opentelemetry/exporter-prometheus": "^0.205.0",
"@opentelemetry/instrumentation-http": "^0.205.0",
"@opentelemetry/instrumentation-ioredis": "^0.53.0",
"@opentelemetry/instrumentation-nestjs-core": "^0.51.0",
"@opentelemetry/instrumentation-pg": "^0.58.0",
"@opentelemetry/resources": "^2.0.1",
"@opentelemetry/sdk-metrics": "^2.0.1",
"@opentelemetry/sdk-node": "^0.203.0",
"@opentelemetry/sdk-node": "^0.205.0",
"@opentelemetry/semantic-conventions": "^1.34.0",
"@react-email/components": "^0.5.0",
"@react-email/render": "^1.1.2",

View File

@@ -15,6 +15,7 @@ import { repositories } from 'src/repositories';
import { AccessRepository } from 'src/repositories/access.repository';
import { ConfigRepository } from 'src/repositories/config.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { MachineLearningRepository } from 'src/repositories/machine-learning.repository';
import { SyncRepository } from 'src/repositories/sync.repository';
import { AuthService } from 'src/services/auth.service';
import { getKyselyConfig } from 'src/utils/database';
@@ -57,7 +58,7 @@ class SqlGenerator {
try {
await this.setup();
for (const Repository of repositories) {
if (Repository === LoggingRepository) {
if (Repository === LoggingRepository || Repository === MachineLearningRepository) {
continue;
}
await this.process(Repository);

View File

@@ -54,6 +54,11 @@ export interface SystemConfig {
machineLearning: {
enabled: boolean;
urls: string[];
availabilityChecks: {
enabled: boolean;
timeout: number;
interval: number;
};
clip: {
enabled: boolean;
modelName: string;
@@ -176,6 +181,8 @@ export interface SystemConfig {
};
}
export type MachineLearningConfig = SystemConfig['machineLearning'];
export const defaults = Object.freeze<SystemConfig>({
backup: {
database: {
@@ -227,6 +234,11 @@ export const defaults = Object.freeze<SystemConfig>({
machineLearning: {
enabled: process.env.IMMICH_MACHINE_LEARNING_ENABLED !== 'false',
urls: [process.env.IMMICH_MACHINE_LEARNING_URL || 'http://immich-machine-learning:3003'],
availabilityChecks: {
enabled: true,
timeout: Number(process.env.IMMICH_MACHINE_LEARNING_PING_TIMEOUT) || 2000,
interval: 30_000,
},
clip: {
enabled: true,
modelName: 'ViT-B-32__openai',

View File

@@ -51,11 +51,6 @@ export const serverVersion = new SemVer(version);
export const AUDIT_LOG_MAX_DURATION = Duration.fromObject({ days: 100 });
export const ONE_HOUR = Duration.fromObject({ hours: 1 });
export const MACHINE_LEARNING_PING_TIMEOUT = Number(process.env.MACHINE_LEARNING_PING_TIMEOUT || 2000);
export const MACHINE_LEARNING_AVAILABILITY_BACKOFF_TIME = Number(
process.env.MACHINE_LEARNING_AVAILABILITY_BACKOFF_TIME || 30_000,
);
export const citiesFile = 'cities500.txt';
export const reverseGeocodeMaxDistance = 25_000;

View File

@@ -6,7 +6,7 @@ import { PropertyLifecycle } from 'src/decorators';
import { AlbumResponseDto } from 'src/dtos/album.dto';
import { AssetResponseDto } from 'src/dtos/asset-response.dto';
import { AssetOrder, AssetType, AssetVisibility } from 'src/enum';
import { Optional, ValidateBoolean, ValidateDate, ValidateEnum, ValidateUUID } from 'src/validation';
import { Optional, ValidateBoolean, ValidateDate, ValidateEnum, ValidateString, ValidateUUID } from 'src/validation';
class BaseSearchDto {
@ValidateUUID({ optional: true, nullable: true })
@@ -144,9 +144,7 @@ export class MetadataSearchDto extends RandomSearchDto {
@Optional()
deviceAssetId?: string;
@IsString()
@IsNotEmpty()
@Optional()
@ValidateString({ optional: true, trim: true })
description?: string;
@IsString()
@@ -154,9 +152,7 @@ export class MetadataSearchDto extends RandomSearchDto {
@Optional()
checksum?: string;
@IsString()
@IsNotEmpty()
@Optional()
@ValidateString({ optional: true, trim: true })
originalFileName?: string;
@IsString()
@@ -190,16 +186,12 @@ export class MetadataSearchDto extends RandomSearchDto {
}
export class StatisticsSearchDto extends BaseSearchDto {
@IsString()
@IsNotEmpty()
@Optional()
@ValidateString({ optional: true, trim: true })
description?: string;
}
export class SmartSearchDto extends BaseSearchWithResultsDto {
@IsString()
@IsNotEmpty()
@Optional()
@ValidateString({ optional: true, trim: true })
query?: string;
@ValidateUUID({ optional: true })

View File

@@ -1,5 +1,5 @@
import { ApiProperty } from '@nestjs/swagger';
import { Exclude, Transform, Type } from 'class-transformer';
import { Type } from 'class-transformer';
import {
ArrayMinSize,
IsInt,
@@ -15,7 +15,6 @@ import {
ValidateNested,
} from 'class-validator';
import { SystemConfig } from 'src/config';
import { PropertyLifecycle } from 'src/decorators';
import { CLIPConfig, DuplicateDetectionConfig, FacialRecognitionConfig } from 'src/dtos/model-config.dto';
import {
AudioCodec,
@@ -257,21 +256,32 @@ class SystemConfigLoggingDto {
level!: LogLevel;
}
class MachineLearningAvailabilityChecksDto {
@ValidateBoolean()
enabled!: boolean;
@IsInt()
timeout!: number;
@IsInt()
interval!: number;
}
class SystemConfigMachineLearningDto {
@ValidateBoolean()
enabled!: boolean;
@PropertyLifecycle({ deprecatedAt: 'v1.122.0' })
@Exclude()
url?: string;
@IsUrl({ require_tld: false, allow_underscores: true }, { each: true })
@ArrayMinSize(1)
@Transform(({ obj, value }) => (obj.url ? [obj.url] : value))
@ValidateIf((dto) => dto.enabled)
@ApiProperty({ type: 'array', items: { type: 'string', format: 'uri' }, minItems: 1 })
urls!: string[];
@Type(() => MachineLearningAvailabilityChecksDto)
@ValidateNested()
@IsObject()
availabilityChecks!: MachineLearningAvailabilityChecksDto;
@Type(() => CLIPConfig)
@ValidateNested()
@IsObject()

View File

@@ -142,6 +142,10 @@ export class LoggingRepository {
this.handleMessage(LogLevel.Fatal, message, details);
}
deprecate(message: string) {
this.warn(`[Deprecated] ${message}`);
}
private handleFunction(level: LogLevel, message: LogFunction, details: LogDetails[]) {
if (this.logger.isLevelEnabled(level)) {
this.handleMessage(level, message(), details);

View File

@@ -1,6 +1,7 @@
import { Injectable } from '@nestjs/common';
import { Duration } from 'luxon';
import { readFile } from 'node:fs/promises';
import { MACHINE_LEARNING_AVAILABILITY_BACKOFF_TIME, MACHINE_LEARNING_PING_TIMEOUT } from 'src/constants';
import { MachineLearningConfig } from 'src/config';
import { CLIPConfig } from 'src/dtos/model-config.dto';
import { LoggingRepository } from 'src/repositories/logging.repository';
@@ -57,82 +58,100 @@ export type TextEncodingOptions = ModelOptions & { language?: string };
@Injectable()
export class MachineLearningRepository {
// Note that deleted URL's are not removed from this map (ie: they're leaked)
// Cleaning them up is low priority since there should be very few over a
// typical server uptime cycle
private urlAvailability: {
[url: string]:
| {
active: boolean;
lastChecked: number;
}
| undefined;
};
private healthyMap: Record<string, boolean> = {};
private interval?: ReturnType<typeof setInterval>;
private _config?: MachineLearningConfig;
private get config(): MachineLearningConfig {
if (!this._config) {
throw new Error('Machine learning repository not been setup');
}
return this._config;
}
constructor(private logger: LoggingRepository) {
this.logger.setContext(MachineLearningRepository.name);
this.urlAvailability = {};
}
private setUrlAvailability(url: string, active: boolean) {
const current = this.urlAvailability[url];
if (current?.active !== active) {
this.logger.verbose(`Setting ${url} ML server to ${active ? 'active' : 'inactive'}.`);
setup(config: MachineLearningConfig) {
this._config = config;
this.teardown();
// delete old servers
for (const url of Object.keys(this.healthyMap)) {
if (!config.urls.includes(url)) {
delete this.healthyMap[url];
}
}
this.urlAvailability[url] = {
active,
lastChecked: Date.now(),
};
if (!config.availabilityChecks.enabled) {
return;
}
this.tick();
this.interval = setInterval(
() => this.tick(),
Duration.fromObject({ milliseconds: config.availabilityChecks.interval }).as('milliseconds'),
);
}
private async checkAvailability(url: string) {
let active = false;
teardown() {
if (this.interval) {
clearInterval(this.interval);
}
}
private tick() {
for (const url of this.config.urls) {
void this.check(url);
}
}
private async check(url: string) {
let healthy = false;
try {
const response = await fetch(new URL('/ping', url), {
signal: AbortSignal.timeout(MACHINE_LEARNING_PING_TIMEOUT),
signal: AbortSignal.timeout(this.config.availabilityChecks.timeout),
});
active = response.ok;
if (response.ok) {
healthy = true;
}
} catch {
// nothing to do here
}
this.setUrlAvailability(url, active);
return active;
this.setHealthy(url, healthy);
}
private async shouldSkipUrl(url: string) {
const availability = this.urlAvailability[url];
if (availability === undefined) {
// If this is a new endpoint, then check inline and skip if it fails
if (!(await this.checkAvailability(url))) {
return true;
}
return false;
private setHealthy(url: string, healthy: boolean) {
if (this.healthyMap[url] !== healthy) {
this.logger.log(`Machine learning server became ${healthy ? 'healthy' : 'unhealthy'} (${url}).`);
}
if (!availability.active && Date.now() - availability.lastChecked < MACHINE_LEARNING_AVAILABILITY_BACKOFF_TIME) {
// If this is an old inactive endpoint that hasn't been checked in a
// while then check but don't wait for the result, just skip it
// This avoids delays on every search whilst allowing higher priority
// ML servers to recover over time.
void this.checkAvailability(url);
this.healthyMap[url] = healthy;
}
private isHealthy(url: string) {
if (!this.config.availabilityChecks.enabled) {
return true;
}
return false;
return this.healthyMap[url];
}
private async predict<T>(urls: string[], payload: ModelPayload, config: MachineLearningRequest): Promise<T> {
private async predict<T>(payload: ModelPayload, config: MachineLearningRequest): Promise<T> {
const formData = await this.getFormData(payload, config);
let urlCounter = 0;
for (const url of urls) {
urlCounter++;
const isLast = urlCounter >= urls.length;
if (!isLast && (await this.shouldSkipUrl(url))) {
continue;
}
for (const url of [
// try healthy servers first
...this.config.urls.filter((url) => this.isHealthy(url)),
...this.config.urls.filter((url) => !this.isHealthy(url)),
]) {
try {
const response = await fetch(new URL('/predict', url), { method: 'POST', body: formData });
if (response.ok) {
this.setUrlAvailability(url, true);
this.setHealthy(url, true);
return response.json();
}
@@ -144,20 +163,21 @@ export class MachineLearningRepository {
`Machine learning request to "${url}" failed: ${error instanceof Error ? error.message : error}`,
);
}
this.setUrlAvailability(url, false);
this.setHealthy(url, false);
}
throw new Error(`Machine learning request '${JSON.stringify(config)}' failed for all URLs`);
}
async detectFaces(urls: string[], imagePath: string, { modelName, minScore }: FaceDetectionOptions) {
async detectFaces(imagePath: string, { modelName, minScore }: FaceDetectionOptions) {
const request = {
[ModelTask.FACIAL_RECOGNITION]: {
[ModelType.DETECTION]: { modelName, options: { minScore } },
[ModelType.RECOGNITION]: { modelName },
},
};
const response = await this.predict<FacialRecognitionResponse>(urls, { imagePath }, request);
const response = await this.predict<FacialRecognitionResponse>({ imagePath }, request);
return {
imageHeight: response.imageHeight,
imageWidth: response.imageWidth,
@@ -165,15 +185,15 @@ export class MachineLearningRepository {
};
}
async encodeImage(urls: string[], imagePath: string, { modelName }: CLIPConfig) {
async encodeImage(imagePath: string, { modelName }: CLIPConfig) {
const request = { [ModelTask.SEARCH]: { [ModelType.VISUAL]: { modelName } } };
const response = await this.predict<ClipVisualResponse>(urls, { imagePath }, request);
const response = await this.predict<ClipVisualResponse>({ imagePath }, request);
return response[ModelTask.SEARCH];
}
async encodeText(urls: string[], text: string, { language, modelName }: TextEncodingOptions) {
async encodeText(text: string, { language, modelName }: TextEncodingOptions) {
const request = { [ModelTask.SEARCH]: { [ModelType.TEXTUAL]: { modelName, options: { language } } } };
const response = await this.predict<ClipTextualResponse>(urls, { text }, request);
const response = await this.predict<ClipTextualResponse>({ text }, request);
return response[ModelTask.SEARCH];
}
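
A condensed, self-contained sketch of the availability-check flow this hunk introduces, for readers following the fragmented diff: a background interval pings each configured URL and records the result in a health map, and requests then try healthy servers before unhealthy ones. The standalone functions and config literal below are illustrative only; the real logic lives in MachineLearningRepository above.

// Illustrative sketch of the new health-check scheduling and healthy-first URL ordering.
type AvailabilityChecks = { enabled: boolean; timeout: number; interval: number };
type MlConfig = { urls: string[]; availabilityChecks: AvailabilityChecks };

const healthyMap: Record<string, boolean> = {};

async function check(url: string, timeoutMs: number): Promise<void> {
  let healthy = false;
  try {
    const response = await fetch(new URL('/ping', url), { signal: AbortSignal.timeout(timeoutMs) });
    healthy = response.ok;
  } catch {
    // network error or timeout: leave healthy = false
  }
  healthyMap[url] = healthy;
}

function setup(config: MlConfig): ReturnType<typeof setInterval> | undefined {
  if (!config.availabilityChecks.enabled) {
    return undefined; // with checks disabled, every URL is treated as healthy
  }
  const tick = () => config.urls.forEach((url) => void check(url, config.availabilityChecks.timeout));
  tick(); // prime the map immediately, then re-check on the configured interval
  return setInterval(tick, config.availabilityChecks.interval);
}

// predict() walks the URLs healthy-first, falling back to the rest if none succeed.
function orderUrls(config: MlConfig): string[] {
  const isHealthy = (url: string) => !config.availabilityChecks.enabled || healthyMap[url];
  return [...config.urls.filter(isHealthy), ...config.urls.filter((url) => !isHealthy(url))];
}

When the system config is updated, the real repository calls teardown() to clear any previous interval before rescheduling, and prunes URLs that are no longer configured from its health map.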

View File

@@ -729,7 +729,6 @@ describe(PersonService.name, () => {
mocks.assetJob.getForDetectFacesJob.mockResolvedValue({ ...assetStub.image, files: [assetStub.image.files[1]] });
await sut.handleDetectFaces({ id: assetStub.image.id });
expect(mocks.machineLearning.detectFaces).toHaveBeenCalledWith(
['http://immich-machine-learning:3003'],
'/uploads/user-id/thumbs/path.jpg',
expect.objectContaining({ minScore: 0.7, modelName: 'buffalo_l' }),
);

View File

@@ -316,7 +316,6 @@ export class PersonService extends BaseService {
}
const { imageHeight, imageWidth, faces } = await this.machineLearningRepository.detectFaces(
machineLearning.urls,
previewFile.path,
machineLearning.facialRecognition,
);

View File

@@ -211,7 +211,6 @@ describe(SearchService.name, () => {
await sut.searchSmart(authStub.user1, { query: 'test' });
expect(mocks.machineLearning.encodeText).toHaveBeenCalledWith(
[expect.any(String)],
'test',
expect.objectContaining({ modelName: expect.any(String) }),
);
@@ -225,7 +224,6 @@ describe(SearchService.name, () => {
await sut.searchSmart(authStub.user1, { query: 'test', page: 2, size: 50 });
expect(mocks.machineLearning.encodeText).toHaveBeenCalledWith(
[expect.any(String)],
'test',
expect.objectContaining({ modelName: expect.any(String) }),
);
@@ -243,7 +241,6 @@ describe(SearchService.name, () => {
await sut.searchSmart(authStub.user1, { query: 'test' });
expect(mocks.machineLearning.encodeText).toHaveBeenCalledWith(
[expect.any(String)],
'test',
expect.objectContaining({ modelName: 'ViT-B-16-SigLIP__webli' }),
);
@@ -253,7 +250,6 @@ describe(SearchService.name, () => {
await sut.searchSmart(authStub.user1, { query: 'test', language: 'de' });
expect(mocks.machineLearning.encodeText).toHaveBeenCalledWith(
[expect.any(String)],
'test',
expect.objectContaining({ language: 'de' }),
);

View File

@@ -118,7 +118,7 @@ export class SearchService extends BaseService {
const key = machineLearning.clip.modelName + dto.query + dto.language;
embedding = this.embeddingCache.get(key);
if (!embedding) {
embedding = await this.machineLearningRepository.encodeText(machineLearning.urls, dto.query, {
embedding = await this.machineLearningRepository.encodeText(dto.query, {
modelName: machineLearning.clip.modelName,
language: dto.language,
});

View File

@@ -205,7 +205,6 @@ describe(SmartInfoService.name, () => {
expect(await sut.handleEncodeClip({ id: assetStub.image.id })).toEqual(JobStatus.Success);
expect(mocks.machineLearning.encodeImage).toHaveBeenCalledWith(
['http://immich-machine-learning:3003'],
'/uploads/user-id/thumbs/path.jpg',
expect.objectContaining({ modelName: 'ViT-B-32__openai' }),
);
@@ -242,7 +241,6 @@ describe(SmartInfoService.name, () => {
expect(mocks.database.wait).toHaveBeenCalledWith(512);
expect(mocks.machineLearning.encodeImage).toHaveBeenCalledWith(
['http://immich-machine-learning:3003'],
'/uploads/user-id/thumbs/path.jpg',
expect.objectContaining({ modelName: 'ViT-B-32__openai' }),
);

View File

@@ -108,11 +108,7 @@ export class SmartInfoService extends BaseService {
return JobStatus.Skipped;
}
const embedding = await this.machineLearningRepository.encodeImage(
machineLearning.urls,
asset.files[0].path,
machineLearning.clip,
);
const embedding = await this.machineLearningRepository.encodeImage(asset.files[0].path, machineLearning.clip);
if (this.databaseRepository.isBusy(DatabaseLock.CLIPDimSize)) {
this.logger.verbose(`Waiting for CLIP dimension size to be updated`);

View File

@@ -82,6 +82,11 @@ const updatedConfig = Object.freeze<SystemConfig>({
machineLearning: {
enabled: true,
urls: ['http://immich-machine-learning:3003'],
availabilityChecks: {
enabled: true,
interval: 30_000,
timeout: 2000,
},
clip: {
enabled: true,
modelName: 'ViT-B-32__openai',
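
For reference, the new availabilityChecks block appears to replace the two deprecated environment variables mentioned later in this diff; the mapping below is inferred from the matching defaults and is shown as a plain TypeScript object (a sketch, not part of the change):

const machineLearning = {
  enabled: true,
  urls: ['http://immich-machine-learning:3003'],
  availabilityChecks: {
    enabled: true, // disable to treat every configured URL as healthy
    interval: 30_000, // ms between re-checks; corresponds to the old MACHINE_LEARNING_AVAILABILITY_BACKOFF_TIME
    timeout: 2000, // ms to wait for a ping response; corresponds to the old MACHINE_LEARNING_PING_TIMEOUT
  },
  // clip, facialRecognition, and the other machineLearning settings are unchanged
};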

View File

@@ -16,6 +16,20 @@ export class SystemConfigService extends BaseService {
async onBootstrap() {
const config = await this.getConfig({ withCache: false });
await this.eventRepository.emit('ConfigInit', { newConfig: config });
if (
process.env.IMMICH_MACHINE_LEARNING_PING_TIMEOUT ||
process.env.IMMICH_MACHINE_LEARNING_AVAILABILITY_BACKOFF_TIME
) {
this.logger.deprecate(
'MACHINE_LEARNING_PING_TIMEOUT and MACHINE_LEARNING_AVAILABILITY_BACKOFF_TIME have been moved to the system config (`machineLearning.availabilityChecks`) and will be removed in a future release.',
);
}
}
@OnEvent({ name: 'AppShutdown' })
onShutdown() {
this.machineLearningRepository.teardown();
}
async getSystemConfig(): Promise<SystemConfigDto> {
@@ -28,12 +42,14 @@ export class SystemConfigService extends BaseService {
}
@OnEvent({ name: 'ConfigInit', priority: -100 })
onConfigInit({ newConfig: { logging } }: ArgOf<'ConfigInit'>) {
onConfigInit({ newConfig: { logging, machineLearning } }: ArgOf<'ConfigInit'>) {
const { logLevel: envLevel } = this.configRepository.getEnv();
const configLevel = logging.enabled ? logging.level : false;
const level = envLevel ?? configLevel;
this.logger.setLogLevel(level);
this.logger.log(`LogLevel=${level} ${envLevel ? '(set via IMMICH_LOG_LEVEL)' : '(set via system config)'}`);
this.machineLearningRepository.setup(machineLearning);
}
@OnEvent({ name: 'ConfigUpdate', server: true })

View File

@@ -34,7 +34,8 @@ type SendFile = Parameters<Response['sendFile']>;
type SendFileOptions = SendFile[1];
const cacheControlHeaders: Record<CacheControl, string | null> = {
[CacheControl.PrivateWithCache]: 'private, max-age=86400, no-transform',
[CacheControl.PrivateWithCache]:
'private, max-age=86400, no-transform, stale-while-revalidate=2592000, stale-if-error=2592000',
[CacheControl.PrivateWithoutCache]: 'private, no-cache, no-transform',
[CacheControl.None]: null, // falsy value to prevent adding Cache-Control header
};

View File

@@ -211,6 +211,18 @@ export const ValidateDate = (options?: DateOptions & ApiPropertyOptions) => {
return applyDecorators(...decorators);
};
type StringOptions = { optional?: boolean; nullable?: boolean; trim?: boolean };
export const ValidateString = (options?: StringOptions & ApiPropertyOptions) => {
const { optional, nullable, trim, ...apiPropertyOptions } = options || {};
const decorators = [ApiProperty(apiPropertyOptions), IsString(), optional ? Optional({ nullable }) : IsNotEmpty()];
if (trim) {
decorators.push(Transform(({ value }: { value: string }) => value?.trim()));
}
return applyDecorators(...decorators);
};
type BooleanOptions = { optional?: boolean; nullable?: boolean };
export const ValidateBoolean = (options?: BooleanOptions & ApiPropertyOptions) => {
const { optional, nullable, ...apiPropertyOptions } = options || {};
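
A small usage sketch of the new trim option; the DTO below is hypothetical and only illustrates that whitespace is stripped before the IsString/IsNotEmpty checks run (assuming the global ValidationPipe applies class-transformer transforms, as is typical):

// Hypothetical DTO using the decorator defined above.
class SmartSearchDto {
  @ValidateString({ trim: true, description: 'Free-text search query' })
  query!: string;

  @ValidateString({ optional: true })
  language?: string;
}
// '  sunset  ' is transformed to 'sunset'; a whitespace-only value trims to '' and fails IsNotEmpty.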

View File

@@ -127,6 +127,7 @@ export default typescriptEslint.config(
'@typescript-eslint/no-misused-promises': 'error',
'@typescript-eslint/require-await': 'error',
'object-shorthand': ['error', 'always'],
'svelte/no-navigation-without-resolve': 'off',
},
},
{

View File

@@ -55,7 +55,7 @@
"qrcode": "^1.5.4",
"simple-icons": "^15.15.0",
"socket.io-client": "~4.8.0",
"svelte-gestures": "^5.1.3",
"svelte-gestures": "5.1.4",
"svelte-i18n": "^4.0.1",
"svelte-maplibre": "^1.2.0",
"svelte-persisted-store": "^0.12.0",
@@ -70,7 +70,7 @@
"@sveltejs/adapter-static": "^3.0.8",
"@sveltejs/enhanced-img": "^0.8.0",
"@sveltejs/kit": "^2.27.1",
"@sveltejs/vite-plugin-svelte": "6.1.2",
"@sveltejs/vite-plugin-svelte": "6.2.0",
"@tailwindcss/vite": "^4.1.7",
"@testing-library/jest-dom": "^6.4.2",
"@testing-library/svelte": "^5.2.8",
@@ -85,7 +85,7 @@
"dotenv": "^17.0.0",
"eslint": "^9.18.0",
"eslint-config-prettier": "^10.1.8",
"eslint-p": "^0.25.0",
"eslint-p": "^0.26.0",
"eslint-plugin-compat": "^6.0.2",
"eslint-plugin-svelte": "^3.9.0",
"eslint-plugin-unicorn": "^60.0.0",
@@ -97,7 +97,7 @@
"prettier-plugin-sort-json": "^4.1.1",
"prettier-plugin-svelte": "^3.3.3",
"rollup-plugin-visualizer": "^6.0.0",
"svelte": "5.35.5",
"svelte": "5.38.10",
"svelte-check": "^4.1.5",
"svelte-eslint-parser": "^1.2.0",
"tailwindcss": "^4.1.7",

View File

@@ -9,7 +9,7 @@
import { featureFlags } from '$lib/stores/server-config.store';
import type { SystemConfigDto } from '@immich/sdk';
import { Button, IconButton } from '@immich/ui';
import { mdiMinusCircle } from '@mdi/js';
import { mdiPlus, mdiTrashCanOutline } from '@mdi/js';
import { isEqual } from 'lodash-es';
import { t } from 'svelte-i18n';
import { fade } from 'svelte/transition';
@@ -46,19 +46,6 @@
<div>
{#each config.machineLearning.urls as _, i (i)}
{#snippet removeButton()}
{#if config.machineLearning.urls.length > 1}
<IconButton
size="large"
shape="round"
color="danger"
aria-label=""
onclick={() => config.machineLearning.urls.splice(i, 1)}
icon={mdiMinusCircle}
/>
{/if}
{/snippet}
<SettingInputField
inputType={SettingInputFieldType.TEXT}
label={i === 0 ? $t('url') : undefined}
@@ -67,20 +54,69 @@
required={i === 0}
disabled={disabled || !config.machineLearning.enabled}
isEdited={i === 0 && !isEqual(config.machineLearning.urls, savedConfig.machineLearning.urls)}
trailingSnippet={removeButton}
/>
>
{#snippet trailingSnippet()}
{#if config.machineLearning.urls.length > 1}
<IconButton
aria-label=""
onclick={() => config.machineLearning.urls.splice(i, 1)}
icon={mdiTrashCanOutline}
color="danger"
/>
{/if}
{/snippet}
</SettingInputField>
{/each}
</div>
<Button
class="mb-2"
size="small"
shape="round"
onclick={() => config.machineLearning.urls.splice(0, 0, '')}
disabled={disabled || !config.machineLearning.enabled}>{$t('add_url')}</Button
>
<div class="flex justify-end">
<Button
class="mb-2"
size="small"
shape="round"
leadingIcon={mdiPlus}
onclick={() => config.machineLearning.urls.push('')}
disabled={disabled || !config.machineLearning.enabled}>{$t('add_url')}</Button
>
</div>
</div>
<SettingAccordion
key="availability-checks"
title={$t('admin.machine_learning_availability_checks')}
subtitle={$t('admin.machine_learning_availability_checks_description')}
>
<div class="ms-4 mt-4 flex flex-col gap-4">
<SettingSwitch
title={$t('admin.machine_learning_availability_checks_enabled')}
bind:checked={config.machineLearning.availabilityChecks.enabled}
disabled={disabled || !config.machineLearning.enabled}
/>
<hr />
<SettingInputField
inputType={SettingInputFieldType.NUMBER}
label={$t('admin.machine_learning_availability_checks_interval')}
bind:value={config.machineLearning.availabilityChecks.interval}
description={$t('admin.machine_learning_availability_checks_interval_description')}
disabled={disabled || !config.machineLearning.enabled || !config.machineLearning.availabilityChecks.enabled}
isEdited={config.machineLearning.availabilityChecks.interval !==
savedConfig.machineLearning.availabilityChecks.interval}
/>
<SettingInputField
inputType={SettingInputFieldType.NUMBER}
label={$t('admin.machine_learning_availability_checks_timeout')}
bind:value={config.machineLearning.availabilityChecks.timeout}
description={$t('admin.machine_learning_availability_checks_timeout_description')}
disabled={disabled || !config.machineLearning.enabled || !config.machineLearning.availabilityChecks.enabled}
isEdited={config.machineLearning.availabilityChecks.timeout !==
savedConfig.machineLearning.availabilityChecks.timeout}
/>
</div>
</SettingAccordion>
<SettingAccordion
key="smart-search"
title={$t('admin.machine_learning_smart_search')}

View File
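
The Svelte changes that follow repeat one pattern: internal hrefs and goto() targets are wrapped in resolve() from $app/paths, so paths are resolved against any configured base path (cf. the svelte/no-navigation-without-resolve rule seen earlier in this diff). A minimal sketch of the pattern, where AppRoute and album stand in for the component-local values used below:

import { goto } from '$app/navigation';
import { resolve } from '$app/paths';

// Before: goto(`${AppRoute.ALBUMS}/${album.id}`)
// After: resolve() prefixes the app's base path (if any) before navigating.
void goto(resolve(`${AppRoute.ALBUMS}/${album.id}`));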

@@ -1,4 +1,5 @@
<script lang="ts">
import { resolve } from '$app/paths';
import SupportedDatetimePanel from '$lib/components/admin-settings/SupportedDatetimePanel.svelte';
import SupportedVariablesPanel from '$lib/components/admin-settings/SupportedVariablesPanel.svelte';
import SettingButtonsRow from '$lib/components/shared-components/settings/setting-buttons-row.svelte';
@@ -262,7 +263,7 @@
values={{ job: $t('admin.storage_template_migration_job') }}
>
{#snippet children({ message })}
<a href={AppRoute.ADMIN_JOBS} class="text-primary">
<a href={resolve(AppRoute.ADMIN_JOBS)} class="text-primary">
{message}
</a>
{/snippet}

View File

@@ -1,4 +1,5 @@
<script lang="ts">
import { resolve } from '$app/paths';
import AlbumCard from '$lib/components/album-page/album-card.svelte';
import { AppRoute } from '$lib/constants';
import { albumViewSettings } from '$lib/stores/preferences.store';
@@ -65,7 +66,7 @@
{#each albums as album, index (album.id)}
<a
data-sveltekit-preload-data="hover"
href="{AppRoute.ALBUMS}/{album.id}"
href={resolve(`${AppRoute.ALBUMS}/${album.id}`)}
animate:flip={{ duration: 400 }}
oncontextmenu={(event) => oncontextmenu(event, album)}
>

View File

@@ -1,5 +1,6 @@
<script lang="ts">
import { goto } from '$app/navigation';
import { resolve } from '$app/paths';
import AlbumCardGroup from '$lib/components/album-page/album-card-group.svelte';
import AlbumsTable from '$lib/components/album-page/albums-table.svelte';
import MenuOption from '$lib/components/shared-components/context-menu/menu-option.svelte';
@@ -315,7 +316,7 @@
button: {
text: $t('view_album'),
onClick() {
return goto(`${AppRoute.ALBUMS}/${album.id}`);
return goto(resolve(`${AppRoute.ALBUMS}/${album.id}`));
},
},
});

View File

@@ -1,5 +1,6 @@
<script lang="ts">
import { goto } from '$app/navigation';
import { resolve } from '$app/paths';
import { AppRoute, dateFormats } from '$lib/constants';
import { locale } from '$lib/stores/preferences.store';
import { user } from '$lib/stores/user.store';
@@ -32,7 +33,7 @@
<tr
class="flex h-[50px] w-full place-items-center border-[3px] border-transparent p-2 text-center even:bg-subtle/20 odd:bg-subtle/80 hover:cursor-pointer hover:border-immich-primary/75 odd:dark:bg-immich-dark-gray/75 even:dark:bg-immich-dark-gray/50 dark:hover:border-immich-dark-primary/75 md:p-5"
onclick={() => goto(`${AppRoute.ALBUMS}/${album.id}`)}
onclick={() => goto(resolve(`${AppRoute.ALBUMS}/${album.id}`))}
{oncontextmenu}
>
<td class="text-md text-ellipsis text-start w-8/12 sm:w-4/12 md:w-4/12 xl:w-[30%] 2xl:w-[40%] items-center">

View File

@@ -1,4 +1,5 @@
<script lang="ts">
import { resolve } from '$app/paths';
import { autoGrowHeight } from '$lib/actions/autogrow';
import { shortcut } from '$lib/actions/shortcut';
import ButtonContextMenu from '$lib/components/shared-components/context-menu/button-context-menu.svelte';
@@ -146,7 +147,10 @@
<div class="w-full leading-4 overflow-hidden self-center break-words text-sm">{reaction.comment}</div>
{#if assetId === undefined && reaction.assetId}
<a class="aspect-square w-[75px] h-[75px]" href="{AppRoute.ALBUMS}/{albumId}/photos/{reaction.assetId}">
<a
class="aspect-square w-[75px] h-[75px]"
href={resolve(`${AppRoute.ALBUMS}/${albumId}/photos/${reaction.assetId}`)}
>
<img
class="rounded-lg w-[75px] h-[75px] object-cover"
src={getAssetThumbnailUrl(reaction.assetId)}
@@ -198,7 +202,7 @@
{#if assetId === undefined && reaction.assetId}
<a
class="aspect-square w-[75px] h-[75px]"
href="{AppRoute.ALBUMS}/{albumId}/photos/{reaction.assetId}"
href={resolve(`${AppRoute.ALBUMS}/${albumId}/photos/${reaction.assetId}`)}
>
<img
class="rounded-lg w-[75px] h-[75px] object-cover"

View File

@@ -1,5 +1,6 @@
<script lang="ts">
import { goto } from '$app/navigation';
import { resolve } from '$app/paths';
import CastButton from '$lib/cast/cast-button.svelte';
import type { OnAction, PreAction } from '$lib/components/asset-viewer/actions/action';
import AddToAlbumAction from '$lib/components/asset-viewer/actions/add-to-album-action.svelte';
@@ -224,14 +225,15 @@
{#if !asset.isArchived && !asset.isTrashed}
<MenuOption
icon={mdiImageSearch}
onClick={() => goto(`${AppRoute.PHOTOS}?at=${stack?.primaryAssetId ?? asset.id}`)}
onClick={() => goto(resolve(`${AppRoute.PHOTOS}?at=${stack?.primaryAssetId ?? asset.id}`))}
text={$t('view_in_timeline')}
/>
{/if}
{#if !asset.isArchived && !asset.isTrashed && smartSearchEnabled}
<MenuOption
icon={mdiCompare}
onClick={() => goto(`${AppRoute.SEARCH}?query={"queryAssetId":"${stack?.primaryAssetId ?? asset.id}"}`)}
onClick={() =>
goto(resolve(`${AppRoute.SEARCH}?query={"queryAssetId":"${stack?.primaryAssetId ?? asset.id}"}`))}
text={$t('view_similar_photos')}
/>
{/if}

View File

@@ -1,4 +1,5 @@
<script lang="ts">
import { resolve } from '$app/paths';
import { shortcut } from '$lib/actions/shortcut';
import { AppRoute } from '$lib/constants';
import { authManager } from '$lib/managers/auth-manager.svelte';
@@ -45,7 +46,7 @@
<div class="flex group transition-all">
<a
class="inline-block h-min whitespace-nowrap ps-3 pe-1 group-hover:ps-3 py-1 text-center align-baseline leading-none text-gray-100 dark:text-immich-dark-gray bg-primary rounded-s-full hover:bg-immich-primary/80 dark:hover:bg-immich-dark-primary/80 transition-all"
href={encodeURI(`${AppRoute.TAGS}/?path=${tag.value}`)}
href={resolve(`${AppRoute.TAGS}/?path=${encodeURI(tag.value)}`)}
>
<p class="text-sm">
{tag.value}

View File

@@ -1,5 +1,6 @@
<script lang="ts">
import { goto } from '$app/navigation';
import { resolve } from '$app/paths';
import DetailPanelDescription from '$lib/components/asset-viewer/detail-panel-description.svelte';
import DetailPanelLocation from '$lib/components/asset-viewer/detail-panel-location.svelte';
import DetailPanelRating from '$lib/components/asset-viewer/detail-panel-star-rating.svelte';
@@ -208,9 +209,11 @@
{#if showingHiddenPeople || !person.isHidden}
<a
class="w-[90px]"
href="{AppRoute.PEOPLE}/{person.id}?{QueryParameter.PREVIOUS_ROUTE}={currentAlbum?.id
? `${AppRoute.ALBUMS}/${currentAlbum?.id}`
: AppRoute.PHOTOS}"
href={resolve(
`${AppRoute.PEOPLE}/${person.id}?${QueryParameter.PREVIOUS_ROUTE}=${
currentAlbum?.id ? `${AppRoute.ALBUMS}/${currentAlbum?.id}` : AppRoute.PHOTOS
}`,
)}
onfocus={() => ($boundingBoxesArray = people[index].faces)}
onblur={() => ($boundingBoxesArray = [])}
onmouseover={() => ($boundingBoxesArray = people[index].faces)}
@@ -362,6 +365,7 @@
</p>
{#if showAssetPath}
<p class="text-xs opacity-50 break-all pb-2 hover:text-primary" transition:slide={{ duration: 250 }}>
<!-- eslint-disable-next-line svelte/no-navigation-without-resolve this is supposed to be treated as an absolute/external link -->
<a href={getAssetFolderHref(asset)} title={$t('go_to_folder')} class="whitespace-pre-wrap">
{asset.originalPath}
</a>
@@ -394,10 +398,12 @@
{#if asset.exifInfo?.make || asset.exifInfo?.model}
<p>
<a
href="{AppRoute.SEARCH}?{getMetadataSearchQuery({
...(asset.exifInfo?.make ? { make: asset.exifInfo.make } : {}),
...(asset.exifInfo?.model ? { model: asset.exifInfo.model } : {}),
})}"
href={resolve(
`${AppRoute.SEARCH}?${getMetadataSearchQuery({
...(asset.exifInfo?.make ? { make: asset.exifInfo.make } : {}),
...(asset.exifInfo?.model ? { model: asset.exifInfo.model } : {}),
})}`,
)}
title="{$t('search_for')} {asset.exifInfo.make || ''} {asset.exifInfo.model || ''}"
class="hover:text-primary"
>
@@ -411,7 +417,9 @@
<div class="flex gap-2 text-sm">
<p>
<a
href="{AppRoute.SEARCH}?{getMetadataSearchQuery({ lensModel: asset.exifInfo.lensModel })}"
href={resolve(
`${AppRoute.SEARCH}?${getMetadataSearchQuery({ lensModel: asset.exifInfo.lensModel })}`,
)}
title="{$t('search_for')} {asset.exifInfo.lensModel}"
class="hover:text-primary line-clamp-1"
>
@@ -475,7 +483,7 @@
simplified
useLocationPin
showSimpleControls={!showEditFaces}
onOpenInMapView={() => goto(`${AppRoute.MAP}#12.5/${latlng.lat}/${latlng.lng}`)}
onOpenInMapView={() => goto(resolve(`${AppRoute.MAP}#12.5/${latlng.lat}/${latlng.lng}`))}
>
{#snippet popup({ marker })}
{@const { lat, lon } = marker}
@@ -516,7 +524,7 @@
<section class="px-6 pt-6 dark:text-immich-dark-fg">
<p class="uppercase pb-4 text-sm">{$t('appears_in')}</p>
{#each albums as album (album.id)}
<a href="{AppRoute.ALBUMS}/{album.id}">
<a href={resolve(`${AppRoute.ALBUMS}/${album.id}`)}>
<div class="flex gap-4 pt-2 hover:cursor-pointer items-center">
<div>
<img

View File

@@ -97,12 +97,15 @@
}
try {
await copyImageToClipboard($photoViewerImgElement ?? assetFileUrl);
notificationController.show({
type: NotificationType.Info,
message: $t('copied_image_to_clipboard'),
timeout: 3000,
});
const result = await copyImageToClipboard($photoViewerImgElement ?? assetFileUrl);
if (result.success) {
notificationController.show({ type: NotificationType.Info, message: $t('copied_image_to_clipboard') });
} else {
notificationController.show({
type: NotificationType.Error,
message: $t('errors.clipboard_unsupported_mime_type', { values: { mimeType: result.mimeType } }),
});
}
} catch (error) {
handleError(error, $t('copy_error'));
}

View File

@@ -7,6 +7,7 @@
<script lang="ts">
import DateInput from '$lib/elements/DateInput.svelte';
import { Text } from '@immich/ui';
import { t } from 'svelte-i18n';
interface Props {
@@ -14,31 +15,27 @@
}
let { filters = $bindable() }: Props = $props();
let invalid = $derived(filters.takenAfter && filters.takenBefore && filters.takenAfter > filters.takenBefore);
const inputClasses = $derived(
`immich-form-input w-full mt-1 hover:cursor-pointer ${invalid ? 'border border-danger' : ''}`,
);
</script>
<div id="date-range-selection" class="grid grid-auto-fit-40 gap-5">
<label class="immich-form-label" for="start-date">
<span class="uppercase">{$t('start_date')}</span>
<DateInput
class="immich-form-input w-full mt-1 hover:cursor-pointer"
type="date"
id="start-date"
name="start-date"
max={filters.takenBefore}
bind:value={filters.takenAfter}
/>
</label>
<div class="flex flex-col gap-1">
<div id="date-range-selection" class="grid grid-auto-fit-40 gap-5">
<label class="immich-form-label" for="start-date">
<span class="uppercase">{$t('start_date')}</span>
<DateInput class={inputClasses} type="date" id="start-date" name="start-date" bind:value={filters.takenAfter} />
</label>
<label class="immich-form-label" for="end-date">
<span class="uppercase">{$t('end_date')}</span>
<DateInput
class="immich-form-input w-full mt-1 hover:cursor-pointer"
type="date"
id="end-date"
name="end-date"
placeholder=""
min={filters.takenAfter}
bind:value={filters.takenBefore}
/>
</label>
<label class="immich-form-label" for="end-date">
<span class="uppercase">{$t('end_date')}</span>
<DateInput class={inputClasses} type="date" id="end-date" name="end-date" bind:value={filters.takenBefore} />
</label>
</div>
{#if invalid}
<Text color="danger">{$t('start_date_before_end_date')}</Text>
{/if}
</div>

View File

@@ -105,7 +105,7 @@
{/if}
{#if inputType !== SettingInputFieldType.PASSWORD}
<div class="flex place-items-center place-content-center">
<div class="flex place-items-center place-content-center gap-2">
{#if inputType === SettingInputFieldType.COLOR}
<input
bind:this={input}

View File

@@ -17,6 +17,9 @@ describe('RecentAlbums component', () => {
render(RecentAlbums);
expect(sdkMock.getAllAlbums).toBeCalledTimes(1);
// two ticks appear to be needed for the async album fetch to settle before the links render
await tick();
await tick();
const links = screen.getAllByRole('link');

View File

@@ -625,7 +625,21 @@ const urlToBlob = async (imageSource: string) => {
return await response.blob();
};
export const copyImageToClipboard = async (source: HTMLImageElement | string) => {
const blob = source instanceof HTMLImageElement ? await imgToBlob(source) : await urlToBlob(source);
export const copyImageToClipboard = async (
source: HTMLImageElement | string,
): Promise<{ success: true } | { success: false; mimeType: string }> => {
if (source instanceof HTMLImageElement) {
// do not await imgToBlob, so the Safari clipboard write happens in the context of the user gesture
await navigator.clipboard.write([new ClipboardItem({ ['image/png']: imgToBlob(source) })]);
return { success: true };
}
// if we had a way to get the mime type synchronously, we could do the same thing here
const blob = await urlToBlob(source);
if (!ClipboardItem.supports(blob.type)) {
return { success: false, mimeType: blob.type };
}
await navigator.clipboard.write([new ClipboardItem({ [blob.type]: blob })]);
return { success: true };
};
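
The Safari branch above works because ClipboardItem accepts a pending Promise<Blob> per MIME type, which lets navigator.clipboard.write() be called synchronously inside the user gesture while the PNG is still being produced; a condensed sketch (imgToBlob is the helper defined earlier in this file):

// ClipboardItemData may be a Blob, a string, or a Promise of either; handing over the
// un-awaited imgToBlob(source) promise keeps the write call inside the click handler.
const copyElementToClipboard = (source: HTMLImageElement) =>
  navigator.clipboard.write([new ClipboardItem({ 'image/png': imgToBlob(source) })]);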