Compare commits

...

10 Commits

Author SHA1 Message Date
bwees
91d6fedbf2 fix: restore ancestors album to currentRemoteAlbumProvider when popping 2025-09-21 23:19:12 -05:00
bwees
8465d6c493 fix: rebase conflict 2025-09-20 10:31:37 -05:00
bwees
dc8c705420 fix: bottom sheet now is usable when navigating to another asset viewer 2025-09-20 10:31:37 -05:00
bwees
a437a947c3 feat: show "appears in" albums on asset viewer bottom sheet
fix: multiple RemoteAlbumPages in navigation stack

this also allows us to not have to set the current album before navigating to RemoteAlbumPage

chore: clarification comments

handle nested album pages

fix: hide "appears in" when an asset is not in any albums

fix: way more bottom padding

for some reason we can't query the safe area here :/
2025-09-20 10:31:35 -05:00
Alex
aaeac2ab73 fix(web): revert do not upscale small pictures (#22191) (#22233) 2025-09-20 09:25:27 -05:00
Jason Rasmussen
de57fecb69 fix(web): copy to clipboard on safari (#22217) 2025-09-19 17:44:18 -04:00
renovate[bot]
1e0b4fac04 fix(deps): update typescript-projects (#21510)
* fix(deps): update typescript-projects

* chore: downgrade dependencies

* chore: downgrade svelte-gestures

* fix: svelte/no-navigation-without-resolve

* fix: dumb test

---------

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Zack Pollard <zack@futo.org>
Co-authored-by: Daniel Dietzler <mail@ddietzler.dev>
Co-authored-by: Jason Rasmussen <jason@rasm.me>
2025-09-19 12:29:01 -04:00
Jason Rasmussen
34339ea69f fix(web): show danger/warning when taken dates overlap (#22213) 2025-09-19 12:20:09 -04:00
Jason Rasmussen
6da039780e fix: automatically remove leading/trailing whitespace from search que… (#22214)
fix: automatically remove leading/trailing whitespace from search queries
2025-09-19 12:19:26 -04:00
Jason Rasmussen
3f2e0780d5 feat: availability checks (#22185) 2025-09-19 12:18:42 -04:00
54 changed files with 1877 additions and 1625 deletions

View File

@@ -169,8 +169,6 @@ Redis (Sentinel) URL example JSON before encoding:
| `MACHINE_LEARNING_ANN_TUNING_LEVEL` | ARM-NN GPU tuning level (1: rapid, 2: normal, 3: exhaustive) | `2` | machine learning |
| `MACHINE_LEARNING_DEVICE_IDS`<sup>\*4</sup> | Device IDs to use in multi-GPU environments | `0` | machine learning |
| `MACHINE_LEARNING_MAX_BATCH_SIZE__FACIAL_RECOGNITION` | Set the maximum number of faces that will be processed at once by the facial recognition model | None (`1` if using OpenVINO) | machine learning |
| `MACHINE_LEARNING_PING_TIMEOUT` | How long (ms) to wait for a PING response when checking if an ML server is available | `2000` | server |
| `MACHINE_LEARNING_AVAILABILITY_BACKOFF_TIME` | How long to ignore ML servers that are offline before trying again | `30000` | server |
| `MACHINE_LEARNING_RKNN` | Enable RKNN hardware acceleration if supported | `True` | machine learning |
| `MACHINE_LEARNING_RKNN_THREADS` | How many threads of RKNN runtime should be spinned up while inferencing. | `1` | machine learning |

View File

@@ -123,6 +123,13 @@
"logging_enable_description": "Enable logging",
"logging_level_description": "When enabled, what log level to use.",
"logging_settings": "Logging",
"machine_learning_availability_checks": "Availability checks",
"machine_learning_availability_checks_description": "Automatically detect and prefer available machine learning servers",
"machine_learning_availability_checks_enabled": "Enable availability checks",
"machine_learning_availability_checks_interval": "Check interval",
"machine_learning_availability_checks_interval_description": "Interval in milliseconds between availability checks",
"machine_learning_availability_checks_timeout": "Request timeout",
"machine_learning_availability_checks_timeout_description": "Timeout in milliseconds for availability checks",
"machine_learning_clip_model": "CLIP model",
"machine_learning_clip_model_description": "The name of a CLIP model listed <link>here</link>. Note that you must re-run the 'Smart Search' job for all images upon changing a model.",
"machine_learning_duplicate_detection": "Duplicate Detection",
@@ -913,6 +920,7 @@
"cant_get_number_of_comments": "Can't get number of comments",
"cant_search_people": "Can't search people",
"cant_search_places": "Can't search places",
"clipboard_unsupported_mime_type": "The system clipboard does not support copying this type of content: {mimeType}",
"error_adding_assets_to_album": "Error adding assets to album",
"error_adding_users_to_album": "Error adding users to album",
"error_deleting_shared_user": "Error deleting shared user",
@@ -1916,6 +1924,7 @@
"stacktrace": "Stacktrace",
"start": "Start",
"start_date": "Start date",
"start_date_before_end_date": "Start date must be before end date",
"state": "State",
"status": "Status",
"stop_casting": "Stop casting",

View File

@@ -1,7 +1,7 @@
[tools]
node = "22.19.0"
flutter = "3.35.4"
pnpm = "10.14.0"
pnpm = "10.15.1"
dart = "3.8.2"
[tools."github:CQLabs/homebrew-dcm"]

View File

@@ -160,6 +160,10 @@ class RemoteAlbumService {
return _repository.getCount();
}
Future<List<RemoteAlbum>> getAlbumsContainingAsset(String assetId) {
return _repository.getAlbumsContainingAsset(assetId);
}
Future<List<RemoteAlbum>> _sortByNewestAsset(List<RemoteAlbum> albums) async {
// map album IDs to their newest asset dates
final Map<String, Future<DateTime?>> assetTimestampFutures = {};

View File

@@ -372,6 +372,18 @@ class DriftRemoteAlbumRepository extends DriftDatabaseRepository {
return query.map((row) => row.read(_db.remoteAssetEntity.id)!).get();
}
Future<List<RemoteAlbum>> getAlbumsContainingAsset(String assetId) async {
final albumIdsQuery = _db.remoteAlbumAssetEntity.select()..where((row) => row.assetId.equals(assetId));
final albumIds = (await albumIdsQuery.get()).map((e) => e.albumId).toSet();
if (albumIds.isEmpty) {
return [];
}
return getAll().then((albums) => albums.where((album) => albumIds.contains(album.id)).toList());
}
}
extension on RemoteAlbumEntityData {

View File

@@ -32,6 +32,7 @@ import 'package:immich_mobile/theme/dynamic_theme.dart';
import 'package:immich_mobile/theme/theme_data.dart';
import 'package:immich_mobile/utils/bootstrap.dart';
import 'package:immich_mobile/utils/cache/widgets_binding.dart';
import 'package:immich_mobile/utils/debug_print.dart';
import 'package:immich_mobile/utils/http_ssl_options.dart';
import 'package:immich_mobile/utils/licenses.dart';
import 'package:immich_mobile/utils/migration.dart';
@@ -39,7 +40,6 @@ import 'package:intl/date_symbol_data_local.dart';
import 'package:logging/logging.dart';
import 'package:timezone/data/latest.dart';
import 'package:worker_manager/worker_manager.dart';
import 'package:immich_mobile/utils/debug_print.dart';
void main() async {
ImmichWidgetsBinding();

View File

@@ -1,4 +1,5 @@
import 'package:auto_route/auto_route.dart';
import 'package:collection/collection.dart';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
@@ -221,14 +222,24 @@ class _RemoteAlbumPageState extends ConsumerState<RemoteAlbumPage> {
@override
Widget build(BuildContext context) {
return PopScope(
canPop: false,
onPopInvokedWithResult: (didPop, _) {
if (didPop) {
Future.microtask(() {
if (mounted) {
ref.read(currentRemoteAlbumProvider.notifier).dispose();
ref.read(remoteAlbumProvider.notifier).refresh();
}
});
if (didPop || !mounted) {
return;
}
final ancestors = context.router.stack.take(context.router.stack.length - 1);
final ancestorPage = ancestors.lastWhereOrNull((route) {
return route.name == RemoteAlbumRoute.page.name;
});
Navigator.of(context).pop();
if (ancestorPage == null) {
ref.read(currentRemoteAlbumProvider.notifier).dispose();
} else {
final album = (ancestorPage.routeData.args as RemoteAlbumRouteArgs).album;
ref.read(currentRemoteAlbumProvider.notifier).setAlbum(album);
}
},
child: ProviderScope(

View File

@@ -12,7 +12,7 @@ import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/extensions/theme_extensions.dart';
import 'package:immich_mobile/extensions/translate_extensions.dart';
import 'package:immich_mobile/models/albums/album_search.model.dart';
import 'package:immich_mobile/pages/common/large_leading_tile.dart';
import 'package:immich_mobile/presentation/widgets/album/album_tile.dart';
import 'package:immich_mobile/presentation/widgets/images/thumbnail.widget.dart';
import 'package:immich_mobile/providers/infrastructure/album.provider.dart';
import 'package:immich_mobile/providers/infrastructure/current_album.provider.dart';
@@ -516,38 +516,6 @@ class _AlbumList extends ConsumerWidget {
sliver: SliverList.builder(
itemBuilder: (_, index) {
final album = albums[index];
final albumTile = LargeLeadingTile(
title: Text(
album.name,
maxLines: 2,
overflow: TextOverflow.ellipsis,
style: context.textTheme.titleSmall?.copyWith(fontWeight: FontWeight.w600),
),
subtitle: Text(
'${'items_count'.t(context: context, args: {'count': album.assetCount})} • ${album.ownerId != userId ? 'shared_by_user'.t(context: context, args: {'user': album.ownerName}) : 'owned'.t(context: context)}',
overflow: TextOverflow.ellipsis,
style: context.textTheme.bodyMedium?.copyWith(color: context.colorScheme.onSurfaceSecondary),
),
onTap: () => onAlbumSelected(album),
leadingPadding: const EdgeInsets.only(right: 16),
leading: album.thumbnailAssetId != null
? ClipRRect(
borderRadius: const BorderRadius.all(Radius.circular(15)),
child: SizedBox(width: 80, height: 80, child: Thumbnail.remote(remoteId: album.thumbnailAssetId!)),
)
: SizedBox(
width: 80,
height: 80,
child: Container(
decoration: BoxDecoration(
color: context.colorScheme.surfaceContainer,
borderRadius: const BorderRadius.all(Radius.circular(16)),
border: Border.all(color: context.colorScheme.outline.withAlpha(50), width: 1),
),
child: const Icon(Icons.photo_album_rounded, size: 24, color: Colors.grey),
),
),
);
final isOwner = album.ownerId == userId;
if (isOwner) {
@@ -576,11 +544,14 @@ class _AlbumList extends ConsumerWidget {
onDismissed: (direction) async {
await ref.read(remoteAlbumProvider.notifier).deleteAlbum(album.id);
},
child: albumTile,
child: AlbumTile(album: album, isOwner: isOwner, onAlbumSelected: onAlbumSelected),
),
);
} else {
return Padding(padding: const EdgeInsets.only(bottom: 8.0), child: albumTile);
return Padding(
padding: const EdgeInsets.only(bottom: 8.0),
child: AlbumTile(album: album, isOwner: isOwner, onAlbumSelected: onAlbumSelected),
);
}
},
itemCount: albums.length,

View File

@@ -0,0 +1,51 @@
import 'package:flutter/material.dart';
import 'package:immich_mobile/domain/models/album/album.model.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/extensions/theme_extensions.dart';
import 'package:immich_mobile/extensions/translate_extensions.dart';
import 'package:immich_mobile/pages/common/large_leading_tile.dart';
import 'package:immich_mobile/presentation/widgets/images/thumbnail.widget.dart';
class AlbumTile extends StatelessWidget {
const AlbumTile({super.key, required this.album, required this.isOwner, this.onAlbumSelected});
final RemoteAlbum album;
final bool isOwner;
final Function(RemoteAlbum)? onAlbumSelected;
@override
Widget build(BuildContext context) {
return LargeLeadingTile(
title: Text(
album.name,
maxLines: 2,
overflow: TextOverflow.ellipsis,
style: context.textTheme.titleSmall?.copyWith(fontWeight: FontWeight.w600),
),
subtitle: Text(
'${'items_count'.t(context: context, args: {'count': album.assetCount})} • ${isOwner ? 'owned'.t(context: context) : 'shared_by_user'.t(context: context, args: {'user': album.ownerName})}',
overflow: TextOverflow.ellipsis,
style: context.textTheme.bodyMedium?.copyWith(color: context.colorScheme.onSurfaceSecondary),
),
onTap: () => onAlbumSelected?.call(album),
leadingPadding: const EdgeInsets.only(right: 16),
leading: album.thumbnailAssetId != null
? ClipRRect(
borderRadius: const BorderRadius.all(Radius.circular(15)),
child: SizedBox(width: 80, height: 80, child: Thumbnail.remote(remoteId: album.thumbnailAssetId!)),
)
: SizedBox(
width: 80,
height: 80,
child: Container(
decoration: BoxDecoration(
color: context.colorScheme.surfaceContainer,
borderRadius: const BorderRadius.all(Radius.circular(16)),
border: Border.all(color: context.colorScheme.outline.withAlpha(50), width: 1),
),
child: const Icon(Icons.photo_album_rounded, size: 24, color: Colors.grey),
),
),
);
}
}

View File

@@ -1,3 +1,5 @@
import 'package:auto_route/auto_route.dart';
import 'package:collection/collection.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
@@ -8,17 +10,21 @@ import 'package:immich_mobile/domain/models/exif.model.dart';
import 'package:immich_mobile/domain/models/setting.model.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/extensions/translate_extensions.dart';
import 'package:immich_mobile/presentation/widgets/album/album_tile.dart';
import 'package:immich_mobile/presentation/widgets/asset_viewer/asset_viewer.state.dart';
import 'package:immich_mobile/presentation/widgets/asset_viewer/bottom_sheet/sheet_location_details.widget.dart';
import 'package:immich_mobile/presentation/widgets/asset_viewer/bottom_sheet/sheet_people_details.widget.dart';
import 'package:immich_mobile/presentation/widgets/bottom_sheet/base_bottom_sheet.widget.dart';
import 'package:immich_mobile/providers/haptic_feedback.provider.dart';
import 'package:immich_mobile/providers/infrastructure/action.provider.dart';
import 'package:immich_mobile/providers/infrastructure/album.provider.dart';
import 'package:immich_mobile/providers/infrastructure/asset_viewer/current_asset.provider.dart';
import 'package:immich_mobile/providers/infrastructure/current_album.provider.dart';
import 'package:immich_mobile/providers/infrastructure/setting.provider.dart';
import 'package:immich_mobile/providers/routes.provider.dart';
import 'package:immich_mobile/providers/server_info.provider.dart';
import 'package:immich_mobile/providers/user.provider.dart';
import 'package:immich_mobile/routing/router.dart';
import 'package:immich_mobile/utils/action_button.utils.dart';
import 'package:immich_mobile/utils/bytes_units.dart';
import 'package:immich_mobile/widgets/common/immich_toast.dart';
@@ -130,6 +136,58 @@ class _AssetDetailBottomSheet extends ConsumerWidget {
await ref.read(actionProvider.notifier).editDateTime(ActionSource.viewer, context);
}
Widget _buildAppearsInList(WidgetRef ref, BuildContext context) {
final isRemote = ref.watch(currentAssetNotifier)?.hasRemote ?? false;
if (!isRemote) {
return const SizedBox.shrink();
}
final remoteAsset = ref.watch(currentAssetNotifier) as RemoteAsset;
final albums = ref.watch(remoteAlbumServiceProvider).getAlbumsContainingAsset(remoteAsset.id);
final userId = ref.watch(currentUserProvider)?.id;
return FutureBuilder(
future: albums,
builder: (_, snap) {
final albums = snap.data ?? [];
if (albums.isEmpty) {
return const SizedBox.shrink();
}
albums.sortBy((a) => a.name);
return Padding(
padding: const EdgeInsets.only(left: 16, right: 16, top: 8),
child: Column(
spacing: 12,
children: [
if (albums.isNotEmpty)
_SheetTile(
title: 'appears_in'.t(context: context).toUpperCase(),
titleStyle: context.textTheme.labelMedium?.copyWith(
color: context.textTheme.labelMedium?.color?.withAlpha(200),
fontWeight: FontWeight.w600,
),
),
...albums.map((album) {
final isOwner = album.ownerId == userId;
return AlbumTile(
album: album,
isOwner: isOwner,
onAlbumSelected: (album) async {
ref.read(currentRemoteAlbumProvider.notifier).setAlbum(album);
ref.invalidate(assetViewerProvider);
context.router.popAndPush(RemoteAlbumRoute(album: album));
},
);
}),
],
),
);
},
);
}
@override
Widget build(BuildContext context, WidgetRef ref) {
final asset = ref.watch(currentAssetNotifier);
@@ -185,6 +243,10 @@ class _AssetDetailBottomSheet extends ConsumerWidget {
color: context.textTheme.bodyMedium?.color?.withAlpha(155),
),
),
// Appears in (Albums)
_buildAppearsInList(ref, context),
// padding at the bottom to avoid cut-off
const SizedBox(height: 100),
],
);
}

View File

@@ -303,7 +303,7 @@ class AppRouter extends RootStackRouter {
AutoRoute(page: DriftBackupAlbumSelectionRoute.page, guards: [_authGuard, _duplicateGuard]),
AutoRoute(page: LocalTimelineRoute.page, guards: [_authGuard, _duplicateGuard]),
AutoRoute(page: MainTimelineRoute.page, guards: [_authGuard, _duplicateGuard]),
AutoRoute(page: RemoteAlbumRoute.page, guards: [_authGuard, _duplicateGuard]),
AutoRoute(page: RemoteAlbumRoute.page, guards: [_authGuard]),
AutoRoute(
page: AssetViewerRoute.page,
guards: [_authGuard, _duplicateGuard],

View File

@@ -18,7 +18,6 @@ import 'package:immich_mobile/providers/infrastructure/current_album.provider.da
import 'package:immich_mobile/providers/infrastructure/remote_album.provider.dart';
import 'package:immich_mobile/providers/infrastructure/timeline.provider.dart';
import 'package:immich_mobile/providers/timeline/multiselect.provider.dart';
import 'package:immich_mobile/routing/router.dart';
import 'package:immich_mobile/widgets/album/remote_album_shared_user_icons.dart';
class RemoteAlbumSliverAppBar extends ConsumerStatefulWidget {
@@ -89,7 +88,7 @@ class _MesmerizingSliverAppBarState extends ConsumerState<RemoteAlbumSliverAppBa
color: actionIconColor,
shadows: actionIconShadows,
),
onPressed: () => context.navigateTo(const TabShellRoute(children: [DriftAlbumsRoute()])),
onPressed: () => context.maybePop(),
),
actions: [
if (widget.onToggleAlbumOrder != null)

View File

@@ -393,6 +393,7 @@ Class | Method | HTTP request | Description
- [LoginCredentialDto](doc//LoginCredentialDto.md)
- [LoginResponseDto](doc//LoginResponseDto.md)
- [LogoutResponseDto](doc//LogoutResponseDto.md)
- [MachineLearningAvailabilityChecksDto](doc//MachineLearningAvailabilityChecksDto.md)
- [ManualJobName](doc//ManualJobName.md)
- [MapMarkerResponseDto](doc//MapMarkerResponseDto.md)
- [MapReverseGeocodeResponseDto](doc//MapReverseGeocodeResponseDto.md)

View File

@@ -164,6 +164,7 @@ part 'model/log_level.dart';
part 'model/login_credential_dto.dart';
part 'model/login_response_dto.dart';
part 'model/logout_response_dto.dart';
part 'model/machine_learning_availability_checks_dto.dart';
part 'model/manual_job_name.dart';
part 'model/map_marker_response_dto.dart';
part 'model/map_reverse_geocode_response_dto.dart';

View File

@@ -382,6 +382,8 @@ class ApiClient {
return LoginResponseDto.fromJson(value);
case 'LogoutResponseDto':
return LogoutResponseDto.fromJson(value);
case 'MachineLearningAvailabilityChecksDto':
return MachineLearningAvailabilityChecksDto.fromJson(value);
case 'ManualJobName':
return ManualJobNameTypeTransformer().decode(value);
case 'MapMarkerResponseDto':

View File

@@ -0,0 +1,115 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class MachineLearningAvailabilityChecksDto {
/// Returns a new [MachineLearningAvailabilityChecksDto] instance.
MachineLearningAvailabilityChecksDto({
required this.enabled,
required this.interval,
required this.timeout,
});
bool enabled;
num interval;
num timeout;
@override
bool operator ==(Object other) => identical(this, other) || other is MachineLearningAvailabilityChecksDto &&
other.enabled == enabled &&
other.interval == interval &&
other.timeout == timeout;
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(enabled.hashCode) +
(interval.hashCode) +
(timeout.hashCode);
@override
String toString() => 'MachineLearningAvailabilityChecksDto[enabled=$enabled, interval=$interval, timeout=$timeout]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'enabled'] = this.enabled;
json[r'interval'] = this.interval;
json[r'timeout'] = this.timeout;
return json;
}
/// Returns a new [MachineLearningAvailabilityChecksDto] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static MachineLearningAvailabilityChecksDto? fromJson(dynamic value) {
upgradeDto(value, "MachineLearningAvailabilityChecksDto");
if (value is Map) {
final json = value.cast<String, dynamic>();
return MachineLearningAvailabilityChecksDto(
enabled: mapValueOfType<bool>(json, r'enabled')!,
interval: num.parse('${json[r'interval']}'),
timeout: num.parse('${json[r'timeout']}'),
);
}
return null;
}
static List<MachineLearningAvailabilityChecksDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <MachineLearningAvailabilityChecksDto>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = MachineLearningAvailabilityChecksDto.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, MachineLearningAvailabilityChecksDto> mapFromJson(dynamic json) {
final map = <String, MachineLearningAvailabilityChecksDto>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = MachineLearningAvailabilityChecksDto.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of MachineLearningAvailabilityChecksDto-objects as value to a dart map
static Map<String, List<MachineLearningAvailabilityChecksDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<MachineLearningAvailabilityChecksDto>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = MachineLearningAvailabilityChecksDto.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'enabled',
'interval',
'timeout',
};
}

View File

@@ -13,14 +13,16 @@ part of openapi.api;
class SystemConfigMachineLearningDto {
/// Returns a new [SystemConfigMachineLearningDto] instance.
SystemConfigMachineLearningDto({
required this.availabilityChecks,
required this.clip,
required this.duplicateDetection,
required this.enabled,
required this.facialRecognition,
this.url,
this.urls = const [],
});
MachineLearningAvailabilityChecksDto availabilityChecks;
CLIPConfig clip;
DuplicateDetectionConfig duplicateDetection;
@@ -29,50 +31,37 @@ class SystemConfigMachineLearningDto {
FacialRecognitionConfig facialRecognition;
/// This property was deprecated in v1.122.0
///
/// Please note: This property should have been non-nullable! Since the specification file
/// does not include a default value (using the "default:" property), however, the generated
/// source code must fall back to having a nullable type.
/// Consider adding a "default:" property in the specification file to hide this note.
///
String? url;
List<String> urls;
@override
bool operator ==(Object other) => identical(this, other) || other is SystemConfigMachineLearningDto &&
other.availabilityChecks == availabilityChecks &&
other.clip == clip &&
other.duplicateDetection == duplicateDetection &&
other.enabled == enabled &&
other.facialRecognition == facialRecognition &&
other.url == url &&
_deepEquality.equals(other.urls, urls);
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(availabilityChecks.hashCode) +
(clip.hashCode) +
(duplicateDetection.hashCode) +
(enabled.hashCode) +
(facialRecognition.hashCode) +
(url == null ? 0 : url!.hashCode) +
(urls.hashCode);
@override
String toString() => 'SystemConfigMachineLearningDto[clip=$clip, duplicateDetection=$duplicateDetection, enabled=$enabled, facialRecognition=$facialRecognition, url=$url, urls=$urls]';
String toString() => 'SystemConfigMachineLearningDto[availabilityChecks=$availabilityChecks, clip=$clip, duplicateDetection=$duplicateDetection, enabled=$enabled, facialRecognition=$facialRecognition, urls=$urls]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'availabilityChecks'] = this.availabilityChecks;
json[r'clip'] = this.clip;
json[r'duplicateDetection'] = this.duplicateDetection;
json[r'enabled'] = this.enabled;
json[r'facialRecognition'] = this.facialRecognition;
if (this.url != null) {
json[r'url'] = this.url;
} else {
// json[r'url'] = null;
}
json[r'urls'] = this.urls;
return json;
}
@@ -86,11 +75,11 @@ class SystemConfigMachineLearningDto {
final json = value.cast<String, dynamic>();
return SystemConfigMachineLearningDto(
availabilityChecks: MachineLearningAvailabilityChecksDto.fromJson(json[r'availabilityChecks'])!,
clip: CLIPConfig.fromJson(json[r'clip'])!,
duplicateDetection: DuplicateDetectionConfig.fromJson(json[r'duplicateDetection'])!,
enabled: mapValueOfType<bool>(json, r'enabled')!,
facialRecognition: FacialRecognitionConfig.fromJson(json[r'facialRecognition'])!,
url: mapValueOfType<String>(json, r'url'),
urls: json[r'urls'] is Iterable
? (json[r'urls'] as Iterable).cast<String>().toList(growable: false)
: const [],
@@ -141,6 +130,7 @@ class SystemConfigMachineLearningDto {
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'availabilityChecks',
'clip',
'duplicateDetection',
'enabled',

View File

@@ -12259,6 +12259,25 @@
],
"type": "object"
},
"MachineLearningAvailabilityChecksDto": {
"properties": {
"enabled": {
"type": "boolean"
},
"interval": {
"type": "number"
},
"timeout": {
"type": "number"
}
},
"required": [
"enabled",
"interval",
"timeout"
],
"type": "object"
},
"ManualJobName": {
"enum": [
"person-cleanup",
@@ -16395,6 +16414,9 @@
},
"SystemConfigMachineLearningDto": {
"properties": {
"availabilityChecks": {
"$ref": "#/components/schemas/MachineLearningAvailabilityChecksDto"
},
"clip": {
"$ref": "#/components/schemas/CLIPConfig"
},
@@ -16407,11 +16429,6 @@
"facialRecognition": {
"$ref": "#/components/schemas/FacialRecognitionConfig"
},
"url": {
"deprecated": true,
"description": "This property was deprecated in v1.122.0",
"type": "string"
},
"urls": {
"format": "uri",
"items": {
@@ -16423,6 +16440,7 @@
}
},
"required": [
"availabilityChecks",
"clip",
"duplicateDetection",
"enabled",

View File

@@ -1383,6 +1383,11 @@ export type SystemConfigLoggingDto = {
enabled: boolean;
level: LogLevel;
};
export type MachineLearningAvailabilityChecksDto = {
enabled: boolean;
interval: number;
timeout: number;
};
export type ClipConfig = {
enabled: boolean;
modelName: string;
@@ -1399,12 +1404,11 @@ export type FacialRecognitionConfig = {
modelName: string;
};
export type SystemConfigMachineLearningDto = {
availabilityChecks: MachineLearningAvailabilityChecksDto;
clip: ClipConfig;
duplicateDetection: DuplicateDetectionConfig;
enabled: boolean;
facialRecognition: FacialRecognitionConfig;
/** This property was deprecated in v1.122.0 */
url?: string;
urls: string[];
};
export type SystemConfigMapDto = {

View File

@@ -3,7 +3,7 @@
"version": "0.0.1",
"description": "Monorepo for Immich",
"private": true,
"packageManager": "pnpm@10.14.0+sha512.ad27a79641b49c3e481a16a805baa71817a04bbe06a38d17e60e2eaee83f6a146c6a688125f5792e48dd5ba30e7da52a5cda4c3992b9ccf333f9ce223af84748",
"packageManager": "pnpm@10.15.1+sha512.34e538c329b5553014ca8e8f4535997f96180a1d0f614339357449935350d924e22f8614682191264ec33d1462ac21561aff97f6bb18065351c162c7e8f6de67",
"engines": {
"pnpm": ">=10.0.0"
}

2602
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

View File

@@ -44,14 +44,14 @@
"@nestjs/websockets": "^11.0.4",
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/context-async-hooks": "^2.0.0",
"@opentelemetry/exporter-prometheus": "^0.203.0",
"@opentelemetry/instrumentation-http": "^0.203.0",
"@opentelemetry/instrumentation-ioredis": "^0.51.0",
"@opentelemetry/instrumentation-nestjs-core": "^0.49.0",
"@opentelemetry/instrumentation-pg": "^0.56.0",
"@opentelemetry/exporter-prometheus": "^0.205.0",
"@opentelemetry/instrumentation-http": "^0.205.0",
"@opentelemetry/instrumentation-ioredis": "^0.53.0",
"@opentelemetry/instrumentation-nestjs-core": "^0.51.0",
"@opentelemetry/instrumentation-pg": "^0.58.0",
"@opentelemetry/resources": "^2.0.1",
"@opentelemetry/sdk-metrics": "^2.0.1",
"@opentelemetry/sdk-node": "^0.203.0",
"@opentelemetry/sdk-node": "^0.205.0",
"@opentelemetry/semantic-conventions": "^1.34.0",
"@react-email/components": "^0.5.0",
"@react-email/render": "^1.1.2",

View File

@@ -15,6 +15,7 @@ import { repositories } from 'src/repositories';
import { AccessRepository } from 'src/repositories/access.repository';
import { ConfigRepository } from 'src/repositories/config.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { MachineLearningRepository } from 'src/repositories/machine-learning.repository';
import { SyncRepository } from 'src/repositories/sync.repository';
import { AuthService } from 'src/services/auth.service';
import { getKyselyConfig } from 'src/utils/database';
@@ -57,7 +58,7 @@ class SqlGenerator {
try {
await this.setup();
for (const Repository of repositories) {
if (Repository === LoggingRepository) {
if (Repository === LoggingRepository || Repository === MachineLearningRepository) {
continue;
}
await this.process(Repository);

View File

@@ -54,6 +54,11 @@ export interface SystemConfig {
machineLearning: {
enabled: boolean;
urls: string[];
availabilityChecks: {
enabled: boolean;
timeout: number;
interval: number;
};
clip: {
enabled: boolean;
modelName: string;
@@ -176,6 +181,8 @@ export interface SystemConfig {
};
}
export type MachineLearningConfig = SystemConfig['machineLearning'];
export const defaults = Object.freeze<SystemConfig>({
backup: {
database: {
@@ -227,6 +234,11 @@ export const defaults = Object.freeze<SystemConfig>({
machineLearning: {
enabled: process.env.IMMICH_MACHINE_LEARNING_ENABLED !== 'false',
urls: [process.env.IMMICH_MACHINE_LEARNING_URL || 'http://immich-machine-learning:3003'],
availabilityChecks: {
enabled: true,
timeout: Number(process.env.IMMICH_MACHINE_LEARNING_PING_TIMEOUT) || 2000,
interval: 30_000,
},
clip: {
enabled: true,
modelName: 'ViT-B-32__openai',

View File

@@ -51,11 +51,6 @@ export const serverVersion = new SemVer(version);
export const AUDIT_LOG_MAX_DURATION = Duration.fromObject({ days: 100 });
export const ONE_HOUR = Duration.fromObject({ hours: 1 });
export const MACHINE_LEARNING_PING_TIMEOUT = Number(process.env.MACHINE_LEARNING_PING_TIMEOUT || 2000);
export const MACHINE_LEARNING_AVAILABILITY_BACKOFF_TIME = Number(
process.env.MACHINE_LEARNING_AVAILABILITY_BACKOFF_TIME || 30_000,
);
export const citiesFile = 'cities500.txt';
export const reverseGeocodeMaxDistance = 25_000;

View File

@@ -6,7 +6,7 @@ import { PropertyLifecycle } from 'src/decorators';
import { AlbumResponseDto } from 'src/dtos/album.dto';
import { AssetResponseDto } from 'src/dtos/asset-response.dto';
import { AssetOrder, AssetType, AssetVisibility } from 'src/enum';
import { Optional, ValidateBoolean, ValidateDate, ValidateEnum, ValidateUUID } from 'src/validation';
import { Optional, ValidateBoolean, ValidateDate, ValidateEnum, ValidateString, ValidateUUID } from 'src/validation';
class BaseSearchDto {
@ValidateUUID({ optional: true, nullable: true })
@@ -144,9 +144,7 @@ export class MetadataSearchDto extends RandomSearchDto {
@Optional()
deviceAssetId?: string;
@IsString()
@IsNotEmpty()
@Optional()
@ValidateString({ optional: true, trim: true })
description?: string;
@IsString()
@@ -154,9 +152,7 @@ export class MetadataSearchDto extends RandomSearchDto {
@Optional()
checksum?: string;
@IsString()
@IsNotEmpty()
@Optional()
@ValidateString({ optional: true, trim: true })
originalFileName?: string;
@IsString()
@@ -190,16 +186,12 @@ export class MetadataSearchDto extends RandomSearchDto {
}
export class StatisticsSearchDto extends BaseSearchDto {
@IsString()
@IsNotEmpty()
@Optional()
@ValidateString({ optional: true, trim: true })
description?: string;
}
export class SmartSearchDto extends BaseSearchWithResultsDto {
@IsString()
@IsNotEmpty()
@Optional()
@ValidateString({ optional: true, trim: true })
query?: string;
@ValidateUUID({ optional: true })

View File

@@ -1,5 +1,5 @@
import { ApiProperty } from '@nestjs/swagger';
import { Exclude, Transform, Type } from 'class-transformer';
import { Type } from 'class-transformer';
import {
ArrayMinSize,
IsInt,
@@ -15,7 +15,6 @@ import {
ValidateNested,
} from 'class-validator';
import { SystemConfig } from 'src/config';
import { PropertyLifecycle } from 'src/decorators';
import { CLIPConfig, DuplicateDetectionConfig, FacialRecognitionConfig } from 'src/dtos/model-config.dto';
import {
AudioCodec,
@@ -257,21 +256,32 @@ class SystemConfigLoggingDto {
level!: LogLevel;
}
// Validation shape for the `machineLearning.availabilityChecks` system-config
// section, which controls periodic health probes of the configured ML servers.
class MachineLearningAvailabilityChecksDto {
  // Whether periodic availability (health) checks are performed at all.
  @ValidateBoolean()
  enabled!: boolean;
  // Per-probe timeout in milliseconds (fed to AbortSignal.timeout for the /ping request).
  @IsInt()
  timeout!: number;
  // Delay between consecutive probe rounds, in milliseconds (used as the setInterval period).
  @IsInt()
  interval!: number;
}
class SystemConfigMachineLearningDto {
@ValidateBoolean()
enabled!: boolean;
@PropertyLifecycle({ deprecatedAt: 'v1.122.0' })
@Exclude()
url?: string;
@IsUrl({ require_tld: false, allow_underscores: true }, { each: true })
@ArrayMinSize(1)
@Transform(({ obj, value }) => (obj.url ? [obj.url] : value))
@ValidateIf((dto) => dto.enabled)
@ApiProperty({ type: 'array', items: { type: 'string', format: 'uri' }, minItems: 1 })
urls!: string[];
@Type(() => MachineLearningAvailabilityChecksDto)
@ValidateNested()
@IsObject()
availabilityChecks!: MachineLearningAvailabilityChecksDto;
@Type(() => CLIPConfig)
@ValidateNested()
@IsObject()

View File

@@ -142,6 +142,10 @@ export class LoggingRepository {
this.handleMessage(LogLevel.Fatal, message, details);
}
deprecate(message: string) {
  // Route deprecation notices through warn-level logging with a uniform
  // prefix so they are easy to spot (and grep for) in server logs.
  this.warn('[Deprecated] ' + message);
}
private handleFunction(level: LogLevel, message: LogFunction, details: LogDetails[]) {
if (this.logger.isLevelEnabled(level)) {
this.handleMessage(level, message(), details);

View File

@@ -1,6 +1,7 @@
import { Injectable } from '@nestjs/common';
import { Duration } from 'luxon';
import { readFile } from 'node:fs/promises';
import { MACHINE_LEARNING_AVAILABILITY_BACKOFF_TIME, MACHINE_LEARNING_PING_TIMEOUT } from 'src/constants';
import { MachineLearningConfig } from 'src/config';
import { CLIPConfig } from 'src/dtos/model-config.dto';
import { LoggingRepository } from 'src/repositories/logging.repository';
@@ -57,82 +58,100 @@ export type TextEncodingOptions = ModelOptions & { language?: string };
@Injectable()
export class MachineLearningRepository {
// Note that deleted URLs are not removed from this map (i.e. they're leaked).
// Cleaning them up is low priority since there should be very few over a
// typical server uptime cycle.
private urlAvailability: {
[url: string]:
| {
active: boolean;
lastChecked: number;
}
| undefined;
};
private healthyMap: Record<string, boolean> = {};
private interval?: ReturnType<typeof setInterval>;
private _config?: MachineLearningConfig;
private get config(): MachineLearningConfig {
  // Guard against use before setup() has stored the active configuration;
  // fail loudly rather than dereferencing undefined further down.
  if (!this._config) {
    throw new Error('Machine learning repository has not been set up');
  }
  return this._config;
}
constructor(private logger: LoggingRepository) {
this.logger.setContext(MachineLearningRepository.name);
this.urlAvailability = {};
}
private setUrlAvailability(url: string, active: boolean) {
const current = this.urlAvailability[url];
if (current?.active !== active) {
this.logger.verbose(`Setting ${url} ML server to ${active ? 'active' : 'inactive'}.`);
setup(config: MachineLearningConfig) {
this._config = config;
this.teardown();
// delete old servers
for (const url of Object.keys(this.healthyMap)) {
if (!config.urls.includes(url)) {
delete this.healthyMap[url];
}
}
this.urlAvailability[url] = {
active,
lastChecked: Date.now(),
};
if (!config.availabilityChecks.enabled) {
return;
}
this.tick();
this.interval = setInterval(
() => this.tick(),
Duration.fromObject({ milliseconds: config.availabilityChecks.interval }).as('milliseconds'),
);
}
private async checkAvailability(url: string) {
let active = false;
teardown() {
  // Stop the periodic availability check, if one is running. Resetting the
  // handle afterwards keeps repeated setup()/teardown() cycles from retaining
  // a stale (already-cleared) timer id.
  if (this.interval) {
    clearInterval(this.interval);
    this.interval = undefined;
  }
}
private tick() {
  // Kick off one availability probe per configured ML server URL.
  // Fire-and-forget (`void`) so a slow or unreachable endpoint never blocks
  // the interval callback or delays probes of the other servers.
  for (const url of this.config.urls) {
    void this.check(url);
  }
}
private async check(url: string) {
let healthy = false;
try {
const response = await fetch(new URL('/ping', url), {
signal: AbortSignal.timeout(MACHINE_LEARNING_PING_TIMEOUT),
signal: AbortSignal.timeout(this.config.availabilityChecks.timeout),
});
active = response.ok;
if (response.ok) {
healthy = true;
}
} catch {
// nothing to do here
}
this.setUrlAvailability(url, active);
return active;
this.setHealthy(url, healthy);
}
private async shouldSkipUrl(url: string) {
const availability = this.urlAvailability[url];
if (availability === undefined) {
// If this is a new endpoint, then check inline and skip if it fails
if (!(await this.checkAvailability(url))) {
return true;
}
return false;
private setHealthy(url: string, healthy: boolean) {
if (this.healthyMap[url] !== healthy) {
this.logger.log(`Machine learning server became ${healthy ? 'healthy' : 'unhealthy'} (${url}).`);
}
if (!availability.active && Date.now() - availability.lastChecked < MACHINE_LEARNING_AVAILABILITY_BACKOFF_TIME) {
// If this is an old inactive endpoint that hasn't been checked in a
// while then check but don't wait for the result, just skip it
// This avoids delays on every search whilst allowing higher priority
// ML servers to recover over time.
void this.checkAvailability(url);
this.healthyMap[url] = healthy;
}
private isHealthy(url: string) {
if (!this.config.availabilityChecks.enabled) {
return true;
}
return false;
return this.healthyMap[url];
}
private async predict<T>(urls: string[], payload: ModelPayload, config: MachineLearningRequest): Promise<T> {
private async predict<T>(payload: ModelPayload, config: MachineLearningRequest): Promise<T> {
const formData = await this.getFormData(payload, config);
let urlCounter = 0;
for (const url of urls) {
urlCounter++;
const isLast = urlCounter >= urls.length;
if (!isLast && (await this.shouldSkipUrl(url))) {
continue;
}
for (const url of [
// try healthy servers first
...this.config.urls.filter((url) => this.isHealthy(url)),
...this.config.urls.filter((url) => !this.isHealthy(url)),
]) {
try {
const response = await fetch(new URL('/predict', url), { method: 'POST', body: formData });
if (response.ok) {
this.setUrlAvailability(url, true);
this.setHealthy(url, true);
return response.json();
}
@@ -144,20 +163,21 @@ export class MachineLearningRepository {
`Machine learning request to "${url}" failed: ${error instanceof Error ? error.message : error}`,
);
}
this.setUrlAvailability(url, false);
this.setHealthy(url, false);
}
throw new Error(`Machine learning request '${JSON.stringify(config)}' failed for all URLs`);
}
async detectFaces(urls: string[], imagePath: string, { modelName, minScore }: FaceDetectionOptions) {
async detectFaces(imagePath: string, { modelName, minScore }: FaceDetectionOptions) {
const request = {
[ModelTask.FACIAL_RECOGNITION]: {
[ModelType.DETECTION]: { modelName, options: { minScore } },
[ModelType.RECOGNITION]: { modelName },
},
};
const response = await this.predict<FacialRecognitionResponse>(urls, { imagePath }, request);
const response = await this.predict<FacialRecognitionResponse>({ imagePath }, request);
return {
imageHeight: response.imageHeight,
imageWidth: response.imageWidth,
@@ -165,15 +185,15 @@ export class MachineLearningRepository {
};
}
async encodeImage(urls: string[], imagePath: string, { modelName }: CLIPConfig) {
async encodeImage(imagePath: string, { modelName }: CLIPConfig) {
const request = { [ModelTask.SEARCH]: { [ModelType.VISUAL]: { modelName } } };
const response = await this.predict<ClipVisualResponse>(urls, { imagePath }, request);
const response = await this.predict<ClipVisualResponse>({ imagePath }, request);
return response[ModelTask.SEARCH];
}
async encodeText(urls: string[], text: string, { language, modelName }: TextEncodingOptions) {
async encodeText(text: string, { language, modelName }: TextEncodingOptions) {
const request = { [ModelTask.SEARCH]: { [ModelType.TEXTUAL]: { modelName, options: { language } } } };
const response = await this.predict<ClipTextualResponse>(urls, { text }, request);
const response = await this.predict<ClipTextualResponse>({ text }, request);
return response[ModelTask.SEARCH];
}

View File

@@ -729,7 +729,6 @@ describe(PersonService.name, () => {
mocks.assetJob.getForDetectFacesJob.mockResolvedValue({ ...assetStub.image, files: [assetStub.image.files[1]] });
await sut.handleDetectFaces({ id: assetStub.image.id });
expect(mocks.machineLearning.detectFaces).toHaveBeenCalledWith(
['http://immich-machine-learning:3003'],
'/uploads/user-id/thumbs/path.jpg',
expect.objectContaining({ minScore: 0.7, modelName: 'buffalo_l' }),
);

View File

@@ -316,7 +316,6 @@ export class PersonService extends BaseService {
}
const { imageHeight, imageWidth, faces } = await this.machineLearningRepository.detectFaces(
machineLearning.urls,
previewFile.path,
machineLearning.facialRecognition,
);

View File

@@ -211,7 +211,6 @@ describe(SearchService.name, () => {
await sut.searchSmart(authStub.user1, { query: 'test' });
expect(mocks.machineLearning.encodeText).toHaveBeenCalledWith(
[expect.any(String)],
'test',
expect.objectContaining({ modelName: expect.any(String) }),
);
@@ -225,7 +224,6 @@ describe(SearchService.name, () => {
await sut.searchSmart(authStub.user1, { query: 'test', page: 2, size: 50 });
expect(mocks.machineLearning.encodeText).toHaveBeenCalledWith(
[expect.any(String)],
'test',
expect.objectContaining({ modelName: expect.any(String) }),
);
@@ -243,7 +241,6 @@ describe(SearchService.name, () => {
await sut.searchSmart(authStub.user1, { query: 'test' });
expect(mocks.machineLearning.encodeText).toHaveBeenCalledWith(
[expect.any(String)],
'test',
expect.objectContaining({ modelName: 'ViT-B-16-SigLIP__webli' }),
);
@@ -253,7 +250,6 @@ describe(SearchService.name, () => {
await sut.searchSmart(authStub.user1, { query: 'test', language: 'de' });
expect(mocks.machineLearning.encodeText).toHaveBeenCalledWith(
[expect.any(String)],
'test',
expect.objectContaining({ language: 'de' }),
);

View File

@@ -118,7 +118,7 @@ export class SearchService extends BaseService {
const key = machineLearning.clip.modelName + dto.query + dto.language;
embedding = this.embeddingCache.get(key);
if (!embedding) {
embedding = await this.machineLearningRepository.encodeText(machineLearning.urls, dto.query, {
embedding = await this.machineLearningRepository.encodeText(dto.query, {
modelName: machineLearning.clip.modelName,
language: dto.language,
});

View File

@@ -205,7 +205,6 @@ describe(SmartInfoService.name, () => {
expect(await sut.handleEncodeClip({ id: assetStub.image.id })).toEqual(JobStatus.Success);
expect(mocks.machineLearning.encodeImage).toHaveBeenCalledWith(
['http://immich-machine-learning:3003'],
'/uploads/user-id/thumbs/path.jpg',
expect.objectContaining({ modelName: 'ViT-B-32__openai' }),
);
@@ -242,7 +241,6 @@ describe(SmartInfoService.name, () => {
expect(mocks.database.wait).toHaveBeenCalledWith(512);
expect(mocks.machineLearning.encodeImage).toHaveBeenCalledWith(
['http://immich-machine-learning:3003'],
'/uploads/user-id/thumbs/path.jpg',
expect.objectContaining({ modelName: 'ViT-B-32__openai' }),
);

View File

@@ -108,11 +108,7 @@ export class SmartInfoService extends BaseService {
return JobStatus.Skipped;
}
const embedding = await this.machineLearningRepository.encodeImage(
machineLearning.urls,
asset.files[0].path,
machineLearning.clip,
);
const embedding = await this.machineLearningRepository.encodeImage(asset.files[0].path, machineLearning.clip);
if (this.databaseRepository.isBusy(DatabaseLock.CLIPDimSize)) {
this.logger.verbose(`Waiting for CLIP dimension size to be updated`);

View File

@@ -82,6 +82,11 @@ const updatedConfig = Object.freeze<SystemConfig>({
machineLearning: {
enabled: true,
urls: ['http://immich-machine-learning:3003'],
availabilityChecks: {
enabled: true,
interval: 30_000,
timeout: 2000,
},
clip: {
enabled: true,
modelName: 'ViT-B-32__openai',

View File

@@ -16,6 +16,20 @@ export class SystemConfigService extends BaseService {
async onBootstrap() {
  // Load the system config (bypassing the cache) and broadcast it so other
  // services can initialize from it.
  const config = await this.getConfig({ withCache: false });
  await this.eventRepository.emit('ConfigInit', { newConfig: config });
  // Warn when the legacy environment variables are still set; their values now
  // live in system config under `machineLearning.availabilityChecks`.
  if (
    process.env.IMMICH_MACHINE_LEARNING_PING_TIMEOUT ||
    process.env.IMMICH_MACHINE_LEARNING_AVAILABILITY_BACKOFF_TIME
  ) {
    this.logger.deprecate(
      'IMMICH_MACHINE_LEARNING_PING_TIMEOUT and IMMICH_MACHINE_LEARNING_AVAILABILITY_BACKOFF_TIME have been moved to system config (`machineLearning.availabilityChecks`) and will be removed in a future release.',
    );
  }
}
@OnEvent({ name: 'AppShutdown' })
onShutdown() {
  // Stop the ML availability-check interval so the process can exit cleanly
  // without an active timer keeping the event loop alive.
  this.machineLearningRepository.teardown();
}
async getSystemConfig(): Promise<SystemConfigDto> {
@@ -28,12 +42,14 @@ export class SystemConfigService extends BaseService {
}
@OnEvent({ name: 'ConfigInit', priority: -100 })
onConfigInit({ newConfig: { logging } }: ArgOf<'ConfigInit'>) {
onConfigInit({ newConfig: { logging, machineLearning } }: ArgOf<'ConfigInit'>) {
const { logLevel: envLevel } = this.configRepository.getEnv();
const configLevel = logging.enabled ? logging.level : false;
const level = envLevel ?? configLevel;
this.logger.setLogLevel(level);
this.logger.log(`LogLevel=${level} ${envLevel ? '(set via IMMICH_LOG_LEVEL)' : '(set via system config)'}`);
this.machineLearningRepository.setup(machineLearning);
}
@OnEvent({ name: 'ConfigUpdate', server: true })

View File

@@ -211,6 +211,18 @@ export const ValidateDate = (options?: DateOptions & ApiPropertyOptions) => {
return applyDecorators(...decorators);
};
type StringOptions = { optional?: boolean; nullable?: boolean; trim?: boolean };

/**
 * Composite validation decorator for string DTO properties.
 *
 * Applies Swagger metadata plus `IsString()`, and either `Optional({ nullable })`
 * or `IsNotEmpty()` depending on `options.optional`. When `trim` is set, leading
 * and trailing whitespace is stripped before validation runs.
 */
export const ValidateString = (options?: StringOptions & ApiPropertyOptions) => {
  const { optional, nullable, trim, ...apiPropertyOptions } = options || {};
  const decorators = [ApiProperty(apiPropertyOptions), IsString(), optional ? Optional({ nullable }) : IsNotEmpty()];
  if (trim) {
    // class-transformer runs Transform before class-validator, so `value` may
    // not actually be a string at this point; only trim real strings and let
    // IsString() reject everything else instead of throwing a TypeError here.
    decorators.push(Transform(({ value }) => (typeof value === 'string' ? value.trim() : value)));
  }
  return applyDecorators(...decorators);
};
type BooleanOptions = { optional?: boolean; nullable?: boolean };
export const ValidateBoolean = (options?: BooleanOptions & ApiPropertyOptions) => {
const { optional, nullable, ...apiPropertyOptions } = options || {};

View File

@@ -127,6 +127,7 @@ export default typescriptEslint.config(
'@typescript-eslint/no-misused-promises': 'error',
'@typescript-eslint/require-await': 'error',
'object-shorthand': ['error', 'always'],
'svelte/no-navigation-without-resolve': 'off',
},
},
{

View File

@@ -55,7 +55,7 @@
"qrcode": "^1.5.4",
"simple-icons": "^15.15.0",
"socket.io-client": "~4.8.0",
"svelte-gestures": "^5.1.3",
"svelte-gestures": "5.1.4",
"svelte-i18n": "^4.0.1",
"svelte-maplibre": "^1.2.0",
"svelte-persisted-store": "^0.12.0",
@@ -70,7 +70,7 @@
"@sveltejs/adapter-static": "^3.0.8",
"@sveltejs/enhanced-img": "^0.8.0",
"@sveltejs/kit": "^2.27.1",
"@sveltejs/vite-plugin-svelte": "6.1.2",
"@sveltejs/vite-plugin-svelte": "6.2.0",
"@tailwindcss/vite": "^4.1.7",
"@testing-library/jest-dom": "^6.4.2",
"@testing-library/svelte": "^5.2.8",
@@ -85,7 +85,7 @@
"dotenv": "^17.0.0",
"eslint": "^9.18.0",
"eslint-config-prettier": "^10.1.8",
"eslint-p": "^0.25.0",
"eslint-p": "^0.26.0",
"eslint-plugin-compat": "^6.0.2",
"eslint-plugin-svelte": "^3.9.0",
"eslint-plugin-unicorn": "^60.0.0",
@@ -97,7 +97,7 @@
"prettier-plugin-sort-json": "^4.1.1",
"prettier-plugin-svelte": "^3.3.3",
"rollup-plugin-visualizer": "^6.0.0",
"svelte": "5.35.5",
"svelte": "5.38.10",
"svelte-check": "^4.1.5",
"svelte-eslint-parser": "^1.2.0",
"tailwindcss": "^4.1.7",

View File

@@ -9,7 +9,7 @@
import { featureFlags } from '$lib/stores/server-config.store';
import type { SystemConfigDto } from '@immich/sdk';
import { Button, IconButton } from '@immich/ui';
import { mdiMinusCircle } from '@mdi/js';
import { mdiPlus, mdiTrashCanOutline } from '@mdi/js';
import { isEqual } from 'lodash-es';
import { t } from 'svelte-i18n';
import { fade } from 'svelte/transition';
@@ -46,19 +46,6 @@
<div>
{#each config.machineLearning.urls as _, i (i)}
{#snippet removeButton()}
{#if config.machineLearning.urls.length > 1}
<IconButton
size="large"
shape="round"
color="danger"
aria-label=""
onclick={() => config.machineLearning.urls.splice(i, 1)}
icon={mdiMinusCircle}
/>
{/if}
{/snippet}
<SettingInputField
inputType={SettingInputFieldType.TEXT}
label={i === 0 ? $t('url') : undefined}
@@ -67,20 +54,69 @@
required={i === 0}
disabled={disabled || !config.machineLearning.enabled}
isEdited={i === 0 && !isEqual(config.machineLearning.urls, savedConfig.machineLearning.urls)}
trailingSnippet={removeButton}
/>
>
{#snippet trailingSnippet()}
{#if config.machineLearning.urls.length > 1}
<IconButton
aria-label=""
onclick={() => config.machineLearning.urls.splice(i, 1)}
icon={mdiTrashCanOutline}
color="danger"
/>
{/if}
{/snippet}
</SettingInputField>
{/each}
</div>
<Button
class="mb-2"
size="small"
shape="round"
onclick={() => config.machineLearning.urls.splice(0, 0, '')}
disabled={disabled || !config.machineLearning.enabled}>{$t('add_url')}</Button
>
<div class="flex justify-end">
<Button
class="mb-2"
size="small"
shape="round"
leadingIcon={mdiPlus}
onclick={() => config.machineLearning.urls.push('')}
disabled={disabled || !config.machineLearning.enabled}>{$t('add_url')}</Button
>
</div>
</div>
<SettingAccordion
key="availability-checks"
title={$t('admin.machine_learning_availability_checks')}
subtitle={$t('admin.machine_learning_availability_checks_description')}
>
<div class="ms-4 mt-4 flex flex-col gap-4">
<SettingSwitch
title={$t('admin.machine_learning_availability_checks_enabled')}
bind:checked={config.machineLearning.availabilityChecks.enabled}
disabled={disabled || !config.machineLearning.enabled}
/>
<hr />
<SettingInputField
inputType={SettingInputFieldType.NUMBER}
label={$t('admin.machine_learning_availability_checks_interval')}
bind:value={config.machineLearning.availabilityChecks.interval}
description={$t('admin.machine_learning_availability_checks_interval_description')}
disabled={disabled || !config.machineLearning.enabled || !config.machineLearning.availabilityChecks.enabled}
isEdited={config.machineLearning.availabilityChecks.interval !==
savedConfig.machineLearning.availabilityChecks.interval}
/>
<SettingInputField
inputType={SettingInputFieldType.NUMBER}
label={$t('admin.machine_learning_availability_checks_timeout')}
bind:value={config.machineLearning.availabilityChecks.timeout}
description={$t('admin.machine_learning_availability_checks_timeout_description')}
disabled={disabled || !config.machineLearning.enabled || !config.machineLearning.availabilityChecks.enabled}
isEdited={config.machineLearning.availabilityChecks.timeout !==
savedConfig.machineLearning.availabilityChecks.timeout}
/>
</div>
</SettingAccordion>
<SettingAccordion
key="smart-search"
title={$t('admin.machine_learning_smart_search')}

View File

@@ -1,4 +1,5 @@
<script lang="ts">
import { resolve } from '$app/paths';
import SupportedDatetimePanel from '$lib/components/admin-settings/SupportedDatetimePanel.svelte';
import SupportedVariablesPanel from '$lib/components/admin-settings/SupportedVariablesPanel.svelte';
import SettingButtonsRow from '$lib/components/shared-components/settings/setting-buttons-row.svelte';
@@ -262,7 +263,7 @@
values={{ job: $t('admin.storage_template_migration_job') }}
>
{#snippet children({ message })}
<a href={AppRoute.ADMIN_JOBS} class="text-primary">
<a href={resolve(AppRoute.ADMIN_JOBS)} class="text-primary">
{message}
</a>
{/snippet}

View File

@@ -1,4 +1,5 @@
<script lang="ts">
import { resolve } from '$app/paths';
import AlbumCard from '$lib/components/album-page/album-card.svelte';
import { AppRoute } from '$lib/constants';
import { albumViewSettings } from '$lib/stores/preferences.store';
@@ -65,7 +66,7 @@
{#each albums as album, index (album.id)}
<a
data-sveltekit-preload-data="hover"
href="{AppRoute.ALBUMS}/{album.id}"
href={resolve(`${AppRoute.ALBUMS}/${album.id}`)}
animate:flip={{ duration: 400 }}
oncontextmenu={(event) => oncontextmenu(event, album)}
>

View File

@@ -1,5 +1,6 @@
<script lang="ts">
import { goto } from '$app/navigation';
import { resolve } from '$app/paths';
import AlbumCardGroup from '$lib/components/album-page/album-card-group.svelte';
import AlbumsTable from '$lib/components/album-page/albums-table.svelte';
import MenuOption from '$lib/components/shared-components/context-menu/menu-option.svelte';
@@ -315,7 +316,7 @@
button: {
text: $t('view_album'),
onClick() {
return goto(`${AppRoute.ALBUMS}/${album.id}`);
return goto(resolve(`${AppRoute.ALBUMS}/${album.id}`));
},
},
});

View File

@@ -1,5 +1,6 @@
<script lang="ts">
import { goto } from '$app/navigation';
import { resolve } from '$app/paths';
import { AppRoute, dateFormats } from '$lib/constants';
import { locale } from '$lib/stores/preferences.store';
import { user } from '$lib/stores/user.store';
@@ -32,7 +33,7 @@
<tr
class="flex h-[50px] w-full place-items-center border-[3px] border-transparent p-2 text-center even:bg-subtle/20 odd:bg-subtle/80 hover:cursor-pointer hover:border-immich-primary/75 odd:dark:bg-immich-dark-gray/75 even:dark:bg-immich-dark-gray/50 dark:hover:border-immich-dark-primary/75 md:p-5"
onclick={() => goto(`${AppRoute.ALBUMS}/${album.id}`)}
onclick={() => goto(resolve(`${AppRoute.ALBUMS}/${album.id}`))}
{oncontextmenu}
>
<td class="text-md text-ellipsis text-start w-8/12 sm:w-4/12 md:w-4/12 xl:w-[30%] 2xl:w-[40%] items-center">

View File

@@ -1,4 +1,5 @@
<script lang="ts">
import { resolve } from '$app/paths';
import { autoGrowHeight } from '$lib/actions/autogrow';
import { shortcut } from '$lib/actions/shortcut';
import ButtonContextMenu from '$lib/components/shared-components/context-menu/button-context-menu.svelte';
@@ -146,7 +147,10 @@
<div class="w-full leading-4 overflow-hidden self-center break-words text-sm">{reaction.comment}</div>
{#if assetId === undefined && reaction.assetId}
<a class="aspect-square w-[75px] h-[75px]" href="{AppRoute.ALBUMS}/{albumId}/photos/{reaction.assetId}">
<a
class="aspect-square w-[75px] h-[75px]"
href={resolve(`${AppRoute.ALBUMS}/${albumId}/photos/${reaction.assetId}`)}
>
<img
class="rounded-lg w-[75px] h-[75px] object-cover"
src={getAssetThumbnailUrl(reaction.assetId)}
@@ -198,7 +202,7 @@
{#if assetId === undefined && reaction.assetId}
<a
class="aspect-square w-[75px] h-[75px]"
href="{AppRoute.ALBUMS}/{albumId}/photos/{reaction.assetId}"
href={resolve(`${AppRoute.ALBUMS}/${albumId}/photos/${reaction.assetId}`)}
>
<img
class="rounded-lg w-[75px] h-[75px] object-cover"

View File

@@ -1,5 +1,6 @@
<script lang="ts">
import { goto } from '$app/navigation';
import { resolve } from '$app/paths';
import CastButton from '$lib/cast/cast-button.svelte';
import type { OnAction, PreAction } from '$lib/components/asset-viewer/actions/action';
import AddToAlbumAction from '$lib/components/asset-viewer/actions/add-to-album-action.svelte';
@@ -224,14 +225,15 @@
{#if !asset.isArchived && !asset.isTrashed}
<MenuOption
icon={mdiImageSearch}
onClick={() => goto(`${AppRoute.PHOTOS}?at=${stack?.primaryAssetId ?? asset.id}`)}
onClick={() => goto(resolve(`${AppRoute.PHOTOS}?at=${stack?.primaryAssetId ?? asset.id}`))}
text={$t('view_in_timeline')}
/>
{/if}
{#if !asset.isArchived && !asset.isTrashed && smartSearchEnabled}
<MenuOption
icon={mdiCompare}
onClick={() => goto(`${AppRoute.SEARCH}?query={"queryAssetId":"${stack?.primaryAssetId ?? asset.id}"}`)}
onClick={() =>
goto(resolve(`${AppRoute.SEARCH}?query={"queryAssetId":"${stack?.primaryAssetId ?? asset.id}"}`))}
text={$t('view_similar_photos')}
/>
{/if}

View File

@@ -1,4 +1,5 @@
<script lang="ts">
import { resolve } from '$app/paths';
import { shortcut } from '$lib/actions/shortcut';
import { AppRoute } from '$lib/constants';
import { authManager } from '$lib/managers/auth-manager.svelte';
@@ -45,7 +46,7 @@
<div class="flex group transition-all">
<a
class="inline-block h-min whitespace-nowrap ps-3 pe-1 group-hover:ps-3 py-1 text-center align-baseline leading-none text-gray-100 dark:text-immich-dark-gray bg-primary rounded-s-full hover:bg-immich-primary/80 dark:hover:bg-immich-dark-primary/80 transition-all"
href={encodeURI(`${AppRoute.TAGS}/?path=${tag.value}`)}
href={resolve(`${AppRoute.TAGS}/?path=${encodeURI(tag.value)}`)}
>
<p class="text-sm">
{tag.value}

View File

@@ -1,5 +1,6 @@
<script lang="ts">
import { goto } from '$app/navigation';
import { resolve } from '$app/paths';
import DetailPanelDescription from '$lib/components/asset-viewer/detail-panel-description.svelte';
import DetailPanelLocation from '$lib/components/asset-viewer/detail-panel-location.svelte';
import DetailPanelRating from '$lib/components/asset-viewer/detail-panel-star-rating.svelte';
@@ -208,9 +209,11 @@
{#if showingHiddenPeople || !person.isHidden}
<a
class="w-[90px]"
href="{AppRoute.PEOPLE}/{person.id}?{QueryParameter.PREVIOUS_ROUTE}={currentAlbum?.id
? `${AppRoute.ALBUMS}/${currentAlbum?.id}`
: AppRoute.PHOTOS}"
href={resolve(
`${AppRoute.PEOPLE}/${person.id}?${QueryParameter.PREVIOUS_ROUTE}=${
currentAlbum?.id ? `${AppRoute.ALBUMS}/${currentAlbum?.id}` : AppRoute.PHOTOS
}`,
)}
onfocus={() => ($boundingBoxesArray = people[index].faces)}
onblur={() => ($boundingBoxesArray = [])}
onmouseover={() => ($boundingBoxesArray = people[index].faces)}
@@ -362,6 +365,7 @@
</p>
{#if showAssetPath}
<p class="text-xs opacity-50 break-all pb-2 hover:text-primary" transition:slide={{ duration: 250 }}>
<!-- eslint-disable-next-line svelte/no-navigation-without-resolve this is supposed to be treated as an absolute/external link -->
<a href={getAssetFolderHref(asset)} title={$t('go_to_folder')} class="whitespace-pre-wrap">
{asset.originalPath}
</a>
@@ -394,10 +398,12 @@
{#if asset.exifInfo?.make || asset.exifInfo?.model}
<p>
<a
href="{AppRoute.SEARCH}?{getMetadataSearchQuery({
...(asset.exifInfo?.make ? { make: asset.exifInfo.make } : {}),
...(asset.exifInfo?.model ? { model: asset.exifInfo.model } : {}),
})}"
href={resolve(
`${AppRoute.SEARCH}?${getMetadataSearchQuery({
...(asset.exifInfo?.make ? { make: asset.exifInfo.make } : {}),
...(asset.exifInfo?.model ? { model: asset.exifInfo.model } : {}),
})}`,
)}
title="{$t('search_for')} {asset.exifInfo.make || ''} {asset.exifInfo.model || ''}"
class="hover:text-primary"
>
@@ -411,7 +417,9 @@
<div class="flex gap-2 text-sm">
<p>
<a
href="{AppRoute.SEARCH}?{getMetadataSearchQuery({ lensModel: asset.exifInfo.lensModel })}"
href={resolve(
`${AppRoute.SEARCH}?${getMetadataSearchQuery({ lensModel: asset.exifInfo.lensModel })}`,
)}
title="{$t('search_for')} {asset.exifInfo.lensModel}"
class="hover:text-primary line-clamp-1"
>
@@ -475,7 +483,7 @@
simplified
useLocationPin
showSimpleControls={!showEditFaces}
onOpenInMapView={() => goto(`${AppRoute.MAP}#12.5/${latlng.lat}/${latlng.lng}`)}
onOpenInMapView={() => goto(resolve(`${AppRoute.MAP}#12.5/${latlng.lat}/${latlng.lng}`))}
>
{#snippet popup({ marker })}
{@const { lat, lon } = marker}
@@ -516,7 +524,7 @@
<section class="px-6 pt-6 dark:text-immich-dark-fg">
<p class="uppercase pb-4 text-sm">{$t('appears_in')}</p>
{#each albums as album (album.id)}
<a href="{AppRoute.ALBUMS}/{album.id}">
<a href={resolve(`${AppRoute.ALBUMS}/${album.id}`)}>
<div class="flex gap-4 pt-2 hover:cursor-pointer items-center">
<div>
<img

View File

@@ -97,12 +97,15 @@
}
try {
await copyImageToClipboard($photoViewerImgElement ?? assetFileUrl);
notificationController.show({
type: NotificationType.Info,
message: $t('copied_image_to_clipboard'),
timeout: 3000,
});
const result = await copyImageToClipboard($photoViewerImgElement ?? assetFileUrl);
if (result.success) {
notificationController.show({ type: NotificationType.Info, message: $t('copied_image_to_clipboard') });
} else {
notificationController.show({
type: NotificationType.Error,
message: $t('errors.clipboard_unsupported_mime_type', { values: { mimeType: result.mimeType } }),
});
}
} catch (error) {
handleError(error, $t('copy_error'));
}
@@ -240,7 +243,7 @@
use:zoomImageAction
use:swipe={() => ({})}
onswipe={onSwipe}
class="h-full w-full flex"
class="h-full w-full"
transition:fade={{ duration: haveFadeTransition ? assetViewerFadeDuration : 0 }}
>
{#if $slideshowState !== SlideshowState.None && $slideshowLook === SlideshowLook.BlurredBackground}
@@ -255,7 +258,7 @@
bind:this={$photoViewerImgElement}
src={assetFileUrl}
alt={$getAltText(toTimelineAsset(asset))}
class="max-h-full max-w-full h-auto w-auto mx-auto my-auto {$slideshowState === SlideshowState.None
class="h-full w-full {$slideshowState === SlideshowState.None
? 'object-contain'
: slideshowLookCssMapping[$slideshowLook]}"
draggable="false"

View File

@@ -7,6 +7,7 @@
<script lang="ts">
import DateInput from '$lib/elements/DateInput.svelte';
import { Text } from '@immich/ui';
import { t } from 'svelte-i18n';
interface Props {
@@ -14,31 +15,27 @@
}
let { filters = $bindable() }: Props = $props();
let invalid = $derived(filters.takenAfter && filters.takenBefore && filters.takenAfter > filters.takenBefore);
const inputClasses = $derived(
`immich-form-input w-full mt-1 hover:cursor-pointer ${invalid ? 'border border-danger' : ''}`,
);
</script>
<div id="date-range-selection" class="grid grid-auto-fit-40 gap-5">
<label class="immich-form-label" for="start-date">
<span class="uppercase">{$t('start_date')}</span>
<DateInput
class="immich-form-input w-full mt-1 hover:cursor-pointer"
type="date"
id="start-date"
name="start-date"
max={filters.takenBefore}
bind:value={filters.takenAfter}
/>
</label>
<div class="flex flex-col gap-1">
<div id="date-range-selection" class="grid grid-auto-fit-40 gap-5">
<label class="immich-form-label" for="start-date">
<span class="uppercase">{$t('start_date')}</span>
<DateInput class={inputClasses} type="date" id="start-date" name="start-date" bind:value={filters.takenAfter} />
</label>
<label class="immich-form-label" for="end-date">
<span class="uppercase">{$t('end_date')}</span>
<DateInput
class="immich-form-input w-full mt-1 hover:cursor-pointer"
type="date"
id="end-date"
name="end-date"
placeholder=""
min={filters.takenAfter}
bind:value={filters.takenBefore}
/>
</label>
<label class="immich-form-label" for="end-date">
<span class="uppercase">{$t('end_date')}</span>
<DateInput class={inputClasses} type="date" id="end-date" name="end-date" bind:value={filters.takenBefore} />
</label>
</div>
{#if invalid}
<Text color="danger">{$t('start_date_before_end_date')}</Text>
{/if}
</div>

View File

@@ -105,7 +105,7 @@
{/if}
{#if inputType !== SettingInputFieldType.PASSWORD}
<div class="flex place-items-center place-content-center">
<div class="flex place-items-center place-content-center gap-2">
{#if inputType === SettingInputFieldType.COLOR}
<input
bind:this={input}

View File

@@ -17,6 +17,9 @@ describe('RecentAlbums component', () => {
render(RecentAlbums);
expect(sdkMock.getAllAlbums).toBeCalledTimes(1);
// wtf
await tick();
await tick();
const links = screen.getAllByRole('link');

View File

@@ -625,7 +625,21 @@ const urlToBlob = async (imageSource: string) => {
return await response.blob();
};
export const copyImageToClipboard = async (source: HTMLImageElement | string) => {
const blob = source instanceof HTMLImageElement ? await imgToBlob(source) : await urlToBlob(source);
/**
 * Copies an image to the system clipboard.
 *
 * Returns `{ success: true }` on success, or `{ success: false, mimeType }`
 * when the fetched blob's mime type is not supported by ClipboardItem, so the
 * caller can show an "unsupported mime type" error to the user.
 */
export const copyImageToClipboard = async (
  source: HTMLImageElement | string,
): Promise<{ success: true } | { success: false; mimeType: string }> => {
  if (source instanceof HTMLImageElement) {
    // The imgToBlob(...) promise is deliberately NOT awaited before being handed
    // to ClipboardItem: Safari only permits clipboard writes issued synchronously
    // within the user-gesture context, so the pending promise is passed in and
    // resolved by the browser later.
    await navigator.clipboard.write([new ClipboardItem({ ['image/png']: imgToBlob(source) })]);
    return { success: true };
  }
  // For URL sources the mime type is only known after fetching the blob (there is
  // no synchronous way to obtain it), so the Safari-friendly trick above cannot
  // be applied here.
  const blob = await urlToBlob(source);
  if (!ClipboardItem.supports(blob.type)) {
    return { success: false, mimeType: blob.type };
  }
  await navigator.clipboard.write([new ClipboardItem({ [blob.type]: blob })]);
  return { success: true };
};