Compare commits
5 Commits
original-p
...
hash-on-up
| Author | SHA1 | Date |
|---|---|---|
| | 630fcf3516 | |
| | c579e78413 | |
| | 4bf82fb4c4 | |
| | cbb0a7f8d4 | |
| | ee6550c02c | |
167
mobile/lib/models/backup/bulk_upload_check_result.model.dart
Normal file
167
mobile/lib/models/backup/bulk_upload_check_result.model.dart
Normal file
@@ -0,0 +1,167 @@
|
||||
// ignore_for_file: public_member_api_docs, sort_constructors_first
|
||||
import 'dart:convert';
|
||||
|
||||
import 'package:collection/collection.dart';
|
||||
|
||||
/// Result entry for a locally stored asset that the server rejected
/// during the bulk-upload pre-check (e.g. because an asset with the
/// same checksum already exists remotely).
class RejectResult {
  /// Identifier of the asset on the device.
  final String localId;

  /// Identifier of the already-existing asset on the server.
  final String remoteId;

  RejectResult({
    required this.localId,
    required this.remoteId,
  });

  /// Returns a copy with the given fields replaced.
  RejectResult copyWith({
    String? localId,
    String? remoteId,
  }) {
    return RejectResult(
      localId: localId ?? this.localId,
      remoteId: remoteId ?? this.remoteId,
    );
  }

  /// Serializes this result into a plain map.
  Map<String, dynamic> toMap() {
    return <String, dynamic>{
      'localId': localId,
      'remoteId': remoteId,
    };
  }

  /// Restores a result from a map produced by [toMap].
  factory RejectResult.fromMap(Map<String, dynamic> map) {
    return RejectResult(
      localId: map['localId'] as String,
      remoteId: map['remoteId'] as String,
    );
  }

  String toJson() => json.encode(toMap());

  factory RejectResult.fromJson(String source) =>
      RejectResult.fromMap(json.decode(source) as Map<String, dynamic>);

  @override
  String toString() => 'RejectResult(localId: $localId, remoteId: $remoteId)';

  @override
  bool operator ==(covariant RejectResult other) {
    if (identical(this, other)) return true;

    return other.localId == localId && other.remoteId == remoteId;
  }

  // Object.hash avoids the symmetric collision of XOR-ing the two field
  // hashes: previously RejectResult(a, b) and RejectResult(b, a) always
  // produced the same hash code.
  @override
  int get hashCode => Object.hash(localId, remoteId);
}
|
||||
|
||||
/// Result entry for a locally stored asset that passed the server's
/// bulk-upload pre-check and may be uploaded.
class AcceptResult {
  /// Identifier of the asset on the device.
  final String localId;

  AcceptResult({required this.localId});

  /// Returns a copy with [localId] replaced when a value is given.
  AcceptResult copyWith({String? localId}) =>
      AcceptResult(localId: localId ?? this.localId);

  /// Serializes this result into a plain map.
  Map<String, dynamic> toMap() => <String, dynamic>{'localId': localId};

  /// Restores a result from a map produced by [toMap].
  factory AcceptResult.fromMap(Map<String, dynamic> map) =>
      AcceptResult(localId: map['localId'] as String);

  String toJson() => json.encode(toMap());

  factory AcceptResult.fromJson(String source) =>
      AcceptResult.fromMap(json.decode(source) as Map<String, dynamic>);

  @override
  String toString() => 'AcceptResult(localId: $localId)';

  @override
  bool operator ==(covariant AcceptResult other) =>
      identical(this, other) || other.localId == localId;

  @override
  int get hashCode => localId.hashCode;
}
|
||||
|
||||
/// Aggregated outcome of the server-side bulk-upload pre-check:
/// assets to skip ([rejects]) and assets safe to upload ([accepts]).
class BulkUploadCheckResult {
  List<RejectResult> rejects;
  List<AcceptResult> accepts;

  BulkUploadCheckResult({
    required this.rejects,
    required this.accepts,
  });

  /// Returns a copy with the given lists replaced.
  BulkUploadCheckResult copyWith({
    List<RejectResult>? rejects,
    List<AcceptResult>? accepts,
  }) {
    return BulkUploadCheckResult(
      rejects: rejects ?? this.rejects,
      accepts: accepts ?? this.accepts,
    );
  }

  /// Serializes this result into a plain map of nested maps.
  Map<String, dynamic> toMap() {
    return <String, dynamic>{
      'rejects': rejects.map((x) => x.toMap()).toList(),
      'accepts': accepts.map((x) => x.toMap()).toList(),
    };
  }

  /// Restores a result from a map produced by [toMap].
  factory BulkUploadCheckResult.fromMap(Map<String, dynamic> map) {
    // The decoded JSON lists contain maps, not ints: the previous
    // `as List<int>` cast threw a TypeError on any non-empty payload.
    return BulkUploadCheckResult(
      rejects: (map['rejects'] as List<dynamic>)
          .map<RejectResult>(
            (x) => RejectResult.fromMap(x as Map<String, dynamic>),
          )
          .toList(),
      accepts: (map['accepts'] as List<dynamic>)
          .map<AcceptResult>(
            (x) => AcceptResult.fromMap(x as Map<String, dynamic>),
          )
          .toList(),
    );
  }

  String toJson() => json.encode(toMap());

  factory BulkUploadCheckResult.fromJson(String source) =>
      BulkUploadCheckResult.fromMap(
        json.decode(source) as Map<String, dynamic>,
      );

  @override
  String toString() =>
      'BulkUploadCheckResult(rejects: $rejects, accepts: $accepts)';

  @override
  bool operator ==(covariant BulkUploadCheckResult other) {
    if (identical(this, other)) return true;
    final listEquals = const DeepCollectionEquality().equals;

    return listEquals(other.rejects, rejects) &&
        listEquals(other.accepts, accepts);
  }

  // Keep hashCode consistent with the deep-equality `==` above: equal
  // list contents must hash equally, which identity-based List.hashCode
  // (the previous implementation) did not guarantee.
  @override
  int get hashCode => const DeepCollectionEquality().hash([rejects, accepts]);
}
|
||||
@@ -1,5 +1,3 @@
|
||||
import 'dart:io';
|
||||
|
||||
import 'package:cancellation_token_http/http.dart';
|
||||
import 'package:collection/collection.dart';
|
||||
import 'package:flutter/foundation.dart';
|
||||
@@ -462,36 +460,39 @@ class BackupNotifier extends StateNotifier<BackUpState> {
|
||||
return;
|
||||
}
|
||||
|
||||
Set<BackupCandidate> assetsWillBeBackup = Set.from(state.allUniqueAssets);
|
||||
Set<BackupCandidate> candidates = Set.from(state.allUniqueAssets);
|
||||
// Remove item that has already been backed up
|
||||
for (final assetId in state.allAssetsInDatabase) {
|
||||
assetsWillBeBackup.removeWhere((e) => e.asset.id == assetId);
|
||||
candidates.removeWhere((e) => e.asset.id == assetId);
|
||||
}
|
||||
|
||||
if (assetsWillBeBackup.isEmpty) {
|
||||
if (candidates.isEmpty) {
|
||||
state = state.copyWith(backupProgress: BackUpProgressEnum.idle);
|
||||
}
|
||||
|
||||
// Perform Backup
|
||||
state = state.copyWith(cancelToken: CancellationToken());
|
||||
// Check with server for hash duplication
|
||||
final bulkCheckResult = await _backupService.checkBulkUpload(candidates);
|
||||
|
||||
final pmProgressHandler = Platform.isIOS ? PMProgressHandler() : null;
|
||||
// // Perform Backup
|
||||
// state = state.copyWith(cancelToken: CancellationToken());
|
||||
|
||||
pmProgressHandler?.stream.listen((event) {
|
||||
final double progress = event.progress;
|
||||
state = state.copyWith(iCloudDownloadProgress: progress);
|
||||
});
|
||||
// final pmProgressHandler = Platform.isIOS ? PMProgressHandler() : null;
|
||||
|
||||
await _backupService.backupAsset(
|
||||
assetsWillBeBackup,
|
||||
state.cancelToken,
|
||||
pmProgressHandler: pmProgressHandler,
|
||||
onSuccess: _onAssetUploaded,
|
||||
onProgress: _onUploadProgress,
|
||||
onCurrentAsset: _onSetCurrentBackupAsset,
|
||||
onError: _onBackupError,
|
||||
);
|
||||
await notifyBackgroundServiceCanRun();
|
||||
// pmProgressHandler?.stream.listen((event) {
|
||||
// final double progress = event.progress;
|
||||
// state = state.copyWith(iCloudDownloadProgress: progress);
|
||||
// });
|
||||
|
||||
// await _backupService.backupAsset(
|
||||
// candidates,
|
||||
// state.cancelToken,
|
||||
// pmProgressHandler: pmProgressHandler,
|
||||
// onSuccess: _onAssetUploaded,
|
||||
// onProgress: _onUploadProgress,
|
||||
// onCurrentAsset: _onSetCurrentBackupAsset,
|
||||
// onError: _onBackupError,
|
||||
// );
|
||||
// await notifyBackgroundServiceCanRun();
|
||||
} else {
|
||||
openAppSettings();
|
||||
}
|
||||
|
||||
@@ -361,8 +361,13 @@ class BackgroundService {
|
||||
UserService(apiService, db, syncSerive, partnerService);
|
||||
AlbumService albumService =
|
||||
AlbumService(apiService, userService, syncSerive, db);
|
||||
BackupService backupService =
|
||||
BackupService(apiService, db, settingService, albumService);
|
||||
BackupService backupService = BackupService(
|
||||
apiService,
|
||||
db,
|
||||
settingService,
|
||||
albumService,
|
||||
hashService,
|
||||
);
|
||||
|
||||
final selectedAlbums = backupService.selectedAlbumsQuery().findAllSync();
|
||||
final excludedAlbums = backupService.excludedAlbumsQuery().findAllSync();
|
||||
|
||||
@@ -10,6 +10,7 @@ import 'package:immich_mobile/entities/backup_album.entity.dart';
|
||||
import 'package:immich_mobile/entities/duplicated_asset.entity.dart';
|
||||
import 'package:immich_mobile/entities/store.entity.dart';
|
||||
import 'package:immich_mobile/models/backup/backup_candidate.model.dart';
|
||||
import 'package:immich_mobile/models/backup/bulk_upload_check_result.model.dart';
|
||||
import 'package:immich_mobile/models/backup/current_upload_asset.model.dart';
|
||||
import 'package:immich_mobile/models/backup/error_upload_asset.model.dart';
|
||||
import 'package:immich_mobile/models/backup/success_upload_asset.model.dart';
|
||||
@@ -19,6 +20,7 @@ import 'package:immich_mobile/providers/db.provider.dart';
|
||||
import 'package:immich_mobile/services/album.service.dart';
|
||||
import 'package:immich_mobile/services/api.service.dart';
|
||||
import 'package:immich_mobile/services/app_settings.service.dart';
|
||||
import 'package:immich_mobile/services/hash.service.dart';
|
||||
import 'package:isar/isar.dart';
|
||||
import 'package:logging/logging.dart';
|
||||
import 'package:openapi/api.dart';
|
||||
@@ -32,6 +34,7 @@ final backupServiceProvider = Provider(
|
||||
ref.watch(dbProvider),
|
||||
ref.watch(appSettingsServiceProvider),
|
||||
ref.watch(albumServiceProvider),
|
||||
ref.watch(hashServiceProvider),
|
||||
),
|
||||
);
|
||||
|
||||
@@ -42,14 +45,71 @@ class BackupService {
|
||||
final Logger _log = Logger("BackupService");
|
||||
final AppSettingsService _appSetting;
|
||||
final AlbumService _albumService;
|
||||
final HashService _hashService;
|
||||
|
||||
BackupService(
|
||||
this._apiService,
|
||||
this._db,
|
||||
this._appSetting,
|
||||
this._albumService,
|
||||
this._hashService,
|
||||
);
|
||||
|
||||
/// Asks the server which of [candidates] should actually be uploaded.
///
/// Hashes the candidates' underlying asset entities locally, sends the
/// (local id, checksum) pairs to the bulk-upload-check endpoint, and
/// partitions the response into [RejectResult]s (server already has the
/// asset; `remoteId` points at the existing copy) and [AcceptResult]s
/// (safe to upload). A null server response yields an empty result.
Future<BulkUploadCheckResult> checkBulkUpload(
  Set<BackupCandidate> candidates,
) async {
  List<AssetBulkUploadCheckItem> assets = [];

  final assetEntities = candidates.map((c) => c.asset).toList();
  // NOTE(review): assumes every candidate can be hashed here; confirm how
  // getHashedAssetsFromAssetEntity treats entities without a stored hash
  // (dropped candidates would silently skip the server check).
  final hashedDeviceAssets =
      await _hashService.getHashedAssetsFromAssetEntity(assetEntities);

  for (final hashedAsset in hashedDeviceAssets) {
    final AssetBulkUploadCheckItem item = AssetBulkUploadCheckItem(
      id: hashedAsset.id.toString(),
      checksum: hashedAsset.checksum,
    );

    assets.add(item);
  }

  final response = await _apiService.assetsApi.checkBulkUpload(
    AssetBulkUploadCheckDto(assets: assets),
  );

  // Treat a null response as "nothing rejected, nothing accepted".
  if (response == null) {
    return BulkUploadCheckResult(
      rejects: [],
      accepts: [],
    );
  }

  final List<RejectResult> rejects = [];
  final List<AcceptResult> accepts = [];

  // Partition the per-asset results: only explicit rejects carry the
  // server-side asset id; every non-reject action is treated as accept.
  for (final result in response.results) {
    if (result.action == AssetBulkUploadCheckResultActionEnum.reject) {
      rejects.add(
        RejectResult(
          localId: result.id,
          remoteId: result.assetId ?? "",
        ),
      );
    } else {
      accepts.add(
        AcceptResult(
          localId: result.id,
        ),
      );
    }
  }

  return BulkUploadCheckResult(
    rejects: rejects,
    accepts: accepts,
  );
}
|
||||
|
||||
Future<List<String>?> getDeviceBackupAsset() async {
|
||||
final String deviceId = Store.get(StoreKey.deviceId);
|
||||
|
||||
|
||||
@@ -19,8 +19,20 @@ class HashService {
|
||||
final BackgroundService _backgroundService;
|
||||
final _log = Logger('HashService');
|
||||
|
||||
/// Returns the hashed [Asset]s corresponding to the given photo-library
/// [assets].
///
/// Looks each entity up in the on-device hash store by local id and maps
/// the entities to their stored hashes via `_mapAllHashedAssets`.
// NOTE(review): behavior for entities with no stored hash depends on
// _mapAllHashedAssets' handling of null lookup results — confirm they are
// filtered out rather than causing a failure.
Future<List<Asset>> getHashedAssetsFromAssetEntity(
  List<AssetEntity> assets,
) async {
  // Android persists integer ids, iOS persists string ids.
  final ids = assets
      .map(Platform.isAndroid ? (a) => a.id.toInt() : (a) => a.id)
      .toList();

  final List<DeviceAsset?> hashes = await lookupHashes(ids);

  return _mapAllHashedAssets(assets, hashes);
}
|
||||
|
||||
/// Returns all assets that were successfully hashed
|
||||
Future<List<Asset>> getHashedAssets(
|
||||
Future<List<Asset>> getHashedAssetsFromDeviceAlbum(
|
||||
AssetPathEntity album, {
|
||||
int start = 0,
|
||||
int end = 0x7fffffffffffffff,
|
||||
@@ -44,7 +56,7 @@ class HashService {
|
||||
final ids = assetEntities
|
||||
.map(Platform.isAndroid ? (a) => a.id.toInt() : (a) => a.id)
|
||||
.toList();
|
||||
final List<DeviceAsset?> hashes = await _lookupHashes(ids);
|
||||
final List<DeviceAsset?> hashes = await lookupHashes(ids);
|
||||
final List<DeviceAsset> toAdd = [];
|
||||
final List<String> toHash = [];
|
||||
|
||||
@@ -90,7 +102,7 @@ class HashService {
|
||||
}
|
||||
|
||||
/// Lookup hashes of assets by their local ID
|
||||
Future<List<DeviceAsset?>> _lookupHashes(List<Object> ids) =>
|
||||
Future<List<DeviceAsset?>> lookupHashes(List<Object> ids) =>
|
||||
Platform.isAndroid
|
||||
? _db.androidDeviceAssets.getAll(ids.cast())
|
||||
: _db.iOSDeviceAssets.getAllById(ids.cast());
|
||||
|
||||
@@ -566,8 +566,8 @@ class SyncService {
|
||||
.findAll();
|
||||
assert(inDb.isSorted(Asset.compareByChecksum), "inDb not sorted!");
|
||||
final int assetCountOnDevice = await ape.assetCountAsync;
|
||||
final List<Asset> onDevice =
|
||||
await _hashService.getHashedAssets(ape, excludedAssets: excludedAssets);
|
||||
final List<Asset> onDevice = await _hashService
|
||||
.getHashedAssetsFromDeviceAlbum(ape, excludedAssets: excludedAssets);
|
||||
_removeDuplicates(onDevice);
|
||||
// _removeDuplicates sorts `onDevice` by checksum
|
||||
final (toAdd, toUpdate, toDelete) = _diffAssets(onDevice, inDb);
|
||||
@@ -649,7 +649,8 @@ class SyncService {
|
||||
if (modified == null) {
|
||||
return false;
|
||||
}
|
||||
final List<Asset> newAssets = await _hashService.getHashedAssets(modified);
|
||||
final List<Asset> newAssets =
|
||||
await _hashService.getHashedAssetsFromDeviceAlbum(modified);
|
||||
|
||||
if (totalOnDevice != lastKnownTotal + newAssets.length) {
|
||||
return false;
|
||||
@@ -683,8 +684,8 @@ class SyncService {
|
||||
]) async {
|
||||
_log.info("Syncing a new local album to DB: ${ape.name}");
|
||||
final Album a = Album.local(ape);
|
||||
final assets =
|
||||
await _hashService.getHashedAssets(ape, excludedAssets: excludedAssets);
|
||||
final assets = await _hashService.getHashedAssetsFromDeviceAlbum(ape,
|
||||
excludedAssets: excludedAssets);
|
||||
_removeDuplicates(assets);
|
||||
final (existingInDb, updated) = await _linkWithExistingFromDb(assets);
|
||||
_log.info(
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { BinaryField } from 'exiftool-vendored';
|
||||
import { BinaryField, ExifDateTime } from 'exiftool-vendored';
|
||||
import { randomBytes } from 'node:crypto';
|
||||
import { Stats } from 'node:fs';
|
||||
import { constants } from 'node:fs/promises';
|
||||
@@ -746,6 +746,8 @@ describe(MetadataService.name, () => {
|
||||
});
|
||||
|
||||
it('should save all metadata', async () => {
|
||||
const dateForTest = new Date('1970-01-01T00:00:00.000-11:30');
|
||||
|
||||
const tags: ImmichTags = {
|
||||
BitsPerSample: 1,
|
||||
ComponentBitDepth: 1,
|
||||
@@ -753,7 +755,7 @@ describe(MetadataService.name, () => {
|
||||
BitDepth: 1,
|
||||
ColorBitDepth: 1,
|
||||
ColorSpace: '1',
|
||||
DateTimeOriginal: new Date('1970-01-01').toISOString(),
|
||||
DateTimeOriginal: ExifDateTime.fromISO(dateForTest.toISOString()),
|
||||
ExposureTime: '100ms',
|
||||
FocalLength: 20,
|
||||
ImageDescription: 'test description',
|
||||
@@ -762,11 +764,11 @@ describe(MetadataService.name, () => {
|
||||
MediaGroupUUID: 'livePhoto',
|
||||
Make: 'test-factory',
|
||||
Model: "'mockel'",
|
||||
ModifyDate: new Date('1970-01-01').toISOString(),
|
||||
ModifyDate: ExifDateTime.fromISO(dateForTest.toISOString()),
|
||||
Orientation: 0,
|
||||
ProfileDescription: 'extensive description',
|
||||
ProjectionType: 'equirectangular',
|
||||
tz: '+02:00',
|
||||
tz: 'UTC-11:30',
|
||||
Rating: 3,
|
||||
};
|
||||
assetMock.getByIds.mockResolvedValue([assetStub.image]);
|
||||
@@ -779,7 +781,7 @@ describe(MetadataService.name, () => {
|
||||
bitsPerSample: expect.any(Number),
|
||||
autoStackId: null,
|
||||
colorspace: tags.ColorSpace,
|
||||
dateTimeOriginal: new Date('1970-01-01'),
|
||||
dateTimeOriginal: dateForTest,
|
||||
description: tags.ImageDescription,
|
||||
exifImageHeight: null,
|
||||
exifImageWidth: null,
|
||||
@@ -805,11 +807,37 @@ describe(MetadataService.name, () => {
|
||||
expect(assetMock.update).toHaveBeenCalledWith({
|
||||
id: assetStub.image.id,
|
||||
duration: null,
|
||||
fileCreatedAt: new Date('1970-01-01'),
|
||||
localDateTime: new Date('1970-01-01'),
|
||||
fileCreatedAt: dateForTest,
|
||||
localDateTime: dateForTest,
|
||||
});
|
||||
});
|
||||
|
||||
it('should extract +00:00 timezone from raw value', async () => {
|
||||
// exiftool-vendored returns "no timezone" information even though "+00:00" might be set explicitly
|
||||
// https://github.com/photostructure/exiftool-vendored.js/issues/203
|
||||
|
||||
// this only tests our assumptions of exiftool-vendored, demonstrating the issue
|
||||
const someDate = '2024-09-01T00:00:00.000';
|
||||
expect(ExifDateTime.fromISO(someDate + 'Z')?.zone).toBe('UTC');
|
||||
expect(ExifDateTime.fromISO(someDate + '+00:00')?.zone).toBe('UTC'); // this is the issue, should be UTC+0
|
||||
expect(ExifDateTime.fromISO(someDate + '+04:00')?.zone).toBe('UTC+4');
|
||||
|
||||
const tags: ImmichTags = {
|
||||
DateTimeOriginal: ExifDateTime.fromISO(someDate + '+00:00'),
|
||||
tz: undefined,
|
||||
};
|
||||
assetMock.getByIds.mockResolvedValue([assetStub.image]);
|
||||
metadataMock.readTags.mockResolvedValue(tags);
|
||||
|
||||
await sut.handleMetadataExtraction({ id: assetStub.image.id });
|
||||
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id]);
|
||||
expect(assetMock.upsertExif).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
timeZone: 'UTC+0',
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should extract duration', async () => {
|
||||
assetMock.getByIds.mockResolvedValue([{ ...assetStub.video }]);
|
||||
mediaMock.probe.mockResolvedValue({
|
||||
|
||||
@@ -531,12 +531,16 @@ export class MetadataService {
|
||||
|
||||
this.logger.verbose('Exif Tags', exifTags);
|
||||
|
||||
const dateTimeOriginalWithRawValue = this.getDateTimeOriginalWithRawValue(exifTags);
|
||||
const dateTimeOriginal = dateTimeOriginalWithRawValue.exifDate ?? asset.fileCreatedAt;
|
||||
const timeZone = this.getTimeZone(exifTags, dateTimeOriginalWithRawValue.rawValue);
|
||||
|
||||
const exifData = {
|
||||
// altitude: tags.GPSAltitude ?? null,
|
||||
assetId: asset.id,
|
||||
bitsPerSample: this.getBitsPerSample(exifTags),
|
||||
colorspace: exifTags.ColorSpace ?? null,
|
||||
dateTimeOriginal: this.getDateTimeOriginal(exifTags) ?? asset.fileCreatedAt,
|
||||
dateTimeOriginal,
|
||||
description: String(exifTags.ImageDescription || exifTags.Description || '').trim(),
|
||||
exifImageHeight: validate(exifTags.ImageHeight),
|
||||
exifImageWidth: validate(exifTags.ImageWidth),
|
||||
@@ -557,7 +561,7 @@ export class MetadataService {
|
||||
orientation: validate(exifTags.Orientation)?.toString() ?? null,
|
||||
profileDescription: exifTags.ProfileDescription || null,
|
||||
projectionType: exifTags.ProjectionType ? String(exifTags.ProjectionType).toUpperCase() : null,
|
||||
timeZone: exifTags.tz ?? null,
|
||||
timeZone,
|
||||
rating: exifTags.Rating ?? null,
|
||||
};
|
||||
|
||||
@@ -578,10 +582,25 @@ export class MetadataService {
|
||||
}
|
||||
|
||||
private getDateTimeOriginal(tags: ImmichTags | Tags | null) {
|
||||
return this.getDateTimeOriginalWithRawValue(tags).exifDate;
|
||||
}
|
||||
|
||||
private getDateTimeOriginalWithRawValue(tags: ImmichTags | Tags | null): { exifDate: Date | null; rawValue: string } {
|
||||
if (!tags) {
|
||||
return null;
|
||||
return { exifDate: null, rawValue: '' };
|
||||
}
|
||||
return exifDate(firstDateTime(tags as Tags, EXIF_DATE_TAGS));
|
||||
const first = firstDateTime(tags as Tags, EXIF_DATE_TAGS);
|
||||
return { exifDate: exifDate(first), rawValue: first?.rawValue ?? '' };
|
||||
}
|
||||
|
||||
private getTimeZone(exifTags: ImmichTags, rawValue: string) {
|
||||
const timeZone = exifTags.tz ?? null;
|
||||
if (timeZone == null && rawValue.endsWith('+00:00')) {
|
||||
// exiftool-vendored returns "no timezone" information even though "+00:00" might be set explicitly
|
||||
// https://github.com/photostructure/exiftool-vendored.js/issues/203
|
||||
return 'UTC+0';
|
||||
}
|
||||
return timeZone;
|
||||
}
|
||||
|
||||
private getBitsPerSample(tags: ImmichTags): number | null {
|
||||
|
||||
@@ -10,18 +10,25 @@
|
||||
|
||||
type ZoneOption = {
|
||||
/**
|
||||
* Timezone name
|
||||
* Timezone name with offset
|
||||
*
|
||||
* e.g. Asia/Jerusalem (+03:00)
|
||||
*/
|
||||
label: string;
|
||||
|
||||
/**
|
||||
* Timezone offset
|
||||
* Timezone name
|
||||
*
|
||||
* e.g. UTC+01:00
|
||||
* e.g. Asia/Jerusalem
|
||||
*/
|
||||
value: string;
|
||||
|
||||
/**
|
||||
* Timezone offset in minutes
|
||||
*
|
||||
* e.g. 300
|
||||
*/
|
||||
offsetMinutes: number;
|
||||
};
|
||||
|
||||
const timezones: ZoneOption[] = Intl.supportedValuesOf('timeZone')
|
||||
@@ -37,21 +44,23 @@
|
||||
const offset = zone.toFormat('ZZ');
|
||||
return {
|
||||
label: `${zone.zoneName} (${offset})`,
|
||||
value: 'UTC' + offset,
|
||||
value: zone.zoneName,
|
||||
offsetMinutes: zone.offset,
|
||||
};
|
||||
});
|
||||
|
||||
const initialOption = timezones.find((item) => item.value === 'UTC' + initialDate.toFormat('ZZ'));
|
||||
const initialOption = timezones.find((item) => item.offsetMinutes === initialDate.offset);
|
||||
|
||||
let selectedOption = initialOption && {
|
||||
label: initialOption?.label || '',
|
||||
offsetMinutes: initialOption?.offsetMinutes || 0,
|
||||
value: initialOption?.value || '',
|
||||
};
|
||||
|
||||
let selectedDate = initialDate.toFormat("yyyy-MM-dd'T'HH:mm");
|
||||
|
||||
// Keep local time if not it's really confusing
|
||||
$: date = DateTime.fromISO(selectedDate).setZone(selectedOption?.value, { keepLocalTime: true });
|
||||
// when changing the time zone, assume the configured date/time is meant for that time zone (instead of updating it)
|
||||
$: date = DateTime.fromISO(selectedDate, { zone: selectedOption?.value, setZone: true });
|
||||
|
||||
const dispatch = createEventDispatcher<{
|
||||
cancel: void;
|
||||
|
||||
@@ -284,6 +284,7 @@ export const langs = [
|
||||
{ name: 'Lithuanian', code: 'lt', loader: () => import('$lib/i18n/lt.json') },
|
||||
{ name: 'Latvian', code: 'lv', loader: () => import('$lib/i18n/lv.json') },
|
||||
{ name: 'Mongolian', code: 'mn', loader: () => import('$lib/i18n/mn.json') },
|
||||
{ name: 'Malay', code: 'ms', loader: () => import('$lib/i18n/ms.json') },
|
||||
{ name: 'Norwegian Bokmål', code: 'nb-NO', weblateCode: 'nb_NO', loader: () => import('$lib/i18n/nb_NO.json') },
|
||||
{ name: 'Dutch', code: 'nl', loader: () => import('$lib/i18n/nl.json') },
|
||||
{ name: 'Polish', code: 'pl', loader: () => import('$lib/i18n/pl.json') },
|
||||
|
||||
1
web/src/lib/i18n/ms.json
Normal file
1
web/src/lib/i18n/ms.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
||||
Reference in New Issue
Block a user