Compare commits

1 Commit

Author: Carsten Otto
SHA1: c3d9f2ce34
Message: feat(server): infix search for originalPath
Date: 2024-09-04 08:44:24 +02:00
17 changed files with 96 additions and 366 deletions

View File

@@ -851,4 +851,26 @@ describe('/libraries', () => {
expect(existsSync(`${testAssetDir}/temp/directoryB/assetB.png`)).toBe(true);
});
});
describe('POST /search/metadata', () => {
it('should search by originalPath', async () => {
const directory = `some-61498-directory`;
const infix = 'me-61498-di';
utils.createImageFile(`${testAssetDir}/temp/${directory}/assetZ.jpg`);
await scan(admin.accessToken, library.id);
await utils.waitForWebsocketEvent({ event: 'assetUpload', total: 1 });
const { status, body } = await request(app)
.post('/search/metadata')
.send({ originalPath: infix })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body.assets).toBeDefined();
expect(Array.isArray(body.assets.items)).toBe(true);
expect(body.assets.items).toHaveLength(1);
expect(body.assets.items[0]).toEqual(expect.objectContaining({ originalFileName: 'assetZ.jpg' }));
});
});
});
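For reference, a minimal standalone sketch of the request this e2e test performs, written with plain fetch instead of the supertest harness. BASE_URL and ACCESS_TOKEN are placeholders, not values from the repository; the endpoint, request body shape, and response shape are taken from the test above.

// Hypothetical standalone version of the call exercised by the test above.
// BASE_URL and ACCESS_TOKEN are placeholders.
const BASE_URL = 'http://localhost:2283/api';
const ACCESS_TOKEN = '<admin access token>';

async function searchByOriginalPathInfix(infix: string) {
  const response = await fetch(`${BASE_URL}/search/metadata`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${ACCESS_TOKEN}`,
    },
    body: JSON.stringify({ originalPath: infix }),
  });
  const body = await response.json();
  return body.assets.items; // assets whose originalPath contains the given infix
}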

View File

@@ -288,13 +288,6 @@ describe('/search', () => {
should: 'should search by takenAfter (no results)',
deferred: () => ({ dto: { takenAfter: today.plus({ hour: 1 }).toJSDate() }, assets: [] }),
},
// {
// should: 'should search by originalPath',
// deferred: () => ({
// dto: { originalPath: asset1.originalPath },
// assets: [asset1],
// }),
// },
{
should: 'should search by originalFilename',
deferred: () => ({

View File

@@ -1,167 +0,0 @@
// ignore_for_file: public_member_api_docs, sort_constructors_first
import 'dart:convert';
import 'package:collection/collection.dart';
class RejectResult {
final String localId;
final String remoteId;
RejectResult({
required this.localId,
required this.remoteId,
});
RejectResult copyWith({
String? localId,
String? remoteId,
}) {
return RejectResult(
localId: localId ?? this.localId,
remoteId: remoteId ?? this.remoteId,
);
}
Map<String, dynamic> toMap() {
return <String, dynamic>{
'localId': localId,
'remoteId': remoteId,
};
}
factory RejectResult.fromMap(Map<String, dynamic> map) {
return RejectResult(
localId: map['localId'] as String,
remoteId: map['remoteId'] as String,
);
}
String toJson() => json.encode(toMap());
factory RejectResult.fromJson(String source) =>
RejectResult.fromMap(json.decode(source) as Map<String, dynamic>);
@override
String toString() => 'RejectResult(localId: $localId, remoteId: $remoteId)';
@override
bool operator ==(covariant RejectResult other) {
if (identical(this, other)) return true;
return other.localId == localId && other.remoteId == remoteId;
}
@override
int get hashCode => localId.hashCode ^ remoteId.hashCode;
}
class AcceptResult {
final String localId;
AcceptResult({
required this.localId,
});
AcceptResult copyWith({
String? localId,
}) {
return AcceptResult(
localId: localId ?? this.localId,
);
}
Map<String, dynamic> toMap() {
return <String, dynamic>{
'localId': localId,
};
}
factory AcceptResult.fromMap(Map<String, dynamic> map) {
return AcceptResult(
localId: map['localId'] as String,
);
}
String toJson() => json.encode(toMap());
factory AcceptResult.fromJson(String source) =>
AcceptResult.fromMap(json.decode(source) as Map<String, dynamic>);
@override
String toString() => 'AcceptResult(localId: $localId)';
@override
bool operator ==(covariant AcceptResult other) {
if (identical(this, other)) return true;
return other.localId == localId;
}
@override
int get hashCode => localId.hashCode;
}
class BulkUploadCheckResult {
List<RejectResult> rejects;
List<AcceptResult> accepts;
BulkUploadCheckResult({
required this.rejects,
required this.accepts,
});
BulkUploadCheckResult copyWith({
List<RejectResult>? rejects,
List<AcceptResult>? accepts,
}) {
return BulkUploadCheckResult(
rejects: rejects ?? this.rejects,
accepts: accepts ?? this.accepts,
);
}
Map<String, dynamic> toMap() {
return <String, dynamic>{
'rejects': rejects.map((x) => x.toMap()).toList(),
'accepts': accepts.map((x) => x.toMap()).toList(),
};
}
factory BulkUploadCheckResult.fromMap(Map<String, dynamic> map) {
return BulkUploadCheckResult(
rejects: List<RejectResult>.from(
(map['rejects'] as List<int>).map<RejectResult>(
(x) => RejectResult.fromMap(x as Map<String, dynamic>),
),
),
accepts: List<AcceptResult>.from(
(map['accepts'] as List<int>).map<AcceptResult>(
(x) => AcceptResult.fromMap(x as Map<String, dynamic>),
),
),
);
}
String toJson() => json.encode(toMap());
factory BulkUploadCheckResult.fromJson(String source) =>
BulkUploadCheckResult.fromMap(
json.decode(source) as Map<String, dynamic>,
);
@override
String toString() =>
'BulkUploadCheckResult(rejects: $rejects, accepts: $accepts)';
@override
bool operator ==(covariant BulkUploadCheckResult other) {
if (identical(this, other)) return true;
final listEquals = const DeepCollectionEquality().equals;
return listEquals(other.rejects, rejects) &&
listEquals(other.accepts, accepts);
}
@override
int get hashCode => rejects.hashCode ^ accepts.hashCode;
}

View File

@@ -1,3 +1,5 @@
import 'dart:io';
import 'package:cancellation_token_http/http.dart';
import 'package:collection/collection.dart';
import 'package:flutter/foundation.dart';
@@ -460,39 +462,36 @@ class BackupNotifier extends StateNotifier<BackUpState> {
return;
}
Set<BackupCandidate> candidates = Set.from(state.allUniqueAssets);
Set<BackupCandidate> assetsWillBeBackup = Set.from(state.allUniqueAssets);
// Remove item that has already been backed up
for (final assetId in state.allAssetsInDatabase) {
candidates.removeWhere((e) => e.asset.id == assetId);
assetsWillBeBackup.removeWhere((e) => e.asset.id == assetId);
}
if (candidates.isEmpty) {
if (assetsWillBeBackup.isEmpty) {
state = state.copyWith(backupProgress: BackUpProgressEnum.idle);
}
// Check with server for hash duplication
final bulkCheckResult = await _backupService.checkBulkUpload(candidates);
// Perform Backup
state = state.copyWith(cancelToken: CancellationToken());
// // Perform Backup
// state = state.copyWith(cancelToken: CancellationToken());
final pmProgressHandler = Platform.isIOS ? PMProgressHandler() : null;
// final pmProgressHandler = Platform.isIOS ? PMProgressHandler() : null;
pmProgressHandler?.stream.listen((event) {
final double progress = event.progress;
state = state.copyWith(iCloudDownloadProgress: progress);
});
// pmProgressHandler?.stream.listen((event) {
// final double progress = event.progress;
// state = state.copyWith(iCloudDownloadProgress: progress);
// });
// await _backupService.backupAsset(
// candidates,
// state.cancelToken,
// pmProgressHandler: pmProgressHandler,
// onSuccess: _onAssetUploaded,
// onProgress: _onUploadProgress,
// onCurrentAsset: _onSetCurrentBackupAsset,
// onError: _onBackupError,
// );
// await notifyBackgroundServiceCanRun();
await _backupService.backupAsset(
assetsWillBeBackup,
state.cancelToken,
pmProgressHandler: pmProgressHandler,
onSuccess: _onAssetUploaded,
onProgress: _onUploadProgress,
onCurrentAsset: _onSetCurrentBackupAsset,
onError: _onBackupError,
);
await notifyBackgroundServiceCanRun();
} else {
openAppSettings();
}

View File

@@ -361,13 +361,8 @@ class BackgroundService {
UserService(apiService, db, syncSerive, partnerService);
AlbumService albumService =
AlbumService(apiService, userService, syncSerive, db);
BackupService backupService = BackupService(
apiService,
db,
settingService,
albumService,
hashService,
);
BackupService backupService =
BackupService(apiService, db, settingService, albumService);
final selectedAlbums = backupService.selectedAlbumsQuery().findAllSync();
final excludedAlbums = backupService.excludedAlbumsQuery().findAllSync();

View File

@@ -10,7 +10,6 @@ import 'package:immich_mobile/entities/backup_album.entity.dart';
import 'package:immich_mobile/entities/duplicated_asset.entity.dart';
import 'package:immich_mobile/entities/store.entity.dart';
import 'package:immich_mobile/models/backup/backup_candidate.model.dart';
import 'package:immich_mobile/models/backup/bulk_upload_check_result.model.dart';
import 'package:immich_mobile/models/backup/current_upload_asset.model.dart';
import 'package:immich_mobile/models/backup/error_upload_asset.model.dart';
import 'package:immich_mobile/models/backup/success_upload_asset.model.dart';
@@ -20,7 +19,6 @@ import 'package:immich_mobile/providers/db.provider.dart';
import 'package:immich_mobile/services/album.service.dart';
import 'package:immich_mobile/services/api.service.dart';
import 'package:immich_mobile/services/app_settings.service.dart';
import 'package:immich_mobile/services/hash.service.dart';
import 'package:isar/isar.dart';
import 'package:logging/logging.dart';
import 'package:openapi/api.dart';
@@ -34,7 +32,6 @@ final backupServiceProvider = Provider(
ref.watch(dbProvider),
ref.watch(appSettingsServiceProvider),
ref.watch(albumServiceProvider),
ref.watch(hashServiceProvider),
),
);
@@ -45,71 +42,14 @@ class BackupService {
final Logger _log = Logger("BackupService");
final AppSettingsService _appSetting;
final AlbumService _albumService;
final HashService _hashService;
BackupService(
this._apiService,
this._db,
this._appSetting,
this._albumService,
this._hashService,
);
Future<BulkUploadCheckResult> checkBulkUpload(
Set<BackupCandidate> candidates,
) async {
List<AssetBulkUploadCheckItem> assets = [];
final assetEntities = candidates.map((c) => c.asset).toList();
final hashedDeviceAssets =
await _hashService.getHashedAssetsFromAssetEntity(assetEntities);
for (final hashedAsset in hashedDeviceAssets) {
final AssetBulkUploadCheckItem item = AssetBulkUploadCheckItem(
id: hashedAsset.id.toString(),
checksum: hashedAsset.checksum,
);
assets.add(item);
}
final response = await _apiService.assetsApi.checkBulkUpload(
AssetBulkUploadCheckDto(assets: assets),
);
if (response == null) {
return BulkUploadCheckResult(
rejects: [],
accepts: [],
);
}
final List<RejectResult> rejects = [];
final List<AcceptResult> accepts = [];
for (final result in response.results) {
if (result.action == AssetBulkUploadCheckResultActionEnum.reject) {
rejects.add(
RejectResult(
localId: result.id,
remoteId: result.assetId ?? "",
),
);
} else {
accepts.add(
AcceptResult(
localId: result.id,
),
);
}
}
return BulkUploadCheckResult(
rejects: rejects,
accepts: accepts,
);
}
Future<List<String>?> getDeviceBackupAsset() async {
final String deviceId = Store.get(StoreKey.deviceId);

View File

@@ -19,20 +19,8 @@ class HashService {
final BackgroundService _backgroundService;
final _log = Logger('HashService');
Future<List<Asset>> getHashedAssetsFromAssetEntity(
List<AssetEntity> assets,
) async {
final ids = assets
.map(Platform.isAndroid ? (a) => a.id.toInt() : (a) => a.id)
.toList();
final List<DeviceAsset?> hashes = await lookupHashes(ids);
return _mapAllHashedAssets(assets, hashes);
}
/// Returns all assets that were successfully hashed
Future<List<Asset>> getHashedAssetsFromDeviceAlbum(
Future<List<Asset>> getHashedAssets(
AssetPathEntity album, {
int start = 0,
int end = 0x7fffffffffffffff,
@@ -56,7 +44,7 @@ class HashService {
final ids = assetEntities
.map(Platform.isAndroid ? (a) => a.id.toInt() : (a) => a.id)
.toList();
final List<DeviceAsset?> hashes = await lookupHashes(ids);
final List<DeviceAsset?> hashes = await _lookupHashes(ids);
final List<DeviceAsset> toAdd = [];
final List<String> toHash = [];
@@ -102,7 +90,7 @@ class HashService {
}
/// Lookup hashes of assets by their local ID
Future<List<DeviceAsset?>> lookupHashes(List<Object> ids) =>
Future<List<DeviceAsset?>> _lookupHashes(List<Object> ids) =>
Platform.isAndroid
? _db.androidDeviceAssets.getAll(ids.cast())
: _db.iOSDeviceAssets.getAllById(ids.cast());

View File

@@ -566,8 +566,8 @@ class SyncService {
.findAll();
assert(inDb.isSorted(Asset.compareByChecksum), "inDb not sorted!");
final int assetCountOnDevice = await ape.assetCountAsync;
final List<Asset> onDevice = await _hashService
.getHashedAssetsFromDeviceAlbum(ape, excludedAssets: excludedAssets);
final List<Asset> onDevice =
await _hashService.getHashedAssets(ape, excludedAssets: excludedAssets);
_removeDuplicates(onDevice);
// _removeDuplicates sorts `onDevice` by checksum
final (toAdd, toUpdate, toDelete) = _diffAssets(onDevice, inDb);
@@ -649,8 +649,7 @@ class SyncService {
if (modified == null) {
return false;
}
final List<Asset> newAssets =
await _hashService.getHashedAssetsFromDeviceAlbum(modified);
final List<Asset> newAssets = await _hashService.getHashedAssets(modified);
if (totalOnDevice != lastKnownTotal + newAssets.length) {
return false;
@@ -684,8 +683,8 @@ class SyncService {
]) async {
_log.info("Syncing a new local album to DB: ${ape.name}");
final Album a = Album.local(ape);
final assets = await _hashService.getHashedAssetsFromDeviceAlbum(ape,
excludedAssets: excludedAssets);
final assets =
await _hashService.getHashedAssets(ape, excludedAssets: excludedAssets);
_removeDuplicates(assets);
final (existingInDb, updated) = await _linkWithExistingFromDb(assets);
_log.info(

View File

@@ -44,7 +44,8 @@ export const ASSET_CHECKSUM_CONSTRAINT = 'UQ_assets_owner_checksum';
@Index('IDX_originalPath_libraryId', ['originalPath', 'libraryId'])
@Index('IDX_asset_id_stackId', ['id', 'stackId'])
@Index('idx_originalFileName_trigram', { synchronize: false })
// For all assets, each originalpath must be unique per user and library
@Index('idx_originalPath_trigram', { synchronize: false })
// For all assets, each originalPath must be unique per user and library
export class AssetEntity {
@PrimaryGeneratedColumn('uuid')
id!: string;

View File

@@ -0,0 +1,13 @@
import { MigrationInterface, QueryRunner } from "typeorm";
export class AddAssetOriginalPathTrigramIndex1724231348454 implements MigrationInterface {
public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(`CREATE INDEX idx_originalPath_trigram ON assets USING gin (f_unaccent("originalPath") gin_trgm_ops)`);
}
public async down(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(`DROP INDEX "idx_originalPath_trigram"`);
}
}
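For context, a minimal sketch (not part of this change set) of the kind of statement this GIN trigram index is meant to accelerate; it mirrors the f_unaccent(...) ILIKE f_unaccent(...) pattern added to the search query builder further down. The DataSource wiring and helper name are assumptions for illustration only.

import { DataSource } from 'typeorm';

// Hypothetical helper: runs the same shape of query that searchAssetBuilder
// issues for `originalPath`, which the trigram index above speeds up.
export function findAssetsByOriginalPathInfix(dataSource: DataSource, infix: string) {
  return dataSource.query(
    `SELECT "id", "originalPath"
       FROM "assets"
      WHERE f_unaccent("originalPath") ILIKE f_unaccent($1)`,
    [`%${infix}%`], // infix match, e.g. 'me-61498-di' from the e2e test above
  );
}

Note that the corresponding @Index decorator uses { synchronize: false }, so TypeORM does not try to manage this expression index itself and the migration above remains its single source of truth.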

View File

@@ -75,7 +75,6 @@ FROM
"asset"."fileCreatedAt" >= $1
AND "exifInfo"."lensModel" = $2
AND 1 = 1
AND 1 = 1
AND (
"asset"."isFavorite" = $3
AND "asset"."isArchived" = $4
@@ -169,7 +168,6 @@ WHERE
"asset"."fileCreatedAt" >= $1
AND "exifInfo"."lensModel" = $2
AND 1 = 1
AND 1 = 1
AND (
"asset"."isFavorite" = $3
AND "asset"."isArchived" = $4

View File

@@ -1,4 +1,4 @@
import { BinaryField, ExifDateTime } from 'exiftool-vendored';
import { BinaryField } from 'exiftool-vendored';
import { randomBytes } from 'node:crypto';
import { Stats } from 'node:fs';
import { constants } from 'node:fs/promises';
@@ -746,8 +746,6 @@ describe(MetadataService.name, () => {
});
it('should save all metadata', async () => {
const dateForTest = new Date('1970-01-01T00:00:00.000-11:30');
const tags: ImmichTags = {
BitsPerSample: 1,
ComponentBitDepth: 1,
@@ -755,7 +753,7 @@ describe(MetadataService.name, () => {
BitDepth: 1,
ColorBitDepth: 1,
ColorSpace: '1',
DateTimeOriginal: ExifDateTime.fromISO(dateForTest.toISOString()),
DateTimeOriginal: new Date('1970-01-01').toISOString(),
ExposureTime: '100ms',
FocalLength: 20,
ImageDescription: 'test description',
@@ -764,11 +762,11 @@ describe(MetadataService.name, () => {
MediaGroupUUID: 'livePhoto',
Make: 'test-factory',
Model: "'mockel'",
ModifyDate: ExifDateTime.fromISO(dateForTest.toISOString()),
ModifyDate: new Date('1970-01-01').toISOString(),
Orientation: 0,
ProfileDescription: 'extensive description',
ProjectionType: 'equirectangular',
tz: 'UTC-11:30',
tz: '+02:00',
Rating: 3,
};
assetMock.getByIds.mockResolvedValue([assetStub.image]);
@@ -781,7 +779,7 @@ describe(MetadataService.name, () => {
bitsPerSample: expect.any(Number),
autoStackId: null,
colorspace: tags.ColorSpace,
dateTimeOriginal: dateForTest,
dateTimeOriginal: new Date('1970-01-01'),
description: tags.ImageDescription,
exifImageHeight: null,
exifImageWidth: null,
@@ -807,37 +805,11 @@ describe(MetadataService.name, () => {
expect(assetMock.update).toHaveBeenCalledWith({
id: assetStub.image.id,
duration: null,
fileCreatedAt: dateForTest,
localDateTime: dateForTest,
fileCreatedAt: new Date('1970-01-01'),
localDateTime: new Date('1970-01-01'),
});
});
it('should extract +00:00 timezone from raw value', async () => {
// exiftool-vendored returns "no timezone" information even though "+00:00" might be set explicitly
// https://github.com/photostructure/exiftool-vendored.js/issues/203
// this only tests our assumptions of exiftool-vendored, demonstrating the issue
const someDate = '2024-09-01T00:00:00.000';
expect(ExifDateTime.fromISO(someDate + 'Z')?.zone).toBe('UTC');
expect(ExifDateTime.fromISO(someDate + '+00:00')?.zone).toBe('UTC'); // this is the issue, should be UTC+0
expect(ExifDateTime.fromISO(someDate + '+04:00')?.zone).toBe('UTC+4');
const tags: ImmichTags = {
DateTimeOriginal: ExifDateTime.fromISO(someDate + '+00:00'),
tz: undefined,
};
assetMock.getByIds.mockResolvedValue([assetStub.image]);
metadataMock.readTags.mockResolvedValue(tags);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id]);
expect(assetMock.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
timeZone: 'UTC+0',
}),
);
});
it('should extract duration', async () => {
assetMock.getByIds.mockResolvedValue([{ ...assetStub.video }]);
mediaMock.probe.mockResolvedValue({

View File

@@ -531,16 +531,12 @@ export class MetadataService {
this.logger.verbose('Exif Tags', exifTags);
const dateTimeOriginalWithRawValue = this.getDateTimeOriginalWithRawValue(exifTags);
const dateTimeOriginal = dateTimeOriginalWithRawValue.exifDate ?? asset.fileCreatedAt;
const timeZone = this.getTimeZone(exifTags, dateTimeOriginalWithRawValue.rawValue);
const exifData = {
// altitude: tags.GPSAltitude ?? null,
assetId: asset.id,
bitsPerSample: this.getBitsPerSample(exifTags),
colorspace: exifTags.ColorSpace ?? null,
dateTimeOriginal,
dateTimeOriginal: this.getDateTimeOriginal(exifTags) ?? asset.fileCreatedAt,
description: String(exifTags.ImageDescription || exifTags.Description || '').trim(),
exifImageHeight: validate(exifTags.ImageHeight),
exifImageWidth: validate(exifTags.ImageWidth),
@@ -561,7 +557,7 @@ export class MetadataService {
orientation: validate(exifTags.Orientation)?.toString() ?? null,
profileDescription: exifTags.ProfileDescription || null,
projectionType: exifTags.ProjectionType ? String(exifTags.ProjectionType).toUpperCase() : null,
timeZone,
timeZone: exifTags.tz ?? null,
rating: exifTags.Rating ?? null,
};
@@ -582,25 +578,10 @@ export class MetadataService {
}
private getDateTimeOriginal(tags: ImmichTags | Tags | null) {
return this.getDateTimeOriginalWithRawValue(tags).exifDate;
}
private getDateTimeOriginalWithRawValue(tags: ImmichTags | Tags | null): { exifDate: Date | null; rawValue: string } {
if (!tags) {
return { exifDate: null, rawValue: '' };
return null;
}
const first = firstDateTime(tags as Tags, EXIF_DATE_TAGS);
return { exifDate: exifDate(first), rawValue: first?.rawValue ?? '' };
}
private getTimeZone(exifTags: ImmichTags, rawValue: string) {
const timeZone = exifTags.tz ?? null;
if (timeZone == null && rawValue.endsWith('+00:00')) {
// exiftool-vendored returns "no timezone" information even though "+00:00" might be set explicitly
// https://github.com/photostructure/exiftool-vendored.js/issues/203
return 'UTC+0';
}
return timeZone;
return exifDate(firstDateTime(tags as Tags, EXIF_DATE_TAGS));
}
private getBitsPerSample(tags: ImmichTags): number | null {

View File

@@ -71,8 +71,15 @@ export function searchAssetBuilder(
builder.andWhere(`${builder.alias}.ownerId IN (:...userIds)`, { userIds: options.userIds });
}
const path = _.pick(options, ['encodedVideoPath', 'originalPath']);
builder.andWhere(_.omitBy(path, _.isUndefined));
if (options.encodedVideoPath) {
builder.andWhere({ encodedVideoPath: options.encodedVideoPath });
}
if (options.originalPath) {
builder.andWhere(`f_unaccent(${builder.alias}.originalPath) ILIKE f_unaccent(:originalPath)`, {
originalPath: `%${options.originalPath}%`,
});
}
if (options.originalFileName) {
builder.andWhere(`f_unaccent(${builder.alias}.originalFileName) ILIKE f_unaccent(:originalFileName)`, {

View File

@@ -10,25 +10,18 @@
type ZoneOption = {
/**
* Timezone name with offset
* Timezone name
*
* e.g. Asia/Jerusalem (+03:00)
*/
label: string;
/**
* Timezone name
* Timezone offset
*
* e.g. Asia/Jerusalem
* e.g. UTC+01:00
*/
value: string;
/**
* Timezone offset in minutes
*
* e.g. 300
*/
offsetMinutes: number;
};
const timezones: ZoneOption[] = Intl.supportedValuesOf('timeZone')
@@ -44,23 +37,21 @@
const offset = zone.toFormat('ZZ');
return {
label: `${zone.zoneName} (${offset})`,
value: zone.zoneName,
offsetMinutes: zone.offset,
value: 'UTC' + offset,
};
});
const initialOption = timezones.find((item) => item.offsetMinutes === initialDate.offset);
const initialOption = timezones.find((item) => item.value === 'UTC' + initialDate.toFormat('ZZ'));
let selectedOption = initialOption && {
label: initialOption?.label || '',
offsetMinutes: initialOption?.offsetMinutes || 0,
value: initialOption?.value || '',
};
let selectedDate = initialDate.toFormat("yyyy-MM-dd'T'HH:mm");
// when changing the time zone, assume the configured date/time is meant for that time zone (instead of updating it)
$: date = DateTime.fromISO(selectedDate, { zone: selectedOption?.value, setZone: true });
// Keep local time if not it's really confusing
$: date = DateTime.fromISO(selectedDate).setZone(selectedOption?.value, { keepLocalTime: true });
const dispatch = createEventDispatcher<{
cancel: void;
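To make the contrast between the two reactive statements concrete, a small luxon sketch (not component code) of what keepLocalTime does when switching zones; the dates and zones are arbitrary examples.

import { DateTime } from 'luxon';

// Illustrative only.
const entered = DateTime.fromISO('2024-09-01T10:00'); // parsed in the system zone
// keepLocalTime: the wall-clock time stays 10:00, now interpreted in UTC+02:00
const sameWallClock = entered.setZone('UTC+02:00', { keepLocalTime: true });
// default setZone: the instant stays the same, only the displayed offset shifts
const sameInstant = entered.setZone('UTC+02:00');

console.log(sameWallClock.toISO()); // 2024-09-01T10:00:00.000+02:00
console.log(sameInstant.toISO());   // the original instant, re-expressed in UTC+02:00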

View File

@@ -284,7 +284,6 @@ export const langs = [
{ name: 'Lithuanian', code: 'lt', loader: () => import('$lib/i18n/lt.json') },
{ name: 'Latvian', code: 'lv', loader: () => import('$lib/i18n/lv.json') },
{ name: 'Mongolian', code: 'mn', loader: () => import('$lib/i18n/mn.json') },
{ name: 'Malay', code: 'ms', loader: () => import('$lib/i18n/ms.json') },
{ name: 'Norwegian Bokmål', code: 'nb-NO', weblateCode: 'nb_NO', loader: () => import('$lib/i18n/nb_NO.json') },
{ name: 'Dutch', code: 'nl', loader: () => import('$lib/i18n/nl.json') },
{ name: 'Polish', code: 'pl', loader: () => import('$lib/i18n/pl.json') },

View File

@@ -1 +0,0 @@
{}