Compare commits: v1.131.3...fix/server

4 Commits

| Author | SHA1 | Date |
|---|---|---|
| | b803a35bdc | |
| | 0da1c3b279 | |
| | 33c9ea1c9c | |
| | e7503ce3dc | |
cli/package-lock.json (generated, 6 lines changed)

@@ -1,12 +1,12 @@
 {
   "name": "@immich/cli",
-  "version": "2.2.61",
+  "version": "2.2.60",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "@immich/cli",
-      "version": "2.2.61",
+      "version": "2.2.60",
       "license": "GNU Affero General Public License version 3",
       "dependencies": {
         "chokidar": "^4.0.3",
@@ -54,7 +54,7 @@
     },
     "../open-api/typescript-sdk": {
       "name": "@immich/sdk",
-      "version": "1.131.3",
+      "version": "1.131.2",
       "dev": true,
       "license": "GNU Affero General Public License version 3",
       "dependencies": {

@@ -1,6 +1,6 @@
 {
   "name": "@immich/cli",
-  "version": "2.2.61",
+  "version": "2.2.60",
   "description": "Command Line Interface (CLI) for Immich",
   "type": "module",
   "exports": "./dist/index.js",
docs/static/archived-versions.json (vendored, 4 lines changed)

@@ -1,8 +1,4 @@
 [
   {
-    "label": "v1.131.3",
-    "url": "https://v1.131.3.archive.immich.app"
-  },
-  {
     "label": "v1.131.2",
     "url": "https://v1.131.2.archive.immich.app"
e2e/package-lock.json (generated, 8 lines changed)

@@ -1,12 +1,12 @@
 {
   "name": "immich-e2e",
-  "version": "1.131.3",
+  "version": "1.131.2",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "immich-e2e",
-      "version": "1.131.3",
+      "version": "1.131.2",
       "license": "GNU Affero General Public License version 3",
       "devDependencies": {
         "@eslint/eslintrc": "^3.1.0",
@@ -44,7 +44,7 @@
     },
     "../cli": {
       "name": "@immich/cli",
-      "version": "2.2.61",
+      "version": "2.2.60",
       "dev": true,
       "license": "GNU Affero General Public License version 3",
       "dependencies": {
@@ -93,7 +93,7 @@
     },
     "../open-api/typescript-sdk": {
       "name": "@immich/sdk",
-      "version": "1.131.3",
+      "version": "1.131.2",
       "dev": true,
       "license": "GNU Affero General Public License version 3",
       "dependencies": {

@@ -1,6 +1,6 @@
 {
   "name": "immich-e2e",
-  "version": "1.131.3",
+  "version": "1.131.2",
   "description": "",
   "main": "index.js",
   "type": "module",
@@ -35,8 +35,8 @@ platform :android do
       task: 'bundle',
       build_type: 'Release',
       properties: {
-        "android.injected.version.code" => 193,
-        "android.injected.version.name" => "1.131.3",
+        "android.injected.version.code" => 192,
+        "android.injected.version.name" => "1.131.2",
       }
     )
     upload_to_play_store(skip_upload_apk: true, skip_upload_images: true, skip_upload_screenshots: true, aab: '../build/app/outputs/bundle/release/app-release.aab')

@@ -19,7 +19,7 @@ platform :ios do
   desc "iOS Release"
   lane :release do
     increment_version_number(
-      version_number: "1.131.3"
+      version_number: "1.131.2"
     )
     increment_build_number(
       build_number: latest_testflight_build_number + 1,
mobile/openapi/README.md (generated, 2 lines changed)

@@ -3,7 +3,7 @@ Immich API
 
 This Dart package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project:
 
-- API version: 1.131.3
+- API version: 1.131.2
 - Generator version: 7.8.0
 - Build package: org.openapitools.codegen.languages.DartClientCodegen

@@ -2,7 +2,7 @@ name: immich_mobile
 description: Immich - selfhosted backup media file on mobile phone
 
 publish_to: 'none'
-version: 1.131.3+193
+version: 1.131.2+192
 
 environment:
   sdk: '>=3.3.0 <4.0.0'
@@ -7656,7 +7656,7 @@
   "info": {
     "title": "Immich",
     "description": "Immich API",
-    "version": "1.131.3",
+    "version": "1.131.2",
     "contact": {}
   },
   "tags": [],
open-api/typescript-sdk/package-lock.json (generated, 4 lines changed)

@@ -1,12 +1,12 @@
 {
   "name": "@immich/sdk",
-  "version": "1.131.3",
+  "version": "1.131.2",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "@immich/sdk",
-      "version": "1.131.3",
+      "version": "1.131.2",
       "license": "GNU Affero General Public License version 3",
       "dependencies": {
         "@oazapfts/runtime": "^1.0.2"

@@ -1,6 +1,6 @@
 {
   "name": "@immich/sdk",
-  "version": "1.131.3",
+  "version": "1.131.2",
   "description": "Auto-generated TypeScript SDK for the Immich API",
   "type": "module",
   "main": "./build/index.js",

@@ -1,6 +1,6 @@
 /**
  * Immich
- * 1.131.3
+ * 1.131.2
  * DO NOT MODIFY - This file has been generated using oazapfts.
  * See https://www.npmjs.com/package/oazapfts
  */
server/package-lock.json (generated, 4 lines changed)

@@ -1,12 +1,12 @@
 {
   "name": "immich",
-  "version": "1.131.3",
+  "version": "1.131.2",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "immich",
-      "version": "1.131.3",
+      "version": "1.131.2",
       "hasInstallScript": true,
       "license": "GNU Affero General Public License version 3",
       "dependencies": {

@@ -1,6 +1,6 @@
 {
   "name": "immich",
-  "version": "1.131.3",
+  "version": "1.131.2",
   "description": "",
   "author": "",
   "private": true,
@@ -1,5 +1,5 @@
 import { randomUUID } from 'node:crypto';
-import { dirname, join, resolve } from 'node:path';
+import path, { dirname, join, resolve } from 'node:path';
 import { APP_MEDIA_LOCATION } from 'src/constants';
 import { AssetEntity } from 'src/entities/asset.entity';
 import { PersonEntity } from 'src/entities/person.entity';
@@ -115,18 +115,21 @@ export class StorageCore {
     return normalizedPath.startsWith(normalizedAppMediaLocation);
   }
 
-  async moveAssetImage(asset: AssetEntity, pathType: GeneratedImageType, format: ImageFormat) {
-    const { id: entityId, files } = asset;
-    const oldFile = getAssetFile(files, pathType);
+  moveAssetImage(asset: AssetEntity, pathType: GeneratedImageType) {
+    const oldFile = getAssetFile(asset.files, pathType);
+    if (!oldFile?.path) {
+      return;
+    }
+
     return this.moveFile({
-      entityId,
+      entityId: asset.id,
       pathType,
-      oldPath: oldFile?.path || null,
-      newPath: StorageCore.getImagePath(asset, pathType, format),
+      oldPath: oldFile.path,
+      newPath: StorageCore.getImagePath(asset, pathType, path.extname(oldFile.path).slice(1) as ImageFormat),
     });
   }
 
-  async moveAssetVideo(asset: AssetEntity) {
+  moveAssetVideo(asset: AssetEntity) {
    return this.moveFile({
      entityId: asset.id,
      pathType: AssetPathType.ENCODED_VIDEO,
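The change above drops the `format` parameter from `moveAssetImage`: instead of renaming generated images to the configured output format, the migration keeps whatever format the file on disk already has, derived from its extension. A minimal sketch of that idea, with `ImageFormat` simplified here to a string union (the project uses its own enum):

```ts
import path from 'node:path';

// Simplified stand-in for the project's ImageFormat enum (assumption).
type ImageFormat = 'jpeg' | 'jpg' | 'png' | 'webp';

// Derive the format from the file that already exists on disk,
// mirroring the path.extname(...).slice(1) expression in the diff.
const formatFromExistingFile = (filePath: string): ImageFormat =>
  path.extname(filePath).slice(1) as ImageFormat;

console.log(formatFromExistingFile('/uploads/user-id/thumbs/path.webp')); // 'webp'
```

This matches the updated expectations in the media service spec further down, where `path.webp` now migrates to `asset-id-fullsize.webp` instead of being renamed to `.jpeg`.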
@@ -32,7 +32,47 @@ where
   "asset_stack"."ownerId" = $1
 
 -- StackRepository.delete
-delete from "asset_stack"
+select
+  *,
+  (
+    select
+      coalesce(json_agg(agg), '[]')
+    from
+      (
+        select
+          "assets".*,
+          (
+            select
+              coalesce(json_agg(agg), '[]')
+            from
+              (
+                select
+                  "tags".*
+                from
+                  "tags"
+                  inner join "tag_asset" on "tags"."id" = "tag_asset"."tagsId"
+                where
+                  "tag_asset"."assetsId" = "assets"."id"
+              ) as agg
+          ) as "tags",
+          to_json("exifInfo") as "exifInfo"
+        from
+          "assets"
+          inner join lateral (
+            select
+              "exif".*
+            from
+              "exif"
+            where
+              "exif"."assetId" = "assets"."id"
+          ) as "exifInfo" on true
+        where
+          "assets"."deletedAt" is null
+          and "assets"."stackId" = "asset_stack"."id"
+      ) as agg
+  ) as "assets"
+from
+  "asset_stack"
 where
   "id" = $1::uuid
@@ -122,11 +122,38 @@ export class StackRepository {
 
   @GenerateSql({ params: [DummyValue.UUID] })
   async delete(id: string): Promise<void> {
+    const stack = await this.getById(id);
+    if (!stack) {
+      return;
+    }
+
+    const assetIds = stack.assets.map(({ id }) => id);
+
     await this.db.deleteFrom('asset_stack').where('id', '=', asUuid(id)).execute();
+    await this.db
+      .updateTable('assets')
+      .set({ stackId: null, updatedAt: new Date() })
+      .where('id', 'in', assetIds)
+      .execute();
   }
 
   async deleteAll(ids: string[]): Promise<void> {
     await this.db.deleteFrom('asset_stack').where('id', 'in', ids).execute();
+    const assetIds = [];
+    for (const id of ids) {
+      const stack = await this.getById(id);
+      if (!stack) {
+        continue;
+      }
+
+      assetIds.push(...stack.assets.map(({ id }) => id));
+    }
+
+    await this.db
+      .updateTable('assets')
+      .set({ updatedAt: new Date(), stackId: null })
+      .where('id', 'in', assetIds)
+      .where('stackId', 'in', ids)
+      .execute();
   }
 
   update(id: string, entity: Updateable<StackEntity>): Promise<StackEntity> {
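Condensed, the added repository logic loads the stack first, deletes the stack row, and then detaches the member assets while bumping their `updatedAt`. A rough sketch of that flow under those assumptions, with a hypothetical `getById` callback standing in for the repository's existing lookup:

```ts
import { Kysely } from 'kysely';

// Sketch only: the untyped schema and the getById callback are stand-ins for
// the repository's injected Kysely instance and its existing helper.
type StackLookup = (id: string) => Promise<{ assets: { id: string }[] } | undefined>;

async function deleteStack(db: Kysely<any>, getById: StackLookup, id: string): Promise<void> {
  const stack = await getById(id);
  if (!stack) {
    return;
  }

  const assetIds = stack.assets.map(({ id }) => id);

  // Remove the stack row itself.
  await db.deleteFrom('asset_stack').where('id', '=', id).execute();

  // Detach the member assets and bump updatedAt, mirroring the added code above.
  await db
    .updateTable('assets')
    .set({ stackId: null, updatedAt: new Date() })
    .where('id', 'in', assetIds)
    .execute();
}
```

The generated SQL snapshot above corresponds to the extra select that now runs before the delete.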
@@ -238,19 +238,19 @@ describe(MediaService.name, () => {
         entityId: assetStub.image.id,
         pathType: AssetPathType.FULLSIZE,
         oldPath: '/uploads/user-id/fullsize/path.webp',
-        newPath: 'upload/thumbs/user-id/as/se/asset-id-fullsize.jpeg',
+        newPath: 'upload/thumbs/user-id/as/se/asset-id-fullsize.webp',
       });
       expect(mocks.move.create).toHaveBeenCalledWith({
         entityId: assetStub.image.id,
         pathType: AssetPathType.PREVIEW,
         oldPath: '/uploads/user-id/thumbs/path.jpg',
-        newPath: 'upload/thumbs/user-id/as/se/asset-id-preview.jpeg',
+        newPath: 'upload/thumbs/user-id/as/se/asset-id-preview.jpg',
       });
       expect(mocks.move.create).toHaveBeenCalledWith({
         entityId: assetStub.image.id,
         pathType: AssetPathType.THUMBNAIL,
         oldPath: '/uploads/user-id/webp/path.ext',
-        newPath: 'upload/thumbs/user-id/as/se/asset-id-thumbnail.webp',
+        newPath: 'upload/thumbs/user-id/as/se/asset-id-thumbnail.ext',
       });
       expect(mocks.move.create).toHaveBeenCalledTimes(3);
     });
@@ -134,15 +134,14 @@ export class MediaService extends BaseService {
 
   @OnJob({ name: JobName.MIGRATE_ASSET, queue: QueueName.MIGRATION })
   async handleAssetMigration({ id }: JobOf<JobName.MIGRATE_ASSET>): Promise<JobStatus> {
-    const { image } = await this.getConfig({ withCache: true });
     const [asset] = await this.assetRepository.getByIds([id], { files: true });
     if (!asset) {
       return JobStatus.FAILED;
     }
 
-    await this.storageCore.moveAssetImage(asset, AssetPathType.FULLSIZE, image.fullsize.format);
-    await this.storageCore.moveAssetImage(asset, AssetPathType.PREVIEW, image.preview.format);
-    await this.storageCore.moveAssetImage(asset, AssetPathType.THUMBNAIL, image.thumbnail.format);
+    await this.storageCore.moveAssetImage(asset, AssetPathType.FULLSIZE);
+    await this.storageCore.moveAssetImage(asset, AssetPathType.PREVIEW);
+    await this.storageCore.moveAssetImage(asset, AssetPathType.THUMBNAIL);
     await this.storageCore.moveAssetVideo(asset);
 
     return JobStatus.SUCCESS;
@@ -1,6 +1,5 @@
 import { BinaryField, ExifDateTime } from 'exiftool-vendored';
 import { randomBytes } from 'node:crypto';
-import { Stats } from 'node:fs';
 import { constants } from 'node:fs/promises';
 import { defaults } from 'src/config';
 import { AssetEntity } from 'src/entities/asset.entity';
@@ -22,8 +21,14 @@ describe(MetadataService.name, () => {
   let mocks: ServiceMocks;
 
   const mockReadTags = (exifData?: Partial<ImmichTags>, sidecarData?: Partial<ImmichTags>) => {
+    exifData = {
+      FileSize: '123456',
+      FileCreateDate: '2024-01-01T00:00:00.000Z',
+      FileModifyDate: '2024-01-01T00:00:00.000Z',
+      ...exifData,
+    };
     mocks.metadata.readTags.mockReset();
-    mocks.metadata.readTags.mockResolvedValueOnce(exifData ?? {});
+    mocks.metadata.readTags.mockResolvedValueOnce(exifData);
     mocks.metadata.readTags.mockResolvedValueOnce(sidecarData ?? {});
   };
@@ -109,17 +114,6 @@ describe(MetadataService.name, () => {
   });
 
   describe('handleMetadataExtraction', () => {
-    beforeEach(() => {
-      const time = new Date('2022-01-01T00:00:00.000Z');
-      const timeMs = time.valueOf();
-      mocks.storage.stat.mockResolvedValue({
-        size: 123_456,
-        mtime: time,
-        mtimeMs: timeMs,
-        birthtimeMs: timeMs,
-      } as Stats);
-    });
-
     it('should handle an asset that could not be found', async () => {
       await expect(sut.handleMetadataExtraction({ id: assetStub.image.id })).resolves.toBe(JobStatus.FAILED);
@@ -151,13 +145,10 @@ describe(MetadataService.name, () => {
       const fileCreatedAt = new Date('2022-01-01T00:00:00.000Z');
       const fileModifiedAt = new Date('2021-01-01T00:00:00.000Z');
       mocks.asset.getByIds.mockResolvedValue([assetStub.image]);
-      mocks.storage.stat.mockResolvedValue({
-        size: 123_456,
-        mtime: fileModifiedAt,
-        mtimeMs: fileModifiedAt.valueOf(),
-        birthtimeMs: fileCreatedAt.valueOf(),
-      } as Stats);
-      mockReadTags();
+      mockReadTags({
+        FileCreateDate: fileCreatedAt.toISOString(),
+        FileModifyDate: fileModifiedAt.toISOString(),
+      });
 
       await sut.handleMetadataExtraction({ id: assetStub.image.id });
       expect(mocks.asset.getByIds).toHaveBeenCalledWith([assetStub.image.id], { faces: { person: false } });
@@ -177,13 +168,10 @@ describe(MetadataService.name, () => {
       const fileCreatedAt = new Date('2021-01-01T00:00:00.000Z');
       const fileModifiedAt = new Date('2022-01-01T00:00:00.000Z');
      mocks.asset.getByIds.mockResolvedValue([assetStub.image]);
-      mocks.storage.stat.mockResolvedValue({
-        size: 123_456,
-        mtime: fileModifiedAt,
-        mtimeMs: fileModifiedAt.valueOf(),
-        birthtimeMs: fileCreatedAt.valueOf(),
-      } as Stats);
-      mockReadTags();
+      mockReadTags({
+        FileCreateDate: fileCreatedAt.toISOString(),
+        FileModifyDate: fileModifiedAt.toISOString(),
+      });
 
       await sut.handleMetadataExtraction({ id: assetStub.image.id });
       expect(mocks.asset.getByIds).toHaveBeenCalledWith([assetStub.image.id], { faces: { person: false } });
@@ -218,14 +206,10 @@ describe(MetadataService.name, () => {
 
     it('should handle lists of numbers', async () => {
       mocks.asset.getByIds.mockResolvedValue([assetStub.image]);
-      mocks.storage.stat.mockResolvedValue({
-        size: 123_456,
-        mtime: assetStub.image.fileModifiedAt,
-        mtimeMs: assetStub.image.fileModifiedAt.valueOf(),
-        birthtimeMs: assetStub.image.fileCreatedAt.valueOf(),
-      } as Stats);
       mockReadTags({
         ISO: [160],
+        FileCreateDate: assetStub.image.fileCreatedAt.toISOString(),
+        FileModifyDate: assetStub.image.fileModifiedAt.toISOString(),
       });
 
       await sut.handleMetadataExtraction({ id: assetStub.image.id });
@@ -244,15 +228,11 @@ describe(MetadataService.name, () => {
       mocks.asset.getByIds.mockResolvedValue([assetStub.withLocation]);
       mocks.systemMetadata.get.mockResolvedValue({ reverseGeocoding: { enabled: true } });
       mocks.map.reverseGeocode.mockResolvedValue({ city: 'City', state: 'State', country: 'Country' });
-      mocks.storage.stat.mockResolvedValue({
-        size: 123_456,
-        mtime: assetStub.withLocation.fileModifiedAt,
-        mtimeMs: assetStub.withLocation.fileModifiedAt.valueOf(),
-        birthtimeMs: assetStub.withLocation.fileCreatedAt.valueOf(),
-      } as Stats);
       mockReadTags({
         GPSLatitude: assetStub.withLocation.exifInfo!.latitude!,
         GPSLongitude: assetStub.withLocation.exifInfo!.longitude!,
+        FileCreateDate: assetStub.withLocation.fileCreatedAt.toISOString(),
+        FileModifyDate: assetStub.withLocation.fileModifiedAt.toISOString(),
       });
 
       await sut.handleMetadataExtraction({ id: assetStub.image.id });
@@ -495,12 +475,6 @@ describe(MetadataService.name, () => {
 
     it('should extract the MotionPhotoVideo tag from Samsung HEIC motion photos', async () => {
       mocks.asset.getByIds.mockResolvedValue([{ ...assetStub.livePhotoWithOriginalFileName, livePhotoVideoId: null }]);
-      mocks.storage.stat.mockResolvedValue({
-        size: 123_456,
-        mtime: assetStub.livePhotoWithOriginalFileName.fileModifiedAt,
-        mtimeMs: assetStub.livePhotoWithOriginalFileName.fileModifiedAt.valueOf(),
-        birthtimeMs: assetStub.livePhotoWithOriginalFileName.fileCreatedAt.valueOf(),
-      } as Stats);
       mockReadTags({
         Directory: 'foo/bar/',
         MotionPhoto: 1,
@@ -509,6 +483,8 @@ describe(MetadataService.name, () => {
         // instead of the EmbeddedVideoFile, since HEIC MotionPhotos include both
         EmbeddedVideoFile: new BinaryField(0, ''),
         EmbeddedVideoType: 'MotionPhoto_Data',
+        FileCreateDate: assetStub.livePhotoWithOriginalFileName.fileCreatedAt.toISOString(),
+        FileModifyDate: assetStub.livePhotoWithOriginalFileName.fileModifiedAt.toISOString(),
       });
       mocks.crypto.hashSha1.mockReturnValue(randomBytes(512));
       mocks.asset.create.mockResolvedValue(assetStub.livePhotoMotionAsset);
@@ -549,18 +525,14 @@ describe(MetadataService.name, () => {
     });
 
     it('should extract the EmbeddedVideo tag from Samsung JPEG motion photos', async () => {
-      mocks.storage.stat.mockResolvedValue({
-        size: 123_456,
-        mtime: assetStub.livePhotoWithOriginalFileName.fileModifiedAt,
-        mtimeMs: assetStub.livePhotoWithOriginalFileName.fileModifiedAt.valueOf(),
-        birthtimeMs: assetStub.livePhotoWithOriginalFileName.fileCreatedAt.valueOf(),
-      } as Stats);
       mocks.asset.getByIds.mockResolvedValue([{ ...assetStub.livePhotoWithOriginalFileName, livePhotoVideoId: null }]);
       mockReadTags({
         Directory: 'foo/bar/',
         EmbeddedVideoFile: new BinaryField(0, ''),
         EmbeddedVideoType: 'MotionPhoto_Data',
         MotionPhoto: 1,
+        FileCreateDate: assetStub.livePhotoWithOriginalFileName.fileCreatedAt.toISOString(),
+        FileModifyDate: assetStub.livePhotoWithOriginalFileName.fileModifiedAt.toISOString(),
       });
       mocks.crypto.hashSha1.mockReturnValue(randomBytes(512));
       mocks.asset.create.mockResolvedValue(assetStub.livePhotoMotionAsset);
@@ -602,17 +574,13 @@ describe(MetadataService.name, () => {
 
     it('should extract the motion photo video from the XMP directory entry ', async () => {
       mocks.asset.getByIds.mockResolvedValue([{ ...assetStub.livePhotoWithOriginalFileName, livePhotoVideoId: null }]);
-      mocks.storage.stat.mockResolvedValue({
-        size: 123_456,
-        mtime: assetStub.livePhotoWithOriginalFileName.fileModifiedAt,
-        mtimeMs: assetStub.livePhotoWithOriginalFileName.fileModifiedAt.valueOf(),
-        birthtimeMs: assetStub.livePhotoWithOriginalFileName.fileCreatedAt.valueOf(),
-      } as Stats);
       mockReadTags({
         Directory: 'foo/bar/',
         MotionPhoto: 1,
         MicroVideo: 1,
         MicroVideoOffset: 1,
+        FileCreateDate: assetStub.livePhotoWithOriginalFileName.fileCreatedAt.toISOString(),
+        FileModifyDate: assetStub.livePhotoWithOriginalFileName.fileModifiedAt.toISOString(),
       });
       mocks.crypto.hashSha1.mockReturnValue(randomBytes(512));
       mocks.asset.create.mockResolvedValue(assetStub.livePhotoMotionAsset);
@@ -1,10 +1,9 @@
 import { Injectable } from '@nestjs/common';
-import { ContainerDirectoryItem, Maybe, Tags } from 'exiftool-vendored';
+import { ContainerDirectoryItem, ExifDateTime, Maybe, Tags } from 'exiftool-vendored';
 import { firstDateTime } from 'exiftool-vendored/dist/FirstDateTime';
 import { Insertable } from 'kysely';
 import _ from 'lodash';
 import { Duration } from 'luxon';
-import { Stats } from 'node:fs';
 import { constants } from 'node:fs/promises';
 import path from 'node:path';
 import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
@@ -78,11 +77,6 @@ const validateRange = (value: number | undefined, min: number, max: number): Non
 
 type ImmichTagsWithFaces = ImmichTags & { RegionInfo: NonNullable<ImmichTags['RegionInfo']> };
 
-type Dates = {
-  dateTimeOriginal: Date;
-  localDateTime: Date;
-};
-
 @Injectable()
 export class MetadataService extends BaseService {
   @OnEvent({ name: 'app.bootstrap', workers: [ImmichWorker.MICROSERVICES] })
@@ -177,13 +171,18 @@ export class MetadataService extends BaseService {
       return JobStatus.FAILED;
     }
 
-    const [exifTags, stats] = await Promise.all([
-      this.getExifTags(asset),
-      this.storageRepository.stat(asset.originalPath),
-    ]);
+    const exifTags = await this.getExifTags(asset);
+    if (!exifTags.FileCreateDate || !exifTags.FileModifyDate || exifTags.FileSize === undefined) {
+      this.logger.warn(`Missing file creation or modification date for asset ${asset.id}: ${asset.originalPath}`);
+      const stat = await this.storageRepository.stat(asset.originalPath);
+      exifTags.FileCreateDate = stat.ctime.toISOString();
+      exifTags.FileModifyDate = stat.mtime.toISOString();
+      exifTags.FileSize = stat.size.toString();
+    }
 
     this.logger.verbose('Exif Tags', exifTags);
 
-    const dates = this.getDates(asset, exifTags, stats);
+    const { dateTimeOriginal, localDateTime, timeZone, modifyDate } = this.getDates(asset, exifTags);
 
     const { width, height } = this.getImageDimensions(exifTags);
     let geo: ReverseGeocodeResult, latitude: number | null, longitude: number | null;
@@ -201,9 +200,9 @@ export class MetadataService extends BaseService {
       assetId: asset.id,
 
       // dates
-      dateTimeOriginal: dates.dateTimeOriginal,
-      modifyDate: stats.mtime,
-      timeZone: dates.timeZone,
+      dateTimeOriginal,
+      modifyDate,
+      timeZone,
 
       // gps
       latitude,
@@ -213,7 +212,7 @@ export class MetadataService extends BaseService {
       city: geo.city,
 
       // image/file
-      fileSizeInByte: stats.size,
+      fileSizeInByte: Number.parseInt(exifTags.FileSize!),
       exifImageHeight: validate(height),
       exifImageWidth: validate(width),
       orientation: validate(exifTags.Orientation)?.toString() ?? null,
@@ -246,15 +245,15 @@ export class MetadataService extends BaseService {
       this.assetRepository.update({
         id: asset.id,
         duration: exifTags.Duration?.toString() ?? null,
-        localDateTime: dates.localDateTime,
-        fileCreatedAt: dates.dateTimeOriginal ?? undefined,
-        fileModifiedAt: stats.mtime,
+        localDateTime,
+        fileCreatedAt: exifData.dateTimeOriginal ?? undefined,
+        fileModifiedAt: exifData.modifyDate ?? undefined,
       }),
       this.applyTagList(asset, exifTags),
     ];
 
     if (this.isMotionPhoto(asset, exifTags)) {
-      promises.push(this.applyMotionPhotos(asset, exifTags, dates, stats));
+      promises.push(this.applyMotionPhotos(asset, exifTags, exifData.fileSizeInByte!));
     }
 
     if (isFaceImportEnabled(metadata) && this.hasTaggedFaces(exifTags)) {
@@ -433,7 +432,7 @@ export class MetadataService extends BaseService {
     return asset.type === AssetType.IMAGE && !!(tags.MotionPhoto || tags.MicroVideo);
   }
 
-  private async applyMotionPhotos(asset: AssetEntity, tags: ImmichTags, dates: Dates, stats: Stats) {
+  private async applyMotionPhotos(asset: AssetEntity, tags: ImmichTags, fileSize: number) {
     const isMotionPhoto = tags.MotionPhoto;
     const isMicroVideo = tags.MicroVideo;
     const videoOffset = tags.MicroVideoOffset;
@@ -467,7 +466,7 @@ export class MetadataService extends BaseService {
     this.logger.debug(`Starting motion photo video extraction for asset ${asset.id}: ${asset.originalPath}`);
 
     try {
-      const position = stats.size - length - padding;
+      const position = fileSize - length - padding;
       let video: Buffer;
       // Samsung MotionPhoto video extraction
       //     HEIC-encoded
@@ -506,12 +505,13 @@ export class MetadataService extends BaseService {
         }
       } else {
         const motionAssetId = this.cryptoRepository.randomUUID();
+        const dates = this.getDates(asset, tags);
         motionAsset = await this.assetRepository.create({
           id: motionAssetId,
           libraryId: asset.libraryId,
           type: AssetType.VIDEO,
           fileCreatedAt: dates.dateTimeOriginal,
-          fileModifiedAt: stats.mtime,
+          fileModifiedAt: dates.modifyDate,
           localDateTime: dates.localDateTime,
           checksum,
           ownerId: asset.ownerId,
@@ -634,7 +634,7 @@ export class MetadataService extends BaseService {
     }
   }
 
-  private getDates(asset: AssetEntity, exifTags: ImmichTags, stats: Stats) {
+  private getDates(asset: AssetEntity, exifTags: ImmichTags) {
     const dateTime = firstDateTime(exifTags as Maybe<Tags>, EXIF_DATE_TAGS);
     this.logger.verbose(`Date and time is ${dateTime} for asset ${asset.id}: ${asset.originalPath}`);
 
@@ -654,16 +654,17 @@ export class MetadataService extends BaseService {
       this.logger.debug(`No timezone information found for asset ${asset.id}: ${asset.originalPath}`);
     }
 
+    const modifyDate = this.toDate(exifTags.FileModifyDate!);
     let dateTimeOriginal = dateTime?.toDate();
     let localDateTime = dateTime?.toDateTime().setZone('UTC', { keepLocalTime: true }).toJSDate();
     if (!localDateTime || !dateTimeOriginal) {
-      // FileCreateDate is not available on linux, likely because exiftool hasn't integrated the statx syscall yet
-      // birthtime is not available in Docker on macOS, so it appears as 0
-      const earliestDate = stats.birthtimeMs ? new Date(Math.min(stats.mtimeMs, stats.birthtimeMs)) : stats.mtime;
+      const fileCreatedAt = this.toDate(exifTags.FileCreateDate!);
+      const earliestDate = this.earliestDate(fileCreatedAt, modifyDate);
       this.logger.debug(
-        `No exif date time found, falling back on ${earliestDate.toISOString()}, earliest of file creation and modification for asset ${asset.id}: ${asset.originalPath}`,
+        `No exif date time found, falling back on ${earliestDate.toISOString()}, earliest of file creation and modification for assset ${asset.id}: ${asset.originalPath}`,
      );
-      dateTimeOriginal = localDateTime = earliestDate;
+      dateTimeOriginal = earliestDate;
+      localDateTime = earliestDate;
     }
 
     this.logger.verbose(
@@ -674,9 +675,18 @@ export class MetadataService extends BaseService {
       dateTimeOriginal,
       timeZone,
       localDateTime,
+      modifyDate,
     };
   }
 
+  private toDate(date: string | ExifDateTime): Date {
+    return typeof date === 'string' ? new Date(date) : date.toDate();
+  }
+
+  private earliestDate(a: Date, b: Date) {
+    return new Date(Math.min(a.valueOf(), b.valueOf()));
+  }
+
   private hasGeo(tags: ImmichTags): tags is ImmichTags & { GPSLatitude: number; GPSLongitude: number } {
     return (
       tags.GPSLatitude !== undefined &&
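Taken together, the metadata changes above read file timestamps and size from exiftool's `FileCreateDate`, `FileModifyDate`, and `FileSize` tags, fall back to a single `stat` call only when those tags are missing, and pick the earlier of file creation and modification when no EXIF capture date exists. A small self-contained sketch of that fallback, reusing the `toDate`/`earliestDate` shape from the diff (the example dates are hypothetical):

```ts
import { ExifDateTime } from 'exiftool-vendored';

const toDate = (date: string | ExifDateTime): Date =>
  typeof date === 'string' ? new Date(date) : date.toDate();

const earliestDate = (a: Date, b: Date): Date => new Date(Math.min(a.valueOf(), b.valueOf()));

// No EXIF capture date, so fall back to the earlier of the two file dates.
const fileCreatedAt = toDate('2024-01-05T10:00:00.000Z');
const fileModifiedAt = toDate('2024-01-03T08:30:00.000Z');
console.log(earliestDate(fileCreatedAt, fileModifiedAt).toISOString()); // 2024-01-03T08:30:00.000Z
```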
@@ -39,12 +39,7 @@ describe(MetadataService.name, () => {
   beforeEach(() => {
     ({ sut, mocks } = newTestService(MetadataService, { metadata: metadataRepository }));
 
-    mocks.storage.stat.mockResolvedValue({
-      size: 123_456,
-      mtime: new Date(654_321),
-      mtimeMs: 654_321,
-      birthtimeMs: 654_322,
-    } as Stats);
+    mocks.storage.stat.mockResolvedValue({ size: 123_456, ctime: new Date(), mtime: new Date() } as Stats);
 
     delete process.env.TZ;
   });
@@ -59,6 +54,8 @@ describe(MetadataService.name, () => {
       description: 'should handle no time zone information',
       exifData: {
         DateTimeOriginal: '2022:01:01 00:00:00',
+        FileCreateDate: '2022:01:01 00:00:00',
+        FileModifyDate: '2022:01:01 00:00:00',
       },
       expected: {
         localDateTime: '2022-01-01T00:00:00.000Z',
@@ -71,6 +68,8 @@ describe(MetadataService.name, () => {
       serverTimeZone: 'America/Los_Angeles',
       exifData: {
         DateTimeOriginal: '2022:01:01 00:00:00',
+        FileCreateDate: '2022:01:01 00:00:00',
+        FileModifyDate: '2022:01:01 00:00:00',
       },
       expected: {
         localDateTime: '2022-01-01T00:00:00.000Z',
@@ -83,6 +82,8 @@ describe(MetadataService.name, () => {
       serverTimeZone: 'Europe/Brussels',
       exifData: {
         DateTimeOriginal: '2022:01:01 00:00:00',
+        FileCreateDate: '2022:01:01 00:00:00',
+        FileModifyDate: '2022:01:01 00:00:00',
       },
       expected: {
         localDateTime: '2022-01-01T00:00:00.000Z',
@@ -95,6 +96,8 @@ describe(MetadataService.name, () => {
       serverTimeZone: 'Europe/Brussels',
       exifData: {
         DateTimeOriginal: '2022:06:01 00:00:00',
+        FileCreateDate: '2022:06:01 00:00:00',
+        FileModifyDate: '2022:06:01 00:00:00',
       },
       expected: {
         localDateTime: '2022-06-01T00:00:00.000Z',
@@ -106,6 +109,8 @@ describe(MetadataService.name, () => {
       description: 'should handle a +13:00 time zone',
       exifData: {
         DateTimeOriginal: '2022:01:01 00:00:00+13:00',
+        FileCreateDate: '2022:01:01 00:00:00+13:00',
+        FileModifyDate: '2022:01:01 00:00:00+13:00',
       },
       expected: {
         localDateTime: '2022-01-01T00:00:00.000Z',
web/package-lock.json (generated, 6 lines changed)

@@ -1,12 +1,12 @@
 {
   "name": "immich-web",
-  "version": "1.131.3",
+  "version": "1.131.2",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "immich-web",
-      "version": "1.131.3",
+      "version": "1.131.2",
       "license": "GNU Affero General Public License version 3",
       "dependencies": {
         "@formatjs/icu-messageformat-parser": "^2.9.8",
@@ -81,7 +81,7 @@
     },
     "../open-api/typescript-sdk": {
       "name": "@immich/sdk",
-      "version": "1.131.3",
+      "version": "1.131.2",
       "license": "GNU Affero General Public License version 3",
       "dependencies": {
         "@oazapfts/runtime": "^1.0.2"

@@ -1,6 +1,6 @@
 {
   "name": "immich-web",
-  "version": "1.131.3",
+  "version": "1.131.2",
   "license": "GNU Affero General Public License version 3",
   "type": "module",
   "scripts": {