Compare commits

...

56 Commits

Author SHA1 Message Date
Alex The Bot
81d51fbd7e Version v1.81.0 2023-10-03 20:48:23 +00:00
Alex
02f9b40d67 fix(server): library control doesn't apply to new library from the third row (#4331) 2023-10-03 14:05:14 -05:00
Jason Rasmussen
260a600bbc chore(server): dev compose changes (#4316) 2023-10-03 13:06:08 -05:00
Jason Rasmussen
818005fcb5 fix(server): fallback to local timezone when rendering storage template (#4317) 2023-10-03 13:05:44 -05:00
Daniel Dietzler
e5f704cf3b fix asset upload permissions for shared links (#4325) 2023-10-03 12:36:51 -04:00
Jonathan Jogenfors
e2f1e38472 chore(server,web): bump node version to 20.8 (#4311)
* chore: bump node version to 20.8

* fix: remove node hash
2023-10-03 09:34:35 -05:00
Alex
b3c82d5ba2 fix(server): incorrect video creation date EXIF extraction (#4309)
* fix(server): incorrect video creation date EXIF extraction

* update dependency

* update dependency

* revert

* remove unused code
2023-10-03 08:51:40 -05:00
Jonathan Jogenfors
6d1868a6e0 feat: server containers use host timezone (#4313) 2023-10-02 20:50:27 -05:00
Daniel Dietzler
98db9331d8 fix(server): delete face thumbnails when merging people (#4310)
* new job for person deletion, including face thumbnail deletion

* fix tests, delete files directly instead queueing jobs
2023-10-02 21:15:11 -04:00
Alex The Bot
66e860a08e Version v1.80.0 2023-10-02 14:47:21 +00:00
Jonathan Jogenfors
3172c341e0 chore(server): bump exiftool-vendored to 23.1.0 (#4302)
* chore: bump exiftool-vendored

* fix: correct version

* chore: bump exiftool-vendored.pl

---------

Co-authored-by: Jason Rasmussen <jrasm91@gmail.com>
2023-10-02 08:54:43 -04:00
Ikko Eltociear Ashimine
8234234c48 docs: add Japanese translated README (#4268)
* docs: add Japanese translated README

* docs: update README_ja_JP.md
2023-10-01 12:55:30 -05:00
Daniel Dietzler
8d5e782fc4 version links to releases page (#4288) 2023-10-01 02:09:25 +07:00
Daniel Dietzler
10d10d9021 chore(server): unit tests for metadata service (#4280)
* unit tests for metadata service

* better test descriptions
2023-09-29 17:25:45 -04:00
Jason Rasmussen
68d6d89a3b feat(web): better context menu position (#4271)
* feat(web): better context menu position

* fix: album context menu

* fix: add middle variant

* fix: rest of context menus

* fix: linting error
2023-09-29 17:41:58 +00:00
Jason Rasmussen
3e73cfb71a fix(server): handle number lists in metadata extraction (#4273) 2023-09-29 11:42:33 -04:00
shenlong
d7e970dcea fix(mobile): reduce server version api calls (#4265) 2023-09-29 21:51:54 +07:00
Daniel Dietzler
fb7249d1f6 fix(web): Context menu underflowing on people page (#4270) 2023-09-29 09:21:51 -04:00
Daniel Dietzler
521436dd21 feat(server): Add week numbers for templating (#4263)
* add week numbers as template option

* generate api

* fix tests

* change example date to show week padding

* change example date to immich birthday
2023-09-28 13:47:31 -04:00
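
A minimal sketch of rendering a padded week number for a storage template, assuming luxon as the date library; the `{{WW}}` token name and helper are illustrative, while the grounded part is the new `weekOptions` array on `SystemConfigTemplateStorageOptionDto` shown later in this diff.

```typescript
import { DateTime } from 'luxon';

// Token name ({{WW}}) and helper name are illustrative, not Immich's actual
// template syntax; only the existence of week-based template options comes
// from this changeset.
function renderWeekToken(fileCreatedAt: Date): string {
  // 'WW' is luxon's zero-padded ISO week of the year,
  // e.g. DateTime.fromISO('2022-02-03').toFormat('WW') === '05'
  return DateTime.fromJSDate(fileCreatedAt).toFormat('WW');
}
```
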
Mert
c145963b02 fix(server): always disable two-pass mode for video thumbnails (#4258)
* always disable two-pass mode for thumbnails

* add regression test

* added bitrate constraint to config mock
2023-09-28 08:29:31 -04:00
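
A sketch of the guard this kind of fix implies, with an invented config shape; only the behavior is taken from the commit above: two-pass encoding is meaningful for bitrate-targeted transcodes, never for thumbnails.

```typescript
// The config shape here is invented for illustration.
interface FfmpegSettings {
  twoPass: boolean;
  maxBitrate: string; // '0' meaning "no bitrate ceiling" is an assumption for this sketch
}

function useTwoPass(settings: FfmpegSettings, target: 'thumbnail' | 'video'): boolean {
  if (target === 'thumbnail') {
    return false; // a single preview frame never benefits from a second pass
  }
  return settings.twoPass && settings.maxBitrate !== '0';
}
```
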
Fynn Petersen-Frey
098ab9eae5 fix(mobile): speed up RenderList creation for timeline (#4103)
Co-authored-by: Alex <alex.tran1502@gmail.com>
2023-09-28 10:43:55 +07:00
shenlong
a937efe719 feat(mobile): use map settings from server-config (#4045)
* feat(mobile): use map settings from server-config to enable / disable map

* refactor(mobile): remove async await for server info update
2023-09-28 10:26:48 +07:00
Jason Rasmussen
b7fcec7ce3 feat(web): people sidebar link (#4257) 2023-09-28 10:09:54 +07:00
Russell Tan
69c23aa3ec fix(server): Exclude archived assets from search-explore #4041 (#4122)
* Exclude archived assets from search-explore  #4041

* Update test to properly expect an empty array with archived items

* typesense changes wip

* Add isArchived filter to default search filters

* Bump assets typesense schema version

* fix(server): sync bug for bulk asset update

---------

Co-authored-by: Jason Rasmussen <jrasm91@gmail.com>
2023-09-27 21:38:55 -04:00
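
A minimal sketch of the default-filter idea, assuming the Typesense-backed search the bullets above reference; the field names are illustrative, while `filter_by` and its `&&` syntax are standard Typesense search parameters.

```typescript
// Sketch only: field names are illustrative. Typesense accepts a `filter_by`
// expression per search, so archived assets are excluded by always AND-ing
// `isArchived:false` into the default filter.
function buildDefaultFilter(ownerId: string, extraFilter?: string): string {
  const filters = [`ownerId:${ownerId}`, 'isArchived:false'];
  if (extraFilter) {
    filters.push(extraFilter);
  }
  return filters.join(' && '); // e.g. "ownerId:123 && isArchived:false"
}
```
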
Daniel Dietzler
0a22e64799 refactor(server): merge facial-recognition and person (#4237)
* move facial recognition service into person service

* merge face repository and person repository

* fix imports
2023-09-27 16:46:46 -04:00
Jason Rasmussen
c3d6d69262 fix(server): live photo linking (#4253) 2023-09-27 20:32:58 +00:00
Jason Rasmussen
7cb78ed972 fix(server): android motion photo (#4254) 2023-09-27 20:27:08 +00:00
Daniel Dietzler
cc70f5f6a0 fix(server): Thumbnail migration creating unnecessary directories (#4251)
* fix: during migration folders will be created before checking if needed

* refactor
2023-09-27 15:29:07 -04:00
David Johnson
85efbc6984 fix(server): handle NaN in metadata extraction (#4221)
Fallback to null in event of invalid number.
2023-09-27 15:17:18 -04:00
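
A small sketch of the guard the commit message describes; the helper name is illustrative, the behavior is simply that anything that is not a finite number is stored as `null` instead of `NaN`.

```typescript
// Helper name is illustrative; invalid numeric EXIF values fall back to null.
function validateNumber(value: unknown): number | null {
  return typeof value === 'number' && Number.isFinite(value) ? value : null;
}

// validateNumber(Number.NaN) === null, validateNumber(35) === 35
```
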
Daniel Dietzler
3a44e8f8d3 refactor(server): Move metadata extraction to domain (#4243)
* use storageRepository in metadata extraction

* move metadata extraction processor to domain

* cleanup infra/domain

---------

Co-authored-by: Jason Rasmussen <jrasm91@gmail.com>
2023-09-27 18:44:51 +00:00
Daniel Dietzler
9bada51d56 feat(server, web)!: Move reverse geocoding settings to the UI (#4222)
* feat: reverse geocoding settings

* chore: open api

* re-init geocoder if precision has been updated

* update docs

* chore: update verbiage

* fix: re-init logic

* fix: reset to default

---------

Co-authored-by: Jason Rasmussen <jrasm91@gmail.com>
2023-09-26 14:03:57 +07:00
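
The shape of the new setting can be read off the generated OpenAPI client further down in this diff; the payload below is only an example of what the new admin UI lets an administrator save.

```typescript
// Shapes copied from the generated OpenAPI client shown later in this diff.
const CitiesFile = {
  Cities15000: 'cities15000',
  Cities5000: 'cities5000',
  Cities1000: 'cities1000',
  Cities500: 'cities500',
} as const;
type CitiesFile = (typeof CitiesFile)[keyof typeof CitiesFile];

interface SystemConfigReverseGeocodingDto {
  citiesFileOverride: CitiesFile;
  enabled: boolean;
}

// Example payload only.
const reverseGeocoding: SystemConfigReverseGeocodingDto = {
  enabled: true,
  // cities500 is the most detailed geonames dump (every place with population above 500)
  citiesFileOverride: CitiesFile.Cities500,
};
```
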
Jason Rasmussen
7bc6e9ef64 refactor(server): person thumbnail job (#4233)
* refactor(server): person thumbnail job

* fix(server): set feature photo
2023-09-26 14:03:22 +07:00
Jason Rasmussen
ea797c1723 chore: use non-conflicting port to serve docs (#4230) 2023-09-25 23:00:56 -04:00
martin
f63d6d5b67 fix(web): escape shortcut (#3753)
* fix: escape shortcut

* feat: more escape scenarios

* feat: more escape shortcuts

---------

Co-authored-by: Jason Rasmussen <jrasm91@gmail.com>
2023-09-26 02:53:26 +00:00
Jonathan Jogenfors
8873c9a02f docs: deprecate read only assets (#4226)
* chore: add deprecation notice in docs

* feat: link to library documentation

---------

Co-authored-by: Jason Rasmussen <jrasm91@gmail.com>
2023-09-26 02:26:11 +00:00
Jason Rasmussen
ee0e131efa fix(server): tests (#4229) 2023-09-25 22:17:53 -04:00
Thomas
af5a9d9108 chore(web): use axios isCancel function (#4227)
Use of this function should be more idiomatic and reliable - it's much less
likely for changes to Axios to cause regressions.
2023-09-25 23:20:01 +00:00
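
A minimal sketch of the pattern this commit adopts; the wrapper below is illustrative rather than Immich's actual web code, and the only axios-specific piece is the public `axios.isCancel()` helper.

```typescript
import axios from 'axios';

// Illustrative wrapper: cancelled requests are treated as "no result",
// everything else is re-thrown as a real failure.
async function fetchWithCancel(url: string, signal: AbortSignal): Promise<unknown | undefined> {
  try {
    const { data } = await axios.get(url, { signal });
    return data;
  } catch (error) {
    if (axios.isCancel(error)) {
      // cancelled deliberately (e.g. the user navigated away); not a real failure
      return undefined;
    }
    throw error; // genuine errors still propagate
  }
}
```
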
Mert
56cf9464af fix(server): use srgb pipeline for srgb images (#4101)
* added color-related exif fields

* remove metadata check, conditional pipe colorspace

* check exif metadata for srgb

* added migration

* updated e2e fixture

* uncased srgb check, search substrings

* extracted exif logic into separate function

* handle images with no bit depth or color metadata

* added unit tests
2023-09-25 19:18:47 -04:00
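
A sketch of the detection logic outlined by the bullets above (case-insensitive substring match, tolerant of images with no colour metadata); the field names and the 8-bit fallback are assumptions, not Immich's exact implementation.

```typescript
// Field names and the 8-bit fallback are assumptions for this sketch.
interface ColorMetadata {
  colorSpace?: string | null;
  profileDescription?: string | null;
  bitsPerSample?: number | null;
}

function isSrgb({ colorSpace, profileDescription, bitsPerSample }: ColorMetadata): boolean {
  const hints = [colorSpace, profileDescription].filter((v): v is string => !!v);
  if (hints.length > 0) {
    // uncased, substring-based check, per the commit bullets
    return hints.some((v) => v.toLowerCase().includes('srgb'));
  }
  // no colour metadata at all: treat standard 8-bit images as sRGB
  return bitsPerSample == null || bitsPerSample <= 8;
}
```
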
Jonathan Jogenfors
9676412875 fix: cli import (#4224)
* fix: allow import of assets

* fix: allow deletion of read only assets
2023-09-25 19:04:30 -04:00
Jason Rasmussen
54bea23485 chore(web): remove upload preview (#4219) 2023-09-25 22:28:01 +07:00
Daniel Dietzler
3053cbd4c8 chore(server): Store generated files (thumbnails, encoded video) in subdirectories (#4112)
* save thumbnails in subdirectories

* migration job, migrate assets and face thumbnails

* fix tests

* directory depth of two instead of three

* cleanup empty dirs after migration

* clean up empty dirs after migration, migrate people without assetId

* add job card for new migration job

* fix removeEmptyDirs race condition because of missing await

* cleanup empty directories after asset deletion

* move ensurePath to storage core

* rename jobs

* remove unnecessary property of IEntityJob

* use updated person getById, minor refactoring

* ensure that directory cleanup doesn't interfere with migration

* better description for job in ui

* fix remove directories when migration is done

* cleanup empty folders at start of migration

* fix: actually persist concurrency setting

* add comment explaining regex

* chore: cleanup

---------

Co-authored-by: Jason Rasmussen <jrasm91@gmail.com>
2023-09-25 11:07:21 -04:00
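
An illustrative sketch of the layout change described above; the exact naming scheme is an assumption, the grounded part being the commit's "directory depth of two", derived from the asset id so no single upload folder accumulates millions of files.

```typescript
import { join } from 'node:path';

// The naming scheme here is an assumption; only the two-level nesting is
// taken from the commit above.
function nestedThumbnailPath(uploadDir: string, assetId: string): string {
  // e.g. 'b3c82d5b-...' -> <uploadDir>/thumbs/b3/c8/b3c82d5b-....jpeg
  return join(uploadDir, 'thumbs', assetId.slice(0, 2), assetId.slice(2, 4), `${assetId}.jpeg`);
}
```
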
Jason Rasmussen
07069c3b1e fix(server): android live motion (#4218) 2023-09-25 09:42:23 -04:00
fujie
c0ce81ca0e fix(mobile): assetList is empty (#4213)
* fix(mobile): assetList is empty

* add comments

---------

Co-authored-by: Alex <alex.tran1502@gmail.com>
2023-09-25 08:09:09 +00:00
Alex
3bef456923 fix(server): metadata extraction error (#4210) 2023-09-25 14:45:26 +07:00
ochen1
91e2348381 fix(server): search for terms separated by hyphens and/or underscores in asset search (#4156)
* Use hyphen and underscore as token separators in search

* Bump typesense asset schema version

* Bump typesense asset schema version
2023-09-25 11:30:16 +07:00
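
A sketch of the schema-level change the commit implies, using the public typesense-js client; the collection name and field list are placeholders, while `token_separators` is a standard Typesense schema option.

```typescript
import Typesense from 'typesense';

// Placeholders: host, API key, collection name, field list. The relevant part
// is `token_separators`, which makes hyphen/underscore-joined file names
// searchable by their parts (e.g. 'IMG-2023_01' matches a search for '2023').
async function createAssetCollection(): Promise<void> {
  const client = new Typesense.Client({
    nodes: [{ host: 'localhost', port: 8108, protocol: 'http' }],
    apiKey: 'typesense-api-key',
  });

  await client.collections().create({
    name: 'assets-v2', // bumping the schema version, as the commit notes, forces a re-index
    token_separators: ['-', '_'],
    fields: [{ name: 'originalFileName', type: 'string' }],
  });
}
```
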
Andreas
1564ed3256 feat(server): add support for .psd files (#4192) 2023-09-24 22:03:14 +07:00
martin
b8fec26115 refactor(web): album listing page (#4146)
* feat: add more options to album page

* pr feedback

* pr feedback

* feat: add quick actions on the list mode

* feat: responsive design

* feat: remove dropdown for display mode

* pr feedback
2023-09-24 20:22:46 +07:00
Daniel Dietzler
dd86aa9259 fix(server): require library.write to upload assets to library (#4200)
* require library.write to upload assets to library

* fix tests
2023-09-24 20:19:36 +07:00
Daniel Dietzler
84e4c15ed5 fix(server): random returning less than count assets (#4201) 2023-09-24 20:18:31 +07:00
Daniele Ricci
25d1b3e1b1 fix(server): use mtime instead of ctime for fileCreatedAt (#4191) 2023-09-24 20:14:25 +07:00
Eric Helgeson
0e63efb490 Correct admin commands (#4177)
* Correct admin commands

Also use text for examples so they can be easily copied and updated.

* Add missing line

* remove leading # per feedback
2023-09-24 02:06:59 +00:00
Daniele Ricci
014d164d99 feat(server): random assets API (#4184)
* feat(server): get random assets API

* Fix tests

* Use correct validation annotation

* Fix offset use in query

* Update API specs

* Fix typo

* Random assets e2e tests

* Improve e2e tests
2023-09-23 22:28:55 +07:00
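
A usage sketch for the new endpoint, based on the generated axios client that appears later in this diff (`AssetApi.getRandom`); the import path, base URL, and API key are placeholders.

```typescript
// Placeholders: the import path, server URL, and API key. The method itself,
// AssetApi.getRandom({ count }), comes from the generated client shown below.
import { AssetApi, Configuration } from './open-api';

async function printRandomAssetIds(): Promise<void> {
  const api = new AssetApi(
    new Configuration({ basePath: 'https://immich.example.com/api', apiKey: 'YOUR_API_KEY' }),
  );

  // GET /asset/random?count=5 -> up to five AssetResponseDto entries
  const { data: assets } = await api.getRandom({ count: 5 });
  console.log(assets.map((asset) => asset.id));
}
```
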
martin
fc64be6603 feat(web): suggest people when typing a name (#4126)
* feat(web): suggest people when entering a name

* fix: border size from 2 to 1 pixel

* pr feedback

* fix: web unit test

* pr feedback

---------

Co-authored-by: Alex <alex.tran1502@gmail.com>
2023-09-23 11:58:51 +07:00
Alex
9a7e48eaa6 chore(web): remove flowbite (#4178)
* chore(web): remove flowbite

* Added confirmation prompt for deletion
2023-09-23 11:50:21 +07:00
Alex The Bot
e050121dbf Version v1.79.1 2023-09-22 01:37:20 +00:00
Jason Rasmussen
f0a5d39625 fix: live photo uploads (#4167)
* fix: live photo uploads

* fix: format

---------

Co-authored-by: Alex <alex.tran1502@gmail.com>
2023-09-22 08:35:25 +07:00
223 changed files with 4451 additions and 2177 deletions

View File

@@ -24,6 +24,7 @@
<a href="README_es_ES.md">Español</a>
<a href="README_fr_FR.md">Français</a>
<a href="README_nl_NL.md">Nederlands</a>
<a href="README_ja_JP.md">日本語</a>
</p>
## Disclaimer

View File

@@ -24,6 +24,7 @@
<a href="README_ca_ES.md">Español</a>
<a href="README_fr_FR.md">Français</a>
<a href="README_nl_NL.md">Nederlands</a>
<a href="README_ja_JP.md">日本語</a>
</p>
## Avís legal

View File

@@ -23,6 +23,7 @@
<a href="README_tr_TR.md">Türkçe</a>
<a href="README_ca_ES.md">Català</a>
<a href="README_fr_FR.md">Français</a>
<a href="README_ja_JP.md">日本語</a>
</p>
## Descargo de responsabilidad

View File

@@ -24,6 +24,7 @@
<a href="README_es_ES.md">Español</a>
<a href="README_fr_FR.md">Français</a>
<a href="README_nl_NL.md">Nederlands</a>
<a href="README_ja_JP.md">日本語</a>
</p>
## Clause de non-responsabilité

README_ja_JP.md (new file, 111 lines)
View File

@@ -0,0 +1,111 @@
<p align="center">
<br/>
<a href="https://opensource.org/licenses/MIT"><img src="https://img.shields.io/badge/license-MIT-green.svg?color=3F51B5&style=for-the-badge&label=License&logoColor=000000&labelColor=ececec" alt="License: MIT"></a>
<a href="https://discord.gg/D8JsnBEuKb">
<img src="https://img.shields.io/discord/979116623879368755.svg?label=Discord&logo=Discord&style=for-the-badge&logoColor=000000&labelColor=ececec" atl="Discord"/>
</a>
<br/>
<br/>
</p>
<p align="center">
<img src="design/immich-logo.svg" width="150" title="Login With Custom URL">
</p>
<h3 align="center">Immich - 高性能なセルフホスト 写真/ビデオバックアップソリューション</h3>
<br/>
<a href="https://immich.app">
<img src="design/immich-screenshots.png" title="Main Screenshot">
</a>
<br/>
<p align="center">
<a href="README_zh_CN.md">中文</a>
<a href="README_tr_TR.md">Türkçe</a>
<a href="README_ca_ES.md">Català</a>
<a href="README_es_ES.md">Español</a>
<a href="README_fr_FR.md">Français</a>
<a href="README_nl_NL.md">Nederlands</a>
</p>
## 免責事項
- ⚠️ このプロジェクトは **非常に活発に** 開発中です。
- ⚠️ バグの存在や変更が入ることも予想されます。
- ⚠️ **写真やビデオを保存する唯一の方法としてこのアプリを使用しないでください。**
- ⚠️ 大切な写真やビデオは、常に [3-2-1](https://www.backblaze.com/blog/the-3-2-1-backup-strategy/) のバックアッププランに従ってください!
## コンテンツ
- [公式ドキュメント](https://immich.app/docs)
- [ロードマップ](https://github.com/orgs/immich-app/projects/1)
- [デモ](#デモ)
- [機能](#機能)
- [紹介](https://immich.app/docs/overview/introduction)
- [インストール](https://immich.app/docs/install/requirements)
- [コントリビューションガイド](https://immich.app/docs/overview/support-the-project)
- [プロジェクトのサポート](#プロジェクトのサポート)
## ドキュメント
インストールガイドを含む主なドキュメントは、https://immich.app/ です。
## デモ
web デモは https://demo.immich.app からアクセスできます
モバイルアプリの場合、`Server Endpoint URL` には `https://demo.immich.app/api` を使用することができます
```bash title="Demo Credential"
The credential
email: demo@immich.app
password: demo
```
```
Spec: Free-tier Oracle VM - Amsterdam - 2.4Ghz quad-core ARM64 CPU, 24GB RAM
```
# 機能
| 機能 | モバイル | Web |
| ------------------------------------------- | ------ | --- |
| ビデオや写真のアップロードと表示 | はい | はい |
| アプリを開いたとき自動バックアップ | はい | N/A |
| バックアップ用アルバム選択 | はい | N/A |
| 写真やビデオをローカルデバイスにダウンロード | はい | はい |
| マルチユーザー対応 | はい | はい |
| アルバムと共有アルバム | はい | はい |
| スクラブ可能/ドラッグ可能スクロールバー | はい | はい |
| 生のフォーマットに対応 | はい | はい |
| メタデータ表示(EXIF、地図) | はい | はい |
| メタデータ、オブジェクト、フェース、CLIPによる検索 | はい | はい |
| 管理機能(ユーザー管理) | いいえ | はい |
| バックグラウンドバックアップ | はい | N/A |
| 仮想スクロール | はい | はい |
| OAuth サポート | はい | はい |
| API キー | N/A | はい |
| LivePhoto のバックアップと再生 | iOS | はい |
| ユーザー定義のストレージ構造 | はい | はい |
| 公開シェアリング | いいえ | はい |
| アーカイブとお気に入り | はい | はい |
| グローバルマップ | はい | はい |
| パートナー共有 | はい | はい |
| 顔認識とクラスタリング | はい | はい |
| 思い出(x 年前) | はい | はい |
| オフラインサポート | はい | いいえ |
| 読み取り専用ギャラリー | はい | はい |
# プロジェクトのサポート
私はこのプロジェクトにコミットしてきました。ドキュメントを更新し、新しい機能を追加し、バグを修正し続けるつもりですが、私ひとりではできません。だから、続けるためのモチベーションをさらに高めてくれる皆さんの助けが必要なのです。
[selfhosted.show - In the episode 'The-organization-must-not-be-name is a Hostile Actor'](https://selfhosted.show/79?t=1418) のホストが言ったように、これはチームと私がやっていることの大規模な事業だ。そしていつの日か、フルタイムでこの仕事ができるようになりたいと思っています。
もし、あなたがこのプロジェクトに賛同し、このアプリを長く使い続けたいと思われるのであれば、以下のオプションから支援をご検討ください。
## 寄付
- GitHub スポンサー経由の[毎月の寄付](https://github.com/sponsors/alextran1502)
- GitHub スポンサー経由の[一回寄付](https://github.com/sponsors/alextran1502?frequency=one-time&sponsor=alextran1502)
- [Librepay](https://liberapay.com/alex.tran1502/)
- [buymeacoffee](https://www.buymeacoffee.com/altran1502)
- Bitcoin: 1FvEp6P6NM8EZEkpGUFAN2LqJ1gxusNxZX

View File

@@ -24,6 +24,7 @@
<a href="README_es_ES.md">Español</a>
<a href="README_fr_FR.md">Français</a>
<a href="README_nl_NL.md">Nederlands</a>
<a href="README_ja_JP.md">日本語</a>
</p>
## Disclaimer

View File

@@ -24,6 +24,7 @@
<a href="README_es_ES.md">Español</a>
<a href="README_fr_FR.md">Français</a>
<a href="README_nl_NL.md">Nederlands</a>
<a href="README_ja_JP.md">日本語</a>
</p>
## Feragatname

View File

@@ -28,6 +28,7 @@
<a href="README_es_ES.md">Español</a>
<a href="README_fr_FR.md">Français</a>
<a href="README_nl_NL.md">Nederlands</a>
<a href="README_ja_JP.md">日本語</a>
</p>

View File

@@ -4,7 +4,7 @@
* Immich
* Immich API
*
* The version of the OpenAPI document: 1.79.0
* The version of the OpenAPI document: 1.81.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
@@ -307,6 +307,12 @@ export interface AllJobStatusResponseDto {
* @memberof AllJobStatusResponseDto
*/
'metadataExtraction': JobStatusDto;
/**
*
* @type {JobStatusDto}
* @memberof AllJobStatusResponseDto
*/
'migration': JobStatusDto;
/**
*
* @type {JobStatusDto}
@@ -1049,6 +1055,22 @@ export interface CheckExistingAssetsResponseDto {
*/
'existingIds': Array<string>;
}
/**
*
* @export
* @enum {string}
*/
export const CitiesFile = {
Cities15000: 'cities15000',
Cities5000: 'cities5000',
Cities1000: 'cities1000',
Cities500: 'cities500'
} as const;
export type CitiesFile = typeof CitiesFile[keyof typeof CitiesFile];
/**
*
* @export
@@ -1779,6 +1801,7 @@ export const JobName = {
ClipEncoding: 'clipEncoding',
BackgroundTask: 'backgroundTask',
StorageTemplateMigration: 'storageTemplateMigration',
Migration: 'migration',
Search: 'search',
Sidecar: 'sidecar',
Library: 'library'
@@ -2643,6 +2666,12 @@ export interface ServerFeaturesDto {
* @memberof ServerFeaturesDto
*/
'passwordLogin': boolean;
/**
*
* @type {boolean}
* @memberof ServerFeaturesDto
*/
'reverseGeocoding': boolean;
/**
*
* @type {boolean}
@@ -3086,6 +3115,12 @@ export interface SystemConfigDto {
* @memberof SystemConfigDto
*/
'passwordLogin': SystemConfigPasswordLoginDto;
/**
*
* @type {SystemConfigReverseGeocodingDto}
* @memberof SystemConfigDto
*/
'reverseGeocoding': SystemConfigReverseGeocodingDto;
/**
*
* @type {SystemConfigStorageTemplateDto}
@@ -3240,6 +3275,12 @@ export interface SystemConfigJobDto {
* @memberof SystemConfigJobDto
*/
'metadataExtraction': JobSettingsDto;
/**
*
* @type {JobSettingsDto}
* @memberof SystemConfigJobDto
*/
'migration': JobSettingsDto;
/**
*
* @type {JobSettingsDto}
@@ -3425,6 +3466,27 @@ export interface SystemConfigPasswordLoginDto {
*/
'enabled': boolean;
}
/**
*
* @export
* @interface SystemConfigReverseGeocodingDto
*/
export interface SystemConfigReverseGeocodingDto {
/**
*
* @type {CitiesFile}
* @memberof SystemConfigReverseGeocodingDto
*/
'citiesFileOverride': CitiesFile;
/**
*
* @type {boolean}
* @memberof SystemConfigReverseGeocodingDto
*/
'enabled': boolean;
}
/**
*
* @export
@@ -3480,6 +3542,12 @@ export interface SystemConfigTemplateStorageOptionDto {
* @memberof SystemConfigTemplateStorageOptionDto
*/
'secondOptions': Array<string>;
/**
*
* @type {Array<string>}
* @memberof SystemConfigTemplateStorageOptionDto
*/
'weekOptions': Array<string>;
/**
*
* @type {Array<string>}
@@ -6303,6 +6371,49 @@ export const AssetApiAxiosParamCreator = function (configuration?: Configuration
setSearchParams(localVarUrlObj, localVarQueryParameter);
let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
return {
url: toPathString(localVarUrlObj),
options: localVarRequestOptions,
};
},
/**
*
* @param {number} [count]
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
getRandom: async (count?: number, options: AxiosRequestConfig = {}): Promise<RequestArgs> => {
const localVarPath = `/asset/random`;
// use dummy base URL string because the URL constructor only accepts absolute URLs.
const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
let baseOptions;
if (configuration) {
baseOptions = configuration.baseOptions;
}
const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
const localVarHeaderParameter = {} as any;
const localVarQueryParameter = {} as any;
// authentication cookie required
// authentication api_key required
await setApiKeyToObject(localVarHeaderParameter, "x-api-key", configuration)
// authentication bearer required
// http bearer authentication required
await setBearerAuthToObject(localVarHeaderParameter, configuration)
if (count !== undefined) {
localVarQueryParameter['count'] = count;
}
setSearchParams(localVarUrlObj, localVarQueryParameter);
let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
@@ -7043,6 +7154,16 @@ export const AssetApiFp = function(configuration?: Configuration) {
const localVarAxiosArgs = await localVarAxiosParamCreator.getMemoryLane(timestamp, options);
return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration);
},
/**
*
* @param {number} [count]
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
async getRandom(count?: number, options?: AxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<AssetResponseDto>>> {
const localVarAxiosArgs = await localVarAxiosParamCreator.getRandom(count, options);
return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration);
},
/**
*
* @param {TimeBucketSize} size
@@ -7318,6 +7439,15 @@ export const AssetApiFactory = function (configuration?: Configuration, basePath
getMemoryLane(requestParameters: AssetApiGetMemoryLaneRequest, options?: AxiosRequestConfig): AxiosPromise<Array<MemoryLaneResponseDto>> {
return localVarFp.getMemoryLane(requestParameters.timestamp, options).then((request) => request(axios, basePath));
},
/**
*
* @param {AssetApiGetRandomRequest} requestParameters Request parameters.
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
getRandom(requestParameters: AssetApiGetRandomRequest = {}, options?: AxiosRequestConfig): AxiosPromise<Array<AssetResponseDto>> {
return localVarFp.getRandom(requestParameters.count, options).then((request) => request(axios, basePath));
},
/**
*
* @param {AssetApiGetTimeBucketsRequest} requestParameters Request parameters.
@@ -7752,6 +7882,20 @@ export interface AssetApiGetMemoryLaneRequest {
readonly timestamp: string
}
/**
* Request parameters for getRandom operation in AssetApi.
* @export
* @interface AssetApiGetRandomRequest
*/
export interface AssetApiGetRandomRequest {
/**
*
* @type {number}
* @memberof AssetApiGetRandom
*/
readonly count?: number
}
/**
* Request parameters for getTimeBuckets operation in AssetApi.
* @export
@@ -8244,6 +8388,17 @@ export class AssetApi extends BaseAPI {
return AssetApiFp(this.configuration).getMemoryLane(requestParameters.timestamp, options).then((request) => request(this.axios, this.basePath));
}
/**
*
* @param {AssetApiGetRandomRequest} requestParameters Request parameters.
* @param {*} [options] Override http request option.
* @throws {RequiredError}
* @memberof AssetApi
*/
public getRandom(requestParameters: AssetApiGetRandomRequest = {}, options?: AxiosRequestConfig) {
return AssetApiFp(this.configuration).getRandom(requestParameters.count, options).then((request) => request(this.axios, this.basePath));
}
/**
*
* @param {AssetApiGetTimeBucketsRequest} requestParameters Request parameters.

View File

@@ -4,7 +4,7 @@
* Immich
* Immich API
*
* The version of the OpenAPI document: 1.79.0
* The version of the OpenAPI document: 1.81.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

View File

@@ -4,7 +4,7 @@
* Immich
* Immich API
*
* The version of the OpenAPI document: 1.79.0
* The version of the OpenAPI document: 1.81.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

View File

@@ -4,7 +4,7 @@
* Immich
* Immich API
*
* The version of the OpenAPI document: 1.79.0
* The version of the OpenAPI document: 1.81.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

View File

@@ -4,7 +4,7 @@
* Immich
* Immich API
*
* The version of the OpenAPI document: 1.79.0
* The version of the OpenAPI document: 1.81.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

View File

@@ -34,6 +34,7 @@ const other = [
'orf',
'ori',
'pef',
'psd',
'raf',
'raw',
'rwl',

View File

@@ -25,7 +25,7 @@ export class CrawledAsset {
async process() {
const stats = await fs.promises.stat(this.path);
this.deviceAssetId = `${basename(this.path)}-${stats.size}`.replace(/\s+/g, '');
this.fileCreatedAt = stats.ctime.toISOString();
this.fileCreatedAt = stats.mtime.toISOString();
this.fileModifiedAt = stats.mtime.toISOString();
this.fileSize = stats.size;

View File

@@ -11,8 +11,10 @@ services:
command: npm run start:debug immich
volumes:
- ../server:/usr/src/app
- ${UPLOAD_LOCATION}:/usr/src/app/upload
- ${UPLOAD_LOCATION}/photos:/usr/src/app/upload
- /usr/src/app/node_modules
- /etc/timezone:/etc/timezone:ro
- /etc/localtime:/etc/localtime:ro
ports:
- 3001:3001
- 9230:9230
@@ -25,25 +27,6 @@ services:
- database
- typesense
immich-machine-learning:
container_name: immich_machine_learning
image: immich-machine-learning-dev:latest
build:
context: ../machine-learning
dockerfile: Dockerfile
ports:
- 3003:3003
volumes:
- ../machine-learning:/usr/src/app
- model-cache:/cache
env_file:
- .env
environment:
- NODE_ENV=development
depends_on:
- database
restart: unless-stopped
immich-microservices:
container_name: immich_microservices
image: immich-microservices:latest
@@ -57,8 +40,10 @@ services:
command: npm run start:debug microservices
volumes:
- ../server:/usr/src/app
- ${UPLOAD_LOCATION}:/usr/src/app/upload
- ${UPLOAD_LOCATION}/photos:/usr/src/app/upload
- /usr/src/app/node_modules
- /etc/timezone:/etc/timezone:ro
- /etc/localtime:/etc/localtime:ro
env_file:
- .env
ports:
@@ -94,6 +79,25 @@ services:
depends_on:
- immich-server
immich-machine-learning:
container_name: immich_machine_learning
image: immich-machine-learning-dev:latest
build:
context: ../machine-learning
dockerfile: Dockerfile
ports:
- 3003:3003
volumes:
- ../machine-learning:/usr/src/app
- model-cache:/cache
env_file:
- .env
environment:
- NODE_ENV=development
depends_on:
- database
restart: unless-stopped
typesense:
container_name: immich_typesense
image: typesense/typesense:0.24.1@sha256:9bcff2b829f12074426ca044b56160ca9d777a0c488303469143dd9f8259d4dd
@@ -103,7 +107,7 @@ services:
# remove this to get debug messages
- GLOG_minloglevel=1
volumes:
- tsdata:/data
- ${UPLOAD_LOCATION}/typesense:/data
redis:
container_name: immich_redis
@@ -119,7 +123,7 @@ services:
POSTGRES_USER: ${DB_USERNAME}
POSTGRES_DB: ${DB_DATABASE_NAME}
volumes:
- pgdata:/var/lib/postgresql/data
- ${UPLOAD_LOCATION}/postgres:/data
ports:
- 5432:5432
@@ -141,6 +145,4 @@ services:
restart: unless-stopped
volumes:
pgdata:
model-cache:
tsdata:

View File

@@ -10,6 +10,8 @@ services:
command: ["./start-server.sh"]
volumes:
- ${UPLOAD_LOCATION}:/usr/src/app/upload
- /etc/timezone:/etc/timezone:ro
- /etc/localtime:/etc/localtime:ro
env_file:
- .env
depends_on:
@@ -29,7 +31,7 @@ services:
env_file:
- .env
restart: always
immich-microservices:
container_name: immich_microservices
image: immich-microservices:latest
@@ -42,6 +44,8 @@ services:
command: ["./start-microservices.sh"]
volumes:
- ${UPLOAD_LOCATION}:/usr/src/app/upload
- /etc/timezone:/etc/timezone:ro
- /etc/localtime:/etc/localtime:ro
env_file:
- .env
depends_on:

View File

@@ -4,9 +4,11 @@ services:
immich-server:
container_name: immich_server
image: ghcr.io/immich-app/immich-server:${IMMICH_VERSION:-release}
command: [ "start.sh", "immich" ]
command: ["start.sh", "immich"]
volumes:
- ${UPLOAD_LOCATION}:/usr/src/app/upload
- /etc/timezone:/etc/timezone:ro
- /etc/localtime:/etc/localtime:ro
env_file:
- .env
depends_on:
@@ -21,9 +23,11 @@ services:
# extends:
# file: hwaccel.yml
# service: hwaccel
command: [ "start.sh", "microservices" ]
command: ["start.sh", "microservices"]
volumes:
- ${UPLOAD_LOCATION}:/usr/src/app/upload
- /etc/timezone:/etc/timezone:ro
- /etc/localtime:/etc/localtime:ro
env_file:
- .env
depends_on:

View File

@@ -16,20 +16,53 @@ To run a command, [connect](/docs/guides/docker-help.md#attach-to-a-container) t
## Examples
Note that the commands below should begin with `immich-admin`.
Reset Admin Password
![Reset Admin Password](./img/reset-admin-password.png)
```
immich-admin reset-admin-password
Found Admin:
- ID=e65e6f88-2a30-4dbe-8dd9-1885f4889b53
- OAuth ID=
- Email=admin@example.com
- Name=Immich Admin
? Please choose a new password (optional) immich-is-cool
The admin password has been updated.
```
Disable Password Login
![Disable Password Login](./img/disable-password-login.png)
```
immich-admin disable-password-login
Password login has been disabled.
```
Enable Password Login
![Enable Password Login](./img/enable-password-login.png)
```
immich-admin enable-password-login
Password login has been enabled.
```
List Users
![List Users](./img/list-users.png)
```
immich-admin list-users
[
{
id: 'e65e6f88-2a30-4dbe-8dd9-1885f4889b53',
email: 'immich@example.com.com',
firstName: 'Immich',
lastName: 'Admin',
storageLabel: 'admin',
externalPath: null,
profileImagePath: 'upload/profile/e65e6f88-2a30-4dbe-8dd9-1885f4889b53/e65e6f88-2a30-4dbe-8dd9-1885f4889b53.jpg',
shouldChangePassword: true,
isAdmin: true,
createdAt: 2023-07-11T20:12:20.602Z,
deletedAt: null,
updatedAt: 2023-09-21T15:42:28.129Z,
oauthId: '',
memoriesEnabled: true
}
]
```

View File

@@ -1,4 +1,10 @@
# Read-only Gallery [Experimental]
# Read-only Gallery [Deprecated]
:::caution
This feature is being deprecated in favor of [Libraries](/docs/features/libraries.md).
:::
## Overview
@@ -6,18 +12,6 @@ This feature enables users to use an existing gallery without uploading the asse
Upon syncing the file information, it will be read by Immich to generate supported files.
:::caution
This feature is still in an experimental stage. And this is an initial implementation and will receive improvements in the future.
The current limitations of this feature are:
- Assets are not automatically synced and must instead be manually synced with the CLI tool.
- Only new files that are added to the gallery will be detected.
- Deleted and moved files will not be detected.
:::
## Usage
:::tip Example scenario

View File

@@ -49,11 +49,9 @@ These environment variables are used by the `docker-compose.yml` file and do **N
## Geocoding
| Variable | Description | Default | Services |
| :--------------------------------- | :---------------------------------- | :--------------------------: | :------------ |
| `DISABLE_REVERSE_GEOCODING` | Disable Reverse Geocoding Precision | `false` | microservices |
| `REVERSE_GEOCODING_PRECISION` | Reverse Geocoding Precision | `3` | microservices |
| `REVERSE_GEOCODING_DUMP_DIRECTORY` | Reverse Geocoding Dump Directory | `./.reverse-geocoding-dump/` | microservices |
| Variable | Description | Default | Services |
| :--------------------------------- | :------------------------------- | :--------------------------: | :------------ |
| `REVERSE_GEOCODING_DUMP_DIRECTORY` | Reverse Geocoding Dump Directory | `./.reverse-geocoding-dump/` | microservices |
## Ports

View File

@@ -6,7 +6,7 @@
"docusaurus": "docusaurus",
"format": "prettier --check .",
"format:fix": "prettier --write .",
"start": "docusaurus start",
"start": "docusaurus start --port 3005",
"build": "docusaurus build",
"swizzle": "docusaurus swizzle",
"deploy": "docusaurus deploy",

View File

@@ -1,6 +1,6 @@
[tool.poetry]
name = "machine-learning"
version = "1.79.0"
version = "1.81.0"
description = ""
authors = ["Hau Tran <alex.tran1502@gmail.com>"]
readme = "README.md"

View File

@@ -35,8 +35,8 @@ platform :android do
task: 'bundle',
build_type: 'Release',
properties: {
"android.injected.version.code" => 103,
"android.injected.version.name" => "1.79.0",
"android.injected.version.code" => 105,
"android.injected.version.name" => "1.81.0",
}
)
upload_to_play_store(skip_upload_apk: true, skip_upload_images: true, skip_upload_screenshots: true, aab: '../build/app/outputs/bundle/release/app-release.aab')

View File

@@ -19,7 +19,7 @@ platform :ios do
desc "iOS Beta"
lane :beta do
increment_version_number(
version_number: "1.79.0"
version_number: "1.81.0"
)
increment_build_number(
build_number: latest_testflight_build_number + 1,

View File

@@ -117,6 +117,7 @@ class SharingPage extends HookConsumerWidget {
padding: const EdgeInsets.only(
left: 12.0,
right: 12.0,
top: 24.0,
bottom: 12.0,
),
child: Row(

View File

@@ -13,8 +13,9 @@ final archiveProvider = StreamProvider<RenderList>((ref) async* {
final query = ref
.watch(dbProvider)
.assets
.where()
.ownerIdEqualToAnyChecksum(user.isarId)
.filter()
.ownerIdEqualTo(user.isarId)
.isArchivedEqualTo(true)
.sortByFileCreatedAt();
final settings = ref.watch(appSettingsServiceProvider);

View File

@@ -218,6 +218,12 @@ class BackupNotifier extends StateNotifier<BackUpState> {
final assetCountInAlbum = await album.assetCountAsync;
if (assetCountInAlbum > 0) {
final assetList = await album.getAssetListPaged(page: 0, size: 1);
// Even though we check assetCountInAlbum to make sure that there are assets in the album,
// the `getAssetListPaged` method can still return an empty list, which would cause no assets to be rendered
if (assetList.isEmpty) {
continue;
}
final thumbnailAsset = assetList.first;
try {
final thumbnailData = await thumbnailAsset

View File

@@ -13,8 +13,9 @@ final favoriteAssetsProvider = StreamProvider<RenderList>((ref) async* {
final query = ref
.watch(dbProvider)
.assets
.where()
.ownerIdEqualToAnyChecksum(user.isarId)
.filter()
.ownerIdEqualTo(user.isarId)
.isFavoriteEqualTo(true)
.sortByFileCreatedAt();
final settings = ref.watch(appSettingsServiceProvider);

View File

@@ -142,7 +142,7 @@ class RenderList {
) async {
final List<RenderAssetGridElement> elements = [];
const pageSize = 500;
const pageSize = 50000;
const sectionSize = 60; // divides evenly by 2,3,4,5,6
if (groupBy == GroupAssetsBy.none) {

View File

@@ -54,7 +54,7 @@ class HomePage extends HookConsumerWidget {
Future(() => ref.read(assetProvider.notifier).getAllAsset());
ref.read(albumProvider.notifier).getAllAlbums();
ref.read(sharedAlbumProvider.notifier).getAllSharedAlbums();
ref.read(serverInfoProvider.notifier).getServerVersion();
ref.read(serverInfoProvider.notifier).getServerInfo();
selectionEnabledHook.addListener(() {
multiselectEnabled.state = selectionEnabledHook.value;

View File

@@ -1,6 +1,7 @@
import 'package:flutter/material.dart';
import 'package:flutter_map/plugin_api.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/shared/providers/server_info.provider.dart';
import 'package:immich_mobile/utils/color_filter_generator.dart';
import 'package:latlong2/latlong.dart';
import 'package:url_launcher/url_launcher.dart';
@@ -29,8 +30,9 @@ class MapThumbnail extends HookConsumerWidget {
@override
Widget build(BuildContext context, WidgetRef ref) {
final tileLayer = TileLayer(
urlTemplate: "https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png",
subdomains: const ['a', 'b', 'c'],
urlTemplate: ref.watch(
serverInfoProvider.select((v) => v.serverConfig.mapTileUrl),
),
);
return SizedBox(

View File

@@ -20,6 +20,7 @@ import 'package:immich_mobile/modules/map/ui/map_page_bottom_sheet.dart';
import 'package:immich_mobile/modules/map/ui/map_page_app_bar.dart';
import 'package:immich_mobile/routing/router.dart';
import 'package:immich_mobile/shared/models/asset.dart';
import 'package:immich_mobile/shared/providers/server_info.provider.dart';
import 'package:immich_mobile/shared/ui/immich_loading_indicator.dart';
import 'package:immich_mobile/shared/ui/immich_toast.dart';
import 'package:immich_mobile/utils/color_filter_generator.dart';
@@ -358,8 +359,9 @@ class MapPageState extends ConsumerState<MapPage> {
}
final tileLayer = TileLayer(
urlTemplate: "https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png",
subdomains: const ['a', 'b', 'c'],
urlTemplate: ref.watch(
serverInfoProvider.select((v) => v.serverConfig.mapTileUrl),
),
maxNativeZoom: 19,
maxZoom: 19,
);

View File

@@ -8,15 +8,21 @@ import 'package:immich_mobile/shared/models/store.dart';
import 'package:latlong2/latlong.dart';
class CuratedPlacesRow extends CuratedRow {
final bool isMapEnabled;
const CuratedPlacesRow({
super.key,
required super.content,
this.isMapEnabled = true,
super.imageSize,
super.onTap,
});
@override
Widget build(BuildContext context) {
// Calculating the actual index of the content based on whether the map is enabled or not.
// If enabled, the map is injected as the first item in the list (index 0), so the actual content starts from index 1
final int actualContentIndex = isMapEnabled ? 1 : 0;
Widget buildMapThumbnail() {
return GestureDetector(
onTap: () => AutoRouter.of(context).push(
@@ -75,6 +81,24 @@ class CuratedPlacesRow extends CuratedRow {
);
}
// Return empty thumbnail
if (!isMapEnabled && content.isEmpty) {
return Align(
alignment: Alignment.centerLeft,
child: Padding(
padding: const EdgeInsets.symmetric(horizontal: 16.0),
child: SizedBox(
width: imageSize,
height: imageSize,
child: ThumbnailWithInfo(
textInfo: '',
onTap: () {},
),
),
),
);
}
return ListView.builder(
scrollDirection: Axis.horizontal,
padding: const EdgeInsets.symmetric(
@@ -82,11 +106,10 @@ class CuratedPlacesRow extends CuratedRow {
),
itemBuilder: (context, index) {
// Injecting Map thumbnail as the first element
if (index == 0) {
if (isMapEnabled && index == 0) {
return buildMapThumbnail();
}
// The actual index is 1 less than the virutal index since we inject map into the first position
final actualIndex = index - 1;
final actualIndex = index - actualContentIndex;
final object = content[actualIndex];
final thumbnailRequestUrl =
'${Store.get(StoreKey.serverEndpoint)}/asset/thumbnail/${object.id}';
@@ -103,8 +126,7 @@ class CuratedPlacesRow extends CuratedRow {
),
);
},
// Adding 1 to inject map thumbnail as first element
itemCount: content.length + 1,
itemCount: content.length + actualContentIndex,
);
}
}

View File

@@ -14,6 +14,7 @@ import 'package:immich_mobile/modules/search/ui/person_name_edit_form.dart';
import 'package:immich_mobile/modules/search/ui/search_row_title.dart';
import 'package:immich_mobile/modules/search/ui/search_suggestion_list.dart';
import 'package:immich_mobile/routing/router.dart';
import 'package:immich_mobile/shared/providers/server_info.provider.dart';
import 'package:immich_mobile/shared/ui/immich_loading_indicator.dart';
// ignore: must_be_immutable
@@ -27,6 +28,8 @@ class SearchPage extends HookConsumerWidget {
final isSearchEnabled = ref.watch(searchPageStateProvider).isSearchEnabled;
final curatedLocation = ref.watch(getCuratedLocationProvider);
final curatedPeople = ref.watch(getCuratedPeopleProvider);
final isMapEnabled =
ref.watch(serverInfoProvider.select((v) => v.serverFeatures.map));
var isDarkTheme = Theme.of(context).brightness == Brightness.dark;
double imageSize = math.min(MediaQuery.of(context).size.width / 3, 150);
@@ -107,6 +110,7 @@ class SearchPage extends HookConsumerWidget {
loading: () => const Center(child: ImmichLoadingIndicator()),
error: (err, stack) => Center(child: Text('Error: $err')),
data: (locations) => CuratedPlacesRow(
isMapEnabled: isMapEnabled,
content: locations
.map(
(o) => CuratedContent(

View File

@@ -64,10 +64,10 @@ class TabNavigationObserver extends AutoRouterObserver {
}
Store.put(StoreKey.currentUser, User.fromDto(userResponseDto));
ref.read(serverInfoProvider.notifier).getServerVersion();
} catch (e) {
debugPrint("Error refreshing user info $e");
}
}
ref.watch(serverInfoProvider.notifier).getServerVersion();
}
}

View File

@@ -100,12 +100,6 @@ class Asset {
/// stores the raw SHA1 bytes as a base64 String
/// because Isar cannot sort lists of byte arrays
@Index(
unique: true,
replace: false,
type: IndexType.hash,
composite: [CompositeIndex("ownerId")],
)
String checksum;
@Index(unique: false, replace: false, type: IndexType.hash)
@@ -114,6 +108,11 @@ class Asset {
@Index(unique: false, replace: false, type: IndexType.hash)
String? localId;
@Index(
unique: true,
replace: false,
composite: [CompositeIndex("checksum", type: IndexType.hash)],
)
int ownerId;
DateTime fileCreatedAt;

View File

@@ -100,24 +100,6 @@ const AssetSchema = CollectionSchema(
deserializeProp: _assetDeserializeProp,
idName: r'id',
indexes: {
r'checksum_ownerId': IndexSchema(
id: 5611361749756160119,
name: r'checksum_ownerId',
unique: true,
replace: false,
properties: [
IndexPropertySchema(
name: r'checksum',
type: IndexType.hash,
caseSensitive: true,
),
IndexPropertySchema(
name: r'ownerId',
type: IndexType.value,
caseSensitive: false,
)
],
),
r'remoteId': IndexSchema(
id: 6301175856541681032,
name: r'remoteId',
@@ -143,6 +125,24 @@ const AssetSchema = CollectionSchema(
caseSensitive: true,
)
],
),
r'ownerId_checksum': IndexSchema(
id: -3295822444433175883,
name: r'ownerId_checksum',
unique: true,
replace: false,
properties: [
IndexPropertySchema(
name: r'ownerId',
type: IndexType.value,
caseSensitive: false,
),
IndexPropertySchema(
name: r'checksum',
type: IndexType.hash,
caseSensitive: true,
)
],
)
},
links: {},
@@ -302,89 +302,89 @@ void _assetAttach(IsarCollection<dynamic> col, Id id, Asset object) {
}
extension AssetByIndex on IsarCollection<Asset> {
Future<Asset?> getByChecksumOwnerId(String checksum, int ownerId) {
return getByIndex(r'checksum_ownerId', [checksum, ownerId]);
Future<Asset?> getByOwnerIdChecksum(int ownerId, String checksum) {
return getByIndex(r'ownerId_checksum', [ownerId, checksum]);
}
Asset? getByChecksumOwnerIdSync(String checksum, int ownerId) {
return getByIndexSync(r'checksum_ownerId', [checksum, ownerId]);
Asset? getByOwnerIdChecksumSync(int ownerId, String checksum) {
return getByIndexSync(r'ownerId_checksum', [ownerId, checksum]);
}
Future<bool> deleteByChecksumOwnerId(String checksum, int ownerId) {
return deleteByIndex(r'checksum_ownerId', [checksum, ownerId]);
Future<bool> deleteByOwnerIdChecksum(int ownerId, String checksum) {
return deleteByIndex(r'ownerId_checksum', [ownerId, checksum]);
}
bool deleteByChecksumOwnerIdSync(String checksum, int ownerId) {
return deleteByIndexSync(r'checksum_ownerId', [checksum, ownerId]);
bool deleteByOwnerIdChecksumSync(int ownerId, String checksum) {
return deleteByIndexSync(r'ownerId_checksum', [ownerId, checksum]);
}
Future<List<Asset?>> getAllByChecksumOwnerId(
List<String> checksumValues, List<int> ownerIdValues) {
final len = checksumValues.length;
assert(ownerIdValues.length == len,
Future<List<Asset?>> getAllByOwnerIdChecksum(
List<int> ownerIdValues, List<String> checksumValues) {
final len = ownerIdValues.length;
assert(checksumValues.length == len,
'All index values must have the same length');
final values = <List<dynamic>>[];
for (var i = 0; i < len; i++) {
values.add([checksumValues[i], ownerIdValues[i]]);
values.add([ownerIdValues[i], checksumValues[i]]);
}
return getAllByIndex(r'checksum_ownerId', values);
return getAllByIndex(r'ownerId_checksum', values);
}
List<Asset?> getAllByChecksumOwnerIdSync(
List<String> checksumValues, List<int> ownerIdValues) {
final len = checksumValues.length;
assert(ownerIdValues.length == len,
List<Asset?> getAllByOwnerIdChecksumSync(
List<int> ownerIdValues, List<String> checksumValues) {
final len = ownerIdValues.length;
assert(checksumValues.length == len,
'All index values must have the same length');
final values = <List<dynamic>>[];
for (var i = 0; i < len; i++) {
values.add([checksumValues[i], ownerIdValues[i]]);
values.add([ownerIdValues[i], checksumValues[i]]);
}
return getAllByIndexSync(r'checksum_ownerId', values);
return getAllByIndexSync(r'ownerId_checksum', values);
}
Future<int> deleteAllByChecksumOwnerId(
List<String> checksumValues, List<int> ownerIdValues) {
final len = checksumValues.length;
assert(ownerIdValues.length == len,
Future<int> deleteAllByOwnerIdChecksum(
List<int> ownerIdValues, List<String> checksumValues) {
final len = ownerIdValues.length;
assert(checksumValues.length == len,
'All index values must have the same length');
final values = <List<dynamic>>[];
for (var i = 0; i < len; i++) {
values.add([checksumValues[i], ownerIdValues[i]]);
values.add([ownerIdValues[i], checksumValues[i]]);
}
return deleteAllByIndex(r'checksum_ownerId', values);
return deleteAllByIndex(r'ownerId_checksum', values);
}
int deleteAllByChecksumOwnerIdSync(
List<String> checksumValues, List<int> ownerIdValues) {
final len = checksumValues.length;
assert(ownerIdValues.length == len,
int deleteAllByOwnerIdChecksumSync(
List<int> ownerIdValues, List<String> checksumValues) {
final len = ownerIdValues.length;
assert(checksumValues.length == len,
'All index values must have the same length');
final values = <List<dynamic>>[];
for (var i = 0; i < len; i++) {
values.add([checksumValues[i], ownerIdValues[i]]);
values.add([ownerIdValues[i], checksumValues[i]]);
}
return deleteAllByIndexSync(r'checksum_ownerId', values);
return deleteAllByIndexSync(r'ownerId_checksum', values);
}
Future<Id> putByChecksumOwnerId(Asset object) {
return putByIndex(r'checksum_ownerId', object);
Future<Id> putByOwnerIdChecksum(Asset object) {
return putByIndex(r'ownerId_checksum', object);
}
Id putByChecksumOwnerIdSync(Asset object, {bool saveLinks = true}) {
return putByIndexSync(r'checksum_ownerId', object, saveLinks: saveLinks);
Id putByOwnerIdChecksumSync(Asset object, {bool saveLinks = true}) {
return putByIndexSync(r'ownerId_checksum', object, saveLinks: saveLinks);
}
Future<List<Id>> putAllByChecksumOwnerId(List<Asset> objects) {
return putAllByIndex(r'checksum_ownerId', objects);
Future<List<Id>> putAllByOwnerIdChecksum(List<Asset> objects) {
return putAllByIndex(r'ownerId_checksum', objects);
}
List<Id> putAllByChecksumOwnerIdSync(List<Asset> objects,
List<Id> putAllByOwnerIdChecksumSync(List<Asset> objects,
{bool saveLinks = true}) {
return putAllByIndexSync(r'checksum_ownerId', objects,
return putAllByIndexSync(r'ownerId_checksum', objects,
saveLinks: saveLinks);
}
}
@@ -463,145 +463,6 @@ extension AssetQueryWhere on QueryBuilder<Asset, Asset, QWhereClause> {
});
}
QueryBuilder<Asset, Asset, QAfterWhereClause> checksumEqualToAnyOwnerId(
String checksum) {
return QueryBuilder.apply(this, (query) {
return query.addWhereClause(IndexWhereClause.equalTo(
indexName: r'checksum_ownerId',
value: [checksum],
));
});
}
QueryBuilder<Asset, Asset, QAfterWhereClause> checksumNotEqualToAnyOwnerId(
String checksum) {
return QueryBuilder.apply(this, (query) {
if (query.whereSort == Sort.asc) {
return query
.addWhereClause(IndexWhereClause.between(
indexName: r'checksum_ownerId',
lower: [],
upper: [checksum],
includeUpper: false,
))
.addWhereClause(IndexWhereClause.between(
indexName: r'checksum_ownerId',
lower: [checksum],
includeLower: false,
upper: [],
));
} else {
return query
.addWhereClause(IndexWhereClause.between(
indexName: r'checksum_ownerId',
lower: [checksum],
includeLower: false,
upper: [],
))
.addWhereClause(IndexWhereClause.between(
indexName: r'checksum_ownerId',
lower: [],
upper: [checksum],
includeUpper: false,
));
}
});
}
QueryBuilder<Asset, Asset, QAfterWhereClause> checksumOwnerIdEqualTo(
String checksum, int ownerId) {
return QueryBuilder.apply(this, (query) {
return query.addWhereClause(IndexWhereClause.equalTo(
indexName: r'checksum_ownerId',
value: [checksum, ownerId],
));
});
}
QueryBuilder<Asset, Asset, QAfterWhereClause>
checksumEqualToOwnerIdNotEqualTo(String checksum, int ownerId) {
return QueryBuilder.apply(this, (query) {
if (query.whereSort == Sort.asc) {
return query
.addWhereClause(IndexWhereClause.between(
indexName: r'checksum_ownerId',
lower: [checksum],
upper: [checksum, ownerId],
includeUpper: false,
))
.addWhereClause(IndexWhereClause.between(
indexName: r'checksum_ownerId',
lower: [checksum, ownerId],
includeLower: false,
upper: [checksum],
));
} else {
return query
.addWhereClause(IndexWhereClause.between(
indexName: r'checksum_ownerId',
lower: [checksum, ownerId],
includeLower: false,
upper: [checksum],
))
.addWhereClause(IndexWhereClause.between(
indexName: r'checksum_ownerId',
lower: [checksum],
upper: [checksum, ownerId],
includeUpper: false,
));
}
});
}
QueryBuilder<Asset, Asset, QAfterWhereClause>
checksumEqualToOwnerIdGreaterThan(
String checksum,
int ownerId, {
bool include = false,
}) {
return QueryBuilder.apply(this, (query) {
return query.addWhereClause(IndexWhereClause.between(
indexName: r'checksum_ownerId',
lower: [checksum, ownerId],
includeLower: include,
upper: [checksum],
));
});
}
QueryBuilder<Asset, Asset, QAfterWhereClause> checksumEqualToOwnerIdLessThan(
String checksum,
int ownerId, {
bool include = false,
}) {
return QueryBuilder.apply(this, (query) {
return query.addWhereClause(IndexWhereClause.between(
indexName: r'checksum_ownerId',
lower: [checksum],
upper: [checksum, ownerId],
includeUpper: include,
));
});
}
QueryBuilder<Asset, Asset, QAfterWhereClause> checksumEqualToOwnerIdBetween(
String checksum,
int lowerOwnerId,
int upperOwnerId, {
bool includeLower = true,
bool includeUpper = true,
}) {
return QueryBuilder.apply(this, (query) {
return query.addWhereClause(IndexWhereClause.between(
indexName: r'checksum_ownerId',
lower: [checksum, lowerOwnerId],
includeLower: includeLower,
upper: [checksum, upperOwnerId],
includeUpper: includeUpper,
));
});
}
QueryBuilder<Asset, Asset, QAfterWhereClause> remoteIdIsNull() {
return QueryBuilder.apply(this, (query) {
return query.addWhereClause(IndexWhereClause.equalTo(
@@ -731,6 +592,141 @@ extension AssetQueryWhere on QueryBuilder<Asset, Asset, QWhereClause> {
}
});
}
QueryBuilder<Asset, Asset, QAfterWhereClause> ownerIdEqualToAnyChecksum(
int ownerId) {
return QueryBuilder.apply(this, (query) {
return query.addWhereClause(IndexWhereClause.equalTo(
indexName: r'ownerId_checksum',
value: [ownerId],
));
});
}
QueryBuilder<Asset, Asset, QAfterWhereClause> ownerIdNotEqualToAnyChecksum(
int ownerId) {
return QueryBuilder.apply(this, (query) {
if (query.whereSort == Sort.asc) {
return query
.addWhereClause(IndexWhereClause.between(
indexName: r'ownerId_checksum',
lower: [],
upper: [ownerId],
includeUpper: false,
))
.addWhereClause(IndexWhereClause.between(
indexName: r'ownerId_checksum',
lower: [ownerId],
includeLower: false,
upper: [],
));
} else {
return query
.addWhereClause(IndexWhereClause.between(
indexName: r'ownerId_checksum',
lower: [ownerId],
includeLower: false,
upper: [],
))
.addWhereClause(IndexWhereClause.between(
indexName: r'ownerId_checksum',
lower: [],
upper: [ownerId],
includeUpper: false,
));
}
});
}
QueryBuilder<Asset, Asset, QAfterWhereClause> ownerIdGreaterThanAnyChecksum(
int ownerId, {
bool include = false,
}) {
return QueryBuilder.apply(this, (query) {
return query.addWhereClause(IndexWhereClause.between(
indexName: r'ownerId_checksum',
lower: [ownerId],
includeLower: include,
upper: [],
));
});
}
QueryBuilder<Asset, Asset, QAfterWhereClause> ownerIdLessThanAnyChecksum(
int ownerId, {
bool include = false,
}) {
return QueryBuilder.apply(this, (query) {
return query.addWhereClause(IndexWhereClause.between(
indexName: r'ownerId_checksum',
lower: [],
upper: [ownerId],
includeUpper: include,
));
});
}
QueryBuilder<Asset, Asset, QAfterWhereClause> ownerIdBetweenAnyChecksum(
int lowerOwnerId,
int upperOwnerId, {
bool includeLower = true,
bool includeUpper = true,
}) {
return QueryBuilder.apply(this, (query) {
return query.addWhereClause(IndexWhereClause.between(
indexName: r'ownerId_checksum',
lower: [lowerOwnerId],
includeLower: includeLower,
upper: [upperOwnerId],
includeUpper: includeUpper,
));
});
}
QueryBuilder<Asset, Asset, QAfterWhereClause> ownerIdChecksumEqualTo(
int ownerId, String checksum) {
return QueryBuilder.apply(this, (query) {
return query.addWhereClause(IndexWhereClause.equalTo(
indexName: r'ownerId_checksum',
value: [ownerId, checksum],
));
});
}
QueryBuilder<Asset, Asset, QAfterWhereClause>
ownerIdEqualToChecksumNotEqualTo(int ownerId, String checksum) {
return QueryBuilder.apply(this, (query) {
if (query.whereSort == Sort.asc) {
return query
.addWhereClause(IndexWhereClause.between(
indexName: r'ownerId_checksum',
lower: [ownerId],
upper: [ownerId, checksum],
includeUpper: false,
))
.addWhereClause(IndexWhereClause.between(
indexName: r'ownerId_checksum',
lower: [ownerId, checksum],
includeLower: false,
upper: [ownerId],
));
} else {
return query
.addWhereClause(IndexWhereClause.between(
indexName: r'ownerId_checksum',
lower: [ownerId, checksum],
includeLower: false,
upper: [ownerId],
))
.addWhereClause(IndexWhereClause.between(
indexName: r'ownerId_checksum',
lower: [ownerId],
upper: [ownerId, checksum],
includeUpper: false,
));
}
});
}
}
extension AssetQueryFilter on QueryBuilder<Asset, Asset, QFilterCondition> {

View File

@@ -2,22 +2,30 @@ import 'package:openapi/api.dart';
class ServerInfoState {
final ServerVersionResponseDto serverVersion;
final ServerFeaturesDto serverFeatures;
final ServerConfigDto serverConfig;
final bool isVersionMismatch;
final String versionMismatchErrorMessage;
ServerInfoState({
required this.serverVersion,
required this.serverFeatures,
required this.serverConfig,
required this.isVersionMismatch,
required this.versionMismatchErrorMessage,
});
ServerInfoState copyWith({
ServerVersionResponseDto? serverVersion,
ServerFeaturesDto? serverFeatures,
ServerConfigDto? serverConfig,
bool? isVersionMismatch,
String? versionMismatchErrorMessage,
}) {
return ServerInfoState(
serverVersion: serverVersion ?? this.serverVersion,
serverFeatures: serverFeatures ?? this.serverFeatures,
serverConfig: serverConfig ?? this.serverConfig,
isVersionMismatch: isVersionMismatch ?? this.isVersionMismatch,
versionMismatchErrorMessage:
versionMismatchErrorMessage ?? this.versionMismatchErrorMessage,
@@ -26,7 +34,7 @@ class ServerInfoState {
@override
String toString() {
return 'ServerInfoState( serverVersion: $serverVersion, isVersionMismatch: $isVersionMismatch, versionMismatchErrorMessage: $versionMismatchErrorMessage)';
return 'ServerInfoState( serverVersion: $serverVersion, serverFeatures: $serverFeatures, serverConfig: $serverConfig, isVersionMismatch: $isVersionMismatch, versionMismatchErrorMessage: $versionMismatchErrorMessage)';
}
@override
@@ -35,6 +43,8 @@ class ServerInfoState {
return other is ServerInfoState &&
other.serverVersion == serverVersion &&
other.serverFeatures == serverFeatures &&
other.serverConfig == serverConfig &&
other.isVersionMismatch == isVersionMismatch &&
other.versionMismatchErrorMessage == versionMismatchErrorMessage;
}
@@ -42,6 +52,8 @@ class ServerInfoState {
@override
int get hashCode {
return serverVersion.hashCode ^
serverFeatures.hashCode ^
serverConfig.hashCode ^
isVersionMismatch.hashCode ^
versionMismatchErrorMessage.hashCode;
}

View File

@@ -186,8 +186,9 @@ final assetsProvider =
final query = ref
.watch(dbProvider)
.assets
.where()
.ownerIdEqualToAnyChecksum(userId)
.filter()
.ownerIdEqualTo(userId)
.isArchivedEqualTo(false)
.sortByFileCreatedAtDesc();
final settings = ref.watch(appSettingsServiceProvider);

View File

@@ -15,6 +15,24 @@ class ServerInfoNotifier extends StateNotifier<ServerInfoState> {
patch_: 0,
minor: 0,
),
serverFeatures: ServerFeaturesDto(
clipEncode: true,
configFile: false,
facialRecognition: true,
map: true,
oauth: false,
oauthAutoLaunch: false,
passwordLogin: true,
search: true,
sidecar: true,
tagImage: true,
reverseGeocoding: true,
),
serverConfig: ServerConfigDto(
loginPageMessage: "",
mapTileUrl: "https://tile.openstreetmap.org/{z}/{x}/{y}.png",
oauthButtonText: "",
),
isVersionMismatch: false,
versionMismatchErrorMessage: "",
),
@@ -22,6 +40,12 @@ class ServerInfoNotifier extends StateNotifier<ServerInfoState> {
final ServerInfoService _serverInfoService;
getServerInfo() {
getServerVersion();
getServerFeatures();
getServerConfig();
}
getServerVersion() async {
ServerVersionResponseDto? serverVersion =
await _serverInfoService.getServerVersion();
@@ -66,6 +90,23 @@ class ServerInfoNotifier extends StateNotifier<ServerInfoState> {
);
}
getServerFeatures() async {
ServerFeaturesDto? serverFeatures =
await _serverInfoService.getServerFeatures();
if (serverFeatures == null) {
return;
}
state = state.copyWith(serverFeatures: serverFeatures);
}
getServerConfig() async {
ServerConfigDto? serverConfig = await _serverInfoService.getServerConfig();
if (serverConfig == null) {
return;
}
state = state.copyWith(serverConfig: serverConfig);
}
Map<String, int> _getDetailVersion(String version) {
List<String> detail = version.split(".");

View File

@@ -28,7 +28,25 @@ class ServerInfoService {
try {
return await _apiService.serverInfoApi.getServerVersion();
} catch (e) {
debugPrint("Error getting server info");
debugPrint("Error [getServerVersion] ${e.toString()}");
return null;
}
}
Future<ServerFeaturesDto?> getServerFeatures() async {
try {
return await _apiService.serverInfoApi.getServerFeatures();
} catch (e) {
debugPrint("Error [getServerFeatures] ${e.toString()}");
return null;
}
}
Future<ServerConfigDto?> getServerConfig() async {
try {
return await _apiService.serverInfoApi.getServerConfig();
} catch (e) {
debugPrint("Error [getServerConfig] ${e.toString()}");
return null;
}
}
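A minimal consumption sketch for the two new service methods (the helper below is hypothetical and not part of this change): both calls return `null` on failure, so a caller can bail out and keep its previous state.
```dart
// Hypothetical helper, assuming the ServerInfoService shown above is in scope.
Future<void> refreshServerInfo(ServerInfoService service) async {
  final features = await service.getServerFeatures();
  final config = await service.getServerConfig();
  if (features == null || config == null) {
    return; // keep the previously cached state
  }
  if (features.map) {
    // The map UI can be enabled using the tile URL from the public config.
    print('Map enabled, tiles served from ${config.mapTileUrl}');
  }
}
```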

View File

@@ -123,7 +123,7 @@ class SyncService {
/// Syncs a new asset to the db. Returns `true` if successful
Future<bool> _syncNewAssetToDb(Asset a) async {
final Asset? inDb =
await _db.assets.getByChecksumOwnerId(a.checksum, a.ownerId);
await _db.assets.getByOwnerIdChecksum(a.ownerId, a.checksum);
if (inDb != null) {
// unify local/remote assets by replacing the
// local-only asset in the DB with a local&remote asset
@@ -195,8 +195,8 @@ class SyncService {
return false;
}
final List<Asset> inDb = await _db.assets
.filter()
.ownerIdEqualTo(user.isarId)
.where()
.ownerIdEqualToAnyChecksum(user.isarId)
.sortByChecksum()
.findAll();
assert(inDb.isSorted(Asset.compareByChecksum), "inDb not sorted!");
@@ -638,9 +638,9 @@ class SyncService {
) async {
if (assets.isEmpty) return ([].cast<Asset>(), [].cast<Asset>());
final List<Asset?> inDb = await _db.assets.getAllByChecksumOwnerId(
assets.map((a) => a.checksum).toList(growable: false),
final List<Asset?> inDb = await _db.assets.getAllByOwnerIdChecksum(
assets.map((a) => a.ownerId).toInt64List(),
assets.map((a) => a.checksum).toList(growable: false),
);
assert(inDb.length == assets.length);
final List<Asset> existing = [], toUpsert = [];
@@ -683,9 +683,9 @@ class SyncService {
);
// give details on the errors
assets.sort(Asset.compareByOwnerChecksum);
final inDb = await _db.assets.getAllByChecksumOwnerId(
assets.map((e) => e.checksum).toList(growable: false),
final inDb = await _db.assets.getAllByOwnerIdChecksum(
assets.map((e) => e.ownerId).toInt64List(),
assets.map((e) => e.checksum).toList(growable: false),
);
for (int i = 0; i < assets.length; i++) {
final Asset a = assets[i];

View File

@@ -43,6 +43,7 @@ doc/CheckDuplicateAssetDto.md
doc/CheckDuplicateAssetResponseDto.md
doc/CheckExistingAssetsDto.md
doc/CheckExistingAssetsResponseDto.md
doc/CitiesFile.md
doc/ClassificationConfig.md
doc/Colorspace.md
doc/CreateAlbumDto.md
@@ -126,6 +127,7 @@ doc/SystemConfigMachineLearningDto.md
doc/SystemConfigMapDto.md
doc/SystemConfigOAuthDto.md
doc/SystemConfigPasswordLoginDto.md
doc/SystemConfigReverseGeocodingDto.md
doc/SystemConfigStorageTemplateDto.md
doc/SystemConfigTemplateStorageOptionDto.md
doc/SystemConfigThumbnailDto.md
@@ -207,6 +209,7 @@ lib/model/check_duplicate_asset_dto.dart
lib/model/check_duplicate_asset_response_dto.dart
lib/model/check_existing_assets_dto.dart
lib/model/check_existing_assets_response_dto.dart
lib/model/cities_file.dart
lib/model/classification_config.dart
lib/model/clip_config.dart
lib/model/clip_mode.dart
@@ -284,6 +287,7 @@ lib/model/system_config_machine_learning_dto.dart
lib/model/system_config_map_dto.dart
lib/model/system_config_o_auth_dto.dart
lib/model/system_config_password_login_dto.dart
lib/model/system_config_reverse_geocoding_dto.dart
lib/model/system_config_storage_template_dto.dart
lib/model/system_config_template_storage_option_dto.dart
lib/model/system_config_thumbnail_dto.dart
@@ -343,6 +347,7 @@ test/check_duplicate_asset_dto_test.dart
test/check_duplicate_asset_response_dto_test.dart
test/check_existing_assets_dto_test.dart
test/check_existing_assets_response_dto_test.dart
test/cities_file_test.dart
test/classification_config_test.dart
test/clip_config_test.dart
test/clip_mode_test.dart
@@ -429,6 +434,7 @@ test/system_config_machine_learning_dto_test.dart
test/system_config_map_dto_test.dart
test/system_config_o_auth_dto_test.dart
test/system_config_password_login_dto_test.dart
test/system_config_reverse_geocoding_dto_test.dart
test/system_config_storage_template_dto_test.dart
test/system_config_template_storage_option_dto_test.dart
test/system_config_thumbnail_dto_test.dart

View File

@@ -3,7 +3,7 @@ Immich API
This Dart package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project:
- API version: 1.79.0
- API version: 1.81.0
- Build package: org.openapitools.codegen.languages.DartClientCodegen
## Requirements
@@ -104,6 +104,7 @@ Class | Method | HTTP request | Description
*AssetApi* | [**getDownloadInfo**](doc//AssetApi.md#getdownloadinfo) | **POST** /asset/download/info |
*AssetApi* | [**getMapMarkers**](doc//AssetApi.md#getmapmarkers) | **GET** /asset/map-marker |
*AssetApi* | [**getMemoryLane**](doc//AssetApi.md#getmemorylane) | **GET** /asset/memory-lane |
*AssetApi* | [**getRandom**](doc//AssetApi.md#getrandom) | **GET** /asset/random |
*AssetApi* | [**getTimeBuckets**](doc//AssetApi.md#gettimebuckets) | **GET** /asset/time-buckets |
*AssetApi* | [**getUserAssetsByDeviceId**](doc//AssetApi.md#getuserassetsbydeviceid) | **GET** /asset/{deviceId} |
*AssetApi* | [**importFile**](doc//AssetApi.md#importfile) | **POST** /asset/import |
@@ -226,6 +227,7 @@ Class | Method | HTTP request | Description
- [CheckDuplicateAssetResponseDto](doc//CheckDuplicateAssetResponseDto.md)
- [CheckExistingAssetsDto](doc//CheckExistingAssetsDto.md)
- [CheckExistingAssetsResponseDto](doc//CheckExistingAssetsResponseDto.md)
- [CitiesFile](doc//CitiesFile.md)
- [ClassificationConfig](doc//ClassificationConfig.md)
- [Colorspace](doc//Colorspace.md)
- [CreateAlbumDto](doc//CreateAlbumDto.md)
@@ -300,6 +302,7 @@ Class | Method | HTTP request | Description
- [SystemConfigMapDto](doc//SystemConfigMapDto.md)
- [SystemConfigOAuthDto](doc//SystemConfigOAuthDto.md)
- [SystemConfigPasswordLoginDto](doc//SystemConfigPasswordLoginDto.md)
- [SystemConfigReverseGeocodingDto](doc//SystemConfigReverseGeocodingDto.md)
- [SystemConfigStorageTemplateDto](doc//SystemConfigStorageTemplateDto.md)
- [SystemConfigTemplateStorageOptionDto](doc//SystemConfigTemplateStorageOptionDto.md)
- [SystemConfigThumbnailDto](doc//SystemConfigThumbnailDto.md)

View File

@@ -12,6 +12,7 @@ Name | Type | Description | Notes
**clipEncoding** | [**JobStatusDto**](JobStatusDto.md) | |
**library_** | [**JobStatusDto**](JobStatusDto.md) | |
**metadataExtraction** | [**JobStatusDto**](JobStatusDto.md) | |
**migration** | [**JobStatusDto**](JobStatusDto.md) | |
**objectTagging** | [**JobStatusDto**](JobStatusDto.md) | |
**recognizeFaces** | [**JobStatusDto**](JobStatusDto.md) | |
**search** | [**JobStatusDto**](JobStatusDto.md) | |

View File

@@ -26,6 +26,7 @@ Method | HTTP request | Description
[**getDownloadInfo**](AssetApi.md#getdownloadinfo) | **POST** /asset/download/info |
[**getMapMarkers**](AssetApi.md#getmapmarkers) | **GET** /asset/map-marker |
[**getMemoryLane**](AssetApi.md#getmemorylane) | **GET** /asset/memory-lane |
[**getRandom**](AssetApi.md#getrandom) | **GET** /asset/random |
[**getTimeBuckets**](AssetApi.md#gettimebuckets) | **GET** /asset/time-buckets |
[**getUserAssetsByDeviceId**](AssetApi.md#getuserassetsbydeviceid) | **GET** /asset/{deviceId} |
[**importFile**](AssetApi.md#importfile) | **POST** /asset/import |
@@ -1014,6 +1015,61 @@ Name | Type | Description | Notes
[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
# **getRandom**
> List<AssetResponseDto> getRandom(count)
### Example
```dart
import 'package:openapi/api.dart';
// TODO Configure API key authorization: cookie
//defaultApiClient.getAuthentication<ApiKeyAuth>('cookie').apiKey = 'YOUR_API_KEY';
// uncomment below to setup prefix (e.g. Bearer) for API key, if needed
//defaultApiClient.getAuthentication<ApiKeyAuth>('cookie').apiKeyPrefix = 'Bearer';
// TODO Configure API key authorization: api_key
//defaultApiClient.getAuthentication<ApiKeyAuth>('api_key').apiKey = 'YOUR_API_KEY';
// uncomment below to setup prefix (e.g. Bearer) for API key, if needed
//defaultApiClient.getAuthentication<ApiKeyAuth>('api_key').apiKeyPrefix = 'Bearer';
// TODO Configure HTTP Bearer authorization: bearer
// Case 1. Use String Token
//defaultApiClient.getAuthentication<HttpBearerAuth>('bearer').setAccessToken('YOUR_ACCESS_TOKEN');
// Case 2. Use Function which generate token.
// String yourTokenGeneratorFunction() { ... }
//defaultApiClient.getAuthentication<HttpBearerAuth>('bearer').setAccessToken(yourTokenGeneratorFunction);
final api_instance = AssetApi();
final count = 8; // num | number of random assets to return; the server validates this as a positive integer
try {
final result = api_instance.getRandom(count);
print(result);
} catch (e) {
print('Exception when calling AssetApi->getRandom: $e\n');
}
```
### Parameters
Name | Type | Description | Notes
------------- | ------------- | ------------- | -------------
**count** | **num**| | [optional]
### Return type
[**List<AssetResponseDto>**](AssetResponseDto.md)
### Authorization
[cookie](../README.md#cookie), [api_key](../README.md#api_key), [bearer](../README.md#bearer)
### HTTP request headers
- **Content-Type**: Not defined
- **Accept**: application/json
[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
# **getTimeBuckets**
> List<TimeBucketResponseDto> getTimeBuckets(size, userId, albumId, personId, isArchived, isFavorite, key)

mobile/openapi/doc/CitiesFile.md (generated, new file)
View File

@@ -0,0 +1,14 @@
# openapi.model.CitiesFile
## Load the model package
```dart
import 'package:openapi/api.dart';
```
## Properties
Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
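The properties table is empty because `CitiesFile` is a plain string enum rather than an object. A short sketch of how the generated enum round-trips, based on the `cities_file.dart` class added later in this diff:
```dart
import 'package:openapi/api.dart';

void main() {
  // Decode a raw value coming from the server; unknown values decode to null.
  final file = CitiesFile.fromJson('cities500');
  print(file);           // cities500
  print(file?.toJson()); // cities500
  print(CitiesFile.fromJson('not-a-real-file')); // null (allowNull defaults to true)
}
```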

View File

@@ -15,6 +15,7 @@ Name | Type | Description | Notes
**oauth** | **bool** | |
**oauthAutoLaunch** | **bool** | |
**passwordLogin** | **bool** | |
**reverseGeocoding** | **bool** | |
**search** | **bool** | |
**sidecar** | **bool** | |
**tagImage** | **bool** | |
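An illustrative client-side check for the new `reverseGeocoding` flag (not part of this change; it assumes the generated default `ApiClient` is already pointed at a server and authenticated):
```dart
import 'package:openapi/api.dart';

Future<void> main() async {
  // Hypothetical check: only show city/state labels when the server
  // reports that reverse geocoding is enabled.
  final features = await ServerInfoApi().getServerFeatures();
  if (features?.reverseGeocoding == true) {
    print('Reverse geocoding is available.');
  }
}
```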

View File

@@ -14,6 +14,7 @@ Name | Type | Description | Notes
**map** | [**SystemConfigMapDto**](SystemConfigMapDto.md) | |
**oauth** | [**SystemConfigOAuthDto**](SystemConfigOAuthDto.md) | |
**passwordLogin** | [**SystemConfigPasswordLoginDto**](SystemConfigPasswordLoginDto.md) | |
**reverseGeocoding** | [**SystemConfigReverseGeocodingDto**](SystemConfigReverseGeocodingDto.md) | |
**storageTemplate** | [**SystemConfigStorageTemplateDto**](SystemConfigStorageTemplateDto.md) | |
**thumbnail** | [**SystemConfigThumbnailDto**](SystemConfigThumbnailDto.md) | |

View File

@@ -12,6 +12,7 @@ Name | Type | Description | Notes
**clipEncoding** | [**JobSettingsDto**](JobSettingsDto.md) | |
**library_** | [**JobSettingsDto**](JobSettingsDto.md) | |
**metadataExtraction** | [**JobSettingsDto**](JobSettingsDto.md) | |
**migration** | [**JobSettingsDto**](JobSettingsDto.md) | |
**objectTagging** | [**JobSettingsDto**](JobSettingsDto.md) | |
**recognizeFaces** | [**JobSettingsDto**](JobSettingsDto.md) | |
**search** | [**JobSettingsDto**](JobSettingsDto.md) | |

View File

@@ -0,0 +1,16 @@
# openapi.model.SystemConfigReverseGeocodingDto
## Load the model package
```dart
import 'package:openapi/api.dart';
```
## Properties
Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**citiesFileOverride** | [**CitiesFile**](CitiesFile.md) | |
**enabled** | **bool** | |
[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
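A small decoding sketch for this DTO; the map literal below is illustrative, not a real server response, and both keys are required by the generated model:
```dart
import 'package:openapi/api.dart';

void main() {
  final dto = SystemConfigReverseGeocodingDto.fromJson({
    'citiesFileOverride': 'cities500',
    'enabled': true,
  });
  print(dto?.enabled);            // true
  print(dto?.citiesFileOverride); // cities500
}
```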

View File

@@ -14,6 +14,7 @@ Name | Type | Description | Notes
**monthOptions** | **List<String>** | | [default to const []]
**presetOptions** | **List<String>** | | [default to const []]
**secondOptions** | **List<String>** | | [default to const []]
**weekOptions** | **List<String>** | | [default to const []]
**yearOptions** | **List<String>** | | [default to const []]
[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
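The new `weekOptions` list decodes exactly like the other option lists. A decoding sketch with placeholder values (the real option strings come from the server, and all eight lists are required keys):
```dart
import 'package:openapi/api.dart';

void main() {
  // Placeholder lists only; missing lists fall back to const [].
  final options = SystemConfigTemplateStorageOptionDto.fromJson({
    'yearOptions': ['y', 'yy'],
    'monthOptions': ['M', 'MM'],
    'weekOptions': ['W', 'WW'],
    'dayOptions': ['d', 'dd'],
    'hourOptions': ['h', 'hh'],
    'minuteOptions': ['m', 'mm'],
    'secondOptions': ['s', 'ss'],
    'presetOptions': <String>[],
  });
  print(options?.weekOptions); // [W, WW]
}
```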

View File

@@ -80,6 +80,7 @@ part 'model/check_duplicate_asset_dto.dart';
part 'model/check_duplicate_asset_response_dto.dart';
part 'model/check_existing_assets_dto.dart';
part 'model/check_existing_assets_response_dto.dart';
part 'model/cities_file.dart';
part 'model/classification_config.dart';
part 'model/colorspace.dart';
part 'model/create_album_dto.dart';
@@ -154,6 +155,7 @@ part 'model/system_config_machine_learning_dto.dart';
part 'model/system_config_map_dto.dart';
part 'model/system_config_o_auth_dto.dart';
part 'model/system_config_password_login_dto.dart';
part 'model/system_config_reverse_geocoding_dto.dart';
part 'model/system_config_storage_template_dto.dart';
part 'model/system_config_template_storage_option_dto.dart';
part 'model/system_config_thumbnail_dto.dart';

View File

@@ -1028,6 +1028,60 @@ class AssetApi {
return null;
}
/// Performs an HTTP 'GET /asset/random' operation and returns the [Response].
/// Parameters:
///
/// * [num] count:
Future<Response> getRandomWithHttpInfo({ num? count, }) async {
// ignore: prefer_const_declarations
final path = r'/asset/random';
// ignore: prefer_final_locals
Object? postBody;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
if (count != null) {
queryParams.addAll(_queryParams('', 'count', count));
}
const contentTypes = <String>[];
return apiClient.invokeAPI(
path,
'GET',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
/// Parameters:
///
/// * [num] count:
Future<List<AssetResponseDto>?> getRandom({ num? count, }) async {
final response = await getRandomWithHttpInfo( count: count, );
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
// When a remote server returns no body with a status of 204, we shall not decode it.
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
// FormatException when trying to decode an empty string.
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
final responseBody = await _decodeBodyBytes(response);
return (await apiClient.deserializeAsync(responseBody, 'List<AssetResponseDto>') as List)
.cast<AssetResponseDto>()
.toList();
}
return null;
}
/// Performs an HTTP 'GET /asset/time-buckets' operation and returns the [Response].
/// Parameters:
///

View File

@@ -251,6 +251,8 @@ class ApiClient {
return CheckExistingAssetsDto.fromJson(value);
case 'CheckExistingAssetsResponseDto':
return CheckExistingAssetsResponseDto.fromJson(value);
case 'CitiesFile':
return CitiesFileTypeTransformer().decode(value);
case 'ClassificationConfig':
return ClassificationConfig.fromJson(value);
case 'Colorspace':
@@ -399,6 +401,8 @@ class ApiClient {
return SystemConfigOAuthDto.fromJson(value);
case 'SystemConfigPasswordLoginDto':
return SystemConfigPasswordLoginDto.fromJson(value);
case 'SystemConfigReverseGeocodingDto':
return SystemConfigReverseGeocodingDto.fromJson(value);
case 'SystemConfigStorageTemplateDto':
return SystemConfigStorageTemplateDto.fromJson(value);
case 'SystemConfigTemplateStorageOptionDto':

View File

@@ -70,6 +70,9 @@ String parameterToString(dynamic value) {
if (value is CQMode) {
return CQModeTypeTransformer().encode(value).toString();
}
if (value is CitiesFile) {
return CitiesFileTypeTransformer().encode(value).toString();
}
if (value is Colorspace) {
return ColorspaceTypeTransformer().encode(value).toString();
}

View File

@@ -17,6 +17,7 @@ class AllJobStatusResponseDto {
required this.clipEncoding,
required this.library_,
required this.metadataExtraction,
required this.migration,
required this.objectTagging,
required this.recognizeFaces,
required this.search,
@@ -34,6 +35,8 @@ class AllJobStatusResponseDto {
JobStatusDto metadataExtraction;
JobStatusDto migration;
JobStatusDto objectTagging;
JobStatusDto recognizeFaces;
@@ -54,6 +57,7 @@ class AllJobStatusResponseDto {
other.clipEncoding == clipEncoding &&
other.library_ == library_ &&
other.metadataExtraction == metadataExtraction &&
other.migration == migration &&
other.objectTagging == objectTagging &&
other.recognizeFaces == recognizeFaces &&
other.search == search &&
@@ -69,6 +73,7 @@ class AllJobStatusResponseDto {
(clipEncoding.hashCode) +
(library_.hashCode) +
(metadataExtraction.hashCode) +
(migration.hashCode) +
(objectTagging.hashCode) +
(recognizeFaces.hashCode) +
(search.hashCode) +
@@ -78,7 +83,7 @@ class AllJobStatusResponseDto {
(videoConversion.hashCode);
@override
String toString() => 'AllJobStatusResponseDto[backgroundTask=$backgroundTask, clipEncoding=$clipEncoding, library_=$library_, metadataExtraction=$metadataExtraction, objectTagging=$objectTagging, recognizeFaces=$recognizeFaces, search=$search, sidecar=$sidecar, storageTemplateMigration=$storageTemplateMigration, thumbnailGeneration=$thumbnailGeneration, videoConversion=$videoConversion]';
String toString() => 'AllJobStatusResponseDto[backgroundTask=$backgroundTask, clipEncoding=$clipEncoding, library_=$library_, metadataExtraction=$metadataExtraction, migration=$migration, objectTagging=$objectTagging, recognizeFaces=$recognizeFaces, search=$search, sidecar=$sidecar, storageTemplateMigration=$storageTemplateMigration, thumbnailGeneration=$thumbnailGeneration, videoConversion=$videoConversion]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
@@ -86,6 +91,7 @@ class AllJobStatusResponseDto {
json[r'clipEncoding'] = this.clipEncoding;
json[r'library'] = this.library_;
json[r'metadataExtraction'] = this.metadataExtraction;
json[r'migration'] = this.migration;
json[r'objectTagging'] = this.objectTagging;
json[r'recognizeFaces'] = this.recognizeFaces;
json[r'search'] = this.search;
@@ -108,6 +114,7 @@ class AllJobStatusResponseDto {
clipEncoding: JobStatusDto.fromJson(json[r'clipEncoding'])!,
library_: JobStatusDto.fromJson(json[r'library'])!,
metadataExtraction: JobStatusDto.fromJson(json[r'metadataExtraction'])!,
migration: JobStatusDto.fromJson(json[r'migration'])!,
objectTagging: JobStatusDto.fromJson(json[r'objectTagging'])!,
recognizeFaces: JobStatusDto.fromJson(json[r'recognizeFaces'])!,
search: JobStatusDto.fromJson(json[r'search'])!,
@@ -166,6 +173,7 @@ class AllJobStatusResponseDto {
'clipEncoding',
'library',
'metadataExtraction',
'migration',
'objectTagging',
'recognizeFaces',
'search',

View File

@@ -0,0 +1,91 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.12
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class CitiesFile {
/// Instantiate a new enum with the provided [value].
const CitiesFile._(this.value);
/// The underlying value of this enum member.
final String value;
@override
String toString() => value;
String toJson() => value;
static const cities15000 = CitiesFile._(r'cities15000');
static const cities5000 = CitiesFile._(r'cities5000');
static const cities1000 = CitiesFile._(r'cities1000');
static const cities500 = CitiesFile._(r'cities500');
/// List of all possible values in this [enum][CitiesFile].
static const values = <CitiesFile>[
cities15000,
cities5000,
cities1000,
cities500,
];
static CitiesFile? fromJson(dynamic value) => CitiesFileTypeTransformer().decode(value);
static List<CitiesFile>? listFromJson(dynamic json, {bool growable = false,}) {
final result = <CitiesFile>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = CitiesFile.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
}
/// Transformation class that can [encode] an instance of [CitiesFile] to String,
/// and [decode] dynamic data back to [CitiesFile].
class CitiesFileTypeTransformer {
factory CitiesFileTypeTransformer() => _instance ??= const CitiesFileTypeTransformer._();
const CitiesFileTypeTransformer._();
String encode(CitiesFile data) => data.value;
/// Decodes a [dynamic value][data] to a CitiesFile.
///
/// If [allowNull] is true and the [dynamic value][data] cannot be decoded successfully,
/// then null is returned. However, if [allowNull] is false and the [dynamic value][data]
/// cannot be decoded successfully, then an [UnimplementedError] is thrown.
///
/// The [allowNull] is very handy when an API changes and a new enum value is added or removed,
/// and users are still using an old app with the old code.
CitiesFile? decode(dynamic data, {bool allowNull = true}) {
if (data != null) {
switch (data) {
case r'cities15000': return CitiesFile.cities15000;
case r'cities5000': return CitiesFile.cities5000;
case r'cities1000': return CitiesFile.cities1000;
case r'cities500': return CitiesFile.cities500;
default:
if (!allowNull) {
throw ArgumentError('Unknown enum value to decode: $data');
}
}
}
return null;
}
/// Singleton [CitiesFileTypeTransformer] instance.
static CitiesFileTypeTransformer? _instance;
}

View File

@@ -31,6 +31,7 @@ class JobName {
static const clipEncoding = JobName._(r'clipEncoding');
static const backgroundTask = JobName._(r'backgroundTask');
static const storageTemplateMigration = JobName._(r'storageTemplateMigration');
static const migration = JobName._(r'migration');
static const search = JobName._(r'search');
static const sidecar = JobName._(r'sidecar');
static const library_ = JobName._(r'library');
@@ -45,6 +46,7 @@ class JobName {
clipEncoding,
backgroundTask,
storageTemplateMigration,
migration,
search,
sidecar,
library_,
@@ -94,6 +96,7 @@ class JobNameTypeTransformer {
case r'clipEncoding': return JobName.clipEncoding;
case r'backgroundTask': return JobName.backgroundTask;
case r'storageTemplateMigration': return JobName.storageTemplateMigration;
case r'migration': return JobName.migration;
case r'search': return JobName.search;
case r'sidecar': return JobName.sidecar;
case r'library': return JobName.library_;

View File

@@ -20,6 +20,7 @@ class ServerFeaturesDto {
required this.oauth,
required this.oauthAutoLaunch,
required this.passwordLogin,
required this.reverseGeocoding,
required this.search,
required this.sidecar,
required this.tagImage,
@@ -39,6 +40,8 @@ class ServerFeaturesDto {
bool passwordLogin;
bool reverseGeocoding;
bool search;
bool sidecar;
@@ -54,6 +57,7 @@ class ServerFeaturesDto {
other.oauth == oauth &&
other.oauthAutoLaunch == oauthAutoLaunch &&
other.passwordLogin == passwordLogin &&
other.reverseGeocoding == reverseGeocoding &&
other.search == search &&
other.sidecar == sidecar &&
other.tagImage == tagImage;
@@ -68,12 +72,13 @@ class ServerFeaturesDto {
(oauth.hashCode) +
(oauthAutoLaunch.hashCode) +
(passwordLogin.hashCode) +
(reverseGeocoding.hashCode) +
(search.hashCode) +
(sidecar.hashCode) +
(tagImage.hashCode);
@override
String toString() => 'ServerFeaturesDto[clipEncode=$clipEncode, configFile=$configFile, facialRecognition=$facialRecognition, map=$map, oauth=$oauth, oauthAutoLaunch=$oauthAutoLaunch, passwordLogin=$passwordLogin, search=$search, sidecar=$sidecar, tagImage=$tagImage]';
String toString() => 'ServerFeaturesDto[clipEncode=$clipEncode, configFile=$configFile, facialRecognition=$facialRecognition, map=$map, oauth=$oauth, oauthAutoLaunch=$oauthAutoLaunch, passwordLogin=$passwordLogin, reverseGeocoding=$reverseGeocoding, search=$search, sidecar=$sidecar, tagImage=$tagImage]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
@@ -84,6 +89,7 @@ class ServerFeaturesDto {
json[r'oauth'] = this.oauth;
json[r'oauthAutoLaunch'] = this.oauthAutoLaunch;
json[r'passwordLogin'] = this.passwordLogin;
json[r'reverseGeocoding'] = this.reverseGeocoding;
json[r'search'] = this.search;
json[r'sidecar'] = this.sidecar;
json[r'tagImage'] = this.tagImage;
@@ -105,6 +111,7 @@ class ServerFeaturesDto {
oauth: mapValueOfType<bool>(json, r'oauth')!,
oauthAutoLaunch: mapValueOfType<bool>(json, r'oauthAutoLaunch')!,
passwordLogin: mapValueOfType<bool>(json, r'passwordLogin')!,
reverseGeocoding: mapValueOfType<bool>(json, r'reverseGeocoding')!,
search: mapValueOfType<bool>(json, r'search')!,
sidecar: mapValueOfType<bool>(json, r'sidecar')!,
tagImage: mapValueOfType<bool>(json, r'tagImage')!,
@@ -162,6 +169,7 @@ class ServerFeaturesDto {
'oauth',
'oauthAutoLaunch',
'passwordLogin',
'reverseGeocoding',
'search',
'sidecar',
'tagImage',

View File

@@ -19,6 +19,7 @@ class SystemConfigDto {
required this.map,
required this.oauth,
required this.passwordLogin,
required this.reverseGeocoding,
required this.storageTemplate,
required this.thumbnail,
});
@@ -35,6 +36,8 @@ class SystemConfigDto {
SystemConfigPasswordLoginDto passwordLogin;
SystemConfigReverseGeocodingDto reverseGeocoding;
SystemConfigStorageTemplateDto storageTemplate;
SystemConfigThumbnailDto thumbnail;
@@ -47,6 +50,7 @@ class SystemConfigDto {
other.map == map &&
other.oauth == oauth &&
other.passwordLogin == passwordLogin &&
other.reverseGeocoding == reverseGeocoding &&
other.storageTemplate == storageTemplate &&
other.thumbnail == thumbnail;
@@ -59,11 +63,12 @@ class SystemConfigDto {
(map.hashCode) +
(oauth.hashCode) +
(passwordLogin.hashCode) +
(reverseGeocoding.hashCode) +
(storageTemplate.hashCode) +
(thumbnail.hashCode);
@override
String toString() => 'SystemConfigDto[ffmpeg=$ffmpeg, job=$job, machineLearning=$machineLearning, map=$map, oauth=$oauth, passwordLogin=$passwordLogin, storageTemplate=$storageTemplate, thumbnail=$thumbnail]';
String toString() => 'SystemConfigDto[ffmpeg=$ffmpeg, job=$job, machineLearning=$machineLearning, map=$map, oauth=$oauth, passwordLogin=$passwordLogin, reverseGeocoding=$reverseGeocoding, storageTemplate=$storageTemplate, thumbnail=$thumbnail]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
@@ -73,6 +78,7 @@ class SystemConfigDto {
json[r'map'] = this.map;
json[r'oauth'] = this.oauth;
json[r'passwordLogin'] = this.passwordLogin;
json[r'reverseGeocoding'] = this.reverseGeocoding;
json[r'storageTemplate'] = this.storageTemplate;
json[r'thumbnail'] = this.thumbnail;
return json;
@@ -92,6 +98,7 @@ class SystemConfigDto {
map: SystemConfigMapDto.fromJson(json[r'map'])!,
oauth: SystemConfigOAuthDto.fromJson(json[r'oauth'])!,
passwordLogin: SystemConfigPasswordLoginDto.fromJson(json[r'passwordLogin'])!,
reverseGeocoding: SystemConfigReverseGeocodingDto.fromJson(json[r'reverseGeocoding'])!,
storageTemplate: SystemConfigStorageTemplateDto.fromJson(json[r'storageTemplate'])!,
thumbnail: SystemConfigThumbnailDto.fromJson(json[r'thumbnail'])!,
);
@@ -147,6 +154,7 @@ class SystemConfigDto {
'map',
'oauth',
'passwordLogin',
'reverseGeocoding',
'storageTemplate',
'thumbnail',
};

View File

@@ -17,6 +17,7 @@ class SystemConfigJobDto {
required this.clipEncoding,
required this.library_,
required this.metadataExtraction,
required this.migration,
required this.objectTagging,
required this.recognizeFaces,
required this.search,
@@ -34,6 +35,8 @@ class SystemConfigJobDto {
JobSettingsDto metadataExtraction;
JobSettingsDto migration;
JobSettingsDto objectTagging;
JobSettingsDto recognizeFaces;
@@ -54,6 +57,7 @@ class SystemConfigJobDto {
other.clipEncoding == clipEncoding &&
other.library_ == library_ &&
other.metadataExtraction == metadataExtraction &&
other.migration == migration &&
other.objectTagging == objectTagging &&
other.recognizeFaces == recognizeFaces &&
other.search == search &&
@@ -69,6 +73,7 @@ class SystemConfigJobDto {
(clipEncoding.hashCode) +
(library_.hashCode) +
(metadataExtraction.hashCode) +
(migration.hashCode) +
(objectTagging.hashCode) +
(recognizeFaces.hashCode) +
(search.hashCode) +
@@ -78,7 +83,7 @@ class SystemConfigJobDto {
(videoConversion.hashCode);
@override
String toString() => 'SystemConfigJobDto[backgroundTask=$backgroundTask, clipEncoding=$clipEncoding, library_=$library_, metadataExtraction=$metadataExtraction, objectTagging=$objectTagging, recognizeFaces=$recognizeFaces, search=$search, sidecar=$sidecar, storageTemplateMigration=$storageTemplateMigration, thumbnailGeneration=$thumbnailGeneration, videoConversion=$videoConversion]';
String toString() => 'SystemConfigJobDto[backgroundTask=$backgroundTask, clipEncoding=$clipEncoding, library_=$library_, metadataExtraction=$metadataExtraction, migration=$migration, objectTagging=$objectTagging, recognizeFaces=$recognizeFaces, search=$search, sidecar=$sidecar, storageTemplateMigration=$storageTemplateMigration, thumbnailGeneration=$thumbnailGeneration, videoConversion=$videoConversion]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
@@ -86,6 +91,7 @@ class SystemConfigJobDto {
json[r'clipEncoding'] = this.clipEncoding;
json[r'library'] = this.library_;
json[r'metadataExtraction'] = this.metadataExtraction;
json[r'migration'] = this.migration;
json[r'objectTagging'] = this.objectTagging;
json[r'recognizeFaces'] = this.recognizeFaces;
json[r'search'] = this.search;
@@ -108,6 +114,7 @@ class SystemConfigJobDto {
clipEncoding: JobSettingsDto.fromJson(json[r'clipEncoding'])!,
library_: JobSettingsDto.fromJson(json[r'library'])!,
metadataExtraction: JobSettingsDto.fromJson(json[r'metadataExtraction'])!,
migration: JobSettingsDto.fromJson(json[r'migration'])!,
objectTagging: JobSettingsDto.fromJson(json[r'objectTagging'])!,
recognizeFaces: JobSettingsDto.fromJson(json[r'recognizeFaces'])!,
search: JobSettingsDto.fromJson(json[r'search'])!,
@@ -166,6 +173,7 @@ class SystemConfigJobDto {
'clipEncoding',
'library',
'metadataExtraction',
'migration',
'objectTagging',
'recognizeFaces',
'search',

View File

@@ -0,0 +1,106 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.12
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class SystemConfigReverseGeocodingDto {
/// Returns a new [SystemConfigReverseGeocodingDto] instance.
SystemConfigReverseGeocodingDto({
required this.citiesFileOverride,
required this.enabled,
});
CitiesFile citiesFileOverride;
bool enabled;
@override
bool operator ==(Object other) => identical(this, other) || other is SystemConfigReverseGeocodingDto &&
other.citiesFileOverride == citiesFileOverride &&
other.enabled == enabled;
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(citiesFileOverride.hashCode) +
(enabled.hashCode);
@override
String toString() => 'SystemConfigReverseGeocodingDto[citiesFileOverride=$citiesFileOverride, enabled=$enabled]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'citiesFileOverride'] = this.citiesFileOverride;
json[r'enabled'] = this.enabled;
return json;
}
/// Returns a new [SystemConfigReverseGeocodingDto] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static SystemConfigReverseGeocodingDto? fromJson(dynamic value) {
if (value is Map) {
final json = value.cast<String, dynamic>();
return SystemConfigReverseGeocodingDto(
citiesFileOverride: CitiesFile.fromJson(json[r'citiesFileOverride'])!,
enabled: mapValueOfType<bool>(json, r'enabled')!,
);
}
return null;
}
static List<SystemConfigReverseGeocodingDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <SystemConfigReverseGeocodingDto>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = SystemConfigReverseGeocodingDto.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, SystemConfigReverseGeocodingDto> mapFromJson(dynamic json) {
final map = <String, SystemConfigReverseGeocodingDto>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = SystemConfigReverseGeocodingDto.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of SystemConfigReverseGeocodingDto-objects as value to a dart map
static Map<String, List<SystemConfigReverseGeocodingDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<SystemConfigReverseGeocodingDto>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = SystemConfigReverseGeocodingDto.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'citiesFileOverride',
'enabled',
};
}

View File

@@ -19,6 +19,7 @@ class SystemConfigTemplateStorageOptionDto {
this.monthOptions = const [],
this.presetOptions = const [],
this.secondOptions = const [],
this.weekOptions = const [],
this.yearOptions = const [],
});
@@ -34,6 +35,8 @@ class SystemConfigTemplateStorageOptionDto {
List<String> secondOptions;
List<String> weekOptions;
List<String> yearOptions;
@override
@@ -44,6 +47,7 @@ class SystemConfigTemplateStorageOptionDto {
other.monthOptions == monthOptions &&
other.presetOptions == presetOptions &&
other.secondOptions == secondOptions &&
other.weekOptions == weekOptions &&
other.yearOptions == yearOptions;
@override
@@ -55,10 +59,11 @@ class SystemConfigTemplateStorageOptionDto {
(monthOptions.hashCode) +
(presetOptions.hashCode) +
(secondOptions.hashCode) +
(weekOptions.hashCode) +
(yearOptions.hashCode);
@override
String toString() => 'SystemConfigTemplateStorageOptionDto[dayOptions=$dayOptions, hourOptions=$hourOptions, minuteOptions=$minuteOptions, monthOptions=$monthOptions, presetOptions=$presetOptions, secondOptions=$secondOptions, yearOptions=$yearOptions]';
String toString() => 'SystemConfigTemplateStorageOptionDto[dayOptions=$dayOptions, hourOptions=$hourOptions, minuteOptions=$minuteOptions, monthOptions=$monthOptions, presetOptions=$presetOptions, secondOptions=$secondOptions, weekOptions=$weekOptions, yearOptions=$yearOptions]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
@@ -68,6 +73,7 @@ class SystemConfigTemplateStorageOptionDto {
json[r'monthOptions'] = this.monthOptions;
json[r'presetOptions'] = this.presetOptions;
json[r'secondOptions'] = this.secondOptions;
json[r'weekOptions'] = this.weekOptions;
json[r'yearOptions'] = this.yearOptions;
return json;
}
@@ -98,6 +104,9 @@ class SystemConfigTemplateStorageOptionDto {
secondOptions: json[r'secondOptions'] is List
? (json[r'secondOptions'] as List).cast<String>()
: const [],
weekOptions: json[r'weekOptions'] is List
? (json[r'weekOptions'] as List).cast<String>()
: const [],
yearOptions: json[r'yearOptions'] is List
? (json[r'yearOptions'] as List).cast<String>()
: const [],
@@ -154,6 +163,7 @@ class SystemConfigTemplateStorageOptionDto {
'monthOptions',
'presetOptions',
'secondOptions',
'weekOptions',
'yearOptions',
};
}

View File

@@ -36,6 +36,11 @@ void main() {
// TODO
});
// JobStatusDto migration
test('to test the property `migration`', () async {
// TODO
});
// JobStatusDto objectTagging
test('to test the property `objectTagging`', () async {
// TODO

View File

@@ -112,6 +112,11 @@ void main() {
// TODO
});
//Future<List<AssetResponseDto>> getRandom({ num count }) async
test('test getRandom', () async {
// TODO
});
//Future<List<TimeBucketResponseDto>> getTimeBuckets(TimeBucketSize size, { String userId, String albumId, String personId, bool isArchived, bool isFavorite, String key }) async
test('test getTimeBuckets', () async {
// TODO

View File

@@ -0,0 +1,21 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.12
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
import 'package:openapi/api.dart';
import 'package:test/test.dart';
// tests for CitiesFile
void main() {
group('test CitiesFile', () {
});
}

View File

@@ -51,6 +51,11 @@ void main() {
// TODO
});
// bool reverseGeocoding
test('to test the property `reverseGeocoding`', () async {
// TODO
});
// bool search
test('to test the property `search`', () async {
// TODO

View File

@@ -46,6 +46,11 @@ void main() {
// TODO
});
// SystemConfigReverseGeocodingDto reverseGeocoding
test('to test the property `reverseGeocoding`', () async {
// TODO
});
// SystemConfigStorageTemplateDto storageTemplate
test('to test the property `storageTemplate`', () async {
// TODO

View File

@@ -36,6 +36,11 @@ void main() {
// TODO
});
// JobSettingsDto migration
test('to test the property `migration`', () async {
// TODO
});
// JobSettingsDto objectTagging
test('to test the property `objectTagging`', () async {
// TODO

View File

@@ -0,0 +1,32 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.12
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
import 'package:openapi/api.dart';
import 'package:test/test.dart';
// tests for SystemConfigReverseGeocodingDto
void main() {
// final instance = SystemConfigReverseGeocodingDto();
group('test SystemConfigReverseGeocodingDto', () {
// CitiesFile citiesFileOverride
test('to test the property `citiesFileOverride`', () async {
// TODO
});
// bool enabled
test('to test the property `enabled`', () async {
// TODO
});
});
}

View File

@@ -46,6 +46,11 @@ void main() {
// TODO
});
// List<String> weekOptions (default value: const [])
test('to test the property `weekOptions`', () async {
// TODO
});
// List<String> yearOptions (default value: const [])
test('to test the property `yearOptions`', () async {
// TODO

View File

@@ -2,7 +2,7 @@ name: immich_mobile
description: Immich - selfhosted backup media file on mobile phone
publish_to: "none"
version: 1.79.0+103
version: 1.81.0+105
isar_version: &isar_version 3.1.0+1
environment:

View File

@@ -1,4 +1,4 @@
FROM node:18-bookworm@sha256:c85dc4392f44f5de1d0d72dd20a088a542734445f99bed7aa8ac895c706d370d as builder
FROM node:20.8-bookworm as builder
WORKDIR /usr/src/app
@@ -29,7 +29,7 @@ FROM builder as prod
RUN npm run build
RUN npm prune --omit=dev --omit=optional
FROM node:18-bookworm-slim@sha256:a0cca98f2896135d4c0386922211c1f90f98f27a58b8f2c07850d0fbe1c2104e
FROM node:20.8-bookworm
ENV NODE_ENV=production

View File

@@ -1510,6 +1510,50 @@
]
}
},
"/asset/random": {
"get": {
"operationId": "getRandom",
"parameters": [
{
"name": "count",
"required": false,
"in": "query",
"schema": {
"type": "number"
}
}
],
"responses": {
"200": {
"content": {
"application/json": {
"schema": {
"items": {
"$ref": "#/components/schemas/AssetResponseDto"
},
"type": "array"
}
}
},
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"tags": [
"Asset"
]
}
},
"/asset/search": {
"post": {
"operationId": "searchAsset",
@@ -5055,7 +5099,7 @@
"info": {
"title": "Immich",
"description": "Immich API",
"version": "1.79.0",
"version": "1.81.0",
"contact": {}
},
"tags": [],
@@ -5299,6 +5343,9 @@
"metadataExtraction": {
"$ref": "#/components/schemas/JobStatusDto"
},
"migration": {
"$ref": "#/components/schemas/JobStatusDto"
},
"objectTagging": {
"$ref": "#/components/schemas/JobStatusDto"
},
@@ -5328,6 +5375,7 @@
"objectTagging",
"clipEncoding",
"storageTemplateMigration",
"migration",
"backgroundTask",
"search",
"recognizeFaces",
@@ -5866,6 +5914,15 @@
],
"type": "object"
},
"CitiesFile": {
"enum": [
"cities15000",
"cities5000",
"cities1000",
"cities500"
],
"type": "string"
},
"ClassificationConfig": {
"properties": {
"enabled": {
@@ -6491,6 +6548,7 @@
"clipEncoding",
"backgroundTask",
"storageTemplateMigration",
"migration",
"search",
"sidecar",
"library"
@@ -7180,6 +7238,9 @@
"passwordLogin": {
"type": "boolean"
},
"reverseGeocoding": {
"type": "boolean"
},
"search": {
"type": "boolean"
},
@@ -7195,6 +7256,7 @@
"configFile",
"facialRecognition",
"map",
"reverseGeocoding",
"oauth",
"oauthAutoLaunch",
"passwordLogin",
@@ -7541,6 +7603,9 @@
"passwordLogin": {
"$ref": "#/components/schemas/SystemConfigPasswordLoginDto"
},
"reverseGeocoding": {
"$ref": "#/components/schemas/SystemConfigReverseGeocodingDto"
},
"storageTemplate": {
"$ref": "#/components/schemas/SystemConfigStorageTemplateDto"
},
@@ -7554,6 +7619,7 @@
"map",
"oauth",
"passwordLogin",
"reverseGeocoding",
"storageTemplate",
"job",
"thumbnail"
@@ -7649,6 +7715,9 @@
"metadataExtraction": {
"$ref": "#/components/schemas/JobSettingsDto"
},
"migration": {
"$ref": "#/components/schemas/JobSettingsDto"
},
"objectTagging": {
"$ref": "#/components/schemas/JobSettingsDto"
},
@@ -7678,6 +7747,7 @@
"objectTagging",
"clipEncoding",
"storageTemplateMigration",
"migration",
"backgroundTask",
"search",
"recognizeFaces",
@@ -7790,6 +7860,21 @@
],
"type": "object"
},
"SystemConfigReverseGeocodingDto": {
"properties": {
"citiesFileOverride": {
"$ref": "#/components/schemas/CitiesFile"
},
"enabled": {
"type": "boolean"
}
},
"required": [
"citiesFileOverride",
"enabled"
],
"type": "object"
},
"SystemConfigStorageTemplateDto": {
"properties": {
"template": {
@@ -7839,6 +7924,12 @@
},
"type": "array"
},
"weekOptions": {
"items": {
"type": "string"
},
"type": "array"
},
"yearOptions": {
"items": {
"type": "string"
@@ -7849,6 +7940,7 @@
"required": [
"yearOptions",
"monthOptions",
"weekOptions",
"dayOptions",
"hourOptions",
"minuteOptions",

View File

@@ -1,12 +1,12 @@
{
"name": "immich",
"version": "1.79.0",
"version": "1.81.0",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "immich",
"version": "1.79.0",
"version": "1.81.0",
"license": "UNLICENSED",
"dependencies": {
"@babel/runtime": "^7.22.11",
@@ -28,8 +28,8 @@
"class-transformer": "^0.5.1",
"class-validator": "^0.14.0",
"cookie-parser": "^1.4.6",
"exiftool-vendored": "^23.0.0",
"exiftool-vendored.pl": "^12.62.0",
"exiftool-vendored": "~23.1.0",
"exiftool-vendored.pl": "12.67",
"fluent-ffmpeg": "^2.1.2",
"geo-tz": "^7.0.7",
"glob": "^10.3.3",
@@ -6825,9 +6825,9 @@
}
},
"node_modules/exiftool-vendored": {
"version": "23.0.0",
"resolved": "https://registry.npmjs.org/exiftool-vendored/-/exiftool-vendored-23.0.0.tgz",
"integrity": "sha512-QHAKZ+M6IqWl/b5UHdG6eF+uwqGZY5Jr9JcPeQB4s5H7vPyK8KvUWsnY1NbRo824OIOd4e6kn4tNhhWpYRNnCw==",
"version": "23.1.0",
"resolved": "https://registry.npmjs.org/exiftool-vendored/-/exiftool-vendored-23.1.0.tgz",
"integrity": "sha512-sZ1OUpvAWbUCCoidMMKDTTJ3hHE3mHxb4ihWKmta/eQYYMR54Mssp6+Nf7HoFvY//nX5YK2VCOGVexGGuhM8Bw==",
"dependencies": {
"@photostructure/tz-lookup": "^8.0.0",
"@types/luxon": "^3.3.2",
@@ -6836,23 +6836,23 @@
"luxon": "^3.4.3"
},
"optionalDependencies": {
"exiftool-vendored.exe": "12.65.0",
"exiftool-vendored.pl": "12.65.0"
"exiftool-vendored.exe": "12.67.0",
"exiftool-vendored.pl": "12.67.0"
}
},
"node_modules/exiftool-vendored.exe": {
"version": "12.65.0",
"resolved": "https://registry.npmjs.org/exiftool-vendored.exe/-/exiftool-vendored.exe-12.65.0.tgz",
"integrity": "sha512-VDTSW3/u5bdLlg516g1oTypq2Sxd3I2pWTzdd5EmDtSjmvvBCLyDlMpv4Gnz8dnlQTRsEqwIgv/TAtdWykwEBg==",
"version": "12.67.0",
"resolved": "https://registry.npmjs.org/exiftool-vendored.exe/-/exiftool-vendored.exe-12.67.0.tgz",
"integrity": "sha512-wzgMDoL/VWH34l38g22cVUn43mVFtTSVj0HRjfjR46+4fGwpSvSueeYbwLCZ5NvBAVINCS5Rz9Rl2DVmqoIjsw==",
"optional": true,
"os": [
"win32"
]
},
"node_modules/exiftool-vendored.pl": {
"version": "12.65.0",
"resolved": "https://registry.npmjs.org/exiftool-vendored.pl/-/exiftool-vendored.pl-12.65.0.tgz",
"integrity": "sha512-BpR+rwKLWqUAPbsW17fw+8FAmyijkMhjaLu3fWU2QX6rpBJnOfn+lQp4Txkq44avL1LDV+eQ8pbWXyimjkPw0Q==",
"version": "12.67.0",
"resolved": "https://registry.npmjs.org/exiftool-vendored.pl/-/exiftool-vendored.pl-12.67.0.tgz",
"integrity": "sha512-Jvjkv4Cad+Bnp/4PuLEhO2BSpKy0MBccmq8if/H8V2ykssZrpUh8DRwEJkONnsaNX7dqKfObbOFig3vwoDyXsA==",
"os": [
"!win32"
]
@@ -19191,29 +19191,29 @@
}
},
"exiftool-vendored": {
"version": "23.0.0",
"resolved": "https://registry.npmjs.org/exiftool-vendored/-/exiftool-vendored-23.0.0.tgz",
"integrity": "sha512-QHAKZ+M6IqWl/b5UHdG6eF+uwqGZY5Jr9JcPeQB4s5H7vPyK8KvUWsnY1NbRo824OIOd4e6kn4tNhhWpYRNnCw==",
"version": "23.1.0",
"resolved": "https://registry.npmjs.org/exiftool-vendored/-/exiftool-vendored-23.1.0.tgz",
"integrity": "sha512-sZ1OUpvAWbUCCoidMMKDTTJ3hHE3mHxb4ihWKmta/eQYYMR54Mssp6+Nf7HoFvY//nX5YK2VCOGVexGGuhM8Bw==",
"requires": {
"@photostructure/tz-lookup": "^8.0.0",
"@types/luxon": "^3.3.2",
"batch-cluster": "^12.1.0",
"exiftool-vendored.exe": "12.65.0",
"exiftool-vendored.pl": "12.65.0",
"exiftool-vendored.exe": "12.67.0",
"exiftool-vendored.pl": "12.67.0",
"he": "^1.2.0",
"luxon": "^3.4.3"
}
},
"exiftool-vendored.exe": {
"version": "12.65.0",
"resolved": "https://registry.npmjs.org/exiftool-vendored.exe/-/exiftool-vendored.exe-12.65.0.tgz",
"integrity": "sha512-VDTSW3/u5bdLlg516g1oTypq2Sxd3I2pWTzdd5EmDtSjmvvBCLyDlMpv4Gnz8dnlQTRsEqwIgv/TAtdWykwEBg==",
"version": "12.67.0",
"resolved": "https://registry.npmjs.org/exiftool-vendored.exe/-/exiftool-vendored.exe-12.67.0.tgz",
"integrity": "sha512-wzgMDoL/VWH34l38g22cVUn43mVFtTSVj0HRjfjR46+4fGwpSvSueeYbwLCZ5NvBAVINCS5Rz9Rl2DVmqoIjsw==",
"optional": true
},
"exiftool-vendored.pl": {
"version": "12.65.0",
"resolved": "https://registry.npmjs.org/exiftool-vendored.pl/-/exiftool-vendored.pl-12.65.0.tgz",
"integrity": "sha512-BpR+rwKLWqUAPbsW17fw+8FAmyijkMhjaLu3fWU2QX6rpBJnOfn+lQp4Txkq44avL1LDV+eQ8pbWXyimjkPw0Q=="
"version": "12.67.0",
"resolved": "https://registry.npmjs.org/exiftool-vendored.pl/-/exiftool-vendored.pl-12.67.0.tgz",
"integrity": "sha512-Jvjkv4Cad+Bnp/4PuLEhO2BSpKy0MBccmq8if/H8V2ykssZrpUh8DRwEJkONnsaNX7dqKfObbOFig3vwoDyXsA=="
},
"exit": {
"version": "0.1.2",

View File

@@ -1,6 +1,6 @@
{
"name": "immich",
"version": "1.79.0",
"version": "1.81.0",
"description": "",
"author": "",
"private": true,
@@ -58,11 +58,11 @@
"class-transformer": "^0.5.1",
"class-validator": "^0.14.0",
"cookie-parser": "^1.4.6",
"exiftool-vendored": "^23.0.0",
"exiftool-vendored.pl": "^12.62.0",
"exiftool-vendored": "~23.1.0",
"exiftool-vendored.pl": "12.67",
"fluent-ffmpeg": "^2.1.2",
"glob": "^10.3.3",
"geo-tz": "^7.0.7",
"glob": "^10.3.3",
"handlebars": "^4.7.8",
"i18n-iso-countries": "^7.6.0",
"immich": "^0.41.0",

View File

@@ -10,6 +10,7 @@ export enum Permission {
ASSET_SHARE = 'asset.share',
ASSET_VIEW = 'asset.view',
ASSET_DOWNLOAD = 'asset.download',
ASSET_UPLOAD = 'asset.upload',
// ALBUM_CREATE = 'album.create',
ALBUM_READ = 'album.read',
@@ -95,6 +96,9 @@ export class AccessCore {
case Permission.ASSET_DOWNLOAD:
return !!authUser.isAllowDownload && (await this.repository.asset.hasSharedLinkAccess(sharedLinkId, id));
case Permission.ASSET_UPLOAD:
return authUser.isAllowUpload;
case Permission.ASSET_SHARE:
// TODO: fix this to not use authUser.id for shared link access control
return this.repository.asset.hasOwnerAccess(authUser.id, id);
@@ -165,6 +169,9 @@ export class AccessCore {
(await this.repository.album.hasSharedAlbumAccess(authUser.id, id))
);
case Permission.ASSET_UPLOAD:
return this.repository.library.hasOwnerAccess(authUser.id, id);
case Permission.ALBUM_REMOVE_ASSET:
return this.repository.album.hasOwnerAccess(authUser.id, id);

View File

@@ -78,6 +78,7 @@ export interface IAssetRepository {
getByUserId(pagination: PaginationOptions, userId: string): Paginated<AssetEntity>;
getWithout(pagination: PaginationOptions, property: WithoutProperty): Paginated<AssetEntity>;
getWith(pagination: PaginationOptions, property: WithProperty, libraryId?: string): Paginated<AssetEntity>;
getRandom(userId: string, count: number): Promise<AssetEntity[]>;
getFirstAssetForAlbumId(albumId: string): Promise<AssetEntity | null>;
getLastUpdatedAssetForAlbumId(albumId: string): Promise<AssetEntity | null>;
getByLibraryId(libraryIds: string[]): Promise<AssetEntity[]>;

View File

@@ -96,6 +96,7 @@ const validImages = [
'.ori',
'.pef',
'.png',
'.psd',
'.raf',
'.raw',
'.rwl',

View File

@@ -57,7 +57,7 @@ export interface UploadFile {
export class AssetService {
private logger = new Logger(AssetService.name);
private access: AccessCore;
private storageCore = new StorageCore();
private storageCore: StorageCore;
constructor(
@Inject(IAccessRepository) accessRepository: IAccessRepository,
@@ -67,6 +67,7 @@ export class AssetService {
@Inject(IStorageRepository) private storageRepository: IStorageRepository,
) {
this.access = new AccessCore(accessRepository);
this.storageCore = new StorageCore(storageRepository);
}
canUploadFile({ authUser, fieldName, file }: UploadRequest): true {
@@ -284,6 +285,11 @@ export class AssetService {
return mapStats(stats);
}
async getRandom(authUser: AuthUserDto, count: number): Promise<AssetResponseDto[]> {
const assets = await this.assetRepository.getRandom(authUser.id, count);
return assets.map((a) => mapAsset(a));
}
async update(authUser: AuthUserDto, id: string, dto: UpdateAssetDto): Promise<AssetResponseDto> {
await this.access.requirePermission(authUser, Permission.ASSET_UPDATE, id);
@@ -300,6 +306,7 @@ export class AssetService {
async updateAll(authUser: AuthUserDto, dto: AssetBulkUpdateDto) {
const { ids, ...options } = dto;
await this.access.requirePermission(authUser, Permission.ASSET_UPDATE, ids);
await this.jobRepository.queue({ name: JobName.SEARCH_INDEX_ASSET, data: { ids } });
await this.assetRepository.updateAll(ids, options);
}

View File

@@ -1,4 +1,5 @@
import { IsBoolean, IsString } from 'class-validator';
import { Type } from 'class-transformer';
import { IsBoolean, IsInt, IsPositive, IsString } from 'class-validator';
import { Optional } from '../../domain.util';
import { BulkIdsDto } from '../response-dto';
@@ -25,3 +26,11 @@ export class UpdateAssetDto {
@IsString()
description?: string;
}
export class RandomAssetsDto {
@Optional()
@IsInt()
@IsPositive()
@Type(() => Number)
count?: number;
}

View File

@@ -31,6 +31,7 @@ describe('mimeTypes', () => {
{ mimetype: 'image/ori', extension: '.ori' },
{ mimetype: 'image/pef', extension: '.pef' },
{ mimetype: 'image/png', extension: '.png' },
{ mimetype: 'image/psd', extension: '.psd' },
{ mimetype: 'image/raf', extension: '.raf' },
{ mimetype: 'image/raw', extension: '.raw' },
{ mimetype: 'image/rwl', extension: '.rwl' },
@@ -40,6 +41,7 @@ describe('mimeTypes', () => {
{ mimetype: 'image/tiff', extension: '.tif' },
{ mimetype: 'image/tiff', extension: '.tiff' },
{ mimetype: 'image/webp', extension: '.webp' },
{ mimetype: 'image/vnd.adobe.photoshop', extension: '.psd' },
{ mimetype: 'image/x-adobe-dng', extension: '.dng' },
{ mimetype: 'image/x-arriflex-ari', extension: '.ari' },
{ mimetype: 'image/x-canon-cr2', extension: '.cr2' },

View File

@@ -53,6 +53,7 @@ const image: Record<string, string[]> = {
'.ori': ['image/ori', 'image/x-olympus-ori'],
'.pef': ['image/pef', 'image/x-pentax-pef'],
'.png': ['image/png'],
'.psd': ['image/psd', 'image/vnd.adobe.photoshop'],
'.raf': ['image/raf', 'image/x-fuji-raf'],
'.raw': ['image/raw', 'image/x-panasonic-raw'],
'.rwl': ['image/rwl', 'image/x-leica-rwl'],

View File

@@ -4,7 +4,6 @@ import { APIKeyService } from './api-key';
import { AssetService } from './asset';
import { AuditService } from './audit';
import { AuthService } from './auth';
import { FacialRecognitionService } from './facial-recognition';
import { JobService } from './job';
import { LibraryService } from './library';
import { MediaService } from './media';
@@ -27,7 +26,6 @@ const providers: Provider[] = [
AssetService,
AuditService,
AuthService,
FacialRecognitionService,
JobService,
MediaService,
MetadataService,

View File

@@ -1,14 +0,0 @@
import { AssetFaceEntity } from '@app/infra/entities';
export const IFaceRepository = 'IFaceRepository';
export interface AssetFaceId {
assetId: string;
personId: string;
}
export interface IFaceRepository {
getAll(): Promise<AssetFaceEntity[]>;
getByIds(ids: AssetFaceId[]): Promise<AssetFaceEntity[]>;
create(entity: Partial<AssetFaceEntity>): Promise<AssetFaceEntity>;
}

View File

@@ -1,368 +0,0 @@
import { Colorspace, SystemConfigKey } from '@app/infra/entities';
import {
assetStub,
faceStub,
newAssetRepositoryMock,
newFaceRepositoryMock,
newJobRepositoryMock,
newMachineLearningRepositoryMock,
newMediaRepositoryMock,
newPersonRepositoryMock,
newSearchRepositoryMock,
newStorageRepositoryMock,
newSystemConfigRepositoryMock,
personStub,
} from '@test';
import { IAssetRepository, WithoutProperty } from '../asset';
import { IJobRepository, JobName } from '../job';
import { IMediaRepository } from '../media';
import { IPersonRepository } from '../person';
import { ISearchRepository } from '../search';
import { IMachineLearningRepository } from '../smart-info';
import { IStorageRepository } from '../storage';
import { ISystemConfigRepository } from '../system-config';
import { IFaceRepository } from './face.repository';
import { FacialRecognitionService } from './facial-recognition.services';
const croppedFace = Buffer.from('Cropped Face');
const face = {
start: {
assetId: 'asset-1',
personId: 'person-1',
boundingBox: {
x1: 5,
y1: 5,
x2: 505,
y2: 505,
},
imageHeight: 1000,
imageWidth: 1000,
},
middle: {
assetId: 'asset-1',
personId: 'person-1',
boundingBox: {
x1: 100,
y1: 100,
x2: 200,
y2: 200,
},
imageHeight: 500,
imageWidth: 400,
embedding: [1, 2, 3, 4],
score: 0.2,
},
end: {
assetId: 'asset-1',
personId: 'person-1',
boundingBox: {
x1: 300,
y1: 300,
x2: 495,
y2: 495,
},
imageHeight: 500,
imageWidth: 500,
},
};
const faceSearch = {
noMatch: {
total: 0,
count: 0,
page: 1,
items: [],
distances: [],
facets: [],
},
oneMatch: {
total: 1,
count: 1,
page: 1,
items: [faceStub.face1],
distances: [0.1],
facets: [],
},
oneRemoteMatch: {
total: 1,
count: 1,
page: 1,
items: [faceStub.face1],
distances: [0.8],
facets: [],
},
};
describe(FacialRecognitionService.name, () => {
let sut: FacialRecognitionService;
let assetMock: jest.Mocked<IAssetRepository>;
let configMock: jest.Mocked<ISystemConfigRepository>;
let faceMock: jest.Mocked<IFaceRepository>;
let jobMock: jest.Mocked<IJobRepository>;
let machineLearningMock: jest.Mocked<IMachineLearningRepository>;
let mediaMock: jest.Mocked<IMediaRepository>;
let personMock: jest.Mocked<IPersonRepository>;
let searchMock: jest.Mocked<ISearchRepository>;
let storageMock: jest.Mocked<IStorageRepository>;
beforeEach(async () => {
assetMock = newAssetRepositoryMock();
configMock = newSystemConfigRepositoryMock();
faceMock = newFaceRepositoryMock();
jobMock = newJobRepositoryMock();
machineLearningMock = newMachineLearningRepositoryMock();
mediaMock = newMediaRepositoryMock();
personMock = newPersonRepositoryMock();
searchMock = newSearchRepositoryMock();
storageMock = newStorageRepositoryMock();
mediaMock.crop.mockResolvedValue(croppedFace);
sut = new FacialRecognitionService(
assetMock,
configMock,
faceMock,
jobMock,
machineLearningMock,
mediaMock,
personMock,
searchMock,
storageMock,
);
});
it('should be defined', () => {
expect(sut).toBeDefined();
});
describe('handleQueueRecognizeFaces', () => {
it('should return if machine learning is disabled', async () => {
configMock.load.mockResolvedValue([{ key: SystemConfigKey.MACHINE_LEARNING_ENABLED, value: false }]);
await expect(sut.handleQueueRecognizeFaces({})).resolves.toBe(true);
expect(jobMock.queue).not.toHaveBeenCalled();
expect(configMock.load).toHaveBeenCalled();
});
it('should queue missing assets', async () => {
assetMock.getWithout.mockResolvedValue({
items: [assetStub.image],
hasNextPage: false,
});
await sut.handleQueueRecognizeFaces({});
expect(assetMock.getWithout).toHaveBeenCalledWith({ skip: 0, take: 1000 }, WithoutProperty.FACES);
expect(jobMock.queue).toHaveBeenCalledWith({
name: JobName.RECOGNIZE_FACES,
data: { id: assetStub.image.id },
});
});
it('should queue all assets', async () => {
assetMock.getAll.mockResolvedValue({
items: [assetStub.image],
hasNextPage: false,
});
personMock.deleteAll.mockResolvedValue(5);
searchMock.deleteAllFaces.mockResolvedValue(100);
await sut.handleQueueRecognizeFaces({ force: true });
expect(assetMock.getAll).toHaveBeenCalled();
expect(jobMock.queue).toHaveBeenCalledWith({
name: JobName.RECOGNIZE_FACES,
data: { id: assetStub.image.id },
});
});
});
describe('handleRecognizeFaces', () => {
it('should return if machine learning is disabled', async () => {
configMock.load.mockResolvedValue([{ key: SystemConfigKey.MACHINE_LEARNING_ENABLED, value: false }]);
await expect(sut.handleRecognizeFaces({ id: 'foo' })).resolves.toBe(true);
expect(assetMock.getByIds).not.toHaveBeenCalled();
expect(configMock.load).toHaveBeenCalled();
});
it('should skip when no resize path', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.noResizePath]);
await sut.handleRecognizeFaces({ id: assetStub.noResizePath.id });
expect(machineLearningMock.detectFaces).not.toHaveBeenCalled();
});
it('should handle no results', async () => {
machineLearningMock.detectFaces.mockResolvedValue([]);
assetMock.getByIds.mockResolvedValue([assetStub.image]);
await sut.handleRecognizeFaces({ id: assetStub.image.id });
expect(machineLearningMock.detectFaces).toHaveBeenCalledWith(
'http://immich-machine-learning:3003',
{
imagePath: assetStub.image.resizePath,
},
{
enabled: true,
maxDistance: 0.6,
minScore: 0.7,
minFaces: 1,
modelName: 'buffalo_l',
},
);
expect(faceMock.create).not.toHaveBeenCalled();
expect(jobMock.queue).not.toHaveBeenCalled();
});
it('should match existing people', async () => {
machineLearningMock.detectFaces.mockResolvedValue([face.middle]);
searchMock.searchFaces.mockResolvedValue(faceSearch.oneMatch);
assetMock.getByIds.mockResolvedValue([assetStub.image]);
await sut.handleRecognizeFaces({ id: assetStub.image.id });
expect(faceMock.create).toHaveBeenCalledWith({
personId: 'person-1',
assetId: 'asset-id',
embedding: [1, 2, 3, 4],
boundingBoxX1: 100,
boundingBoxY1: 100,
boundingBoxX2: 200,
boundingBoxY2: 200,
imageHeight: 500,
imageWidth: 400,
});
});
it('should create a new person', async () => {
machineLearningMock.detectFaces.mockResolvedValue([face.middle]);
searchMock.searchFaces.mockResolvedValue(faceSearch.oneRemoteMatch);
personMock.create.mockResolvedValue(personStub.noName);
assetMock.getByIds.mockResolvedValue([assetStub.image]);
await sut.handleRecognizeFaces({ id: assetStub.image.id });
expect(personMock.create).toHaveBeenCalledWith({ ownerId: assetStub.image.ownerId });
expect(faceMock.create).toHaveBeenCalledWith({
personId: 'person-1',
assetId: 'asset-id',
embedding: [1, 2, 3, 4],
boundingBoxX1: 100,
boundingBoxY1: 100,
boundingBoxX2: 200,
boundingBoxY2: 200,
imageHeight: 500,
imageWidth: 400,
});
expect(jobMock.queue.mock.calls).toEqual([
[
{
name: JobName.GENERATE_FACE_THUMBNAIL,
data: {
assetId: 'asset-1',
personId: 'person-1',
boundingBox: {
x1: 100,
y1: 100,
x2: 200,
y2: 200,
},
imageHeight: 500,
imageWidth: 400,
score: 0.2,
},
},
],
[{ name: JobName.SEARCH_INDEX_FACE, data: { personId: 'person-1', assetId: 'asset-id' } }],
]);
});
});
describe('handleGenerateFaceThumbnail', () => {
it('should return if machine learning is disabled', async () => {
configMock.load.mockResolvedValue([{ key: SystemConfigKey.MACHINE_LEARNING_ENABLED, value: false }]);
await expect(sut.handleGenerateFaceThumbnail(face.middle)).resolves.toBe(true);
expect(assetMock.getByIds).not.toHaveBeenCalled();
expect(configMock.load).toHaveBeenCalled();
});
it('should skip an asset not found', async () => {
assetMock.getByIds.mockResolvedValue([]);
await sut.handleGenerateFaceThumbnail(face.middle);
expect(mediaMock.crop).not.toHaveBeenCalled();
});
it('should skip an asset without a thumbnail', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.noResizePath]);
await sut.handleGenerateFaceThumbnail(face.middle);
expect(mediaMock.crop).not.toHaveBeenCalled();
});
it('should generate a thumbnail', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
await sut.handleGenerateFaceThumbnail(face.middle);
expect(assetMock.getByIds).toHaveBeenCalledWith(['asset-1']);
expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/thumbs/user-id');
expect(mediaMock.crop).toHaveBeenCalledWith('/uploads/user-id/thumbs/path.jpg', {
left: 95,
top: 95,
width: 110,
height: 110,
});
expect(mediaMock.resize).toHaveBeenCalledWith(croppedFace, 'upload/thumbs/user-id/person-1.jpeg', {
format: 'jpeg',
size: 250,
quality: 80,
colorspace: Colorspace.P3,
});
expect(personMock.update).toHaveBeenCalledWith({
faceAssetId: 'asset-1',
id: 'person-1',
thumbnailPath: 'upload/thumbs/user-id/person-1.jpeg',
});
});
it('should generate a thumbnail without going negative', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
await sut.handleGenerateFaceThumbnail(face.start);
expect(mediaMock.crop).toHaveBeenCalledWith('/uploads/user-id/thumbs/path.jpg', {
left: 0,
top: 0,
width: 510,
height: 510,
});
expect(mediaMock.resize).toHaveBeenCalledWith(croppedFace, 'upload/thumbs/user-id/person-1.jpeg', {
format: 'jpeg',
size: 250,
quality: 80,
colorspace: Colorspace.P3,
});
});
it('should generate a thumbnail without overflowing', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
await sut.handleGenerateFaceThumbnail(face.end);
expect(mediaMock.crop).toHaveBeenCalledWith('/uploads/user-id/thumbs/path.jpg', {
left: 297,
top: 297,
width: 202,
height: 202,
});
expect(mediaMock.resize).toHaveBeenCalledWith(croppedFace, 'upload/thumbs/user-id/person-1.jpeg', {
format: 'jpeg',
size: 250,
quality: 80,
colorspace: Colorspace.P3,
});
});
});
});

View File

@@ -1,178 +0,0 @@
import { Inject, Logger } from '@nestjs/common';
import { join } from 'path';
import { IAssetRepository, WithoutProperty } from '../asset';
import { usePagination } from '../domain.util';
import { IBaseJob, IEntityJob, IFaceThumbnailJob, IJobRepository, JOBS_ASSET_PAGINATION_SIZE, JobName } from '../job';
import { CropOptions, FACE_THUMBNAIL_SIZE, IMediaRepository } from '../media';
import { IPersonRepository } from '../person/person.repository';
import { ISearchRepository } from '../search/search.repository';
import { IMachineLearningRepository } from '../smart-info';
import { IStorageRepository, StorageCore, StorageFolder } from '../storage';
import { ISystemConfigRepository, SystemConfigCore } from '../system-config';
import { AssetFaceId, IFaceRepository } from './face.repository';
export class FacialRecognitionService {
private logger = new Logger(FacialRecognitionService.name);
private storageCore = new StorageCore();
private configCore: SystemConfigCore;
constructor(
@Inject(IAssetRepository) private assetRepository: IAssetRepository,
@Inject(ISystemConfigRepository) configRepository: ISystemConfigRepository,
@Inject(IFaceRepository) private faceRepository: IFaceRepository,
@Inject(IJobRepository) private jobRepository: IJobRepository,
@Inject(IMachineLearningRepository) private machineLearning: IMachineLearningRepository,
@Inject(IMediaRepository) private mediaRepository: IMediaRepository,
@Inject(IPersonRepository) private personRepository: IPersonRepository,
@Inject(ISearchRepository) private searchRepository: ISearchRepository,
@Inject(IStorageRepository) private storageRepository: IStorageRepository,
) {
this.configCore = new SystemConfigCore(configRepository);
}
async handleQueueRecognizeFaces({ force }: IBaseJob) {
const { machineLearning } = await this.configCore.getConfig();
if (!machineLearning.enabled || !machineLearning.facialRecognition.enabled) {
return true;
}
const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => {
return force
? this.assetRepository.getAll(pagination, { order: 'DESC' })
: this.assetRepository.getWithout(pagination, WithoutProperty.FACES);
});
if (force) {
const people = await this.personRepository.deleteAll();
const faces = await this.searchRepository.deleteAllFaces();
this.logger.debug(`Deleted ${people} people and ${faces} faces`);
}
for await (const assets of assetPagination) {
for (const asset of assets) {
await this.jobRepository.queue({ name: JobName.RECOGNIZE_FACES, data: { id: asset.id } });
}
}
return true;
}
async handleRecognizeFaces({ id }: IEntityJob) {
const { machineLearning } = await this.configCore.getConfig();
if (!machineLearning.enabled || !machineLearning.facialRecognition.enabled) {
return true;
}
const [asset] = await this.assetRepository.getByIds([id]);
if (!asset || !asset.resizePath) {
return false;
}
const faces = await this.machineLearning.detectFaces(
machineLearning.url,
{ imagePath: asset.resizePath },
machineLearning.facialRecognition,
);
this.logger.debug(`${faces.length} faces detected in ${asset.resizePath}`);
this.logger.verbose(faces.map((face) => ({ ...face, embedding: `float[${face.embedding.length}]` })));
for (const { embedding, ...rest } of faces) {
const faceSearchResult = await this.searchRepository.searchFaces(embedding, { ownerId: asset.ownerId });
let personId: string | null = null;
// try to find a matching face and link to the associated person
// The closer to 0, the better the match. Range is from 0 to 2
if (faceSearchResult.total && faceSearchResult.distances[0] <= machineLearning.facialRecognition.maxDistance) {
this.logger.verbose(`Match face with distance ${faceSearchResult.distances[0]}`);
personId = faceSearchResult.items[0].personId;
}
if (!personId) {
this.logger.debug('No matches, creating a new person.');
const person = await this.personRepository.create({ ownerId: asset.ownerId });
personId = person.id;
await this.jobRepository.queue({
name: JobName.GENERATE_FACE_THUMBNAIL,
data: { assetId: asset.id, personId, ...rest },
});
}
const faceId: AssetFaceId = { assetId: asset.id, personId };
await this.faceRepository.create({
...faceId,
embedding,
imageHeight: rest.imageHeight,
imageWidth: rest.imageWidth,
boundingBoxX1: rest.boundingBox.x1,
boundingBoxX2: rest.boundingBox.x2,
boundingBoxY1: rest.boundingBox.y1,
boundingBoxY2: rest.boundingBox.y2,
});
await this.jobRepository.queue({ name: JobName.SEARCH_INDEX_FACE, data: faceId });
}
return true;
}
async handleGenerateFaceThumbnail(data: IFaceThumbnailJob) {
const { machineLearning } = await this.configCore.getConfig();
if (!machineLearning.enabled || !machineLearning.facialRecognition.enabled) {
return true;
}
const { assetId, personId, boundingBox, imageWidth, imageHeight } = data;
const [asset] = await this.assetRepository.getByIds([assetId]);
if (!asset || !asset.resizePath) {
return false;
}
this.logger.verbose(`Cropping face for person: ${personId}`);
const outputFolder = this.storageCore.getFolderLocation(StorageFolder.THUMBNAILS, asset.ownerId);
const output = join(outputFolder, `${personId}.jpeg`);
this.storageRepository.mkdirSync(outputFolder);
const { x1, y1, x2, y2 } = boundingBox;
const halfWidth = (x2 - x1) / 2;
const halfHeight = (y2 - y1) / 2;
const middleX = Math.round(x1 + halfWidth);
const middleY = Math.round(y1 + halfHeight);
// zoom out 10%
const targetHalfSize = Math.floor(Math.max(halfWidth, halfHeight) * 1.1);
// get the longest distance from the center of the image without overflowing
const newHalfSize = Math.min(
middleX - Math.max(0, middleX - targetHalfSize),
middleY - Math.max(0, middleY - targetHalfSize),
Math.min(imageWidth - 1, middleX + targetHalfSize) - middleX,
Math.min(imageHeight - 1, middleY + targetHalfSize) - middleY,
);
const cropOptions: CropOptions = {
left: middleX - newHalfSize,
top: middleY - newHalfSize,
width: newHalfSize * 2,
height: newHalfSize * 2,
};
const { thumbnail } = await this.configCore.getConfig();
const croppedOutput = await this.mediaRepository.crop(asset.resizePath, cropOptions);
const thumbnailOptions = {
format: 'jpeg',
size: FACE_THUMBNAIL_SIZE,
colorspace: thumbnail.colorspace,
quality: thumbnail.quality,
} as const;
await this.mediaRepository.resize(croppedOutput, output, thumbnailOptions);
await this.personRepository.update({ id: personId, thumbnailPath: output, faceAssetId: data.assetId });
return true;
}
}
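// Worked example (illustrative only) of the crop math in handleGenerateFaceThumbnail,
// using the face.middle stub from the spec above: a 100..200 bounding box inside a
// 400x500 resized image. The numbers reproduce the { left: 95, top: 95, width: 110,
// height: 110 } expectation asserted in the "should generate a thumbnail" test.
const exampleBox = { x1: 100, y1: 100, x2: 200, y2: 200 };
const exampleImageWidth = 400;
const exampleImageHeight = 500;
const exampleHalfWidth = (exampleBox.x2 - exampleBox.x1) / 2; // 50
const exampleHalfHeight = (exampleBox.y2 - exampleBox.y1) / 2; // 50
const exampleMiddleX = Math.round(exampleBox.x1 + exampleHalfWidth); // 150
const exampleMiddleY = Math.round(exampleBox.y1 + exampleHalfHeight); // 150
// zoom out 10%: floor(50 * 1.1) = 55
const exampleTargetHalfSize = Math.floor(Math.max(exampleHalfWidth, exampleHalfHeight) * 1.1);
// clamp so the square crop stays inside the image on all four sides: min(55, 55, 55, 55) = 55
const exampleNewHalfSize = Math.min(
  exampleMiddleX - Math.max(0, exampleMiddleX - exampleTargetHalfSize),
  exampleMiddleY - Math.max(0, exampleMiddleY - exampleTargetHalfSize),
  Math.min(exampleImageWidth - 1, exampleMiddleX + exampleTargetHalfSize) - exampleMiddleX,
  Math.min(exampleImageHeight - 1, exampleMiddleY + exampleTargetHalfSize) - exampleMiddleY,
);
// -> { left: 95, top: 95, width: 110, height: 110 }
const exampleCrop = {
  left: exampleMiddleX - exampleNewHalfSize,
  top: exampleMiddleY - exampleNewHalfSize,
  width: exampleNewHalfSize * 2,
  height: exampleNewHalfSize * 2,
};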

View File

@@ -1,2 +0,0 @@
export * from './face.repository';
export * from './facial-recognition.services';

View File

@@ -10,7 +10,6 @@ export * from './domain.config';
export * from './domain.constant';
export * from './domain.module';
export * from './domain.util';
export * from './facial-recognition';
export * from './job';
export * from './library';
export * from './media';

View File

@@ -7,6 +7,7 @@ export enum QueueName {
CLIP_ENCODING = 'clipEncoding',
BACKGROUND_TASK = 'backgroundTask',
STORAGE_TEMPLATE_MIGRATION = 'storageTemplateMigration',
MIGRATION = 'migration',
SEARCH = 'search',
SIDECAR = 'sidecar',
LIBRARY = 'library',
@@ -29,7 +30,7 @@ export enum JobName {
GENERATE_JPEG_THUMBNAIL = 'generate-jpeg-thumbnail',
GENERATE_WEBP_THUMBNAIL = 'generate-webp-thumbnail',
GENERATE_THUMBHASH_THUMBNAIL = 'generate-thumbhash-thumbnail',
GENERATE_FACE_THUMBNAIL = 'generate-face-thumbnail',
GENERATE_PERSON_THUMBNAIL = 'generate-person-thumbnail',
// metadata
QUEUE_METADATA_EXTRACTION = 'queue-metadata-extraction',
@@ -45,14 +46,20 @@ export enum JobName {
STORAGE_TEMPLATE_MIGRATION_SINGLE = 'storage-template-migration-single',
SYSTEM_CONFIG_CHANGE = 'system-config-change',
// migration
QUEUE_MIGRATION = 'queue-migration',
MIGRATE_ASSET = 'migrate-asset',
MIGRATE_PERSON = 'migrate-person',
// object tagging
QUEUE_OBJECT_TAGGING = 'queue-object-tagging',
CLASSIFY_IMAGE = 'classify-image',
// facial recognition
PERSON_CLEANUP = 'person-cleanup',
PERSON_DELETE = 'person-delete',
QUEUE_RECOGNIZE_FACES = 'queue-recognize-faces',
RECOGNIZE_FACES = 'recognize-faces',
PERSON_CLEANUP = 'person-cleanup',
// library management
LIBRARY_SCAN = 'library-refresh',
@@ -97,6 +104,7 @@ export const JOBS_TO_QUEUE: Record<JobName, QueueName> = {
[JobName.DELETE_FILES]: QueueName.BACKGROUND_TASK,
[JobName.CLEAN_OLD_AUDIT_LOGS]: QueueName.BACKGROUND_TASK,
[JobName.PERSON_CLEANUP]: QueueName.BACKGROUND_TASK,
[JobName.PERSON_DELETE]: QueueName.BACKGROUND_TASK,
// conversion
[JobName.QUEUE_VIDEO_CONVERSION]: QueueName.VIDEO_CONVERSION,
@@ -107,7 +115,7 @@ export const JOBS_TO_QUEUE: Record<JobName, QueueName> = {
[JobName.GENERATE_JPEG_THUMBNAIL]: QueueName.THUMBNAIL_GENERATION,
[JobName.GENERATE_WEBP_THUMBNAIL]: QueueName.THUMBNAIL_GENERATION,
[JobName.GENERATE_THUMBHASH_THUMBNAIL]: QueueName.THUMBNAIL_GENERATION,
[JobName.GENERATE_FACE_THUMBNAIL]: QueueName.THUMBNAIL_GENERATION,
[JobName.GENERATE_PERSON_THUMBNAIL]: QueueName.THUMBNAIL_GENERATION,
// metadata
[JobName.QUEUE_METADATA_EXTRACTION]: QueueName.METADATA_EXTRACTION,
@@ -119,6 +127,11 @@ export const JOBS_TO_QUEUE: Record<JobName, QueueName> = {
[JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE]: QueueName.STORAGE_TEMPLATE_MIGRATION,
[JobName.SYSTEM_CONFIG_CHANGE]: QueueName.STORAGE_TEMPLATE_MIGRATION,
// migration
[JobName.QUEUE_MIGRATION]: QueueName.MIGRATION,
[JobName.MIGRATE_ASSET]: QueueName.MIGRATION,
[JobName.MIGRATE_PERSON]: QueueName.MIGRATION,
// object tagging
[JobName.QUEUE_OBJECT_TAGGING]: QueueName.OBJECT_TAGGING,
[JobName.CLASSIFY_IMAGE]: QueueName.OBJECT_TAGGING,

View File

@@ -68,6 +68,9 @@ export class AllJobStatusResponseDto implements Record<QueueName, JobStatusDto>
@ApiProperty({ type: JobStatusDto })
[QueueName.STORAGE_TEMPLATE_MIGRATION]!: JobStatusDto;
@ApiProperty({ type: JobStatusDto })
[QueueName.MIGRATION]!: JobStatusDto;
@ApiProperty({ type: JobStatusDto })
[QueueName.BACKGROUND_TASK]!: JobStatusDto;

View File

@@ -1,5 +1,3 @@
import { BoundingBox } from '../smart-info';
export interface IBaseJob {
force?: boolean;
}
@@ -9,14 +7,6 @@ export interface IAssetFaceJob extends IBaseJob {
personId: string;
}
export interface IFaceThumbnailJob extends IAssetFaceJob {
imageWidth: number;
imageHeight: number;
boundingBox: BoundingBox;
assetId: string;
personId: string;
}
export interface IEntityJob extends IBaseJob {
id: string;
source?: 'upload';

View File

@@ -6,7 +6,6 @@ import {
IBulkEntityJob,
IDeleteFilesJob,
IEntityJob,
IFaceThumbnailJob,
ILibraryFileJob,
ILibraryRefreshJob,
IOfflineLibraryFileJob,
@@ -46,6 +45,11 @@ export type JobItem =
| { name: JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE; data: IEntityJob }
| { name: JobName.SYSTEM_CONFIG_CHANGE; data?: IBaseJob }
// Migration
| { name: JobName.QUEUE_MIGRATION; data?: IBaseJob }
| { name: JobName.MIGRATE_ASSET; data?: IEntityJob }
| { name: JobName.MIGRATE_PERSON; data?: IEntityJob }
// Metadata Extraction
| { name: JobName.QUEUE_METADATA_EXTRACTION; data: IBaseJob }
| { name: JobName.METADATA_EXTRACTION; data: IEntityJob }
@@ -63,7 +67,8 @@ export type JobItem =
// Recognize Faces
| { name: JobName.QUEUE_RECOGNIZE_FACES; data: IBaseJob }
| { name: JobName.RECOGNIZE_FACES; data: IEntityJob }
| { name: JobName.GENERATE_FACE_THUMBNAIL; data: IFaceThumbnailJob }
| { name: JobName.GENERATE_PERSON_THUMBNAIL; data: IEntityJob }
| { name: JobName.PERSON_DELETE; data: IEntityJob }
// Clip Embedding
| { name: JobName.QUEUE_ENCODE_CLIP; data: IBaseJob }

View File

@@ -94,6 +94,7 @@ describe(JobService.name, () => {
[QueueName.OBJECT_TAGGING]: expectedJobStatus,
[QueueName.SEARCH]: expectedJobStatus,
[QueueName.STORAGE_TEMPLATE_MIGRATION]: expectedJobStatus,
[QueueName.MIGRATION]: expectedJobStatus,
[QueueName.THUMBNAIL_GENERATION]: expectedJobStatus,
[QueueName.VIDEO_CONVERSION]: expectedJobStatus,
[QueueName.RECOGNIZE_FACES]: expectedJobStatus,
@@ -229,6 +230,7 @@ describe(JobService.name, () => {
[QueueName.SIDECAR]: { concurrency: 10 },
[QueueName.LIBRARY]: { concurrency: 10 },
[QueueName.STORAGE_TEMPLATE_MIGRATION]: { concurrency: 10 },
[QueueName.MIGRATION]: { concurrency: 10 },
[QueueName.THUMBNAIL_GENERATION]: { concurrency: 10 },
[QueueName.VIDEO_CONVERSION]: { concurrency: 10 },
},
@@ -242,6 +244,7 @@ describe(JobService.name, () => {
expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.SIDECAR, 10);
expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.LIBRARY, 10);
expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.STORAGE_TEMPLATE_MIGRATION, 10);
expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.MIGRATION, 10);
expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.THUMBNAIL_GENERATION, 10);
expect(jobMock.setConcurrency).toHaveBeenCalledWith(QueueName.VIDEO_CONVERSION, 10);
});

View File

@@ -76,6 +76,9 @@ export class JobService {
case QueueName.STORAGE_TEMPLATE_MIGRATION:
return this.jobRepository.queue({ name: JobName.STORAGE_TEMPLATE_MIGRATION });
case QueueName.MIGRATION:
return this.jobRepository.queue({ name: JobName.QUEUE_MIGRATION });
case QueueName.OBJECT_TAGGING:
await this.configCore.requireFeature(FeatureFlag.TAG_IMAGE);
return this.jobRepository.queue({ name: JobName.QUEUE_OBJECT_TAGGING, data: { force } });

View File

@@ -249,7 +249,7 @@ export class LibraryService {
originalPath: assetPath,
deviceAssetId: deviceAssetId,
deviceId: 'Library Import',
fileCreatedAt: stats.ctime,
fileCreatedAt: stats.mtime,
fileModifiedAt: stats.mtime,
type: assetType,
originalFileName: parse(assetPath).name,
@@ -261,7 +261,7 @@ export class LibraryService {
} else if (doRefresh && existingAssetEntity) {
assetId = existingAssetEntity.id;
await this.assetRepository.updateAll([existingAssetEntity.id], {
fileCreatedAt: stats.ctime,
fileCreatedAt: stats.mtime,
fileModifiedAt: stats.mtime,
});
} else {

View File

@@ -1,6 +1,7 @@
import {
AssetType,
Colorspace,
ExifEntity,
SystemConfigKey,
ToneMapping,
TranscodeHWAccel,
@@ -58,7 +59,7 @@ describe(MediaService.name, () => {
hasNextPage: false,
});
personMock.getAll.mockResolvedValue([personStub.newThumbnail]);
personMock.getFaceById.mockResolvedValue(faceStub.face1);
personMock.getFacesByIds.mockResolvedValue([faceStub.face1]);
await sut.handleQueueGenerateThumbnails({ force: true });
@@ -72,19 +73,8 @@ describe(MediaService.name, () => {
expect(personMock.getAll).toHaveBeenCalled();
expect(personMock.getAllWithoutThumbnail).not.toHaveBeenCalled();
expect(jobMock.queue).toHaveBeenCalledWith({
name: JobName.GENERATE_FACE_THUMBNAIL,
data: {
imageWidth: faceStub.face1.imageWidth,
imageHeight: faceStub.face1.imageHeight,
boundingBox: {
x1: faceStub.face1.boundingBoxX1,
x2: faceStub.face1.boundingBoxX2,
y1: faceStub.face1.boundingBoxY1,
y2: faceStub.face1.boundingBoxY2,
},
assetId: faceStub.face1.assetId,
personId: personStub.newThumbnail.id,
},
name: JobName.GENERATE_PERSON_THUMBNAIL,
data: { id: personStub.newThumbnail.id },
});
});
@@ -105,18 +95,9 @@ describe(MediaService.name, () => {
expect(personMock.getAllWithoutThumbnail).toHaveBeenCalled();
expect(personMock.getRandomFace).toHaveBeenCalled();
expect(jobMock.queue).toHaveBeenCalledWith({
name: JobName.GENERATE_FACE_THUMBNAIL,
name: JobName.GENERATE_PERSON_THUMBNAIL,
data: {
imageWidth: faceStub.face1.imageWidth,
imageHeight: faceStub.face1.imageHeight,
boundingBox: {
x1: faceStub.face1.boundingBoxX1,
x2: faceStub.face1.boundingBoxX2,
y1: faceStub.face1.boundingBoxY1,
y2: faceStub.face1.boundingBoxY2,
},
assetId: faceStub.face1.assetId,
personId: personStub.newThumbnail.id,
id: personStub.newThumbnail.id,
},
});
});
@@ -202,8 +183,27 @@ describe(MediaService.name, () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
await sut.handleGenerateJpegThumbnail({ id: assetStub.image.id });
expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/thumbs/user-id');
expect(mediaMock.resize).toHaveBeenCalledWith('/original/path.jpg', 'upload/thumbs/user-id/asset-id.jpeg', {
expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/thumbs/user-id/as/se');
expect(mediaMock.resize).toHaveBeenCalledWith('/original/path.jpg', 'upload/thumbs/user-id/as/se/asset-id.jpeg', {
size: 1440,
format: 'jpeg',
quality: 80,
colorspace: Colorspace.SRGB,
});
expect(assetMock.save).toHaveBeenCalledWith({
id: 'asset-id',
resizePath: 'upload/thumbs/user-id/as/se/asset-id.jpeg',
});
});
it('should generate a P3 thumbnail for a wide gamut image', async () => {
assetMock.getByIds.mockResolvedValue([
{ ...assetStub.image, exifInfo: { profileDescription: 'Adobe RGB', bitsPerSample: 14 } as ExifEntity },
]);
await sut.handleGenerateJpegThumbnail({ id: assetStub.image.id });
expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/thumbs/user-id/as/se');
expect(mediaMock.resize).toHaveBeenCalledWith('/original/path.jpg', 'upload/thumbs/user-id/as/se/asset-id.jpeg', {
size: 1440,
format: 'jpeg',
quality: 80,
@@ -211,7 +211,7 @@ describe(MediaService.name, () => {
});
expect(assetMock.save).toHaveBeenCalledWith({
id: 'asset-id',
resizePath: 'upload/thumbs/user-id/asset-id.jpeg',
resizePath: 'upload/thumbs/user-id/as/se/asset-id.jpeg',
});
});
@@ -220,19 +220,23 @@ describe(MediaService.name, () => {
assetMock.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleGenerateJpegThumbnail({ id: assetStub.video.id });
expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/thumbs/user-id');
expect(mediaMock.transcode).toHaveBeenCalledWith('/original/path.ext', 'upload/thumbs/user-id/asset-id.jpeg', {
inputOptions: ['-ss 00:00:00', '-sws_flags accurate_rnd+bitexact+full_chroma_int'],
outputOptions: [
'-frames:v 1',
'-v verbose',
'-vf scale=-2:1440:flags=lanczos+accurate_rnd+bitexact+full_chroma_int:out_color_matrix=601:out_range=pc,format=yuv420p',
],
twoPass: false,
});
expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/thumbs/user-id/as/se');
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/thumbs/user-id/as/se/asset-id.jpeg',
{
inputOptions: ['-ss 00:00:00', '-sws_flags accurate_rnd+bitexact+full_chroma_int'],
outputOptions: [
'-frames:v 1',
'-v verbose',
'-vf scale=-2:1440:flags=lanczos+accurate_rnd+bitexact+full_chroma_int:out_color_matrix=601:out_range=pc,format=yuv420p',
],
twoPass: false,
},
);
expect(assetMock.save).toHaveBeenCalledWith({
id: 'asset-id',
resizePath: 'upload/thumbs/user-id/asset-id.jpeg',
resizePath: 'upload/thumbs/user-id/as/se/asset-id.jpeg',
});
});
@@ -241,22 +245,50 @@ describe(MediaService.name, () => {
assetMock.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleGenerateJpegThumbnail({ id: assetStub.video.id });
expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/thumbs/user-id');
expect(mediaMock.transcode).toHaveBeenCalledWith('/original/path.ext', 'upload/thumbs/user-id/asset-id.jpeg', {
inputOptions: ['-ss 00:00:00', '-sws_flags accurate_rnd+bitexact+full_chroma_int'],
outputOptions: [
'-frames:v 1',
'-v verbose',
'-vf zscale=t=linear:npl=100,tonemap=hable:desat=0,zscale=p=bt709:t=601:m=bt470bg:range=pc,format=yuv420p',
],
twoPass: false,
});
expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/thumbs/user-id/as/se');
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/thumbs/user-id/as/se/asset-id.jpeg',
{
inputOptions: ['-ss 00:00:00', '-sws_flags accurate_rnd+bitexact+full_chroma_int'],
outputOptions: [
'-frames:v 1',
'-v verbose',
'-vf zscale=t=linear:npl=100,tonemap=hable:desat=0,zscale=p=bt709:t=601:m=bt470bg:range=pc,format=yuv420p',
],
twoPass: false,
},
);
expect(assetMock.save).toHaveBeenCalledWith({
id: 'asset-id',
resizePath: 'upload/thumbs/user-id/asset-id.jpeg',
resizePath: 'upload/thumbs/user-id/as/se/asset-id.jpeg',
});
});
it('should always generate video thumbnail in one pass', async () => {
mediaMock.probe.mockResolvedValue(probeStub.videoStreamHDR);
configMock.load.mockResolvedValue([
{ key: SystemConfigKey.FFMPEG_TWO_PASS, value: true },
{ key: SystemConfigKey.FFMPEG_MAX_BITRATE, value: '5000k' },
]);
assetMock.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleGenerateJpegThumbnail({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/thumbs/user-id/as/se/asset-id.jpeg',
{
inputOptions: ['-ss 00:00:00', '-sws_flags accurate_rnd+bitexact+full_chroma_int'],
outputOptions: [
'-frames:v 1',
'-v verbose',
'-vf zscale=t=linear:npl=100,tonemap=hable:desat=0,zscale=p=bt709:t=601:m=bt470bg:range=pc,format=yuv420p',
],
twoPass: false,
},
);
});
it('should run successfully', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
await sut.handleGenerateJpegThumbnail({ id: assetStub.image.id });
@@ -275,13 +307,35 @@ describe(MediaService.name, () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
await sut.handleGenerateWebpThumbnail({ id: assetStub.image.id });
expect(mediaMock.resize).toHaveBeenCalledWith('/original/path.jpg', 'upload/thumbs/user-id/asset-id.webp', {
expect(mediaMock.resize).toHaveBeenCalledWith('/original/path.jpg', 'upload/thumbs/user-id/as/se/asset-id.webp', {
format: 'webp',
size: 250,
quality: 80,
colorspace: Colorspace.P3,
colorspace: Colorspace.SRGB,
});
expect(assetMock.save).toHaveBeenCalledWith({ id: 'asset-id', webpPath: 'upload/thumbs/user-id/asset-id.webp' });
expect(assetMock.save).toHaveBeenCalledWith({
id: 'asset-id',
webpPath: 'upload/thumbs/user-id/as/se/asset-id.webp',
});
});
});
it('should generate a P3 thumbnail for a wide gamut image', async () => {
assetMock.getByIds.mockResolvedValue([
{ ...assetStub.image, exifInfo: { profileDescription: 'Adobe RGB', bitsPerSample: 14 } as ExifEntity },
]);
await sut.handleGenerateWebpThumbnail({ id: assetStub.image.id });
expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/thumbs/user-id/as/se');
expect(mediaMock.resize).toHaveBeenCalledWith('/original/path.jpg', 'upload/thumbs/user-id/as/se/asset-id.webp', {
format: 'webp',
size: 250,
quality: 80,
colorspace: Colorspace.P3,
});
expect(assetMock.save).toHaveBeenCalledWith({
id: 'asset-id',
webpPath: 'upload/thumbs/user-id/as/se/asset-id.webp',
});
});
@@ -375,7 +429,7 @@ describe(MediaService.name, () => {
expect(storageMock.mkdirSync).toHaveBeenCalled();
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: [],
outputOptions: [
@@ -416,7 +470,7 @@ describe(MediaService.name, () => {
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: [],
outputOptions: [
@@ -442,7 +496,7 @@ describe(MediaService.name, () => {
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: [],
outputOptions: [
@@ -471,7 +525,7 @@ describe(MediaService.name, () => {
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: [],
outputOptions: [
@@ -498,7 +552,7 @@ describe(MediaService.name, () => {
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: [],
outputOptions: [
@@ -525,7 +579,7 @@ describe(MediaService.name, () => {
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: [],
outputOptions: [
@@ -552,7 +606,7 @@ describe(MediaService.name, () => {
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: [],
outputOptions: [
@@ -603,7 +657,7 @@ describe(MediaService.name, () => {
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: [],
outputOptions: [
@@ -635,7 +689,7 @@ describe(MediaService.name, () => {
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: [],
outputOptions: [
@@ -664,7 +718,7 @@ describe(MediaService.name, () => {
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: [],
outputOptions: [
@@ -695,7 +749,7 @@ describe(MediaService.name, () => {
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: [],
outputOptions: [
@@ -728,7 +782,7 @@ describe(MediaService.name, () => {
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: [],
outputOptions: [
@@ -760,7 +814,7 @@ describe(MediaService.name, () => {
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: [],
outputOptions: [
@@ -791,7 +845,7 @@ describe(MediaService.name, () => {
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: [],
outputOptions: [
@@ -821,7 +875,7 @@ describe(MediaService.name, () => {
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: [],
outputOptions: [
@@ -851,7 +905,7 @@ describe(MediaService.name, () => {
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: [],
outputOptions: [
@@ -881,7 +935,7 @@ describe(MediaService.name, () => {
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: [],
outputOptions: [
@@ -914,7 +968,7 @@ describe(MediaService.name, () => {
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: [],
outputOptions: [
@@ -976,7 +1030,7 @@ describe(MediaService.name, () => {
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: ['-init_hw_device cuda=cuda:0', '-filter_hw_device cuda'],
outputOptions: [
@@ -1014,7 +1068,7 @@ describe(MediaService.name, () => {
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: ['-init_hw_device cuda=cuda:0', '-filter_hw_device cuda'],
outputOptions: [
@@ -1048,7 +1102,7 @@ describe(MediaService.name, () => {
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: ['-init_hw_device cuda=cuda:0', '-filter_hw_device cuda'],
outputOptions: [
@@ -1083,7 +1137,7 @@ describe(MediaService.name, () => {
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: ['-init_hw_device cuda=cuda:0', '-filter_hw_device cuda'],
outputOptions: [
@@ -1114,7 +1168,7 @@ describe(MediaService.name, () => {
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: ['-init_hw_device cuda=cuda:0', '-filter_hw_device cuda'],
outputOptions: [
@@ -1150,7 +1204,7 @@ describe(MediaService.name, () => {
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: ['-init_hw_device qsv=hw', '-filter_hw_device hw'],
outputOptions: [
@@ -1186,7 +1240,7 @@ describe(MediaService.name, () => {
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: ['-init_hw_device qsv=hw', '-filter_hw_device hw'],
outputOptions: [
@@ -1219,7 +1273,7 @@ describe(MediaService.name, () => {
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: ['-init_hw_device qsv=hw', '-filter_hw_device hw'],
outputOptions: [
@@ -1263,7 +1317,7 @@ describe(MediaService.name, () => {
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: ['-init_hw_device vaapi=accel:/dev/dri/renderD128', '-filter_hw_device accel'],
outputOptions: [
@@ -1295,7 +1349,7 @@ describe(MediaService.name, () => {
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: ['-init_hw_device vaapi=accel:/dev/dri/renderD128', '-filter_hw_device accel'],
outputOptions: [
@@ -1329,7 +1383,7 @@ describe(MediaService.name, () => {
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: ['-init_hw_device vaapi=accel:/dev/dri/renderD128', '-filter_hw_device accel'],
outputOptions: [
@@ -1359,7 +1413,7 @@ describe(MediaService.name, () => {
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: ['-init_hw_device vaapi=accel:/dev/dri/card1', '-filter_hw_device accel'],
outputOptions: [
@@ -1385,7 +1439,7 @@ describe(MediaService.name, () => {
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: ['-init_hw_device vaapi=accel:/dev/dri/renderD129', '-filter_hw_device accel'],
outputOptions: [
@@ -1418,7 +1472,7 @@ describe(MediaService.name, () => {
expect(mediaMock.transcode).toHaveBeenCalledTimes(2);
expect(mediaMock.transcode).toHaveBeenLastCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: [],
outputOptions: [
@@ -1455,7 +1509,7 @@ describe(MediaService.name, () => {
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: [],
outputOptions: [
@@ -1482,7 +1536,7 @@ describe(MediaService.name, () => {
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: [],
outputOptions: [
@@ -1509,7 +1563,7 @@ describe(MediaService.name, () => {
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/asset-id.mp4',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
{
inputOptions: [],
outputOptions: [
@@ -1528,4 +1582,51 @@ describe(MediaService.name, () => {
},
);
});
describe('isSRGB', () => {
it('should return true for srgb colorspace', () => {
const asset = { ...assetStub.image, exifInfo: { colorspace: 'sRGB' } as ExifEntity };
expect(sut.isSRGB(asset)).toEqual(true);
});
it('should return true for srgb profile description', () => {
const asset = { ...assetStub.image, exifInfo: { profileDescription: 'sRGB v1.31' } as ExifEntity };
expect(sut.isSRGB(asset)).toEqual(true);
});
it('should return true for 8-bit image with no colorspace metadata', () => {
const asset = { ...assetStub.image, exifInfo: { bitsPerSample: 8 } as ExifEntity };
expect(sut.isSRGB(asset)).toEqual(true);
});
it('should return true for image with no colorspace or bit depth metadata', () => {
const asset = { ...assetStub.image, exifInfo: {} as ExifEntity };
expect(sut.isSRGB(asset)).toEqual(true);
});
it('should return false for non-srgb colorspace', () => {
const asset = { ...assetStub.image, exifInfo: { colorspace: 'Adobe RGB' } as ExifEntity };
expect(sut.isSRGB(asset)).toEqual(false);
});
it('should return false for non-srgb profile description', () => {
const asset = { ...assetStub.image, exifInfo: { profileDescription: 'sP3C' } as ExifEntity };
expect(sut.isSRGB(asset)).toEqual(false);
});
it('should return false for 16-bit image with no colorspace metadata', () => {
const asset = { ...assetStub.image, exifInfo: { bitsPerSample: 16 } as ExifEntity };
expect(sut.isSRGB(asset)).toEqual(false);
});
it('should return true for 16-bit image with sRGB colorspace', () => {
const asset = { ...assetStub.image, exifInfo: { colorspace: 'sRGB', bitsPerSample: 16 } as ExifEntity };
expect(sut.isSRGB(asset)).toEqual(true);
});
it('should return true for 16-bit image with sRGB profile', () => {
const asset = { ...assetStub.image, exifInfo: { profileDescription: 'sRGB', bitsPerSample: 16 } as ExifEntity };
expect(sut.isSRGB(asset)).toEqual(true);
});
});
});

View File

@@ -1,9 +1,8 @@
import { AssetEntity, AssetType, TranscodeHWAccel, TranscodePolicy, VideoCodec } from '@app/infra/entities';
import { AssetEntity, AssetType, Colorspace, TranscodeHWAccel, TranscodePolicy, VideoCodec } from '@app/infra/entities';
import { Inject, Injectable, Logger, UnsupportedMediaTypeException } from '@nestjs/common';
import { join } from 'path';
import { IAssetRepository, WithoutProperty } from '../asset';
import { usePagination } from '../domain.util';
import { IBaseJob, IEntityJob, IJobRepository, JOBS_ASSET_PAGINATION_SIZE, JobName } from '../job';
import { IBaseJob, IEntityJob, IJobRepository, JOBS_ASSET_PAGINATION_SIZE, JobName, QueueName } from '../job';
import { IPersonRepository } from '../person';
import { IStorageRepository, StorageCore, StorageFolder } from '../storage';
import { ISystemConfigRepository, SystemConfigFFmpegDto } from '../system-config';
@@ -14,8 +13,8 @@ import { H264Config, HEVCConfig, NVENCConfig, QSVConfig, ThumbnailConfig, VAAPIC
@Injectable()
export class MediaService {
private logger = new Logger(MediaService.name);
private storageCore = new StorageCore();
private configCore: SystemConfigCore;
private storageCore: StorageCore;
constructor(
@Inject(IAssetRepository) private assetRepository: IAssetRepository,
@@ -26,11 +25,10 @@ export class MediaService {
@Inject(ISystemConfigRepository) configRepository: ISystemConfigRepository,
) {
this.configCore = new SystemConfigCore(configRepository);
this.storageCore = new StorageCore(this.storageRepository);
}
async handleQueueGenerateThumbnails(job: IBaseJob) {
const { force } = job;
async handleQueueGenerateThumbnails({ force }: IBaseJob) {
const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => {
return force
? this.assetRepository.getAll(pagination)
@@ -55,26 +53,73 @@ export class MediaService {
const people = force ? await this.personRepository.getAll() : await this.personRepository.getAllWithoutThumbnail();
for (const person of people) {
// use the stored face asset for generating the thumbnail, or pick a random one if not present
const face = person.faceAssetId
? await this.personRepository.getFaceById({ personId: person.id, assetId: person.faceAssetId })
: await this.personRepository.getRandomFace(person.id);
if (face) {
await this.jobRepository.queue({
name: JobName.GENERATE_FACE_THUMBNAIL,
data: {
imageWidth: face.imageWidth,
imageHeight: face.imageHeight,
boundingBox: {
x1: face.boundingBoxX1,
x2: face.boundingBoxX2,
y1: face.boundingBoxY1,
y2: face.boundingBoxY2,
},
assetId: face.assetId,
personId: person.id,
},
});
if (!person.faceAssetId) {
const face = await this.personRepository.getRandomFace(person.id);
if (!face) {
continue;
}
await this.personRepository.update({ id: person.id, faceAssetId: face.assetId });
}
await this.jobRepository.queue({ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id: person.id } });
}
return true;
}
async handleQueueMigration() {
const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) =>
this.assetRepository.getAll(pagination),
);
const { active, waiting } = await this.jobRepository.getJobCounts(QueueName.MIGRATION);
if (active === 1 && waiting === 0) {
await this.storageCore.removeEmptyDirs(StorageFolder.THUMBNAILS);
await this.storageCore.removeEmptyDirs(StorageFolder.ENCODED_VIDEO);
}
for await (const assets of assetPagination) {
for (const asset of assets) {
await this.jobRepository.queue({ name: JobName.MIGRATE_ASSET, data: { id: asset.id } });
}
}
const people = await this.personRepository.getAll();
for (const person of people) {
await this.jobRepository.queue({ name: JobName.MIGRATE_PERSON, data: { id: person.id } });
}
return true;
}
async handleAssetMigration({ id }: IEntityJob) {
const [asset] = await this.assetRepository.getByIds([id]);
if (!asset) {
return false;
}
if (asset.resizePath) {
const resizePath = this.ensureThumbnailPath(asset, 'jpeg');
if (asset.resizePath !== resizePath) {
await this.storageRepository.moveFile(asset.resizePath, resizePath);
await this.assetRepository.save({ id: asset.id, resizePath });
}
}
if (asset.webpPath) {
const webpPath = this.ensureThumbnailPath(asset, 'webp');
if (asset.webpPath !== webpPath) {
await this.storageRepository.moveFile(asset.webpPath, webpPath);
await this.assetRepository.save({ id: asset.id, webpPath });
}
}
if (asset.encodedVideoPath) {
const encodedVideoPath = this.ensureEncodedVideoPath(asset, 'mp4');
if (asset.encodedVideoPath !== encodedVideoPath) {
await this.storageRepository.moveFile(asset.encodedVideoPath, encodedVideoPath);
await this.assetRepository.save({ id: asset.id, encodedVideoPath });
}
}
@@ -113,8 +158,9 @@ export class MediaService {
async generateImageThumbnail(asset: AssetEntity, format: 'jpeg' | 'webp') {
const { thumbnail } = await this.configCore.getConfig();
const size = format === 'jpeg' ? thumbnail.jpegSize : thumbnail.webpSize;
const thumbnailOptions = { format, size, colorspace: thumbnail.colorspace, quality: thumbnail.quality };
const path = this.ensureThumbnailPath(asset, format);
const colorspace = this.isSRGB(asset) ? Colorspace.SRGB : thumbnail.colorspace;
const thumbnailOptions = { format, size, colorspace, quality: thumbnail.quality };
await this.mediaRepository.resize(asset.originalPath, path, thumbnailOptions);
return path;
}
@@ -184,9 +230,7 @@ export class MediaService {
}
const input = asset.originalPath;
const outputFolder = this.storageCore.getFolderLocation(StorageFolder.ENCODED_VIDEO, asset.ownerId);
const output = join(outputFolder, `${asset.id}.mp4`);
this.storageRepository.mkdirSync(outputFolder);
const output = this.ensureEncodedVideoPath(asset, 'mp4');
const { videoStreams, audioStreams, format } = await this.mediaRepository.probe(input);
const mainVideoStream = this.getMainStream(videoStreams);
@@ -330,8 +374,23 @@ export class MediaService {
}
ensureThumbnailPath(asset: AssetEntity, extension: string): string {
const folderPath = this.storageCore.getFolderLocation(StorageFolder.THUMBNAILS, asset.ownerId);
this.storageRepository.mkdirSync(folderPath);
return join(folderPath, `${asset.id}.${extension}`);
return this.storageCore.ensurePath(StorageFolder.THUMBNAILS, asset.ownerId, `${asset.id}.${extension}`);
}
ensureEncodedVideoPath(asset: AssetEntity, extension: string): string {
return this.storageCore.ensurePath(StorageFolder.ENCODED_VIDEO, asset.ownerId, `${asset.id}.${extension}`);
}
isSRGB(asset: AssetEntity): boolean {
const { colorspace, profileDescription, bitsPerSample } = asset.exifInfo ?? {};
if (colorspace || profileDescription) {
return [colorspace, profileDescription].some((s) => s?.toLowerCase().includes('srgb'));
} else if (bitsPerSample) {
// assume sRGB for 8-bit images with no color profile or colorspace metadata
return bitsPerSample === 8;
} else {
// assume sRGB for images with no relevant metadata
return true;
}
}
}
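// Note on the nested paths expected by the updated tests above (e.g.
// 'upload/thumbs/user-id/as/se/asset-id.jpeg'): StorageCore.ensurePath itself is not shown
// in this diff, but the expected paths are consistent with bucketing by the first four
// characters of the file name (the stub id 'asset-id' yields 'as'/'se' either way).
// Hypothetical sketch of that layout, for illustration only:
function bucketedPathSketch(baseFolder: string, ownerId: string, filename: string): string {
  // two two-character sub-folders derived from the start of the file name (assumed scheme)
  const bucketA = filename.slice(0, 2);
  const bucketB = filename.slice(2, 4);
  return `${baseFolder}/${ownerId}/${bucketA}/${bucketB}/${filename}`;
}
// bucketedPathSketch('upload/thumbs', 'user-id', 'asset-id.jpeg')
//   -> 'upload/thumbs/user-id/as/se/asset-id.jpeg'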

Some files were not shown because too many files have changed in this diff.