Compare commits


69 Commits

Author SHA1 Message Date
Alex The Bot
37ab37bffc Version v1.85.0 2023-11-08 03:20:59 +00:00
shenlong
664b7106ca feat(mobile): shared album activity disable handling (#4890)
* feat(mobile): shared album activity disable handling

* not show comment/like option on non-shared album, alternative text when activity is disabled

---------

Co-authored-by: shalong-tanwen <139912620+shalong-tanwen@users.noreply.github.com>
Co-authored-by: Alex Tran <alex.tran1502@gmail.com>
2023-11-08 03:07:43 +00:00
Alex
bb28cae671 docs: update (#4893)
* docs: update

* chore:format
2023-11-07 15:52:44 -06:00
shenlong
c2c26c471a fix(mobile): do not show trashed assets in album viewer page (#4894)
Co-authored-by: shalong-tanwen <139912620+shalong-tanwen@users.noreply.github.com>
2023-11-07 13:19:45 -06:00
Strubbl
2dca2850dc Update README.md (#4892)
fix alt parameter spelling of the Discord image tag
2023-11-07 18:57:30 +00:00
dependabot[bot]
7fc8f6433b chore(deps): bump stumpylog/image-cleaner-action from 0.3.0 to 0.4.0 (#4886)
Bumps [stumpylog/image-cleaner-action](https://github.com/stumpylog/image-cleaner-action) from 0.3.0 to 0.4.0.
- [Release notes](https://github.com/stumpylog/image-cleaner-action/releases)
- [Changelog](https://github.com/stumpylog/image-cleaner-action/blob/main/CHANGELOG.md)
- [Commits](https://github.com/stumpylog/image-cleaner-action/compare/v0.3.0...v0.4.0)

---
updated-dependencies:
- dependency-name: stumpylog/image-cleaner-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-11-07 13:54:18 -05:00
Jason Rasmussen
f6180fccdc chore(server): compose updates (#4878)
* chore: compose updates

* chore: docs for troubleshooting

* chore: add reference in docker compose to docs
2023-11-07 11:34:03 -05:00
martin
9d01885b58 feat(server, web): Album's options (#4870)
* feat: disable activity

* fix: disable reactions

* fix: tests

* fix: tests

* fix: tests

* pr feedback

* pr feedback

* chore: styling & wording

* refactor component

---------

Co-authored-by: Alex Tran <alex.tran1502@gmail.com>
2023-11-07 04:37:21 +00:00
Jason Rasmussen
ace0a5911c fix(web): optimize deps (#4877) 2023-11-06 22:02:46 -06:00
Fynn Petersen-Frey
21f2d3058a feat(mobile)!: batched full/initial sync (#4840)
* feat(mobile): batched full/initial sync

* use OptionalBetween

* skip/take as integer

---------

Co-authored-by: Fynn Petersen-Frey <zoodyy@users.noreply.github.com>
Co-authored-by: Alex Tran <alex.tran1502@gmail.com>
2023-11-06 11:40:43 -06:00
shenlong
26fd9d7e5f feat(mobile): shared album activities (#4833)
* fix(server): global activity like duplicate search

* mobile: user_circle_avatar - fallback to text icon if no profile pic available

* mobile: use favourite icon in search "your activity"

* feat(mobile): shared album activities

* mobile: align hearts with user profile icon

* styling

* replace bottom sheet with dismissible

* add auto focus to the input

---------

Co-authored-by: shalong-tanwen <139912620+shalong-tanwen@users.noreply.github.com>
Co-authored-by: Alex <alex.tran1502@gmail.com>
2023-11-06 09:46:26 -06:00
Jason Rasmussen
c74ea7282a docs: python upload guide (#4867) 2023-11-06 09:08:36 -05:00
Jason Rasmussen
279481ad54 feat(server): make is favorite optional on asset upload (#4865)
* feat(server): make isFavorite optional

* chore: open api

* chore: e2e
2023-11-06 09:04:39 -05:00
Mert
9e7a32804b chore(server): set relations for getByIds (#4855) 2023-11-05 20:15:12 -06:00
martin
a0743d8b7d feat(web): global activity (#4796)
* feat: global activity

* fix: tests

* pr feedback

* use flexbox

* fix: deleted control actions

* fix: flex box

* fix: do not show activity tab by default

* feat: better grouping

* fix: set isShared default value to false

* fix: prevent re-rendering the asset grid

* fix: activity status above the scrollbar

* fix: prevent re-rendering the asset grid

* fix: prevent re-rendering the asset grid

* pr feedback

* pr feedback

* pr feedback

* styling and better thumbnail

---------

Co-authored-by: Alex Tran <alex.tran1502@gmail.com>
2023-11-05 11:24:43 -06:00
Fynn Petersen-Frey
68000c21a8 fix(mobile): backup indicator wrong when only background backup is enabled (#4842)
Co-authored-by: Fynn Petersen-Frey <zoodyy@users.noreply.github.com>
2023-11-05 10:07:57 -06:00
Jason Rasmussen
e671b30aaf fix(server): duplicate faces bug (#4844) 2023-11-05 10:07:29 -06:00
Sergey Kondrikov
cf1dfdc776 fix(web): unstack after stack child selection (#4834) 2023-11-04 07:59:21 -05:00
doggo
de29480dda fix(mobile): first stack list asset is now highlighted on view (#4802) 2023-11-03 20:43:43 -05:00
Jason Rasmussen
2e424fe249 feat(server): better api error messages (for unhandled exceptions) (#4817)
* feat(server): better error messages

* chore: open api

* chore: remove debug log

* fix: syntax error

* fix: e2e test
2023-11-03 20:33:15 -05:00
Wingy
d4ef6f52bb chore(mobile): change version mismatch text (#4831)
* change version mismatch text

* change text
2023-11-03 20:20:42 -05:00
martin
e1e45f3f32 fix(web): show one face for the same person in the detail panel (#4822) 2023-11-03 16:02:05 -05:00
Jesbin
330f4cadda docs: changes to docker compose command. (#4828) 2023-11-03 16:01:17 -05:00
waclaw66
621eef0edc feat(mobile): share assets from album (#4821)
* share from album

* fix case

* enhance conditional array items
2023-11-03 16:00:55 -05:00
waclaw66
33ce2b7bba fix(mobile): shows asset datetime with original timezone (#4774) 2023-11-03 09:04:41 -05:00
shenlong
81792a5342 fix(mobile): immich app bar tap radius (#4816)
* mobile: tool-tip for server url in app bar dialog

* fix: Add Inkwell around the entire profile image

* mobile: open documentation and github in browser

---------

Co-authored-by: shalong-tanwen <139912620+shalong-tanwen@users.noreply.github.com>
2023-11-03 09:04:06 -05:00
shenlong
5f43971ccf mobile: allow upload if local asset in selection (#4815)
Co-authored-by: shalong-tanwen <139912620+shalong-tanwen@users.noreply.github.com>
2023-11-03 09:03:01 -05:00
Sergey Kondrikov
38443a6068 fix(web): unstacking issues (#4792)
* Fix typo

* Restore asset store consistency after unstacking

* Fix aspect ratio after unstacking
2023-11-03 09:01:48 -05:00
waclaw66
92bb42950e fix(web): scrollbar year label visibility (#4820)
* fixes year label visibility

* format fix
2023-11-03 08:54:02 -05:00
Jason Rasmussen
b58edae134 fix(web): timeline alignment (#4808) 2023-11-02 15:11:59 -05:00
martin
2b9f20a1b5 fix: update like status (#4803) 2023-11-02 14:43:27 -04:00
Alex
d5f8199655 fix(web): scrollbar not showing year (#4782)
* fix(web): scrollbar not showing year

* grammar

* fix test
2023-11-01 20:50:24 -05:00
Alex
d8903de92e docs: remove read-only related content (#4781)
* docs: remove read-only related content

* format

* broken link
2023-11-01 20:49:57 -05:00
Jason Rasmussen
1d35965d03 feat(web): shuffle slideshow order (#4277)
* feat(web): shuffle slideshow order

* Fix play/stop issues

* Enter/exit fullscreen mode
* Prevent navigation to the next asset after exiting slideshow mode

* Fix entering the slideshow mode from an album page

* Simplify markup of the AssetViewer

Group viewer area and navigation (prev/next/slideshow bar) controls together

* Select a random asset from a random bucket

* Preserve assets order in random mode

* Exit fullscreen mode only if it is active

* Extract SlideshowHistory class

* Use traditional functions instead of arrow functions

* Refactor SlideshowHistory class

* Extract SlideshowBar component

* Fix comments

* Hide Say something in slideshow mode

---------

Co-authored-by: brighteyed <sergey.kondrikov@gmail.com>
2023-11-01 21:34:30 -04:00
Alex
309bf1ad22 chore: post release tasks 2023-11-01 14:43:10 -05:00
Jason Rasmussen
0130591a0f fix: show/set activity like per user (#4775)
* fix: like per user

* chore: open api

* chore: e2e test for userId filtering
2023-11-01 11:49:12 -04:00
Alex The Bot
cf4ec06750 Version v1.84.0 2023-11-01 14:46:59 +00:00
Alex
e8712e6694 fix(server): import scheduler module (#4766) 2023-10-31 23:40:35 -05:00
martin
ce5966c23d feat(web,server): activity (#4682)
* feat: activity

* regenerate api

* fix: make asset owner unable to delete comment

* fix: merge

* fix: tests

* feat: use textarea instead of input

* fix: do actions only if the album is shared

* fix: placeholder opacity

* fix(web): improve messages UI

* fix(web): improve input message UI

* pr feedback

* fix: tests

* pr feedback

* pr feedback

* pr feedback

* fix permissions

* regenerate api

* pr feedback

* pr feedback

* multiple improvements on web

* fix: ui colors

* WIP

* chore: open api

* pr feedback

* fix: add comment

* chore: clean up

* pr feedback

* refactor: endpoints

* chore: open api

* fix: filter by type

* fix: e2e

* feat: e2e remove own comment

* fix: web tests

* remove console.log

* chore: cleanup

* fix: ui tweaks

* pr feedback

* fix web test

* fix: unit tests

* chore: remove unused code

* revert useless changes

* fix: grouping messages

* fix: remove nullable on updatedAt

* fix: text overflow

* styling

---------

Co-authored-by: Jason Rasmussen <jrasm91@gmail.com>
Co-authored-by: Alex Tran <alex.tran1502@gmail.com>
2023-10-31 22:13:34 -05:00
Jason Rasmussen
68f6446718 fix(cli): ignore web socket when unavailable and skip metadata init (#4748) 2023-10-31 22:08:21 -05:00
Jason Rasmussen
197f336b5f fix(web): no preload repair report (#4749) 2023-10-31 20:37:32 +00:00
Daniel Dietzler
cd375a976e feat(server): custom library scanning interval (#4390)
* add automatic library scan config options

* add validation

* open api

* use CronJob instead of cron-validator

* fix tests

* catch potential error of the library scan initialization

* better description for input field

* move library scan job initialization to server app service

* fix tests

* add comments to all parameters of cronjob constructor

* make scan a child of a more general library object

* open api

* chore: cleanup

* move cronjob handling to job repository

* web: select for common cron expressions

* fix open api

* fix tests

* put scanning settings in nested accordion

* fix system config validation

* refactor, tests

---------

Co-authored-by: Jason Rasmussen <jrasm91@gmail.com>
2023-10-31 15:19:12 -05:00
Jason Rasmussen
088d5addf2 refactor(server): user core (#4733) 2023-10-31 11:01:32 -04:00
shenlong
2377df9dae fix(mobile): store exposure time as string (#4589)
Co-authored-by: shalong-tanwen <139912620+shalong-tanwen@users.noreply.github.com>
2023-10-31 05:33:45 -05:00
waclaw66
ad5ba82f50 fix(mobile): don't show lens info if it's not available (#4737) 2023-10-31 05:33:08 -05:00
Michael Manganiello
b6f18cbe81 fix(server): Correctly set album start and end dates (#4698)
* fix(server): Correctly set album start and end dates

Currently, the query that retrieves album assets uses
`ORDER BY assets.fileCreatedAt DESC`, which makes the existing logic
return the start/end dates reversed (with `startDate` being taken from
the first asset in the array).

Instead of using the index-based approach, this change iterates through
assets to get the min/max `fileCreatedAt`. This will avoid any future
issues, if the query ordering changes, or becomes customizable (e.g. in
case the user prefers to visualize older assets first).

* fix: Maintain constant cost and only swap variables if needed
2023-10-31 05:08:34 -05:00
Mert
87a0ba3db3 feat(ml): export clip models to ONNX and host models on Hugging Face (#4700)
* export clip models

* export to hf

refactored export code

* export mclip, general refactoring

cleanup

* updated conda deps

* do transforms with pillow and numpy, add tokenization config to export, general refactoring

* moved conda dockerfile, re-added poetry

* minor fixes

* updated link

* updated tests

* removed `requirements.txt` from workflow

* fixed mimalloc path

* removed torchvision

* cleaner np typing

* review suggestions

* update default model name

* update test
2023-10-31 05:02:04 -05:00
Jason Rasmussen
3212a47720 refactor(server): user profile picture (#4728) 2023-10-30 19:38:34 -04:00
Jason Rasmussen
431536cdbb refactor(server): user core (#4722) 2023-10-30 17:02:36 -04:00
martin
9a60578088 fix(web): multiple improvements for people page (1) (#4717)
* fix(web): multiple improvements for people page

* feat: better responsive icons
2023-10-30 14:40:28 -05:00
Jason Rasmussen
8dcd159bd6 chore(server): remove user count endpoint (#4724)
* chore: remove unused endpoint

* chore: open api
2023-10-30 19:29:18 +00:00
Skyler Mäntysaari
2f87463170 fix(server): better fix for the OAuth Discovery errors (#4695)
* fix(server/oauth): Handle errors from OAuth Discovery.

* fix(server/oauth): Better fix for OAuth discovery error.

* This doesn't break tests.

* Update server/tsconfig.json

Co-authored-by: Jason Rasmussen <jrasm91@gmail.com>

* Revert back to the mostly original way.

---------

Co-authored-by: Jason Rasmussen <jrasm91@gmail.com>
2023-10-30 13:22:30 -04:00
shenlong
9f56bf0ab9 refactor(mobile): app bar (#4687)
* refactor(mobile): add app bar to library and sharing

* mobile: add app bar dialog

* fix(mobile): refetch profile image only when path is changed

* mobile: add server url to dialog

* mobile: move trash to library app bar

* replace discord link with github

* user confirmation before sign out

* edit some styles

---------

Co-authored-by: shalong-tanwen <139912620+shalong-tanwen@users.noreply.github.com>
Co-authored-by: Alex Tran <alex.tran1502@gmail.com>
2023-10-30 12:17:34 -05:00
Jason Rasmussen
603b056512 refactor(server): auth delete device (#4720)
* refactor(server): auth delete device

* fix: person e2e
2023-10-30 11:48:38 -04:00
Fynn Petersen-Frey
ce04e9e07a feat(server): hardware video acceleration for Rockchip SOCs via RKMPP (#4645)
* feat(server): hardware video acceleration for Rockchip SOCs via RKMPP

* add tests

* use LD_LIBRARY_PATH for custom ffmpeg

* incorporate review feedback

* code re-use for ffmpeg call

* review feedback
2023-10-30 09:39:37 -05:00
Alex
c54a188154 fix(web): sidebar setting not updating when there is a new property added to the data payload (#4708) 2023-10-30 09:17:37 -05:00
Mayuresh Dharwadkar
c77ba46d60 docs: fix typos (#4713) 2023-10-30 09:17:10 -05:00
martin
cc3149c520 fix(server): do not leak people (#4710) 2023-10-30 03:44:05 -05:00
shenlong
512f672e9e fix(mobile): cache key for assets from dto (#4699)
Co-authored-by: shalong-tanwen <139912620+shalong-tanwen@users.noreply.github.com>
2023-10-29 15:28:54 -05:00
shenlong
b117985f66 fix(mobile): first char miss in new description (#4697)
Co-authored-by: shalong-tanwen <139912620+shalong-tanwen@users.noreply.github.com>
2023-10-29 14:16:25 -05:00
Kalyani Mhala
b92a2b2a56 chore: add contribution section to readme (#4690)
* Update README.md

Successfully added contribution section to readme.md file.

* reordering

---------

Co-authored-by: Alex Tran <alex.tran1502@gmail.com>
2023-10-29 13:58:26 -05:00
Alex
a6f39bc74f fix(web): Improve UI/UX for shared link form (#4685)
* chore(web): Improve shared link form

* add verification for password

* improve ux
2023-10-29 13:50:43 -05:00
doggo
daad02504f feat(web): added toggle for Sharing button in the sidebar (#4674)
* Added toggle for Sharing button in the sidebar

* fix: format

---------

Co-authored-by: Alex Tran <alex.tran1502@gmail.com>
2023-10-29 01:42:51 +00:00
jarvis2f
8a6889529c feat(server,web,mobile): Add optional password option for share links. (#4655)
* feat(server,web,mobile): Add optional password option for share links.

Signed-off-by: jarvis2f <137974272+jarvis2f@users.noreply.github.com>

* feat(server,web): Update shared-link.controller and page.svelte for improved cookie handling and metadata updates.

Signed-off-by: jarvis2f <137974272+jarvis2f@users.noreply.github.com>

---------

Signed-off-by: jarvis2f <137974272+jarvis2f@users.noreply.github.com>
2023-10-28 20:35:38 -05:00
Alex
b34cbd881a fix(web): scrollbar does not show all years (#4684) 2023-10-29 01:31:33 +00:00
martin
f6eaaab725 docs: update milestone page (#4683)
* docs: update milestone page

* docs: add 20k milestone
2023-10-28 20:20:05 -05:00
shenlong
2a2c74e081 fix(mobile): handle shared assets in viewer (#4679)
Co-authored-by: shalong-tanwen <139912620+shalong-tanwen@users.noreply.github.com>
2023-10-28 14:48:30 -05:00
Skyler Mäntysaari
c653e0f261 fix(server/oauth): Handle errors from OAuth Discovery. (#4678) 2023-10-28 14:35:09 -05:00
martin
f0dd1d715a fix(web): table headers when there's no album (#4673) 2023-10-28 14:34:45 -05:00
386 changed files with 18698 additions and 6705 deletions

View File

@@ -38,7 +38,7 @@ jobs:
-
name: Clean temporary images
if: "${{ env.TOKEN != '' }}"
uses: stumpylog/image-cleaner-action/ephemeral@v0.3.0
uses: stumpylog/image-cleaner-action/ephemeral@v0.4.0
with:
token: "${{ env.TOKEN }}"
owner: "immich-app"
@@ -70,7 +70,7 @@ jobs:
-
name: Clean untagged images
if: "${{ env.TOKEN != '' }}"
uses: stumpylog/image-cleaner-action/untagged@v0.3.0
uses: stumpylog/image-cleaner-action/untagged@v0.4.0
with:
token: "${{ env.TOKEN }}"
owner: "immich-app"

View File

@@ -166,7 +166,6 @@ jobs:
- name: Install dependencies
run: |
poetry install --with dev
poetry run pip install --no-deps -r requirements.txt
- name: Lint with ruff
run: |
poetry run ruff check --format=github app

View File

@@ -2,7 +2,7 @@
<br/>
<a href="https://opensource.org/licenses/MIT"><img src="https://img.shields.io/badge/license-MIT-green.svg?color=3F51B5&style=for-the-badge&label=License&logoColor=000000&labelColor=ececec" alt="License: MIT"></a>
<a href="https://discord.gg/D8JsnBEuKb">
<img src="https://img.shields.io/discord/979116623879368755.svg?label=Discord&logo=Discord&style=for-the-badge&logoColor=000000&labelColor=ececec" atl="Discord"/>
<img src="https://img.shields.io/discord/979116623879368755.svg?label=Discord&logo=Discord&style=for-the-badge&logoColor=000000&labelColor=ececec" alt="Discord"/>
</a>
<br/>
<br/>
@@ -66,7 +66,7 @@ password: demo
Spec: Free-tier Oracle VM - Amsterdam - 2.4Ghz quad-core ARM64 CPU, 24GB RAM
```
# Features
## Features
| Features | Mobile | Web |
| -------------------------------------------- | ------ | --- |
@@ -96,7 +96,7 @@ Spec: Free-tier Oracle VM - Amsterdam - 2.4Ghz quad-core ARM64 CPU, 24GB RAM
| Offline support | Yes | No |
| Read-only gallery | Yes | Yes |
# Support the project
## Support the project
I've committed to this project, and I will not stop. I will keep updating the docs, adding new features, and fixing bugs. But I can't do it alone. So I need your help to give me additional motivation to keep going.
@@ -104,10 +104,15 @@ As our hosts in the [selfhosted.show - In the episode 'The-organization-must-not
If you feel like this is the right cause and the app is something you are seeing yourself using for a long time, please consider supporting the project with the option below.
## Donation
### Donation
- [Monthly donation](https://github.com/sponsors/alextran1502) via GitHub Sponsors
- [One-time donation](https://github.com/sponsors/alextran1502?frequency=one-time&sponsor=alextran1502) via GitHub Sponsors
- [Librepay](https://liberapay.com/alex.tran1502/)
- [buymeacoffee](https://www.buymeacoffee.com/altran1502)
- Bitcoin: 1FvEp6P6NM8EZEkpGUFAN2LqJ1gxusNxZX
## Contributors
<a href="https://github.com/alextran1502/immich/graphs/contributors">
<img src="https://contrib.rocks/image?repo=immich-app/immich" width="100%"/>
</a>

File diff suppressed because it is too large.

View File

@@ -4,7 +4,7 @@
* Immich
* Immich API
*
* The version of the OpenAPI document: 1.83.0
* The version of the OpenAPI document: 1.85.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

View File

@@ -4,7 +4,7 @@
* Immich
* Immich API
*
* The version of the OpenAPI document: 1.83.0
* The version of the OpenAPI document: 1.85.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

View File

@@ -4,7 +4,7 @@
* Immich
* Immich API
*
* The version of the OpenAPI document: 1.83.0
* The version of the OpenAPI document: 1.85.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

View File

@@ -4,7 +4,7 @@
* Immich
* Immich API
*
* The version of the OpenAPI document: 1.83.0
* The version of the OpenAPI document: 1.85.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

View File

@@ -1,3 +1,7 @@
# See:
# - https://immich.app/docs/developer/setup
# - https://immich.app/docs/developer/troubleshooting
version: "3.8"
services:
@@ -71,10 +75,6 @@ services:
command: npm run dev --host
env_file:
- .env
environment:
# Rename these values for svelte public interface
- PUBLIC_IMMICH_SERVER_URL=${IMMICH_SERVER_URL}
- PUBLIC_IMMICH_API_URL_EXTERNAL=${IMMICH_API_URL_EXTERNAL}
ports:
- 3000:3000
- 24678:24678

View File

@@ -7,9 +7,9 @@ services:
build:
context: ../server
dockerfile: Dockerfile
command: ["./start-server.sh"]
command: [ "./start-server.sh" ]
volumes:
- ${UPLOAD_LOCATION}:/usr/src/app/upload
- ${UPLOAD_LOCATION}/photos:/usr/src/app/upload
- /etc/localtime:/etc/localtime:ro
env_file:
- .env
@@ -18,19 +18,6 @@ services:
- database
- typesense
immich-machine-learning:
container_name: immich_machine_learning
image: immich-machine-learning:latest
build:
context: ../machine-learning
dockerfile: Dockerfile
volumes:
- ${UPLOAD_LOCATION}:/usr/src/app/upload
- model-cache:/cache
env_file:
- .env
restart: always
immich-microservices:
container_name: immich_microservices
image: immich-microservices:latest
@@ -40,9 +27,9 @@ services:
build:
context: ../server
dockerfile: Dockerfile
command: ["./start-microservices.sh"]
command: [ "./start-microservices.sh" ]
volumes:
- ${UPLOAD_LOCATION}:/usr/src/app/upload
- ${UPLOAD_LOCATION}/photos:/usr/src/app/upload
- /etc/localtime:/etc/localtime:ro
env_file:
- .env
@@ -64,6 +51,18 @@ services:
depends_on:
- immich-server
immich-machine-learning:
container_name: immich_machine_learning
image: immich-machine-learning:latest
build:
context: ../machine-learning
dockerfile: Dockerfile
volumes:
- model-cache:/cache
env_file:
- .env
restart: always
typesense:
container_name: immich_typesense
image: typesense/typesense:0.24.1@sha256:9bcff2b829f12074426ca044b56160ca9d777a0c488303469143dd9f8259d4dd
@@ -73,7 +72,7 @@ services:
# remove this to get debug messages
- GLOG_minloglevel=1
volumes:
- tsdata:/data
- ${UPLOAD_LOCATION}/typesense:/data
restart: always
redis:
@@ -91,7 +90,7 @@ services:
POSTGRES_USER: ${DB_USERNAME}
POSTGRES_DB: ${DB_DATABASE_NAME}
volumes:
- pgdata:/var/lib/postgresql/data
- ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
restart: always
immich-proxy:
@@ -113,6 +112,4 @@ services:
restart: always
volumes:
pgdata:
model-cache:
tsdata:

docker/hwaccel-rkmpp.yml (new file, 24 lines)
View File

@@ -0,0 +1,24 @@
version: "3.8"
# Hardware acceleration for transcoding using RKMPP for Rockchip SOCs
# This is only needed if you want to use hardware acceleration for transcoding.
# Supported host OS is Ubuntu Jammy 22.04 with custom ffmpeg from ppa:liujianfeng1994/rockchip-multimedia
services:
hwaccel:
security_opt: # enables full access to /sys and /proc, still far better than privileged: true
- systempaths=unconfined
- apparmor=unconfined
group_add:
- video
devices:
- /dev/rga:/dev/rga
- /dev/dri:/dev/dri
- /dev/dma_heap:/dev/dma_heap
- /dev/mpp_service:/dev/mpp_service
volumes:
- /usr/bin/ffmpeg:/usr/bin/ffmpeg_mpp:ro
- /lib/aarch64-linux-gnu:/lib/ffmpeg-mpp:ro
- /lib/aarch64-linux-gnu/libblas.so.3:/lib/ffmpeg-mpp/libblas.so.3:ro # symlink is resolved by mounting
- /lib/aarch64-linux-gnu/liblapack.so.3:/lib/ffmpeg-mpp/liblapack.so.3:ro # symlink is resolved by mounting
- /lib/aarch64-linux-gnu/pulseaudio/libpulsecommon-15.99.so:/lib/ffmpeg-mpp/libpulsecommon-15.99.so:ro

View File

@@ -33,8 +33,6 @@ To be concise, Immich can now read in the gallery files, register the path into
- Only new files that are added to the gallery will be detected.
- Deleted and moved files will not be detected.
You can find more information on how to use the feature by reading the documentation [here](/docs/features/read-only-gallery).
## Memory feature
This is considered a fun feature that the team and I wanted to build for so long, but we had to put it off because of the refactoring of the code base. The code base is now in a good enough form to circle back and add more exciting features.

View File

@@ -17,13 +17,13 @@ docker exec -t immich_postgres pg_dumpall -c -U postgres | gzip > "/path/to/back
```
```bash title='Restore'
docker-compose down -v # CAUTION! Deletes all Immich data to start from scratch.
docker-compose pull # Update to latest version of Immich (if desired)
docker-compose create # Create Docker containers for Immich apps without running them.
docker compose down -v # CAUTION! Deletes all Immich data to start from scratch.
docker compose pull # Update to latest version of Immich (if desired)
docker compose create # Create Docker containers for Immich apps without running them.
docker start immich_postgres # Start Postgres server
sleep 10 # Wait for Postgres server to start up
gunzip < "/path/to/backup/dump.sql.gz" | docker exec -i immich_postgres psql -U postgres -d immich # Restore Backup
docker-compose up -d # Start remainder of Immich apps
docker compose up -d # Start remainder of Immich apps
```
Note that for the database restore to proceed properly, it requires a completely fresh install (i.e. the Immich server has never run since creating the Docker containers). If the Immich app has run, Postgres conflicts may be encountered upon database restoration (relation already exists, violated foreign key constraints, multiple primary keys, etc.).

View File

@@ -12,6 +12,6 @@ The backend has an end-to-end test suite that can be called with `npm run test:e
Note that there is a bug in nodejs <20.8 that causes segmentation faults when running these tests. If you run into segfaults, ensure you are using at least version 20.8.
To perform a full e2e test, you need to run e2e tests inside docker. The easiest way to do that is to run `make test-e2e` in the root directory. This will build and start a docker-compose consisting of the server, microservices, and a postgres database. It will then perfom the tests and exit.
To perform a full e2e test, you need to run e2e tests inside docker. The easiest way to do that is to run `make test-e2e` in the root directory. This will build and start a docker-compose consisting of the server, microservices, and a postgres database. It will then perform the tests and exit.
If you manually install the dependencies (see the DOCKERFILE) on your development machine, you can also run the full e2e tests manually by setting the `IMMICH_RUN_ALL_TESTS` environment value to true, i.e. `IMMICH_RUN_ALL_TESTS=true npm run test:e2e`.

View File

@@ -0,0 +1,19 @@
# Troubleshooting
:::tip
A great option to get assistance with troubleshooting is to join our [Discord](https://discord.gg/D8JsnBEuKb) server, where we have a dedicated channel for `#contributing`.
:::
## Known Issues
### Running on Windows
Running Immich on Windows can be frustrating and there are lots of ways it can go wrong. Where possible we recommend using Docker on Linux. However, several people have had success running Immich on Windows using Docker via WSL2.
### NTFS Mounted Volumes
The docker-compose.dev.yml and docker-compose.prod.yml use volume mounts for the postgres database. On start-up, postgres will try to `chown` the data directory, but fail. See [this post](https://forums.docker.com/t/data-directory-var-lib-postgresql-data-pgdata-has-wrong-ownership/17963/24) for more information about this issue and possible solutions.
### `Cannot read properties of null (reading 'split')`
This error occurs when trying to access the app via port `3000` instead of `2283`. During development `immich-proxy` runs on port 2283, while `immich-web` runs on `3000`.

View File

@@ -4,6 +4,10 @@ You can use the CLI to upload an existing gallery to the Immich server
[Immich CLI Repository](https://github.com/immich-app/CLI)
:::tip Google Photos Takeout
If you are looking to import your Google Photos takeout, we recommend this community-maintained tool [immich-go](https://github.com/simulot/immich-go)
:::
## Requirements
- Node.js 16 or above
@@ -32,7 +36,6 @@ immich
| --server / -s | Immich's server address |
| --threads / -t | Number of threads to use (Default 5) |
| --album/ -al | Create albums for assets based on the parent folder or a given name |
| --import/ -i | Import gallery (assets are not uploaded) |
## Quick Start
@@ -108,70 +111,3 @@ npm run build
```bash title="Run the command"
node bin/index.js upload --key HFEJ38DNSDUEG --server http://192.168.1.216:2283/api --recursive your/asset/directory
```
---
## Importing existing libraries
If you do not wish to upload files into the server, existing files can be imported into the immich gallery through the use of the `--import` flag.
```
immich upload --key HFEJ38DNSDUEG --server http://192.168.1.216:2283/api --recursive directory/ --import
```
```
immich upload --key HFEJ38DNSDUEG --server http://192.168.1.216:2283/api file1.jpg file2.jpg --import
```
The `immich-server` and `immich-microservices` containers must be able to access the files, or directories at the path referenced in the command. The directories referenced must be set under a user's `External Path` setting. More detailed instructions can be found [here](/docs/features/read-only-gallery).
:::tip Matching volume references
The import command is most easily run on the machine running the immich service, as the path to the files on the machine running the command and the server must match identically.
If you are running immich within docker, the volume pointing to your existing library should be identical with your host machine.
```diff title="docker-compose.yml"
immich-server:
container_name: immich_server
image: ghcr.io/immich-app/immich-server:${IMMICH_VERSION:-release}
command: [ "start.sh", "immich" ]
volumes:
- ${UPLOAD_LOCATION}:/usr/src/app/upload
+ - /path/to/media:/path/to/media
env_file:
- .env
depends_on:
- redis
- database
- typesense
restart: always
immich-microservices:
container_name: immich_microservices
image: ghcr.io/immich-app/immich-server:${IMMICH_VERSION:-release}
command: [ "start.sh", "microservices" ]
volumes:
- ${UPLOAD_LOCATION}:/usr/src/app/upload
+ - /path/to/media:/path/to/media
env_file:
- .env
depends_on:
- redis
- database
- typesense
restart: always
```
The proper command for above would be as shown below. You should have access to `/path/to/media` exactly on the environment the CLI command is being run on
```
immich upload --key HFEJ38DNSDUEG --server http://192.168.1.216:2283/api --recursive /path/to/media --import
```
If you are running the import using the docker command, please note that the volumes should point to the `/path/to/media` exactly on the environment the CLI command is being run on
```
docker run -it --rm -v "/path/to/media:/path/to/media" ghcr.io/immich-app/immich-cli:latest upload --key HFEJ38DNSDUEG --server http://192.168.1.216:2283/api --recursive /path/to/media --import
```
:::

View File

@@ -1,5 +1,7 @@
# Facial Recognition
## Overview
Immich recognizes faces in your photos and videos and groups them together. You can then assign names to the faces and search for them.
The list of people is shown in the Explore page.
@@ -13,3 +15,16 @@ Upon clicking on a person, a list of assets that contain their face will be show
The asset detail view will also show the faces that are recognized in the asset.
<img src={require('./img/facial-recognition-3.png').default} title='Facial Recognition 3' />
## Actions
Additional actions you can do with a detected person are:
- Change the feature face photo of the person
- Set date of birth
- Merge two or more detected faces into one person
- Hide face
It can be found in the app bar when you access the detail view of a person
<img src={require('./img/facial-recognition-4.png').default} title='Facial Recognition 4' width="70%"/>

Binary file not shown (new image added, 416 KiB).

View File

@@ -85,7 +85,7 @@ There is an automatic job that's run once a day and refreshes all modified files
Let's show a concrete example where we add an existing gallery to Immich. Here, we have the following folders we want to add:
- `/home/user/old-pics`: a folder contining childhood photos.
- `/home/user/old-pics`: a folder containing childhood photos.
- `/mnt/nas/christmas-trip`: photos from a christmas trip. The subfolder `/mnt/nas/christmas-trip/Raw` contains the raw files directly from the DSLR. We don't want to import the raw files to Immich
- `/mnt/media/videos`: Videos from the same christmas trip.

View File

@@ -1,6 +1,6 @@
import MobileAppDownload from '../partials/_mobile-app-download.md';
import MobileAppLogin from '../partials/_mobile-app-login.md';
import MobileAppBackup from '../partials/_mobile-app-login.md';
import MobileAppBackup from '../partials/_mobile-app-backup.md';
# Mobile App

View File

@@ -1,97 +0,0 @@
# Read-only Gallery [Deprecated]
:::caution
This feature is being deprecated in favor of [Libraries](/docs/features/libraries.md).
:::
## Overview
This feature enables users to use an existing gallery without uploading the assets to Immich.
Upon syncing the file information, it will be read by Immich to generate supported files.
## Usage
:::tip Example scenario
On the VM/system that Immich is running, I have 2 galleries that I want to use with Immich.
- My gallery is stored at `/mnt/media/precious-memory`
- My wife's gallery is stored at `/mnt/media/childhood-memory`
We will use those values in the steps below.
:::
### Mount the gallery to the containers.
`immich-server` and `immich-microservices` containers will need access to the gallery. Mount the directory path as in the example below
```diff title="docker-compose.yml"
immich-server:
container_name: immich_server
image: ghcr.io/immich-app/immich-server:${IMMICH_VERSION:-release}
command: [ "start.sh", "immich" ]
volumes:
- ${UPLOAD_LOCATION}:/usr/src/app/upload
+ - /mnt/media/precious-memory:/mnt/media/precious-memory:ro
+ - /mnt/media/childhood-memory:/mnt/media/childhood-memory:ro
env_file:
- .env
depends_on:
- redis
- database
- typesense
restart: always
immich-microservices:
container_name: immich_microservices
image: ghcr.io/immich-app/immich-server:${IMMICH_VERSION:-release}
command: [ "start.sh", "microservices" ]
volumes:
- ${UPLOAD_LOCATION}:/usr/src/app/upload
+ - /mnt/media/precious-memory:/mnt/media/precious-memory:ro
+ - /mnt/media/childhood-memory:/mnt/media/childhood-memory:ro
env_file:
- .env
depends_on:
- redis
- database
- typesense
restart: always
```
:::tip
Internal and external path have to be identical.
:::
_Remember to bring the container down/up to register the changes. Make sure you can see the mounted path in the container._
### Register the path for the user.
This action is done by the admin of the instance.
- Navigate to `Administration > Users` page on the web.
- Click on the user edit button.
- Add the gallery path to the `External Path` field for the corresponding user and confirm the changes.
<img src={require('./img/me.png').default} width='33%' title='My Account Storage Path' />
<img src={require('./img/my-wife.png').default} width='33%' title='My Wifes Account Storage Path' />
### Sync with the CLI tool.
- Install or update the [CLI Tool](/docs/features/bulk-upload.md). The import feature is supported from version `v0.39.0` of the CLI
- Run the command below to sync the gallery with Immich.
```bash title="Import my gallery"
immich upload --key <my-api-key> --server http://my-server-ip:2283/api /mnt/media/precious-memory --recursive --import
```
```bash title="Import my wife gallery"
immich upload --key <my-wife-api-key> --server http://my-server-ip:2283/api /mnt/media/childhood-memory --recursive --import
```
The `--import` flag will tell Immich to import the files by path instead of uploading them.

View File

@@ -0,0 +1,42 @@
# Python File Upload
```python
#!/usr/bin/python3
import requests
import os
from datetime import datetime
API_KEY = 'YOUR_API_KEY' # replace with a valid api key
BASE_URL = 'http://127.0.0.1:2283/api' # replace as needed
def upload(file):
stats = os.stat(file)
headers = {
'Accept': 'application/json',
'x-api-key': API_KEY
}
data = {
'deviceAssetId': f'{file}-{stats.st_mtime}',
'deviceId': 'python',
'fileCreatedAt': datetime.fromtimestamp(stats.st_mtime),
'fileModifiedAt': datetime.fromtimestamp(stats.st_mtime),
'isFavorite': 'false',
}
files = {
'assetData': open(file, 'rb')
}
response = requests.post(
f'{BASE_URL}/asset/upload', headers=headers, data=data, files=files)
print(response.json())
# {'id': 'ef96f635-61c7-4639-9e60-61a11c4bbfba', 'duplicate': False}
upload('./test.jpg')
```
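
The example uploads a single hard-coded file. As a minimal sketch, assuming the `upload()` helper defined in the script above and a hypothetical `./photos` directory, the same script can be extended to walk a folder and upload every JPEG it finds:

```python
import os

PHOTO_DIR = './photos'  # hypothetical path; point this at your library

# Reuses the upload() helper from the script above for each JPEG found.
for root, _, names in os.walk(PHOTO_DIR):
    for name in names:
        if name.lower().endswith(('.jpg', '.jpeg')):
            upload(os.path.join(root, name))
```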

View File

@@ -17,6 +17,12 @@ The default configuration looks like this:
"targetAudioCodec": "aac",
"targetResolution": "720",
"maxBitrate": "0",
"bframes": -1,
"refs": 0,
"gopSize": 0,
"npl": 0,
"temporalAQ": false,
"cqMode": "auto",
"twoPass": false,
"transcode": "required",
"tonemap": "hable",
@@ -44,9 +50,15 @@ The default configuration looks like this:
"sidecar": {
"concurrency": 5
},
"library": {
"concurrency": 5
},
"storageTemplateMigration": {
"concurrency": 5
},
"migration": {
"concurrency": 5
},
"thumbnailGeneration": {
"concurrency": 5
},
@@ -55,16 +67,16 @@ The default configuration looks like this:
}
},
"machineLearning": {
"classification": {
"minScore": 0.7,
"enabled": true,
"modelName": "microsoft/resnet-50"
},
"enabled": true,
"url": "http://immich-machine-learning:3003",
"classification": {
"enabled": true,
"modelName": "microsoft/resnet-50",
"minScore": 0.9
},
"clip": {
"enabled": true,
"modelName": "ViT-B-32::openai"
"modelName": "ViT-B-32__openai"
},
"facialRecognition": {
"enabled": true,
@@ -74,6 +86,14 @@ The default configuration looks like this:
"minFaces": 1
}
},
"map": {
"enabled": true,
"tileUrl": "https://tile.openstreetmap.org/{z}/{x}/{y}.png"
},
"reverseGeocoding": {
"enabled": true,
"citiesFileOverride": "cities500"
},
"oauth": {
"enabled": false,
"issuerUrl": "",
@@ -96,8 +116,27 @@ The default configuration looks like this:
"thumbnail": {
"webpSize": 250,
"jpegSize": 1440,
"quality": 90,
"quality": 80,
"colorspace": "p3"
},
"newVersionCheck": {
"enabled": true
},
"trash": {
"enabled": true,
"days": 30
},
"theme": {
"customCss": ""
},
"library": {
"scan": {
"enabled": true,
"cronExpression": "0 0 * * *"
}
},
"stylesheets": {
"css": ""
}
}
```

Binary file not shown (image changed: 35 KiB → 137 KiB).

Binary file not shown (image changed: 321 KiB → 404 KiB).

Binary file not shown (image changed: 335 KiB → 334 KiB).

View File

@@ -34,7 +34,7 @@ function HomepageHeader() {
</Link>
</div>
<img src="/img/immich-screenshots.png" alt="logo" />
<img src="/img/immich-screenshots.png" alt="screenshots" width={'85%'} />
<div className="flex flex-col sm:flex-row place-items-center place-content-center mt-4 gap-1">
<div className="h-24">

View File

@@ -8,6 +8,7 @@ import {
mdiCheckAll,
mdiCheckboxMarked,
mdiCollage,
mdiContentCopy,
mdiDevices,
mdiFaceMan,
mdiFaceManOutline,
@@ -26,6 +27,7 @@ import {
mdiMerge,
mdiMonitor,
mdiMotionPlayOutline,
mdiPalette,
mdiPanVertical,
mdiPartyPopper,
mdiRaw,
@@ -47,6 +49,33 @@ import React from 'react';
import Timeline, { DateType, Item } from '../components/timeline';
const items: Item[] = [
{
icon: mdiStar,
description: 'Reach 20K Stars on GitHub!',
title: '20,000 Stars',
release: 'v1.83.0',
tag: 'v1.83.0',
date: new Date(2023, 9, 28),
dateType: DateType.RELEASE,
},
{
icon: mdiContentCopy,
title: 'Stack assets',
description: 'Manual asset stacking for grouping and hiding related assets in the main timeline.',
release: 'v1.83.0',
tag: 'v1.83.0',
date: new Date(2023, 9, 28),
dateType: DateType.RELEASE,
},
{
icon: mdiPalette,
title: 'Custom theme',
description: 'Apply your custom CSS for modifying fonts, colors, and styles in the web application.',
release: 'v1.83.0',
tag: 'v1.83.0',
date: new Date(2023, 9, 28),
dateType: DateType.RELEASE,
},
{
icon: mdiTrashCanOutline,
title: 'Trash Feature',
@@ -283,7 +312,7 @@ const items: Item[] = [
},
{
icon: mdiStar,
description: 'Reach 10K Starts on GitHub!',
description: 'Reach 10K Stars on GitHub!',
title: '10,000 Stars',
release: 'v1.54.0',
tag: 'v1.54.0',

Binary file not shown (image changed: 1.6 MiB → 1.8 MiB).

View File

@@ -10,9 +10,8 @@ RUN poetry config installer.max-workers 10 && \
RUN python -m venv /opt/venv
ENV VIRTUAL_ENV="/opt/venv" PATH="/opt/venv/bin:${PATH}"
COPY poetry.lock pyproject.toml requirements.txt ./
COPY poetry.lock pyproject.toml ./
RUN poetry install --sync --no-interaction --no-ansi --no-root --only main
RUN pip install --no-deps -r requirements.txt
FROM python:3.11-slim-bookworm

View File

@@ -1,5 +1,6 @@
import json
from typing import Any, Iterator, TypeAlias
from pathlib import Path
from typing import Any, Iterator
from unittest import mock
import numpy as np
@@ -8,8 +9,7 @@ from fastapi.testclient import TestClient
from PIL import Image
from .main import app, init_state
ndarray: TypeAlias = np.ndarray[int, np.dtype[np.float32]]
from .schemas import ndarray_f32
@pytest.fixture
@@ -18,13 +18,13 @@ def pil_image() -> Image.Image:
@pytest.fixture
def cv_image(pil_image: Image.Image) -> ndarray:
def cv_image(pil_image: Image.Image) -> ndarray_f32:
return np.asarray(pil_image)[:, :, ::-1] # PIL uses RGB while cv2 uses BGR
@pytest.fixture
def mock_get_model() -> Iterator[mock.Mock]:
with mock.patch("app.models.cache.InferenceModel.from_model_type", autospec=True) as mocked:
with mock.patch("app.models.cache.from_model_type", autospec=True) as mocked:
yield mocked
@@ -37,3 +37,25 @@ def deployed_app() -> TestClient:
@pytest.fixture(scope="session")
def responses() -> dict[str, Any]:
return json.load(open("responses.json", "r"))
@pytest.fixture(scope="session")
def clip_model_cfg() -> dict[str, Any]:
return {
"embed_dim": 512,
"vision_cfg": {"image_size": 224, "layers": 12, "width": 768, "patch_size": 32},
"text_cfg": {"context_length": 77, "vocab_size": 49408, "width": 512, "heads": 8, "layers": 12},
}
@pytest.fixture(scope="session")
def clip_preprocess_cfg() -> dict[str, Any]:
return {
"size": [224, 224],
"mode": "RGB",
"mean": [0.48145466, 0.4578275, 0.40821073],
"std": [0.26862954, 0.26130258, 0.27577711],
"interpolation": "bicubic",
"resize_mode": "shortest",
"fill_color": 0,
}

View File

@@ -1,3 +1,25 @@
from .clip import CLIPEncoder
from typing import Any
from app.schemas import ModelType
from .base import InferenceModel
from .clip import MCLIPEncoder, OpenCLIPEncoder, is_mclip, is_openclip
from .facial_recognition import FaceRecognizer
from .image_classification import ImageClassifier
def from_model_type(model_type: ModelType, model_name: str, **model_kwargs: Any) -> InferenceModel:
match model_type:
case ModelType.CLIP:
if is_openclip(model_name):
return OpenCLIPEncoder(model_name, **model_kwargs)
elif is_mclip(model_name):
return MCLIPEncoder(model_name, **model_kwargs)
else:
raise ValueError(f"Unknown CLIP model {model_name}")
case ModelType.FACIAL_RECOGNITION:
return FaceRecognizer(model_name, **model_kwargs)
case ModelType.IMAGE_CLASSIFICATION:
return ImageClassifier(model_name, **model_kwargs)
case _:
raise ValueError(f"Unknown model type {model_type}")

View File

@@ -25,7 +25,7 @@ class InferenceModel(ABC):
) -> None:
self.model_name = model_name
self.loaded = False
self._cache_dir = Path(cache_dir) if cache_dir is not None else get_cache_dir(model_name, self.model_type)
self._cache_dir = Path(cache_dir) if cache_dir is not None else None
self.providers = model_kwargs.pop("providers", ["CPUExecutionProvider"])
# don't pre-allocate more memory than needed
self.provider_options = model_kwargs.pop(
@@ -92,7 +92,7 @@ class InferenceModel(ABC):
@property
def cache_dir(self) -> Path:
return self._cache_dir
return self._cache_dir if self._cache_dir is not None else get_cache_dir(self.model_name, self.model_type)
@cache_dir.setter
def cache_dir(self, cache_dir: Path) -> None:

View File

@@ -4,6 +4,8 @@ from aiocache.backends.memory import SimpleMemoryCache
from aiocache.lock import OptimisticLock
from aiocache.plugins import BasePlugin, TimingPlugin
from app.models import from_model_type
from ..schemas import ModelType
from .base import InferenceModel
@@ -50,7 +52,7 @@ class ModelCache:
async with OptimisticLock(self.cache, key) as lock:
model = await self.cache.get(key)
if model is None:
model = InferenceModel.from_model_type(model_type, model_name, **model_kwargs)
model = from_model_type(model_type, model_name, **model_kwargs)
await lock.cas(model, ttl=self.ttl)
return model

View File

@@ -1,23 +1,24 @@
import os
import zipfile
import json
from abc import abstractmethod
from functools import cached_property
from io import BytesIO
from pathlib import Path
from typing import Any, Literal
import numpy as np
import onnxruntime as ort
import torch
from clip_server.model.clip import BICUBIC, _convert_image_to_rgb
from clip_server.model.clip_onnx import _MODELS, _S3_BUCKET_V2, CLIPOnnxModel, download_model
from clip_server.model.pretrained_models import _VISUAL_MODEL_IMAGE_SIZE
from clip_server.model.tokenization import Tokenizer
from huggingface_hub import snapshot_download
from PIL import Image
from torchvision.transforms import CenterCrop, Compose, Normalize, Resize, ToTensor
from transformers import AutoTokenizer
from app.config import log
from app.models.transforms import crop, get_pil_resampling, normalize, resize, to_numpy
from app.schemas import ModelType, ndarray_f32, ndarray_i32, ndarray_i64
from ..config import log
from ..schemas import ModelType
from .base import InferenceModel
class CLIPEncoder(InferenceModel):
class BaseCLIPEncoder(InferenceModel):
_model_type = ModelType.CLIP
def __init__(
@@ -27,48 +28,29 @@ class CLIPEncoder(InferenceModel):
mode: Literal["text", "vision"] | None = None,
**model_kwargs: Any,
) -> None:
if mode is not None and mode not in ("text", "vision"):
raise ValueError(f"Mode must be 'text', 'vision', or omitted; got '{mode}'")
if model_name not in _MODELS:
raise ValueError(f"Unknown model name {model_name}.")
self.mode = mode
super().__init__(model_name, cache_dir, **model_kwargs)
def _download(self) -> None:
models: tuple[tuple[str, str], tuple[str, str]] = _MODELS[self.model_name]
text_onnx_path = self.cache_dir / "textual.onnx"
vision_onnx_path = self.cache_dir / "visual.onnx"
if not text_onnx_path.is_file():
self._download_model(*models[0])
if not vision_onnx_path.is_file():
self._download_model(*models[1])
def _load(self) -> None:
if self.mode == "text" or self.mode is None:
log.debug(f"Loading clip text model '{self.model_name}'")
self.text_model = ort.InferenceSession(
self.cache_dir / "textual.onnx",
self.textual_path.as_posix(),
sess_options=self.sess_options,
providers=self.providers,
provider_options=self.provider_options,
)
self.text_outputs = [output.name for output in self.text_model.get_outputs()]
self.tokenizer = Tokenizer(self.model_name)
if self.mode == "vision" or self.mode is None:
log.debug(f"Loading clip vision model '{self.model_name}'")
self.vision_model = ort.InferenceSession(
self.cache_dir / "visual.onnx",
self.visual_path.as_posix(),
sess_options=self.sess_options,
providers=self.providers,
provider_options=self.provider_options,
)
self.vision_outputs = [output.name for output in self.vision_model.get_outputs()]
image_size = _VISUAL_MODEL_IMAGE_SIZE[CLIPOnnxModel.get_model_name(self.model_name)]
self.transform = _transform_pil_image(image_size)
def _predict(self, image_or_text: Image.Image | str) -> list[float]:
if isinstance(image_or_text, bytes):
@@ -78,55 +60,163 @@ class CLIPEncoder(InferenceModel):
case Image.Image():
if self.mode == "text":
raise TypeError("Cannot encode image as text-only model")
pixel_values = self.transform(image_or_text)
assert isinstance(pixel_values, torch.Tensor)
pixel_values = torch.unsqueeze(pixel_values, 0).numpy()
outputs = self.vision_model.run(self.vision_outputs, {"pixel_values": pixel_values})
outputs = self.vision_model.run(None, self.transform(image_or_text))
case str():
if self.mode == "vision":
raise TypeError("Cannot encode text as vision-only model")
text_inputs: dict[str, torch.Tensor] = self.tokenizer(image_or_text)
inputs = {
"input_ids": text_inputs["input_ids"].int().numpy(),
"attention_mask": text_inputs["attention_mask"].int().numpy(),
}
outputs = self.text_model.run(self.text_outputs, inputs)
outputs = self.text_model.run(None, self.tokenize(image_or_text))
case _:
raise TypeError(f"Expected Image or str, but got: {type(image_or_text)}")
return outputs[0][0].tolist()
def _download_model(self, model_name: str, model_md5: str) -> bool:
# downloading logic is adapted from clip-server's CLIPOnnxModel class
download_model(
url=_S3_BUCKET_V2 + model_name,
target_folder=self.cache_dir.as_posix(),
md5sum=model_md5,
with_resume=True,
)
file = self.cache_dir / model_name.split("/")[1]
if file.suffix == ".zip":
with zipfile.ZipFile(file, "r") as zip_ref:
zip_ref.extractall(self.cache_dir)
os.remove(file)
return True
@abstractmethod
def tokenize(self, text: str) -> dict[str, ndarray_i32]:
pass
@abstractmethod
def transform(self, image: Image.Image) -> dict[str, ndarray_f32]:
pass
@property
def textual_dir(self) -> Path:
return self.cache_dir / "textual"
@property
def visual_dir(self) -> Path:
return self.cache_dir / "visual"
@property
def model_cfg_path(self) -> Path:
return self.cache_dir / "config.json"
@property
def textual_path(self) -> Path:
return self.textual_dir / "model.onnx"
@property
def visual_path(self) -> Path:
return self.visual_dir / "model.onnx"
@property
def preprocess_cfg_path(self) -> Path:
return self.visual_dir / "preprocess_cfg.json"
@property
def cached(self) -> bool:
return (self.cache_dir / "textual.onnx").is_file() and (self.cache_dir / "visual.onnx").is_file()
return self.textual_path.is_file() and self.visual_path.is_file()
# same as `_transform_blob` without `_blob2image`
def _transform_pil_image(n_px: int) -> Compose:
return Compose(
[
Resize(n_px, interpolation=BICUBIC),
CenterCrop(n_px),
_convert_image_to_rgb,
ToTensor(),
Normalize(
(0.48145466, 0.4578275, 0.40821073),
(0.26862954, 0.26130258, 0.27577711),
),
]
)
class OpenCLIPEncoder(BaseCLIPEncoder):
def __init__(
self,
model_name: str,
cache_dir: str | None = None,
mode: Literal["text", "vision"] | None = None,
**model_kwargs: Any,
) -> None:
super().__init__(_clean_model_name(model_name), cache_dir, mode, **model_kwargs)
def _download(self) -> None:
snapshot_download(
f"immich-app/{self.model_name}",
cache_dir=self.cache_dir,
local_dir=self.cache_dir,
local_dir_use_symlinks=False,
)
def _load(self) -> None:
super()._load()
self.tokenizer = AutoTokenizer.from_pretrained(self.textual_dir)
self.sequence_length = self.model_cfg["text_cfg"]["context_length"]
self.size = (
self.preprocess_cfg["size"][0] if type(self.preprocess_cfg["size"]) == list else self.preprocess_cfg["size"]
)
self.resampling = get_pil_resampling(self.preprocess_cfg["interpolation"])
self.mean = np.array(self.preprocess_cfg["mean"], dtype=np.float32)
self.std = np.array(self.preprocess_cfg["std"], dtype=np.float32)
def tokenize(self, text: str) -> dict[str, ndarray_i32]:
input_ids: ndarray_i64 = self.tokenizer(
text,
max_length=self.sequence_length,
return_tensors="np",
return_attention_mask=False,
padding="max_length",
truncation=True,
).input_ids
return {"text": input_ids.astype(np.int32)}
def transform(self, image: Image.Image) -> dict[str, ndarray_f32]:
image = resize(image, self.size)
image = crop(image, self.size)
image_np = to_numpy(image)
image_np = normalize(image_np, self.mean, self.std)
return {"image": np.expand_dims(image_np.transpose(2, 0, 1), 0)}
@cached_property
def model_cfg(self) -> dict[str, Any]:
return json.load(self.model_cfg_path.open())
@cached_property
def preprocess_cfg(self) -> dict[str, Any]:
return json.load(self.preprocess_cfg_path.open())
class MCLIPEncoder(OpenCLIPEncoder):
def tokenize(self, text: str) -> dict[str, ndarray_i32]:
tokens: dict[str, ndarray_i64] = self.tokenizer(text, return_tensors="np")
return {k: v.astype(np.int32) for k, v in tokens.items()}
_OPENCLIP_MODELS = {
"RN50__openai",
"RN50__yfcc15m",
"RN50__cc12m",
"RN101__openai",
"RN101__yfcc15m",
"RN50x4__openai",
"RN50x16__openai",
"RN50x64__openai",
"ViT-B-32__openai",
"ViT-B-32__laion2b_e16",
"ViT-B-32__laion400m_e31",
"ViT-B-32__laion400m_e32",
"ViT-B-32__laion2b-s34b-b79k",
"ViT-B-16__openai",
"ViT-B-16__laion400m_e31",
"ViT-B-16__laion400m_e32",
"ViT-B-16-plus-240__laion400m_e31",
"ViT-B-16-plus-240__laion400m_e32",
"ViT-L-14__openai",
"ViT-L-14__laion400m_e31",
"ViT-L-14__laion400m_e32",
"ViT-L-14__laion2b-s32b-b82k",
"ViT-L-14-336__openai",
"ViT-H-14__laion2b-s32b-b79k",
"ViT-g-14__laion2b-s12b-b42k",
}
_MCLIP_MODELS = {
"LABSE-Vit-L-14",
"XLM-Roberta-Large-Vit-B-32",
"XLM-Roberta-Large-Vit-B-16Plus",
"XLM-Roberta-Large-Vit-L-14",
}
def _clean_model_name(model_name: str) -> str:
return model_name.split("/")[-1].replace("::", "__")
def is_openclip(model_name: str) -> bool:
return _clean_model_name(model_name) in _OPENCLIP_MODELS
def is_mclip(model_name: str) -> bool:
return _clean_model_name(model_name) in _MCLIP_MODELS

View File

@@ -9,7 +9,8 @@ from insightface.model_zoo import ArcFaceONNX, RetinaFace
from insightface.utils.face_align import norm_crop
from insightface.utils.storage import BASE_REPO_URL, download_file
from ..schemas import ModelType
from app.schemas import ModelType, ndarray_f32
from .base import InferenceModel
@@ -68,7 +69,7 @@ class FaceRecognizer(InferenceModel):
)
self.rec_model.prepare(ctx_id=0)
def _predict(self, image: np.ndarray[int, np.dtype[Any]] | bytes) -> list[dict[str, Any]]:
def _predict(self, image: ndarray_f32 | bytes) -> list[dict[str, Any]]:
if isinstance(image, bytes):
image = cv2.imdecode(np.frombuffer(image, np.uint8), cv2.IMREAD_COLOR)
bboxes, kpss = self.det_model.detect(image)

View File

@@ -0,0 +1,35 @@
import numpy as np
from PIL import Image
from app.schemas import ndarray_f32
_PIL_RESAMPLING_METHODS = {resampling.name.lower(): resampling for resampling in Image.Resampling}
def resize(img: Image.Image, size: int) -> Image.Image:
if img.width < img.height:
return img.resize((size, int((img.height / img.width) * size)), resample=Image.BICUBIC)
else:
return img.resize((int((img.width / img.height) * size), size), resample=Image.BICUBIC)
# https://stackoverflow.com/a/60883103
def crop(img: Image.Image, size: int) -> Image.Image:
left = int((img.size[0] / 2) - (size / 2))
upper = int((img.size[1] / 2) - (size / 2))
right = left + size
lower = upper + size
return img.crop((left, upper, right, lower))
def to_numpy(img: Image.Image) -> ndarray_f32:
return np.asarray(img.convert("RGB")).astype(np.float32) / 255.0
def normalize(img: ndarray_f32, mean: float | ndarray_f32, std: float | ndarray_f32) -> ndarray_f32:
return (img - mean) / std
def get_pil_resampling(resample: str) -> Image.Resampling:
return _PIL_RESAMPLING_METHODS[resample.lower()]
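
A small sketch of how these helpers chain together, mirroring `OpenCLIPEncoder.transform` in the CLIP diff above; the size/mean/std values are borrowed from the test fixtures rather than read from a real `preprocess_cfg.json`, and `test.jpg` stands in for any image:

```python
import numpy as np
from PIL import Image

from app.models.transforms import crop, normalize, resize, to_numpy

SIZE = 224  # illustrative; the encoder reads this from its preprocess config
MEAN = np.array([0.48145466, 0.4578275, 0.40821073], dtype=np.float32)
STD = np.array([0.26862954, 0.26130258, 0.27577711], dtype=np.float32)

img = Image.open("test.jpg")
img = crop(resize(img, SIZE), SIZE)                # shortest side to 224, then center crop
arr = normalize(to_numpy(img), MEAN, STD)          # HWC float32 in [0, 1], then standardized
batch = np.expand_dims(arr.transpose(2, 0, 1), 0)  # NCHW batch of shape (1, 3, 224, 224)
```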

View File

@@ -1,5 +1,7 @@
from enum import StrEnum
from typing import TypeAlias
import numpy as np
from pydantic import BaseModel
@@ -31,3 +33,8 @@ class ModelType(StrEnum):
IMAGE_CLASSIFICATION = "image-classification"
CLIP = "clip"
FACIAL_RECOGNITION = "facial-recognition"
ndarray_f32: TypeAlias = np.ndarray[int, np.dtype[np.float32]]
ndarray_i64: TypeAlias = np.ndarray[int, np.dtype[np.int64]]
ndarray_i32: TypeAlias = np.ndarray[int, np.dtype[np.int32]]

View File

@@ -1,7 +1,8 @@
import json
import pickle
from io import BytesIO
from typing import Any, TypeAlias
from pathlib import Path
from typing import Any, Callable
from unittest import mock
import cv2
@@ -14,13 +15,11 @@ from pytest_mock import MockerFixture
from .config import settings
from .models.base import PicklableSessionOptions
from .models.cache import ModelCache
from .models.clip import CLIPEncoder
from .models.clip import OpenCLIPEncoder
from .models.facial_recognition import FaceRecognizer
from .models.image_classification import ImageClassifier
from .schemas import ModelType
ndarray: TypeAlias = np.ndarray[int, np.dtype[np.float32]]
class TestImageClassifier:
classifier_preds = [
@@ -56,30 +55,50 @@ class TestImageClassifier:
class TestCLIP:
embedding = np.random.rand(512).astype(np.float32)
cache_dir = Path("test_cache")
def test_basic_image(self, pil_image: Image.Image, mocker: MockerFixture) -> None:
mocker.patch.object(CLIPEncoder, "download")
def test_basic_image(
self,
pil_image: Image.Image,
mocker: MockerFixture,
clip_model_cfg: dict[str, Any],
clip_preprocess_cfg: Callable[[Path], dict[str, Any]],
) -> None:
mocker.patch.object(OpenCLIPEncoder, "download")
mocker.patch.object(OpenCLIPEncoder, "model_cfg", clip_model_cfg)
mocker.patch.object(OpenCLIPEncoder, "preprocess_cfg", clip_preprocess_cfg)
mocker.patch("app.models.clip.AutoTokenizer.from_pretrained", autospec=True)
mocked = mocker.patch("app.models.clip.ort.InferenceSession", autospec=True)
mocked.return_value.run.return_value = [[self.embedding]]
clip_encoder = CLIPEncoder("ViT-B-32::openai", cache_dir="test_cache", mode="vision")
assert clip_encoder.mode == "vision"
clip_encoder = OpenCLIPEncoder("ViT-B-32::openai", cache_dir="test_cache", mode="vision")
embedding = clip_encoder.predict(pil_image)
assert clip_encoder.mode == "vision"
assert isinstance(embedding, list)
assert len(embedding) == 512
assert len(embedding) == clip_model_cfg["embed_dim"]
assert all([isinstance(num, float) for num in embedding])
clip_encoder.vision_model.run.assert_called_once()
def test_basic_text(self, mocker: MockerFixture) -> None:
mocker.patch.object(CLIPEncoder, "download")
def test_basic_text(
self,
mocker: MockerFixture,
clip_model_cfg: dict[str, Any],
clip_preprocess_cfg: Callable[[Path], dict[str, Any]],
) -> None:
mocker.patch.object(OpenCLIPEncoder, "download")
mocker.patch.object(OpenCLIPEncoder, "model_cfg", clip_model_cfg)
mocker.patch.object(OpenCLIPEncoder, "preprocess_cfg", clip_preprocess_cfg)
mocker.patch("app.models.clip.AutoTokenizer.from_pretrained", autospec=True)
mocked = mocker.patch("app.models.clip.ort.InferenceSession", autospec=True)
mocked.return_value.run.return_value = [[self.embedding]]
clip_encoder = CLIPEncoder("ViT-B-32::openai", cache_dir="test_cache", mode="text")
assert clip_encoder.mode == "text"
clip_encoder = OpenCLIPEncoder("ViT-B-32::openai", cache_dir="test_cache", mode="text")
embedding = clip_encoder.predict("test search query")
assert clip_encoder.mode == "text"
assert isinstance(embedding, list)
assert len(embedding) == 512
assert len(embedding) == clip_model_cfg["embed_dim"]
assert all([isinstance(num, float) for num in embedding])
clip_encoder.text_model.run.assert_called_once()

View File

@@ -0,0 +1,21 @@
FROM mambaorg/micromamba:bookworm-slim as builder
ENV NODE_ENV=production \
TRANSFORMERS_CACHE=/cache \
PYTHONDONTWRITEBYTECODE=1 \
PYTHONUNBUFFERED=1 \
PATH="/opt/venv/bin:$PATH" \
PYTHONPATH=/usr/src
COPY --chown=$MAMBA_USER:$MAMBA_USER conda-lock.yml /tmp/conda-lock.yml
RUN micromamba install -y -n base -f /tmp/conda-lock.yml && \
micromamba remove -y -n base cxx-compiler && \
micromamba clean --all --yes
WORKDIR /usr/src/app
COPY --chown=$MAMBA_USER:$MAMBA_USER start.sh .
COPY --chown=$MAMBA_USER:$MAMBA_USER app .
ENTRYPOINT ["/usr/local/bin/_entrypoint.sh"]
CMD ["./start.sh"]

File diff suppressed because it is too large

View File

@@ -0,0 +1,15 @@
name: base
channels:
- conda-forge
platforms:
- linux-64
- linux-aarch64
dependencies:
- black
- conda-lock
- mypy
- pytest
- pytest-cov
- pytest-mock
- ruff
category: dev

View File

@@ -0,0 +1,25 @@
name: base
channels:
- conda-forge
- nvidia
- pytorch-nightly
platforms:
- linux-64
dependencies:
- cxx-compiler
- onnx==1.*
- onnxruntime==1.*
- open-clip-torch==2.*
- orjson==3.*
- pip
- python==3.11.*
- pytorch
- rich==13.*
- safetensors==0.*
- setuptools==68.*
- torchvision
- transformers==4.*
- pip:
- multilingual-clip
- onnx-simplifier
category: main

View File

@@ -0,0 +1,67 @@
import tempfile
import warnings
from pathlib import Path
import torch
from multilingual_clip.pt_multilingual_clip import MultilingualCLIP
from transformers import AutoTokenizer
from .openclip import OpenCLIPModelConfig
from .openclip import to_onnx as openclip_to_onnx
from .optimize import optimize
from .util import get_model_path
_MCLIP_TO_OPENCLIP = {
"M-CLIP/XLM-Roberta-Large-Vit-B-32": OpenCLIPModelConfig("ViT-B-32", "openai"),
"M-CLIP/XLM-Roberta-Large-Vit-B-16Plus": OpenCLIPModelConfig("ViT-B-16-plus-240", "laion400m_e32"),
"M-CLIP/LABSE-Vit-L-14": OpenCLIPModelConfig("ViT-L-14", "openai"),
"M-CLIP/XLM-Roberta-Large-Vit-L-14": OpenCLIPModelConfig("ViT-L-14", "openai"),
}
def to_onnx(
model_name: str,
output_dir_visual: Path | str,
output_dir_textual: Path | str,
) -> None:
textual_path = get_model_path(output_dir_textual)
with tempfile.TemporaryDirectory() as tmpdir:
model = MultilingualCLIP.from_pretrained(model_name, cache_dir=tmpdir)
AutoTokenizer.from_pretrained(model_name).save_pretrained(output_dir_textual)
for param in model.parameters():
param.requires_grad_(False)
export_text_encoder(model, textual_path)
openclip_to_onnx(_MCLIP_TO_OPENCLIP[model_name], output_dir_visual)
optimize(textual_path)
def export_text_encoder(model: MultilingualCLIP, output_path: Path | str) -> None:
output_path = Path(output_path)
def forward(self: MultilingualCLIP, input_ids: torch.Tensor, attention_mask: torch.Tensor) -> torch.Tensor:
embs = self.transformer(input_ids, attention_mask)[0]
embs = (embs * attention_mask.unsqueeze(2)).sum(dim=1) / attention_mask.sum(dim=1)[:, None]
embs = self.LinearTransformation(embs)
return torch.nn.functional.normalize(embs, dim=-1)
# unfortunately need to monkeypatch for tracing to work here
# otherwise it hits the 2GiB protobuf serialization limit
MultilingualCLIP.forward = forward
args = (torch.ones(1, 77, dtype=torch.int32), torch.ones(1, 77, dtype=torch.int32))
with warnings.catch_warnings():
warnings.simplefilter("ignore", UserWarning)
torch.onnx.export(
model,
args,
output_path.as_posix(),
input_names=["input_ids", "attention_mask"],
output_names=["text_embedding"],
opset_version=17,
dynamic_axes={
"input_ids": {0: "batch_size", 1: "sequence_length"},
"attention_mask": {0: "batch_size", 1: "sequence_length"},
},
)
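Note (illustrative, not part of the diff): exporting one of the multilingual checkpoints with this module is a single call; the output paths below are placeholders. The visual encoder is delegated to the paired OpenCLIP configuration from _MCLIP_TO_OPENCLIP, so only the text transformer is traced here:

# hypothetical invocation; directory layout is illustrative
to_onnx(
    "M-CLIP/XLM-Roberta-Large-Vit-B-32",
    output_dir_visual="models/XLM-Roberta-Large-Vit-B-32/visual",
    output_dir_textual="models/XLM-Roberta-Large-Vit-B-32/textual",
)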

View File

@@ -0,0 +1,109 @@
import tempfile
import warnings
from dataclasses import dataclass, field
from pathlib import Path
import open_clip
import torch
from transformers import AutoTokenizer
from .optimize import optimize
from .util import get_model_path, save_config
@dataclass
class OpenCLIPModelConfig:
name: str
pretrained: str
image_size: int = field(init=False)
sequence_length: int = field(init=False)
def __post_init__(self) -> None:
open_clip_cfg = open_clip.get_model_config(self.name)
if open_clip_cfg is None:
raise ValueError(f"Unknown model {self.name}")
self.image_size = open_clip_cfg["vision_cfg"]["image_size"]
self.sequence_length = open_clip_cfg["text_cfg"]["context_length"]
def to_onnx(
model_cfg: OpenCLIPModelConfig,
output_dir_visual: Path | str | None = None,
output_dir_textual: Path | str | None = None,
) -> None:
with tempfile.TemporaryDirectory() as tmpdir:
model = open_clip.create_model(
model_cfg.name,
pretrained=model_cfg.pretrained,
jit=False,
cache_dir=tmpdir,
require_pretrained=True,
)
text_vision_cfg = open_clip.get_model_config(model_cfg.name)
for param in model.parameters():
param.requires_grad_(False)
if output_dir_visual is not None:
output_dir_visual = Path(output_dir_visual)
visual_path = get_model_path(output_dir_visual)
save_config(open_clip.get_model_preprocess_cfg(model), output_dir_visual / "preprocess_cfg.json")
save_config(text_vision_cfg, output_dir_visual.parent / "config.json")
export_image_encoder(model, model_cfg, visual_path)
optimize(visual_path)
if output_dir_textual is not None:
output_dir_textual = Path(output_dir_textual)
textual_path = get_model_path(output_dir_textual)
tokenizer_name = text_vision_cfg["text_cfg"].get("hf_tokenizer_name", "openai/clip-vit-base-patch32")
AutoTokenizer.from_pretrained(tokenizer_name).save_pretrained(output_dir_textual)
export_text_encoder(model, model_cfg, textual_path)
optimize(textual_path)
def export_image_encoder(model: open_clip.CLIP, model_cfg: OpenCLIPModelConfig, output_path: Path | str) -> None:
output_path = Path(output_path)
def encode_image(image: torch.Tensor) -> torch.Tensor:
return model.encode_image(image, normalize=True)
args = (torch.randn(1, 3, model_cfg.image_size, model_cfg.image_size),)
traced = torch.jit.trace(encode_image, args)
with warnings.catch_warnings():
warnings.simplefilter("ignore", UserWarning)
torch.onnx.export(
traced,
args,
output_path.as_posix(),
input_names=["image"],
output_names=["image_embedding"],
opset_version=17,
dynamic_axes={"image": {0: "batch_size"}},
)
def export_text_encoder(model: open_clip.CLIP, model_cfg: OpenCLIPModelConfig, output_path: Path | str) -> None:
output_path = Path(output_path)
def encode_text(text: torch.Tensor) -> torch.Tensor:
return model.encode_text(text, normalize=True)
args = (torch.ones(1, model_cfg.sequence_length, dtype=torch.int32),)
traced = torch.jit.trace(encode_text, args)
with warnings.catch_warnings():
warnings.simplefilter("ignore", UserWarning)
torch.onnx.export(
traced,
args,
output_path.as_posix(),
input_names=["text"],
output_names=["text_embedding"],
opset_version=17,
dynamic_axes={"text": {0: "batch_size"}},
)
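Note (illustrative, not part of the diff): usage mirrors the M-CLIP exporter: construct an OpenCLIPModelConfig, which resolves image_size and sequence_length from open_clip's model registry in __post_init__, then point to_onnx at output directories for whichever encoders are needed (either may be omitted). Paths are placeholders:

cfg = OpenCLIPModelConfig("ViT-B-32", "openai")
to_onnx(cfg, output_dir_visual="models/ViT-B-32__openai/visual", output_dir_textual="models/ViT-B-32__openai/textual")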

View File

@@ -0,0 +1,38 @@
from pathlib import Path
import onnx
import onnxruntime as ort
import onnxsim
def optimize_onnxsim(model_path: Path | str, output_path: Path | str) -> None:
model_path = Path(model_path)
output_path = Path(output_path)
model = onnx.load(model_path.as_posix())
model, check = onnxsim.simplify(model, skip_shape_inference=True)
assert check, "Simplified ONNX model could not be validated"
onnx.save(model, output_path.as_posix())
def optimize_ort(
model_path: Path | str,
output_path: Path | str,
level: ort.GraphOptimizationLevel = ort.GraphOptimizationLevel.ORT_ENABLE_BASIC,
) -> None:
model_path = Path(model_path)
output_path = Path(output_path)
sess_options = ort.SessionOptions()
sess_options.graph_optimization_level = level
sess_options.optimized_model_filepath = output_path.as_posix()
ort.InferenceSession(model_path.as_posix(), providers=["CPUExecutionProvider"], sess_options=sess_options)
def optimize(model_path: Path | str) -> None:
model_path = Path(model_path)
optimize_ort(model_path, model_path)
# onnxsim serializes large models as a blob, which uses much more memory when loading the model at runtime
if not any(file.name.startswith("Constant") for file in model_path.parent.iterdir()):
optimize_onnxsim(model_path, model_path)
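Note (illustrative, not part of the diff): optimize() is the entry point the exporters above call on each saved model. It applies onnxruntime's basic graph optimizations in place, then runs onnx-simplifier only when no external "Constant" data files sit next to the model, for the memory reason given in the comment. A usage sketch with a placeholder path:

from pathlib import Path

optimize(Path("models/ViT-B-32__openai/visual/model.onnx"))  # placeholder path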

View File

@@ -0,0 +1,15 @@
import json
from pathlib import Path
from typing import Any
def get_model_path(output_dir: Path | str) -> Path:
output_dir = Path(output_dir)
output_dir.mkdir(parents=True, exist_ok=True)
return output_dir / "model.onnx"
def save_config(config: Any, output_path: Path | str) -> None:
output_path = Path(output_path)
output_path.parent.mkdir(parents=True, exist_ok=True)
json.dump(config, output_path.open("w"))

View File

@@ -0,0 +1,76 @@
import gc
import os
from pathlib import Path
from tempfile import TemporaryDirectory
from huggingface_hub import create_repo, login, upload_folder
from models import mclip, openclip
from rich.progress import Progress
models = [
"RN50::openai",
"RN50::yfcc15m",
"RN50::cc12m",
"RN101::openai",
"RN101::yfcc15m",
"RN50x4::openai",
"RN50x16::openai",
"RN50x64::openai",
"ViT-B-32::openai",
"ViT-B-32::laion2b_e16",
"ViT-B-32::laion400m_e31",
"ViT-B-32::laion400m_e32",
"ViT-B-32::laion2b-s34b-b79k",
"ViT-B-16::openai",
"ViT-B-16::laion400m_e31",
"ViT-B-16::laion400m_e32",
"ViT-B-16-plus-240::laion400m_e31",
"ViT-B-16-plus-240::laion400m_e32",
"ViT-L-14::openai",
"ViT-L-14::laion400m_e31",
"ViT-L-14::laion400m_e32",
"ViT-L-14::laion2b-s32b-b82k",
"ViT-L-14-336::openai",
"ViT-H-14::laion2b-s32b-b79k",
"ViT-g-14::laion2b-s12b-b42k",
"M-CLIP/LABSE-Vit-L-14",
"M-CLIP/XLM-Roberta-Large-Vit-B-32",
"M-CLIP/XLM-Roberta-Large-Vit-B-16Plus",
"M-CLIP/XLM-Roberta-Large-Vit-L-14",
]
login(token=os.environ["HF_AUTH_TOKEN"])
with Progress() as progress:
task1 = progress.add_task("[green]Exporting models...", total=len(models))
task2 = progress.add_task("[yellow]Uploading models...", total=len(models))
with TemporaryDirectory() as tmp:
tmpdir = Path(tmp)
for model in models:
model_name = model.split("/")[-1].replace("::", "__")
config_path = tmpdir / model_name / "config.json"
def upload() -> None:
progress.update(task2, description=f"[yellow]Uploading {model_name}")
repo_id = f"immich-app/{model_name}"
create_repo(repo_id, exist_ok=True)
upload_folder(repo_id=repo_id, folder_path=tmpdir / model_name)
progress.update(task2, advance=1)
def export() -> None:
progress.update(task1, description=f"[green]Exporting {model_name}")
visual_dir = tmpdir / model_name / "visual"
textual_dir = tmpdir / model_name / "textual"
if model.startswith("M-CLIP"):
mclip.to_onnx(model, visual_dir, textual_dir)
else:
name, _, pretrained = model_name.partition("__")
openclip.to_onnx(openclip.OpenCLIPModelConfig(name, pretrained), visual_dir, textual_dir)
progress.update(task1, advance=1)
gc.collect()
export()
upload()

View File

@@ -1,11 +1,12 @@
from io import BytesIO
import json
from argparse import ArgumentParser
from io import BytesIO
from typing import Any
from locust import HttpUser, events, task
from locust.env import Environment
from PIL import Image
from argparse import ArgumentParser
byte_image = BytesIO()
@@ -14,11 +15,21 @@ def _(parser: ArgumentParser) -> None:
parser.add_argument("--tag-model", type=str, default="microsoft/resnet-50")
parser.add_argument("--clip-model", type=str, default="ViT-B-32::openai")
parser.add_argument("--face-model", type=str, default="buffalo_l")
parser.add_argument("--tag-min-score", type=int, default=0.0,
help="Returns all tags at or above this score. The default returns all tags.")
parser.add_argument("--face-min-score", type=int, default=0.034,
help=("Returns all faces at or above this score. The default returns 1 face per request; "
"setting this to 0 blows up the number of faces to the thousands."))
parser.add_argument(
"--tag-min-score",
type=int,
default=0.0,
help="Returns all tags at or above this score. The default returns all tags.",
)
parser.add_argument(
"--face-min-score",
type=int,
default=0.034,
help=(
"Returns all faces at or above this score. The default returns 1 face per request; "
"setting this to 0 blows up the number of faces to the thousands."
),
)
parser.add_argument("--image-size", type=int, default=1000)
@@ -62,7 +73,7 @@ class CLIPTextFormDataLoadTest(InferenceLoadTest):
("modelName", self.environment.parsed_options.clip_model),
("modelType", "clip"),
("options", json.dumps({"mode": "text"})),
("text", "test search query")
("text", "test search query"),
]
self.client.post("/predict", data=data)
@@ -88,5 +99,5 @@ class RecognitionFormDataLoadTest(InferenceLoadTest):
("options", json.dumps({"minScore": self.environment.parsed_options.face_min_score})),
]
files = {"image": self.data}
self.client.post("/predict", data=data, files=files)

File diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
[tool.poetry]
name = "machine-learning"
version = "1.83.0"
version = "1.85.0"
description = ""
authors = ["Hau Tran <alex.tran1502@gmail.com>"]
readme = "README.md"
@@ -9,8 +9,8 @@ packages = [{include = "app"}]
[tool.poetry.dependencies]
python = "^3.11"
torch = [
{markers = "platform_machine == 'arm64' or platform_machine == 'aarch64'", version = "=2.0.1", source = "pypi"},
{markers = "platform_machine == 'amd64' or platform_machine == 'x86_64'", version = "=2.0.1", source = "pytorch-cpu"}
{markers = "platform_machine == 'arm64' or platform_machine == 'aarch64'", version = "=2.1.0", source = "pypi"},
{markers = "platform_machine == 'amd64' or platform_machine == 'x86_64'", version = "=2.1.0", source = "pytorch-cpu"}
]
transformers = "^4.29.2"
onnxruntime = "^1.15.0"
@@ -22,14 +22,9 @@ uvicorn = {extras = ["standard"], version = "^0.22.0"}
pydantic = "^1.10.8"
aiocache = "^0.12.1"
optimum = "^1.9.1"
torchvision = [
{markers = "platform_machine == 'arm64' or platform_machine == 'aarch64'", version = "=0.15.2", source = "pypi"},
{markers = "platform_machine == 'amd64' or platform_machine == 'x86_64'", version = "=0.15.2", source = "pytorch-cpu"}
]
rich = "^13.4.2"
ftfy = "^6.1.1"
setuptools = "^68.0.0"
open-clip-torch = "^2.20.0"
python-multipart = "^0.0.6"
orjson = "^3.9.5"
safetensors = "0.3.2"
@@ -63,6 +58,7 @@ warn_redundant_casts = true
disallow_any_generics = true
check_untyped_defs = true
disallow_untyped_defs = true
ignore_missing_imports = true
[tool.pydantic-mypy]
init_forbid_extra = true
@@ -70,30 +66,6 @@ init_typed = true
warn_required_dynamic_aliases = true
warn_untyped_fields = true
[[tool.mypy.overrides]]
module = [
"huggingface_hub",
"transformers",
"gunicorn",
"cv2",
"insightface.model_zoo",
"insightface.utils.face_align",
"insightface.utils.storage",
"onnxruntime",
"optimum",
"optimum.pipelines",
"optimum.onnxruntime",
"clip_server.model.clip",
"clip_server.model.clip_onnx",
"clip_server.model.pretrained_models",
"clip_server.model.tokenization",
"torchvision.transforms",
"aiocache.backends.memory",
"aiocache.lock",
"aiocache.plugins"
]
ignore_missing_imports = true
[tool.ruff]
line-length = 120
target-version = "py311"

View File

@@ -1,2 +0,0 @@
# requirements to be installed with `--no-deps` flag
clip-server==0.8.*

View File

@@ -35,8 +35,8 @@ platform :android do
task: 'bundle',
build_type: 'Release',
properties: {
"android.injected.version.code" => 107,
"android.injected.version.name" => "1.83.0",
"android.injected.version.code" => 109,
"android.injected.version.name" => "1.85.0",
}
)
upload_to_play_store(skip_upload_apk: true, skip_upload_images: true, skip_upload_screenshots: true, aab: '../build/app/outputs/bundle/release/app-release.aab')

View File

@@ -1,2 +1,2 @@
* User can now download assets to local device
* Increased the font size for curated image thumbnail information on the seach page
* Increased the font size for curated image thumbnail information on the search page

View File

@@ -5,17 +5,17 @@
<testcase classname="fastlane.lanes" name="0: default_platform" time="0.000269">
<testcase classname="fastlane.lanes" name="0: default_platform" time="0.000625">
</testcase>
<testcase classname="fastlane.lanes" name="1: bundleRelease" time="81.160108">
<testcase classname="fastlane.lanes" name="1: bundleRelease" time="70.943413">
</testcase>
<testcase classname="fastlane.lanes" name="2: upload_to_play_store" time="39.176668">
<testcase classname="fastlane.lanes" name="2: upload_to_play_store" time="30.374484">
</testcase>

View File

@@ -23,6 +23,7 @@
"album_viewer_appbar_share_err_title": "Failed to change album title",
"album_viewer_appbar_share_leave": "Leave album",
"album_viewer_appbar_share_remove": "Remove from album",
"album_viewer_appbar_share_to": "Share To",
"album_viewer_page_share_add_users": "Add users",
"all_people_page_title": "People",
"all_videos_page_title": "Videos",
@@ -253,6 +254,8 @@
"profile_drawer_settings": "Settings",
"profile_drawer_sign_out": "Sign Out",
"profile_drawer_trash": "Trash",
"profile_drawer_documentation": "Documentation",
"profile_drawer_github": "GitHub",
"recently_added_page_title": "Recently Added",
"search_bar_hint": "Search your photos",
"search_page_categories": "Categories",
@@ -277,6 +280,7 @@
"select_user_for_sharing_page_share_suggestions": "Suggestions",
"server_info_box_app_version": "App Version",
"server_info_box_server_version": "Server Version",
"server_info_box_server_url": "Server URL",
"setting_image_viewer_help": "The detail viewer loads the small thumbnail first, then loads the medium-size preview (if enabled), finally loads the original (if enabled).",
"setting_image_viewer_original_subtitle": "Enable to load the original full-resolution image (large!). Disable to reduce data usage (both network and on device cache).",
"setting_image_viewer_original_title": "Load original image",
@@ -311,6 +315,8 @@
"shared_link_edit_change_expiry": "Change expiration time",
"shared_link_edit_description": "Description",
"shared_link_edit_description_hint": "Enter the share description",
"shared_link_edit_password": "Password",
"shared_link_edit_password_hint": "Enter the share password",
"shared_link_edit_show_meta": "Show metadata",
"shared_link_edit_submit_button": "Update link",
"shared_link_empty": "You don't have any shared links",
@@ -364,5 +370,14 @@
"viewer_unstack": "Un-Stack",
"cache_settings_tile_title": "Local Storage",
"cache_settings_tile_subtitle": "Control the local storage behaviour",
"viewer_stack_use_as_main_asset": "Use as Main Asset"
"viewer_stack_use_as_main_asset": "Use as Main Asset",
"app_bar_signout_dialog_title": "Sign out",
"app_bar_signout_dialog_content": "Are you sure you wanna sign out?",
"app_bar_signout_dialog_ok": "Yes",
"shared_album_activities_input_hint": "Say something",
"shared_album_activity_remove_title": "Delete Activity",
"shared_album_activity_remove_content": "Do you want to delete this activity?",
"shared_album_activity_setting_title": "Comments & likes",
"shared_album_activity_setting_subtitle": "Let others respond",
"shared_album_activities_input_disable": "Comment is disabled"
}

View File

@@ -169,4 +169,4 @@ SPEC CHECKSUMS:
PODFILE CHECKSUM: 599d8aeb73728400c15364e734525722250a5382
COCOAPODS: 1.12.1
COCOAPODS: 1.11.3

View File

@@ -379,7 +379,7 @@
CODE_SIGN_ENTITLEMENTS = Runner/RunnerProfile.entitlements;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 118;
CURRENT_PROJECT_VERSION = 124;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_BITCODE = NO;
INFOPLIST_FILE = Runner/Info.plist;
@@ -515,7 +515,7 @@
CLANG_ENABLE_MODULES = YES;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 118;
CURRENT_PROJECT_VERSION = 124;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_BITCODE = NO;
INFOPLIST_FILE = Runner/Info.plist;
@@ -543,7 +543,7 @@
CLANG_ENABLE_MODULES = YES;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 118;
CURRENT_PROJECT_VERSION = 124;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_BITCODE = NO;
INFOPLIST_FILE = Runner/Info.plist;

View File

@@ -59,11 +59,11 @@
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>1.78.1</string>
<string>1.84.0</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>118</string>
<string>124</string>
<key>FLTEnableImpeller</key>
<true />
<key>ITSAppUsesNonExemptEncryption</key>

View File

@@ -19,7 +19,7 @@ platform :ios do
desc "iOS Beta"
lane :beta do
increment_version_number(
version_number: "1.83.0"
version_number: "1.85.0"
)
increment_build_number(
build_number: latest_testflight_build_number + 1,

View File

@@ -5,32 +5,32 @@
<testcase classname="fastlane.lanes" name="0: default_platform" time="0.000256">
<testcase classname="fastlane.lanes" name="0: default_platform" time="0.000253">
</testcase>
<testcase classname="fastlane.lanes" name="1: increment_version_number" time="7.645306">
<testcase classname="fastlane.lanes" name="1: increment_version_number" time="0.181977">
</testcase>
<testcase classname="fastlane.lanes" name="2: latest_testflight_build_number" time="4.669798">
<testcase classname="fastlane.lanes" name="2: latest_testflight_build_number" time="16.12614">
</testcase>
<testcase classname="fastlane.lanes" name="3: increment_build_number" time="2.218788">
<testcase classname="fastlane.lanes" name="3: increment_build_number" time="0.162663">
</testcase>
<testcase classname="fastlane.lanes" name="4: build_app" time="97.596654">
<testcase classname="fastlane.lanes" name="4: build_app" time="145.399278">
</testcase>
<testcase classname="fastlane.lanes" name="5: upload_to_testflight" time="89.490906">
<testcase classname="fastlane.lanes" name="5: upload_to_testflight" time="61.317235">
</testcase>

View File

@@ -7,6 +7,7 @@ import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:flutter_displaymode/flutter_displaymode.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:timezone/data/latest.dart';
import 'package:immich_mobile/constants/locales.dart';
import 'package:immich_mobile/modules/backup/background_service/background.service.dart';
import 'package:immich_mobile/modules/backup/models/backup_album.model.dart';
@@ -77,6 +78,8 @@ Future<void> initApp() async {
log.severe('Catch all error: ${error.toString()} - $error', error, stack);
return true;
};
initializeTimeZones();
}
Future<Isar> loadDb() async {

View File

@@ -0,0 +1,90 @@
import 'package:immich_mobile/shared/models/user.dart';
import 'package:openapi/api.dart';
enum ActivityType { comment, like }
class Activity {
final String id;
final String? assetId;
final String? comment;
final DateTime createdAt;
final ActivityType type;
final User user;
const Activity({
required this.id,
this.assetId,
this.comment,
required this.createdAt,
required this.type,
required this.user,
});
Activity copyWith({
String? id,
String? assetId,
String? comment,
DateTime? createdAt,
ActivityType? type,
User? user,
}) {
return Activity(
id: id ?? this.id,
assetId: assetId ?? this.assetId,
comment: comment ?? this.comment,
createdAt: createdAt ?? this.createdAt,
type: type ?? this.type,
user: user ?? this.user,
);
}
Activity.fromDto(ActivityResponseDto dto)
: id = dto.id,
assetId = dto.assetId,
comment = dto.comment,
createdAt = dto.createdAt,
type = dto.type == ActivityResponseDtoTypeEnum.comment
? ActivityType.comment
: ActivityType.like,
user = User(
email: dto.user.email,
firstName: dto.user.firstName,
lastName: dto.user.lastName,
profileImagePath: dto.user.profileImagePath,
id: dto.user.id,
// Placeholder values
isAdmin: false,
updatedAt: DateTime.now(),
isPartnerSharedBy: false,
isPartnerSharedWith: false,
memoryEnabled: false,
);
@override
String toString() {
return 'Activity(id: $id, assetId: $assetId, comment: $comment, createdAt: $createdAt, type: $type, user: $user)';
}
@override
bool operator ==(Object other) {
if (identical(this, other)) return true;
return other is Activity &&
other.id == id &&
other.assetId == assetId &&
other.comment == comment &&
other.createdAt == createdAt &&
other.type == type &&
other.user == user;
}
@override
int get hashCode {
return id.hashCode ^
assetId.hashCode ^
comment.hashCode ^
createdAt.hashCode ^
type.hashCode ^
user.hashCode;
}
}

View File

@@ -0,0 +1,130 @@
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/modules/activities/models/activity.model.dart';
import 'package:immich_mobile/modules/activities/services/activity.service.dart';
class ActivityNotifier extends StateNotifier<AsyncValue<List<Activity>>> {
final Ref _ref;
final ActivityService _activityService;
final String albumId;
final String? assetId;
ActivityNotifier(
this._ref,
this._activityService,
this.albumId,
this.assetId,
) : super(
const AsyncData([]),
) {
fetchActivity();
}
Future<void> fetchActivity() async {
state = const AsyncLoading();
state = await AsyncValue.guard(
() => _activityService.getAllActivities(albumId, assetId),
);
}
Future<void> removeActivity(String id) async {
final activities = state.asData?.value ?? [];
if (await _activityService.removeActivity(id)) {
final removedActivity = activities.firstWhere((a) => a.id == id);
activities.remove(removedActivity);
state = AsyncData(activities);
if (removedActivity.type == ActivityType.comment) {
_ref
.read(
activityStatisticsStateProvider(
(albumId: albumId, assetId: assetId),
).notifier,
)
.removeActivity();
}
}
}
Future<void> addComment(String comment) async {
final activity = await _activityService.addActivity(
albumId,
ActivityType.comment,
assetId: assetId,
comment: comment,
);
if (activity != null) {
final activities = state.asData?.value ?? [];
state = AsyncData([...activities, activity]);
_ref
.read(
activityStatisticsStateProvider(
(albumId: albumId, assetId: assetId),
).notifier,
)
.addActivity();
if (assetId != null) {
// Add a count to the current album's provider as well
_ref
.read(
activityStatisticsStateProvider(
(albumId: albumId, assetId: null),
).notifier,
)
.addActivity();
}
}
}
Future<void> addLike() async {
final activity = await _activityService
.addActivity(albumId, ActivityType.like, assetId: assetId);
if (activity != null) {
final activities = state.asData?.value ?? [];
state = AsyncData([...activities, activity]);
}
}
}
class ActivityStatisticsNotifier extends StateNotifier<int> {
final String albumId;
final String? assetId;
final ActivityService _activityService;
ActivityStatisticsNotifier(this._activityService, this.albumId, this.assetId)
: super(0) {
fetchStatistics();
}
Future<void> fetchStatistics() async {
state = await _activityService.getStatistics(albumId, assetId: assetId);
}
Future<void> addActivity() async {
state = state + 1;
}
Future<void> removeActivity() async {
state = state - 1;
}
}
typedef ActivityParams = ({String albumId, String? assetId});
final activityStateProvider = StateNotifierProvider.autoDispose
.family<ActivityNotifier, AsyncValue<List<Activity>>, ActivityParams>(
(ref, args) {
return ActivityNotifier(
ref,
ref.watch(activityServiceProvider),
args.albumId,
args.assetId,
);
});
final activityStatisticsStateProvider = StateNotifierProvider.autoDispose
.family<ActivityStatisticsNotifier, int, ActivityParams>((ref, args) {
return ActivityStatisticsNotifier(
ref.watch(activityServiceProvider),
args.albumId,
args.assetId,
);
});

View File

@@ -0,0 +1,85 @@
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/modules/activities/models/activity.model.dart';
import 'package:immich_mobile/shared/providers/api.provider.dart';
import 'package:immich_mobile/shared/services/api.service.dart';
import 'package:logging/logging.dart';
import 'package:openapi/api.dart';
final activityServiceProvider =
Provider((ref) => ActivityService(ref.watch(apiServiceProvider)));
class ActivityService {
final ApiService _apiService;
final Logger _log = Logger("ActivityService");
ActivityService(this._apiService);
Future<List<Activity>> getAllActivities(
String albumId,
String? assetId,
) async {
try {
final list = await _apiService.activityApi
.getActivities(albumId, assetId: assetId);
return list != null ? list.map(Activity.fromDto).toList() : [];
} catch (e) {
_log.severe(
"failed to fetch activities for albumId - $albumId; assetId - $assetId -> $e",
);
rethrow;
}
}
Future<int> getStatistics(String albumId, {String? assetId}) async {
try {
final dto = await _apiService.activityApi
.getActivityStatistics(albumId, assetId: assetId);
return dto?.comments ?? 0;
} catch (e) {
_log.severe(
"failed to fetch activity statistics for albumId - $albumId; assetId - $assetId -> $e",
);
}
return 0;
}
Future<bool> removeActivity(String id) async {
try {
await _apiService.activityApi.deleteActivity(id);
return true;
} catch (e) {
_log.severe(
"failed to remove activity id - $id -> $e",
);
}
return false;
}
Future<Activity?> addActivity(
String albumId,
ActivityType type, {
String? assetId,
String? comment,
}) async {
try {
final dto = await _apiService.activityApi.createActivity(
ActivityCreateDto(
albumId: albumId,
type: type == ActivityType.comment
? ReactionType.comment
: ReactionType.like,
assetId: assetId,
comment: comment,
),
);
if (dto != null) {
return Activity.fromDto(dto);
}
} catch (e) {
_log.severe(
"failed to add activity for albumId - $albumId; assetId - $assetId -> $e",
);
}
return null;
}
}

View File

@@ -0,0 +1,320 @@
import 'package:cached_network_image/cached_network_image.dart';
import 'package:collection/collection.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
import 'package:flutter_hooks/flutter_hooks.dart' hide Store;
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/modules/activities/models/activity.model.dart';
import 'package:immich_mobile/modules/activities/providers/activity.provider.dart';
import 'package:immich_mobile/shared/models/store.dart';
import 'package:immich_mobile/shared/ui/confirm_dialog.dart';
import 'package:immich_mobile/shared/ui/immich_loading_indicator.dart';
import 'package:immich_mobile/shared/ui/user_circle_avatar.dart';
import 'package:immich_mobile/utils/datetime_extensions.dart';
import 'package:immich_mobile/utils/image_url_builder.dart';
class ActivitiesPage extends HookConsumerWidget {
final String albumId;
final String? assetId;
final bool withAssetThumbs;
final String appBarTitle;
final bool isOwner;
final bool isReadOnly;
const ActivitiesPage(
this.albumId, {
this.appBarTitle = "",
this.assetId,
this.withAssetThumbs = true,
this.isOwner = false,
this.isReadOnly = false,
super.key,
});
@override
Widget build(BuildContext context, WidgetRef ref) {
final provider =
activityStateProvider((albumId: albumId, assetId: assetId));
final activities = ref.watch(provider);
final inputController = useTextEditingController();
final inputFocusNode = useFocusNode();
final listViewScrollController = useScrollController();
final currentUser = Store.tryGet(StoreKey.currentUser);
useEffect(
() {
inputFocusNode.requestFocus();
return null;
},
[],
);
buildTitleWithTimestamp(Activity activity, {bool leftAlign = true}) {
final textColor = Theme.of(context).brightness == Brightness.dark
? Colors.white
: Colors.black;
final textStyle = Theme.of(context)
.textTheme
.bodyMedium
?.copyWith(color: textColor.withOpacity(0.6));
return Row(
mainAxisAlignment: leftAlign
? MainAxisAlignment.start
: MainAxisAlignment.spaceBetween,
mainAxisSize: leftAlign ? MainAxisSize.min : MainAxisSize.max,
children: [
Text(
"${activity.user.firstName} ${activity.user.lastName}",
style: textStyle,
overflow: TextOverflow.ellipsis,
),
if (leftAlign)
Text(
"",
style: textStyle,
),
Expanded(
child: Text(
activity.createdAt.copyWith().timeAgo(),
style: textStyle,
overflow: TextOverflow.ellipsis,
textAlign: leftAlign ? TextAlign.left : TextAlign.right,
),
),
],
);
}
buildAssetThumbnail(Activity activity) {
return withAssetThumbs && activity.assetId != null
? Container(
width: 40,
height: 30,
decoration: BoxDecoration(
borderRadius: BorderRadius.circular(4),
image: DecorationImage(
image: CachedNetworkImageProvider(
getThumbnailUrlForRemoteId(
activity.assetId!,
),
cacheKey: getThumbnailCacheKeyForRemoteId(
activity.assetId!,
),
headers: {
"Authorization":
'Bearer ${Store.get(StoreKey.accessToken)}',
},
),
fit: BoxFit.cover,
),
),
child: const SizedBox.shrink(),
)
: null;
}
buildTextField(String? likedId) {
final liked = likedId != null;
return Padding(
padding: const EdgeInsets.only(bottom: 10),
child: TextField(
controller: inputController,
enabled: !isReadOnly,
focusNode: inputFocusNode,
textInputAction: TextInputAction.send,
autofocus: false,
decoration: InputDecoration(
border: InputBorder.none,
focusedBorder: InputBorder.none,
prefixIcon: currentUser != null
? Padding(
padding: const EdgeInsets.symmetric(horizontal: 15),
child: UserCircleAvatar(
user: currentUser,
size: 30,
radius: 15,
),
)
: null,
suffixIcon: Padding(
padding: const EdgeInsets.only(right: 10),
child: IconButton(
icon: Icon(
liked
? Icons.favorite_rounded
: Icons.favorite_border_rounded,
),
onPressed: () async {
liked
? await ref
.read(provider.notifier)
.removeActivity(likedId)
: await ref.read(provider.notifier).addLike();
},
),
),
suffixIconColor: liked ? Colors.red[700] : null,
hintText: isReadOnly
? 'shared_album_activities_input_disable'.tr()
: 'shared_album_activities_input_hint'.tr(),
hintStyle: TextStyle(
fontWeight: FontWeight.normal,
fontSize: 14,
color: Colors.grey[600],
),
),
onEditingComplete: () async {
await ref.read(provider.notifier).addComment(inputController.text);
inputController.clear();
inputFocusNode.unfocus();
listViewScrollController.animateTo(
listViewScrollController.position.maxScrollExtent,
duration: const Duration(milliseconds: 800),
curve: Curves.fastOutSlowIn,
);
},
onTapOutside: (_) => inputFocusNode.unfocus(),
),
);
}
getDismissibleWidget(
Widget widget,
Activity activity,
bool canDelete,
) {
return Dismissible(
key: Key(activity.id),
dismissThresholds: const {
DismissDirection.horizontal: 0.7,
},
direction: DismissDirection.horizontal,
confirmDismiss: (direction) => canDelete
? showDialog(
context: context,
builder: (context) => ConfirmDialog(
onOk: () {},
title: "shared_album_activity_remove_title",
content: "shared_album_activity_remove_content",
ok: "delete_dialog_ok",
),
)
: Future.value(false),
onDismissed: (direction) async =>
await ref.read(provider.notifier).removeActivity(activity.id),
background: Container(
color: canDelete ? Colors.red[400] : Colors.grey[600],
alignment: AlignmentDirectional.centerStart,
child: canDelete
? const Padding(
padding: EdgeInsets.all(15),
child: Icon(
Icons.delete_sweep_rounded,
color: Colors.black,
),
)
: null,
),
secondaryBackground: Container(
color: canDelete ? Colors.red[400] : Colors.grey[600],
alignment: AlignmentDirectional.centerEnd,
child: canDelete
? const Padding(
padding: EdgeInsets.all(15),
child: Icon(
Icons.delete_sweep_rounded,
color: Colors.black,
),
)
: null,
),
child: widget,
);
}
return Scaffold(
appBar: AppBar(title: Text(appBarTitle)),
body: activities.maybeWhen(
orElse: () {
return const Center(child: ImmichLoadingIndicator());
},
data: (data) {
final liked = data.firstWhereOrNull(
(a) =>
a.type == ActivityType.like &&
a.user.id == currentUser?.id &&
a.assetId == assetId,
);
return SafeArea(
child: Stack(
children: [
ListView.builder(
controller: listViewScrollController,
itemCount: data.length + 1,
itemBuilder: (context, index) {
// Vertical gap after the last element
if (index == data.length) {
return const SizedBox(
height: 80,
);
}
final activity = data[index];
final canDelete =
activity.user.id == currentUser?.id || isOwner;
return Padding(
padding: const EdgeInsets.all(5),
child: activity.type == ActivityType.comment
? getDismissibleWidget(
ListTile(
minVerticalPadding: 15,
leading: UserCircleAvatar(user: activity.user),
title: buildTitleWithTimestamp(
activity,
leftAlign: withAssetThumbs &&
activity.assetId != null,
),
titleAlignment: ListTileTitleAlignment.top,
trailing: buildAssetThumbnail(activity),
subtitle: Text(activity.comment!),
),
activity,
canDelete,
)
: getDismissibleWidget(
ListTile(
minVerticalPadding: 15,
leading: Container(
width: 44,
alignment: Alignment.center,
child: Icon(
Icons.favorite_rounded,
color: Colors.red[700],
),
),
title: buildTitleWithTimestamp(activity),
trailing: buildAssetThumbnail(activity),
),
activity,
canDelete,
),
);
},
),
Align(
alignment: Alignment.bottomCenter,
child: Container(
color: Theme.of(context).scaffoldBackgroundColor,
child: buildTextField(liked?.id),
),
),
],
),
);
},
),
);
}
}

View File

@@ -2,6 +2,7 @@ import 'dart:async';
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/modules/album/providers/album_detail.provider.dart';
import 'package:immich_mobile/modules/album/services/album.service.dart';
import 'package:immich_mobile/shared/models/album.dart';
import 'package:immich_mobile/shared/models/asset.dart';
@@ -10,7 +11,7 @@ import 'package:immich_mobile/shared/providers/db.provider.dart';
import 'package:isar/isar.dart';
class SharedAlbumNotifier extends StateNotifier<List<Album>> {
SharedAlbumNotifier(this._albumService, Isar db) : super([]) {
SharedAlbumNotifier(this._albumService, Isar db, this._ref) : super([]) {
final query = db.albums.filter().sharedEqualTo(true).sortByCreatedAtDesc();
query.findAll().then((value) => state = value);
_streamSub = query.watch().listen((data) => state = data);
@@ -18,6 +19,7 @@ class SharedAlbumNotifier extends StateNotifier<List<Album>> {
final AlbumService _albumService;
late final StreamSubscription<List<Album>> _streamSub;
final Ref _ref;
Future<Album?> createSharedAlbum(
String albumName,
@@ -66,6 +68,17 @@ class SharedAlbumNotifier extends StateNotifier<List<Album>> {
return result;
}
Future<bool> setActivityEnabled(Album album, bool activityEnabled) async {
final result =
await _albumService.setActivityEnabled(album, activityEnabled);
if (result) {
_ref.invalidate(albumDetailProvider(album.id));
}
return result;
}
@override
void dispose() {
_streamSub.cancel();
@@ -78,5 +91,6 @@ final sharedAlbumProvider =
return SharedAlbumNotifier(
ref.watch(albumServiceProvider),
ref.watch(dbProvider),
ref,
);
});

View File

@@ -284,6 +284,23 @@ class AlbumService {
return false;
}
Future<bool> setActivityEnabled(Album album, bool enabled) async {
try {
final result = await _apiService.albumApi.updateAlbumInfo(
album.remoteId!,
UpdateAlbumDto(isActivityEnabled: enabled),
);
if (result != null) {
album.activityEnabled = enabled;
await _db.writeTxn(() => _db.albums.put(album));
return true;
}
} catch (e) {
debugPrint("Error setActivityEnabled ${e.toString()}");
}
return false;
}
Future<bool> deleteAlbum(Album album) async {
try {
final userId = Store.get(StoreKey.currentUser).isarId;

View File

@@ -3,10 +3,13 @@ import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
import 'package:fluttertoast/fluttertoast.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/modules/activities/providers/activity.provider.dart';
import 'package:immich_mobile/modules/album/providers/album.provider.dart';
import 'package:immich_mobile/modules/album/providers/album_detail.provider.dart';
import 'package:immich_mobile/modules/album/providers/album_viewer.provider.dart';
import 'package:immich_mobile/modules/album/providers/shared_album.provider.dart';
import 'package:immich_mobile/shared/ui/share_dialog.dart';
import 'package:immich_mobile/shared/services/share.service.dart';
import 'package:immich_mobile/routing/router.dart';
import 'package:immich_mobile/shared/models/album.dart';
import 'package:immich_mobile/shared/models/asset.dart';
@@ -24,6 +27,7 @@ class AlbumViewerAppbar extends HookConsumerWidget
required this.titleFocusNode,
this.onAddPhotos,
this.onAddUsers,
required this.onActivities,
}) : super(key: key);
final Album album;
@@ -33,11 +37,19 @@ class AlbumViewerAppbar extends HookConsumerWidget
final FocusNode titleFocusNode;
final Function(Album album)? onAddPhotos;
final Function(Album album)? onAddUsers;
final Function(Album album) onActivities;
@override
Widget build(BuildContext context, WidgetRef ref) {
final newAlbumTitle = ref.watch(albumViewerProvider).editTitleText;
final isEditAlbum = ref.watch(albumViewerProvider).isEditAlbum;
final comments = album.shared
? ref.watch(
activityStatisticsStateProvider(
(albumId: album.remoteId!, assetId: null),
),
)
: 0;
deleteAlbum() async {
ImmichLoadingOverlayController.appLoader.show();
@@ -160,40 +172,81 @@ class AlbumViewerAppbar extends HookConsumerWidget
ImmichLoadingOverlayController.appLoader.hide();
}
buildBottomSheetActionButton() {
void handleShareAssets(
WidgetRef ref,
BuildContext context,
Set<Asset> selection,
) {
showDialog(
context: context,
builder: (BuildContext buildContext) {
ref.watch(shareServiceProvider).shareAssets(selection.toList()).then(
(bool status) {
if (!status) {
ImmichToast.show(
context: context,
msg: 'image_viewer_page_state_provider_share_error'.tr(),
toastType: ToastType.error,
gravity: ToastGravity.BOTTOM,
);
}
Navigator.of(buildContext).pop();
},
);
return const ShareDialog();
},
barrierDismissible: false,
);
}
void onShareAssetsTo() async {
ImmichLoadingOverlayController.appLoader.show();
handleShareAssets(ref, context, selected);
ImmichLoadingOverlayController.appLoader.hide();
}
buildBottomSheetActions() {
if (selected.isNotEmpty) {
if (album.ownerId == userId) {
return ListTile(
leading: const Icon(Icons.delete_sweep_rounded),
return [
ListTile(
leading: const Icon(Icons.ios_share_rounded),
title: const Text(
'album_viewer_appbar_share_remove',
'album_viewer_appbar_share_to',
style: TextStyle(fontWeight: FontWeight.bold),
).tr(),
onTap: () => onRemoveFromAlbumPressed(),
);
} else {
return const SizedBox();
}
onTap: () => onShareAssetsTo(),
),
album.ownerId == userId
? ListTile(
leading: const Icon(Icons.delete_sweep_rounded),
title: const Text(
'album_viewer_appbar_share_remove',
style: TextStyle(fontWeight: FontWeight.bold),
).tr(),
onTap: () => onRemoveFromAlbumPressed(),
)
: const SizedBox(),
];
} else {
if (album.ownerId == userId) {
return ListTile(
leading: const Icon(Icons.delete_forever_rounded),
title: const Text(
'album_viewer_appbar_share_delete',
style: TextStyle(fontWeight: FontWeight.bold),
).tr(),
onTap: () => onDeleteAlbumPressed(),
);
} else {
return ListTile(
leading: const Icon(Icons.person_remove_rounded),
title: const Text(
'album_viewer_appbar_share_leave',
style: TextStyle(fontWeight: FontWeight.bold),
).tr(),
onTap: () => onLeaveAlbumPressed(),
);
}
return [
album.ownerId == userId
? ListTile(
leading: const Icon(Icons.delete_forever_rounded),
title: const Text(
'album_viewer_appbar_share_delete',
style: TextStyle(fontWeight: FontWeight.bold),
).tr(),
onTap: () => onDeleteAlbumPressed(),
)
: ListTile(
leading: const Icon(Icons.person_remove_rounded),
title: const Text(
'album_viewer_appbar_share_leave',
style: TextStyle(fontWeight: FontWeight.bold),
).tr(),
onTap: () => onLeaveAlbumPressed(),
),
];
}
}
@@ -257,7 +310,7 @@ class AlbumViewerAppbar extends HookConsumerWidget
child: Column(
mainAxisSize: MainAxisSize.min,
children: [
buildBottomSheetActionButton(),
...buildBottomSheetActions(),
if (selected.isEmpty && onAddPhotos != null) ...commonActions,
if (selected.isEmpty &&
onAddPhotos != null &&
@@ -271,6 +324,33 @@ class AlbumViewerAppbar extends HookConsumerWidget
);
}
Widget buildActivitiesButton() {
return IconButton(
onPressed: () {
onActivities(album);
},
icon: Row(
crossAxisAlignment: CrossAxisAlignment.center,
children: [
const Icon(
Icons.mode_comment_outlined,
),
if (comments != 0)
Padding(
padding: const EdgeInsets.only(left: 5),
child: Text(
comments.toString(),
style: TextStyle(
fontWeight: FontWeight.bold,
color: Theme.of(context).primaryColor,
),
),
),
],
),
);
}
buildLeadingButton() {
if (selected.isNotEmpty) {
return IconButton(
@@ -314,6 +394,8 @@ class AlbumViewerAppbar extends HookConsumerWidget
title: selected.isNotEmpty ? Text('${selected.length}') : null,
centerTitle: false,
actions: [
if (album.shared && (album.activityEnabled || comments != 0))
buildActivitiesButton(),
if (album.isRemote)
IconButton(
splashRadius: 25,

View File

@@ -23,6 +23,7 @@ class AlbumOptionsPage extends HookConsumerWidget {
final sharedUsers = useState(album.sharedUsers.toList());
final owner = album.owner.value;
final userId = ref.watch(authenticationProvider).userId;
final activityEnabled = useState(album.activityEnabled);
final isOwner = owner?.id == userId;
void showErrorMessage() {
@@ -195,6 +196,31 @@ class AlbumOptionsPage extends HookConsumerWidget {
mainAxisAlignment: MainAxisAlignment.start,
crossAxisAlignment: CrossAxisAlignment.start,
children: [
if (isOwner && album.shared)
SwitchListTile.adaptive(
value: activityEnabled.value,
onChanged: (bool value) async {
activityEnabled.value = value;
if (await ref
.read(sharedAlbumProvider.notifier)
.setActivityEnabled(album, value)) {
album.activityEnabled = value;
}
},
activeColor: activityEnabled.value
? Theme.of(context).primaryColor
: Theme.of(context).disabledColor,
dense: true,
title: Text(
"shared_album_activity_setting_title",
style: Theme.of(context)
.textTheme
.labelLarge
?.copyWith(fontWeight: FontWeight.bold),
).tr(),
subtitle:
const Text("shared_album_activity_setting_subtitle").tr(),
),
buildSectionTitle("PEOPLE"),
buildOwnerInfo(),
buildSharedUsersList(),

View File

@@ -232,6 +232,19 @@ class AlbumViewerPage extends HookConsumerWidget {
);
}
onActivitiesPressed(Album album) {
if (album.remoteId != null) {
AutoRouter.of(context).push(
ActivitiesRoute(
albumId: album.remoteId!,
appBarTitle: album.name,
isOwner: userId == album.ownerId,
isReadOnly: !album.activityEnabled,
),
);
}
}
return Scaffold(
appBar: album.when(
data: (data) => AlbumViewerAppbar(
@@ -242,6 +255,7 @@ class AlbumViewerPage extends HookConsumerWidget {
selectionDisabled: disableSelection,
onAddPhotos: onAddPhotosPressed,
onAddUsers: onAddUsersPressed,
onActivities: onActivitiesPressed,
),
error: (error, stackTrace) => AppBar(title: const Text("Error")),
loading: () => AppBar(),
@@ -265,6 +279,9 @@ class AlbumViewerPage extends HookConsumerWidget {
if (data.isRemote) buildControlButton(data),
],
),
isOwner: userId == data.ownerId,
sharedAlbumId:
data.shared && data.activityEnabled ? data.remoteId : null,
),
),
),

View File

@@ -10,12 +10,16 @@ import 'package:immich_mobile/routing/router.dart';
import 'package:immich_mobile/shared/models/album.dart';
import 'package:immich_mobile/modules/settings/providers/app_settings.provider.dart';
import 'package:immich_mobile/modules/settings/services/app_settings.service.dart';
import 'package:immich_mobile/shared/providers/server_info.provider.dart';
import 'package:immich_mobile/shared/ui/immich_app_bar.dart';
class LibraryPage extends HookConsumerWidget {
const LibraryPage({Key? key}) : super(key: key);
@override
Widget build(BuildContext context, WidgetRef ref) {
final trashEnabled =
ref.watch(serverInfoProvider.select((v) => v.serverFeatures.trash));
final albums = ref.watch(albumProvider);
var isDarkMode = Theme.of(context).brightness == Brightness.dark;
var settings = ref.watch(appSettingsServiceProvider);
@@ -28,21 +32,6 @@ class LibraryPage extends HookConsumerWidget {
[],
);
AppBar buildAppBar() {
return AppBar(
centerTitle: true,
automaticallyImplyLeading: false,
title: const Text(
'IMMICH',
style: TextStyle(
fontFamily: 'SnowburstOne',
fontWeight: FontWeight.bold,
fontSize: 22,
),
),
);
}
final selectedAlbumSortOrder =
useState(settings.getSetting(AppSettingsEnum.selectedAlbumSortOrder));
@@ -236,8 +225,23 @@ class LibraryPage extends HookConsumerWidget {
final local = albums.where((a) => a.isLocal).toList();
Widget? shareTrashButton() {
return trashEnabled
? InkWell(
onTap: () => AutoRouter.of(context).push(const TrashRoute()),
borderRadius: BorderRadius.circular(12),
child: const Icon(
Icons.delete_rounded,
size: 25,
),
)
: null;
}
return Scaffold(
appBar: buildAppBar(),
appBar: ImmichAppBar(
action: shareTrashButton(),
),
body: CustomScrollView(
slivers: [
SliverToBoxAdapter(

View File

@@ -10,6 +10,7 @@ import 'package:immich_mobile/modules/partner/ui/partner_list.dart';
import 'package:immich_mobile/routing/router.dart';
import 'package:immich_mobile/shared/models/album.dart';
import 'package:immich_mobile/shared/providers/user.provider.dart';
import 'package:immich_mobile/shared/ui/immich_app_bar.dart';
import 'package:immich_mobile/shared/ui/immich_image.dart';
class SharingPage extends HookConsumerWidget {
@@ -167,32 +168,6 @@ class SharingPage extends HookConsumerWidget {
);
}
AppBar buildAppBar() {
return AppBar(
centerTitle: true,
automaticallyImplyLeading: false,
title: const Text(
'IMMICH',
style: TextStyle(
fontFamily: 'SnowburstOne',
fontWeight: FontWeight.bold,
fontSize: 22,
),
),
actions: [
IconButton(
splashRadius: 25,
iconSize: 20,
icon: const Icon(
Icons.swap_horizontal_circle_outlined,
size: 20,
),
onPressed: () => AutoRouter.of(context).push(const PartnerRoute()),
),
],
);
}
buildEmptyListIndication() {
return SliverToBoxAdapter(
child: Padding(
@@ -241,8 +216,21 @@ class SharingPage extends HookConsumerWidget {
);
}
Widget sharePartnerButton() {
return InkWell(
onTap: () => AutoRouter.of(context).push(const PartnerRoute()),
borderRadius: BorderRadius.circular(12),
child: const Icon(
Icons.swap_horizontal_circle_rounded,
size: 25,
),
);
}
return Scaffold(
appBar: buildAppBar(),
appBar: ImmichAppBar(
action: sharePartnerButton(),
),
body: CustomScrollView(
slivers: [
SliverToBoxAdapter(child: buildTopBottons()),

View File

@@ -31,7 +31,14 @@ class DescriptionInput extends HookConsumerWidget {
final owner = ref.watch(currentUserProvider);
final hasError = useState(false);
controller.text = description;
useEffect(
() {
controller.text = description;
isTextEmpty.value = description.isEmpty;
return null;
},
[description],
);
submitDescription(String description) async {
hasError.value = false;

View File

@@ -4,6 +4,7 @@ import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
import 'package:flutter_map/flutter_map.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:timezone/timezone.dart';
import 'package:immich_mobile/modules/asset_viewer/ui/description_input.dart';
import 'package:immich_mobile/modules/map/ui/map_thumbnail.dart';
import 'package:immich_mobile/shared/models/asset.dart';
@@ -26,12 +27,36 @@ class ExifBottomSheet extends HookConsumerWidget {
exifInfo.latitude != 0 &&
exifInfo.longitude != 0;
String get formattedDateTime {
final fileCreatedAt = asset.fileCreatedAt.toLocal();
final date = DateFormat.yMMMEd().format(fileCreatedAt);
final time = DateFormat.jm().format(fileCreatedAt);
String formatTimeZone(Duration d) =>
"GMT${d.isNegative ? '-': '+'}${d.inHours.abs().toString().padLeft(2, '0')}:${d.inMinutes.abs().remainder(60).toString().padLeft(2, '0')}";
return '$date$time';
String get formattedDateTime {
DateTime dt = asset.fileCreatedAt.toLocal();
String? timeZone;
if (asset.exifInfo?.dateTimeOriginal != null) {
dt = asset.exifInfo!.dateTimeOriginal!;
if (asset.exifInfo?.timeZone != null) {
dt = dt.toUtc();
try {
final location = getLocation(asset.exifInfo!.timeZone!);
dt = TZDateTime.from(dt, location);
} on LocationNotFoundException {
RegExp re = RegExp(r'^utc(?:([+-]\d{1,2})(?::(\d{2}))?)?$', caseSensitive: false);
final m = re.firstMatch(asset.exifInfo!.timeZone!);
if (m != null) {
final duration = Duration(hours: int.parse(m.group(1) ?? '0'), minutes: int.parse(m.group(2) ?? '0'));
dt = dt.add(duration);
timeZone = formatTimeZone(duration);
}
}
}
}
final date = DateFormat.yMMMEd().format(dt);
final time = DateFormat.jm().format(dt);
timeZone ??= formatTimeZone(dt.timeZoneOffset);
return '$date$time $timeZone';
}
Future<Uri?> _createCoordinatesUri(ExifInfo? exifInfo) async {
@@ -297,9 +322,9 @@ class ExifBottomSheet extends HookConsumerWidget {
fontWeight: FontWeight.bold,
),
),
subtitle: Text(
subtitle: exifInfo.f != null || exifInfo.exposureSeconds != null || exifInfo.mm != null || exifInfo.iso != null ? Text(
"ƒ/${exifInfo.fNumber} ${exifInfo.exposureTime} ${exifInfo.focalLength} mm ISO ${exifInfo.iso ?? ''} ",
),
) : null,
),
],
);

View File

@@ -1,6 +1,7 @@
import 'package:auto_route/auto_route.dart';
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/modules/activities/providers/activity.provider.dart';
import 'package:immich_mobile/shared/models/asset.dart';
import 'package:immich_mobile/shared/providers/asset.provider.dart';
@@ -15,6 +16,9 @@ class TopControlAppBar extends HookConsumerWidget {
required this.isPlayingMotionVideo,
required this.onFavorite,
required this.onUploadPressed,
required this.isOwner,
required this.shareAlbumId,
required this.onActivitiesPressed,
}) : super(key: key);
final Asset asset;
@@ -23,13 +27,23 @@ class TopControlAppBar extends HookConsumerWidget {
final VoidCallback? onDownloadPressed;
final VoidCallback onToggleMotionVideo;
final VoidCallback onAddToAlbumPressed;
final VoidCallback onActivitiesPressed;
final Function(Asset) onFavorite;
final bool isPlayingMotionVideo;
final bool isOwner;
final String? shareAlbumId;
@override
Widget build(BuildContext context, WidgetRef ref) {
const double iconSize = 22.0;
final a = ref.watch(assetWatcher(asset)).value ?? asset;
final comments = shareAlbumId != null
? ref.watch(
activityStatisticsStateProvider(
(albumId: shareAlbumId!, assetId: asset.remoteId),
),
)
: 0;
Widget buildFavoriteButton(a) {
return IconButton(
@@ -92,6 +106,34 @@ class TopControlAppBar extends HookConsumerWidget {
);
}
Widget buildActivitiesButton() {
return IconButton(
onPressed: () {
onActivitiesPressed();
},
icon: Row(
crossAxisAlignment: CrossAxisAlignment.center,
children: [
Icon(
Icons.mode_comment_outlined,
color: Colors.grey[200],
),
if (comments != 0)
Padding(
padding: const EdgeInsets.only(left: 5),
child: Text(
comments.toString(),
style: TextStyle(
fontWeight: FontWeight.bold,
color: Colors.grey[200],
),
),
),
],
),
);
}
Widget buildUploadButton() {
return IconButton(
onPressed: onUploadPressed,
@@ -123,11 +165,12 @@ class TopControlAppBar extends HookConsumerWidget {
size: iconSize,
),
actions: [
if (asset.isRemote) buildFavoriteButton(a),
if (asset.isRemote && isOwner) buildFavoriteButton(a),
if (asset.livePhotoVideoId != null) buildLivePhotoButton(),
if (asset.isLocal && !asset.isRemote) buildUploadButton(),
if (asset.isRemote && !asset.isLocal) buildDownloadButton(),
if (asset.isRemote) buildAddToAlbumButtom(),
if (asset.isRemote && !asset.isLocal && isOwner) buildDownloadButton(),
if (asset.isRemote && isOwner) buildAddToAlbumButtom(),
if (shareAlbumId != null) buildActivitiesButton(),
buildMoreInfoButton(),
],
);
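In the TopControlAppBar diff above, the comment count shown next to the activities button comes from activityStatisticsStateProvider, a provider family keyed by a Dart record of album id and asset id. The sketch below shows the general shape such a record-keyed family can take; the notifier body and the setCount helper are assumptions for illustration only, not the implementation the diff refers to.

import 'package:hooks_riverpod/hooks_riverpod.dart';

// Record key matching how the provider is consumed above:
// (albumId: ..., assetId: ...). Records compare by value, so the family
// caches one notifier per album/asset pair.
typedef ActivityKey = ({String albumId, String? assetId});

class ActivityStatisticsNotifier extends StateNotifier<int> {
  ActivityStatisticsNotifier(this.key) : super(0);

  final ActivityKey key;

  // Stand-in for a fetch against the activity API; exposed as a setter so the
  // example main() below can drive the count.
  void setCount(int count) => state = count;
}

final activityStatisticsStateProvider = StateNotifierProvider
    .family<ActivityStatisticsNotifier, int, ActivityKey>(
  (ref, key) => ActivityStatisticsNotifier(key),
);

void main() {
  final container = ProviderContainer();
  const key = (albumId: 'album-1', assetId: 'asset-1');
  container.read(activityStatisticsStateProvider(key).notifier).setCount(3);
  print(container.read(activityStatisticsStateProvider(key))); // 3
}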

View File

@@ -48,6 +48,8 @@ class GalleryViewerPage extends HookConsumerWidget {
final int initialIndex;
final int heroOffset;
final bool showStack;
final bool isOwner;
final String? sharedAlbumId;
GalleryViewerPage({
super.key,
@@ -56,6 +58,8 @@ class GalleryViewerPage extends HookConsumerWidget {
required this.totalAssets,
this.heroOffset = 0,
this.showStack = false,
this.isOwner = true,
this.sharedAlbumId,
}) : controller = PageController(initialPage: initialIndex);
final PageController controller;
@@ -88,7 +92,7 @@ class GalleryViewerPage extends HookConsumerWidget {
: <Asset>[];
final stackElements = showStack ? [currentAsset, ...stack] : <Asset>[];
// Assets from response DTOs do not have an isar id, querying which would give us the default autoIncrement id
final isFromResponse = currentAsset.id == Isar.autoIncrement;
final isFromDto = currentAsset.id == Isar.autoIncrement;
Asset asset() => stackIndex.value == -1
? currentAsset
@@ -325,6 +329,19 @@ class GalleryViewerPage extends HookConsumerWidget {
);
}
handleActivities() {
if (sharedAlbumId != null) {
AutoRouter.of(context).push(
ActivitiesRoute(
albumId: sharedAlbumId!,
assetId: asset().remoteId,
withAssetThumbs: false,
isOwner: isOwner,
),
);
}
}
buildAppBar() {
return IgnorePointer(
ignoring: !ref.watch(showControlsProvider),
@@ -334,6 +351,7 @@ class GalleryViewerPage extends HookConsumerWidget {
child: Container(
color: Colors.black.withOpacity(0.4),
child: TopControlAppBar(
isOwner: isOwner,
isPlayingMotionVideo: isPlayingMotionVideo.value,
asset: asset(),
onMoreInfoPressed: showInfo,
@@ -352,6 +370,8 @@ class GalleryViewerPage extends HookConsumerWidget {
isPlayingMotionVideo.value = !isPlayingMotionVideo.value;
}),
onAddToAlbumPressed: () => addToAlbum(asset()),
shareAlbumId: sharedAlbumId,
onActivitiesPressed: handleActivities,
),
),
),
@@ -440,7 +460,8 @@ class GalleryViewerPage extends HookConsumerWidget {
decoration: BoxDecoration(
color: Colors.white,
borderRadius: BorderRadius.circular(6),
border: index == stackIndex.value
border: (stackIndex.value == -1 && index == 0) ||
index == stackIndex.value
? Border.all(
color: Colors.white,
width: 2,
@@ -573,35 +594,50 @@ class GalleryViewerPage extends HookConsumerWidget {
label: 'control_bottom_app_bar_share'.tr(),
tooltip: 'control_bottom_app_bar_share'.tr(),
),
asset().isArchived
? BottomNavigationBarItem(
icon: const Icon(Icons.unarchive_rounded),
label: 'control_bottom_app_bar_unarchive'.tr(),
tooltip: 'control_bottom_app_bar_unarchive'.tr(),
)
: BottomNavigationBarItem(
icon: const Icon(Icons.archive_outlined),
label: 'control_bottom_app_bar_archive'.tr(),
tooltip: 'control_bottom_app_bar_archive'.tr(),
),
if (stack.isNotEmpty)
if (isOwner)
asset().isArchived
? BottomNavigationBarItem(
icon: const Icon(Icons.unarchive_rounded),
label: 'control_bottom_app_bar_unarchive'.tr(),
tooltip: 'control_bottom_app_bar_unarchive'.tr(),
)
: BottomNavigationBarItem(
icon: const Icon(Icons.archive_outlined),
label: 'control_bottom_app_bar_archive'.tr(),
tooltip: 'control_bottom_app_bar_archive'.tr(),
),
if (isOwner && stack.isNotEmpty)
BottomNavigationBarItem(
icon: const Icon(Icons.burst_mode_outlined),
label: 'control_bottom_app_bar_stack'.tr(),
tooltip: 'control_bottom_app_bar_stack'.tr(),
),
BottomNavigationBarItem(
icon: const Icon(Icons.delete_outline),
label: 'control_bottom_app_bar_delete'.tr(),
tooltip: 'control_bottom_app_bar_delete'.tr(),
),
if (isOwner)
BottomNavigationBarItem(
icon: const Icon(Icons.delete_outline),
label: 'control_bottom_app_bar_delete'.tr(),
tooltip: 'control_bottom_app_bar_delete'.tr(),
),
if (!isOwner)
BottomNavigationBarItem(
icon: const Icon(Icons.download_outlined),
label: 'download'.tr(),
tooltip: 'download'.tr(),
),
];
List<Function(int)> actionslist = [
(_) => shareAsset(),
(_) => handleArchive(asset()),
if (stack.isNotEmpty) (_) => showStackActionItems(),
(_) => handleDelete(asset()),
if (isOwner) (_) => handleArchive(asset()),
if (isOwner && stack.isNotEmpty) (_) => showStackActionItems(),
if (isOwner) (_) => handleDelete(asset()),
if (!isOwner)
(_) => asset().isLocal
? null
: ref.watch(imageViewerStateProvider.notifier).downloadAsset(
asset(),
context,
),
];
return IgnorePointer(
@@ -755,7 +791,7 @@ class GalleryViewerPage extends HookConsumerWidget {
},
imageProvider: provider,
heroAttributes: PhotoViewHeroAttributes(
tag: isFromResponse
tag: isFromDto
? '${a.remoteId}-$heroOffset'
: a.id + heroOffset,
),
@@ -774,7 +810,7 @@ class GalleryViewerPage extends HookConsumerWidget {
onDragUpdate: (_, details, __) =>
handleSwipeUpDown(details),
heroAttributes: PhotoViewHeroAttributes(
tag: isFromResponse
tag: isFromDto
? '${a.remoteId}-$heroOffset'
: a.id + heroOffset,
),
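Earlier in this file's diff, the bottom bar's BottomNavigationBarItem list and actionslist repeat the same collection-if guards (isOwner, stack.isNotEmpty, !isOwner), which suggests the tapped item index is used to dispatch into actionslist. That index-based coupling is an inference here, not stated in the diff, but the pattern is common enough to be worth a standalone sketch:

void main() {
  const isOwner = false;
  const hasStack = true;

  // Both lists must apply identical guards, otherwise index i in one list
  // would point at a different entry in the other.
  final labels = <String>[
    'share',
    if (isOwner) 'archive',
    if (isOwner && hasStack) 'stack',
    if (isOwner) 'delete',
    if (!isOwner) 'download',
  ];
  final actions = <void Function()>[
    () => print('share tapped'),
    if (isOwner) () => print('archive tapped'),
    if (isOwner && hasStack) () => print('stack tapped'),
    if (isOwner) () => print('delete tapped'),
    if (!isOwner) () => print('download tapped'),
  ];

  for (var i = 0; i < labels.length; i++) {
    print(labels[i]);
    actions[i]();
  }
}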

View File

@@ -40,7 +40,7 @@ class BackupNotifier extends StateNotifier<BackUpState> {
progressInPercentage: 0,
cancelToken: CancellationToken(),
autoBackup: Store.get(StoreKey.autoBackup, false),
backgroundBackup: false,
backgroundBackup: Store.get(StoreKey.backgroundBackup, false),
backupRequireWifi: Store.get(StoreKey.backupRequireWifi, true),
backupRequireCharging:
Store.get(StoreKey.backupRequireCharging, false),
@@ -171,6 +171,7 @@ class BackupNotifier extends StateNotifier<BackUpState> {
state.backupRequireCharging,
);
await Store.put(StoreKey.backupTriggerDelay, state.backupTriggerDelay);
await Store.put(StoreKey.backgroundBackup, state.backgroundBackup);
} else {
state = state.copyWith(
backgroundBackup: wasEnabled,
@@ -383,6 +384,9 @@ class BackupNotifier extends StateNotifier<BackUpState> {
final isEnabled = await _backgroundService.isBackgroundBackupEnabled();
state = state.copyWith(backgroundBackup: isEnabled);
if (isEnabled != Store.get(StoreKey.backgroundBackup, !isEnabled)) {
Store.put(StoreKey.backgroundBackup, isEnabled);
}
if (state.backupProgress != BackUpProgressEnum.inBackground) {
await _getBackupAlbumsInfo();
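The backup.provider.dart changes above make the background-backup toggle survive restarts: the flag is read from the Store as the initial state, written back when settings are saved, and reconciled against what the background service actually reports. A minimal sketch of the persist-and-reconcile step, using an in-memory stand-in for Immich's Store:

// Hedged sketch; FakeStore stands in for the app's persistent Store.
class FakeStore {
  final Map<String, Object?> _values = {};
  T get<T>(String key, T fallback) => (_values[key] as T?) ?? fallback;
  void put<T>(String key, T value) => _values[key] = value;
}

Future<void> reconcileBackgroundBackup(
  FakeStore store,
  Future<bool> Function() isBackgroundBackupEnabled,
) async {
  final actual = await isBackgroundBackupEnabled();
  // Passing `!actual` as the fallback guarantees a mismatch (and a write)
  // when the key has never been stored, mirroring the hunk above.
  if (actual != store.get<bool>('backgroundBackup', !actual)) {
    store.put('backgroundBackup', actual);
  }
}

Future<void> main() async {
  final store = FakeStore();
  await reconcileBackgroundBackup(store, () async => true);
  print(store.get<bool>('backgroundBackup', false)); // true
}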

View File

@@ -174,46 +174,6 @@ class BackupControllerPage extends HookConsumerWidget {
);
}
Widget buildStorageInformation() {
return ListTile(
leading: Icon(
Icons.storage_rounded,
color: Theme.of(context).primaryColor,
),
title: const Text(
"backup_controller_page_server_storage",
style: TextStyle(fontWeight: FontWeight.bold, fontSize: 14),
).tr(),
isThreeLine: true,
subtitle: Padding(
padding: const EdgeInsets.only(top: 8.0),
child: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Padding(
padding: const EdgeInsets.only(top: 8.0),
child: LinearProgressIndicator(
minHeight: 10.0,
value: backupState.serverInfo.diskUsagePercentage / 100.0,
backgroundColor: Colors.grey,
color: Theme.of(context).primaryColor,
),
),
Padding(
padding: const EdgeInsets.only(top: 12.0),
child: const Text('backup_controller_page_storage_format').tr(
args: [
backupState.serverInfo.diskUse,
backupState.serverInfo.diskSize,
],
),
),
],
),
),
);
}
ListTile buildAutoBackupController() {
final isAutoBackup = backupState.autoBackup;
final backUpOption = isAutoBackup
@@ -774,7 +734,6 @@ class BackupControllerPage extends HookConsumerWidget {
if (showBackupFix) const Divider(),
if (showBackupFix) buildCheckCorruptBackups(),
const Divider(),
buildStorageInformation(),
const Divider(),
const CurrentUploadingAssetInfoBox(),
if (!hasExclusiveAccess) buildBackgroundBackupInfo(),

View File

@@ -33,6 +33,8 @@ class ImmichAssetGrid extends HookConsumerWidget {
final bool shrinkWrap;
final bool showDragScroll;
final bool showStack;
final bool isOwner;
final String? sharedAlbumId;
const ImmichAssetGrid({
super.key,
@@ -53,6 +55,8 @@ class ImmichAssetGrid extends HookConsumerWidget {
this.shrinkWrap = false,
this.showDragScroll = true,
this.showStack = false,
this.isOwner = true,
this.sharedAlbumId,
});
@override
@@ -117,6 +121,8 @@ class ImmichAssetGrid extends HookConsumerWidget {
shrinkWrap: shrinkWrap,
showDragScroll: showDragScroll,
showStack: showStack,
isOwner: isOwner,
sharedAlbumId: sharedAlbumId,
),
);
}

View File

@@ -38,6 +38,8 @@ class ImmichAssetGridView extends StatefulWidget {
final bool shrinkWrap;
final bool showDragScroll;
final bool showStack;
final bool isOwner;
final String? sharedAlbumId;
const ImmichAssetGridView({
super.key,
@@ -58,6 +60,8 @@ class ImmichAssetGridView extends StatefulWidget {
this.shrinkWrap = false,
this.showDragScroll = true,
this.showStack = false,
this.isOwner = true,
this.sharedAlbumId,
});
@override
@@ -138,6 +142,8 @@ class ImmichAssetGridViewState extends State<ImmichAssetGridView> {
showStorageIndicator: widget.showStorageIndicator,
heroOffset: widget.heroOffset,
showStack: widget.showStack,
isOwner: widget.isOwner,
sharedAlbumId: widget.sharedAlbumId,
);
}

View File

@@ -14,12 +14,14 @@ class ThumbnailImage extends StatelessWidget {
final int totalAssets;
final bool showStorageIndicator;
final bool showStack;
final bool isOwner;
final bool useGrayBoxPlaceholder;
final bool isSelected;
final bool multiselectEnabled;
final Function? onSelect;
final Function? onDeselect;
final int heroOffset;
final String? sharedAlbumId;
const ThumbnailImage({
Key? key,
@@ -29,6 +31,8 @@ class ThumbnailImage extends StatelessWidget {
required this.totalAssets,
this.showStorageIndicator = true,
this.showStack = false,
this.isOwner = true,
this.sharedAlbumId,
this.useGrayBoxPlaceholder = false,
this.isSelected = false,
this.multiselectEnabled = false,
@@ -43,7 +47,7 @@ class ThumbnailImage extends StatelessWidget {
final assetContainerColor =
isDarkTheme ? Colors.blueGrey : Theme.of(context).primaryColorLight;
// Assets from response DTOs do not have an isar id, querying which would give us the default autoIncrement id
final isFromResponse = asset.id == Isar.autoIncrement;
final isFromDto = asset.id == Isar.autoIncrement;
Widget buildSelectionIcon(Asset asset) {
if (isSelected) {
@@ -132,7 +136,7 @@ class ThumbnailImage extends StatelessWidget {
width: 300,
height: 300,
child: Hero(
tag: isFromResponse
tag: isFromDto
? '${asset.remoteId}-$heroOffset'
: asset.id + heroOffset,
child: ImmichImage(
@@ -181,6 +185,8 @@ class ThumbnailImage extends StatelessWidget {
totalAssets: totalAssets,
heroOffset: heroOffset,
showStack: showStack,
isOwner: isOwner,
sharedAlbumId: sharedAlbumId,
),
);
}

View File

@@ -100,7 +100,7 @@ class ControlBottomAppBar extends ConsumerWidget {
label: "control_bottom_app_bar_stack".tr(),
onPressed: enabled ? onStack : null,
),
if (!hasRemote)
if (hasLocal)
ControlBoxButton(
iconData: Icons.backup_outlined,
label: "Upload",

View File

@@ -1,171 +0,0 @@
import 'package:auto_route/auto_route.dart';
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/shared/models/store.dart';
import 'package:immich_mobile/shared/ui/user_circle_avatar.dart';
import 'package:immich_mobile/modules/login/models/authentication_state.model.dart';
import 'package:immich_mobile/modules/login/providers/authentication.provider.dart';
import 'package:immich_mobile/routing/router.dart';
import 'package:immich_mobile/modules/backup/models/backup_state.model.dart';
import 'package:immich_mobile/shared/models/server_info/server_info.model.dart';
import 'package:immich_mobile/modules/backup/providers/backup.provider.dart';
import 'package:immich_mobile/shared/providers/server_info.provider.dart';
class HomePageAppBar extends ConsumerWidget implements PreferredSizeWidget {
@override
Size get preferredSize => const Size.fromHeight(kToolbarHeight);
const HomePageAppBar({
super.key,
this.onPopBack,
});
final Function? onPopBack;
@override
Widget build(BuildContext context, WidgetRef ref) {
final BackUpState backupState = ref.watch(backupProvider);
final bool isEnableAutoBackup =
backupState.backgroundBackup || backupState.autoBackup;
final ServerInfo serverInfoState = ref.watch(serverInfoProvider);
AuthenticationState authState = ref.watch(authenticationProvider);
final user = Store.tryGet(StoreKey.currentUser);
buildProfilePhoto() {
if (authState.profileImagePath.isEmpty || user == null) {
return IconButton(
splashRadius: 25,
icon: const Icon(
Icons.face_outlined,
size: 30,
),
onPressed: () {
Scaffold.of(context).openDrawer();
},
);
} else {
return InkWell(
onTap: () {
Scaffold.of(context).openDrawer();
},
child: UserCircleAvatar(
radius: 18,
size: 33,
user: user,
),
);
}
}
return AppBar(
backgroundColor: Theme.of(context).appBarTheme.backgroundColor,
shape: const RoundedRectangleBorder(
borderRadius: BorderRadius.all(
Radius.circular(5),
),
),
leading: Builder(
builder: (BuildContext context) {
return Stack(
children: [
Center(
child: buildProfilePhoto(),
),
if (serverInfoState.isVersionMismatch)
Positioned(
bottom: 4,
right: 6,
child: GestureDetector(
onTap: () => Scaffold.of(context).openDrawer(),
child: Material(
// color: Colors.grey[200],
elevation: 1,
shape: RoundedRectangleBorder(
borderRadius: BorderRadius.circular(50.0),
),
child: const Padding(
padding: EdgeInsets.all(2.0),
child: Icon(
Icons.info,
color: Color.fromARGB(255, 243, 188, 106),
size: 15,
),
),
),
),
),
],
);
},
),
title: const Text(
'IMMICH',
style: TextStyle(
fontFamily: 'SnowburstOne',
fontWeight: FontWeight.bold,
fontSize: 22,
),
),
actions: [
Stack(
alignment: AlignmentDirectional.center,
children: [
if (backupState.backupProgress == BackUpProgressEnum.inProgress)
Positioned(
top: 10,
right: 12,
child: SizedBox(
height: 8,
width: 8,
child: CircularProgressIndicator(
strokeWidth: 1,
valueColor: AlwaysStoppedAnimation<Color>(
Theme.of(context).primaryColor,
),
),
),
),
IconButton(
splashRadius: 25,
iconSize: 30,
icon: isEnableAutoBackup
? const Icon(
Icons.backup_rounded,
)
: Badge(
padding: const EdgeInsets.all(4),
backgroundColor: Colors.white,
label: const Icon(
Icons.cloud_off_rounded,
size: 8,
color: Colors.indigo,
),
child: Icon(
Icons.backup_rounded,
color: Theme.of(context).primaryColor,
),
),
onPressed: () async {
var onPop = await AutoRouter.of(context)
.push(const BackupControllerRoute());
if (onPop != null && onPop == true) {
onPopBack!();
}
},
),
if (backupState.backupProgress == BackUpProgressEnum.inProgress)
Positioned(
bottom: 5,
child: Text(
'${backupState.allUniqueAssets.length - backupState.selectedAlbumsBackupAssetsIds.length}',
style:
const TextStyle(fontSize: 9, fontWeight: FontWeight.bold),
),
),
],
),
],
);
}
}

View File

@@ -1,144 +0,0 @@
import 'package:auto_route/auto_route.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/modules/backup/providers/backup.provider.dart';
import 'package:immich_mobile/modules/backup/providers/manual_upload.provider.dart';
import 'package:immich_mobile/modules/home/ui/profile_drawer/profile_drawer_header.dart';
import 'package:immich_mobile/modules/home/ui/profile_drawer/server_info_box.dart';
import 'package:immich_mobile/modules/login/providers/authentication.provider.dart';
import 'package:immich_mobile/routing/router.dart';
import 'package:immich_mobile/shared/providers/asset.provider.dart';
import 'package:immich_mobile/shared/providers/server_info.provider.dart';
import 'package:immich_mobile/shared/providers/websocket.provider.dart';
class ProfileDrawer extends HookConsumerWidget {
const ProfileDrawer({Key? key}) : super(key: key);
@override
Widget build(BuildContext context, WidgetRef ref) {
final trashEnabled =
ref.watch(serverInfoProvider.select((v) => v.serverFeatures.trash));
buildSignOutButton() {
return ListTile(
leading: SizedBox(
height: double.infinity,
child: Icon(
Icons.logout_rounded,
color: Theme.of(context).textTheme.labelMedium?.color,
size: 20,
),
),
title: Text(
"profile_drawer_sign_out",
style: Theme.of(context)
.textTheme
.labelLarge
?.copyWith(fontWeight: FontWeight.bold),
).tr(),
onTap: () async {
await ref.watch(authenticationProvider.notifier).logout();
ref.read(manualUploadProvider.notifier).cancelBackup();
ref.watch(backupProvider.notifier).cancelBackup();
ref.watch(assetProvider.notifier).clearAllAsset();
ref.watch(websocketProvider.notifier).disconnect();
AutoRouter.of(context).replace(const LoginRoute());
},
);
}
buildSettingButton() {
return ListTile(
leading: SizedBox(
height: double.infinity,
child: Icon(
Icons.settings_rounded,
color: Theme.of(context).textTheme.labelMedium?.color,
size: 20,
),
),
title: Text(
"profile_drawer_settings",
style: Theme.of(context)
.textTheme
.labelLarge
?.copyWith(fontWeight: FontWeight.bold),
).tr(),
onTap: () {
AutoRouter.of(context).push(const SettingsRoute());
},
);
}
buildAppLogButton() {
return ListTile(
leading: SizedBox(
height: double.infinity,
child: Icon(
Icons.assignment_outlined,
color: Theme.of(context).textTheme.labelMedium?.color,
size: 20,
),
),
title: Text(
"profile_drawer_app_logs",
style: Theme.of(context)
.textTheme
.labelLarge
?.copyWith(fontWeight: FontWeight.bold),
).tr(),
onTap: () {
AutoRouter.of(context).push(const AppLogRoute());
},
);
}
buildTrashButton() {
return ListTile(
leading: SizedBox(
height: double.infinity,
child: Icon(
Icons.delete_rounded,
color: Theme.of(context).textTheme.labelMedium?.color,
size: 20,
),
),
title: Text(
"profile_drawer_trash",
style: Theme.of(context)
.textTheme
.labelLarge
?.copyWith(fontWeight: FontWeight.bold),
).tr(),
onTap: () {
AutoRouter.of(context).push(const TrashRoute());
},
);
}
return Drawer(
shape: const RoundedRectangleBorder(
borderRadius: BorderRadius.zero,
),
child: Column(
mainAxisAlignment: MainAxisAlignment.spaceBetween,
children: [
ListView(
shrinkWrap: true,
padding: EdgeInsets.zero,
children: [
const ProfileDrawerHeader(),
buildSettingButton(),
buildAppLogButton(),
if (trashEnabled) buildTrashButton(),
buildSignOutButton(),
],
),
const ServerInfoBox(),
],
),
);
}
}

View File

@@ -1,126 +0,0 @@
import 'package:flutter/material.dart';
import 'package:flutter_hooks/flutter_hooks.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/shared/models/server_info/server_info.model.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:immich_mobile/shared/providers/server_info.provider.dart';
import 'package:package_info_plus/package_info_plus.dart';
class ServerInfoBox extends HookConsumerWidget {
const ServerInfoBox({
Key? key,
}) : super(key: key);
@override
Widget build(BuildContext context, WidgetRef ref) {
ServerInfo serverInfoState = ref.watch(serverInfoProvider);
final appInfo = useState({});
getPackageInfo() async {
PackageInfo packageInfo = await PackageInfo.fromPlatform();
appInfo.value = {
"version": packageInfo.version,
"buildNumber": packageInfo.buildNumber,
};
}
useEffect(
() {
getPackageInfo();
return null;
},
[],
);
return Padding(
padding: const EdgeInsets.all(8.0),
child: Card(
elevation: 0,
color: Theme.of(context).scaffoldBackgroundColor,
shape: RoundedRectangleBorder(
borderRadius: BorderRadius.circular(5), // if you need this
side: const BorderSide(
color: Color.fromARGB(101, 201, 201, 201),
width: 1,
),
),
child: Padding(
padding: const EdgeInsets.symmetric(horizontal: 12.0, vertical: 8),
child: Column(
crossAxisAlignment: CrossAxisAlignment.center,
children: [
Padding(
padding: const EdgeInsets.all(8.0),
child: Text(
serverInfoState.isVersionMismatch
? serverInfoState.versionMismatchErrorMessage
: "profile_drawer_client_server_up_to_date".tr(),
textAlign: TextAlign.center,
style: TextStyle(
fontSize: 11,
color: Theme.of(context).primaryColor,
fontWeight: FontWeight.w600,
),
),
),
const Divider(
color: Color.fromARGB(101, 201, 201, 201),
thickness: 1,
),
Row(
mainAxisAlignment: MainAxisAlignment.spaceBetween,
children: [
Text(
"server_info_box_app_version".tr(),
style: TextStyle(
fontSize: 11,
color: Colors.grey[500],
fontWeight: FontWeight.bold,
),
),
Text(
"${appInfo.value["version"]} build.${appInfo.value["buildNumber"]}",
style: TextStyle(
fontSize: 11,
color: Colors.grey[500],
fontWeight: FontWeight.bold,
),
),
],
),
const Divider(
color: Color.fromARGB(101, 201, 201, 201),
thickness: 1,
),
Row(
mainAxisAlignment: MainAxisAlignment.spaceBetween,
children: [
Text(
"server_info_box_server_version".tr(),
style: TextStyle(
fontSize: 11,
color: Colors.grey[500],
fontWeight: FontWeight.bold,
),
),
Text(
serverInfoState.serverVersion.major > 0
? "${serverInfoState.serverVersion.major}.${serverInfoState.serverVersion.minor}.${serverInfoState.serverVersion.patch}"
: "?",
style: TextStyle(
fontSize: 11,
color: Colors.grey[500],
fontWeight: FontWeight.bold,
),
),
],
),
],
),
),
),
);
}
}

View File

@@ -17,9 +17,7 @@ import 'package:immich_mobile/modules/home/models/selection_state.dart';
import 'package:immich_mobile/modules/home/providers/multiselect.provider.dart';
import 'package:immich_mobile/modules/home/ui/asset_grid/immich_asset_grid.dart';
import 'package:immich_mobile/modules/home/ui/control_bottom_app_bar.dart';
import 'package:immich_mobile/modules/home/ui/home_page_app_bar.dart';
import 'package:immich_mobile/modules/memories/ui/memory_lane.dart';
import 'package:immich_mobile/modules/home/ui/profile_drawer/profile_drawer.dart';
import 'package:immich_mobile/routing/router.dart';
import 'package:immich_mobile/shared/models/album.dart';
import 'package:immich_mobile/shared/models/asset.dart';
@@ -27,6 +25,7 @@ import 'package:immich_mobile/shared/providers/asset.provider.dart';
import 'package:immich_mobile/shared/providers/server_info.provider.dart';
import 'package:immich_mobile/shared/providers/user.provider.dart';
import 'package:immich_mobile/shared/providers/websocket.provider.dart';
import 'package:immich_mobile/shared/ui/immich_app_bar.dart';
import 'package:immich_mobile/shared/ui/immich_loading_indicator.dart';
import 'package:immich_mobile/shared/ui/immich_toast.dart';
import 'package:immich_mobile/utils/selection_handlers.dart';
@@ -74,10 +73,6 @@ class HomePage extends HookConsumerWidget {
[],
);
void reloadAllAsset() {
ref.watch(assetProvider.notifier).getAllAsset();
}
Widget buildBody() {
void selectionListener(
bool multiselect,
@@ -174,9 +169,10 @@ class HomePage extends HookConsumerWidget {
processing.value = true;
selectionEnabledHook.value = false;
try {
ref
.read(manualUploadProvider.notifier)
.uploadAssets(context, selection.value);
ref.read(manualUploadProvider.notifier).uploadAssets(
context,
selection.value.where((a) => a.storage == AssetState.local),
);
} finally {
processing.value = false;
}
@@ -375,10 +371,7 @@ class HomePage extends HookConsumerWidget {
}
return Scaffold(
appBar: !selectionEnabledHook.value
? HomePageAppBar(onPopBack: reloadAllAsset)
: null,
drawer: const ProfileDrawer(),
appBar: !selectionEnabledHook.value ? const ImmichAppBar() : null,
body: buildBody(),
);
}

View File

@@ -17,7 +17,7 @@ class OAuthService {
// Resolve API server endpoint from user provided serverUrl
await _apiService.resolveAndSetEndpoint(serverUrl);
return await _apiService.oAuthApi.generateConfig(
return await _apiService.oAuthApi.generateOAuthConfig(
OAuthConfigDto(redirectUri: '$callbackUrlScheme:/'),
);
}
@@ -29,7 +29,7 @@ class OAuthService {
callbackUrlScheme: callbackUrlScheme,
);
return await _apiService.oAuthApi.callback(
return await _apiService.oAuthApi.finishOAuth(
OAuthCallbackDto(
url: result,
),

View File

@@ -65,7 +65,7 @@ class LoginForm extends HookConsumerWidget {
isLoadingServer.value = true;
final endpoint = await apiService.resolveAndSetEndpoint(serverUrl);
final loginConfig = await apiService.oAuthApi.generateConfig(
final loginConfig = await apiService.oAuthApi.generateOAuthConfig(
OAuthConfigDto(redirectUri: serverUrl),
);

View File

@@ -16,7 +16,8 @@ class MemoryLane extends HookConsumerWidget {
final memoryLane = memoryLaneFutureProvider
.whenData(
(memories) => memories != null
? SizedBox(
? Container(
margin: const EdgeInsets.only(top: 10),
height: 200,
child: ListView.builder(
scrollDirection: Axis.horizontal,

View File

@@ -172,7 +172,7 @@ class SearchPage extends HookConsumerWidget {
),
ListTile(
leading: Icon(
Icons.star_outline,
Icons.favorite_border_rounded,
color: categoryIconColor,
),
title:

View File

@@ -9,6 +9,7 @@ class SharedLink {
final bool allowUpload;
final String? thumbAssetId;
final String? description;
final String? password;
final DateTime? expiresAt;
final String key;
final bool showMetadata;
@@ -21,6 +22,7 @@ class SharedLink {
required this.allowUpload,
required this.thumbAssetId,
required this.description,
required this.password,
required this.expiresAt,
required this.key,
required this.showMetadata,
@@ -34,6 +36,7 @@ class SharedLink {
bool? allowDownload,
bool? allowUpload,
String? description,
String? password,
DateTime? expiresAt,
String? key,
bool? showMetadata,
@@ -46,6 +49,7 @@ class SharedLink {
allowDownload: allowDownload ?? this.allowDownload,
allowUpload: allowUpload ?? this.allowUpload,
description: description ?? this.description,
password: password ?? this.password,
expiresAt: expiresAt ?? this.expiresAt,
key: key ?? this.key,
showMetadata: showMetadata ?? this.showMetadata,
@@ -58,6 +62,7 @@ class SharedLink {
allowDownload = dto.allowDownload,
allowUpload = dto.allowUpload,
description = dto.description,
password = dto.password,
expiresAt = dto.expiresAt,
key = dto.key,
showMetadata = dto.showMetadata,
@@ -75,7 +80,7 @@ class SharedLink {
@override
String toString() =>
'SharedLink(id=$id, title=$title, thumbAssetId=$thumbAssetId, allowDownload=$allowDownload, allowUpload=$allowUpload, description=$description, expiresAt=$expiresAt, key=$key, showMetadata=$showMetadata, type=$type)';
'SharedLink(id=$id, title=$title, thumbAssetId=$thumbAssetId, allowDownload=$allowDownload, allowUpload=$allowUpload, description=$description, password=$password, expiresAt=$expiresAt, key=$key, showMetadata=$showMetadata, type=$type)';
@override
bool operator ==(Object other) =>
@@ -87,6 +92,7 @@ class SharedLink {
other.allowDownload == allowDownload &&
other.allowUpload == allowUpload &&
other.description == description &&
other.password == password &&
other.expiresAt == expiresAt &&
other.key == key &&
other.showMetadata == showMetadata &&
@@ -100,6 +106,7 @@ class SharedLink {
allowDownload.hashCode ^
allowUpload.hashCode ^
description.hashCode ^
password.hashCode ^
expiresAt.hashCode ^
key.hashCode ^
showMetadata.hashCode ^

View File

@@ -40,6 +40,7 @@ class SharedLinkService {
required bool allowDownload,
required bool allowUpload,
String? description,
String? password,
String? albumId,
List<String>? assetIds,
DateTime? expiresAt,
@@ -57,6 +58,7 @@ class SharedLinkService {
allowUpload: allowUpload,
expiresAt: expiresAt,
description: description,
password: password,
);
} else if (assetIds != null) {
dto = SharedLinkCreateDto(
@@ -66,6 +68,7 @@ class SharedLinkService {
allowUpload: allowUpload,
expiresAt: expiresAt,
description: description,
password: password,
assetIds: assetIds,
);
}
@@ -90,6 +93,7 @@ class SharedLinkService {
required bool? allowUpload,
bool? changeExpiry = false,
String? description,
String? password,
DateTime? expiresAt,
}) async {
try {
@@ -101,6 +105,7 @@ class SharedLinkService {
allowUpload: allowUpload,
expiresAt: expiresAt,
description: description,
password: password,
changeExpiryTime: changeExpiry,
),
);

View File

@@ -30,6 +30,8 @@ class SharedLinkEditPage extends HookConsumerWidget {
final descriptionController =
useTextEditingController(text: existingLink?.description ?? "");
final descriptionFocusNode = useFocusNode();
final passwordController =
useTextEditingController(text: existingLink?.password ?? "");
final showMetadata = useState(existingLink?.showMetadata ?? true);
final allowDownload = useState(existingLink?.allowDownload ?? true);
final allowUpload = useState(existingLink?.allowUpload ?? false);
@@ -113,6 +115,31 @@ class SharedLinkEditPage extends HookConsumerWidget {
);
}
Widget buildPasswordField() {
return TextField(
controller: passwordController,
enabled: newShareLink.value.isEmpty,
autofocus: false,
decoration: InputDecoration(
labelText: 'shared_link_edit_password'.tr(),
labelStyle: TextStyle(
fontWeight: FontWeight.bold,
color: themeData.primaryColor,
),
floatingLabelBehavior: FloatingLabelBehavior.always,
border: const OutlineInputBorder(),
hintText: 'shared_link_edit_password_hint'.tr(),
hintStyle: const TextStyle(
fontWeight: FontWeight.normal,
fontSize: 14,
),
disabledBorder: OutlineInputBorder(
borderSide: BorderSide(color: Colors.grey.withOpacity(0.5)),
),
),
);
}
Widget buildShowMetaButton() {
return SwitchListTile.adaptive(
value: showMetadata.value,
@@ -229,7 +256,9 @@ class SharedLinkEditPage extends HookConsumerWidget {
void copyLinkToClipboard() {
Clipboard.setData(
ClipboardData(
text: newShareLink.value,
text: passwordController.text.isEmpty
? newShareLink.value
: "Link: ${newShareLink.value}\nPassword: ${passwordController.text}",
),
).then((_) {
ScaffoldMessenger.of(context).showSnackBar(
@@ -302,6 +331,9 @@ class SharedLinkEditPage extends HookConsumerWidget {
description: descriptionController.text.isEmpty
? null
: descriptionController.text,
password: passwordController.text.isEmpty
? null
: passwordController.text,
expiresAt: expiryAfter.value == 0 ? null : calculateExpiry(),
);
ref.invalidate(sharedLinksStateProvider);
@@ -324,6 +356,7 @@ class SharedLinkEditPage extends HookConsumerWidget {
bool? upload;
bool? meta;
String? desc;
String? password;
DateTime? expiry;
bool? changeExpiry;
@@ -343,6 +376,10 @@ class SharedLinkEditPage extends HookConsumerWidget {
desc = descriptionController.text;
}
if (passwordController.text != existingLink!.password) {
password = passwordController.text;
}
if (editExpiry.value) {
expiry = expiryAfter.value == 0 ? null : calculateExpiry();
changeExpiry = true;
@@ -354,6 +391,7 @@ class SharedLinkEditPage extends HookConsumerWidget {
allowDownload: download,
allowUpload: upload,
description: desc,
password: password,
expiresAt: expiry,
changeExpiry: changeExpiry,
);
@@ -385,6 +423,10 @@ class SharedLinkEditPage extends HookConsumerWidget {
padding: const EdgeInsets.all(padding),
child: buildDescriptionField(),
),
Padding(
padding: const EdgeInsets.all(padding),
child: buildPasswordField(),
),
Padding(
padding: const EdgeInsets.only(
left: padding,

View File

@@ -37,6 +37,7 @@ class TrashNotifier extends StateNotifier<bool> {
.remoteIdProperty()
.findAll();
// TODO: handle local asset removal on emptyTrash
_ref
.read(syncServiceProvider)
.handleRemoteAssetRemoval(idsToRemove.cast<String>().toList());

View File

@@ -1,6 +1,7 @@
import 'package:auto_route/auto_route.dart';
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/modules/activities/views/activities_page.dart';
import 'package:immich_mobile/modules/album/models/asset_selection_page_result.model.dart';
import 'package:immich_mobile/modules/album/views/album_options_part.dart';
import 'package:immich_mobile/modules/album/views/album_viewer_page.dart';
@@ -133,10 +134,7 @@ part 'router.gr.dart';
DuplicateGuard,
],
),
CustomRoute(
page: AppLogPage,
transitionsBuilder: TransitionsBuilders.slideBottom,
),
AutoRoute(page: AppLogPage, guards: [DuplicateGuard]),
AutoRoute(
page: AppLogDetailPage,
),
@@ -163,6 +161,12 @@ part 'router.gr.dart';
AutoRoute(page: TrashPage, guards: [AuthGuard, DuplicateGuard]),
AutoRoute(page: SharedLinkPage, guards: [AuthGuard, DuplicateGuard]),
AutoRoute(page: SharedLinkEditPage, guards: [AuthGuard, DuplicateGuard]),
CustomRoute(
page: ActivitiesPage,
guards: [AuthGuard, DuplicateGuard],
transitionsBuilder: TransitionsBuilders.slideLeft,
durationInMilliseconds: 200,
),
],
)
class AppRouter extends _$AppRouter {

View File

@@ -72,6 +72,8 @@ class _$AppRouter extends RootStackRouter {
totalAssets: args.totalAssets,
heroOffset: args.heroOffset,
showStack: args.showStack,
isOwner: args.isOwner,
sharedAlbumId: args.sharedAlbumId,
),
);
},
@@ -230,12 +232,9 @@ class _$AppRouter extends RootStackRouter {
);
},
AppLogRoute.name: (routeData) {
return CustomPage<dynamic>(
return MaterialPageX<dynamic>(
routeData: routeData,
child: const AppLogPage(),
transitionsBuilder: TransitionsBuilders.slideBottom,
opaque: true,
barrierDismissible: false,
);
},
AppLogDetailRoute.name: (routeData) {
@@ -339,6 +338,25 @@ class _$AppRouter extends RootStackRouter {
),
);
},
ActivitiesRoute.name: (routeData) {
final args = routeData.argsAs<ActivitiesRouteArgs>();
return CustomPage<dynamic>(
routeData: routeData,
child: ActivitiesPage(
args.albumId,
appBarTitle: args.appBarTitle,
assetId: args.assetId,
withAssetThumbs: args.withAssetThumbs,
isOwner: args.isOwner,
isReadOnly: args.isReadOnly,
key: args.key,
),
transitionsBuilder: TransitionsBuilders.slideLeft,
durationInMilliseconds: 200,
opaque: true,
barrierDismissible: false,
);
},
HomeRoute.name: (routeData) {
return MaterialPageX<dynamic>(
routeData: routeData,
@@ -582,6 +600,7 @@ class _$AppRouter extends RootStackRouter {
RouteConfig(
AppLogRoute.name,
path: '/app-log-page',
guards: [duplicateGuard],
),
RouteConfig(
AppLogDetailRoute.name,
@@ -675,6 +694,14 @@ class _$AppRouter extends RootStackRouter {
duplicateGuard,
],
),
RouteConfig(
ActivitiesRoute.name,
path: '/activities-page',
guards: [
authGuard,
duplicateGuard,
],
),
];
}
@@ -749,6 +776,8 @@ class GalleryViewerRoute extends PageRouteInfo<GalleryViewerRouteArgs> {
required int totalAssets,
int heroOffset = 0,
bool showStack = false,
bool isOwner = true,
String? sharedAlbumId,
}) : super(
GalleryViewerRoute.name,
path: '/gallery-viewer-page',
@@ -759,6 +788,8 @@ class GalleryViewerRoute extends PageRouteInfo<GalleryViewerRouteArgs> {
totalAssets: totalAssets,
heroOffset: heroOffset,
showStack: showStack,
isOwner: isOwner,
sharedAlbumId: sharedAlbumId,
),
);
@@ -773,6 +804,8 @@ class GalleryViewerRouteArgs {
required this.totalAssets,
this.heroOffset = 0,
this.showStack = false,
this.isOwner = true,
this.sharedAlbumId,
});
final Key? key;
@@ -787,9 +820,13 @@ class GalleryViewerRouteArgs {
final bool showStack;
final bool isOwner;
final String? sharedAlbumId;
@override
String toString() {
return 'GalleryViewerRouteArgs{key: $key, initialIndex: $initialIndex, loadAsset: $loadAsset, totalAssets: $totalAssets, heroOffset: $heroOffset, showStack: $showStack}';
return 'GalleryViewerRouteArgs{key: $key, initialIndex: $initialIndex, loadAsset: $loadAsset, totalAssets: $totalAssets, heroOffset: $heroOffset, showStack: $showStack, isOwner: $isOwner, sharedAlbumId: $sharedAlbumId}';
}
}
@@ -1523,6 +1560,65 @@ class SharedLinkEditRouteArgs {
}
}
/// generated route for
/// [ActivitiesPage]
class ActivitiesRoute extends PageRouteInfo<ActivitiesRouteArgs> {
ActivitiesRoute({
required String albumId,
String appBarTitle = "",
String? assetId,
bool withAssetThumbs = true,
bool isOwner = false,
bool isReadOnly = false,
Key? key,
}) : super(
ActivitiesRoute.name,
path: '/activities-page',
args: ActivitiesRouteArgs(
albumId: albumId,
appBarTitle: appBarTitle,
assetId: assetId,
withAssetThumbs: withAssetThumbs,
isOwner: isOwner,
isReadOnly: isReadOnly,
key: key,
),
);
static const String name = 'ActivitiesRoute';
}
class ActivitiesRouteArgs {
const ActivitiesRouteArgs({
required this.albumId,
this.appBarTitle = "",
this.assetId,
this.withAssetThumbs = true,
this.isOwner = false,
this.isReadOnly = false,
this.key,
});
final String albumId;
final String appBarTitle;
final String? assetId;
final bool withAssetThumbs;
final bool isOwner;
final bool isReadOnly;
final Key? key;
@override
String toString() {
return 'ActivitiesRouteArgs{albumId: $albumId, appBarTitle: $appBarTitle, assetId: $assetId, withAssetThumbs: $withAssetThumbs, isOwner: $isOwner, isReadOnly: $isReadOnly, key: $key}';
}
}
/// generated route for
/// [HomePage]
class HomeRoute extends PageRouteInfo<void> {

View File

@@ -22,6 +22,7 @@ class Album {
this.endDate,
this.lastModifiedAssetTimestamp,
required this.shared,
required this.activityEnabled,
});
Id id = Isar.autoIncrement;
@@ -36,6 +37,7 @@ class Album {
DateTime? endDate;
DateTime? lastModifiedAssetTimestamp;
bool shared;
bool activityEnabled;
final IsarLink<User> owner = IsarLink<User>();
final IsarLink<Asset> thumbnail = IsarLink<Asset>();
final IsarLinks<User> sharedUsers = IsarLinks<User>();
@@ -77,7 +79,8 @@ class Album {
}
Stream<void> watchRenderList(GroupAssetsBy groupAssetsBy) async* {
final query = assets.filter().sortByFileCreatedAtDesc();
final query =
assets.filter().isTrashedEqualTo(false).sortByFileCreatedAtDesc();
_renderList = await RenderList.fromQuery(query, groupAssetsBy);
yield _renderList;
await for (final _ in query.watchLazy()) {
@@ -105,6 +108,7 @@ class Album {
modifiedAt.isAtSameMomentAs(other.modifiedAt) &&
lastModifiedAssetTimestampIsSetAndEqual &&
shared == other.shared &&
activityEnabled == other.activityEnabled &&
owner.value == other.owner.value &&
thumbnail.value == other.thumbnail.value &&
sharedUsers.length == other.sharedUsers.length &&
@@ -122,6 +126,7 @@ class Album {
modifiedAt.hashCode ^
lastModifiedAssetTimestamp.hashCode ^
shared.hashCode ^
activityEnabled.hashCode ^
owner.value.hashCode ^
thumbnail.value.hashCode ^
sharedUsers.length.hashCode ^
@@ -133,6 +138,7 @@ class Album {
createdAt: ape.lastModified?.toUtc() ?? DateTime.now().toUtc(),
modifiedAt: ape.lastModified?.toUtc() ?? DateTime.now().toUtc(),
shared: false,
activityEnabled: false,
);
a.owner.value = Store.get(StoreKey.currentUser);
a.localId = ape.id;
@@ -150,6 +156,7 @@ class Album {
shared: dto.shared,
startDate: dto.startDate,
endDate: dto.endDate,
activityEnabled: dto.isActivityEnabled,
);
a.owner.value = await db.users.getById(dto.ownerId);
if (dto.albumThumbnailAssetId != null) {

View File

@@ -17,48 +17,53 @@ const AlbumSchema = CollectionSchema(
name: r'Album',
id: -1355968412107120937,
properties: {
r'createdAt': PropertySchema(
r'activityEnabled': PropertySchema(
id: 0,
name: r'activityEnabled',
type: IsarType.bool,
),
r'createdAt': PropertySchema(
id: 1,
name: r'createdAt',
type: IsarType.dateTime,
),
r'endDate': PropertySchema(
id: 1,
id: 2,
name: r'endDate',
type: IsarType.dateTime,
),
r'lastModifiedAssetTimestamp': PropertySchema(
id: 2,
id: 3,
name: r'lastModifiedAssetTimestamp',
type: IsarType.dateTime,
),
r'localId': PropertySchema(
id: 3,
id: 4,
name: r'localId',
type: IsarType.string,
),
r'modifiedAt': PropertySchema(
id: 4,
id: 5,
name: r'modifiedAt',
type: IsarType.dateTime,
),
r'name': PropertySchema(
id: 5,
id: 6,
name: r'name',
type: IsarType.string,
),
r'remoteId': PropertySchema(
id: 6,
id: 7,
name: r'remoteId',
type: IsarType.string,
),
r'shared': PropertySchema(
id: 7,
id: 8,
name: r'shared',
type: IsarType.bool,
),
r'startDate': PropertySchema(
id: 8,
id: 9,
name: r'startDate',
type: IsarType.dateTime,
)
@@ -157,15 +162,16 @@ void _albumSerialize(
List<int> offsets,
Map<Type, List<int>> allOffsets,
) {
writer.writeDateTime(offsets[0], object.createdAt);
writer.writeDateTime(offsets[1], object.endDate);
writer.writeDateTime(offsets[2], object.lastModifiedAssetTimestamp);
writer.writeString(offsets[3], object.localId);
writer.writeDateTime(offsets[4], object.modifiedAt);
writer.writeString(offsets[5], object.name);
writer.writeString(offsets[6], object.remoteId);
writer.writeBool(offsets[7], object.shared);
writer.writeDateTime(offsets[8], object.startDate);
writer.writeBool(offsets[0], object.activityEnabled);
writer.writeDateTime(offsets[1], object.createdAt);
writer.writeDateTime(offsets[2], object.endDate);
writer.writeDateTime(offsets[3], object.lastModifiedAssetTimestamp);
writer.writeString(offsets[4], object.localId);
writer.writeDateTime(offsets[5], object.modifiedAt);
writer.writeString(offsets[6], object.name);
writer.writeString(offsets[7], object.remoteId);
writer.writeBool(offsets[8], object.shared);
writer.writeDateTime(offsets[9], object.startDate);
}
Album _albumDeserialize(
@@ -175,15 +181,16 @@ Album _albumDeserialize(
Map<Type, List<int>> allOffsets,
) {
final object = Album(
createdAt: reader.readDateTime(offsets[0]),
endDate: reader.readDateTimeOrNull(offsets[1]),
lastModifiedAssetTimestamp: reader.readDateTimeOrNull(offsets[2]),
localId: reader.readStringOrNull(offsets[3]),
modifiedAt: reader.readDateTime(offsets[4]),
name: reader.readString(offsets[5]),
remoteId: reader.readStringOrNull(offsets[6]),
shared: reader.readBool(offsets[7]),
startDate: reader.readDateTimeOrNull(offsets[8]),
activityEnabled: reader.readBool(offsets[0]),
createdAt: reader.readDateTime(offsets[1]),
endDate: reader.readDateTimeOrNull(offsets[2]),
lastModifiedAssetTimestamp: reader.readDateTimeOrNull(offsets[3]),
localId: reader.readStringOrNull(offsets[4]),
modifiedAt: reader.readDateTime(offsets[5]),
name: reader.readString(offsets[6]),
remoteId: reader.readStringOrNull(offsets[7]),
shared: reader.readBool(offsets[8]),
startDate: reader.readDateTimeOrNull(offsets[9]),
);
object.id = id;
return object;
@@ -197,22 +204,24 @@ P _albumDeserializeProp<P>(
) {
switch (propertyId) {
case 0:
return (reader.readDateTime(offset)) as P;
return (reader.readBool(offset)) as P;
case 1:
return (reader.readDateTimeOrNull(offset)) as P;
return (reader.readDateTime(offset)) as P;
case 2:
return (reader.readDateTimeOrNull(offset)) as P;
case 3:
return (reader.readStringOrNull(offset)) as P;
return (reader.readDateTimeOrNull(offset)) as P;
case 4:
return (reader.readDateTime(offset)) as P;
case 5:
return (reader.readString(offset)) as P;
case 6:
return (reader.readStringOrNull(offset)) as P;
case 5:
return (reader.readDateTime(offset)) as P;
case 6:
return (reader.readString(offset)) as P;
case 7:
return (reader.readBool(offset)) as P;
return (reader.readStringOrNull(offset)) as P;
case 8:
return (reader.readBool(offset)) as P;
case 9:
return (reader.readDateTimeOrNull(offset)) as P;
default:
throw IsarError('Unknown property with id $propertyId');
@@ -442,6 +451,16 @@ extension AlbumQueryWhere on QueryBuilder<Album, Album, QWhereClause> {
}
extension AlbumQueryFilter on QueryBuilder<Album, Album, QFilterCondition> {
QueryBuilder<Album, Album, QAfterFilterCondition> activityEnabledEqualTo(
bool value) {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.equalTo(
property: r'activityEnabled',
value: value,
));
});
}
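The regenerated album.g.dart shown here gives the new activityEnabled field its own filter, sort, distinct and property helpers, so callers can query it like any other Album column. A hedged usage sketch, assuming the usual Isar setup with the generated albums accessor, an open Isar instance, and an import path for the Album model that is only illustrative:

import 'package:isar/isar.dart';
// Assumed import path for the Album collection shown earlier in the diff.
import 'package:immich_mobile/shared/models/album.dart';

// Returns shared albums that still have activity (comments/likes) enabled.
Future<List<Album>> sharedAlbumsWithActivity(Isar db) {
  return db.albums
      .filter()
      .sharedEqualTo(true)
      .activityEnabledEqualTo(true)
      .findAll();
}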
QueryBuilder<Album, Album, QAfterFilterCondition> createdAtEqualTo(
DateTime value) {
return QueryBuilder.apply(this, (query) {
@@ -1385,6 +1404,18 @@ extension AlbumQueryLinks on QueryBuilder<Album, Album, QFilterCondition> {
}
extension AlbumQuerySortBy on QueryBuilder<Album, Album, QSortBy> {
QueryBuilder<Album, Album, QAfterSortBy> sortByActivityEnabled() {
return QueryBuilder.apply(this, (query) {
return query.addSortBy(r'activityEnabled', Sort.asc);
});
}
QueryBuilder<Album, Album, QAfterSortBy> sortByActivityEnabledDesc() {
return QueryBuilder.apply(this, (query) {
return query.addSortBy(r'activityEnabled', Sort.desc);
});
}
QueryBuilder<Album, Album, QAfterSortBy> sortByCreatedAt() {
return QueryBuilder.apply(this, (query) {
return query.addSortBy(r'createdAt', Sort.asc);
@@ -1496,6 +1527,18 @@ extension AlbumQuerySortBy on QueryBuilder<Album, Album, QSortBy> {
}
extension AlbumQuerySortThenBy on QueryBuilder<Album, Album, QSortThenBy> {
QueryBuilder<Album, Album, QAfterSortBy> thenByActivityEnabled() {
return QueryBuilder.apply(this, (query) {
return query.addSortBy(r'activityEnabled', Sort.asc);
});
}
QueryBuilder<Album, Album, QAfterSortBy> thenByActivityEnabledDesc() {
return QueryBuilder.apply(this, (query) {
return query.addSortBy(r'activityEnabled', Sort.desc);
});
}
QueryBuilder<Album, Album, QAfterSortBy> thenByCreatedAt() {
return QueryBuilder.apply(this, (query) {
return query.addSortBy(r'createdAt', Sort.asc);
@@ -1619,6 +1662,12 @@ extension AlbumQuerySortThenBy on QueryBuilder<Album, Album, QSortThenBy> {
}
extension AlbumQueryWhereDistinct on QueryBuilder<Album, Album, QDistinct> {
QueryBuilder<Album, Album, QDistinct> distinctByActivityEnabled() {
return QueryBuilder.apply(this, (query) {
return query.addDistinctBy(r'activityEnabled');
});
}
QueryBuilder<Album, Album, QDistinct> distinctByCreatedAt() {
return QueryBuilder.apply(this, (query) {
return query.addDistinctBy(r'createdAt');
@@ -1684,6 +1733,12 @@ extension AlbumQueryProperty on QueryBuilder<Album, Album, QQueryProperty> {
});
}
QueryBuilder<Album, bool, QQueryOperations> activityEnabledProperty() {
return QueryBuilder.apply(this, (query) {
return query.addPropertyName(r'activityEnabled');
});
}
QueryBuilder<Album, DateTime, QQueryOperations> createdAtProperty() {
return QueryBuilder.apply(this, (query) {
return query.addPropertyName(r'createdAt');

Some files were not shown because too many files have changed in this diff.