Compare commits

5 commits: feat/fast-… → chore/hand…

Commits: 8d2a849edc, fb4fe5d40b, 717961ce7b, 259386cf13, 7e587c2703
.github/labeler.yml (23 lines, vendored, deleted)
```diff
@@ -1,23 +0,0 @@
-cli:
-  - changed-files:
-      - any-glob-to-any-file: cli/**
-
-documentation:
-  - changed-files:
-      - any-glob-to-any-file: docs/**
-
-🖥️web:
-  - changed-files:
-      - any-glob-to-any-file: web/**
-
-📱mobile:
-  - changed-files:
-      - any-glob-to-any-file: mobile/**
-
-🗄️server:
-  - changed-files:
-      - any-glob-to-any-file: server/**
-
-🧠machine-learning:
-  - changed-files:
-      - any-glob-to-any-file: machine-learning/**
```
.github/workflows/cli.yml (25 lines, vendored)
```diff
@@ -1,17 +1,16 @@
 name: CLI Build
 on:
   workflow_dispatch:
   push:
     branches: [main]
     paths:
-      - 'cli/**'
-      - '.github/workflows/cli.yml'
+      - "cli/**"
+      - ".github/workflows/cli.yml"
   pull_request:
     branches: [main]
     paths:
-      - 'cli/**'
-      - '.github/workflows/cli.yml'
-  release:
-    types: [published]
+      - "cli/**"
+      - ".github/workflows/cli.yml"

 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
@@ -33,8 +32,8 @@ jobs:
       # Setup .npmrc file to publish to npm
       - uses: actions/setup-node@v4
         with:
-          node-version: '20.x'
-          registry-url: 'https://registry.npmjs.org'
+          node-version: "20.x"
+          registry-url: "https://registry.npmjs.org"
       - name: Prepare SDK
         run: npm ci --prefix ../open-api/typescript-sdk/
       - name: Build SDK
@@ -42,7 +41,7 @@ jobs:
       - run: npm ci
       - run: npm run build
       - run: npm publish
-        if: ${{ github.event_name == 'release' }}
+        if: ${{ github.event_name == 'workflow_dispatch' }}
         env:
           NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
@@ -84,15 +83,15 @@ jobs:
           images: |
             name=ghcr.io/${{ github.repository_owner }}/immich-cli
           tags: |
-            type=raw,value=${{ steps.package-version.outputs.version }},enable=${{ github.event_name == 'release' }}
-            type=raw,value=latest,enable=${{ github.event_name == 'release' }}
+            type=raw,value=${{ steps.package-version.outputs.version }},enable=${{ github.event_name == 'workflow_dispatch' }}
+            type=raw,value=latest,enable=${{ github.event_name == 'workflow_dispatch' }}

       - name: Build and push image
-        uses: docker/build-push-action@v5.4.0
+        uses: docker/build-push-action@v5.3.0
         with:
           file: cli/Dockerfile
           platforms: linux/amd64,linux/arm64
-          push: ${{ github.event_name == 'release' }}
+          push: ${{ github.event_name == 'workflow_dispatch' }}
           cache-from: type=gha
           cache-to: type=gha,mode=max
           tags: ${{ steps.metadata.outputs.tags }}
```
.github/workflows/docker.yml (2 lines, vendored)
```diff
@@ -115,7 +115,7 @@ jobs:
           fi

       - name: Build and push image
-        uses: docker/build-push-action@v5.4.0
+        uses: docker/build-push-action@v5.3.0
         with:
           context: ${{ matrix.context }}
           file: ${{ matrix.file }}
```
.github/workflows/pr-labeler.yml (12 lines, vendored, deleted)
```diff
@@ -1,12 +0,0 @@
-name: "Pull Request Labeler"
-on:
-  - pull_request_target
-
-jobs:
-  labeler:
-    permissions:
-      contents: read
-      pull-requests: write
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/labeler@v5
```
.github/workflows/pr-require-label.yml (13 lines, vendored, new file)
```diff
@@ -0,0 +1,13 @@
+name: Enforce PR labels
+
+on:
+  pull_request:
+    types: [labeled, unlabeled, opened, edited, synchronize]
+jobs:
+  enforce-label:
+    name: Enforce label
+    runs-on: ubuntu-latest
+    steps:
+      - if: toJson(github.event.pull_request.labels) == '[]'
+        run: exit 1
+
```
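The guard in this new workflow hinges on `toJson`: an empty label array serializes to the literal string `'[]'`, so the step runs `exit 1` only when the pull request carries no labels at all. A rough Python analogue of that comparison, purely illustrative and not part of the change:

```python
import json

def enforce_labels(labels: list[dict]) -> None:
    # Mirrors `if: toJson(github.event.pull_request.labels) == '[]'`:
    # an empty label list serializes to the string "[]", which fails the job.
    if json.dumps(labels) == "[]":
        raise SystemExit(1)  # equivalent to `run: exit 1`

enforce_labels([{"name": "documentation"}])  # passes quietly
enforce_labels([])  # exits with status 1
```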
Makefile (6 lines)
```diff
@@ -10,6 +10,12 @@ dev-update:
 dev-scale:
 	docker compose -f ./docker/docker-compose.dev.yml up --build -V --scale immich-server=3 --remove-orphans

+stage:
+	docker compose -f ./docker/docker-compose.staging.yml up --build -V --remove-orphans
+
+pull-stage:
+	docker compose -f ./docker/docker-compose.staging.yml pull
+
 .PHONY: e2e
 e2e:
 	docker compose -f ./e2e/docker-compose.yml up --build -V --remove-orphans
```
cli/package-lock.json (8 lines, generated)
```diff
@@ -1,12 +1,12 @@
 {
   "name": "@immich/cli",
-  "version": "2.2.3",
+  "version": "2.2.0",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "@immich/cli",
-      "version": "2.2.3",
+      "version": "2.2.0",
       "license": "GNU Affero General Public License version 3",
       "dependencies": {
         "fast-glob": "^3.3.2",
@@ -47,14 +47,14 @@
     },
     "../open-api/typescript-sdk": {
       "name": "@immich/sdk",
-      "version": "1.106.3",
+      "version": "1.105.1",
       "dev": true,
       "license": "GNU Affero General Public License version 3",
       "dependencies": {
         "@oazapfts/runtime": "^1.0.2"
       },
       "devDependencies": {
-        "@types/node": "^20.11.0",
+        "@types/node": "^20.12.13",
         "typescript": "^5.3.3"
       }
     },
```
cli/package.json

```diff
@@ -1,6 +1,6 @@
 {
   "name": "@immich/cli",
-  "version": "2.2.3",
+  "version": "2.2.0",
   "description": "Command Line Interface (CLI) for Immich",
   "type": "module",
   "exports": "./dist/index.js",
```
Database healthcheck (the same change appears in three compose files):

```diff
@@ -103,7 +103,7 @@ services:
     ports:
       - 5432:5432
     healthcheck:
-      test: pg_isready --dbname='${DB_DATABASE_NAME}' || exit 1; Chksum="$$(psql --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' --tuples-only --no-align --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
+      test: pg_isready --dbname='${DB_DATABASE_NAME}' || exit 1; Chksum="$$(psql --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' --tuples-only --no-align --command='SELECT SUM(checksum_failures) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
       interval: 5m
       start_interval: 30s
       start_period: 5m
```

```diff
@@ -61,7 +61,7 @@ services:
     ports:
       - 5432:5432
     healthcheck:
-      test: pg_isready --dbname='${DB_DATABASE_NAME}' || exit 1; Chksum="$$(psql --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' --tuples-only --no-align --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
+      test: pg_isready --dbname='${DB_DATABASE_NAME}' || exit 1; Chksum="$$(psql --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' --tuples-only --no-align --command='SELECT SUM(checksum_failures) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
       interval: 5m
       start_interval: 30s
       start_period: 5m
```

```diff
@@ -59,7 +59,7 @@ services:
     volumes:
       - ${DB_DATA_LOCATION}:/var/lib/postgresql/data
     healthcheck:
-      test: pg_isready --dbname='${DB_DATABASE_NAME}' || exit 1; Chksum="$$(psql --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' --tuples-only --no-align --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
+      test: pg_isready --dbname='${DB_DATABASE_NAME}' || exit 1; Chksum="$$(psql --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' --tuples-only --no-align --command='SELECT SUM(checksum_failures) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
       interval: 5m
       start_interval: 30s
       start_period: 5m
```
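All three hunks toggle between `SELECT COALESCE(SUM(checksum_failures), 0)` and a bare `SELECT SUM(checksum_failures)`. The difference matters because `SUM` yields SQL `NULL` when there are no rows or only `NULL` inputs (for example, when data checksums are disabled), while the `COALESCE` form pins that case to `0`, keeping the `[ "$Chksum" = '0' ]` string test well-defined. A minimal sketch of the distinction, using SQLite purely for illustration (the real check runs `psql` against `pg_stat_database`):

```python
import sqlite3

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE pg_stat_database (checksum_failures INTEGER)")

# SUM over an empty input is NULL, which Python reads back as None...
print(con.execute("SELECT SUM(checksum_failures) FROM pg_stat_database").fetchone())
# -> (None,)

# ...while COALESCE pins that case to 0, matching the '0' string comparison.
print(con.execute("SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database").fetchone())
# -> (0,)
```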
```diff
@@ -3,10 +3,10 @@ global:
   evaluation_interval: 15s

 scrape_configs:
-  - job_name: immich_api
+  - job_name: immich_server
     static_configs:
       - targets: ['immich-server:8081']

   - job_name: immich_microservices
     static_configs:
-      - targets: ['immich-server:8082']
+      - targets: ['immich-microservices:8081']
```
```diff
@@ -43,7 +43,7 @@ if [ -n "${quota:-}" ] && [ -n "${period:-}" ]; then
     cpus=1
   fi
 else
-  cpus=$(grep -c ^processor /proc/cpuinfo)
+  cpus=$(grep -c processor /proc/cpuinfo)
 fi

 echo "$cpus"
```
```diff
@@ -1,23 +0,0 @@
-# Email Notifications
-
-Immich supports the option to send notifications via Email for the following events:
-
-- Creating a new user
-- Notifying a user when they get added to a shared album
-- Informing other users about the addition of new assets to a shared album
-
-## SMTP settings
-
-You can access the settings panel from the web at `Administration -> Settings -> Notification settings`
-
-Under Email, enter the following details to connect with SMTP servers.
-
-You can use the following [guide](/docs/guides/smtp-gmail) to use Gmail's SMTP server.
-
-<img src={require('./img/email-settings.png').default} width="80%" title="SMTP settings" />
-
-## User's notifications settings
-
-Users can manage their email notification settings from their account settings page on the web. They can choose to turn email notifications on or off for the following events:
-
-<img src={require('./img/user-notifications-settings.png').default} width="80%" title="User notification settings" />
```
Binary file not shown. (Before: 218 KiB)
Binary file not shown. (Before: 18 KiB)
Binary file not shown. (Before: 2.7 KiB)
Binary file not shown. (Before: 109 KiB)
```diff
@@ -10,59 +10,6 @@ Viewing and modifying the system settings is restricted to the Administrator.
 You can always return to the default settings by clicking the `Reset to default` button.
 :::

-## Authentication Settings
-
-Manage password, OAuth, and other authentication settings
-
-### OAuth Authentication
-
-Immich supports OAuth Authentication. Read more about this feature and its configuration [here](/docs/administration/oauth).
-
-### Password Authentication
-
-The administrator can choose to disable login with username and password for the entire instance. This means that **no one**, including the system administrator, will be able to log using this method. If [OAuth Authentication](/docs/administration/oauth) is also disabled, no users will be able to login using **any** method. Changing this setting does not affect existing sessions, just new login attempts.
-
-:::tip
-You can always use the [Server CLI](/docs/administration/server-commands) to re-enable password login.
-:::
-
-## Image Settings (thumbnails and previews)
-
-- Thumbnails - Used in the main timeline.
-- Previews - Used in the asset viewer.
-
-By default Immich creates 3 thumbnails for each asset,
-Blurred (thumbhash) , Small - thumbnails (webp) , and Large - previews (jpeg/webp), using these settings you can change the quality for the thumbnails and previews files that are created.
-
-**Thumbnail format**
-Allows you to choose the type of format you want for the Thumbnail images, Webp produces smaller files than jpeg, but is slower to encode.
-
-:::tip
-You can read in detail about the advantages and disadvantages of using webp over jpeg on [Adobe's website](https://www.adobe.com/creativecloud/file-types/image/raster/webp-file.html)
-:::
-
-**Thumbnail resolution**
-Used when viewing groups of photos (main timeline, album view, etc.). Higher resolutions can preserve more detail but take longer to encode, have larger file sizes, and can reduce app responsiveness.
-
-**Preview format**
-Allows you to choose the type of format you want for the Preview images, Webp produces smaller files than jpeg, but is slower to encode.
-
-**Preview resolution**
-Used when viewing a single photo and for machine learning. Higher resolutions can preserve more detail but take longer to encode, have larger file sizes, and can reduce app responsiveness.
-
-**Quality**
-Image quality from 1-100. Higher is better for quality but produces larger files, this option affects the Preview and Thumbnail images.
-
-**Prefer wide gamut**
-Use Display P3 for thumbnails. This better preserves the vibrance of images with wide colorspaces, but images may appear differently on old devices with an old browser version. sRGB images are kept as sRGB to avoid color shifts.
-
-**Prefer embedded preview**
-Use embedded previews in RAW photos as the input to image processing when available. This can produce more accurate colors for some images, but the quality of the preview is camera-dependent and the image may have more compression artifacts.
-
-:::tip
-The default resolution for Large thumbnails can be lowered from 1440p (default) to 1080p or 720p to save storage space.
-:::
-
 ## Job Settings

 Using these settings, you can determine the amount of work that will run concurrently for each task in microservices. Some tasks can be set to higher values on computers with powerful hardware and storage with good I/O capabilities.
```
```diff
@@ -145,9 +92,17 @@ The map can be adjusted via [OpenMapTiles](https://openmaptiles.org/styles/) for

 Immich supports [Reverse Geocoding](/docs/features/reverse-geocoding) using data from the [GeoNames](https://www.geonames.org/) geographical database.

-## Notification Settings
+## OAuth Authentication

-SMTP server setup, for user creation notifications, new albums, etc. More information can be found [here](/docs/administration/email-notification)
+Immich supports OAuth Authentication. Read more about this feature and its configuration [here](/docs/administration/oauth).
+
+## Password Authentication
+
+The administrator can choose to disable login with username and password for the entire instance. This means that **no one**, including the system administrator, will be able to log using this method. If [OAuth Authentication](/docs/administration/oauth) is also disabled, no users will be able to login using **any** method. Changing this setting does not affect existing sessions, just new login attempts.
+
+:::tip
+You can always use the [Server CLI](/docs/administration/server-commands) to re-enable password login.
+:::

 ## Server Settings
```
````diff
@@ -175,6 +130,27 @@ p {
 }
 ```

+## Thumbnail Settings
+
+By default Immich creates 3 thumbnails for each asset,
+Blurred (thumbhash) , Small (webp) , and Large (jpeg), using these settings you can change the quality for the thumbnail files that are created.
+
+**Small thumbnail resolution**
+Used when viewing groups of photos (main timeline, album view, etc.). Higher resolutions can preserve more detail but take longer to encode, have larger file sizes, and can reduce app responsiveness.
+
+**Large thumbnail resolution**
+Used when viewing a single photo and for machine learning. Higher resolutions can preserve more detail but take longer to encode, have larger file sizes, and can reduce app responsiveness.
+
+**Quality**
+Thumbnail quality from 1-100. Higher is better for quality but produces larger files.
+
+**Prefer wide gamut**
+Use display p3 for thumbnails. This better preserves the vibrance of images with wide color spaces, but images may appear differently on old devices with an old browser version. Srgb images are kept as srgb to avoid color shifts.
+
+:::tip
+The default resolution for Large thumbnails can be lowered from 1440p (default) to 1080p or 720p to save storage space.
+:::
+
 ## Trash Settings

 In the system administrator's option to set a trash for deleted files, these files will remain in the trash until the deletion date 30 days (default) or as defined by the system administrator.
````
```diff
@@ -13,20 +13,6 @@ Immich supports multiple users, each with their own library.

 <UserCreate />

-## Send new user email notification
-
-:::note
-This option is only available if an SMTP server has been configured in the administrator settings.
-:::
-
-Admin can send a welcome email if the Email option is set, you can learn here how to set up the SMTP server in Immich.
-
-<img
-  src={require('./img/send-user-email-notification.webp').default}
-  width="40%"
-  title="Send user email notification"
-/>
-
 ## Set Storage Quota For User

 Admin can specify the storage quota for the user as the instance's admin; once the limit is reached, the user won't be able to upload to the instance anymore.
```
````diff
@@ -60,17 +60,17 @@ For RKMPP to work:
 #### Basic Setup

 1. If you do not already have it, download the latest [`hwaccel.transcoding.yml`][hw-file] file and ensure it's in the same folder as the `docker-compose.yml`.
-2. In the `docker-compose.yml` under `immich-server`, uncomment the `extends` section and change `cpu` to the appropriate backend.
+2. In the `docker-compose.yml` under `immich-microservices`, uncomment the `extends` section and change `cpu` to the appropriate backend.

    - For VAAPI on WSL2, be sure to use `vaapi-wsl` rather than `vaapi`

-3. Redeploy the `immich-server` container with these updated settings.
+3. Redeploy the `immich-microservices` container with these updated settings.
 4. In the Admin page under `Video transcoding settings`, change the hardware acceleration setting to the appropriate option and save.
 5. (Optional) If using a compatible backend, you may enable hardware decoding for optimal performance.

 #### Single Compose File

-Some platforms, including Unraid and Portainer, do not support multiple Compose files as of writing. As an alternative, you can "inline" the relevant contents of the [`hwaccel.transcoding.yml`][hw-file] file into the `immich-server` service directly.
+Some platforms, including Unraid and Portainer, do not support multiple Compose files as of writing. As an alternative, you can "inline" the relevant contents of the [`hwaccel.transcoding.yml`][hw-file] file into the `immich-microservices` service directly.

 For example, the `qsv` section in this file is:
@@ -79,22 +79,21 @@ devices:
   - /dev/dri:/dev/dri
 ```

-You can add this to the `immich-server` service instead of extending from `hwaccel.transcoding.yml`:
+You can add this to the `immich-microservices` service instead of extending from `hwaccel.transcoding.yml`:

 ```yaml
-immich-server:
-  container_name: immich_server
+immich-microservices:
+  container_name: immich_microservices
   image: ghcr.io/immich-app/immich-server:${IMMICH_VERSION:-release}
   # Note the lack of an `extends` section
   devices:
     - /dev/dri:/dev/dri
+  command: ['start.sh', 'microservices']
   volumes:
     - ${UPLOAD_LOCATION}:/usr/src/app/upload
     - /etc/localtime:/etc/localtime:ro
   env_file:
     - .env
-  ports:
-    - 2283:3001
   depends_on:
     - redis
     - database
````
Binary file not shown. (Before: 218 KiB)
Binary file not shown. (Before: 17 KiB)
```diff
@@ -1,20 +0,0 @@
-# SMTP settings using Gmail
-
-This guide walks you through how to get the information you need to set up your Immich instance to send emails using Gmail's SMTP server.
-
-## Create an app password
-
-From your Google account settings
-
-- Add [2-Step Verification](https://support.google.com/accounts/answer/185839) to your Google account (Required)
-- [Create an app password](https://myaccount.google.com/apppasswords).
-
-At the end of creating your app passwords, a password will be displayed; save it, it will be used for the password field when setting up the SMTP server in Immich.
-
-<img src={require('./img/google-app-password.webp').default} title="Authorised redirect URIs" />
-
-## Entering the SMTP credential in Immich
-
-Entering your credential in Immich's email notification settings at `Administration -> Settings -> Notification Settings`
-
-<img src={require('./img/email-settings.png').default} width="80%" title="SMTP settings" />
```
```diff
@@ -38,19 +38,17 @@ Regardless of filesystem, it is not recommended to use a network share for your

 ## General

-| Variable                             | Description                                      |           Default            | Containers               | Workers            |
-| :----------------------------------- | :---------------------------------------------- | :--------------------------: | :----------------------- | :----------------- |
-| `TZ`                                 | Timezone                                         |                              | server                   | microservices      |
-| `IMMICH_ENV`                         | Environment (production, development)            | `production`                 | server, machine learning | api, microservices |
-| `IMMICH_LOG_LEVEL`                   | Log Level (verbose, debug, log, warn, error)     | `log`                        | server, machine learning | api, microservices |
-| `IMMICH_MEDIA_LOCATION`              | Media Location                                   | `./upload`<sup>\*1</sup>     | server                   | api, microservices |
-| `IMMICH_CONFIG_FILE`                 | Path to config file                              |                              | server                   | api, microservices |
-| `IMMICH_WEB_ROOT`                    | Path of root index.html                          | `/usr/src/app/www`           | server                   | api                |
-| `IMMICH_REVERSE_GEOCODING_ROOT`      | Path of reverse geocoding dump directory         | `/usr/src/resources`         | server                   | microservices      |
-| `NO_COLOR`                           | Set to `true` to disable color-coded log output  | `false`                      | server, machine learning |                    |
-| `CPU_CORES`                          | Amount of cores available to the immich server   | auto-detected cpu core count | server                   |                    |
-| `IMMICH_API_METRICS_PORT`            | Port for the OTEL metrics                        | `8081`                       | server                   | api                |
-| `IMMICH_MICROSERVICES_METRICS_PORT`  | Port for the OTEL metrics                        | `8082`                       | server                   | microservices      |
+| Variable                        | Description                                      |           Default            | Containers               | Workers            |
+| :------------------------------ | :---------------------------------------------- | :--------------------------: | :----------------------- | :----------------- |
+| `TZ`                            | Timezone                                         |                              | server                   | microservices      |
+| `IMMICH_ENV`                    | Environment (production, development)            | `production`                 | server, machine learning | api, microservices |
+| `IMMICH_LOG_LEVEL`              | Log Level (verbose, debug, log, warn, error)     | `log`                        | server, machine learning | api, microservices |
+| `IMMICH_MEDIA_LOCATION`         | Media Location                                   | `./upload`<sup>\*1</sup>     | server                   | api, microservices |
+| `IMMICH_CONFIG_FILE`            | Path to config file                              |                              | server                   | api, microservices |
+| `IMMICH_WEB_ROOT`               | Path of root index.html                          | `/usr/src/app/www`           | server                   | api                |
+| `IMMICH_REVERSE_GEOCODING_ROOT` | Path of reverse geocoding dump directory         | `/usr/src/resources`         | server                   | microservices      |
+| `NO_COLOR`                      | Set to `true` to disable color-coded log output  | `false`                      | server, machine learning |                    |
+| `CPU_CORES`                     | Amount of cores available to the immich server   | auto-detected cpu core count | server                   |                    |

 \*1: With the default `WORKDIR` of `/usr/src/app`, this path will resolve to `/usr/src/app/upload`.
 It only need to be set if the Immich deployment method is changing.
```
```diff
@@ -94,10 +94,6 @@ const config = {
           srcDark: 'img/immich-logo-inline-dark.png',
         },
         items: [
-          {
-            type: 'custom-versionSwitcher',
-            position: 'right',
-          },
           {
             to: '/docs/overview/introduction',
             position: 'right',
```
```diff
@@ -1,59 +0,0 @@
-import '@docusaurus/theme-classic/lib/theme/Unlisted/index';
-import { useWindowSize } from '@docusaurus/theme-common';
-import DropdownNavbarItem from '@theme/NavbarItem/DropdownNavbarItem';
-import React, { useEffect, useState } from 'react';
-
-export default function VersionSwitcher(): JSX.Element {
-  const [versions, setVersions] = useState([]);
-  const [label, setLabel] = useState('Versions');
-
-  const windowSize = useWindowSize();
-
-  useEffect(() => {
-    async function getVersions() {
-      try {
-        let baseUrl = 'https://immich.app';
-        if (window.location.origin === 'http://localhost:3005') {
-          baseUrl = window.location.origin;
-        }
-
-        const response = await fetch(`${baseUrl}/archived-versions.json`);
-
-        const archiveVersions = await response.json();
-
-        const allVersions = [
-          { label: 'Next', url: 'https://main.preview.immich.app' },
-          { label: 'Latest', url: 'https://immich.app' },
-          ...archiveVersions,
-        ];
-        setVersions(allVersions);
-
-        const activeVersion = allVersions.find((version) => new URL(version.url).origin === window.location.origin);
-        if (activeVersion) {
-          setLabel(activeVersion.label);
-        }
-      } catch (error) {
-        console.error('Failed to fetch versions', error);
-      }
-    }
-
-    if (versions.length === 0) {
-      getVersions();
-    }
-  }, []);
-
-  return (
-    versions.length > 0 && (
-      <DropdownNavbarItem
-        className="navbar__item"
-        label={label}
-        mobile={windowSize === 'mobile'}
-        items={versions.map(({ label, url }) => ({
-          label,
-          to: url,
-          target: '_self',
-        }))}
-      />
-    )
-  );
-}
```
```diff
@@ -63,14 +63,12 @@ import {
   mdiVectorCombine,
   mdiVideo,
   mdiWeb,
-  mdiContentDuplicate,
 } from '@mdi/js';
 import Layout from '@theme/Layout';
 import React from 'react';
 import { Item, Timeline } from '../components/timeline';

 const releases = {
-  'v1.106.0': new Date(2024, 5, 11),
   'v1.104.0': new Date(2024, 4, 13),
   'v1.103.0': new Date(2024, 3, 29),
   'v1.102.0': new Date(2024, 3, 15),
@@ -218,19 +216,13 @@ const roadmap: Item[] = [
 ];

 const milestones: Item[] = [
-  withRelease({
-    icon: mdiContentDuplicate,
-    title: 'Similar image detection',
-    description: 'Detect duplicate assets that aren’t exactly identical',
-    release: 'v1.106.0',
-  }),
-  withRelease({
-    icon: mdiVectorCombine,
-    title: 'Container consolidation',
-    description:
-      'The microservices container can be run as a worker within the server image, allowing us to remove it from the default stack.',
-    release: 'v1.106.0',
-  }),
+  // withRelease({
+  //   icon: mdiVectorCombine,
+  //   title: 'Container consolidation',
+  //   description:
+  //     'The microservices container can be run as a worker within the server image, allowing us to remove it from the default stack.',
+  //   release: 'v1.106.0',
+  // }),
   withRelease({
     icon: mdiPencil,
     iconColor: 'saddlebrown',
```
```diff
@@ -1,7 +0,0 @@
-import ComponentTypes from '@theme-original/NavbarItem/ComponentTypes';
-import VersionSwitcher from '@site/src/components/version-switcher';
-
-export default {
-  ...ComponentTypes,
-  'custom-versionSwitcher': VersionSwitcher,
-};
```
docs/static/archived-versions.json (58 lines, vendored, deleted)
```diff
@@ -1,58 +0,0 @@
-[
-  {
-    "label": "v1.106.3",
-    "url": "https://v1.106.3.archive.immich.app"
-  },
-  {
-    "label": "v1.106.2",
-    "url": "https://v1.106.2.archive.immich.app"
-  },
-  {
-    "label": "v1.106.1",
-    "url": "https://v1.106.1.archive.immich.app"
-  },
-  {
-    "label": "v1.105.1",
-    "url": "https://v1.105.1.archive.immich.app/"
-  },
-  {
-    "label": "v1.105.0",
-    "url": "https://v1.105.0.archive.immich.app/"
-  },
-  {
-    "label": "v1.104.0",
-    "url": "https://v1.104.0.archive.immich.app/"
-  },
-  {
-    "label": "v1.103.1",
-    "url": "https://v1.103.1.archive.immich.app/"
-  },
-  {
-    "label": "v1.103.0",
-    "url": "https://v1.103.0.archive.immich.app/"
-  },
-  {
-    "label": "v1.102.3",
-    "url": "https://v1.102.3.archive.immich.app/"
-  },
-  {
-    "label": "v1.102.2",
-    "url": "https://v1.102.2.archive.immich.app/"
-  },
-  {
-    "label": "v1.102.1",
-    "url": "https://v1.102.1.archive.immich.app/"
-  },
-  {
-    "label": "v1.102.0",
-    "url": "https://v1.102.0.archive.immich.app/"
-  },
-  {
-    "label": "v1.101.0",
-    "url": "https://v1.101.0.archive.immich.app/"
-  },
-  {
-    "label": "v1.100.0",
-    "url": "https://v1.100.0.archive.immich.app/"
-  }
-]
```
e2e/package-lock.json (10 lines, generated)
```diff
@@ -1,12 +1,12 @@
 {
   "name": "immich-e2e",
-  "version": "1.106.3",
+  "version": "1.105.1",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "immich-e2e",
-      "version": "1.106.3",
+      "version": "1.105.1",
       "license": "GNU Affero General Public License version 3",
       "devDependencies": {
         "@immich/cli": "file:../cli",
@@ -39,7 +39,7 @@
     },
     "../cli": {
       "name": "@immich/cli",
-      "version": "2.2.3",
+      "version": "2.2.0",
       "dev": true,
       "license": "GNU Affero General Public License version 3",
       "dependencies": {
@@ -81,14 +81,14 @@
     },
     "../open-api/typescript-sdk": {
       "name": "@immich/sdk",
-      "version": "1.106.3",
+      "version": "1.105.1",
       "dev": true,
       "license": "GNU Affero General Public License version 3",
       "dependencies": {
         "@oazapfts/runtime": "^1.0.2"
       },
       "devDependencies": {
-        "@types/node": "^20.11.0",
+        "@types/node": "^20.12.13",
         "typescript": "^5.3.3"
       }
     },
```

e2e/package.json

```diff
@@ -1,6 +1,6 @@
 {
   "name": "immich-e2e",
-  "version": "1.106.3",
+  "version": "1.105.1",
   "description": "",
   "main": "index.js",
   "type": "module",
```
```diff
@@ -1148,29 +1148,4 @@ describe('/asset', () => {
       expect(video.checksum).toStrictEqual(checksum);
     });
   });
-
-  describe('POST /assets/exist', () => {
-    it('ignores invalid deviceAssetIds', async () => {
-      const response = await utils.checkExistingAssets(user1.accessToken, {
-        deviceId: 'test-assets-exist',
-        deviceAssetIds: ['invalid', 'INVALID'],
-      });
-
-      expect(response.existingIds).toHaveLength(0);
-    });
-
-    it('returns the IDs of existing assets', async () => {
-      await utils.createAsset(user1.accessToken, {
-        deviceId: 'test-assets-exist',
-        deviceAssetId: 'test-asset-0',
-      });
-
-      const response = await utils.checkExistingAssets(user1.accessToken, {
-        deviceId: 'test-assets-exist',
-        deviceAssetIds: ['test-asset-0'],
-      });
-
-      expect(response.existingIds).toEqual(['test-asset-0']);
-    });
-  });
 });
```
```diff
@@ -1,11 +1,4 @@
-import {
-  LibraryResponseDto,
-  LoginResponseDto,
-  ScanLibraryDto,
-  getAllLibraries,
-  removeOfflineFiles,
-  scanLibrary,
-} from '@immich/sdk';
+import { LibraryResponseDto, LoginResponseDto, ScanLibraryDto, getAllLibraries, scanLibrary } from '@immich/sdk';
 import { cpSync, existsSync } from 'node:fs';
 import { Socket } from 'socket.io-client';
 import { userDto, uuidDto } from 'src/fixtures';
```
```diff
@@ -391,51 +384,6 @@ describe('/libraries', () => {
       );
     });

-    it('should not try to delete offline files', async () => {
-      utils.createImageFile(`${testAssetDir}/temp/offline1/assetA.png`);
-
-      const library = await utils.createLibrary(admin.accessToken, {
-        ownerId: admin.userId,
-        importPaths: [`${testAssetDirInternal}/temp/offline1`],
-      });
-
-      await scan(admin.accessToken, library.id);
-      await utils.waitForQueueFinish(admin.accessToken, 'library');
-
-      const { assets: initialAssets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
-      expect(initialAssets).toEqual({
-        count: 1,
-        total: 1,
-        facets: [],
-        items: [expect.objectContaining({ originalFileName: 'assetA.png' })],
-        nextPage: null,
-      });
-
-      utils.removeImageFile(`${testAssetDir}/temp/offline1/assetA.png`);
-
-      await scan(admin.accessToken, library.id);
-      await utils.waitForQueueFinish(admin.accessToken, 'library');
-
-      const { assets: offlineAssets } = await utils.metadataSearch(admin.accessToken, {
-        libraryId: library.id,
-        isOffline: true,
-      });
-      expect(offlineAssets).toEqual({
-        count: 1,
-        total: 1,
-        facets: [],
-        items: [expect.objectContaining({ originalFileName: 'assetA.png' })],
-        nextPage: null,
-      });
-
-      utils.createImageFile(`${testAssetDir}/temp/offline1/assetA.png`);
-      await removeOfflineFiles({ id: library.id }, { headers: asBearerAuth(admin.accessToken) });
-      await utils.waitForQueueFinish(admin.accessToken, 'library');
-      await utils.waitForWebsocketEvent({ event: 'assetDelete', total: 1 });
-
-      expect(existsSync(`${testAssetDir}/temp/offline1/assetA.png`)).toBe(true);
-    });
-
     it('should scan new files', async () => {
       const library = await utils.createLibrary(admin.accessToken, {
         ownerId: admin.userId,
@@ -559,10 +507,10 @@ describe('/libraries', () => {
     it('should remove offline files', async () => {
       const library = await utils.createLibrary(admin.accessToken, {
         ownerId: admin.userId,
-        importPaths: [`${testAssetDirInternal}/temp/offline2`],
+        importPaths: [`${testAssetDirInternal}/temp`],
       });

-      utils.createImageFile(`${testAssetDir}/temp/offline2/assetA.png`);
+      utils.createImageFile(`${testAssetDir}/temp/directoryA/assetB.png`);

       await scan(admin.accessToken, library.id);
       await utils.waitForQueueFinish(admin.accessToken, 'library');
@@ -570,9 +518,9 @@ describe('/libraries', () => {
       const { assets: initialAssets } = await utils.metadataSearch(admin.accessToken, {
         libraryId: library.id,
       });
-      expect(initialAssets.count).toBe(1);
+      expect(initialAssets.count).toBe(3);

-      utils.removeImageFile(`${testAssetDir}/temp/offline2/assetA.png`);
+      utils.removeImageFile(`${testAssetDir}/temp/directoryA/assetB.png`);

       await scan(admin.accessToken, library.id);
       await utils.waitForQueueFinish(admin.accessToken, 'library');
@@ -593,7 +541,7 @@ describe('/libraries', () => {

       const { assets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });

-      expect(assets.count).toBe(0);
+      expect(assets.count).toBe(2);
     });

     it('should not remove online files', async () => {
```
```diff
@@ -3,7 +3,6 @@ import {
   AssetMediaCreateDto,
   AssetMediaResponseDto,
   AssetResponseDto,
-  CheckExistingAssetsDto,
   CreateAlbumDto,
   CreateLibraryDto,
   MetadataSearchDto,
@@ -11,7 +10,6 @@ import {
   SharedLinkCreateDto,
   UserAdminCreateDto,
   ValidateLibraryDto,
-  checkExistingAssets,
   createAlbum,
   createApiKey,
   createLibrary,
@@ -376,9 +374,6 @@ export const utils = {

   getAssetInfo: (accessToken: string, id: string) => getAssetInfo({ id }, { headers: asBearerAuth(accessToken) }),

-  checkExistingAssets: (accessToken: string, checkExistingAssetsDto: CheckExistingAssetsDto) =>
-    checkExistingAssets({ checkExistingAssetsDto }, { headers: asBearerAuth(accessToken) }),
-
   metadataSearch: async (accessToken: string, dto: MetadataSearchDto) => {
     return searchMetadata({ metadataSearchDto: dto }, { headers: asBearerAuth(accessToken) });
   },
```
```diff
@@ -1,40 +1,33 @@
 from pathlib import Path
 from typing import Any

 import numpy as np
-from insightface.model_zoo import RetinaFace
+import onnxruntime as ort
 from numpy.typing import NDArray

 from app.models.base import InferenceModel
-from app.models.transforms import decode_cv2
+from app.models.session import ort_has_batch_dim, ort_expand_outputs
+from app.models.transforms import decode_pil
 from app.schemas import FaceDetectionOutput, ModelSession, ModelTask, ModelType

+from .scrfd import SCRFD
+from PIL import Image
+from PIL.ImageOps import pad
+

 class FaceDetector(InferenceModel):
     depends = []
     identity = (ModelType.DETECTION, ModelTask.FACIAL_RECOGNITION)

     def __init__(
         self,
         model_name: str,
         min_score: float = 0.7,
         cache_dir: Path | str | None = None,
         **model_kwargs: Any,
     ) -> None:
         self.min_score = model_kwargs.pop("minScore", min_score)
         super().__init__(model_name, cache_dir, **model_kwargs)

     def _load(self) -> ModelSession:
         session = self._make_session(self.model_path)
-        self.model = RetinaFace(session=session)
-        self.model.prepare(ctx_id=0, det_thresh=self.min_score, input_size=(640, 640))
+        if isinstance(session, ort.InferenceSession) and not ort_has_batch_dim(session):
+            ort_expand_outputs(session)
+        self.model = SCRFD(session=session)

         return session

-    def _predict(self, inputs: NDArray[np.uint8] | bytes, **kwargs: Any) -> FaceDetectionOutput:
-        inputs = decode_cv2(inputs)
+    def _predict(self, inputs: NDArray[np.uint8] | bytes | Image.Image, **kwargs: Any) -> FaceDetectionOutput:
+        inputs = self._transform(inputs)

-        bboxes, landmarks = self._detect(inputs)
+        [bboxes], [landmarks] = self.model.detect(inputs, threshold=kwargs.pop("minScore", 0.7))
         return {
             "boxes": bboxes[:, :4].round(),
             "scores": bboxes[:, 4],
@@ -44,5 +37,7 @@ class FaceDetector(InferenceModel):
-    def _detect(self, inputs: NDArray[np.uint8] | bytes) -> tuple[NDArray[np.float32], NDArray[np.float32]]:
-        return self.model.detect(inputs)  # type: ignore
-
-    def configure(self, **kwargs: Any) -> None:
-        self.model.det_thresh = kwargs.pop("minScore", self.model.det_thresh)
+    def _transform(self, inputs: NDArray[np.uint8] | bytes | Image.Image) -> NDArray[np.uint8]:
+        image = decode_pil(inputs)
+        padded = pad(image, (640, 640), method=Image.Resampling.BICUBIC)
+        return np.array(padded, dtype=np.uint8)[None, ...]
```
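One behavioral detail of the new detector path: `_transform` letterboxes every input to the fixed 640×640 network size with `PIL.ImageOps.pad`, which scales the image to fit while preserving aspect ratio and pads the remainder, then adds a leading batch axis. A small standalone sketch of just that step (toy image size assumed):

```python
import numpy as np
from PIL import Image
from PIL.ImageOps import pad

# A 1280x720 dummy frame; pad() scales it to fit 640x640 and letterboxes the rest.
image = Image.new("RGB", (1280, 720), color=(255, 0, 0))
padded = pad(image, (640, 640), method=Image.Resampling.BICUBIC)
print(padded.size)  # (640, 640)

# Same trailing step as _transform: an HWC uint8 array with a leading batch axis.
batch = np.array(padded, dtype=np.uint8)[None, ...]
print(batch.shape)  # (1, 640, 640, 3)
```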
```diff
@@ -2,16 +2,15 @@ from pathlib import Path
 from typing import Any

 import numpy as np
-import onnx
 import onnxruntime as ort
 from insightface.model_zoo import ArcFaceONNX
 from insightface.utils.face_align import norm_crop
 from numpy.typing import NDArray
-from onnx.tools.update_model_dims import update_inputs_outputs_dims
+from PIL import Image

 from app.config import clean_name, log
 from app.models.base import InferenceModel
+from app.models.session import ort_add_batch_dim, ort_has_batch_dim
 from app.models.transforms import decode_cv2
 from app.schemas import FaceDetectionOutput, FacialRecognitionOutput, ModelSession, ModelTask, ModelType
@@ -32,8 +31,9 @@ class FaceRecognizer(InferenceModel):

     def _load(self) -> ModelSession:
         session = self._make_session(self.model_path)
-        if not self._has_batch_dim(session):
-            self._add_batch_dim(self.model_path)
+        if isinstance(session, ort.InferenceSession) and not ort_has_batch_dim(session):
+            log.info(f"Adding batch dimension to recognition model {self.model_name}")
+            ort_add_batch_dim(self.model_path, self.model_path)
             session = self._make_session(self.model_path)
         self.model = ArcFaceONNX(
             self.model_path.with_suffix(".onnx").as_posix(),
@@ -62,16 +62,3 @@ class FaceRecognizer(InferenceModel):

     def _crop(self, image: NDArray[np.uint8], faces: FaceDetectionOutput) -> list[NDArray[np.uint8]]:
         return [norm_crop(image, landmark) for landmark in faces["landmarks"]]
-
-    def _has_batch_dim(self, session: ort.InferenceSession) -> bool:
-        return not isinstance(session, ort.InferenceSession) or session.get_inputs()[0].shape[0] == "batch"
-
-    def _add_batch_dim(self, model_path: Path) -> None:
-        log.debug(f"Adding batch dimension to model {model_path}")
-        proto = onnx.load(model_path)
-        static_input_dims = [shape.dim_value for shape in proto.graph.input[0].type.tensor_type.shape.dim[1:]]
-        static_output_dims = [shape.dim_value for shape in proto.graph.output[0].type.tensor_type.shape.dim[1:]]
-        input_dims = {proto.graph.input[0].name: ["batch"] + static_input_dims}
-        output_dims = {proto.graph.output[0].name: ["batch"] + static_output_dims}
-        updated_proto = update_inputs_outputs_dims(proto, input_dims, output_dims)
-        onnx.save(updated_proto, model_path)
```
machine-learning/app/models/facial_recognition/scrfd.py (325 lines, new file)
```diff
@@ -0,0 +1,325 @@
+# Based on InsightFace-REST by SthPhoenix https://github.com/SthPhoenix/InsightFace-REST/blob/master/src/api_trt/modules/model_zoo/detectors/scrfd.py
+# Primary changes made:
+# 1. Removed CuPy-related code
+# 2. Adapted proposal generation to be thread-safe
+# 3. Added typing
+# 4. Assume RGB input
+# 5. Removed unused variables
+
+# Copyright 2021 SthPhoenix
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# -*- coding: utf-8 -*-
+# Based on Jia Guo reference implementation at
+# https://github.com/deepinsight/insightface/blob/master/detection/scrfd/tools/scrfd.py
+
+
+from __future__ import division
+
+import cv2
+import numpy as np
+from numba import njit
+from app.schemas import ModelSession
+from numpy.typing import NDArray
+
+
+@njit(cache=True, nogil=True)
+def nms(dets, threshold: float = 0.4) -> NDArray[np.float32]:
+    x1 = dets[:, 0]
+    y1 = dets[:, 1]
+    x2 = dets[:, 2]
+    y2 = dets[:, 3]
+    scores = dets[:, 4]
+
+    areas = (x2 - x1 + 1) * (y2 - y1 + 1)
+    order = scores.argsort()[::-1]
+
+    keep = []
+    while order.size > 0:
+        i = order[0]
+        keep.append(i)
+        xx1 = np.maximum(x1[i], x1[order[1:]])
+        yy1 = np.maximum(y1[i], y1[order[1:]])
+        xx2 = np.minimum(x2[i], x2[order[1:]])
+        yy2 = np.minimum(y2[i], y2[order[1:]])
+
+        w = np.maximum(0.0, xx2 - xx1 + 1)
+        h = np.maximum(0.0, yy2 - yy1 + 1)
+        inter = w * h
+        ovr = inter / (areas[i] + areas[order[1:]] - inter)
+
+        inds = np.where(ovr <= threshold)[0]
+        order = order[inds + 1]
+
+    return np.asarray(keep)
+
+
+@njit(fastmath=True, cache=True, nogil=True)
+def single_distance2bbox(point: NDArray[np.float32], distance: NDArray[np.float32], stride: int) -> NDArray[np.float32]:
+    """
+    Fast conversion of single bbox distances to coordinates
+
+    :param point: Anchor point
+    :param distance: Bbox distances from anchor point
+    :param stride: Current stride scale
+    :return: bbox
+    """
+    distance[0] = point[0] - distance[0] * stride
+    distance[1] = point[1] - distance[1] * stride
+    distance[2] = point[0] + distance[2] * stride
+    distance[3] = point[1] + distance[3] * stride
+    return distance
+
+
+@njit(fastmath=True, cache=True, nogil=True)
+def single_distance2kps(point: NDArray[np.float32], distance: NDArray[np.float32], stride: int) -> NDArray[np.float32]:
+    """
+    Fast conversion of single keypoint distances to coordinates
+
+    :param point: Anchor point
+    :param distance: Keypoint distances from anchor point
+    :param stride: Current stride scale
+    :return: keypoint
+    """
+    for ix in range(0, distance.shape[0], 2):
+        distance[ix] = distance[ix] * stride + point[0]
+        distance[ix + 1] = distance[ix + 1] * stride + point[1]
+    return distance
+
+
+@njit(fastmath=True, cache=True, nogil=True)
+def generate_proposals(
+    score_blob: NDArray[np.float32],
+    bbox_blob: NDArray[np.float32],
+    kpss_blob: NDArray[np.float32],
+    stride: int,
+    anchors: NDArray[np.float32],
+    threshold: float,
+) -> tuple[NDArray[np.float32], NDArray[np.float32], NDArray[np.float32]]:
+    """
+    Convert distances from anchors to actual coordinates on source image
+    and filter proposals by confidence threshold.
+
+    :param score_blob: Raw scores for stride
+    :param bbox_blob: Raw bbox distances for stride
+    :param kpss_blob: Raw keypoints distances for stride
+    :param stride: Stride scale
+    :param anchors: Precomputed anchors for stride
+    :param threshold: Confidence threshold
+    :return: Filtered scores, bboxes and keypoints
+    """
+
+    idxs = []
+    for ix in range(score_blob.shape[0]):
+        if score_blob[ix][0] > threshold:
+            idxs.append(ix)
+
+    score_out = np.empty((len(idxs), 1), dtype="float32")
+    bbox_out = np.empty((len(idxs), 4), dtype="float32")
+    kpss_out = np.empty((len(idxs), 10), dtype="float32")
+
+    for i in range(len(idxs)):
+        ix = idxs[i]
+        score_out[i] = score_blob[ix]
+        bbox_out[i] = single_distance2bbox(anchors[ix], bbox_blob[ix], stride)
+        kpss_out[i] = single_distance2kps(anchors[ix], kpss_blob[ix], stride)
+
+    return score_out, bbox_out, kpss_out
+
+
+@njit(fastmath=True, cache=True, nogil=True)
+def filter(
+    bboxes_list: NDArray[np.float32],
+    kpss_list: NDArray[np.float32],
+    scores_list: NDArray[np.float32],
+    nms_threshold: float = 0.4,
+) -> tuple[NDArray[np.float32], NDArray[np.float32]]:
+    """
+    Filter postprocessed network outputs with NMS
+
+    :param bboxes_list: List of bboxes (np.ndarray)
+    :param kpss_list: List of keypoints (np.ndarray)
+    :param scores_list: List of scores (np.ndarray)
+    :return: Face bboxes with scores [t,l,b,r,score], and key points
+    """
+
+    pre_det = np.hstack((bboxes_list, scores_list))
+    keep = nms(pre_det, threshold=nms_threshold)
+    det = pre_det[keep, :]
+    kpss = kpss_list[keep, :]
+    kpss = kpss.reshape((kpss.shape[0], -1, 2))
+
+    return det, kpss
+
+
+class SCRFD:
+    def __init__(self, session: ModelSession):
+        self.session = session
+        self.center_cache: dict[tuple[int, int], NDArray[np.float32]] = {}
+        self.nms_threshold = 0.4
+        self.fmc = 3
+        self._feat_stride_fpn = [8, 16, 32]
+        self._num_anchors = 2
+
+    def prepare(self, nms_threshold: float = 0.4) -> None:
+        """
+        Populate class parameters
+
+        :param nms_threshold: Threshold for NMS IoU
+        """
+
+        self.nms_threshold = nms_threshold
+
+    def detect(
+        self, imgs: NDArray[np.uint8], threshold: float = 0.5
+    ) -> tuple[list[NDArray[np.float32]], list[NDArray[np.float32]]]:
+        """
+        Run detection pipeline for provided images
+
+        :param img: Raw image as nd.ndarray with HWC shape
+        :param threshold: Confidence threshold
+        :return: Face bboxes with scores [t,l,b,r,score], and key points
+        """
+
+        height, width = imgs.shape[1:3]
+        blob = self._preprocess(imgs)
+        net_outs = self._forward(blob)
+
+        batch_bboxes, batch_kpss, batch_scores = self._postprocess(net_outs, height, width, threshold)
+
+        dets_list = []
+        kpss_list = []
+        for e in range(imgs.shape[0]):
+            if len(batch_bboxes[e]) == 0:
+                det, kpss = np.zeros((0, 5), dtype="float32"), np.zeros((0, 10), dtype="float32")
+            else:
+                det, kpss = filter(batch_bboxes[e], batch_kpss[e], batch_scores[e], self.nms_threshold)
+
+            dets_list.append(det)
+            kpss_list.append(kpss)
+
+        return dets_list, kpss_list
+
+    @staticmethod
+    def _build_anchors(
+        input_height: int, input_width: int, strides: list[int], num_anchors: int
+    ) -> NDArray[np.float32]:
+        """
+        Precompute anchor points for provided image size
+
+        :param input_height: Input image height
+        :param input_width: Input image width
+        :param strides: Model strides
+        :param num_anchors: Model num anchors
+        :return: box centers
+        """
+
+        centers = []
+        for stride in strides:
+            height = input_height // stride
+            width = input_width // stride
+
+            anchor_centers = np.stack(np.mgrid[:height, :width][::-1], axis=-1).astype(np.float32)
+            anchor_centers = (anchor_centers * stride).reshape((-1, 2))
+            if num_anchors > 1:
+                anchor_centers = np.stack([anchor_centers] * num_anchors, axis=1).reshape((-1, 2))
+            centers.append(anchor_centers)
+        return centers
+
+    def _preprocess(self, images: NDArray[np.uint8]):
+        """
+        Normalize image on CPU if backend can't provide CUDA stream,
+        otherwise preprocess image on GPU using CuPy
+
+        :param img: Raw image as np.ndarray with HWC shape
+        :return: Preprocessed image or None if image was processed on device
+        """
+
+        input_size = tuple(images[0].shape[0:2][::-1])
+        return cv2.dnn.blobFromImages(images, 1.0 / 128, input_size, (127.5, 127.5, 127.5), swapRB=False)
+
+    def _forward(self, blob: NDArray[np.float32]) -> list[NDArray[np.float32]]:
+        """
+        Send input data to inference backend.
+
+        :param blob: Preprocessed image of shape NCHW or None
+        :return: network outputs
+        """
+
+        return self.session.run(None, {"input.1": blob})
+
+    def _postprocess(
+        self, net_outs: list[NDArray[np.float32]], height: int, width: int, threshold: float
+    ) -> tuple[list[NDArray[np.float32]], list[NDArray[np.float32]], list[NDArray[np.float32]]]:
+        """
+        Precompute anchor points for provided image size and process network outputs
+
+        :param net_outs: Network outputs
+        :param input_height: Input image height
+        :param input_width: Input image width
+        :param threshold: Confidence threshold
+        :return: filtered bboxes, keypoints and scores
+        """
+
+        key = (height, width)
+
+        if not self.center_cache.get(key):
+            self.center_cache[key] = self._build_anchors(height, width, self._feat_stride_fpn, self._num_anchors)
+        anchor_centers = self.center_cache[key]
+        bboxes, kpss, scores = self._process_strides(net_outs, threshold, anchor_centers)
+        return bboxes, kpss, scores
+
+    def _process_strides(
+        self, net_outs: list[NDArray[np.float32]], threshold: float, anchors: NDArray[np.float32]
+    ) -> tuple[list[NDArray[np.float32]], list[NDArray[np.float32]], list[NDArray[np.float32]]]:
+        """
+        Process network outputs by strides and return results proposals filtered by threshold
+
+        :param net_outs: Network outputs
+        :param threshold: Confidence threshold
+        :param anchor_centers: Precomputed anchor centers for all strides
+        :return: filtered bboxes, keypoints and scores
+        """
+
+        batch_size = net_outs[0].shape[0]
+        bboxes_by_img = []
+        kpss_by_img = []
+        scores_by_img = []
+
+        for batch in range(batch_size):
+            scores_strided = []
+            bboxes_strided = []
+            kpss_strided = []
+            for idx, stride in enumerate(self._feat_stride_fpn):
+                score_blob = net_outs[idx][batch]
+                bbox_blob = net_outs[idx + self.fmc][batch]
+                kpss_blob = net_outs[idx + self.fmc * 2][batch]
+                stride_anchors = anchors[idx]
+                score_list, bbox_list, kpss_list = generate_proposals(
+                    score_blob,
+                    bbox_blob,
+                    kpss_blob,
+                    stride,
+                    stride_anchors,
+                    threshold,
+                )
+
+                scores_strided.append(score_list)
+                bboxes_strided.append(bbox_list)
+                kpss_strided.append(kpss_list)
+            bboxes_by_img.append(np.concatenate(bboxes_strided, axis=0))
+            kpss_by_img.append(np.concatenate(kpss_strided, axis=0))
+            scores_by_img.append(np.concatenate(scores_strided, axis=0))
+
+        return bboxes_by_img, kpss_by_img, scores_by_img
```
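The heart of the SCRFD decode step is `single_distance2bbox`: for every anchor point the network predicts four distances (left, top, right, bottom) in stride units, and the helper turns them back into absolute pixel coordinates. A tiny standalone check of that arithmetic, using toy values with no model involved:

```python
import numpy as np

def distance2bbox(point, distance, stride):
    # Same arithmetic as single_distance2bbox above, without the numba JIT.
    return np.array(
        [
            point[0] - distance[0] * stride,  # x1: subtract left distance
            point[1] - distance[1] * stride,  # y1: subtract top distance
            point[0] + distance[2] * stride,  # x2: add right distance
            point[1] + distance[3] * stride,  # y2: add bottom distance
        ],
        dtype=np.float32,
    )

anchor = np.array([64.0, 64.0], dtype=np.float32)         # anchor center in pixels
dists = np.array([2.0, 3.0, 4.0, 5.0], dtype=np.float32)  # distances in stride units
print(distance2bbox(anchor, dists, stride=8))
# x1=48, y1=40, x2=96, y2=104
```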
machine-learning/app/models/session.py (new file)

```diff
@@ -0,0 +1,34 @@
+from pathlib import Path
+
+import numpy as np
+import onnx
+import onnxruntime as ort
+from numpy.typing import NDArray
+from onnx.shape_inference import infer_shapes
+from onnx.tools.update_model_dims import update_inputs_outputs_dims
+
+
+def ort_has_batch_dim(session: ort.InferenceSession) -> bool:
+    return session.get_inputs()[0].shape[0] == "batch"
+
+
+def ort_expand_outputs(session: ort.InferenceSession) -> None:
+    original_run = session.run
+
+    def run(output_names: list[str], input_feed: dict[str, NDArray[np.float32]]) -> list[NDArray[np.float32]]:
+        out: list[NDArray[np.float32]] = original_run(output_names, input_feed)
+        out = [np.expand_dims(o, axis=0) for o in out]
+        return out
+
+    session.run = run
+
+
+def ort_add_batch_dim(input_path: Path, output_path: Path) -> None:
+    proto = onnx.load(input_path)
+    static_input_dims = [shape.dim_value for shape in proto.graph.input[0].type.tensor_type.shape.dim[1:]]
+    static_output_dims = [shape.dim_value for shape in proto.graph.output[0].type.tensor_type.shape.dim[1:]]
+    input_dims = {proto.graph.input[0].name: ["batch"] + static_input_dims}
+    output_dims = {proto.graph.output[0].name: ["batch"] + static_output_dims}
+    updated_proto = update_inputs_outputs_dims(proto, input_dims, output_dims)
+    inferred = infer_shapes(updated_proto)
+    onnx.save(inferred, output_path)
```
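`ort_add_batch_dim` rewrites an ONNX model whose leading input/output dimension is fixed so that it becomes a symbolic `"batch"` axis, then re-runs shape inference before saving. A hedged usage sketch matching how `FaceRecognizer._load` calls it; the model file name here is a placeholder:

```python
from pathlib import Path

import onnxruntime as ort

from app.models.session import ort_add_batch_dim, ort_has_batch_dim

model_path = Path("recognition.onnx")  # hypothetical model file

session = ort.InferenceSession(model_path.as_posix())
if not ort_has_batch_dim(session):
    # Rewrite in place, then reload the session so the new symbolic
    # "batch" dimension takes effect.
    ort_add_batch_dim(model_path, model_path)
    session = ort.InferenceSession(model_path.as_posix())

print(session.get_inputs()[0].shape)  # e.g. ['batch', 3, 112, 112]
```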
@@ -3,6 +3,7 @@ from typing import IO

import cv2
import numpy as np
from numba import njit
from numpy.typing import NDArray
from PIL import Image

@@ -30,10 +31,11 @@ def to_numpy(img: Image.Image) -> NDArray[np.float32]:
    return np.asarray(img if img.mode == "RGB" else img.convert("RGB"), dtype=np.float32) / 255.0


@njit(cache=True, fastmath=True, nogil=True)
def normalize(
    img: NDArray[np.float32], mean: float | NDArray[np.float32], std: float | NDArray[np.float32]
) -> NDArray[np.float32]:
    return np.divide(img - mean, std, dtype=np.float32)
    return (img - mean) / std


def get_pil_resampling(resample: str) -> Image.Resampling:

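The normalize hunk above swaps np.divide(img - mean, std, dtype=np.float32) for plain arithmetic, which is what allows the new @njit decorator to compile the function: numba's nopython mode does not support the dtype keyword on NumPy ufuncs. A minimal sketch of calling the jitted function; the mean/std values are the common ImageNet constants, chosen for illustration rather than taken from this repo:

# Illustrative call; mean/std are assumptions, not the project's real constants.
import numpy as np

img = np.random.rand(224, 224, 3).astype(np.float32)
mean = np.array([0.485, 0.456, 0.406], dtype=np.float32)
std = np.array([0.229, 0.224, 0.225], dtype=np.float32)

normalized = normalize(img, mean, std)  # first call triggers numba compilation
assert normalized.dtype == np.float32   # float32 in, float32 out
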
@@ -7,6 +7,7 @@ from types import SimpleNamespace
from typing import Any, Callable
from unittest import mock

from app.models.session import ort_add_batch_dim, ort_has_batch_dim, ort_squeeze_outputs
import cv2
import numpy as np
import onnxruntime as ort
@@ -734,3 +735,69 @@ class TestEndpoints:
        assert expected_face["boundingBox"] == actual_face["boundingBox"]
        assert np.allclose(expected_face["embedding"], actual_face["embedding"])
        assert np.allclose(expected_face["score"], actual_face["score"])


class TestSessionUtils:
    def test_ort_has_batch_dim(self, mocker: MockerFixture) -> None:
        mock_session = mocker.Mock(spec=ort.InferenceSession)
        mock_session.get_inputs.return_value = [SimpleNamespace(shape=["batch", 3, 224, 224], name="input.1")]

        assert ort_has_batch_dim(mock_session) is True

    def test_ort_has_no_batch_dim(self, mocker: MockerFixture) -> None:
        mock_session = mocker.Mock(spec=ort.InferenceSession)
        mock_session.get_inputs.return_value = [SimpleNamespace(shape=[1, 3, 224, 224], name="input.1")]

        assert ort_has_batch_dim(mock_session) is False

    def test_ort_squeeze_outputs(self, mocker: MockerFixture) -> None:
        mock_session = mocker.Mock(spec=ort.InferenceSession)
        mock_session.run.return_value = [np.random.rand(1, 3, 224, 224).astype(np.float32)]

        ort_squeeze_outputs(mock_session)
        out = mock_session.run(["output"], {"input": np.random.rand(1, 3, 224, 224).astype(np.float32)})

        assert len(out) == 1
        assert out[0].shape == (3, 224, 224)

    def test_ort_add_batch_dim(self, mocker: MockerFixture) -> None:
        mock_proto = mocker.Mock()
        mock_proto.graph.input = [
            SimpleNamespace(name="input", type=mock.Mock(tensor_type=SimpleNamespace(shape=SimpleNamespace())))
        ]
        mock_proto.graph.output = [
            SimpleNamespace(name="output", type=SimpleNamespace(tensor_type=SimpleNamespace(shape=SimpleNamespace())))
        ]
        mock_proto.graph.input[0].type.tensor_type.shape.dim = [
            SimpleNamespace(dim_value=3),
            SimpleNamespace(dim_value=224),
            SimpleNamespace(dim_value=224),
        ]
        mock_proto.graph.output[0].type.tensor_type.shape.dim = [
            SimpleNamespace(dim_value=3),
            SimpleNamespace(dim_value=224),
            SimpleNamespace(dim_value=224),
        ]

        mock_load = mocker.patch("app.models.session.onnx.load")
        mock_load.return_value = mock_proto

        mock_update_dims = mocker.patch("app.models.session.update_inputs_outputs_dims")
        mock_updated = mocker.Mock()
        mock_update_dims.return_value = mock_updated

        mock_infer_shapes = mocker.patch("app.models.session.infer_shapes")
        mock_inferred = mocker.Mock()
        mock_infer_shapes.return_value = mock_inferred

        mock_save = mocker.patch("app.models.session.onnx.save")

        input_path, output_path = Path("input.onnx"), Path("output.onnx")
        ort_add_batch_dim(input_path, output_path)

        mock_load.assert_called_once_with(input_path)
        mock_save.assert_called_once_with(mock_inferred, output_path)
        mock_update_dims.assert_called_once_with(mock_proto, mock.ANY, mock.ANY)
        assert mock_update_dims.call_args_list == [
            mock.call(mock_proto, {"input": ["batch", 224, 224]}, {"output": ["batch", 224, 224]})
        ]

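One detail worth noting in the tests above: the patch targets (app.models.session.onnx.load, app.models.session.update_inputs_outputs_dims, app.models.session.infer_shapes) name each object as it is reached from the module under test, following the standard unittest.mock rule of patching where a name is looked up rather than where it is defined.
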
134 machine-learning/poetry.lock generated
@@ -1236,13 +1236,13 @@ socks = ["socksio (==1.*)"]

[[package]]
name = "huggingface-hub"
version = "0.23.3"
version = "0.23.2"
description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub"
optional = false
python-versions = ">=3.8.0"
files = [
    {file = "huggingface_hub-0.23.3-py3-none-any.whl", hash = "sha256:22222c41223f1b7c209ae5511d2d82907325a0e3cdbce5f66949d43c598ff3bc"},
    {file = "huggingface_hub-0.23.3.tar.gz", hash = "sha256:1a1118a0b3dea3bab6c325d71be16f5ffe441d32f3ac7c348d6875911b694b5b"},
    {file = "huggingface_hub-0.23.2-py3-none-any.whl", hash = "sha256:48727a16e704d409c4bb5913613308499664f22a99743435dc3a13b23c485827"},
    {file = "huggingface_hub-0.23.2.tar.gz", hash = "sha256:f6829b62d5fdecb452a76fdbec620cba4c1573655a8d710c1df71735fd9edbd2"},
]

[package.dependencies]
@@ -1528,6 +1528,36 @@ files = [
lint = ["pre-commit (>=3.3)"]
test = ["pytest (>=7.4)", "pytest-cov (>=4.1)"]

[[package]]
name = "llvmlite"
version = "0.42.0"
description = "lightweight wrapper around basic LLVM functionality"
optional = false
python-versions = ">=3.9"
files = [
    {file = "llvmlite-0.42.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3366938e1bf63d26c34fbfb4c8e8d2ded57d11e0567d5bb243d89aab1eb56098"},
    {file = "llvmlite-0.42.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c35da49666a21185d21b551fc3caf46a935d54d66969d32d72af109b5e7d2b6f"},
    {file = "llvmlite-0.42.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70f44ccc3c6220bd23e0ba698a63ec2a7d3205da0d848804807f37fc243e3f77"},
    {file = "llvmlite-0.42.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:763f8d8717a9073b9e0246998de89929071d15b47f254c10eef2310b9aac033d"},
    {file = "llvmlite-0.42.0-cp310-cp310-win_amd64.whl", hash = "sha256:8d90edf400b4ceb3a0e776b6c6e4656d05c7187c439587e06f86afceb66d2be5"},
    {file = "llvmlite-0.42.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ae511caed28beaf1252dbaf5f40e663f533b79ceb408c874c01754cafabb9cbf"},
    {file = "llvmlite-0.42.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81e674c2fe85576e6c4474e8c7e7aba7901ac0196e864fe7985492b737dbab65"},
    {file = "llvmlite-0.42.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb3975787f13eb97629052edb5017f6c170eebc1c14a0433e8089e5db43bcce6"},
    {file = "llvmlite-0.42.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5bece0cdf77f22379f19b1959ccd7aee518afa4afbd3656c6365865f84903f9"},
    {file = "llvmlite-0.42.0-cp311-cp311-win_amd64.whl", hash = "sha256:7e0c4c11c8c2aa9b0701f91b799cb9134a6a6de51444eff5a9087fc7c1384275"},
    {file = "llvmlite-0.42.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:08fa9ab02b0d0179c688a4216b8939138266519aaa0aa94f1195a8542faedb56"},
    {file = "llvmlite-0.42.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b2fce7d355068494d1e42202c7aff25d50c462584233013eb4470c33b995e3ee"},
    {file = "llvmlite-0.42.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebe66a86dc44634b59a3bc860c7b20d26d9aaffcd30364ebe8ba79161a9121f4"},
    {file = "llvmlite-0.42.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d47494552559e00d81bfb836cf1c4d5a5062e54102cc5767d5aa1e77ccd2505c"},
    {file = "llvmlite-0.42.0-cp312-cp312-win_amd64.whl", hash = "sha256:05cb7e9b6ce69165ce4d1b994fbdedca0c62492e537b0cc86141b6e2c78d5888"},
    {file = "llvmlite-0.42.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bdd3888544538a94d7ec99e7c62a0cdd8833609c85f0c23fcb6c5c591aec60ad"},
    {file = "llvmlite-0.42.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d0936c2067a67fb8816c908d5457d63eba3e2b17e515c5fe00e5ee2bace06040"},
    {file = "llvmlite-0.42.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a78ab89f1924fc11482209f6799a7a3fc74ddc80425a7a3e0e8174af0e9e2301"},
    {file = "llvmlite-0.42.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7599b65c7af7abbc978dbf345712c60fd596aa5670496561cc10e8a71cebfb2"},
    {file = "llvmlite-0.42.0-cp39-cp39-win_amd64.whl", hash = "sha256:43d65cc4e206c2e902c1004dd5418417c4efa6c1d04df05c6c5675a27e8ca90e"},
    {file = "llvmlite-0.42.0.tar.gz", hash = "sha256:f92b09243c0cc3f457da8b983f67bd8e1295d0f5b3746c7a1861d7a99403854a"},
]

[[package]]
name = "locust"
version = "2.28.0"
@@ -1864,6 +1894,40 @@ doc = ["nb2plots (>=0.7)", "nbconvert (<7.9)", "numpydoc (>=1.6)", "pillow (>=9.
extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.11)", "sympy (>=1.10)"]
test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"]

[[package]]
name = "numba"
version = "0.59.1"
description = "compiling Python code using LLVM"
optional = false
python-versions = ">=3.9"
files = [
    {file = "numba-0.59.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:97385a7f12212c4f4bc28f648720a92514bee79d7063e40ef66c2d30600fd18e"},
    {file = "numba-0.59.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0b77aecf52040de2a1eb1d7e314497b9e56fba17466c80b457b971a25bb1576d"},
    {file = "numba-0.59.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3476a4f641bfd58f35ead42f4dcaf5f132569c4647c6f1360ccf18ee4cda3990"},
    {file = "numba-0.59.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:525ef3f820931bdae95ee5379c670d5c97289c6520726bc6937a4a7d4230ba24"},
    {file = "numba-0.59.1-cp310-cp310-win_amd64.whl", hash = "sha256:990e395e44d192a12105eca3083b61307db7da10e093972ca285c85bef0963d6"},
    {file = "numba-0.59.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:43727e7ad20b3ec23ee4fc642f5b61845c71f75dd2825b3c234390c6d8d64051"},
    {file = "numba-0.59.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:411df625372c77959570050e861981e9d196cc1da9aa62c3d6a836b5cc338966"},
    {file = "numba-0.59.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2801003caa263d1e8497fb84829a7ecfb61738a95f62bc05693fcf1733e978e4"},
    {file = "numba-0.59.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:dd2842fac03be4e5324ebbbd4d2d0c8c0fc6e0df75c09477dd45b288a0777389"},
    {file = "numba-0.59.1-cp311-cp311-win_amd64.whl", hash = "sha256:0594b3dfb369fada1f8bb2e3045cd6c61a564c62e50cf1f86b4666bc721b3450"},
    {file = "numba-0.59.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1cce206a3b92836cdf26ef39d3a3242fec25e07f020cc4feec4c4a865e340569"},
    {file = "numba-0.59.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8c8b4477763cb1fbd86a3be7050500229417bf60867c93e131fd2626edb02238"},
    {file = "numba-0.59.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d80bce4ef7e65bf895c29e3889ca75a29ee01da80266a01d34815918e365835"},
    {file = "numba-0.59.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f7ad1d217773e89a9845886401eaaab0a156a90aa2f179fdc125261fd1105096"},
    {file = "numba-0.59.1-cp312-cp312-win_amd64.whl", hash = "sha256:5bf68f4d69dd3a9f26a9b23548fa23e3bcb9042e2935257b471d2a8d3c424b7f"},
    {file = "numba-0.59.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4e0318ae729de6e5dbe64c75ead1a95eb01fabfe0e2ebed81ebf0344d32db0ae"},
    {file = "numba-0.59.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0f68589740a8c38bb7dc1b938b55d1145244c8353078eea23895d4f82c8b9ec1"},
    {file = "numba-0.59.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:649913a3758891c77c32e2d2a3bcbedf4a69f5fea276d11f9119677c45a422e8"},
    {file = "numba-0.59.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9712808e4545270291d76b9a264839ac878c5eb7d8b6e02c970dc0ac29bc8187"},
    {file = "numba-0.59.1-cp39-cp39-win_amd64.whl", hash = "sha256:8d51ccd7008a83105ad6a0082b6a2b70f1142dc7cfd76deb8c5a862367eb8c86"},
    {file = "numba-0.59.1.tar.gz", hash = "sha256:76f69132b96028d2774ed20415e8c528a34e3299a40581bae178f0994a2f370b"},
]

[package.dependencies]
llvmlite = "==0.42.*"
numpy = ">=1.22,<1.27"

[[package]]
name = "numpy"
version = "1.26.3"
@@ -2037,11 +2101,8 @@ description = "ONNX Runtime is a runtime accelerator for Machine Learning models
optional = false
python-versions = "*"
files = [
    {file = "onnxruntime_openvino-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ed693011b472f9a617b2d5c4785d5fa1e1b77f7cb2b02e47b899534ec6c6396"},
    {file = "onnxruntime_openvino-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:5152b5e56e83e022ced2986700d68dd8ba7b1466761725ce774f679c5710ab87"},
    {file = "onnxruntime_openvino-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ce3b1aa06d6b8b732d314d217028ec4735de5806215c44d3bdbcad03b9260d5"},
    {file = "onnxruntime_openvino-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:21133a701bb07ea19e01f48b8c23beee575f2e879f49173843f275d7c91a625a"},
    {file = "onnxruntime_openvino-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76824dac3c392ad4b812f29c18be2055ab3bba2e3c111e44baae847b33d5b081"},
]

[package.dependencies]
@@ -2054,18 +2115,18 @@ sympy = "*"

[[package]]
name = "opencv-python-headless"
version = "4.10.0.82"
version = "4.9.0.80"
description = "Wrapper package for OpenCV python bindings."
optional = false
python-versions = ">=3.6"
files = [
    {file = "opencv-python-headless-4.10.0.82.tar.gz", hash = "sha256:de9e742c1b9540816fbd115b0b03841d41ed0c65566b0d7a5371f98b131b7e6d"},
    {file = "opencv_python_headless-4.10.0.82-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:a09ed50ba21cc5bf5d436cb0e784ad09c692d6b1d1454252772f6c8f2c7b4088"},
    {file = "opencv_python_headless-4.10.0.82-cp37-abi3-macosx_12_0_x86_64.whl", hash = "sha256:977a5fd21e1fe0d3d2134887db4441f8725abeae95150126302f31fcd9f548fa"},
    {file = "opencv_python_headless-4.10.0.82-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db4ec6755838b0be12510bfc9ffb014779c612418f11f4f7e6f505c36124a3aa"},
    {file = "opencv_python_headless-4.10.0.82-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10a37fa5276967ecf6eb297295b16b28b7a2eb3b568ca0ee469fb1a5954de298"},
    {file = "opencv_python_headless-4.10.0.82-cp37-abi3-win32.whl", hash = "sha256:94736e9b322d13db4768fd35588ad5e8995e78e207263076bfbee18aac835ad5"},
    {file = "opencv_python_headless-4.10.0.82-cp37-abi3-win_amd64.whl", hash = "sha256:c1822fa23d1641c0249ed5eb906f4c385f7959ff1bd601a776d56b0c18914af4"},
    {file = "opencv-python-headless-4.9.0.80.tar.gz", hash = "sha256:71a4cd8cf7c37122901d8e81295db7fb188730e33a0e40039a4e59c1030b0958"},
    {file = "opencv_python_headless-4.9.0.80-cp37-abi3-macosx_10_16_x86_64.whl", hash = "sha256:2ea8a2edc4db87841991b2fbab55fc07b97ecb602e0f47d5d485bd75cee17c1a"},
    {file = "opencv_python_headless-4.9.0.80-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:e0ee54e27be493e8f7850847edae3128e18b540dac1d7b2e4001b8944e11e1c6"},
    {file = "opencv_python_headless-4.9.0.80-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57ce2865e8fec431c6f97a81e9faaf23fa5be61011d0a75ccf47a3c0d65fa73d"},
    {file = "opencv_python_headless-4.9.0.80-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:976656362d68d9f40a5c66f83901430538002465f7db59142784f3893918f3df"},
    {file = "opencv_python_headless-4.9.0.80-cp37-abi3-win32.whl", hash = "sha256:11e3849d83e6651d4e7699aadda9ec7ed7c38957cbbcb99db074f2a2d2de9670"},
    {file = "opencv_python_headless-4.9.0.80-cp37-abi3-win_amd64.whl", hash = "sha256:a8056c2cb37cd65dfcdf4153ca16f7362afcf3a50d600d6bb69c660fc61ee29c"},
]

[package.dependencies]
@@ -2438,13 +2499,13 @@ files = [

[[package]]
name = "pytest"
version = "8.2.2"
version = "8.2.1"
description = "pytest: simple powerful testing with Python"
optional = false
python-versions = ">=3.8"
files = [
    {file = "pytest-8.2.2-py3-none-any.whl", hash = "sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343"},
    {file = "pytest-8.2.2.tar.gz", hash = "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977"},
    {file = "pytest-8.2.1-py3-none-any.whl", hash = "sha256:faccc5d332b8c3719f40283d0d44aa5cf101cec36f88cde9ed8f2bc0538612b1"},
    {file = "pytest-8.2.1.tar.gz", hash = "sha256:5046e5b46d8e4cac199c373041f26be56fdb81eb4e67dc11d4e10811fc3408fd"},
]

[package.dependencies]
@@ -2601,7 +2662,6 @@ files = [
    {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
    {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
    {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
    {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
    {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
    {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
    {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
@@ -2799,28 +2859,28 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"]

[[package]]
name = "ruff"
version = "0.4.8"
version = "0.4.7"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
    {file = "ruff-0.4.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:7663a6d78f6adb0eab270fa9cf1ff2d28618ca3a652b60f2a234d92b9ec89066"},
    {file = "ruff-0.4.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:eeceb78da8afb6de0ddada93112869852d04f1cd0f6b80fe464fd4e35c330913"},
    {file = "ruff-0.4.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aad360893e92486662ef3be0a339c5ca3c1b109e0134fcd37d534d4be9fb8de3"},
    {file = "ruff-0.4.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:284c2e3f3396fb05f5f803c9fffb53ebbe09a3ebe7dda2929ed8d73ded736deb"},
    {file = "ruff-0.4.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7354f921e3fbe04d2a62d46707e569f9315e1a613307f7311a935743c51a764"},
    {file = "ruff-0.4.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:72584676164e15a68a15778fd1b17c28a519e7a0622161eb2debdcdabdc71883"},
    {file = "ruff-0.4.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9678d5c9b43315f323af2233a04d747409d1e3aa6789620083a82d1066a35199"},
    {file = "ruff-0.4.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704977a658131651a22b5ebeb28b717ef42ac6ee3b11e91dc87b633b5d83142b"},
    {file = "ruff-0.4.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d05f8d6f0c3cce5026cecd83b7a143dcad503045857bc49662f736437380ad45"},
    {file = "ruff-0.4.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6ea874950daca5697309d976c9afba830d3bf0ed66887481d6bca1673fc5b66a"},
    {file = "ruff-0.4.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:fc95aac2943ddf360376be9aa3107c8cf9640083940a8c5bd824be692d2216dc"},
    {file = "ruff-0.4.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:384154a1c3f4bf537bac69f33720957ee49ac8d484bfc91720cc94172026ceed"},
    {file = "ruff-0.4.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e9d5ce97cacc99878aa0d084c626a15cd21e6b3d53fd6f9112b7fc485918e1fa"},
    {file = "ruff-0.4.8-py3-none-win32.whl", hash = "sha256:6d795d7639212c2dfd01991259460101c22aabf420d9b943f153ab9d9706e6a9"},
    {file = "ruff-0.4.8-py3-none-win_amd64.whl", hash = "sha256:e14a3a095d07560a9d6769a72f781d73259655919d9b396c650fc98a8157555d"},
    {file = "ruff-0.4.8-py3-none-win_arm64.whl", hash = "sha256:14019a06dbe29b608f6b7cbcec300e3170a8d86efaddb7b23405cb7f7dcaf780"},
    {file = "ruff-0.4.8.tar.gz", hash = "sha256:16d717b1d57b2e2fd68bd0bf80fb43931b79d05a7131aa477d66fc40fbd86268"},
    {file = "ruff-0.4.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:e089371c67892a73b6bb1525608e89a2aca1b77b5440acf7a71dda5dac958f9e"},
    {file = "ruff-0.4.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:10f973d521d910e5f9c72ab27e409e839089f955be8a4c8826601a6323a89753"},
    {file = "ruff-0.4.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59c3d110970001dfa494bcd95478e62286c751126dfb15c3c46e7915fc49694f"},
    {file = "ruff-0.4.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa9773c6c00f4958f73b317bc0fd125295110c3776089f6ef318f4b775f0abe4"},
    {file = "ruff-0.4.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07fc80bbb61e42b3b23b10fda6a2a0f5a067f810180a3760c5ef1b456c21b9db"},
    {file = "ruff-0.4.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:fa4dafe3fe66d90e2e2b63fa1591dd6e3f090ca2128daa0be33db894e6c18648"},
    {file = "ruff-0.4.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7c0083febdec17571455903b184a10026603a1de078428ba155e7ce9358c5f6"},
    {file = "ruff-0.4.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ad1b20e66a44057c326168437d680a2166c177c939346b19c0d6b08a62a37589"},
    {file = "ruff-0.4.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbf5d818553add7511c38b05532d94a407f499d1a76ebb0cad0374e32bc67202"},
    {file = "ruff-0.4.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:50e9651578b629baec3d1513b2534de0ac7ed7753e1382272b8d609997e27e83"},
    {file = "ruff-0.4.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8874a9df7766cb956b218a0a239e0a5d23d9e843e4da1e113ae1d27ee420877a"},
    {file = "ruff-0.4.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:b9de9a6e49f7d529decd09381c0860c3f82fa0b0ea00ea78409b785d2308a567"},
    {file = "ruff-0.4.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:13a1768b0691619822ae6d446132dbdfd568b700ecd3652b20d4e8bc1e498f78"},
    {file = "ruff-0.4.7-py3-none-win32.whl", hash = "sha256:769e5a51df61e07e887b81e6f039e7ed3573316ab7dd9f635c5afaa310e4030e"},
    {file = "ruff-0.4.7-py3-none-win_amd64.whl", hash = "sha256:9e3ab684ad403a9ed1226894c32c3ab9c2e0718440f6f50c7c5829932bc9e054"},
    {file = "ruff-0.4.7-py3-none-win_arm64.whl", hash = "sha256:10f2204b9a613988e3484194c2c9e96a22079206b22b787605c255f130db5ed7"},
    {file = "ruff-0.4.7.tar.gz", hash = "sha256:2331d2b051dc77a289a653fcc6a42cce357087c5975738157cd966590b18b5e1"},
]

[[package]]
@@ -3572,4 +3632,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.10,<3.12"
content-hash = "db51ad1e631b569e106927683a13124252bd80974def1f2edbe23ac87d89c461"
content-hash = "a44e079d565fc1166458690ca2dc5826e198cc07ccab0ebaf71b5ab5e0eed150"

@@ -1,6 +1,6 @@
[tool.poetry]
name = "machine-learning"
version = "1.106.3"
version = "1.105.1"
description = ""
authors = ["Hau Tran <alex.tran1502@gmail.com>"]
readme = "README.md"
@@ -23,6 +23,7 @@ orjson = ">=3.9.5"
gunicorn = ">=21.1.0"
huggingface-hub = ">=0.20.1,<1.0"
tokenizers = ">=0.15.0,<1.0"
numba = "^0.59.1"

[tool.poetry.group.dev.dependencies]
mypy = ">=1.3.0"

@@ -1,17 +0,0 @@
#! /usr/bin/env node
const { readFileSync, writeFileSync } = require('node:fs');

const nextVersion = process.argv[2];
if (!nextVersion) {
  console.log('Usage: archive-version.js <version>');
  process.exit(1);
}

const filename = './docs/static/archived-versions.json';
const oldVersions = JSON.parse(readFileSync(filename));
const newVersions = [
  { label: `v${nextVersion}`, url: `https://v${nextVersion}.archive.immich.app` },
  ...oldVersions,
];

writeFileSync(filename, JSON.stringify(newVersions, null, 2) + '\n');

@@ -66,12 +66,10 @@ if [ "$CURRENT_SERVER" != "$NEXT_SERVER" ]; then
  npm --prefix server run build
  make open-api
  npm --prefix open-api/typescript-sdk version "$SERVER_PUMP"
  # TODO use $SERVER_PUMP once we pass 2.2.x
  npm --prefix cli version patch
  npm --prefix cli i --package-lock-only
  npm --prefix web version "$SERVER_PUMP"
  npm --prefix web i --package-lock-only
  npm --prefix e2e version "$SERVER_PUMP"
  npm --prefix web i --package-lock-only
  npm --prefix cli i --package-lock-only
  npm --prefix e2e i --package-lock-only
  poetry --directory machine-learning version "$SERVER_PUMP"
fi
@@ -85,6 +83,4 @@ sed -i "s/version_number: \"$CURRENT_SERVER\"$/version_number: \"$NEXT_SERVER\"/
sed -i "s/\"android\.injected\.version\.code\" => $CURRENT_MOBILE,/\"android\.injected\.version\.code\" => $NEXT_MOBILE,/" mobile/android/fastlane/Fastfile
sed -i "s/^version: $CURRENT_SERVER+$CURRENT_MOBILE$/version: $NEXT_SERVER+$NEXT_MOBILE/" mobile/pubspec.yaml

./misc/release/archive-version.js "$NEXT_SERVER"

echo "IMMICH_VERSION=v$NEXT_SERVER" >>"$GITHUB_ENV"

@@ -35,8 +35,8 @@ platform :android do
      task: 'bundle',
      build_type: 'Release',
      properties: {
        "android.injected.version.code" => 143,
        "android.injected.version.name" => "1.106.3",
        "android.injected.version.code" => 140,
        "android.injected.version.name" => "1.105.1",
      }
    )
    upload_to_play_store(skip_upload_apk: true, skip_upload_images: true, skip_upload_screenshots: true, aab: '../build/app/outputs/bundle/release/app-release.aab')

@@ -5,17 +5,17 @@
  <testcase classname="fastlane.lanes" name="0: default_platform" time="0.000381">
  <testcase classname="fastlane.lanes" name="0: default_platform" time="0.000374">
  </testcase>
  <testcase classname="fastlane.lanes" name="1: bundleRelease" time="52.832426">
  <testcase classname="fastlane.lanes" name="1: bundleRelease" time="84.292464">
  </testcase>
  <testcase classname="fastlane.lanes" name="2: upload_to_play_store" time="27.616558">
  <testcase classname="fastlane.lanes" name="2: upload_to_play_store" time="33.336934">
  </testcase>

@@ -383,7 +383,7 @@
        CODE_SIGN_ENTITLEMENTS = Runner/RunnerProfile.entitlements;
        CODE_SIGN_IDENTITY = "Apple Development";
        CODE_SIGN_STYLE = Automatic;
        CURRENT_PROJECT_VERSION = 160;
        CURRENT_PROJECT_VERSION = 157;
        DEVELOPMENT_TEAM = 2F67MQ8R79;
        ENABLE_BITCODE = NO;
        INFOPLIST_FILE = Runner/Info.plist;
@@ -525,7 +525,7 @@
        CLANG_ENABLE_MODULES = YES;
        CODE_SIGN_IDENTITY = "Apple Development";
        CODE_SIGN_STYLE = Automatic;
        CURRENT_PROJECT_VERSION = 160;
        CURRENT_PROJECT_VERSION = 157;
        DEVELOPMENT_TEAM = 2F67MQ8R79;
        ENABLE_BITCODE = NO;
        INFOPLIST_FILE = Runner/Info.plist;
@@ -553,7 +553,7 @@
        CLANG_ENABLE_MODULES = YES;
        CODE_SIGN_IDENTITY = "Apple Development";
        CODE_SIGN_STYLE = Automatic;
        CURRENT_PROJECT_VERSION = 160;
        CURRENT_PROJECT_VERSION = 157;
        DEVELOPMENT_TEAM = 2F67MQ8R79;
        ENABLE_BITCODE = NO;
        INFOPLIST_FILE = Runner/Info.plist;

@@ -58,11 +58,11 @@
	<key>CFBundlePackageType</key>
	<string>APPL</string>
	<key>CFBundleShortVersionString</key>
	<string>1.106.3</string>
	<string>1.105.0</string>
	<key>CFBundleSignature</key>
	<string>????</string>
	<key>CFBundleVersion</key>
	<string>160</string>
	<string>157</string>
	<key>FLTEnableImpeller</key>
	<true />
	<key>ITSAppUsesNonExemptEncryption</key>

@@ -19,7 +19,7 @@ platform :ios do
  desc "iOS Beta"
  lane :beta do
    increment_version_number(
      version_number: "1.106.3"
      version_number: "1.105.1"
    )
    increment_build_number(
      build_number: latest_testflight_build_number + 1,

@@ -5,32 +5,32 @@
  <testcase classname="fastlane.lanes" name="0: default_platform" time="0.000491">
  <testcase classname="fastlane.lanes" name="0: default_platform" time="0.020864">
  </testcase>
  <testcase classname="fastlane.lanes" name="1: increment_version_number" time="39.414297">
  <testcase classname="fastlane.lanes" name="1: increment_version_number" time="0.917777">
  </testcase>
  <testcase classname="fastlane.lanes" name="2: latest_testflight_build_number" time="32.521647">
  <testcase classname="fastlane.lanes" name="2: latest_testflight_build_number" time="5.283943">
  </testcase>
  <testcase classname="fastlane.lanes" name="3: increment_build_number" time="0.511733">
  <testcase classname="fastlane.lanes" name="3: increment_build_number" time="0.944748">
  </testcase>
  <testcase classname="fastlane.lanes" name="4: build_app" time="202.628277">
  <testcase classname="fastlane.lanes" name="4: build_app" time="215.971639">
  </testcase>
  <testcase classname="fastlane.lanes" name="5: upload_to_testflight" time="73.861852">
  <testcase classname="fastlane.lanes" name="5: upload_to_testflight" time="76.674601">
  </testcase>

@@ -75,7 +75,9 @@ class VideoViewerPage extends HookConsumerWidget {
    // Also sets the error if there is an error in the playback
    void updateVideoPlayback() {
      final videoPlayback = VideoPlaybackValue.fromController(controller);
      ref.read(videoPlaybackValueProvider.notifier).value = videoPlayback;
      if (!loopVideo) {
        ref.read(videoPlaybackValueProvider.notifier).value = videoPlayback;
      }
      final state = videoPlayback.state;

      // Enable the WakeLock while the video is playing
@@ -108,9 +110,7 @@ class VideoViewerPage extends HookConsumerWidget {
    }

    // Subscribes to listener
    Future.microtask(() {
      controller.addListener(updateVideoPlayback);
    });
    controller.addListener(updateVideoPlayback);
    return () {
      // Removes listener when we dispose
      controller.removeListener(updateVideoPlayback);

@@ -3,7 +3,7 @@ import 'package:flutter/material.dart';
import 'package:flutter_hooks/flutter_hooks.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/widgets/forms/login/login_form.dart';
import 'package:immich_mobile/widgets/forms/login_form.dart';
import 'package:immich_mobile/routing/router.dart';
import 'package:package_info_plus/package_info_plus.dart';

@@ -50,19 +50,8 @@ class AssetNotifier extends StateNotifier<bool> {
      await clearAssetsAndAlbums(_db);
      log.info("Manual refresh requested, cleared assets and albums from db");
    }

    final bool changedUsers = await _userService.refreshUsers();

    final assetCount = await _db.assets.count();

    if (assetCount == 0) {
      debugPrint("First sync, refreshing all assets");
      await _assetService.refreshRemoteAssets(firstSync: true);
      debugPrint("First sync, DONE refreshing all assets");
    }
    debugPrint("First sync, CONTINUE refreshing all assets");
    final bool newRemote = await _assetService.refreshRemoteAssets();

    final bool newLocal = await _albumService.refreshDeviceAlbums();
    debugPrint(
      "changedUsers: $changedUsers, newRemote: $newRemote, newLocal: $newLocal",

@@ -31,9 +31,6 @@ Future<VideoPlayerController> videoPlayerController(
    controller = VideoPlayerController.networkUrl(
      url,
      httpHeaders: {"x-immich-user-token": accessToken},
      videoPlayerOptions: asset.livePhotoVideoId != null
          ? VideoPlayerOptions(mixWithOthers: true)
          : VideoPlayerOptions(mixWithOthers: false),
    );
  }

@@ -7,7 +7,7 @@ part of 'video_player_controller_provider.dart';
// **************************************************************************

String _$videoPlayerControllerHash() =>
    r'642469a44287188a7c301f5cad3df3e23c84d85c';
    r'40b31f7b1a73fab84c311b0f06bedf5322143cd9';

/// Copied from Dart SDK
class _SystemHash {

@@ -93,18 +93,4 @@ class VideoPlayerControls extends StateNotifier<VideoPlaybackControls> {
      pause: !state.pause,
    );
  }

  void restart() {
    state = VideoPlaybackControls(
      position: 0,
      mute: state.mute,
      pause: true,
    );

    state = VideoPlaybackControls(
      position: 0,
      mute: state.mute,
      pause: false,
    );
  }
}

@@ -6,7 +6,7 @@ part of 'map_state.provider.dart';
// RiverpodGenerator
// **************************************************************************

String _$mapStateNotifierHash() => r'31fafe17aa85c48379a22ed3db3cc94af59ce5b8';
String _$mapStateNotifierHash() => r'87a8623f726d438d115d5a15609c71372726ee2f';

/// See also [MapStateNotifier].
@ProviderFor(MapStateNotifier)

@@ -43,7 +43,7 @@ class AssetService {

  /// Checks the server for updated assets and updates the local database if
  /// required. Returns `true` if there were any changes.
  Future<bool> refreshRemoteAssets({bool firstSync = false}) async {
  Future<bool> refreshRemoteAssets() async {
    final syncedUserIds = await _db.eTags.where().idProperty().findAll();
    final List<User> syncedUsers = syncedUserIds.isEmpty
        ? []
@@ -57,7 +57,6 @@ class AssetService {
      getChangedAssets: _getRemoteAssetChanges,
      loadAssets: _getRemoteAssets,
      refreshUsers: _userService.getUsersFromServer,
      firstSync: firstSync,
    );
    debugPrint("refreshRemoteAssets full took ${sw.elapsedMilliseconds}ms");
    return changes;
@@ -98,14 +97,11 @@ class AssetService {
  }

  /// Returns `null` if the server state did not change, else list of assets
  Future<List<Asset>?> _getRemoteAssets(
    User user,
    DateTime until,
    bool firstSync,
  ) async {
    int chunkSize = firstSync ? 1000 : 10000;
  Future<List<Asset>?> _getRemoteAssets(User user, DateTime until) async {
    const int chunkSize = 10000;
    try {
      final List<Asset> allAssets = [];
      DateTime? lastCreationDate;
      String? lastId;
      // will break on error or once all assets are loaded
      while (true) {
@@ -113,23 +109,16 @@ class AssetService {
          limit: chunkSize,
          updatedUntil: until,
          lastId: lastId,
          lastCreationDate: lastCreationDate,
          userId: user.id,
        );
        log.fine("Requesting $chunkSize assets from $lastId");
        final List<AssetResponseDto>? assets =
            await _apiService.syncApi.getFullSyncForUser(dto);
        if (assets == null) return null;
        log.fine(
          "Received ${assets.length} assets from ${assets.firstOrNull?.id} to ${assets.lastOrNull?.id}",
        );
        allAssets.addAll(assets.map(Asset.remote));
        if (assets.length != chunkSize) break;
        if (assets.isEmpty) break;
        lastCreationDate = assets.last.fileCreatedAt;
        lastId = assets.last.id;

        if (firstSync) {
          // first sync only loads the first chunk
          break;
        }
      }
      return allAssets;
    } catch (error, stack) {

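The loop-exit change above (break when a page is shorter than chunkSize versus break only when a page comes back empty) trades one extra request at the end of the scan for robustness against servers that may legitimately return short pages mid-stream. A language-agnostic sketch of the resulting keyset-pagination pattern, in Python for brevity; fetch_page is a hypothetical stand-in for getFullSyncForUser:

# Keyset pagination sketch; fetch_page is a hypothetical stand-in for getFullSyncForUser.
def fetch_all(fetch_page, chunk_size=10_000):
    items, last_id = [], None
    while True:
        page = fetch_page(limit=chunk_size, after=last_id)
        if not page:  # stop only on a truly empty page, tolerating short ones
            break
        items.extend(page)
        last_id = page[-1]["id"]
    return items
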
@@ -46,18 +46,14 @@ class SyncService {
    List<User> users,
    DateTime since,
  ) getChangedAssets,
  required FutureOr<List<Asset>?> Function(
    User user,
    DateTime until,
    bool firstSync,
  ) loadAssets,
  required FutureOr<List<Asset>?> Function(User user, DateTime until)
      loadAssets,
  required FutureOr<List<User>?> Function() refreshUsers,
  required bool firstSync,
}) =>
    _lock.run(
      () async =>
          await _syncRemoteAssetChanges(users, getChangedAssets) ??
          await _syncRemoteAssetsFull(refreshUsers, loadAssets, firstSync),
          await _syncRemoteAssetsFull(refreshUsers, loadAssets),
    );

/// Syncs remote albums to the database
@@ -216,9 +212,7 @@ class SyncService {
  /// Syncs assets by loading and comparing all assets from the server.
  Future<bool> _syncRemoteAssetsFull(
    FutureOr<List<User>?> Function() refreshUsers,
    FutureOr<List<Asset>?> Function(User user, DateTime until, bool firstSync)
        loadAssets,
    bool firstSync,
    FutureOr<List<Asset>?> Function(User user, DateTime until) loadAssets,
  ) async {
    final serverUsers = await refreshUsers();
    if (serverUsers == null) {
@@ -234,19 +228,17 @@ class SyncService {
        .findAll();
    bool changes = false;
    for (User u in users) {
      changes |= await _syncRemoteAssetsForUser(u, loadAssets, firstSync);
      changes |= await _syncRemoteAssetsForUser(u, loadAssets);
    }
    return changes;
  }

  Future<bool> _syncRemoteAssetsForUser(
    User user,
    FutureOr<List<Asset>?> Function(User user, DateTime until, bool firstSync)
        loadAssets,
    bool firstSync,
    FutureOr<List<Asset>?> Function(User user, DateTime until) loadAssets,
  ) async {
    final DateTime now = DateTime.now().toUtc();
    final List<Asset>? remote = await loadAssets(user, now, firstSync);
    final List<Asset>? remote = await loadAssets(user, now);
    if (remote == null) {
      return false;
    }

@@ -1,17 +0,0 @@
String? getVersionCompatibilityMessage(
  int appMajor,
  int appMinor,
  int serverMajor,
  int serverMinor,
) {
  if (serverMajor != appMajor) {
    return 'Your app major version is not compatible with the server!';
  }

  // Add latest compat info up top
  if (serverMinor < 106 && appMinor >= 106) {
    return 'Your app minor version is not compatible with the server! Please update your server to version v1.106.0 or newer to login';
  }

  return null;
}

@@ -64,8 +64,6 @@ class CustomVideoPlayerControls extends HookConsumerWidget {
      final state = ref.read(videoPlaybackValueProvider).state;
      if (state == VideoPlaybackState.playing) {
        ref.read(videoPlayerControlsProvider.notifier).pause();
      } else if (state == VideoPlaybackState.completed) {
        ref.read(videoPlayerControlsProvider.notifier).restart();
      } else {
        ref.read(videoPlayerControlsProvider.notifier).play();
      }

@@ -1,49 +0,0 @@
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';

class EmailInput extends StatelessWidget {
  final TextEditingController controller;
  final FocusNode? focusNode;
  final Function()? onSubmit;

  const EmailInput({
    super.key,
    required this.controller,
    this.focusNode,
    this.onSubmit,
  });

  String? _validateInput(String? email) {
    if (email == null || email == '') return null;
    if (email.endsWith(' ')) return 'login_form_err_trailing_whitespace'.tr();
    if (email.startsWith(' ')) return 'login_form_err_leading_whitespace'.tr();
    if (email.contains(' ') || !email.contains('@')) {
      return 'login_form_err_invalid_email'.tr();
    }
    return null;
  }

  @override
  Widget build(BuildContext context) {
    return TextFormField(
      autofocus: true,
      controller: controller,
      decoration: InputDecoration(
        labelText: 'login_form_label_email'.tr(),
        border: const OutlineInputBorder(),
        hintText: 'login_form_email_hint'.tr(),
        hintStyle: const TextStyle(
          fontWeight: FontWeight.normal,
          fontSize: 14,
        ),
      ),
      validator: _validateInput,
      autovalidateMode: AutovalidateMode.always,
      autofillHints: const [AutofillHints.email],
      keyboardType: TextInputType.emailAddress,
      onFieldSubmitted: (_) => onSubmit?.call(),
      focusNode: focusNode,
      textInputAction: TextInputAction.next,
    );
  }
}

@@ -1,21 +0,0 @@
import 'package:flutter/material.dart';

class LoadingIcon extends StatelessWidget {
  const LoadingIcon({super.key});

  @override
  Widget build(BuildContext context) {
    return const Padding(
      padding: EdgeInsets.only(top: 18.0),
      child: SizedBox(
        width: 24,
        height: 24,
        child: FittedBox(
          child: CircularProgressIndicator(
            strokeWidth: 2,
          ),
        ),
      ),
    );
  }
}

@@ -1,27 +0,0 @@
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';

class LoginButton extends ConsumerWidget {
  final Function() onPressed;

  const LoginButton({
    super.key,
    required this.onPressed,
  });

  @override
  Widget build(BuildContext context, WidgetRef ref) {
    return ElevatedButton.icon(
      style: ElevatedButton.styleFrom(
        padding: const EdgeInsets.symmetric(vertical: 12),
      ),
      onPressed: onPressed,
      icon: const Icon(Icons.login_rounded),
      label: const Text(
        "login_form_button_text",
        style: TextStyle(fontSize: 14, fontWeight: FontWeight.bold),
      ).tr(),
    );
  }
}

@@ -1,34 +0,0 @@
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';

class OAuthLoginButton extends ConsumerWidget {
  final TextEditingController serverEndpointController;
  final ValueNotifier<bool> isLoading;
  final String buttonLabel;
  final Function() onPressed;

  const OAuthLoginButton({
    super.key,
    required this.serverEndpointController,
    required this.isLoading,
    required this.buttonLabel,
    required this.onPressed,
  });

  @override
  Widget build(BuildContext context, WidgetRef ref) {
    return ElevatedButton.icon(
      style: ElevatedButton.styleFrom(
        backgroundColor: context.primaryColor.withAlpha(230),
        padding: const EdgeInsets.symmetric(vertical: 12),
      ),
      onPressed: onPressed,
      icon: const Icon(Icons.pin_rounded),
      label: Text(
        buttonLabel,
        style: const TextStyle(fontSize: 14, fontWeight: FontWeight.bold),
      ),
    );
  }
}

@@ -1,49 +0,0 @@
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
import 'package:flutter_hooks/flutter_hooks.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';

class PasswordInput extends HookConsumerWidget {
  final TextEditingController controller;
  final FocusNode? focusNode;
  final Function()? onSubmit;

  const PasswordInput({
    super.key,
    required this.controller,
    this.focusNode,
    this.onSubmit,
  });

  @override
  Widget build(BuildContext context, WidgetRef ref) {
    final isPasswordVisible = useState<bool>(false);

    return TextFormField(
      obscureText: !isPasswordVisible.value,
      controller: controller,
      decoration: InputDecoration(
        labelText: 'login_form_label_password'.tr(),
        border: const OutlineInputBorder(),
        hintText: 'login_form_password_hint'.tr(),
        hintStyle: const TextStyle(
          fontWeight: FontWeight.normal,
          fontSize: 14,
        ),
        suffixIcon: IconButton(
          onPressed: () => isPasswordVisible.value = !isPasswordVisible.value,
          icon: Icon(
            isPasswordVisible.value
                ? Icons.visibility_off_sharp
                : Icons.visibility_sharp,
          ),
        ),
      ),
      autofillHints: const [AutofillHints.password],
      keyboardType: TextInputType.text,
      onFieldSubmitted: (_) => onSubmit?.call(),
      focusNode: focusNode,
      textInputAction: TextInputAction.go,
    );
  }
}

@@ -1,54 +0,0 @@
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
import 'package:immich_mobile/utils/url_helper.dart';

class ServerEndpointInput extends StatelessWidget {
  final TextEditingController controller;
  final FocusNode focusNode;
  final Function()? onSubmit;

  const ServerEndpointInput({
    super.key,
    required this.controller,
    required this.focusNode,
    this.onSubmit,
  });

  String? _validateInput(String? url) {
    if (url == null || url.isEmpty) return null;

    final parsedUrl = Uri.tryParse(sanitizeUrl(url));
    if (parsedUrl == null ||
        !parsedUrl.isAbsolute ||
        !parsedUrl.scheme.startsWith("http") ||
        parsedUrl.host.isEmpty) {
      return 'login_form_err_invalid_url'.tr();
    }

    return null;
  }

  @override
  Widget build(BuildContext context) {
    return Padding(
      padding: const EdgeInsets.only(top: 16.0),
      child: TextFormField(
        controller: controller,
        decoration: InputDecoration(
          labelText: 'login_form_endpoint_url'.tr(),
          border: const OutlineInputBorder(),
          hintText: 'login_form_endpoint_hint'.tr(),
          errorMaxLines: 4,
        ),
        validator: _validateInput,
        autovalidateMode: AutovalidateMode.always,
        focusNode: focusNode,
        autofillHints: const [AutofillHints.url],
        keyboardType: TextInputType.url,
        autocorrect: false,
        onFieldSubmitted: (_) => onSubmit?.call(),
        textInputAction: TextInputAction.go,
      ),
    );
  }
}

@@ -15,19 +15,11 @@ import 'package:immich_mobile/providers/asset.provider.dart';
import 'package:immich_mobile/providers/authentication.provider.dart';
import 'package:immich_mobile/providers/backup/backup.provider.dart';
import 'package:immich_mobile/providers/server_info.provider.dart';
import 'package:immich_mobile/utils/version_compatibility.dart';
import 'package:immich_mobile/widgets/common/immich_logo.dart';
import 'package:immich_mobile/widgets/common/immich_title_text.dart';
import 'package:immich_mobile/widgets/common/immich_toast.dart';
import 'package:immich_mobile/utils/url_helper.dart';
import 'package:immich_mobile/widgets/forms/login/email_input.dart';
import 'package:immich_mobile/widgets/forms/login/loading_icon.dart';
import 'package:immich_mobile/widgets/forms/login/login_button.dart';
import 'package:immich_mobile/widgets/forms/login/o_auth_login_button.dart';
import 'package:immich_mobile/widgets/forms/login/password_input.dart';
import 'package:immich_mobile/widgets/forms/login/server_endpoint_input.dart';
import 'package:openapi/api.dart';
import 'package:package_info_plus/package_info_plus.dart';
import 'package:permission_handler/permission_handler.dart';

class LoginForm extends HookConsumerWidget {

@@ -53,31 +45,9 @@ class LoginForm extends HookConsumerWidget {
    final logoAnimationController = useAnimationController(
      duration: const Duration(seconds: 60),
    )..repeat();
    final serverInfo = ref.watch(serverInfoProvider);
    final warningMessage = useState<String?>(null);

    final ValueNotifier<String?> serverEndpoint = useState<String?>(null);

    checkVersionMismatch() async {
      try {
        final packageInfo = await PackageInfo.fromPlatform();
        final appVersion = packageInfo.version;
        final appMajorVersion = int.parse(appVersion.split('.')[0]);
        final appMinorVersion = int.parse(appVersion.split('.')[1]);
        final serverMajorVersion = serverInfo.serverVersion.major;
        final serverMinorVersion = serverInfo.serverVersion.minor;

        warningMessage.value = getVersionCompatibilityMessage(
          appMajorVersion,
          appMinorVersion,
          serverMajorVersion,
          serverMinorVersion,
        );
      } catch (error) {
        warningMessage.value = 'Error checking version compatibility';
      }
    }

    /// Fetch the server login credential and enable oAuth login if necessary
    /// Returns true if successful, false otherwise
    Future<bool> getServerLoginCredential() async {

@@ -338,40 +308,11 @@ class LoginForm extends HookConsumerWidget {
      );
    }

    buildVersionCompatWarning() {
      checkVersionMismatch();

      if (warningMessage.value == null) {
        return const SizedBox.shrink();
      }

      return Padding(
        padding: const EdgeInsets.only(bottom: 8.0),
        child: Container(
          padding: const EdgeInsets.all(16),
          decoration: BoxDecoration(
            color:
                context.isDarkTheme ? Colors.red.shade700 : Colors.red.shade100,
            borderRadius: BorderRadius.circular(8),
            border: Border.all(
              color:
                  context.isDarkTheme ? Colors.red.shade900 : Colors.red[200]!,
            ),
          ),
          child: Text(
            warningMessage.value!,
            textAlign: TextAlign.center,
          ),
        ),
      );
    }

    buildLogin() {
      return AutofillGroup(
        child: Column(
          crossAxisAlignment: CrossAxisAlignment.stretch,
          children: [
            buildVersionCompatWarning(),
            Text(
              sanitizeUrl(serverEndpointController.text),
              style: context.textTheme.displaySmall,

@@ -475,6 +416,7 @@ class LoginForm extends HookConsumerWidget {
            ),
          ],
        ),
        const SizedBox(height: 18),

        // Note: This used to have an AnimatedSwitcher, but was removed
        // because of https://github.com/flutter/flutter/issues/120874

@@ -488,3 +430,218 @@ class LoginForm extends HookConsumerWidget {
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
class ServerEndpointInput extends StatelessWidget {
|
||||
final TextEditingController controller;
|
||||
final FocusNode focusNode;
|
||||
final Function()? onSubmit;
|
||||
|
||||
const ServerEndpointInput({
|
||||
super.key,
|
||||
required this.controller,
|
||||
required this.focusNode,
|
||||
this.onSubmit,
|
||||
});
|
||||
|
||||
String? _validateInput(String? url) {
|
||||
if (url == null || url.isEmpty) return null;
|
||||
|
||||
final parsedUrl = Uri.tryParse(sanitizeUrl(url));
|
||||
if (parsedUrl == null ||
|
||||
!parsedUrl.isAbsolute ||
|
||||
!parsedUrl.scheme.startsWith("http") ||
|
||||
parsedUrl.host.isEmpty) {
|
||||
return 'login_form_err_invalid_url'.tr();
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
@override
|
||||
Widget build(BuildContext context) {
|
||||
return TextFormField(
|
||||
controller: controller,
|
||||
decoration: InputDecoration(
|
||||
labelText: 'login_form_endpoint_url'.tr(),
|
||||
border: const OutlineInputBorder(),
|
||||
hintText: 'login_form_endpoint_hint'.tr(),
|
||||
errorMaxLines: 4,
|
||||
),
|
||||
validator: _validateInput,
|
||||
autovalidateMode: AutovalidateMode.always,
|
||||
focusNode: focusNode,
|
||||
autofillHints: const [AutofillHints.url],
|
||||
keyboardType: TextInputType.url,
|
||||
autocorrect: false,
|
||||
onFieldSubmitted: (_) => onSubmit?.call(),
|
||||
textInputAction: TextInputAction.go,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
class EmailInput extends StatelessWidget {
|
||||
final TextEditingController controller;
|
||||
final FocusNode? focusNode;
|
||||
final Function()? onSubmit;
|
||||
|
||||
const EmailInput({
|
||||
super.key,
|
||||
required this.controller,
|
||||
this.focusNode,
|
||||
this.onSubmit,
|
||||
});
|
||||
|
||||
String? _validateInput(String? email) {
|
||||
if (email == null || email == '') return null;
|
||||
if (email.endsWith(' ')) return 'login_form_err_trailing_whitespace'.tr();
|
||||
if (email.startsWith(' ')) return 'login_form_err_leading_whitespace'.tr();
|
||||
if (email.contains(' ') || !email.contains('@')) {
|
||||
return 'login_form_err_invalid_email'.tr();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@override
|
||||
Widget build(BuildContext context) {
|
||||
return TextFormField(
|
||||
autofocus: true,
|
||||
controller: controller,
|
||||
decoration: InputDecoration(
|
||||
labelText: 'login_form_label_email'.tr(),
|
||||
border: const OutlineInputBorder(),
|
||||
hintText: 'login_form_email_hint'.tr(),
|
||||
hintStyle: const TextStyle(
|
||||
fontWeight: FontWeight.normal,
|
||||
fontSize: 14,
|
||||
),
|
||||
),
|
||||
validator: _validateInput,
|
||||
autovalidateMode: AutovalidateMode.always,
|
||||
autofillHints: const [AutofillHints.email],
|
||||
keyboardType: TextInputType.emailAddress,
|
||||
onFieldSubmitted: (_) => onSubmit?.call(),
|
||||
focusNode: focusNode,
|
||||
textInputAction: TextInputAction.next,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
class PasswordInput extends HookConsumerWidget {
|
||||
final TextEditingController controller;
|
||||
final FocusNode? focusNode;
|
||||
final Function()? onSubmit;
|
||||
|
||||
const PasswordInput({
|
||||
super.key,
|
||||
required this.controller,
|
||||
this.focusNode,
|
||||
this.onSubmit,
|
||||
});
|
||||
|
||||
@override
|
||||
Widget build(BuildContext context, WidgetRef ref) {
|
||||
final isPasswordVisible = useState<bool>(false);
|
||||
|
||||
return TextFormField(
|
||||
obscureText: !isPasswordVisible.value,
|
||||
controller: controller,
|
||||
decoration: InputDecoration(
|
||||
labelText: 'login_form_label_password'.tr(),
|
||||
border: const OutlineInputBorder(),
|
||||
hintText: 'login_form_password_hint'.tr(),
|
||||
hintStyle: const TextStyle(
|
||||
fontWeight: FontWeight.normal,
|
||||
fontSize: 14,
|
||||
),
|
||||
suffixIcon: IconButton(
|
||||
onPressed: () => isPasswordVisible.value = !isPasswordVisible.value,
|
||||
icon: Icon(
|
||||
isPasswordVisible.value
|
||||
? Icons.visibility_off_sharp
|
||||
: Icons.visibility_sharp,
|
||||
),
|
||||
),
|
||||
),
|
||||
autofillHints: const [AutofillHints.password],
|
||||
keyboardType: TextInputType.text,
|
||||
onFieldSubmitted: (_) => onSubmit?.call(),
|
||||
focusNode: focusNode,
|
||||
textInputAction: TextInputAction.go,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
class LoginButton extends ConsumerWidget {
  final Function() onPressed;

  const LoginButton({
    super.key,
    required this.onPressed,
  });

  @override
  Widget build(BuildContext context, WidgetRef ref) {
    return ElevatedButton.icon(
      style: ElevatedButton.styleFrom(
        padding: const EdgeInsets.symmetric(vertical: 12),
      ),
      onPressed: onPressed,
      icon: const Icon(Icons.login_rounded),
      label: const Text(
        "login_form_button_text",
        style: TextStyle(fontSize: 14, fontWeight: FontWeight.bold),
      ).tr(),
    );
  }
}

class OAuthLoginButton extends ConsumerWidget {
  final TextEditingController serverEndpointController;
  final ValueNotifier<bool> isLoading;
  final String buttonLabel;
  final Function() onPressed;

  const OAuthLoginButton({
    super.key,
    required this.serverEndpointController,
    required this.isLoading,
    required this.buttonLabel,
    required this.onPressed,
  });

  @override
  Widget build(BuildContext context, WidgetRef ref) {
    return ElevatedButton.icon(
      style: ElevatedButton.styleFrom(
        backgroundColor: context.primaryColor.withAlpha(230),
        padding: const EdgeInsets.symmetric(vertical: 12),
      ),
      onPressed: onPressed,
      icon: const Icon(Icons.pin_rounded),
      label: Text(
        buttonLabel,
        style: const TextStyle(fontSize: 14, fontWeight: FontWeight.bold),
      ),
    );
  }
}

class LoadingIcon extends StatelessWidget {
  const LoadingIcon({super.key});

  @override
  Widget build(BuildContext context) {
    return const Padding(
      padding: EdgeInsets.only(top: 18.0),
      child: SizedBox(
        width: 24,
        height: 24,
        child: FittedBox(
          child: CircularProgressIndicator(
            strokeWidth: 2,
          ),
        ),
      ),
    );
  }
}
2 mobile/openapi/README.md generated
@@ -3,7 +3,7 @@ Immich API

This Dart package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project:

- API version: 1.106.3
- API version: 1.105.1
- Generator version: 7.5.0
- Build package: org.openapitools.codegen.languages.DartClientCodegen

19 mobile/openapi/lib/model/asset_full_sync_dto.dart generated
@@ -13,12 +13,21 @@ part of openapi.api;

class AssetFullSyncDto {
  /// Returns a new [AssetFullSyncDto] instance.
  AssetFullSyncDto({
    this.lastCreationDate,
    this.lastId,
    required this.limit,
    required this.updatedUntil,
    this.userId,
  });

  ///
  /// Please note: This property should have been non-nullable! Since the specification file
  /// does not include a default value (using the "default:" property), however, the generated
  /// source code must fall back to having a nullable type.
  /// Consider adding a "default:" property in the specification file to hide this note.
  ///
  DateTime? lastCreationDate;

  ///
  /// Please note: This property should have been non-nullable! Since the specification file
  /// does not include a default value (using the "default:" property), however, the generated
@@ -42,6 +51,7 @@ class AssetFullSyncDto {

  @override
  bool operator ==(Object other) => identical(this, other) || other is AssetFullSyncDto &&
    other.lastCreationDate == lastCreationDate &&
    other.lastId == lastId &&
    other.limit == limit &&
    other.updatedUntil == updatedUntil &&
@@ -50,16 +60,22 @@ class AssetFullSyncDto {
  @override
  int get hashCode =>
    // ignore: unnecessary_parenthesis
    (lastCreationDate == null ? 0 : lastCreationDate!.hashCode) +
    (lastId == null ? 0 : lastId!.hashCode) +
    (limit.hashCode) +
    (updatedUntil.hashCode) +
    (userId == null ? 0 : userId!.hashCode);

  @override
  String toString() => 'AssetFullSyncDto[lastId=$lastId, limit=$limit, updatedUntil=$updatedUntil, userId=$userId]';
  String toString() => 'AssetFullSyncDto[lastCreationDate=$lastCreationDate, lastId=$lastId, limit=$limit, updatedUntil=$updatedUntil, userId=$userId]';

  Map<String, dynamic> toJson() {
    final json = <String, dynamic>{};
    if (this.lastCreationDate != null) {
      json[r'lastCreationDate'] = this.lastCreationDate!.toUtc().toIso8601String();
    } else {
      // json[r'lastCreationDate'] = null;
    }
    if (this.lastId != null) {
      json[r'lastId'] = this.lastId;
    } else {
@@ -83,6 +99,7 @@ class AssetFullSyncDto {
    final json = value.cast<String, dynamic>();

    return AssetFullSyncDto(
      lastCreationDate: mapDateTime(json, r'lastCreationDate', r''),
      lastId: mapValueOfType<String>(json, r'lastId'),
      limit: mapValueOfType<int>(json, r'limit')!,
      updatedUntil: mapDateTime(json, r'updatedUntil', r'')!,

@@ -53,7 +53,7 @@ class DuplicateDetectionConfig {

    return DuplicateDetectionConfig(
      enabled: mapValueOfType<bool>(json, r'enabled')!,
      maxDistance: (mapValueOfType<num>(json, r'maxDistance')!).toDouble(),
      maxDistance: mapValueOfType<double>(json, r'maxDistance')!,
    );
  }
  return null;

@@ -74,9 +74,9 @@ class FacialRecognitionConfig {

    return FacialRecognitionConfig(
      enabled: mapValueOfType<bool>(json, r'enabled')!,
      maxDistance: (mapValueOfType<num>(json, r'maxDistance')!).toDouble(),
      maxDistance: mapValueOfType<double>(json, r'maxDistance')!,
      minFaces: mapValueOfType<int>(json, r'minFaces')!,
      minScore: (mapValueOfType<num>(json, r'minScore')!).toDouble(),
      minScore: mapValueOfType<double>(json, r'minScore')!,
      modelName: mapValueOfType<String>(json, r'modelName')!,
    );
  }

@@ -84,7 +84,7 @@ class ServerStorageResponseDto {
      diskAvailableRaw: mapValueOfType<int>(json, r'diskAvailableRaw')!,
      diskSize: mapValueOfType<String>(json, r'diskSize')!,
      diskSizeRaw: mapValueOfType<int>(json, r'diskSizeRaw')!,
      diskUsagePercentage: (mapValueOfType<num>(json, r'diskUsagePercentage')!).toDouble(),
      diskUsagePercentage: mapValueOfType<double>(json, r'diskUsagePercentage')!,
      diskUse: mapValueOfType<String>(json, r'diskUse')!,
      diskUseRaw: mapValueOfType<int>(json, r'diskUseRaw')!,
    );

@@ -2,7 +2,7 @@ name: immich_mobile
description: Immich - selfhosted backup media file on mobile phone

publish_to: 'none'
version: 1.106.3+143
version: 1.105.1+140

environment:
  sdk: '>=3.3.0 <4.0.0'

@@ -78,9 +78,8 @@ void main() {
    final bool c1 = await s.syncRemoteAssetsToDb(
      users: [owner],
      getChangedAssets: _failDiff,
      loadAssets: (u, d, firstSync) => remoteAssets,
      loadAssets: (u, d) => remoteAssets,
      refreshUsers: () => [owner],
      firstSync: false,
    );
    expect(c1, isFalse);
    expect(db.assets.countSync(), 5);
@@ -100,9 +99,8 @@ void main() {
    final bool c1 = await s.syncRemoteAssetsToDb(
      users: [owner],
      getChangedAssets: _failDiff,
      loadAssets: (u, d, firstSync) => remoteAssets,
      loadAssets: (u, d) => remoteAssets,
      refreshUsers: () => [owner],
      firstSync: false,
    );
    expect(c1, isTrue);
    expect(db.assets.countSync(), 7);
@@ -122,18 +120,16 @@ void main() {
    final bool c1 = await s.syncRemoteAssetsToDb(
      users: [owner],
      getChangedAssets: _failDiff,
      loadAssets: (u, d, firstSync) => remoteAssets,
      loadAssets: (u, d) => remoteAssets,
      refreshUsers: () => [owner],
      firstSync: false,
    );
    expect(c1, isTrue);
    expect(db.assets.countSync(), 8);
    final bool c2 = await s.syncRemoteAssetsToDb(
      users: [owner],
      getChangedAssets: _failDiff,
      loadAssets: (u, d, firstSync) => remoteAssets,
      loadAssets: (u, d) => remoteAssets,
      refreshUsers: () => [owner],
      firstSync: false,
    );
    expect(c2, isFalse);
    expect(db.assets.countSync(), 8);
@@ -141,9 +137,8 @@ void main() {
    final bool c3 = await s.syncRemoteAssetsToDb(
      users: [owner],
      getChangedAssets: _failDiff,
      loadAssets: (u, d, firstSync) => remoteAssets,
      loadAssets: (u, d) => remoteAssets,
      refreshUsers: () => [owner],
      firstSync: false,
    );
    expect(c3, isTrue);
    expect(db.assets.countSync(), 7);
@@ -152,9 +147,8 @@ void main() {
    final bool c4 = await s.syncRemoteAssetsToDb(
      users: [owner],
      getChangedAssets: _failDiff,
      loadAssets: (u, d, firstSync) => remoteAssets,
      loadAssets: (u, d) => remoteAssets,
      refreshUsers: () => [owner],
      firstSync: false,
    );
    expect(c4, isTrue);
    expect(db.assets.countSync(), 9);
@@ -172,9 +166,8 @@ void main() {
    final bool c = await s.syncRemoteAssetsToDb(
      users: [owner],
      getChangedAssets: (user, since) async => (toUpsert, toDelete),
      loadAssets: (user, date, firstSync) => throw Exception(),
      loadAssets: (user, date) => throw Exception(),
      refreshUsers: () => throw Exception(),
      firstSync: false,
    );
    expect(c, isTrue);
    expect(db.assets.countSync(), 6);

@@ -1,35 +0,0 @@
import 'package:flutter_test/flutter_test.dart';
import 'package:immich_mobile/utils/version_compatibility.dart';

void main() {
  test('getVersionCompatibilityMessage', () {
    String? result;

    result = getVersionCompatibilityMessage(1, 0, 2, 0);
    expect(
      result,
      'Your app major version is not compatible with the server!',
    );

    result = getVersionCompatibilityMessage(1, 106, 1, 105);
    expect(
      result,
      'Your app minor version is not compatible with the server! Please update your server to version v1.106.0 or newer to login',
    );

    result = getVersionCompatibilityMessage(1, 107, 1, 105);
    expect(
      result,
      'Your app minor version is not compatible with the server! Please update your server to version v1.106.0 or newer to login',
    );

    result = getVersionCompatibilityMessage(1, 106, 1, 106);
    expect(result, null);

    result = getVersionCompatibilityMessage(1, 107, 1, 106);
    expect(result, null);

    result = getVersionCompatibilityMessage(1, 107, 1, 108);
    expect(result, null);
  });
}
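The deleted test pins down the compatibility rule: major versions must match, and an app at or past 1.106 refuses servers older than 1.106, while a server that is ahead of the app is fine. A TypeScript sketch of the rule implied by these cases; the 1.106 breakpoint is inferred from the expected messages, not from the Dart implementation:

```typescript
// Minimal reconstruction of the rule the deleted test exercises.
// Arguments follow the test's order: (appMajor, appMinor, serverMajor, serverMinor).
function getVersionCompatibilityMessage(
  appMajor: number,
  appMinor: number,
  serverMajor: number,
  serverMinor: number,
): string | null {
  if (appMajor !== serverMajor) {
    return 'Your app major version is not compatible with the server!';
  }
  // Assumed hardcoded breakpoint: apps at or past the 1.106 breaking change
  // require a 1.106+ server.
  if (appMinor >= 106 && serverMinor < 106) {
    return 'Your app minor version is not compatible with the server! Please update your server to version v1.106.0 or newer to login';
  }
  return null; // otherwise compatible, even when the server is newer than the app
}
```
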
@@ -6735,7 +6735,7 @@
  "info": {
    "title": "Immich",
    "description": "Immich API",
    "version": "1.106.3",
    "version": "1.105.1",
    "contact": {}
  },
  "tags": [],
@@ -7432,6 +7432,10 @@
  },
  "AssetFullSyncDto": {
    "properties": {
      "lastCreationDate": {
        "format": "date-time",
        "type": "string"
      },
      "lastId": {
        "format": "uuid",
        "type": "string"
      },
@@ -8146,7 +8150,7 @@
        "type": "boolean"
      },
      "maxDistance": {
        "format": "double",
        "format": "float",
        "maximum": 0.1,
        "minimum": 0.001,
        "type": "number"
@@ -8347,7 +8351,7 @@
        "type": "boolean"
      },
      "maxDistance": {
        "format": "double",
        "format": "float",
        "maximum": 2,
        "minimum": 0,
        "type": "number"
@@ -8357,7 +8361,7 @@
        "type": "integer"
      },
      "minScore": {
        "format": "double",
        "format": "float",
        "maximum": 1,
        "minimum": 0,
        "type": "number"
@@ -9797,7 +9801,7 @@
        "type": "integer"
      },
      "diskUsagePercentage": {
        "format": "double",
        "format": "float",
        "type": "number"
      },
      "diskUse": {

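Note on the `float`/`double` flips above: in OpenAPI, `format` on a `type: number` is a hint for code generators, not a validation constraint. The TypeScript SDK is unaffected (both formats emit `number`), but the Dart generator decodes them differently, which is exactly the `mapValueOfType<num>(...).toDouble()` versus `mapValueOfType<double>(...)` pairing earlier in this diff. A sketch of the generated TypeScript shape, assumed from the schema fields shown here:

```typescript
// Sketch: the TypeScript SDK emits the same type either way; only `type`
// matters, not `format` (float vs double).
export type FacialRecognitionConfig = {
  enabled: boolean;
  maxDistance: number; // "type": "number" -> number, whatever the format
  minFaces: number;    // "type": "integer" -> also number in TypeScript
  minScore: number;
  modelName: string;
};
```
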
4 open-api/typescript-sdk/package-lock.json generated
@@ -1,12 +1,12 @@
{
  "name": "@immich/sdk",
  "version": "1.106.3",
  "version": "1.105.1",
  "lockfileVersion": 3,
  "requires": true,
  "packages": {
    "": {
      "name": "@immich/sdk",
      "version": "1.106.3",
      "version": "1.105.1",
      "license": "GNU Affero General Public License version 3",
      "dependencies": {
        "@oazapfts/runtime": "^1.0.2"

@@ -1,6 +1,6 @@
{
  "name": "@immich/sdk",
  "version": "1.106.3",
  "version": "1.105.1",
  "description": "Auto-generated TypeScript SDK for the Immich API",
  "type": "module",
  "main": "./build/index.js",

@@ -1,6 +1,6 @@
/**
 * Immich
 * 1.106.3
 * 1.105.1
 * DO NOT MODIFY - This file has been generated using oazapfts.
 * See https://www.npmjs.com/package/oazapfts
 */
@@ -904,6 +904,7 @@ export type AssetDeltaSyncResponseDto = {
  upserted: AssetResponseDto[];
};
export type AssetFullSyncDto = {
  lastCreationDate?: string;
  lastId?: string;
  limit: number;
  updatedUntil: string;

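The `lastCreationDate` plus `lastId` pair forms a compound cursor for paging the full sync. A hedged sketch of how a client might drive it; the `fetchPage` helper and page size are illustrative, only the DTO shape comes from this diff:

```typescript
import { AssetFullSyncDto } from '@immich/sdk';

// Cursor-style paging over the full-sync endpoint (sketch). Each page
// continues strictly after the last (fileCreatedAt, id) pair already seen.
async function fullSync(
  fetchPage: (dto: AssetFullSyncDto) => Promise<{ id: string; fileCreatedAt: string }[]>,
): Promise<void> {
  const updatedUntil = new Date().toISOString(); // freeze the snapshot boundary
  let cursor: Pick<AssetFullSyncDto, 'lastId' | 'lastCreationDate'> = {};
  for (;;) {
    const page = await fetchPage({ ...cursor, limit: 1000, updatedUntil });
    if (page.length === 0) break;
    const last = page[page.length - 1];
    cursor = { lastCreationDate: last.fileCreatedAt, lastId: last.id };
    if (page.length < 1000) break; // short page: nothing left to fetch
  }
}
```
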
@@ -69,7 +69,7 @@
      "schedule": "on tuesday"
    }
  ],
  "ignorePaths": ["mobile/openapi/pubspec.yaml", "mobile/ios", "mobile/android"],
  "ignorePaths": ["mobile/openapi/pubspec.yaml"],
  "ignoreDeps": ["http", "intl"],
  "labels": ["dependencies", "renovate"]
}

@@ -1,5 +1,5 @@
# dev build
FROM ghcr.io/immich-app/base-server-dev:20240611@sha256:2047ec0f857a800675379c65404dfdf4ac02ab7684c759916e3256d3d9566027 as dev
FROM ghcr.io/immich-app/base-server-dev:20240604@sha256:bb31fafb1e8fcb4338c2f7a8f424da3d9c5cf6dd6bdb266c54477c795dd07819 as dev

RUN apt-get install --no-install-recommends -yqq tini
WORKDIR /usr/src/app
@@ -41,7 +41,7 @@ RUN npm run build


# prod build
FROM ghcr.io/immich-app/base-server-prod:20240611@sha256:efd32a2af6e7ace8bcea1e94115fe95a971fe1d1fef7e667ff6e77364ce51c46
FROM ghcr.io/immich-app/base-server-prod:20240604@sha256:481ea3ee56fb0e130804fec25c124d28477f10f8a01f7d06fb2e3f85c181bbb9

WORKDIR /usr/src/app
ENV NODE_ENV=production \

23 server/package-lock.json generated
@@ -1,12 +1,12 @@
{
  "name": "immich",
  "version": "1.106.3",
  "version": "1.105.1",
  "lockfileVersion": 2,
  "requires": true,
  "packages": {
    "": {
      "name": "immich",
      "version": "1.106.3",
      "version": "1.105.1",
      "license": "GNU Affero General Public License version 3",
      "dependencies": {
        "@nestjs/bullmq": "^10.0.1",
@@ -76,6 +76,7 @@
        "@types/cookie-parser": "^1.4.3",
        "@types/express": "^4.17.17",
        "@types/fluent-ffmpeg": "^2.1.21",
        "@types/imagemin": "^8.0.1",
        "@types/js-yaml": "^4.0.9",
        "@types/lodash": "^4.14.197",
        "@types/mock-fs": "^4.13.1",
@@ -5789,6 +5790,15 @@
      "resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.2.tgz",
      "integrity": "sha512-lPG6KlZs88gef6aD85z3HNkztpj7w2R7HmR3gygjfXCQmsLloWNARFkMuzKiiY8FGdh1XDpgBdrSf4aKDiA7Kg=="
    },
    "node_modules/@types/imagemin": {
      "version": "8.0.5",
      "resolved": "https://registry.npmjs.org/@types/imagemin/-/imagemin-8.0.5.tgz",
      "integrity": "sha512-tah3dm+5sG+fEDAz6CrQ5evuEaPX9K6DF3E5a01MPOKhA2oGBoC+oA5EJzSugB905sN4DE19EDzldT2Cld2g6Q==",
      "dev": true,
      "dependencies": {
        "@types/node": "*"
      }
    },
    "node_modules/@types/inquirer": {
      "version": "8.2.6",
      "resolved": "https://registry.npmjs.org/@types/inquirer/-/inquirer-8.2.6.tgz",
@@ -20204,6 +20214,15 @@
      "resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.2.tgz",
      "integrity": "sha512-lPG6KlZs88gef6aD85z3HNkztpj7w2R7HmR3gygjfXCQmsLloWNARFkMuzKiiY8FGdh1XDpgBdrSf4aKDiA7Kg=="
    },
    "@types/imagemin": {
      "version": "8.0.5",
      "resolved": "https://registry.npmjs.org/@types/imagemin/-/imagemin-8.0.5.tgz",
      "integrity": "sha512-tah3dm+5sG+fEDAz6CrQ5evuEaPX9K6DF3E5a01MPOKhA2oGBoC+oA5EJzSugB905sN4DE19EDzldT2Cld2g6Q==",
      "dev": true,
      "requires": {
        "@types/node": "*"
      }
    },
    "@types/inquirer": {
      "version": "8.2.6",
      "resolved": "https://registry.npmjs.org/@types/inquirer/-/inquirer-8.2.6.tgz",

@@ -1,6 +1,6 @@
{
  "name": "immich",
  "version": "1.106.3",
  "version": "1.105.1",
  "description": "",
  "author": "",
  "private": true,
@@ -102,6 +102,7 @@
    "@types/cookie-parser": "^1.4.3",
    "@types/express": "^4.17.17",
    "@types/fluent-ffmpeg": "^2.1.21",
    "@types/imagemin": "^8.0.1",
    "@types/js-yaml": "^4.0.9",
    "@types/lodash": "^4.14.197",
    "@types/mock-fs": "^4.13.1",

@@ -374,8 +374,7 @@ export const immichAppConfig: ConfigModuleOptions = {
  DB_SKIP_MIGRATIONS: Joi.boolean().optional().default(false),

  IMMICH_PORT: Joi.number().optional(),
  IMMICH_API_METRICS_PORT: Joi.number().optional(),
  IMMICH_MICROSERVICES_METRICS_PORT: Joi.number().optional(),
  IMMICH_METRICS_PORT: Joi.number().optional(),

  IMMICH_METRICS: Joi.boolean().optional().default(false),
  IMMICH_HOST_METRICS: Joi.boolean().optional().default(false),

@@ -254,7 +254,7 @@ export class StorageCore {
      this.logger.warn(`Unable to complete move. File size mismatch: ${newPathSize} !== ${oldPathSize}`);
      return false;
    }
    const config = await this.configCore.getConfig({ withCache: true });
    const config = await this.configCore.getConfig();
    if (assetInfo && config.storageTemplate.hashVerificationEnabled) {
      const { checksum } = assetInfo;
      const newChecksum = await this.cryptoRepository.hashFile(newPath);

@@ -42,8 +42,8 @@ export class SystemConfigCore {
    instance = null;
  }

  async getConfig({ withCache }: { withCache: boolean }): Promise<SystemConfig> {
    if (!withCache || !this.config) {
  async getConfig(force = false): Promise<SystemConfig> {
    if (force || !this.config) {
      const lastUpdated = this.lastUpdated;
      await this.asyncLock.acquire(DatabaseLock[DatabaseLock.GetSystemConfig], async () => {
        if (lastUpdated === this.lastUpdated) {
@@ -74,13 +74,13 @@ export class SystemConfigCore {

    await this.repository.set(SystemMetadataKey.SYSTEM_CONFIG, partialConfig);

    const config = await this.getConfig({ withCache: false });
    const config = await this.getConfig(true);
    this.config$.next(config);
    return config;
  }

  async refreshConfig() {
    const newConfig = await this.getConfig({ withCache: false });
    const newConfig = await this.getConfig(true);
    this.config$.next(newConfig);
  }

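The two `getConfig` call conventions map onto each other with inverted polarity: `getConfig({ withCache: true })` corresponds to plain `getConfig()`, and `{ withCache: false }` to `getConfig(true)`. A minimal sketch of the boolean form, with a stand-in type and an assumed loader helper:

```typescript
type SystemConfig = Record<string, unknown>; // stand-in for the real type
declare function loadFromRepository(): Promise<SystemConfig>; // assumed helper
let cached: SystemConfig | undefined;

// getConfig({ withCache: true })  ~  getConfig()       (serve the cached copy)
// getConfig({ withCache: false }) ~  getConfig(true)   (force a reload)
async function getConfig(force = false): Promise<SystemConfig> {
  if (force || !cached) {
    cached = await loadFromRepository(); // refresh the cache on miss or force
  }
  return cached;
}
```

The options-object variant exists because `getConfig(true)` is opaque at the call site, while `{ withCache: false }` states the intent.
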
@@ -127,10 +127,10 @@ export function mapAsset(entity: AssetEntity, options: AssetMapOptions = {}): As
    stackParentId: withStack ? entity.stack?.primaryAssetId : undefined,
    stack: withStack
      ? entity.stack?.assets
          ?.filter((a) => a.id !== entity.stack?.primaryAssetId)
          ?.map((a) => mapAsset(a, { stripMetadata, auth: options.auth }))
          .filter((a) => a.id !== entity.stack?.primaryAssetId)
          .map((a) => mapAsset(a, { stripMetadata, auth: options.auth }))
      : undefined,
    stackCount: entity.stack?.assetCount ?? entity.stack?.assets?.length ?? null,
    stackCount: entity.stack?.assets?.length ?? null,
    isOffline: entity.isOffline,
    hasMetadata: true,
    duplicateId: entity.duplicateId,

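The `stackCount` variants differ in where the number comes from: `stack.assets?.length` requires the stacked assets to be loaded, while `stack.assetCount` is a virtual property filled by TypeORM's `loadRelationCountAndMap` (see the repository changes later in this diff), so the count survives even when the stacked rows themselves are not selected. A sketch of how that virtual count is produced:

```typescript
import { Repository } from 'typeorm';
import { AssetEntity } from 'src/entities/asset.entity';

declare const repo: Repository<AssetEntity>; // assumed repository instance

// Adds a COUNT subquery and maps the result onto the virtual
// AssetStackEntity.assetCount property declared later in this diff,
// without hydrating every stacked asset row.
const qb = repo
  .createQueryBuilder('asset')
  .leftJoinAndSelect('asset.stack', 'stack')
  .loadRelationCountAndMap('stack.assetCount', 'stack.assets', 'stackedAssetsCount');
```
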
@@ -21,7 +21,7 @@ export class DuplicateDetectionConfig extends TaskConfig {
  @Min(0.001)
  @Max(0.1)
  @Type(() => Number)
  @ApiProperty({ type: 'number', format: 'double' })
  @ApiProperty({ type: 'number', format: 'float' })
  maxDistance!: number;
}

@@ -30,14 +30,14 @@ export class FacialRecognitionConfig extends ModelConfig {
  @Min(0)
  @Max(1)
  @Type(() => Number)
  @ApiProperty({ type: 'number', format: 'double' })
  @ApiProperty({ type: 'number', format: 'float' })
  minScore!: number;

  @IsNumber()
  @Min(0)
  @Max(2)
  @Type(() => Number)
  @ApiProperty({ type: 'number', format: 'double' })
  @ApiProperty({ type: 'number', format: 'float' })
  maxDistance!: number;

  @IsNumber()

@@ -21,7 +21,7 @@ export class ServerStorageResponseDto {
  @ApiProperty({ type: 'integer', format: 'int64' })
  diskAvailableRaw!: number;

  @ApiProperty({ type: 'number', format: 'double' })
  @ApiProperty({ type: 'number', format: 'float' })
  diskUsagePercentage!: number;
}

@@ -7,6 +7,9 @@ export class AssetFullSyncDto {
  @ValidateUUID({ optional: true })
  lastId?: string;

  @ValidateDate({ optional: true })
  lastCreationDate?: Date;

  @ValidateDate()
  updatedUntil!: Date;

@@ -16,6 +16,4 @@ export class AssetStackEntity {

  @Column({ nullable: false })
  primaryAssetId!: string;

  assetCount?: number;
}

@@ -122,6 +122,7 @@ export interface AssetExploreOptions extends AssetExploreFieldOptions {

export interface AssetFullSyncOptions {
  ownerId: string;
  lastCreationDate?: Date;
  lastId?: string;
  updatedUntil: Date;
  limit: number;
@@ -156,7 +157,7 @@ export interface IAssetRepository {
  getByChecksums(userId: string, checksums: Buffer[]): Promise<AssetEntity[]>;
  getUploadAssetIdByChecksum(ownerId: string, checksum: Buffer): Promise<string | undefined>;
  getByAlbumId(pagination: PaginationOptions, albumId: string): Paginated<AssetEntity>;
  getByDeviceIds(ownerId: string, deviceId: string, deviceAssetIds: string[]): Promise<string[]>;
  getByDeviceIds(ownerId: string, deviceId: string, deviceAssetIds: string[]): Promise<AssetEntity[]>;
  getByUserId(pagination: PaginationOptions, userId: string, options?: AssetSearchOptions): Paginated<AssetEntity>;
  getById(
    id: string,

@@ -120,10 +120,6 @@ export interface IEntityJob extends IBaseJob {
  source?: 'upload' | 'sidecar-write' | 'copy';
}

export interface IAssetDeleteJob extends IEntityJob {
  deleteOnDisk: boolean;
}

export interface ILibraryFileJob extends IEntityJob {
  ownerId: string;
  assetPath: string;
@@ -250,7 +246,7 @@ export type JobItem =

  // Asset Deletion
  | { name: JobName.PERSON_CLEANUP; data?: IBaseJob }
  | { name: JobName.ASSET_DELETION; data: IAssetDeleteJob }
  | { name: JobName.ASSET_DELETION; data: IEntityJob }
  | { name: JobName.ASSET_DELETION_CHECK; data?: IBaseJob }

  // Library Management

@@ -1049,18 +1049,50 @@ SELECT
  "exifInfo"."bitsPerSample" AS "exifInfo_bitsPerSample",
  "exifInfo"."fps" AS "exifInfo_fps",
  "stack"."id" AS "stack_id",
  "stack"."primaryAssetId" AS "stack_primaryAssetId"
  "stack"."primaryAssetId" AS "stack_primaryAssetId",
  "stackedAssets"."id" AS "stackedAssets_id",
  "stackedAssets"."deviceAssetId" AS "stackedAssets_deviceAssetId",
  "stackedAssets"."ownerId" AS "stackedAssets_ownerId",
  "stackedAssets"."libraryId" AS "stackedAssets_libraryId",
  "stackedAssets"."deviceId" AS "stackedAssets_deviceId",
  "stackedAssets"."type" AS "stackedAssets_type",
  "stackedAssets"."originalPath" AS "stackedAssets_originalPath",
  "stackedAssets"."previewPath" AS "stackedAssets_previewPath",
  "stackedAssets"."thumbnailPath" AS "stackedAssets_thumbnailPath",
  "stackedAssets"."thumbhash" AS "stackedAssets_thumbhash",
  "stackedAssets"."encodedVideoPath" AS "stackedAssets_encodedVideoPath",
  "stackedAssets"."createdAt" AS "stackedAssets_createdAt",
  "stackedAssets"."updatedAt" AS "stackedAssets_updatedAt",
  "stackedAssets"."deletedAt" AS "stackedAssets_deletedAt",
  "stackedAssets"."fileCreatedAt" AS "stackedAssets_fileCreatedAt",
  "stackedAssets"."localDateTime" AS "stackedAssets_localDateTime",
  "stackedAssets"."fileModifiedAt" AS "stackedAssets_fileModifiedAt",
  "stackedAssets"."isFavorite" AS "stackedAssets_isFavorite",
  "stackedAssets"."isArchived" AS "stackedAssets_isArchived",
  "stackedAssets"."isExternal" AS "stackedAssets_isExternal",
  "stackedAssets"."isOffline" AS "stackedAssets_isOffline",
  "stackedAssets"."checksum" AS "stackedAssets_checksum",
  "stackedAssets"."duration" AS "stackedAssets_duration",
  "stackedAssets"."isVisible" AS "stackedAssets_isVisible",
  "stackedAssets"."livePhotoVideoId" AS "stackedAssets_livePhotoVideoId",
  "stackedAssets"."originalFileName" AS "stackedAssets_originalFileName",
  "stackedAssets"."sidecarPath" AS "stackedAssets_sidecarPath",
  "stackedAssets"."stackId" AS "stackedAssets_stackId",
  "stackedAssets"."duplicateId" AS "stackedAssets_duplicateId"
FROM
  "assets" "asset"
  LEFT JOIN "exif" "exifInfo" ON "exifInfo"."assetId" = "asset"."id"
  LEFT JOIN "asset_stack" "stack" ON "stack"."id" = "asset"."stackId"
  LEFT JOIN "assets" "stackedAssets" ON "stackedAssets"."stackId" = "stack"."id"
  AND ("stackedAssets"."deletedAt" IS NULL)
WHERE
  "asset"."isVisible" = true
  AND "asset"."ownerId" IN ($1)
  AND "asset"."id" > $2
  AND "asset"."updatedAt" <= $3
  AND ("asset"."fileCreatedAt", "asset"."id") < ($2, $3)
  AND "asset"."updatedAt" <= $4
ORDER BY
  "asset"."id" ASC
  "asset"."fileCreatedAt" DESC,
  "asset"."id" DESC
LIMIT
  10

@@ -1124,11 +1156,42 @@ SELECT
  "exifInfo"."bitsPerSample" AS "exifInfo_bitsPerSample",
  "exifInfo"."fps" AS "exifInfo_fps",
  "stack"."id" AS "stack_id",
  "stack"."primaryAssetId" AS "stack_primaryAssetId"
  "stack"."primaryAssetId" AS "stack_primaryAssetId",
  "stackedAssets"."id" AS "stackedAssets_id",
  "stackedAssets"."deviceAssetId" AS "stackedAssets_deviceAssetId",
  "stackedAssets"."ownerId" AS "stackedAssets_ownerId",
  "stackedAssets"."libraryId" AS "stackedAssets_libraryId",
  "stackedAssets"."deviceId" AS "stackedAssets_deviceId",
  "stackedAssets"."type" AS "stackedAssets_type",
  "stackedAssets"."originalPath" AS "stackedAssets_originalPath",
  "stackedAssets"."previewPath" AS "stackedAssets_previewPath",
  "stackedAssets"."thumbnailPath" AS "stackedAssets_thumbnailPath",
  "stackedAssets"."thumbhash" AS "stackedAssets_thumbhash",
  "stackedAssets"."encodedVideoPath" AS "stackedAssets_encodedVideoPath",
  "stackedAssets"."createdAt" AS "stackedAssets_createdAt",
  "stackedAssets"."updatedAt" AS "stackedAssets_updatedAt",
  "stackedAssets"."deletedAt" AS "stackedAssets_deletedAt",
  "stackedAssets"."fileCreatedAt" AS "stackedAssets_fileCreatedAt",
  "stackedAssets"."localDateTime" AS "stackedAssets_localDateTime",
  "stackedAssets"."fileModifiedAt" AS "stackedAssets_fileModifiedAt",
  "stackedAssets"."isFavorite" AS "stackedAssets_isFavorite",
  "stackedAssets"."isArchived" AS "stackedAssets_isArchived",
  "stackedAssets"."isExternal" AS "stackedAssets_isExternal",
  "stackedAssets"."isOffline" AS "stackedAssets_isOffline",
  "stackedAssets"."checksum" AS "stackedAssets_checksum",
  "stackedAssets"."duration" AS "stackedAssets_duration",
  "stackedAssets"."isVisible" AS "stackedAssets_isVisible",
  "stackedAssets"."livePhotoVideoId" AS "stackedAssets_livePhotoVideoId",
  "stackedAssets"."originalFileName" AS "stackedAssets_originalFileName",
  "stackedAssets"."sidecarPath" AS "stackedAssets_sidecarPath",
  "stackedAssets"."stackId" AS "stackedAssets_stackId",
  "stackedAssets"."duplicateId" AS "stackedAssets_duplicateId"
FROM
  "assets" "asset"
  LEFT JOIN "exif" "exifInfo" ON "exifInfo"."assetId" = "asset"."id"
  LEFT JOIN "asset_stack" "stack" ON "stack"."id" = "asset"."stackId"
  LEFT JOIN "assets" "stackedAssets" ON "stackedAssets"."stackId" = "stack"."id"
  AND ("stackedAssets"."deletedAt" IS NULL)
WHERE
  "asset"."isVisible" = true
  AND "asset"."ownerId" IN ($1)

@@ -155,8 +155,8 @@ export class AssetRepository implements IAssetRepository {
    });
  }

  async getByDeviceIds(ownerId: string, deviceId: string, deviceAssetIds: string[]): Promise<string[]> {
    const assets = await this.repository.find({
  getByDeviceIds(ownerId: string, deviceId: string, deviceAssetIds: string[]): Promise<AssetEntity[]> {
    return this.repository.find({
      select: { deviceAssetId: true },
      where: {
        deviceAssetId: In(deviceAssetIds),
@@ -165,8 +165,6 @@ export class AssetRepository implements IAssetRepository {
      },
      withDeleted: true,
    });

    return assets.map((asset) => asset.deviceAssetId);
  }

  getByUserId(
@@ -765,40 +763,36 @@ export class AssetRepository implements IAssetRepository {
    ],
  })
  getAllForUserFullSync(options: AssetFullSyncOptions): Promise<AssetEntity[]> {
    const { ownerId, lastId, updatedUntil, limit } = options;
    const { ownerId, lastCreationDate, lastId, updatedUntil, limit } = options;
    const builder = this.getBuilder({
      userIds: [ownerId],
      exifInfo: false, // need to do this manually because `exifInfo: true` also loads stacked assets messing with `limit`
      exifInfo: true, // also joins stack information
      withStacked: false, // return all assets individually as expected by the app
    })
      .leftJoinAndSelect('asset.exifInfo', 'exifInfo')
      .leftJoinAndSelect('asset.stack', 'stack')
      .loadRelationCountAndMap('stack.assetCount', 'stack.assets', 'stackedAssetsCount');
    });

    if (lastId !== undefined) {
      builder.andWhere('asset.id > :lastId', { lastId });
    if (lastCreationDate !== undefined && lastId !== undefined) {
      builder.andWhere('(asset.fileCreatedAt, asset.id) < (:lastCreationDate, :lastId)', {
        lastCreationDate,
        lastId,
      });
    }
    builder

    return builder
      .andWhere('asset.updatedAt <= :updatedUntil', { updatedUntil })
      .orderBy('asset.id', 'ASC')
      .limit(limit) // cannot use `take` for performance reasons
      .withDeleted();
    return builder.getMany();
      .orderBy('asset.fileCreatedAt', 'DESC')
      .addOrderBy('asset.id', 'DESC')
      .limit(limit)
      .withDeleted()
      .getMany();
  }

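This is the substantive change behind the SQL diff above: the full-sync query moves from a single-column cursor (`asset.id > :lastId`, ordered by id) to keyset pagination over the composite `(fileCreatedAt, id)` key. The row-value comparison uses the same columns, order, and direction as the `ORDER BY`, so Postgres can resume from the cursor with an index scan instead of an OFFSET, and the id tie-breaker gives a total order so equal timestamps neither skip nor repeat rows. A generic sketch of the pattern, with assumed table and column names:

```typescript
// Generic keyset-pagination sketch; `db.query` stands in for any SQL client.
// The tuple comparison must mirror the ORDER BY exactly (columns + direction).
async function nextPage(
  db: { query: (sql: string, params: unknown[]) => Promise<unknown[]> },
  cursor?: { fileCreatedAt: string; id: string },
): Promise<unknown[]> {
  const where = cursor
    ? 'AND ("fileCreatedAt", "id") < ($1, $2)' // strictly past the cursor in DESC order
    : '';
  const params = cursor ? [cursor.fileCreatedAt, cursor.id] : [];
  return db.query(
    `SELECT * FROM "assets" WHERE "isVisible" = true ${where}
     ORDER BY "fileCreatedAt" DESC, "id" DESC LIMIT 1000`,
    params,
  );
}
```
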
  @GenerateSql({ params: [{ userIds: [DummyValue.UUID], updatedAfter: DummyValue.DATE }] })
  getChangedDeltaSync(options: AssetDeltaSyncOptions): Promise<AssetEntity[]> {
    const builder = this.getBuilder({
      userIds: options.userIds,
      exifInfo: false, // need to do this manually because `exifInfo: true` also loads stacked assets messing with `limit`
      withStacked: false, // return all assets individually as expected by the app
    })
      .leftJoinAndSelect('asset.exifInfo', 'exifInfo')
      .leftJoinAndSelect('asset.stack', 'stack')
      .loadRelationCountAndMap('stack.assetCount', 'stack.assets', 'stackedAssetsCount')
    const builder = this.getBuilder({ userIds: options.userIds, exifInfo: true, withStacked: false })
      .andWhere({ updatedAt: MoreThan(options.updatedAfter) })
      .limit(options.limit) // cannot use `take` for performance reasons
      .limit(options.limit)
      .withDeleted();

    return builder.getMany();
  }
}

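Full sync and delta sync complement each other: full sync walks the entire library with the keyset cursor above, while delta sync only fetches rows with `updatedAt` past a checkpoint, soft-deleted rows included (`withDeleted`) so the client can prune local copies. A hedged sketch of the client-side decision; the function names are illustrative, not the app's API:

```typescript
// Illustrative stubs; the real endpoints live in the mobile app / SDK.
declare function fullSync(): Promise<void>;
declare function deltaSync(opts: { updatedAfter: Date; limit: number }): Promise<void>;

async function sync(checkpoint: Date | null): Promise<void> {
  if (!checkpoint) {
    // First run: page everything with the (fileCreatedAt, id) cursor.
    return fullSync();
  }
  // Subsequent runs: only assets changed since the checkpoint, including
  // soft-deleted ones so local deletions can be applied.
  return deltaSync({ updatedAfter: checkpoint, limit: 10_000 });
}
```
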
@@ -45,7 +45,7 @@ export class MediaRepository implements IMediaRepository {
  }

  async generateThumbnail(input: string | Buffer, output: string, options: ThumbnailOptions): Promise<void> {
    const pipeline = sharp(input, { failOn: 'none', limitInputPixels: false })
    const pipeline = sharp(input, { failOn: 'none' })
      .pipelineColorspace(options.colorspace === Colorspace.SRGB ? 'srgb' : 'rgb16')
      .rotate();

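For context on the option that differs here: sharp rejects inputs larger than roughly 268 megapixels by default, and `limitInputPixels: false` removes that guard so oversized panoramas and scans can still be decoded; `failOn: 'none'` additionally tolerates recoverable decode errors instead of aborting. A small usage sketch with assumed file names:

```typescript
import sharp from 'sharp';

// Sketch: decode an arbitrarily large image and write a bounded thumbnail.
async function makeThumbnail(input: string, output: string): Promise<void> {
  await sharp(input, { failOn: 'none', limitInputPixels: false })
    .rotate() // honor EXIF orientation before resizing
    .resize(1440, 1440, { fit: 'outside' })
    .toFile(output);
}
```
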
@@ -1,6 +1,6 @@
import { Inject, Injectable } from '@nestjs/common';
import { InjectDataSource, InjectRepository } from '@nestjs/typeorm';
import { DefaultReadTaskOptions, Tags, exiftool } from 'exiftool-vendored';
import { DefaultExiftoolArgs, DefaultReadTaskOptions, ExifTool, Tags } from 'exiftool-vendored';
import geotz from 'geo-tz';
import { DummyValue, GenerateSql } from 'src/decorators';
import { ExifEntity } from 'src/entities/exif.entity';
@@ -21,18 +21,23 @@ export class MetadataRepository implements IMetadataRepository {
  ) {
    this.logger.setContext(MetadataRepository.name);
  }
  private exiftool: ExifTool = this.initExiftool();

  async teardown() {
    await exiftool.end();
    await this.exiftool.end();
  }

  private initExiftool() {
    // Enable exiftool LFS to parse metadata for files larger than 2GB.
    const exiftoolArgs = ['-api', 'largefilesupport=1', ...DefaultExiftoolArgs];
    return new ExifTool({ exiftoolArgs });
  }

  readTags(path: string): Promise<ImmichTags | null> {
    return exiftool
    return this.exiftool
      .read(path, undefined, {
        ...DefaultReadTaskOptions,

        // Enable exiftool LFS to parse metadata for files larger than 2GB.
        optionalArgs: ['-api', 'largefilesupport=1'],
        defaultVideosToUTC: true,
        backfillTimezones: true,
        inferTimezoneFromDatestamps: true,
@@ -48,12 +53,12 @@ export class MetadataRepository implements IMetadataRepository {
  }

  extractBinaryTag(path: string, tagName: string): Promise<Buffer> {
    return exiftool.extractBinaryTagToBuffer(tagName, path);
    return this.exiftool.extractBinaryTagToBuffer(tagName, path);
  }

  async writeTags(path: string, tags: Partial<Tags>): Promise<void> {
    try {
      await exiftool.write(path, tags, ['-overwrite_original']);
      await this.exiftool.write(path, tags, ['-overwrite_original']);
    } catch (error) {
      this.logger.warn(`Error writing exif data (${path}): ${error}`);
    }

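The two variants in this file differ in scope: passing `-api largefilesupport=1` as `optionalArgs` to `read()` only affects reads, while constructing a dedicated `ExifTool` instance with the flag in `exiftoolArgs` applies it to every command the daemon runs, including `extractBinaryTagToBuffer` and `write`. A self-contained sketch of the instance-based approach, with an assumed file path:

```typescript
import { DefaultExiftoolArgs, ExifTool } from 'exiftool-vendored';

// Sketch: constructor-level args are prepended to every exiftool invocation,
// so large-file support covers reads, writes, and binary tag extraction alike.
async function readLargeFile(path: string) {
  const et = new ExifTool({
    exiftoolArgs: ['-api', 'largefilesupport=1', ...DefaultExiftoolArgs],
  });
  try {
    return await et.read(path);
  } finally {
    await et.end(); // always shut the daemon down when finished
  }
}
```
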
@@ -277,12 +277,14 @@ export class AssetMediaService {
    auth: AuthDto,
    checkExistingAssetsDto: CheckExistingAssetsDto,
  ): Promise<CheckExistingAssetsResponseDto> {
    const existingIds = await this.assetRepository.getByDeviceIds(
    const assets = await this.assetRepository.getByDeviceIds(
      auth.user.id,
      checkExistingAssetsDto.deviceId,
      checkExistingAssetsDto.deviceAssetIds,
    );
    return { existingIds };
    return {
      existingIds: assets.map((asset) => asset.id),
    };
  }

  async bulkUploadCheck(auth: AuthDto, dto: AssetBulkUploadCheckDto): Promise<AssetBulkUploadCheckResponseDto> {