Compare commits
1 commit
lighter_bu...postgres-q

| Author | SHA1 | Date |
| --- | --- | --- |
|  | d46e5f2436 |  |

api.mustache (194 deletions)
@@ -1,194 +0,0 @@
-{{>header}}
-{{>part_of}}
-{{#operations}}
-
-class {{{classname}}} {
-  {{{classname}}}([ApiClient? apiClient]) : apiClient = apiClient ?? defaultApiClient;
-
-  final ApiClient apiClient;
-  {{#operation}}
-
-  {{#summary}}
-  /// {{{.}}}
-  {{/summary}}
-  {{#notes}}
-  {{#summary}}
-  ///
-  {{/summary}}
-  /// {{{notes}}}
-  ///
-  /// Note: This method returns the HTTP [Response].
-  {{/notes}}
-  {{^notes}}
-  {{#summary}}
-  ///
-  /// Note: This method returns the HTTP [Response].
-  {{/summary}}
-  {{^summary}}
-  /// Performs an HTTP '{{{httpMethod}}} {{{path}}}' operation and returns the [Response].
-  {{/summary}}
-  {{/notes}}
-  {{#hasParams}}
-  {{#summary}}
-  ///
-  {{/summary}}
-  {{^summary}}
-  {{#notes}}
-  ///
-  {{/notes}}
-  {{/summary}}
-  /// Parameters:
-  ///
-  {{/hasParams}}
-  {{#allParams}}
-  /// * [{{{dataType}}}] {{{paramName}}}{{#required}} (required){{/required}}{{#optional}} (optional){{/optional}}:
-  {{#description}}
-  /// {{{.}}}
-  {{/description}}
-  {{^-last}}
-  ///
-  {{/-last}}
-  {{/allParams}}
-  Future<Response> {{{nickname}}}WithHttpInfo({{#allParams}}{{#required}}{{{dataType}}} {{{paramName}}},{{^-last}} {{/-last}}{{/required}}{{/allParams}}{{#hasOptionalParams}}{ {{#allParams}}{{^required}}{{{dataType}}}? {{{paramName}}},{{^-last}} {{/-last}}{{/required}}{{/allParams}} }{{/hasOptionalParams}}) async {
-    // ignore: prefer_const_declarations
-    final path = r'{{{path}}}'{{#pathParams}}
-      .replaceAll({{=<% %>=}}'{<% baseName %>}'<%={{ }}=%>, {{{paramName}}}{{^isString}}.toString(){{/isString}}){{/pathParams}};
-
-    // ignore: prefer_final_locals
-    Object? postBody{{#bodyParam}} = {{{paramName}}}{{/bodyParam}};
-
-    final queryParams = <QueryParam>[];
-    final headerParams = <String, String>{};
-    final formParams = <String, String>{};
-    {{#hasQueryParams}}
-
-    {{#queryParams}}
-    {{^required}}
-    if ({{{paramName}}} != null) {
-    {{/required}}
-      queryParams.addAll(_queryParams('{{{collectionFormat}}}', '{{{baseName}}}', {{{paramName}}}));
-    {{^required}}
-    }
-    {{/required}}
-    {{/queryParams}}
-    {{/hasQueryParams}}
-    {{#hasHeaderParams}}
-
-    {{#headerParams}}
-    {{#required}}
-    headerParams[r'{{{baseName}}}'] = parameterToString({{{paramName}}});
-    {{/required}}
-    {{^required}}
-    if ({{{paramName}}} != null) {
-      headerParams[r'{{{baseName}}}'] = parameterToString({{{paramName}}});
-    }
-    {{/required}}
-    {{/headerParams}}
-    {{/hasHeaderParams}}
-
-    const contentTypes = <String>[{{#prioritizedContentTypes}}'{{{mediaType}}}'{{^-last}}, {{/-last}}{{/prioritizedContentTypes}}];
-
-    {{#isMultipart}}
-    bool hasFields = false;
-    final mp = MultipartRequest('{{{httpMethod}}}', Uri.parse(path));
-    {{#formParams}}
-    {{^isFile}}
-    if ({{{paramName}}} != null) {
-      hasFields = true;
-      mp.fields[r'{{{baseName}}}'] = parameterToString({{{paramName}}});
-    }
-    {{/isFile}}
-    {{#isFile}}
-    if ({{{paramName}}} != null) {
-      hasFields = true;
-      mp.fields[r'{{{baseName}}}'] = {{{paramName}}}.field;
-      mp.files.add({{{paramName}}});
-    }
-    {{/isFile}}
-    {{/formParams}}
-    if (hasFields) {
-      postBody = mp;
-    }
-    {{/isMultipart}}
-    {{^isMultipart}}
-    {{#formParams}}
-    {{^isFile}}
-    if ({{{paramName}}} != null) {
-      formParams[r'{{{baseName}}}'] = parameterToString({{{paramName}}});
-    }
-    {{/isFile}}
-    {{/formParams}}
-    {{/isMultipart}}
-
-    return apiClient.invokeAPI(
-      path,
-      '{{{httpMethod}}}',
-      queryParams,
-      postBody,
-      headerParams,
-      formParams,
-      contentTypes.isEmpty ? null : contentTypes.first,
-    );
-  }
-
-  {{#summary}}
-  /// {{{.}}}
-  {{/summary}}
-  {{#notes}}
-  {{#summary}}
-  ///
-  {{/summary}}
-  /// {{{notes}}}
-  {{/notes}}
-  {{#hasParams}}
-  {{#summary}}
-  ///
-  {{/summary}}
-  {{^summary}}
-  {{#notes}}
-  ///
-  {{/notes}}
-  {{/summary}}
-  /// Parameters:
-  ///
-  {{/hasParams}}
-  {{#allParams}}
-  /// * [{{{dataType}}}] {{{paramName}}}{{#required}} (required){{/required}}{{#optional}} (optional){{/optional}}:
-  {{#description}}
-  /// {{{.}}}
-  {{/description}}
-  {{^-last}}
-  ///
-  {{/-last}}
-  {{/allParams}}
-  Future<{{#returnType}}{{{.}}}?{{/returnType}}{{^returnType}}void{{/returnType}}> {{{nickname}}}({{#allParams}}{{#required}}{{{dataType}}} {{{paramName}}},{{^-last}} {{/-last}}{{/required}}{{/allParams}}{{#hasOptionalParams}}{ {{#allParams}}{{^required}}{{{dataType}}}? {{{paramName}}},{{^-last}} {{/-last}}{{/required}}{{/allParams}} }{{/hasOptionalParams}}) async {
-    final response = await {{{nickname}}}WithHttpInfo({{#allParams}}{{#required}}{{{paramName}}},{{^-last}} {{/-last}}{{/required}}{{/allParams}}{{#hasOptionalParams}} {{#allParams}}{{^required}}{{{paramName}}}: {{{paramName}}},{{^-last}} {{/-last}}{{/required}}{{/allParams}} {{/hasOptionalParams}});
-    if (response.statusCode >= HttpStatus.badRequest) {
-      throw ApiException(response.statusCode, await _decodeBodyBytes(response));
-    }
-    {{#returnType}}
-    // When a remote server returns no body with a status of 204, we shall not decode it.
-    // At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
-    // FormatException when trying to decode an empty string.
-    if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
-      {{#native_serialization}}
-      {{#isArray}}
-      final responseBody = await _decodeBodyBytes(response);
-      return (await apiClient.deserializeAsync(responseBody, '{{{returnType}}}') as List)
-        .cast<{{{returnBaseType}}}>()
-        .{{#uniqueItems}}toSet(){{/uniqueItems}}{{^uniqueItems}}toList(growable: false){{/uniqueItems}};
-      {{/isArray}}
-      {{^isArray}}
-      {{#isMap}}
-      return {{{returnType}}}.from(await apiClient.deserializeAsync(await _decodeBodyBytes(response), '{{{returnType}}}'),);
-      {{/isMap}}
-      {{^isMap}}
-      return await apiClient.deserializeAsync(await _decodeBodyBytes(response), '{{{returnType}}}',) as {{{returnType}}};
-      {{/isMap}}{{/isArray}}{{/native_serialization}}
-    }
-    return null;
-    {{/returnType}}
-  }
-  {{/operation}}
-}
-{{/operations}}
@@ -33,6 +33,7 @@ services:
       - ${UPLOAD_LOCATION}/photos/upload:/usr/src/app/upload/upload
       - /usr/src/app/node_modules
       - /etc/localtime:/etc/localtime:ro
+      - ../flickr30k-images:/flickr30k:ro
     env_file:
       - .env
     environment:

@@ -58,7 +59,6 @@ services:
       - 9231:9231
       - 2283:2283
     depends_on:
-      - redis
       - database
     healthcheck:
       disable: false

@@ -114,12 +114,6 @@ services:
     healthcheck:
       disable: false
 
-  redis:
-    container_name: immich_redis
-    image: docker.io/valkey/valkey:8-bookworm@sha256:c855f98e09d558a0d7cc1a4e56473231206a4c54c0114ada9c485b47aeb92ec8
-    healthcheck:
-      test: redis-cli ping || exit 1
-
   database:
     container_name: immich_postgres
     image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52

@@ -154,25 +148,25 @@ services:
       -c wal_compression=on
 
   # set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics
-  # immich-prometheus:
-  #   container_name: immich_prometheus
-  #   ports:
-  #     - 9090:9090
-  #   image: prom/prometheus
-  #   volumes:
-  #     - ./prometheus.yml:/etc/prometheus/prometheus.yml
-  #     - prometheus-data:/prometheus
+  immich-prometheus:
+    container_name: immich_prometheus
+    ports:
+      - 9090:9090
+    image: prom/prometheus
+    volumes:
+      - ./prometheus.yml:/etc/prometheus/prometheus.yml
+      - prometheus-data:/prometheus
 
   # first login uses admin/admin
   # add data source for http://immich-prometheus:9090 to get started
-  # immich-grafana:
-  #   container_name: immich_grafana
-  #   command: ['./run.sh', '-disable-reporting']
-  #   ports:
-  #     - 3000:3000
-  #   image: grafana/grafana:10.3.3-ubuntu
-  #   volumes:
-  #     - grafana-data:/var/lib/grafana
+  immich-grafana:
+    container_name: immich_grafana
+    command: ['./run.sh', '-disable-reporting']
+    ports:
+      - 3001:3000
+    image: grafana/grafana:10.3.3-ubuntu
+    volumes:
+      - grafana-data:/var/lib/grafana
 
 volumes:
   model-cache:
@@ -27,7 +27,6 @@ services:
     ports:
       - 2283:2283
     depends_on:
-      - redis
       - database
     restart: always
     healthcheck:

@@ -54,13 +53,6 @@ services:
     healthcheck:
       disable: false
 
-  redis:
-    container_name: immich_redis
-    image: docker.io/valkey/valkey:8-bookworm@sha256:c855f98e09d558a0d7cc1a4e56473231206a4c54c0114ada9c485b47aeb92ec8
-    healthcheck:
-      test: redis-cli ping || exit 1
-    restart: always
-
   database:
     container_name: immich_postgres
     image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
@@ -25,7 +25,6 @@ services:
     ports:
       - '2283:2283'
    depends_on:
-      - redis
       - database
     restart: always
     healthcheck:

@@ -47,13 +46,6 @@ services:
     healthcheck:
       disable: false
 
-  redis:
-    container_name: immich_redis
-    image: docker.io/valkey/valkey:8-bookworm@sha256:c855f98e09d558a0d7cc1a4e56473231206a4c54c0114ada9c485b47aeb92ec8
-    healthcheck:
-      test: redis-cli ping || exit 1
-    restart: always
-
   database:
     container_name: immich_postgres
     image: docker.io/tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
@@ -1,12 +1,14 @@
 global:
-  scrape_interval: 15s
-  evaluation_interval: 15s
+  scrape_interval: 3s
+  evaluation_interval: 3s
 
 scrape_configs:
   - job_name: immich_api
+    scrape_interval: 3s
     static_configs:
-      - targets: ['immich-server:8081']
+      - targets: ["immich-server:8081"]
 
   - job_name: immich_microservices
+    scrape_interval:
     static_configs:
-      - targets: ['immich-server:8082']
+      - targets: ["immich-server:8082"]
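For reference, the resulting Prometheus scrape configuration would look roughly like the sketch below. The per-job interval for `immich_microservices` is truncated in this view, so the `3s` value there is an assumption based on the global setting, not a confirmed value from this change.

```yaml
global:
  scrape_interval: 3s      # lowered from 15s in this change
  evaluation_interval: 3s  # lowered from 15s in this change

scrape_configs:
  - job_name: immich_api
    scrape_interval: 3s
    static_configs:
      - targets: ["immich-server:8081"]

  - job_name: immich_microservices
    scrape_interval: 3s # value truncated in the diff view; assumed to match the others
    static_configs:
      - targets: ["immich-server:8082"]
```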
@@ -278,7 +278,7 @@ You can use [Smart Search](/docs/features/searching.md) for this to some extent.
 
 ### I'm getting a lot of "faces" that aren't faces, what can I do?
 
 You can increase the MIN DETECTION SCORE to 0.8 to help prevent bad thumbnails. Setting the score too high (above 0.9) might filter out too many real faces depending on the library used. If you just want to hide specific faces, you can adjust the 'MIN FACES DETECTED' setting in the administration panel
 to increase the bar for what the algorithm considers a "core face" for that person, reducing the chance of bad thumbnails being chosen.
 
 ### The immich_model-cache volume takes up a lot of space, what could be the problem?

@@ -367,12 +367,6 @@ You need to [enable WebSockets](/docs/administration/reverse-proxy/) on your rev
 
 Immich components are typically deployed using docker. To see logs for deployed docker containers, you can use the [Docker CLI](https://docs.docker.com/engine/reference/commandline/cli/), specifically the `docker logs` command. For examples, see [Docker Help](/docs/guides/docker-help.md).
 
-### How can I reduce the log verbosity of Redis?
-
-To decrease Redis logs, you can add the following line to the `redis:` section of the `docker-compose.yml`:
-
-` command: redis-server --loglevel warning`
-
 ### How can I run Immich as a non-root user?
 
 You can change the user in the container by setting the `user` argument in `docker-compose.yml` for each service.

@@ -380,7 +374,6 @@ You may need to add mount points or docker volumes for the following internal co
 
 - `immich-machine-learning:/.config`
 - `immich-machine-learning:/.cache`
-- `redis:/data`
 
 The non-root user/group needs read/write access to the volume mounts, including `UPLOAD_LOCATION` and `/cache` for machine-learning.
 

@@ -425,7 +418,7 @@ After removing the containers and volumes, there are a few directories that need
 - `UPLOAD_LOCATION` contains all the media uploaded to Immich.
 
 :::note Portainer
 If you use portainer, bring down the stack in portainer. Go into the volumes section
 and remove all the volumes related to immich then restart the stack.
 :::
 
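To make the non-root answer above concrete, a minimal sketch of the `user` override in `docker-compose.yml` might look like this; the UID/GID values and the named volumes are illustrative assumptions, not part of this change:

```yaml
services:
  immich-server:
    # Example UID:GID; match these to the host user that owns UPLOAD_LOCATION.
    user: "1000:1000"
  immich-machine-learning:
    user: "1000:1000"
    volumes:
      # Internal paths the FAQ lists as needing writable mounts for a non-root user.
      - ml-config:/.config
      - ml-cache:/.cache

volumes:
  ml-config:
  ml-cache:
```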
@@ -13,7 +13,7 @@ Immich uses a traditional client-server design, with a dedicated database for da
 
 <img alt="Immich Architecture" src={AppArchitecture} className="p-4 dark:bg-immich-dark-primary my-4" />
 
-The diagram shows clients communicating with the server's API via REST. The server communicates with downstream systems (i.e. Redis, Postgres, Machine Learning, file system) through repository interfaces. Not shown in the diagram, is that the server is split into two separate containers `immich-server` and `immich-microservices`. The microservices container does not handle API requests or schedule cron jobs, but primarily handles incoming job requests from Redis.
+The diagram shows clients communicating with the server's API via REST. The server communicates with downstream systems (i.e. Postgres, Machine Learning, file system) through repository interfaces. Not shown in the diagram, is that the server is split into two separate containers `immich-server` and `immich-microservices`. The microservices container does not handle API requests or schedule cron jobs, but primarily handles incoming job requests from Postgres.
 
 ## Clients
 

@@ -53,7 +53,6 @@ The Immich backend is divided into several services, which are run as individual
 1. `immich-server` - Handle and respond to REST API requests, execute background jobs (thumbnail generation, metadata extraction, transcoding, etc.)
 1. `immich-machine-learning` - Execute machine learning models
 1. `postgres` - Persistent data storage
-1. `redis`- Queue management for background jobs
 
 ### Immich Server
 

@@ -111,7 +110,3 @@ Immich persists data in Postgres, which includes information about access and au
 :::info
 See [Database Migrations](./database-migrations.md) for more information about how to modify the database to create an index, modify a table, add a new column, etc.
 :::
-
-### Redis
-
-Immich uses [Redis](https://redis.com/) via [BullMQ](https://docs.bullmq.io/) to manage job queues. Some jobs trigger subsequent jobs. For example, Smart Search and Facial Recognition relies on thumbnail generation and automatically run after one is generated.

@@ -23,7 +23,6 @@ This environment includes the services below. Additional details are available i
 - Server - [`/server`](https://github.com/immich-app/immich/tree/main/server)
 - Web app - [`/web`](https://github.com/immich-app/immich/tree/main/web)
 - Machine learning - [`/machine-learning`](https://github.com/immich-app/immich/tree/main/machine-learning)
-- Redis
 - PostgreSQL development database with exposed port `5432` so you can use any database client to access it
 
 All the services are packaged to run as with single Docker Compose command.
@@ -1,6 +1,6 @@
 # Scaling Immich
 
-Immich is built with modern deployment practices in mind, and the backend is designed to be able to run multiple instances in parallel. When doing this, the only requirement you need to be aware of is that every instance needs to be connected to the shared infrastructure. That means they should all have access to the same Postgres and Redis instances, and have the same files mounted into the containers.
+Immich is built with modern deployment practices in mind, and the backend is designed to be able to run multiple instances in parallel. When doing this, the only requirement you need to be aware of is that every instance needs to be connected to the shared infrastructure. That means they should all have access to the same Postgres instance, and have the same files mounted into the containers.
 
 Scaling can be useful for many reasons. Maybe you have a gaming PC that you want to use for transcoding and thumbnail generation, or perhaps you run a Kubernetes cluster across a handful of powerful servers that you want to make use of.
 

@@ -16,4 +16,4 @@ By default, each running `immich-server` container comes with multiple internal
 
 ## Scaling down
 
-In the same way you can scale up to multiple containers, you can also choose to scale down. All state is stored in Postgres, Redis, and the filesystem so there is no risk in stopping a running immich-server container, for example if you want to use your GPU to play some games. As long as there is an API worker running you will still be able to browse Immich, and jobs will wait to be processed until there is a worker available for them.
+In the same way you can scale up to multiple containers, you can also choose to scale down. All state is stored in Postgres and the filesystem so there is no risk in stopping a running immich-server container, for example if you want to use your GPU to play some games. As long as there is an API worker running you will still be able to browse Immich, and jobs will wait to be processed until there is a worker available for them.
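One way to realize the parallel instances described above is Docker Compose replicas; the sketch below is illustrative only (the replica count, image tag, and volume layout are assumptions, not part of this change):

```yaml
services:
  immich-server:
    image: ghcr.io/immich-app/immich-server:release
    # Several identical workers; any running API worker keeps the UI reachable.
    deploy:
      replicas: 3
    env_file: .env # every replica must point at the same shared Postgres
    volumes:
      # Identical files must be mounted into every container.
      - ${UPLOAD_LOCATION}:/usr/src/app/upload
```

No `container_name` or published ports appear here, since those cannot be shared across replicas; a reverse proxy in front of the replicas is assumed.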
@@ -98,54 +98,6 @@ When `DB_URL` is defined, the `DB_HOSTNAME`, `DB_PORT`, `DB_USERNAME`, `DB_PASSW
 
 :::
 
-## Redis
-
-| Variable         | Description    | Default | Containers |
-| :--------------- | :------------- | :-----: | :--------- |
-| `REDIS_URL`      | Redis URL      |         | server     |
-| `REDIS_SOCKET`   | Redis socket   |         | server     |
-| `REDIS_HOSTNAME` | Redis host     | `redis` | server     |
-| `REDIS_PORT`     | Redis port     | `6379`  | server     |
-| `REDIS_USERNAME` | Redis username |         | server     |
-| `REDIS_PASSWORD` | Redis password |         | server     |
-| `REDIS_DBINDEX`  | Redis DB index | `0`     | server     |
-
-:::info
-All `REDIS_` variables must be provided to all Immich workers, including `api` and `microservices`.
-
-`REDIS_URL` must start with `ioredis://` and then include a `base64` encoded JSON string for the configuration.
-More information can be found in the upstream [ioredis] documentation.
-
-When `REDIS_URL` or `REDIS_SOCKET` are defined, the `REDIS_HOSTNAME`, `REDIS_PORT`, `REDIS_USERNAME`, `REDIS_PASSWORD`, and `REDIS_DBINDEX` variables are ignored.
-:::
-
-Redis (Sentinel) URL example JSON before encoding:
-
-<details>
-<summary>JSON</summary>
-
-```json
-{
-  "sentinels": [
-    {
-      "host": "redis-sentinel-node-0",
-      "port": 26379
-    },
-    {
-      "host": "redis-sentinel-node-1",
-      "port": 26379
-    },
-    {
-      "host": "redis-sentinel-node-2",
-      "port": 26379
-    }
-  ],
-  "name": "redis-sentinel"
-}
-```
-
-</details>
-
 ## Machine Learning
 
 | Variable | Description | Default | Containers |

@@ -212,16 +164,10 @@ the `_FILE` variable should be set to the path of a file containing the variable
 | `DB_USERNAME` | `DB_USERNAME_FILE`<sup>\*1</sup> |
 | `DB_PASSWORD` | `DB_PASSWORD_FILE`<sup>\*1</sup> |
 | `DB_URL` | `DB_URL_FILE`<sup>\*1</sup> |
-| `REDIS_PASSWORD` | `REDIS_PASSWORD_FILE`<sup>\*2</sup> |
 
 \*1: See the [official documentation][docker-secrets-docs] for
 details on how to use Docker Secrets in the Postgres image.
 
-\*2: See [this comment][docker-secrets-example] for an example of how
-to use a Docker secret for the password in the Redis container.
-
 [tz-list]: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones#List
-[docker-secrets-example]: https://github.com/docker-library/redis/issues/46#issuecomment-335326234
 [docker-secrets-docs]: https://github.com/docker-library/docs/tree/master/postgres#docker-secrets
 [docker-secrets]: https://docs.docker.com/engine/swarm/secrets/
-[ioredis]: https://ioredis.readthedocs.io/en/latest/README/#connect-to-redis
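As a worked example of the `_FILE` convention the table above documents, a Docker Compose secret for the database password might be wired up like this; the file path and secret name are illustrative, not from this change:

```yaml
services:
  immich-server:
    environment:
      # The value is read from the referenced file instead of being passed inline.
      DB_PASSWORD_FILE: /run/secrets/db_password
    secrets:
      - db_password

secrets:
  db_password:
    file: ./secrets/db_password.txt # a plain file containing only the password
```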
@@ -107,8 +107,6 @@ Accept the default option or select the **Machine Learning Image Type** for your
 
 Immich's default is `postgres` but you should consider setting the **Database Password** to a custom value using only the characters `A-Za-z0-9`.
 
-The **Redis Password** should be set to a custom value using only the characters `A-Za-z0-9`.
-
 Accept the **Log Level** default of **Log**.
 
 Leave **Hugging Face Endpoint** blank. (This is for downloading ML models from a different source.)

@@ -242,7 +240,7 @@ className="border rounded-xl"
 :::info
 Some Environment Variables are not available for the TrueNAS SCALE app. This is mainly because they can be configured through GUI options in the [Edit Immich screen](#edit-app-settings).
 
-Some examples are: `IMMICH_VERSION`, `UPLOAD_LOCATION`, `DB_DATA_LOCATION`, `TZ`, `IMMICH_LOG_LEVEL`, `DB_PASSWORD`, `REDIS_PASSWORD`.
+Some examples are: `IMMICH_VERSION`, `UPLOAD_LOCATION`, `DB_DATA_LOCATION`, `TZ`, `IMMICH_LOG_LEVEL`, `DB_PASSWORD`.
 :::
 
 ## Updating the App
@@ -17,9 +17,9 @@ Immich can easily be installed and updated on Unraid via:
 
 :::
 
-In order to install Immich from the Unraid CA, you will need an existing Redis and PostgreSQL 14 container, If you do not already have Redis or PostgreSQL you can install them from the Unraid CA, just make sure you choose PostgreSQL **14**.
+In order to install Immich from the Unraid CA, you will need an existing PostgreSQL 14 container, If you do not already have PostgreSQL you can install it from the Unraid CA, just make sure you choose PostgreSQL **14**.
 
-Once you have Redis and PostgreSQL running, search for Immich on the Unraid CA, choose either of the templates listed and fill out the example variables.
+Once you have PostgreSQL running, search for Immich on the Unraid CA, choose either of the templates listed and fill out the example variables.
 
 For more information about setting up the community image see [here](https://github.com/imagegenius/docker-immich#application-setup)
 

@@ -45,63 +45,63 @@ width="70%"
 alt="Select Plugins > Compose.Manager > Add New Stack > Label it Immich"
 />
 
 3. Select the cogwheel ⚙️ next to Immich and click "**Edit Stack**"
 4. Click "**Compose File**" and then paste the entire contents of the [Immich Docker Compose](https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml) file into the Unraid editor. Remove any text that may be in the text area by default. Note that Unraid v6.12.10 uses version 24.0.9 of the Docker Engine, which does not support healthcheck `start_interval` as defined in the `database` service of the Docker compose file (version 25 or higher is needed). This parameter defines an initial waiting period before starting health checks, to give the container time to start up. Commenting out the `start_interval` and `start_period` parameters will allow the containers to start up normally. The only downside to this is that the database container will not receive an initial health check until `interval` time has passed.
 
    <details >
    <summary>Using an existing Postgres container? Click me! Otherwise proceed to step 5.</summary>
    <ul>
      <li>Comment out the database service</li>
      <img
      src={require('./img/unraid02.webp').default}
      width="50%"
      alt="Comment out database service in the compose file"
      />
      <li>Comment out the database dependency for <b>each service</b> <i>(example in screenshot below only shows 2 of the services - ensure you do this for all services)</i></li>
      <img
      src={require('./img/unraid03.webp').default}
      width="50%"
      alt="Comment out every reference to the database service in the compose file"
      />
      <li>Comment out the volumes</li>
      <img
      src={require('./img/unraid04.webp').default}
      width="20%"
      alt="Comment out database volume"
      />
    </ul>
    </details>
 
 5. Click "**Save Changes**", you will be prompted to edit stack UI labels, just leave this blank and click "**Ok**"
 6. Select the cog ⚙️ next to Immich, click "**Edit Stack**", then click "**Env File**"
 7. Paste the entire contents of the [Immich example.env](https://github.com/immich-app/immich/releases/latest/download/example.env) file into the Unraid editor, then **before saving** edit the following:
 
    - `UPLOAD_LOCATION`: Create a folder in your Images Unraid share and place the **absolute** location here > For example my _"images"_ share has a folder within it called _"immich"_. If I browse to this directory in the terminal and type `pwd` the output is `/mnt/user/images/immich`. This is the exact value I need to enter as my `UPLOAD_LOCATION`
    - `DB_DATA_LOCATION`: Change this to use an Unraid share (preferably a cache pool, e.g. `/mnt/user/appdata/postgresql/data`). This uses the `appdata` share. Do also create the `postgresql` folder, by running `mkdir /mnt/user/{share_location}/postgresql/data`. If left at default it will try to use Unraid's `/boot/config/plugins/compose.manager/projects/[stack_name]/postgres` folder which it doesn't have permissions to, resulting in this container continuously restarting.
 
    <img
    src={require('./img/unraid05.webp').default}
    width="70%"
    alt="Absolute location of where you want immich images stored"
    />
 
    <details >
    <summary>Using an existing Postgres container? Click me! Otherwise proceed to step 8.</summary>
    <p>Update the following database variables as relevant to your Postgres container:</p>
    <ul>
      <li><code>DB_HOSTNAME</code></li>
      <li><code>DB_USERNAME</code></li>
      <li><code>DB_PASSWORD</code></li>
      <li><code>DB_DATABASE_NAME</code></li>
      <li><code>DB_PORT</code></li>
    </ul>
    </details>
 
 8. Click "**Save Changes**" followed by "**Compose Up**" and Unraid will begin to create the Immich containers in a popup window. Once complete you will see a message on the popup window stating _"Connection Closed"_. Click "**Done**" and go to the Unraid "**Docker**" page
 
    > Note: This can take several minutes depending on your Internet speed and Unraid hardware
 
 9. Once on the Docker page you will see several Immich containers, one of them will be labelled `immich_server` and will have a port mapping. Visit the `IP:PORT` displayed in your web browser and you should see the Immich admin setup page.
 
    <img
    src={require('./img/unraid06.webp').default}

@@ -122,7 +122,7 @@ alt="Go to Docker Tab and visit the address listed next to immich-web"
    width="90%"
    alt="Go to Docker Tab and visit the address listed next to immich-web"
    />
 
 </details>
 
 :::tip
|
|||||||
extra_hosts:
|
extra_hosts:
|
||||||
- 'auth-server:host-gateway'
|
- 'auth-server:host-gateway'
|
||||||
depends_on:
|
depends_on:
|
||||||
- redis
|
|
||||||
- database
|
- database
|
||||||
ports:
|
ports:
|
||||||
- 2285:2285
|
- 2285:2285
|
||||||
|
|
||||||
redis:
|
|
||||||
image: redis:6.2-alpine@sha256:3211c33a618c457e5d241922c975dbc4f446d0bdb2dc75694f5573ef8e2d01fa
|
|
||||||
|
|
||||||
database:
|
database:
|
||||||
image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
|
image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
|
||||||
command: -c fsync=off -c shared_preload_libraries=vectors.so
|
command: -c fsync=off -c shared_preload_libraries=vectors.so
|
||||||
|
|||||||
@@ -78,7 +78,7 @@ describe('/jobs', () => {
       }
 
       await utils.jobCommand(admin.accessToken, JobName.MetadataExtraction, {
-        command: JobCommand.Empty,
+        command: JobCommand.Clear,
         force: false,
       });
 

@@ -160,7 +160,7 @@ describe('/jobs', () => {
       expect(assetBefore.thumbhash).toBeNull();
 
       await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
-        command: JobCommand.Empty,
+        command: JobCommand.Clear,
         force: false,
       });
 
@@ -1,4 +1,4 @@
-import { AssetMediaResponseDto, LoginResponseDto, SharedLinkType } from '@immich/sdk';
+import { AssetMediaResponseDto, LoginResponseDto, SharedLinkType, TimeBucketSize } from '@immich/sdk';
 import { DateTime } from 'luxon';
 import { createUserDto } from 'src/fixtures';
 import { errorDto } from 'src/responses';

@@ -52,7 +52,7 @@ describe('/timeline', () => {
 
   describe('GET /timeline/buckets', () => {
     it('should require authentication', async () => {
-      const { status, body } = await request(app).get('/timeline/buckets').query({});
+      const { status, body } = await request(app).get('/timeline/buckets').query({ size: TimeBucketSize.Month });
       expect(status).toBe(401);
       expect(body).toEqual(errorDto.unauthorized);
     });

@@ -61,7 +61,7 @@ describe('/timeline', () => {
       const { status, body } = await request(app)
         .get('/timeline/buckets')
         .set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
-        .query({});
+        .query({ size: TimeBucketSize.Month });
 
       expect(status).toBe(200);
       expect(body).toEqual(

@@ -78,17 +78,33 @@ describe('/timeline', () => {
         assetIds: userAssets.map(({ id }) => id),
       });
 
-      const { status, body } = await request(app).get('/timeline/buckets').query({ key: sharedLink.key });
+      const { status, body } = await request(app)
+        .get('/timeline/buckets')
+        .query({ key: sharedLink.key, size: TimeBucketSize.Month });
 
       expect(status).toBe(400);
       expect(body).toEqual(errorDto.noPermission);
     });
 
+    it('should get time buckets by day', async () => {
+      const { status, body } = await request(app)
+        .get('/timeline/buckets')
+        .set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
+        .query({ size: TimeBucketSize.Day });
+
+      expect(status).toBe(200);
+      expect(body).toEqual([
+        { count: 2, timeBucket: '1970-02-11T00:00:00.000Z' },
+        { count: 1, timeBucket: '1970-02-10T00:00:00.000Z' },
+        { count: 1, timeBucket: '1970-01-01T00:00:00.000Z' },
+      ]);
+    });
+
     it('should return error if time bucket is requested with partners asset and archived', async () => {
       const req1 = await request(app)
         .get('/timeline/buckets')
         .set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
-        .query({ withPartners: true, isArchived: true });
+        .query({ size: TimeBucketSize.Month, withPartners: true, isArchived: true });
 
       expect(req1.status).toBe(400);
       expect(req1.body).toEqual(errorDto.badRequest());

@@ -96,7 +112,7 @@ describe('/timeline', () => {
       const req2 = await request(app)
         .get('/timeline/buckets')
         .set('Authorization', `Bearer ${user.accessToken}`)
-        .query({ withPartners: true, isArchived: undefined });
+        .query({ size: TimeBucketSize.Month, withPartners: true, isArchived: undefined });
 
       expect(req2.status).toBe(400);
       expect(req2.body).toEqual(errorDto.badRequest());

@@ -106,7 +122,7 @@ describe('/timeline', () => {
       const req1 = await request(app)
         .get('/timeline/buckets')
         .set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
-        .query({ withPartners: true, isFavorite: true });
+        .query({ size: TimeBucketSize.Month, withPartners: true, isFavorite: true });
 
       expect(req1.status).toBe(400);
       expect(req1.body).toEqual(errorDto.badRequest());

@@ -114,7 +130,7 @@ describe('/timeline', () => {
       const req2 = await request(app)
         .get('/timeline/buckets')
         .set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
-        .query({ withPartners: true, isFavorite: false });
+        .query({ size: TimeBucketSize.Month, withPartners: true, isFavorite: false });
 
       expect(req2.status).toBe(400);
       expect(req2.body).toEqual(errorDto.badRequest());

@@ -124,7 +140,7 @@ describe('/timeline', () => {
       const req = await request(app)
         .get('/timeline/buckets')
         .set('Authorization', `Bearer ${user.accessToken}`)
-        .query({ withPartners: true, isTrashed: true });
+        .query({ size: TimeBucketSize.Month, withPartners: true, isTrashed: true });
 
       expect(req.status).toBe(400);
       expect(req.body).toEqual(errorDto.badRequest());

@@ -134,6 +150,7 @@ describe('/timeline', () => {
   describe('GET /timeline/bucket', () => {
     it('should require authentication', async () => {
       const { status, body } = await request(app).get('/timeline/bucket').query({
+        size: TimeBucketSize.Month,
         timeBucket: '1900-01-01',
       });
 

@@ -144,27 +161,11 @@ describe('/timeline', () => {
     it('should handle 5 digit years', async () => {
       const { status, body } = await request(app)
         .get('/timeline/bucket')
-        .query({ timeBucket: '012345-01-01' })
+        .query({ size: TimeBucketSize.Month, timeBucket: '012345-01-01' })
         .set('Authorization', `Bearer ${timeBucketUser.accessToken}`);
 
       expect(status).toBe(200);
-      expect(body).toEqual({
-        city: [],
-        country: [],
-        duration: [],
-        id: [],
-        isArchived: [],
-        isFavorite: [],
-        isImage: [],
-        isTrashed: [],
-        livePhotoVideoId: [],
-        localDateTime: [],
-        ownerId: [],
-        projectionType: [],
-        ratio: [],
-        status: [],
-        thumbhash: [],
-      });
+      expect(body).toEqual([]);
     });
 
     // TODO enable date string validation while still accepting 5 digit years

@@ -172,7 +173,7 @@ describe('/timeline', () => {
     //   const { status, body } = await request(app)
     //     .get('/timeline/bucket')
     //     .set('Authorization', `Bearer ${user.accessToken}`)
-    //     .query({ timeBucket: 'foo' });
+    //     .query({ size: TimeBucketSize.Month, timeBucket: 'foo' });
 
     //   expect(status).toBe(400);
     //   expect(body).toEqual(errorDto.badRequest);

@@ -182,26 +183,10 @@ describe('/timeline', () => {
       const { status, body } = await request(app)
         .get('/timeline/bucket')
         .set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
-        .query({ timeBucket: '1970-02-10' });
+        .query({ size: TimeBucketSize.Month, timeBucket: '1970-02-10' });
 
       expect(status).toBe(200);
-      expect(body).toEqual({
-        city: [],
-        country: [],
-        duration: [],
-        id: [],
-        isArchived: [],
-        isFavorite: [],
-        isImage: [],
-        isTrashed: [],
-        livePhotoVideoId: [],
-        localDateTime: [],
-        ownerId: [],
-        projectionType: [],
-        ratio: [],
-        status: [],
-        thumbhash: [],
-      });
+      expect(body).toEqual([]);
     });
   });
 });
@@ -59,7 +59,7 @@ show_friendly_message() {
   Successfully deployed Immich!
   You can access the website or the mobile app at http://$ip_address:2283
   ---------------------------------------------------
-  If you want to configure custom information of the server, including the database, Redis information, or the backup (or upload) location, etc.
+  If you want to configure custom information of the server, including the database, or the backup (or upload) location, etc.
 
   1. First bring down the containers with the command 'docker compose down' in the immich-app directory,
 
mobile/openapi/README.md (generated, 4 changes)

@@ -477,8 +477,8 @@ Class | Method | HTTP request | Description
 - [TemplateDto](doc//TemplateDto.md)
 - [TemplateResponseDto](doc//TemplateResponseDto.md)
 - [TestEmailResponseDto](doc//TestEmailResponseDto.md)
-- [TimeBucketAssetResponseDto](doc//TimeBucketAssetResponseDto.md)
-- [TimeBucketsResponseDto](doc//TimeBucketsResponseDto.md)
+- [TimeBucketResponseDto](doc//TimeBucketResponseDto.md)
+- [TimeBucketSize](doc//TimeBucketSize.md)
 - [ToneMapping](doc//ToneMapping.md)
 - [TranscodeHWAccel](doc//TranscodeHWAccel.md)
 - [TranscodePolicy](doc//TranscodePolicy.md)
mobile/openapi/lib/api.dart (generated, 4 changes)

@@ -281,8 +281,8 @@ part 'model/tags_update.dart';
 part 'model/template_dto.dart';
 part 'model/template_response_dto.dart';
 part 'model/test_email_response_dto.dart';
-part 'model/time_bucket_asset_response_dto.dart';
-part 'model/time_buckets_response_dto.dart';
+part 'model/time_bucket_response_dto.dart';
+part 'model/time_bucket_size.dart';
 part 'model/tone_mapping.dart';
 part 'model/transcode_hw_accel.dart';
 part 'model/transcode_policy.dart';
mobile/openapi/lib/api/timeline_api.dart (generated, 47 changes)

@@ -19,6 +19,8 @@ class TimelineApi {
   /// Performs an HTTP 'GET /timeline/bucket' operation and returns the [Response].
   /// Parameters:
   ///
+  /// * [TimeBucketSize] size (required):
+  ///
   /// * [String] timeBucket (required):
   ///
   /// * [String] albumId:

@@ -33,10 +35,6 @@ class TimelineApi {
   ///
   /// * [AssetOrder] order:
   ///
-  /// * [num] page:
-  ///
-  /// * [num] pageSize:
-  ///
   /// * [String] personId:
   ///
   /// * [String] tagId:

@@ -46,7 +44,7 @@ class TimelineApi {
   /// * [bool] withPartners:
   ///
   /// * [bool] withStacked:
-  Future<Response> getTimeBucketWithHttpInfo(String timeBucket, { String? albumId, bool? isArchived, bool? isFavorite, bool? isTrashed, String? key, AssetOrder? order, num? page, num? pageSize, String? personId, String? tagId, String? userId, bool? withPartners, bool? withStacked, }) async {
+  Future<Response> getTimeBucketWithHttpInfo(TimeBucketSize size, String timeBucket, { String? albumId, bool? isArchived, bool? isFavorite, bool? isTrashed, String? key, AssetOrder? order, String? personId, String? tagId, String? userId, bool? withPartners, bool? withStacked, }) async {
     // ignore: prefer_const_declarations
     final apiPath = r'/timeline/bucket';
 

@@ -75,15 +73,10 @@ class TimelineApi {
     if (order != null) {
       queryParams.addAll(_queryParams('', 'order', order));
     }
-    if (page != null) {
-      queryParams.addAll(_queryParams('', 'page', page));
-    }
-    if (pageSize != null) {
-      queryParams.addAll(_queryParams('', 'pageSize', pageSize));
-    }
     if (personId != null) {
       queryParams.addAll(_queryParams('', 'personId', personId));
     }
+    queryParams.addAll(_queryParams('', 'size', size));
     if (tagId != null) {
       queryParams.addAll(_queryParams('', 'tagId', tagId));
     }

@@ -114,6 +107,8 @@ class TimelineApi {
 
   /// Parameters:
   ///
+  /// * [TimeBucketSize] size (required):
+  ///
   /// * [String] timeBucket (required):
   ///
   /// * [String] albumId:

@@ -128,10 +123,6 @@ class TimelineApi {
   ///
   /// * [AssetOrder] order:
   ///
-  /// * [num] page:
-  ///
-  /// * [num] pageSize:
-  ///
   /// * [String] personId:
   ///
   /// * [String] tagId:

@@ -141,8 +132,8 @@ class TimelineApi {
   /// * [bool] withPartners:
   ///
   /// * [bool] withStacked:
-  Future<TimeBucketAssetResponseDto?> getTimeBucket(String timeBucket, { String? albumId, bool? isArchived, bool? isFavorite, bool? isTrashed, String? key, AssetOrder? order, num? page, num? pageSize, String? personId, String? tagId, String? userId, bool? withPartners, bool? withStacked, }) async {
-    final response = await getTimeBucketWithHttpInfo(timeBucket, albumId: albumId, isArchived: isArchived, isFavorite: isFavorite, isTrashed: isTrashed, key: key, order: order, page: page, pageSize: pageSize, personId: personId, tagId: tagId, userId: userId, withPartners: withPartners, withStacked: withStacked, );
+  Future<List<AssetResponseDto>?> getTimeBucket(TimeBucketSize size, String timeBucket, { String? albumId, bool? isArchived, bool? isFavorite, bool? isTrashed, String? key, AssetOrder? order, String? personId, String? tagId, String? userId, bool? withPartners, bool? withStacked, }) async {
+    final response = await getTimeBucketWithHttpInfo(size, timeBucket, albumId: albumId, isArchived: isArchived, isFavorite: isFavorite, isTrashed: isTrashed, key: key, order: order, personId: personId, tagId: tagId, userId: userId, withPartners: withPartners, withStacked: withStacked, );
     if (response.statusCode >= HttpStatus.badRequest) {
       throw ApiException(response.statusCode, await _decodeBodyBytes(response));
     }

@@ -150,8 +141,11 @@ class TimelineApi {
     // At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
     // FormatException when trying to decode an empty string.
     if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
-      return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'TimeBucketAssetResponseDto',) as TimeBucketAssetResponseDto;
|
final responseBody = await _decodeBodyBytes(response);
|
||||||
|
return (await apiClient.deserializeAsync(responseBody, 'List<AssetResponseDto>') as List)
|
||||||
|
.cast<AssetResponseDto>()
|
||||||
|
.toList(growable: false);
|
||||||
|
|
||||||
}
|
}
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
@@ -159,6 +153,8 @@ class TimelineApi {
|
|||||||
/// Performs an HTTP 'GET /timeline/buckets' operation and returns the [Response].
|
/// Performs an HTTP 'GET /timeline/buckets' operation and returns the [Response].
|
||||||
/// Parameters:
|
/// Parameters:
|
||||||
///
|
///
|
||||||
|
/// * [TimeBucketSize] size (required):
|
||||||
|
///
|
||||||
/// * [String] albumId:
|
/// * [String] albumId:
|
||||||
///
|
///
|
||||||
/// * [bool] isArchived:
|
/// * [bool] isArchived:
|
||||||
@@ -180,7 +176,7 @@ class TimelineApi {
|
|||||||
/// * [bool] withPartners:
|
/// * [bool] withPartners:
|
||||||
///
|
///
|
||||||
/// * [bool] withStacked:
|
/// * [bool] withStacked:
|
||||||
Future<Response> getTimeBucketsWithHttpInfo({ String? albumId, bool? isArchived, bool? isFavorite, bool? isTrashed, String? key, AssetOrder? order, String? personId, String? tagId, String? userId, bool? withPartners, bool? withStacked, }) async {
|
Future<Response> getTimeBucketsWithHttpInfo(TimeBucketSize size, { String? albumId, bool? isArchived, bool? isFavorite, bool? isTrashed, String? key, AssetOrder? order, String? personId, String? tagId, String? userId, bool? withPartners, bool? withStacked, }) async {
|
||||||
// ignore: prefer_const_declarations
|
// ignore: prefer_const_declarations
|
||||||
final apiPath = r'/timeline/buckets';
|
final apiPath = r'/timeline/buckets';
|
||||||
|
|
||||||
@@ -212,6 +208,7 @@ class TimelineApi {
|
|||||||
if (personId != null) {
|
if (personId != null) {
|
||||||
queryParams.addAll(_queryParams('', 'personId', personId));
|
queryParams.addAll(_queryParams('', 'personId', personId));
|
||||||
}
|
}
|
||||||
|
queryParams.addAll(_queryParams('', 'size', size));
|
||||||
if (tagId != null) {
|
if (tagId != null) {
|
||||||
queryParams.addAll(_queryParams('', 'tagId', tagId));
|
queryParams.addAll(_queryParams('', 'tagId', tagId));
|
||||||
}
|
}
|
||||||
@@ -241,6 +238,8 @@ class TimelineApi {
|
|||||||
|
|
||||||
/// Parameters:
|
/// Parameters:
|
||||||
///
|
///
|
||||||
|
/// * [TimeBucketSize] size (required):
|
||||||
|
///
|
||||||
/// * [String] albumId:
|
/// * [String] albumId:
|
||||||
///
|
///
|
||||||
/// * [bool] isArchived:
|
/// * [bool] isArchived:
|
||||||
@@ -262,8 +261,8 @@ class TimelineApi {
|
|||||||
/// * [bool] withPartners:
|
/// * [bool] withPartners:
|
||||||
///
|
///
|
||||||
/// * [bool] withStacked:
|
/// * [bool] withStacked:
|
||||||
Future<List<TimeBucketsResponseDto>?> getTimeBuckets({ String? albumId, bool? isArchived, bool? isFavorite, bool? isTrashed, String? key, AssetOrder? order, String? personId, String? tagId, String? userId, bool? withPartners, bool? withStacked, }) async {
|
Future<List<TimeBucketResponseDto>?> getTimeBuckets(TimeBucketSize size, { String? albumId, bool? isArchived, bool? isFavorite, bool? isTrashed, String? key, AssetOrder? order, String? personId, String? tagId, String? userId, bool? withPartners, bool? withStacked, }) async {
|
||||||
final response = await getTimeBucketsWithHttpInfo( albumId: albumId, isArchived: isArchived, isFavorite: isFavorite, isTrashed: isTrashed, key: key, order: order, personId: personId, tagId: tagId, userId: userId, withPartners: withPartners, withStacked: withStacked, );
|
final response = await getTimeBucketsWithHttpInfo(size, albumId: albumId, isArchived: isArchived, isFavorite: isFavorite, isTrashed: isTrashed, key: key, order: order, personId: personId, tagId: tagId, userId: userId, withPartners: withPartners, withStacked: withStacked, );
|
||||||
if (response.statusCode >= HttpStatus.badRequest) {
|
if (response.statusCode >= HttpStatus.badRequest) {
|
||||||
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
|
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
|
||||||
}
|
}
|
||||||
@@ -272,8 +271,8 @@ class TimelineApi {
|
|||||||
// FormatException when trying to decode an empty string.
|
// FormatException when trying to decode an empty string.
|
||||||
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
|
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
|
||||||
final responseBody = await _decodeBodyBytes(response);
|
final responseBody = await _decodeBodyBytes(response);
|
||||||
return (await apiClient.deserializeAsync(responseBody, 'List<TimeBucketsResponseDto>') as List)
|
return (await apiClient.deserializeAsync(responseBody, 'List<TimeBucketResponseDto>') as List)
|
||||||
.cast<TimeBucketsResponseDto>()
|
.cast<TimeBucketResponseDto>()
|
||||||
.toList(growable: false);
|
.toList(growable: false);
|
||||||
|
|
||||||
}
|
}
|
||||||
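To make the signature change above concrete, here is a minimal caller-side sketch of the timeline API after this commit. It assumes the generated `openapi` package is on the import path and that the `TimelineApi` instance is backed by an already-configured `ApiClient`; the loop bodies and printed strings are illustrative only.

```dart
import 'package:openapi/api.dart';

Future<void> loadTimeline(TimelineApi api) async {
  // `size` is now a required positional parameter, and the old
  // `page`/`pageSize` named parameters are gone entirely.
  final buckets = await api.getTimeBuckets(TimeBucketSize.MONTH);
  for (final bucket in buckets ?? <TimeBucketResponseDto>[]) {
    // Each bucket reports its asset count and its time-bucket key.
    print('${bucket.timeBucket}: ${bucket.count} assets');

    // The bucket endpoint now returns a plain list of assets instead of
    // the columnar TimeBucketAssetResponseDto it returned before.
    final assets = await api.getTimeBucket(TimeBucketSize.MONTH, bucket.timeBucket);
    print('fetched ${assets?.length ?? 0} assets');
  }
}
```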
8 mobile/openapi/lib/api_client.dart (generated)

@@ -618,10 +618,10 @@ class ApiClient {
           return TemplateResponseDto.fromJson(value);
         case 'TestEmailResponseDto':
           return TestEmailResponseDto.fromJson(value);
-        case 'TimeBucketAssetResponseDto':
-          return TimeBucketAssetResponseDto.fromJson(value);
-        case 'TimeBucketsResponseDto':
-          return TimeBucketsResponseDto.fromJson(value);
+        case 'TimeBucketResponseDto':
+          return TimeBucketResponseDto.fromJson(value);
+        case 'TimeBucketSize':
+          return TimeBucketSizeTypeTransformer().decode(value);
         case 'ToneMapping':
           return ToneMappingTypeTransformer().decode(value);
         case 'TranscodeHWAccel':
3 mobile/openapi/lib/api_helper.dart (generated)

@@ -136,6 +136,9 @@ String parameterToString(dynamic value) {
   if (value is SyncRequestType) {
     return SyncRequestTypeTypeTransformer().encode(value).toString();
   }
+  if (value is TimeBucketSize) {
+    return TimeBucketSizeTypeTransformer().encode(value).toString();
+  }
   if (value is ToneMapping) {
     return ToneMappingTypeTransformer().encode(value).toString();
   }
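A small sketch of what the new branch in `parameterToString` does when the client serializes the `size` query parameter (again assuming the generated `openapi` package import):

```dart
import 'package:openapi/api.dart';

void main() {
  // The added branch routes TimeBucketSize through its transformer,
  // so the enum ends up on the wire as its raw string value.
  print(parameterToString(TimeBucketSize.DAY));   // DAY
  print(parameterToString(TimeBucketSize.MONTH)); // MONTH
}
```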
6 mobile/openapi/lib/model/job_command.dart (generated)

@@ -26,7 +26,7 @@ class JobCommand {
   static const start = JobCommand._(r'start');
   static const pause = JobCommand._(r'pause');
   static const resume = JobCommand._(r'resume');
-  static const empty = JobCommand._(r'empty');
+  static const clear = JobCommand._(r'clear');
   static const clearFailed = JobCommand._(r'clear-failed');

   /// List of all possible values in this [enum][JobCommand].
@@ -34,7 +34,7 @@ class JobCommand {
     start,
     pause,
     resume,
-    empty,
+    clear,
     clearFailed,
   ];

@@ -77,7 +77,7 @@ class JobCommandTypeTransformer {
         case r'start': return JobCommand.start;
         case r'pause': return JobCommand.pause;
         case r'resume': return JobCommand.resume;
-        case r'empty': return JobCommand.empty;
+        case r'clear': return JobCommand.clear;
         case r'clear-failed': return JobCommand.clearFailed;
         default:
           if (!allowNull) {
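The `empty` job command is renamed to `clear` on the wire. A quick round-trip sketch with the generated transformer (assuming the generated `openapi` package import; the generated enum classes expose `toJson()` returning the raw value, as the `TimeBucketSize` file later in this diff shows):

```dart
import 'package:openapi/api.dart';

void main() {
  // Encoding: the member serializes to the new wire value.
  print(JobCommand.clear.toJson()); // clear

  // Decoding: the transformer maps the wire value back to the member;
  // the old 'empty' string now falls through to null, since allowNull
  // defaults to true in the generated decode().
  print(JobCommandTypeTransformer().decode(r'clear')); // clear
  print(JobCommandTypeTransformer().decode(r'empty')); // null
}
```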
18 mobile/openapi/lib/model/job_counts_dto.dart (generated)

@@ -14,54 +14,42 @@ class JobCountsDto {
   /// Returns a new [JobCountsDto] instance.
   JobCountsDto({
     required this.active,
-    required this.completed,
     required this.delayed,
     required this.failed,
-    required this.paused,
     required this.waiting,
   });

   int active;

-  int completed;
-
   int delayed;

   int failed;

-  int paused;
-
   int waiting;

   @override
   bool operator ==(Object other) => identical(this, other) || other is JobCountsDto &&
     other.active == active &&
-    other.completed == completed &&
     other.delayed == delayed &&
     other.failed == failed &&
-    other.paused == paused &&
     other.waiting == waiting;

   @override
   int get hashCode =>
     // ignore: unnecessary_parenthesis
     (active.hashCode) +
-    (completed.hashCode) +
     (delayed.hashCode) +
     (failed.hashCode) +
-    (paused.hashCode) +
     (waiting.hashCode);

   @override
-  String toString() => 'JobCountsDto[active=$active, completed=$completed, delayed=$delayed, failed=$failed, paused=$paused, waiting=$waiting]';
+  String toString() => 'JobCountsDto[active=$active, delayed=$delayed, failed=$failed, waiting=$waiting]';

   Map<String, dynamic> toJson() {
     final json = <String, dynamic>{};
     json[r'active'] = this.active;
-    json[r'completed'] = this.completed;
     json[r'delayed'] = this.delayed;
     json[r'failed'] = this.failed;
-    json[r'paused'] = this.paused;
     json[r'waiting'] = this.waiting;
     return json;
   }
@@ -76,10 +64,8 @@ class JobCountsDto {

     return JobCountsDto(
       active: mapValueOfType<int>(json, r'active')!,
-      completed: mapValueOfType<int>(json, r'completed')!,
       delayed: mapValueOfType<int>(json, r'delayed')!,
       failed: mapValueOfType<int>(json, r'failed')!,
-      paused: mapValueOfType<int>(json, r'paused')!,
       waiting: mapValueOfType<int>(json, r'waiting')!,
     );
   }
@@ -129,10 +115,8 @@ class JobCountsDto {
   /// The list of required keys that must be present in a JSON.
   static const requiredKeys = <String>{
     'active',
-    'completed',
     'delayed',
     'failed',
-    'paused',
     'waiting',
   };
 }
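With `completed` and `paused` dropped from the constructor, `requiredKeys`, and serialization, a payload sketch for the slimmed DTO looks like this (field values are hypothetical):

```dart
import 'package:openapi/api.dart';

void main() {
  // 'completed' and 'paused' are no longer required keys, and the DTO
  // no longer carries them even if a server were still to send them.
  final counts = JobCountsDto.fromJson({
    'active': 1,
    'delayed': 0,
    'failed': 2,
    'waiting': 7,
  })!;
  print(counts); // JobCountsDto[active=1, delayed=0, failed=2, waiting=7]
}
```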
24 mobile/openapi/lib/model/queue_status_dto.dart (generated)

@@ -13,32 +13,26 @@ part of openapi.api;
 class QueueStatusDto {
   /// Returns a new [QueueStatusDto] instance.
   QueueStatusDto({
-    required this.isActive,
-    required this.isPaused,
+    required this.paused,
   });

-  bool isActive;
-
-  bool isPaused;
+  bool paused;

   @override
   bool operator ==(Object other) => identical(this, other) || other is QueueStatusDto &&
-    other.isActive == isActive &&
-    other.isPaused == isPaused;
+    other.paused == paused;

   @override
   int get hashCode =>
     // ignore: unnecessary_parenthesis
-    (isActive.hashCode) +
-    (isPaused.hashCode);
+    (paused.hashCode);

   @override
-  String toString() => 'QueueStatusDto[isActive=$isActive, isPaused=$isPaused]';
+  String toString() => 'QueueStatusDto[paused=$paused]';

   Map<String, dynamic> toJson() {
     final json = <String, dynamic>{};
-    json[r'isActive'] = this.isActive;
-    json[r'isPaused'] = this.isPaused;
+    json[r'paused'] = this.paused;
     return json;
   }

@@ -51,8 +45,7 @@ class QueueStatusDto {
     final json = value.cast<String, dynamic>();

     return QueueStatusDto(
-      isActive: mapValueOfType<bool>(json, r'isActive')!,
-      isPaused: mapValueOfType<bool>(json, r'isPaused')!,
+      paused: mapValueOfType<bool>(json, r'paused')!,
     );
   }
   return null;
@@ -100,8 +93,7 @@ class QueueStatusDto {

   /// The list of required keys that must be present in a JSON.
   static const requiredKeys = <String>{
-    'isActive',
-    'isPaused',
+    'paused',
   };
 }

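`QueueStatusDto` collapses from two flags (`isActive`, `isPaused`) to a single `paused` flag. A parsing sketch with a hypothetical payload:

```dart
import 'package:openapi/api.dart';

void main() {
  // Only 'paused' is required now; 'isActive' and 'isPaused' are gone.
  final status = QueueStatusDto.fromJson({'paused': false})!;
  print(status); // QueueStatusDto[paused=false]
}
```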
243 mobile/openapi/lib/model/time_bucket_asset_response_dto.dart (generated, deleted)

@@ -1,243 +0,0 @@
-//
-// AUTO-GENERATED FILE, DO NOT MODIFY!
-//
-// @dart=2.18
-
-// ignore_for_file: unused_element, unused_import
-// ignore_for_file: always_put_required_named_parameters_first
-// ignore_for_file: constant_identifier_names
-// ignore_for_file: lines_longer_than_80_chars
-
-part of openapi.api;
-
-class TimeBucketAssetResponseDto {
-  /// Returns a new [TimeBucketAssetResponseDto] instance.
-  TimeBucketAssetResponseDto({
-    this.city = const [],
-    this.country = const [],
-    this.duration = const [],
-    this.id = const [],
-    this.isArchived = const [],
-    this.isFavorite = const [],
-    this.isImage = const [],
-    this.isTrashed = const [],
-    this.livePhotoVideoId = const [],
-    this.localDateTime = const [],
-    this.ownerId = const [],
-    this.projectionType = const [],
-    this.ratio = const [],
-    this.stack = const [],
-    this.thumbhash = const [],
-  });
-
-  List<String?> city;
-
-  List<String?> country;
-
-  List<String?> duration;
-
-  List<String> id;
-
-  List<num> isArchived;
-
-  List<num> isFavorite;
-
-  List<num> isImage;
-
-  List<num> isTrashed;
-
-  List<String?> livePhotoVideoId;
-
-  List<String> localDateTime;
-
-  List<String> ownerId;
-
-  List<String?> projectionType;
-
-  List<num> ratio;
-
-  /// (stack ID, stack asset count) tuple
-  List<List<String>?> stack;
-
-  List<String?> thumbhash;
-
-  @override
-  bool operator ==(Object other) => identical(this, other) || other is TimeBucketAssetResponseDto &&
-    _deepEquality.equals(other.city, city) &&
-    _deepEquality.equals(other.country, country) &&
-    _deepEquality.equals(other.duration, duration) &&
-    _deepEquality.equals(other.id, id) &&
-    _deepEquality.equals(other.isArchived, isArchived) &&
-    _deepEquality.equals(other.isFavorite, isFavorite) &&
-    _deepEquality.equals(other.isImage, isImage) &&
-    _deepEquality.equals(other.isTrashed, isTrashed) &&
-    _deepEquality.equals(other.livePhotoVideoId, livePhotoVideoId) &&
-    _deepEquality.equals(other.localDateTime, localDateTime) &&
-    _deepEquality.equals(other.ownerId, ownerId) &&
-    _deepEquality.equals(other.projectionType, projectionType) &&
-    _deepEquality.equals(other.ratio, ratio) &&
-    _deepEquality.equals(other.stack, stack) &&
-    _deepEquality.equals(other.thumbhash, thumbhash);
-
-  @override
-  int get hashCode =>
-    // ignore: unnecessary_parenthesis
-    (city.hashCode) +
-    (country.hashCode) +
-    (duration.hashCode) +
-    (id.hashCode) +
-    (isArchived.hashCode) +
-    (isFavorite.hashCode) +
-    (isImage.hashCode) +
-    (isTrashed.hashCode) +
-    (livePhotoVideoId.hashCode) +
-    (localDateTime.hashCode) +
-    (ownerId.hashCode) +
-    (projectionType.hashCode) +
-    (ratio.hashCode) +
-    (stack.hashCode) +
-    (thumbhash.hashCode);
-
-  @override
-  String toString() => 'TimeBucketAssetResponseDto[city=$city, country=$country, duration=$duration, id=$id, isArchived=$isArchived, isFavorite=$isFavorite, isImage=$isImage, isTrashed=$isTrashed, livePhotoVideoId=$livePhotoVideoId, localDateTime=$localDateTime, ownerId=$ownerId, projectionType=$projectionType, ratio=$ratio, stack=$stack, thumbhash=$thumbhash]';
-
-  Map<String, dynamic> toJson() {
-    final json = <String, dynamic>{};
-    json[r'city'] = this.city;
-    json[r'country'] = this.country;
-    json[r'duration'] = this.duration;
-    json[r'id'] = this.id;
-    json[r'isArchived'] = this.isArchived;
-    json[r'isFavorite'] = this.isFavorite;
-    json[r'isImage'] = this.isImage;
-    json[r'isTrashed'] = this.isTrashed;
-    json[r'livePhotoVideoId'] = this.livePhotoVideoId;
-    json[r'localDateTime'] = this.localDateTime;
-    json[r'ownerId'] = this.ownerId;
-    json[r'projectionType'] = this.projectionType;
-    json[r'ratio'] = this.ratio;
-    json[r'stack'] = this.stack;
-    json[r'thumbhash'] = this.thumbhash;
-    return json;
-  }
-
-  /// Returns a new [TimeBucketAssetResponseDto] instance and imports its values from
-  /// [value] if it's a [Map], null otherwise.
-  // ignore: prefer_constructors_over_static_methods
-  static TimeBucketAssetResponseDto? fromJson(dynamic value) {
-    upgradeDto(value, "TimeBucketAssetResponseDto");
-    if (value is Map) {
-      final json = value.cast<String, dynamic>();
-
-      return TimeBucketAssetResponseDto(
-        city: json[r'city'] is Iterable
-          ? (json[r'city'] as Iterable).cast<String>().toList(growable: false)
-          : const [],
-        country: json[r'country'] is Iterable
-          ? (json[r'country'] as Iterable).cast<String>().toList(growable: false)
-          : const [],
-        duration: json[r'duration'] is Iterable
-          ? (json[r'duration'] as Iterable).cast<String>().toList(growable: false)
-          : const [],
-        id: json[r'id'] is Iterable
-          ? (json[r'id'] as Iterable).cast<String>().toList(growable: false)
-          : const [],
-        isArchived: json[r'isArchived'] is Iterable
-          ? (json[r'isArchived'] as Iterable).cast<num>().toList(growable: false)
-          : const [],
-        isFavorite: json[r'isFavorite'] is Iterable
-          ? (json[r'isFavorite'] as Iterable).cast<num>().toList(growable: false)
-          : const [],
-        isImage: json[r'isImage'] is Iterable
-          ? (json[r'isImage'] as Iterable).cast<num>().toList(growable: false)
-          : const [],
-        isTrashed: json[r'isTrashed'] is Iterable
-          ? (json[r'isTrashed'] as Iterable).cast<num>().toList(growable: false)
-          : const [],
-        livePhotoVideoId: json[r'livePhotoVideoId'] is Iterable
-          ? (json[r'livePhotoVideoId'] as Iterable).cast<String>().toList(growable: false)
-          : const [],
-        localDateTime: json[r'localDateTime'] is Iterable
-          ? (json[r'localDateTime'] as Iterable).cast<String>().toList(growable: false)
-          : const [],
-        ownerId: json[r'ownerId'] is Iterable
-          ? (json[r'ownerId'] as Iterable).cast<String>().toList(growable: false)
-          : const [],
-        projectionType: json[r'projectionType'] is Iterable
-          ? (json[r'projectionType'] as Iterable).cast<String>().toList(growable: false)
-          : const [],
-        ratio: json[r'ratio'] is Iterable
-          ? (json[r'ratio'] as Iterable).cast<num>().toList(growable: false)
-          : const [],
-        stack: json[r'stack'] is List
-          ? (json[r'stack'] as List).map((e) =>
-              e == null ? null : (e as List).cast<String>()
-            ).toList()
-          : const [],
-        thumbhash: json[r'thumbhash'] is Iterable
-          ? (json[r'thumbhash'] as Iterable).cast<String>().toList(growable: false)
-          : const [],
-      );
-    }
-    return null;
-  }
-
-  static List<TimeBucketAssetResponseDto> listFromJson(dynamic json, {bool growable = false,}) {
-    final result = <TimeBucketAssetResponseDto>[];
-    if (json is List && json.isNotEmpty) {
-      for (final row in json) {
-        final value = TimeBucketAssetResponseDto.fromJson(row);
-        if (value != null) {
-          result.add(value);
-        }
-      }
-    }
-    return result.toList(growable: growable);
-  }
-
-  static Map<String, TimeBucketAssetResponseDto> mapFromJson(dynamic json) {
-    final map = <String, TimeBucketAssetResponseDto>{};
-    if (json is Map && json.isNotEmpty) {
-      json = json.cast<String, dynamic>(); // ignore: parameter_assignments
-      for (final entry in json.entries) {
-        final value = TimeBucketAssetResponseDto.fromJson(entry.value);
-        if (value != null) {
-          map[entry.key] = value;
-        }
-      }
-    }
-    return map;
-  }
-
-  // maps a json object with a list of TimeBucketAssetResponseDto-objects as value to a dart map
-  static Map<String, List<TimeBucketAssetResponseDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
-    final map = <String, List<TimeBucketAssetResponseDto>>{};
-    if (json is Map && json.isNotEmpty) {
-      // ignore: parameter_assignments
-      json = json.cast<String, dynamic>();
-      for (final entry in json.entries) {
-        map[entry.key] = TimeBucketAssetResponseDto.listFromJson(entry.value, growable: growable,);
-      }
-    }
-    return map;
-  }
-
-  /// The list of required keys that must be present in a JSON.
-  static const requiredKeys = <String>{
-    'city',
-    'country',
-    'duration',
-    'id',
-    'isArchived',
-    'isFavorite',
-    'isImage',
-    'isTrashed',
-    'livePhotoVideoId',
-    'localDateTime',
-    'ownerId',
-    'projectionType',
-    'ratio',
-    'thumbhash',
-  };
-}
-
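For orientation: the deleted DTO above was a columnar encoding, where index `i` across all of the parallel lists described one asset, whereas after this change `GET /timeline/bucket` returns one JSON object per asset (see the spec hunks further down). A rough sketch of the two shapes, with entirely hypothetical field values and only the keys shown in this diff:

```dart
// Columnar shape (removed): one list per field, one index per asset.
const columnar = {
  'id': ['a1', 'a2'],
  'ratio': [1.5, 0.75],
  'isFavorite': [1, 0],
};

// Row shape (after this commit): one map per asset, each decoding
// into an AssetResponseDto (remaining fields omitted here).
const rows = [
  {'id': 'a1' /* ...other AssetResponseDto fields... */},
  {'id': 'a2' /* ...other AssetResponseDto fields... */},
];

void main() => print([columnar, rows]);
```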
mobile/openapi/lib/model/time_buckets_response_dto.dart → time_bucket_response_dto.dart (generated)

@@ -10,9 +10,9 @@

 part of openapi.api;

-class TimeBucketsResponseDto {
-  /// Returns a new [TimeBucketsResponseDto] instance.
-  TimeBucketsResponseDto({
+class TimeBucketResponseDto {
+  /// Returns a new [TimeBucketResponseDto] instance.
+  TimeBucketResponseDto({
     required this.count,
     required this.timeBucket,
   });
@@ -22,7 +22,7 @@
   String timeBucket;

   @override
-  bool operator ==(Object other) => identical(this, other) || other is TimeBucketsResponseDto &&
+  bool operator ==(Object other) => identical(this, other) || other is TimeBucketResponseDto &&
     other.count == count &&
     other.timeBucket == timeBucket;

@@ -33,7 +33,7 @@
     (timeBucket.hashCode);

   @override
-  String toString() => 'TimeBucketsResponseDto[count=$count, timeBucket=$timeBucket]';
+  String toString() => 'TimeBucketResponseDto[count=$count, timeBucket=$timeBucket]';

   Map<String, dynamic> toJson() {
     final json = <String, dynamic>{};
@@ -42,15 +42,15 @@
     return json;
   }

-  /// Returns a new [TimeBucketsResponseDto] instance and imports its values from
+  /// Returns a new [TimeBucketResponseDto] instance and imports its values from
   /// [value] if it's a [Map], null otherwise.
   // ignore: prefer_constructors_over_static_methods
-  static TimeBucketsResponseDto? fromJson(dynamic value) {
-    upgradeDto(value, "TimeBucketsResponseDto");
+  static TimeBucketResponseDto? fromJson(dynamic value) {
+    upgradeDto(value, "TimeBucketResponseDto");
     if (value is Map) {
       final json = value.cast<String, dynamic>();

-      return TimeBucketsResponseDto(
+      return TimeBucketResponseDto(
         count: mapValueOfType<int>(json, r'count')!,
         timeBucket: mapValueOfType<String>(json, r'timeBucket')!,
       );
@@ -58,11 +58,11 @@
     return null;
   }

-  static List<TimeBucketsResponseDto> listFromJson(dynamic json, {bool growable = false,}) {
-    final result = <TimeBucketsResponseDto>[];
+  static List<TimeBucketResponseDto> listFromJson(dynamic json, {bool growable = false,}) {
+    final result = <TimeBucketResponseDto>[];
     if (json is List && json.isNotEmpty) {
       for (final row in json) {
-        final value = TimeBucketsResponseDto.fromJson(row);
+        final value = TimeBucketResponseDto.fromJson(row);
         if (value != null) {
           result.add(value);
         }
@@ -71,12 +71,12 @@
     return result.toList(growable: growable);
   }

-  static Map<String, TimeBucketsResponseDto> mapFromJson(dynamic json) {
-    final map = <String, TimeBucketsResponseDto>{};
+  static Map<String, TimeBucketResponseDto> mapFromJson(dynamic json) {
+    final map = <String, TimeBucketResponseDto>{};
     if (json is Map && json.isNotEmpty) {
       json = json.cast<String, dynamic>(); // ignore: parameter_assignments
       for (final entry in json.entries) {
-        final value = TimeBucketsResponseDto.fromJson(entry.value);
+        final value = TimeBucketResponseDto.fromJson(entry.value);
         if (value != null) {
           map[entry.key] = value;
         }
@@ -85,14 +85,14 @@
     return map;
   }

-  // maps a json object with a list of TimeBucketsResponseDto-objects as value to a dart map
-  static Map<String, List<TimeBucketsResponseDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
-    final map = <String, List<TimeBucketsResponseDto>>{};
+  // maps a json object with a list of TimeBucketResponseDto-objects as value to a dart map
+  static Map<String, List<TimeBucketResponseDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
+    final map = <String, List<TimeBucketResponseDto>>{};
     if (json is Map && json.isNotEmpty) {
       // ignore: parameter_assignments
       json = json.cast<String, dynamic>();
       for (final entry in json.entries) {
-        map[entry.key] = TimeBucketsResponseDto.listFromJson(entry.value, growable: growable,);
+        map[entry.key] = TimeBucketResponseDto.listFromJson(entry.value, growable: growable,);
       }
     }
     return map;
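The rename is mechanical (plural `TimeBucketsResponseDto` becomes singular `TimeBucketResponseDto`); behavior is unchanged. A decoding sketch with a hypothetical payload, assuming the generated `openapi` package import:

```dart
import 'package:openapi/api.dart';

void main() {
  final buckets = TimeBucketResponseDto.listFromJson([
    {'count': 42, 'timeBucket': '2025-03-01'}, // hypothetical bucket key
  ]);
  print(buckets.single); // TimeBucketResponseDto[count=42, timeBucket=2025-03-01]
}
```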
85 mobile/openapi/lib/model/time_bucket_size.dart (generated, new file)

@@ -0,0 +1,85 @@
+//
+// AUTO-GENERATED FILE, DO NOT MODIFY!
+//
+// @dart=2.18
+
+// ignore_for_file: unused_element, unused_import
+// ignore_for_file: always_put_required_named_parameters_first
+// ignore_for_file: constant_identifier_names
+// ignore_for_file: lines_longer_than_80_chars
+
+part of openapi.api;
+
+
+class TimeBucketSize {
+  /// Instantiate a new enum with the provided [value].
+  const TimeBucketSize._(this.value);
+
+  /// The underlying value of this enum member.
+  final String value;
+
+  @override
+  String toString() => value;
+
+  String toJson() => value;
+
+  static const DAY = TimeBucketSize._(r'DAY');
+  static const MONTH = TimeBucketSize._(r'MONTH');
+
+  /// List of all possible values in this [enum][TimeBucketSize].
+  static const values = <TimeBucketSize>[
+    DAY,
+    MONTH,
+  ];
+
+  static TimeBucketSize? fromJson(dynamic value) => TimeBucketSizeTypeTransformer().decode(value);
+
+  static List<TimeBucketSize> listFromJson(dynamic json, {bool growable = false,}) {
+    final result = <TimeBucketSize>[];
+    if (json is List && json.isNotEmpty) {
+      for (final row in json) {
+        final value = TimeBucketSize.fromJson(row);
+        if (value != null) {
+          result.add(value);
+        }
+      }
+    }
+    return result.toList(growable: growable);
+  }
+}
+
+/// Transformation class that can [encode] an instance of [TimeBucketSize] to String,
+/// and [decode] dynamic data back to [TimeBucketSize].
+class TimeBucketSizeTypeTransformer {
+  factory TimeBucketSizeTypeTransformer() => _instance ??= const TimeBucketSizeTypeTransformer._();
+
+  const TimeBucketSizeTypeTransformer._();
+
+  String encode(TimeBucketSize data) => data.value;
+
+  /// Decodes a [dynamic value][data] to a TimeBucketSize.
+  ///
+  /// If [allowNull] is true and the [dynamic value][data] cannot be decoded successfully,
+  /// then null is returned. However, if [allowNull] is false and the [dynamic value][data]
+  /// cannot be decoded successfully, then an [UnimplementedError] is thrown.
+  ///
+  /// The [allowNull] is very handy when an API changes and a new enum value is added or removed,
+  /// and users are still using an old app with the old code.
+  TimeBucketSize? decode(dynamic data, {bool allowNull = true}) {
+    if (data != null) {
+      switch (data) {
+        case r'DAY': return TimeBucketSize.DAY;
+        case r'MONTH': return TimeBucketSize.MONTH;
+        default:
+          if (!allowNull) {
+            throw ArgumentError('Unknown enum value to decode: $data');
+          }
+      }
+    }
+    return null;
+  }
+
+  /// Singleton [TimeBucketSizeTypeTransformer] instance.
+  static TimeBucketSizeTypeTransformer? _instance;
+}
+
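The new `TimeBucketSize` enum follows the generator's usual enum pattern: a raw string `value`, a `toJson()` passthrough, and a `TypeTransformer` with null-tolerant decoding. A short round-trip sketch:

```dart
import 'package:openapi/api.dart';

void main() {
  // Serialization: the member's raw value is what ends up in the query string.
  print(TimeBucketSize.MONTH.toJson()); // MONTH

  // Deserialization: known wire values decode to enum members...
  print(TimeBucketSizeTypeTransformer().decode(r'DAY')); // DAY

  // ...and unknown values return null, since allowNull defaults to true.
  print(TimeBucketSizeTypeTransformer().decode(r'WEEK')); // null
}
```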
bin/generate-open-api.sh

@@ -1,5 +1,5 @@
 #!/usr/bin/env bash
-OPENAPI_GENERATOR_VERSION=v7.12.0
+OPENAPI_GENERATOR_VERSION=v7.8.0

 # usage: ./bin/generate-open-api.sh

@@ -8,7 +8,6 @@ function dart {
   cd ./templates/mobile/serialization/native
   wget -O native_class.mustache https://raw.githubusercontent.com/OpenAPITools/openapi-generator/$OPENAPI_GENERATOR_VERSION/modules/openapi-generator/src/main/resources/dart2/serialization/native/native_class.mustache
   patch --no-backup-if-mismatch -u native_class.mustache <native_class.mustache.patch
-  patch --no-backup-if-mismatch -u native_class.mustache <native_class_nullable_items_in_arrays.patch

   cd ../../
   wget -O api.mustache https://raw.githubusercontent.com/OpenAPITools/openapi-generator/$OPENAPI_GENERATOR_VERSION/modules/openapi-generator/src/main/resources/dart2/api.mustache
301 native_class.mustache (deleted)

@@ -1,301 +0,0 @@
-class {{{classname}}} {
-  {{>dart_constructor}}
-{{#vars}}
-  {{#description}}
-  /// {{{.}}}
-  {{/description}}
-  {{^isEnum}}
-  {{#minimum}}
-  {{#description}}
-  ///
-  {{/description}}
-  /// Minimum value: {{{.}}}
-  {{/minimum}}
-  {{#maximum}}
-  {{#description}}
-  {{^minimum}}
-  ///
-  {{/minimum}}
-  {{/description}}
-  /// Maximum value: {{{.}}}
-  {{/maximum}}
-  {{^isNullable}}
-  {{^required}}
-  {{^defaultValue}}
-  ///
-  /// Please note: This property should have been non-nullable! Since the specification file
-  /// does not include a default value (using the "default:" property), however, the generated
-  /// source code must fall back to having a nullable type.
-  /// Consider adding a "default:" property in the specification file to hide this note.
-  ///
-  {{/defaultValue}}
-  {{/required}}
-  {{/isNullable}}
-  {{/isEnum}}
-  {{{datatypeWithEnum}}}{{#isNullable}}?{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}?{{/defaultValue}}{{/required}}{{/isNullable}} {{{name}}};
-
-{{/vars}}
-  @override
-  bool operator ==(Object other) => identical(this, other) || other is {{{classname}}} &&
-{{#vars}}
-    {{#isMap}}_deepEquality.equals(other.{{{name}}}, {{{name}}}){{/isMap}}{{^isMap}}{{#isArray}}_deepEquality.equals(other.{{{name}}}, {{{name}}}){{/isArray}}{{^isArray}}other.{{{name}}} == {{{name}}}{{/isArray}}{{/isMap}}{{^-last}} &&{{/-last}}{{#-last}};{{/-last}}
-{{/vars}}
-
-  @override
-  int get hashCode =>
-    // ignore: unnecessary_parenthesis
-{{#vars}}
-    ({{#isNullable}}{{{name}}} == null ? 0 : {{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}{{{name}}} == null ? 0 : {{/defaultValue}}{{/required}}{{/isNullable}}{{{name}}}{{#isNullable}}!{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}!{{/defaultValue}}{{/required}}{{/isNullable}}.hashCode){{^-last}} +{{/-last}}{{#-last}};{{/-last}}
-{{/vars}}
-
-  @override
-  String toString() => '{{{classname}}}[{{#vars}}{{{name}}}=${{{name}}}{{^-last}}, {{/-last}}{{/vars}}]';
-
-  Map<String, dynamic> toJson() {
-    final json = <String, dynamic>{};
-{{#vars}}
-  {{#isNullable}}
-    if (this.{{{name}}} != null) {
-  {{/isNullable}}
-  {{^isNullable}}
-  {{^required}}
-  {{^defaultValue}}
-    if (this.{{{name}}} != null) {
-  {{/defaultValue}}
-  {{/required}}
-  {{/isNullable}}
-  {{#isDateTime}}
-  {{#pattern}}
-      json[r'{{{baseName}}}'] = _isEpochMarker(r'{{{pattern}}}')
-        ? this.{{{name}}}{{#isNullable}}!{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}!{{/defaultValue}}{{/required}}{{/isNullable}}.millisecondsSinceEpoch
-        : this.{{{name}}}{{#isNullable}}!{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}!{{/defaultValue}}{{/required}}{{/isNullable}}.toUtc().toIso8601String();
-  {{/pattern}}
-  {{^pattern}}
-      json[r'{{{baseName}}}'] = this.{{{name}}}{{#isNullable}}!{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}!{{/defaultValue}}{{/required}}{{/isNullable}}.toUtc().toIso8601String();
-  {{/pattern}}
-  {{/isDateTime}}
-  {{#isDate}}
-  {{#pattern}}
-      json[r'{{{baseName}}}'] = _isEpochMarker(r'{{{pattern}}}')
-        ? this.{{{name}}}{{#isNullable}}!{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}!{{/defaultValue}}{{/required}}{{/isNullable}}.millisecondsSinceEpoch
-        : _dateFormatter.format(this.{{{name}}}{{#isNullable}}!{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}!{{/defaultValue}}{{/required}}{{/isNullable}}.toUtc());
-  {{/pattern}}
-  {{^pattern}}
-      json[r'{{{baseName}}}'] = _dateFormatter.format(this.{{{name}}}{{#isNullable}}!{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}!{{/defaultValue}}{{/required}}{{/isNullable}}.toUtc());
-  {{/pattern}}
-  {{/isDate}}
-  {{^isDateTime}}
-  {{^isDate}}
-      json[r'{{{baseName}}}'] = this.{{{name}}}{{#isArray}}{{#uniqueItems}}{{#isNullable}}!{{/isNullable}}.toList(growable: false){{/uniqueItems}}{{/isArray}};
-  {{/isDate}}
-  {{/isDateTime}}
-  {{#isNullable}}
-    } else {
-      json[r'{{{baseName}}}'] = null;
-    }
-  {{/isNullable}}
-  {{^isNullable}}
-  {{^required}}
-  {{^defaultValue}}
-    } else {
-      json[r'{{{baseName}}}'] = null;
-    }
-  {{/defaultValue}}
-  {{/required}}
-  {{/isNullable}}
-{{/vars}}
-    return json;
-  }
-
-  /// Returns a new [{{{classname}}}] instance and imports its values from
-  /// [value] if it's a [Map], null otherwise.
-  // ignore: prefer_constructors_over_static_methods
-  static {{{classname}}}? fromJson(dynamic value) {
-    if (value is Map) {
-      final json = value.cast<String, dynamic>();
-
-      // Ensure that the map contains the required keys.
-      // Note 1: the values aren't checked for validity beyond being non-null.
-      // Note 2: this code is stripped in release mode!
-      assert(() {
-        requiredKeys.forEach((key) {
-          assert(json.containsKey(key), 'Required key "{{{classname}}}[$key]" is missing from JSON.');
-          assert(json[key] != null, 'Required key "{{{classname}}}[$key]" has a null value in JSON.');
-        });
-        return true;
-      }());
-
-      return {{{classname}}}(
-{{#vars}}
-  {{#isDateTime}}
-        {{{name}}}: mapDateTime(json, r'{{{baseName}}}', r'{{{pattern}}}'){{#required}}{{^isNullable}}!{{/isNullable}}{{/required}}{{^required}}{{#defaultValue}} ?? {{{.}}}{{/defaultValue}}{{/required}},
-  {{/isDateTime}}
-  {{#isDate}}
-        {{{name}}}: mapDateTime(json, r'{{{baseName}}}', r'{{{pattern}}}'){{#required}}{{^isNullable}}!{{/isNullable}}{{/required}}{{^required}}{{#defaultValue}} ?? {{{.}}}{{/defaultValue}}{{/required}},
-  {{/isDate}}
-  {{^isDateTime}}
-  {{^isDate}}
-    {{#complexType}}
-      {{#isArray}}
-        {{#items.isArray}}
-        {{{name}}}: json[r'{{{baseName}}}'] is List
-          ? (json[r'{{{baseName}}}'] as List).map((e) =>
-            {{#items.complexType}}
-              {{items.complexType}}.listFromJson(json[r'{{{baseName}}}']){{#uniqueItems}}.toSet(){{/uniqueItems}}
-            {{/items.complexType}}
-            {{^items.complexType}}
-              e == null ? {{#items.isNullable}}null{{/items.isNullable}}{{^items.isNullable}}const <{{items.items.dataType}}>[]{{/items.isNullable}} : (e as List).cast<{{items.items.dataType}}>()
-            {{/items.complexType}}
-            ).toList()
-          : {{#isNullable}}null{{/isNullable}}{{^isNullable}}const []{{/isNullable}},
-        {{/items.isArray}}
-        {{^items.isArray}}
-        {{{name}}}: {{{complexType}}}.listFromJson(json[r'{{{baseName}}}']){{#uniqueItems}}.toSet(){{/uniqueItems}},
-        {{/items.isArray}}
-      {{/isArray}}
-      {{^isArray}}
-        {{#isMap}}
-          {{#items.isArray}}
-        {{{name}}}: json[r'{{{baseName}}}'] == null
-          ? {{#defaultValue}}{{{.}}}{{/defaultValue}}{{^defaultValue}}null{{/defaultValue}}
-          {{#items.complexType}}
-          : {{items.complexType}}.mapListFromJson(json[r'{{{baseName}}}']),
-          {{/items.complexType}}
-          {{^items.complexType}}
-          : mapCastOfType<String, List>(json, r'{{{baseName}}}'),
-          {{/items.complexType}}
-          {{/items.isArray}}
-          {{^items.isArray}}
-            {{#items.isMap}}
-              {{#items.complexType}}
-        {{{name}}}: {{items.complexType}}.mapFromJson(json[r'{{{baseName}}}']),
-              {{/items.complexType}}
-              {{^items.complexType}}
-        {{{name}}}: mapCastOfType<String, dynamic>(json, r'{{{baseName}}}'){{#required}}{{^isNullable}}!{{/isNullable}}{{/required}}{{^required}}{{#defaultValue}} ?? {{{.}}}{{/defaultValue}}{{/required}},
-              {{/items.complexType}}
-            {{/items.isMap}}
-            {{^items.isMap}}
-              {{#items.complexType}}
-        {{{name}}}: {{{items.complexType}}}.mapFromJson(json[r'{{{baseName}}}']),
-              {{/items.complexType}}
-              {{^items.complexType}}
-        {{{name}}}: mapCastOfType<String, {{items.dataType}}>(json, r'{{{baseName}}}'){{#required}}{{^isNullable}}!{{/isNullable}}{{/required}}{{^required}}{{#defaultValue}} ?? {{{.}}}{{/defaultValue}}{{/required}},
-              {{/items.complexType}}
-            {{/items.isMap}}
-          {{/items.isArray}}
-        {{/isMap}}
-        {{^isMap}}
-          {{#isBinary}}
-        {{{name}}}: null, // No support for decoding binary content from JSON
-          {{/isBinary}}
-          {{^isBinary}}
-        {{{name}}}: {{{complexType}}}.fromJson(json[r'{{{baseName}}}']){{#required}}{{^isNullable}}!{{/isNullable}}{{/required}}{{^required}}{{#defaultValue}} ?? {{{.}}}{{/defaultValue}}{{/required}},
-          {{/isBinary}}
-        {{/isMap}}
-      {{/isArray}}
-    {{/complexType}}
-    {{^complexType}}
-      {{#isArray}}
-        {{#isEnum}}
-        {{{name}}}: {{{items.datatypeWithEnum}}}.listFromJson(json[r'{{{baseName}}}']){{#uniqueItems}}.toSet(){{/uniqueItems}},
-        {{/isEnum}}
-        {{^isEnum}}
-        {{{name}}}: json[r'{{{baseName}}}'] is Iterable
-          ? (json[r'{{{baseName}}}'] as Iterable).cast<{{{items.datatype}}}>().{{#uniqueItems}}toSet(){{/uniqueItems}}{{^uniqueItems}}toList(growable: false){{/uniqueItems}}
-          : {{#defaultValue}}{{{.}}}{{/defaultValue}}{{^defaultValue}}null{{/defaultValue}},
-        {{/isEnum}}
-      {{/isArray}}
-      {{^isArray}}
-        {{#isMap}}
-        {{{name}}}: mapCastOfType<String, {{{items.datatype}}}>(json, r'{{{baseName}}}'){{#required}}{{^isNullable}}!{{/isNullable}}{{/required}}{{^required}}{{#defaultValue}} ?? {{{.}}}{{/defaultValue}}{{/required}},
-        {{/isMap}}
-        {{^isMap}}
-          {{#isNumber}}
-        {{{name}}}: {{#isNullable}}json[r'{{{baseName}}}'] == null
-          ? {{#defaultValue}}{{{.}}}{{/defaultValue}}{{^defaultValue}}null{{/defaultValue}}
-          : {{/isNullable}}{{{datatypeWithEnum}}}.parse('${json[r'{{{baseName}}}']}'),
-          {{/isNumber}}
-          {{^isNumber}}
-            {{^isEnum}}
-        {{{name}}}: mapValueOfType<{{{datatypeWithEnum}}}>(json, r'{{{baseName}}}'){{#required}}{{^isNullable}}!{{/isNullable}}{{/required}}{{^required}}{{#defaultValue}} ?? {{{.}}}{{/defaultValue}}{{/required}},
-            {{/isEnum}}
-            {{#isEnum}}
-        {{{name}}}: {{{enumName}}}.fromJson(json[r'{{{baseName}}}']){{#required}}{{^isNullable}}!{{/isNullable}}{{/required}}{{^required}}{{#defaultValue}} ?? {{{.}}}{{/defaultValue}}{{/required}},
-            {{/isEnum}}
-          {{/isNumber}}
-        {{/isMap}}
-      {{/isArray}}
-    {{/complexType}}
-  {{/isDate}}
-  {{/isDateTime}}
-{{/vars}}
-      );
-    }
-    return null;
-  }
-
-  static List<{{{classname}}}> listFromJson(dynamic json, {bool growable = false,}) {
-    final result = <{{{classname}}}>[];
-    if (json is List && json.isNotEmpty) {
-      for (final row in json) {
-        final value = {{{classname}}}.fromJson(row);
-        if (value != null) {
-          result.add(value);
-        }
-      }
-    }
-    return result.toList(growable: growable);
-  }
-
-  static Map<String, {{{classname}}}> mapFromJson(dynamic json) {
-    final map = <String, {{{classname}}}>{};
-    if (json is Map && json.isNotEmpty) {
-      json = json.cast<String, dynamic>(); // ignore: parameter_assignments
-      for (final entry in json.entries) {
-        final value = {{{classname}}}.fromJson(entry.value);
-        if (value != null) {
-          map[entry.key] = value;
-        }
-      }
-    }
-    return map;
-  }
-
-  // maps a json object with a list of {{{classname}}}-objects as value to a dart map
-  static Map<String, List<{{{classname}}}>> mapListFromJson(dynamic json, {bool growable = false,}) {
-    final map = <String, List<{{{classname}}}>>{};
-    if (json is Map && json.isNotEmpty) {
-      // ignore: parameter_assignments
-      json = json.cast<String, dynamic>();
-      for (final entry in json.entries) {
-        map[entry.key] = {{{classname}}}.listFromJson(entry.value, growable: growable,);
-      }
-    }
-    return map;
-  }
-
-  /// The list of required keys that must be present in a JSON.
-  static const requiredKeys = <String>{
-{{#vars}}
-  {{#required}}
-    '{{{baseName}}}',
-  {{/required}}
-{{/vars}}
-  };
-}
-{{#vars}}
-  {{^isModel}}
-    {{#isEnum}}
-      {{^isContainer}}
-
-{{>serialization/native/native_enum_inline}}
-      {{/isContainer}}
-      {{#isContainer}}
-        {{#mostInnerItems}}
-
-{{>serialization/native/native_enum_inline}}
-        {{/mostInnerItems}}
-      {{/isContainer}}
-    {{/isEnum}}
-  {{/isModel}}
-{{/vars}}
@@ -6949,24 +6949,6 @@
 "$ref": "#/components/schemas/AssetOrder"
 }
 },
-{
-"name": "page",
-"required": false,
-"in": "query",
-"schema": {
-"minimum": 1,
-"type": "number"
-}
-},
-{
-"name": "pageSize",
-"required": false,
-"in": "query",
-"schema": {
-"minimum": 1,
-"type": "number"
-}
-},
 {
 "name": "personId",
 "required": false,
@@ -6976,6 +6958,14 @@
 "type": "string"
 }
 },
+{
+"name": "size",
+"required": true,
+"in": "query",
+"schema": {
+"$ref": "#/components/schemas/TimeBucketSize"
+}
+},
 {
 "name": "tagId",
 "required": false,
@@ -7024,7 +7014,10 @@
 "content": {
 "application/json": {
 "schema": {
-"$ref": "#/components/schemas/TimeBucketAssetResponseDto"
+"items": {
+"$ref": "#/components/schemas/AssetResponseDto"
+},
+"type": "array"
 }
 }
 },
@@ -7109,6 +7102,14 @@
 "type": "string"
 }
 },
+{
+"name": "size",
+"required": true,
+"in": "query",
+"schema": {
+"$ref": "#/components/schemas/TimeBucketSize"
+}
+},
 {
 "name": "tagId",
 "required": false,
@@ -7150,7 +7151,7 @@
 "application/json": {
 "schema": {
 "items": {
-"$ref": "#/components/schemas/TimeBucketsResponseDto"
+"$ref": "#/components/schemas/TimeBucketResponseDto"
 },
 "type": "array"
 }
@@ -9620,7 +9621,7 @@
 "start",
 "pause",
 "resume",
-"empty",
+"clear",
 "clear-failed"
 ],
 "type": "string"
@@ -9648,28 +9649,20 @@
 "active": {
 "type": "integer"
 },
-"completed": {
-"type": "integer"
-},
 "delayed": {
 "type": "integer"
 },
 "failed": {
 "type": "integer"
 },
-"paused": {
-"type": "integer"
-},
 "waiting": {
 "type": "integer"
 }
 },
 "required": [
 "active",
-"completed",
 "delayed",
 "failed",
-"paused",
 "waiting"
 ],
 "type": "object"
@@ -11006,16 +10999,12 @@
 },
 "QueueStatusDto": {
 "properties": {
-"isActive": {
-"type": "boolean"
-},
-"isPaused": {
+"paused": {
 "type": "boolean"
 }
 },
 "required": [
-"isActive",
-"isPaused"
+"paused"
 ],
 "type": "object"
 },
@@ -13560,131 +13549,7 @@
 ],
 "type": "object"
 },
-"TimeBucketAssetResponseDto": {
-"properties": {
-"city": {
-"items": {
-"nullable": true,
-"type": "string"
-},
-"type": "array"
-},
-"country": {
-"items": {
-"nullable": true,
-"type": "string"
-},
-"type": "array"
-},
-"duration": {
-"items": {
-"nullable": true,
-"type": "string"
-},
-"type": "array"
-},
-"id": {
-"items": {
-"type": "string"
-},
-"type": "array"
-},
-"isArchived": {
-"items": {
-"type": "number"
-},
-"type": "array"
-},
-"isFavorite": {
-"items": {
-"type": "number"
-},
-"type": "array"
-},
-"isImage": {
-"items": {
-"type": "number"
-},
-"type": "array"
-},
-"isTrashed": {
-"items": {
-"type": "number"
-},
-"type": "array"
-},
-"livePhotoVideoId": {
-"items": {
-"nullable": true,
-"type": "string"
-},
-"type": "array"
-},
-"localDateTime": {
-"items": {
-"type": "string"
-},
-"type": "array"
-},
-"ownerId": {
-"items": {
-"type": "string"
-},
-"type": "array"
-},
-"projectionType": {
-"items": {
-"nullable": true,
-"type": "string"
-},
-"type": "array"
-},
-"ratio": {
-"items": {
-"type": "number"
-},
-"type": "array"
-},
-"stack": {
-"description": "(stack ID, stack asset count) tuple",
-"items": {
-"items": {
-"type": "string"
-},
-"maxItems": 2,
-"minItems": 2,
-"nullable": true,
-"type": "array"
-},
-"type": "array"
-},
-"thumbhash": {
-"items": {
-"nullable": true,
-"type": "string"
-},
-"type": "array"
-}
-},
-"required": [
-"city",
-"country",
-"duration",
-"id",
-"isArchived",
-"isFavorite",
-"isImage",
-"isTrashed",
-"livePhotoVideoId",
-"localDateTime",
-"ownerId",
-"projectionType",
-"ratio",
-"thumbhash"
-],
-"type": "object"
-},
-"TimeBucketsResponseDto": {
+"TimeBucketResponseDto": {
 "properties": {
 "count": {
 "type": "integer"
@@ -13699,6 +13564,13 @@
 ],
 "type": "object"
 },
+"TimeBucketSize": {
+"enum": [
+"DAY",
+"MONTH"
+],
+"type": "string"
+},
 "ToneMapping": {
 "enum": [
 "hable",
@@ -32,7 +32,7 @@ class {{{classname}}} {
 {{/required}}
 {{/isNullable}}
 {{/isEnum}}
-  {{#isArray}}{{#uniqueItems}}Set{{/uniqueItems}}{{^uniqueItems}}List{{/uniqueItems}}<{{{items.dataType}}}{{#items.isNullable}}?{{/items.isNullable}}>{{/isArray}}{{^isArray}}{{{datatypeWithEnum}}}{{/isArray}}{{#isNullable}}?{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}?{{/defaultValue}}{{/required}}{{/isNullable}} {{{name}}};
+  {{{datatypeWithEnum}}}{{#isNullable}}?{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}?{{/defaultValue}}{{/required}}{{/isNullable}} {{{name}}};

 {{/vars}}
 @override
@@ -1,13 +0,0 @@
-diff --git a/open-api/templates/mobile/serialization/native/native_class.mustache b/open-api/templates/mobile/serialization/native/native_class.mustache
-index 9a7b1439b..9f40d5b0b 100644
---- a/open-api/templates/mobile/serialization/native/native_class.mustache
-+++ b/open-api/templates/mobile/serialization/native/native_class.mustache
-@@ -32,7 +32,7 @@ class {{{classname}}} {
-   {{/required}}
-   {{/isNullable}}
-   {{/isEnum}}
--  {{{datatypeWithEnum}}}{{#isNullable}}?{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}?{{/defaultValue}}{{/required}}{{/isNullable}} {{{name}}};
-+  {{#isArray}}{{#uniqueItems}}Set{{/uniqueItems}}{{^uniqueItems}}List{{/uniqueItems}}<{{{items.dataType}}}{{#items.isNullable}}?{{/items.isNullable}}>{{/isArray}}{{^isArray}}{{{datatypeWithEnum}}}{{/isArray}}{{#isNullable}}?{{/isNullable}}{{^isNullable}}{{^required}}{{^defaultValue}}?{{/defaultValue}}{{/required}}{{/isNullable}} {{{name}}};
-
- {{/vars}}
- @override
@@ -577,15 +577,12 @@ export type FaceDto = {
 };
 export type JobCountsDto = {
 active: number;
-completed: number;
 delayed: number;
 failed: number;
-paused: number;
 waiting: number;
 };
 export type QueueStatusDto = {
-isActive: boolean;
-isPaused: boolean;
+paused: boolean;
 };
 export type JobStatusDto = {
 jobCounts: JobCountsDto;
@@ -1384,25 +1381,7 @@ export type TagBulkAssetsResponseDto = {
 export type TagUpdateDto = {
 color?: string | null;
 };
-export type TimeBucketAssetResponseDto = {
-city: (string | null)[];
-country: (string | null)[];
-duration: (string | null)[];
-id: string[];
-isArchived: number[];
-isFavorite: number[];
-isImage: number[];
-isTrashed: number[];
-livePhotoVideoId: (string | null)[];
-localDateTime: string[];
-ownerId: string[];
-projectionType: (string | null)[];
-ratio: number[];
-/** (stack ID, stack asset count) tuple */
-stack?: (string[] | null)[];
-thumbhash: (string | null)[];
-};
-export type TimeBucketsResponseDto = {
+export type TimeBucketResponseDto = {
 count: number;
 timeBucket: string;
 };
@@ -3260,16 +3239,15 @@ export function tagAssets({ id, bulkIdsDto }: {
 body: bulkIdsDto
 })));
 }
-export function getTimeBucket({ albumId, isArchived, isFavorite, isTrashed, key, order, page, pageSize, personId, tagId, timeBucket, userId, withPartners, withStacked }: {
+export function getTimeBucket({ albumId, isArchived, isFavorite, isTrashed, key, order, personId, size, tagId, timeBucket, userId, withPartners, withStacked }: {
 albumId?: string;
 isArchived?: boolean;
 isFavorite?: boolean;
 isTrashed?: boolean;
 key?: string;
 order?: AssetOrder;
-page?: number;
-pageSize?: number;
 personId?: string;
+size: TimeBucketSize;
 tagId?: string;
 timeBucket: string;
 userId?: string;
@@ -3278,7 +3256,7 @@ export function getTimeBucket({ albumId, isArchived, isFavorite, isTrashed, key,
 }, opts?: Oazapfts.RequestOpts) {
 return oazapfts.ok(oazapfts.fetchJson<{
 status: 200;
-data: TimeBucketAssetResponseDto;
+data: AssetResponseDto[];
 }>(`/timeline/bucket${QS.query(QS.explode({
 albumId,
 isArchived,
@@ -3286,9 +3264,8 @@ export function getTimeBucket({ albumId, isArchived, isFavorite, isTrashed, key,
 isTrashed,
 key,
 order,
-page,
-pageSize,
 personId,
+size,
 tagId,
 timeBucket,
 userId,
@@ -3298,7 +3275,7 @@ export function getTimeBucket({ albumId, isArchived, isFavorite, isTrashed, key,
 ...opts
 }));
 }
-export function getTimeBuckets({ albumId, isArchived, isFavorite, isTrashed, key, order, personId, tagId, userId, withPartners, withStacked }: {
+export function getTimeBuckets({ albumId, isArchived, isFavorite, isTrashed, key, order, personId, size, tagId, userId, withPartners, withStacked }: {
 albumId?: string;
 isArchived?: boolean;
 isFavorite?: boolean;
@@ -3306,6 +3283,7 @@ export function getTimeBuckets({ albumId, isArchived, isFavorite, isTrashed, key
 key?: string;
 order?: AssetOrder;
 personId?: string;
+size: TimeBucketSize;
 tagId?: string;
 userId?: string;
 withPartners?: boolean;
@@ -3313,7 +3291,7 @@ export function getTimeBuckets({ albumId, isArchived, isFavorite, isTrashed, key
 }, opts?: Oazapfts.RequestOpts) {
 return oazapfts.ok(oazapfts.fetchJson<{
 status: 200;
-data: TimeBucketsResponseDto[];
+data: TimeBucketResponseDto[];
 }>(`/timeline/buckets${QS.query(QS.explode({
 albumId,
 isArchived,
@@ -3322,6 +3300,7 @@ export function getTimeBuckets({ albumId, isArchived, isFavorite, isTrashed, key
 key,
 order,
 personId,
+size,
 tagId,
 userId,
 withPartners,
@@ -3691,7 +3670,7 @@ export enum JobCommand {
 Start = "start",
 Pause = "pause",
 Resume = "resume",
-Empty = "empty",
+Clear = "clear",
 ClearFailed = "clear-failed"
 }
 export enum MemoryType {
@@ -3800,3 +3779,7 @@ export enum OAuthTokenEndpointAuthMethod {
 ClientSecretPost = "client_secret_post",
 ClientSecretBasic = "client_secret_basic"
 }
+export enum TimeBucketSize {
+Day = "DAY",
+Month = "MONTH"
+}
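Taken together, the SDK changes above revert the timeline endpoints to their pre-columnar shape. A minimal sketch of how a caller would use the regenerated client — assuming the package is consumed as `@immich/sdk` and that `init()` was already called elsewhere with a server URL and API key (neither is part of this diff):

```ts
import { getTimeBucket, getTimeBuckets, TimeBucketSize } from '@immich/sdk';

// 'size' is now a required query parameter; 'page'/'pageSize' are gone.
const buckets = await getTimeBuckets({ size: TimeBucketSize.Month });

// Each bucket is addressed by its timeBucket string, and the endpoint now
// returns a plain AssetResponseDto[] instead of TimeBucketAssetResponseDto.
const assets = await getTimeBucket({
  size: TimeBucketSize.Month,
  timeBucket: buckets[0].timeBucket,
});
```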
403
server/package-lock.json
generated
@@ -10,7 +10,6 @@
 "hasInstallScript": true,
 "license": "GNU Affero General Public License version 3",
 "dependencies": {
-"@nestjs/bullmq": "^11.0.1",
 "@nestjs/common": "^11.0.4",
 "@nestjs/core": "^11.0.4",
 "@nestjs/event-emitter": "^3.0.0",
@@ -24,11 +23,11 @@
 "@opentelemetry/exporter-prometheus": "^0.200.0",
 "@opentelemetry/sdk-node": "^0.200.0",
 "@react-email/components": "^0.0.36",
-"@socket.io/redis-adapter": "^8.3.0",
+"@socket.io/postgres-adapter": "^0.4.0",
+"@types/pg": "^8.11.14",
 "archiver": "^7.0.0",
 "async-lock": "^1.4.0",
 "bcrypt": "^5.1.1",
-"bullmq": "^5.51.0",
 "chokidar": "^3.5.3",
 "class-transformer": "^0.5.1",
 "class-validator": "^0.14.0",
@@ -39,9 +38,9 @@
 "fast-glob": "^3.3.2",
 "fluent-ffmpeg": "^2.1.2",
 "geo-tz": "^8.0.0",
+"graphile-worker": "^0.16.6",
 "handlebars": "^4.7.8",
 "i18n-iso-countries": "^7.6.0",
-"ioredis": "^5.3.2",
 "joi": "^17.10.0",
 "js-yaml": "^4.1.0",
 "kysely": "^0.28.0",
@@ -54,7 +53,7 @@
 "nestjs-otel": "^6.0.0",
 "nodemailer": "^6.9.13",
 "openid-client": "^6.3.3",
-"pg": "^8.11.3",
+"pg": "^8.15.6",
 "picomatch": "^4.0.2",
 "react": "^19.0.0",
 "react-dom": "^19.0.0",
@@ -80,7 +79,6 @@
 "@nestjs/testing": "^11.0.4",
 "@swc/core": "^1.4.14",
 "@testcontainers/postgresql": "^10.2.1",
-"@testcontainers/redis": "^10.18.0",
 "@types/archiver": "^6.0.0",
 "@types/async-lock": "^1.4.2",
 "@types/bcrypt": "^5.0.0",
@@ -1072,6 +1070,12 @@
 "@nestjs/core": "^10.x || ^11.0.0"
 }
 },
+"node_modules/@graphile/logger": {
+"version": "0.2.0",
+"resolved": "https://registry.npmjs.org/@graphile/logger/-/logger-0.2.0.tgz",
+"integrity": "sha512-jjcWBokl9eb1gVJ85QmoaQ73CQ52xAaOCF29ukRbYNl6lY+ts0ErTaDYOBlejcbUs2OpaiqYLO5uDhyLFzWw4w==",
+"license": "MIT"
+},
 "node_modules/@grpc/grpc-js": {
 "version": "1.13.3",
 "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.13.3.tgz",
@@ -1883,7 +1887,9 @@
 "version": "1.2.0",
 "resolved": "https://registry.npmjs.org/@ioredis/commands/-/commands-1.2.0.tgz",
 "integrity": "sha512-Sx1pU8EM64o2BrqNpEO1CNLtKQwyhuXuqyfH7oGKCk+1a33d2r5saW8zNwm3j6BTExtjrv2BxTgzzkMwts6vGg==",
-"license": "MIT"
+"license": "MIT",
+"optional": true,
+"peer": true
 },
 "node_modules/@isaacs/cliui": {
 "version": "8.0.2",
@@ -2118,45 +2124,13 @@
 "integrity": "sha512-4aErSrCR/On/e5G2hDP0wjooqDdauzEbIq8hIkIe5pXV0rtWJZvdCEKL0ykZxex+IxIwBp0eGeV48hQN07dXtw==",
 "license": "MIT"
 },
-"node_modules/@msgpackr-extract/msgpackr-extract-linux-x64": {
-"version": "3.0.3",
-"resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-x64/-/msgpackr-extract-linux-x64-3.0.3.tgz",
-"integrity": "sha512-cvwNfbP07pKUfq1uH+S6KJ7dT9K8WOE4ZiAcsrSes+UY55E/0jLYc+vq+DO7jlmqRb5zAggExKm0H7O/CBaesg==",
-"cpu": [
-"x64"
-],
-"license": "MIT",
-"optional": true,
-"os": [
-"linux"
-]
-},
-"node_modules/@nestjs/bull-shared": {
-"version": "11.0.2",
-"resolved": "https://registry.npmjs.org/@nestjs/bull-shared/-/bull-shared-11.0.2.tgz",
-"integrity": "sha512-dFlttJvBqIFD6M8JVFbkrR4Feb39OTAJPJpFVILU50NOJCM4qziRw3dSNG84Q3v+7/M6xUGMFdZRRGvBBKxoSA==",
-"license": "MIT",
-"dependencies": {
-"tslib": "2.8.1"
-},
-"peerDependencies": {
-"@nestjs/common": "^10.0.0 || ^11.0.0",
-"@nestjs/core": "^10.0.0 || ^11.0.0"
-}
-},
-"node_modules/@nestjs/bullmq": {
-"version": "11.0.2",
-"resolved": "https://registry.npmjs.org/@nestjs/bullmq/-/bullmq-11.0.2.tgz",
-"integrity": "sha512-Lq6lGpKkETsm0RDcUktlzsthFoE3A5QTMp2FwPi1eztKqKD6/90KS1TcnC9CJFzjpUaYnQzIMrlNs55e+/wsHA==",
-"license": "MIT",
-"dependencies": {
-"@nestjs/bull-shared": "^11.0.2",
-"tslib": "2.8.1"
-},
-"peerDependencies": {
-"@nestjs/common": "^10.0.0 || ^11.0.0",
-"@nestjs/core": "^10.0.0 || ^11.0.0",
-"bullmq": "^3.0.0 || ^4.0.0 || ^5.0.0"
+"node_modules/@msgpack/msgpack": {
+"version": "2.8.0",
+"resolved": "https://registry.npmjs.org/@msgpack/msgpack/-/msgpack-2.8.0.tgz",
+"integrity": "sha512-h9u4u/jiIRKbq25PM+zymTyW6bhTzELvOoUd+AvYriWOAKpLGnIamaET3pnHYoI5iYphAHBI4ayx0MehR+VVPQ==",
+"license": "ISC",
+"engines": {
+"node": ">= 10"
 }
 },
 "node_modules/@nestjs/cli": {
@@ -3787,6 +3761,17 @@
 "@opentelemetry/api": "^1.3.0"
 }
 },
+"node_modules/@opentelemetry/instrumentation-pg/node_modules/@types/pg": {
+"version": "8.6.1",
+"resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.6.1.tgz",
+"integrity": "sha512-1Kc4oAGzAl7uqUStZCDvaLFqZrW9qWSjXOmBfdgyBP5La7Us6Mg4GBvRlSoaZMhQF/zSj1C8CtKMBkoiT8eL8w==",
+"license": "MIT",
+"dependencies": {
+"@types/node": "*",
+"pg-protocol": "*",
+"pg-types": "^2.2.0"
+}
+},
 "node_modules/@opentelemetry/instrumentation-pino": {
 "version": "0.47.0",
 "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-pino/-/instrumentation-pino-0.47.0.tgz",
@@ -4763,24 +4748,25 @@
 "integrity": "sha512-9BCxFwvbGg/RsZK9tjXd8s4UcwR0MWeFQ1XEKIQVVvAGJyINdrqKMcTRyLoK8Rse1GjzLV9cwjWV1olXRWEXVA==",
 "license": "MIT"
 },
-"node_modules/@socket.io/redis-adapter": {
-"version": "8.3.0",
-"resolved": "https://registry.npmjs.org/@socket.io/redis-adapter/-/redis-adapter-8.3.0.tgz",
-"integrity": "sha512-ly0cra+48hDmChxmIpnESKrc94LjRL80TEmZVscuQ/WWkRP81nNj8W8cCGMqbI4L6NCuAaPRSzZF1a9GlAxxnA==",
+"node_modules/@socket.io/postgres-adapter": {
+"version": "0.4.0",
+"resolved": "https://registry.npmjs.org/@socket.io/postgres-adapter/-/postgres-adapter-0.4.0.tgz",
+"integrity": "sha512-FJQslCIchoT4oMHk0D8HeSi9nhAOE8/snId65zI10ykZsk3MQJnUH45+Jqd75IuQhtxxwrvNxqHmzLJEPw9PnA==",
 "license": "MIT",
 "dependencies": {
-"debug": "~4.3.1",
-"notepack.io": "~3.0.1",
-"uid2": "1.0.0"
+"@msgpack/msgpack": "~2.8.0",
+"@types/pg": "^8.6.6",
+"debug": "~4.3.4",
+"pg": "^8.9.0"
 },
 "engines": {
-"node": ">=10.0.0"
+"node": ">=12.0.0"
 },
 "peerDependencies": {
 "socket.io-adapter": "^2.5.4"
 }
 },
-"node_modules/@socket.io/redis-adapter/node_modules/debug": {
+"node_modules/@socket.io/postgres-adapter/node_modules/debug": {
 "version": "4.3.7",
 "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz",
 "integrity": "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==",
@@ -4914,16 +4900,6 @@
 "testcontainers": "^10.24.2"
 }
 },
-"node_modules/@testcontainers/redis": {
-"version": "10.24.2",
-"resolved": "https://registry.npmjs.org/@testcontainers/redis/-/redis-10.24.2.tgz",
-"integrity": "sha512-m4/FZW5ltZPaK9pQTKNipjpBk73Vdj7Ql3sFr26A9dOr0wJyM3Wnc9jeHTNRal7RDnY5rvumXAIUWbBlvKMJEw==",
-"dev": true,
-"license": "MIT",
-"dependencies": {
-"testcontainers": "^10.24.2"
-}
-},
 "node_modules/@tokenizer/inflate": {
 "version": "0.2.7",
 "resolved": "https://registry.npmjs.org/@tokenizer/inflate/-/inflate-0.2.7.tgz",
@@ -5089,6 +5065,15 @@
 "@types/node": "*"
 }
 },
+"node_modules/@types/debug": {
+"version": "4.1.12",
+"resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz",
+"integrity": "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==",
+"license": "MIT",
+"dependencies": {
+"@types/ms": "*"
+}
+},
 "node_modules/@types/docker-modem": {
 "version": "3.0.6",
 "resolved": "https://registry.npmjs.org/@types/docker-modem/-/docker-modem-3.0.6.tgz",
@@ -5201,6 +5186,15 @@
 "rxjs": "^7.2.0"
 }
 },
+"node_modules/@types/interpret": {
+"version": "1.1.3",
+"resolved": "https://registry.npmjs.org/@types/interpret/-/interpret-1.1.3.tgz",
+"integrity": "sha512-uBaBhj/BhilG58r64mtDb/BEdH51HIQLgP5bmWzc5qCtFMja8dCk/IOJmk36j0lbi9QHwI6sbtUNGuqXdKCAtQ==",
+"license": "MIT",
+"dependencies": {
+"@types/node": "*"
+}
+},
 "node_modules/@types/js-yaml": {
 "version": "4.0.9",
 "resolved": "https://registry.npmjs.org/@types/js-yaml/-/js-yaml-4.0.9.tgz",
@@ -5261,6 +5255,12 @@
 "@types/node": "*"
 }
 },
+"node_modules/@types/ms": {
+"version": "2.1.0",
+"resolved": "https://registry.npmjs.org/@types/ms/-/ms-2.1.0.tgz",
+"integrity": "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==",
+"license": "MIT"
+},
 "node_modules/@types/multer": {
 "version": "1.4.12",
 "resolved": "https://registry.npmjs.org/@types/multer/-/multer-1.4.12.tgz",
@@ -5317,14 +5317,14 @@
 "license": "MIT"
 },
 "node_modules/@types/pg": {
-"version": "8.6.1",
-"resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.6.1.tgz",
-"integrity": "sha512-1Kc4oAGzAl7uqUStZCDvaLFqZrW9qWSjXOmBfdgyBP5La7Us6Mg4GBvRlSoaZMhQF/zSj1C8CtKMBkoiT8eL8w==",
+"version": "8.11.14",
+"resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.11.14.tgz",
+"integrity": "sha512-qyD11E5R3u0eJmd1lB0WnWKXJGA7s015nyARWljfz5DcX83TKAIlY+QrmvzQTsbIe+hkiFtkyL2gHC6qwF6Fbg==",
 "license": "MIT",
 "dependencies": {
 "@types/node": "*",
 "pg-protocol": "*",
-"pg-types": "^2.2.0"
+"pg-types": "^4.0.1"
 }
 },
 "node_modules/@types/pg-pool": {
@@ -5336,6 +5336,63 @@
 "@types/pg": "*"
 }
 },
+"node_modules/@types/pg/node_modules/pg-types": {
+"version": "4.0.2",
+"resolved": "https://registry.npmjs.org/pg-types/-/pg-types-4.0.2.tgz",
+"integrity": "sha512-cRL3JpS3lKMGsKaWndugWQoLOCoP+Cic8oseVcbr0qhPzYD5DWXK+RZ9LY9wxRf7RQia4SCwQlXk0q6FCPrVng==",
+"license": "MIT",
+"dependencies": {
+"pg-int8": "1.0.1",
+"pg-numeric": "1.0.2",
+"postgres-array": "~3.0.1",
+"postgres-bytea": "~3.0.0",
+"postgres-date": "~2.1.0",
+"postgres-interval": "^3.0.0",
+"postgres-range": "^1.1.1"
+},
+"engines": {
+"node": ">=10"
+}
+},
+"node_modules/@types/pg/node_modules/postgres-array": {
+"version": "3.0.4",
+"resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-3.0.4.tgz",
+"integrity": "sha512-nAUSGfSDGOaOAEGwqsRY27GPOea7CNipJPOA7lPbdEpx5Kg3qzdP0AaWC5MlhTWV9s4hFX39nomVZ+C4tnGOJQ==",
+"license": "MIT",
+"engines": {
+"node": ">=12"
+}
+},
+"node_modules/@types/pg/node_modules/postgres-bytea": {
+"version": "3.0.0",
+"resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-3.0.0.tgz",
+"integrity": "sha512-CNd4jim9RFPkObHSjVHlVrxoVQXz7quwNFpz7RY1okNNme49+sVyiTvTRobiLV548Hx/hb1BG+iE7h9493WzFw==",
+"license": "MIT",
+"dependencies": {
+"obuf": "~1.1.2"
+},
+"engines": {
+"node": ">= 6"
+}
+},
+"node_modules/@types/pg/node_modules/postgres-date": {
+"version": "2.1.0",
+"resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-2.1.0.tgz",
+"integrity": "sha512-K7Juri8gtgXVcDfZttFKVmhglp7epKb1K4pgrkLxehjqkrgPhfG6OO8LHLkfaqkbpjNRnra018XwAr1yQFWGcA==",
+"license": "MIT",
+"engines": {
+"node": ">=12"
+}
+},
+"node_modules/@types/pg/node_modules/postgres-interval": {
+"version": "3.0.0",
+"resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-3.0.0.tgz",
+"integrity": "sha512-BSNDnbyZCXSxgA+1f5UU2GmwhoI0aU5yMxRGO8CdFEcY2BQF9xm/7MqKnYoM1nJDk8nONNWDk9WeSmePFhQdlw==",
+"license": "MIT",
+"engines": {
+"node": ">=12"
+}
+},
 "node_modules/@types/picomatch": {
 "version": "3.0.2",
 "resolved": "https://registry.npmjs.org/@types/picomatch/-/picomatch-3.0.2.tgz",
@@ -5401,7 +5458,6 @@
 "version": "7.7.0",
 "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.7.0.tgz",
 "integrity": "sha512-k107IF4+Xr7UHjwDc7Cfd6PRQfbdkiRabXGRjo07b4WyPahFBZCZ1sE+BNxYIJPPg73UkfOsVOLwqVc/6ETrIA==",
-"dev": true,
 "license": "MIT"
 },
 "node_modules/@types/send": {
@@ -6885,21 +6941,6 @@
 "url": "https://github.com/sponsors/sindresorhus"
 }
 },
-"node_modules/bullmq": {
-"version": "5.51.0",
-"resolved": "https://registry.npmjs.org/bullmq/-/bullmq-5.51.0.tgz",
-"integrity": "sha512-YjX+CO2U4nmbCq2ZgNb/Hnu6Xk953j8EFmp0eehTuudavPyNstoZsbnyvvM6PX9rfD9clhcc5kRLyyWoFEM3Lg==",
-"license": "MIT",
-"dependencies": {
-"cron-parser": "^4.9.0",
-"ioredis": "^5.4.1",
-"msgpackr": "^1.11.2",
-"node-abort-controller": "^3.1.1",
-"semver": "^7.5.4",
-"tslib": "^2.0.0",
-"uuid": "^9.0.0"
-}
-},
 "node_modules/busboy": {
 "version": "1.6.0",
 "resolved": "https://registry.npmjs.org/busboy/-/busboy-1.6.0.tgz",
@@ -7487,6 +7528,8 @@
 "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.2.tgz",
 "integrity": "sha512-RMr0FhtfXemyinomL4hrWcYJxmX6deFdCxpJzhDttxgO1+bcCnkk+9drydLVDmAMG7NE6aN/fl4F7ucU/90gAA==",
 "license": "Apache-2.0",
+"optional": true,
+"peer": true,
 "engines": {
 "node": ">=0.10.0"
 }
@@ -7889,18 +7932,6 @@
 "luxon": "~3.5.0"
 }
 },
-"node_modules/cron-parser": {
-"version": "4.9.0",
-"resolved": "https://registry.npmjs.org/cron-parser/-/cron-parser-4.9.0.tgz",
-"integrity": "sha512-p0SaNjrHOnQeR8/VnfGbmg9te2kfyYSQ7Sc/j/6DtPL3JQvKxmjO9TSjNFpujqV3vEYYBvNNvXSxzyksBWAx1Q==",
-"license": "MIT",
-"dependencies": {
-"luxon": "^3.2.1"
-},
-"engines": {
-"node": ">=12.0.0"
-}
-},
 "node_modules/cron/node_modules/luxon": {
 "version": "3.5.0",
 "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.5.0.tgz",
@@ -8127,6 +8158,8 @@
 "resolved": "https://registry.npmjs.org/denque/-/denque-2.1.0.tgz",
 "integrity": "sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw==",
 "license": "Apache-2.0",
+"optional": true,
+"peer": true,
 "engines": {
 "node": ">=0.10"
 }
@@ -10016,6 +10049,64 @@
 "dev": true,
 "license": "MIT"
 },
+"node_modules/graphile-config": {
+"version": "0.0.1-beta.15",
+"resolved": "https://registry.npmjs.org/graphile-config/-/graphile-config-0.0.1-beta.15.tgz",
+"integrity": "sha512-J+hYqhZlx5yY7XdU7XjOAqNCAUZU33fEx3PdkNc1cfAAbo1TNMWiib4DFH5XkT8BagJtTyFrMnDCuKxnphCu+g==",
+"license": "MIT",
+"dependencies": {
+"@types/interpret": "^1.1.1",
+"@types/node": "^20.5.7",
+"@types/semver": "^7.5.1",
+"chalk": "^4.1.2",
+"debug": "^4.3.4",
+"interpret": "^3.1.1",
+"semver": "^7.5.4",
+"tslib": "^2.6.2",
+"yargs": "^17.7.2"
+},
+"engines": {
+"node": ">=16"
+}
+},
+"node_modules/graphile-config/node_modules/@types/node": {
+"version": "20.17.32",
+"resolved": "https://registry.npmjs.org/@types/node/-/node-20.17.32.tgz",
+"integrity": "sha512-zeMXFn8zQ+UkjK4ws0RiOC9EWByyW1CcVmLe+2rQocXRsGEDxUCwPEIVgpsGcLHS/P8JkT0oa3839BRABS0oPw==",
+"license": "MIT",
+"dependencies": {
+"undici-types": "~6.19.2"
+}
+},
+"node_modules/graphile-config/node_modules/undici-types": {
+"version": "6.19.8",
+"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz",
+"integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==",
+"license": "MIT"
+},
+"node_modules/graphile-worker": {
+"version": "0.16.6",
+"resolved": "https://registry.npmjs.org/graphile-worker/-/graphile-worker-0.16.6.tgz",
+"integrity": "sha512-e7gGYDmGqzju2l83MpzX8vNG/lOtVJiSzI3eZpAFubSxh/cxs7sRrRGBGjzBP1kNG0H+c95etPpNRNlH65PYhw==",
+"license": "MIT",
+"dependencies": {
+"@graphile/logger": "^0.2.0",
+"@types/debug": "^4.1.10",
+"@types/pg": "^8.10.5",
+"cosmiconfig": "^8.3.6",
+"graphile-config": "^0.0.1-beta.4",
+"json5": "^2.2.3",
+"pg": "^8.11.3",
+"tslib": "^2.6.2",
+"yargs": "^17.7.2"
+},
+"bin": {
+"graphile-worker": "dist/cli.js"
+},
+"engines": {
+"node": ">=14.0.0"
+}
+},
 "node_modules/handlebars": {
 "version": "4.7.8",
 "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.8.tgz",
@@ -10458,11 +10549,22 @@
 "node": ">=8"
 }
 },
+"node_modules/interpret": {
+"version": "3.1.1",
+"resolved": "https://registry.npmjs.org/interpret/-/interpret-3.1.1.tgz",
+"integrity": "sha512-6xwYfHbajpoF0xLW+iwLkhwgvLoZDfjYfoFNu8ftMoXINzwuymNLd9u/KmwtdT2GbR+/Cz66otEGEVVUHX9QLQ==",
+"license": "MIT",
+"engines": {
+"node": ">=10.13.0"
+}
+},
 "node_modules/ioredis": {
 "version": "5.6.1",
 "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-5.6.1.tgz",
 "integrity": "sha512-UxC0Yv1Y4WRJiGQxQkP0hfdL0/5/6YvdfOOClRgJ0qppSarkhneSa6UvkMkms0AkdGimSH3Ikqm+6mkMmX7vGA==",
 "license": "MIT",
+"optional": true,
+"peer": true,
 "dependencies": {
 "@ioredis/commands": "^1.1.1",
 "cluster-key-slot": "^1.1.0",
@@ -11332,13 +11434,17 @@
 "version": "4.2.0",
 "resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz",
 "integrity": "sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ==",
-"license": "MIT"
+"license": "MIT",
+"optional": true,
+"peer": true
 },
 "node_modules/lodash.isarguments": {
 "version": "3.1.0",
 "resolved": "https://registry.npmjs.org/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz",
 "integrity": "sha512-chi4NHZlZqZD18a0imDHnZPrDeBbTtVN7GXMwuGdRH9qotxAjYs3aVLKc7zNOG9eddR5Ksd8rvFEBc9SsggPpg==",
-"license": "MIT"
+"license": "MIT",
+"optional": true,
+"peer": true
 },
 "node_modules/lodash.merge": {
 "version": "4.6.2",
@@ -11865,37 +11971,6 @@
 "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
 "license": "MIT"
 },
-"node_modules/msgpackr": {
-"version": "1.11.2",
-"resolved": "https://registry.npmjs.org/msgpackr/-/msgpackr-1.11.2.tgz",
-"integrity": "sha512-F9UngXRlPyWCDEASDpTf6c9uNhGPTqnTeLVt7bN+bU1eajoR/8V9ys2BRaV5C/e5ihE6sJ9uPIKaYt6bFuO32g==",
-"license": "MIT",
-"optionalDependencies": {
-"msgpackr-extract": "^3.0.2"
-}
-},
-"node_modules/msgpackr-extract": {
-"version": "3.0.3",
-"resolved": "https://registry.npmjs.org/msgpackr-extract/-/msgpackr-extract-3.0.3.tgz",
-"integrity": "sha512-P0efT1C9jIdVRefqjzOQ9Xml57zpOXnIuS+csaB4MdZbTdmGDLo8XhzBG1N7aO11gKDDkJvBLULeFTo46wwreA==",
-"hasInstallScript": true,
-"license": "MIT",
-"optional": true,
-"dependencies": {
-"node-gyp-build-optional-packages": "5.2.2"
-},
-"bin": {
-"download-msgpackr-prebuilds": "bin/download-prebuilds.js"
-},
-"optionalDependencies": {
-"@msgpackr-extract/msgpackr-extract-darwin-arm64": "3.0.3",
-"@msgpackr-extract/msgpackr-extract-darwin-x64": "3.0.3",
-"@msgpackr-extract/msgpackr-extract-linux-arm": "3.0.3",
-"@msgpackr-extract/msgpackr-extract-linux-arm64": "3.0.3",
-"@msgpackr-extract/msgpackr-extract-linux-x64": "3.0.3",
-"@msgpackr-extract/msgpackr-extract-win32-x64": "3.0.3"
-}
-},
 "node_modules/multer": {
 "version": "1.4.5-lts.2",
 "resolved": "https://registry.npmjs.org/multer/-/multer-1.4.5-lts.2.tgz",
@@ -12256,6 +12331,7 @@
 "version": "3.1.1",
 "resolved": "https://registry.npmjs.org/node-abort-controller/-/node-abort-controller-3.1.1.tgz",
 "integrity": "sha512-AGK2yQKIjRuqnc6VkX2Xj5d+QW8xZ87pa1UK6yA6ouUyuxfHuMP6umE5QK7UmTeOAymo+Zx1Fxiuw9rVx8taHQ==",
+"dev": true,
 "license": "MIT"
 },
 "node_modules/node-addon-api": {
@@ -12323,21 +12399,6 @@
 "node": "^18.17.0 || >=20.5.0"
 }
 },
-"node_modules/node-gyp-build-optional-packages": {
-"version": "5.2.2",
-"resolved": "https://registry.npmjs.org/node-gyp-build-optional-packages/-/node-gyp-build-optional-packages-5.2.2.tgz",
-"integrity": "sha512-s+w+rBWnpTMwSFbaE0UXsRlg7hU4FjekKU4eyAih5T8nJuNZT1nNsskXpxmeqSK9UzkBl6UgRlnKc8hz8IEqOw==",
-"license": "MIT",
-"optional": true,
-"dependencies": {
-"detect-libc": "^2.0.1"
-},
-"bin": {
-"node-gyp-build-optional-packages": "bin.js",
-"node-gyp-build-optional-packages-optional": "optional.js",
-"node-gyp-build-optional-packages-test": "build-test.js"
-}
-},
 "node_modules/node-gyp/node_modules/abbrev": {
 "version": "3.0.1",
 "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-3.0.1.tgz",
@@ -12511,12 +12572,6 @@
 "node": ">=0.10.0"
 }
 },
-"node_modules/notepack.io": {
-"version": "3.0.1",
-"resolved": "https://registry.npmjs.org/notepack.io/-/notepack.io-3.0.1.tgz",
-"integrity": "sha512-TKC/8zH5pXIAMVQio2TvVDTtPRX+DJPHDqjRbxogtFiByHyzKmy96RA0JtCQJ+WouyyL4A10xomQzgbUT+1jCg==",
-"license": "MIT"
-},
 "node_modules/npmlog": {
 "version": "5.0.1",
 "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-5.0.1.tgz",
@@ -12586,6 +12641,12 @@
 "node": ">= 0.4"
 }
 },
+"node_modules/obuf": {
+"version": "1.1.2",
+"resolved": "https://registry.npmjs.org/obuf/-/obuf-1.1.2.tgz",
+"integrity": "sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==",
+"license": "MIT"
+},
 "node_modules/on-finished": {
 "version": "2.4.1",
 "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz",
@@ -13108,13 +13169,13 @@
 }
 },
 "node_modules/pg": {
-"version": "8.15.5",
-"resolved": "https://registry.npmjs.org/pg/-/pg-8.15.5.tgz",
-"integrity": "sha512-EpAhHFQc+aH9VfeffWIVC+XXk6lmAhS9W1FxtxcPXs94yxhrI1I6w/zkWfIOII/OkBv3Be04X3xMOj0kQ78l6w==",
+"version": "8.15.6",
+"resolved": "https://registry.npmjs.org/pg/-/pg-8.15.6.tgz",
+"integrity": "sha512-yvao7YI3GdmmrslNVsZgx9PfntfWrnXwtR+K/DjI0I/sTKif4Z623um+sjVZ1hk5670B+ODjvHDAckKdjmPTsg==",
 "license": "MIT",
 "dependencies": {
 "pg-connection-string": "^2.8.5",
-"pg-pool": "^3.9.5",
+"pg-pool": "^3.9.6",
 "pg-protocol": "^1.9.5",
 "pg-types": "^2.1.0",
 "pgpass": "1.x"
@@ -13156,6 +13217,15 @@
 "node": ">=4.0.0"
 }
 },
+"node_modules/pg-numeric": {
+"version": "1.0.2",
+"resolved": "https://registry.npmjs.org/pg-numeric/-/pg-numeric-1.0.2.tgz",
+"integrity": "sha512-BM/Thnrw5jm2kKLE5uJkXqqExRUY/toLHda65XgFTBTFYZyopbKjBe29Ii3RbkvlsMoFwD+tHeGaCjjv0gHlyw==",
+"license": "ISC",
+"engines": {
+"node": ">=4"
+}
+},
 "node_modules/pg-pool": {
 "version": "3.9.6",
 "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.9.6.tgz",
@@ -13465,6 +13535,12 @@
 "node": ">=0.10.0"
 }
 },
+"node_modules/postgres-range": {
+"version": "1.1.4",
+"resolved": "https://registry.npmjs.org/postgres-range/-/postgres-range-1.1.4.tgz",
+"integrity": "sha512-i/hbxIE9803Alj/6ytL7UHQxRvZkI9O4Sy+J3HGc4F4oo/2eQAjTSNJ0bfxyse3bH0nuVesCk+3IRLaMtG3H6w==",
+"license": "MIT"
+},
 "node_modules/prelude-ls": {
 "version": "1.2.1",
 "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz",
@@ -14176,6 +14252,8 @@
 "resolved": "https://registry.npmjs.org/redis-errors/-/redis-errors-1.2.0.tgz",
 "integrity": "sha512-1qny3OExCf0UvUV/5wpYKf2YwPcOqXzkwKKSmKHiE6ZMQs5heeE/c8eXK+PNllPvmjgAbfnsbpkGZWy8cBpn9w==",
 "license": "MIT",
+"optional": true,
+"peer": true,
 "engines": {
 "node": ">=4"
 }
@@ -14185,6 +14263,8 @@
 "resolved": "https://registry.npmjs.org/redis-parser/-/redis-parser-3.0.0.tgz",
 "integrity": "sha512-DJnGAeenTdpMEH6uAJRK/uiyEIH9WVsUmoLwzudwGJUwZPp80PDBWPHXSAGNPwNvIXAbe7MSUB1zQFugFml66A==",
 "license": "MIT",
+"optional": true,
+"peer": true,
 "dependencies": {
 "redis-errors": "^1.0.0"
 },
@@ -15303,7 +15383,9 @@
 "version": "2.1.0",
 "resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.1.0.tgz",
 "integrity": "sha512-qoRRSyROncaz1z0mvYqIE4lCd9p2R90i6GxW3uZv5ucSu8tU7B5HXUP1gG8pVZsYNVaXjk8ClXHPttLyxAL48A==",
-"license": "MIT"
+"license": "MIT",
+"optional": true,
+"peer": true
 },
 "node_modules/statuses": {
 "version": "2.0.1",
@@ -16922,15 +17004,6 @@
 "node": ">=8"
 }
 },
-"node_modules/uid2": {
-"version": "1.0.0",
-"resolved": "https://registry.npmjs.org/uid2/-/uid2-1.0.0.tgz",
-"integrity": "sha512-+I6aJUv63YAcY9n4mQreLUt0d4lvwkkopDNmpomkAUz0fAkEMV9pRWxN0EjhW1YfRhcuyHg2v3mwddCDW1+LFQ==",
-"license": "MIT",
-"engines": {
-"node": ">= 4.0.0"
-}
-},
 "node_modules/uint8array-extras": {
 "version": "1.4.0",
 "resolved": "https://registry.npmjs.org/uint8array-extras/-/uint8array-extras-1.4.0.tgz",
@@ -35,7 +35,6 @@
 "postinstall": "patch-package"
 },
 "dependencies": {
-"@nestjs/bullmq": "^11.0.1",
 "@nestjs/common": "^11.0.4",
 "@nestjs/core": "^11.0.4",
 "@nestjs/event-emitter": "^3.0.0",
@@ -49,11 +48,11 @@
 "@opentelemetry/exporter-prometheus": "^0.200.0",
 "@opentelemetry/sdk-node": "^0.200.0",
 "@react-email/components": "^0.0.36",
-"@socket.io/redis-adapter": "^8.3.0",
+"@socket.io/postgres-adapter": "^0.4.0",
+"@types/pg": "^8.11.14",
 "archiver": "^7.0.0",
 "async-lock": "^1.4.0",
 "bcrypt": "^5.1.1",
-"bullmq": "^5.51.0",
 "chokidar": "^3.5.3",
 "class-transformer": "^0.5.1",
 "class-validator": "^0.14.0",
@@ -64,9 +63,9 @@
 "fast-glob": "^3.3.2",
 "fluent-ffmpeg": "^2.1.2",
 "geo-tz": "^8.0.0",
+"graphile-worker": "^0.16.6",
 "handlebars": "^4.7.8",
 "i18n-iso-countries": "^7.6.0",
-"ioredis": "^5.3.2",
 "joi": "^17.10.0",
 "js-yaml": "^4.1.0",
 "kysely": "^0.28.0",
@@ -79,7 +78,7 @@
 "nestjs-otel": "^6.0.0",
 "nodemailer": "^6.9.13",
 "openid-client": "^6.3.3",
-"pg": "^8.11.3",
+"pg": "^8.15.6",
 "picomatch": "^4.0.2",
 "react": "^19.0.0",
 "react-dom": "^19.0.0",
@@ -105,7 +104,6 @@
 "@nestjs/testing": "^11.0.4",
 "@swc/core": "^1.4.14",
 "@testcontainers/postgresql": "^10.2.1",
-"@testcontainers/redis": "^10.18.0",
 "@types/archiver": "^6.0.0",
 "@types/async-lock": "^1.4.2",
 "@types/bcrypt": "^5.0.0",
@@ -1,4 +1,3 @@
-import { BullModule } from '@nestjs/bullmq';
 import { Inject, Module, OnModuleDestroy, OnModuleInit, ValidationPipe } from '@nestjs/common';
 import { APP_FILTER, APP_GUARD, APP_INTERCEPTOR, APP_PIPE } from '@nestjs/core';
 import { ScheduleModule, SchedulerRegistry } from '@nestjs/schedule';
@@ -37,11 +36,9 @@ export const middleware = [
 ];

 const configRepository = new ConfigRepository();
-const { bull, cls, database, otel } = configRepository.getEnv();
+const { cls, database, otel } = configRepository.getEnv();

 const imports = [
-BullModule.forRoot(bull.config),
-BullModule.registerQueue(...bull.queues),
 ClsModule.forRoot(cls.config),
 OpenTelemetryModule.forRoot(otel),
 KyselyModule.forRoot(getKyselyConfig(database.config)),
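For context, the BullModule wiring removed above is what the newly added graphile-worker dependency replaces. A minimal sketch of that library's general enqueue/run pattern — the task name, payload shape, and DATABASE_URL variable are illustrative assumptions, not code from this branch:

```ts
import { quickAddJob, run } from 'graphile-worker';

const connectionString = process.env.DATABASE_URL!; // assumed env var

async function main() {
  // Enqueue: graphile-worker persists the job in the graphile_worker.jobs table.
  await quickAddJob({ connectionString }, 'generate-thumbnail', { assetId: '123' });

  // Consume: a worker polls Postgres and dispatches to the matching task handler.
  const runner = await run({
    connectionString,
    concurrency: 5,
    taskList: {
      'generate-thumbnail': async (payload) => {
        // handler body elided
      },
    },
  });
  await runner.promise;
}
```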
@@ -67,7 +67,7 @@ const runQuery = async (query: string) => {

 const runMigrations = async () => {
 const configRepository = new ConfigRepository();
-const logger = LoggingRepository.create();
+const logger = new LoggingRepository(undefined, configRepository);
 const db = getDatabaseClient();
 const databaseRepository = new DatabaseRepository(db, logger, configRepository);
 await databaseRepository.runMigrations();
@@ -72,9 +72,7 @@ class SqlGenerator {
 await rm(this.options.targetDir, { force: true, recursive: true });
 await mkdir(this.options.targetDir);

-if (!process.env.DB_HOSTNAME) {
-process.env.DB_HOSTNAME = 'localhost';
-}
+process.env.DB_HOSTNAME = 'localhost';
 const { database, cls, otel } = new ConfigRepository().getEnv();

 const moduleFixture = await Test.createTestingModule({
@@ -1,8 +1,8 @@
-import { Controller, Get, Query, Res } from '@nestjs/common';
+import { Controller, Get, Query } from '@nestjs/common';
 import { ApiTags } from '@nestjs/swagger';
-import { Response } from 'express';
+import { AssetResponseDto } from 'src/dtos/asset-response.dto';
 import { AuthDto } from 'src/dtos/auth.dto';
-import { TimeBucketAssetDto, TimeBucketAssetResponseDto, TimeBucketDto } from 'src/dtos/time-bucket.dto';
+import { TimeBucketAssetDto, TimeBucketDto, TimeBucketResponseDto } from 'src/dtos/time-bucket.dto';
 import { Permission } from 'src/enum';
 import { Auth, Authenticated } from 'src/middleware/auth.guard';
 import { TimelineService } from 'src/services/timeline.service';
@@ -14,19 +14,13 @@ export class TimelineController {

 @Get('buckets')
 @Authenticated({ permission: Permission.ASSET_READ, sharedLink: true })
-getTimeBuckets(@Auth() auth: AuthDto, @Query() dto: TimeBucketDto) {
+getTimeBuckets(@Auth() auth: AuthDto, @Query() dto: TimeBucketDto): Promise<TimeBucketResponseDto[]> {
 return this.service.getTimeBuckets(auth, dto);
 }

 @Get('bucket')
 @Authenticated({ permission: Permission.ASSET_READ, sharedLink: true })
-async getTimeBucket(
-@Auth() auth: AuthDto,
-@Query() dto: TimeBucketAssetDto,
-@Res({ passthrough: true }) res: Response,
-): Promise<TimeBucketAssetResponseDto> {
-res.contentType('application/json');
-const jsonBucket = await this.service.getTimeBucket(auth, dto);
-return jsonBucket as unknown as TimeBucketAssetResponseDto;
+getTimeBucket(@Auth() auth: AuthDto, @Query() dto: TimeBucketAssetDto): Promise<AssetResponseDto[]> {
+return this.service.getTimeBucket(auth, dto) as Promise<AssetResponseDto[]>;
 }
 }
@@ -165,12 +165,6 @@ export type Stack = {
   assetCount?: number;
 };
 
-export type TimelineStack = {
-  id: string;
-  primaryAssetId: string;
-  assetCount: number;
-};
-
 export type AuthSharedLink = {
   id: string;
   expiresAt: Date | null;
server/src/db.d.ts (vendored, 27 changes)
@@ -236,6 +236,30 @@ export interface GeodataPlaces {
   name: string;
 }
 
+export interface GraphileWorkerJobs {
+  id: Generated<string>;
+  task_identifier: string;
+  locked_at: Timestamp | null;
+  locked_by: string | null;
+  run_at: Timestamp | null;
+  attempts: number;
+  max_attempts: number;
+}
+
+export interface GraphileWorkerPrivateJobs {
+  id: Generated<string>;
+  task_id: string;
+  locked_at: Timestamp | null;
+  locked_by: string | null;
+  attempts: number;
+  max_attempts: number;
+}
+
+export interface GraphileWorkerPrivateTasks {
+  id: Generated<string>;
+  identifier: string;
+}
+
 export interface Libraries {
   createdAt: Generated<Timestamp>;
   deletedAt: Timestamp | null;
@@ -476,6 +500,9 @@ export interface DB {
   exif: Exif;
   face_search: FaceSearch;
   geodata_places: GeodataPlaces;
+  'graphile_worker.jobs': GraphileWorkerJobs;
+  'graphile_worker._private_jobs': GraphileWorkerPrivateJobs;
+  'graphile_worker._private_tasks': GraphileWorkerPrivateTasks;
   libraries: Libraries;
   memories: Memories;
   memories_assets_assets: MemoriesAssetsAssets;
@@ -13,7 +13,6 @@ import {
 import { TagResponseDto, mapTag } from 'src/dtos/tag.dto';
 import { UserResponseDto, mapUser } from 'src/dtos/user.dto';
 import { AssetStatus, AssetType } from 'src/enum';
-import { hexOrBufferToBase64 } from 'src/utils/bytes';
 import { mimeTypes } from 'src/utils/mime-types';
 
 export class SanitizedAssetResponseDto {
@@ -141,6 +140,15 @@ const mapStack = (entity: { stack?: Stack | null }) => {
   };
 };
 
+// if an asset is jsonified in the DB before being returned, its buffer fields will be hex-encoded strings
+export const hexOrBufferToBase64 = (encoded: string | Buffer) => {
+  if (typeof encoded === 'string') {
+    return Buffer.from(encoded.slice(2), 'hex').toString('base64');
+  }
+
+  return encoded.toString('base64');
+};
+
 export function mapAsset(entity: MapAsset, options: AssetMapOptions = {}): AssetResponseDto {
   const { stripMetadata = false, withStack = false } = options;
 
@@ -183,7 +191,7 @@ export function mapAsset(entity: MapAsset, options: AssetMapOptions = {}): Asset
     tags: entity.tags?.map((tag) => mapTag(tag)),
     people: peopleWithFaces(entity.faces),
     unassignedFaces: entity.faces?.filter((face) => !face.person).map((a) => mapFacesWithoutPerson(a)),
-    checksum: hexOrBufferToBase64(entity.checksum)!,
+    checksum: hexOrBufferToBase64(entity.checksum),
    stack: withStack ? mapStack(entity) : undefined,
     isOffline: entity.isOffline,
     hasMetadata: true,
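Note on the relocated helper: a bytea column arrives as a Buffer when the row is read directly, but as a Postgres hex literal (a backslash-x prefix followed by hex digits) once the row has been jsonified inside the database, which is why hexOrBufferToBase64 strips two characters before decoding. A quick sketch with made-up values, not part of the change itself (the import path is hypothetical):

    import { hexOrBufferToBase64 } from 'src/dtos/asset-response.dto'; // hypothetical import path
    const checksum = Buffer.from('cafef00d', 'hex');
    const viaJson = String.raw`\xcafef00d`; // shape produced by to_json()/json_agg() for bytea
    hexOrBufferToBase64(checksum); // 'yv7wDQ=='
    hexOrBufferToBase64(viaJson); // 'yv7wDQ==' as well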
@@ -157,34 +157,4 @@ export class EnvDto {
   @IsString()
   @Optional()
   NO_COLOR?: string;
-
-  @IsString()
-  @Optional()
-  REDIS_HOSTNAME?: string;
-
-  @IsInt()
-  @Optional()
-  @Type(() => Number)
-  REDIS_PORT?: number;
-
-  @IsInt()
-  @Optional()
-  @Type(() => Number)
-  REDIS_DBINDEX?: number;
-
-  @IsString()
-  @Optional()
-  REDIS_USERNAME?: string;
-
-  @IsString()
-  @Optional()
-  REDIS_PASSWORD?: string;
-
-  @IsString()
-  @Optional()
-  REDIS_SOCKET?: string;
-
-  @IsString()
-  @Optional()
-  REDIS_URL?: string;
 }
@@ -30,20 +30,15 @@ export class JobCountsDto {
   @ApiProperty({ type: 'integer' })
   active!: number;
   @ApiProperty({ type: 'integer' })
-  completed!: number;
-  @ApiProperty({ type: 'integer' })
-  failed!: number;
+  waiting!: number;
   @ApiProperty({ type: 'integer' })
   delayed!: number;
   @ApiProperty({ type: 'integer' })
-  waiting!: number;
-  @ApiProperty({ type: 'integer' })
-  paused!: number;
+  failed!: number;
 }
 
 export class QueueStatusDto {
-  isActive!: boolean;
-  isPaused!: boolean;
+  paused!: boolean;
 }
 
 export class JobStatusDto {
@@ -1,11 +1,15 @@
 import { ApiProperty } from '@nestjs/swagger';
-import { IsEnum, IsInt, IsString, Min } from 'class-validator';
+import { IsEnum, IsNotEmpty, IsString } from 'class-validator';
 import { AssetOrder } from 'src/enum';
-import { TimeBucketAssets, TimelineStack } from 'src/services/timeline.service.types';
+import { TimeBucketSize } from 'src/repositories/asset.repository';
 import { Optional, ValidateBoolean, ValidateUUID } from 'src/validation';
 
 export class TimeBucketDto {
+  @IsNotEmpty()
+  @IsEnum(TimeBucketSize)
+  @ApiProperty({ enum: TimeBucketSize, enumName: 'TimeBucketSize' })
+  size!: TimeBucketSize;
+
   @ValidateUUID({ optional: true })
   userId?: string;
 
@@ -42,75 +46,9 @@ export class TimeBucketDto {
 export class TimeBucketAssetDto extends TimeBucketDto {
   @IsString()
   timeBucket!: string;
-
-  @IsInt()
-  @Min(1)
-  @Optional()
-  page?: number;
-
-  @IsInt()
-  @Min(1)
-  @Optional()
-  pageSize?: number;
 }
 
-export class TimelineStackResponseDto implements TimelineStack {
-  id!: string;
-  primaryAssetId!: string;
-  assetCount!: number;
-}
-
-export class TimeBucketAssetResponseDto implements TimeBucketAssets {
-  id!: string[];
-
-  ownerId!: string[];
-
-  ratio!: number[];
-
-  isFavorite!: number[];
-
-  isArchived!: number[];
-
-  isTrashed!: number[];
-
-  isImage!: number[];
-
-  @ApiProperty({ type: 'array', items: { type: 'string', nullable: true } })
-  thumbhash!: (string | null)[];
-
-  localDateTime!: string[];
-
-  @ApiProperty({ type: 'array', items: { type: 'string', nullable: true } })
-  duration!: (string | null)[];
-
-  // id, count
-  @ApiProperty({
-    type: 'array',
-    items: {
-      type: 'array',
-      items: { type: 'string' },
-      minItems: 2,
-      maxItems: 2,
-      nullable: true,
-    },
-    description: '(stack ID, stack asset count) tuple',
-  })
-  stack?: ([string, string] | null)[];
-
-  @ApiProperty({ type: 'array', items: { type: 'string', nullable: true } })
-  projectionType!: (string | null)[];
-
-  @ApiProperty({ type: 'array', items: { type: 'string', nullable: true } })
-  livePhotoVideoId!: (string | null)[];
-
-  @ApiProperty({ type: 'array', items: { type: 'string', nullable: true } })
-  city!: (string | null)[];
-
-  @ApiProperty({ type: 'array', items: { type: 'string', nullable: true } })
-  country!: (string | null)[];
-}
-
-export class TimeBucketsResponseDto {
+export class TimeBucketResponseDto {
   @ApiProperty({ type: 'string' })
   timeBucket!: string;
 
@@ -204,6 +204,7 @@ export enum SystemMetadataKey {
   SYSTEM_FLAGS = 'system-flags',
   VERSION_CHECK_STATE = 'version-check-state',
   LICENSE = 'license',
+  QUEUES_STATE = 'queues-state',
 }
 
 export enum UserMetadataKey {
@@ -533,10 +534,20 @@ export enum JobName {
 }
 
 export enum JobCommand {
+  // The behavior of start depends on the queue. Usually it is a request to
+  // reprocess everything associated with the queue from scratch.
   START = 'start',
+
+  // Pause prevents workers from processing jobs.
   PAUSE = 'pause',
+
+  // Resume allows workers to continue processing jobs.
   RESUME = 'resume',
-  EMPTY = 'empty',
+
+  // Clear removes all pending jobs.
+  CLEAR = 'clear',
+
+  // ClearFailed removes all failed jobs.
   CLEAR_FAILED = 'clear-failed',
 }
 
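Note: with EMPTY renamed to CLEAR and CLEAR_FAILED kept, a dispatcher over this enum would plausibly map one-to-one onto the repository methods introduced later in this diff (pause, resume, clear, clearFailed). A sketch only, not actual Immich code:

    const handleCommand = (command: JobCommand, queueName: QueueName) => {
      switch (command) {
        case JobCommand.PAUSE:
          return jobRepository.pause(queueName);
        case JobCommand.RESUME:
          return jobRepository.resume(queueName);
        case JobCommand.CLEAR:
          return jobRepository.clear(queueName);
        case JobCommand.CLEAR_FAILED:
          return jobRepository.clearFailed(queueName);
      }
    };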
@@ -1,9 +1,10 @@
 import { INestApplicationContext } from '@nestjs/common';
 import { IoAdapter } from '@nestjs/platform-socket.io';
-import { createAdapter } from '@socket.io/redis-adapter';
-import { Redis } from 'ioredis';
+import { createAdapter } from '@socket.io/postgres-adapter';
+import pg, { PoolConfig } from 'pg';
 import { ServerOptions } from 'socket.io';
 import { ConfigRepository } from 'src/repositories/config.repository';
+import { asPostgresConnectionConfig } from 'src/utils/database';
 
 export class WebSocketAdapter extends IoAdapter {
   constructor(private app: INestApplicationContext) {
@@ -11,11 +12,11 @@ export class WebSocketAdapter extends IoAdapter {
   }
 
   createIOServer(port: number, options?: ServerOptions): any {
-    const { redis } = this.app.get(ConfigRepository).getEnv();
     const server = super.createIOServer(port, options);
-    const pubClient = new Redis(redis);
-    const subClient = pubClient.duplicate();
-    server.adapter(createAdapter(pubClient, subClient));
+    const configRepository = new ConfigRepository();
+    const { database } = configRepository.getEnv();
+    const pool = new pg.Pool(asPostgresConnectionConfig(database.config) as PoolConfig);
+    server.adapter(createAdapter(pool));
     return server;
   }
 }
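Note: the rewritten adapter swaps the Redis pub/sub client pair for a single pg.Pool; @socket.io/postgres-adapter fans events out across instances via Postgres LISTEN/NOTIFY, so each server only needs to point at the shared database. A standalone sketch of the same wiring outside Nest (DATABASE_URL is a hypothetical env var, not from this diff):

    import { createAdapter } from '@socket.io/postgres-adapter';
    import { Server } from 'socket.io';
    import pg from 'pg';

    const pool = new pg.Pool({ connectionString: process.env.DATABASE_URL }); // hypothetical config
    const io = new Server(3000);
    io.adapter(createAdapter(pool));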
@@ -236,12 +236,12 @@ limit
 with
   "assets" as (
     select
-      date_trunc('MONTH', "localDateTime" at time zone 'UTC') at time zone 'UTC' as "timeBucket"
+      date_trunc($1, "localDateTime" at time zone 'UTC') at time zone 'UTC' as "timeBucket"
     from
       "assets"
     where
       "assets"."deletedAt" is null
-      and "assets"."isVisible" = $1
+      and "assets"."isVisible" = $2
   )
 select
   "timeBucket",
@@ -254,95 +254,37 @@ order by
   "timeBucket" desc
 
 -- AssetRepository.getTimeBucket
-with
-  "cte" as (
-    select
-      "assets"."duration",
-      "assets"."id",
-      assets."isArchived"::int as "isArchived",
-      assets."isFavorite"::int as "isFavorite",
-      (assets.type = 'IMAGE')::int as "isImage",
-      (assets."deletedAt" is null)::int as "isTrashed",
-      (assets.type = 'VIDEO')::int as "isVideo",
-      "assets"."livePhotoVideoId",
-      "assets"."localDateTime",
-      "assets"."ownerId",
-      "assets"."status",
-      encode("assets"."thumbhash", 'base64') as "thumbhash",
-      "exif"."city",
-      "exif"."country",
-      "exif"."projectionType",
-      coalesce(
-        case
-          when exif."exifImageHeight" = 0
-          or exif."exifImageWidth" = 0 then 1
-          when "exif"."orientation" in ('5', '6', '7', '8', '-90', '90') then round(
-            exif."exifImageHeight"::numeric / exif."exifImageWidth"::numeric,
-            3
-          )
-          else round(
-            exif."exifImageWidth"::numeric / exif."exifImageHeight"::numeric,
-            3
-          )
-        end,
-        1
-      ) as "ratio",
-      "stack"
-    from
-      "assets"
-      inner join "exif" on "assets"."id" = "exif"."assetId"
-      left join lateral (
-        select
-          array[stacked."stackId"::text, count('stacked')::text] as "stack"
-        from
-          "assets" as "stacked"
-        where
-          "stacked"."stackId" = "assets"."stackId"
-          and "stacked"."deletedAt" is null
-          and "stacked"."isArchived" = $1
-        group by
-          "stacked"."stackId"
-      ) as "stacked_assets" on true
-    where
-      "assets"."deletedAt" is null
-      and "assets"."isVisible" = $2
-      and date_trunc('MONTH', "localDateTime" at time zone 'UTC') at time zone 'UTC' = $3
-      and not exists (
-        select
-        from
-          "asset_stack"
-        where
-          "asset_stack"."id" = "assets"."stackId"
-          and "asset_stack"."primaryAssetId" != "assets"."id"
-      )
-    order by
-      "assets"."localDateTime" desc
-  ),
-  "agg" as (
-    select
-      coalesce(array_agg("city"), '{}') as "city",
-      coalesce(array_agg("country"), '{}') as "country",
-      coalesce(array_agg("duration"), '{}') as "duration",
-      coalesce(array_agg("id"), '{}') as "id",
-      coalesce(array_agg("isArchived"), '{}') as "isArchived",
-      coalesce(array_agg("isFavorite"), '{}') as "isFavorite",
-      coalesce(array_agg("isImage"), '{}') as "isImage",
-      coalesce(array_agg("isTrashed"), '{}') as "isTrashed",
-      coalesce(array_agg("livePhotoVideoId"), '{}') as "livePhotoVideoId",
-      coalesce(array_agg("localDateTime"), '{}') as "localDateTime",
-      coalesce(array_agg("ownerId"), '{}') as "ownerId",
-      coalesce(array_agg("projectionType"), '{}') as "projectionType",
-      coalesce(array_agg("ratio"), '{}') as "ratio",
-      coalesce(array_agg("status"), '{}') as "status",
-      coalesce(array_agg("thumbhash"), '{}') as "thumbhash",
-      coalesce(json_agg("stack"), '[]') as "stack"
-    from
-      "cte"
-  )
 select
-  to_json(agg)::text as "assets"
+  "assets".*,
+  to_json("exif") as "exifInfo",
+  to_json("stacked_assets") as "stack"
 from
-  "agg"
+  "assets"
+  left join "exif" on "assets"."id" = "exif"."assetId"
+  left join "asset_stack" on "asset_stack"."id" = "assets"."stackId"
+  left join lateral (
+    select
+      "asset_stack".*,
+      count("stacked") as "assetCount"
+    from
+      "assets" as "stacked"
+    where
+      "stacked"."stackId" = "asset_stack"."id"
+      and "stacked"."deletedAt" is null
+      and "stacked"."isArchived" = $1
+    group by
+      "asset_stack"."id"
+  ) as "stacked_assets" on "asset_stack"."id" is not null
+where
+  (
+    "asset_stack"."primaryAssetId" = "assets"."id"
+    or "assets"."stackId" is null
+  )
+  and "assets"."deletedAt" is null
+  and "assets"."isVisible" = $2
+  and date_trunc($3, "localDateTime" at time zone 'UTC') at time zone 'UTC' = $4
+order by
+  "assets"."localDateTime" desc
 
 -- AssetRepository.getDuplicates
 with
@@ -67,6 +67,7 @@ export interface AssetBuilderOptions {
 }
 
 export interface TimeBucketOptions extends AssetBuilderOptions {
+  size: TimeBucketSize;
   order?: AssetOrder;
 }
 
@@ -538,7 +539,7 @@ export class AssetRepository {
       .with('assets', (qb) =>
         qb
           .selectFrom('assets')
-          .select(truncatedDate<Date>(TimeBucketSize.MONTH).as('timeBucket'))
+          .select(truncatedDate<Date>(options.size).as('timeBucket'))
           .$if(!!options.isTrashed, (qb) => qb.where('assets.status', '!=', AssetStatus.DELETED))
          .where('assets.deletedAt', options.isTrashed ? 'is not' : 'is', null)
           .where('assets.isVisible', '=', true)
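Note: passing options.size through truncatedDate() is what turns the previously hard-coded date_trunc('MONTH', ...) into the parameterized date_trunc($1, ...) seen in the regenerated SQL above; interpolating a value into a Kysely sql template emits a bind parameter rather than inline text. One plausible shape for the helper, assuming TimeBucketSize values are Postgres date_trunc units (a sketch, not confirmed by this diff):

    import { sql } from 'kysely';

    const truncatedDate = <T>(size: TimeBucketSize) =>
      sql<T>`date_trunc(${size}, "localDateTime" at time zone 'UTC') at time zone 'UTC'`;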
@@ -580,125 +581,53 @@ export class AssetRepository {
     );
   }
 
-  @GenerateSql({
-    params: [DummyValue.TIME_BUCKET, { size: TimeBucketSize.MONTH, withStacked: true }, { skip: 0, take: 1000 }],
-  })
-  getTimeBucket(timeBucket: string, options: TimeBucketOptions) {
-    const query = this.db
-      .with('cte', (qb) =>
-        qb
-          .selectFrom('assets')
-          .innerJoin('exif', 'assets.id', 'exif.assetId')
-          .select((eb) => [
-            'assets.duration',
-            'assets.id',
-            sql`assets."isArchived"::int`.as('isArchived'),
-            sql`assets."isFavorite"::int`.as('isFavorite'),
-            sql`(assets.type = 'IMAGE')::int`.as('isImage'),
-            sql`(assets."deletedAt" is null)::int`.as('isTrashed'),
-            sql`(assets.type = 'VIDEO')::int`.as('isVideo'),
-            'assets.livePhotoVideoId',
-            'assets.localDateTime',
-            'assets.ownerId',
-            'assets.status',
-            eb.fn('encode', ['assets.thumbhash', sql.lit('base64')]).as('thumbhash'),
-            'exif.city',
-            'exif.country',
-            'exif.projectionType',
-            eb.fn
-              .coalesce(
-                eb
-                  .case()
-                  .when(sql`exif."exifImageHeight" = 0 or exif."exifImageWidth" = 0`)
-                  .then(eb.lit(1))
-                  .when('exif.orientation', 'in', sql<string>`('5', '6', '7', '8', '-90', '90')`)
-                  .then(sql`round(exif."exifImageHeight"::numeric / exif."exifImageWidth"::numeric, 3)`)
-                  .else(sql`round(exif."exifImageWidth"::numeric / exif."exifImageHeight"::numeric, 3)`)
-                  .end(),
-                eb.lit(1),
-              )
-              .as('ratio'),
-          ])
-          .where('assets.deletedAt', options.isTrashed ? 'is not' : 'is', null)
-          .where('assets.isVisible', '=', true)
-          .where(truncatedDate(TimeBucketSize.MONTH), '=', timeBucket.replace(/^[+-]/, ''))
-          .$if(!!options.albumId, (qb) =>
-            qb.where((eb) =>
-              eb.exists(
-                eb
-                  .selectFrom('albums_assets_assets')
-                  .whereRef('albums_assets_assets.assetsId', '=', 'assets.id')
-                  .where('albums_assets_assets.albumsId', '=', asUuid(options.albumId!)),
-              ),
-            ),
-          )
-          .$if(!!options.personId, (qb) => hasPeople(qb, [options.personId!]))
-          .$if(!!options.userIds, (qb) => qb.where('assets.ownerId', '=', anyUuid(options.userIds!)))
-          .$if(options.isArchived !== undefined, (qb) => qb.where('assets.isArchived', '=', options.isArchived!))
-          .$if(options.isFavorite !== undefined, (qb) => qb.where('assets.isFavorite', '=', options.isFavorite!))
-          .$if(!!options.withStacked, (qb) =>
-            qb
-              .where((eb) =>
-                eb.not(
-                  eb.exists(
-                    eb
-                      .selectFrom('asset_stack')
-                      .whereRef('asset_stack.id', '=', 'assets.stackId')
-                      .whereRef('asset_stack.primaryAssetId', '!=', 'assets.id'),
-                  ),
-                ),
-              )
-              .leftJoinLateral(
-                (eb) =>
-                  eb
-                    .selectFrom('assets as stacked')
-                    .select(sql`array[stacked."stackId"::text, count('stacked')::text]`.as('stack'))
-                    .whereRef('stacked.stackId', '=', 'assets.stackId')
-                    .where('stacked.deletedAt', 'is', null)
-                    .where('stacked.isArchived', '=', false)
-                    .groupBy('stacked.stackId')
-                    .as('stacked_assets'),
-                (join) => join.onTrue(),
-              )
-              .select('stack'),
-          )
-          .$if(!!options.assetType, (qb) => qb.where('assets.type', '=', options.assetType!))
-          .$if(options.isDuplicate !== undefined, (qb) =>
-            qb.where('assets.duplicateId', options.isDuplicate ? 'is not' : 'is', null),
-          )
-          .$if(!!options.isTrashed, (qb) => qb.where('assets.status', '!=', AssetStatus.DELETED))
-          .$if(!!options.tagId, (qb) => withTagId(qb, options.tagId!))
-          .orderBy('assets.localDateTime', options.order ?? 'desc'),
-      )
-      .with('agg', (qb) =>
-        qb
-          .selectFrom('cte')
-          .select((eb) => [
-            eb.fn.coalesce(eb.fn('array_agg', ['city']), sql.lit('{}')).as('city'),
-            eb.fn.coalesce(eb.fn('array_agg', ['country']), sql.lit('{}')).as('country'),
-            eb.fn.coalesce(eb.fn('array_agg', ['duration']), sql.lit('{}')).as('duration'),
-            eb.fn.coalesce(eb.fn('array_agg', ['id']), sql.lit('{}')).as('id'),
-            eb.fn.coalesce(eb.fn('array_agg', ['isArchived']), sql.lit('{}')).as('isArchived'),
-            eb.fn.coalesce(eb.fn('array_agg', ['isFavorite']), sql.lit('{}')).as('isFavorite'),
-            eb.fn.coalesce(eb.fn('array_agg', ['isImage']), sql.lit('{}')).as('isImage'),
-            // TODO: isTrashed is redundant as it will always be all 0s or 1s depending on the options
-            eb.fn.coalesce(eb.fn('array_agg', ['isTrashed']), sql.lit('{}')).as('isTrashed'),
-            eb.fn.coalesce(eb.fn('array_agg', ['livePhotoVideoId']), sql.lit('{}')).as('livePhotoVideoId'),
-            eb.fn.coalesce(eb.fn('array_agg', ['localDateTime']), sql.lit('{}')).as('localDateTime'),
-            eb.fn.coalesce(eb.fn('array_agg', ['ownerId']), sql.lit('{}')).as('ownerId'),
-            eb.fn.coalesce(eb.fn('array_agg', ['projectionType']), sql.lit('{}')).as('projectionType'),
-            eb.fn.coalesce(eb.fn('array_agg', ['ratio']), sql.lit('{}')).as('ratio'),
-            eb.fn.coalesce(eb.fn('array_agg', ['status']), sql.lit('{}')).as('status'),
-            eb.fn.coalesce(eb.fn('array_agg', ['thumbhash']), sql.lit('{}')).as('thumbhash'),
-          ])
-          .$if(!!options.withStacked, (qb) =>
-            qb.select((eb) => eb.fn.coalesce(eb.fn('json_agg', ['stack']), sql.lit('[]')).as('stack')),
-          ),
-      )
-      .selectFrom('agg')
-      .select(sql<string>`to_json(agg)::text`.as('assets'));
-
-    return query.executeTakeFirstOrThrow();
+  @GenerateSql({ params: [DummyValue.TIME_BUCKET, { size: TimeBucketSize.MONTH, withStacked: true }] })
+  async getTimeBucket(timeBucket: string, options: TimeBucketOptions) {
+    return this.db
+      .selectFrom('assets')
+      .selectAll('assets')
+      .$call(withExif)
+      .$if(!!options.albumId, (qb) =>
+        qb
+          .innerJoin('albums_assets_assets', 'albums_assets_assets.assetsId', 'assets.id')
+          .where('albums_assets_assets.albumsId', '=', options.albumId!),
+      )
+      .$if(!!options.personId, (qb) => hasPeople(qb, [options.personId!]))
+      .$if(!!options.userIds, (qb) => qb.where('assets.ownerId', '=', anyUuid(options.userIds!)))
+      .$if(options.isArchived !== undefined, (qb) => qb.where('assets.isArchived', '=', options.isArchived!))
+      .$if(options.isFavorite !== undefined, (qb) => qb.where('assets.isFavorite', '=', options.isFavorite!))
+      .$if(!!options.withStacked, (qb) =>
+        qb
+          .leftJoin('asset_stack', 'asset_stack.id', 'assets.stackId')
+          .where((eb) =>
+            eb.or([eb('asset_stack.primaryAssetId', '=', eb.ref('assets.id')), eb('assets.stackId', 'is', null)]),
+          )
+          .leftJoinLateral(
+            (eb) =>
+              eb
+                .selectFrom('assets as stacked')
+                .selectAll('asset_stack')
+                .select((eb) => eb.fn.count(eb.table('stacked')).as('assetCount'))
+                .whereRef('stacked.stackId', '=', 'asset_stack.id')
+                .where('stacked.deletedAt', 'is', null)
+                .where('stacked.isArchived', '=', false)
+                .groupBy('asset_stack.id')
+                .as('stacked_assets'),
+            (join) => join.on('asset_stack.id', 'is not', null),
+          )
+          .select((eb) => eb.fn.toJson(eb.table('stacked_assets').$castTo<Stack | null>()).as('stack')),
+      )
+      .$if(!!options.assetType, (qb) => qb.where('assets.type', '=', options.assetType!))
+      .$if(options.isDuplicate !== undefined, (qb) =>
+        qb.where('assets.duplicateId', options.isDuplicate ? 'is not' : 'is', null),
+      )
+      .$if(!!options.isTrashed, (qb) => qb.where('assets.status', '!=', AssetStatus.DELETED))
+      .$if(!!options.tagId, (qb) => withTagId(qb, options.tagId!))
+      .where('assets.deletedAt', options.isTrashed ? 'is not' : 'is', null)
+      .where('assets.isVisible', '=', true)
+      .where(truncatedDate(options.size), '=', timeBucket.replace(/^[+-]/, ''))
+      .orderBy('assets.localDateTime', options.order ?? 'desc')
+      .execute();
   }
 
   @GenerateSql({ params: [DummyValue.UUID] })
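Note on the query style: the rewrite leans heavily on Kysely's $if(), which applies its callback to the builder only when the condition is truthy, so each optional filter composes without intermediate variables or string concatenation. A minimal illustration of the pattern (a sketch, not Immich code):

    const listAssets = (db: Kysely<DB>, ownerId?: string) =>
      db
        .selectFrom('assets')
        .selectAll('assets')
        .$if(!!ownerId, (qb) => qb.where('assets.ownerId', '=', ownerId!))
        .execute();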
@@ -26,38 +26,12 @@ const resetEnv = () => {
     'DB_SKIP_MIGRATIONS',
     'DB_VECTOR_EXTENSION',
-
-    'REDIS_HOSTNAME',
-    'REDIS_PORT',
-    'REDIS_DBINDEX',
-    'REDIS_USERNAME',
-    'REDIS_PASSWORD',
-    'REDIS_SOCKET',
-    'REDIS_URL',
-
     'NO_COLOR',
   ]) {
     delete process.env[env];
   }
 };
 
-const sentinelConfig = {
-  sentinels: [
-    {
-      host: 'redis-sentinel-node-0',
-      port: 26_379,
-    },
-    {
-      host: 'redis-sentinel-node-1',
-      port: 26_379,
-    },
-    {
-      host: 'redis-sentinel-node-2',
-      port: 26_379,
-    },
-  ],
-  name: 'redis-sentinel',
-};
-
 describe('getEnv', () => {
   beforeEach(() => {
     resetEnv();
@@ -108,34 +82,6 @@ describe('getEnv', () => {
     });
   });
 
-  describe('redis', () => {
-    it('should use defaults', () => {
-      const { redis } = getEnv();
-      expect(redis).toEqual({
-        host: 'redis',
-        port: 6379,
-        db: 0,
-        username: undefined,
-        password: undefined,
-        path: undefined,
-      });
-    });
-
-    it('should parse base64 encoded config, ignore other env', () => {
-      process.env.REDIS_URL = `ioredis://${Buffer.from(JSON.stringify(sentinelConfig)).toString('base64')}`;
-      process.env.REDIS_HOSTNAME = 'redis-host';
-      process.env.REDIS_USERNAME = 'redis-user';
-      process.env.REDIS_PASSWORD = 'redis-password';
-      const { redis } = getEnv();
-      expect(redis).toEqual(sentinelConfig);
-    });
-
-    it('should reject invalid json', () => {
-      process.env.REDIS_URL = `ioredis://${Buffer.from('{ "invalid json"').toString('base64')}`;
-      expect(() => getEnv()).toThrowError('Failed to decode redis options');
-    });
-  });
-
   describe('noColor', () => {
     beforeEach(() => {
       delete process.env.NO_COLOR;
@@ -1,25 +1,14 @@
-import { RegisterQueueOptions } from '@nestjs/bullmq';
 import { Inject, Injectable, Optional } from '@nestjs/common';
-import { QueueOptions } from 'bullmq';
 import { plainToInstance } from 'class-transformer';
 import { validateSync } from 'class-validator';
 import { Request, Response } from 'express';
-import { RedisOptions } from 'ioredis';
 import { CLS_ID, ClsModuleOptions } from 'nestjs-cls';
 import { OpenTelemetryModuleOptions } from 'nestjs-otel/lib/interfaces';
-import { join } from 'node:path';
+import { join, resolve } from 'node:path';
 import { citiesFile, excludePaths, IWorker } from 'src/constants';
 import { Telemetry } from 'src/decorators';
 import { EnvDto } from 'src/dtos/env.dto';
-import {
-  DatabaseExtension,
-  ImmichEnvironment,
-  ImmichHeader,
-  ImmichTelemetry,
-  ImmichWorker,
-  LogLevel,
-  QueueName,
-} from 'src/enum';
+import { DatabaseExtension, ImmichEnvironment, ImmichHeader, ImmichTelemetry, ImmichWorker, LogLevel } from 'src/enum';
 import { DatabaseConnectionParams, VectorExtension } from 'src/types';
 import { setDifference } from 'src/utils/set';
 
@@ -46,11 +35,6 @@ export interface EnvData {
     thirdPartySupportUrl?: string;
   };
 
-  bull: {
-    config: QueueOptions;
-    queues: RegisterQueueOptions[];
-  };
-
   cls: {
     config: ClsModuleOptions;
   };
@@ -87,8 +71,6 @@ export interface EnvData {
     };
   };
 
-  redis: RedisOptions;
-
   telemetry: {
     apiPort: number;
     microservicesPort: number;
@@ -149,28 +131,12 @@ const getEnv = (): EnvData => {
   const isProd = environment === ImmichEnvironment.PRODUCTION;
   const buildFolder = dto.IMMICH_BUILD_DATA || '/build';
   const folders = {
+    // eslint-disable-next-line unicorn/prefer-module
+    dist: resolve(`${__dirname}/..`),
     geodata: join(buildFolder, 'geodata'),
     web: join(buildFolder, 'www'),
   };
 
-  let redisConfig = {
-    host: dto.REDIS_HOSTNAME || 'redis',
-    port: dto.REDIS_PORT || 6379,
-    db: dto.REDIS_DBINDEX || 0,
-    username: dto.REDIS_USERNAME || undefined,
-    password: dto.REDIS_PASSWORD || undefined,
-    path: dto.REDIS_SOCKET || undefined,
-  };
-
-  const redisUrl = dto.REDIS_URL;
-  if (redisUrl && redisUrl.startsWith('ioredis://')) {
-    try {
-      redisConfig = JSON.parse(Buffer.from(redisUrl.slice(10), 'base64').toString());
-    } catch (error) {
-      throw new Error(`Failed to decode redis options: ${error}`);
-    }
-  }
-
   const includedTelemetries =
     dto.IMMICH_TELEMETRY_INCLUDE === 'all'
       ? new Set(Object.values(ImmichTelemetry))
@@ -218,19 +184,6 @@ const getEnv = (): EnvData => {
       thirdPartySupportUrl: dto.IMMICH_THIRD_PARTY_SUPPORT_URL,
     },
 
-    bull: {
-      config: {
-        prefix: 'immich_bull',
-        connection: { ...redisConfig },
-        defaultJobOptions: {
-          attempts: 3,
-          removeOnComplete: true,
-          removeOnFail: false,
-        },
-      },
-      queues: Object.values(QueueName).map((name) => ({ name })),
-    },
-
     cls: {
       config: {
         middleware: {
@@ -269,8 +222,6 @@ const getEnv = (): EnvData => {
       },
     },
 
-    redis: redisConfig,
-
     resourcePaths: {
       lockFile: join(buildFolder, 'build-lock.json'),
       geodata: {
@@ -19,7 +19,7 @@ import { ReleaseNotification, ServerVersionResponseDto } from 'src/dtos/server.d
 import { ImmichWorker, MetadataKey, QueueName } from 'src/enum';
 import { ConfigRepository } from 'src/repositories/config.repository';
 import { LoggingRepository } from 'src/repositories/logging.repository';
-import { JobItem, JobSource } from 'src/types';
+import { JobItem } from 'src/types';
 import { handlePromiseError } from 'src/utils/misc';
 
 type EmitHandlers = Partial<{ [T in EmitEvent]: Array<EventItem<T>> }>;
@@ -48,7 +48,7 @@ type EventMap = {
   'config.validate': [{ newConfig: SystemConfig; oldConfig: SystemConfig }];
 
   // album events
-  'album.update': [{ id: string; recipientId: string }];
+  'album.update': [{ id: string; recipientIds: string[] }];
   'album.invite': [{ id: string; userId: string }];
 
   // asset events
@@ -58,13 +58,15 @@ type EventMap = {
   'asset.show': [{ assetId: string; userId: string }];
   'asset.trash': [{ assetId: string; userId: string }];
   'asset.delete': [{ assetId: string; userId: string }];
-  'asset.metadataExtracted': [{ assetId: string; userId: string; source?: JobSource }];
 
   // asset bulk events
   'assets.trash': [{ assetIds: string[]; userId: string }];
   'assets.delete': [{ assetIds: string[]; userId: string }];
   'assets.restore': [{ assetIds: string[]; userId: string }];
 
+  'queue.pause': [QueueName];
+  'queue.resume': [QueueName];
+
   'job.start': [QueueName, JobItem];
   'job.failed': [{ job: JobItem; error: Error | any }];
 
@@ -86,7 +88,7 @@ type EventMap = {
   'websocket.connect': [{ userId: string }];
 };
 
-export const serverEvents = ['config.update'] as const;
+export const serverEvents = ['config.update', 'queue.pause', 'queue.resume'] as const;
 export type ServerEvents = (typeof serverEvents)[number];
 
 export type EmitEvent = keyof EventMap;
@@ -1,15 +1,20 @@
-import { getQueueToken } from '@nestjs/bullmq';
 import { Injectable } from '@nestjs/common';
 import { ModuleRef, Reflector } from '@nestjs/core';
-import { JobsOptions, Queue, Worker } from 'bullmq';
 import { ClassConstructor } from 'class-transformer';
-import { setTimeout } from 'node:timers/promises';
-import { JobConfig } from 'src/decorators';
-import { JobName, JobStatus, MetadataKey, QueueCleanType, QueueName } from 'src/enum';
+import { makeWorkerUtils, run, Runner, TaskSpec, WorkerUtils } from 'graphile-worker';
+import { Kysely } from 'kysely';
+import { DateTime, Duration } from 'luxon';
+import { InjectKysely } from 'nestjs-kysely';
+import pg, { PoolConfig } from 'pg';
+import { DB } from 'src/db';
+import { GenerateSql, JobConfig } from 'src/decorators';
+import { JobName, JobStatus, MetadataKey, QueueName, SystemMetadataKey } from 'src/enum';
 import { ConfigRepository } from 'src/repositories/config.repository';
 import { EventRepository } from 'src/repositories/event.repository';
 import { LoggingRepository } from 'src/repositories/logging.repository';
+import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
 import { JobCounts, JobItem, JobOf, QueueStatus } from 'src/types';
+import { asPostgresConnectionConfig } from 'src/utils/database';
 import { getKeyByValue, getMethodNames, ImmichStartupError } from 'src/utils/misc';
 
 type JobMapItem = {
@@ -19,26 +24,38 @@ type JobMapItem = {
   label: string;
 };
 
+type QueueConfiguration = {
+  paused: boolean;
+  concurrency: number;
+};
+
 @Injectable()
 export class JobRepository {
-  private workers: Partial<Record<QueueName, Worker>> = {};
   private handlers: Partial<Record<JobName, JobMapItem>> = {};
 
+  // todo inject the pg pool
+  private pool?: pg.Pool;
+  // todo inject worker utils?
+  private workerUtils?: WorkerUtils;
+  private queueConfig: Record<string, QueueConfiguration> = {};
+  private runners: Record<string, Runner> = {};
+
   constructor(
-    private moduleRef: ModuleRef,
-    private configRepository: ConfigRepository,
-    private eventRepository: EventRepository,
+    @InjectKysely() private db: Kysely<DB>,
     private logger: LoggingRepository,
+    private moduleRef: ModuleRef,
+    private eventRepository: EventRepository,
+    private configRepository: ConfigRepository,
+    private systemMetadataRepository: SystemMetadataRepository,
   ) {
-    this.logger.setContext(JobRepository.name);
+    logger.setContext(JobRepository.name);
   }
 
-  setup(services: ClassConstructor<unknown>[]) {
+  async setup(services: ClassConstructor<unknown>[]) {
     const reflector = this.moduleRef.get(Reflector, { strict: false });
 
-    // discovery
-    for (const Service of services) {
-      const instance = this.moduleRef.get<any>(Service);
+    for (const service of services) {
+      const instance = this.moduleRef.get<any>(service);
       for (const methodName of getMethodNames(instance)) {
         const handler = instance[methodName];
         const config = reflector.get<JobConfig>(MetadataKey.JOB_CONFIG, handler);
@@ -47,7 +64,7 @@ export class JobRepository {
         }
 
         const { name: jobName, queue: queueName } = config;
-        const label = `${Service.name}.${handler.name}`;
+        const label = `${service.name}.${handler.name}`;
 
         // one handler per job
         if (this.handlers[jobName]) {
@@ -70,172 +87,216 @@ export class JobRepository {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// no missing handlers
|
const { database } = this.configRepository.getEnv();
|
||||||
for (const [jobKey, jobName] of Object.entries(JobName)) {
|
const pool = new pg.Pool({
|
||||||
const item = this.handlers[jobName];
|
...asPostgresConnectionConfig(database.config),
|
||||||
if (!item) {
|
max: 100,
|
||||||
const errorMessage = `Failed to find job handler for Job.${jobKey} ("${jobName}")`;
|
} as PoolConfig);
|
||||||
this.logger.error(
|
|
||||||
`${errorMessage}. Make sure to add the @OnJob({ name: JobName.${jobKey}, queue: QueueName.XYZ }) decorator for the new job.`,
|
// todo: remove debug info
|
||||||
);
|
setInterval(() => {
|
||||||
throw new ImmichStartupError(errorMessage);
|
this.logger.log(`connections:
|
||||||
}
|
total: ${pool.totalCount}
|
||||||
|
idle: ${pool.idleCount}
|
||||||
|
waiting: ${pool.waitingCount}`);
|
||||||
|
}, 5000);
|
||||||
|
|
||||||
|
pool.setMaxListeners(100);
|
||||||
|
|
||||||
|
pool.on('connect', (client) => {
|
||||||
|
client.setMaxListeners(200);
|
||||||
|
});
|
||||||
|
|
||||||
|
this.pool = pool;
|
||||||
|
|
||||||
|
this.workerUtils = await makeWorkerUtils({ pgPool: pool });
|
||||||
|
}
|
||||||
|
|
||||||
|
async start(queueName: QueueName, concurrency?: number): Promise<void> {
|
||||||
|
if (concurrency) {
|
||||||
|
this.queueConfig[queueName] = {
|
||||||
|
...this.queueConfig[queueName],
|
||||||
|
concurrency,
|
||||||
|
};
|
||||||
|
} else {
|
||||||
|
concurrency = this.queueConfig[queueName].concurrency;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.queueConfig[queueName].paused) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
await this.stop(queueName);
|
||||||
|
this.runners[queueName] = await run({
|
||||||
|
concurrency,
|
||||||
|
taskList: {
|
||||||
|
[queueName]: async (payload: unknown): Promise<void> => {
|
||||||
|
this.logger.log(`Job ${queueName} started with payload: ${JSON.stringify(payload)}`);
|
||||||
|
await this.eventRepository.emit('job.start', queueName, payload as JobItem);
|
||||||
|
},
|
||||||
|
},
|
||||||
|
pgPool: this.pool,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async stop(queueName: QueueName): Promise<void> {
|
||||||
|
const runner = this.runners[queueName];
|
||||||
|
if (runner) {
|
||||||
|
await runner.stop();
|
||||||
|
delete this.runners[queueName];
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
startWorkers() {
|
async pause(queueName: QueueName): Promise<void> {
|
||||||
const { bull } = this.configRepository.getEnv();
|
await this.setState(queueName, true);
|
||||||
for (const queueName of Object.values(QueueName)) {
|
await this.stop(queueName);
|
||||||
this.logger.debug(`Starting worker for queue: ${queueName}`);
|
|
||||||
this.workers[queueName] = new Worker(
|
|
||||||
queueName,
|
|
||||||
(job) => this.eventRepository.emit('job.start', queueName, job as JobItem),
|
|
||||||
{ ...bull.config, concurrency: 1 },
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async run({ name, data }: JobItem) {
|
async resume(queueName: QueueName): Promise<void> {
|
||||||
|
await this.setState(queueName, false);
|
||||||
|
await this.start(queueName);
|
||||||
|
}
|
||||||
|
|
||||||
|
private async setState(queueName: QueueName, paused: boolean): Promise<void> {
|
||||||
|
const state = await this.systemMetadataRepository.get(SystemMetadataKey.QUEUES_STATE);
|
||||||
|
await this.systemMetadataRepository.set(SystemMetadataKey.QUEUES_STATE, {
|
||||||
|
...state,
|
||||||
|
[queueName]: { paused },
|
||||||
|
});
|
||||||
|
this.queueConfig[queueName] = {
|
||||||
|
...this.queueConfig[queueName],
|
||||||
|
paused,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// todo: we should consolidate queue and job names and have queues be
|
||||||
|
// homogenous.
|
||||||
|
//
|
||||||
|
// the reason there are multiple kinds of jobs per queue is so that
|
||||||
|
// concurrency settings apply to all of them. We could instead create a
|
||||||
|
// concept of "queue" groups, such that workers will run for groups of queues
|
||||||
|
// rather than just a single queue and achieve the same outcome.
|
||||||
|
private getQueueName(name: JobName) {
|
||||||
|
return (this.handlers[name] as JobMapItem).queueName;
|
||||||
|
}
|
||||||
|
|
||||||
|
async run({ name, data }: JobItem): Promise<JobStatus> {
|
||||||
const item = this.handlers[name as JobName];
|
const item = this.handlers[name as JobName];
|
||||||
if (!item) {
|
if (!item) {
|
||||||
this.logger.warn(`Skipping unknown job: "${name}"`);
|
this.logger.warn(`Skipping unknown job: "${name}"`);
|
||||||
return JobStatus.SKIPPED;
|
return JobStatus.SKIPPED;
|
||||||
}
|
}
|
||||||
|
|
||||||
return item.handler(data);
|
return item.handler(data);
|
||||||
}
|
}
|
||||||
|
|
||||||
setConcurrency(queueName: QueueName, concurrency: number) {
|
async queue(item: JobItem): Promise<void> {
|
||||||
const worker = this.workers[queueName];
|
await this.workerUtils!.addJob(this.getQueueName(item.name), item, this.getJobOptions(item));
|
||||||
if (!worker) {
|
|
||||||
this.logger.warn(`Unable to set queue concurrency, worker not found: '${queueName}'`);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
worker.concurrency = concurrency;
|
|
||||||
}
|
|
||||||
|
|
||||||
async getQueueStatus(name: QueueName): Promise<QueueStatus> {
|
|
||||||
const queue = this.getQueue(name);
|
|
||||||
|
|
||||||
return {
|
|
||||||
isActive: !!(await queue.getActiveCount()),
|
|
||||||
isPaused: await queue.isPaused(),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
pause(name: QueueName) {
|
|
||||||
return this.getQueue(name).pause();
|
|
||||||
}
|
|
||||||
|
|
||||||
resume(name: QueueName) {
|
|
||||||
return this.getQueue(name).resume();
|
|
||||||
}
|
|
||||||
|
|
||||||
empty(name: QueueName) {
|
|
||||||
return this.getQueue(name).drain();
|
|
||||||
}
|
|
||||||
|
|
||||||
clear(name: QueueName, type: QueueCleanType) {
|
|
||||||
return this.getQueue(name).clean(0, 1000, type);
|
|
||||||
}
|
|
||||||
|
|
||||||
getJobCounts(name: QueueName): Promise<JobCounts> {
|
|
||||||
return this.getQueue(name).getJobCounts(
|
|
||||||
'active',
|
|
||||||
'completed',
|
|
||||||
'failed',
|
|
||||||
'delayed',
|
|
||||||
'waiting',
|
|
||||||
'paused',
|
|
||||||
) as unknown as Promise<JobCounts>;
|
|
||||||
}
|
|
||||||
|
|
||||||
private getQueueName(name: JobName) {
|
|
||||||
return (this.handlers[name] as JobMapItem).queueName;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async queueAll(items: JobItem[]): Promise<void> {
|
async queueAll(items: JobItem[]): Promise<void> {
|
||||||
if (items.length === 0) {
|
await Promise.all(items.map((item) => this.queue(item)));
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const promises = [];
|
|
||||||
const itemsByQueue = {} as Record<string, (JobItem & { data: any; options: JobsOptions | undefined })[]>;
|
|
||||||
for (const item of items) {
|
|
||||||
const queueName = this.getQueueName(item.name);
|
|
||||||
const job = {
|
|
||||||
name: item.name,
|
|
||||||
data: item.data || {},
|
|
||||||
options: this.getJobOptions(item) || undefined,
|
|
||||||
} as JobItem & { data: any; options: JobsOptions | undefined };
|
|
||||||
|
|
||||||
if (job.options?.jobId) {
|
|
||||||
// need to use add() instead of addBulk() for jobId deduplication
|
|
||||||
promises.push(this.getQueue(queueName).add(item.name, item.data, job.options));
|
|
||||||
} else {
|
|
||||||
itemsByQueue[queueName] = itemsByQueue[queueName] || [];
|
|
||||||
itemsByQueue[queueName].push(job);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for (const [queueName, jobs] of Object.entries(itemsByQueue)) {
|
|
||||||
const queue = this.getQueue(queueName as QueueName);
|
|
||||||
promises.push(queue.addBulk(jobs));
|
|
||||||
}
|
|
||||||
|
|
||||||
await Promise.all(promises);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async queue(item: JobItem): Promise<void> {
|
// todo: are we actually generating sql
|
||||||
return this.queueAll([item]);
|
async clear(name: QueueName): Promise<void> {
|
||||||
|
await this.db
|
||||||
|
.deleteFrom('graphile_worker._private_jobs')
|
||||||
|
.where(({ eb, selectFrom }) =>
|
||||||
|
eb('task_id', 'in', selectFrom('graphile_worker._private_tasks').select('id').where('identifier', '=', name)),
|
||||||
|
)
|
||||||
|
.execute();
|
||||||
|
|
||||||
|
const workers = await this.db
|
||||||
|
.selectFrom('graphile_worker.jobs')
|
||||||
|
.select('locked_by')
|
||||||
|
.where('locked_by', 'is not', null)
|
||||||
|
.distinct()
|
||||||
|
.execute();
|
||||||
|
|
||||||
|
// Potentially dangerous? It helps if jobs get stuck active though. The
|
||||||
|
// documentation says that stuck jobs will be unlocked automatically after 4
|
||||||
|
// hours. Though, it can be strange to click "clear" in the UI and see
|
||||||
|
// nothing happen. Especially as the UI is binary, such that new jobs cannot
|
||||||
|
// usually be scheduled unless both active and waiting are zero.
|
||||||
|
await this.workerUtils!.forceUnlockWorkers(workers.map((worker) => worker.locked_by!));
|
||||||
}
|
}
|
||||||
|
|
||||||
async waitForQueueCompletion(...queues: QueueName[]): Promise<void> {
|
async clearFailed(name: QueueName): Promise<void> {
|
||||||
let activeQueue: QueueStatus | undefined;
|
await this.db
|
||||||
do {
|
.deleteFrom('graphile_worker._private_jobs')
|
||||||
const statuses = await Promise.all(queues.map((name) => this.getQueueStatus(name)));
|
.where(({ eb, selectFrom }) =>
|
||||||
activeQueue = statuses.find((status) => status.isActive);
|
eb(
|
||||||
} while (activeQueue);
|
'task_id',
|
||||||
{
|
'in',
|
||||||
this.logger.verbose(`Waiting for ${activeQueue} queue to stop...`);
|
selectFrom('graphile_worker._private_tasks')
|
||||||
await setTimeout(1000);
|
.select('id')
|
||||||
}
|
.where((eb) => eb.and([eb('identifier', '=', name), eb('attempts', '>=', eb.ref('max_attempts'))])),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
.execute();
|
||||||
}
|
}
|
||||||
|
|
||||||
-  private getJobOptions(item: JobItem): JobsOptions | null {
+  // todo: are we actually generating sql
+  @GenerateSql({ params: [] })
+  async getJobCounts(name: QueueName): Promise<JobCounts> {
+    return await this.db
+      .selectFrom('graphile_worker.jobs')
+      .select((eb) => [
+        eb.fn
+          .countAll<number>()
+          .filterWhere((eb) => eb.and([eb('task_identifier', '=', name), eb('locked_by', 'is not', null)]))
+          .as('active'),
+        eb.fn
+          .countAll<number>()
+          .filterWhere((eb) =>
+            eb.and([
+              eb('task_identifier', '=', name),
+              eb('locked_by', 'is', null),
+              eb('run_at', '<=', eb.fn<Date>('now')),
+            ]),
+          )
+          .as('waiting'),
+        eb.fn
+          .countAll<number>()
+          .filterWhere((eb) =>
+            eb.and([
+              eb('task_identifier', '=', name),
+              eb('locked_by', 'is', null),
+              eb('run_at', '>', eb.fn<Date>('now')),
+            ]),
+          )
+          .as('delayed'),
+        eb.fn
+          .countAll<number>()
+          .filterWhere((eb) => eb.and([eb('task_identifier', '=', name), eb('attempts', '>=', eb.ref('max_attempts'))]))
+          .as('failed'),
+      ])
+      .executeTakeFirstOrThrow();
+  }
+
+  async getQueueStatus(queueName: QueueName): Promise<QueueStatus> {
+    const state = await this.systemMetadataRepository.get(SystemMetadataKey.QUEUES_STATE);
+    return { paused: state?.[queueName]?.paused ?? false };
+  }
+
+  private getJobOptions(item: JobItem): TaskSpec | undefined {
     switch (item.name) {
       case JobName.NOTIFY_ALBUM_UPDATE: {
-        return {
-          jobId: `${item.data.id}/${item.data.recipientId}`,
-          delay: item.data?.delay,
-        };
+        let runAt: Date | undefined;
+        if (item.data?.delay) {
+          runAt = DateTime.now().plus(Duration.fromMillis(item.data.delay)).toJSDate();
+        }
+        return { jobKey: item.data.id, runAt };
       }
       case JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE: {
-        return { jobId: item.data.id };
+        return { jobKey: QueueName.STORAGE_TEMPLATE_MIGRATION };
       }
       case JobName.GENERATE_PERSON_THUMBNAIL: {
         return { priority: 1 };
       }
       case JobName.QUEUE_FACIAL_RECOGNITION: {
-        return { jobId: JobName.QUEUE_FACIAL_RECOGNITION };
+        return { jobKey: JobName.QUEUE_FACIAL_RECOGNITION };
       }
-      default: {
-        return null;
-      }
     }
   }
-
-  private getQueue(queue: QueueName): Queue {
-    return this.moduleRef.get<Queue>(getQueueToken(queue), { strict: false });
-  }
-
-  /** @deprecated */
-  // todo: remove this when asset notifications no longer need it.
-  public async removeJob(name: JobName, jobID: string): Promise<void> {
-    const existingJob = await this.getQueue(this.getQueueName(name)).getJob(jobID);
-    if (existingJob) {
-      await existingJob.remove();
-    }
-  }
 }
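
The getJobOptions() rewrite above swaps BullMQ's jobId/delay for graphile-worker's TaskSpec. A minimal sketch of that mapping, assuming graphile-worker's documented semantics (a pending job with the same jobKey is replaced rather than duplicated, and runAt defers execution); the helper name is illustrative:

import type { TaskSpec } from 'graphile-worker';
import { DateTime, Duration } from 'luxon';

// Sketch: jobKey gives dedupe-by-replacement, runAt gives the delay.
const toTaskSpec = (id: string, delayMs?: number): TaskSpec => ({
  jobKey: id,
  runAt: delayMs ? DateTime.now().plus(Duration.fromMillis(delayMs)).toJSDate() : undefined,
});
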
@@ -4,7 +4,6 @@ import { MetricOptions } from '@opentelemetry/api';
 import { AsyncLocalStorageContextManager } from '@opentelemetry/context-async-hooks';
 import { PrometheusExporter } from '@opentelemetry/exporter-prometheus';
 import { HttpInstrumentation } from '@opentelemetry/instrumentation-http';
-import { IORedisInstrumentation } from '@opentelemetry/instrumentation-ioredis';
 import { NestInstrumentation } from '@opentelemetry/instrumentation-nestjs-core';
 import { PgInstrumentation } from '@opentelemetry/instrumentation-pg';
 import { resourceFromAttributes } from '@opentelemetry/resources';
@@ -68,12 +67,7 @@ export const bootstrapTelemetry = (port: number) => {
     }),
     metricReader: new PrometheusExporter({ port }),
     contextManager: new AsyncLocalStorageContextManager(),
-    instrumentations: [
-      new HttpInstrumentation(),
-      new IORedisInstrumentation(),
-      new NestInstrumentation(),
-      new PgInstrumentation(),
-    ],
+    instrumentations: [new HttpInstrumentation(), new NestInstrumentation(), new PgInstrumentation()],
     views: [
       {
         instrumentName: '*',
@@ -606,7 +606,7 @@ describe(AlbumService.name, () => {
       expect(mocks.album.addAssetIds).toHaveBeenCalledWith('album-123', ['asset-1', 'asset-2', 'asset-3']);
       expect(mocks.event.emit).toHaveBeenCalledWith('album.update', {
         id: 'album-123',
-        recipientId: 'admin_id',
+        recipientIds: ['admin_id'],
       });
     });
   });
@@ -170,8 +170,8 @@ export class AlbumService extends BaseService {
       (userId) => userId !== auth.user.id,
     );

-    for (const recipientId of allUsersExceptUs) {
-      await this.eventRepository.emit('album.update', { id, recipientId });
+    if (allUsersExceptUs.length > 0) {
+      await this.eventRepository.emit('album.update', { id, recipientIds: allUsersExceptUs });
     }
   }
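
The loop-per-recipient emit becomes a single batched event. A self-contained sketch of the new payload shape (names mirror the diff; the type itself is illustrative, the real EventMap typing lives elsewhere in the codebase):

type AlbumUpdateEvent = { id: string; recipientIds: string[] };

// Sketch: everyone except the acting user, or no event at all when the list is empty.
const toAlbumUpdateEvent = (id: string, userIds: string[], actorId: string): AlbumUpdateEvent | undefined => {
  const recipientIds = userIds.filter((userId) => userId !== actorId);
  return recipientIds.length > 0 ? { id, recipientIds } : undefined;
};
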
@@ -2,7 +2,7 @@ import { BadRequestException } from '@nestjs/common';
 import { defaults, SystemConfig } from 'src/config';
 import { ImmichWorker, JobCommand, JobName, JobStatus, QueueName } from 'src/enum';
 import { JobService } from 'src/services/job.service';
-import { JobItem } from 'src/types';
+import { JobCounts, JobItem } from 'src/types';
 import { assetStub } from 'test/fixtures/asset.stub';
 import { newTestService, ServiceMocks } from 'test/utils';
@@ -21,14 +21,14 @@ describe(JobService.name, () => {
   });

   describe('onConfigUpdate', () => {
-    it('should update concurrency', () => {
-      sut.onConfigUpdate({ newConfig: defaults, oldConfig: {} as SystemConfig });
+    it('should update concurrency', async () => {
+      await sut.onConfigUpdate({ newConfig: defaults, oldConfig: {} as SystemConfig });

-      expect(mocks.job.setConcurrency).toHaveBeenCalledTimes(15);
-      expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(5, QueueName.FACIAL_RECOGNITION, 1);
-      expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(7, QueueName.DUPLICATE_DETECTION, 1);
-      expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(8, QueueName.BACKGROUND_TASK, 5);
-      expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(9, QueueName.STORAGE_TEMPLATE_MIGRATION, 1);
+      expect(mocks.job.start).toHaveBeenCalledTimes(15);
+      expect(mocks.job.start).toHaveBeenNthCalledWith(5, QueueName.FACIAL_RECOGNITION, 1);
+      expect(mocks.job.start).toHaveBeenNthCalledWith(7, QueueName.DUPLICATE_DETECTION, 1);
+      expect(mocks.job.start).toHaveBeenNthCalledWith(8, QueueName.BACKGROUND_TASK, 5);
+      expect(mocks.job.start).toHaveBeenNthCalledWith(9, QueueName.STORAGE_TEMPLATE_MIGRATION, 1);
     });
   });
@@ -55,29 +55,20 @@ describe(JobService.name, () => {
     it('should get all job statuses', async () => {
       mocks.job.getJobCounts.mockResolvedValue({
         active: 1,
-        completed: 1,
-        failed: 1,
-        delayed: 1,
         waiting: 1,
-        paused: 1,
-      });
-      mocks.job.getQueueStatus.mockResolvedValue({
-        isActive: true,
-        isPaused: true,
+        delayed: 1,
+        failed: 1,
       });

       const expectedJobStatus = {
         jobCounts: {
           active: 1,
-          completed: 1,
+          waiting: 1,
           delayed: 1,
           failed: 1,
-          waiting: 1,
-          paused: 1,
         },
         queueStatus: {
-          isActive: true,
-          isPaused: true,
+          paused: true,
         },
       };
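
The shapes these tests rely on, as inferred from the mocks here and the repository diff earlier; the BullMQ-era fields (completed, paused) are gone. Inferred, not quoted from src/types:

// Sketch of the slimmed-down types implied by the tests above.
type JobCounts = { active: number; waiting: number; delayed: number; failed: number };
type QueueStatus = { paused: boolean };
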
@@ -114,14 +105,20 @@ describe(JobService.name, () => {
       expect(mocks.job.resume).toHaveBeenCalledWith(QueueName.METADATA_EXTRACTION);
     });

-    it('should handle an empty command', async () => {
-      await sut.handleCommand(QueueName.METADATA_EXTRACTION, { command: JobCommand.EMPTY, force: false });
+    it('should handle a clear command', async () => {
+      await sut.handleCommand(QueueName.METADATA_EXTRACTION, { command: JobCommand.CLEAR, force: false });

-      expect(mocks.job.empty).toHaveBeenCalledWith(QueueName.METADATA_EXTRACTION);
+      expect(mocks.job.clear).toHaveBeenCalledWith(QueueName.METADATA_EXTRACTION);
+    });
+
+    it('should handle a clear-failed command', async () => {
+      await sut.handleCommand(QueueName.METADATA_EXTRACTION, { command: JobCommand.CLEAR_FAILED, force: false });
+
+      expect(mocks.job.clearFailed).toHaveBeenCalledWith(QueueName.METADATA_EXTRACTION);
     });

     it('should not start a job that is already running', async () => {
-      mocks.job.getQueueStatus.mockResolvedValue({ isActive: true, isPaused: false });
+      mocks.job.getJobCounts.mockResolvedValue({ active: 1 } as JobCounts);

       await expect(
         sut.handleCommand(QueueName.VIDEO_CONVERSION, { command: JobCommand.START, force: false }),
@@ -132,7 +129,7 @@ describe(JobService.name, () => {
     });

     it('should handle a start video conversion command', async () => {
-      mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
+      mocks.job.getJobCounts.mockResolvedValue({ active: 0 } as JobCounts);

       await sut.handleCommand(QueueName.VIDEO_CONVERSION, { command: JobCommand.START, force: false });

@@ -140,7 +137,7 @@ describe(JobService.name, () => {
     });

     it('should handle a start storage template migration command', async () => {
-      mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
+      mocks.job.getJobCounts.mockResolvedValue({ active: 0 } as JobCounts);

       await sut.handleCommand(QueueName.STORAGE_TEMPLATE_MIGRATION, { command: JobCommand.START, force: false });

@@ -148,7 +145,7 @@ describe(JobService.name, () => {
     });

     it('should handle a start smart search command', async () => {
-      mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
+      mocks.job.getJobCounts.mockResolvedValue({ active: 0 } as JobCounts);

       await sut.handleCommand(QueueName.SMART_SEARCH, { command: JobCommand.START, force: false });

@@ -156,7 +153,7 @@ describe(JobService.name, () => {
     });

     it('should handle a start metadata extraction command', async () => {
-      mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
+      mocks.job.getJobCounts.mockResolvedValue({ active: 0 } as JobCounts);

       await sut.handleCommand(QueueName.METADATA_EXTRACTION, { command: JobCommand.START, force: false });

@@ -164,7 +161,7 @@ describe(JobService.name, () => {
     });

     it('should handle a start sidecar command', async () => {
-      mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
+      mocks.job.getJobCounts.mockResolvedValue({ active: 0 } as JobCounts);

       await sut.handleCommand(QueueName.SIDECAR, { command: JobCommand.START, force: false });

@@ -172,7 +169,7 @@ describe(JobService.name, () => {
     });

     it('should handle a start thumbnail generation command', async () => {
-      mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
+      mocks.job.getJobCounts.mockResolvedValue({ active: 0 } as JobCounts);

       await sut.handleCommand(QueueName.THUMBNAIL_GENERATION, { command: JobCommand.START, force: false });

@@ -180,7 +177,7 @@ describe(JobService.name, () => {
     });

     it('should handle a start face detection command', async () => {
-      mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
+      mocks.job.getJobCounts.mockResolvedValue({ active: 0 } as JobCounts);

       await sut.handleCommand(QueueName.FACE_DETECTION, { command: JobCommand.START, force: false });

@@ -188,7 +185,7 @@ describe(JobService.name, () => {
     });

     it('should handle a start facial recognition command', async () => {
-      mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
+      mocks.job.getJobCounts.mockResolvedValue({ active: 0 } as JobCounts);

       await sut.handleCommand(QueueName.FACIAL_RECOGNITION, { command: JobCommand.START, force: false });

@@ -196,7 +193,7 @@ describe(JobService.name, () => {
     });

     it('should handle a start backup database command', async () => {
-      mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
+      mocks.job.getJobCounts.mockResolvedValue({ active: 0 } as JobCounts);

       await sut.handleCommand(QueueName.BACKUP_DATABASE, { command: JobCommand.START, force: false });

@@ -204,7 +201,7 @@ describe(JobService.name, () => {
     });

     it('should throw a bad request when an invalid queue is used', async () => {
-      mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
+      mocks.job.getJobCounts.mockResolvedValue({ active: 0 } as JobCounts);

       await expect(
         sut.handleCommand(QueueName.BACKGROUND_TASK, { command: JobCommand.START, force: false }),
@@ -239,6 +236,10 @@ describe(JobService.name, () => {
         item: { name: JobName.SIDECAR_DISCOVERY, data: { id: 'asset-1' } },
         jobs: [JobName.METADATA_EXTRACTION],
       },
+      {
+        item: { name: JobName.METADATA_EXTRACTION, data: { id: 'asset-1' } },
+        jobs: [JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE],
+      },
       {
         item: { name: JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE, data: { id: 'asset-1', source: 'upload' } },
         jobs: [JobName.GENERATE_THUMBNAILS],
@@ -12,7 +12,6 @@ import {
   JobName,
   JobStatus,
   ManualJobName,
-  QueueCleanType,
   QueueName,
 } from 'src/enum';
 import { ArgOf, ArgsOf } from 'src/repositories/event.repository';
@@ -56,7 +55,7 @@ export class JobService extends BaseService {
   private services: ClassConstructor<unknown>[] = [];

   @OnEvent({ name: 'config.init', workers: [ImmichWorker.MICROSERVICES] })
-  onConfigInit({ newConfig: config }: ArgOf<'config.init'>) {
+  async onConfigInit({ newConfig: config }: ArgOf<'config.init'>) {
     this.logger.debug(`Updating queue concurrency settings`);
     for (const queueName of Object.values(QueueName)) {
       let concurrency = 1;
@@ -64,21 +63,18 @@ export class JobService extends BaseService {
         concurrency = config.job[queueName].concurrency;
       }
       this.logger.debug(`Setting ${queueName} concurrency to ${concurrency}`);
-      this.jobRepository.setConcurrency(queueName, concurrency);
+      await this.jobRepository.start(queueName, concurrency);
     }
   }

   @OnEvent({ name: 'config.update', server: true, workers: [ImmichWorker.MICROSERVICES] })
-  onConfigUpdate({ newConfig: config }: ArgOf<'config.update'>) {
-    this.onConfigInit({ newConfig: config });
+  async onConfigUpdate({ newConfig: config }: ArgOf<'config.update'>) {
+    await this.onConfigInit({ newConfig: config });
   }

   @OnEvent({ name: 'app.bootstrap', priority: BootstrapEventPriority.JobService })
-  onBootstrap() {
-    this.jobRepository.setup(this.services);
-    if (this.worker === ImmichWorker.MICROSERVICES) {
-      this.jobRepository.startWorkers();
-    }
+  async onBootstrap() {
+    await this.jobRepository.setup(this.services);
   }

   setServices(services: ClassConstructor<unknown>[]) {
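
A sketch of the repository surface this hunk implies; the signatures are inferred from the call sites above, not quoted. start() registers a queue's runner with its concurrency, replacing the old setConcurrency() plus startWorkers() pair:

// Inferred surface, illustrative only.
interface JobRepositoryLike {
  setup(services: unknown[]): Promise<void>;
  start(queueName: string, concurrency: number): Promise<void>;
}
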
@@ -97,25 +93,20 @@ export class JobService extends BaseService {
         await this.start(queueName, dto);
         break;
       }

       case JobCommand.PAUSE: {
-        await this.jobRepository.pause(queueName);
+        this.eventRepository.serverSend('queue.pause', queueName);
         break;
       }

       case JobCommand.RESUME: {
-        await this.jobRepository.resume(queueName);
+        this.eventRepository.serverSend('queue.resume', queueName);
         break;
       }

-      case JobCommand.EMPTY: {
-        await this.jobRepository.empty(queueName);
+      case JobCommand.CLEAR: {
+        await this.jobRepository.clear(queueName);
         break;
       }

       case JobCommand.CLEAR_FAILED: {
-        const failedJobs = await this.jobRepository.clear(queueName, QueueCleanType.FAILED);
-        this.logger.debug(`Cleared failed jobs: ${failedJobs}`);
+        await this.jobRepository.clearFailed(queueName);
         break;
       }
     }
@@ -141,9 +132,9 @@ export class JobService extends BaseService {
   }

   private async start(name: QueueName, { force }: JobCommandDto): Promise<void> {
-    const { isActive } = await this.jobRepository.getQueueStatus(name);
-    if (isActive) {
-      throw new BadRequestException(`Job is already running`);
+    const { active } = await this.jobRepository.getJobCounts(name);
+    if (active > 0) {
+      throw new BadRequestException(`Jobs are already running`);
     }

     this.telemetryRepository.jobs.addToCounter(`immich.queues.${snakeCase(name)}.started`, 1);
@@ -203,6 +194,16 @@ export class JobService extends BaseService {
     }
   }

+  @OnEvent({ name: 'queue.pause', server: true, workers: [ImmichWorker.MICROSERVICES] })
+  async pause(...[queueName]: ArgsOf<'queue.pause'>): Promise<void> {
+    await this.jobRepository.pause(queueName);
+  }
+
+  @OnEvent({ name: 'queue.resume', server: true, workers: [ImmichWorker.MICROSERVICES] })
+  async resume(...[queueName]: ArgsOf<'queue.resume'>): Promise<void> {
+    await this.jobRepository.resume(queueName);
+  }
+
   @OnEvent({ name: 'job.start' })
   async onJobStart(...[queueName, job]: ArgsOf<'job.start'>) {
     const queueMetric = `immich.queues.${snakeCase(queueName)}.active`;
@@ -264,6 +265,17 @@ export class JobService extends BaseService {
         break;
       }

+      case JobName.METADATA_EXTRACTION: {
+        if (item.data.source === 'sidecar-write') {
+          const [asset] = await this.assetRepository.getByIdsWithAllRelationsButStacks([item.data.id]);
+          if (asset) {
+            this.eventRepository.clientSend('on_asset_update', asset.ownerId, mapAsset(asset));
+          }
+        }
+        await this.jobRepository.queue({ name: JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE, data: item.data });
+        break;
+      }
+
       case JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE: {
         if (item.data.source === 'upload' || item.data.source === 'copy') {
           await this.jobRepository.queue({ name: JobName.GENERATE_THUMBNAILS, data: item.data });
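
The pause/resume commands now fan out: the API process broadcasts 'queue.pause' or 'queue.resume', and each microservices worker applies the change to its own runner. A minimal sketch of that pattern, with a stand-in bus type rather than the real event repository:

type QueueEvent = 'queue.pause' | 'queue.resume';

interface Bus {
  serverSend(event: QueueEvent, queueName: string): void;
  on(event: QueueEvent, handler: (queueName: string) => Promise<void>): void;
}

// Sketch: the sender calls bus.serverSend(...); every worker wires handlers like this.
const wireQueueControls = (bus: Bus, repo: { pause(q: string): Promise<void>; resume(q: string): Promise<void> }) => {
  bus.on('queue.pause', (queueName) => repo.pause(queueName));
  bus.on('queue.resume', (queueName) => repo.resume(queueName));
};
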
@@ -266,9 +266,7 @@ export class MediaService extends BaseService {

     const { info, data, colorspace } = await this.decodeImage(
       extracted ? extracted.buffer : asset.originalPath,
-      // only specify orientation to extracted images which don't have EXIF orientation data
-      // or it can double rotate the image
-      extracted ? asset.exifInfo : { ...asset.exifInfo, orientation: null },
+      asset.exifInfo,
       convertFullsize ? undefined : image.preview.size,
     );
@@ -67,16 +67,12 @@ describe(MetadataService.name, () => {
   });

   describe('onBootstrapEvent', () => {
-    it('should pause and resume queue during init', async () => {
-      mocks.job.pause.mockResolvedValue();
+    it('should init', async () => {
       mocks.map.init.mockResolvedValue();
-      mocks.job.resume.mockResolvedValue();

       await sut.onBootstrap();

-      expect(mocks.job.pause).toHaveBeenCalledTimes(1);
       expect(mocks.map.init).toHaveBeenCalledTimes(1);
-      expect(mocks.job.resume).toHaveBeenCalledTimes(1);
     });
   });
@@ -143,8 +139,7 @@ describe(MetadataService.name, () => {

     it('should handle an asset that could not be found', async () => {
       mocks.assetJob.getForMetadataExtraction.mockResolvedValue(void 0);
-
-      await sut.handleMetadataExtraction({ id: assetStub.image.id });
+      await expect(sut.handleMetadataExtraction({ id: assetStub.image.id })).resolves.toBe(JobStatus.FAILED);

       expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id);
       expect(mocks.asset.upsertExif).not.toHaveBeenCalled();
@@ -527,7 +522,7 @@ describe(MetadataService.name, () => {
         ContainerDirectory: [{ Foo: 100 }],
       });

-      await sut.handleMetadataExtraction({ id: assetStub.image.id });
+      await expect(sut.handleMetadataExtraction({ id: assetStub.image.id })).resolves.toBe(JobStatus.SUCCESS);
     });

     it('should extract the correct video orientation', async () => {
@@ -1202,7 +1197,7 @@ describe(MetadataService.name, () => {
     it('should handle livePhotoCID not set', async () => {
       mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);

-      await sut.handleMetadataExtraction({ id: assetStub.image.id });
+      await expect(sut.handleMetadataExtraction({ id: assetStub.image.id })).resolves.toBe(JobStatus.SUCCESS);

       expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id);
       expect(mocks.asset.findLivePhotoMatch).not.toHaveBeenCalled();
@@ -1215,7 +1210,9 @@ describe(MetadataService.name, () => {
       mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.livePhotoMotionAsset);
       mockReadTags({ ContentIdentifier: 'CID' });

-      await sut.handleMetadataExtraction({ id: assetStub.livePhotoMotionAsset.id });
+      await expect(sut.handleMetadataExtraction({ id: assetStub.livePhotoMotionAsset.id })).resolves.toBe(
+        JobStatus.SUCCESS,
+      );

       expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.livePhotoMotionAsset.id);
       expect(mocks.asset.findLivePhotoMatch).toHaveBeenCalledWith({
@@ -1234,7 +1231,9 @@ describe(MetadataService.name, () => {
       mocks.asset.findLivePhotoMatch.mockResolvedValue(assetStub.livePhotoMotionAsset);
       mockReadTags({ ContentIdentifier: 'CID' });

-      await sut.handleMetadataExtraction({ id: assetStub.livePhotoStillAsset.id });
+      await expect(sut.handleMetadataExtraction({ id: assetStub.livePhotoStillAsset.id })).resolves.toBe(
+        JobStatus.SUCCESS,
+      );

       expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.livePhotoStillAsset.id);
       expect(mocks.asset.findLivePhotoMatch).toHaveBeenCalledWith({
@@ -1258,7 +1257,9 @@ describe(MetadataService.name, () => {
       mocks.asset.findLivePhotoMatch.mockResolvedValue(assetStub.livePhotoMotionAsset);
       mockReadTags({ ContentIdentifier: 'CID' });

-      await sut.handleMetadataExtraction({ id: assetStub.livePhotoStillAsset.id });
+      await expect(sut.handleMetadataExtraction({ id: assetStub.livePhotoStillAsset.id })).resolves.toBe(
+        JobStatus.SUCCESS,
+      );

       expect(mocks.event.emit).toHaveBeenCalledWith('asset.hide', {
         userId: assetStub.livePhotoMotionAsset.ownerId,
@@ -1274,12 +1275,10 @@ describe(MetadataService.name, () => {
       mocks.asset.findLivePhotoMatch.mockResolvedValue(assetStub.livePhotoMotionAsset);
       mockReadTags({ ContentIdentifier: 'CID' });

-      await sut.handleMetadataExtraction({ id: assetStub.livePhotoStillAsset.id });
+      await expect(sut.handleMetadataExtraction({ id: assetStub.livePhotoStillAsset.id })).resolves.toBe(
+        JobStatus.SUCCESS,
+      );

-      expect(mocks.event.emit).toHaveBeenCalledWith('asset.metadataExtracted', {
-        assetId: assetStub.livePhotoStillAsset.id,
-        userId: assetStub.livePhotoStillAsset.ownerId,
-      });
       expect(mocks.asset.findLivePhotoMatch).toHaveBeenCalledWith({
         ownerId: 'user-id',
         otherAssetId: 'live-photo-still-asset',
@@ -121,9 +121,7 @@ export class MetadataService extends BaseService {
     this.logger.log('Initializing metadata service');

     try {
-      await this.jobRepository.pause(QueueName.METADATA_EXTRACTION);
       await this.databaseRepository.withLock(DatabaseLock.GeodataImport, () => this.mapRepository.init());
-      await this.jobRepository.resume(QueueName.METADATA_EXTRACTION);

       this.logger.log(`Initialized local reverse geocoder`);
     } catch (error: Error | any) {
@@ -182,14 +180,14 @@ export class MetadataService extends BaseService {
   }

   @OnJob({ name: JobName.METADATA_EXTRACTION, queue: QueueName.METADATA_EXTRACTION })
-  async handleMetadataExtraction(data: JobOf<JobName.METADATA_EXTRACTION>) {
+  async handleMetadataExtraction(data: JobOf<JobName.METADATA_EXTRACTION>): Promise<JobStatus> {
     const [{ metadata, reverseGeocoding }, asset] = await Promise.all([
       this.getConfig({ withCache: true }),
       this.assetJobRepository.getForMetadataExtraction(data.id),
     ]);

     if (!asset) {
-      return;
+      return JobStatus.FAILED;
     }

     const [exifTags, stats] = await Promise.all([
@@ -283,11 +281,7 @@ export class MetadataService extends BaseService {

     await this.assetRepository.upsertJobStatus({ assetId: asset.id, metadataExtractedAt: new Date() });

-    await this.eventRepository.emit('asset.metadataExtracted', {
-      assetId: asset.id,
-      userId: asset.ownerId,
-      source: data.source,
-    });
+    return JobStatus.SUCCESS;
   }

   @OnJob({ name: JobName.QUEUE_SIDECAR, queue: QueueName.SIDECAR })
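
handleMetadataExtraction() now reports an explicit JobStatus instead of emitting asset.metadataExtracted. A sketch of the convention; the member names mirror the diff, while the string values and everything else here are assumed:

enum JobStatus {
  SUCCESS = 'success',
  FAILED = 'failed',
  SKIPPED = 'skipped',
}

// Sketch: a bare `return` used to read as success; the status is now explicit.
const handleSketch = async (load: (id: string) => Promise<unknown>, id: string): Promise<JobStatus> => {
  const asset = await load(id);
  return asset ? JobStatus.SUCCESS : JobStatus.FAILED;
};
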
@@ -154,10 +154,10 @@ describe(NotificationService.name, () => {

   describe('onAlbumUpdateEvent', () => {
     it('should queue notify album update event', async () => {
-      await sut.onAlbumUpdate({ id: 'album', recipientId: '42' });
+      await sut.onAlbumUpdate({ id: 'album', recipientIds: ['42'] });
       expect(mocks.job.queue).toHaveBeenCalledWith({
         name: JobName.NOTIFY_ALBUM_UPDATE,
-        data: { id: 'album', recipientId: '42', delay: 300_000 },
+        data: { id: 'album', recipientIds: ['42'], delay: 300_000 },
       });
     });
   });
@@ -414,14 +414,14 @@ describe(NotificationService.name, () => {

   describe('handleAlbumUpdate', () => {
     it('should skip if album could not be found', async () => {
-      await expect(sut.handleAlbumUpdate({ id: '', recipientId: '1' })).resolves.toBe(JobStatus.SKIPPED);
+      await expect(sut.handleAlbumUpdate({ id: '', recipientIds: ['1'] })).resolves.toBe(JobStatus.SKIPPED);
       expect(mocks.user.get).not.toHaveBeenCalled();
     });

     it('should skip if owner could not be found', async () => {
       mocks.album.getById.mockResolvedValue(albumStub.emptyWithValidThumbnail);

-      await expect(sut.handleAlbumUpdate({ id: '', recipientId: '1' })).resolves.toBe(JobStatus.SKIPPED);
+      await expect(sut.handleAlbumUpdate({ id: '', recipientIds: ['1'] })).resolves.toBe(JobStatus.SKIPPED);
       expect(mocks.systemMetadata.get).not.toHaveBeenCalled();
     });

@@ -434,7 +434,7 @@ describe(NotificationService.name, () => {
       mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
       mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([]);

-      await sut.handleAlbumUpdate({ id: '', recipientId: userStub.user1.id });
+      await sut.handleAlbumUpdate({ id: '', recipientIds: [userStub.user1.id] });
       expect(mocks.user.get).toHaveBeenCalledWith(userStub.user1.id, { withDeleted: false });
       expect(mocks.email.renderEmail).not.toHaveBeenCalled();
     });
@@ -456,7 +456,7 @@ describe(NotificationService.name, () => {
       mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
       mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([]);

-      await sut.handleAlbumUpdate({ id: '', recipientId: userStub.user1.id });
+      await sut.handleAlbumUpdate({ id: '', recipientIds: [userStub.user1.id] });
       expect(mocks.user.get).toHaveBeenCalledWith(userStub.user1.id, { withDeleted: false });
       expect(mocks.email.renderEmail).not.toHaveBeenCalled();
     });
@@ -478,7 +478,7 @@ describe(NotificationService.name, () => {
       mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
       mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([]);

-      await sut.handleAlbumUpdate({ id: '', recipientId: userStub.user1.id });
+      await sut.handleAlbumUpdate({ id: '', recipientIds: [userStub.user1.id] });
       expect(mocks.user.get).toHaveBeenCalledWith(userStub.user1.id, { withDeleted: false });
       expect(mocks.email.renderEmail).not.toHaveBeenCalled();
     });
@@ -492,21 +492,20 @@ describe(NotificationService.name, () => {
       mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
       mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([]);

-      await sut.handleAlbumUpdate({ id: '', recipientId: userStub.user1.id });
+      await sut.handleAlbumUpdate({ id: '', recipientIds: [userStub.user1.id] });
       expect(mocks.user.get).toHaveBeenCalledWith(userStub.user1.id, { withDeleted: false });
       expect(mocks.email.renderEmail).toHaveBeenCalled();
       expect(mocks.job.queue).toHaveBeenCalled();
     });

     it('should add new recipients for new images if job is already queued', async () => {
-      await sut.onAlbumUpdate({ id: '1', recipientId: '2' } as INotifyAlbumUpdateJob);
-      expect(mocks.job.removeJob).toHaveBeenCalledWith(JobName.NOTIFY_ALBUM_UPDATE, '1/2');
+      await sut.onAlbumUpdate({ id: '1', recipientIds: ['1', '2', '3'] } as INotifyAlbumUpdateJob);
       expect(mocks.job.queue).toHaveBeenCalledWith({
         name: JobName.NOTIFY_ALBUM_UPDATE,
         data: {
           id: '1',
           delay: 300_000,
-          recipientId: '2',
+          recipientIds: ['1', '2', '3'],
         },
       });
     });
@@ -1,6 +1,5 @@
 import { BadRequestException, Injectable } from '@nestjs/common';
 import { OnEvent, OnJob } from 'src/decorators';
-import { mapAsset } from 'src/dtos/asset-response.dto';
 import { AuthDto } from 'src/dtos/auth.dto';
 import {
   mapNotification,
@@ -23,7 +22,7 @@ import {
 import { EmailTemplate } from 'src/repositories/email.repository';
 import { ArgOf } from 'src/repositories/event.repository';
 import { BaseService } from 'src/services/base.service';
-import { EmailImageAttachment, JobOf } from 'src/types';
+import { EmailImageAttachment, IEntityJob, INotifyAlbumUpdateJob, JobItem, JobOf } from 'src/types';
 import { getFilenameExtension } from 'src/utils/file';
 import { getExternalDomain } from 'src/utils/misc';
 import { isEqualObject } from 'src/utils/object';
@@ -153,18 +152,6 @@ export class NotificationService extends BaseService {
     this.eventRepository.clientSend('on_asset_trash', userId, assetIds);
   }

-  @OnEvent({ name: 'asset.metadataExtracted' })
-  async onAssetMetadataExtracted({ assetId, userId, source }: ArgOf<'asset.metadataExtracted'>) {
-    if (source !== 'sidecar-write') {
-      return;
-    }
-
-    const [asset] = await this.assetRepository.getByIdsWithAllRelationsButStacks([assetId]);
-    if (asset) {
-      this.eventRepository.clientSend('on_asset_update', userId, mapAsset(asset));
-    }
-  }
-
   @OnEvent({ name: 'assets.restore' })
   onAssetsRestore({ assetIds, userId }: ArgOf<'assets.restore'>) {
     this.eventRepository.clientSend('on_asset_restore', userId, assetIds);
@@ -198,12 +185,31 @@ export class NotificationService extends BaseService {
   }

   @OnEvent({ name: 'album.update' })
-  async onAlbumUpdate({ id, recipientId }: ArgOf<'album.update'>) {
-    await this.jobRepository.removeJob(JobName.NOTIFY_ALBUM_UPDATE, `${id}/${recipientId}`);
-    await this.jobRepository.queue({
+  async onAlbumUpdate({ id, recipientIds }: ArgOf<'album.update'>) {
+    // if recipientIds is empty, album likely only has one user part of it, don't queue notification if so
+    if (recipientIds.length === 0) {
+      return;
+    }
+
+    const job: JobItem = {
       name: JobName.NOTIFY_ALBUM_UPDATE,
-      data: { id, recipientId, delay: NotificationService.albumUpdateEmailDelayMs },
-    });
+      data: { id, recipientIds, delay: NotificationService.albumUpdateEmailDelayMs },
+    };
+
+    // todo: https://github.com/immich-app/immich/pull/17879
+    // const previousJobData = await this.jobRepository.removeJob(id, JobName.NOTIFY_ALBUM_UPDATE);
+    // if (previousJobData && this.isAlbumUpdateJob(previousJobData)) {
+    //   for (const id of previousJobData.recipientIds) {
+    //     if (!recipientIds.includes(id)) {
+    //       recipientIds.push(id);
+    //     }
+    //   }
+    // }
+    await this.jobRepository.queue(job);
+  }
+
+  private isAlbumUpdateJob(job: IEntityJob): job is INotifyAlbumUpdateJob {
+    return 'recipientIds' in job;
   }

   @OnEvent({ name: 'album.invite' })
@@ -394,7 +400,7 @@ export class NotificationService extends BaseService {
   }

   @OnJob({ name: JobName.NOTIFY_ALBUM_UPDATE, queue: QueueName.NOTIFICATION })
-  async handleAlbumUpdate({ id, recipientId }: JobOf<JobName.NOTIFY_ALBUM_UPDATE>) {
+  async handleAlbumUpdate({ id, recipientIds }: JobOf<JobName.NOTIFY_ALBUM_UPDATE>) {
     const album = await this.albumRepository.getById(id, { withAssets: false });

     if (!album) {
@@ -406,44 +412,49 @@ export class NotificationService extends BaseService {
       return JobStatus.SKIPPED;
     }

+    const recipients = [...album.albumUsers.map((user) => user.user), owner].filter((user) =>
+      recipientIds.includes(user.id),
+    );
     const attachment = await this.getAlbumThumbnailAttachment(album);

     const { server, templates } = await this.getConfig({ withCache: false });

-    const user = await this.userRepository.get(recipientId, { withDeleted: false });
-    if (!user) {
-      return JobStatus.SKIPPED;
-    }
+    for (const recipient of recipients) {
+      const user = await this.userRepository.get(recipient.id, { withDeleted: false });
+      if (!user) {
+        continue;
+      }

-    const { emailNotifications } = getPreferences(user.metadata);
+      const { emailNotifications } = getPreferences(user.metadata);

-    if (!emailNotifications.enabled || !emailNotifications.albumUpdate) {
-      return JobStatus.SKIPPED;
-    }
+      if (!emailNotifications.enabled || !emailNotifications.albumUpdate) {
+        continue;
+      }

-    const { html, text } = await this.emailRepository.renderEmail({
-      template: EmailTemplate.ALBUM_UPDATE,
-      data: {
-        baseUrl: getExternalDomain(server),
-        albumId: album.id,
-        albumName: album.albumName,
-        recipientName: user.name,
-        cid: attachment ? attachment.cid : undefined,
-      },
-      customTemplate: templates.email.albumUpdateTemplate,
-    });
+      const { html, text } = await this.emailRepository.renderEmail({
+        template: EmailTemplate.ALBUM_UPDATE,
+        data: {
+          baseUrl: getExternalDomain(server),
+          albumId: album.id,
+          albumName: album.albumName,
+          recipientName: recipient.name,
+          cid: attachment ? attachment.cid : undefined,
+        },
+        customTemplate: templates.email.albumUpdateTemplate,
+      });

-    await this.jobRepository.queue({
-      name: JobName.SEND_EMAIL,
-      data: {
-        to: user.email,
-        subject: `New media has been added to an album - ${album.albumName}`,
-        html,
-        text,
-        imageAttachments: attachment ? [attachment] : undefined,
-      },
-    });
+      await this.jobRepository.queue({
+        name: JobName.SEND_EMAIL,
+        data: {
+          to: recipient.email,
+          subject: `New media has been added to an album - ${album.albumName}`,
+          html,
+          text,
+          imageAttachments: attachment ? [attachment] : undefined,
+        },
+      });
+    }

     return JobStatus.SUCCESS;
   }
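
The commented-out block above (PR 17879) sketches merging recipients when a notification job is re-queued. A standalone version of that merge, for illustration only:

// Sketch: carry forward recipients from a previously queued job so a re-queue drops no one.
const mergeRecipients = (current: string[], previous: string[] = []): string[] => {
  const merged = [...current];
  for (const id of previous) {
    if (!merged.includes(id)) {
      merged.push(id);
    }
  }
  return merged;
};
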
@@ -529,10 +529,8 @@ describe(PersonService.name, () => {
       mocks.job.getJobCounts.mockResolvedValue({
         active: 1,
         waiting: 0,
-        paused: 0,
-        completed: 0,
-        failed: 0,
         delayed: 0,
+        failed: 0,
       });
       mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.machineLearningDisabled);
@@ -546,10 +544,8 @@ describe(PersonService.name, () => {
       mocks.job.getJobCounts.mockResolvedValue({
         active: 1,
         waiting: 1,
-        paused: 0,
-        completed: 0,
-        failed: 0,
         delayed: 0,
+        failed: 0,
       });

       await expect(sut.handleQueueRecognizeFaces({})).resolves.toBe(JobStatus.SKIPPED);
@@ -561,10 +557,8 @@ describe(PersonService.name, () => {
       mocks.job.getJobCounts.mockResolvedValue({
         active: 1,
         waiting: 0,
-        paused: 0,
-        completed: 0,
-        failed: 0,
         delayed: 0,
+        failed: 0,
       });
       mocks.person.getAllFaces.mockReturnValue(makeStream([faceStub.face1]));
       mocks.person.getAllWithoutFaces.mockResolvedValue([]);
@@ -590,10 +584,8 @@ describe(PersonService.name, () => {
       mocks.job.getJobCounts.mockResolvedValue({
         active: 1,
         waiting: 0,
-        paused: 0,
-        completed: 0,
-        failed: 0,
         delayed: 0,
+        failed: 0,
       });
       mocks.person.getAll.mockReturnValue(makeStream());
       mocks.person.getAllFaces.mockReturnValue(makeStream([faceStub.face1]));
@@ -619,10 +611,8 @@ describe(PersonService.name, () => {
       mocks.job.getJobCounts.mockResolvedValue({
         active: 1,
         waiting: 0,
-        paused: 0,
-        completed: 0,
-        failed: 0,
         delayed: 0,
+        failed: 0,
       });
       mocks.person.getAll.mockReturnValue(makeStream());
       mocks.person.getAllFaces.mockReturnValue(makeStream([faceStub.face1]));
@@ -666,10 +656,8 @@ describe(PersonService.name, () => {
      mocks.job.getJobCounts.mockResolvedValue({
         active: 1,
         waiting: 0,
-        paused: 0,
-        completed: 0,
-        failed: 0,
         delayed: 0,
+        failed: 0,
       });
       mocks.person.getAll.mockReturnValue(makeStream([faceStub.face1.person, personStub.randomPerson]));
       mocks.person.getAllFaces.mockReturnValue(makeStream([faceStub.face1]));
@@ -392,7 +392,8 @@ export class PersonService extends BaseService {
       return JobStatus.SKIPPED;
     }

-    await this.jobRepository.waitForQueueCompletion(QueueName.THUMBNAIL_GENERATION, QueueName.FACE_DETECTION);
+    // todo
+    // await this.jobRepository.waitForQueueCompletion(QueueName.THUMBNAIL_GENERATION, QueueName.FACE_DETECTION);

     if (nightly) {
       const [state, latestFaceDate] = await Promise.all([
@@ -116,11 +116,6 @@ export class StorageTemplateService extends BaseService {
     return { ...storageTokens, presetOptions: storagePresets };
   }

-  @OnEvent({ name: 'asset.metadataExtracted' })
-  async onAssetMetadataExtracted({ source, assetId }: ArgOf<'asset.metadataExtracted'>) {
-    await this.jobRepository.queue({ name: JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE, data: { source, id: assetId } });
-  }
-
   @OnJob({ name: JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE, queue: QueueName.STORAGE_TEMPLATE_MIGRATION })
   async handleMigrationSingle({ id }: JobOf<JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE>): Promise<JobStatus> {
     const config = await this.getConfig({ withCache: true });
@@ -4,7 +4,7 @@ import { DateTime } from 'luxon';
 import { Writable } from 'node:stream';
 import { AUDIT_LOG_MAX_DURATION } from 'src/constants';
 import { SessionSyncCheckpoints } from 'src/db';
-import { AssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
+import { AssetResponseDto, hexOrBufferToBase64, mapAsset } from 'src/dtos/asset-response.dto';
 import { AuthDto } from 'src/dtos/auth.dto';
 import {
   AssetDeltaSyncDto,
@@ -18,7 +18,6 @@ import { DatabaseAction, EntityType, Permission, SyncEntityType, SyncRequestType
 import { BaseService } from 'src/services/base.service';
 import { SyncAck } from 'src/types';
 import { getMyPartnerIds } from 'src/utils/asset.util';
-import { hexOrBufferToBase64 } from 'src/utils/bytes';
 import { setIsEqual } from 'src/utils/set';
 import { fromAck, serialize } from 'src/utils/sync';

@@ -142,7 +141,7 @@ export class SyncService extends BaseService {
           updateId,
           data: {
             ...data,
-            checksum: hexOrBufferToBase64(checksum)!,
+            checksum: hexOrBufferToBase64(checksum),
             thumbhash: thumbhash ? hexOrBufferToBase64(thumbhash) : null,
           },
         }),
@@ -172,7 +171,7 @@ export class SyncService extends BaseService {
           updateId,
           data: {
             ...data,
-            checksum: hexOrBufferToBase64(checksum)!,
+            checksum: hexOrBufferToBase64(checksum),
             thumbhash: thumbhash ? hexOrBufferToBase64(thumbhash) : null,
           },
         }),
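
The checksum lines above drop the non-null assertion because hexOrBufferToBase64 now always returns a string. A hypothetical shape of the relocated helper, assuming Postgres bytea values arrive either as a Buffer or as a '\x'-prefixed hex string; the real implementation lives in src/dtos/asset-response.dto:

import { Buffer } from 'node:buffer';

// Hypothetical sketch, not the actual source.
const hexOrBufferToBase64 = (value: string | Buffer): string =>
  typeof value === 'string' ? Buffer.from(value.slice(2), 'hex').toString('base64') : value.toString('base64');
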
@@ -1,6 +1,9 @@
 import { BadRequestException } from '@nestjs/common';
+import { TimeBucketSize } from 'src/repositories/asset.repository';
 import { TimelineService } from 'src/services/timeline.service';
+import { assetStub } from 'test/fixtures/asset.stub';
 import { authStub } from 'test/fixtures/auth.stub';
+import { factory } from 'test/small.factory';
 import { newTestService, ServiceMocks } from 'test/utils';
 
 describe(TimelineService.name, () => {
@@ -15,10 +18,13 @@ describe(TimelineService.name, () => {
   it("should return buckets if userId and albumId aren't set", async () => {
     mocks.asset.getTimeBuckets.mockResolvedValue([{ timeBucket: 'bucket', count: 1 }]);
 
-    await expect(sut.getTimeBuckets(authStub.admin, {})).resolves.toEqual(
-      expect.arrayContaining([{ timeBucket: 'bucket', count: 1 }]),
-    );
+    await expect(
+      sut.getTimeBuckets(authStub.admin, {
+        size: TimeBucketSize.DAY,
+      }),
+    ).resolves.toEqual(expect.arrayContaining([{ timeBucket: 'bucket', count: 1 }]));
     expect(mocks.asset.getTimeBuckets).toHaveBeenCalledWith({
+      size: TimeBucketSize.DAY,
       userIds: [authStub.admin.user.id],
     });
   });
@@ -27,34 +33,35 @@ describe(TimelineService.name, () => {
   describe('getTimeBucket', () => {
     it('should return the assets for a album time bucket if user has album.read', async () => {
       mocks.access.album.checkOwnerAccess.mockResolvedValue(new Set(['album-id']));
-      const json = `[{ id: ['asset-id'] }]`;
-      mocks.asset.getTimeBucket.mockResolvedValue({ assets: json });
+      mocks.asset.getTimeBucket.mockResolvedValue([assetStub.image]);
 
-      await expect(sut.getTimeBucket(authStub.admin, { timeBucket: 'bucket', albumId: 'album-id' })).resolves.toEqual(
-        json,
-      );
+      await expect(
+        sut.getTimeBucket(authStub.admin, { size: TimeBucketSize.DAY, timeBucket: 'bucket', albumId: 'album-id' }),
+      ).resolves.toEqual(expect.arrayContaining([expect.objectContaining({ id: 'asset-id' })]));
 
       expect(mocks.access.album.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['album-id']));
       expect(mocks.asset.getTimeBucket).toHaveBeenCalledWith('bucket', {
+        size: TimeBucketSize.DAY,
        timeBucket: 'bucket',
        albumId: 'album-id',
      });
    });
 
     it('should return the assets for a archive time bucket if user has archive.read', async () => {
-      const json = `[{ id: ['asset-id'] }]`;
-      mocks.asset.getTimeBucket.mockResolvedValue({ assets: json });
+      mocks.asset.getTimeBucket.mockResolvedValue([assetStub.image]);
 
       await expect(
         sut.getTimeBucket(authStub.admin, {
+          size: TimeBucketSize.DAY,
           timeBucket: 'bucket',
           isArchived: true,
           userId: authStub.admin.user.id,
         }),
-      ).resolves.toEqual(json);
+      ).resolves.toEqual(expect.arrayContaining([expect.objectContaining({ id: 'asset-id' })]));
       expect(mocks.asset.getTimeBucket).toHaveBeenCalledWith(
         'bucket',
         expect.objectContaining({
+          size: TimeBucketSize.DAY,
           timeBucket: 'bucket',
           isArchived: true,
           userIds: [authStub.admin.user.id],
@@ -63,19 +70,20 @@ describe(TimelineService.name, () => {
     });
 
     it('should include partner shared assets', async () => {
-      const json = `[{ id: ['asset-id'] }]`;
-      mocks.asset.getTimeBucket.mockResolvedValue({ assets: json });
+      mocks.asset.getTimeBucket.mockResolvedValue([assetStub.image]);
       mocks.partner.getAll.mockResolvedValue([]);
 
       await expect(
         sut.getTimeBucket(authStub.admin, {
+          size: TimeBucketSize.DAY,
           timeBucket: 'bucket',
           isArchived: false,
           userId: authStub.admin.user.id,
           withPartners: true,
         }),
-      ).resolves.toEqual(json);
+      ).resolves.toEqual(expect.arrayContaining([expect.objectContaining({ id: 'asset-id' })]));
       expect(mocks.asset.getTimeBucket).toHaveBeenCalledWith('bucket', {
+        size: TimeBucketSize.DAY,
         timeBucket: 'bucket',
         isArchived: false,
         withPartners: true,
@@ -84,37 +92,62 @@ describe(TimelineService.name, () => {
     });
 
     it('should check permissions to read tag', async () => {
-      const json = `[{ id: ['asset-id'] }]`;
-      mocks.asset.getTimeBucket.mockResolvedValue({ assets: json });
+      mocks.asset.getTimeBucket.mockResolvedValue([assetStub.image]);
       mocks.access.tag.checkOwnerAccess.mockResolvedValue(new Set(['tag-123']));
 
       await expect(
         sut.getTimeBucket(authStub.admin, {
+          size: TimeBucketSize.DAY,
           timeBucket: 'bucket',
           userId: authStub.admin.user.id,
           tagId: 'tag-123',
         }),
-      ).resolves.toEqual(json);
+      ).resolves.toEqual(expect.arrayContaining([expect.objectContaining({ id: 'asset-id' })]));
       expect(mocks.asset.getTimeBucket).toHaveBeenCalledWith('bucket', {
+        size: TimeBucketSize.DAY,
         tagId: 'tag-123',
         timeBucket: 'bucket',
         userIds: [authStub.admin.user.id],
       });
     });
 
+    it('should strip metadata if showExif is disabled', async () => {
+      mocks.access.album.checkSharedLinkAccess.mockResolvedValue(new Set(['album-id']));
+      mocks.asset.getTimeBucket.mockResolvedValue([assetStub.image]);
+
+      const auth = factory.auth({ sharedLink: { showExif: false } });
+
+      const buckets = await sut.getTimeBucket(auth, {
+        size: TimeBucketSize.DAY,
+        timeBucket: 'bucket',
+        isArchived: true,
+        albumId: 'album-id',
+      });
+
+      expect(buckets).toEqual([expect.objectContaining({ id: 'asset-id' })]);
+      expect(buckets[0]).not.toHaveProperty('exif');
+      expect(mocks.asset.getTimeBucket).toHaveBeenCalledWith('bucket', {
+        size: TimeBucketSize.DAY,
+        timeBucket: 'bucket',
+        isArchived: true,
+        albumId: 'album-id',
+      });
+    });
+
     it('should return the assets for a library time bucket if user has library.read', async () => {
-      const json = `[{ id: ['asset-id'] }]`;
-      mocks.asset.getTimeBucket.mockResolvedValue({ assets: json });
+      mocks.asset.getTimeBucket.mockResolvedValue([assetStub.image]);
 
       await expect(
         sut.getTimeBucket(authStub.admin, {
+          size: TimeBucketSize.DAY,
           timeBucket: 'bucket',
           userId: authStub.admin.user.id,
         }),
-      ).resolves.toEqual(json);
+      ).resolves.toEqual(expect.arrayContaining([expect.objectContaining({ id: 'asset-id' })]));
       expect(mocks.asset.getTimeBucket).toHaveBeenCalledWith(
         'bucket',
         expect.objectContaining({
+          size: TimeBucketSize.DAY,
          timeBucket: 'bucket',
          userIds: [authStub.admin.user.id],
        }),
@@ -124,6 +157,7 @@ describe(TimelineService.name, () => {
     it('should throw an error if withParners is true and isArchived true or undefined', async () => {
       await expect(
         sut.getTimeBucket(authStub.admin, {
+          size: TimeBucketSize.DAY,
           timeBucket: 'bucket',
           isArchived: true,
           withPartners: true,
@@ -133,6 +167,7 @@ describe(TimelineService.name, () => {
 
       await expect(
         sut.getTimeBucket(authStub.admin, {
+          size: TimeBucketSize.DAY,
           timeBucket: 'bucket',
           isArchived: undefined,
           withPartners: true,
@@ -144,6 +179,7 @@ describe(TimelineService.name, () => {
     it('should throw an error if withParners is true and isFavorite is either true or false', async () => {
       await expect(
         sut.getTimeBucket(authStub.admin, {
+          size: TimeBucketSize.DAY,
           timeBucket: 'bucket',
           isFavorite: true,
           withPartners: true,
@@ -153,6 +189,7 @@ describe(TimelineService.name, () => {
 
       await expect(
         sut.getTimeBucket(authStub.admin, {
+          size: TimeBucketSize.DAY,
           timeBucket: 'bucket',
           isFavorite: false,
           withPartners: true,
@@ -164,6 +201,7 @@ describe(TimelineService.name, () => {
     it('should throw an error if withParners is true and isTrash is true', async () => {
       await expect(
         sut.getTimeBucket(authStub.admin, {
+          size: TimeBucketSize.DAY,
          timeBucket: 'bucket',
          isTrashed: true,
          withPartners: true,
@@ -1,7 +1,7 @@
 import { BadRequestException, Injectable } from '@nestjs/common';
-import { Stack } from 'src/database';
+import { AssetResponseDto, SanitizedAssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
 import { AuthDto } from 'src/dtos/auth.dto';
-import { TimeBucketAssetDto, TimeBucketDto, TimeBucketsResponseDto } from 'src/dtos/time-bucket.dto';
+import { TimeBucketAssetDto, TimeBucketDto, TimeBucketResponseDto } from 'src/dtos/time-bucket.dto';
 import { Permission } from 'src/enum';
 import { TimeBucketOptions } from 'src/repositories/asset.repository';
 import { BaseService } from 'src/services/base.service';
@@ -9,32 +9,22 @@ import { getMyPartnerIds } from 'src/utils/asset.util';
 
 @Injectable()
 export class TimelineService extends BaseService {
-  async getTimeBuckets(auth: AuthDto, dto: TimeBucketDto): Promise<TimeBucketsResponseDto[]> {
+  async getTimeBuckets(auth: AuthDto, dto: TimeBucketDto): Promise<TimeBucketResponseDto[]> {
     await this.timeBucketChecks(auth, dto);
     const timeBucketOptions = await this.buildTimeBucketOptions(auth, dto);
-    return await this.assetRepository.getTimeBuckets(timeBucketOptions);
+    return this.assetRepository.getTimeBuckets(timeBucketOptions);
   }
 
-  // pre-jsonified response
-  async getTimeBucket(auth: AuthDto, dto: TimeBucketAssetDto): Promise<string> {
+  async getTimeBucket(
+    auth: AuthDto,
+    dto: TimeBucketAssetDto,
+  ): Promise<AssetResponseDto[] | SanitizedAssetResponseDto[]> {
     await this.timeBucketChecks(auth, dto);
-    const timeBucketOptions = await this.buildTimeBucketOptions(auth, { ...dto });
-    // TODO: use id cursor for pagination
-    const bucket = await this.assetRepository.getTimeBucket(dto.timeBucket, timeBucketOptions);
-    return bucket.assets;
-  }
-
-  mapStack(entity?: Stack | null) {
-    if (!entity) {
-      return null;
-    }
-
-    return {
-      id: entity.id!,
-      primaryAssetId: entity.primaryAssetId!,
-      assetCount: entity.assetCount as number,
-    };
+    const timeBucketOptions = await this.buildTimeBucketOptions(auth, dto);
+    const assets = await this.assetRepository.getTimeBucket(dto.timeBucket, timeBucketOptions);
+    return !auth.sharedLink || auth.sharedLink?.showExif
+      ? assets.map((asset) => mapAsset(asset, { withStack: true, auth }))
+      : assets.map((asset) => mapAsset(asset, { stripMetadata: true, auth }));
   }
 
   private async buildTimeBucketOptions(auth: AuthDto, dto: TimeBucketDto): Promise<TimeBucketOptions> {
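A note on the rewrite above: getTimeBucket now maps repository rows through mapAsset and gates EXIF on the shared link's showExif flag. A minimal standalone TypeScript sketch of that gate, with simplified stand-in types (the real DTOs are richer):

type Auth = { sharedLink?: { showExif: boolean } };
type Asset = { id: string; exif?: unknown };

const mapTimeBucket = (auth: Auth, assets: Asset[]) =>
  !auth.sharedLink || auth.sharedLink?.showExif
    ? assets // full responses in the real service (mapAsset with withStack: true)
    : assets.map(({ exif: _exif, ...sanitized }) => sanitized); // metadata stripped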
@@ -1,28 +0,0 @@
-export type TimelineStack = {
-  id: string;
-  primaryAssetId: string;
-  assetCount: number;
-};
-
-export type AssetDescription = {
-  city: string | null;
-  country: string | null;
-};
-
-export type TimeBucketAssets = {
-  id: string[];
-  ownerId: string[];
-  ratio: number[];
-  isFavorite: number[];
-  isArchived: number[];
-  isTrashed: number[];
-  isImage: number[];
-  thumbhash: (string | null)[];
-  localDateTime: string[];
-  stack?: ([string, string] | null)[];
-  duration: (string | null)[];
-  projectionType: (string | null)[];
-  livePhotoVideoId: (string | null)[];
-  city: (string | null)[];
-  country: (string | null)[];
-};
@@ -177,10 +177,9 @@ export interface IDelayedJob extends IBaseJob {
   delay?: number;
 }
 
-export type JobSource = 'upload' | 'sidecar-write' | 'copy';
 export interface IEntityJob extends IBaseJob {
   id: string;
-  source?: JobSource;
+  source?: 'upload' | 'sidecar-write' | 'copy';
   notify?: boolean;
 }
 
@@ -252,21 +251,18 @@ export interface INotifyAlbumInviteJob extends IEntityJob {
 }
 
 export interface INotifyAlbumUpdateJob extends IEntityJob, IDelayedJob {
-  recipientId: string;
+  recipientIds: string[];
 }
 
 export interface JobCounts {
   active: number;
-  completed: number;
-  failed: number;
-  delayed: number;
   waiting: number;
-  paused: number;
+  delayed: number;
+  failed: number;
 }
 
 export interface QueueStatus {
-  isActive: boolean;
-  isPaused: boolean;
+  paused: boolean;
 }
 
 export type JobItem =
@@ -451,6 +447,14 @@ export type MemoriesState = {
   lastOnThisDayDate: string;
 };
 
+export type QueueState = {
+  paused: boolean;
+};
+
+export type QueuesState = {
+  [key in QueueName]?: QueueState;
+};
+
 export interface SystemMetadata extends Record<SystemMetadataKey, Record<string, any>> {
   [SystemMetadataKey.ADMIN_ONBOARDING]: { isOnboarded: boolean };
   [SystemMetadataKey.FACIAL_RECOGNITION_STATE]: { lastRun?: string };
@@ -460,6 +464,7 @@ export interface SystemMetadata extends Record<SystemMetadataKey, Record<string,
   [SystemMetadataKey.SYSTEM_FLAGS]: DeepPartial<SystemFlags>;
   [SystemMetadataKey.VERSION_CHECK_STATE]: VersionCheckMetadata;
   [SystemMetadataKey.MEMORIES_STATE]: MemoriesState;
+  [SystemMetadataKey.QUEUES_STATE]: QueuesState;
 }
 
 export type UserMetadataItem<T extends keyof UserMetadata = UserMetadataKey> = {
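The new QueuesState above is a TypeScript mapped type over QueueName with every key optional, so per-queue pause state can be persisted piecemeal under SystemMetadataKey.QUEUES_STATE. A minimal sketch; the QueueName members here are assumptions for illustration only:

enum QueueName {
  ThumbnailGeneration = 'thumbnailGeneration', // assumed member name
  StorageTemplateMigration = 'storageTemplateMigration', // assumed member name
}

type QueueState = { paused: boolean };
type QueuesState = { [key in QueueName]?: QueueState };

// Entries can be written one queue at a time; queues never written default to not paused.
const state: QueuesState = { [QueueName.ThumbnailGeneration]: { paused: true } };
const isPaused = (queue: QueueName) => state[queue]?.paused ?? false;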
@@ -197,16 +197,3 @@ export const asRequest = (request: AuthRequest, file: Express.Multer.File) => {
     file: mapToUploadFile(file as ImmichFile),
   };
 };
-
-function isRotated90CW(orientation: number) {
-  return orientation === 5 || orientation === 6 || orientation === 90;
-}
-
-function isRotated270CW(orientation: number) {
-  return orientation === 7 || orientation === 8 || orientation === -90;
-}
-
-export function isFlipped(orientation?: string | null) {
-  const value = Number(orientation);
-  return value && (isRotated270CW(value) || isRotated90CW(value));
-}
@@ -22,15 +22,3 @@ export function asHumanReadable(bytes: number, precision = 1): string {
 
   return `${remainder.toFixed(magnitude == 0 ? 0 : precision)} ${units[magnitude]}`;
 }
-
-// if an asset is jsonified in the DB before being returned, its buffer fields will be hex-encoded strings
-export const hexOrBufferToBase64 = (encoded: string | Buffer | null) => {
-  if (!encoded) {
-    return null;
-  }
-  if (typeof encoded === 'string') {
-    return Buffer.from(encoded.slice(2), 'hex').toString('base64');
-  }
-
-  return encoded.toString('base64');
-};
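The helper removed from the bytes util here is not dropped from the codebase: per the sync.service.ts hunk earlier, it is now exported from src/dtos/asset-response.dto. Its behavior in isolation, using Node's Buffer (the '\x' prefix is how a jsonified Postgres bytea column arrives, per the original comment):

import { Buffer } from 'node:buffer';

const hexOrBufferToBase64 = (encoded: string | Buffer | null) => {
  if (!encoded) {
    return null;
  }
  if (typeof encoded === 'string') {
    // '\x48656c6c6f' -> strip the '\x' prefix, decode the hex, re-encode as base64
    return Buffer.from(encoded.slice(2), 'hex').toString('base64');
  }
  return encoded.toString('base64');
};

hexOrBufferToBase64('\\x48656c6c6f'); // 'SGVsbG8='
hexOrBufferToBase64(Buffer.from('Hello')); // 'SGVsbG8='
hexOrBufferToBase64(null); // null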
@@ -32,7 +32,7 @@ export const asPostgresConnectionConfig = (params: DatabaseConnectionParams) =>
   return {
     host: params.host,
     port: params.port,
-    username: params.username,
+    user: params.username,
     password: params.password,
     database: params.database,
     ssl: undefined,
@@ -51,7 +51,7 @@ export const asPostgresConnectionConfig = (params: DatabaseConnectionParams) =>
   return {
     host: host ?? undefined,
     port: port ? Number(port) : undefined,
-    username: user,
+    user,
     password,
     database: database ?? undefined,
     ssl,
@@ -92,7 +92,7 @@ export const getKyselyConfig = (
     },
     host: config.host,
     port: config.port,
-    username: config.username,
+    user: config.user,
     password: config.password,
     database: config.database,
     ssl: config.ssl,
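The username-to-user renames above line the connection config up with what node-postgres actually reads: pg's client config takes a user field, and an unrecognized username key would simply be ignored. A minimal sketch with placeholder credentials:

import { Client } from 'pg';

const connect = async () => {
  const client = new Client({
    host: 'localhost',
    port: 5432,
    user: 'postgres', // pg reads `user`; `username` is not a recognized option
    password: 'postgres', // placeholder credentials
    database: 'immich',
  });
  await client.connect();
  return client;
};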
@@ -262,7 +262,7 @@ export function withTags(eb: ExpressionBuilder<DB, 'assets'>) {
 }
 
 export function truncatedDate<O>(size: TimeBucketSize) {
-  return sql<O>`date_trunc(${sql.lit(size)}, "localDateTime" at time zone 'UTC') at time zone 'UTC'`;
+  return sql<O>`date_trunc(${size}, "localDateTime" at time zone 'UTC') at time zone 'UTC'`;
 }
 
 export function withTagId<O>(qb: SelectQueryBuilder<DB, 'assets', O>, tagId: string) {
@@ -276,7 +276,6 @@ export function withTagId<O>(qb: SelectQueryBuilder<DB, 'assets', O>, tagId: str
     ),
   );
 }
 
 const joinDeduplicationPlugin = new DeduplicateJoinsPlugin();
 /** TODO: This should only be used for search-related queries, not as a general purpose query builder */
-
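Context for the truncatedDate change above: in Kysely's sql tagged template, a plain interpolation is sent as a bind parameter, while sql.lit() inlines the value into the SQL text. A side-by-side sketch:

import { sql } from 'kysely';

const size = 'day';

// Before: sql.lit inlines the value into the query text.
const literal = sql`date_trunc(${sql.lit(size)}, "localDateTime")`;
// compiles to: date_trunc('day', "localDateTime")

// After: a plain interpolation becomes a bind parameter.
const parameterized = sql`date_trunc(${size}, "localDateTime")`;
// compiles to: date_trunc($1, "localDateTime") with $1 = 'day'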
@@ -14,6 +14,7 @@ import { LoggingRepository } from 'src/repositories/logging.repository';
 import { bootstrapTelemetry } from 'src/repositories/telemetry.repository';
 import { ApiService } from 'src/services/api.service';
 import { isStartUpError, useSwagger } from 'src/utils/misc';
 
 async function bootstrap() {
   process.title = 'immich-api';
 
@@ -18,7 +18,6 @@ read_file_and_export "DB_HOSTNAME_FILE" "DB_HOSTNAME"
 read_file_and_export "DB_DATABASE_NAME_FILE" "DB_DATABASE_NAME"
 read_file_and_export "DB_USERNAME_FILE" "DB_USERNAME"
 read_file_and_export "DB_PASSWORD_FILE" "DB_PASSWORD"
-read_file_and_export "REDIS_PASSWORD_FILE" "REDIS_PASSWORD"
 
 export CPU_CORES="${CPU_CORES:=$(./get-cpus.sh)}"
 echo "Detected CPU Cores: $CPU_CORES"
server/test/fixtures/asset.stub.ts (vendored, 4 changes)
@@ -257,10 +257,6 @@ export const assetStub = {
     duplicateId: null,
     isOffline: false,
     stack: null,
-    orientation: '',
-    projectionType: null,
-    height: 3840,
-    width: 2160,
   }),
 
   trashed: Object.freeze({
@@ -142,15 +142,18 @@ export const getRepository = <K extends keyof RepositoriesTypes>(key: K, db: Kys
     }
 
     case 'database': {
-      return new DatabaseRepository(db, LoggingRepository.create(), new ConfigRepository());
+      const configRepo = new ConfigRepository();
+      return new DatabaseRepository(db, new LoggingRepository(undefined, configRepo), configRepo);
     }
 
     case 'email': {
-      return new EmailRepository(LoggingRepository.create());
+      const logger = new LoggingRepository(undefined, new ConfigRepository());
+      return new EmailRepository(logger);
     }
 
     case 'logger': {
-      return LoggingRepository.create();
+      const configMock = { getEnv: () => ({ noColor: false }) };
+      return new LoggingRepository(undefined, configMock as ConfigRepository);
     }
 
     case 'memory': {
@@ -42,7 +42,7 @@ const globalSetup = async () => {
   const db = new Kysely<DB>(getKyselyConfig({ connectionType: 'url', url: postgresUrl }));
 
   const configRepository = new ConfigRepository();
-  const logger = LoggingRepository.create();
+  const logger = new LoggingRepository(undefined, configRepository);
   await new DatabaseRepository(db, logger, configRepository).runMigrations();
 
   await db.destroy();
@@ -8,13 +8,6 @@ const envData: EnvData = {
   environment: ImmichEnvironment.PRODUCTION,
 
   buildMetadata: {},
-  bull: {
-    config: {
-      connection: {},
-      prefix: 'immich_bull',
-    },
-    queues: [{ name: 'queue-1' }],
-  },
 
   cls: {
     config: {},
@@ -53,12 +46,6 @@ const envData: EnvData = {
     },
   },
 
-  redis: {
-    host: 'redis',
-    port: 6379,
-    db: 0,
-  },
-
   resourcePaths: {
     lockFile: 'build-lock.json',
     geodata: {
@@ -5,18 +5,16 @@ import { Mocked, vitest } from 'vitest';
 export const newJobRepositoryMock = (): Mocked<RepositoryInterface<JobRepository>> => {
   return {
     setup: vitest.fn(),
-    startWorkers: vitest.fn(),
-    run: vitest.fn(),
-    setConcurrency: vitest.fn(),
-    empty: vitest.fn(),
+    start: vitest.fn(),
+    stop: vitest.fn(),
     pause: vitest.fn(),
     resume: vitest.fn(),
+    run: vitest.fn(),
     queue: vitest.fn().mockImplementation(() => Promise.resolve()),
     queueAll: vitest.fn().mockImplementation(() => Promise.resolve()),
-    getQueueStatus: vitest.fn(),
-    getJobCounts: vitest.fn(),
     clear: vitest.fn(),
-    waitForQueueCompletion: vitest.fn(),
-    removeJob: vitest.fn(),
+    clearFailed: vitest.fn(),
+    getJobCounts: vitest.fn(),
+    getQueueStatus: vitest.fn(),
   };
 };
@@ -1,6 +0,0 @@
-{
-  "name": "typescript-sdk",
-  "lockfileVersion": 3,
-  "requires": true,
-  "packages": {}
-}
web/package-lock.json (generated, 8 changes)
@@ -11,7 +11,7 @@
   "dependencies": {
     "@formatjs/icu-messageformat-parser": "^2.9.8",
     "@immich/sdk": "file:../open-api/typescript-sdk",
-    "@immich/ui": "^0.19.0",
+    "@immich/ui": "^0.18.1",
     "@mapbox/mapbox-gl-rtl-text": "0.2.3",
     "@mdi/js": "^7.4.47",
     "@photo-sphere-viewer/core": "^5.11.5",
@@ -1320,9 +1320,9 @@
     "link": true
   },
   "node_modules/@immich/ui": {
-    "version": "0.19.0",
-    "resolved": "https://registry.npmjs.org/@immich/ui/-/ui-0.19.0.tgz",
-    "integrity": "sha512-XVjSUoQVIoe83pxM4q8kmlejb2xep/TZEfoGbasI7takEGKNiWEyXr5eZaXZCSVgq78fcNRr4jyWz290ZAXh7A==",
+    "version": "0.18.1",
+    "resolved": "https://registry.npmjs.org/@immich/ui/-/ui-0.18.1.tgz",
+    "integrity": "sha512-XWWO6OTfH3MektyxCn0hWefZyOGyWwwx/2zHinuShpxTHSyfveJ4mOkFP8DkyMz0dnvJ1EfdkPBMkld3y5R/Hw==",
     "license": "GNU Affero General Public License version 3",
     "dependencies": {
       "@mdi/js": "^7.4.47",
@@ -27,7 +27,7 @@
   "dependencies": {
     "@formatjs/icu-messageformat-parser": "^2.9.8",
     "@immich/sdk": "file:../open-api/typescript-sdk",
-    "@immich/ui": "^0.19.0",
+    "@immich/ui": "^0.18.1",
     "@mapbox/mapbox-gl-rtl-text": "0.2.3",
     "@mdi/js": "^7.4.47",
     "@photo-sphere-viewer/core": "^5.11.5",
@@ -8,6 +8,7 @@
   --immich-primary: 66 80 175;
   --immich-bg: 255 255 255;
   --immich-fg: 0 0 0;
+  --immich-gray: 246 246 244;
   --immich-error: 229 115 115;
   --immich-success: 129 199 132;
   --immich-warning: 255 183 77;
@@ -32,7 +33,6 @@
   --immich-ui-warning: 255 170 0;
   --immich-ui-info: 14 165 233;
   --immich-ui-default-border: 209 213 219;
-  --immich-ui-gray: 246 246 246;
 }
 
 .dark {
@@ -45,7 +45,6 @@
   --immich-ui-warning: 255 170 0;
   --immich-ui-info: 14 165 233;
   --immich-ui-default-border: 55 65 81;
-  --immich-ui-gray: 33 33 33;
   }
 }
@@ -47,7 +47,8 @@
 <ConfirmDialog
   title={$t('delete_user')}
   confirmText={forceDelete ? $t('permanently_delete') : $t('delete')}
-  onClose={(confirmed) => (confirmed ? handleDeleteUser() : onCancel())}
+  onConfirm={handleDeleteUser}
+  {onCancel}
   disabled={deleteButtonDisabled}
 >
   {#snippet promptSnippet()}
@@ -47,20 +47,20 @@
   onCommand,
 }: Props = $props();
 
-let waitingCount = $derived(jobCounts.waiting + jobCounts.paused + jobCounts.delayed);
-let isIdle = $derived(!queueStatus.isActive && !queueStatus.isPaused);
+let waitingCount = $derived(jobCounts.waiting + jobCounts.delayed);
+let idle = $derived(jobCounts.active + jobCounts.waiting + jobCounts.delayed === 0);
 let multipleButtons = $derived(allText || refreshText);
 
-const commonClasses = 'flex place-items-center justify-between w-full py-2 sm:py-4 pe-4 ps-6';
+const commonClasses = 'flex place-items-center justify-between w-full py-2 sm:py-4 pr-4 pl-6';
 </script>
 
 <div
   class="flex flex-col overflow-hidden rounded-2xl bg-gray-100 dark:bg-immich-dark-gray sm:flex-row sm:rounded-[35px]"
 >
   <div class="flex w-full flex-col">
-    {#if queueStatus.isPaused}
+    {#if queueStatus.paused}
       <JobTileStatus color="warning">{$t('paused')}</JobTileStatus>
-    {:else if queueStatus.isActive}
+    {:else if !idle}
       <JobTileStatus color="success">{$t('active')}</JobTileStatus>
     {/if}
     <div class="flex flex-col gap-2 p-5 sm:p-7 md:p-9">
@@ -119,12 +119,12 @@
     </div>
 
     <div
-      class="{commonClasses} flex-row-reverse rounded-b-lg bg-gray-200 text-immich-dark-bg dark:bg-gray-700 dark:text-immich-gray sm:rounded-s-none sm:rounded-e-lg"
+      class="{commonClasses} rounded-b-lg bg-gray-200 text-immich-dark-bg dark:bg-gray-700 dark:text-immich-gray sm:rounded-s-none sm:rounded-e-lg"
     >
+      <p>{$t('waiting')}</p>
       <p class="text-2xl">
        {waitingCount.toLocaleString($locale)}
      </p>
-      <p>{$t('waiting')}</p>
    </div>
  </div>
 </div>
@@ -139,54 +139,52 @@
       <Icon path={mdiAlertCircle} size="36" />
       {$t('disabled').toUpperCase()}
     </JobTileButton>
-  {/if}
-  {#if !disabled && !isIdle}
-    {#if waitingCount > 0}
-      <JobTileButton color="gray" onClick={() => onCommand({ command: JobCommand.Empty, force: false })}>
+  {:else}
+    {#if !idle}
+      <JobTileButton color="gray" onClick={() => onCommand({ command: JobCommand.Clear, force: false })}>
         <Icon path={mdiClose} size="24" />
         {$t('clear').toUpperCase()}
       </JobTileButton>
     {/if}
-    {#if queueStatus.isPaused}
-      {@const size = waitingCount > 0 ? '24' : '48'}
-      <JobTileButton color="light-gray" onClick={() => onCommand({ command: JobCommand.Resume, force: false })}>
+
+    {#if multipleButtons && idle}
+      {#if allText}
+        <JobTileButton color="dark-gray" onClick={() => onCommand({ command: JobCommand.Start, force: true })}>
+          <Icon path={mdiAllInclusive} size="24" />
+          {allText}
+        </JobTileButton>
+      {/if}
+      {#if refreshText}
+        <JobTileButton color="gray" onClick={() => onCommand({ command: JobCommand.Start, force: undefined })}>
+          <Icon path={mdiImageRefreshOutline} size="24" />
+          {refreshText}
+        </JobTileButton>
+      {/if}
+      <JobTileButton color="light-gray" onClick={() => onCommand({ command: JobCommand.Start, force: false })}>
+        <Icon path={mdiSelectionSearch} size="24" />
+        {missingText}
+      </JobTileButton>
+    {/if}
+
+    {#if !multipleButtons && idle}
+      <JobTileButton color="light-gray" onClick={() => onCommand({ command: JobCommand.Start, force: false })}>
+        <Icon path={mdiPlay} size="24" />
+        {missingText}
+      </JobTileButton>
+    {/if}
+
+    {#if queueStatus.paused}
+      <JobTileButton color="gray" onClick={() => onCommand({ command: JobCommand.Resume, force: false })}>
         <!-- size property is not reactive, so have to use width and height -->
-        <Icon path={mdiFastForward} {size} />
+        <Icon path={mdiFastForward} size="24" />
         {$t('resume').toUpperCase()}
       </JobTileButton>
     {:else}
-      <JobTileButton color="light-gray" onClick={() => onCommand({ command: JobCommand.Pause, force: false })}>
+      <JobTileButton color="gray" onClick={() => onCommand({ command: JobCommand.Pause, force: false })}>
        <Icon path={mdiPause} size="24" />
        {$t('pause').toUpperCase()}
      </JobTileButton>
    {/if}
  {/if}
 
-  {#if !disabled && multipleButtons && isIdle}
-    {#if allText}
-      <JobTileButton color="dark-gray" onClick={() => onCommand({ command: JobCommand.Start, force: true })}>
-        <Icon path={mdiAllInclusive} size="24" />
-        {allText}
-      </JobTileButton>
-    {/if}
-    {#if refreshText}
-      <JobTileButton color="gray" onClick={() => onCommand({ command: JobCommand.Start, force: undefined })}>
-        <Icon path={mdiImageRefreshOutline} size="24" />
-        {refreshText}
-      </JobTileButton>
-    {/if}
-    <JobTileButton color="light-gray" onClick={() => onCommand({ command: JobCommand.Start, force: false })}>
-      <Icon path={mdiSelectionSearch} size="24" />
-      {missingText}
-    </JobTileButton>
-  {/if}
-
-  {#if !disabled && !multipleButtons && isIdle}
-    <JobTileButton color="light-gray" onClick={() => onCommand({ command: JobCommand.Start, force: false })}>
-      <Icon path={mdiPlay} size="48" />
-      {missingText}
-    </JobTileButton>
-  {/if}
   </div>
 </div>
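With QueueStatus reduced to a single paused flag, the tile above now derives activity and the waiting total purely from the job counts it already renders. The two derivations, as a standalone TypeScript sketch:

type JobCounts = { active: number; waiting: number; delayed: number; failed: number };

const waitingCount = (c: JobCounts) => c.waiting + c.delayed;
const idle = (c: JobCounts) => c.active + c.waiting + c.delayed === 0;

idle({ active: 0, waiting: 0, delayed: 0, failed: 3 }); // true: failed jobs do not keep a queue busy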
@@ -154,7 +154,7 @@
   jobs[jobId] = await sendJobCommand({ id: jobId, jobCommandDto: jobCommand });
 
   switch (jobCommand.command) {
-    case JobCommand.Empty: {
+    case JobCommand.Clear: {
       notificationController.show({
         message: $t('admin.cleared_jobs', { values: { job: title } }),
         type: NotificationType.Info,
@@ -33,7 +33,8 @@
   title={$t('restore_user')}
   confirmText={$t('continue')}
   confirmColor="success"
-  onClose={(confirmed) => (confirmed ? handleRestoreUser() : onCancel())}
+  onConfirm={handleRestoreUser}
+  {onCancel}
 >
   {#snippet promptSnippet()}
     <p>
@@ -49,7 +49,8 @@
 {#if isConfirmOpen}
   <ConfirmDialog
     title={$t('admin.disable_login')}
-    onClose={(confirmed) => (confirmed ? handleSave(true) : (isConfirmOpen = false))}
+    onCancel={() => (isConfirmOpen = false)}
+    onConfirm={() => handleSave(true)}
   >
     {#snippet promptSnippet()}
       <div class="flex flex-col gap-4">
@@ -1,27 +1,27 @@
 <script lang="ts">
   import Icon from '$lib/components/elements/icon.svelte';
-  import ButtonContextMenu from '$lib/components/shared-components/context-menu/button-context-menu.svelte';
-  import MenuOption from '$lib/components/shared-components/context-menu/menu-option.svelte';
-  import ConfirmDialog from '$lib/components/shared-components/dialog/confirm-dialog.svelte';
-  import FullScreenModal from '$lib/components/shared-components/full-screen-modal.svelte';
-  import SettingSwitch from '$lib/components/shared-components/settings/setting-switch.svelte';
-  import UserAvatar from '$lib/components/shared-components/user-avatar.svelte';
-  import { handleError } from '$lib/utils/handle-error';
   import {
-    AlbumUserRole,
-    AssetOrder,
-    removeUserFromAlbum,
     updateAlbumInfo,
-    updateAlbumUser,
+    removeUserFromAlbum,
     type AlbumResponseDto,
     type UserResponseDto,
+    AssetOrder,
+    AlbumUserRole,
+    updateAlbumUser,
   } from '@immich/sdk';
-  import { mdiArrowDownThin, mdiArrowUpThin, mdiDotsVertical, mdiPlus } from '@mdi/js';
+  import { mdiArrowDownThin, mdiArrowUpThin, mdiPlus, mdiDotsVertical } from '@mdi/js';
+  import FullScreenModal from '$lib/components/shared-components/full-screen-modal.svelte';
+  import UserAvatar from '$lib/components/shared-components/user-avatar.svelte';
+  import SettingSwitch from '$lib/components/shared-components/settings/setting-switch.svelte';
+  import SettingDropdown from '../shared-components/settings/setting-dropdown.svelte';
+  import type { RenderedOption } from '../elements/dropdown.svelte';
+  import { handleError } from '$lib/utils/handle-error';
   import { findKey } from 'lodash-es';
   import { t } from 'svelte-i18n';
-  import type { RenderedOption } from '../elements/dropdown.svelte';
+  import ButtonContextMenu from '$lib/components/shared-components/context-menu/button-context-menu.svelte';
+  import ConfirmDialog from '$lib/components/shared-components/dialog/confirm-dialog.svelte';
   import { notificationController, NotificationType } from '../shared-components/notification/notification';
-  import SettingDropdown from '../shared-components/settings/setting-dropdown.svelte';
+  import MenuOption from '$lib/components/shared-components/context-menu/menu-option.svelte';
 
   interface Props {
     album: AlbumResponseDto;
@@ -195,6 +195,7 @@
     title={$t('album_remove_user')}
     prompt={$t('album_remove_user_confirmation', { values: { user: selectedRemoveUser.name } })}
     confirmText={$t('remove_user')}
-    onClose={(confirmed) => (confirmed ? handleRemoveUser() : (selectedRemoveUser = null))}
+    onConfirm={handleRemoveUser}
+    onCancel={() => (selectedRemoveUser = null)}
   />
 {/if}
@@ -1,25 +1,25 @@
 <script lang="ts">
-  import { shortcut } from '$lib/actions/shortcut';
   import SelectAllAssets from '$lib/components/photos-page/actions/select-all-assets.svelte';
-  import AssetSelectControlBar from '$lib/components/photos-page/asset-select-control-bar.svelte';
-  import { AssetInteraction } from '$lib/stores/asset-interaction.svelte';
   import { assetViewingStore } from '$lib/stores/asset-viewing.store';
-  import { AssetStore } from '$lib/stores/assets-store.svelte';
   import { dragAndDropFilesStore } from '$lib/stores/drag-and-drop-files.store';
-  import { handlePromiseError } from '$lib/utils';
-  import { cancelMultiselect, downloadAlbum } from '$lib/utils/asset-utils';
   import { fileUploadHandler, openFileUploadDialog } from '$lib/utils/file-uploader';
   import type { AlbumResponseDto, SharedLinkResponseDto, UserResponseDto } from '@immich/sdk';
-  import { mdiFileImagePlusOutline, mdiFolderDownloadOutline } from '@mdi/js';
-  import { onDestroy } from 'svelte';
-  import { t } from 'svelte-i18n';
+  import { AssetStore } from '$lib/stores/assets-store.svelte';
+  import { cancelMultiselect, downloadAlbum } from '$lib/utils/asset-utils';
   import CircleIconButton from '../elements/buttons/circle-icon-button.svelte';
   import DownloadAction from '../photos-page/actions/download-action.svelte';
   import AssetGrid from '../photos-page/asset-grid.svelte';
+  import AssetSelectControlBar from '../photos-page/asset-select-control-bar.svelte';
   import ControlAppBar from '../shared-components/control-app-bar.svelte';
   import ImmichLogoSmallLink from '../shared-components/immich-logo-small-link.svelte';
   import ThemeButton from '../shared-components/theme-button.svelte';
+  import { shortcut } from '$lib/actions/shortcut';
+  import { mdiFileImagePlusOutline, mdiFolderDownloadOutline } from '@mdi/js';
+  import { handlePromiseError } from '$lib/utils';
   import AlbumSummary from './album-summary.svelte';
+  import { t } from 'svelte-i18n';
+  import { onDestroy } from 'svelte';
+  import { AssetInteraction } from '$lib/stores/asset-interaction.svelte';
 
   interface Props {
     sharedLink: SharedLinkResponseDto;
@@ -1,22 +1,22 @@
 <script lang="ts">
-  import ButtonContextMenu from '$lib/components/shared-components/context-menu/button-context-menu.svelte';
-  import FullScreenModal from '$lib/components/shared-components/full-screen-modal.svelte';
   import {
-    AlbumUserRole,
     getMyUser,
     removeUserFromAlbum,
-    updateAlbumUser,
     type AlbumResponseDto,
     type UserResponseDto,
+    updateAlbumUser,
+    AlbumUserRole,
   } from '@immich/sdk';
   import { mdiDotsVertical } from '@mdi/js';
   import { onMount } from 'svelte';
-  import { t } from 'svelte-i18n';
   import { handleError } from '../../utils/handle-error';
-  import MenuOption from '../shared-components/context-menu/menu-option.svelte';
   import ConfirmDialog from '../shared-components/dialog/confirm-dialog.svelte';
+  import MenuOption from '../shared-components/context-menu/menu-option.svelte';
   import { NotificationType, notificationController } from '../shared-components/notification/notification';
   import UserAvatar from '../shared-components/user-avatar.svelte';
+  import FullScreenModal from '$lib/components/shared-components/full-screen-modal.svelte';
+  import { t } from 'svelte-i18n';
+  import ButtonContextMenu from '$lib/components/shared-components/context-menu/button-context-menu.svelte';
 
   interface Props {
     album: AlbumResponseDto;
@@ -144,7 +144,8 @@
     title={$t('album_leave')}
     prompt={$t('album_leave_confirmation', { values: { album: album.albumName } })}
     confirmText={$t('leave')}
-    onClose={(confirmed) => (confirmed ? handleRemoveUser() : (selectedRemoveUser = null))}
+    onConfirm={handleRemoveUser}
+    onCancel={() => (selectedRemoveUser = null)}
   />
 {/if}
 
@@ -153,6 +154,7 @@
     title={$t('album_remove_user')}
     prompt={$t('album_remove_user_confirmation', { values: { user: selectedRemoveUser.name } })}
     confirmText={$t('remove_user')}
-    onClose={(confirmed) => (confirmed ? handleRemoveUser() : (selectedRemoveUser = null))}
+    onConfirm={handleRemoveUser}
+    onCancel={() => (selectedRemoveUser = null)}
   />
 {/if}
@@ -1,19 +1,18 @@
 import type { AssetAction } from '$lib/constants';
-import type { TimelineAsset } from '$lib/stores/assets-store.svelte';
-import type { AlbumResponseDto } from '@immich/sdk';
+import type { AlbumResponseDto, AssetResponseDto } from '@immich/sdk';
 
 type ActionMap = {
-  [AssetAction.ARCHIVE]: { asset: TimelineAsset };
-  [AssetAction.UNARCHIVE]: { asset: TimelineAsset };
-  [AssetAction.FAVORITE]: { asset: TimelineAsset };
-  [AssetAction.UNFAVORITE]: { asset: TimelineAsset };
-  [AssetAction.TRASH]: { asset: TimelineAsset };
-  [AssetAction.DELETE]: { asset: TimelineAsset };
-  [AssetAction.RESTORE]: { asset: TimelineAsset };
-  [AssetAction.ADD]: { asset: TimelineAsset };
-  [AssetAction.ADD_TO_ALBUM]: { asset: TimelineAsset; album: AlbumResponseDto };
-  [AssetAction.UNSTACK]: { assets: TimelineAsset[] };
-  [AssetAction.KEEP_THIS_DELETE_OTHERS]: { asset: TimelineAsset };
+  [AssetAction.ARCHIVE]: { asset: AssetResponseDto };
+  [AssetAction.UNARCHIVE]: { asset: AssetResponseDto };
+  [AssetAction.FAVORITE]: { asset: AssetResponseDto };
+  [AssetAction.UNFAVORITE]: { asset: AssetResponseDto };
+  [AssetAction.TRASH]: { asset: AssetResponseDto };
+  [AssetAction.DELETE]: { asset: AssetResponseDto };
+  [AssetAction.RESTORE]: { asset: AssetResponseDto };
+  [AssetAction.ADD]: { asset: AssetResponseDto };
+  [AssetAction.ADD_TO_ALBUM]: { asset: AssetResponseDto; album: AlbumResponseDto };
+  [AssetAction.UNSTACK]: { assets: AssetResponseDto[] };
+  [AssetAction.KEEP_THIS_DELETE_OTHERS]: { asset: AssetResponseDto };
 };
 
 export type Action = {
@@ -6,7 +6,6 @@
|
|||||||
import Portal from '$lib/components/shared-components/portal/portal.svelte';
|
import Portal from '$lib/components/shared-components/portal/portal.svelte';
|
||||||
import { AssetAction } from '$lib/constants';
|
import { AssetAction } from '$lib/constants';
|
||||||
import { addAssetsToAlbum, addAssetsToNewAlbum } from '$lib/utils/asset-utils';
|
import { addAssetsToAlbum, addAssetsToNewAlbum } from '$lib/utils/asset-utils';
|
||||||
import { toTimelineAsset } from '$lib/utils/timeline-util';
|
|
||||||
import type { AlbumResponseDto, AssetResponseDto } from '@immich/sdk';
|
import type { AlbumResponseDto, AssetResponseDto } from '@immich/sdk';
|
||||||
import { mdiImageAlbum, mdiShareVariantOutline } from '@mdi/js';
|
import { mdiImageAlbum, mdiShareVariantOutline } from '@mdi/js';
|
||||||
import { t } from 'svelte-i18n';
|
import { t } from 'svelte-i18n';
|
||||||
@@ -25,14 +24,14 @@
|
|||||||
showSelectionModal = false;
|
showSelectionModal = false;
|
||||||
const album = await addAssetsToNewAlbum(albumName, [asset.id]);
|
const album = await addAssetsToNewAlbum(albumName, [asset.id]);
|
||||||
if (album) {
|
if (album) {
|
||||||
onAction({ type: AssetAction.ADD_TO_ALBUM, asset: toTimelineAsset(asset), album });
|
onAction({ type: AssetAction.ADD_TO_ALBUM, asset, album });
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
const handleAddToAlbum = async (album: AlbumResponseDto) => {
|
const handleAddToAlbum = async (album: AlbumResponseDto) => {
|
||||||
showSelectionModal = false;
|
showSelectionModal = false;
|
||||||
await addAssetsToAlbum(album.id, [asset.id]);
|
await addAssetsToAlbum(album.id, [asset.id]);
|
||||||
onAction({ type: AssetAction.ADD_TO_ALBUM, asset: toTimelineAsset(asset), album });
|
onAction({ type: AssetAction.ADD_TO_ALBUM, asset, album });
|
||||||
};
|
};
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
|
|||||||
@@ -4,7 +4,6 @@
   import MenuOption from '$lib/components/shared-components/context-menu/menu-option.svelte';
   import { AssetAction } from '$lib/constants';
   import { toggleArchive } from '$lib/utils/asset-utils';
-  import { toTimelineAsset } from '$lib/utils/timeline-util';
   import type { AssetResponseDto } from '@immich/sdk';
   import { mdiArchiveArrowDownOutline, mdiArchiveArrowUpOutline } from '@mdi/js';
   import { t } from 'svelte-i18n';
@@ -19,11 +18,11 @@

   const onArchive = async () => {
     if (!asset.isArchived) {
-      preAction({ type: AssetAction.ARCHIVE, asset: toTimelineAsset(asset) });
+      preAction({ type: AssetAction.ARCHIVE, asset });
     }
     const updatedAsset = await toggleArchive(asset);
     if (updatedAsset) {
-      onAction({ type: asset.isArchived ? AssetAction.ARCHIVE : AssetAction.UNARCHIVE, asset: toTimelineAsset(asset) });
+      onAction({ type: asset.isArchived ? AssetAction.ARCHIVE : AssetAction.UNARCHIVE, asset });
     }
   };
 </script>
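The archive hunk keeps the component's two-phase shape: preAction fires before the server round-trip, onAction after it resolves. A compact sketch of that flow, with assumed helper signatures rather than the component's real wiring (the diff reads the archive flag off the component's reactive `asset`; this sketch reads it off the returned copy instead):

type ArchivableAsset = { id: string; isArchived: boolean };
type ArchiveEvent = { type: 'archive' | 'unarchive'; asset: ArchivableAsset };

// Assumed flow: notify listeners eagerly, persist, then confirm with the result.
async function archiveFlow(
  asset: ArchivableAsset,
  preAction: (event: ArchiveEvent) => void,
  onAction: (event: ArchiveEvent) => void,
  toggleArchive: (asset: ArchivableAsset) => Promise<ArchivableAsset | undefined>,
): Promise<void> {
  if (!asset.isArchived) {
    preAction({ type: 'archive', asset }); // before the round-trip
  }
  const updated = await toggleArchive(asset);
  if (updated) {
    // The event tag reflects whichever state the asset ended up in.
    onAction({ type: updated.isArchived ? 'archive' : 'unarchive', asset: updated });
  }
}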
@@ -11,7 +11,6 @@
   import { showDeleteModal } from '$lib/stores/preferences.store';
   import { featureFlags } from '$lib/stores/server-config.store';
   import { handleError } from '$lib/utils/handle-error';
-  import { toTimelineAsset } from '$lib/utils/timeline-util';
   import { deleteAssets, type AssetResponseDto } from '@immich/sdk';
   import { mdiDeleteForeverOutline, mdiDeleteOutline } from '@mdi/js';
   import { t } from 'svelte-i18n';
@@ -43,9 +42,9 @@

   const trashAsset = async () => {
     try {
-      preAction({ type: AssetAction.TRASH, asset: toTimelineAsset(asset) });
+      preAction({ type: AssetAction.TRASH, asset });
       await deleteAssets({ assetBulkDeleteDto: { ids: [asset.id] } });
-      onAction({ type: AssetAction.TRASH, asset: toTimelineAsset(asset) });
+      onAction({ type: AssetAction.TRASH, asset });

       notificationController.show({
         message: $t('moved_to_trash'),
@@ -59,7 +58,7 @@
   const deleteAsset = async () => {
     try {
       await deleteAssets({ assetBulkDeleteDto: { ids: [asset.id], force: true } });
-      onAction({ type: AssetAction.DELETE, asset: toTimelineAsset(asset) });
+      onAction({ type: AssetAction.DELETE, asset });

       notificationController.show({
         message: $t('permanently_deleted_asset'),
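The delete hunk covers both removal paths. The deleteAssets call shapes below are copied verbatim from the diff; the thin wrappers around them are illustrative:

import { deleteAssets } from '@immich/sdk';

// Soft delete: the asset moves to the trash and remains restorable.
const trashAsset = (id: string) =>
  deleteAssets({ assetBulkDeleteDto: { ids: [id] } });

// Hard delete: force: true bypasses the trash and removes the asset permanently.
const deleteAssetForever = (id: string) =>
  deleteAssets({ assetBulkDeleteDto: { ids: [id], force: true } });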
@@ -7,7 +7,6 @@
   } from '$lib/components/shared-components/notification/notification';
   import { AssetAction } from '$lib/constants';
   import { handleError } from '$lib/utils/handle-error';
-  import { toTimelineAsset } from '$lib/utils/timeline-util';
   import { updateAsset, type AssetResponseDto } from '@immich/sdk';
   import { mdiHeart, mdiHeartOutline } from '@mdi/js';
   import { t } from 'svelte-i18n';
@@ -31,10 +30,7 @@

     asset = { ...asset, isFavorite: data.isFavorite };

-    onAction({
-      type: asset.isFavorite ? AssetAction.FAVORITE : AssetAction.UNFAVORITE,
-      asset: toTimelineAsset(asset),
-    });
+    onAction({ type: asset.isFavorite ? AssetAction.FAVORITE : AssetAction.UNFAVORITE, asset });

     notificationController.show({
       type: NotificationType.Info,
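The favorite hunk collapses the onAction call to one line while keeping the replace-then-emit order. A sketch of that order, assuming `data` is the updateAsset response:

type FavoritableAsset = { id: string; isFavorite: boolean };
type FavoriteEvent = { type: 'favorite' | 'unfavorite'; asset: FavoritableAsset };

function applyFavorite(
  asset: FavoritableAsset,
  data: { isFavorite: boolean },
  onAction: (event: FavoriteEvent) => void,
): FavoritableAsset {
  // Replace the object rather than mutating it, so reactive bindings update.
  const next = { ...asset, isFavorite: data.isFavorite };
  onAction({ type: next.isFavorite ? 'favorite' : 'unfavorite', asset: next });
  return next;
}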
@@ -1,13 +1,12 @@
 <script lang="ts">
   import MenuOption from '$lib/components/shared-components/context-menu/menu-option.svelte';
-  import { dialogController } from '$lib/components/shared-components/dialog/dialog';
   import { AssetAction } from '$lib/constants';
   import { keepThisDeleteOthers } from '$lib/utils/asset-utils';
-  import { toTimelineAsset } from '$lib/utils/timeline-util';
   import type { AssetResponseDto, StackResponseDto } from '@immich/sdk';
   import { mdiPinOutline } from '@mdi/js';
-  import { t } from 'svelte-i18n';
   import type { OnAction } from './action';
+  import { t } from 'svelte-i18n';
+  import { dialogController } from '$lib/components/shared-components/dialog/dialog';

   interface Props {
     stack: StackResponseDto;
@@ -30,7 +29,7 @@

     const keptAsset = await keepThisDeleteOthers(asset, stack);
     if (keptAsset) {
-      onAction({ type: AssetAction.UNSTACK, assets: [toTimelineAsset(keptAsset)] });
+      onAction({ type: AssetAction.UNSTACK, assets: [keptAsset] });
     }
   };
 </script>
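In the hunk above, the UNSTACK payload stays an array even for a single survivor, matching the `assets: AssetResponseDto[]` entry in the type map at the top of this diff. A sketch with assumed helper signatures:

type Kept = { id: string };

async function keepThisDeleteOthersFlow(
  asset: Kept,
  stack: { id: string },
  keepThisDeleteOthers: (asset: Kept, stack: { id: string }) => Promise<Kept | undefined>,
  onAction: (event: { type: 'unstack'; assets: Kept[] }) => void,
): Promise<void> {
  const keptAsset = await keepThisDeleteOthers(asset, stack);
  if (keptAsset) {
    // A single survivor still travels as an array, per the UNSTACK payload type.
    onAction({ type: 'unstack', assets: [keptAsset] });
  }
}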
@@ -6,7 +6,6 @@
   } from '$lib/components/shared-components/notification/notification';
   import { AssetAction } from '$lib/constants';
   import { handleError } from '$lib/utils/handle-error';
-  import { toTimelineAsset } from '$lib/utils/timeline-util';
   import { restoreAssets, type AssetResponseDto } from '@immich/sdk';
   import { mdiHistory } from '@mdi/js';
   import { t } from 'svelte-i18n';
@@ -24,7 +23,7 @@
     await restoreAssets({ bulkIdsDto: { ids: [asset.id] } });
     asset.isTrashed = false;

-    onAction({ type: AssetAction.RESTORE, asset: toTimelineAsset(asset) });
+    onAction({ type: AssetAction.RESTORE, asset });

     notificationController.show({
       type: NotificationType.Info,
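The restore hunk mirrors the trash path in reverse: the restoreAssets call shape is verbatim from the diff, while the surrounding wiring below is a sketch:

import { restoreAssets } from '@immich/sdk';

async function restoreFlow(
  asset: { id: string; isTrashed: boolean },
  onAction: (event: { type: 'restore'; asset: { id: string } }) => void,
): Promise<void> {
  await restoreAssets({ bulkIdsDto: { ids: [asset.id] } });
  asset.isTrashed = false; // bring the local flag in line with the server
  onAction({ type: 'restore', asset });
}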
Some files were not shown because too many files have changed in this diff.