Compare commits

173 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 6efc2ec9be | |
| | d0c6c7cb33 | |
| | b3b5f063cf | |
| | 3731cc4334 | |
| | 13df619ba9 | |
| | 3edb347666 | |
| | c73832bd9c | |
| | 2f26a7edae | |
| | deaf81e2a4 | |
| | f1b92718d5 | |
| | 1f64649434 | |
| | ff32506c5e | |
| | 68b5202730 | |
| | c6abef186c | |
| | e5bdf671b5 | |
| | 88e92332ee | |
| | 6da51deb83 | |
| | b44f8d52ee | |
| | fa03ed7dd7 | |
| | 5617b57b26 | |
| | 01210dceac | |
| | e4e049d040 | |
| | a405fba3bb | |
| | b5844db0c7 | |
| | 28ab1d4551 | |
| | 050ee91289 | |
| | 5eb8d7e8b0 | |
| | 5e4403bb2e | |
| | fb6591607f | |
| | 1cf3378499 | |
| | a336aeb007 | |
| | ee49f470b7 | |
| | b9cda59172 | |
| | ba71c83948 | |
| | 2835919931 | |
| | 310fab526d | |
| | 690b87e375 | |
| | e53625b067 | |
| | 9e085c1071 | |
| | 5f9dfa9493 | |
| | 13051c1e5a | |
| | a9cd3609dd | |
| | e0a3e5a200 | |
| | c587fb1df8 | |
| | 51cfe10c28 | |
| | bc3f95c57c | |
| | e368b9e50b | |
| | 95c75c289c | |
| | 23d3657ac2 | |
| | 74f04336bb | |
| | 54db2a48af | |
| | cde56d5a22 | |
| | 3f1cf44717 | |
| | 2d83ac4125 | |
| | 7147486b6a | |
| | 89ddbac8bc | |
| | e071b82e8a | |
| | 13b2b2fc4e | |
| | fe9ef1a3ea | |
| | afb0d0f54d | |
| | 26085ff82b | |
| | 2872886e77 | |
| | a21112e4ab | |
| | f3edf43158 | |
| | 1c5926553a | |
| | 05fa3092bf | |
| | 7d3ec8af37 | |
| | 8db008ef0b | |
| | e493e05e99 | |
| | b83e535010 | |
| | 111372edc1 | |
| | 625a899f64 | |
| | aaf0496f74 | |
| | 4977926c88 | |
| | f41e1159d1 | |
| | 670107373b | |
| | e660f05c31 | |
| | baf1ea313e | |
| | ed64c91da6 | |
| | c40aa4399b | |
| | 8f08100a30 | |
| | 337cd33042 | |
| | 1e8fc7266c | |
| | 7f35583c2c | |
| | ace755f264 | |
| | 7b25c9d0a7 | |
| | c0bee2a6b7 | |
| | b48d5cab22 | |
| | 4f59e6c7ab | |
| | 82a5d54d2c | |
| | 5e6d830ecd | |
| | f700f3427b | |
| | 0c07c0ba4e | |
| | bc885f3644 | |
| | 6668964d92 | |
| | 1835fbae49 | |
| | 593489a14c | |
| | 9f7bf36786 | |
| | f0302670d2 | |
| | 4b8cc7b533 | |
| | 6e953ff5eb | |
| | 7316ad5a72 | |
| | f28fc8fa5c | |
| | 02b70e693c | |
| | b2e06477f8 | |
| | 632971a2ac | |
| | 8045fd3f14 | |
| | a2568f711f | |
| | f9032866e7 | |
| | e287b18435 | |
| | c415ee82d1 | |
| | c8f1a15f21 | |
| | 9012cf6946 | |
| | 7595d01956 | |
| | ed3c239b7e | |
| | c254a04aec | |
| | d5b96c0257 | |
| | 436a2e9bf3 | |
| | b34f4345e1 | |
| | f55d63fae8 | |
| | ed594c1987 | |
| | ab85dd9fa8 | |
| | 08c7054845 | |
| | 9ef41bf1c7 | |
| | 1064128fde | |
| | 382341f550 | |
| | 81e07fda08 | |
| | 4c4435bc19 | |
| | f952bc0b64 | |
| | 05e1a6d949 | |
| | ea3d01ec62 | |
| | 2d4e2af629 | |
| | f18c2fd339 | |
| | cd184cf366 | |
| | 6387e38e27 | |
| | 2fb85f4a16 | |
| | 34d1f74b77 | |
| | 48c9cfb432 | |
| | f9739c9730 | |
| | 863e983726 | |
| | b71d7e33bb | |
| | 93462aafbc | |
| | ea64fdd7b4 | |
| | c86b2ae500 | |
| | 848ba685eb | |
| | 9ad024c189 | |
| | 0b15f6035b | |
| | 1e7b657156 | |
| | 6180828ed2 | |
| | 785f61ba70 | |
| | 50f26374e3 | |
| | 398bd04ffd | |
| | 8349a28ed8 | |
| | 27018e4ab6 | |
| | 73e82303e7 | |
| | 64697235d6 | |
| | a5cc408469 | |
| | d590dec159 | |
| | b262bcec03 | |
| | fe2330ebf6 | |
| | 50c7b35291 | |
| | 927d6ab1c6 | |
| | d064477a45 | |
| | 6588bb3d79 | |
| | 812cb3d940 | |
| | 852ef3cd1b | |
| | 3cc77d945b | |
| | 6f4449d5e9 | |
| | 37edef834e | |
| | 814030be77 | |
| | 71a2914f3e | |
| | 0d30ceb284 | |
| | 4add6cb26e | |
12  .gitattributes  vendored
@@ -2,12 +2,16 @@ mobile/openapi/**/*.md -diff -merge
mobile/openapi/**/*.md linguist-generated=true
mobile/openapi/**/*.dart -diff -merge
mobile/openapi/**/*.dart linguist-generated=true
mobile/openapi/.openapi-generator/FILES -diff -merge
mobile/openapi/.openapi-generator/FILES linguist-generated=true

cli/src/api/open-api/**/*.md -diff -merge
cli/src/api/open-api/**/*.md linguist-generated=true
cli/src/api/open-api/**/*.ts -diff -merge
cli/src/api/open-api/**/*.ts linguist-generated=true

web/src/api/open-api/**/*.md -diff -merge
web/src/api/open-api/**/*.md linguist-generated=true

web/src/api/open-api/**/*.ts -diff -merge
web/src/api/open-api/**/*.ts linguist-generated=true

mobile/openapi/.openapi-generator/FILES -diff -merge
mobile/openapi/.openapi-generator/FILES linguist-generated=true
4  .github/workflows/docker-cleanup.yml  vendored
@@ -38,7 +38,7 @@ jobs:
      - name: Clean temporary images
        if: "${{ env.TOKEN != '' }}"
        uses: stumpylog/image-cleaner-action/ephemeral@v0.1.0
        uses: stumpylog/image-cleaner-action/ephemeral@v0.2.0
        with:
          token: "${{ env.TOKEN }}"
          owner: "immich-app"
@@ -70,7 +70,7 @@ jobs:
      - name: Clean untagged images
        if: "${{ env.TOKEN != '' }}"
        uses: stumpylog/image-cleaner-action/untagged@v0.1.0
        uses: stumpylog/image-cleaner-action/untagged@v0.2.0
        with:
          token: "${{ env.TOKEN }}"
          owner: "immich-app"
93  .github/workflows/docker.yml  vendored
@@ -24,18 +24,15 @@ jobs:
      fail-fast: false
      matrix:
        include:
          - context: "server"
            image: "immich-server"
            platforms: "linux/arm/v7,linux/amd64,linux/arm64"
          - context: "web"
            image: "immich-web"
            platforms: "linux/arm/v7,linux/amd64,linux/arm64"
            platforms: "linux/amd64,linux/arm64"
          - context: "machine-learning"
            image: "immich-machine-learning"
            platforms: "linux/amd64,linux/arm64"
          - context: "nginx"
            image: "immich-proxy"
            platforms: "linux/arm/v7,linux/amd64,linux/arm64"
            platforms: "linux/amd64,linux/arm64"

    steps:
      - name: Checkout
@@ -45,7 +42,91 @@ jobs:
        uses: docker/setup-qemu-action@v2.2.0

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2.8.0
        uses: docker/setup-buildx-action@v2.9.1
        # Workaround to fix error:
        # failed to push: failed to copy: io: read/write on closed pipe
        # See https://github.com/docker/build-push-action/issues/761
        with:
          driver-opts: |
            image=moby/buildkit:v0.10.6

      - name: Login to Docker Hub
        # Only push to Docker Hub when making a release
        if: ${{ github.event_name == 'release' }}
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Login to GitHub Container Registry
        uses: docker/login-action@v2
        # Skip when PR from a fork
        if: ${{ !github.event.pull_request.head.repo.fork }}
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Generate docker image tags
        id: metadata
        uses: docker/metadata-action@v4
        with:
          flavor: |
            # Disable latest tag
            latest=false
          images: |
            name=ghcr.io/${{ github.repository_owner }}/${{matrix.image}}
            name=altran1502/${{matrix.image}},enable=${{ github.event_name == 'release' }}
          tags: |
            # Tag with branch name
            type=ref,event=branch
            # Tag with pr-number
            type=ref,event=pr
            # Tag with git tag on release
            type=ref,event=tag
            type=raw,value=release,enable=${{ github.event_name == 'release' }}

      - name: Determine build cache output
        id: cache-target
        run: |
          if [[ "${{ github.event_name }}" == "pull_request" ]]; then
            # Essentially just ignore the cache output (PR can't write to registry cache)
            echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
          else
            echo "cache-to=type=registry,mode=max,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{ matrix.image }}" >> $GITHUB_OUTPUT
          fi

      - name: Build and push image
        uses: docker/build-push-action@v4.1.1
        with:
          context: ${{ matrix.context }}
          platforms: ${{ matrix.platforms }}
          # Skip pushing when PR from a fork
          push: ${{ !github.event.pull_request.head.repo.fork }}
          cache-from: type=registry,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{matrix.image}}
          cache-to: ${{ steps.cache-target.outputs.cache-to }}
          tags: ${{ steps.metadata.outputs.tags }}
          labels: ${{ steps.metadata.outputs.labels }}

  build_and_push_server_arm_64:
    runs-on: self-hosted
    strategy:
      # Prevent a failure in one image from stopping the other builds
      fail-fast: false
      matrix:
        include:
          - context: "server"
            image: "immich-server"
            platforms: "linux/arm64,linux/amd64"
    steps:
      - name: Checkout
        uses: actions/checkout@v3

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2.2.0

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2.9.1
        # Workaround to fix error:
        # failed to push: failed to copy: io: read/write on closed pipe
        # See https://github.com/docker/build-push-action/issues/761
1  .github/workflows/prepare-release.yml  vendored
@@ -83,4 +83,5 @@ jobs:
          files: |
            docker/docker-compose.yml
            docker/example.env
            docker/hwaccel.yml
            *.apk
43  .github/workflows/test.yml  vendored
@@ -13,13 +13,20 @@ jobs:
  e2e-tests:
    name: Run end-to-end test suites
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: ./server

    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Run Immich Server E2E Test
        run: docker-compose -f ./docker/docker-compose.test.yml --env-file ./docker/.env.test up --abort-on-container-exit --exit-code-from immich-server-test
      - name: Run npm install
        run: npm ci

      - name: Run e2e tests
        run: npm run test:e2e
        if: ${{ !cancelled() }}

  doc-tests:
    name: Run documentation checks
@@ -73,6 +80,32 @@ jobs:
        run: npm run test:cov
        if: ${{ !cancelled() }}

  cli-unit-tests:
    name: Run cli test suites
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: ./cli

    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Run npm install
        run: npm ci

      - name: Run linter
        run: npm run lint
        if: ${{ !cancelled() }}

      - name: Run formatter
        run: npm run format
        if: ${{ !cancelled() }}

      - name: Run unit tests & coverage
        run: npm run test:cov
        if: ${{ !cancelled() }}

  web-unit-tests:
    name: Run web unit test suites and checks
    runs-on: ubuntu-latest
@@ -103,9 +136,9 @@ jobs:
        run: npm run check:typescript
        if: ${{ !cancelled() }}

      - name: Run unit tests & coverage
        run: npm run test:cov
        if: ${{ !cancelled() }}
      # - name: Run unit tests & coverage
      #   run: npm run test:cov
      #   if: ${{ !cancelled() }}

  mobile-unit-tests:
    name: Run mobile unit tests
4  Makefile
@@ -23,10 +23,10 @@ test-e2e:
	docker-compose -f ./docker/docker-compose.test.yml --env-file ./docker/.env.test -p immich-test-e2e up --renew-anon-volumes --abort-on-container-exit --exit-code-from immich-server-test --remove-orphans --build

prod:
	docker-compose -f ./docker/docker-compose.yml up --build -V --remove-orphans
	docker-compose -f ./docker/docker-compose.prod.yml up --build -V --remove-orphans

prod-scale:
	docker-compose -f ./docker/docker-compose.yml up --build -V --scale immich-server=3 --scale immich-microservices=3 --remove-orphans
	docker-compose -f ./docker/docker-compose.prod.yml up --build -V --scale immich-server=3 --scale immich-microservices=3 --remove-orphans

api:
	cd ./server && npm run api:generate

@@ -20,6 +20,8 @@
<p align="center">
  <a href="README_zh_CN.md">中文</a>
  <a href="README_tr_TR.md">Türkçe</a>
  <a href="README_ca_ES.md">Català</a>
  <a href="README_es_ES.md">Español</a>
</p>

## Disclaimer
108
README_ca_ES.md
Normal file
108
README_ca_ES.md
Normal file
@@ -0,0 +1,108 @@
|
||||
<p align="center">
|
||||
<br/>
|
||||
<a href="https://opensource.org/licenses/MIT"><img src="https://img.shields.io/badge/license-MIT-green.svg?color=3F51B5&style=for-the-badge&label=Llicència&logoColor=000000&labelColor=ececec" alt="Llicència: MIT"></a>
|
||||
<a href="https://discord.gg/D8JsnBEuKb">
|
||||
<img src="https://img.shields.io/discord/979116623879368755.svg?label=Discord&logo=Discord&style=for-the-badge&logoColor=000000&labelColor=ececec" atl="Discord"/>
|
||||
</a>
|
||||
<br/>
|
||||
<br/>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<img src="design/immich-logo.svg" width="150" title="Iniciar sessió amb URL personalitzada">
|
||||
</p>
|
||||
<h3 align="center">Immich - Solució de còpia de seguretat d'alta rendiment per a fotos i vídeos auto-allotjada</h3>
|
||||
<br/>
|
||||
<a href="https://immich.app">
|
||||
<img src="design/immich-screenshots.png" title="Captura de pantalla principal">
|
||||
</a>
|
||||
<br/>
|
||||
<p align="center">
|
||||
<a href="README.md">English</a>
|
||||
<a href="README_zh_CN.md">中文</a>
|
||||
<a href="README_tr_TR.md">Türkçe</a>
|
||||
<a href="README_ca_ES.md">Español</a>
|
||||
</p>
|
||||
|
||||
## Avís legal
|
||||
|
||||
- ⚠️ El projecte està en desenvolupament **molt actiu**.
|
||||
- ⚠️ Espereu errors i canvis que poden trencar coses.
|
||||
- ⚠️ **No utilitzeu l'aplicació com a única manera de guardar les vostres fotos i vídeos!**
|
||||
|
||||
## Contingut
|
||||
|
||||
- [Documentació oficial](https://immich.app/docs)
|
||||
- [Mapa de ruta](https://github.com/orgs/immich-app/projects/1)
|
||||
- [Demo](#demo)
|
||||
- [Funcionalitats](#funcionalitats)
|
||||
- [Introducció](https://immich.app/docs/overview/introduction)
|
||||
- [Instal·lació](https://immich.app/docs/install/requirements)
|
||||
- [Directrius de contribució](https://immich.app/docs/overview/support-the-project)
|
||||
- [Donar suport al projecte](#suportar-el-projecte)
|
||||
|
||||
## Documentació
|
||||
|
||||
Podeu trobar la documentació principal, incloent les guies d'instal·lació, a https://immich.app/.
|
||||
|
||||
## Demo
|
||||
|
||||
Podeu accedir a la demostració web a https://demo.immich.app
|
||||
|
||||
Per a l'aplicació mòbil, podeu utilitzar `https://demo.immich.app/api` com a "URL de punt final del servidor".
|
||||
|
||||
```bash title="Credencials de la demo"
|
||||
Les credencials
|
||||
email: demo@immich.app
|
||||
contrasenya: demo
|
||||
```
|
||||
```
|
||||
Spec: Free-tier Oracle VM - Amsterdam - 2.4Ghz quad-core ARM64 CPU, 24GB RAM
|
||||
```
|
||||
|
||||
# Funcionalitats
|
||||
|
||||
| Característiques | Mòbil | Web |
|
||||
| -------------------------------------------- | ------ | --- |
|
||||
| Pujar i veure vídeos i fotos | Sí | Sí |
|
||||
| Còpia de seguretat automàtica en obrir l'aplicació | Sí | N/A |
|
||||
| Selecció d'àlbums per a la còpia de seguretat | Sí | N/A |
|
||||
| Descarregar fotos i vídeos a l'aparell local | Sí | Sí |
|
||||
| Suport per a múltiples usuaris | Sí | Sí |
|
||||
| Àlbums i àlbums compartits | Sí | Sí |
|
||||
| Barra de desplaçament amb funció de rasclet/arrossegament | Sí | Sí |
|
||||
| Suport per a formats raw | Sí | Sí |
|
||||
| Visualització de metadades (EXIF, mapa) | Sí | Sí |
|
||||
| Cerca per metadades, objectes, cares i CLIP | Sí | Sí |
|
||||
| Funcions administratives (gestió d'usuaris) | No | Sí |
|
||||
| Còpia de seguretat en segon pla | Sí | N/A |
|
||||
| Desplaçament virtual | Sí | Sí |
|
||||
| Suport per a OAuth | Sí | Sí |
|
||||
| Claus d'API | N/A | Sí |
|
||||
| Còpia de seguretat i reproducció de LivePhoto | iOS | Sí |
|
||||
| Estructura d'emmagatzematge definida per l'usuari | Sí | Sí |
|
||||
| Compartició pública | No | Sí |
|
||||
| Arxiu i preferits | Sí | Sí |
|
||||
| Mapa global | No | Sí |
|
||||
| Compartició amb associats | Sí | Sí |
|
||||
| Reconeixement facial i agrupament | Sí | Sí |
|
||||
| Records (fa x anys) | Sí | Sí |
|
||||
| Suport fora de línia | Sí | No |
|
||||
| Galeria de només lectura | Sí | Sí |
|
||||
|
||||
# Donar suport al projecte
|
||||
|
||||
M'he compromès amb aquest projecte i no em detindré. Continuaré actualitzant la documentació, afegint noves funcionalitats i solucionant errors. Però no ho puc fer sol. Per això, necessito la vostra ajuda per donar-me motivació addicional per seguir endavant.
|
||||
|
||||
Com van dir els nostres amfitrions a l'episodi [selfhosted.show - 'The-organization-must-not-be-name is a Hostile Actor'](https://selfhosted.show/79?t=1418), això és una tasca enorme del que l'equip i jo estem fent. I m'encantaria poder dedicar-m'hi a temps complet, per la qual cosa us demano la vostra ajuda per fer-ho possible.
|
||||
|
||||
Si creieu que aquesta és una causa justa i l'aplicació és alguna cosa que us veieu utilitzant durant molt de temps, considereu donar suport al projecte amb alguna de les opcions següents.
|
||||
|
||||
## Donació
|
||||
|
||||
- [Donació mensual](https://github.com/sponsors/alextran1502) a través de GitHub Sponsors
|
||||
- [Donació única](https://github.com/sponsors/alextran1502?frequency=one-time&sponsor=alextran1502) a través de GitHub Sponsors
|
||||
- [Librepay](https://liberapay.com/alex.tran1502/)
|
||||
- [buymeacoffee](https://www.buymeacoffee.com/altran1502)
|
||||
- Bitcoin: 1FvEp6P6NM8EZEkpGUFAN2LqJ1gxusNxZX
|
||||
|
||||
108
README_es_ES.md
Normal file
108
README_es_ES.md
Normal file
@@ -0,0 +1,108 @@
|
||||
<p align="center">
|
||||
<br/>
|
||||
<a href="https://opensource.org/licenses/MIT"><img src="https://img.shields.io/badge/license-MIT-green.svg?color=3F51B5&style=for-the-badge&label=License&logoColor=000000&labelColor=ececec" alt="Licencia: MIT"></a>
|
||||
<a href="https://discord.gg/D8JsnBEuKb">
|
||||
<img src="https://img.shields.io/discord/979116623879368755.svg?label=Discord&logo=Discord&style=for-the-badge&logoColor=000000&labelColor=ececec" atl="Discord"/>
|
||||
</a>
|
||||
<br/>
|
||||
<br/>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<img src="design/immich-logo.svg" width="150" title="Iniciar sesión con URL personalizada">
|
||||
</p>
|
||||
<h3 align="center">Immich: Una solución Self-Hosted de copia de seguridad de fotos y videos de alto rendimiento</h3>
|
||||
<br/>
|
||||
<a href="https://immich.app">
|
||||
<img src="design/immich-screenshots.png" title="Captura de pantalla principal">
|
||||
</a>
|
||||
<br/>
|
||||
<p align="center">
|
||||
<a href="README.md">English</a>
|
||||
<a href="README_zh_CN.md">中文</a>
|
||||
<a href="README_tr_TR.md">Türkçe</a>
|
||||
<a href="README_ca_ES.md">Català</a>
|
||||
</p>
|
||||
|
||||
## Descargo de responsabilidad
|
||||
|
||||
- ⚠️ El proyecto está en **desarrollo muy activo**.
|
||||
- ⚠️ Es probable que haya errores y cambios disruptivos.
|
||||
- ⚠️ **¡No utilices la aplicación como única forma de almacenar tus fotos y videos!**
|
||||
|
||||
## Contenido
|
||||
|
||||
- [Documentación oficial](https://immich.app/docs)
|
||||
- [Hoja de ruta](https://github.com/orgs/immich-app/projects/1)
|
||||
- [Demostración](#demo)
|
||||
- [Funciones](#features)
|
||||
- [Introducción](https://immich.app/docs/overview/introduction)
|
||||
- [Instalación](https://immich.app/docs/install/requirements)
|
||||
- [Directrices para contribuir](https://immich.app/docs/overview/support-the-project)
|
||||
- [Apoya el proyecto](#support-the-project)
|
||||
|
||||
## Documentación
|
||||
|
||||
Puedes encontrar la documentación principal, incluidas las guías de instalación, en <https://immich.app/>.
|
||||
|
||||
## Demostración
|
||||
|
||||
Puedes acceder a la demostración web en <https://demo.immich.app>
|
||||
|
||||
Para la aplicación móvil, puedes usar `https://demo.immich.app/api` como `URL de la terminal del servidor`.
|
||||
|
||||
```bash title="Credenciales de la demostración"
|
||||
Las credenciales son
|
||||
correo electrónico: demo@immich.app
|
||||
contraseña: demo
|
||||
```
|
||||
|
||||
```bash
|
||||
Especificaciones: VM de nivel gratuito de Oracle - Ámsterdam - CPU ARM64 de cuatro núcleos a 2.4 GHz, 24 GB de RAM
|
||||
```
|
||||
|
||||
## Funcionalidades
|
||||
|
||||
| Funcionalidades | Móvil | Web |
|
||||
| ----------------------------------------------------- | ------ | --- |
|
||||
| Cargar y ver videos y fotos | Sí | Sí |
|
||||
| Copia de seguridad automática al abrir la aplicación | Sí | N/D |
|
||||
| Álbum(es) selectivo(s) para copia de seguridad | Sí | N/D |
|
||||
| Descargar fotos y videos al dispositivo local | Sí | Sí |
|
||||
| Soporte multiusuario | Sí | Sí |
|
||||
| Álbum y álbumes compartidos | Sí | Sí |
|
||||
| Barra de desplazamiento con función de búsqueda | Sí | Sí |
|
||||
| Soporte para formatos RAW | Sí | Sí |
|
||||
| Visualización de metadatos (EXIF, map) | Sí | Sí |
|
||||
| Búsqueda por metadatos, objetos, rostros y CLIP | Sí | Sí |
|
||||
| Funciones administrativas (gestión de usuarios) | No | Sí |
|
||||
| Copia de seguridad en segundo plano | Sí | N/D |
|
||||
| Desplazamiento virtual | Sí | Sí |
|
||||
| Soporte de OAuth | Sí | Sí |
|
||||
| Claves de API | N/D | Sí |
|
||||
| Copia de seguridad y reproducción de LivePhoto | iOS | Sí |
|
||||
| Estructura de almacenamiento definida por el usuario | Sí | Sí |
|
||||
| Compartir públicamente | No | Sí |
|
||||
| Archivar y marcar como favorito | Sí | Sí |
|
||||
| Mapa global | No | Sí |
|
||||
| Compartir con colaboradores | Sí | Sí |
|
||||
| Reconocimiento facial y agrupación | Sí | Sí |
|
||||
| Recuerdos (hace x años) | Sí | Sí |
|
||||
| Soporte sin conexión | Sí | No |
|
||||
| Galería de solo lectura | Sí | Sí |
|
||||
|
||||
## Apoya el proyecto
|
||||
|
||||
Me he comprometido con este proyecto, y no me detendré. Continuaré actualizando la documentación, agregando nuevas funcionalidades y corrigiendo errores. Pero no puedo hacerlo solo. Por eso, necesito tu ayuda para darme una motivación adicional para seguir adelante.
|
||||
|
||||
Como dijeron nuestros anfitriones en [selfhosted.show - En el episodio 'The-organization-must-not-be-name is a Hostile Actor'](https://selfhosted.show/79?t=1418), esto es una gran tarea de lo que el equipo y yo estamos haciendo. Y me encantaría poder dedicarme a esto a tiempo completo algún día, así que te pido tu ayuda para que eso sea posible.
|
||||
|
||||
Si consideras que esta es una causa justa y la aplicación es algo que te gustaría usar durante mucho tiempo, por favor, considera apoyar el proyecto con las siguientes opciones.
|
||||
|
||||
## Donación
|
||||
|
||||
- [Donación mensual](https://github.com/sponsors/alextran1502) a través de GitHub Sponsors
|
||||
- [Donación única](https://github.com/sponsors/alextran1502?frequency=one-time&sponsor=alextran1502) a través de GitHub Sponsors
|
||||
- [Librepay](https://liberapay.com/alex.tran1502/)
|
||||
- [buymeacoffee](https://www.buymeacoffee.com/altran1502)
|
||||
- Bitcoin: 1FvEp6P6NM8EZEkpGUFAN2LqJ1gxusNxZX
|
||||
@@ -20,6 +20,8 @@
|
||||
<p align="center">
|
||||
<a href="README.md">English</a>
|
||||
<a href="README_zh_CN.md">中文</a>
|
||||
<a href="README_ca_ES.md">Català</a>
|
||||
<a href="README_es_ES.md">Español</a>
|
||||
</p>
|
||||
|
||||
## Feragatname
|
||||
|
||||
@@ -24,6 +24,8 @@
|
||||
<p align="center">
|
||||
<a href="README.md">English</a>
|
||||
<a href="README_tr_TR.md">Türkçe</a>
|
||||
<a href="README_ca_ES.md">Català</a>
|
||||
<a href="README_es_ES.md">Español</a>
|
||||
</p>
|
||||
|
||||
|
||||
|
||||
20
cli/.editorconfig
Normal file
20
cli/.editorconfig
Normal file
@@ -0,0 +1,20 @@
|
||||
# Editor configuration, see https://editorconfig.org
|
||||
root = true
|
||||
|
||||
[*]
|
||||
charset = utf-8
|
||||
indent_style = space
|
||||
indent_size = 2
|
||||
insert_final_newline = true
|
||||
charset = utf-8
|
||||
trim_trailing_whitespace = true
|
||||
|
||||
[*.{ts,js}]
|
||||
quote_type = single
|
||||
|
||||
[*.{md,mdx}]
|
||||
max_line_length = off
|
||||
trim_trailing_whitespace = false
|
||||
|
||||
[*.{yml,yaml}]
|
||||
quote_type = double
|
||||
1
cli/.eslintignore
Normal file
1
cli/.eslintignore
Normal file
@@ -0,0 +1 @@
|
||||
/dist
|
||||
23
cli/.eslintrc.js
Normal file
23
cli/.eslintrc.js
Normal file
@@ -0,0 +1,23 @@
|
||||
module.exports = {
|
||||
parser: '@typescript-eslint/parser',
|
||||
parserOptions: {
|
||||
project: 'tsconfig.json',
|
||||
sourceType: 'module',
|
||||
tsconfigRootDir: __dirname,
|
||||
},
|
||||
plugins: ['@typescript-eslint/eslint-plugin'],
|
||||
extends: ['plugin:@typescript-eslint/recommended', 'plugin:prettier/recommended'],
|
||||
root: true,
|
||||
env: {
|
||||
node: true,
|
||||
jest: true,
|
||||
},
|
||||
ignorePatterns: ['.eslintrc.js'],
|
||||
rules: {
|
||||
'@typescript-eslint/interface-name-prefix': 'off',
|
||||
'@typescript-eslint/explicit-function-return-type': 'off',
|
||||
'@typescript-eslint/explicit-module-boundary-types': 'off',
|
||||
'@typescript-eslint/no-explicit-any': 'off',
|
||||
'prettier/prettier': 0,
|
||||
},
|
||||
};
|
||||
13
cli/.gitignore
vendored
Normal file
13
cli/.gitignore
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
*-debug.log
|
||||
*-error.log
|
||||
/.nyc_output
|
||||
/dist
|
||||
/lib
|
||||
/tmp
|
||||
/yarn.lock
|
||||
node_modules
|
||||
oclif.manifest.json
|
||||
|
||||
.vscode
|
||||
.idea
|
||||
/coverage/
|
||||
18
cli/.prettierignore
Normal file
18
cli/.prettierignore
Normal file
@@ -0,0 +1,18 @@
|
||||
.DS_Store
|
||||
node_modules
|
||||
/build
|
||||
/package
|
||||
.env
|
||||
.env.*
|
||||
!.env.example
|
||||
src/api/open-api
|
||||
*.md
|
||||
*.json
|
||||
coverage
|
||||
dist
|
||||
**/migrations/**
|
||||
|
||||
# Ignore files for PNPM, NPM and YARN
|
||||
pnpm-lock.yaml
|
||||
package-lock.json
|
||||
yarn.lock
|
||||
6
cli/.prettierrc
Normal file
6
cli/.prettierrc
Normal file
@@ -0,0 +1,6 @@
|
||||
{
|
||||
"singleQuote": true,
|
||||
"trailingComma": "all",
|
||||
"printWidth": 120,
|
||||
"semi": true
|
||||
}
|
||||
46  cli/README.md  Normal file
@@ -0,0 +1,46 @@
A command-line interface for interacting with Immich

# Getting started

$ ts-node cli/src

To start using the CLI, you need to log in with an API key first:

$ ts-node cli/src login-key https://your-immich-instance/api your-api-key

NOTE: This will store your API key under ~/.config/immich/auth.yml

Next, you can run commands:

$ ts-node cli/src server-info

When you're done, log out to remove the credentials from your filesystem:

$ ts-node cli/src logout

# Usage

```
Usage: immich [options] [command]

Immich command line interface

Options:
  -h, --help                        display help for command

Commands:
  upload [options] [paths...]       Upload assets
  import [options] [paths...]       Import existing assets
  server-info                       Display server information
  login-key [instanceUrl] [apiKey]  Login using an API key
  help [command]                    display help for command
```

# Todo

- Sidecar should check both .jpg.xmp and .xmp
- Sidecar check could be case-insensitive

# Known issues

- Upload can't use the SDK due to multiple issues
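Since the Usage section above only lists command signatures, a concrete first run after `login-key` might look like the example below. The path is a placeholder, and `upload` also accepts options (such as a dry-run mode, per the option DTOs added later in this change) that are not shown here:

$ ts-node cli/src upload /path/to/photos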
6197
cli/package-lock.json
generated
Normal file
6197
cli/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
68
cli/package.json
Normal file
68
cli/package.json
Normal file
@@ -0,0 +1,68 @@
|
||||
{
|
||||
"name": "immich-cli",
|
||||
"dependencies": {
|
||||
"axios": "^1.4.0",
|
||||
"byte-size": "^8.1.1",
|
||||
"cli-progress": "^3.12.0",
|
||||
"commander": "^11.0.0",
|
||||
"form-data": "^4.0.0",
|
||||
"glob": "^10.3.1",
|
||||
"picomatch": "^2.3.1",
|
||||
"systeminformation": "^5.18.4",
|
||||
"yaml": "^2.3.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/byte-size": "^8.1.0",
|
||||
"@types/chai": "^4.3.5",
|
||||
"@types/cli-progress": "^3.11.0",
|
||||
"@types/jest": "^29.5.2",
|
||||
"@types/js-yaml": "^4.0.5",
|
||||
"@types/mime-types": "^2.1.1",
|
||||
"@types/mock-fs": "^4.13.1",
|
||||
"@types/node": "^20.3.1",
|
||||
"@typescript-eslint/eslint-plugin": "^5.60.1",
|
||||
"@typescript-eslint/parser": "^5.48.1",
|
||||
"chai": "^4.3.7",
|
||||
"eslint": "^8.43.0",
|
||||
"eslint-config-prettier": "^8.8.0",
|
||||
"eslint-plugin-jest": "^27.2.2",
|
||||
"eslint-plugin-prettier": "^4.2.1",
|
||||
"eslint-plugin-unicorn": "^47.0.0",
|
||||
"jest": "^29.5.0",
|
||||
"jest-extended": "^4.0.0",
|
||||
"jest-message-util": "^29.5.0",
|
||||
"jest-mock-axios": "^4.7.2",
|
||||
"jest-when": "^3.5.2",
|
||||
"mock-fs": "^5.2.0",
|
||||
"ts-jest": "^29.1.0",
|
||||
"ts-node": "^10.9.1",
|
||||
"tslib": "^2.5.3",
|
||||
"typescript": "^4.9.4"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "tsc --project tsconfig.build.json",
|
||||
"lint": "eslint \"src/**/*.ts\" --max-warnings 0",
|
||||
"prepack": "yarn build ",
|
||||
"test": "jest",
|
||||
"test:cov": "jest --coverage",
|
||||
"format": "prettier --check ."
|
||||
},
|
||||
"jest": {
|
||||
"clearMocks": true,
|
||||
"moduleFileExtensions": [
|
||||
"js",
|
||||
"json",
|
||||
"ts"
|
||||
],
|
||||
"rootDir": ".",
|
||||
"testRegex": ".*\\.spec\\.ts$",
|
||||
"transform": {
|
||||
"^.+\\.ts$": "ts-jest"
|
||||
},
|
||||
"collectCoverageFrom": [
|
||||
"<rootDir>/src/**/*.(t|j)s"
|
||||
],
|
||||
"coverageDirectory": "./coverage",
|
||||
"testEnvironment": "node"
|
||||
}
|
||||
}
|
||||
3
cli/src/__mocks__/axios.ts
Normal file
3
cli/src/__mocks__/axios.ts
Normal file
@@ -0,0 +1,3 @@
|
||||
// ./__mocks__/axios.js
|
||||
import mockAxios from 'jest-mock-axios';
|
||||
export default mockAxios;
|
||||
50  cli/src/api/client.ts  Normal file
@@ -0,0 +1,50 @@
import {
  AlbumApi,
  APIKeyApi,
  AssetApi,
  AuthenticationApi,
  Configuration,
  JobApi,
  OAuthApi,
  ServerInfoApi,
  SystemConfigApi,
  UserApi,
} from './open-api';
import { ApiConfiguration } from '../cores/api-configuration';

export class ImmichApi {
  public userApi: UserApi;
  public albumApi: AlbumApi;
  public assetApi: AssetApi;
  public authenticationApi: AuthenticationApi;
  public oauthApi: OAuthApi;
  public serverInfoApi: ServerInfoApi;
  public jobApi: JobApi;
  public keyApi: APIKeyApi;
  public systemConfigApi: SystemConfigApi;

  private readonly config;
  public readonly apiConfiguration: ApiConfiguration;

  constructor(instanceUrl: string, apiKey: string) {
    this.apiConfiguration = new ApiConfiguration(instanceUrl, apiKey);
    this.config = new Configuration({
      basePath: instanceUrl,
      baseOptions: {
        headers: {
          'x-api-key': apiKey,
        },
      },
    });

    this.userApi = new UserApi(this.config);
    this.albumApi = new AlbumApi(this.config);
    this.assetApi = new AssetApi(this.config);
    this.authenticationApi = new AuthenticationApi(this.config);
    this.oauthApi = new OAuthApi(this.config);
    this.serverInfoApi = new ServerInfoApi(this.config);
    this.jobApi = new JobApi(this.config);
    this.keyApi = new APIKeyApi(this.config);
    this.systemConfigApi = new SystemConfigApi(this.config);
  }
}
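To show how this client is meant to be consumed, here is a minimal sketch that is not part of the diff: the instance URL and API key are placeholders, and only APIs visible in this change are used. Constructing `ImmichApi` wires the `x-api-key` header into every generated OpenAPI sub-client, so a caller can immediately issue requests, for example the server version query that the `server-info` command later in this change relies on.

```typescript
import { ImmichApi } from './api/client'; // import path assumes this file lives in cli/src

// Placeholder values - substitute your own instance URL and API key.
const api = new ImmichApi('https://immich.example.com/api', 'your-api-key');

async function printServerVersion(): Promise<void> {
  // Every sub-client already carries the x-api-key header set in the constructor,
  // so no extra auth handling is needed per call.
  const { data: version } = await api.serverInfoApi.getServerVersion();
  console.log(version);
}

printServerVersion();
```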
4
cli/src/api/open-api/.gitignore
vendored
Normal file
4
cli/src/api/open-api/.gitignore
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
wwwroot/*.js
|
||||
node_modules
|
||||
typings
|
||||
dist
|
||||
1
cli/src/api/open-api/.npmignore
Normal file
1
cli/src/api/open-api/.npmignore
Normal file
@@ -0,0 +1 @@
|
||||
# empty npmignore to ensure all required files (e.g., in the dist folder) are published by npm
|
||||
23
cli/src/api/open-api/.openapi-generator-ignore
Normal file
23
cli/src/api/open-api/.openapi-generator-ignore
Normal file
@@ -0,0 +1,23 @@
|
||||
# OpenAPI Generator Ignore
|
||||
# Generated by openapi-generator https://github.com/openapitools/openapi-generator
|
||||
|
||||
# Use this file to prevent files from being overwritten by the generator.
|
||||
# The patterns follow closely to .gitignore or .dockerignore.
|
||||
|
||||
# As an example, the C# client generator defines ApiClient.cs.
|
||||
# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line:
|
||||
#ApiClient.cs
|
||||
|
||||
# You can match any string of characters against a directory, file or extension with a single asterisk (*):
|
||||
#foo/*/qux
|
||||
# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux
|
||||
|
||||
# You can recursively match patterns against a directory, file or extension with a double asterisk (**):
|
||||
#foo/**/qux
|
||||
# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux
|
||||
|
||||
# You can also negate patterns with an exclamation (!).
|
||||
# For example, you can ignore all files in a docs folder with the file extension .md:
|
||||
#docs/*.md
|
||||
# Then explicitly reverse the ignore rule for a single file:
|
||||
#!docs/README.md
|
||||
9
cli/src/api/open-api/.openapi-generator/FILES
Normal file
9
cli/src/api/open-api/.openapi-generator/FILES
Normal file
@@ -0,0 +1,9 @@
|
||||
.gitignore
|
||||
.npmignore
|
||||
.openapi-generator-ignore
|
||||
api.ts
|
||||
base.ts
|
||||
common.ts
|
||||
configuration.ts
|
||||
git_push.sh
|
||||
index.ts
|
||||
1
cli/src/api/open-api/.openapi-generator/VERSION
Normal file
1
cli/src/api/open-api/.openapi-generator/VERSION
Normal file
@@ -0,0 +1 @@
|
||||
6.5.0
|
||||
13015
cli/src/api/open-api/api.ts
generated
Normal file
13015
cli/src/api/open-api/api.ts
generated
Normal file
File diff suppressed because it is too large
Load Diff
72
cli/src/api/open-api/base.ts
generated
Normal file
72
cli/src/api/open-api/base.ts
generated
Normal file
@@ -0,0 +1,72 @@
|
||||
/* tslint:disable */
|
||||
/* eslint-disable */
|
||||
/**
|
||||
* Immich
|
||||
* Immich API
|
||||
*
|
||||
* The version of the OpenAPI document: 1.72.2
|
||||
*
|
||||
*
|
||||
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
|
||||
* https://openapi-generator.tech
|
||||
* Do not edit the class manually.
|
||||
*/
|
||||
|
||||
|
||||
import type { Configuration } from './configuration';
|
||||
// Some imports not used depending on template conditions
|
||||
// @ts-ignore
|
||||
import type { AxiosPromise, AxiosInstance, AxiosRequestConfig } from 'axios';
|
||||
import globalAxios from 'axios';
|
||||
|
||||
export const BASE_PATH = "/api".replace(/\/+$/, "");
|
||||
|
||||
/**
|
||||
*
|
||||
* @export
|
||||
*/
|
||||
export const COLLECTION_FORMATS = {
|
||||
csv: ",",
|
||||
ssv: " ",
|
||||
tsv: "\t",
|
||||
pipes: "|",
|
||||
};
|
||||
|
||||
/**
|
||||
*
|
||||
* @export
|
||||
* @interface RequestArgs
|
||||
*/
|
||||
export interface RequestArgs {
|
||||
url: string;
|
||||
options: AxiosRequestConfig;
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @export
|
||||
* @class BaseAPI
|
||||
*/
|
||||
export class BaseAPI {
|
||||
protected configuration: Configuration | undefined;
|
||||
|
||||
constructor(configuration?: Configuration, protected basePath: string = BASE_PATH, protected axios: AxiosInstance = globalAxios) {
|
||||
if (configuration) {
|
||||
this.configuration = configuration;
|
||||
this.basePath = configuration.basePath || this.basePath;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
*
|
||||
* @export
|
||||
* @class RequiredError
|
||||
* @extends {Error}
|
||||
*/
|
||||
export class RequiredError extends Error {
|
||||
constructor(public field: string, msg?: string) {
|
||||
super(msg);
|
||||
this.name = "RequiredError"
|
||||
}
|
||||
}
|
||||
150
cli/src/api/open-api/common.ts
generated
Normal file
150
cli/src/api/open-api/common.ts
generated
Normal file
@@ -0,0 +1,150 @@
|
||||
/* tslint:disable */
|
||||
/* eslint-disable */
|
||||
/**
|
||||
* Immich
|
||||
* Immich API
|
||||
*
|
||||
* The version of the OpenAPI document: 1.72.2
|
||||
*
|
||||
*
|
||||
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
|
||||
* https://openapi-generator.tech
|
||||
* Do not edit the class manually.
|
||||
*/
|
||||
|
||||
|
||||
import type { Configuration } from "./configuration";
|
||||
import type { RequestArgs } from "./base";
|
||||
import type { AxiosInstance, AxiosResponse } from 'axios';
|
||||
import { RequiredError } from "./base";
|
||||
|
||||
/**
|
||||
*
|
||||
* @export
|
||||
*/
|
||||
export const DUMMY_BASE_URL = 'https://example.com'
|
||||
|
||||
/**
|
||||
*
|
||||
* @throws {RequiredError}
|
||||
* @export
|
||||
*/
|
||||
export const assertParamExists = function (functionName: string, paramName: string, paramValue: unknown) {
|
||||
if (paramValue === null || paramValue === undefined) {
|
||||
throw new RequiredError(paramName, `Required parameter ${paramName} was null or undefined when calling ${functionName}.`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @export
|
||||
*/
|
||||
export const setApiKeyToObject = async function (object: any, keyParamName: string, configuration?: Configuration) {
|
||||
if (configuration && configuration.apiKey) {
|
||||
const localVarApiKeyValue = typeof configuration.apiKey === 'function'
|
||||
? await configuration.apiKey(keyParamName)
|
||||
: await configuration.apiKey;
|
||||
object[keyParamName] = localVarApiKeyValue;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @export
|
||||
*/
|
||||
export const setBasicAuthToObject = function (object: any, configuration?: Configuration) {
|
||||
if (configuration && (configuration.username || configuration.password)) {
|
||||
object["auth"] = { username: configuration.username, password: configuration.password };
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @export
|
||||
*/
|
||||
export const setBearerAuthToObject = async function (object: any, configuration?: Configuration) {
|
||||
if (configuration && configuration.accessToken) {
|
||||
const accessToken = typeof configuration.accessToken === 'function'
|
||||
? await configuration.accessToken()
|
||||
: await configuration.accessToken;
|
||||
object["Authorization"] = "Bearer " + accessToken;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @export
|
||||
*/
|
||||
export const setOAuthToObject = async function (object: any, name: string, scopes: string[], configuration?: Configuration) {
|
||||
if (configuration && configuration.accessToken) {
|
||||
const localVarAccessTokenValue = typeof configuration.accessToken === 'function'
|
||||
? await configuration.accessToken(name, scopes)
|
||||
: await configuration.accessToken;
|
||||
object["Authorization"] = "Bearer " + localVarAccessTokenValue;
|
||||
}
|
||||
}
|
||||
|
||||
function setFlattenedQueryParams(urlSearchParams: URLSearchParams, parameter: any, key: string = ""): void {
|
||||
if (parameter == null) return;
|
||||
if (typeof parameter === "object") {
|
||||
if (Array.isArray(parameter)) {
|
||||
(parameter as any[]).forEach(item => setFlattenedQueryParams(urlSearchParams, item, key));
|
||||
}
|
||||
else {
|
||||
Object.keys(parameter).forEach(currentKey =>
|
||||
setFlattenedQueryParams(urlSearchParams, parameter[currentKey], `${key}${key !== '' ? '.' : ''}${currentKey}`)
|
||||
);
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (urlSearchParams.has(key)) {
|
||||
urlSearchParams.append(key, parameter);
|
||||
}
|
||||
else {
|
||||
urlSearchParams.set(key, parameter);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @export
|
||||
*/
|
||||
export const setSearchParams = function (url: URL, ...objects: any[]) {
|
||||
const searchParams = new URLSearchParams(url.search);
|
||||
setFlattenedQueryParams(searchParams, objects);
|
||||
url.search = searchParams.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @export
|
||||
*/
|
||||
export const serializeDataIfNeeded = function (value: any, requestOptions: any, configuration?: Configuration) {
|
||||
const nonString = typeof value !== 'string';
|
||||
const needsSerialization = nonString && configuration && configuration.isJsonMime
|
||||
? configuration.isJsonMime(requestOptions.headers['Content-Type'])
|
||||
: nonString;
|
||||
return needsSerialization
|
||||
? JSON.stringify(value !== undefined ? value : {})
|
||||
: (value || "");
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @export
|
||||
*/
|
||||
export const toPathString = function (url: URL) {
|
||||
return url.pathname + url.search + url.hash
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @export
|
||||
*/
|
||||
export const createRequestFunction = function (axiosArgs: RequestArgs, globalAxios: AxiosInstance, BASE_PATH: string, configuration?: Configuration) {
|
||||
return <T = unknown, R = AxiosResponse<T>>(axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => {
|
||||
const axiosRequestArgs = {...axiosArgs.options, url: (configuration?.basePath || basePath) + axiosArgs.url};
|
||||
return axios.request<T, R>(axiosRequestArgs);
|
||||
};
|
||||
}
|
||||
101
cli/src/api/open-api/configuration.ts
generated
Normal file
101
cli/src/api/open-api/configuration.ts
generated
Normal file
@@ -0,0 +1,101 @@
|
||||
/* tslint:disable */
|
||||
/* eslint-disable */
|
||||
/**
|
||||
* Immich
|
||||
* Immich API
|
||||
*
|
||||
* The version of the OpenAPI document: 1.72.2
|
||||
*
|
||||
*
|
||||
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
|
||||
* https://openapi-generator.tech
|
||||
* Do not edit the class manually.
|
||||
*/
|
||||
|
||||
|
||||
export interface ConfigurationParameters {
|
||||
apiKey?: string | Promise<string> | ((name: string) => string) | ((name: string) => Promise<string>);
|
||||
username?: string;
|
||||
password?: string;
|
||||
accessToken?: string | Promise<string> | ((name?: string, scopes?: string[]) => string) | ((name?: string, scopes?: string[]) => Promise<string>);
|
||||
basePath?: string;
|
||||
baseOptions?: any;
|
||||
formDataCtor?: new () => any;
|
||||
}
|
||||
|
||||
export class Configuration {
|
||||
/**
|
||||
* parameter for apiKey security
|
||||
* @param name security name
|
||||
* @memberof Configuration
|
||||
*/
|
||||
apiKey?: string | Promise<string> | ((name: string) => string) | ((name: string) => Promise<string>);
|
||||
/**
|
||||
* parameter for basic security
|
||||
*
|
||||
* @type {string}
|
||||
* @memberof Configuration
|
||||
*/
|
||||
username?: string;
|
||||
/**
|
||||
* parameter for basic security
|
||||
*
|
||||
* @type {string}
|
||||
* @memberof Configuration
|
||||
*/
|
||||
password?: string;
|
||||
/**
|
||||
* parameter for oauth2 security
|
||||
* @param name security name
|
||||
* @param scopes oauth2 scope
|
||||
* @memberof Configuration
|
||||
*/
|
||||
accessToken?: string | Promise<string> | ((name?: string, scopes?: string[]) => string) | ((name?: string, scopes?: string[]) => Promise<string>);
|
||||
/**
|
||||
* override base path
|
||||
*
|
||||
* @type {string}
|
||||
* @memberof Configuration
|
||||
*/
|
||||
basePath?: string;
|
||||
/**
|
||||
* base options for axios calls
|
||||
*
|
||||
* @type {any}
|
||||
* @memberof Configuration
|
||||
*/
|
||||
baseOptions?: any;
|
||||
/**
|
||||
* The FormData constructor that will be used to create multipart form data
|
||||
* requests. You can inject this here so that execution environments that
|
||||
* do not support the FormData class can still run the generated client.
|
||||
*
|
||||
* @type {new () => FormData}
|
||||
*/
|
||||
formDataCtor?: new () => any;
|
||||
|
||||
constructor(param: ConfigurationParameters = {}) {
|
||||
this.apiKey = param.apiKey;
|
||||
this.username = param.username;
|
||||
this.password = param.password;
|
||||
this.accessToken = param.accessToken;
|
||||
this.basePath = param.basePath;
|
||||
this.baseOptions = param.baseOptions;
|
||||
this.formDataCtor = param.formDataCtor;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the given MIME is a JSON MIME.
|
||||
* JSON MIME examples:
|
||||
* application/json
|
||||
* application/json; charset=UTF8
|
||||
* APPLICATION/JSON
|
||||
* application/vnd.company+json
|
||||
* @param mime - MIME (Multipurpose Internet Mail Extensions)
|
||||
* @return True if the given MIME is JSON, false otherwise.
|
||||
*/
|
||||
public isJsonMime(mime: string): boolean {
|
||||
const jsonMime: RegExp = new RegExp('^(application\/json|[^;/ \t]+\/[^;/ \t]+[+]json)[ \t]*(;.*)?$', 'i');
|
||||
return mime !== null && (jsonMime.test(mime) || mime.toLowerCase() === 'application/json-patch+json');
|
||||
}
|
||||
}
|
||||
57
cli/src/api/open-api/git_push.sh
Normal file
57
cli/src/api/open-api/git_push.sh
Normal file
@@ -0,0 +1,57 @@
|
||||
#!/bin/sh
|
||||
# ref: https://help.github.com/articles/adding-an-existing-project-to-github-using-the-command-line/
|
||||
#
|
||||
# Usage example: /bin/sh ./git_push.sh wing328 openapi-petstore-perl "minor update" "gitlab.com"
|
||||
|
||||
git_user_id=$1
|
||||
git_repo_id=$2
|
||||
release_note=$3
|
||||
git_host=$4
|
||||
|
||||
if [ "$git_host" = "" ]; then
|
||||
git_host="github.com"
|
||||
echo "[INFO] No command line input provided. Set \$git_host to $git_host"
|
||||
fi
|
||||
|
||||
if [ "$git_user_id" = "" ]; then
|
||||
git_user_id="GIT_USER_ID"
|
||||
echo "[INFO] No command line input provided. Set \$git_user_id to $git_user_id"
|
||||
fi
|
||||
|
||||
if [ "$git_repo_id" = "" ]; then
|
||||
git_repo_id="GIT_REPO_ID"
|
||||
echo "[INFO] No command line input provided. Set \$git_repo_id to $git_repo_id"
|
||||
fi
|
||||
|
||||
if [ "$release_note" = "" ]; then
|
||||
release_note="Minor update"
|
||||
echo "[INFO] No command line input provided. Set \$release_note to $release_note"
|
||||
fi
|
||||
|
||||
# Initialize the local directory as a Git repository
|
||||
git init
|
||||
|
||||
# Adds the files in the local repository and stages them for commit.
|
||||
git add .
|
||||
|
||||
# Commits the tracked changes and prepares them to be pushed to a remote repository.
|
||||
git commit -m "$release_note"
|
||||
|
||||
# Sets the new remote
|
||||
git_remote=$(git remote)
|
||||
if [ "$git_remote" = "" ]; then # git remote not defined
|
||||
|
||||
if [ "$GIT_TOKEN" = "" ]; then
|
||||
echo "[INFO] \$GIT_TOKEN (environment variable) is not set. Using the git credential in your environment."
|
||||
git remote add origin https://${git_host}/${git_user_id}/${git_repo_id}.git
|
||||
else
|
||||
git remote add origin https://${git_user_id}:"${GIT_TOKEN}"@${git_host}/${git_user_id}/${git_repo_id}.git
|
||||
fi
|
||||
|
||||
fi
|
||||
|
||||
git pull origin master
|
||||
|
||||
# Pushes (Forces) the changes in the local repository up to the remote repository
|
||||
echo "Git pushing to https://${git_host}/${git_user_id}/${git_repo_id}.git"
|
||||
git push origin master 2>&1 | grep -v 'To https'
|
||||
18
cli/src/api/open-api/index.ts
generated
Normal file
18
cli/src/api/open-api/index.ts
generated
Normal file
@@ -0,0 +1,18 @@
|
||||
/* tslint:disable */
|
||||
/* eslint-disable */
|
||||
/**
|
||||
* Immich
|
||||
* Immich API
|
||||
*
|
||||
* The version of the OpenAPI document: 1.72.2
|
||||
*
|
||||
*
|
||||
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
|
||||
* https://openapi-generator.tech
|
||||
* Do not edit the class manually.
|
||||
*/
|
||||
|
||||
|
||||
export * from "./api";
|
||||
export * from "./configuration";
|
||||
|
||||
38
cli/src/cli/base-command.ts
Normal file
38
cli/src/cli/base-command.ts
Normal file
@@ -0,0 +1,38 @@
|
||||
import { ImmichApi } from '../api/client';
|
||||
import path from 'node:path';
|
||||
import { SessionService } from '../services/session.service';
|
||||
import { LoginError } from '../cores/errors/login-error';
|
||||
import { exit } from 'node:process';
|
||||
import os from 'os';
|
||||
import { ServerVersionReponseDto, UserResponseDto } from 'src/api/open-api';
|
||||
|
||||
export abstract class BaseCommand {
|
||||
protected sessionService!: SessionService;
|
||||
protected immichApi!: ImmichApi;
|
||||
protected deviceId!: string;
|
||||
protected user!: UserResponseDto;
|
||||
protected serverVersion!: ServerVersionReponseDto;
|
||||
|
||||
protected configDir;
|
||||
protected authPath;
|
||||
|
||||
constructor() {
|
||||
const userHomeDir = os.homedir();
|
||||
this.configDir = path.join(userHomeDir, '.config/immich/');
|
||||
this.sessionService = new SessionService(this.configDir);
|
||||
this.authPath = path.join(this.configDir, 'auth.yml');
|
||||
}
|
||||
|
||||
public async connect(): Promise<void> {
|
||||
try {
|
||||
this.immichApi = await this.sessionService.connect();
|
||||
} catch (error) {
|
||||
if (error instanceof LoginError) {
|
||||
console.log(error.message);
|
||||
exit(1);
|
||||
} else {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
9
cli/src/commands/login/key.ts
Normal file
9
cli/src/commands/login/key.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
import { BaseCommand } from '../../cli/base-command';
|
||||
|
||||
export default class LoginKey extends BaseCommand {
|
||||
public async run(instanceUrl: string, apiKey: string): Promise<void> {
|
||||
console.log('Executing API key auth flow...');
|
||||
|
||||
await this.sessionService.keyLogin(instanceUrl, apiKey);
|
||||
}
|
||||
}
|
||||
13
cli/src/commands/logout.ts
Normal file
13
cli/src/commands/logout.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import { BaseCommand } from '../cli/base-command';
|
||||
|
||||
export default class Logout extends BaseCommand {
|
||||
public static readonly description = 'Logout and remove persisted credentials';
|
||||
|
||||
public async run(): Promise<void> {
|
||||
console.log('Executing logout flow...');
|
||||
|
||||
await this.sessionService.logout();
|
||||
|
||||
console.log('Successfully logged out');
|
||||
}
|
||||
}
|
||||
15
cli/src/commands/server-info.ts
Normal file
15
cli/src/commands/server-info.ts
Normal file
@@ -0,0 +1,15 @@
|
||||
import { BaseCommand } from '../cli/base-command';
|
||||
|
||||
export default class ServerInfo extends BaseCommand {
|
||||
static description = 'Display server information';
|
||||
static enableJsonFlag = true;
|
||||
|
||||
public async run() {
|
||||
console.log('Getting server information');
|
||||
|
||||
await this.connect();
|
||||
const { data: versionInfo } = await this.immichApi.serverInfoApi.getServerVersion();
|
||||
|
||||
console.log(versionInfo);
|
||||
}
|
||||
}
|
||||
175
cli/src/commands/upload.ts
Normal file
175
cli/src/commands/upload.ts
Normal file
@@ -0,0 +1,175 @@
|
||||
import { BaseCommand } from '../cli/base-command';
|
||||
import { CrawledAsset } from '../cores/models/crawled-asset';
|
||||
import { CrawlService, UploadService } from '../services';
|
||||
import * as si from 'systeminformation';
|
||||
import FormData from 'form-data';
|
||||
import { UploadOptionsDto } from '../cores/dto/upload-options-dto';
|
||||
import { CrawlOptionsDto } from '../cores/dto/crawl-options-dto';
|
||||
|
||||
import cliProgress from 'cli-progress';
|
||||
import byteSize from 'byte-size';
|
||||
|
||||
export default class Upload extends BaseCommand {
|
||||
private crawlService = new CrawlService();
|
||||
private uploadService!: UploadService;
|
||||
deviceId!: string;
|
||||
uploadLength!: number;
|
||||
dryRun = false;
|
||||
|
||||
public async run(paths: string[], options: UploadOptionsDto): Promise<void> {
|
||||
await this.connect();
|
||||
|
||||
const uuid = await si.uuid();
|
||||
this.deviceId = uuid.os || 'CLI';
|
||||
this.uploadService = new UploadService(this.immichApi.apiConfiguration);
|
||||
|
||||
this.dryRun = options.dryRun;
|
||||
|
||||
const crawlOptions = new CrawlOptionsDto();
|
||||
crawlOptions.pathsToCrawl = paths;
|
||||
crawlOptions.recursive = options.recursive;
|
||||
crawlOptions.excludePatterns = options.excludePatterns;
|
||||
|
||||
const crawledFiles: string[] = await this.crawlService.crawl(crawlOptions);
|
||||
|
||||
if (crawledFiles.length === 0) {
|
||||
console.log('No assets found, exiting');
|
||||
return;
|
||||
}
|
||||
|
||||
const assetsToUpload = crawledFiles.map((path) => new CrawledAsset(path));
|
||||
|
||||
const uploadProgress = new cliProgress.SingleBar(
|
||||
{
|
||||
format: '{bar} | {percentage}% | ETA: {eta_formatted} | {value_formatted}/{total_formatted}: {filename}',
|
||||
},
|
||||
cliProgress.Presets.shades_classic,
|
||||
);
|
||||
|
||||
let totalSize = 0;
|
||||
let sizeSoFar = 0;
|
||||
|
||||
let totalSizeUploaded = 0;
|
||||
let uploadCounter = 0;
|
||||
|
||||
for (const asset of assetsToUpload) {
|
||||
// Compute total size first
|
||||
await asset.process();
|
||||
totalSize += asset.fileSize;
|
||||
}
|
||||
|
||||
uploadProgress.start(totalSize, 0);
|
||||
uploadProgress.update({ value_formatted: 0, total_formatted: byteSize(totalSize) });
|
||||
|
||||
for (const asset of assetsToUpload) {
|
||||
uploadProgress.update({
|
||||
filename: asset.path,
|
||||
});
|
||||
|
||||
try {
|
||||
if (options.import) {
|
||||
const importData = {
|
||||
assetPath: asset.path,
|
||||
sidecarPath: asset.sidecarPath,
|
||||
deviceAssetId: asset.deviceAssetId,
|
||||
deviceId: this.deviceId,
|
||||
fileCreatedAt: asset.fileCreatedAt,
|
||||
fileModifiedAt: asset.fileModifiedAt,
|
||||
isFavorite: false,
|
||||
isReadOnly: options.readOnly,
|
||||
};
|
||||
|
||||
if (!this.dryRun) {
|
||||
await this.uploadService.import(importData);
|
||||
}
|
||||
} else {
|
||||
await this.uploadAsset(asset, options.skipHash);
|
||||
}
|
||||
} catch (error) {
|
||||
uploadProgress.stop();
|
||||
throw error;
|
||||
}
|
||||
|
||||
sizeSoFar += asset.fileSize;
|
||||
if (!asset.skipped) {
|
||||
totalSizeUploaded += asset.fileSize;
|
||||
uploadCounter++;
|
||||
}
|
||||
|
||||
uploadProgress.update(sizeSoFar, { value_formatted: byteSize(sizeSoFar) });
|
||||
}
|
||||
|
||||
uploadProgress.stop();
|
||||
|
||||
let messageStart;
|
||||
if (this.dryRun) {
|
||||
messageStart = 'Would have ';
|
||||
} else {
|
||||
messageStart = 'Successfully ';
|
||||
}
|
||||
|
||||
if (options.import) {
|
||||
console.log(`${messageStart} imported ${uploadCounter} assets (${byteSize(totalSizeUploaded)})`);
|
||||
} else {
|
||||
if (uploadCounter === 0) {
|
||||
console.log('All assets were already uploaded, nothing to do.');
|
||||
} else {
|
||||
console.log(`${messageStart} uploaded ${uploadCounter} assets (${byteSize(totalSizeUploaded)})`);
|
||||
}
|
||||
if (options.delete) {
|
||||
if (this.dryRun) {
|
||||
console.log(`Would now have deleted assets, but skipped due to dry run`);
|
||||
} else {
|
||||
console.log('Deleting assets that have been uploaded...');
|
||||
const deletionProgress = new cliProgress.SingleBar(cliProgress.Presets.shades_classic);
|
||||
deletionProgress.start(crawledFiles.length, 0);
|
||||
|
||||
for (const asset of assetsToUpload) {
|
||||
if (!this.dryRun) {
|
||||
await asset.delete();
|
||||
}
|
||||
deletionProgress.increment();
|
||||
}
|
||||
deletionProgress.stop();
|
||||
console.log('Deletion complete');
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private async uploadAsset(asset: CrawledAsset, skipHash = false) {
|
||||
await asset.readData();
|
||||
|
||||
let skipUpload = false;
|
||||
if (!skipHash) {
|
||||
const checksum = await asset.hash();
|
||||
|
||||
const checkResponse = await this.uploadService.checkIfAssetAlreadyExists(asset.path, checksum);
|
||||
skipUpload = checkResponse.data.results[0].action === 'reject';
|
||||
}
|
||||
|
||||
if (skipUpload) {
|
||||
asset.skipped = true;
|
||||
} else {
|
||||
const uploadFormData = new FormData();
|
||||
|
||||
uploadFormData.append('deviceAssetId', asset.deviceAssetId);
|
||||
uploadFormData.append('deviceId', this.deviceId);
|
||||
uploadFormData.append('fileCreatedAt', asset.fileCreatedAt);
|
||||
uploadFormData.append('fileModifiedAt', asset.fileModifiedAt);
|
||||
uploadFormData.append('isFavorite', String(false));
|
||||
uploadFormData.append('assetData', asset.assetData, { filename: asset.path });
|
||||
|
||||
if (asset.sidecarData) {
|
||||
uploadFormData.append('sidecarData', asset.sidecarData, {
|
||||
filename: asset.sidecarPath,
|
||||
contentType: 'application/xml',
|
||||
});
|
||||
}
|
||||
|
||||
if (!this.dryRun) {
|
||||
await this.uploadService.upload(uploadFormData);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
9
cli/src/cores/api-configuration.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
export class ApiConfiguration {
|
||||
public readonly instanceUrl!: string;
|
||||
public readonly apiKey!: string;
|
||||
|
||||
constructor(instanceUrl: string, apiKey: string) {
|
||||
this.instanceUrl = instanceUrl;
|
||||
this.apiKey = apiKey;
|
||||
}
|
||||
}
|
||||
58
cli/src/cores/constants.ts
Normal file
@@ -0,0 +1,58 @@
|
||||
// Check asset-upload.config.spec.ts for complete list
|
||||
// TODO: we should get this list from the server via API in the future
|
||||
|
||||
// Videos
|
||||
const videos = ['mp4', 'webm', 'mov', '3gp', 'avi', 'm2ts', 'mts', 'mpg', 'flv', 'mkv', 'wmv'];
|
||||
|
||||
// Images
|
||||
const heic = ['heic', 'heif'];
|
||||
const jpeg = ['jpg', 'jpeg'];
|
||||
const png = ['png'];
|
||||
const gif = ['gif'];
|
||||
const tiff = ['tif', 'tiff'];
|
||||
const webp = ['webp'];
|
||||
const dng = ['dng'];
|
||||
const other = [
|
||||
'3fr',
|
||||
'ari',
|
||||
'arw',
|
||||
'avif',
|
||||
'cap',
|
||||
'cin',
|
||||
'cr2',
|
||||
'cr3',
|
||||
'crw',
|
||||
'dcr',
|
||||
'nef',
|
||||
'erf',
|
||||
'fff',
|
||||
'iiq',
|
||||
'jxl',
|
||||
'k25',
|
||||
'kdc',
|
||||
'mrw',
|
||||
'orf',
|
||||
'ori',
|
||||
'pef',
|
||||
'raf',
|
||||
'raw',
|
||||
'rwl',
|
||||
'sr2',
|
||||
'srf',
|
||||
'srw',
|
||||
'orf',
|
||||
'ori',
|
||||
'x3f',
|
||||
];
|
||||
|
||||
export const ACCEPTED_FILE_EXTENSIONS = [
|
||||
...videos,
|
||||
...jpeg,
|
||||
...png,
|
||||
...heic,
|
||||
...gif,
|
||||
...tiff,
|
||||
...webp,
|
||||
...dng,
|
||||
...other,
|
||||
];
|
||||
6
cli/src/cores/dto/crawl-options-dto.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
export class CrawlOptionsDto {
|
||||
pathsToCrawl!: string[];
|
||||
recursive = false;
|
||||
includeHidden = false;
|
||||
excludePatterns!: string[];
|
||||
}
|
||||
9
cli/src/cores/dto/upload-options-dto.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
export class UploadOptionsDto {
|
||||
recursive = false;
|
||||
excludePatterns!: string[];
|
||||
dryRun = false;
|
||||
skipHash = false;
|
||||
delete = false;
|
||||
import = false;
|
||||
readOnly = true;
|
||||
}
|
||||
11
cli/src/cores/errors/login-error.ts
Normal file
@@ -0,0 +1,11 @@
|
||||
export class LoginError extends Error {
|
||||
constructor(message: string) {
|
||||
super(message);
|
||||
|
||||
// assign the error class name in your custom error (as a shortcut)
|
||||
this.name = this.constructor.name;
|
||||
|
||||
// capturing the stack trace keeps the reference to your error class
|
||||
Error.captureStackTrace(this, this.constructor);
|
||||
}
|
||||
}
|
||||
2
cli/src/cores/index.ts
Normal file
@@ -0,0 +1,2 @@
|
||||
export * from './constants';
|
||||
export * from './models';
|
||||
58
cli/src/cores/models/crawled-asset.ts
Normal file
@@ -0,0 +1,58 @@
|
||||
import * as fs from 'fs';
|
||||
import { basename } from 'node:path';
|
||||
import crypto from 'crypto';
|
||||
|
||||
export class CrawledAsset {
|
||||
public path: string;
|
||||
|
||||
public assetData?: fs.ReadStream;
|
||||
public deviceAssetId?: string;
|
||||
public fileCreatedAt?: string;
|
||||
public fileModifiedAt?: string;
|
||||
public sidecarData?: Buffer;
|
||||
public sidecarPath?: string;
|
||||
public fileSize!: number;
|
||||
public skipped = false;
|
||||
|
||||
constructor(path: string) {
|
||||
this.path = path;
|
||||
}
|
||||
|
||||
async readData() {
|
||||
this.assetData = fs.createReadStream(this.path);
|
||||
}
|
||||
|
||||
async process() {
|
||||
const stats = await fs.promises.stat(this.path);
|
||||
this.deviceAssetId = `${basename(this.path)}-${stats.size}`.replace(/\s+/g, '');
|
||||
this.fileCreatedAt = stats.ctime.toISOString();
|
||||
this.fileModifiedAt = stats.mtime.toISOString();
|
||||
this.fileSize = stats.size;
|
||||
|
||||
// TODO: doesn't xmp replace the file extension? Will need investigation
|
||||
const sideCarPath = `${this.path}.xmp`;
|
||||
try {
|
||||
fs.accessSync(sideCarPath, fs.constants.R_OK);
|
||||
this.sidecarData = await fs.promises.readFile(sideCarPath);
|
||||
this.sidecarPath = sideCarPath;
|
||||
} catch (error) {}
|
||||
}
|
||||
|
||||
async delete(): Promise<void> {
|
||||
return fs.promises.unlink(this.path);
|
||||
}
|
||||
|
||||
public async hash(): Promise<string> {
|
||||
const sha1 = (filePath: string) => {
|
||||
const hash = crypto.createHash('sha1');
|
||||
return new Promise<string>((resolve, reject) => {
|
||||
const rs = fs.createReadStream(filePath);
|
||||
rs.on('error', reject);
|
||||
rs.on('data', (chunk) => hash.update(chunk));
|
||||
rs.on('end', () => resolve(hash.digest('hex')));
|
||||
});
|
||||
};
|
||||
|
||||
return await sha1(this.path);
|
||||
}
|
||||
}
|
||||
1
cli/src/cores/models/index.ts
Normal file
@@ -0,0 +1 @@
|
||||
export * from './crawled-asset';
|
||||
63
cli/src/index.ts
Normal file
@@ -0,0 +1,63 @@
|
||||
import { program, Option } from 'commander';
|
||||
import Upload from './commands/upload';
|
||||
import ServerInfo from './commands/server-info';
|
||||
import LoginKey from './commands/login/key';
|
||||
|
||||
program.name('immich').description('Immich command line interface');
|
||||
|
||||
program
|
||||
.command('upload')
|
||||
.description('Upload assets')
|
||||
.usage('[options] [paths...]')
|
||||
.addOption(new Option('-r, --recursive', 'Recursive').env('IMMICH_RECURSIVE').default(false))
|
||||
.addOption(new Option('-i, --ignore [paths...]', 'Paths to ignore').env('IMMICH_IGNORE_PATHS'))
|
||||
.addOption(new Option('-h, --skip-hash', "Don't hash files before upload").env('IMMICH_SKIP_HASH').default(false))
|
||||
.addOption(
|
||||
new Option('-n, --dry-run', "Don't perform any actions, just show what will be done")
|
||||
.env('IMMICH_DRY_RUN')
|
||||
.default(false),
|
||||
)
|
||||
.addOption(new Option('--delete', 'Delete local assets after upload').env('IMMICH_DELETE_ASSETS'))
|
||||
.argument('[paths...]', 'One or more paths to assets to be uploaded')
|
||||
.action((paths, options) => {
|
||||
options.excludePatterns = options.ignore;
|
||||
new Upload().run(paths, options);
|
||||
});
|
||||
|
||||
program
|
||||
.command('import')
|
||||
.description('Import existing assets')
|
||||
.usage('[options] [paths...]')
|
||||
.addOption(new Option('-r, --recursive', 'Recursive').env('IMMICH_RECURSIVE').default(false))
|
||||
.addOption(
|
||||
new Option('-n, --dry-run', "Don't perform any actions, just show what will be done")
|
||||
.env('IMMICH_DRY_RUN')
|
||||
.default(false),
|
||||
)
|
||||
.addOption(new Option('-i, --ignore [paths...]', 'Paths to ignore').env('IMMICH_IGNORE_PATHS').default(false))
|
||||
.addOption(new Option('--no-read-only', 'Import files without read-only protection, allowing Immich to manage them'))
|
||||
.argument('[paths...]', 'One or more paths to assets to be imported')
|
||||
.action((paths, options) => {
|
||||
options.import = true;
|
||||
options.excludePatterns = options.ignore;
|
||||
new Upload().run(paths, options);
|
||||
});
|
||||
|
||||
program
|
||||
.command('server-info')
|
||||
.description('Display server information')
|
||||
|
||||
.action(() => {
|
||||
new ServerInfo().run();
|
||||
});
|
||||
|
||||
program
|
||||
.command('login-key')
|
||||
.description('Login using an API key')
|
||||
.argument('[instanceUrl]')
|
||||
.argument('[apiKey]')
|
||||
.action((paths, options) => {
|
||||
new LoginKey().run(paths, options);
|
||||
});
|
||||
|
||||
program.parse(process.argv);
|
||||
235
cli/src/services/crawl.service.spec.ts
Normal file
@@ -0,0 +1,235 @@
|
||||
/* eslint-disable @typescript-eslint/no-var-requires */
|
||||
/* eslint-disable @typescript-eslint/no-unused-vars */
|
||||
import { CrawlService } from './crawl.service';
|
||||
import mockfs from 'mock-fs';
|
||||
import { toIncludeSameMembers } from 'jest-extended';
|
||||
import { CrawlOptionsDto } from '../cores/dto/crawl-options-dto';
|
||||
|
||||
const matchers = require('jest-extended');
|
||||
expect.extend(matchers);
|
||||
|
||||
const crawlService = new CrawlService();
|
||||
|
||||
describe('CrawlService', () => {
|
||||
beforeAll(() => {
|
||||
// Write a dummy output before mock-fs to prevent some annoying errors
|
||||
console.log();
|
||||
});
|
||||
|
||||
it('should crawl a single directory', async () => {
|
||||
mockfs({
|
||||
'/photos/image.jpg': '',
|
||||
});
|
||||
|
||||
const options = new CrawlOptionsDto();
|
||||
options.pathsToCrawl = ['/photos/'];
|
||||
const paths: string[] = await crawlService.crawl(options);
|
||||
expect(paths).toIncludeSameMembers(['/photos/image.jpg']);
|
||||
});
|
||||
|
||||
it('should crawl a single file', async () => {
|
||||
mockfs({
|
||||
'/photos/image.jpg': '',
|
||||
});
|
||||
|
||||
const options = new CrawlOptionsDto();
|
||||
options.pathsToCrawl = ['/photos/image.jpg'];
|
||||
const paths: string[] = await crawlService.crawl(options);
|
||||
expect(paths).toIncludeSameMembers(['/photos/image.jpg']);
|
||||
});
|
||||
|
||||
it('should crawl a file and a directory', async () => {
|
||||
mockfs({
|
||||
'/photos/image.jpg': '',
|
||||
'/images/photo.jpg': '',
|
||||
});
|
||||
|
||||
const options = new CrawlOptionsDto();
|
||||
options.pathsToCrawl = ['/photos/image.jpg', '/images/'];
|
||||
const paths: string[] = await crawlService.crawl(options);
|
||||
expect(paths).toIncludeSameMembers(['/photos/image.jpg', '/images/photo.jpg']);
|
||||
});
|
||||
|
||||
it('should exclude by file extension', async () => {
|
||||
mockfs({
|
||||
'/photos/image.jpg': '',
|
||||
'/photos/image.tif': '',
|
||||
});
|
||||
|
||||
const options = new CrawlOptionsDto();
|
||||
options.pathsToCrawl = ['/photos/'];
|
||||
options.excludePatterns = ['**/*.tif'];
|
||||
const paths: string[] = await crawlService.crawl(options);
|
||||
expect(paths).toIncludeSameMembers(['/photos/image.jpg']);
|
||||
});
|
||||
|
||||
it('should exclude by file extension without case sensitivity', async () => {
|
||||
mockfs({
|
||||
'/photos/image.jpg': '',
|
||||
'/photos/image.tif': '',
|
||||
});
|
||||
|
||||
const options = new CrawlOptionsDto();
|
||||
options.pathsToCrawl = ['/photos/'];
|
||||
options.excludePatterns = ['**/*.TIF'];
|
||||
const paths: string[] = await crawlService.crawl(options);
|
||||
expect(paths).toIncludeSameMembers(['/photos/image.jpg']);
|
||||
});
|
||||
|
||||
it('should exclude by folder', async () => {
|
||||
mockfs({
|
||||
'/photos/image.jpg': '',
|
||||
'/photos/raw/image.jpg': '',
|
||||
'/photos/raw2/image.jpg': '',
|
||||
'/photos/folder/raw/image.jpg': '',
|
||||
'/photos/crawl/image.jpg': '',
|
||||
});
|
||||
|
||||
const options = new CrawlOptionsDto();
|
||||
options.pathsToCrawl = ['/photos/'];
|
||||
options.excludePatterns = ['**/raw/**'];
|
||||
options.recursive = true;
|
||||
const paths: string[] = await crawlService.crawl(options);
|
||||
expect(paths).toIncludeSameMembers(['/photos/image.jpg', '/photos/raw2/image.jpg', '/photos/crawl/image.jpg']);
|
||||
});
|
||||
|
||||
it('should crawl multiple paths', async () => {
|
||||
mockfs({
|
||||
'/photos/image1.jpg': '',
|
||||
'/images/image2.jpg': '',
|
||||
'/albums/image3.jpg': '',
|
||||
});
|
||||
const options = new CrawlOptionsDto();
|
||||
options.pathsToCrawl = ['/photos/', '/images/', '/albums/'];
|
||||
options.recursive = false;
|
||||
const paths: string[] = await crawlService.crawl(options);
|
||||
expect(paths).toIncludeSameMembers(['/photos/image1.jpg', '/images/image2.jpg', '/albums/image3.jpg']);
|
||||
});
|
||||
|
||||
it('should crawl a single path without trailing slash', async () => {
|
||||
mockfs({
|
||||
'/photos/image.jpg': '',
|
||||
});
|
||||
const options = new CrawlOptionsDto();
|
||||
options.pathsToCrawl = ['/photos'];
|
||||
const paths: string[] = await crawlService.crawl(options);
|
||||
expect(paths).toIncludeSameMembers(['/photos/image.jpg']);
|
||||
});
|
||||
|
||||
it('should crawl a single path without recursion', async () => {
|
||||
mockfs({
|
||||
'/photos/image.jpg': '',
|
||||
'/photos/subfolder/image1.jpg': '',
|
||||
'/photos/subfolder/image2.jpg': '',
|
||||
'/image1.jpg': '',
|
||||
});
|
||||
|
||||
const options = new CrawlOptionsDto();
|
||||
options.pathsToCrawl = ['/photos/'];
|
||||
const paths: string[] = await crawlService.crawl(options);
|
||||
expect(paths).toIncludeSameMembers(['/photos/image.jpg']);
|
||||
});
|
||||
|
||||
it('should crawl a single path with recursion', async () => {
|
||||
mockfs({
|
||||
'/photos/image.jpg': '',
|
||||
'/photos/subfolder/image1.jpg': '',
|
||||
'/photos/subfolder/image2.jpg': '',
|
||||
'/image1.jpg': '',
|
||||
});
|
||||
const options = new CrawlOptionsDto();
|
||||
options.pathsToCrawl = ['/photos/'];
|
||||
options.recursive = true;
|
||||
const paths: string[] = await crawlService.crawl(options);
|
||||
expect(paths).toIncludeSameMembers([
|
||||
'/photos/image.jpg',
|
||||
'/photos/subfolder/image1.jpg',
|
||||
'/photos/subfolder/image2.jpg',
|
||||
]);
|
||||
});
|
||||
|
||||
it('should filter file extensions', async () => {
|
||||
mockfs({
|
||||
'/photos/image.jpg': '',
|
||||
'/photos/image.txt': '',
|
||||
'/photos/1': '',
|
||||
});
|
||||
const options = new CrawlOptionsDto();
|
||||
options.pathsToCrawl = ['/photos/'];
|
||||
const paths: string[] = await crawlService.crawl(options);
|
||||
expect(paths).toIncludeSameMembers(['/photos/image.jpg']);
|
||||
});
|
||||
|
||||
it('should include photo and video extensions', async () => {
|
||||
mockfs({
|
||||
'/photos/image.jpg': '',
|
||||
'/photos/image.jpeg': '',
|
||||
'/photos/image.heic': '',
|
||||
'/photos/image.heif': '',
|
||||
'/photos/image.png': '',
|
||||
'/photos/image.gif': '',
|
||||
'/photos/image.tif': '',
|
||||
'/photos/image.tiff': '',
|
||||
'/photos/image.webp': '',
|
||||
'/photos/image.dng': '',
|
||||
'/photos/image.nef': '',
|
||||
'/videos/video.mp4': '',
|
||||
'/videos/video.mov': '',
|
||||
'/videos/video.webm': '',
|
||||
});
|
||||
|
||||
const options = new CrawlOptionsDto();
|
||||
options.pathsToCrawl = ['/photos/', '/videos/'];
|
||||
const paths: string[] = await crawlService.crawl(options);
|
||||
|
||||
expect(paths).toIncludeSameMembers([
|
||||
'/photos/image.jpg',
|
||||
'/photos/image.jpeg',
|
||||
'/photos/image.heic',
|
||||
'/photos/image.heif',
|
||||
'/photos/image.png',
|
||||
'/photos/image.gif',
|
||||
'/photos/image.tif',
|
||||
'/photos/image.tiff',
|
||||
'/photos/image.webp',
|
||||
'/photos/image.dng',
|
||||
'/photos/image.nef',
|
||||
'/videos/video.mp4',
|
||||
'/videos/video.mov',
|
||||
'/videos/video.webm',
|
||||
]);
|
||||
});
|
||||
|
||||
it('should check file extensions without case sensitivity', async () => {
|
||||
mockfs({
|
||||
'/photos/image.jpg': '',
|
||||
'/photos/image.Jpg': '',
|
||||
'/photos/image.jpG': '',
|
||||
'/photos/image.JPG': '',
|
||||
'/photos/image.jpEg': '',
|
||||
'/photos/image.TIFF': '',
|
||||
'/photos/image.tif': '',
|
||||
'/photos/image.dng': '',
|
||||
'/photos/image.NEF': '',
|
||||
});
|
||||
|
||||
const options = new CrawlOptionsDto();
|
||||
options.pathsToCrawl = ['/photos/'];
|
||||
const paths: string[] = await crawlService.crawl(options);
|
||||
expect(paths).toIncludeSameMembers([
|
||||
'/photos/image.jpg',
|
||||
'/photos/image.Jpg',
|
||||
'/photos/image.jpG',
|
||||
'/photos/image.JPG',
|
||||
'/photos/image.jpEg',
|
||||
'/photos/image.TIFF',
|
||||
'/photos/image.tif',
|
||||
'/photos/image.dng',
|
||||
'/photos/image.NEF',
|
||||
]);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
mockfs.restore();
|
||||
});
|
||||
});
|
||||
47
cli/src/services/crawl.service.ts
Normal file
@@ -0,0 +1,47 @@
|
||||
import { CrawlOptionsDto } from 'src/cores/dto/crawl-options-dto';
|
||||
import { ACCEPTED_FILE_EXTENSIONS } from '../cores';
|
||||
import { glob } from 'glob';
|
||||
import * as fs from 'fs';
|
||||
|
||||
export class CrawlService {
|
||||
public async crawl(crawlOptions: CrawlOptionsDto): Promise<string[]> {
|
||||
const pathsToCrawl: string[] = crawlOptions.pathsToCrawl;
|
||||
|
||||
const directories: string[] = [];
|
||||
const crawledFiles: string[] = [];
|
||||
|
||||
for await (const currentPath of pathsToCrawl) {
|
||||
const stats = await fs.promises.stat(currentPath);
|
||||
if (stats.isFile() || stats.isSymbolicLink()) {
|
||||
crawledFiles.push(currentPath);
|
||||
} else {
|
||||
directories.push(currentPath);
|
||||
}
|
||||
}
|
||||
|
||||
let searchPattern: string;
|
||||
if (directories.length === 1) {
|
||||
searchPattern = directories[0];
|
||||
} else if (directories.length === 0) {
|
||||
return crawledFiles;
|
||||
} else {
|
||||
searchPattern = '{' + directories.join(',') + '}';
|
||||
}
|
||||
|
||||
if (crawlOptions.recursive) {
|
||||
searchPattern = searchPattern + '/**/';
|
||||
}
|
||||
|
||||
searchPattern = `${searchPattern}/*.{${ACCEPTED_FILE_EXTENSIONS.join(',')}}`;
|
||||
|
||||
const globbedFiles = await glob(searchPattern, {
|
||||
nocase: true,
|
||||
nodir: true,
|
||||
ignore: crawlOptions.excludePatterns,
|
||||
});
|
||||
|
||||
const returnedFiles = crawledFiles.concat(globbedFiles);
|
||||
returnedFiles.sort();
|
||||
return returnedFiles;
|
||||
}
|
||||
}
|
||||
2
cli/src/services/index.ts
Normal file
@@ -0,0 +1,2 @@
|
||||
export * from './upload.service';
|
||||
export * from './crawl.service';
|
||||
95
cli/src/services/session.service.spec.ts
Normal file
@@ -0,0 +1,95 @@
|
||||
import { SessionService } from './session.service';
|
||||
import mockfs from 'mock-fs';
|
||||
import fs from 'node:fs';
|
||||
import yaml from 'yaml';
|
||||
import { LoginError } from '../cores/errors/login-error';
|
||||
|
||||
const mockPingServer = jest.fn(() => Promise.resolve({ data: { res: 'pong' } }));
|
||||
const mockUserInfo = jest.fn(() => Promise.resolve({ data: { email: 'admin@example.com' } }));
|
||||
|
||||
jest.mock('../api/open-api', () => {
|
||||
return {
|
||||
__esModule: true,
|
||||
...jest.requireActual('../api/open-api'),
|
||||
UserApi: jest.fn().mockImplementation(() => {
|
||||
return { getMyUserInfo: mockUserInfo };
|
||||
}),
|
||||
ServerInfoApi: jest.fn().mockImplementation(() => {
|
||||
return { pingServer: mockPingServer };
|
||||
}),
|
||||
};
|
||||
});
|
||||
|
||||
describe('SessionService', () => {
|
||||
let sessionService: SessionService;
|
||||
beforeAll(() => {
|
||||
// Write a dummy output before mock-fs to prevent some annoying errors
|
||||
console.log();
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
const configDir = '/config';
|
||||
sessionService = new SessionService(configDir);
|
||||
});
|
||||
|
||||
it('should connect to immich', async () => {
|
||||
mockfs({
|
||||
'/config/auth.yml': 'apiKey: pNussssKSYo5WasdgalvKJ1n9kdvaasdfbluPg\ninstanceUrl: https://test/api',
|
||||
});
|
||||
await sessionService.connect();
|
||||
expect(mockPingServer).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should error if no auth file exists', async () => {
|
||||
mockfs();
|
||||
await sessionService.connect().catch((error) => {
|
||||
expect(error.message).toEqual('No auth file exist. Please login first');
|
||||
});
|
||||
});
|
||||
|
||||
it('should error if auth file is missing instance URl', async () => {
|
||||
mockfs({
|
||||
'/config/auth.yml': 'foo: pNussssKSYo5WasdgalvKJ1n9kdvaasdfbluPg\napiKey: https://test/api',
|
||||
});
|
||||
await sessionService.connect().catch((error) => {
|
||||
expect(error).toBeInstanceOf(LoginError);
|
||||
expect(error.message).toEqual('Instance URL missing in auth config file /config/auth.yml');
|
||||
});
|
||||
});
|
||||
|
||||
it('should error if auth file is missing api key', async () => {
|
||||
mockfs({
|
||||
'/config/auth.yml': 'instanceUrl: pNussssKSYo5WasdgalvKJ1n9kdvaasdfbluPg\nbar: https://test/api',
|
||||
});
|
||||
await sessionService.connect().catch((error) => {
|
||||
expect(error).toBeInstanceOf(LoginError);
|
||||
expect(error.message).toEqual('API key missing in auth config file /config/auth.yml');
|
||||
});
|
||||
});
|
||||
|
||||
it('should create auth file when logged in', async () => {
|
||||
mockfs();
|
||||
|
||||
await sessionService.keyLogin('https://test/api', 'pNussssKSYo5WasdgalvKJ1n9kdvaasdfbluPg');
|
||||
|
||||
const data: string = await fs.promises.readFile('/config/auth.yml', 'utf8');
|
||||
const authConfig = yaml.parse(data);
|
||||
expect(authConfig.instanceUrl).toBe('https://test/api');
|
||||
expect(authConfig.apiKey).toBe('pNussssKSYo5WasdgalvKJ1n9kdvaasdfbluPg');
|
||||
});
|
||||
|
||||
it('should delete auth file when logging out', async () => {
|
||||
mockfs({
|
||||
'/config/auth.yml': 'apiKey: pNussssKSYo5WasdgalvKJ1n9kdvaasdfbluPg\ninstanceUrl: https://test/api',
|
||||
});
|
||||
await sessionService.logout();
|
||||
|
||||
await fs.promises.access('/auth.yml', fs.constants.F_OK).catch((error) => {
|
||||
expect(error.message).toContain('ENOENT');
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
mockfs.restore();
|
||||
});
|
||||
});
|
||||
81
cli/src/services/session.service.ts
Normal file
@@ -0,0 +1,81 @@
|
||||
import fs from 'node:fs';
|
||||
import yaml from 'yaml';
|
||||
import path from 'node:path';
|
||||
import { ImmichApi } from '../api/client';
|
||||
import { LoginError } from '../cores/errors/login-error';
|
||||
|
||||
export class SessionService {
|
||||
readonly configDir: string;
|
||||
readonly authPath!: string;
|
||||
private api!: ImmichApi;
|
||||
|
||||
constructor(configDir: string) {
|
||||
this.configDir = configDir;
|
||||
this.authPath = path.join(this.configDir, 'auth.yml');
|
||||
}
|
||||
|
||||
public async connect(): Promise<ImmichApi> {
|
||||
await fs.promises.access(this.authPath, fs.constants.F_OK).catch((error) => {
|
||||
if (error.code === 'ENOENT') {
|
||||
throw new LoginError('No auth file exist. Please login first');
|
||||
}
|
||||
});
|
||||
|
||||
const data: string = await fs.promises.readFile(this.authPath, 'utf8');
|
||||
const parsedConfig = yaml.parse(data);
|
||||
const instanceUrl: string = parsedConfig.instanceUrl;
|
||||
const apiKey: string = parsedConfig.apiKey;
|
||||
|
||||
if (!instanceUrl) {
|
||||
throw new LoginError('Instance URL missing in auth config file ' + this.authPath);
|
||||
}
|
||||
|
||||
if (!apiKey) {
|
||||
throw new LoginError('API key missing in auth config file ' + this.authPath);
|
||||
}
|
||||
|
||||
this.api = new ImmichApi(instanceUrl, apiKey);
|
||||
|
||||
await this.ping();
|
||||
|
||||
return this.api;
|
||||
}
|
||||
|
||||
public async keyLogin(instanceUrl: string, apiKey: string): Promise<ImmichApi> {
|
||||
this.api = new ImmichApi(instanceUrl, apiKey);
|
||||
|
||||
// Check if server and api key are valid
|
||||
const { data: userInfo } = await this.api.userApi.getMyUserInfo().catch((error) => {
|
||||
throw new LoginError(`Failed to connect to the server: ${error.message}`);
|
||||
});
|
||||
|
||||
console.log(`Logged in as ${userInfo.email}`);
|
||||
|
||||
if (!fs.existsSync(this.configDir)) {
|
||||
// Create config folder if it doesn't exist
|
||||
fs.mkdirSync(this.configDir, { recursive: true });
|
||||
}
|
||||
|
||||
fs.writeFileSync(this.authPath, yaml.stringify({ instanceUrl, apiKey }));
|
||||
|
||||
console.log('Wrote auth info to ' + this.authPath);
|
||||
return this.api;
|
||||
}
|
||||
|
||||
public async logout(): Promise<void> {
|
||||
if (fs.existsSync(this.authPath)) {
|
||||
fs.unlinkSync(this.authPath);
|
||||
console.log('Removed auth file ' + this.authPath);
|
||||
}
|
||||
}
|
||||
|
||||
private async ping(): Promise<void> {
|
||||
const { data: pingResponse } = await this.api.serverInfoApi.pingServer().catch((error) => {
|
||||
throw new Error(`Failed to connect to the server: ${error.message}`);
|
||||
});
|
||||
|
||||
if (pingResponse.res !== 'pong') {
|
||||
throw new Error('Unexpected ping reply');
|
||||
}
|
||||
}
|
||||
}
|
||||
35
cli/src/services/upload.service.spec.ts
Normal file
@@ -0,0 +1,35 @@
|
||||
import { UploadService } from './upload.service';
|
||||
import mockfs from 'mock-fs';
|
||||
import axios from 'axios';
|
||||
import mockAxios from 'jest-mock-axios';
|
||||
import FormData from 'form-data';
|
||||
import { ApiConfiguration } from '../cores/api-configuration';
|
||||
|
||||
describe('UploadService', () => {
|
||||
let uploadService: UploadService;
|
||||
|
||||
beforeAll(() => {
|
||||
// Write a dummy output before mock-fs to prevent some annoying errors
|
||||
console.log();
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
const apiConfiguration = new ApiConfiguration('https://example.com/api', 'key');
|
||||
|
||||
uploadService = new UploadService(apiConfiguration);
|
||||
});
|
||||
|
||||
it('should upload a single file', async () => {
|
||||
const data = new FormData();
|
||||
|
||||
uploadService.upload(data);
|
||||
|
||||
mockAxios.mockResponse();
|
||||
expect(axios).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
mockfs.restore();
|
||||
mockAxios.reset();
|
||||
});
|
||||
});
|
||||
65
cli/src/services/upload.service.ts
Normal file
@@ -0,0 +1,65 @@
|
||||
import axios, { AxiosRequestConfig } from 'axios';
|
||||
import FormData from 'form-data';
|
||||
import { ApiConfiguration } from '../cores/api-configuration';
|
||||
|
||||
export class UploadService {
|
||||
private readonly uploadConfig: AxiosRequestConfig<any>;
|
||||
private readonly checkAssetExistenceConfig: AxiosRequestConfig<any>;
|
||||
private readonly importConfig: AxiosRequestConfig<any>;
|
||||
|
||||
constructor(apiConfiguration: ApiConfiguration) {
|
||||
this.uploadConfig = {
|
||||
method: 'post',
|
||||
maxRedirects: 0,
|
||||
url: `${apiConfiguration.instanceUrl}/asset/upload`,
|
||||
headers: {
|
||||
'x-api-key': apiConfiguration.apiKey,
|
||||
},
|
||||
maxContentLength: Number.POSITIVE_INFINITY,
|
||||
maxBodyLength: Number.POSITIVE_INFINITY,
|
||||
};
|
||||
|
||||
this.importConfig = {
|
||||
method: 'post',
|
||||
maxRedirects: 0,
|
||||
url: `${apiConfiguration.instanceUrl}/asset/import`,
|
||||
headers: {
|
||||
'x-api-key': apiConfiguration.apiKey,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
maxContentLength: Number.POSITIVE_INFINITY,
|
||||
maxBodyLength: Number.POSITIVE_INFINITY,
|
||||
};
|
||||
|
||||
this.checkAssetExistenceConfig = {
|
||||
method: 'post',
|
||||
maxRedirects: 0,
|
||||
url: `${apiConfiguration.instanceUrl}/asset/bulk-upload-check`,
|
||||
headers: {
|
||||
'x-api-key': apiConfiguration.apiKey,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
public checkIfAssetAlreadyExists(path: string, checksum: string): Promise<any> {
|
||||
this.checkAssetExistenceConfig.data = JSON.stringify({ assets: [{ id: path, checksum: checksum }] });
|
||||
|
||||
// TODO: retry on 500 errors?
|
||||
return axios(this.checkAssetExistenceConfig);
|
||||
}
|
||||
|
||||
public upload(data: FormData): Promise<any> {
|
||||
this.uploadConfig.data = data;
|
||||
|
||||
// TODO: retry on 500 errors?
|
||||
return axios(this.uploadConfig);
|
||||
}
|
||||
|
||||
public import(data: any): Promise<any> {
|
||||
this.importConfig.data = data;
|
||||
|
||||
// TODO: retry on 500 errors?
|
||||
return axios(this.importConfig);
|
||||
}
|
||||
}
|
||||
3
cli/testSetup.js
Normal file
@@ -0,0 +1,3 @@
|
||||
// add all jest-extended matchers
|
||||
import * as matchers from 'jest-extended';
|
||||
expect.extend(matchers);
|
||||
4
cli/tsconfig.build.json
Normal file
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"extends": "./tsconfig.json",
|
||||
"exclude": ["dist", "node_modules", "upload", "test", "**/*spec.ts"]
|
||||
}
|
||||
25
cli/tsconfig.json
Normal file
@@ -0,0 +1,25 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"module": "commonjs",
|
||||
"strict": true,
|
||||
"declaration": true,
|
||||
"removeComments": true,
|
||||
"emitDecoratorMetadata": true,
|
||||
"experimentalDecorators": true,
|
||||
"allowSyntheticDefaultImports": true,
|
||||
"resolveJsonModule": true,
|
||||
"target": "es2017",
|
||||
"moduleResolution": "node16",
|
||||
"sourceMap": true,
|
||||
"outDir": "./dist",
|
||||
"incremental": true,
|
||||
"skipLibCheck": true,
|
||||
"esModuleInterop": true,
|
||||
"baseUrl": "./",
|
||||
"paths": {
|
||||
"@test": ["test"],
|
||||
"@test/*": ["test/*"]
|
||||
}
|
||||
},
|
||||
"exclude": ["dist", "node_modules", "upload"]
|
||||
}
|
||||
@@ -31,7 +31,6 @@ services:
|
||||
build:
|
||||
context: ../machine-learning
|
||||
dockerfile: Dockerfile
|
||||
command: python main.py
|
||||
ports:
|
||||
- 3003:3003
|
||||
volumes:
|
||||
@@ -43,11 +42,14 @@ services:
|
||||
- NODE_ENV=development
|
||||
depends_on:
|
||||
- database
|
||||
restart: always
|
||||
restart: unless-stopped
|
||||
|
||||
immich-microservices:
|
||||
container_name: immich_microservices
|
||||
image: immich-microservices:latest
|
||||
# extends:
|
||||
# file: hwaccel.yml
|
||||
# service: hwaccel
|
||||
build:
|
||||
context: ../server
|
||||
dockerfile: Dockerfile
|
||||
@@ -88,7 +90,7 @@ services:
|
||||
volumes:
|
||||
- ../web:/usr/src/app
|
||||
- /usr/src/app/node_modules
|
||||
restart: always
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
- immich-server
|
||||
|
||||
@@ -116,7 +118,6 @@ services:
|
||||
POSTGRES_PASSWORD: ${DB_PASSWORD}
|
||||
POSTGRES_USER: ${DB_USERNAME}
|
||||
POSTGRES_DB: ${DB_DATABASE_NAME}
|
||||
PG_DATA: /var/lib/postgresql/data
|
||||
volumes:
|
||||
- pgdata:/var/lib/postgresql/data
|
||||
ports:
|
||||
@@ -137,7 +138,7 @@ services:
|
||||
depends_on:
|
||||
- immich-server
|
||||
- immich-web
|
||||
restart: always
|
||||
restart: unless-stopped
|
||||
|
||||
volumes:
|
||||
pgdata:
|
||||
|
||||
116
docker/docker-compose.prod.yml
Normal file
@@ -0,0 +1,116 @@
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
immich-server:
|
||||
container_name: immich_server
|
||||
image: immich-server:latest
|
||||
build:
|
||||
context: ../server
|
||||
dockerfile: Dockerfile
|
||||
command: ["./start-server.sh"]
|
||||
volumes:
|
||||
- ${UPLOAD_LOCATION}:/usr/src/app/upload
|
||||
env_file:
|
||||
- .env
|
||||
depends_on:
|
||||
- redis
|
||||
- database
|
||||
- typesense
|
||||
|
||||
immich-machine-learning:
|
||||
container_name: immich_machine_learning
|
||||
image: immich-machine-learning:latest
|
||||
build:
|
||||
context: ../machine-learning
|
||||
dockerfile: Dockerfile
|
||||
volumes:
|
||||
- ${UPLOAD_LOCATION}:/usr/src/app/upload
|
||||
- model-cache:/cache
|
||||
env_file:
|
||||
- .env
|
||||
restart: always
|
||||
|
||||
immich-microservices:
|
||||
container_name: immich_microservices
|
||||
image: immich-microservices:latest
|
||||
# extends:
|
||||
# file: hwaccel.yml
|
||||
# service: hwaccel
|
||||
build:
|
||||
context: ../server
|
||||
dockerfile: Dockerfile
|
||||
command: ["./start-microservices.sh"]
|
||||
volumes:
|
||||
- ${UPLOAD_LOCATION}:/usr/src/app/upload
|
||||
env_file:
|
||||
- .env
|
||||
depends_on:
|
||||
- database
|
||||
- immich-server
|
||||
- typesense
|
||||
restart: always
|
||||
|
||||
immich-web:
|
||||
container_name: immich_web
|
||||
image: immich-web:latest
|
||||
build:
|
||||
context: ../web
|
||||
dockerfile: Dockerfile
|
||||
env_file:
|
||||
- .env
|
||||
restart: always
|
||||
depends_on:
|
||||
- immich-server
|
||||
|
||||
typesense:
|
||||
container_name: immich_typesense
|
||||
image: typesense/typesense:0.24.1@sha256:9bcff2b829f12074426ca044b56160ca9d777a0c488303469143dd9f8259d4dd
|
||||
environment:
|
||||
- TYPESENSE_API_KEY=${TYPESENSE_API_KEY}
|
||||
- TYPESENSE_DATA_DIR=/data
|
||||
logging:
|
||||
driver: none
|
||||
volumes:
|
||||
- tsdata:/data
|
||||
restart: always
|
||||
|
||||
redis:
|
||||
container_name: immich_redis
|
||||
image: redis:6.2-alpine@sha256:70a7a5b641117670beae0d80658430853896b5ef269ccf00d1827427e3263fa3
|
||||
restart: always
|
||||
|
||||
database:
|
||||
container_name: immich_postgres
|
||||
image: postgres:14-alpine@sha256:28407a9961e76f2d285dc6991e8e48893503cc3836a4755bbc2d40bcc272a441
|
||||
env_file:
|
||||
- .env
|
||||
environment:
|
||||
POSTGRES_PASSWORD: ${DB_PASSWORD}
|
||||
POSTGRES_USER: ${DB_USERNAME}
|
||||
POSTGRES_DB: ${DB_DATABASE_NAME}
|
||||
volumes:
|
||||
- pgdata:/var/lib/postgresql/data
|
||||
restart: always
|
||||
|
||||
immich-proxy:
|
||||
container_name: immich_proxy
|
||||
image: immich-proxy:latest
|
||||
environment:
|
||||
# Make sure these values get passed through from the env file
|
||||
- IMMICH_SERVER_URL
|
||||
- IMMICH_WEB_URL
|
||||
build:
|
||||
context: ../nginx
|
||||
dockerfile: Dockerfile
|
||||
ports:
|
||||
- 2283:8080
|
||||
logging:
|
||||
driver: none
|
||||
depends_on:
|
||||
- immich-server
|
||||
restart: always
|
||||
|
||||
volumes:
|
||||
pgdata:
|
||||
model-cache:
|
||||
tsdata:
|
||||
@@ -37,7 +37,6 @@ services:
|
||||
POSTGRES_PASSWORD: ${DB_PASSWORD}
|
||||
POSTGRES_USER: ${DB_USERNAME}
|
||||
POSTGRES_DB: ${DB_DATABASE_NAME}
|
||||
PG_DATA: /var/lib/postgresql/data
|
||||
volumes:
|
||||
- /var/lib/postgresql/data
|
||||
networks:
|
||||
|
||||
@@ -18,6 +18,9 @@ services:
|
||||
immich-microservices:
|
||||
container_name: immich_microservices
|
||||
image: ghcr.io/immich-app/immich-server:${IMMICH_VERSION:-release}
|
||||
# extends:
|
||||
# file: hwaccel.yml
|
||||
# service: hwaccel
|
||||
command: [ "start.sh", "microservices" ]
|
||||
volumes:
|
||||
- ${UPLOAD_LOCATION}:/usr/src/app/upload
|
||||
@@ -51,8 +54,6 @@ services:
|
||||
environment:
|
||||
- TYPESENSE_API_KEY=${TYPESENSE_API_KEY}
|
||||
- TYPESENSE_DATA_DIR=/data
|
||||
logging:
|
||||
driver: none
|
||||
volumes:
|
||||
- tsdata:/data
|
||||
restart: always
|
||||
@@ -71,7 +72,6 @@ services:
|
||||
POSTGRES_PASSWORD: ${DB_PASSWORD}
|
||||
POSTGRES_USER: ${DB_USERNAME}
|
||||
POSTGRES_DB: ${DB_DATABASE_NAME}
|
||||
PG_DATA: /var/lib/postgresql/data
|
||||
volumes:
|
||||
- pgdata:/var/lib/postgresql/data
|
||||
restart: always
|
||||
|
||||
23
docker/hwaccel.yml
Normal file
@@ -0,0 +1,23 @@
|
||||
version: "3.8"
|
||||
|
||||
# Hardware acceleration for transcoding - Optional
|
||||
# This is only needed if you want to use hardware acceleration for transcoding.
|
||||
# Depending on your hardware, you should uncomment the relevant lines below.
|
||||
|
||||
services:
|
||||
hwaccel:
|
||||
# devices:
|
||||
# - /dev/dri:/dev/dri # If using Intel QuickSync or VAAPI
|
||||
# volumes:
|
||||
# - /usr/lib/wsl:/usr/lib/wsl # If using VAAPI in WSL2
|
||||
# environment:
|
||||
# - NVIDIA_DRIVER_CAPABILITIES=all # If using NVIDIA GPU
|
||||
# - LD_LIBRARY_PATH=/usr/lib/wsl/lib # If using VAAPI in WSL2
|
||||
# - LIBVA_DRIVER_NAME=d3d12 # If using VAAPI in WSL2
|
||||
# deploy: # Uncomment this section if using NVIDIA GPU
|
||||
# resources:
|
||||
# reservations:
|
||||
# devices:
|
||||
# - driver: nvidia
|
||||
# count: 1
|
||||
# capabilities: [gpu]
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
title: June 2023 update
|
||||
title: Immich Update - June 2023
|
||||
authors: [alextran]
|
||||
tags: [update]
|
||||
---
|
||||
|
||||
BIN
docs/blog/2023/07-29/images/web-shortcuts-panel.png
Normal file
151
docs/blog/2023/07-29/update.mdx
Normal file
@@ -0,0 +1,151 @@
|
||||
---
|
||||
title: Immich Update - July 2023
|
||||
authors: [alextran]
|
||||
tags: [update, v1.64.0-v1.71.0]
|
||||
---
|
||||
|
||||
Hello, Immich fans, another month, another milestone. We hope you are staying cool and safe in this scorching hot summer across the globe.
|
||||
|
||||
Immich recently got some good recognition by reaching the front page of Hacker News, which helped more people learn about the project's existence, and the project will help more and more people find a way to keep their most precious moments private. With this gain in popularity and recognition, we have gotten more new users and more questions from the community than ever.
|
||||
|
||||
I want to express my gratitude to all the contributors and the community members who have been tremendously helpful in answering new users' questions and providing technical support.
|
||||
|
||||
Below are the highlights of new features we added to the application over the past month, along with countless bug fixes and improvements across the board, from developer experience to resource optimization and UI/UX improvements. I hope you find these topics as exciting as I do.
|
||||
|
||||
## Highlights
|
||||
|
||||
- Memories feature.
|
||||
- Facial recognition improvements.
|
||||
- Improvements on multi selection behavior on the web.
|
||||
- Shortcuts for common actions on the web.
|
||||
- Support viewer for 360-panorama photos.
|
||||
|
||||
<!--truncate-->
|
||||
|
||||
---
|
||||
|
||||
### Memories feature
|
||||
|
||||
We've added the Memories feature to the mobile app, so you can reminisce about moments from years past.
|
||||
|
||||
<iframe
|
||||
width="560"
|
||||
height="315"
|
||||
src="https://youtube.com/embed/c7OTl-RqNRE"
|
||||
title="YouTube video player"
|
||||
frameborder="0"
|
||||
allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; web-share"
|
||||
allowfullscreen
|
||||
></iframe>
|
||||
|
||||
### Facial recognition improvements
|
||||
|
||||
Over the past few releases, we have added many UI improvements to the facial recognition feature to help you manage the recognized people better. Some of the highlights:
|
||||
|
||||
#### Choose a new feature photo for a person.
|
||||
|
||||
<iframe
|
||||
width="560"
|
||||
height="315"
|
||||
src="https://youtube.com/embed/PmJp8DmSh1U"
|
||||
title="YouTube video player"
|
||||
frameborder="0"
|
||||
allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; web-share"
|
||||
allowfullscreen
|
||||
></iframe>
|
||||
|
||||
#### Hide and show faces.
|
||||
|
||||
You can now select irrelevant faces to hide them. Hidden faces won't be displayed in search results or in the people section of the info panel.
|
||||
|
||||
#### Merge faces.
|
||||
|
||||
This is useful when you have multiple faces of the same person in your photos, and you want to merge them into one.
|
||||
|
||||
<iframe
|
||||
width="560"
|
||||
height="315"
|
||||
src="https://youtube.com/embed/-Xskhw-vpc4"
|
||||
title="YouTube video player"
|
||||
frameborder="0"
|
||||
allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; web-share"
|
||||
allowfullscreen
|
||||
></iframe>
|
||||
|
||||
We also added a nifty mechanism: when naming a face, similar existing names will prompt you with a merge option for convenience.
|
||||
|
||||
<iframe
|
||||
width="560"
|
||||
height="315"
|
||||
src="https://youtube.com/embed/XzE6wficbl4"
|
||||
title="YouTube video player"
|
||||
frameborder="0"
|
||||
allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; web-share"
|
||||
allowfullscreen
|
||||
></iframe>
|
||||
|
||||
### Improvements on multi selection behavior on the web
|
||||
|
||||
We have added a new multi-selection behavior on the web to help you select multiple items more easily. You can now select a range of photos and videos by holding the `Shift` key.
|
||||
|
||||
<iframe
|
||||
width="560"
|
||||
height="315"
|
||||
src="https://youtube.com/embed/e_SiuHpVnmM"
|
||||
title="YouTube video player"
|
||||
frameborder="0"
|
||||
allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; web-share"
|
||||
allowfullscreen
|
||||
></iframe>
|
||||
|
||||
### Shortcuts for common actions on the web.
|
||||
|
||||
Some of us only navigate the world and the web with a keyboard (looking at you, Vim and Emacs users). Requiring many clicks for repetitive actions would take away that sacred weapon of choice, so we added quick shortcuts for the following actions on the web.
|
||||
|
||||
<img
|
||||
src={require('./images/web-shortcuts-panel.png').default}
|
||||
width="100%"
|
||||
style={{ borderRadius: '25px' }}
|
||||
alt="Dot Env Example"
|
||||
/>
|
||||
|
||||
### Support viewer for 360-panorama photos.
|
||||
|
||||
Photos with the `ProjectionType` EXIF property will now have a special viewer on the web, letting you look around the panorama from every angle.
|
||||
|
||||
Thumbnails of 360-degree panoramas will have a special icon in the top-right corner.
|
||||
|
||||
<img
|
||||
src="https://github.com/immich-app/immich/assets/61410067/728ca1b0-375c-4631-8081-a609843e702f"
|
||||
width="50%"
|
||||
style={{ borderRadius: '25px' }}
|
||||
alt="Dot Env Example"
|
||||
/>
|
||||
|
||||
Panorama in the detail view
|
||||
|
||||
<img
|
||||
src="https://github.com/immich-app/immich/assets/61410067/3c89dac4-395d-45fa-9bc5-98a6248fd476"
|
||||
width="50%"
|
||||
style={{ borderRadius: '25px' }}
|
||||
alt="Dot Env Example"
|
||||
/>
|
||||
|
||||
---
|
||||
|
||||
Thank you, and I am asking for your support for the project. I hope to be a full-time maintainer of Immich one day to dedicate myself to the project as my life's work for the community and my family. You can find the support channels below:
|
||||
|
||||
- Monthly donation via [GitHub Sponsors](https://github.com/sponsors/alextran1502)
|
||||
- One-time donation via [GitHub Sponsors](https://github.com/sponsors/alextran1502?frequency=one-time&sponsor=alextran1502)
|
||||
- [Liberapay](https://liberapay.com/alex.tran1502/)
|
||||
- [buymeacoffee](https://www.buymeacoffee.com/altran1502)
|
||||
- Bitcoin: 1FvEp6P6NM8EZEkpGUFAN2LqJ1gxusNxZX
|
||||
- Give the project a star - the contributors love gazing at the stars and seeing their creations shining in the sky.
|
||||
|
||||
Join our friendly [Discord](https://discord.gg/D8JsnBEuKb) to chat about Immich, tech, or anything at all.
|
||||
|
||||
Cheers!
|
||||
|
||||
Until next time!
|
||||
|
||||
Alex
|
||||
@@ -94,7 +94,7 @@ To remove the **Metadata** you can stop Immich and delete the volume.
|
||||
docker-compose down -v
|
||||
```
|
||||
|
||||
After removing the the containers and volumes, the **Files** can be cleaned up (if necessary) from the `UPLOAD_LOCATION` by simply deleting an unwanted files or folders.
|
||||
After removing the containers and volumes, the **Files** can be cleaned up (if necessary) from the `UPLOAD_LOCATION` by simply deleting any unwanted files or folders.
|
||||
|
||||
### Why does the iOS app show duplicate photos on the timeline while the web doesn't?
|
||||
|
||||
|
||||
@@ -2,6 +2,10 @@
|
||||
|
||||
## Database
|
||||
|
||||
:::caution
|
||||
Immich saves [file paths in the database](https://github.com/immich-app/immich/discussions/3299), it does not scan the library folder to update the database so backups are crucial.
|
||||
:::
|
||||
|
||||
:::info
|
||||
Refer to the official [postgres documentation](https://www.postgresql.org/docs/current/backup.html) for details about backing up and restoring a postgres database.
|
||||
:::
|
||||
|
||||
@@ -12,10 +12,12 @@ The `immich-server` docker image comes preinstalled with an administrative CLI (
|
||||
|
||||
## How to run a command
|
||||
|
||||
To run a command, [connect](/docs/guides/docker-help.md#attach-to-a-container) to the `immich_server` container and then execute the command via `immich <command>`.
|
||||
To run a command, [connect](/docs/guides/docker-help.md#attach-to-a-container) to the `immich_server` container and then execute the command via `immich-admin <command>`.
|
||||
|
||||
## Examples
|
||||
|
||||
Note that the commands below should begin with `immich-admin`.
|
||||
|
||||
Reset Admin Password
|
||||
|
||||

|
||||
|
||||
@@ -4,38 +4,107 @@ sidebar_position: 1
|
||||
|
||||
# Architecture
|
||||
|
||||
Immich uses a traditional client-server design, with a dedicated database for data persistence. The frontend clients communicate with backend services over HTTP using REST APIs.
|
||||
|
||||
## High Level Diagram
|
||||
|
||||

|
||||
|
||||
## Technology
|
||||
The diagram shows clients communicating with the server via REST, as well as the flow of data between the backend services.
|
||||
|
||||
Immich is a full-stack [TypeScript](https://www.typescriptlang.org/) application, with a [Flutter](https://flutter.dev/) mobile app.
|
||||
## Clients
|
||||
|
||||
### Mobile
|
||||
Immich has three main clients:
|
||||
|
||||
- [Flutter](https://flutter.dev/)
|
||||
- [Riverpod](https://riverpod.dev/) for state management.
|
||||
1. Mobile app - Android, iOS
|
||||
2. Web app - Responsive website
|
||||
3. CLI - Command-line utility for bulk upload
|
||||
|
||||
### Web
|
||||
:::info
|
||||
All three clients use [OpenAPI](./open-api.md) to auto-generate REST clients for easy integration. For more information about this process, see [OpenAPI](./open-api.md).
|
||||
:::
|
||||
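As a quick illustration of what using a generated client looks like in practice, here is a minimal sketch based on the new CLI sources in this diff; the `ImmichApi` wrapper and its `userApi`/`serverInfoApi` members come from `cli/src/api/client` and `cli/src/api/open-api`, while the server URL and API key below are placeholders:

```typescript
import { ImmichApi } from './api/client';

// Placeholder credentials - substitute a real server URL and API key.
const api = new ImmichApi('https://demo.immich.app/api', 'my-api-key');

async function main() {
  // Each endpoint is exposed as a typed method on the generated client.
  const { data: user } = await api.userApi.getMyUserInfo();
  const { data: pong } = await api.serverInfoApi.pingServer();
  console.log(`Logged in as ${user.email}; server replied "${pong.res}"`);
}

main();
```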
|
||||
- [SvelteKit](https://kit.svelte.dev/)
|
||||
- [Tailwindcss](https://tailwindcss.com/)
|
||||
### Mobile App
|
||||
|
||||
### Server
|
||||
The mobile app is written in [Flutter](https://flutter.dev/). It uses [Isar Database](https://isar.dev/) for a local database and [Riverpod](https://riverpod.dev/) for state management.
|
||||
|
||||
- [Node.js](https://nodejs.org/)
|
||||
- [Nest.js](https://nestjs.com/)
|
||||
- [TypeORM](https://typeorm.io/) for database management.
|
||||
- [Jest](https://jestjs.io/) for testing.
|
||||
- [Python](https://www.python.org/) for Machine Learning.
|
||||
### Web Client
|
||||
|
||||
### Database
|
||||
The web app is a [TypeScript](https://www.typescriptlang.org/) project that uses [SvelteKit](https://kit.svelte.dev) and [Tailwindcss](https://tailwindcss.com/).
|
||||
|
||||
- [PostgreSQL](https://www.postgresql.org/)
|
||||
- [Redis](https://redis.io/) for job queuing.
|
||||
- [Typesense](https://typesense.org/) for search.
|
||||
### CLI
|
||||
|
||||
### Web Server
|
||||
The CLI is a [TypeScript](https://www.typescriptlang.org/) project that parses command line arguments to programmatically upload/import assets to an Immich server. See [Bulk Upload](/docs/features/bulk-upload.md) for more information about its usage.
|
||||
|
||||
- [NGINX](https://www.nginx.com/) for internal communication between containers and load balancing when scaling.
|
||||
## Server
|
||||
|
||||
The Immich backend is divided into several services, which are run as individual docker containers.
|
||||
|
||||
1. `immich-server` - Handle and respond to REST API requests
|
||||
1. `immich-microservices` - Execute background jobs (thumbnail generation, metadata extraction, transcoding, etc.)
|
||||
1. `immich-machine-learning` - Execute machine learning models
|
||||
1. `postgres` - Persistent data storage
|
||||
1. `redis` - Queue management for `immich-microservices`
|
||||
1. `typesense` - Specialized database for search, specifically with vector comparison features
|
||||
|
||||
### Immich Server
|
||||
|
||||
The Immich Server is a [TypeScript](https://www.typescriptlang.org/) project written for [Node.js](https://nodejs.org/). It uses the [Nest.js](https://nestjs.com) framework, with [TypeORM](https://typeorm.io/) for database management. The server codebase also loosely follows the [Hexagonal Architecture](<https://en.wikipedia.org/wiki/Hexagonal_architecture_(software)>). Specifically, we aim to separate technology specific implementations (`infra/`) from core business logic (`domain/`).
|
||||
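As a rough sketch of that separation (the names below are illustrative, not the exact Immich classes), the domain layer defines a repository contract and the infra layer supplies the TypeORM-backed implementation:

```typescript
// domain/album/album.repository.ts - business logic depends only on this contract
export interface IAlbumRepository {
  getById(id: string): Promise<AlbumEntity | null>;
  save(album: AlbumEntity): Promise<AlbumEntity>;
}

// infra/repositories/album.repository.ts - technology-specific implementation
import { Injectable } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { Repository } from 'typeorm';
import { AlbumEntity } from '../entities/album.entity'; // hypothetical entity

@Injectable()
export class AlbumRepository implements IAlbumRepository {
  constructor(@InjectRepository(AlbumEntity) private repository: Repository<AlbumEntity>) {}

  getById(id: string): Promise<AlbumEntity | null> {
    return this.repository.findOneBy({ id });
  }

  save(album: AlbumEntity): Promise<AlbumEntity> {
    return this.repository.save(album);
  }
}
```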
|
||||
#### REST Endpoints
|
||||
|
||||
The server exposes a set of HTTP endpoints, each with an associated handler (controller). Each controller usually implements the following CRUD operations:
|
||||
|
||||
- `POST` `/<type>` - **Create**
|
||||
- `GET` `/<type>` - **Read** (all)
|
||||
- `GET` `/<type>/:id` - **Read** (by id)
|
||||
- `PUT` `/<type>/:id` - **Update** (by id)
|
||||
- `DELETE` `/<type>/:id` - **Delete** (by id)
|
||||
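A hypothetical controller following this pattern might look like the sketch below; the `album` resource, `AlbumService`, and DTO names are placeholders for illustration rather than the actual Immich implementation:

```typescript
import { Body, Controller, Delete, Get, Param, Post, Put } from '@nestjs/common';

// AlbumService, CreateAlbumDto, and UpdateAlbumDto are assumed to exist elsewhere.
@Controller('album')
export class AlbumController {
  constructor(private readonly service: AlbumService) {}

  // POST /album - Create
  @Post()
  create(@Body() dto: CreateAlbumDto) {
    return this.service.create(dto);
  }

  // GET /album - Read (all)
  @Get()
  getAll() {
    return this.service.getAll();
  }

  // GET /album/:id - Read (by id)
  @Get(':id')
  getById(@Param('id') id: string) {
    return this.service.getById(id);
  }

  // PUT /album/:id - Update (by id)
  @Put(':id')
  update(@Param('id') id: string, @Body() dto: UpdateAlbumDto) {
    return this.service.update(id, dto);
  }

  // DELETE /album/:id - Delete (by id)
  @Delete(':id')
  remove(@Param('id') id: string) {
    return this.service.remove(id);
  }
}
```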
|
||||
#### DTOs
|
||||
|
||||
The server uses [Data Transfer Objects](https://en.wikipedia.org/wiki/Data_transfer_object) (DTOs) as public interfaces for the inputs (query, params, and body) and outputs (response) of each endpoint. DTOs translate to [OpenAPI](./open-api.md) schemas and control the generated code used by each client.
|
||||
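For illustration, a DTO is an ordinary decorated class; the `CreateAlbumDto` below is invented for the example, but it shows how `class-validator` and `@nestjs/swagger` decorators drive both request validation and the generated OpenAPI schema:

```typescript
import { ApiProperty } from '@nestjs/swagger';
import { IsNotEmpty, IsOptional, IsString } from 'class-validator';

export class CreateAlbumDto {
  // Required string field - validated on input and marked required in the schema.
  @ApiProperty()
  @IsString()
  @IsNotEmpty()
  albumName!: string;

  // Optional field - appears as an optional property in the generated clients.
  @ApiProperty({ required: false })
  @IsOptional()
  @IsString()
  description?: string;
}
```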
|
||||
### Microservices
|
||||
|
||||
The Immich Microservices image uses the same `Dockerfile` as the Immich Server, but with a different entrypoint. The Immich Microservices service mainly handles executing jobs, which include the following:
|
||||
|
||||
- Thumbnail Generation
|
||||
- Metadata Extraction
|
||||
- Video Transcoding
|
||||
- Object Tagging
|
||||
- Facial Recognition
|
||||
- Storage Template Migration
|
||||
- Search (Typesense synchronization)
|
||||
- Sidecar (see [XMP Sidecars](/docs/features/xmp-sidecars.md))
|
||||
- Background jobs (file deletion, user deletion)
|
||||
|
||||
:::info
|
||||
This list closely matches what is available on the [Administration > Jobs](/docs/administration/jobs.md) page, which provides some remote queue management capabilities.
|
||||
:::
|
||||
|
||||
### Machine Learning
|
||||
|
||||
The machine learning service is written in [Python](https://www.python.org/) and uses [FastAPI](https://fastapi.tiangolo.com/) for HTTP communication.
|
||||
|
||||
All machine-learning-related operations have been externalized to this service, `immich-machine-learning`. Python is a natural choice for AI and machine learning, and the workload also has some pretty specific hardware requirements. Running it as a separate container makes it possible to run it on a separate machine, or to disable it entirely.
|
||||
|
||||
Machine learning models are also quite _large_, requiring _quite a bit_ of memory. We are always looking for ways to improve and optimize this aspect of this container specifically.
|
||||
|
||||
### Postgres
|
||||
|
||||
Immich persists data in Postgres, which includes information about access and authorization, users, albums, assets, sharing settings, etc.
|
||||
|
||||
:::info
|
||||
See [Database Migrations](./database-migrations.md) for more information about how to modify the database to create an index, modify a table, add a new column, etc.
|
||||
:::
|
||||
|
||||
### Redis
|
||||
|
||||
Immich uses [Redis](https://redis.com/) via [BullMQ](https://docs.bullmq.io/) to manage job queues. Some jobs trigger subsequent jobs. For example, object detection relies on thumbnail generation and automatically runs after a thumbnail is generated.
|
||||
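A minimal sketch of that queue/worker pattern is shown below; the queue name, payload, and Redis address are invented for the example and do not reflect Immich's actual job definitions:

```typescript
import { Queue, Worker } from 'bullmq';

const connection = { host: 'immich_redis', port: 6379 }; // assumed Redis address

// A producer (e.g. the server) enqueues a job after an asset is uploaded...
const thumbnailQueue = new Queue('thumbnail-generation', { connection });
void thumbnailQueue.add('generate', { assetId: 'some-asset-id' });

// ...and a worker (e.g. in immich-microservices) picks it up, and could enqueue
// follow-up jobs such as object detection once the thumbnail exists.
new Worker(
  'thumbnail-generation',
  async (job) => {
    console.log(`Generating thumbnail for asset ${job.data.assetId}`);
  },
  { connection },
);
```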
|
||||
### Typesense
|
||||
|
||||
Immich synchronizes some of the Postgres data into Typesense so that it can execute vector-related queries in order to implement certain features, including facial recognition and CLIP search.
|
||||
|
||||
<!-- - [NGINX](https://www.nginx.com/) for internal communication between containers and load balancing when scaling. -->
|
||||
|
||||
22
docs/docs/developer/directories.md
Normal file
@@ -0,0 +1,22 @@
|
||||
---
|
||||
title: Directories
|
||||
---
|
||||
|
||||
# Repository Folder Structure
|
||||
|
||||
Our [GitHub Repository](https://github.com/immich-app/immich) is a [monorepo](https://en.wikipedia.org/wiki/Monorepo) and includes the following folders:
|
||||
|
||||
| Folder | Description |
|
||||
| :------------------ | :------------------------------------------------------------------- |
|
||||
| `.github/` | Github templates and action workflows |
|
||||
| `.vscode/` | VSCode debug launch profiles |
|
||||
| `cli/` | Source code for the work-in-progress CLI rewrite |
|
||||
| `docker/` | Docker compose resources for dev, test, production |
|
||||
| `design/` | Screenshots and logos for the README |
|
||||
| `docs/` | Source code for the [https://immich.app](https://immich.app) website |
|
||||
| `machine-learning/` | Source code for the `immich-machine-learning` docker image |
|
||||
| `misc/release/`     | Scripts for version bumps and draft releases                          |
|
||||
| `mobile/` | Source code for the mobile app, both Android and iOS |
|
||||
| `nginx/` | Source code for the `immich-proxy` docker image |
|
||||
| `server/` | Source code for the `immich-server` docker image |
|
||||
| `web/` | Source code for the `immich-web` docker image |
|
||||
@@ -1,10 +1,10 @@
|
||||
# Open API
|
||||
# OpenAPI
|
||||
|
||||
Immich uses the [Open API](https://swagger.io/specification/) standard to generate API documentation. To view the published docs see [here](/docs/api).
|
||||
Immich uses the [OpenAPI](https://swagger.io/specification/) standard to generate API documentation. To view the published docs see [here](/docs/api).
|
||||
|
||||
## Generator
|
||||
|
||||
OpenAPI is used to generate the client (Typescript, Dart) SDK. `openapi-generator-cli` can be installed [here](https://openapi-generator.tech/docs/installation/). When you add a new or modify an existing endpoint, you must run the command below to update the client SDK.
|
||||
OpenAPI is used to generate the client (Typescript, Dart) SDK. `openapi-generator-cli` can be installed [here](https://openapi-generator.tech/docs/installation/). The generated SDK is based on the `immich-openapi-specs.json` file, which is autogenerated by the server when running in development mode. The `immich-openapi-specs.json` file can be modified with `@nestjs/swagger` decorators used or referenced by controller endpoints. See the [NestJS OpenAPI docs](https://docs.nestjs.com/openapi/types-and-parameters) for more info. When you add a new endpoint or modify an existing one, you must run the command below to update the client SDK.
|
||||
|
||||
```bash
|
||||
npm run api:generate # Run from the `server/` directory
|
||||
|
||||
@@ -24,9 +24,9 @@ Run all web checks with `npm run check:all`
|
||||
Run all server checks with `npm run check:all`
|
||||
:::
|
||||
|
||||
## Open API
|
||||
## OpenAPI
|
||||
|
||||
The Open API client libraries need to be regenerated whenever there are changes to the `immich-openapi-specs.json` file. See [Open API](/docs/developer/open-api.md) for more details.
|
||||
The OpenAPI client libraries need to be regenerated whenever there are changes to the `immich-openapi-specs.json` file. Note that you should not modify this file directly as it is auto-generated. See [OpenAPI](/docs/developer/open-api.md) for more details.
|
||||
|
||||
## Database Migrations
|
||||
|
||||
|
||||
@@ -56,7 +56,7 @@ The API key can be obtained in the user setting panel on the web interface.
|
||||
|
||||
---
|
||||
|
||||
## Uploading exiting libraries
|
||||
## Uploading existing libraries
|
||||
|
||||
### Run via Docker
|
||||
|
||||
|
||||
60
docs/docs/features/hardware-transcoding.md
Normal file
@@ -0,0 +1,60 @@
|
||||
# Hardware Transcoding [Experimental]
|
||||
|
||||
This feature allows you to use a GPU or Intel Quick Sync to accelerate transcoding and reduce CPU load.
|
||||
Note that hardware transcoding is much less efficient in terms of output file size; for the same quality, transcoded files will generally be larger than with software transcoding.
|
||||
As this is a new feature, it is still experimental and may not work on all systems.
|
||||
|
||||
## Supported APIs
|
||||
|
||||
- NVENC
|
||||
- NVIDIA GPUs
|
||||
- Quick Sync
|
||||
- Intel CPUs
|
||||
- VAAPI
|
||||
- GPUs
|
||||
|
||||
## Limitations
|
||||
|
||||
- The instructions and configurations here are specific to Docker Compose. Other container engines may require different configuration.
|
||||
- Only Linux and Windows (through WSL2) servers are supported.
|
||||
- WSL2 does not support Quick Sync.
|
||||
- Raspberry Pi is currently not supported.
|
||||
- Two-pass mode is only supported for NVENC. Other APIs will ignore this setting.
|
||||
- Only encoding is currently hardware accelerated, so the CPU is still used for software decoding.
|
||||
- This is mainly because the original video may not be hardware-decodable.
|
||||
- Hardware dependent
|
||||
- Codec support varies, but H.264 and HEVC are usually supported.
|
||||
- Notably, NVIDIA and AMD GPUs do not support VP9 encoding.
|
||||
- Newer devices tend to have higher transcoding quality.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
#### NVENC
|
||||
|
||||
- You must have the official NVIDIA driver installed on the server.
|
||||
- On Linux (except for WSL2), you also need to have [NVIDIA Container Runtime][nvcr] installed; a quick sanity check is sketched after this list.
|
||||
|
||||
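A quick way to sanity-check both prerequisites on the host is sketched below. These are standard NVIDIA and Docker commands rather than anything Immich-specific, and the exact output will vary by system.

```bash
# Confirm the NVIDIA driver is installed and the GPU is visible to the host
nvidia-smi

# Confirm Docker lists the nvidia runtime once the NVIDIA Container Runtime is set up
docker info | grep -i runtimes
```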
#### QSV
|
||||
|
||||
- For VP9 to work:
|
||||
- You must have a 9th gen Intel CPU or newer
|
||||
- If you have an 11th gen CPU or older, then you may need to follow [these][jellyfin-lp] instructions as Low-Power mode is required
|
||||
- Additionally, if the server has an 11th gen CPU and is running kernel 5.15 (shipped with Ubuntu 22.04 LTS), you will need to upgrade the kernel (see the [Jellyfin docs][jellyfin-kernel-bug])
|
||||
|
||||
## Setup
|
||||
|
||||
1. If you do not already have it, download the latest [`hwaccel.yml`][hw-file] file and ensure it's in the same folder as the `docker-compose.yml`.
|
||||
2. Uncomment the lines that apply to your system and desired usage.
|
||||
3. In the `docker-compose.yml` under `immich-microservices`, uncomment the lines relating to the `hwaccel.yml` file.
|
||||
4. Redeploy the `immich-microservices` container with these updated settings (see the sketch after this list).
|
||||
5. In the Admin page under `FFmpeg settings`, change the hardware acceleration setting to the appropriate option and save.
|
||||
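As a rough sketch of steps 1 and 4, assuming `docker-compose.yml` and `hwaccel.yml` live in the current directory (the edits in steps 2 and 3 still have to be made by hand):

```bash
# Step 1: fetch the latest hwaccel.yml next to docker-compose.yml
wget https://github.com/immich-app/immich/releases/latest/download/hwaccel.yml

# Steps 2-3: uncomment the relevant lines in hwaccel.yml and docker-compose.yml manually

# Step 4: recreate the microservices container with the updated settings
docker compose up -d immich-microservices
```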
|
||||
## Tips
|
||||
|
||||
- You may want to choose a slower preset than you would for software transcoding in order to maintain quality and encoding efficiency
|
||||
- While you can use VAAPI with NVIDIA GPUs and Intel CPUs, prefer the more specific APIs, since they're more optimized for their respective devices
|
||||
|
||||
[hw-file]: https://github.com/immich-app/immich/releases/latest/download/hwaccel.yml
|
||||
[nvcr]: https://github.com/NVIDIA/nvidia-container-runtime/
|
||||
[jellyfin-lp]: https://jellyfin.org/docs/general/administration/hardware-acceleration/intel/#configure-and-verify-lp-mode-on-linux
|
||||
[jellyfin-kernel-bug]: https://jellyfin.org/docs/general/administration/hardware-acceleration/intel/#known-issues-and-limitations
|
||||
@@ -42,8 +42,8 @@ We will use those values in the steps below.
|
||||
command: [ "start.sh", "immich" ]
|
||||
volumes:
|
||||
- ${UPLOAD_LOCATION}:/usr/src/app/upload
|
||||
+ - /mnt/media/precious-memory:/mnt/media/precious-memory
|
||||
+ - /mnt/media/childhood-memory:/mnt/media/childhood-memory
|
||||
+ - /mnt/media/precious-memory:/mnt/media/precious-memory:ro
|
||||
+ - /mnt/media/childhood-memory:/mnt/media/childhood-memory:ro
|
||||
env_file:
|
||||
- .env
|
||||
depends_on:
|
||||
@@ -58,8 +58,8 @@ We will use those values in the steps below.
|
||||
command: [ "start.sh", "microservices" ]
|
||||
volumes:
|
||||
- ${UPLOAD_LOCATION}:/usr/src/app/upload
|
||||
+ - /mnt/media/precious-memory:/mnt/media/precious-memory
|
||||
+ - /mnt/media/childhood-memory:/mnt/media/childhood-memory
|
||||
+ - /mnt/media/precious-memory:/mnt/media/precious-memory:ro
|
||||
+ - /mnt/media/childhood-memory:/mnt/media/childhood-memory:ro
|
||||
env_file:
|
||||
- .env
|
||||
depends_on:
|
||||
|
||||
@@ -25,10 +25,18 @@ wget https://github.com/immich-app/immich/releases/latest/download/docker-compos
|
||||
wget -O .env https://github.com/immich-app/immich/releases/latest/download/example.env
|
||||
```
|
||||
|
||||
```bash title="(Optional) Get hwaccel.yml file"
|
||||
wget https://github.com/immich-app/immich/releases/latest/download/hwaccel.yml
|
||||
```
|
||||
|
||||
or by downloading from your browser and moving the files to the directory that you created.
|
||||
|
||||
Note: If you downloaded the files from your browser, also ensure that you rename `example.env` to `.env`.
|
||||
|
||||
:::info
|
||||
Optionally, you can use the [`hwaccel.yml`][hw-file] file to enable hardware acceleration for transcoding. See the [Hardware Transcoding](/docs/features/hardware-transcoding.md) guide for info on how to set this up.
|
||||
:::
|
||||
|
||||
### Step 2 - Populate the .env file with custom values
|
||||
|
||||
<details>
|
||||
@@ -166,6 +174,10 @@ docker-compose up -d # or `docker compose up -d` based on your docker-compos
|
||||
For more information on how to use the application, please refer to the [Post Installation](/docs/install/post-install.mdx) guide.
|
||||
:::
|
||||
|
||||
:::tip
|
||||
Note that downloading container images might require you to authenticate to the GitHub Container Registry ([steps here](https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry#authenticating-to-the-container-registry)).
|
||||
:::
|
||||
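If that applies to you, a typical login looks like the sketch below; `<github-username>` is a placeholder, and the password prompt expects a GitHub personal access token with the `read:packages` scope rather than your account password.

```bash
# Authenticate to the GitHub Container Registry before pulling images
docker login ghcr.io -u <github-username>
# When prompted for a password, paste a personal access token with the read:packages scope
```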
|
||||
### Step 4 - Upgrading
|
||||
|
||||
If `IMMICH_VERSION` is set, it will need to be updated to the latest or desired version.
|
||||
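A common upgrade pattern, assuming the default compose setup from this guide (update `IMMICH_VERSION` in the `.env` file first if you pinned a specific version), is sketched below:

```bash
# Pull the newer images and recreate the containers
docker compose pull      # or `docker-compose pull` on older installations
docker compose up -d     # or `docker-compose up -d`
```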
@@ -182,4 +194,5 @@ Immich is currently under heavy development, which means you can expect breaking
|
||||
|
||||
[compose-file]: https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
|
||||
[env-file]: https://github.com/immich-app/immich/releases/latest/download/example.env
|
||||
[hw-file]: https://github.com/immich-app/immich/releases/latest/download/hwaccel.yml
|
||||
[watchtower]: https://containrrr.dev/watchtower/
|
||||
|
||||
@@ -184,3 +184,24 @@ Typesense URL example JSON before encoding:
|
||||
| `MACHINE_LEARNING_CLASSIFICATION_MODEL` | Classification Model | `microsoft/resnet-50` | machine learning |
|
||||
| `MACHINE_LEARNING_CACHE_FOLDER` | ML Cache Location | `/cache` | machine learning |
|
||||
| `TRANSFORMERS_CACHE` | ML Transformers Cache Location | `/cache` | machine learning |
|
||||
|
||||
## Docker Secrets
|
||||
|
||||
The following variables support the use of [Docker secrets](https://docs.docker.com/engine/swarm/secrets/) for additional security.
|
||||
|
||||
To use any of these, replace the regular environment variable with the equivalent `_FILE` environment variable. The value of
|
||||
the `_FILE` variable should be set to the path of a file containing the variable value. A minimal sketch of this follows the footnotes below.
|
||||
|
||||
| Regular Variable | Equivalent Docker Secrets '\_FILE' Variable |
|
||||
| :----------------: | :-----------------------------------------: |
|
||||
| `DB_HOSTNAME` | `DB_HOSTNAME_FILE`<sup>\*1</sup> |
|
||||
| `DB_DATABASE_NAME` | `DB_DATABASE_NAME_FILE`<sup>\*1</sup> |
|
||||
| `DB_USERNAME` | `DB_USERNAME_FILE`<sup>\*1</sup> |
|
||||
| `DB_PASSWORD` | `DB_PASSWORD_FILE`<sup>\*1</sup> |
|
||||
| `REDIS_PASSWORD` | `REDIS_PASSWORD_FILE`<sup>\*2</sup> |
|
||||
|
||||
\*1: See the [official documentation](https://github.com/docker-library/docs/tree/master/postgres#docker-secrets) for
|
||||
details on how to use Docker Secrets in the Postgres image.
|
||||
|
||||
\*2: See [this comment](https://github.com/docker-library/redis/issues/46#issuecomment-335326234) for an example of how
|
||||
to use a Docker secret for the password in the Redis container.
|
||||
|
||||
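As a minimal sketch of the `_FILE` mechanism (the file path and password value below are made up for illustration), the secret value lives in a file and the container is pointed at that file instead of receiving the value directly:

```bash
# Store the database password in a file instead of a plain environment variable
mkdir -p ./secrets
printf 'example-password' > ./secrets/db_password

# Then, instead of setting DB_PASSWORD, set the _FILE variant to the path of that
# file as seen inside the container (e.g. mounted as a Docker secret):
#   DB_PASSWORD_FILE=/run/secrets/db_password
```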
@@ -10,7 +10,7 @@ I really like the Japanese culture, especially the books, history, and food. The
|
||||
|
||||

|
||||
|
||||
One of my favorite books is [Taikō](https://www.goodreads.com/book/show/336228.Taiko), it is a story about a prominent figure in the history of Japan, [Toyotomy Hideyoshi](https://www.britannica.com/biography/Toyotomi-Hideyoshi). He came from nothing, and through his resilience and wonderful mind, he has become one of the most powerful rulers in Japan's history. I enjoy his personality and the way he moved through life.
|
||||
One of my favorite books is [Taikō](https://www.goodreads.com/book/show/336228.Taiko), it is a story about a prominent figure in the history of Japan, [Toyotomi Hideyoshi](https://www.britannica.com/biography/Toyotomi-Hideyoshi). He came from nothing, and through his resilience and wonderful mind, he has become one of the most powerful rulers in Japan's history. I enjoy his personality and the way he moved through life.
|
||||
|
||||
The color is an adaptation of **_App-Which-Must-Not-Be-Named_**'s color scheme, with an extra color (pink) to complete the flower's fifth petal. The petal layers are the same color scheme as the main layer rotating back and forth to "bring the flower to life."
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
Immich allows the admin user to set the pattern of how the files are uploaded to the Immich would look like. Both in the directory and the filename level.
|
||||
Immich allows the admin user to set the uploaded filename pattern, both at the directory and filename level.
|
||||
|
||||
The admin user can set the template by using the template builder in the `Administration -> Settings -> Storage Template`. Immich provides a set of variables that you can use in constructing the template, along with additional custom text.
|
||||
The admin user can set the template by using the template builder in the `Administration -> Settings -> Storage Template`. Immich provides a set of variables that you can use in constructing the template, along with additional custom text. If the template produces [multiple files with the same filename, they won't be overwritten](https://github.com/immich-app/immich/discussions/3324) as a sequence number is appended to the filename.
|
||||
|
||||
```bash title="Default template"
|
||||
Year/Year-Month-Day/Filename.Extension
|
||||
@@ -8,4 +8,4 @@ Year/Year-Month-Day/Filename.Extension
|
||||
|
||||
<img src={require('./img/storage-template.png').default} width="100%" title="Storage Template Setting" />
|
||||
|
||||
Immich also provides a mechanism to migrate between template so that if the template you set now doesn't work in the future, you can always migrate all the existing files to the new template. The mechanism is run as a job in the Job page.
|
||||
Immich also provides a mechanism to migrate between templates so that if the template you set now doesn't work in the future, you can always migrate all the existing files to the new template. The mechanism is run as a job on the Job page.
|
||||
|
||||
1494
docs/package-lock.json
generated
File diff suppressed because it is too large
@@ -17,14 +17,14 @@
|
||||
"check": "tsc"
|
||||
},
|
||||
"dependencies": {
|
||||
"@docusaurus/core": "2.1.0",
|
||||
"@docusaurus/preset-classic": "2.1.0",
|
||||
"@docusaurus/core": "^2.4.1",
|
||||
"@docusaurus/preset-classic": "^2.4.1",
|
||||
"@mdx-js/react": "^1.6.22",
|
||||
"autoprefixer": "^10.4.13",
|
||||
"clsx": "^1.2.1",
|
||||
"docusaurus-lunr-search": "^2.3.2",
|
||||
"docusaurus-preset-openapi": "^0.6.3",
|
||||
"postcss": "^8.4.20",
|
||||
"postcss": "^8.4.25",
|
||||
"prism-react-renderer": "^1.3.5",
|
||||
"react": "^17.0.2",
|
||||
"react-dom": "^17.0.2",
|
||||
@@ -32,10 +32,10 @@
|
||||
"url": "^0.11.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@docusaurus/module-type-aliases": "2.1.0",
|
||||
"@docusaurus/module-type-aliases": "^2.4.1",
|
||||
"@tsconfig/docusaurus": "^1.0.5",
|
||||
"prettier": "^2.8.8",
|
||||
"typescript": "^5.0.0"
|
||||
"typescript": "^5.1.6"
|
||||
},
|
||||
"browserslist": {
|
||||
"production": [
|
||||
|
||||
@@ -32,6 +32,19 @@ function HomepageHeader() {
|
||||
</div>
|
||||
|
||||
<img src="/img/immich-screenshots.png" alt="logo" />
|
||||
|
||||
<div className="flex flex-col sm:flex-row place-items-center place-content-center mt-4 gap-1">
|
||||
<div className="h-24">
|
||||
<a href="https://play.google.com/store/apps/details?id=app.alextran.immich">
|
||||
<img className="h-24" alt="Get it on Google Play" src="/img/google-play-badge.png" />
|
||||
</a>
|
||||
</div>
|
||||
<div className="h-24">
|
||||
<a href="https://apps.apple.com/sg/app/immich/id1613945652">
|
||||
<img className="h-24 sm:p-3.5 p-3" alt="Download on the App Store" src="/img/ios-app-store-badge.svg" />
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
</header>
|
||||
);
|
||||
@@ -40,7 +53,7 @@ function HomepageHeader() {
|
||||
export default function Home(): JSX.Element {
|
||||
return (
|
||||
<Layout
|
||||
title={`Home`}
|
||||
title="Home"
|
||||
description="immich Self-hosted photo and video backup solution directly from your mobile phone "
|
||||
noFooter={true}
|
||||
>
|
||||
|
||||
BIN
docs/static/img/google-play-badge.png
vendored
Normal file
Binary file not shown.
After Width: | Height: | Size: 4.8 KiB |
46
docs/static/img/ios-app-store-badge.svg
vendored
Executable file
@@ -0,0 +1,46 @@
|
||||
<svg id="livetype" xmlns="http://www.w3.org/2000/svg" width="119.66407" height="40" viewBox="0 0 119.66407 40">
|
||||
<title>Download_on_the_App_Store_Badge_US-UK_RGB_blk_4SVG_092917</title>
|
||||
<g>
|
||||
<g>
|
||||
<g>
|
||||
<path d="M110.13477,0H9.53468c-.3667,0-.729,0-1.09473.002-.30615.002-.60986.00781-.91895.0127A13.21476,13.21476,0,0,0,5.5171.19141a6.66509,6.66509,0,0,0-1.90088.627A6.43779,6.43779,0,0,0,1.99757,1.99707,6.25844,6.25844,0,0,0,.81935,3.61816a6.60119,6.60119,0,0,0-.625,1.90332,12.993,12.993,0,0,0-.1792,2.002C.00587,7.83008.00489,8.1377,0,8.44434V31.5586c.00489.3105.00587.6113.01515.9219a12.99232,12.99232,0,0,0,.1792,2.0019,6.58756,6.58756,0,0,0,.625,1.9043A6.20778,6.20778,0,0,0,1.99757,38.001a6.27445,6.27445,0,0,0,1.61865,1.1787,6.70082,6.70082,0,0,0,1.90088.6308,13.45514,13.45514,0,0,0,2.0039.1768c.30909.0068.6128.0107.91895.0107C8.80567,40,9.168,40,9.53468,40H110.13477c.3594,0,.7246,0,1.084-.002.3047,0,.6172-.0039.9219-.0107a13.279,13.279,0,0,0,2-.1768,6.80432,6.80432,0,0,0,1.9082-.6308,6.27742,6.27742,0,0,0,1.6172-1.1787,6.39482,6.39482,0,0,0,1.1816-1.6143,6.60413,6.60413,0,0,0,.6191-1.9043,13.50643,13.50643,0,0,0,.1856-2.0019c.0039-.3106.0039-.6114.0039-.9219.0078-.3633.0078-.7246.0078-1.0938V9.53613c0-.36621,0-.72949-.0078-1.09179,0-.30664,0-.61426-.0039-.9209a13.5071,13.5071,0,0,0-.1856-2.002,6.6177,6.6177,0,0,0-.6191-1.90332,6.46619,6.46619,0,0,0-2.7988-2.7998,6.76754,6.76754,0,0,0-1.9082-.627,13.04394,13.04394,0,0,0-2-.17676c-.3047-.00488-.6172-.01074-.9219-.01269-.3594-.002-.7246-.002-1.084-.002Z" style="fill: #a6a6a6"/>
|
||||
<path d="M8.44483,39.125c-.30468,0-.602-.0039-.90429-.0107a12.68714,12.68714,0,0,1-1.86914-.1631,5.88381,5.88381,0,0,1-1.65674-.5479,5.40573,5.40573,0,0,1-1.397-1.0166,5.32082,5.32082,0,0,1-1.02051-1.3965,5.72186,5.72186,0,0,1-.543-1.6572,12.41351,12.41351,0,0,1-.1665-1.875c-.00634-.2109-.01464-.9131-.01464-.9131V8.44434S.88185,7.75293.8877,7.5498a12.37039,12.37039,0,0,1,.16553-1.87207,5.7555,5.7555,0,0,1,.54346-1.6621A5.37349,5.37349,0,0,1,2.61183,2.61768,5.56543,5.56543,0,0,1,4.01417,1.59521a5.82309,5.82309,0,0,1,1.65332-.54394A12.58589,12.58589,0,0,1,7.543.88721L8.44532.875H111.21387l.9131.0127a12.38493,12.38493,0,0,1,1.8584.16259,5.93833,5.93833,0,0,1,1.6709.54785,5.59374,5.59374,0,0,1,2.415,2.41993,5.76267,5.76267,0,0,1,.5352,1.64892,12.995,12.995,0,0,1,.1738,1.88721c.0029.2832.0029.5874.0029.89014.0079.375.0079.73193.0079,1.09179V30.4648c0,.3633,0,.7178-.0079,1.0752,0,.3252,0,.6231-.0039.9297a12.73126,12.73126,0,0,1-.1709,1.8535,5.739,5.739,0,0,1-.54,1.67,5.48029,5.48029,0,0,1-1.0156,1.3857,5.4129,5.4129,0,0,1-1.3994,1.0225,5.86168,5.86168,0,0,1-1.668.5498,12.54218,12.54218,0,0,1-1.8692.1631c-.2929.0068-.5996.0107-.8974.0107l-1.084.002Z"/>
|
||||
</g>
|
||||
<g id="_Group_" data-name="<Group>">
|
||||
<g id="_Group_2" data-name="<Group>">
|
||||
<g id="_Group_3" data-name="<Group>">
|
||||
<path id="_Path_" data-name="<Path>" d="M24.76888,20.30068a4.94881,4.94881,0,0,1,2.35656-4.15206,5.06566,5.06566,0,0,0-3.99116-2.15768c-1.67924-.17626-3.30719,1.00483-4.1629,1.00483-.87227,0-2.18977-.98733-3.6085-.95814a5.31529,5.31529,0,0,0-4.47292,2.72787c-1.934,3.34842-.49141,8.26947,1.3612,10.97608.9269,1.32535,2.01018,2.8058,3.42763,2.7533,1.38706-.05753,1.9051-.88448,3.5794-.88448,1.65876,0,2.14479.88448,3.591.8511,1.48838-.02416,2.42613-1.33124,3.32051-2.66914a10.962,10.962,0,0,0,1.51842-3.09251A4.78205,4.78205,0,0,1,24.76888,20.30068Z" style="fill: #fff"/>
|
||||
<path id="_Path_2" data-name="<Path>" d="M22.03725,12.21089a4.87248,4.87248,0,0,0,1.11452-3.49062,4.95746,4.95746,0,0,0-3.20758,1.65961,4.63634,4.63634,0,0,0-1.14371,3.36139A4.09905,4.09905,0,0,0,22.03725,12.21089Z" style="fill: #fff"/>
|
||||
</g>
|
||||
</g>
|
||||
<g>
|
||||
<path d="M42.30227,27.13965h-4.7334l-1.13672,3.35645H34.42727l4.4834-12.418h2.083l4.4834,12.418H43.438ZM38.0591,25.59082h3.752l-1.84961-5.44727h-.05176Z" style="fill: #fff"/>
|
||||
<path d="M55.15969,25.96973c0,2.81348-1.50586,4.62109-3.77832,4.62109a3.0693,3.0693,0,0,1-2.84863-1.584h-.043v4.48438h-1.8584V21.44238H48.4302v1.50586h.03418a3.21162,3.21162,0,0,1,2.88281-1.60059C53.645,21.34766,55.15969,23.16406,55.15969,25.96973Zm-1.91016,0c0-1.833-.94727-3.03809-2.39258-3.03809-1.41992,0-2.375,1.23047-2.375,3.03809,0,1.82422.95508,3.0459,2.375,3.0459C52.30227,29.01563,53.24953,27.81934,53.24953,25.96973Z" style="fill: #fff"/>
|
||||
<path d="M65.12453,25.96973c0,2.81348-1.50586,4.62109-3.77832,4.62109a3.0693,3.0693,0,0,1-2.84863-1.584h-.043v4.48438h-1.8584V21.44238H58.395v1.50586h.03418A3.21162,3.21162,0,0,1,61.312,21.34766C63.60988,21.34766,65.12453,23.16406,65.12453,25.96973Zm-1.91016,0c0-1.833-.94727-3.03809-2.39258-3.03809-1.41992,0-2.375,1.23047-2.375,3.03809,0,1.82422.95508,3.0459,2.375,3.0459C62.26711,29.01563,63.21438,27.81934,63.21438,25.96973Z" style="fill: #fff"/>
|
||||
<path d="M71.71047,27.03613c.1377,1.23145,1.334,2.04,2.96875,2.04,1.56641,0,2.69336-.80859,2.69336-1.91895,0-.96387-.67969-1.541-2.28906-1.93652l-1.60937-.3877c-2.28027-.55078-3.33887-1.61719-3.33887-3.34766,0-2.14258,1.86719-3.61426,4.51855-3.61426,2.624,0,4.42285,1.47168,4.4834,3.61426h-1.876c-.1123-1.23926-1.13672-1.9873-2.63379-1.9873s-2.52148.75684-2.52148,1.8584c0,.87793.6543,1.39453,2.25488,1.79l1.36816.33594c2.54785.60254,3.60645,1.626,3.60645,3.44238,0,2.32324-1.85059,3.77832-4.79395,3.77832-2.75391,0-4.61328-1.4209-4.7334-3.667Z" style="fill: #fff"/>
|
||||
<path d="M83.34621,19.2998v2.14258h1.72168v1.47168H83.34621v4.99121c0,.77539.34473,1.13672,1.10156,1.13672a5.80752,5.80752,0,0,0,.61133-.043v1.46289a5.10351,5.10351,0,0,1-1.03223.08594c-1.833,0-2.54785-.68848-2.54785-2.44434V22.91406H80.16262V21.44238H81.479V19.2998Z" style="fill: #fff"/>
|
||||
<path d="M86.065,25.96973c0-2.84863,1.67773-4.63867,4.29395-4.63867,2.625,0,4.29492,1.79,4.29492,4.63867,0,2.85645-1.66113,4.63867-4.29492,4.63867C87.72609,30.6084,86.065,28.82617,86.065,25.96973Zm6.69531,0c0-1.9541-.89551-3.10742-2.40137-3.10742s-2.40039,1.16211-2.40039,3.10742c0,1.96191.89453,3.10645,2.40039,3.10645S92.76027,27.93164,92.76027,25.96973Z" style="fill: #fff"/>
|
||||
<path d="M96.18606,21.44238h1.77246v1.541h.043a2.1594,2.1594,0,0,1,2.17773-1.63574,2.86616,2.86616,0,0,1,.63672.06934v1.73828a2.59794,2.59794,0,0,0-.835-.1123,1.87264,1.87264,0,0,0-1.93652,2.083v5.37012h-1.8584Z" style="fill: #fff"/>
|
||||
<path d="M109.3843,27.83691c-.25,1.64355-1.85059,2.77148-3.89844,2.77148-2.63379,0-4.26855-1.76465-4.26855-4.5957,0-2.83984,1.64355-4.68164,4.19043-4.68164,2.50488,0,4.08008,1.7207,4.08008,4.46582v.63672h-6.39453v.1123a2.358,2.358,0,0,0,2.43555,2.56445,2.04834,2.04834,0,0,0,2.09082-1.27344Zm-6.28223-2.70215h4.52637a2.1773,2.1773,0,0,0-2.2207-2.29785A2.292,2.292,0,0,0,103.10207,25.13477Z" style="fill: #fff"/>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
<g id="_Group_4" data-name="<Group>">
|
||||
<g>
|
||||
<path d="M37.82619,8.731a2.63964,2.63964,0,0,1,2.80762,2.96484c0,1.90625-1.03027,3.002-2.80762,3.002H35.67092V8.731Zm-1.22852,5.123h1.125a1.87588,1.87588,0,0,0,1.96777-2.146,1.881,1.881,0,0,0-1.96777-2.13379h-1.125Z" style="fill: #fff"/>
|
||||
<path d="M41.68068,12.44434a2.13323,2.13323,0,1,1,4.24707,0,2.13358,2.13358,0,1,1-4.24707,0Zm3.333,0c0-.97607-.43848-1.54687-1.208-1.54687-.77246,0-1.207.5708-1.207,1.54688,0,.98389.43457,1.55029,1.207,1.55029C44.57522,13.99463,45.01369,13.42432,45.01369,12.44434Z" style="fill: #fff"/>
|
||||
<path d="M51.57326,14.69775h-.92187l-.93066-3.31641h-.07031l-.92676,3.31641h-.91309l-1.24121-4.50293h.90137l.80664,3.436h.06641l.92578-3.436h.85254l.92578,3.436h.07031l.80273-3.436h.88867Z" style="fill: #fff"/>
|
||||
<path d="M53.85354,10.19482H54.709v.71533h.06641a1.348,1.348,0,0,1,1.34375-.80225,1.46456,1.46456,0,0,1,1.55859,1.6748v2.915h-.88867V12.00586c0-.72363-.31445-1.0835-.97168-1.0835a1.03294,1.03294,0,0,0-1.0752,1.14111v2.63428h-.88867Z" style="fill: #fff"/>
|
||||
<path d="M59.09377,8.437h.88867v6.26074h-.88867Z" style="fill: #fff"/>
|
||||
<path d="M61.21779,12.44434a2.13346,2.13346,0,1,1,4.24756,0,2.1338,2.1338,0,1,1-4.24756,0Zm3.333,0c0-.97607-.43848-1.54687-1.208-1.54687-.77246,0-1.207.5708-1.207,1.54688,0,.98389.43457,1.55029,1.207,1.55029C64.11232,13.99463,64.5508,13.42432,64.5508,12.44434Z" style="fill: #fff"/>
|
||||
<path d="M66.4009,13.42432c0-.81055.60352-1.27783,1.6748-1.34424l1.21973-.07031v-.38867c0-.47559-.31445-.74414-.92187-.74414-.49609,0-.83984.18213-.93848.50049h-.86035c.09082-.77344.81836-1.26953,1.83984-1.26953,1.12891,0,1.76563.562,1.76563,1.51318v3.07666h-.85547v-.63281h-.07031a1.515,1.515,0,0,1-1.35254.707A1.36026,1.36026,0,0,1,66.4009,13.42432Zm2.89453-.38477v-.37646l-1.09961.07031c-.62012.0415-.90137.25244-.90137.64941,0,.40527.35156.64111.835.64111A1.0615,1.0615,0,0,0,69.29543,13.03955Z" style="fill: #fff"/>
|
||||
<path d="M71.34816,12.44434c0-1.42285.73145-2.32422,1.86914-2.32422a1.484,1.484,0,0,1,1.38086.79h.06641V8.437h.88867v6.26074h-.85156v-.71143h-.07031a1.56284,1.56284,0,0,1-1.41406.78564C72.0718,14.772,71.34816,13.87061,71.34816,12.44434Zm.918,0c0,.95508.4502,1.52979,1.20313,1.52979.749,0,1.21191-.583,1.21191-1.52588,0-.93848-.46777-1.52979-1.21191-1.52979C72.72121,10.91846,72.26613,11.49707,72.26613,12.44434Z" style="fill: #fff"/>
|
||||
<path d="M79.23,12.44434a2.13323,2.13323,0,1,1,4.24707,0,2.13358,2.13358,0,1,1-4.24707,0Zm3.333,0c0-.97607-.43848-1.54687-1.208-1.54687-.77246,0-1.207.5708-1.207,1.54688,0,.98389.43457,1.55029,1.207,1.55029C82.12453,13.99463,82.563,13.42432,82.563,12.44434Z" style="fill: #fff"/>
|
||||
<path d="M84.66945,10.19482h.85547v.71533h.06641a1.348,1.348,0,0,1,1.34375-.80225,1.46456,1.46456,0,0,1,1.55859,1.6748v2.915H87.605V12.00586c0-.72363-.31445-1.0835-.97168-1.0835a1.03294,1.03294,0,0,0-1.0752,1.14111v2.63428h-.88867Z" style="fill: #fff"/>
|
||||
<path d="M93.51516,9.07373v1.1416h.97559v.74854h-.97559V13.2793c0,.47168.19434.67822.63672.67822a2.96657,2.96657,0,0,0,.33887-.02051v.74023a2.9155,2.9155,0,0,1-.4834.04541c-.98828,0-1.38184-.34766-1.38184-1.21582v-2.543h-.71484v-.74854h.71484V9.07373Z" style="fill: #fff"/>
|
||||
<path d="M95.70461,8.437h.88086v2.48145h.07031a1.3856,1.3856,0,0,1,1.373-.80664,1.48339,1.48339,0,0,1,1.55078,1.67871v2.90723H98.69v-2.688c0-.71924-.335-1.0835-.96289-1.0835a1.05194,1.05194,0,0,0-1.13379,1.1416v2.62988h-.88867Z" style="fill: #fff"/>
|
||||
<path d="M104.76125,13.48193a1.828,1.828,0,0,1-1.95117,1.30273A2.04531,2.04531,0,0,1,100.73,12.46045a2.07685,2.07685,0,0,1,2.07617-2.35254c1.25293,0,2.00879.856,2.00879,2.27V12.688h-3.17969v.0498a1.1902,1.1902,0,0,0,1.19922,1.29,1.07934,1.07934,0,0,0,1.07129-.5459Zm-3.126-1.45117h2.27441a1.08647,1.08647,0,0,0-1.1084-1.1665A1.15162,1.15162,0,0,0,101.63527,12.03076Z" style="fill: #fff"/>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 11 KiB |
@@ -12,37 +12,65 @@ download:
|
||||
files:
|
||||
- file: mobile/assets/i18n/en-US.json
|
||||
locale_code: en-US
|
||||
- file: mobile/assets/i18n/da-DK.json
|
||||
locale_code: da-DK
|
||||
- file: mobile/assets/i18n/de-DE.json
|
||||
locale_code: de-DE
|
||||
- file: mobile/assets/i18n/fr-FR.json
|
||||
locale_code: fr-FR
|
||||
- file: mobile/assets/i18n/da-DK.json
|
||||
locale_code: da-DK
|
||||
- file: mobile/assets/i18n/it-IT.json
|
||||
locale_code: it-IT
|
||||
- file: mobile/assets/i18n/nl-NL.json
|
||||
locale_code: nl-NL
|
||||
- file: mobile/assets/i18n/ko-KR.json
|
||||
locale_code: ko-KR
|
||||
- file: mobile/assets/i18n/es-ES.json
|
||||
locale_code: es-ES
|
||||
- file: mobile/assets/i18n/fi-FI.json
|
||||
locale_code: fi-FI
|
||||
- file: mobile/assets/i18n/vi-VN.json
|
||||
locale_code: vi-VN
|
||||
- file: mobile/assets/i18n/fr-FR.json
|
||||
locale_code: fr-FR
|
||||
- file: mobile/assets/i18n/ja-JP.json
|
||||
locale_code: ja-JP
|
||||
- file: mobile/assets/i18n/pt-BR.json
|
||||
locale_code: pt-BR
|
||||
- file: mobile/assets/i18n/pl-PL.json
|
||||
locale_code: pl-PL
|
||||
- file: mobile/assets/i18n/sv-SE.json
|
||||
locale_code: sv-SE
|
||||
- file: mobile/assets/i18n/sk-SK.json
|
||||
locale_code: sk-SK
|
||||
- file: mobile/assets/i18n/zh-CN.json
|
||||
locale_code: zh-CN
|
||||
- file: mobile/assets/i18n/ru-RU.json
|
||||
locale_code: ru-RU
|
||||
- file: mobile/assets/i18n/fi-FI.json
|
||||
locale_code: fi-FI
|
||||
- file: mobile/assets/i18n/pt-BR.json
|
||||
locale_code: pt-BR
|
||||
- file: mobile/assets/i18n/cs-CZ.json
|
||||
locale_code: cs-CZ
|
||||
- file: mobile/assets/i18n/uk-UA.json
|
||||
locale_code: uk-UA
|
||||
- file: mobile/assets/i18n/ru-RU.json
|
||||
locale_code: ru-RU
|
||||
- file: mobile/assets/i18n/zh-CN.json
|
||||
locale_code: zh-CN
|
||||
- file: mobile/assets/i18n/sk-SK.json
|
||||
locale_code: sk-SK
|
||||
- file: mobile/assets/i18n/nl-NL.json
|
||||
locale_code: nl-NL
|
||||
- file: mobile/assets/i18n/nb-NO.json
|
||||
locale_code: nb-NO
|
||||
- file: mobile/assets/i18n/sv-SE.json
|
||||
locale_code: sv-SE
|
||||
- file: mobile/assets/i18n/mn.json
|
||||
locale_code: mn
|
||||
- file: mobile/assets/i18n/ko-KR.json
|
||||
locale_code: ko-KR
|
||||
- file: mobile/assets/i18n/sr-Latn.json
|
||||
locale_code: sr-Latn
|
||||
- file: mobile/assets/i18n/sr-Cyrl.json
|
||||
locale_code: sr-Cyrl
|
||||
- file: mobile/assets/i18n/hi-IN.json
|
||||
locale_code: hi-IN
|
||||
- file: mobile/assets/i18n/es-PE.json
|
||||
locale_code: es-PE
|
||||
- file: mobile/assets/i18n/es-MX.json
|
||||
locale_code: es-MX
|
||||
- file: mobile/assets/i18n/sv-FI.json
|
||||
locale_code: sv-FI
|
||||
- file: mobile/assets/i18n/ca.json
|
||||
locale_code: ca
|
||||
- file: mobile/assets/i18n/hu-HU.json
|
||||
locale_code: hu-HU
|
||||
- file: mobile/assets/i18n/lv-LV.json
|
||||
locale_code: lv-LV
|
||||
- file: mobile/assets/i18n/zh-Hans.json
|
||||
locale_code: zh-Hans
|
||||
- file: mobile/assets/i18n/th-TH.json
|
||||
locale_code: th-TH
|
||||
|
||||
@@ -15,6 +15,8 @@ RUN poetry install --sync --no-interaction --no-ansi --no-root --only main
|
||||
|
||||
FROM python:3.11.4-slim-bullseye@sha256:91d194f58f50594cda71dcd2e8fdefd90e7ecc57d07823813b67c8521e565dcd
|
||||
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends tini && rm -rf /var/lib/apt/lists/*
|
||||
|
||||
WORKDIR /usr/src/app
|
||||
ENV NODE_ENV=production \
|
||||
TRANSFORMERS_CACHE=/cache \
|
||||
@@ -25,4 +27,5 @@ ENV NODE_ENV=production \
|
||||
|
||||
COPY --from=builder /opt/venv /opt/venv
|
||||
COPY app .
|
||||
ENTRYPOINT ["python", "-m", "app.main"]
|
||||
ENTRYPOINT ["tini", "--"]
|
||||
CMD ["python", "-m", "app.main"]
|
||||
|
||||
21
machine-learning/README_es_ES.md
Normal file
@@ -0,0 +1,21 @@
|
||||
# Immich Machine Learning
|
||||
|
||||
- Image classification
|
||||
- CLIP embeddings
|
||||
- Facial recognition
|
||||
|
||||
# Setup
|
||||
|
||||
This project uses [Poetry](https://python-poetry.org/docs/#installation), so be sure to install it first.
|
||||
Running `poetry install --no-root --with dev` will install everything you need in an isolated virtual environment.
|
||||
|
||||
To add or remove dependencies, you can use the commands `poetry add $PACKAGE_NAME` and `poetry remove $PACKAGE_NAME`, respectively.
|
||||
Be sure to commit the `poetry.lock` and `pyproject.toml` files to reflect any dependency changes.
|
||||
|
||||
# Load Testing
|
||||
|
||||
To measure inference speed and latency, you can use [Locust](https://locust.io/) with the provided `locustfile.py`.
|
||||
Locust works by querying the model endpoints and aggregating statistics, which means the application must already be deployed.
|
||||
You can run `load_test.sh` to automatically deploy the application locally and start Locust, optionally adjusting its environment variables as needed.
|
||||
|
||||
Alternatively, for more customized testing, you can also run `locust` directly: see the [documentation](https://docs.locust.io/en/stable/index.html). Note that, in Locust's jargon, concurrency is measured in `users`, and each user runs one task at a time. To achieve a specific per-endpoint concurrency, multiply that number by the number of endpoints to be queried. For example, if there are 3 endpoints and you want each of them to receive 8 requests at a time, you should set the number of users to 24.
|
||||
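A brief sketch of both options, assuming the commands are run from the `machine-learning/` directory and the service is reachable on the default port configured elsewhere in this diff (3003):

```bash
# Option 1: deploy locally and start Locust via the provided script
./load_test.sh

# Option 2: point Locust directly at an already-running deployment
locust -f locustfile.py --host http://localhost:3003
```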
@@ -13,14 +13,14 @@ class Settings(BaseSettings):
|
||||
facial_recognition_model: str = "buffalo_l"
|
||||
min_tag_score: float = 0.9
|
||||
eager_startup: bool = True
|
||||
model_ttl: int = 300
|
||||
model_ttl: int = 0
|
||||
host: str = "0.0.0.0"
|
||||
port: int = 3003
|
||||
workers: int = 1
|
||||
min_face_score: float = 0.7
|
||||
test_full: bool = False
|
||||
|
||||
class Config(BaseSettings.Config):
|
||||
class Config:
|
||||
env_prefix = "MACHINE_LEARNING_"
|
||||
case_sensitive = False
|
||||
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
from types import SimpleNamespace
|
||||
from typing import Any, Iterator, TypeAlias
|
||||
from typing import Iterator, TypeAlias
|
||||
from unittest import mock
|
||||
|
||||
import numpy as np
|
||||
@@ -22,91 +21,6 @@ def cv_image(pil_image: Image.Image) -> ndarray:
|
||||
return np.asarray(pil_image)[:, :, ::-1] # PIL uses RGB while cv2 uses BGR
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_classifier_pipeline() -> Iterator[mock.Mock]:
|
||||
with mock.patch("app.models.image_classification.pipeline") as model:
|
||||
classifier_preds = [
|
||||
{"label": "that's an image alright", "score": 0.8},
|
||||
{"label": "well it ends with .jpg", "score": 0.1},
|
||||
{"label": "idk, im just seeing bytes", "score": 0.05},
|
||||
{"label": "not sure", "score": 0.04},
|
||||
{"label": "probably a virus", "score": 0.01},
|
||||
]
|
||||
|
||||
def forward(
|
||||
inputs: Image.Image | list[Image.Image], **kwargs: Any
|
||||
) -> list[dict[str, Any]] | list[list[dict[str, Any]]]:
|
||||
if isinstance(inputs, list) and not all([isinstance(img, Image.Image) for img in inputs]):
|
||||
raise TypeError
|
||||
elif not isinstance(inputs, Image.Image):
|
||||
raise TypeError
|
||||
|
||||
if isinstance(inputs, list):
|
||||
return [classifier_preds] * len(inputs)
|
||||
|
||||
return classifier_preds
|
||||
|
||||
model.return_value = forward
|
||||
yield model
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_st() -> Iterator[mock.Mock]:
|
||||
with mock.patch("app.models.clip.SentenceTransformer") as model:
|
||||
embedding = np.random.rand(512).astype(np.float32)
|
||||
|
||||
def encode(inputs: Image.Image | list[Image.Image], **kwargs: Any) -> ndarray | list[ndarray]:
|
||||
# mypy complains unless isinstance(inputs, list) is used explicitly
|
||||
img_batch = isinstance(inputs, list) and all([isinstance(inst, Image.Image) for inst in inputs])
|
||||
text_batch = isinstance(inputs, list) and all([isinstance(inst, str) for inst in inputs])
|
||||
if isinstance(inputs, list) and not any([img_batch, text_batch]):
|
||||
raise TypeError
|
||||
|
||||
if isinstance(inputs, list):
|
||||
return np.stack([embedding] * len(inputs))
|
||||
|
||||
return embedding
|
||||
|
||||
mocked = mock.Mock()
|
||||
mocked.encode = encode
|
||||
model.return_value = mocked
|
||||
yield model
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_faceanalysis() -> Iterator[mock.Mock]:
|
||||
with mock.patch("app.models.facial_recognition.FaceAnalysis") as model:
|
||||
face_preds = [
|
||||
SimpleNamespace( # this is so these fields can be accessed through dot notation
|
||||
**{
|
||||
"bbox": np.random.rand(4).astype(np.float32),
|
||||
"kps": np.random.rand(5, 2).astype(np.float32),
|
||||
"det_score": np.array([0.67]).astype(np.float32),
|
||||
"normed_embedding": np.random.rand(512).astype(np.float32),
|
||||
}
|
||||
),
|
||||
SimpleNamespace(
|
||||
**{
|
||||
"bbox": np.random.rand(4).astype(np.float32),
|
||||
"kps": np.random.rand(5, 2).astype(np.float32),
|
||||
"det_score": np.array([0.4]).astype(np.float32),
|
||||
"normed_embedding": np.random.rand(512).astype(np.float32),
|
||||
}
|
||||
),
|
||||
]
|
||||
|
||||
def get(image: np.ndarray[int, np.dtype[np.float32]], **kwargs: Any) -> list[SimpleNamespace]:
|
||||
if not isinstance(image, np.ndarray):
|
||||
raise TypeError
|
||||
|
||||
return face_preds
|
||||
|
||||
mocked = mock.Mock()
|
||||
mocked.get = get
|
||||
model.return_value = mocked
|
||||
yield model
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_get_model() -> Iterator[mock.Mock]:
|
||||
with mock.patch("app.models.cache.InferenceModel.from_model_type", autospec=True) as mocked:
|
||||
|
||||
@@ -9,7 +9,6 @@ from fastapi import Body, Depends, FastAPI
|
||||
from PIL import Image
|
||||
|
||||
from .config import settings
|
||||
from .models.base import InferenceModel
|
||||
from .models.cache import ModelCache
|
||||
from .schemas import (
|
||||
EmbeddingResponse,
|
||||
@@ -25,7 +24,7 @@ app = FastAPI()
|
||||
|
||||
|
||||
def init_state() -> None:
|
||||
app.state.model_cache = ModelCache(ttl=settings.model_ttl, revalidate=True)
|
||||
app.state.model_cache = ModelCache(ttl=settings.model_ttl, revalidate=settings.model_ttl > 0)
|
||||
|
||||
|
||||
async def load_models() -> None:
|
||||
@@ -38,10 +37,7 @@ async def load_models() -> None:
|
||||
|
||||
# Get all models
|
||||
for model_name, model_type in models:
|
||||
if settings.eager_startup:
|
||||
await app.state.model_cache.get(model_name, model_type)
|
||||
else:
|
||||
InferenceModel.from_model_type(model_type, model_name)
|
||||
await app.state.model_cache.get(model_name, model_type, eager=settings.eager_startup)
|
||||
|
||||
|
||||
@app.on_event("startup")
|
||||
|
||||
@@ -14,22 +14,43 @@ from ..schemas import ModelType
|
||||
class InferenceModel(ABC):
|
||||
_model_type: ModelType
|
||||
|
||||
def __init__(self, model_name: str, cache_dir: Path | str | None = None, **model_kwargs: Any) -> None:
|
||||
def __init__(
|
||||
self, model_name: str, cache_dir: Path | str | None = None, eager: bool = True, **model_kwargs: Any
|
||||
) -> None:
|
||||
self.model_name = model_name
|
||||
self._loaded = False
|
||||
self._cache_dir = Path(cache_dir) if cache_dir is not None else get_cache_dir(model_name, self.model_type)
|
||||
|
||||
loader = self.load if eager else self.download
|
||||
try:
|
||||
self.load(**model_kwargs)
|
||||
loader(**model_kwargs)
|
||||
except (OSError, InvalidProtobuf):
|
||||
self.clear_cache()
|
||||
self.load(**model_kwargs)
|
||||
loader(**model_kwargs)
|
||||
|
||||
def download(self, **model_kwargs: Any) -> None:
|
||||
if not self.cached:
|
||||
self._download(**model_kwargs)
|
||||
|
||||
def load(self, **model_kwargs: Any) -> None:
|
||||
self.download(**model_kwargs)
|
||||
self._load(**model_kwargs)
|
||||
self._loaded = True
|
||||
|
||||
def predict(self, inputs: Any) -> Any:
|
||||
if not self._loaded:
|
||||
self.load()
|
||||
return self._predict(inputs)
|
||||
|
||||
@abstractmethod
|
||||
def load(self, **model_kwargs: Any) -> None:
|
||||
def _predict(self, inputs: Any) -> Any:
|
||||
...
|
||||
|
||||
@abstractmethod
|
||||
def predict(self, inputs: Any) -> Any:
|
||||
def _download(self, **model_kwargs: Any) -> None:
|
||||
...
|
||||
|
||||
@abstractmethod
|
||||
def _load(self, **model_kwargs: Any) -> None:
|
||||
...
|
||||
|
||||
@property
|
||||
@@ -44,6 +65,10 @@ class InferenceModel(ABC):
|
||||
def cache_dir(self, cache_dir: Path) -> None:
|
||||
self._cache_dir = cache_dir
|
||||
|
||||
@property
|
||||
def cached(self) -> bool:
|
||||
return self.cache_dir.exists() and any(self.cache_dir.iterdir())
|
||||
|
||||
@classmethod
|
||||
def from_model_type(cls, model_type: ModelType, model_name: str, **model_kwargs: Any) -> InferenceModel:
|
||||
subclasses = {subclass._model_type: subclass for subclass in cls.__subclasses__()}
|
||||
@@ -55,7 +80,11 @@ class InferenceModel(ABC):
|
||||
def clear_cache(self) -> None:
|
||||
if not self.cache_dir.exists():
|
||||
return
|
||||
elif not rmtree.avoids_symlink_attacks:
|
||||
if not rmtree.avoids_symlink_attacks:
|
||||
raise RuntimeError("Attempted to clear cache, but rmtree is not safe on this platform.")
|
||||
|
||||
rmtree(self.cache_dir)
|
||||
if self.cache_dir.is_dir():
|
||||
rmtree(self.cache_dir)
|
||||
else:
|
||||
self.cache_dir.unlink()
|
||||
self.cache_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import asyncio
|
||||
from typing import Any
|
||||
|
||||
from aiocache.backends.memory import SimpleMemoryCache
|
||||
@@ -48,13 +47,10 @@ class ModelCache:
|
||||
"""
|
||||
|
||||
key = self.cache.build_key(model_name, model_type.value)
|
||||
model = await self.cache.get(key)
|
||||
if model is None:
|
||||
async with OptimisticLock(self.cache, key) as lock:
|
||||
model = await asyncio.get_running_loop().run_in_executor(
|
||||
None,
|
||||
lambda: InferenceModel.from_model_type(model_type, model_name, **model_kwargs),
|
||||
)
|
||||
async with OptimisticLock(self.cache, key) as lock:
|
||||
model = await self.cache.get(key)
|
||||
if model is None:
|
||||
model = InferenceModel.from_model_type(model_type, model_name, **model_kwargs)
|
||||
await lock.cas(model, ttl=self.ttl)
|
||||
return model
|
||||
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
from PIL.Image import Image
|
||||
from sentence_transformers import SentenceTransformer
|
||||
from sentence_transformers.util import snapshot_download
|
||||
|
||||
from ..schemas import ModelType
|
||||
from .base import InferenceModel
|
||||
@@ -11,12 +11,21 @@ from .base import InferenceModel
|
||||
class CLIPSTEncoder(InferenceModel):
|
||||
_model_type = ModelType.CLIP
|
||||
|
||||
def load(self, **model_kwargs: Any) -> None:
|
||||
def _download(self, **model_kwargs: Any) -> None:
|
||||
repo_id = self.model_name if "/" in self.model_name else f"sentence-transformers/{self.model_name}"
|
||||
snapshot_download(
|
||||
cache_dir=self.cache_dir,
|
||||
repo_id=repo_id,
|
||||
library_name="sentence-transformers",
|
||||
ignore_files=["flax_model.msgpack", "rust_model.ot", "tf_model.h5"],
|
||||
)
|
||||
|
||||
def _load(self, **model_kwargs: Any) -> None:
|
||||
self.model = SentenceTransformer(
|
||||
self.model_name,
|
||||
cache_folder=self.cache_dir.as_posix(),
|
||||
**model_kwargs,
|
||||
)
|
||||
|
||||
def predict(self, image_or_text: Image | str) -> list[float]:
|
||||
def _predict(self, image_or_text: Image | str) -> list[float]:
|
||||
return self.model.encode(image_or_text).tolist()
|
||||
|
||||
@@ -1,8 +1,12 @@
|
||||
import zipfile
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
import cv2
|
||||
from insightface.app import FaceAnalysis
|
||||
import numpy as np
|
||||
from insightface.model_zoo import ArcFaceONNX, RetinaFace
|
||||
from insightface.utils.face_align import norm_crop
|
||||
from insightface.utils.storage import BASE_REPO_URL, download_file
|
||||
|
||||
from ..config import settings
|
||||
from ..schemas import ModelType
|
||||
@@ -22,39 +26,62 @@ class FaceRecognizer(InferenceModel):
|
||||
self.min_score = min_score
|
||||
super().__init__(model_name, cache_dir, **model_kwargs)
|
||||
|
||||
def load(self, **model_kwargs: Any) -> None:
|
||||
self.model = FaceAnalysis(
|
||||
name=self.model_name,
|
||||
root=self.cache_dir.as_posix(),
|
||||
allowed_modules=["detection", "recognition"],
|
||||
**model_kwargs,
|
||||
)
|
||||
self.model.prepare(
|
||||
ctx_id=0,
|
||||
def _download(self, **model_kwargs: Any) -> None:
|
||||
zip_file = self.cache_dir / f"{self.model_name}.zip"
|
||||
download_file(f"{BASE_REPO_URL}/{self.model_name}.zip", zip_file)
|
||||
with zipfile.ZipFile(zip_file, "r") as zip:
|
||||
members = zip.namelist()
|
||||
det_file = next(model for model in members if model.startswith("det_"))
|
||||
rec_file = next(model for model in members if model.startswith("w600k_"))
|
||||
zip.extractall(self.cache_dir, members=[det_file, rec_file])
|
||||
zip_file.unlink()
|
||||
|
||||
def _load(self, **model_kwargs: Any) -> None:
|
||||
try:
|
||||
det_file = next(self.cache_dir.glob("det_*.onnx"))
|
||||
rec_file = next(self.cache_dir.glob("w600k_*.onnx"))
|
||||
except StopIteration:
|
||||
raise FileNotFoundError("Facial recognition models not found in cache directory")
|
||||
self.det_model = RetinaFace(det_file.as_posix())
|
||||
self.rec_model = ArcFaceONNX(rec_file.as_posix())
|
||||
|
||||
self.det_model.prepare(
|
||||
ctx_id=-1,
|
||||
det_thresh=self.min_score,
|
||||
det_size=(640, 640),
|
||||
input_size=(640, 640),
|
||||
)
|
||||
self.rec_model.prepare(ctx_id=-1)
|
||||
|
||||
def _predict(self, image: cv2.Mat) -> list[dict[str, Any]]:
|
||||
bboxes, kpss = self.det_model.detect(image)
|
||||
if bboxes.size == 0:
|
||||
return []
|
||||
assert isinstance(kpss, np.ndarray)
|
||||
|
||||
scores = bboxes[:, 4].tolist()
|
||||
bboxes = bboxes[:, :4].round().tolist()
|
||||
|
||||
def predict(self, image: cv2.Mat) -> list[dict[str, Any]]:
|
||||
height, width, _ = image.shape
|
||||
results = []
|
||||
faces = self.model.get(image)
|
||||
|
||||
for face in faces:
|
||||
x1, y1, x2, y2 = face.bbox
|
||||
|
||||
height, width, _ = image.shape
|
||||
for (x1, y1, x2, y2), score, kps in zip(bboxes, scores, kpss):
|
||||
cropped_img = norm_crop(image, kps)
|
||||
embedding = self.rec_model.get_feat(cropped_img)[0].tolist()
|
||||
results.append(
|
||||
{
|
||||
"imageWidth": width,
|
||||
"imageHeight": height,
|
||||
"boundingBox": {
|
||||
"x1": round(x1),
|
||||
"y1": round(y1),
|
||||
"x2": round(x2),
|
||||
"y2": round(y2),
|
||||
"x1": x1,
|
||||
"y1": y1,
|
||||
"x2": x2,
|
||||
"y2": y2,
|
||||
},
|
||||
"score": face.det_score.item(),
|
||||
"embedding": face.normed_embedding.tolist(),
|
||||
"score": score,
|
||||
"embedding": embedding,
|
||||
}
|
||||
)
|
||||
return results
|
||||
|
||||
@property
|
||||
def cached(self) -> bool:
|
||||
return self.cache_dir.is_dir() and any(self.cache_dir.glob("*.onnx"))
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
from huggingface_hub import snapshot_download
|
||||
from PIL.Image import Image
|
||||
from transformers.pipelines import pipeline
|
||||
|
||||
@@ -22,14 +23,19 @@ class ImageClassifier(InferenceModel):
|
||||
self.min_score = min_score
|
||||
super().__init__(model_name, cache_dir, **model_kwargs)
|
||||
|
||||
def load(self, **model_kwargs: Any) -> None:
|
||||
def _download(self, **model_kwargs: Any) -> None:
|
||||
snapshot_download(
|
||||
cache_dir=self.cache_dir, repo_id=self.model_name, allow_patterns=["*.bin", "*.json", "*.txt"]
|
||||
)
|
||||
|
||||
def _load(self, **model_kwargs: Any) -> None:
|
||||
self.model = pipeline(
|
||||
self.model_type.value,
|
||||
self.model_name,
|
||||
model_kwargs={"cache_dir": self.cache_dir, **model_kwargs},
|
||||
)
|
||||
|
||||
def predict(self, image: Image) -> list[str]:
|
||||
def _predict(self, image: Image) -> list[str]:
|
||||
predictions: list[dict[str, Any]] = self.model(image) # type: ignore
|
||||
tags = [tag for pred in predictions for tag in pred["label"].split(", ") if pred["score"] >= self.min_score]
|
||||
|
||||
|
||||
@@ -1,11 +1,13 @@
|
||||
from io import BytesIO
|
||||
from pathlib import Path
|
||||
from typing import TypeAlias
|
||||
from unittest import mock
|
||||
|
||||
import cv2
|
||||
import numpy as np
|
||||
import pytest
|
||||
from fastapi.testclient import TestClient
|
||||
from PIL import Image
|
||||
from pytest_mock import MockerFixture
|
||||
|
||||
from .config import settings
|
||||
from .models.cache import ModelCache
|
||||
@@ -14,22 +16,43 @@ from .models.facial_recognition import FaceRecognizer
|
||||
from .models.image_classification import ImageClassifier
|
||||
from .schemas import ModelType
|
||||
|
||||
ndarray: TypeAlias = np.ndarray[int, np.dtype[np.float32]]
|
||||
|
||||
|
||||
class TestImageClassifier:
|
||||
def test_init(self, mock_classifier_pipeline: mock.Mock) -> None:
|
||||
cache_dir = Path("test_cache")
|
||||
classifier = ImageClassifier("test_model_name", 0.5, cache_dir=cache_dir)
|
||||
classifier_preds = [
|
||||
{"label": "that's an image alright", "score": 0.8},
|
||||
{"label": "well it ends with .jpg", "score": 0.1},
|
||||
{"label": "idk, im just seeing bytes", "score": 0.05},
|
||||
{"label": "not sure", "score": 0.04},
|
||||
{"label": "probably a virus", "score": 0.01},
|
||||
]
|
||||
|
||||
assert classifier.min_score == 0.5
|
||||
mock_classifier_pipeline.assert_called_once_with(
|
||||
"image-classification",
|
||||
"test_model_name",
|
||||
model_kwargs={"cache_dir": cache_dir},
|
||||
)
|
||||
def test_eager_init(self, mocker: MockerFixture) -> None:
|
||||
mocker.patch.object(ImageClassifier, "download")
|
||||
mock_load = mocker.patch.object(ImageClassifier, "load")
|
||||
classifier = ImageClassifier("test_model_name", cache_dir="test_cache", eager=True, test_arg="test_arg")
|
||||
|
||||
def test_min_score(self, pil_image: Image.Image, mock_classifier_pipeline: mock.Mock) -> None:
|
||||
assert classifier.model_name == "test_model_name"
|
||||
mock_load.assert_called_once_with(test_arg="test_arg")
|
||||
|
||||
def test_lazy_init(self, mocker: MockerFixture) -> None:
|
||||
mock_download = mocker.patch.object(ImageClassifier, "download")
|
||||
mock_load = mocker.patch.object(ImageClassifier, "load")
|
||||
face_model = ImageClassifier("test_model_name", cache_dir="test_cache", eager=False, test_arg="test_arg")
|
||||
|
||||
assert face_model.model_name == "test_model_name"
|
||||
mock_download.assert_called_once_with(test_arg="test_arg")
|
||||
mock_load.assert_not_called()
|
||||
|
||||
def test_min_score(self, pil_image: Image.Image, mocker: MockerFixture) -> None:
|
||||
mocker.patch.object(ImageClassifier, "load")
|
||||
classifier = ImageClassifier("test_model_name", min_score=0.0)
|
||||
classifier.min_score = 0.0
|
||||
assert classifier.min_score == 0.0
|
||||
|
||||
classifier.model = mock.Mock()
|
||||
classifier.model.return_value = self.classifier_preds
|
||||
|
||||
all_labels = classifier.predict(pil_image)
|
||||
classifier.min_score = 0.5
|
||||
filtered_labels = classifier.predict(pil_image)
|
||||
@@ -46,45 +69,94 @@ class TestImageClassifier:
|
||||
|
||||
|
||||
class TestCLIP:
|
||||
def test_init(self, mock_st: mock.Mock) -> None:
|
||||
CLIPSTEncoder("test_model_name", cache_dir="test_cache")
|
||||
embedding = np.random.rand(512).astype(np.float32)
|
||||
|
||||
mock_st.assert_called_once_with("test_model_name", cache_folder="test_cache")
|
||||
def test_eager_init(self, mocker: MockerFixture) -> None:
|
||||
mocker.patch.object(CLIPSTEncoder, "download")
|
||||
mock_load = mocker.patch.object(CLIPSTEncoder, "load")
|
||||
clip_model = CLIPSTEncoder("test_model_name", cache_dir="test_cache", eager=True, test_arg="test_arg")
|
||||
|
||||
def test_basic_image(self, pil_image: Image.Image, mock_st: mock.Mock) -> None:
|
||||
assert clip_model.model_name == "test_model_name"
|
||||
mock_load.assert_called_once_with(test_arg="test_arg")
|
||||
|
||||
def test_lazy_init(self, mocker: MockerFixture) -> None:
|
||||
mock_download = mocker.patch.object(CLIPSTEncoder, "download")
|
||||
mock_load = mocker.patch.object(CLIPSTEncoder, "load")
|
||||
clip_model = CLIPSTEncoder("test_model_name", cache_dir="test_cache", eager=False, test_arg="test_arg")
|
||||
|
||||
assert clip_model.model_name == "test_model_name"
|
||||
mock_download.assert_called_once_with(test_arg="test_arg")
|
||||
mock_load.assert_not_called()
|
||||
|
||||
def test_basic_image(self, pil_image: Image.Image, mocker: MockerFixture) -> None:
|
||||
mocker.patch.object(CLIPSTEncoder, "load")
|
||||
clip_encoder = CLIPSTEncoder("test_model_name", cache_dir="test_cache")
|
||||
clip_encoder.model = mock.Mock()
|
||||
clip_encoder.model.encode.return_value = self.embedding
|
||||
embedding = clip_encoder.predict(pil_image)
|
||||
|
||||
assert isinstance(embedding, list)
|
||||
assert len(embedding) == 512
|
||||
assert all([isinstance(num, float) for num in embedding])
|
||||
mock_st.assert_called_once()
|
||||
clip_encoder.model.encode.assert_called_once()
|
||||
|
||||
def test_basic_text(self, mock_st: mock.Mock) -> None:
|
||||
def test_basic_text(self, mocker: MockerFixture) -> None:
|
||||
mocker.patch.object(CLIPSTEncoder, "load")
|
||||
clip_encoder = CLIPSTEncoder("test_model_name", cache_dir="test_cache")
|
||||
clip_encoder.model = mock.Mock()
|
||||
clip_encoder.model.encode.return_value = self.embedding
|
||||
embedding = clip_encoder.predict("test search query")
|
||||
|
||||
assert isinstance(embedding, list)
|
||||
assert len(embedding) == 512
|
||||
assert all([isinstance(num, float) for num in embedding])
|
||||
mock_st.assert_called_once()
|
||||
clip_encoder.model.encode.assert_called_once()
|
||||
|
||||
|
||||
class TestFaceRecognition:
|
||||
def test_init(self, mock_faceanalysis: mock.Mock) -> None:
|
||||
FaceRecognizer("test_model_name", cache_dir="test_cache")
|
||||
def test_eager_init(self, mocker: MockerFixture) -> None:
|
||||
mocker.patch.object(FaceRecognizer, "download")
|
||||
mock_load = mocker.patch.object(FaceRecognizer, "load")
|
||||
face_model = FaceRecognizer("test_model_name", cache_dir="test_cache", eager=True, test_arg="test_arg")
|
||||
|
||||
mock_faceanalysis.assert_called_once_with(
|
||||
name="test_model_name",
|
||||
root="test_cache",
|
||||
allowed_modules=["detection", "recognition"],
|
||||
)
|
||||
assert face_model.model_name == "test_model_name"
|
||||
mock_load.assert_called_once_with(test_arg="test_arg")
|
||||
|
||||
def test_basic(self, cv_image: cv2.Mat, mock_faceanalysis: mock.Mock) -> None:
|
||||
def test_lazy_init(self, mocker: MockerFixture) -> None:
|
||||
mock_download = mocker.patch.object(FaceRecognizer, "download")
|
||||
mock_load = mocker.patch.object(FaceRecognizer, "load")
|
||||
face_model = FaceRecognizer("test_model_name", cache_dir="test_cache", eager=False, test_arg="test_arg")
|
||||
|
||||
assert face_model.model_name == "test_model_name"
|
||||
mock_download.assert_called_once_with(test_arg="test_arg")
|
||||
mock_load.assert_not_called()
|
||||
|
||||
def test_set_min_score(self, mocker: MockerFixture) -> None:
|
||||
mocker.patch.object(FaceRecognizer, "load")
|
||||
face_recognizer = FaceRecognizer("test_model_name", cache_dir="test_cache", min_score=0.5)
|
||||
|
||||
assert face_recognizer.min_score == 0.5
|
||||
|
||||
def test_basic(self, cv_image: cv2.Mat, mocker: MockerFixture) -> None:
|
||||
mocker.patch.object(FaceRecognizer, "load")
|
||||
face_recognizer = FaceRecognizer("test_model_name", min_score=0.0, cache_dir="test_cache")
|
||||
|
||||
det_model = mock.Mock()
|
||||
num_faces = 2
|
||||
bbox = np.random.rand(num_faces, 4).astype(np.float32)
|
||||
score = np.array([[0.67]] * num_faces).astype(np.float32)
|
||||
kpss = np.random.rand(num_faces, 5, 2).astype(np.float32)
|
||||
det_model.detect.return_value = (np.concatenate([bbox, score], axis=-1), kpss)
|
||||
face_recognizer.det_model = det_model
|
||||
|
||||
rec_model = mock.Mock()
|
||||
embedding = np.random.rand(num_faces, 512).astype(np.float32)
|
||||
rec_model.get_feat.return_value = embedding
|
||||
face_recognizer.rec_model = rec_model
|
||||
|
||||
faces = face_recognizer.predict(cv_image)
|
||||
|
||||
assert len(faces) == 2
|
||||
assert len(faces) == num_faces
|
||||
for face in faces:
|
||||
assert face["imageHeight"] == 800
|
||||
assert face["imageWidth"] == 600
|
||||
@@ -92,7 +164,8 @@ class TestFaceRecognition:
|
||||
assert len(face["embedding"]) == 512
|
||||
assert all([isinstance(num, float) for num in face["embedding"]])
|
||||
|
||||
mock_faceanalysis.assert_called_once()
|
||||
det_model.detect.assert_called_once()
|
||||
assert rec_model.get_feat.call_count == num_faces
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
|
||||
1211
machine-learning/poetry.lock
generated
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
|
||||
[tool.poetry]
|
||||
name = "machine-learning"
|
||||
version = "1.66.0"
|
||||
version = "1.72.2"
|
||||
description = ""
|
||||
authors = ["Hau Tran <alex.tran1502@gmail.com>"]
|
||||
readme = "README.md"
|
||||
@@ -22,8 +22,6 @@ fastapi = "^0.95.2"
|
||||
uvicorn = {extras = ["standard"], version = "^0.22.0"}
|
||||
pydantic = "^1.10.8"
|
||||
aiocache = "^0.12.1"
|
||||
pytest-cov = "^4.1.0"
|
||||
ruff = "^0.0.272"
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
mypy = "^1.3.0"
|
||||
@@ -33,6 +31,9 @@ locust = "^2.15.1"
|
||||
gunicorn = "^20.1.0"
|
||||
httpx = "^0.24.1"
|
||||
pytest-asyncio = "^0.21.0"
|
||||
pytest-cov = "^4.1.0"
|
||||
ruff = "^0.0.272"
|
||||
pytest-mock = "^3.11.1"
|
||||
|
||||
[[tool.poetry.source]]
|
||||
name = "pytorch-cpu"
|
||||
@@ -60,10 +61,14 @@ warn_untyped_fields = true
|
||||
|
||||
[[tool.mypy.overrides]]
|
||||
module = [
|
||||
"huggingface_hub",
|
||||
"transformers.pipelines",
|
||||
"cv2",
|
||||
"insightface.app",
|
||||
"insightface.model_zoo",
|
||||
"insightface.utils.face_align",
|
||||
"insightface.utils.storage",
|
||||
"sentence_transformers",
|
||||
"sentence_transformers.util",
|
||||
"aiocache.backends.memory",
|
||||
"aiocache.lock",
|
||||
"aiocache.plugins"
|
||||
|
||||
2
mobile/android/.gitignore
vendored
@@ -13,4 +13,4 @@ key.properties
|
||||
**/*.jks
|
||||
|
||||
# Fastlane
|
||||
/fastlane/report.xml
|
||||
fastlane/report.xml
|
||||
|
||||
@@ -7,6 +7,10 @@
|
||||
android:name="io.flutter.embedding.android.EnableImpeller"
|
||||
android:value="false" />
|
||||
|
||||
<meta-data
|
||||
android:name="com.google.firebase.messaging.default_notification_icon"
|
||||
android:resource="@drawable/notification_icon" />
|
||||
|
||||
<activity android:name=".MainActivity" android:exported="true" android:launchMode="singleTop"
|
||||
android:theme="@style/LaunchTheme"
|
||||
android:configChanges="orientation|keyboardHidden|keyboard|screenSize|smallestScreenSize|locale|layoutDirection|fontScale|screenLayout|density|uiMode"
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.