Compare commits

..

11 Commits

Author SHA1 Message Date
Min Idzelis
915db962fc saving for later 2025-06-28 15:01:39 +00:00
Min Idzelis
2406bb2951 sync before lint/check 2025-06-28 04:03:26 +00:00
Min Idzelis
e52b43c075 shellcheck 2025-06-28 03:56:55 +00:00
Min Idzelis
227819343a update lockfile after merge 2025-06-28 03:56:05 +00:00
Min Idzelis
fccf698a5c Merge remote-tracking branch 'origin/main' into chore/pnpm_alt 2025-06-28 03:50:28 +00:00
Min Idzelis
6330319ee4 update gha 2025-06-28 03:42:36 +00:00
Min Idzelis
c026a53cb7 Dockerfile changes 2025-06-28 03:17:06 +00:00
Jason Rasmussen
09cbc5d3f4 refactor: change password repository lookup (#19584) 2025-06-27 16:52:04 -04:00
Jason Rasmussen
a2a9797fab refactor: auth medium tests (#19583) 2025-06-27 15:35:19 -04:00
Min Idzelis
1c339ff85b more progress - dockerfile 2025-06-26 22:41:24 +00:00
Min Idzelis
839db1e2c4 pnpm 2025-06-24 21:37:42 +00:00
78 changed files with 41054 additions and 23052 deletions

View File

@@ -73,10 +73,7 @@ install_dependencies() {
log "Installing dependencies"
(
cd "${IMMICH_WORKSPACE}" || exit 1
run_cmd make ci-server
run_cmd make ci-sdk
run_cmd make build-sdk
run_cmd make ci-web
CI=1 run_cmd make install-all
)
log ""
}

View File

@@ -8,11 +8,6 @@ services:
- IMMICH_SERVER_URL=http://127.0.0.1:2283/
volumes: !override
- ..:/workspaces/immich
- cli_node_modules:/workspaces/immich/cli/node_modules
- e2e_node_modules:/workspaces/immich/e2e/node_modules
- open_api_node_modules:/workspaces/immich/open-api/typescript-sdk/node_modules
- server_node_modules:/workspaces/immich/server/node_modules
- web_node_modules:/workspaces/immich/web/node_modules
- ${UPLOAD_LOCATION:-upload1-devcontainer-volume}${UPLOAD_LOCATION:+/photos}:/workspaces/immich/server/upload
- ${UPLOAD_LOCATION:-upload2-devcontainer-volume}${UPLOAD_LOCATION:+/photos/upload}:/workspaces/immich/server/upload/upload
- /etc/localtime:/etc/localtime:ro

View File

@@ -10,8 +10,9 @@ cd "${IMMICH_WORKSPACE}/server" || (
exit 1
)
CI=1 pnpm install
while true; do
run_cmd node ./node_modules/.bin/nest start --debug "0.0.0.0:9230" --watch
run_cmd pnpm exec nest start --debug "0.0.0.0:9230" --watch
log "Nest API Server crashed with exit code $?. Respawning in 3s ..."
sleep 3
done

View File

@@ -16,7 +16,7 @@ until curl --output /dev/null --silent --head --fail "http://127.0.0.1:${IMMICH_
done
while true; do
run_cmd node ./node_modules/.bin/vite dev --host 0.0.0.0 --port "${DEV_PORT}"
run_cmd pnpm exec vite dev --host 0.0.0.0 --port "${DEV_PORT}"
log "Web crashed with exit code $?. Respawning in 3s ..."
sleep 3
done

View File

@@ -4,6 +4,7 @@
design/
docker/
Dockerfile
!docker/scripts
docs/
!docs/package.json

View File

@@ -60,14 +60,14 @@ jobs:
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Run npm install
run: npm ci
- name: Run install
run: pnpm install
- name: Check formatting
run: npm run format
run: pnpm format
- name: Run build
run: npm run build
run: pnpm build
- name: Upload build output
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2

View File

@@ -80,30 +80,33 @@ jobs:
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'npm'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run npm install
run: npm ci
- name: Run package manager install
run: pnpm install
- name: Run linter
run: npm run lint
run: pnpm lint
if: ${{ !cancelled() }}
- name: Run formatter
run: npm run format
run: pnpm format
if: ${{ !cancelled() }}
- name: Run tsc
run: npm run check
run: pnpm check
if: ${{ !cancelled() }}
- name: Run small tests & coverage
run: npm test
run: pnpm test
if: ${{ !cancelled() }}
cli-unit-tests:
@@ -123,34 +126,37 @@ jobs:
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './cli/.nvmrc'
cache: 'npm'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Setup typescript-sdk
run: npm ci && npm run build
run: pnpm install && pnpm run build
working-directory: ./open-api/typescript-sdk
- name: Install deps
run: npm ci
run: pnpm install
- name: Run linter
run: npm run lint
run: pnpm lint
if: ${{ !cancelled() }}
- name: Run formatter
run: npm run format
run: pnpm format
if: ${{ !cancelled() }}
- name: Run tsc
run: npm run check
run: pnpm check
if: ${{ !cancelled() }}
- name: Run unit tests & coverage
run: npm run test
run: pnpm test
if: ${{ !cancelled() }}
cli-unit-tests-win:
@@ -170,27 +176,30 @@ jobs:
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './cli/.nvmrc'
cache: 'npm'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Setup typescript-sdk
run: npm ci && npm run build
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk
- name: Install deps
run: npm ci
run: pnpm install --frozen-lockfile
# Skip linter & formatter in Windows test.
- name: Run tsc
run: npm run check
run: pnpm check
if: ${{ !cancelled() }}
- name: Run unit tests & coverage
run: npm run test
run: pnpm test
if: ${{ !cancelled() }}
web-lint:
@@ -210,30 +219,33 @@ jobs:
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './web/.nvmrc'
cache: 'npm'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run setup typescript-sdk
run: npm ci && npm run build
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk
- name: Run pnpm install
run: npm ci
run: pnpm install --frozen-lockfile
- name: Run linter
run: npm run lint:p
run: pnpm lint:p
if: ${{ !cancelled() }}
- name: Run formatter
run: npm run format
run: pnpm format
if: ${{ !cancelled() }}
- name: Run svelte checks
run: npm run check:svelte
run: pnpm check:svelte
if: ${{ !cancelled() }}
web-unit-tests:
@@ -253,26 +265,29 @@ jobs:
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './web/.nvmrc'
cache: 'npm'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run setup typescript-sdk
run: npm ci && npm run build
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk
- name: Run pnpm install
run: npm ci
run: pnpm install --frozen-lockfile
- name: Run tsc
run: npm run check:typescript
run: pnpm check:typescript
if: ${{ !cancelled() }}
- name: Run unit tests & coverage
run: npm run test
run: pnpm test
if: ${{ !cancelled() }}
i18n-tests:
@@ -288,18 +303,21 @@ jobs:
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './web/.nvmrc'
cache: 'npm'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Install dependencies
run: npm --prefix=web ci
run: pnpm --filter=immich-web install --frozen-lockfile
- name: Format
run: npm --prefix=web run format:i18n
run: pnpm --filter=immich-web format:i18n
- name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
@@ -334,32 +352,35 @@ jobs:
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './e2e/.nvmrc'
cache: 'npm'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run setup typescript-sdk
run: npm ci && npm run build
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk
if: ${{ !cancelled() }}
- name: Install dependencies
run: npm ci
run: pnpm install --frozen-lockfile
if: ${{ !cancelled() }}
- name: Run linter
run: npm run lint
run: pnpm lint
if: ${{ !cancelled() }}
- name: Run formatter
run: npm run format
run: pnpm format
if: ${{ !cancelled() }}
- name: Run tsc
run: npm run check
run: pnpm check
if: ${{ !cancelled() }}
server-medium-tests:
@@ -379,18 +400,21 @@ jobs:
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'npm'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run pnpm install
run: npm ci
run: pnpm install --frozen-lockfile
- name: Run medium tests
run: npm run test:medium
run: pnpm test:medium
if: ${{ !cancelled() }}
e2e-tests-server-cli:
@@ -414,25 +438,28 @@ jobs:
persist-credentials: false
submodules: 'recursive'
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './e2e/.nvmrc'
cache: 'npm'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run setup typescript-sdk
run: npm ci && npm run build
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk
if: ${{ !cancelled() }}
- name: Run setup cli
run: npm ci && npm run build
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./cli
if: ${{ !cancelled() }}
- name: Install dependencies
run: npm ci
run: pnpm install --frozen-lockfile
if: ${{ !cancelled() }}
- name: Docker build
@@ -440,7 +467,7 @@ jobs:
if: ${{ !cancelled() }}
- name: Run e2e tests (api & cli)
run: npm run test
run: pnpm test
if: ${{ !cancelled() }}
e2e-tests-web:
@@ -464,20 +491,23 @@ jobs:
persist-credentials: false
submodules: 'recursive'
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './e2e/.nvmrc'
cache: 'npm'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run setup typescript-sdk
run: npm ci && npm run build
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk
if: ${{ !cancelled() }}
- name: Install dependencies
run: npm ci
run: pnpm install --frozen-lockfile
if: ${{ !cancelled() }}
- name: Install Playwright Browsers
@@ -584,18 +614,21 @@ jobs:
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './.github/.nvmrc'
cache: 'npm'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run pnpm install
run: npm ci
run: pnpm install --frozen-lockfile
- name: Run formatter
run: npm run format
run: pnpm format
if: ${{ !cancelled() }}
shellcheck:
@@ -627,18 +660,21 @@ jobs:
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'npm'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Install server dependencies
run: npm --prefix=server ci
run: pnpm --filter immich install --frozen-lockfile
- name: Build the app
run: npm --prefix=server run build
run: pnpm --filter immich build
- name: Run API generation
run: make open-api
@@ -690,28 +726,31 @@ jobs:
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'npm'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Install server dependencies
run: npm ci
run: pnpm install --frozen-lockfile
- name: Build the app
run: npm run build
run: pnpm build
- name: Run existing migrations
run: npm run migrations:run
run: pnpm migrations:run
- name: Test pnpm schema:reset command works
run: npm run schema:reset
run: pnpm schema:reset
- name: Generate new migrations
continue-on-error: true
run: npm run migrations:generate src/TestMigration
run: pnpm migrations:generate src/TestMigration
- name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
@@ -730,7 +769,7 @@ jobs:
exit 1
- name: Run SQL generation
run: npm run sync:sql
run: pnpm sync:sql
env:
DB_URL: postgres://postgres:postgres@localhost:5432/immich

View File

@@ -34,41 +34,52 @@ open-api-typescript:
cd ./open-api && bash ./bin/generate-open-api.sh typescript
sql:
npm --prefix server run sync:sql
pnpm --filter immich run sync:sql
attach-server:
docker exec -it docker_immich-server_1 sh
renovate:
LOG_LEVEL=debug npx renovate --platform=local --repository-cache=reset
LOG_LEVEL=debug npx renovate --platform=local --repository-cache=reset
MODULES = e2e server web cli sdk docs .github
# package names mapping function
# cli = @immich/cli
# docs = documentation
# e2e = immich-e2e
# open-api/typescript-sdk = @immich/sdk
# server = immich
# web = immich-web
map-package = $(subst sdk,@immich/sdk,$(subst cli,@immich/cli,$(subst docs,documentation,$(subst e2e,immich-e2e,$(subst server,immich,$(subst web,immich-web,$1))))))
audit-%:
npm --prefix $(subst sdk,open-api/typescript-sdk,$*) audit fix
pnpm --filter $(call map-package,$*) audit fix
install-%:
npm --prefix $(subst sdk,open-api/typescript-sdk,$*) i
pnpm --filter $(call map-package,$*) install
ci-%:
npm --prefix $(subst sdk,open-api/typescript-sdk,$*) ci
pnpm --filter $(call map-package,$*) install --frozen-lockfile
build-cli: build-sdk
build-web: build-sdk
build-%: install-%
npm --prefix $(subst sdk,open-api/typescript-sdk,$*) run build
pnpm --filter $(call map-package,$*) run build
format-%:
npm --prefix $* run format:fix
pnpm --filter $(call map-package,$*) run format:fix
lint-%:
npm --prefix $* run lint:fix
pnpm --filter $(call map-package,$*) run lint:fix
lint-web:
pnpm --filter immich-web run lint:p
check-%:
npm --prefix $* run check
pnpm --filter $(call map-package,$*) run check
check-web:
npm --prefix web run check:typescript
npm --prefix web run check:svelte
pnpm --filter immich-web run check:typescript
pnpm --filter immich-web run check:svelte
test-%:
npm --prefix $* run test
pnpm --filter $(call map-package,$*) run test
test-e2e:
docker compose -f ./e2e/docker-compose.yml build
npm --prefix e2e run test
npm --prefix e2e run test:web
pnpm --filter immich-e2e run test
pnpm --filter immich-e2e run test:web
test-medium:
docker run \
--rm \
@@ -78,24 +89,39 @@ test-medium:
-v ./server/tsconfig.json:/usr/src/app/tsconfig.json \
-e NODE_ENV=development \
immich-server:latest \
-c "npm ci && npm run test:medium -- --run"
-c "pnpm test:medium -- --run"
test-medium-dev:
docker exec -it immich_server /bin/sh -c "npm run test:medium"
docker exec -it immich_server /bin/sh -c "pnpm run test:medium"
build-all: $(foreach M,$(filter-out e2e .github,$(MODULES)),build-$M) ;
install-all: $(foreach M,$(MODULES),install-$M) ;
ci-all: $(foreach M,$(filter-out .github,$(MODULES)),ci-$M) ;
check-all: $(foreach M,$(filter-out sdk cli docs .github,$(MODULES)),check-$M) ;
lint-all: $(foreach M,$(filter-out sdk docs .github,$(MODULES)),lint-$M) ;
format-all: $(foreach M,$(filter-out sdk,$(MODULES)),format-$M) ;
audit-all: $(foreach M,$(MODULES),audit-$M) ;
hygiene-all: lint-all format-all check-all sql audit-all;
test-all: $(foreach M,$(filter-out sdk docs .github,$(MODULES)),test-$M) ;
install-all:
pnpm -r --filter '!documentation' install
ci-all:
pnpm -r --filter '!documentation' install --frozen-lockfile
build-all: $(foreach M,$(filter-out e2e docs .github,$(MODULES)),build-$M) ;
check-all:
pnpm -r --filter '!documentation' run "/^(check|check\:svelte|check\:typescript)$/"
lint-all:
pnpm -r --filter '!documentation' run lint:fix
format-all:
pnpm -r --filter '!documentation' run format:fix
audit-all:
pnpm -r --filter '!documentation' audit fix
hygiene-all: audit-all
pnpm -r --filter '!documentation' run "/(format:fix|check|check:svelte|check:typescript|sql)/"
test-all:
pnpm -r --filter '!documentation' run "/^test/"
prune:
pnpm store prune
clean:
find . -name "node_modules" -type d -prune -exec rm -rf '{}' +
find . -name "node_modules" -type d -prune -exec rm -rf {} +
find . -name "dist" -type d -prune -exec rm -rf '{}' +
find . -name "build" -type d -prune -exec rm -rf '{}' +
find . -name "svelte-kit" -type d -prune -exec rm -rf '{}' +
docker compose -f ./docker/docker-compose.dev.yml rm -v -f || true
docker compose -f ./e2e/docker-compose.yml rm -v -f || true
command -v docker >/dev/null 2>&1 && docker compose -f ./docker/docker-compose.dev.yml rm -v -f || true
command -v docker >/dev/null 2>&1 && docker compose -f ./e2e/docker-compose.yml rm -v -f || true

View File

@@ -6,8 +6,10 @@ Please see the [Immich CLI documentation](https://immich.app/docs/features/comma
Before building the CLI, you must build the immich server and the open-api client. To build the server run the following in the server folder:
$ npm install
$ npm run build
# if you don't have pnpm installed
$ npm install -g pnpm
$ pnpm install
$ pnpm build
Then, to build the open-api client run the following in the open-api folder:
@@ -15,8 +17,10 @@ Then, to build the open-api client run the following in the open-api folder:
To run the Immich CLI from source, run the following in the cli folder:
$ npm install
$ npm run build
# if you don't have pnpm installed
$ npm install -g pnpm
$ pnpm install
$ pnpm build
$ ts-node .
You'll need ts-node, the easiest way to install it is to use npm:

2
cli/bin/immich Executable file
View File

@@ -0,0 +1,2 @@
#!/usr/bin/env node
require('../dist/index.js');

View File

@@ -5,7 +5,7 @@
"type": "module",
"exports": "./dist/index.js",
"bin": {
"immich": "dist/index.js"
"immich": "./bin/immich"
},
"license": "GNU Affero General Public License version 3",
"keywords": [

2825
cli/pnpm-lock.yaml generated Normal file

File diff suppressed because it is too large Load Diff

22
docker/.env.bak Normal file
View File

@@ -0,0 +1,22 @@
# You can find documentation for all the supported env variables at https://immich.app/docs/install/environment-variables
# The location where your uploaded files are stored
UPLOAD_LOCATION=/LUNA/ALPHA/MEDIA/PHOTOS/immich_dev_library
# The location where your database files are stored. Network shares are not supported for the database
DB_DATA_LOCATION=./postgres
# To set a timezone, uncomment the next line and change Etc/UTC to a TZ identifier from this list: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones#List
# TZ=Etc/UTC
# The Immich version to use. You can pin this to a specific version like "v1.71.0"
IMMICH_VERSION=release
# Connection secret for postgres. You should change it to a random password
# Please use only the characters `A-Za-z0-9`, without special characters or spaces
DB_PASSWORD=postgres
# The values below this line do not need to be changed
###################################################################################
DB_USERNAME=postgres
DB_DATABASE_NAME=immich

View File

@@ -24,11 +24,11 @@ services:
build:
context: ../
dockerfile: server/Dockerfile
target: dev
target: dev-docker
restart: unless-stopped
volumes:
- ../server:/usr/src/app
- ../open-api:/usr/src/open-api
- ../open-api/:/usr/src/open-api
- ${UPLOAD_LOCATION}/photos:/usr/src/app/upload
- ${UPLOAD_LOCATION}/photos/upload:/usr/src/app/upload/upload
- /usr/src/app/node_modules
@@ -69,7 +69,8 @@ services:
# Needed for rootless docker setup, see https://github.com/moby/moby/issues/45919
# user: 0:0
build:
context: ../web
context: ../
dockerfile: web/Dockerfile
command: ['/usr/src/app/bin/immich-web']
env_file:
- .env
@@ -79,7 +80,7 @@ services:
volumes:
- ../web:/usr/src/app
- ../i18n:/usr/src/i18n
- ../open-api/:/usr/src/open-api/
- ../open-api/:/usr/src/open-api
# - ../../ui:/usr/ui
- /usr/src/app/node_modules
ulimits:

View File

@@ -5,7 +5,7 @@ This website is built using [Docusaurus](https://docusaurus.io/), a modern stati
### Installation
```
$ npm install
$ pnpm install
```
### Local Development

View File

@@ -150,12 +150,10 @@ for more info read the [release notes](https://github.com/immich-app/immich/rele
- Preview images (small thumbnails and large previews) for each asset and thumbnails for recognized faces.
- Stored in `UPLOAD_LOCATION/thumbs/<userID>`.
- **Encoded Assets:**
- Videos that have been re-encoded from the original for wider compatibility. The original is not removed.
- Stored in `UPLOAD_LOCATION/encoded-video/<userID>`.
- **Postgres**
- The Immich database containing all the information to allow the system to function properly.
**Note:** This folder will only appear to users who have made the changes mentioned in [v1.102.0](https://github.com/immich-app/immich/discussions/8930) (an optional, non-mandatory change) or who started with this version.
- Stored in `DB_DATA_LOCATION`.
@@ -201,7 +199,6 @@ When you turn off the storage template engine, it will leave the assets in `UPLO
- Temporarily located in `UPLOAD_LOCATION/upload/<userID>`.
- Transferred to `UPLOAD_LOCATION/library/<userID>` upon successful upload.
- **Postgres**
- The Immich database containing all the information to allow the system to function properly.
**Note:** This folder will only appear to users who have made the changes mentioned in [v1.102.0](https://github.com/immich-app/immich/discussions/8930) (an optional, non-mandatory change) or who started with this version.
- Stored in `DB_DATA_LOCATION`.

View File

@@ -20,7 +20,6 @@ Immich supports 3rd party authentication via [OpenID Connect][oidc] (OIDC), an i
Before enabling OAuth in Immich, a new client application needs to be configured in the 3rd-party authentication server. While the specifics of this setup vary from provider to provider, the general approach should be the same.
1. Create a new (Client) Application
1. The **Provider** type should be `OpenID Connect` or `OAuth2`
2. The **Client type** should be `Confidential`
3. The **Application** type should be `Web`
@@ -29,7 +28,6 @@ Before enabling OAuth in Immich, a new client application needs to be configured
2. Configure Redirect URIs/Origins
The **Sign-in redirect URIs** should include:
- `app.immich:///oauth-callback` - for logging in with OAuth from the [Mobile App](/docs/features/mobile-app.mdx)
- `http://DOMAIN:PORT/auth/login` - for logging in with OAuth from the Web Client
- `http://DOMAIN:PORT/user-settings` - for manually linking OAuth in the Web Client
@@ -37,21 +35,17 @@ Before enabling OAuth in Immich, a new client application needs to be configured
Redirect URIs should contain all the domains you will be using to access Immich. Some examples include:
Mobile
- `app.immich:///oauth-callback` (You **MUST** include this for iOS and Android mobile apps to work properly)
Localhost
- `http://localhost:2283/auth/login`
- `http://localhost:2283/user-settings`
Local IP
- `http://192.168.0.200:2283/auth/login`
- `http://192.168.0.200:2283/user-settings`
Hostname
- `https://immich.example.com/auth/login`
- `https://immich.example.com/user-settings`

View File

@@ -199,13 +199,11 @@ To use your SSH key for commit signing, see the [GitHub guide on SSH commit sign
When the Dev Container starts, it automatically:
1. **Runs post-create script** (`container-server-post-create.sh`):
- Adjusts file permissions for the `node` user
- Installs dependencies: `npm install` in all packages
- Installs dependencies: `pnpm install` in all packages
- Builds TypeScript SDK: `pnpm build` in `open-api/typescript-sdk`
2. **Starts development servers** via VS Code tasks:
- `Immich API Server (Nest)` - API server with hot-reloading on port 2283
- `Immich Web Server (Vite)` - Web frontend with hot-reloading on port 3000
- Both servers watch for file changes and recompile automatically
@@ -335,14 +333,12 @@ make install-all # Install all dependencies
The Dev Container is pre-configured for debugging:
1. **API Server Debugging**:
- Set breakpoints in VS Code
- Press `F5` or use "Run and Debug" panel
- Select "Attach to Server" configuration
- Debug port: 9231
2. **Worker Debugging**:
- Use "Attach to Workers" configuration
- Debug port: 9230
@@ -428,7 +424,6 @@ While the Dev Container focuses on server and web development, you can connect m
```
2. **Configure mobile app**:
- Server URL: `http://YOUR_IP:2283/api`
- Ensure firewall allows port 2283

View File

@@ -56,7 +56,7 @@ If you only want to do web development connected to an existing, remote backend,
1. Build the Immich SDK - `cd open-api/typescript-sdk && pnpm i && pnpm build && cd -`
2. Enter the web directory - `cd web/`
3. Install web dependencies - `npm i`
3. Install web dependencies - `pnpm i`
4. Start the web development server
```bash

View File

@@ -5,7 +5,7 @@
### Unit tests
Unit tests are run by calling `pnpm run test` from the `server/` directory.
You need to run `npm install` (in `server/`) before _once_.
You need to run `pnpm install` (in `server/`) before _once_.
### End to end tests

View File

@@ -75,7 +75,6 @@ alt="Select Plugins > Compose.Manager > Add New Stack > Label it Immich"
5. Click "**Save Changes**", you will be prompted to edit stack UI labels, just leave this blank and click "**Ok**"
6. Select the cog ⚙️ next to Immich, click "**Edit Stack**", then click "**Env File**"
7. Paste the entire contents of the [Immich example.env](https://github.com/immich-app/immich/releases/latest/download/example.env) file into the Unraid editor, then **before saving** edit the following:
- `UPLOAD_LOCATION`: Create a folder in your Images Unraid share and place the **absolute** location here > For example my _"images"_ share has a folder within it called _"immich"_. If I browse to this directory in the terminal and type `pwd` the output is `/mnt/user/images/immich`. This is the exact value I need to enter as my `UPLOAD_LOCATION`
- `DB_DATA_LOCATION`: Change this to use an Unraid share (preferably a cache pool, e.g. `/mnt/user/appdata/postgresql/data`). This uses the `appdata` share. Do also create the `postgresql` folder, by running `mkdir /mnt/user/{share_location}/postgresql/data`. If left at default it will try to use Unraid's `/boot/config/plugins/compose.manager/projects/[stack_name]/postgres` folder which it doesn't have permissions to, resulting in this container continuously restarting.

20954
docs/package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -18,6 +18,7 @@
"dependencies": {
"@docusaurus/core": "~3.7.0",
"@docusaurus/preset-classic": "~3.7.0",
"@docusaurus/theme-common": "~3.7.0",
"@mdi/js": "^7.3.67",
"@mdi/react": "^1.6.1",
"@mdx-js/react": "^3.0.0",
@@ -26,6 +27,7 @@
"clsx": "^2.0.0",
"docusaurus-lunr-search": "^3.3.2",
"docusaurus-preset-openapi": "^0.7.5",
"lunr": "^2.3.9",
"postcss": "^8.4.25",
"prism-react-renderer": "^2.3.1",
"raw-loader": "^4.0.2",

13672
docs/pnpm-lock.yaml generated Normal file

File diff suppressed because it is too large Load Diff

4470
e2e/pnpm-lock.yaml generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -1,146 +0,0 @@
import { LoginResponseDto, login, signUpAdmin } from '@immich/sdk';
import { loginDto, signupDto } from 'src/fixtures';
import { errorDto, loginResponseDto, signupResponseDto } from 'src/responses';
import { app, utils } from 'src/utils';
import request from 'supertest';
import { beforeEach, describe, expect, it } from 'vitest';
const { email, password } = signupDto.admin;
describe(`/auth/admin-sign-up`, () => {
beforeEach(async () => {
await utils.resetDatabase();
});
describe('POST /auth/admin-sign-up', () => {
it(`should sign up the admin`, async () => {
const { status, body } = await request(app).post('/auth/admin-sign-up').send(signupDto.admin);
expect(status).toBe(201);
expect(body).toEqual(signupResponseDto.admin);
});
it('should not allow a second admin to sign up', async () => {
await signUpAdmin({ signUpDto: signupDto.admin });
const { status, body } = await request(app).post('/auth/admin-sign-up').send(signupDto.admin);
expect(status).toBe(400);
expect(body).toEqual(errorDto.alreadyHasAdmin);
});
});
});
describe('/auth/*', () => {
let admin: LoginResponseDto;
beforeEach(async () => {
await utils.resetDatabase();
await signUpAdmin({ signUpDto: signupDto.admin });
admin = await login({ loginCredentialDto: loginDto.admin });
});
describe(`POST /auth/login`, () => {
it('should reject an incorrect password', async () => {
const { status, body } = await request(app).post('/auth/login').send({ email, password: 'incorrect' });
expect(status).toBe(401);
expect(body).toEqual(errorDto.incorrectLogin);
});
it('should accept a correct password', async () => {
const { status, body, headers } = await request(app).post('/auth/login').send({ email, password });
expect(status).toBe(201);
expect(body).toEqual(loginResponseDto.admin);
const token = body.accessToken;
expect(token).toBeDefined();
const cookies = headers['set-cookie'];
expect(cookies).toHaveLength(3);
expect(cookies[0].split(';').map((item) => item.trim())).toEqual([
`immich_access_token=${token}`,
'Max-Age=34560000',
'Path=/',
expect.stringContaining('Expires='),
'HttpOnly',
'SameSite=Lax',
]);
expect(cookies[1].split(';').map((item) => item.trim())).toEqual([
'immich_auth_type=password',
'Max-Age=34560000',
'Path=/',
expect.stringContaining('Expires='),
'HttpOnly',
'SameSite=Lax',
]);
expect(cookies[2].split(';').map((item) => item.trim())).toEqual([
'immich_is_authenticated=true',
'Max-Age=34560000',
'Path=/',
expect.stringContaining('Expires='),
'SameSite=Lax',
]);
});
});
describe('POST /auth/validateToken', () => {
it('should reject an invalid token', async () => {
const { status, body } = await request(app).post(`/auth/validateToken`).set('Authorization', 'Bearer 123');
expect(status).toBe(401);
expect(body).toEqual(errorDto.invalidToken);
});
it('should accept a valid token', async () => {
const { status, body } = await request(app)
.post(`/auth/validateToken`)
.send({})
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual({ authStatus: true });
});
});
describe('POST /auth/change-password', () => {
it('should require the current password', async () => {
const { status, body } = await request(app)
.post(`/auth/change-password`)
.send({ password: 'wrong-password', newPassword: 'Password1234' })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.wrongPassword);
});
it('should change the password', async () => {
const { status } = await request(app)
.post(`/auth/change-password`)
.send({ password, newPassword: 'Password1234' })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
await login({
loginCredentialDto: {
email: 'admin@immich.cloud',
password: 'Password1234',
},
});
});
});
describe('POST /auth/logout', () => {
  // Logout is only available to authenticated sessions.
  it('should require authentication', async () => {
    const res = await request(app).post(`/auth/logout`);
    expect(res.status).toBe(401);
    expect(res.body).toEqual(errorDto.unauthorized);
  });

  // A successful logout reports success and the post-logout redirect.
  it('should logout the user', async () => {
    const res = await request(app)
      .post(`/auth/logout`)
      .set('Authorization', `Bearer ${admin.accessToken}`);
    expect(res.status).toBe(200);
    expect(res.body).toEqual({
      successful: true,
      redirectUri: '/auth/login?autoLaunch=0',
    });
  });
});
});

File diff suppressed because one or more lines are too long

View File

@@ -5,7 +5,7 @@ import 'package:immich_mobile/constants/constants.dart';
import 'package:immich_mobile/domain/models/log.model.dart';
import 'package:immich_mobile/domain/models/store.model.dart';
import 'package:immich_mobile/infrastructure/repositories/log.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/drift_store.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/store.repository.dart';
import 'package:logging/logging.dart';
/// Service responsible for handling application logging.
@@ -14,8 +14,8 @@ import 'package:logging/logging.dart';
/// writes them to a persistent [ILogRepository], and manages log levels
/// via [IStoreRepository]
class LogService {
final LogRepository _logRepository;
final IStoreRepository _storeRepository;
final IsarLogRepository _logRepository;
final IsarStoreRepository _storeRepository;
final List<LogMessage> _msgBuffer = [];
@@ -37,8 +37,8 @@ class LogService {
}
static Future<LogService> init({
required LogRepository logRepository,
required IStoreRepository storeRepository,
required IsarLogRepository logRepository,
required IsarStoreRepository storeRepository,
bool shouldBuffer = true,
}) async {
_instance ??= await create(
@@ -50,8 +50,8 @@ class LogService {
}
static Future<LogService> create({
required LogRepository logRepository,
required IStoreRepository storeRepository,
required IsarLogRepository logRepository,
required IsarStoreRepository storeRepository,
bool shouldBuffer = true,
}) async {
final instance = LogService._(logRepository, storeRepository, shouldBuffer);

View File

@@ -1,18 +1,18 @@
import 'dart:async';
import 'package:immich_mobile/domain/models/store.model.dart';
import 'package:immich_mobile/infrastructure/repositories/drift_store.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/store.repository.dart';
/// Provides access to a persistent key-value store with an in-memory cache.
/// Listens for repository changes to keep the cache updated.
class StoreService {
final IStoreRepository _storeRepository;
final IsarStoreRepository _storeRepository;
/// In-memory cache. Keys are [StoreKey.id]
final Map<int, Object?> _cache = {};
late final StreamSubscription<StoreDto> _storeUpdateSubscription;
StoreService._({required IStoreRepository storeRepository})
StoreService._({required IsarStoreRepository storeRepository})
: _storeRepository = storeRepository;
// TODO: Temporary typedef to make minimal changes. Remove this and make the presentation layer access store through a provider
@@ -26,14 +26,14 @@ class StoreService {
// TODO: Replace the implementation with the one from create after removing the typedef
static Future<StoreService> init({
required IStoreRepository storeRepository,
required IsarStoreRepository storeRepository,
}) async {
_instance ??= await create(storeRepository: storeRepository);
return _instance!;
}
static Future<StoreService> create({
required IStoreRepository storeRepository,
required IsarStoreRepository storeRepository,
}) async {
final instance = StoreService._(storeRepository: storeRepository);
await instance._populateCache();

View File

@@ -1,13 +0,0 @@
import 'package:isar/isar.dart';
part 'isar_store.entity.g.dart';
/// Internal class for `Store`, do not use elsewhere.
@Collection(inheritance: false)
class StoreValue {
final Id id;
final int? intValue;
final String? strValue;
const StoreValue(this.id, {this.intValue, this.strValue});
}

View File

@@ -1,7 +1,5 @@
import 'package:immich_mobile/domain/models/log.model.dart';
import 'package:immich_mobile/infrastructure/utils/drift_default.mixin.dart';
import 'package:isar/isar.dart';
import 'package:drift/drift.dart';
part 'log.entity.g.dart';
@@ -47,21 +45,3 @@ class LoggerMessage {
);
}
}
class LoggerMessageEntity extends Table with DriftDefaultsMixin {
const LoggerMessageEntity();
IntColumn get id => integer().autoIncrement()();
TextColumn get message => text()();
TextColumn get details => text().nullable()();
IntColumn get level => intEnum<LogLevel>()();
DateTimeColumn get createdAt => dateTime()();
TextColumn get context1 => text().nullable()();
TextColumn get context2 => text().nullable()();
}

View File

@@ -1,589 +0,0 @@
// dart format width=80
// ignore_for_file: type=lint
import 'package:drift/drift.dart' as i0;
import 'package:immich_mobile/infrastructure/entities/log.entity.drift.dart'
as i1;
import 'package:immich_mobile/domain/models/log.model.dart' as i2;
import 'package:immich_mobile/infrastructure/entities/log.entity.dart' as i3;
typedef $$LoggerMessageEntityTableCreateCompanionBuilder
= i1.LoggerMessageEntityCompanion Function({
required int id,
required String message,
i0.Value<String?> details,
required i2.LogLevel level,
required DateTime createdAt,
i0.Value<String?> context1,
i0.Value<String?> context2,
});
typedef $$LoggerMessageEntityTableUpdateCompanionBuilder
= i1.LoggerMessageEntityCompanion Function({
i0.Value<int> id,
i0.Value<String> message,
i0.Value<String?> details,
i0.Value<i2.LogLevel> level,
i0.Value<DateTime> createdAt,
i0.Value<String?> context1,
i0.Value<String?> context2,
});
class $$LoggerMessageEntityTableFilterComposer
extends i0.Composer<i0.GeneratedDatabase, i1.$LoggerMessageEntityTable> {
$$LoggerMessageEntityTableFilterComposer({
required super.$db,
required super.$table,
super.joinBuilder,
super.$addJoinBuilderToRootComposer,
super.$removeJoinBuilderFromRootComposer,
});
i0.ColumnFilters<int> get id => $composableBuilder(
column: $table.id, builder: (column) => i0.ColumnFilters(column));
i0.ColumnFilters<String> get message => $composableBuilder(
column: $table.message, builder: (column) => i0.ColumnFilters(column));
i0.ColumnFilters<String> get details => $composableBuilder(
column: $table.details, builder: (column) => i0.ColumnFilters(column));
i0.ColumnWithTypeConverterFilters<i2.LogLevel, i2.LogLevel, int> get level =>
$composableBuilder(
column: $table.level,
builder: (column) => i0.ColumnWithTypeConverterFilters(column));
i0.ColumnFilters<DateTime> get createdAt => $composableBuilder(
column: $table.createdAt, builder: (column) => i0.ColumnFilters(column));
i0.ColumnFilters<String> get context1 => $composableBuilder(
column: $table.context1, builder: (column) => i0.ColumnFilters(column));
i0.ColumnFilters<String> get context2 => $composableBuilder(
column: $table.context2, builder: (column) => i0.ColumnFilters(column));
}
class $$LoggerMessageEntityTableOrderingComposer
extends i0.Composer<i0.GeneratedDatabase, i1.$LoggerMessageEntityTable> {
$$LoggerMessageEntityTableOrderingComposer({
required super.$db,
required super.$table,
super.joinBuilder,
super.$addJoinBuilderToRootComposer,
super.$removeJoinBuilderFromRootComposer,
});
i0.ColumnOrderings<int> get id => $composableBuilder(
column: $table.id, builder: (column) => i0.ColumnOrderings(column));
i0.ColumnOrderings<String> get message => $composableBuilder(
column: $table.message, builder: (column) => i0.ColumnOrderings(column));
i0.ColumnOrderings<String> get details => $composableBuilder(
column: $table.details, builder: (column) => i0.ColumnOrderings(column));
i0.ColumnOrderings<int> get level => $composableBuilder(
column: $table.level, builder: (column) => i0.ColumnOrderings(column));
i0.ColumnOrderings<DateTime> get createdAt => $composableBuilder(
column: $table.createdAt,
builder: (column) => i0.ColumnOrderings(column));
i0.ColumnOrderings<String> get context1 => $composableBuilder(
column: $table.context1, builder: (column) => i0.ColumnOrderings(column));
i0.ColumnOrderings<String> get context2 => $composableBuilder(
column: $table.context2, builder: (column) => i0.ColumnOrderings(column));
}
class $$LoggerMessageEntityTableAnnotationComposer
extends i0.Composer<i0.GeneratedDatabase, i1.$LoggerMessageEntityTable> {
$$LoggerMessageEntityTableAnnotationComposer({
required super.$db,
required super.$table,
super.joinBuilder,
super.$addJoinBuilderToRootComposer,
super.$removeJoinBuilderFromRootComposer,
});
i0.GeneratedColumn<int> get id =>
$composableBuilder(column: $table.id, builder: (column) => column);
i0.GeneratedColumn<String> get message =>
$composableBuilder(column: $table.message, builder: (column) => column);
i0.GeneratedColumn<String> get details =>
$composableBuilder(column: $table.details, builder: (column) => column);
i0.GeneratedColumnWithTypeConverter<i2.LogLevel, int> get level =>
$composableBuilder(column: $table.level, builder: (column) => column);
i0.GeneratedColumn<DateTime> get createdAt =>
$composableBuilder(column: $table.createdAt, builder: (column) => column);
i0.GeneratedColumn<String> get context1 =>
$composableBuilder(column: $table.context1, builder: (column) => column);
i0.GeneratedColumn<String> get context2 =>
$composableBuilder(column: $table.context2, builder: (column) => column);
}
class $$LoggerMessageEntityTableTableManager extends i0.RootTableManager<
i0.GeneratedDatabase,
i1.$LoggerMessageEntityTable,
i1.LoggerMessageEntityData,
i1.$$LoggerMessageEntityTableFilterComposer,
i1.$$LoggerMessageEntityTableOrderingComposer,
i1.$$LoggerMessageEntityTableAnnotationComposer,
$$LoggerMessageEntityTableCreateCompanionBuilder,
$$LoggerMessageEntityTableUpdateCompanionBuilder,
(
i1.LoggerMessageEntityData,
i0.BaseReferences<i0.GeneratedDatabase, i1.$LoggerMessageEntityTable,
i1.LoggerMessageEntityData>
),
i1.LoggerMessageEntityData,
i0.PrefetchHooks Function()> {
$$LoggerMessageEntityTableTableManager(
i0.GeneratedDatabase db, i1.$LoggerMessageEntityTable table)
: super(i0.TableManagerState(
db: db,
table: table,
createFilteringComposer: () => i1
.$$LoggerMessageEntityTableFilterComposer($db: db, $table: table),
createOrderingComposer: () =>
i1.$$LoggerMessageEntityTableOrderingComposer(
$db: db, $table: table),
createComputedFieldComposer: () =>
i1.$$LoggerMessageEntityTableAnnotationComposer(
$db: db, $table: table),
updateCompanionCallback: ({
i0.Value<int> id = const i0.Value.absent(),
i0.Value<String> message = const i0.Value.absent(),
i0.Value<String?> details = const i0.Value.absent(),
i0.Value<i2.LogLevel> level = const i0.Value.absent(),
i0.Value<DateTime> createdAt = const i0.Value.absent(),
i0.Value<String?> context1 = const i0.Value.absent(),
i0.Value<String?> context2 = const i0.Value.absent(),
}) =>
i1.LoggerMessageEntityCompanion(
id: id,
message: message,
details: details,
level: level,
createdAt: createdAt,
context1: context1,
context2: context2,
),
createCompanionCallback: ({
required int id,
required String message,
i0.Value<String?> details = const i0.Value.absent(),
required i2.LogLevel level,
required DateTime createdAt,
i0.Value<String?> context1 = const i0.Value.absent(),
i0.Value<String?> context2 = const i0.Value.absent(),
}) =>
i1.LoggerMessageEntityCompanion.insert(
id: id,
message: message,
details: details,
level: level,
createdAt: createdAt,
context1: context1,
context2: context2,
),
withReferenceMapper: (p0) => p0
.map((e) => (e.readTable(table), i0.BaseReferences(db, table, e)))
.toList(),
prefetchHooksCallback: null,
));
}
typedef $$LoggerMessageEntityTableProcessedTableManager
= i0.ProcessedTableManager<
i0.GeneratedDatabase,
i1.$LoggerMessageEntityTable,
i1.LoggerMessageEntityData,
i1.$$LoggerMessageEntityTableFilterComposer,
i1.$$LoggerMessageEntityTableOrderingComposer,
i1.$$LoggerMessageEntityTableAnnotationComposer,
$$LoggerMessageEntityTableCreateCompanionBuilder,
$$LoggerMessageEntityTableUpdateCompanionBuilder,
(
i1.LoggerMessageEntityData,
i0.BaseReferences<i0.GeneratedDatabase, i1.$LoggerMessageEntityTable,
i1.LoggerMessageEntityData>
),
i1.LoggerMessageEntityData,
i0.PrefetchHooks Function()>;
class $LoggerMessageEntityTable extends i3.LoggerMessageEntity
with i0.TableInfo<$LoggerMessageEntityTable, i1.LoggerMessageEntityData> {
@override
final i0.GeneratedDatabase attachedDatabase;
final String? _alias;
$LoggerMessageEntityTable(this.attachedDatabase, [this._alias]);
static const i0.VerificationMeta _idMeta = const i0.VerificationMeta('id');
@override
late final i0.GeneratedColumn<int> id = i0.GeneratedColumn<int>(
'id', aliasedName, false,
hasAutoIncrement: true,
type: i0.DriftSqlType.int,
requiredDuringInsert: true,
defaultConstraints:
i0.GeneratedColumn.constraintIsAlways('PRIMARY KEY AUTOINCREMENT'));
static const i0.VerificationMeta _messageMeta =
const i0.VerificationMeta('message');
@override
late final i0.GeneratedColumn<String> message = i0.GeneratedColumn<String>(
'message', aliasedName, false,
type: i0.DriftSqlType.string, requiredDuringInsert: true);
static const i0.VerificationMeta _detailsMeta =
const i0.VerificationMeta('details');
@override
late final i0.GeneratedColumn<String> details = i0.GeneratedColumn<String>(
'details', aliasedName, true,
type: i0.DriftSqlType.string, requiredDuringInsert: false);
@override
late final i0.GeneratedColumnWithTypeConverter<i2.LogLevel, int> level =
i0.GeneratedColumn<int>('level', aliasedName, false,
type: i0.DriftSqlType.int, requiredDuringInsert: true)
.withConverter<i2.LogLevel>(
i1.$LoggerMessageEntityTable.$converterlevel);
static const i0.VerificationMeta _createdAtMeta =
const i0.VerificationMeta('createdAt');
@override
late final i0.GeneratedColumn<DateTime> createdAt =
i0.GeneratedColumn<DateTime>('created_at', aliasedName, false,
type: i0.DriftSqlType.dateTime, requiredDuringInsert: true);
static const i0.VerificationMeta _context1Meta =
const i0.VerificationMeta('context1');
@override
late final i0.GeneratedColumn<String> context1 = i0.GeneratedColumn<String>(
'context1', aliasedName, true,
type: i0.DriftSqlType.string, requiredDuringInsert: false);
static const i0.VerificationMeta _context2Meta =
const i0.VerificationMeta('context2');
@override
late final i0.GeneratedColumn<String> context2 = i0.GeneratedColumn<String>(
'context2', aliasedName, true,
type: i0.DriftSqlType.string, requiredDuringInsert: false);
@override
List<i0.GeneratedColumn> get $columns =>
[id, message, details, level, createdAt, context1, context2];
@override
String get aliasedName => _alias ?? actualTableName;
@override
String get actualTableName => $name;
static const String $name = 'logger_message_entity';
@override
i0.VerificationContext validateIntegrity(
i0.Insertable<i1.LoggerMessageEntityData> instance,
{bool isInserting = false}) {
final context = i0.VerificationContext();
final data = instance.toColumns(true);
if (data.containsKey('id')) {
context.handle(_idMeta, id.isAcceptableOrUnknown(data['id']!, _idMeta));
} else if (isInserting) {
context.missing(_idMeta);
}
if (data.containsKey('message')) {
context.handle(_messageMeta,
message.isAcceptableOrUnknown(data['message']!, _messageMeta));
} else if (isInserting) {
context.missing(_messageMeta);
}
if (data.containsKey('details')) {
context.handle(_detailsMeta,
details.isAcceptableOrUnknown(data['details']!, _detailsMeta));
}
if (data.containsKey('created_at')) {
context.handle(_createdAtMeta,
createdAt.isAcceptableOrUnknown(data['created_at']!, _createdAtMeta));
} else if (isInserting) {
context.missing(_createdAtMeta);
}
if (data.containsKey('context1')) {
context.handle(_context1Meta,
context1.isAcceptableOrUnknown(data['context1']!, _context1Meta));
}
if (data.containsKey('context2')) {
context.handle(_context2Meta,
context2.isAcceptableOrUnknown(data['context2']!, _context2Meta));
}
return context;
}
@override
Set<i0.GeneratedColumn> get $primaryKey => {id};
@override
i1.LoggerMessageEntityData map(Map<String, dynamic> data,
{String? tablePrefix}) {
final effectivePrefix = tablePrefix != null ? '$tablePrefix.' : '';
return i1.LoggerMessageEntityData(
id: attachedDatabase.typeMapping
.read(i0.DriftSqlType.int, data['${effectivePrefix}id'])!,
message: attachedDatabase.typeMapping
.read(i0.DriftSqlType.string, data['${effectivePrefix}message'])!,
details: attachedDatabase.typeMapping
.read(i0.DriftSqlType.string, data['${effectivePrefix}details']),
level: i1.$LoggerMessageEntityTable.$converterlevel.fromSql(
attachedDatabase.typeMapping
.read(i0.DriftSqlType.int, data['${effectivePrefix}level'])!),
createdAt: attachedDatabase.typeMapping.read(
i0.DriftSqlType.dateTime, data['${effectivePrefix}created_at'])!,
context1: attachedDatabase.typeMapping
.read(i0.DriftSqlType.string, data['${effectivePrefix}context1']),
context2: attachedDatabase.typeMapping
.read(i0.DriftSqlType.string, data['${effectivePrefix}context2']),
);
}
@override
$LoggerMessageEntityTable createAlias(String alias) {
return $LoggerMessageEntityTable(attachedDatabase, alias);
}
static i0.JsonTypeConverter2<i2.LogLevel, int, int> $converterlevel =
const i0.EnumIndexConverter<i2.LogLevel>(i2.LogLevel.values);
@override
bool get withoutRowId => true;
@override
bool get isStrict => true;
}
class LoggerMessageEntityData extends i0.DataClass
implements i0.Insertable<i1.LoggerMessageEntityData> {
final int id;
final String message;
final String? details;
final i2.LogLevel level;
final DateTime createdAt;
final String? context1;
final String? context2;
const LoggerMessageEntityData(
{required this.id,
required this.message,
this.details,
required this.level,
required this.createdAt,
this.context1,
this.context2});
@override
Map<String, i0.Expression> toColumns(bool nullToAbsent) {
final map = <String, i0.Expression>{};
map['id'] = i0.Variable<int>(id);
map['message'] = i0.Variable<String>(message);
if (!nullToAbsent || details != null) {
map['details'] = i0.Variable<String>(details);
}
{
map['level'] = i0.Variable<int>(
i1.$LoggerMessageEntityTable.$converterlevel.toSql(level));
}
map['created_at'] = i0.Variable<DateTime>(createdAt);
if (!nullToAbsent || context1 != null) {
map['context1'] = i0.Variable<String>(context1);
}
if (!nullToAbsent || context2 != null) {
map['context2'] = i0.Variable<String>(context2);
}
return map;
}
factory LoggerMessageEntityData.fromJson(Map<String, dynamic> json,
{i0.ValueSerializer? serializer}) {
serializer ??= i0.driftRuntimeOptions.defaultSerializer;
return LoggerMessageEntityData(
id: serializer.fromJson<int>(json['id']),
message: serializer.fromJson<String>(json['message']),
details: serializer.fromJson<String?>(json['details']),
level: i1.$LoggerMessageEntityTable.$converterlevel
.fromJson(serializer.fromJson<int>(json['level'])),
createdAt: serializer.fromJson<DateTime>(json['createdAt']),
context1: serializer.fromJson<String?>(json['context1']),
context2: serializer.fromJson<String?>(json['context2']),
);
}
@override
Map<String, dynamic> toJson({i0.ValueSerializer? serializer}) {
serializer ??= i0.driftRuntimeOptions.defaultSerializer;
return <String, dynamic>{
'id': serializer.toJson<int>(id),
'message': serializer.toJson<String>(message),
'details': serializer.toJson<String?>(details),
'level': serializer.toJson<int>(
i1.$LoggerMessageEntityTable.$converterlevel.toJson(level)),
'createdAt': serializer.toJson<DateTime>(createdAt),
'context1': serializer.toJson<String?>(context1),
'context2': serializer.toJson<String?>(context2),
};
}
i1.LoggerMessageEntityData copyWith(
{int? id,
String? message,
i0.Value<String?> details = const i0.Value.absent(),
i2.LogLevel? level,
DateTime? createdAt,
i0.Value<String?> context1 = const i0.Value.absent(),
i0.Value<String?> context2 = const i0.Value.absent()}) =>
i1.LoggerMessageEntityData(
id: id ?? this.id,
message: message ?? this.message,
details: details.present ? details.value : this.details,
level: level ?? this.level,
createdAt: createdAt ?? this.createdAt,
context1: context1.present ? context1.value : this.context1,
context2: context2.present ? context2.value : this.context2,
);
LoggerMessageEntityData copyWithCompanion(
i1.LoggerMessageEntityCompanion data) {
return LoggerMessageEntityData(
id: data.id.present ? data.id.value : this.id,
message: data.message.present ? data.message.value : this.message,
details: data.details.present ? data.details.value : this.details,
level: data.level.present ? data.level.value : this.level,
createdAt: data.createdAt.present ? data.createdAt.value : this.createdAt,
context1: data.context1.present ? data.context1.value : this.context1,
context2: data.context2.present ? data.context2.value : this.context2,
);
}
@override
String toString() {
return (StringBuffer('LoggerMessageEntityData(')
..write('id: $id, ')
..write('message: $message, ')
..write('details: $details, ')
..write('level: $level, ')
..write('createdAt: $createdAt, ')
..write('context1: $context1, ')
..write('context2: $context2')
..write(')'))
.toString();
}
@override
int get hashCode =>
Object.hash(id, message, details, level, createdAt, context1, context2);
@override
bool operator ==(Object other) =>
identical(this, other) ||
(other is i1.LoggerMessageEntityData &&
other.id == this.id &&
other.message == this.message &&
other.details == this.details &&
other.level == this.level &&
other.createdAt == this.createdAt &&
other.context1 == this.context1 &&
other.context2 == this.context2);
}
class LoggerMessageEntityCompanion
extends i0.UpdateCompanion<i1.LoggerMessageEntityData> {
final i0.Value<int> id;
final i0.Value<String> message;
final i0.Value<String?> details;
final i0.Value<i2.LogLevel> level;
final i0.Value<DateTime> createdAt;
final i0.Value<String?> context1;
final i0.Value<String?> context2;
const LoggerMessageEntityCompanion({
this.id = const i0.Value.absent(),
this.message = const i0.Value.absent(),
this.details = const i0.Value.absent(),
this.level = const i0.Value.absent(),
this.createdAt = const i0.Value.absent(),
this.context1 = const i0.Value.absent(),
this.context2 = const i0.Value.absent(),
});
LoggerMessageEntityCompanion.insert({
required int id,
required String message,
this.details = const i0.Value.absent(),
required i2.LogLevel level,
required DateTime createdAt,
this.context1 = const i0.Value.absent(),
this.context2 = const i0.Value.absent(),
}) : id = i0.Value(id),
message = i0.Value(message),
level = i0.Value(level),
createdAt = i0.Value(createdAt);
static i0.Insertable<i1.LoggerMessageEntityData> custom({
i0.Expression<int>? id,
i0.Expression<String>? message,
i0.Expression<String>? details,
i0.Expression<int>? level,
i0.Expression<DateTime>? createdAt,
i0.Expression<String>? context1,
i0.Expression<String>? context2,
}) {
return i0.RawValuesInsertable({
if (id != null) 'id': id,
if (message != null) 'message': message,
if (details != null) 'details': details,
if (level != null) 'level': level,
if (createdAt != null) 'created_at': createdAt,
if (context1 != null) 'context1': context1,
if (context2 != null) 'context2': context2,
});
}
i1.LoggerMessageEntityCompanion copyWith(
{i0.Value<int>? id,
i0.Value<String>? message,
i0.Value<String?>? details,
i0.Value<i2.LogLevel>? level,
i0.Value<DateTime>? createdAt,
i0.Value<String?>? context1,
i0.Value<String?>? context2}) {
return i1.LoggerMessageEntityCompanion(
id: id ?? this.id,
message: message ?? this.message,
details: details ?? this.details,
level: level ?? this.level,
createdAt: createdAt ?? this.createdAt,
context1: context1 ?? this.context1,
context2: context2 ?? this.context2,
);
}
@override
Map<String, i0.Expression> toColumns(bool nullToAbsent) {
final map = <String, i0.Expression>{};
if (id.present) {
map['id'] = i0.Variable<int>(id.value);
}
if (message.present) {
map['message'] = i0.Variable<String>(message.value);
}
if (details.present) {
map['details'] = i0.Variable<String>(details.value);
}
if (level.present) {
map['level'] = i0.Variable<int>(
i1.$LoggerMessageEntityTable.$converterlevel.toSql(level.value));
}
if (createdAt.present) {
map['created_at'] = i0.Variable<DateTime>(createdAt.value);
}
if (context1.present) {
map['context1'] = i0.Variable<String>(context1.value);
}
if (context2.present) {
map['context2'] = i0.Variable<String>(context2.value);
}
return map;
}
@override
String toString() {
return (StringBuffer('LoggerMessageEntityCompanion(')
..write('id: $id, ')
..write('message: $message, ')
..write('details: $details, ')
..write('level: $level, ')
..write('createdAt: $createdAt, ')
..write('context1: $context1, ')
..write('context2: $context2')
..write(')'))
.toString();
}
}

View File

@@ -1,13 +1,13 @@
import 'package:drift/drift.dart';
import 'package:immich_mobile/infrastructure/utils/drift_default.mixin.dart';
import 'package:isar/isar.dart';
class StoreEntity extends Table with DriftDefaultsMixin {
const StoreEntity();
part 'store.entity.g.dart';
IntColumn get id => integer()();
IntColumn get intValue => integer().nullable()();
TextColumn get strValue => text().nullable()();
/// Internal class for `Store`, do not use elsewhere.
@Collection(inheritance: false)
class StoreValue {
final Id id;
final int? intValue;
final String? strValue;
@override
Set<Column> get primaryKey => {id};
const StoreValue(this.id, {this.intValue, this.strValue});
}

View File

@@ -1,364 +0,0 @@
// dart format width=80
// ignore_for_file: type=lint
import 'package:drift/drift.dart' as i0;
import 'package:immich_mobile/infrastructure/entities/store.entity.drift.dart'
as i1;
import 'package:immich_mobile/infrastructure/entities/store.entity.dart' as i2;
typedef $$StoreEntityTableCreateCompanionBuilder = i1.StoreEntityCompanion
Function({
required int id,
i0.Value<int?> intValue,
i0.Value<String?> strValue,
});
typedef $$StoreEntityTableUpdateCompanionBuilder = i1.StoreEntityCompanion
Function({
i0.Value<int> id,
i0.Value<int?> intValue,
i0.Value<String?> strValue,
});
class $$StoreEntityTableFilterComposer
extends i0.Composer<i0.GeneratedDatabase, i1.$StoreEntityTable> {
$$StoreEntityTableFilterComposer({
required super.$db,
required super.$table,
super.joinBuilder,
super.$addJoinBuilderToRootComposer,
super.$removeJoinBuilderFromRootComposer,
});
i0.ColumnFilters<int> get id => $composableBuilder(
column: $table.id, builder: (column) => i0.ColumnFilters(column));
i0.ColumnFilters<int> get intValue => $composableBuilder(
column: $table.intValue, builder: (column) => i0.ColumnFilters(column));
i0.ColumnFilters<String> get strValue => $composableBuilder(
column: $table.strValue, builder: (column) => i0.ColumnFilters(column));
}
class $$StoreEntityTableOrderingComposer
extends i0.Composer<i0.GeneratedDatabase, i1.$StoreEntityTable> {
$$StoreEntityTableOrderingComposer({
required super.$db,
required super.$table,
super.joinBuilder,
super.$addJoinBuilderToRootComposer,
super.$removeJoinBuilderFromRootComposer,
});
i0.ColumnOrderings<int> get id => $composableBuilder(
column: $table.id, builder: (column) => i0.ColumnOrderings(column));
i0.ColumnOrderings<int> get intValue => $composableBuilder(
column: $table.intValue, builder: (column) => i0.ColumnOrderings(column));
i0.ColumnOrderings<String> get strValue => $composableBuilder(
column: $table.strValue, builder: (column) => i0.ColumnOrderings(column));
}
class $$StoreEntityTableAnnotationComposer
extends i0.Composer<i0.GeneratedDatabase, i1.$StoreEntityTable> {
$$StoreEntityTableAnnotationComposer({
required super.$db,
required super.$table,
super.joinBuilder,
super.$addJoinBuilderToRootComposer,
super.$removeJoinBuilderFromRootComposer,
});
i0.GeneratedColumn<int> get id =>
$composableBuilder(column: $table.id, builder: (column) => column);
i0.GeneratedColumn<int> get intValue =>
$composableBuilder(column: $table.intValue, builder: (column) => column);
i0.GeneratedColumn<String> get strValue =>
$composableBuilder(column: $table.strValue, builder: (column) => column);
}
class $$StoreEntityTableTableManager extends i0.RootTableManager<
i0.GeneratedDatabase,
i1.$StoreEntityTable,
i1.StoreEntityData,
i1.$$StoreEntityTableFilterComposer,
i1.$$StoreEntityTableOrderingComposer,
i1.$$StoreEntityTableAnnotationComposer,
$$StoreEntityTableCreateCompanionBuilder,
$$StoreEntityTableUpdateCompanionBuilder,
(
i1.StoreEntityData,
i0.BaseReferences<i0.GeneratedDatabase, i1.$StoreEntityTable,
i1.StoreEntityData>
),
i1.StoreEntityData,
i0.PrefetchHooks Function()> {
$$StoreEntityTableTableManager(
i0.GeneratedDatabase db, i1.$StoreEntityTable table)
: super(i0.TableManagerState(
db: db,
table: table,
createFilteringComposer: () =>
i1.$$StoreEntityTableFilterComposer($db: db, $table: table),
createOrderingComposer: () =>
i1.$$StoreEntityTableOrderingComposer($db: db, $table: table),
createComputedFieldComposer: () =>
i1.$$StoreEntityTableAnnotationComposer($db: db, $table: table),
updateCompanionCallback: ({
i0.Value<int> id = const i0.Value.absent(),
i0.Value<int?> intValue = const i0.Value.absent(),
i0.Value<String?> strValue = const i0.Value.absent(),
}) =>
i1.StoreEntityCompanion(
id: id,
intValue: intValue,
strValue: strValue,
),
createCompanionCallback: ({
required int id,
i0.Value<int?> intValue = const i0.Value.absent(),
i0.Value<String?> strValue = const i0.Value.absent(),
}) =>
i1.StoreEntityCompanion.insert(
id: id,
intValue: intValue,
strValue: strValue,
),
withReferenceMapper: (p0) => p0
.map((e) => (e.readTable(table), i0.BaseReferences(db, table, e)))
.toList(),
prefetchHooksCallback: null,
));
}
typedef $$StoreEntityTableProcessedTableManager = i0.ProcessedTableManager<
i0.GeneratedDatabase,
i1.$StoreEntityTable,
i1.StoreEntityData,
i1.$$StoreEntityTableFilterComposer,
i1.$$StoreEntityTableOrderingComposer,
i1.$$StoreEntityTableAnnotationComposer,
$$StoreEntityTableCreateCompanionBuilder,
$$StoreEntityTableUpdateCompanionBuilder,
(
i1.StoreEntityData,
i0.BaseReferences<i0.GeneratedDatabase, i1.$StoreEntityTable,
i1.StoreEntityData>
),
i1.StoreEntityData,
i0.PrefetchHooks Function()>;
class $StoreEntityTable extends i2.StoreEntity
with i0.TableInfo<$StoreEntityTable, i1.StoreEntityData> {
@override
final i0.GeneratedDatabase attachedDatabase;
final String? _alias;
$StoreEntityTable(this.attachedDatabase, [this._alias]);
static const i0.VerificationMeta _idMeta = const i0.VerificationMeta('id');
@override
late final i0.GeneratedColumn<int> id = i0.GeneratedColumn<int>(
'id', aliasedName, false,
type: i0.DriftSqlType.int, requiredDuringInsert: true);
static const i0.VerificationMeta _intValueMeta =
const i0.VerificationMeta('intValue');
@override
late final i0.GeneratedColumn<int> intValue = i0.GeneratedColumn<int>(
'int_value', aliasedName, true,
type: i0.DriftSqlType.int, requiredDuringInsert: false);
static const i0.VerificationMeta _strValueMeta =
const i0.VerificationMeta('strValue');
@override
late final i0.GeneratedColumn<String> strValue = i0.GeneratedColumn<String>(
'str_value', aliasedName, true,
type: i0.DriftSqlType.string, requiredDuringInsert: false);
@override
List<i0.GeneratedColumn> get $columns => [id, intValue, strValue];
@override
String get aliasedName => _alias ?? actualTableName;
@override
String get actualTableName => $name;
static const String $name = 'store_entity';
@override
i0.VerificationContext validateIntegrity(
i0.Insertable<i1.StoreEntityData> instance,
{bool isInserting = false}) {
final context = i0.VerificationContext();
final data = instance.toColumns(true);
if (data.containsKey('id')) {
context.handle(_idMeta, id.isAcceptableOrUnknown(data['id']!, _idMeta));
} else if (isInserting) {
context.missing(_idMeta);
}
if (data.containsKey('int_value')) {
context.handle(_intValueMeta,
intValue.isAcceptableOrUnknown(data['int_value']!, _intValueMeta));
}
if (data.containsKey('str_value')) {
context.handle(_strValueMeta,
strValue.isAcceptableOrUnknown(data['str_value']!, _strValueMeta));
}
return context;
}
@override
Set<i0.GeneratedColumn> get $primaryKey => {id};
@override
i1.StoreEntityData map(Map<String, dynamic> data, {String? tablePrefix}) {
final effectivePrefix = tablePrefix != null ? '$tablePrefix.' : '';
return i1.StoreEntityData(
id: attachedDatabase.typeMapping
.read(i0.DriftSqlType.int, data['${effectivePrefix}id'])!,
intValue: attachedDatabase.typeMapping
.read(i0.DriftSqlType.int, data['${effectivePrefix}int_value']),
strValue: attachedDatabase.typeMapping
.read(i0.DriftSqlType.string, data['${effectivePrefix}str_value']),
);
}
@override
$StoreEntityTable createAlias(String alias) {
return $StoreEntityTable(attachedDatabase, alias);
}
@override
bool get withoutRowId => true;
@override
bool get isStrict => true;
}
/// Drift-generated immutable row class for the `store_entity` table.
///
/// Each row holds a key [id] and at most one payload column —
/// [intValue] or [strValue] — depending on the stored value's type.
class StoreEntityData extends i0.DataClass
    implements i0.Insertable<i1.StoreEntityData> {
  // Primary key of the row (the store key's id).
  final int id;
  // Integer payload column (nullable).
  final int? intValue;
  // String payload column (nullable).
  final String? strValue;
  const StoreEntityData({required this.id, this.intValue, this.strValue});

  /// Converts this row into column expressions for an insert/update.
  /// When [nullToAbsent] is true, null payloads are omitted entirely
  /// rather than written as SQL NULL.
  @override
  Map<String, i0.Expression> toColumns(bool nullToAbsent) {
    final map = <String, i0.Expression>{};
    map['id'] = i0.Variable<int>(id);
    if (!nullToAbsent || intValue != null) {
      map['int_value'] = i0.Variable<int>(intValue);
    }
    if (!nullToAbsent || strValue != null) {
      map['str_value'] = i0.Variable<String>(strValue);
    }
    return map;
  }

  /// Deserializes a row from JSON produced by [toJson].
  factory StoreEntityData.fromJson(Map<String, dynamic> json,
      {i0.ValueSerializer? serializer}) {
    serializer ??= i0.driftRuntimeOptions.defaultSerializer;
    return StoreEntityData(
      id: serializer.fromJson<int>(json['id']),
      intValue: serializer.fromJson<int?>(json['intValue']),
      strValue: serializer.fromJson<String?>(json['strValue']),
    );
  }

  /// Serializes this row to a JSON-compatible map.
  @override
  Map<String, dynamic> toJson({i0.ValueSerializer? serializer}) {
    serializer ??= i0.driftRuntimeOptions.defaultSerializer;
    return <String, dynamic>{
      'id': serializer.toJson<int>(id),
      'intValue': serializer.toJson<int?>(intValue),
      'strValue': serializer.toJson<String?>(strValue),
    };
  }

  /// Copy with optional overrides. Payload fields use [i0.Value] so an
  /// absent value keeps the current payload while an explicit
  /// Value(null) clears it.
  i1.StoreEntityData copyWith(
          {int? id,
          i0.Value<int?> intValue = const i0.Value.absent(),
          i0.Value<String?> strValue = const i0.Value.absent()}) =>
      i1.StoreEntityData(
        id: id ?? this.id,
        intValue: intValue.present ? intValue.value : this.intValue,
        strValue: strValue.present ? strValue.value : this.strValue,
      );

  /// Copy that takes the fields marked present in the companion [data].
  StoreEntityData copyWithCompanion(i1.StoreEntityCompanion data) {
    return StoreEntityData(
      id: data.id.present ? data.id.value : this.id,
      intValue: data.intValue.present ? data.intValue.value : this.intValue,
      strValue: data.strValue.present ? data.strValue.value : this.strValue,
    );
  }

  @override
  String toString() {
    return (StringBuffer('StoreEntityData(')
          ..write('id: $id, ')
          ..write('intValue: $intValue, ')
          ..write('strValue: $strValue')
          ..write(')'))
        .toString();
  }

  @override
  int get hashCode => Object.hash(id, intValue, strValue);

  // Value equality over all columns.
  @override
  bool operator ==(Object other) =>
      identical(this, other) ||
      (other is i1.StoreEntityData &&
          other.id == this.id &&
          other.intValue == this.intValue &&
          other.strValue == this.strValue);
}
/// Drift-generated companion for inserting/updating `store_entity` rows.
///
/// Every field is wrapped in [i0.Value] so "absent" can be distinguished
/// from an explicit null.
class StoreEntityCompanion extends i0.UpdateCompanion<i1.StoreEntityData> {
  final i0.Value<int> id;
  final i0.Value<int?> intValue;
  final i0.Value<String?> strValue;

  /// All fields absent by default; suitable for partial updates.
  const StoreEntityCompanion({
    this.id = const i0.Value.absent(),
    this.intValue = const i0.Value.absent(),
    this.strValue = const i0.Value.absent(),
  });

  /// Insert companion: [id] is required because the table declares
  /// `withoutRowId`, so there is no auto-generated rowid to fall back on.
  StoreEntityCompanion.insert({
    required int id,
    this.intValue = const i0.Value.absent(),
    this.strValue = const i0.Value.absent(),
  }) : id = i0.Value(id);

  /// Builds an insertable from raw SQL expressions.
  static i0.Insertable<i1.StoreEntityData> custom({
    i0.Expression<int>? id,
    i0.Expression<int>? intValue,
    i0.Expression<String>? strValue,
  }) {
    return i0.RawValuesInsertable({
      if (id != null) 'id': id,
      if (intValue != null) 'int_value': intValue,
      if (strValue != null) 'str_value': strValue,
    });
  }

  /// Copy with optional per-field overrides.
  i1.StoreEntityCompanion copyWith(
      {i0.Value<int>? id,
      i0.Value<int?>? intValue,
      i0.Value<String?>? strValue}) {
    return i1.StoreEntityCompanion(
      id: id ?? this.id,
      intValue: intValue ?? this.intValue,
      strValue: strValue ?? this.strValue,
    );
  }

  /// Emits only the columns whose values are marked present.
  @override
  Map<String, i0.Expression> toColumns(bool nullToAbsent) {
    final map = <String, i0.Expression>{};
    if (id.present) {
      map['id'] = i0.Variable<int>(id.value);
    }
    if (intValue.present) {
      map['int_value'] = i0.Variable<int>(intValue.value);
    }
    if (strValue.present) {
      map['str_value'] = i0.Variable<String>(strValue.value);
    }
    return map;
  }

  @override
  String toString() {
    return (StringBuffer('StoreEntityCompanion(')
          ..write('id: $id, ')
          ..write('intValue: $intValue, ')
          ..write('strValue: $strValue')
          ..write(')'))
        .toString();
  }
}

View File

@@ -1,6 +1,6 @@
// GENERATED CODE - DO NOT MODIFY BY HAND
part of 'isar_store.entity.dart';
part of 'store.entity.dart';
// **************************************************************************
// IsarCollectionGenerator

View File

@@ -7,13 +7,11 @@ import 'package:immich_mobile/infrastructure/entities/exif.entity.dart';
import 'package:immich_mobile/infrastructure/entities/local_album.entity.dart';
import 'package:immich_mobile/infrastructure/entities/local_album_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/local_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/log.entity.dart';
import 'package:immich_mobile/infrastructure/entities/partner.entity.dart';
import 'package:immich_mobile/infrastructure/entities/remote_album.entity.dart';
import 'package:immich_mobile/infrastructure/entities/remote_album_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/remote_album_user.entity.dart';
import 'package:immich_mobile/infrastructure/entities/remote_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/store.entity.dart';
import 'package:immich_mobile/infrastructure/entities/user.entity.dart';
import 'package:immich_mobile/infrastructure/entities/user_metadata.entity.dart';
import 'package:isar/isar.dart';
@@ -48,8 +46,6 @@ class IsarDatabaseRepository implements IDatabaseRepository {
RemoteAlbumEntity,
RemoteAlbumAssetEntity,
RemoteAlbumUserEntity,
StoreEntity,
LoggerMessageEntity,
],
include: {
'package:immich_mobile/infrastructure/entities/merged_asset.drift',

View File

@@ -23,13 +23,9 @@ import 'package:immich_mobile/infrastructure/entities/remote_album_asset.entity.
as i10;
import 'package:immich_mobile/infrastructure/entities/remote_album_user.entity.drift.dart'
as i11;
import 'package:immich_mobile/infrastructure/entities/store.entity.drift.dart'
as i12;
import 'package:immich_mobile/infrastructure/entities/log.entity.drift.dart'
as i13;
import 'package:immich_mobile/infrastructure/entities/merged_asset.drift.dart'
as i14;
import 'package:drift/internal/modular.dart' as i15;
as i12;
import 'package:drift/internal/modular.dart' as i13;
abstract class $Drift extends i0.GeneratedDatabase {
$Drift(i0.QueryExecutor e) : super(e);
@@ -55,11 +51,8 @@ abstract class $Drift extends i0.GeneratedDatabase {
i10.$RemoteAlbumAssetEntityTable(this);
late final i11.$RemoteAlbumUserEntityTable remoteAlbumUserEntity =
i11.$RemoteAlbumUserEntityTable(this);
late final i12.$StoreEntityTable storeEntity = i12.$StoreEntityTable(this);
late final i13.$LoggerMessageEntityTable loggerMessageEntity =
i13.$LoggerMessageEntityTable(this);
i14.MergedAssetDrift get mergedAssetDrift => i15.ReadDatabaseContainer(this)
.accessor<i14.MergedAssetDrift>(i14.MergedAssetDrift.new);
i12.MergedAssetDrift get mergedAssetDrift => i13.ReadDatabaseContainer(this)
.accessor<i12.MergedAssetDrift>(i12.MergedAssetDrift.new);
@override
Iterable<i0.TableInfo<i0.Table, Object?>> get allTables =>
allSchemaEntities.whereType<i0.TableInfo<i0.Table, Object?>>();
@@ -78,9 +71,7 @@ abstract class $Drift extends i0.GeneratedDatabase {
remoteExifEntity,
remoteAlbumEntity,
remoteAlbumAssetEntity,
remoteAlbumUserEntity,
storeEntity,
loggerMessageEntity
remoteAlbumUserEntity
];
@override
i0.StreamQueryUpdateRules get streamUpdateRules =>
@@ -217,8 +208,4 @@ class $DriftManager {
_db, _db.remoteAlbumAssetEntity);
i11.$$RemoteAlbumUserEntityTableTableManager get remoteAlbumUserEntity => i11
.$$RemoteAlbumUserEntityTableTableManager(_db, _db.remoteAlbumUserEntity);
i12.$$StoreEntityTableTableManager get storeEntity =>
i12.$$StoreEntityTableTableManager(_db, _db.storeEntity);
i13.$$LoggerMessageEntityTableTableManager get loggerMessageEntity =>
i13.$$LoggerMessageEntityTableTableManager(_db, _db.loggerMessageEntity);
}

View File

@@ -1,145 +0,0 @@
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/store.model.dart';
import 'package:immich_mobile/domain/models/user.model.dart';
import 'package:immich_mobile/infrastructure/entities/store.entity.drift.dart';
import 'package:immich_mobile/infrastructure/repositories/db.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/drift_user.repository.dart';
import 'package:immich_mobile/providers/infrastructure/db.provider.dart';
/// Riverpod provider for the Drift-backed store repository; rebuilds
/// whenever the watched database provider changes.
final driftStoreRepositoryProvider = Provider<DriftStoreRepository>(
  (ref) => DriftStoreRepository(ref.watch(driftProvider)),
);
/// Key/value store backed by the Drift `store_entity` table.
///
/// Values are persisted in one of two payload columns (`int_value` or
/// `str_value`); [_toValue] and [_fromValue] translate between typed
/// [StoreKey] values and that two-column representation.
class DriftStoreRepository implements IStoreRepository {
  final Drift _db;

  /// Ids of all known [StoreKey]s; rows with unknown ids are ignored by
  /// [watchAll] and [getAll].
  final validStoreKeys = StoreKey.values.map((e) => e.id).toSet();

  DriftStoreRepository(this._db);

  @override
  Future<bool> deleteAll() async {
    return await _db.transaction(() async {
      await _db.delete(_db.storeEntity).go();
      return true;
    });
  }

  @override
  Stream<StoreDto<Object>> watchAll() {
    return (_db.select(_db.storeEntity)
          ..where((tbl) => tbl.id.isIn(validStoreKeys)))
        .watch()
        .asyncExpand(
          (entities) => Stream.fromFutures(
            entities.map((e) async => _toUpdateEvent(e)),
          ),
        );
  }

  @override
  Future<void> delete<T>(StoreKey<T> key) async {
    return await _db.transaction(() async {
      await (_db.delete(_db.storeEntity)..where((tbl) => tbl.id.equals(key.id)))
          .go();
    });
  }

  @override
  Future<bool> insert<T>(StoreKey<T> key, T value) async {
    return await _db.transaction(() async {
      // Upsert so repeated inserts for the same key overwrite the old value.
      await _db
          .into(_db.storeEntity)
          .insertOnConflictUpdate(await _fromValue(key, value));
      return true;
    });
  }

  @override
  Future<T?> tryGet<T>(StoreKey<T> key) async {
    final entity = await (_db.select(_db.storeEntity)
          ..where((tbl) => tbl.id.equals(key.id)))
        .getSingleOrNull();
    if (entity == null) {
      return null;
    }
    return await _toValue(key, entity);
  }

  @override
  Future<bool> update<T>(StoreKey<T> key, T value) async {
    return await _db.transaction(() async {
      await _db
          .into(_db.storeEntity)
          .insertOnConflictUpdate(await _fromValue(key, value));
      return true;
    });
  }

  @override
  Stream<T?> watch<T>(StoreKey<T> key) async* {
    yield* (_db.select(_db.storeEntity)..where((tbl) => tbl.id.equals(key.id)))
        .watchSingleOrNull()
        .asyncMap((e) async => e == null ? null : await _toValue(key, e));
  }

  /// Converts a raw row into a typed [StoreDto] for change notifications.
  Future<StoreDto<Object>> _toUpdateEvent(StoreEntityData entity) async {
    final key = StoreKey.values.firstWhere((e) => e.id == entity.id)
        as StoreKey<Object>;
    final value = await _toValue(key, entity);
    return StoreDto(key, value);
  }

  /// Decodes a row into the value type declared by [key].
  ///
  /// Returns null when the relevant payload column is null (value absent).
  Future<T?> _toValue<T>(StoreKey<T> key, StoreEntityData entity) async =>
      switch (key.type) {
        const (int) => entity.intValue,
        const (String) => entity.strValue,
        // FIX: preserve null for an absent payload instead of collapsing it
        // to false, consistent with the DateTime case below and with the
        // null-means-absent contract of tryGet/watch.
        const (bool) =>
          entity.intValue == null ? null : entity.intValue == 1,
        const (DateTime) => entity.intValue == null
            ? null
            : DateTime.fromMillisecondsSinceEpoch(entity.intValue!),
        const (UserDto) => entity.strValue == null
            ? null
            : await DriftUserRepository(_db).getByUserId(entity.strValue!),
        _ => null,
      } as T?;

  /// Encodes [value] into the (intValue, strValue) column pair for [key].
  ///
  /// Throws [UnsupportedError] for types the store cannot persist.
  Future<StoreEntityData> _fromValue<T>(StoreKey<T> key, T value) async {
    final (int? intValue, String? strValue) = switch (key.type) {
      const (int) => (value as int, null),
      const (String) => (null, value as String),
      const (bool) => ((value as bool) ? 1 : 0, null),
      const (DateTime) => ((value as DateTime).millisecondsSinceEpoch, null),
      const (UserDto) => (
          null,
          // Persist the user via its repository and store its id.
          (await DriftUserRepository(_db).update(value as UserDto)).id,
        ),
      _ => throw UnsupportedError(
          "Unsupported primitive type: ${key.type} for key: ${key.name}",
        ),
    };
    return StoreEntityData(
      id: key.id,
      intValue: intValue,
      strValue: strValue,
    );
  }

  @override
  Future<List<StoreDto<Object>>> getAll() async {
    final entities = await (_db.select(_db.storeEntity)
          ..where((tbl) => tbl.id.isIn(validStoreKeys)))
        .get();
    return Future.wait(entities.map((e) => _toUpdateEvent(e)).toList());
  }
}
/// Contract for a typed key/value store keyed by [StoreKey].
abstract class IStoreRepository {
  /// Removes every stored value; resolves to true on success.
  Future<bool> deleteAll();

  /// Emits a [StoreDto] per known key whenever stored rows change.
  Stream<StoreDto<Object>> watchAll();

  /// Removes the value stored under [key], if any.
  Future<void> delete<T>(StoreKey<T> key);

  /// Stores [value] under [key]; resolves to true on success.
  Future<bool> insert<T>(StoreKey<T> key, T value);

  /// Returns the value for [key], or null when absent.
  Future<T?> tryGet<T>(StoreKey<T> key);

  /// Replaces the value for [key]; resolves to true on success.
  Future<bool> update<T>(StoreKey<T> key, T value);

  /// Watches the value stored under [key], emitting null when absent.
  Stream<T?> watch<T>(StoreKey<T> key);

  /// Returns a [StoreDto] for every known key currently stored.
  Future<List<StoreDto<Object>>> getAll();
}

View File

@@ -1,117 +0,0 @@
import 'package:drift/drift.dart';
import 'package:immich_mobile/constants/enums.dart';
import 'package:immich_mobile/domain/models/user.model.dart';
import 'package:immich_mobile/infrastructure/repositories/db.repository.dart';
import 'package:immich_mobile/infrastructure/entities/user.entity.drift.dart';
import 'package:immich_mobile/domain/models/user_metadata.model.dart';
/// Drift-based persistence for application users.
///
/// Translates between `user_entity` table rows and [UserDto] models.
class DriftUserRepository {
  final Drift _db;

  const DriftUserRepository(this._db);

  /// Removes the users whose ids appear in [ids].
  Future<void> delete(List<String> ids) async {
    await _db.transaction(() async {
      await (_db.delete(_db.userEntity)..where((tbl) => tbl.id.isIn(ids))).go();
    });
  }

  /// Removes every user row.
  Future<void> deleteAll() async {
    await _db.transaction(() async {
      await _db.delete(_db.userEntity).go();
    });
  }

  /// Returns all users, optionally ordered by [sortBy].
  Future<List<UserDto>> getAll({SortUserBy? sortBy}) async {
    var query = _db.select(_db.userEntity);
    if (sortBy != null) {
      switch (sortBy) {
        case SortUserBy.id:
          query = query..orderBy([(u) => OrderingTerm.asc(u.id)]);
      }
    }
    final rows = await query.get();
    return [for (final row in rows) _toDto(row)];
  }

  /// Looks up a single user by [id]; returns null when absent.
  Future<UserDto?> getByUserId(String id) async {
    final row = await (_db.select(_db.userEntity)
          ..where((tbl) => tbl.id.equals(id)))
        .getSingleOrNull();
    if (row == null) {
      return null;
    }
    return _toDto(row);
  }

  /// Fetches users for [ids], preserving the input order; ids with no
  /// matching row map to null entries.
  Future<List<UserDto?>> getByUserIds(List<String> ids) async {
    final rows = await (_db.select(_db.userEntity)
          ..where((tbl) => tbl.id.isIn(ids)))
        .get();
    // Index by id so results can be emitted in the caller's order.
    final byId = <String, UserDto>{
      for (final row in rows) row.id: _toDto(row),
    };
    return [for (final id in ids) byId[id]];
  }

  /// Inserts or replaces [user]; always resolves to true.
  Future<bool> insert(UserDto user) async {
    await _db.transaction(() async {
      await _db.into(_db.userEntity).insertOnConflictUpdate(_fromDto(user));
    });
    return true;
  }

  /// Upserts [user] and returns it unchanged.
  Future<UserDto> update(UserDto user) async {
    await _db.transaction(() async {
      await _db.into(_db.userEntity).insertOnConflictUpdate(_fromDto(user));
    });
    return user;
  }

  /// Upserts every user in [users] in a single batch; always resolves
  /// to true.
  Future<bool> updateAll(List<UserDto> users) async {
    await _db.transaction(() async {
      await _db.batch((batch) {
        for (final user in users) {
          batch.insert(_db.userEntity, _fromDto(user),
              mode: InsertMode.insertOrReplace);
        }
      });
    });
    return true;
  }

  /// Converts a table row into a [UserDto].
  UserDto _toDto(UserEntityData entity) {
    return UserDto(
      id: entity.id,
      updatedAt: entity.updatedAt,
      email: entity.email,
      name: entity.name,
      isAdmin: entity.isAdmin,
      profileImagePath: entity.profileImagePath ?? '',
      // These DTO fields are not persisted in the current user table, so
      // fixed defaults are supplied here.
      isPartnerSharedBy: false,
      isPartnerSharedWith: false,
      avatarColor: AvatarColor.primary,
      memoryEnabled: true,
      inTimeline: false,
      quotaUsageInBytes: entity.quotaUsageInBytes,
      quotaSizeInBytes: entity.quotaSizeInBytes ?? 0,
    );
  }

  /// Converts a [UserDto] into an insertable companion, treating an empty
  /// profile path and a zero quota size as absent columns.
  UserEntityCompanion _fromDto(UserDto dto) {
    return UserEntityCompanion(
      id: Value(dto.id),
      name: Value(dto.name),
      isAdmin: Value(dto.isAdmin),
      email: Value(dto.email),
      profileImagePath: Value.absentIfNull(
          dto.profileImagePath?.isEmpty == true ? null : dto.profileImagePath),
      updatedAt: Value(dto.updatedAt),
      quotaSizeInBytes: Value.absentIfNull(
          dto.quotaSizeInBytes == 0 ? null : dto.quotaSizeInBytes),
      quotaUsageInBytes: Value(dto.quotaUsageInBytes),
    );
  }
}

View File

@@ -1,99 +1,46 @@
import 'package:drift/drift.dart';
import 'package:immich_mobile/domain/models/log.model.dart';
import 'package:immich_mobile/infrastructure/entities/log.entity.drift.dart';
import 'package:immich_mobile/infrastructure/entities/log.entity.dart';
import 'package:immich_mobile/infrastructure/repositories/db.repository.dart';
import 'package:immich_mobile/providers/infrastructure/db.provider.dart';
import 'package:riverpod_annotation/riverpod_annotation.dart';
import 'package:isar/isar.dart';
final driftLogRepositoryProvider = Provider<LogRepository>(
(ref) => LogRepository(ref.watch(driftProvider)),
);
class LogRepository {
final Drift _db;
const LogRepository(this._db);
class IsarLogRepository extends IsarDatabaseRepository {
final Isar _db;
const IsarLogRepository(super.db) : _db = db;
Future<bool> deleteAll() async {
await _db.transaction(() async {
await _db.delete(_db.loggerMessageEntity).go();
});
await transaction(() async => await _db.loggerMessages.clear());
return true;
}
Future<List<LogMessage>> getAll() async {
final query = _db.select(_db.loggerMessageEntity)
..orderBy([(t) => OrderingTerm.desc(t.createdAt)]);
final results = await query.get();
return results
.map(
(row) => LogMessage(
message: row.message,
level: row.level,
createdAt: row.createdAt,
logger: row.context1,
error: row.details,
stack: row.context2,
),
)
.toList();
final logs =
await _db.loggerMessages.where().sortByCreatedAtDesc().findAll();
return logs.map((l) => l.toDto()).toList();
}
Future<bool> insert(LogMessage log) async {
await _db.transaction(() async {
await _db.into(_db.loggerMessageEntity).insert(
LoggerMessageEntityCompanion.insert(
id: 0, // Will be auto-incremented by the database
message: log.message,
details: Value(log.error),
level: log.level,
createdAt: log.createdAt,
context1: Value(log.logger),
context2: Value(log.stack),
),
);
final logEntity = LoggerMessage.fromDto(log);
await transaction(() async {
await _db.loggerMessages.put(logEntity);
});
return true;
}
Future<bool> insertAll(Iterable<LogMessage> logs) async {
await _db.transaction(() async {
for (final log in logs) {
await _db.into(_db.loggerMessageEntity).insert(
LoggerMessageEntityCompanion.insert(
id: 0, // Will be auto-incremented by the database
message: log.message,
details: Value(log.error),
level: log.level,
createdAt: log.createdAt,
context1: Value(log.logger),
context2: Value(log.stack),
),
);
}
await transaction(() async {
final logEntities =
logs.map((log) => LoggerMessage.fromDto(log)).toList();
await _db.loggerMessages.putAll(logEntities);
});
return true;
}
Future<void> truncate({int limit = 250}) async {
await _db.transaction(() async {
final countQuery = _db.selectOnly(_db.loggerMessageEntity)
..addColumns([_db.loggerMessageEntity.id.count()]);
final countResult = await countQuery.getSingle();
final count = countResult.read(_db.loggerMessageEntity.id.count()) ?? 0;
await transaction(() async {
final count = await _db.loggerMessages.count();
if (count <= limit) return;
final toRemove = count - limit;
final oldestIds = await (_db.select(_db.loggerMessageEntity)
..orderBy([(t) => OrderingTerm.asc(t.createdAt)])
..limit(toRemove))
.get();
final idsToDelete = oldestIds.map((row) => row.id).toList();
await (_db.delete(_db.loggerMessageEntity)
..where((tbl) => tbl.id.isIn(idsToDelete)))
.go();
await _db.loggerMessages.where().limit(toRemove).deleteAll();
});
}
}

View File

@@ -1,19 +1,16 @@
import 'package:immich_mobile/domain/models/store.model.dart';
import 'package:immich_mobile/domain/models/user.model.dart';
import 'package:immich_mobile/infrastructure/entities/isar_store.entity.dart';
import 'package:immich_mobile/infrastructure/entities/store.entity.dart';
import 'package:immich_mobile/infrastructure/repositories/db.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/user.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/drift_store.repository.dart';
import 'package:isar/isar.dart';
class IsarStoreRepository extends IsarDatabaseRepository
implements IStoreRepository {
class IsarStoreRepository extends IsarDatabaseRepository {
final Isar _db;
final validStoreKeys = StoreKey.values.map((e) => e.id).toSet();
IsarStoreRepository(super.db) : _db = db;
@override
Future<bool> deleteAll() async {
return await transaction(() async {
await _db.storeValues.clear();
@@ -21,7 +18,6 @@ class IsarStoreRepository extends IsarDatabaseRepository
});
}
@override
Stream<StoreDto<Object>> watchAll() {
return _db.storeValues
.filter()
@@ -34,12 +30,10 @@ class IsarStoreRepository extends IsarDatabaseRepository
);
}
@override
Future<void> delete<T>(StoreKey<T> key) async {
return await transaction(() async => await _db.storeValues.delete(key.id));
}
@override
Future<bool> insert<T>(StoreKey<T> key, T value) async {
return await transaction(() async {
await _db.storeValues.put(await _fromValue(key, value));
@@ -47,7 +41,6 @@ class IsarStoreRepository extends IsarDatabaseRepository
});
}
@override
Future<T?> tryGet<T>(StoreKey<T> key) async {
final entity = (await _db.storeValues.get(key.id));
if (entity == null) {
@@ -56,7 +49,6 @@ class IsarStoreRepository extends IsarDatabaseRepository
return await _toValue(key, entity);
}
@override
Future<bool> update<T>(StoreKey<T> key, T value) async {
return await transaction(() async {
await _db.storeValues.put(await _fromValue(key, value));
@@ -64,7 +56,6 @@ class IsarStoreRepository extends IsarDatabaseRepository
});
}
@override
Stream<T?> watch<T>(StoreKey<T> key) async* {
yield* _db.storeValues
.watchObject(key.id, fireImmediately: true)
@@ -109,7 +100,6 @@ class IsarStoreRepository extends IsarDatabaseRepository
return StoreValue(key.id, intValue: intValue, strValue: strValue);
}
@override
Future<List<StoreDto<Object>>> getAll() async {
final entities = await _db.storeValues
.filter()

View File

@@ -1,11 +1,12 @@
import 'dart:async';
import 'dart:io';
import 'package:drift/drift.dart';
import 'package:flutter/foundation.dart';
import 'package:immich_mobile/domain/models/log.model.dart';
import 'package:immich_mobile/infrastructure/repositories/db.repository.dart';
import 'package:immich_mobile/infrastructure/entities/log.entity.dart';
import 'package:immich_mobile/infrastructure/repositories/log.repository.dart';
// ignore: import_rule_isar
import 'package:isar/isar.dart';
const kDevLoggerTag = 'DEV';
@@ -13,38 +14,28 @@ abstract final class DLog {
const DLog();
static Stream<List<LogMessage>> watchLog() {
final db = Drift();
final db = Isar.getInstance();
if (db == null) {
return const Stream.empty();
}
final query = db.select(db.loggerMessageEntity)
..where((tbl) => tbl.context1.equals(kDevLoggerTag))
..orderBy([(t) => OrderingTerm.desc(t.createdAt)]);
return query.watch().map(
(rows) => rows
.map(
(row) => LogMessage(
message: row.message,
level: row.level,
createdAt: row.createdAt,
logger: row.context1,
error: row.details,
stack: row.context2,
),
)
.toList(),
);
return db.loggerMessages
.filter()
.context1EqualTo(kDevLoggerTag)
.sortByCreatedAtDesc()
.watch(fireImmediately: true)
.map((logs) => logs.map((log) => log.toDto()).toList());
}
static void clearLog() {
final db = Drift();
final db = Isar.getInstance();
if (db == null) {
return;
}
unawaited(
db.transaction(() async {
await (db.delete(db.loggerMessageEntity)
..where((tbl) => tbl.context1.equals(kDevLoggerTag)))
.go();
}),
);
db.writeTxnSync(() {
db.loggerMessages.filter().context1EqualTo(kDevLoggerTag).deleteAllSync();
});
}
static void log(String message, [Object? error, StackTrace? stackTrace]) {
@@ -58,7 +49,10 @@ abstract final class DLog {
debugPrint('StackTrace: $stackTrace');
}
final db = Drift();
final isar = Isar.getInstance();
if (isar == null) {
return;
}
final record = LogMessage(
message: message,
@@ -69,6 +63,6 @@ abstract final class DLog {
stack: stackTrace?.toString(),
);
unawaited(LogRepository(db).insert(record));
unawaited(IsarLogRepository(isar).insert(record));
}
}

View File

@@ -5,11 +5,8 @@ import 'package:drift/drift.dart' hide Column;
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/store.model.dart';
import 'package:immich_mobile/domain/models/user.model.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/extensions/theme_extensions.dart';
import 'package:immich_mobile/infrastructure/repositories/drift_store.repository.dart';
import 'package:immich_mobile/presentation/pages/dev/dev_logger.dart';
import 'package:immich_mobile/providers/background_sync.provider.dart';
import 'package:immich_mobile/providers/infrastructure/db.provider.dart';
@@ -17,32 +14,6 @@ import 'package:immich_mobile/providers/infrastructure/platform.provider.dart';
import 'package:immich_mobile/routing/router.dart';
final _features = [
_Feature(
name: 'test',
icon: Icons.abc,
onTap: (_, ref) {
final UserDto value = UserDto(
id: "1234",
email: "alex@email.com",
name: "alex",
isAdmin: true,
updatedAt: DateTime.now(),
);
ref
.read(driftStoreRepositoryProvider)
.insert(StoreKey.serverUrl, "https://example.com");
final readback =
ref.read(driftStoreRepositoryProvider).tryGet(StoreKey.serverUrl);
readback.then((value) {
print("Read back: $value");
});
return Future.value();
},
),
_Feature(
name: 'Sync Local',
icon: Icons.photo_album_rounded,

View File

@@ -14,7 +14,7 @@ part of 'router.dart';
/// [ActivitiesPage]
class ActivitiesRoute extends PageRouteInfo<void> {
const ActivitiesRoute({List<PageRouteInfo>? children})
: super(ActivitiesRoute.name, initialChildren: children);
: super(ActivitiesRoute.name, initialChildren: children);
static const String name = 'ActivitiesRoute';
@@ -35,13 +35,13 @@ class AlbumAdditionalSharedUserSelectionRoute
required Album album,
List<PageRouteInfo>? children,
}) : super(
AlbumAdditionalSharedUserSelectionRoute.name,
args: AlbumAdditionalSharedUserSelectionRouteArgs(
key: key,
album: album,
),
initialChildren: children,
);
AlbumAdditionalSharedUserSelectionRoute.name,
args: AlbumAdditionalSharedUserSelectionRouteArgs(
key: key,
album: album,
),
initialChildren: children,
);
static const String name = 'AlbumAdditionalSharedUserSelectionRoute';
@@ -83,14 +83,14 @@ class AlbumAssetSelectionRoute
bool canDeselect = false,
List<PageRouteInfo>? children,
}) : super(
AlbumAssetSelectionRoute.name,
args: AlbumAssetSelectionRouteArgs(
key: key,
existingAssets: existingAssets,
canDeselect: canDeselect,
),
initialChildren: children,
);
AlbumAssetSelectionRoute.name,
args: AlbumAssetSelectionRouteArgs(
key: key,
existingAssets: existingAssets,
canDeselect: canDeselect,
),
initialChildren: children,
);
static const String name = 'AlbumAssetSelectionRoute';
@@ -130,7 +130,7 @@ class AlbumAssetSelectionRouteArgs {
/// [AlbumOptionsPage]
class AlbumOptionsRoute extends PageRouteInfo<void> {
const AlbumOptionsRoute({List<PageRouteInfo>? children})
: super(AlbumOptionsRoute.name, initialChildren: children);
: super(AlbumOptionsRoute.name, initialChildren: children);
static const String name = 'AlbumOptionsRoute';
@@ -150,10 +150,10 @@ class AlbumPreviewRoute extends PageRouteInfo<AlbumPreviewRouteArgs> {
required Album album,
List<PageRouteInfo>? children,
}) : super(
AlbumPreviewRoute.name,
args: AlbumPreviewRouteArgs(key: key, album: album),
initialChildren: children,
);
AlbumPreviewRoute.name,
args: AlbumPreviewRouteArgs(key: key, album: album),
initialChildren: children,
);
static const String name = 'AlbumPreviewRoute';
@@ -188,10 +188,10 @@ class AlbumSharedUserSelectionRoute
required Set<Asset> assets,
List<PageRouteInfo>? children,
}) : super(
AlbumSharedUserSelectionRoute.name,
args: AlbumSharedUserSelectionRouteArgs(key: key, assets: assets),
initialChildren: children,
);
AlbumSharedUserSelectionRoute.name,
args: AlbumSharedUserSelectionRouteArgs(key: key, assets: assets),
initialChildren: children,
);
static const String name = 'AlbumSharedUserSelectionRoute';
@@ -225,10 +225,10 @@ class AlbumViewerRoute extends PageRouteInfo<AlbumViewerRouteArgs> {
required int albumId,
List<PageRouteInfo>? children,
}) : super(
AlbumViewerRoute.name,
args: AlbumViewerRouteArgs(key: key, albumId: albumId),
initialChildren: children,
);
AlbumViewerRoute.name,
args: AlbumViewerRouteArgs(key: key, albumId: albumId),
initialChildren: children,
);
static const String name = 'AlbumViewerRoute';
@@ -258,7 +258,7 @@ class AlbumViewerRouteArgs {
/// [AlbumsPage]
class AlbumsRoute extends PageRouteInfo<void> {
const AlbumsRoute({List<PageRouteInfo>? children})
: super(AlbumsRoute.name, initialChildren: children);
: super(AlbumsRoute.name, initialChildren: children);
static const String name = 'AlbumsRoute';
@@ -274,7 +274,7 @@ class AlbumsRoute extends PageRouteInfo<void> {
/// [AllMotionPhotosPage]
class AllMotionPhotosRoute extends PageRouteInfo<void> {
const AllMotionPhotosRoute({List<PageRouteInfo>? children})
: super(AllMotionPhotosRoute.name, initialChildren: children);
: super(AllMotionPhotosRoute.name, initialChildren: children);
static const String name = 'AllMotionPhotosRoute';
@@ -290,7 +290,7 @@ class AllMotionPhotosRoute extends PageRouteInfo<void> {
/// [AllPeoplePage]
class AllPeopleRoute extends PageRouteInfo<void> {
const AllPeopleRoute({List<PageRouteInfo>? children})
: super(AllPeopleRoute.name, initialChildren: children);
: super(AllPeopleRoute.name, initialChildren: children);
static const String name = 'AllPeopleRoute';
@@ -306,7 +306,7 @@ class AllPeopleRoute extends PageRouteInfo<void> {
/// [AllPlacesPage]
class AllPlacesRoute extends PageRouteInfo<void> {
const AllPlacesRoute({List<PageRouteInfo>? children})
: super(AllPlacesRoute.name, initialChildren: children);
: super(AllPlacesRoute.name, initialChildren: children);
static const String name = 'AllPlacesRoute';
@@ -322,7 +322,7 @@ class AllPlacesRoute extends PageRouteInfo<void> {
/// [AllVideosPage]
class AllVideosRoute extends PageRouteInfo<void> {
const AllVideosRoute({List<PageRouteInfo>? children})
: super(AllVideosRoute.name, initialChildren: children);
: super(AllVideosRoute.name, initialChildren: children);
static const String name = 'AllVideosRoute';
@@ -342,10 +342,10 @@ class AppLogDetailRoute extends PageRouteInfo<AppLogDetailRouteArgs> {
required LogMessage logMessage,
List<PageRouteInfo>? children,
}) : super(
AppLogDetailRoute.name,
args: AppLogDetailRouteArgs(key: key, logMessage: logMessage),
initialChildren: children,
);
AppLogDetailRoute.name,
args: AppLogDetailRouteArgs(key: key, logMessage: logMessage),
initialChildren: children,
);
static const String name = 'AppLogDetailRoute';
@@ -375,7 +375,7 @@ class AppLogDetailRouteArgs {
/// [AppLogPage]
class AppLogRoute extends PageRouteInfo<void> {
const AppLogRoute({List<PageRouteInfo>? children})
: super(AppLogRoute.name, initialChildren: children);
: super(AppLogRoute.name, initialChildren: children);
static const String name = 'AppLogRoute';
@@ -391,7 +391,7 @@ class AppLogRoute extends PageRouteInfo<void> {
/// [ArchivePage]
class ArchiveRoute extends PageRouteInfo<void> {
const ArchiveRoute({List<PageRouteInfo>? children})
: super(ArchiveRoute.name, initialChildren: children);
: super(ArchiveRoute.name, initialChildren: children);
static const String name = 'ArchiveRoute';
@@ -407,7 +407,7 @@ class ArchiveRoute extends PageRouteInfo<void> {
/// [BackupAlbumSelectionPage]
class BackupAlbumSelectionRoute extends PageRouteInfo<void> {
const BackupAlbumSelectionRoute({List<PageRouteInfo>? children})
: super(BackupAlbumSelectionRoute.name, initialChildren: children);
: super(BackupAlbumSelectionRoute.name, initialChildren: children);
static const String name = 'BackupAlbumSelectionRoute';
@@ -423,7 +423,7 @@ class BackupAlbumSelectionRoute extends PageRouteInfo<void> {
/// [BackupControllerPage]
class BackupControllerRoute extends PageRouteInfo<void> {
const BackupControllerRoute({List<PageRouteInfo>? children})
: super(BackupControllerRoute.name, initialChildren: children);
: super(BackupControllerRoute.name, initialChildren: children);
static const String name = 'BackupControllerRoute';
@@ -439,7 +439,7 @@ class BackupControllerRoute extends PageRouteInfo<void> {
/// [BackupOptionsPage]
class BackupOptionsRoute extends PageRouteInfo<void> {
const BackupOptionsRoute({List<PageRouteInfo>? children})
: super(BackupOptionsRoute.name, initialChildren: children);
: super(BackupOptionsRoute.name, initialChildren: children);
static const String name = 'BackupOptionsRoute';
@@ -455,7 +455,7 @@ class BackupOptionsRoute extends PageRouteInfo<void> {
/// [ChangePasswordPage]
class ChangePasswordRoute extends PageRouteInfo<void> {
const ChangePasswordRoute({List<PageRouteInfo>? children})
: super(ChangePasswordRoute.name, initialChildren: children);
: super(ChangePasswordRoute.name, initialChildren: children);
static const String name = 'ChangePasswordRoute';
@@ -475,10 +475,10 @@ class CreateAlbumRoute extends PageRouteInfo<CreateAlbumRouteArgs> {
List<Asset>? assets,
List<PageRouteInfo>? children,
}) : super(
CreateAlbumRoute.name,
args: CreateAlbumRouteArgs(key: key, assets: assets),
initialChildren: children,
);
CreateAlbumRoute.name,
args: CreateAlbumRouteArgs(key: key, assets: assets),
initialChildren: children,
);
static const String name = 'CreateAlbumRoute';
@@ -515,10 +515,10 @@ class CropImageRoute extends PageRouteInfo<CropImageRouteArgs> {
required Asset asset,
List<PageRouteInfo>? children,
}) : super(
CropImageRoute.name,
args: CropImageRouteArgs(key: key, image: image, asset: asset),
initialChildren: children,
);
CropImageRoute.name,
args: CropImageRouteArgs(key: key, image: image, asset: asset),
initialChildren: children,
);
static const String name = 'CropImageRoute';
@@ -560,15 +560,15 @@ class EditImageRoute extends PageRouteInfo<EditImageRouteArgs> {
required bool isEdited,
List<PageRouteInfo>? children,
}) : super(
EditImageRoute.name,
args: EditImageRouteArgs(
key: key,
asset: asset,
image: image,
isEdited: isEdited,
),
initialChildren: children,
);
EditImageRoute.name,
args: EditImageRouteArgs(
key: key,
asset: asset,
image: image,
isEdited: isEdited,
),
initialChildren: children,
);
static const String name = 'EditImageRoute';
@@ -612,7 +612,7 @@ class EditImageRouteArgs {
/// [FailedBackupStatusPage]
class FailedBackupStatusRoute extends PageRouteInfo<void> {
const FailedBackupStatusRoute({List<PageRouteInfo>? children})
: super(FailedBackupStatusRoute.name, initialChildren: children);
: super(FailedBackupStatusRoute.name, initialChildren: children);
static const String name = 'FailedBackupStatusRoute';
@@ -628,7 +628,7 @@ class FailedBackupStatusRoute extends PageRouteInfo<void> {
/// [FavoritesPage]
class FavoritesRoute extends PageRouteInfo<void> {
const FavoritesRoute({List<PageRouteInfo>? children})
: super(FavoritesRoute.name, initialChildren: children);
: super(FavoritesRoute.name, initialChildren: children);
static const String name = 'FavoritesRoute';
@@ -644,7 +644,7 @@ class FavoritesRoute extends PageRouteInfo<void> {
/// [FeatInDevPage]
class FeatInDevRoute extends PageRouteInfo<void> {
const FeatInDevRoute({List<PageRouteInfo>? children})
: super(FeatInDevRoute.name, initialChildren: children);
: super(FeatInDevRoute.name, initialChildren: children);
static const String name = 'FeatInDevRoute';
@@ -665,10 +665,10 @@ class FilterImageRoute extends PageRouteInfo<FilterImageRouteArgs> {
required Asset asset,
List<PageRouteInfo>? children,
}) : super(
FilterImageRoute.name,
args: FilterImageRouteArgs(key: key, image: image, asset: asset),
initialChildren: children,
);
FilterImageRoute.name,
args: FilterImageRouteArgs(key: key, image: image, asset: asset),
initialChildren: children,
);
static const String name = 'FilterImageRoute';
@@ -712,10 +712,10 @@ class FolderRoute extends PageRouteInfo<FolderRouteArgs> {
RecursiveFolder? folder,
List<PageRouteInfo>? children,
}) : super(
FolderRoute.name,
args: FolderRouteArgs(key: key, folder: folder),
initialChildren: children,
);
FolderRoute.name,
args: FolderRouteArgs(key: key, folder: folder),
initialChildren: children,
);
static const String name = 'FolderRoute';
@@ -754,16 +754,16 @@ class GalleryViewerRoute extends PageRouteInfo<GalleryViewerRouteArgs> {
bool showStack = false,
List<PageRouteInfo>? children,
}) : super(
GalleryViewerRoute.name,
args: GalleryViewerRouteArgs(
key: key,
renderList: renderList,
initialIndex: initialIndex,
heroOffset: heroOffset,
showStack: showStack,
),
initialChildren: children,
);
GalleryViewerRoute.name,
args: GalleryViewerRouteArgs(
key: key,
renderList: renderList,
initialIndex: initialIndex,
heroOffset: heroOffset,
showStack: showStack,
),
initialChildren: children,
);
static const String name = 'GalleryViewerRoute';
@@ -811,7 +811,7 @@ class GalleryViewerRouteArgs {
/// [HeaderSettingsPage]
class HeaderSettingsRoute extends PageRouteInfo<void> {
const HeaderSettingsRoute({List<PageRouteInfo>? children})
: super(HeaderSettingsRoute.name, initialChildren: children);
: super(HeaderSettingsRoute.name, initialChildren: children);
static const String name = 'HeaderSettingsRoute';
@@ -827,7 +827,7 @@ class HeaderSettingsRoute extends PageRouteInfo<void> {
/// [LibraryPage]
class LibraryRoute extends PageRouteInfo<void> {
const LibraryRoute({List<PageRouteInfo>? children})
: super(LibraryRoute.name, initialChildren: children);
: super(LibraryRoute.name, initialChildren: children);
static const String name = 'LibraryRoute';
@@ -843,7 +843,7 @@ class LibraryRoute extends PageRouteInfo<void> {
/// [LocalAlbumsPage]
class LocalAlbumsRoute extends PageRouteInfo<void> {
const LocalAlbumsRoute({List<PageRouteInfo>? children})
: super(LocalAlbumsRoute.name, initialChildren: children);
: super(LocalAlbumsRoute.name, initialChildren: children);
static const String name = 'LocalAlbumsRoute';
@@ -859,7 +859,7 @@ class LocalAlbumsRoute extends PageRouteInfo<void> {
/// [LocalMediaSummaryPage]
class LocalMediaSummaryRoute extends PageRouteInfo<void> {
const LocalMediaSummaryRoute({List<PageRouteInfo>? children})
: super(LocalMediaSummaryRoute.name, initialChildren: children);
: super(LocalMediaSummaryRoute.name, initialChildren: children);
static const String name = 'LocalMediaSummaryRoute';
@@ -879,10 +879,10 @@ class LocalTimelineRoute extends PageRouteInfo<LocalTimelineRouteArgs> {
required String albumId,
List<PageRouteInfo>? children,
}) : super(
LocalTimelineRoute.name,
args: LocalTimelineRouteArgs(key: key, albumId: albumId),
initialChildren: children,
);
LocalTimelineRoute.name,
args: LocalTimelineRouteArgs(key: key, albumId: albumId),
initialChildren: children,
);
static const String name = 'LocalTimelineRoute';
@@ -912,7 +912,7 @@ class LocalTimelineRouteArgs {
/// [LockedPage]
class LockedRoute extends PageRouteInfo<void> {
const LockedRoute({List<PageRouteInfo>? children})
: super(LockedRoute.name, initialChildren: children);
: super(LockedRoute.name, initialChildren: children);
static const String name = 'LockedRoute';
@@ -928,7 +928,7 @@ class LockedRoute extends PageRouteInfo<void> {
/// [LoginPage]
class LoginRoute extends PageRouteInfo<void> {
const LoginRoute({List<PageRouteInfo>? children})
: super(LoginRoute.name, initialChildren: children);
: super(LoginRoute.name, initialChildren: children);
static const String name = 'LoginRoute';
@@ -944,7 +944,7 @@ class LoginRoute extends PageRouteInfo<void> {
/// [MainTimelinePage]
class MainTimelineRoute extends PageRouteInfo<void> {
const MainTimelineRoute({List<PageRouteInfo>? children})
: super(MainTimelineRoute.name, initialChildren: children);
: super(MainTimelineRoute.name, initialChildren: children);
static const String name = 'MainTimelineRoute';
@@ -964,13 +964,13 @@ class MapLocationPickerRoute extends PageRouteInfo<MapLocationPickerRouteArgs> {
LatLng initialLatLng = const LatLng(0, 0),
List<PageRouteInfo>? children,
}) : super(
MapLocationPickerRoute.name,
args: MapLocationPickerRouteArgs(
key: key,
initialLatLng: initialLatLng,
),
initialChildren: children,
);
MapLocationPickerRoute.name,
args: MapLocationPickerRouteArgs(
key: key,
initialLatLng: initialLatLng,
),
initialChildren: children,
);
static const String name = 'MapLocationPickerRoute';
@@ -1008,11 +1008,11 @@ class MapLocationPickerRouteArgs {
/// [MapPage]
class MapRoute extends PageRouteInfo<MapRouteArgs> {
MapRoute({Key? key, LatLng? initialLocation, List<PageRouteInfo>? children})
: super(
MapRoute.name,
args: MapRouteArgs(key: key, initialLocation: initialLocation),
initialChildren: children,
);
: super(
MapRoute.name,
args: MapRouteArgs(key: key, initialLocation: initialLocation),
initialChildren: children,
);
static const String name = 'MapRoute';
@@ -1049,14 +1049,14 @@ class MemoryRoute extends PageRouteInfo<MemoryRouteArgs> {
Key? key,
List<PageRouteInfo>? children,
}) : super(
MemoryRoute.name,
args: MemoryRouteArgs(
memories: memories,
memoryIndex: memoryIndex,
key: key,
),
initialChildren: children,
);
MemoryRoute.name,
args: MemoryRouteArgs(
memories: memories,
memoryIndex: memoryIndex,
key: key,
),
initialChildren: children,
);
static const String name = 'MemoryRoute';
@@ -1103,16 +1103,16 @@ class NativeVideoViewerRoute extends PageRouteInfo<NativeVideoViewerRouteArgs> {
int playbackDelayFactor = 1,
List<PageRouteInfo>? children,
}) : super(
NativeVideoViewerRoute.name,
args: NativeVideoViewerRouteArgs(
key: key,
asset: asset,
image: image,
showControls: showControls,
playbackDelayFactor: playbackDelayFactor,
),
initialChildren: children,
);
NativeVideoViewerRoute.name,
args: NativeVideoViewerRouteArgs(
key: key,
asset: asset,
image: image,
showControls: showControls,
playbackDelayFactor: playbackDelayFactor,
),
initialChildren: children,
);
static const String name = 'NativeVideoViewerRoute';
@@ -1164,10 +1164,10 @@ class PartnerDetailRoute extends PageRouteInfo<PartnerDetailRouteArgs> {
required UserDto partner,
List<PageRouteInfo>? children,
}) : super(
PartnerDetailRoute.name,
args: PartnerDetailRouteArgs(key: key, partner: partner),
initialChildren: children,
);
PartnerDetailRoute.name,
args: PartnerDetailRouteArgs(key: key, partner: partner),
initialChildren: children,
);
static const String name = 'PartnerDetailRoute';
@@ -1197,7 +1197,7 @@ class PartnerDetailRouteArgs {
/// [PartnerPage]
class PartnerRoute extends PageRouteInfo<void> {
const PartnerRoute({List<PageRouteInfo>? children})
: super(PartnerRoute.name, initialChildren: children);
: super(PartnerRoute.name, initialChildren: children);
static const String name = 'PartnerRoute';
@@ -1213,7 +1213,7 @@ class PartnerRoute extends PageRouteInfo<void> {
/// [PeopleCollectionPage]
class PeopleCollectionRoute extends PageRouteInfo<void> {
const PeopleCollectionRoute({List<PageRouteInfo>? children})
: super(PeopleCollectionRoute.name, initialChildren: children);
: super(PeopleCollectionRoute.name, initialChildren: children);
static const String name = 'PeopleCollectionRoute';
@@ -1229,7 +1229,7 @@ class PeopleCollectionRoute extends PageRouteInfo<void> {
/// [PermissionOnboardingPage]
class PermissionOnboardingRoute extends PageRouteInfo<void> {
const PermissionOnboardingRoute({List<PageRouteInfo>? children})
: super(PermissionOnboardingRoute.name, initialChildren: children);
: super(PermissionOnboardingRoute.name, initialChildren: children);
static const String name = 'PermissionOnboardingRoute';
@@ -1250,14 +1250,14 @@ class PersonResultRoute extends PageRouteInfo<PersonResultRouteArgs> {
required String personName,
List<PageRouteInfo>? children,
}) : super(
PersonResultRoute.name,
args: PersonResultRouteArgs(
key: key,
personId: personId,
personName: personName,
),
initialChildren: children,
);
PersonResultRoute.name,
args: PersonResultRouteArgs(
key: key,
personId: personId,
personName: personName,
),
initialChildren: children,
);
static const String name = 'PersonResultRoute';
@@ -1297,7 +1297,7 @@ class PersonResultRouteArgs {
/// [PhotosPage]
class PhotosRoute extends PageRouteInfo<void> {
const PhotosRoute({List<PageRouteInfo>? children})
: super(PhotosRoute.name, initialChildren: children);
: super(PhotosRoute.name, initialChildren: children);
static const String name = 'PhotosRoute';
@@ -1317,10 +1317,10 @@ class PinAuthRoute extends PageRouteInfo<PinAuthRouteArgs> {
bool createPinCode = false,
List<PageRouteInfo>? children,
}) : super(
PinAuthRoute.name,
args: PinAuthRouteArgs(key: key, createPinCode: createPinCode),
initialChildren: children,
);
PinAuthRoute.name,
args: PinAuthRouteArgs(key: key, createPinCode: createPinCode),
initialChildren: children,
);
static const String name = 'PinAuthRoute';
@@ -1356,13 +1356,13 @@ class PlacesCollectionRoute extends PageRouteInfo<PlacesCollectionRouteArgs> {
LatLng? currentLocation,
List<PageRouteInfo>? children,
}) : super(
PlacesCollectionRoute.name,
args: PlacesCollectionRouteArgs(
key: key,
currentLocation: currentLocation,
),
initialChildren: children,
);
PlacesCollectionRoute.name,
args: PlacesCollectionRouteArgs(
key: key,
currentLocation: currentLocation,
),
initialChildren: children,
);
static const String name = 'PlacesCollectionRoute';
@@ -1397,7 +1397,7 @@ class PlacesCollectionRouteArgs {
/// [RecentlyTakenPage]
class RecentlyTakenRoute extends PageRouteInfo<void> {
const RecentlyTakenRoute({List<PageRouteInfo>? children})
: super(RecentlyTakenRoute.name, initialChildren: children);
: super(RecentlyTakenRoute.name, initialChildren: children);
static const String name = 'RecentlyTakenRoute';
@@ -1413,7 +1413,7 @@ class RecentlyTakenRoute extends PageRouteInfo<void> {
/// [RemoteMediaSummaryPage]
class RemoteMediaSummaryRoute extends PageRouteInfo<void> {
const RemoteMediaSummaryRoute({List<PageRouteInfo>? children})
: super(RemoteMediaSummaryRoute.name, initialChildren: children);
: super(RemoteMediaSummaryRoute.name, initialChildren: children);
static const String name = 'RemoteMediaSummaryRoute';
@@ -1433,10 +1433,10 @@ class RemoteTimelineRoute extends PageRouteInfo<RemoteTimelineRouteArgs> {
required String albumId,
List<PageRouteInfo>? children,
}) : super(
RemoteTimelineRoute.name,
args: RemoteTimelineRouteArgs(key: key, albumId: albumId),
initialChildren: children,
);
RemoteTimelineRoute.name,
args: RemoteTimelineRouteArgs(key: key, albumId: albumId),
initialChildren: children,
);
static const String name = 'RemoteTimelineRoute';
@@ -1470,10 +1470,10 @@ class SearchRoute extends PageRouteInfo<SearchRouteArgs> {
SearchFilter? prefilter,
List<PageRouteInfo>? children,
}) : super(
SearchRoute.name,
args: SearchRouteArgs(key: key, prefilter: prefilter),
initialChildren: children,
);
SearchRoute.name,
args: SearchRouteArgs(key: key, prefilter: prefilter),
initialChildren: children,
);
static const String name = 'SearchRoute';
@@ -1505,7 +1505,7 @@ class SearchRouteArgs {
/// [SettingsPage]
class SettingsRoute extends PageRouteInfo<void> {
const SettingsRoute({List<PageRouteInfo>? children})
: super(SettingsRoute.name, initialChildren: children);
: super(SettingsRoute.name, initialChildren: children);
static const String name = 'SettingsRoute';
@@ -1525,10 +1525,10 @@ class SettingsSubRoute extends PageRouteInfo<SettingsSubRouteArgs> {
Key? key,
List<PageRouteInfo>? children,
}) : super(
SettingsSubRoute.name,
args: SettingsSubRouteArgs(section: section, key: key),
initialChildren: children,
);
SettingsSubRoute.name,
args: SettingsSubRouteArgs(section: section, key: key),
initialChildren: children,
);
static const String name = 'SettingsSubRoute';
@@ -1562,10 +1562,10 @@ class ShareIntentRoute extends PageRouteInfo<ShareIntentRouteArgs> {
required List<ShareIntentAttachment> attachments,
List<PageRouteInfo>? children,
}) : super(
ShareIntentRoute.name,
args: ShareIntentRouteArgs(key: key, attachments: attachments),
initialChildren: children,
);
ShareIntentRoute.name,
args: ShareIntentRouteArgs(key: key, attachments: attachments),
initialChildren: children,
);
static const String name = 'ShareIntentRoute';
@@ -1601,15 +1601,15 @@ class SharedLinkEditRoute extends PageRouteInfo<SharedLinkEditRouteArgs> {
String? albumId,
List<PageRouteInfo>? children,
}) : super(
SharedLinkEditRoute.name,
args: SharedLinkEditRouteArgs(
key: key,
existingLink: existingLink,
assetsList: assetsList,
albumId: albumId,
),
initialChildren: children,
);
SharedLinkEditRoute.name,
args: SharedLinkEditRouteArgs(
key: key,
existingLink: existingLink,
assetsList: assetsList,
albumId: albumId,
),
initialChildren: children,
);
static const String name = 'SharedLinkEditRoute';
@@ -1655,7 +1655,7 @@ class SharedLinkEditRouteArgs {
/// [SharedLinkPage]
class SharedLinkRoute extends PageRouteInfo<void> {
const SharedLinkRoute({List<PageRouteInfo>? children})
: super(SharedLinkRoute.name, initialChildren: children);
: super(SharedLinkRoute.name, initialChildren: children);
static const String name = 'SharedLinkRoute';
@@ -1671,7 +1671,7 @@ class SharedLinkRoute extends PageRouteInfo<void> {
/// [SplashScreenPage]
class SplashScreenRoute extends PageRouteInfo<void> {
const SplashScreenRoute({List<PageRouteInfo>? children})
: super(SplashScreenRoute.name, initialChildren: children);
: super(SplashScreenRoute.name, initialChildren: children);
static const String name = 'SplashScreenRoute';
@@ -1687,7 +1687,7 @@ class SplashScreenRoute extends PageRouteInfo<void> {
/// [TabControllerPage]
class TabControllerRoute extends PageRouteInfo<void> {
const TabControllerRoute({List<PageRouteInfo>? children})
: super(TabControllerRoute.name, initialChildren: children);
: super(TabControllerRoute.name, initialChildren: children);
static const String name = 'TabControllerRoute';
@@ -1703,7 +1703,7 @@ class TabControllerRoute extends PageRouteInfo<void> {
/// [TabShellPage]
class TabShellRoute extends PageRouteInfo<void> {
const TabShellRoute({List<PageRouteInfo>? children})
: super(TabShellRoute.name, initialChildren: children);
: super(TabShellRoute.name, initialChildren: children);
static const String name = 'TabShellRoute';
@@ -1719,7 +1719,7 @@ class TabShellRoute extends PageRouteInfo<void> {
/// [TrashPage]
class TrashRoute extends PageRouteInfo<void> {
const TrashRoute({List<PageRouteInfo>? children})
: super(TrashRoute.name, initialChildren: children);
: super(TrashRoute.name, initialChildren: children);
static const String name = 'TrashRoute';

View File

@@ -13,12 +13,10 @@ import 'package:immich_mobile/entities/ios_device_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/device_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/exif.entity.dart';
import 'package:immich_mobile/infrastructure/entities/log.entity.dart';
import 'package:immich_mobile/infrastructure/entities/isar_store.entity.dart';
import 'package:immich_mobile/infrastructure/entities/store.entity.dart';
import 'package:immich_mobile/infrastructure/entities/user.entity.dart';
import 'package:immich_mobile/infrastructure/repositories/log.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/store.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/db.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/drift_store.repository.dart';
import 'package:isar/isar.dart';
import 'package:path_provider/path_provider.dart';
@@ -50,32 +48,15 @@ abstract final class Bootstrap {
);
}
static Future<Drift> initDrift() async {
return Drift();
}
static Future<void> initDomain(
Isar db, {
bool shouldBufferLogs = true,
}) async {
final driftDb = Drift();
await StoreService.init(storeRepository: IsarStoreRepository(db));
await LogService.init(
logRepository: LogRepository(driftDb),
logRepository: IsarLogRepository(db),
storeRepository: IsarStoreRepository(db),
shouldBuffer: shouldBufferLogs,
);
}
static Future<void> initDomainWithDrift(
Drift db, {
bool shouldBufferLogs = true,
}) async {
await StoreService.init(storeRepository: DriftStoreRepository(db));
await LogService.init(
logRepository: LogRepository(db),
storeRepository: DriftStoreRepository(db),
shouldBuffer: shouldBufferLogs,
);
}
}

View File

@@ -14,8 +14,8 @@ import 'package:immich_mobile/entities/ios_device_asset.entity.dart';
import 'package:immich_mobile/entities/store.entity.dart';
import 'package:immich_mobile/infrastructure/entities/device_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/exif.entity.dart';
import 'package:immich_mobile/infrastructure/entities/isar_store.entity.dart';
import 'package:immich_mobile/infrastructure/entities/local_asset.entity.drift.dart';
import 'package:immich_mobile/infrastructure/entities/store.entity.dart';
import 'package:immich_mobile/infrastructure/entities/user.entity.dart';
import 'package:immich_mobile/infrastructure/repositories/db.repository.dart';
import 'package:immich_mobile/utils/diff.dart';

View File

@@ -1,13 +1,12 @@
import 'package:drift/drift.dart' hide isNull;
import 'package:drift/native.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:immich_mobile/domain/models/store.model.dart';
import 'package:immich_mobile/domain/models/user.model.dart';
import 'package:immich_mobile/infrastructure/entities/store.entity.drift.dart';
import 'package:immich_mobile/infrastructure/repositories/db.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/drift_store.repository.dart';
import 'package:immich_mobile/infrastructure/entities/store.entity.dart';
import 'package:immich_mobile/infrastructure/repositories/store.repository.dart';
import 'package:isar/isar.dart';
import '../../fixtures/user.stub.dart';
import '../../test_utils.dart';
const _kTestAccessToken = "#TestToken";
final _kTestBackupFailed = DateTime(2025, 2, 20, 11, 45);
@@ -15,28 +14,16 @@ const _kTestVersion = 10;
const _kTestColorfulInterface = false;
final _kTestUser = UserStub.admin;
Future<void> _addIntStoreValue(Drift db, StoreKey key, int? value) async {
await db.into(db.storeEntity).insert(
StoreEntityCompanion.insert(
id: key.id,
intValue: Value(value),
strValue: const Value(null),
),
);
Future<void> _addIntStoreValue(Isar db, StoreKey key, int? value) async {
await db.storeValues.put(StoreValue(key.id, intValue: value, strValue: null));
}
Future<void> _addStrStoreValue(Drift db, StoreKey key, String? value) async {
await db.into(db.storeEntity).insert(
StoreEntityCompanion.insert(
id: key.id,
intValue: const Value(null),
strValue: Value(value),
),
);
Future<void> _addStrStoreValue(Isar db, StoreKey key, String? value) async {
await db.storeValues.put(StoreValue(key.id, intValue: null, strValue: value));
}
Future<void> _populateStore(Drift db) async {
await db.transaction(() async {
Future<void> _populateStore(Isar db) async {
await db.writeTxn(() async {
await _addIntStoreValue(
db,
StoreKey.colorfulInterface,
@@ -53,12 +40,12 @@ Future<void> _populateStore(Drift db) async {
}
void main() {
late Drift db;
late DriftStoreRepository sut;
late Isar db;
late IsarStoreRepository sut;
setUp(() async {
db = Drift(NativeDatabase.memory());
sut = DriftStoreRepository(db);
db = await TestUtils.initIsar();
sut = IsarStoreRepository(db);
});
group('Store Repository converters:', () {
@@ -118,16 +105,10 @@ void main() {
});
test('deleteAll()', () async {
final countQuery = db.selectOnly(db.storeEntity)
..addColumns([db.storeEntity.id.count()]);
final countResult = await countQuery.getSingle();
final count = countResult.read(db.storeEntity.id.count()) ?? 0;
final count = await db.storeValues.count();
expect(count, isNot(isZero));
await sut.deleteAll();
final newCountResult = await countQuery.getSingle();
final newCount = newCountResult.read(db.storeEntity.id.count()) ?? 0;
expect(newCount, isZero);
expectLater(await db.storeValues.count(), isZero);
});
});

View File

@@ -12,7 +12,7 @@ import 'package:mocktail/mocktail.dart';
class MockStoreRepository extends Mock implements IsarStoreRepository {}
class MockLogRepository extends Mock implements LogRepository {}
class MockLogRepository extends Mock implements IsarLogRepository {}
class MockIsarUserRepository extends Mock implements IsarUserRepository {}

View File

@@ -1,4 +1,3 @@
import 'package:drift/native.dart';
import 'package:flutter/widgets.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:immich_mobile/constants/enums.dart';
@@ -9,7 +8,6 @@ import 'package:immich_mobile/domain/services/store.service.dart';
import 'package:immich_mobile/entities/asset.entity.dart';
import 'package:immich_mobile/entities/etag.entity.dart';
import 'package:immich_mobile/entities/store.entity.dart';
import 'package:immich_mobile/infrastructure/repositories/db.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/log.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/store.repository.dart';
import 'package:immich_mobile/repositories/partner_api.repository.dart';
@@ -82,12 +80,12 @@ void main() {
setUpAll(() async {
WidgetsFlutterBinding.ensureInitialized();
final db = await TestUtils.initIsar();
final driftDb = Drift(NativeDatabase.memory());
db.writeTxnSync(() => db.clearSync());
await StoreService.init(storeRepository: IsarStoreRepository(db));
await Store.put(StoreKey.currentUser, owner);
await LogService.init(
logRepository: LogRepository(driftDb),
logRepository: IsarLogRepository(db),
storeRepository: IsarStoreRepository(db),
);
});

View File

@@ -15,6 +15,7 @@ import 'package:immich_mobile/entities/ios_device_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/device_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/exif.entity.dart';
import 'package:immich_mobile/infrastructure/entities/log.entity.dart';
import 'package:immich_mobile/infrastructure/entities/store.entity.dart';
import 'package:immich_mobile/infrastructure/entities/user.entity.dart';
import 'package:isar/isar.dart';
import 'package:mocktail/mocktail.dart';
@@ -40,6 +41,7 @@ abstract final class TestUtils {
final db = await Isar.open(
[
StoreValueSchema,
ExifInfoSchema,
AssetSchema,
AlbumSchema,

View File

@@ -5,7 +5,7 @@ A TypeScript SDK for interfacing with the [Immich](https://immich.app/) API.
## Install
```bash
npm i --save @immich/sdk
pnpm i --save @immich/sdk
```
## Usage

View File

@@ -29,5 +29,6 @@
},
"volta": {
"node": "22.16.0"
}
},
"packageManager": "pnpm@10.12.3+sha512.467df2c586056165580ad6dfb54ceaad94c5a30f80893ebdec5a44c5aa73c205ae4a5bb9d5ed6bb84ea7c249ece786642bbb49d06a307df218d03da41c317417"
}

50
open-api/typescript-sdk/pnpm-lock.yaml generated Normal file
View File

@@ -0,0 +1,50 @@
lockfileVersion: '9.0'
settings:
autoInstallPeers: true
excludeLinksFromLockfile: false
packageExtensionsChecksum: sha256-da8GREkR2VnR5zDxp+RNh2YOpcUGfK6mRCcdi/oaiJs=
importers:
.:
dependencies:
'@oazapfts/runtime':
specifier: ^1.0.2
version: 1.0.4
devDependencies:
'@types/node':
specifier: ^22.15.32
version: 22.15.32
typescript:
specifier: ^5.3.3
version: 5.8.3
packages:
'@oazapfts/runtime@1.0.4':
resolution: {integrity: sha512-7t6C2shug/6tZhQgkCa532oTYBLEnbASV/i1SG1rH2GB4h3aQQujYciYSPT92hvN4IwTe8S2hPkN/6iiOyTlCg==}
'@types/node@22.15.32':
resolution: {integrity: sha512-3jigKqgSjsH6gYZv2nEsqdXfZqIFGAV36XYYjf9KGZ3PSG+IhLecqPnI310RvjutyMwifE2hhhNEklOUrvx/wA==}
typescript@5.8.3:
resolution: {integrity: sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==}
engines: {node: '>=14.17'}
hasBin: true
undici-types@6.21.0:
resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
snapshots:
'@oazapfts/runtime@1.0.4': {}
'@types/node@22.15.32':
dependencies:
undici-types: 6.21.0
typescript@5.8.3: {}
undici-types@6.21.0: {}

15
package-lock.json generated Normal file
View File

@@ -0,0 +1,15 @@
{
"name": "immich-monorepo",
"version": "0.0.1",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "immich-monorepo",
"version": "0.0.1",
"engines": {
"pnpm": ">=10.0.0"
}
}
}
}

10
package.json Normal file
View File

@@ -0,0 +1,10 @@
{
"name": "immich-monorepo",
"version": "0.0.1",
"description": "monorepo for immich and friends",
"private": true,
"packageManager": "pnpm@10.12.3+sha512.467df2c586056165580ad6dfb54ceaad94c5a30f80893ebdec5a44c5aa73c205ae4a5bb9d5ed6bb84ea7c249ece786642bbb49d06a307df218d03da41c317417",
"engines": {
"pnpm": ">=10.0.0"
}
}

11
pnpm-lock.yaml generated Normal file
View File

@@ -0,0 +1,11 @@
lockfileVersion: '9.0'
settings:
autoInstallPeers: true
excludeLinksFromLockfile: false
packageExtensionsChecksum: sha256-da8GREkR2VnR5zDxp+RNh2YOpcUGfK6mRCcdi/oaiJs=
importers:
.: {}

45
pnpm-workspace.yaml Normal file
View File

@@ -0,0 +1,45 @@
packages:
- cli
- docs
- e2e
- open-api/typescript-sdk
- server
- web
dedupePeerDependents: false
ignoredBuiltDependencies:
- '@tailwindcss/oxide'
- canvas
- es5-ext
- esbuild
- '@nestjs/core'
- '@scarf/scarf'
- '@swc/core'
- bcrypt
- cpu-features
- msgpackr-extract
- protobufjs
- ssh2
- utimes
onlyBuiltDependencies:
- sharp
packageExtensions:
# these packages use tslib, but do not declare it as a dependency
nestjs-kysely:
dependencies:
tslib: '*'
nestjs-otel:
dependencies:
tslib: '*'
sharp:
dependencies:
'@img/sharp-libvips-linux-x64': '*'
'@img/sharp-libvips-linux-arm64': '*'
preferWorkspacePackages: true
shamefullyHoist: true
sharedWorkspaceLockfile: false

View File

@@ -1,48 +1,101 @@
# dev build
FROM ghcr.io/immich-app/base-server-dev:202505131114@sha256:cf4507bbbf307e9b6d8ee9418993321f2b85867da8ce14d0a20ccaf9574cb995 AS dev
RUN apt-get install --no-install-recommends -yqq tini
WORKDIR /usr/src/app
COPY server/package.json server/package-lock.json ./
COPY server/patches ./patches
RUN npm ci && \
# exiftool-vendored.pl, sharp-linux-x64 and sharp-linux-arm64 are the only ones we need
# they're marked as optional dependencies, so we need to copy them manually after pruning
rm -rf node_modules/@img/sharp-libvips* && \
rm -rf node_modules/@img/sharp-linuxmusl-x64
ENV PATH="${PATH}:/usr/src/app/bin" \
IMMICH_ENV=development \
NVIDIA_DRIVER_CAPABILITIES=all \
NVIDIA_VISIBLE_DEVICES=all
NVIDIA_VISIBLE_DEVICES=all \
COREPACK_ENABLE_AUTO_PIN=0 \
COREPACK_ENABLE_DOWNLOAD_PROMPT=0 \
npm_config_devdir=/buildcache/node_gyp
RUN corepack enable && \
corepack install -g pnpm && \
apt-get install --no-install-recommends -yqq tini
RUN mkdir -p /buildcache/pnpm_store && \
chown -R node:node /buildcache && \
mkdir -p /usr/local/etc && \
echo "store-dir=/buildcache/pnpm_store" >> /usr/local/etc/npmrc
RUN rm -rf /usr/src/app && \
mkdir -p /usr/src/app && \
chown node:node /usr/src/app
USER node
WORKDIR /usr/src/app
COPY --chown=node:node \
server/package.json \
server/pnpm-lock.yaml \
pnpm-workspace.yaml \
./
RUN pnpm fetch
ENTRYPOINT ["tini", "--", "/bin/sh"]
FROM dev AS dev-docker
RUN pnpm install
FROM dev AS dev-container-server
RUN rm -rf /usr/src/app
USER root
RUN apt-get update && \
apt-get install sudo inetutils-ping openjdk-11-jre-headless \
vim nano \
-y --no-install-recommends --fix-missing
vim nano -y --no-install-recommends --fix-missing
RUN usermod -aG sudo node
RUN echo "node ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers
RUN mkdir -p /workspaces/immich
RUN chown node -R /workspaces
COPY --chown=node:node --chmod=777 ../.devcontainer/server/*.sh /immich-devcontainer/
RUN usermod -aG sudo node && \
echo "node ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers
USER node
COPY --chown=node:node .. /tmp/create-dep-cache/
WORKDIR /tmp/create-dep-cache
RUN make ci-all && rm -rf /tmp/create-dep-cache
RUN sudo mkdir -p /workspaces/immich && \
sudo chown node -R /workspaces && \
sudo mkdir /immich-devcontainer && \
sudo chown node -R /immich-devcontainer
COPY --chmod=777 \
../.devcontainer/server/*.sh \
/immich-devcontainer/
COPY --chown=node:node \
package.json \
pnpm-lock.yaml \
./
# note: e2e is part of dockerignore, so it is not copied here
COPY --chown=node:node \
web/package.json \
web/pnpm-lock.yaml \
./web/
COPY --chown=node:node \
cli/package.json \
cli/pnpm-lock.yaml \
./cli/
COPY --chown=node:node \
server/package.json \
server/pnpm-lock.yaml \
./server/
# note: docs is part of dockerignore, so it is not copied here
COPY --chown=node:node open-api/typescript-sdk/package.json \
open-api/typescript-sdk/pnpm-lock.yaml \
./open-api/typescript-sdk/
# This will cache all dependencies
RUN sudo rm -rf /buildcache && mkdir -p /buildcache/pnpm_store
RUN pnpm install --frozen-lockfile
WORKDIR /workspaces/immich
# Remove app dir from dev container
RUN sudo rm -rf /usr/src/app
FROM dev-container-server AS dev-container-mobile
USER root
# Enable multiarch for arm64 if necessary
RUN if [ "$(dpkg --print-architecture)" = "arm64" ]; then \
dpkg --add-architecture amd64 && \
apt-get update && \
apt-get install -y --no-install-recommends \
sudo dpkg --add-architecture amd64 && \
sudo apt-get install -y --no-install-recommends \
qemu-user-static \
libc6:amd64 \
libstdc++6:amd64 \
@@ -57,15 +110,13 @@ ENV FLUTTER_HOME=/flutter
ENV PATH=${PATH}:${FLUTTER_HOME}/bin
# Flutter SDK
RUN mkdir -p ${FLUTTER_HOME} \
&& curl -C - --output flutter.tar.xz https://storage.googleapis.com/flutter_infra_release/releases/${FLUTTER_CHANNEL}/linux/flutter_linux_${FLUTTER_VERSION}-${FLUTTER_CHANNEL}.tar.xz \
&& tar -xf flutter.tar.xz --strip-components=1 -C ${FLUTTER_HOME} \
&& rm flutter.tar.xz \
&& chown -R node ${FLUTTER_HOME}
RUN sudo mkdir -p ${FLUTTER_HOME} \
&& sudo curl -C - --output flutter.tar.xz https://storage.googleapis.com/flutter_infra_release/releases/${FLUTTER_CHANNEL}/linux/flutter_linux_${FLUTTER_VERSION}-${FLUTTER_CHANNEL}.tar.xz \
&& sudo tar -xf flutter.tar.xz --strip-components=1 -C ${FLUTTER_HOME} \
&& sudo rm flutter.tar.xz \
&& sudo chown -R node ${FLUTTER_HOME}
USER node
RUN sudo apt-get update \
&& wget -qO- https://dcm.dev/pgp-key.public | sudo gpg --dearmor -o /usr/share/keyrings/dcm.gpg \
RUN wget -qO- https://dcm.dev/pgp-key.public | sudo gpg --dearmor -o /usr/share/keyrings/dcm.gpg \
&& echo 'deb [signed-by=/usr/share/keyrings/dcm.gpg arch=amd64] https://dcm.dev/debian stable main' | sudo tee /etc/apt/sources.list.d/dart_stable.list \
&& sudo apt-get update \
&& sudo apt-get install dcm -y
@@ -74,49 +125,81 @@ COPY --chmod=777 ../.devcontainer/mobile/container-mobile-post-create.sh /immich
RUN dart --disable-analytics
# server production build
FROM dev AS prod
COPY server .
RUN npm run build
RUN npm prune --omit=dev --omit=optional
COPY --from=dev /usr/src/app/node_modules/@img ./node_modules/@img
COPY --from=dev /usr/src/app/node_modules/exiftool-vendored.pl ./node_modules/exiftool-vendored.pl
USER root
RUN chown node:node /usr/src/app
USER node
COPY --chown=node:node server .
RUN pnpm install --frozen-lockfile --offline && \
pnpm build
# web build
FROM node:22.16.0-alpine3.20@sha256:2289fb1fba0f4633b08ec47b94a89c7e20b829fc5679f9b7b298eaa2f1ed8b7e AS web
FROM dev AS sdk
WORKDIR /usr/src/open-api/typescript-sdk
COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./
RUN npm ci
COPY open-api/typescript-sdk/ ./
RUN npm run build
COPY --chown=node:node open-api/typescript-sdk/ .
RUN pnpm install --frozen-lockfile --no-optional && \
pnpm build
WORKDIR /usr/src/app
COPY web/package*.json web/svelte.config.js ./
RUN npm ci
COPY web ./
COPY i18n ../i18n
RUN npm run build
# web production build
FROM dev AS web
COPY --chown=node:node web .
COPY --from=sdk /usr/src/app /usr/src/open-api/typescript-sdk
COPY --chown=node:node i18n /usr/src/i18n
RUN pnpm install --frozen-lockfile && \
pnpm build
FROM dev AS cli
COPY --chown=node:node cli .
COPY --from=sdk /usr/src/app /usr/src/open-api/typescript-sdk
# the following command does not use --offline, because the cache created in
# the 'dev' stage did not includ the cli depenencies
RUN pnpm install --frozen-lockfile && \
pnpm build
# prod build
FROM ghcr.io/immich-app/base-server-prod:202505061115@sha256:9971d3a089787f0bd01f4682141d3665bcf5efb3e101a88e394ffd25bee4eedb
RUN corepack enable && \
corepack install -g pnpm
WORKDIR /usr/src/app
ENV NODE_ENV=production \
NVIDIA_DRIVER_CAPABILITIES=all \
NVIDIA_VISIBLE_DEVICES=all
COPY --from=prod /usr/src/app/node_modules ./node_modules
COPY --from=prod /usr/src/app/dist ./dist
COPY --from=prod /usr/src/app/bin ./bin
COPY --from=web /usr/src/app/build /build/www
COPY server/resources resources
COPY server/package.json server/package-lock.json ./
COPY server/start*.sh ./
COPY "docker/scripts/get-cpus.sh" ./
RUN npm install -g @immich/cli && npm cache clean --force
NVIDIA_VISIBLE_DEVICES=all \
npm_config_devdir=/buildcache/node_gyp \
COREPACK_ENABLE_DOWNLOAD_PROMPT=0
RUN mkdir -p /buildcache/pnpm_store && \
chown -R node:node /buildcache && \
mkdir -p /usr/local/etc && \
echo "store-dir=/buildcache/pnpm_store" >> /usr/local/etc/npmrc && \
mkdir -p /usr/src/app/upload && \
chown -R node:node /usr/src/app && \
chmod 755 /usr/src/app
COPY --chown=node:node --from=prod /buildcache /buildcache
COPY --chown=node:node --from=prod /usr/src/app/dist ./dist
COPY --chown=node:node --from=prod /usr/src/app/bin ./bin
COPY --chown=node:node --from=web /usr/src/app/build /build/www
COPY --chown=node:node --from=cli /usr/src/app/dist ./cli
COPY --chown=node:node server/resources ./resources/
COPY --chown=node:node server/package.json server/pnpm-lock.yaml pnpm-workspace.yaml server/start*.sh \
docker/scripts/get-cpus.sh ./
COPY LICENSE /licenses/LICENSE.txt
COPY LICENSE /LICENSE
USER node
RUN pnpm install --frozen-lockfile --prod --no-optional && \
echo '#!/usr/bin/env node' > /usr/src/app/bin/immich && \
echo 'require("../cli/index.js");' >> /usr/src/app/bin/immich && \
chmod +x /usr/src/app/bin/immich
USER root
RUN rm -rf /buildcache /usr/local/etc/npmrc
USER node
ENV PATH="${PATH}:/usr/src/app/bin"
ARG BUILD_ID
@@ -134,6 +217,8 @@ ENV IMMICH_SOURCE_REF=${BUILD_SOURCE_REF}
ENV IMMICH_SOURCE_COMMIT=${BUILD_SOURCE_COMMIT}
ENV IMMICH_SOURCE_URL=https://github.com/immich-app/immich/commit/${BUILD_SOURCE_COMMIT}
USER root
VOLUME /usr/src/app/upload
EXPOSE 2283
ENTRYPOINT ["tini", "--", "/bin/bash"]

View File

@@ -1,3 +1,3 @@
#!/usr/bin/env bash
node /usr/src/app/node_modules/.bin/nest start --debug "0.0.0.0:9230" --watch -- "$@"
pnpm exec nest start --debug "0.0.0.0:9230" --watch -- "$@"

View File

@@ -32,8 +32,7 @@
"kysely:codegen": "npx kysely-codegen --include-pattern=\"(public|vectors).*\" --dialect postgres --url postgres://postgres:postgres@localhost/immich --log-level debug --out-file=./src/db.d.ts",
"sync:open-api": "node ./dist/bin/sync-open-api.js",
"sync:sql": "node ./dist/bin/sync-sql.js",
"email:dev": "email dev -p 3050 --dir src/emails",
"postinstall": "patch-package"
"email:dev": "email dev -p 3050 --dir src/emails"
},
"dependencies": {
"@nestjs/bullmq": "^11.0.1",
@@ -65,14 +64,14 @@
"bcrypt": "^6.0.0",
"body-parser": "^2.2.0",
"bullmq": "^5.51.0",
"chokidar": "^3.5.3",
"chokidar": "^4.0.3",
"class-transformer": "^0.5.1",
"class-validator": "^0.14.0",
"compression": "^1.8.0",
"cookie": "^1.0.2",
"cookie-parser": "^1.4.7",
"cron": "^3.5.0",
"exiftool-vendored": "^28.3.1",
"exiftool-vendored": "^28.8.0",
"express": "^5.1.0",
"fast-glob": "^3.3.2",
"fluent-ffmpeg": "^2.1.2",
@@ -108,13 +107,14 @@
"sharp": "^0.34.2",
"sirv": "^3.0.0",
"socket.io": "^4.8.1",
"tailwindcss-preset-email": "^1.3.2",
"tailwindcss-preset-email": "^1.4.0",
"thumbhash": "^0.1.1",
"typeorm": "^0.3.17",
"ua-parser-js": "^2.0.0",
"validator": "^13.12.0"
},
"devDependencies": {
"canvas": "^3.1.0",
"@eslint/eslintrc": "^3.1.0",
"@eslint/js": "^9.8.0",
"@nestjs/cli": "^11.0.2",
@@ -156,7 +156,6 @@
"mock-fs": "^5.2.0",
"node-addon-api": "^8.3.1",
"node-gyp": "^11.2.0",
"patch-package": "^8.0.0",
"pngjs": "^7.0.0",
"prettier": "^3.0.2",
"prettier-plugin-organize-imports": "^4.0.0",
@@ -164,6 +163,7 @@
"source-map-support": "^0.5.21",
"sql-formatter": "^15.0.0",
"supertest": "^7.1.0",
"tailwindcss": "^3.4.0",
"testcontainers": "^11.0.0",
"tsconfig-paths": "^4.2.0",
"typescript": "^5.3.3",
@@ -178,5 +178,6 @@
},
"overrides": {
"sharp": "^0.34.2"
}
},
"packageManager": "pnpm@10.12.3+sha512.467df2c586056165580ad6dfb54ceaad94c5a30f80893ebdec5a44c5aa73c205ae4a5bb9d5ed6bb84ea7c249ece786642bbb49d06a307df218d03da41c317417"
}

View File

@@ -1,7 +1,7 @@
diff --git a/node_modules/postgres/cf/src/connection.js b/node_modules/postgres/cf/src/connection.js
index ee8b1e6..acf4566 100644
--- a/node_modules/postgres/cf/src/connection.js
+++ b/node_modules/postgres/cf/src/connection.js
diff --git a/cf/src/connection.js b/cf/src/connection.js
index ee8b1e69055bef090d322a66c7d792b5b502f47a..acf45662b35a8d01fa0d198faf6337d9f6808f3f 100644
--- a/cf/src/connection.js
+++ b/cf/src/connection.js
@@ -387,8 +387,10 @@ function Connection(options, queues = {}, { onopen = noop, onend = noop, onclose
}
@@ -14,10 +14,10 @@ index ee8b1e6..acf4566 100644
query: { value: query.string, enumerable: options.debug },
parameters: { value: query.parameters, enumerable: options.debug },
args: { value: query.args, enumerable: options.debug },
diff --git a/node_modules/postgres/cjs/src/connection.js b/node_modules/postgres/cjs/src/connection.js
index f7f58d1..b7f2d65 100644
--- a/node_modules/postgres/cjs/src/connection.js
+++ b/node_modules/postgres/cjs/src/connection.js
diff --git a/cjs/src/connection.js b/cjs/src/connection.js
index f7f58d147f344cb7f2420c2bd24c6b7917162033..b7f2d657c9e1d5fbc7ce4735f61abae972959b1e 100644
--- a/cjs/src/connection.js
+++ b/cjs/src/connection.js
@@ -385,8 +385,10 @@ function Connection(options, queues = {}, { onopen = noop, onend = noop, onclose
}
@@ -30,10 +30,10 @@ index f7f58d1..b7f2d65 100644
query: { value: query.string, enumerable: options.debug },
parameters: { value: query.parameters, enumerable: options.debug },
args: { value: query.args, enumerable: options.debug },
diff --git a/node_modules/postgres/src/connection.js b/node_modules/postgres/src/connection.js
index 97cc97e..26f508e 100644
--- a/node_modules/postgres/src/connection.js
+++ b/node_modules/postgres/src/connection.js
diff --git a/src/connection.js b/src/connection.js
index 97cc97e1576d6c75f958c66e9cecbf8cd11ed450..26f508e2de12f09f27838aca8d88fa4721fe6677 100644
--- a/src/connection.js
+++ b/src/connection.js
@@ -385,8 +385,10 @@ function Connection(options, queues = {}, { onopen = noop, onend = noop, onclose
}

12106
server/pnpm-lock.yaml generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -2,6 +2,7 @@ import { AuthController } from 'src/controllers/auth.controller';
import { LoginResponseDto } from 'src/dtos/auth.dto';
import { AuthService } from 'src/services/auth.service';
import request from 'supertest';
import { mediumFactory } from 'test/medium.factory';
import { errorDto } from 'test/medium/responses';
import { ControllerContext, controllerSetup, mockBaseService } from 'test/utils';
@@ -132,6 +133,50 @@ describe(AuthController.name, () => {
expect(status).toEqual(201);
expect(service.login).toHaveBeenCalledWith(expect.objectContaining({ email: 'admin@local' }), expect.anything());
});
it('should auth cookies on a secure connection', async () => {
const loginResponse = mediumFactory.loginResponse();
service.login.mockResolvedValue(loginResponse);
const { status, body, headers } = await request(ctx.getHttpServer())
.post('/auth/login')
.send({ name: 'admin', email: 'admin@local', password: 'password' });
expect(status).toEqual(201);
expect(body).toEqual(loginResponse);
const cookies = headers['set-cookie'];
expect(cookies).toHaveLength(3);
expect(cookies[0].split(';').map((item) => item.trim())).toEqual([
`immich_access_token=${loginResponse.accessToken}`,
'Max-Age=34560000',
'Path=/',
expect.stringContaining('Expires='),
'HttpOnly',
'SameSite=Lax',
]);
expect(cookies[1].split(';').map((item) => item.trim())).toEqual([
'immich_auth_type=password',
'Max-Age=34560000',
'Path=/',
expect.stringContaining('Expires='),
'HttpOnly',
'SameSite=Lax',
]);
expect(cookies[2].split(';').map((item) => item.trim())).toEqual([
'immich_is_authenticated=true',
'Max-Age=34560000',
'Path=/',
expect.stringContaining('Expires='),
'SameSite=Lax',
]);
});
});
describe('POST /auth/logout', () => {
it('should be an authenticated route', async () => {
await request(ctx.getHttpServer()).post('/auth/logout');
expect(ctx.authenticate).toHaveBeenCalled();
});
});
describe('POST /auth/change-password', () => {

View File

@@ -97,6 +97,16 @@ where
"users"."id" = $1
and "users"."deletedAt" is null
-- UserRepository.getForChangePassword
select
"users"."id",
"users"."password"
from
"users"
where
"users"."id" = $1
and "users"."deletedAt" is null
-- UserRepository.getByEmail
select
"id",

View File

@@ -1,6 +1,6 @@
import { Injectable } from '@nestjs/common';
import archiver from 'archiver';
import chokidar, { WatchOptions } from 'chokidar';
import chokidar, { ChokidarOptions } from 'chokidar';
import { escapePath, glob, globStream } from 'fast-glob';
import { constants, createReadStream, createWriteStream, existsSync, mkdirSync } from 'node:fs';
import fs from 'node:fs/promises';
@@ -219,14 +219,14 @@ export class StorageRepository {
}
}
watch(paths: string[], options: WatchOptions, events: Partial<WatchEvents>) {
watch(paths: string[], options: ChokidarOptions, events: Partial<WatchEvents>) {
const watcher = chokidar.watch(paths, options);
watcher.on('ready', () => events.onReady?.());
watcher.on('add', (path) => events.onAdd?.(path));
watcher.on('change', (path) => events.onChange?.(path));
watcher.on('unlink', (path) => events.onUnlink?.(path));
watcher.on('error', (error) => events.onError?.(error));
watcher.on('error', (error) => events.onError?.(error as Error));
return () => watcher.close();
}

View File

@@ -100,6 +100,16 @@ export class UserRepository {
.executeTakeFirstOrThrow();
}
@GenerateSql({ params: [DummyValue.UUID] })
getForChangePassword(id: string) {
return this.db
.selectFrom('users')
.select(['users.id', 'users.password'])
.where('users.id', '=', id)
.where('users.deletedAt', 'is', null)
.executeTakeFirstOrThrow();
}
@GenerateSql({ params: [DummyValue.EMAIL] })
getByEmail(email: string, options?: { withPassword?: boolean }) {
return this.db

View File

@@ -116,46 +116,33 @@ describe(AuthService.name, () => {
const auth = factory.auth({ user });
const dto = { password: 'old-password', newPassword: 'new-password' };
mocks.user.getByEmail.mockResolvedValue({ ...user, password: 'hash-password' });
mocks.user.getForChangePassword.mockResolvedValue({ id: user.id, password: 'hash-password' });
mocks.user.update.mockResolvedValue(user);
await sut.changePassword(auth, dto);
expect(mocks.user.getByEmail).toHaveBeenCalledWith(auth.user.email, { withPassword: true });
expect(mocks.user.getForChangePassword).toHaveBeenCalledWith(user.id);
expect(mocks.crypto.compareBcrypt).toHaveBeenCalledWith('old-password', 'hash-password');
});
it('should throw when auth user email is not found', async () => {
const auth = { user: { email: 'test@imimch.com' } } as AuthDto;
const dto = { password: 'old-password', newPassword: 'new-password' };
mocks.user.getByEmail.mockResolvedValue(void 0);
await expect(sut.changePassword(auth, dto)).rejects.toBeInstanceOf(UnauthorizedException);
});
it('should throw when password does not match existing password', async () => {
const auth = { user: { email: 'test@imimch.com' } as UserAdmin };
const user = factory.user();
const auth = factory.auth({ user });
const dto = { password: 'old-password', newPassword: 'new-password' };
mocks.crypto.compareBcrypt.mockReturnValue(false);
mocks.user.getByEmail.mockResolvedValue({
email: 'test@immich.com',
password: 'hash-password',
} as UserAdmin & { password: string });
mocks.user.getForChangePassword.mockResolvedValue({ id: user.id, password: 'hash-password' });
await expect(sut.changePassword(auth, dto)).rejects.toBeInstanceOf(BadRequestException);
});
it('should throw when user does not have a password', async () => {
const auth = { user: { email: 'test@imimch.com' } } as AuthDto;
const user = factory.user();
const auth = factory.auth({ user });
const dto = { password: 'old-password', newPassword: 'new-password' };
mocks.user.getByEmail.mockResolvedValue({
email: 'test@immich.com',
password: '',
} as UserAdmin & { password: string });
mocks.user.getForChangePassword.mockResolvedValue({ id: user.id, password: '' });
await expect(sut.changePassword(auth, dto)).rejects.toBeInstanceOf(BadRequestException);
});

View File

@@ -91,11 +91,7 @@ export class AuthService extends BaseService {
async changePassword(auth: AuthDto, dto: ChangePasswordDto): Promise<UserAdminResponseDto> {
const { password, newPassword } = dto;
const user = await this.userRepository.getByEmail(auth.user.email, { withPassword: true });
if (!user) {
throw new UnauthorizedException();
}
const user = await this.userRepository.getForChangePassword(auth.user.id);
const valid = this.validateSecret(password, user.password);
if (!valid) {
throw new BadRequestException('Wrong password');

View File

@@ -5,7 +5,7 @@ import { createHash, randomBytes } from 'node:crypto';
import { Writable } from 'node:stream';
import { AssetFace } from 'src/database';
import { Albums, AssetJobStatus, Assets, DB, Exif, FaceSearch, Memories, Person, Sessions } from 'src/db';
import { AuthDto } from 'src/dtos/auth.dto';
import { AuthDto, LoginResponseDto } from 'src/dtos/auth.dto';
import { AlbumUserRole, AssetType, AssetVisibility, MemoryType, SourceType, SyncRequestType } from 'src/enum';
import { AccessRepository } from 'src/repositories/access.repository';
import { ActivityRepository } from 'src/repositories/activity.repository';
@@ -17,6 +17,7 @@ import { ConfigRepository } from 'src/repositories/config.repository';
import { CryptoRepository } from 'src/repositories/crypto.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { EmailRepository } from 'src/repositories/email.repository';
import { EventRepository } from 'src/repositories/event.repository';
import { JobRepository } from 'src/repositories/job.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { MemoryRepository } from 'src/repositories/memory.repository';
@@ -305,6 +306,10 @@ const newMockRepository = <T>(key: ClassConstructor<T>) => {
return automock(EmailRepository, { args: [{ setContext: () => {} }] });
}
case EventRepository: {
return automock(EventRepository, { args: [undefined, undefined, { setContext: () => {} }] });
}
case JobRepository: {
return automock(JobRepository, {
args: [
@@ -461,10 +466,13 @@ const sessionInsert = ({ id = newUuid(), userId, ...session }: Partial<Insertabl
const userInsert = (user: Partial<Insertable<UserTable>> = {}) => {
const id = user.id || newUuid();
const defaults: Insertable<UserTable> = {
const defaults = {
email: `${id}@immich.cloud`,
name: `User ${id}`,
deletedAt: null,
isAdmin: false,
profileImagePath: '',
shouldChangePassword: true,
};
return { ...defaults, ...user, id };
@@ -513,6 +521,24 @@ const syncStream = () => {
return new CustomWritable();
};
const loginDetails = () => {
return { isSecure: false, clientIp: '', deviceType: '', deviceOS: '' };
};
const loginResponse = (): LoginResponseDto => {
const user = userInsert({});
return {
accessToken: 'access-token',
userId: user.id,
userEmail: user.email,
name: user.name,
profileImagePath: user.profileImagePath,
isAdmin: user.isAdmin,
shouldChangePassword: user.shouldChangePassword,
isOnboarded: false,
};
};
export const mediumFactory = {
assetInsert,
assetFaceInsert,
@@ -524,4 +550,6 @@ export const mediumFactory = {
syncStream,
userInsert,
memoryInsert,
loginDetails,
loginResponse,
};

View File

@@ -0,0 +1,163 @@
import { BadRequestException } from '@nestjs/common';
import { hash } from 'bcrypt';
import { Kysely } from 'kysely';
import { DB } from 'src/db';
import { AuthType } from 'src/enum';
import { AccessRepository } from 'src/repositories/access.repository';
import { ConfigRepository } from 'src/repositories/config.repository';
import { CryptoRepository } from 'src/repositories/crypto.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { EventRepository } from 'src/repositories/event.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { SessionRepository } from 'src/repositories/session.repository';
import { StorageRepository } from 'src/repositories/storage.repository';
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
import { UserRepository } from 'src/repositories/user.repository';
import { AuthService } from 'src/services/auth.service';
import { mediumFactory, newMediumService } from 'test/medium.factory';
import { factory } from 'test/small.factory';
import { getKyselyDB } from 'test/utils';
let defaultDatabase: Kysely<DB>;
const setup = (db?: Kysely<DB>) => {
return newMediumService(AuthService, {
database: db || defaultDatabase,
real: [
AccessRepository,
ConfigRepository,
CryptoRepository,
DatabaseRepository,
SessionRepository,
SystemMetadataRepository,
UserRepository,
],
mock: [LoggingRepository, StorageRepository, EventRepository],
});
};
beforeAll(async () => {
defaultDatabase = await getKyselyDB();
});
describe(AuthService.name, () => {
describe('adminSignUp', () => {
it(`should sign up the admin`, async () => {
const { sut } = setup();
const dto = { name: 'Admin', email: 'admin@immich.cloud', password: 'password' };
await expect(sut.adminSignUp(dto)).resolves.toEqual(
expect.objectContaining({
id: expect.any(String),
email: dto.email,
name: dto.name,
isAdmin: true,
}),
);
});
it('should not allow a second admin to sign up', async () => {
const { sut, ctx } = setup();
await ctx.newUser({ isAdmin: true });
const dto = { name: 'Admin', email: 'admin@immich.cloud', password: 'password' };
const response = sut.adminSignUp(dto);
await expect(response).rejects.toThrow(BadRequestException);
await expect(response).rejects.toThrow('The server already has an admin');
});
});
describe('login', () => {
it('should reject an incorrect password', async () => {
const { sut, ctx } = setup();
const password = 'password';
const passwordHashed = await hash(password, 10);
const { user } = await ctx.newUser({ password: passwordHashed });
const dto = { email: user.email, password: 'wrong-password' };
await expect(sut.login(dto, mediumFactory.loginDetails())).rejects.toThrow('Incorrect email or password');
});
it('should accept a correct password and return a login response', async () => {
const { sut, ctx } = setup();
const password = 'password';
const passwordHashed = await hash(password, 10);
const { user } = await ctx.newUser({ password: passwordHashed });
const dto = { email: user.email, password };
await expect(sut.login(dto, mediumFactory.loginDetails())).resolves.toEqual({
accessToken: expect.any(String),
isAdmin: user.isAdmin,
isOnboarded: false,
name: user.name,
profileImagePath: user.profileImagePath,
userId: user.id,
userEmail: user.email,
shouldChangePassword: user.shouldChangePassword,
});
});
});
describe('logout', () => {
it('should logout', async () => {
const { sut } = setup();
const auth = factory.auth();
await expect(sut.logout(auth, AuthType.PASSWORD)).resolves.toEqual({
successful: true,
redirectUri: '/auth/login?autoLaunch=0',
});
});
it('should cleanup the session', async () => {
const { sut, ctx } = setup();
const sessionRepo = ctx.get(SessionRepository);
const eventRepo = ctx.getMock(EventRepository);
const { user } = await ctx.newUser();
const { session } = await ctx.newSession({ userId: user.id });
const auth = factory.auth({ session, user });
eventRepo.emit.mockResolvedValue();
await expect(sessionRepo.get(session.id)).resolves.toEqual(expect.objectContaining({ id: session.id }));
await expect(sut.logout(auth, AuthType.PASSWORD)).resolves.toEqual({
successful: true,
redirectUri: '/auth/login?autoLaunch=0',
});
await expect(sessionRepo.get(session.id)).resolves.toBeUndefined();
});
});
describe('changePassword', () => {
it('should change the password and login with it', async () => {
const { sut, ctx } = setup();
const dto = { password: 'password', newPassword: 'new-password' };
const passwordHashed = await hash(dto.password, 10);
const { user } = await ctx.newUser({ password: passwordHashed });
const auth = factory.auth({ user });
const response = await sut.changePassword(auth, dto);
expect(response).toEqual(
expect.objectContaining({
id: user.id,
email: user.email,
}),
);
expect((response as any).password).not.toBeDefined();
await expect(
sut.login({ email: user.email, password: dto.newPassword }, mediumFactory.loginDetails()),
).resolves.toBeDefined();
});
it('should validate the current password', async () => {
const { sut, ctx } = setup();
const dto = { password: 'wrong-password', newPassword: 'new-password' };
const passwordHashed = await hash('password', 10);
const { user } = await ctx.newUser({ password: passwordHashed });
const auth = factory.auth({ user });
const response = sut.changePassword(auth, dto);
await expect(response).rejects.toThrow(BadRequestException);
await expect(response).rejects.toThrow('Wrong password');
});
});
});

View File

@@ -1,4 +1,4 @@
import { WatchOptions } from 'chokidar';
import { ChokidarOptions } from 'chokidar';
import { StorageCore } from 'src/cores/storage.core';
import { StorageRepository, WatchEvents } from 'src/repositories/storage.repository';
import { RepositoryInterface } from 'src/types';
@@ -11,7 +11,7 @@ interface MockWatcherOptions {
export const makeMockWatcher =
({ items, close }: MockWatcherOptions) =>
(paths: string[], options: WatchOptions, events: Partial<WatchEvents>) => {
(paths: string[], options: ChokidarOptions, events: Partial<WatchEvents>) => {
events.onReady?.();
for (const item of items || []) {
switch (item.event) {
@@ -29,6 +29,7 @@ export const makeMockWatcher =
}
case 'error': {
events.onError?.(new Error(item.value));
break;
}
}
}

View File

@@ -1 +0,0 @@
engine-strict=true

View File

@@ -1,11 +1,33 @@
FROM node:22.16.0-alpine3.20@sha256:2289fb1fba0f4633b08ec47b94a89c7e20b829fc5679f9b7b298eaa2f1ed8b7e
ENV COREPACK_ENABLE_AUTO_PIN=0 \
COREPACK_ENABLE_DOWNLOAD_PROMPT=0
RUN corepack enable && corepack install -g pnpm && \
apk add --no-cache tini && \
mkdir -p /pnpm/store && \
chown node:node -R /pnpm && \
mkdir -p /usr/local/etc && \
echo "store-dir=/pnpm/store" >> /usr/local/etc/npmrc
RUN apk add --no-cache tini
USER node
WORKDIR /usr/src/app
COPY --chown=node:node package*.json ./
RUN npm ci
COPY --chown=node:node \
../open-api/typescript-sdk/package.json \
../open-api/typescript-sdk/pnpm-lock.yaml \
../open-api/typescript-sdk/
COPY --chown=node:node \
./web/package.json \
./web/pnpm-lock.yaml \
pnpm-workspace.yaml \
./
RUN pnpm install --frozen-lockfile
ENV CHOKIDAR_USEPOLLING=true
EXPOSE 24678
EXPOSE 3000
ENTRYPOINT ["/sbin/tini", "--", "/bin/sh"]

View File

@@ -1,10 +1,9 @@
#!/usr/bin/env sh
TYPESCRIPT_SDK=/usr/src/open-api/typescript-sdk
npm --prefix "$TYPESCRIPT_SDK" install
npm --prefix "$TYPESCRIPT_SDK" run build
echo "Building TypeScript SDK..."
(cd ../open-api/typescript-sdk && pnpm install && pnpm build)
echo "Installing Deps ..."
pnpm install
COUNT=0
UPSTREAM="${IMMICH_SERVER_URL:-http://immich-server:2283/}"
@@ -18,4 +17,4 @@ done
echo "Connected to $UPSTREAM"
node ./node_modules/.bin/vite dev --host 0.0.0.0 --port 3000
pnpm exec vite dev --host 0.0.0.0 --port 3000

View File

@@ -5,25 +5,24 @@
"type": "module",
"scripts": {
"dev": "vite dev --host 0.0.0.0 --port 3000",
"build": "vite build",
"build": "svelte-kit sync && vite build",
"build:stats": "BUILD_STATS=true vite build",
"package": "svelte-kit package",
"preview": "vite preview",
"check:svelte": "svelte-check --no-tsconfig --fail-on-warnings --compiler-warnings 'reactive_declaration_non_reactive_property:ignore' --ignore src/lib/components/photos-page/asset-grid.svelte",
"check:typescript": "tsc --noEmit",
"check:typescript": "svelte-kit sync && tsc --noEmit",
"check:watch": "npm run check:svelte -- --watch",
"check:code": "npm run format && npm run lint:p && npm run check:svelte && npm run check:typescript",
"check:all": "npm run check:code && npm run test:cov",
"lint": "eslint . --max-warnings 0",
"lint:p": "eslint-p . --max-warnings 0 --concurrency=4",
"lint:fix": "npm run lint -- --fix",
"lint": "svelte-kit sync && eslint . --max-warnings 0",
"lint:p": "svelte-kit sync && eslint-p . --max-warnings 0 --concurrency=4",
"lint:fix": "svelte-kit sync && npm run lint -- --fix",
"format": "prettier --check .",
"format:fix": "prettier --write . && npm run format:i18n",
"format:i18n": "npx --yes sort-json ../i18n/*.json",
"test": "vitest --run",
"test:cov": "vitest --coverage",
"test:watch": "vitest dev",
"prepare": "svelte-kit sync"
"test:watch": "vitest dev"
},
"dependencies": {
"@formatjs/icu-messageformat-parser": "^2.9.8",
@@ -39,7 +38,9 @@
"@zoom-image/svelte": "^0.3.0",
"dom-to-image": "^2.6.0",
"fabric": "^6.5.4",
"geojson": "^0.5.0",
"handlebars": "^4.7.8",
"happy-dom": "^18.0.1",
"intl-messageformat": "^10.7.11",
"justified-layout": "^4.1.0",
"lodash-es": "^4.17.21",
@@ -103,5 +104,6 @@
},
"volta": {
"node": "22.16.0"
}
},
"packageManager": "pnpm@10.12.3+sha512.467df2c586056165580ad6dfb54ceaad94c5a30f80893ebdec5a44c5aa73c205ae4a5bb9d5ed6bb84ea7c249ece786642bbb49d06a307df218d03da41c317417"
}

6874
web/pnpm-lock.yaml generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -58,7 +58,8 @@ describe('get asset filename', () => {
});
describe('copy image to clipboard', () => {
it('should not allow copy image to clipboard', () => {
expect(canCopyImageToClipboard()).toEqual(false);
// This test is dubious, as it totally on the environment where the test is run which is mocked.
it('should allow copy image to clipboard', () => {
expect(canCopyImageToClipboard()).toEqual(true);
});
});

View File

@@ -53,7 +53,7 @@ export default defineConfig({
test: {
include: ['src/**/*.{test,spec}.{js,ts}'],
globals: true,
environment: 'jsdom',
environment: 'happy-dom',
setupFiles: ['./src/test-data/setup.ts'],
sequence: {
hooks: 'list',