Compare commits
85 Commits
feat/widge ... chore/pnpm
Commits (SHA1): 6fffe41f0b, 83afd49f5c, 639ede78c2, 15be3437bf, f59b0bab5a, fa418d778b, e0c4b8df6f, 7f9689b4bc, e6f8bfdf5e, 8ccca04e27, 53f80393bf, e5e857edc3, 590f96246d, 38d73f2bc6, 96e3b96d57, 36b018e355, 214ca50406, 29b3981609, a068a41c06, 3c6e9e1191, db0415bbcc, a5c431fbf5, a3d588f6bd, 21f500191a, 5011636d95, 3f330c6476, bb8755021d, 93f9e118ad, 58ca1402ed, 32a7087883, 53020852ec, 181a7e115f, 095ace8687, 4c3fcdc745, fa5f30d9ca, e60bc3c304, 09cbc5d3f4, a2a9797fab, 3d35e65f27, df76735f4a, 6feca56da8, 97aabe466e, 72a53f43c8, 30b4f334d8, 6c6a32c63e, 6fed223405, 3105094a3d, b96c95beda, 926ff075a3, 934649c8df, a43159f4ba, ea3a14ed25, 24a4cba953, fda22c83b9, 2a8019726c, 5f76cdddc7, 48be10e48b, 6c11ef62e8, 65dce58aa4, 64cc7239fe, 5f89c2d111, 4621ec5ea2, 881a96cdf9, b001ba44f5, afb444c92c, 027c4a8b34, eca9b56847, 5b0575b956, 05064f87f0, 522cdbac99, 9240bbc6ff, 3751f8bc57, 88b8afb8d6, 2e13543d5d, bcfc967d77, 7d0e8f50f7, c759233d8c, bfe32c2bb9, 6c7b2e4b5c, 7edbeb2ed6, 4e59a55c1d, c2d7337d12, c1b82bed9b, 9ca31abae9, ebcf133bea
@@ -73,10 +73,8 @@ install_dependencies() {
log "Installing dependencies"
(
cd "${IMMICH_WORKSPACE}" || exit 1
run_cmd make install-server
run_cmd make install-sdk
run_cmd make build-sdk
run_cmd make install-web
export CI=1 FROZEN=1 OFFLINE=1
run_cmd make setup-dev
)
log ""
}

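For context, a minimal sketch of what the new `make setup-dev` call expands to, assuming the Makefile targets shown later in this diff (`setup-dev: install-server install-sdk build-sdk install-web`, with `FROZEN`/`OFFLINE` mapping to pnpm flags); this is an illustration, not the exact script:

```bash
# Hypothetical local equivalent of the devcontainer install step above.
export CI=1 FROZEN=1 OFFLINE=1
cd "${IMMICH_WORKSPACE}"
make setup-dev
# which, per the updated Makefile, is roughly:
pnpm --filter immich install --frozen-lockfile --offline        # install-server
pnpm --filter @immich/sdk install --frozen-lockfile --offline   # install-sdk
pnpm --filter @immich/sdk run build                             # build-sdk
pnpm --filter immich-web install --frozen-lockfile --offline    # install-web
```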
@@ -3,6 +3,7 @@ services:
build:
target: dev-container-server
env_file: !reset []
hostname: immich-dev
environment:
- IMMICH_SERVER_URL=http://127.0.0.1:2283/
volumes: !override
@@ -21,7 +22,7 @@ services:

immich-machine-learning:
env_file: !reset []

database:
env_file: !reset []
environment: !override
@@ -30,7 +31,7 @@ services:
POSTGRES_DB: ${DB_DATABASE_NAME-immich}
POSTGRES_INITDB_ARGS: '--data-checksums'
POSTGRES_HOST_AUTH_METHOD: md5
volumes:
volumes:
- ${UPLOAD_LOCATION:-postgres-devcontainer-volume}${UPLOAD_LOCATION:+/postgres}:/var/lib/postgresql/data

redis:

@@ -10,8 +10,9 @@ cd "${IMMICH_WORKSPACE}/server" || (
exit 1
)

CI=1 pnpm install
while true; do
run_cmd node ./node_modules/.bin/nest start --debug "0.0.0.0:9230" --watch
run_cmd pnpm exec nest start --debug "0.0.0.0:9230" --watch
log "Nest API Server crashed with exit code $?. Respawning in 3s ..."
sleep 3
done

@@ -16,7 +16,7 @@ until curl --output /dev/null --silent --head --fail "http://127.0.0.1:${IMMICH_
done

while true; do
run_cmd node ./node_modules/.bin/vite dev --host 0.0.0.0 --port "${DEV_PORT}"
run_cmd pnpm exec vite dev --host 0.0.0.0 --port "${DEV_PORT}"
log "Web crashed with exit code $?. Respawning in 3s ..."
sleep 3
done

@@ -4,9 +4,14 @@

design/
docker/
Dockerfile
!docker/scripts
docs/
!docs/package.json
!docs/package-lock.json
e2e/
!e2e/package.json
!e2e/package-lock.json
fastlane/
machine-learning/
misc/
@@ -15,6 +20,7 @@ mobile/
cli/coverage/
cli/dist/
cli/node_modules/
cli/Dockerfile

open-api/typescript-sdk/build/
open-api/typescript-sdk/node_modules/
@@ -25,9 +31,11 @@ server/upload/
server/src/queries
server/dist/
server/www/
server/Dockerfile

web/node_modules/
web/coverage/
web/.svelte-kit
web/build/
web/.env
web/Dockerfile

.github/.nvmrc (vendored, 2 changes)
@@ -1 +1 @@
22.16.0
22.17.0

.github/.prettierignore (vendored, new file, 4 changes)
@@ -0,0 +1,4 @@
# Ignore files for PNPM, NPM and YARN
pnpm-lock.yaml
package-lock.json
yarn.lock

.github/package-lock.json (generated, vendored, 28 changes)
@@ -1,28 +0,0 @@
{
"name": ".github",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"devDependencies": {
"prettier": "^3.5.3"
}
},
"node_modules/prettier": {
"version": "3.5.3",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.5.3.tgz",
"integrity": "sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw==",
"dev": true,
"license": "MIT",
"bin": {
"prettier": "bin/prettier.cjs"
},
"engines": {
"node": ">=14"
},
"funding": {
"url": "https://github.com/prettier/prettier?sponsor=1"
}
}
}
}

.github/workflows/build-mobile.yml (vendored, 54 changes)
@@ -58,7 +58,7 @@ jobs:
contents: read
# Skip when PR from a fork
if: ${{ !github.event.pull_request.head.repo.fork && github.actor != 'dependabot[bot]' && needs.pre-job.outputs.should_run == 'true' }}
runs-on: macos-14
runs-on: mich

steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
@@ -66,24 +66,40 @@ jobs:
ref: ${{ inputs.ref || github.sha }}
persist-credentials: false

- name: Create the Keystore
env:
KEY_JKS: ${{ secrets.KEY_JKS }}
working-directory: ./mobile
run: printf "%s" $KEY_JKS | base64 -d > android/key.jks

- uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1
with:
distribution: 'zulu'
java-version: '17'
cache: 'gradle'

- name: Restore Gradle Cache
id: cache-gradle-restore
uses: actions/cache/restore@5a3ec84eff668545956fd18022155c47e93e2684 # v4
with:
path: |
~/.gradle/caches
~/.gradle/wrapper
~/.android/sdk
mobile/android/.gradle
mobile/.dart_tool
key: build-mobile-gradle-${{ runner.os }}-main

- name: Setup Flutter SDK
uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2.19.0
uses: subosito/flutter-action@fd55f4c5af5b953cc57a2be44cb082c8f6635e8e # v2.21.0
with:
channel: 'stable'
flutter-version-file: ./mobile/pubspec.yaml
cache: true

- name: Create the Keystore
env:
KEY_JKS: ${{ secrets.KEY_JKS }}
working-directory: ./mobile
run: echo $KEY_JKS | base64 -d > android/key.jks
- name: Setup Android SDK
uses: android-actions/setup-android@9fc6c4e9069bf8d3d10b2204b1fb8f6ef7065407 # v3.2.2
with:
packages: ''

- name: Get Packages
working-directory: ./mobile
@@ -103,12 +119,30 @@ jobs:
ALIAS: ${{ secrets.ALIAS }}
ANDROID_KEY_PASSWORD: ${{ secrets.ANDROID_KEY_PASSWORD }}
ANDROID_STORE_PASSWORD: ${{ secrets.ANDROID_STORE_PASSWORD }}
IS_MAIN: ${{ github.ref == 'refs/heads/main' }}
run: |
flutter build apk --release
flutter build apk --release --split-per-abi --target-platform android-arm,android-arm64,android-x64
if [[ $IS_MAIN == 'true' ]]; then
flutter build apk --release
flutter build apk --release --split-per-abi --target-platform android-arm,android-arm64,android-x64
else
flutter build apk --debug --split-per-abi --target-platform android-arm64
fi

- name: Publish Android Artifact
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: release-apk-signed
path: mobile/build/app/outputs/flutter-apk/*.apk

- name: Save Gradle Cache
id: cache-gradle-save
uses: actions/cache/save@5a3ec84eff668545956fd18022155c47e93e2684 # v4
if: github.ref == 'refs/heads/main'
with:
path: |
~/.gradle/caches
~/.gradle/wrapper
~/.android/sdk
mobile/android/.gradle
mobile/.dart_tool
key: ${{ steps.cache-gradle-restore.outputs.cache-primary-key }}

.github/workflows/cli.yml (vendored, 30 changes)
@@ -33,18 +33,24 @@ jobs:
with:
persist-credentials: false

# Setup .npmrc file to publish to npm
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0

- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './cli/.nvmrc'
node-version-file: './server/.nvmrc'
registry-url: 'https://registry.npmjs.org'
- name: Prepare SDK
run: npm ci --prefix ../open-api/typescript-sdk/
- name: Build SDK
run: npm run build --prefix ../open-api/typescript-sdk/
- run: npm ci
- run: npm run build
- run: npm publish
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'

- name: Setup typescript-sdk
run: pnpm install && pnpm run build
working-directory: ./open-api/typescript-sdk

- run: pnpm install --frozen-lockfile
- run: pnpm build
- run: pnpm publish
if: ${{ github.event_name == 'release' }}
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
@@ -67,7 +73,7 @@ jobs:
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

- name: Login to GitHub Container Registry
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
@@ -96,7 +102,7 @@ jobs:
type=raw,value=latest,enable=${{ github.event_name == 'release' }}

- name: Build and push image
uses: docker/build-push-action@1dc73863535b631f98b2378be8619f83b136f4a0 # v6.17.0
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
file: cli/Dockerfile
platforms: linux/amd64,linux/arm64

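A rough local approximation of the updated publish steps in this workflow, assuming pnpm is provided by corepack and that the token is exposed as `NPM_TOKEN` (a sketch, not the exact CI environment):

```bash
# Mirrors the workflow's pnpm-based publish of the CLI package.
corepack enable                      # makes the pnpm shim available
cd open-api/typescript-sdk
pnpm install && pnpm run build       # "Setup typescript-sdk"
cd ../../cli
pnpm install --frozen-lockfile       # reproducible install from pnpm-lock.yaml
pnpm build
NODE_AUTH_TOKEN="$NPM_TOKEN" pnpm publish   # CI only does this for release events
```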
.github/workflows/codeql-analysis.yml (vendored, 6 changes)
@@ -50,7 +50,7 @@ jobs:

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18
uses: github/codeql-action/init@ce28f5bb42b7a9f2c824e633a3f6ee835bab6858 # v3.29.0
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@@ -63,7 +63,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18
uses: github/codeql-action/autobuild@ce28f5bb42b7a9f2c824e633a3f6ee835bab6858 # v3.29.0

# ℹ️ Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@@ -76,6 +76,6 @@ jobs:
# ./location_of_script_within_repo/buildscript.sh

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18
uses: github/codeql-action/analyze@ce28f5bb42b7a9f2c824e633a3f6ee835bab6858 # v3.29.0
with:
category: '/language:${{matrix.language}}'

.github/workflows/docker.yml (vendored, 4 changes)
@@ -177,7 +177,7 @@ jobs:
runs-on: ubuntu-latest
if: always()
steps:
- uses: immich-app/devtools/actions/success-check@6b81b1572e466f7f48ba3c823159ce3f4a4d66a6 # success-check-action-0.0.3
- uses: immich-app/devtools/actions/success-check@68f10eb389bb02a3cf9d1156111964c549eb421b # 0.0.4
with:
needs: ${{ toJSON(needs) }}

@@ -188,6 +188,6 @@ jobs:
runs-on: ubuntu-latest
if: always()
steps:
- uses: immich-app/devtools/actions/success-check@6b81b1572e466f7f48ba3c823159ce3f4a4d66a6 # success-check-action-0.0.3
- uses: immich-app/devtools/actions/success-check@68f10eb389bb02a3cf9d1156111964c549eb421b # 0.0.4
with:
needs: ${{ toJSON(needs) }}

.github/workflows/docs-build.yml (vendored, 15 changes)
@@ -53,19 +53,24 @@ jobs:
with:
persist-credentials: false

- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0

- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './docs/.nvmrc'
node-version-file: './cli/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'

- name: Run npm install
run: npm ci
- name: Run install
run: pnpm install

- name: Check formatting
run: npm run format
run: pnpm format

- name: Run build
run: npm run build
run: pnpm build

- name: Upload build output
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2

.github/workflows/fix-format.yml (vendored, 2 changes)
@@ -32,6 +32,8 @@ jobs:
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'npm'
cache-dependency-path: '**/pnpm-lock.yaml'

- name: Fix formatting
run: make install-all && make format-all

.github/workflows/pr-label-validation.yml (vendored, 2 changes)
@@ -14,7 +14,7 @@ jobs:
pull-requests: write
steps:
- name: Require PR to have a changelog label
uses: mheap/github-action-required-labels@fb29a14a076b0f74099f6198f77750e8fc236016 # v5.5.0
uses: mheap/github-action-required-labels@8afbe8ae6ab7647d0c9f0cfa7c2f939650d22509 # v5.5.1
with:
mode: exactly
count: 1

.github/workflows/prepare-release.yml (vendored, 2 changes)
@@ -100,7 +100,7 @@ jobs:
name: release-apk-signed

- name: Create draft release
uses: softprops/action-gh-release@da05d552573ad5aba039eaac05058a918a7bf631 # v2.2.2
uses: softprops/action-gh-release@72f2c25fcb47643c292f7107632f7a47c1df5cd8 # v2.3.2
with:
draft: true
tag_name: ${{ env.IMMICH_VERSION }}

.github/workflows/sdk.yml (vendored, 11 changes)
@@ -20,16 +20,21 @@ jobs:
with:
persist-credentials: false

- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0

# Setup .npmrc file to publish to npm
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './open-api/typescript-sdk/.nvmrc'
registry-url: 'https://registry.npmjs.org'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Install deps
run: npm ci
run: pnpm install --frozen-lockfile
- name: Build
run: npm run build
run: pnpm build
- name: Publish
run: npm publish
run: pnpm publish
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

.github/workflows/static_analysis.yml (vendored, 6 changes)
@@ -49,7 +49,7 @@ jobs:
persist-credentials: false

- name: Setup Flutter SDK
uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2.19.0
uses: subosito/flutter-action@fd55f4c5af5b953cc57a2be44cb082c8f6635e8e # v2.21.0
with:
channel: 'stable'
flutter-version-file: ./mobile/pubspec.yaml
@@ -109,7 +109,7 @@ jobs:
working-directory: ./mobile

- name: Run DCM
run: dcm analyze lib
run: dcm analyze lib --fatal-style --fatal-warnings
working-directory: ./mobile

zizmor:
@@ -134,7 +134,7 @@ jobs:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

- name: Upload SARIF file
uses: github/codeql-action/upload-sarif@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18
uses: github/codeql-action/upload-sarif@ce28f5bb42b7a9f2c824e633a3f6ee835bab6858 # v3.29.0
with:
sarif_file: results.sarif
category: zizmor

.github/workflows/test.yml (vendored, 196 changes)
@@ -80,28 +80,33 @@ jobs:
with:
persist-credentials: false

- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0

- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'

- name: Run npm install
run: npm ci
- name: Run package manager install
run: pnpm install

- name: Run linter
run: npm run lint
run: pnpm lint
if: ${{ !cancelled() }}

- name: Run formatter
run: npm run format
run: pnpm format
if: ${{ !cancelled() }}

- name: Run tsc
run: npm run check
run: pnpm check
if: ${{ !cancelled() }}

- name: Run small tests & coverage
run: npm test
run: pnpm test
if: ${{ !cancelled() }}

cli-unit-tests:
@@ -121,32 +126,37 @@ jobs:
with:
persist-credentials: false

- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0

- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './cli/.nvmrc'
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'

- name: Setup typescript-sdk
run: npm ci && npm run build
run: pnpm install && pnpm run build
working-directory: ./open-api/typescript-sdk

- name: Install deps
run: npm ci
run: pnpm install

- name: Run linter
run: npm run lint
run: pnpm lint
if: ${{ !cancelled() }}

- name: Run formatter
run: npm run format
run: pnpm format
if: ${{ !cancelled() }}

- name: Run tsc
run: npm run check
run: pnpm check
if: ${{ !cancelled() }}

- name: Run unit tests & coverage
run: npm run test
run: pnpm test
if: ${{ !cancelled() }}

cli-unit-tests-win:
@@ -166,25 +176,30 @@ jobs:
with:
persist-credentials: false

- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0

- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './cli/.nvmrc'
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'

- name: Setup typescript-sdk
run: npm ci && npm run build
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk

- name: Install deps
run: npm ci
run: pnpm install --frozen-lockfile

# Skip linter & formatter in Windows test.
- name: Run tsc
run: npm run check
run: pnpm check
if: ${{ !cancelled() }}

- name: Run unit tests & coverage
run: npm run test
run: pnpm test
if: ${{ !cancelled() }}

web-lint:
@@ -204,28 +219,33 @@ jobs:
with:
persist-credentials: false

- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0

- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './web/.nvmrc'
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'

- name: Run setup typescript-sdk
run: npm ci && npm run build
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk

- name: Run npm install
run: npm ci
run: pnpm rebuild && pnpm install --frozen-lockfile

- name: Run linter
run: npm run lint:p
run: pnpm lint:p
if: ${{ !cancelled() }}

- name: Run formatter
run: npm run format
run: pnpm format
if: ${{ !cancelled() }}

- name: Run svelte checks
run: npm run check:svelte
run: pnpm check:svelte
if: ${{ !cancelled() }}

web-unit-tests:
@@ -245,24 +265,29 @@ jobs:
with:
persist-credentials: false

- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0

- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './web/.nvmrc'
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'

- name: Run setup typescript-sdk
run: npm ci && npm run build
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk

- name: Run npm install
run: npm ci
run: pnpm install --frozen-lockfile

- name: Run tsc
run: npm run check:typescript
run: pnpm check:typescript
if: ${{ !cancelled() }}

- name: Run unit tests & coverage
run: npm run test
run: pnpm test
if: ${{ !cancelled() }}

i18n-tests:
@@ -278,16 +303,21 @@ jobs:
with:
persist-credentials: false

- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0

- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './web/.nvmrc'
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'

- name: Install dependencies
run: npm --prefix=web ci
run: pnpm --filter=immich-web install --frozen-lockfile

- name: Format
run: npm --prefix=web run format:i18n
run: pnpm --filter=immich-web format:i18n

- name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
@@ -322,30 +352,35 @@ jobs:
with:
persist-credentials: false

- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0

- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './e2e/.nvmrc'
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'

- name: Run setup typescript-sdk
run: npm ci && npm run build
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk
if: ${{ !cancelled() }}

- name: Install dependencies
run: npm ci
run: pnpm install --frozen-lockfile
if: ${{ !cancelled() }}

- name: Run linter
run: npm run lint
run: pnpm lint
if: ${{ !cancelled() }}

- name: Run formatter
run: npm run format
run: pnpm format
if: ${{ !cancelled() }}

- name: Run tsc
run: npm run check
run: pnpm check
if: ${{ !cancelled() }}

server-medium-tests:
@@ -365,16 +400,21 @@ jobs:
with:
persist-credentials: false

- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0

- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'

- name: Run npm install
run: npm ci
run: pnpm install --frozen-lockfile

- name: Run medium tests
run: npm run test:medium
run: pnpm test:medium
if: ${{ !cancelled() }}

e2e-tests-server-cli:
@@ -398,23 +438,33 @@ jobs:
persist-credentials: false
submodules: 'recursive'

- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0

- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './e2e/.nvmrc'
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'

- name: Run setup typescript-sdk
run: npm ci && npm run build
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk
if: ${{ !cancelled() }}

- name: Run setup web
run: pnpm install --frozen-lockfile && pnpm exec svelte-kit sync
working-directory: ./web
if: ${{ !cancelled() }}

- name: Run setup cli
run: npm ci && npm run build
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./cli
if: ${{ !cancelled() }}

- name: Install dependencies
run: npm ci
run: pnpm install --frozen-lockfile
if: ${{ !cancelled() }}

- name: Docker build
@@ -422,7 +472,7 @@ jobs:
if: ${{ !cancelled() }}

- name: Run e2e tests (api & cli)
run: npm run test
run: pnpm test
if: ${{ !cancelled() }}

e2e-tests-web:
@@ -446,18 +496,23 @@ jobs:
persist-credentials: false
submodules: 'recursive'

- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0

- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './e2e/.nvmrc'
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'

- name: Run setup typescript-sdk
run: npm ci && npm run build
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk
if: ${{ !cancelled() }}

- name: Install dependencies
run: npm ci
run: pnpm install --frozen-lockfile
if: ${{ !cancelled() }}

- name: Install Playwright Browsers
@@ -479,7 +534,7 @@ jobs:
runs-on: ubuntu-latest
if: always()
steps:
- uses: immich-app/devtools/actions/success-check@6b81b1572e466f7f48ba3c823159ce3f4a4d66a6 # success-check-action-0.0.3
- uses: immich-app/devtools/actions/success-check@68f10eb389bb02a3cf9d1156111964c549eb421b # 0.0.4
with:
needs: ${{ toJSON(needs) }}

@@ -496,7 +551,7 @@ jobs:
persist-credentials: false

- name: Setup Flutter SDK
uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2.19.0
uses: subosito/flutter-action@fd55f4c5af5b953cc57a2be44cb082c8f6635e8e # v2.21.0
with:
channel: 'stable'
flutter-version-file: ./mobile/pubspec.yaml
@@ -564,16 +619,21 @@ jobs:
with:
persist-credentials: false

- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0

- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './.github/.nvmrc'
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'

- name: Run npm install
run: npm ci
run: pnpm install --frozen-lockfile

- name: Run formatter
run: npm run format
run: pnpm format
if: ${{ !cancelled() }}

shellcheck:
@@ -587,7 +647,7 @@ jobs:
persist-credentials: false

- name: Run ShellCheck
uses: ludeeus/action-shellcheck@master
uses: ludeeus/action-shellcheck@00cae500b08a931fb5698e11e79bfbd38e612a38 # 2.0.0
with:
ignore_paths: >-
**/open-api/**
@@ -605,16 +665,21 @@ jobs:
with:
persist-credentials: false

- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0

- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'

- name: Install server dependencies
run: npm --prefix=server ci
run: pnpm --filter immich install --frozen-lockfile

- name: Build the app
run: npm --prefix=server run build
run: pnpm --filter immich build

- name: Run API generation
run: make open-api
@@ -644,7 +709,7 @@ jobs:
contents: read
services:
postgres:
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3@sha256:1f5583fe3397210a0fbc7f11b0cec18bacc4a99e3e8ea0548e9bd6bcf26ec37a
env:
POSTGRES_PASSWORD: postgres
POSTGRES_USER: postgres
@@ -666,26 +731,31 @@ jobs:
with:
persist-credentials: false

- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0

- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'

- name: Install server dependencies
run: npm ci
run: pnpm install --frozen-lockfile

- name: Build the app
run: npm run build
run: pnpm build

- name: Run existing migrations
run: npm run migrations:run
run: pnpm migrations:run

- name: Test npm run schema:reset command works
run: npm run schema:reset
run: pnpm schema:reset

- name: Generate new migrations
continue-on-error: true
run: npm run migrations:generate src/TestMigration
run: pnpm migrations:generate src/TestMigration

- name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
@@ -704,7 +774,7 @@ jobs:
exit 1

- name: Run SQL generation
run: npm run sync:sql
run: pnpm sync:sql
env:
DB_URL: postgres://postgres:postgres@localhost:5432/immich

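The migration-check job above can be approximated locally with something like the following; it assumes the scripts are run from the server package and that a Postgres instance is reachable at the same `DB_URL` the workflow uses (both assumptions, not part of the workflow text):

```bash
# Sketch of the server migration checks, not the exact CI invocation.
export DB_URL=postgres://postgres:postgres@localhost:5432/immich
cd server                                   # assumed working directory
pnpm install --frozen-lockfile
pnpm build
pnpm migrations:run                         # apply existing migrations
pnpm schema:reset                           # verify schema:reset still works
pnpm migrations:generate src/TestMigration  # should produce no new migration file
pnpm sync:sql                               # regenerate SQL snapshots
```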
.github/workflows/weblate-lock.yml (vendored, 2 changes)
@@ -52,6 +52,6 @@ jobs:
permissions: {}
if: always()
steps:
- uses: immich-app/devtools/actions/success-check@6b81b1572e466f7f48ba3c823159ce3f4a4d66a6 # success-check-action-0.0.3
- uses: immich-app/devtools/actions/success-check@68f10eb389bb02a3cf9d1156111964c549eb421b # 0.0.4
with:
needs: ${{ toJSON(needs) }}

Makefile (90 changes)
@@ -1,27 +1,33 @@
dev:
docker compose -f ./docker/docker-compose.dev.yml up --remove-orphans || make dev-down
@trap 'make dev-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.dev.yml up --remove-orphans

dev-down:
docker compose -f ./docker/docker-compose.dev.yml down --remove-orphans

dev-update:
docker compose -f ./docker/docker-compose.dev.yml up --build -V --remove-orphans
@trap 'make dev-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.dev.yml up --build -V --remove-orphans

dev-scale:
docker compose -f ./docker/docker-compose.dev.yml up --build -V --scale immich-server=3 --remove-orphans
@trap 'make dev-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.dev.yml up --build -V --scale immich-server=3 --remove-orphans

.PHONY: e2e
e2e:
docker compose -f ./e2e/docker-compose.yml up --build -V --remove-orphans
@trap 'make e2e-down' EXIT; COMPOSE_BAKE=true docker compose -f ./e2e/docker-compose.yml up --build -V --remove-orphans

e2e-update:
@trap 'make e2e-down' EXIT; COMPOSE_BAKE=true docker compose -f ./e2e/docker-compose.yml up --build -V --remove-orphans

e2e-down:
docker compose -f ./e2e/docker-compose.yml down --remove-orphans

prod:
docker compose -f ./docker/docker-compose.prod.yml up --build -V --remove-orphans
@trap 'make prod-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.prod.yml up --build -V --remove-orphans

prod-down:
docker compose -f ./docker/docker-compose.prod.yml down --remove-orphans

prod-scale:
docker compose -f ./docker/docker-compose.prod.yml up --build -V --scale immich-server=3 --scale immich-microservices=3 --remove-orphans
@trap 'make prod-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.prod.yml up --build -V --scale immich-server=3 --scale immich-microservices=3 --remove-orphans

.PHONY: open-api
open-api:
@@ -34,7 +40,7 @@ open-api-typescript:
cd ./open-api && bash ./bin/generate-open-api.sh typescript

sql:
npm --prefix server run sync:sql
pnpm --filter immich run sync:sql

attach-server:
docker exec -it docker_immich-server_1 sh
@@ -44,29 +50,40 @@ renovate:

MODULES = e2e server web cli sdk docs .github

# directory to package name mapping function
# cli = @immich/cli
# docs = documentation
# e2e = immich-e2e
# open-api/typescript-sdk = @immich/sdk
# server = immich
# web = immich-web
map-package = $(subst sdk,@immich/sdk,$(subst cli,@immich/cli,$(subst docs,documentation,$(subst e2e,immich-e2e,$(subst server,immich,$(subst web,immich-web,$1))))))

audit-%:
npm --prefix $(subst sdk,open-api/typescript-sdk,$*) audit fix
pnpm --filter $(call map-package,$*) audit fix
install-%:
npm --prefix $(subst sdk,open-api/typescript-sdk,$*) i
pnpm --filter $(call map-package,$*) install $(if $(FROZEN),--frozen-lockfile) $(if $(OFFLINE),--offline)
build-cli: build-sdk
build-web: build-sdk
build-%: install-%
npm --prefix $(subst sdk,open-api/typescript-sdk,$*) run build
pnpm --filter $(call map-package,$*) run build
format-%:
npm --prefix $* run format:fix
pnpm --filter $(call map-package,$*) run format:fix
lint-%:
npm --prefix $* run lint:fix
pnpm --filter $(call map-package,$*) run lint:fix
lint-web:
pnpm --filter $(call map-package,$*) run lint:p
check-%:
npm --prefix $* run check
pnpm --filter $(call map-package,$*) run check
check-web:
npm --prefix web run check:typescript
npm --prefix web run check:svelte
pnpm --filter immich-web run check:typescript
pnpm --filter immich-web run check:svelte
test-%:
npm --prefix $* run test
pnpm --filter $(call map-package,$*) run test
test-e2e:
docker compose -f ./e2e/docker-compose.yml build
npm --prefix e2e run test
npm --prefix e2e run test:web
pnpm --filter immich-e2e run test
pnpm --filter immich-e2e run test:web
test-medium:
docker run \
--rm \
@@ -76,23 +93,34 @@ test-medium:
-v ./server/tsconfig.json:/usr/src/app/tsconfig.json \
-e NODE_ENV=development \
immich-server:latest \
-c "npm ci && npm run test:medium -- --run"
-c "pnpm test:medium -- --run"
test-medium-dev:
docker exec -it immich_server /bin/sh -c "npm run test:medium"
docker exec -it immich_server /bin/sh -c "pnpm run test:medium"

build-all: $(foreach M,$(filter-out e2e .github,$(MODULES)),build-$M) ;
install-all: $(foreach M,$(MODULES),install-$M) ;
check-all: $(foreach M,$(filter-out sdk cli docs .github,$(MODULES)),check-$M) ;
lint-all: $(foreach M,$(filter-out sdk docs .github,$(MODULES)),lint-$M) ;
format-all: $(foreach M,$(filter-out sdk,$(MODULES)),format-$M) ;
audit-all: $(foreach M,$(MODULES),audit-$M) ;
hygiene-all: lint-all format-all check-all sql audit-all;
test-all: $(foreach M,$(filter-out sdk docs .github,$(MODULES)),test-$M) ;
install-all:
pnpm -r --filter '!documentation' install

build-all: $(foreach M,$(filter-out e2e docs .github,$(MODULES)),build-$M) ;

check-all:
pnpm -r --filter '!documentation' run "/^(check|check\:svelte|check\:typescript)$/"
lint-all:
pnpm -r --filter '!documentation' run lint:fix
format-all:
pnpm -r --filter '!documentation' run format:fix
audit-all:
pnpm -r --filter '!documentation' audit fix
hygiene-all: audit-all
pnpm -r --filter '!documentation' run "/(format:fix|check|check:svelte|check:typescript|sql)/"

test-all:
pnpm -r --filter '!documentation' run "/^test/"

clean:
find . -name "node_modules" -type d -prune -exec rm -rf '{}' +
find . -name "node_modules" -type d -prune -exec rm -rf {} +
find . -name "dist" -type d -prune -exec rm -rf '{}' +
find . -name "build" -type d -prune -exec rm -rf '{}' +
find . -name "svelte-kit" -type d -prune -exec rm -rf '{}' +
docker compose -f ./docker/docker-compose.dev.yml rm -v -f || true
docker compose -f ./e2e/docker-compose.yml rm -v -f || true
command -v docker >/dev/null 2>&1 && docker compose -f ./docker/docker-compose.dev.yml rm -v -f || true
command -v docker >/dev/null 2>&1 && docker compose -f ./e2e/docker-compose.yml rm -v -f || true
setup-dev: install-server install-sdk build-sdk install-web

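As a concrete illustration of the `map-package` substitution above, the per-module targets now resolve to pnpm `--filter` invocations keyed by package name rather than directory. A few hypothetical expansions, using the directory-to-package mapping documented in the Makefile comment:

```bash
# make install-web   ->  package "immich-web"
pnpm --filter immich-web install            # plus --frozen-lockfile/--offline when FROZEN/OFFLINE are set
# make build-sdk     ->  package "@immich/sdk"
pnpm --filter @immich/sdk run build
# make test-server   ->  package "immich"
pnpm --filter immich run test
# the *-all targets instead run recursively across the whole workspace:
pnpm -r --filter '!documentation' run lint:fix
```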
@@ -1 +1 @@
22.16.0
22.17.0

@@ -1,19 +1,41 @@
FROM node:22.16.0-alpine3.20@sha256:2289fb1fba0f4633b08ec47b94a89c7e20b829fc5679f9b7b298eaa2f1ed8b7e AS core

WORKDIR /usr/src/open-api/typescript-sdk
COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./
RUN npm ci
COPY open-api/typescript-sdk/ ./
RUN npm run build
ENV COREPACK_ENABLE_AUTO_PIN=0 \
COREPACK_ENABLE_DOWNLOAD_PROMPT=0

RUN corepack enable && \
corepack install -g pnpm

WORKDIR /usr/src/app
COPY --chown=node:node . .

COPY cli/package.json cli/package-lock.json ./
RUN npm ci
WORKDIR /usr/src/app/web
RUN --mount=type=cache,id=pnpm,target=/buildcache,uid=1000,gid=1000 \
pnpm install --frozen-lockfile && \
pnpm exec svelte-kit sync

COPY cli .
RUN npm run build
WORKDIR /usr/src/app/open-api/typescript-sdk
RUN --mount=type=cache,id=pnpm,target=/buildcache,uid=1000,gid=1000 \
pnpm install --frozen-lockfile && \
pnpm build

WORKDIR /usr/src/app/cli
RUN --mount=type=cache,id=pnpm,target=/buildcache,uid=1000,gid=1000 \
pnpm install --frozen-lockfile --prod --no-optional && \
pnpm build

RUN rm -rf /usr/src/app/web && \
rm -rf /usr/src/app/open-api && \
rm -rf /usr/src/app/cli/src && \
rm -rf /usr/src/app/server && \
rm -rf /usr/src/app/i18n && \
rm -rf /usr/src/app/e2e && \
rm -rf /usr/src/app/docs && \
rm -rf /usr/src/app/readme_i18n && \
rm -rf /usr/src/app/deployment && \
rm -rf /usr/src/app/docker

WORKDIR /import

ENTRYPOINT ["node", "/usr/src/app/dist"]
ENTRYPOINT ["node", "/usr/src/app/cli/dist"]

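To reproduce the image's toolchain locally, here is a minimal sketch of the corepack-based pnpm setup the new Dockerfile relies on (the `--mount=type=cache` build cache has no local equivalent and is omitted):

```bash
# Assumes Node 22.x is already installed; corepack ships with it.
export COREPACK_ENABLE_AUTO_PIN=0 COREPACK_ENABLE_DOWNLOAD_PROMPT=0
corepack enable            # expose the pnpm shim on PATH
corepack install -g pnpm   # install the pnpm version pinned for the project
pnpm --version             # sanity check
```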
@@ -6,8 +6,10 @@ Please see the [Immich CLI documentation](https://immich.app/docs/features/comma

Before building the CLI, you must build the immich server and the open-api client. To build the server run the following in the server folder:

$ npm install
$ npm run build
# if you don't have node installed
$ npm install -g pnpm
$ pnpm install
$ pnpm build

Then, to build the open-api client run the following in the open-api folder:

@@ -15,8 +17,10 @@ Then, to build the open-api client run the following in the open-api folder:

To run the Immich CLI from source, run the following in the cli folder:

$ npm install
$ npm run build
# if you don't have node installed
$ npm install -g pnpm
$ pnpm install
$ pnpm build
$ ts-node .

You'll need ts-node, the easiest way to install it is to use npm:

cli/bin/immich (new executable file, 2 changes)
@@ -0,0 +1,2 @@
#!/usr/bin/env node
import '../dist/index.js';

cli/package-lock.json (generated, 4517 changes): file diff suppressed because it is too large.

@@ -5,7 +5,7 @@
"type": "module",
"exports": "./dist/index.js",
"bin": {
"immich": "dist/index.js"
"immich": "./bin/immich"
},
"license": "GNU Affero General Public License version 3",
"keywords": [
@@ -21,7 +21,7 @@
"@types/lodash-es": "^4.17.12",
"@types/micromatch": "^4.0.9",
"@types/mock-fs": "^4.13.1",
"@types/node": "^22.15.31",
"@types/node": "^22.15.33",
"@vitest/coverage-v8": "^3.0.0",
"byte-size": "^9.0.0",
"cli-progress": "^3.12.0",
@@ -69,6 +69,6 @@
"micromatch": "^4.0.8"
},
"volta": {
"node": "22.16.0"
"node": "22.17.0"
}
}

@@ -16,7 +16,7 @@ name: immich-dev
services:
immich-server:
container_name: immich_server
command: ['/usr/src/app/bin/immich-dev']
command: ['/usr/src/app/server/bin/immich-dev']
image: immich-server-dev:latest
# extends:
# file: hwaccel.transcoding.yml
@@ -24,13 +24,12 @@ services:
build:
context: ../
dockerfile: server/Dockerfile
target: dev
target: dev-docker
restart: unless-stopped
volumes:
- ../server:/usr/src/app
- ../open-api:/usr/src/open-api
- ${UPLOAD_LOCATION}/photos:/usr/src/app/upload
- ${UPLOAD_LOCATION}/photos/upload:/usr/src/app/upload/upload
- ..:/usr/src/app
- ${UPLOAD_LOCATION}/photos:/usr/src/app/server/upload
- ${UPLOAD_LOCATION}/photos/upload:/usr/src/app/server/upload/upload
- /usr/src/app/node_modules
- /etc/localtime:/etc/localtime:ro
env_file:
@@ -69,17 +68,16 @@ services:
# Needed for rootless docker setup, see https://github.com/moby/moby/issues/45919
# user: 0:0
build:
context: ../web
command: ['/usr/src/app/bin/immich-web']
context: ../
dockerfile: web/Dockerfile
command: ['/usr/src/app/web/bin/immich-web']
env_file:
- .env
ports:
- 3000:3000
- 24678:24678
volumes:
- ../web:/usr/src/app
- ../i18n:/usr/src/i18n
- ../open-api/:/usr/src/open-api/
- ..:/usr/src/app
# - ../../ui:/usr/ui
- /usr/src/app/node_modules
ulimits:
@@ -122,7 +120,7 @@ services:

database:
container_name: immich_postgres
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0@sha256:5f6a838e4e44c8e0e019d0ebfe3ee8952b69afc2809b2c25f7b0119641978e91
env_file:
- .env
environment:
@@ -134,6 +132,7 @@ services:
- ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
ports:
- 5432:5432
shm_size: 128mb
# set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics
# immich-prometheus:
# container_name: immich_prometheus

@@ -63,7 +63,7 @@ services:

database:
container_name: immich_postgres
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0@sha256:5f6a838e4e44c8e0e019d0ebfe3ee8952b69afc2809b2c25f7b0119641978e91
env_file:
- .env
environment:
@@ -75,6 +75,7 @@ services:
- ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
ports:
- 5432:5432
shm_size: 128mb
restart: always

# set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics
@@ -82,7 +83,7 @@ services:
container_name: immich_prometheus
ports:
- 9090:9090
image: prom/prometheus@sha256:9abc6cf6aea7710d163dbb28d8eeb7dc5baef01e38fa4cd146a406dd9f07f70d
image: prom/prometheus@sha256:7a34573f0b9c952286b33d537f233cd5b708e12263733aa646e50c33f598f16c
volumes:
- ./prometheus.yml:/etc/prometheus/prometheus.yml
- prometheus-data:/prometheus
@@ -94,7 +95,7 @@ services:
command: ['./run.sh', '-disable-reporting']
ports:
- 3000:3000
image: grafana/grafana:12.0.1-ubuntu@sha256:65575bb9c761335e2ff30e364f21d38632e3b2e75f5f81d83cc92f44b9bbc055
image: grafana/grafana:12.0.2-ubuntu@sha256:0512d81cdeaaff0e370a9aa66027b465d1f1f04379c3a9c801a905fabbdbc7a5
volumes:
- grafana-data:/var/lib/grafana

@@ -56,7 +56,7 @@ services:

database:
container_name: immich_postgres
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0@sha256:5f6a838e4e44c8e0e019d0ebfe3ee8952b69afc2809b2c25f7b0119641978e91
environment:
POSTGRES_PASSWORD: ${DB_PASSWORD}
POSTGRES_USER: ${DB_USERNAME}
@@ -67,6 +67,7 @@ services:
volumes:
# Do not edit the next line. If you want to change the database storage location on your system, edit the value of DB_DATA_LOCATION in the .env file
- ${DB_DATA_LOCATION}:/var/lib/postgresql/data
shm_size: 128mb
restart: always

volumes:

@@ -1 +1 @@
22.16.0
22.17.0

@@ -1,2 +1,7 @@
build/
.docusaurus/

# Ignore files for PNPM, NPM and YARN
pnpm-lock.yaml
package-lock.json
yarn.lock

@@ -5,7 +5,7 @@ This website is built using [Docusaurus](https://docusaurus.io/), a modern stati
### Installation

```
$ npm install
$ pnpm install
```

### Local Development

@@ -490,7 +490,7 @@ You can also scan the Postgres database file structure for errors:
<details>
<summary>Scan for file structure errors</summary>
```bash
docker exec -it immich_postgres pg_amcheck --username=postgres --heapallindexed --parent-check --rootdescend --progress --all --install-missing
docker exec -it immich_postgres pg_amcheck --username=<DB_USERNAME> --heapallindexed --parent-check --rootdescend --progress --all --install-missing
```

A normal result will end something like this and return with an exit code of `0`:

@@ -57,7 +57,7 @@ Then please follow the steps in the following section for restoring the database
<TabItem value="Linux system" label="Linux system" default>

```bash title='Backup'
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres | gzip > "/path/to/backup/dump.sql.gz"
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=<DB_USERNAME> | gzip > "/path/to/backup/dump.sql.gz"
```

```bash title='Restore'
@@ -79,7 +79,7 @@ docker compose up -d # Start remainder of Immich apps
<TabItem value="Windows system (PowerShell)" label="Windows system (PowerShell)">

```powershell title='Backup'
[System.IO.File]::WriteAllLines("C:\absolute\path\to\backup\dump.sql", (docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres))
[System.IO.File]::WriteAllLines("C:\absolute\path\to\backup\dump.sql", (docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=<DB_USERNAME>))
```

```powershell title='Restore'
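Since the documented commands now reference `<DB_USERNAME>` rather than a hard-coded `postgres` user, one way to parameterize the Linux backup line is sketched below; reading the value from `.env` is an illustration, not part of the official docs, and the `postgres` fallback matches the documented default:

```bash
# Sketch: read the database user from the Immich .env and pass it to pg_dumpall.
DB_USERNAME="$(grep -E '^DB_USERNAME=' .env | cut -d= -f2)"
DB_USERNAME="${DB_USERNAME:-postgres}"   # documented default
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username="$DB_USERNAME" \
  | gzip > "/path/to/backup/dump.sql.gz"
```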
@@ -150,12 +150,10 @@ for more info read the [release notes](https://github.com/immich-app/immich/rele
- Preview images (small thumbnails and large previews) for each asset and thumbnails for recognized faces.
- Stored in `UPLOAD_LOCATION/thumbs/<userID>`.
- **Encoded Assets:**

- Videos that have been re-encoded from the original for wider compatibility. The original is not removed.
- Stored in `UPLOAD_LOCATION/encoded-video/<userID>`.

- **Postgres**

- The Immich database containing all the information to allow the system to function properly.
**Note:** This folder will only appear to users who have made the changes mentioned in [v1.102.0](https://github.com/immich-app/immich/discussions/8930) (an optional, non-mandatory change) or who started with this version.
- Stored in `DB_DATA_LOCATION`.
@@ -201,7 +199,6 @@ When you turn off the storage template engine, it will leave the assets in `UPLO
- Temporarily located in `UPLOAD_LOCATION/upload/<userID>`.
- Transferred to `UPLOAD_LOCATION/library/<userID>` upon successful upload.
- **Postgres**

- The Immich database containing all the information to allow the system to function properly.
**Note:** This folder will only appear to users who have made the changes mentioned in [v1.102.0](https://github.com/immich-app/immich/discussions/8930) (an optional, non-mandatory change) or who started with this version.
- Stored in `DB_DATA_LOCATION`.

@@ -20,7 +20,6 @@ Immich supports 3rd party authentication via [OpenID Connect][oidc] (OIDC), an i
Before enabling OAuth in Immich, a new client application needs to be configured in the 3rd-party authentication server. While the specifics of this setup vary from provider to provider, the general approach should be the same.

1. Create a new (Client) Application

1. The **Provider** type should be `OpenID Connect` or `OAuth2`
2. The **Client type** should be `Confidential`
3. The **Application** type should be `Web`
@@ -29,7 +28,6 @@ Before enabling OAuth in Immich, a new client application needs to be configured
2. Configure Redirect URIs/Origins

The **Sign-in redirect URIs** should include:

- `app.immich:///oauth-callback` - for logging in with OAuth from the [Mobile App](/docs/features/mobile-app.mdx)
- `http://DOMAIN:PORT/auth/login` - for logging in with OAuth from the Web Client
- `http://DOMAIN:PORT/user-settings` - for manually linking OAuth in the Web Client
@@ -37,21 +35,17 @@ Before enabling OAuth in Immich, a new client application needs to be configured
Redirect URIs should contain all the domains you will be using to access Immich. Some examples include:

Mobile

- `app.immich:///oauth-callback` (You **MUST** include this for iOS and Android mobile apps to work properly)

Localhost

- `http://localhost:2283/auth/login`
- `http://localhost:2283/user-settings`

Local IP

- `http://192.168.0.200:2283/auth/login`
- `http://192.168.0.200:2283/user-settings`

Hostname

- `https://immich.example.com/auth/login`
- `https://immich.example.com/user-settings`

@@ -199,13 +199,11 @@ To use your SSH key for commit signing, see the [GitHub guide on SSH commit sign
When the Dev Container starts, it automatically:

1. **Runs post-create script** (`container-server-post-create.sh`):

- Adjusts file permissions for the `node` user
- Installs dependencies: `npm install` in all packages
- Installs dependencies: `pnpm install` in all packages
- Builds TypeScript SDK: `npm run build` in `open-api/typescript-sdk`

2. **Starts development servers** via VS Code tasks:

- `Immich API Server (Nest)` - API server with hot-reloading on port 2283
- `Immich Web Server (Vite)` - Web frontend with hot-reloading on port 3000
- Both servers watch for file changes and recompile automatically
@@ -335,14 +333,12 @@ make install-all # Install all dependencies
The Dev Container is pre-configured for debugging:

1. **API Server Debugging**:

- Set breakpoints in VS Code
- Press `F5` or use "Run and Debug" panel
- Select "Attach to Server" configuration
- Debug port: 9231

2. **Worker Debugging**:

- Use "Attach to Workers" configuration
- Debug port: 9230

@@ -428,7 +424,6 @@ While the Dev Container focuses on server and web development, you can connect m
```

2. **Configure mobile app**:

- Server URL: `http://YOUR_IP:2283/api`
- Ensure firewall allows port 2283

@@ -56,7 +56,7 @@ If you only want to do web development connected to an existing, remote backend,

1. Build the Immich SDK - `cd open-api/typescript-sdk && npm i && npm run build && cd -`
2. Enter the web directory - `cd web/`
3. Install web dependencies - `npm i`
3. Install web dependencies - `pnpm i`
4. Start the web development server

```bash
@@ -5,7 +5,7 @@
|
||||
### Unit tests
|
||||
|
||||
Unit are run by calling `npm run test` from the `server/` directory.
|
||||
You need to run `npm install` (in `server/`) before _once_.
|
||||
You need to run `pnpm install` (in `server/`) before _once_.
|
||||
|
||||
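Putting those two commands together, a typical first run might look like the following. This assumes the `test` script behaves the same when invoked through pnpm as through npm, which is normally the case for package.json scripts:

```bash
cd server
pnpm install    # only needed once
pnpm run test   # runs the unit test suite
```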
### End to end tests
@@ -52,9 +52,9 @@ REMOTE_BACKUP_PATH="/path/to/remote/backup/directory"

### Local

# Backup Immich database
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres > "$UPLOAD_LOCATION"/database-backup/immich-database.sql
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=<DB_USERNAME> > "$UPLOAD_LOCATION"/database-backup/immich-database.sql
# For deduplicating backup programs such as Borg or Restic, compressing the content can increase backup size by making it harder to deduplicate. If you are using a different program or still prefer to compress, you can use the following command instead:
# docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres | /usr/bin/gzip --rsyncable > "$UPLOAD_LOCATION"/database-backup/immich-database.sql.gz
# docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=<DB_USERNAME> | /usr/bin/gzip --rsyncable > "$UPLOAD_LOCATION"/database-backup/immich-database.sql.gz

### Append to local Borg repository
borg create "$BACKUP_PATH/immich-borg::{now}" "$UPLOAD_LOCATION" --exclude "$UPLOAD_LOCATION"/thumbs/ --exclude "$UPLOAD_LOCATION"/encoded-video/
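Before appending the dump to the Borg repository, a quick sanity check on the dump file can save a bad backup. This is an optional addition, not part of the guide's script:

```bash
# Fail early if the dump is missing or empty, then eyeball its tail.
test -s "$UPLOAD_LOCATION"/database-backup/immich-database.sql || { echo "dump is empty" >&2; exit 1; }
tail -n 3 "$UPLOAD_LOCATION"/database-backup/immich-database.sql
```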
@@ -72,22 +72,25 @@ Information on the current workers can be found [here](/docs/administration/jobs
|
||||
|
||||
## Database
|
||||
|
||||
| Variable | Description | Default | Containers |
|
||||
| :---------------------------------- | :--------------------------------------------------------------------------- | :--------: | :----------------------------- |
|
||||
| `DB_URL` | Database URL | | server |
|
||||
| `DB_HOSTNAME` | Database host | `database` | server |
|
||||
| `DB_PORT` | Database port | `5432` | server |
|
||||
| `DB_USERNAME` | Database user | `postgres` | server, database<sup>\*1</sup> |
|
||||
| `DB_PASSWORD` | Database password | `postgres` | server, database<sup>\*1</sup> |
|
||||
| `DB_DATABASE_NAME` | Database name | `immich` | server, database<sup>\*1</sup> |
|
||||
| `DB_SSL_MODE` | Database SSL mode | | server |
|
||||
| `DB_VECTOR_EXTENSION`<sup>\*2</sup> | Database vector extension (one of [`vectorchord`, `pgvector`, `pgvecto.rs`]) | | server |
|
||||
| `DB_SKIP_MIGRATIONS` | Whether to skip running migrations on startup (one of [`true`, `false`]) | `false` | server |
|
||||
| Variable | Description | Default | Containers |
|
||||
| :---------------------------------- | :------------------------------------------------------------------------------------- | :--------: | :----------------------------- |
|
||||
| `DB_URL` | Database URL | | server |
|
||||
| `DB_HOSTNAME` | Database host | `database` | server |
|
||||
| `DB_PORT` | Database port | `5432` | server |
|
||||
| `DB_USERNAME` | Database user | `postgres` | server, database<sup>\*1</sup> |
|
||||
| `DB_PASSWORD` | Database password | `postgres` | server, database<sup>\*1</sup> |
|
||||
| `DB_DATABASE_NAME` | Database name | `immich` | server, database<sup>\*1</sup> |
|
||||
| `DB_SSL_MODE` | Database SSL mode | | server |
|
||||
| `DB_VECTOR_EXTENSION`<sup>\*2</sup> | Database vector extension (one of [`vectorchord`, `pgvector`, `pgvecto.rs`]) | | server |
|
||||
| `DB_SKIP_MIGRATIONS` | Whether to skip running migrations on startup (one of [`true`, `false`]) | `false` | server |
|
||||
| `DB_STORAGE_TYPE` | Optimize concurrent IO on SSDs or sequential IO on HDDs ([`SSD`, `HDD`])<sup>\*3</sup> | `SSD` | server |
|
||||
|
||||
\*1: The values of `DB_USERNAME`, `DB_PASSWORD`, and `DB_DATABASE_NAME` are passed to the Postgres container as the variables `POSTGRES_USER`, `POSTGRES_PASSWORD`, and `POSTGRES_DB` in `docker-compose.yml`.
|
||||
|
||||
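To make footnote *1 concrete, here is a minimal `.env` excerpt using the default values listed in the table above; treat it as an illustration rather than a recommended configuration:

```bash
# Hypothetical .env excerpt: these three values are handed to the database
# container as POSTGRES_USER, POSTGRES_PASSWORD and POSTGRES_DB via docker-compose.yml.
DB_USERNAME=postgres
DB_PASSWORD=postgres
DB_DATABASE_NAME=immich
```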
\*2: If not provided, the appropriate extension to use is auto-detected at startup by introspecting the database. When multiple extensions are installed, the order of preference is VectorChord, pgvecto.rs, pgvector.
|
||||
|
||||
\*3: Uses either [`postgresql.ssd.conf`](https://github.com/immich-app/base-images/blob/main/postgres/postgresql.ssd.conf) or [`postgresql.hdd.conf`](https://github.com/immich-app/base-images/blob/main/postgres/postgresql.hdd.conf), which mainly controls the Postgres `effective_io_concurrency` setting to allow for concurrent IO on SSDs and sequential IO on HDDs.
|
||||
|
||||
:::info
|
||||
|
||||
All `DB_` variables must be provided to all Immich workers, including `api` and `microservices`.
|
||||
|
||||
@@ -75,7 +75,6 @@ alt="Select Plugins > Compose.Manager > Add New Stack > Label it Immich"
5. Click "**Save Changes**". You will be prompted to edit stack UI labels; leave this blank and click "**Ok**"
6. Select the cog ⚙️ next to Immich, click "**Edit Stack**", then click "**Env File**"
7. Paste the entire contents of the [Immich example.env](https://github.com/immich-app/immich/releases/latest/download/example.env) file into the Unraid editor, then **before saving** edit the following (a sketch of the two folder commands follows this list):

- `UPLOAD_LOCATION`: Create a folder in your Images Unraid share and place its **absolute** path here. For example, my _"images"_ share has a folder within it called _"immich"_; if I browse to this directory in the terminal and type `pwd`, the output is `/mnt/user/images/immich`, which is exactly the value I need to enter as my `UPLOAD_LOCATION`.
- `DB_DATA_LOCATION`: Change this to use an Unraid share (preferably a cache pool, e.g. `/mnt/user/appdata/postgresql/data`). This uses the `appdata` share. Also create the `postgresql` folder by running `mkdir -p /mnt/user/{share_location}/postgresql/data`. If left at the default, it will try to use Unraid's `/boot/config/plugins/compose.manager/projects/[stack_name]/postgres` folder, which it does not have permission to write to, resulting in this container continuously restarting.

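A minimal sketch of those two edits, assuming an _images_ share and the `appdata` share; substitute your own share and folder names:

```bash
# Value for UPLOAD_LOCATION: create the folder and print its absolute path.
mkdir -p /mnt/user/images/immich && cd /mnt/user/images/immich && pwd
# Value for DB_DATA_LOCATION: create the Postgres data folder on a cache-backed share.
mkdir -p /mnt/user/appdata/postgresql/data
```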
docs/package-lock.json (generated, 20954 lines) - file diff suppressed because it is too large.
@@ -16,8 +16,9 @@
|
||||
"write-heading-ids": "docusaurus write-heading-ids"
|
||||
},
|
||||
"dependencies": {
|
||||
"@docusaurus/core": "~3.7.0",
|
||||
"@docusaurus/preset-classic": "~3.7.0",
|
||||
"@docusaurus/core": "~3.8.0",
|
||||
"@docusaurus/preset-classic": "~3.8.0",
|
||||
"@docusaurus/theme-common": "~3.8.0",
|
||||
"@mdi/js": "^7.3.67",
|
||||
"@mdi/react": "^1.6.1",
|
||||
"@mdx-js/react": "^3.0.0",
|
||||
@@ -26,6 +27,7 @@
|
||||
"clsx": "^2.0.0",
|
||||
"docusaurus-lunr-search": "^3.3.2",
|
||||
"docusaurus-preset-openapi": "^0.7.5",
|
||||
"lunr": "^2.3.9",
|
||||
"postcss": "^8.4.25",
|
||||
"prism-react-renderer": "^2.3.1",
|
||||
"raw-loader": "^4.0.2",
|
||||
@@ -35,7 +37,7 @@
|
||||
"url": "^0.11.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@docusaurus/module-type-aliases": "~3.7.0",
|
||||
"@docusaurus/module-type-aliases": "~3.8.0",
|
||||
"@docusaurus/tsconfig": "^3.7.0",
|
||||
"@docusaurus/types": "^3.7.0",
|
||||
"prettier": "^3.2.4",
|
||||
@@ -57,6 +59,6 @@
|
||||
"node": ">=20"
|
||||
},
|
||||
"volta": {
|
||||
"node": "22.16.0"
|
||||
"node": "22.17.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -58,6 +58,12 @@ const guides: CommunityGuidesProps[] = [
|
||||
description: 'Access Immich with an end-to-end encrypted connection.',
|
||||
url: 'https://meshnet.nordvpn.com/how-to/remote-files-media-access/immich-remote-access',
|
||||
},
|
||||
{
|
||||
title: 'Trust Self Signed Certificates with Immich - OAuth Setup',
|
||||
description:
|
||||
'Set up Certificate Authority trust with Immich, and your private OAuth2/OpenID service, while using a private CA for HTTPS communication.',
|
||||
url: 'https://github.com/immich-app/immich/discussions/18614',
|
||||
},
|
||||
];
|
||||
|
||||
function CommunityGuide({ title, description, url }: CommunityGuidesProps): JSX.Element {
|
||||
|
||||
@@ -1 +1 @@
|
||||
22.16.0
|
||||
22.17.0
|
||||
|
||||
@@ -39,7 +39,7 @@ services:
|
||||
image: redis:6.2-alpine@sha256:3211c33a618c457e5d241922c975dbc4f446d0bdb2dc75694f5573ef8e2d01fa
|
||||
|
||||
database:
|
||||
image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0@sha256:9c704fb49ce27549df00f1b096cc93f8b0c959ef087507704d74954808f78a82
|
||||
image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0@sha256:3aef84a0a4fabbda17ef115c3019ba0c914ec73e9f6e59203674322d858b8eea
|
||||
command: -c fsync=off -c shared_preload_libraries=vchord.so -c config_file=/var/lib/postgresql/data/postgresql.conf
|
||||
environment:
|
||||
POSTGRES_PASSWORD: postgres
|
||||
|
||||
e2e/package-lock.json (generated, 6874 lines) - file diff suppressed because it is too large.
@@ -25,7 +25,7 @@
|
||||
"@immich/sdk": "file:../open-api/typescript-sdk",
|
||||
"@playwright/test": "^1.44.1",
|
||||
"@types/luxon": "^3.4.2",
|
||||
"@types/node": "^22.15.31",
|
||||
"@types/node": "^22.15.33",
|
||||
"@types/oidc-provider": "^9.0.0",
|
||||
"@types/pg": "^8.15.1",
|
||||
"@types/pngjs": "^6.0.4",
|
||||
@@ -44,6 +44,7 @@
|
||||
"pngjs": "^7.0.0",
|
||||
"prettier": "^3.2.5",
|
||||
"prettier-plugin-organize-imports": "^4.0.0",
|
||||
"sharp": "^0.34.0",
|
||||
"socket.io-client": "^4.7.4",
|
||||
"supertest": "^7.0.0",
|
||||
"typescript": "^5.3.3",
|
||||
@@ -52,6 +53,6 @@
|
||||
"vitest": "^3.0.0"
|
||||
},
|
||||
"volta": {
|
||||
"node": "22.16.0"
|
||||
"node": "22.17.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,6 +15,7 @@ import { DateTime } from 'luxon';
|
||||
import { randomBytes } from 'node:crypto';
|
||||
import { readFile, writeFile } from 'node:fs/promises';
|
||||
import { basename, join } from 'node:path';
|
||||
import sharp from 'sharp';
|
||||
import { Socket } from 'socket.io-client';
|
||||
import { createUserDto, uuidDto } from 'src/fixtures';
|
||||
import { makeRandomImage } from 'src/generators';
|
||||
@@ -40,6 +41,40 @@ const today = DateTime.fromObject({
|
||||
}) as DateTime<true>;
|
||||
const yesterday = today.minus({ days: 1 });
|
||||
|
||||
const createTestImageWithExif = async (filename: string, exifData: Record<string, any>) => {
|
||||
// Generate unique color to ensure different checksums for each image
|
||||
const r = Math.floor(Math.random() * 256);
|
||||
const g = Math.floor(Math.random() * 256);
|
||||
const b = Math.floor(Math.random() * 256);
|
||||
|
||||
// Create a 100x100 solid color JPEG using Sharp
|
||||
const imageBytes = await sharp({
|
||||
create: {
|
||||
width: 100,
|
||||
height: 100,
|
||||
channels: 3,
|
||||
background: { r, g, b },
|
||||
},
|
||||
})
|
||||
.jpeg({ quality: 90 })
|
||||
.toBuffer();
|
||||
|
||||
// Add random suffix to filename to avoid collisions
|
||||
const uniqueFilename = filename.replace('.jpg', `-${randomBytes(4).toString('hex')}.jpg`);
|
||||
const filepath = join(tempDir, uniqueFilename);
|
||||
await writeFile(filepath, imageBytes);
|
||||
|
||||
// Filter out undefined values before writing EXIF
|
||||
const cleanExifData = Object.fromEntries(Object.entries(exifData).filter(([, value]) => value !== undefined));
|
||||
|
||||
await exiftool.write(filepath, cleanExifData);
|
||||
|
||||
// Re-read the image bytes after EXIF has been written
|
||||
const finalImageBytes = await readFile(filepath);
|
||||
|
||||
return { filepath, imageBytes: finalImageBytes, filename: uniqueFilename };
|
||||
};
|
||||
|
||||
describe('/asset', () => {
|
||||
let admin: LoginResponseDto;
|
||||
let websocket: Socket;
|
||||
@@ -1190,6 +1225,411 @@ describe('/asset', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('EXIF metadata extraction', () => {
|
||||
describe('Additional date tag extraction', () => {
|
||||
describe('Date-time vs time-only tag handling', () => {
|
||||
it('should fall back to file timestamps when only time-only tags are available', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('time-only-fallback.jpg', {
|
||||
TimeCreated: '2023:11:15 14:30:00', // Time-only tag, should not be used for dateTimeOriginal
|
||||
// Exclude all date-time tags to force fallback to file timestamps
|
||||
SubSecDateTimeOriginal: undefined,
|
||||
DateTimeOriginal: undefined,
|
||||
SubSecCreateDate: undefined,
|
||||
SubSecMediaCreateDate: undefined,
|
||||
CreateDate: undefined,
|
||||
MediaCreateDate: undefined,
|
||||
CreationDate: undefined,
|
||||
DateTimeCreated: undefined,
|
||||
GPSDateTime: undefined,
|
||||
DateTimeUTC: undefined,
|
||||
SonyDateTime2: undefined,
|
||||
GPSDateStamp: undefined,
|
||||
});
|
||||
|
||||
const oldDate = new Date('2020-01-01T00:00:00.000Z');
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
fileCreatedAt: oldDate.toISOString(),
|
||||
fileModifiedAt: oldDate.toISOString(),
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
// Should fall back to file timestamps, which we set to 2020-01-01
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2020-01-01T00:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
|
||||
it('should prefer DateTimeOriginal over time-only tags', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('datetime-over-time.jpg', {
|
||||
DateTimeOriginal: '2023:10:10 10:00:00', // Should be preferred
|
||||
TimeCreated: '2023:11:15 14:30:00', // Should be ignored (time-only)
|
||||
});
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
// Should use DateTimeOriginal, not TimeCreated
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2023-10-10T10:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GPSDateTime tag extraction', () => {
|
||||
it('should extract GPSDateTime with GPS coordinates', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('gps-datetime.jpg', {
|
||||
GPSDateTime: '2023:11:15 12:30:00Z',
|
||||
GPSLatitude: 37.7749,
|
||||
GPSLongitude: -122.4194,
|
||||
// Exclude other date tags
|
||||
SubSecDateTimeOriginal: undefined,
|
||||
DateTimeOriginal: undefined,
|
||||
SubSecCreateDate: undefined,
|
||||
SubSecMediaCreateDate: undefined,
|
||||
CreateDate: undefined,
|
||||
MediaCreateDate: undefined,
|
||||
CreationDate: undefined,
|
||||
DateTimeCreated: undefined,
|
||||
TimeCreated: undefined,
|
||||
});
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
expect(assetInfo.exifInfo?.latitude).toBeCloseTo(37.7749, 4);
|
||||
expect(assetInfo.exifInfo?.longitude).toBeCloseTo(-122.4194, 4);
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2023-11-15T12:30:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('CreateDate tag extraction', () => {
|
||||
it('should extract CreateDate when available', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('create-date.jpg', {
|
||||
CreateDate: '2023:11:15 10:30:00',
|
||||
// Exclude other higher priority date tags
|
||||
SubSecDateTimeOriginal: undefined,
|
||||
DateTimeOriginal: undefined,
|
||||
SubSecCreateDate: undefined,
|
||||
SubSecMediaCreateDate: undefined,
|
||||
MediaCreateDate: undefined,
|
||||
CreationDate: undefined,
|
||||
DateTimeCreated: undefined,
|
||||
TimeCreated: undefined,
|
||||
GPSDateTime: undefined,
|
||||
});
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2023-11-15T10:30:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GPSDateStamp tag extraction', () => {
|
||||
it('should fall back to file timestamps when only date-only tags are available', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('gps-datestamp.jpg', {
|
||||
GPSDateStamp: '2023:11:15', // Date-only tag, should not be used for dateTimeOriginal
|
||||
// Note: NOT including GPSTimeStamp to avoid automatic GPSDateTime creation
|
||||
GPSLatitude: 51.5074,
|
||||
GPSLongitude: -0.1278,
|
||||
// Explicitly exclude all testable date-time tags to force fallback to file timestamps
|
||||
DateTimeOriginal: undefined,
|
||||
CreateDate: undefined,
|
||||
CreationDate: undefined,
|
||||
GPSDateTime: undefined,
|
||||
});
|
||||
|
||||
const oldDate = new Date('2020-01-01T00:00:00.000Z');
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
fileCreatedAt: oldDate.toISOString(),
|
||||
fileModifiedAt: oldDate.toISOString(),
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
expect(assetInfo.exifInfo?.latitude).toBeCloseTo(51.5074, 4);
|
||||
expect(assetInfo.exifInfo?.longitude).toBeCloseTo(-0.1278, 4);
|
||||
// Should fall back to file timestamps, which we set to 2020-01-01
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2020-01-01T00:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
/*
|
||||
* NOTE: The following EXIF date tags are NOT effectively usable with JPEG test files:
|
||||
*
|
||||
* NOT WRITABLE to JPEG:
|
||||
* - MediaCreateDate: Can be read from video files but not written to JPEG
|
||||
* - DateTimeCreated: Read-only tag in JPEG format
|
||||
* - DateTimeUTC: Cannot be written to JPEG files
|
||||
* - SonyDateTime2: Proprietary Sony tag, not writable to JPEG
|
||||
* - SubSecMediaCreateDate: Tag not defined for JPEG format
|
||||
* - SourceImageCreateTime: Non-standard insta360 tag, not writable to JPEG
|
||||
*
|
||||
* WRITABLE but NOT READABLE from JPEG:
|
||||
* - SubSecDateTimeOriginal: Can be written but not read back from JPEG
|
||||
* - SubSecCreateDate: Can be written but not read back from JPEG
|
||||
*
|
||||
* EFFECTIVELY TESTABLE TAGS (writable and readable):
|
||||
* - DateTimeOriginal ✓
|
||||
* - CreateDate ✓
|
||||
* - CreationDate ✓
|
||||
* - GPSDateTime ✓
|
||||
*
|
||||
* The metadata service correctly handles non-readable tags and will fall back to
|
||||
* file timestamps when only non-readable tags are present.
|
||||
*/
|
||||
|
||||
describe('Date tag priority order', () => {
|
||||
it('should respect the complete date tag priority order', async () => {
|
||||
// Test cases using only EFFECTIVELY TESTABLE tags (writable AND readable from JPEG)
|
||||
const testCases = [
|
||||
{
|
||||
name: 'DateTimeOriginal has highest priority among testable tags',
|
||||
exifData: {
|
||||
DateTimeOriginal: '2023:04:04 04:00:00', // TESTABLE - highest priority among readable tags
|
||||
CreateDate: '2023:05:05 05:00:00', // TESTABLE
|
||||
CreationDate: '2023:07:07 07:00:00', // TESTABLE
|
||||
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
|
||||
},
|
||||
expectedDate: '2023-04-04T04:00:00.000Z',
|
||||
},
|
||||
{
|
||||
name: 'CreateDate when DateTimeOriginal missing',
|
||||
exifData: {
|
||||
CreateDate: '2023:05:05 05:00:00', // TESTABLE
|
||||
CreationDate: '2023:07:07 07:00:00', // TESTABLE
|
||||
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
|
||||
},
|
||||
expectedDate: '2023-05-05T05:00:00.000Z',
|
||||
},
|
||||
{
|
||||
name: 'CreationDate when standard EXIF tags missing',
|
||||
exifData: {
|
||||
CreationDate: '2023:07:07 07:00:00', // TESTABLE
|
||||
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
|
||||
},
|
||||
expectedDate: '2023-07-07T07:00:00.000Z',
|
||||
},
|
||||
{
|
||||
name: 'GPSDateTime when no other testable date tags present',
|
||||
exifData: {
|
||||
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
|
||||
Make: 'SONY',
|
||||
},
|
||||
expectedDate: '2023-10-10T10:00:00.000Z',
|
||||
},
|
||||
];
|
||||
|
||||
for (const testCase of testCases) {
|
||||
const { imageBytes, filename } = await createTestImageWithExif(
|
||||
`${testCase.name.replaceAll(/\s+/g, '-').toLowerCase()}.jpg`,
|
||||
testCase.exifData,
|
||||
);
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal, `Failed for: ${testCase.name}`).toBeDefined();
|
||||
expect(
|
||||
new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime(),
|
||||
`Date mismatch for: ${testCase.name}`,
|
||||
).toBe(new Date(testCase.expectedDate).getTime());
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge cases for date tag handling', () => {
|
||||
it('should fall back to file timestamps with GPSDateStamp alone', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('gps-datestamp-only.jpg', {
|
||||
GPSDateStamp: '2023:08:08', // Date-only tag, should not be used for dateTimeOriginal
|
||||
// Intentionally no GPSTimeStamp
|
||||
// Exclude all other date tags
|
||||
SubSecDateTimeOriginal: undefined,
|
||||
DateTimeOriginal: undefined,
|
||||
SubSecCreateDate: undefined,
|
||||
SubSecMediaCreateDate: undefined,
|
||||
CreateDate: undefined,
|
||||
MediaCreateDate: undefined,
|
||||
CreationDate: undefined,
|
||||
DateTimeCreated: undefined,
|
||||
TimeCreated: undefined,
|
||||
GPSDateTime: undefined,
|
||||
DateTimeUTC: undefined,
|
||||
});
|
||||
|
||||
const oldDate = new Date('2020-01-01T00:00:00.000Z');
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
fileCreatedAt: oldDate.toISOString(),
|
||||
fileModifiedAt: oldDate.toISOString(),
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
// Should fall back to file timestamps, which we set to 2020-01-01
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2020-01-01T00:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle all testable date tags present to verify complete priority order', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('all-testable-date-tags.jpg', {
|
||||
// All TESTABLE date tags to JPEG format (writable AND readable)
|
||||
DateTimeOriginal: '2023:04:04 04:00:00', // TESTABLE - highest priority among readable tags
|
||||
CreateDate: '2023:05:05 05:00:00', // TESTABLE
|
||||
CreationDate: '2023:07:07 07:00:00', // TESTABLE
|
||||
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
|
||||
// Note: Excluded non-testable tags:
|
||||
// SubSec tags: writable but not readable from JPEG
|
||||
// Non-writable tags: MediaCreateDate, DateTimeCreated, DateTimeUTC, SonyDateTime2, etc.
|
||||
// Time-only/date-only tags: already excluded from EXIF_DATE_TAGS
|
||||
});
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
// Should use DateTimeOriginal as it has the highest priority among testable tags
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2023-04-04T04:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
|
||||
it('should use CreationDate when SubSec tags are missing', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('creation-date-priority.jpg', {
|
||||
CreationDate: '2023:07:07 07:00:00', // WRITABLE
|
||||
GPSDateTime: '2023:10:10 10:00:00', // WRITABLE
|
||||
// Note: DateTimeCreated, DateTimeUTC, SonyDateTime2 are NOT writable to JPEG
|
||||
// Note: TimeCreated and GPSDateStamp are excluded from EXIF_DATE_TAGS (time-only/date-only)
|
||||
// Exclude SubSec and standard EXIF tags
|
||||
SubSecDateTimeOriginal: undefined,
|
||||
DateTimeOriginal: undefined,
|
||||
SubSecCreateDate: undefined,
|
||||
CreateDate: undefined,
|
||||
});
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
// Should use CreationDate when available
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2023-07-07T07:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
|
||||
it('should skip invalid date formats and use next valid tag', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('invalid-date-handling.jpg', {
|
||||
// Note: Testing invalid date handling with only WRITABLE tags
|
||||
GPSDateTime: '2023:10:10 10:00:00', // WRITABLE - Valid date
|
||||
CreationDate: '2023:13:13 13:00:00', // WRITABLE - Invalid date (month 13), should be skipped
|
||||
// Note: TimeCreated excluded (time-only), DateTimeCreated not writable to JPEG
|
||||
// Exclude other date tags
|
||||
SubSecDateTimeOriginal: undefined,
|
||||
DateTimeOriginal: undefined,
|
||||
SubSecCreateDate: undefined,
|
||||
CreateDate: undefined,
|
||||
});
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
// Should skip invalid dates and use the first valid one (GPSDateTime)
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2023-10-10T10:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /assets/exist', () => {
|
||||
it('ignores invalid deviceAssetIds', async () => {
|
||||
const response = await utils.checkExistingAssets(user1.accessToken, {
|
||||
|
||||
@@ -1,146 +0,0 @@
|
||||
import { LoginResponseDto, login, signUpAdmin } from '@immich/sdk';
|
||||
import { loginDto, signupDto } from 'src/fixtures';
|
||||
import { errorDto, loginResponseDto, signupResponseDto } from 'src/responses';
|
||||
import { app, utils } from 'src/utils';
|
||||
import request from 'supertest';
|
||||
import { beforeEach, describe, expect, it } from 'vitest';
|
||||
|
||||
const { email, password } = signupDto.admin;
|
||||
|
||||
describe(`/auth/admin-sign-up`, () => {
|
||||
beforeEach(async () => {
|
||||
await utils.resetDatabase();
|
||||
});
|
||||
|
||||
describe('POST /auth/admin-sign-up', () => {
|
||||
it(`should sign up the admin`, async () => {
|
||||
const { status, body } = await request(app).post('/auth/admin-sign-up').send(signupDto.admin);
|
||||
expect(status).toBe(201);
|
||||
expect(body).toEqual(signupResponseDto.admin);
|
||||
});
|
||||
|
||||
it('should not allow a second admin to sign up', async () => {
|
||||
await signUpAdmin({ signUpDto: signupDto.admin });
|
||||
|
||||
const { status, body } = await request(app).post('/auth/admin-sign-up').send(signupDto.admin);
|
||||
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.alreadyHasAdmin);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('/auth/*', () => {
|
||||
let admin: LoginResponseDto;
|
||||
|
||||
beforeEach(async () => {
|
||||
await utils.resetDatabase();
|
||||
await signUpAdmin({ signUpDto: signupDto.admin });
|
||||
admin = await login({ loginCredentialDto: loginDto.admin });
|
||||
});
|
||||
|
||||
describe(`POST /auth/login`, () => {
|
||||
it('should reject an incorrect password', async () => {
|
||||
const { status, body } = await request(app).post('/auth/login').send({ email, password: 'incorrect' });
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.incorrectLogin);
|
||||
});
|
||||
|
||||
it('should accept a correct password', async () => {
|
||||
const { status, body, headers } = await request(app).post('/auth/login').send({ email, password });
|
||||
expect(status).toBe(201);
|
||||
expect(body).toEqual(loginResponseDto.admin);
|
||||
|
||||
const token = body.accessToken;
|
||||
expect(token).toBeDefined();
|
||||
|
||||
const cookies = headers['set-cookie'];
|
||||
expect(cookies).toHaveLength(3);
|
||||
expect(cookies[0].split(';').map((item) => item.trim())).toEqual([
|
||||
`immich_access_token=${token}`,
|
||||
'Max-Age=34560000',
|
||||
'Path=/',
|
||||
expect.stringContaining('Expires='),
|
||||
'HttpOnly',
|
||||
'SameSite=Lax',
|
||||
]);
|
||||
expect(cookies[1].split(';').map((item) => item.trim())).toEqual([
|
||||
'immich_auth_type=password',
|
||||
'Max-Age=34560000',
|
||||
'Path=/',
|
||||
expect.stringContaining('Expires='),
|
||||
'HttpOnly',
|
||||
'SameSite=Lax',
|
||||
]);
|
||||
expect(cookies[2].split(';').map((item) => item.trim())).toEqual([
|
||||
'immich_is_authenticated=true',
|
||||
'Max-Age=34560000',
|
||||
'Path=/',
|
||||
expect.stringContaining('Expires='),
|
||||
'SameSite=Lax',
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /auth/validateToken', () => {
|
||||
it('should reject an invalid token', async () => {
|
||||
const { status, body } = await request(app).post(`/auth/validateToken`).set('Authorization', 'Bearer 123');
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.invalidToken);
|
||||
});
|
||||
|
||||
it('should accept a valid token', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post(`/auth/validateToken`)
|
||||
.send({})
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toBe(200);
|
||||
expect(body).toEqual({ authStatus: true });
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /auth/change-password', () => {
|
||||
it('should require the current password', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post(`/auth/change-password`)
|
||||
.send({ password: 'wrong-password', newPassword: 'Password1234' })
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.wrongPassword);
|
||||
});
|
||||
|
||||
it('should change the password', async () => {
|
||||
const { status } = await request(app)
|
||||
.post(`/auth/change-password`)
|
||||
.send({ password, newPassword: 'Password1234' })
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toBe(200);
|
||||
|
||||
await login({
|
||||
loginCredentialDto: {
|
||||
email: 'admin@immich.cloud',
|
||||
password: 'Password1234',
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /auth/logout', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).post(`/auth/logout`);
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should logout the user', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post(`/auth/logout`)
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toBe(200);
|
||||
expect(body).toEqual({
|
||||
successful: true,
|
||||
redirectUri: '/auth/login?autoLaunch=0',
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -6,7 +6,7 @@ import {
|
||||
createMemory,
|
||||
getMemory,
|
||||
} from '@immich/sdk';
|
||||
import { createUserDto, uuidDto } from 'src/fixtures';
|
||||
import { createUserDto } from 'src/fixtures';
|
||||
import { errorDto } from 'src/responses';
|
||||
import { app, asBearerAuth, utils } from 'src/utils';
|
||||
import request from 'supertest';
|
||||
@@ -17,7 +17,6 @@ describe('/memories', () => {
|
||||
let user: LoginResponseDto;
|
||||
let adminAsset: AssetMediaResponseDto;
|
||||
let userAsset1: AssetMediaResponseDto;
|
||||
let userAsset2: AssetMediaResponseDto;
|
||||
let userMemory: MemoryResponseDto;
|
||||
|
||||
beforeAll(async () => {
|
||||
@@ -25,10 +24,9 @@ describe('/memories', () => {
|
||||
|
||||
admin = await utils.adminSetup();
|
||||
user = await utils.userSetup(admin.accessToken, createUserDto.user1);
|
||||
[adminAsset, userAsset1, userAsset2] = await Promise.all([
|
||||
[adminAsset, userAsset1] = await Promise.all([
|
||||
utils.createAsset(admin.accessToken),
|
||||
utils.createAsset(user.accessToken),
|
||||
utils.createAsset(user.accessToken),
|
||||
]);
|
||||
userMemory = await createMemory(
|
||||
{
|
||||
@@ -43,121 +41,7 @@ describe('/memories', () => {
|
||||
);
|
||||
});
|
||||
|
||||
describe('GET /memories', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get('/memories');
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /memories', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).post('/memories');
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should validate data when type is on this day', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/memories')
|
||||
.set('Authorization', `Bearer ${user.accessToken}`)
|
||||
.send({
|
||||
type: 'on_this_day',
|
||||
data: {},
|
||||
memoryAt: new Date(2021).toISOString(),
|
||||
});
|
||||
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(
|
||||
errorDto.badRequest(['data.year must be a positive number', 'data.year must be an integer number']),
|
||||
);
|
||||
});
|
||||
|
||||
it('should create a new memory', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/memories')
|
||||
.set('Authorization', `Bearer ${user.accessToken}`)
|
||||
.send({
|
||||
type: 'on_this_day',
|
||||
data: { year: 2021 },
|
||||
memoryAt: new Date(2021).toISOString(),
|
||||
});
|
||||
|
||||
expect(status).toBe(201);
|
||||
expect(body).toEqual({
|
||||
id: expect.any(String),
|
||||
type: 'on_this_day',
|
||||
data: { year: 2021 },
|
||||
createdAt: expect.any(String),
|
||||
updatedAt: expect.any(String),
|
||||
isSaved: false,
|
||||
memoryAt: expect.any(String),
|
||||
ownerId: user.userId,
|
||||
assets: [],
|
||||
});
|
||||
});
|
||||
|
||||
it('should create a new memory (with assets)', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/memories')
|
||||
.set('Authorization', `Bearer ${user.accessToken}`)
|
||||
.send({
|
||||
type: 'on_this_day',
|
||||
data: { year: 2021 },
|
||||
memoryAt: new Date(2021).toISOString(),
|
||||
assetIds: [userAsset1.id, userAsset2.id],
|
||||
});
|
||||
|
||||
expect(status).toBe(201);
|
||||
expect(body).toMatchObject({
|
||||
id: expect.any(String),
|
||||
assets: expect.arrayContaining([
|
||||
expect.objectContaining({ id: userAsset1.id }),
|
||||
expect.objectContaining({ id: userAsset2.id }),
|
||||
]),
|
||||
});
|
||||
expect(body.assets).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('should create a new memory and ignore assets the user does not have access to', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/memories')
|
||||
.set('Authorization', `Bearer ${user.accessToken}`)
|
||||
.send({
|
||||
type: 'on_this_day',
|
||||
data: { year: 2021 },
|
||||
memoryAt: new Date(2021).toISOString(),
|
||||
assetIds: [userAsset1.id, adminAsset.id],
|
||||
});
|
||||
|
||||
expect(status).toBe(201);
|
||||
expect(body).toMatchObject({
|
||||
id: expect.any(String),
|
||||
assets: [expect.objectContaining({ id: userAsset1.id })],
|
||||
});
|
||||
expect(body.assets).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /memories/:id', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get(`/memories/${uuidDto.invalid}`);
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require a valid id', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get(`/memories/${uuidDto.invalid}`)
|
||||
.set('Authorization', `Bearer ${user.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
|
||||
});
|
||||
|
||||
it('should require access', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get(`/memories/${userMemory.id}`)
|
||||
@@ -176,22 +60,6 @@ describe('/memories', () => {
|
||||
});
|
||||
|
||||
describe('PUT /memories/:id', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).put(`/memories/${uuidDto.invalid}`).send({ isSaved: true });
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require a valid id', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/memories/${uuidDto.invalid}`)
|
||||
.send({ isSaved: true })
|
||||
.set('Authorization', `Bearer ${user.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
|
||||
});
|
||||
|
||||
it('should require access', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/memories/${userMemory.id}`)
|
||||
@@ -218,23 +86,6 @@ describe('/memories', () => {
|
||||
});
|
||||
|
||||
describe('PUT /memories/:id/assets', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/memories/${userMemory.id}/assets`)
|
||||
.send({ ids: [userAsset1.id] });
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require a valid id', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/memories/${uuidDto.invalid}/assets`)
|
||||
.send({ ids: [userAsset1.id] })
|
||||
.set('Authorization', `Bearer ${user.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
|
||||
});
|
||||
|
||||
it('should require access', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/memories/${userMemory.id}/assets`)
|
||||
@@ -244,15 +95,6 @@ describe('/memories', () => {
|
||||
expect(body).toEqual(errorDto.noPermission);
|
||||
});
|
||||
|
||||
it('should require a valid asset id', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/memories/${userMemory.id}/assets`)
|
||||
.send({ ids: [uuidDto.invalid] })
|
||||
.set('Authorization', `Bearer ${user.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['each value in ids must be a UUID']));
|
||||
});
|
||||
|
||||
it('should require asset access', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/memories/${userMemory.id}/assets`)
|
||||
@@ -279,23 +121,6 @@ describe('/memories', () => {
|
||||
});
|
||||
|
||||
describe('DELETE /memories/:id/assets', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/memories/${userMemory.id}/assets`)
|
||||
.send({ ids: [userAsset1.id] });
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require a valid id', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/memories/${uuidDto.invalid}/assets`)
|
||||
.send({ ids: [userAsset1.id] })
|
||||
.set('Authorization', `Bearer ${user.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
|
||||
});
|
||||
|
||||
it('should require access', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/memories/${userMemory.id}/assets`)
|
||||
@@ -305,15 +130,6 @@ describe('/memories', () => {
|
||||
expect(body).toEqual(errorDto.noPermission);
|
||||
});
|
||||
|
||||
it('should require a valid asset id', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/memories/${userMemory.id}/assets`)
|
||||
.send({ ids: [uuidDto.invalid] })
|
||||
.set('Authorization', `Bearer ${user.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['each value in ids must be a UUID']));
|
||||
});
|
||||
|
||||
it('should only remove assets in the memory', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/memories/${userMemory.id}/assets`)
|
||||
@@ -340,21 +156,6 @@ describe('/memories', () => {
|
||||
});
|
||||
|
||||
describe('DELETE /memories/:id', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).delete(`/memories/${uuidDto.invalid}`);
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require a valid id', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/memories/${uuidDto.invalid}`)
|
||||
.set('Authorization', `Bearer ${user.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
|
||||
});
|
||||
|
||||
it('should require access', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/memories/${userMemory.id}`)
|
||||
|
||||
@@ -117,8 +117,25 @@ describe('/shared-links', () => {
|
||||
const resp = await request(shareUrl).get(`/${linkWithAssets.key}`);
|
||||
expect(resp.status).toBe(200);
|
||||
expect(resp.header['content-type']).toContain('text/html');
|
||||
expect(resp.text).toContain(`<meta property="og:image" content="http://127.0.0.1:2285`);
|
||||
});
|
||||
|
||||
it('should fall back to my.immich.app og:image meta tag for shared asset if Host header is not present', async () => {
|
||||
const resp = await request(shareUrl).get(`/${linkWithAssets.key}`).set('Host', '');
|
||||
expect(resp.status).toBe(200);
|
||||
expect(resp.header['content-type']).toContain('text/html');
|
||||
expect(resp.text).toContain(`<meta property="og:image" content="https://my.immich.app`);
|
||||
});
|
||||
|
||||
it('should return 404 for an invalid shared link', async () => {
|
||||
const resp = await request(shareUrl).get(`/invalid-key`);
|
||||
expect(resp.status).toBe(404);
|
||||
expect(resp.header['content-type']).toContain('text/html');
|
||||
expect(resp.text).not.toContain(`og:type`);
|
||||
expect(resp.text).not.toContain(`og:title`);
|
||||
expect(resp.text).not.toContain(`og:description`);
|
||||
expect(resp.text).not.toContain(`og:image`);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /shared-links', () => {
|
||||
|
||||
@@ -1,230 +0,0 @@
|
||||
import {
|
||||
AssetMediaResponseDto,
|
||||
AssetVisibility,
|
||||
LoginResponseDto,
|
||||
SharedLinkType,
|
||||
TimeBucketAssetResponseDto,
|
||||
} from '@immich/sdk';
|
||||
import { DateTime } from 'luxon';
|
||||
import { createUserDto } from 'src/fixtures';
|
||||
import { errorDto } from 'src/responses';
|
||||
import { app, utils } from 'src/utils';
|
||||
import request from 'supertest';
|
||||
import { beforeAll, describe, expect, it } from 'vitest';
|
||||
|
||||
// TODO this should probably be a test util function
|
||||
const today = DateTime.fromObject({
|
||||
year: 2023,
|
||||
month: 11,
|
||||
day: 3,
|
||||
}) as DateTime<true>;
|
||||
const yesterday = today.minus({ days: 1 });
|
||||
|
||||
describe('/timeline', () => {
|
||||
let admin: LoginResponseDto;
|
||||
let user: LoginResponseDto;
|
||||
let timeBucketUser: LoginResponseDto;
|
||||
|
||||
let user1Assets: AssetMediaResponseDto[];
|
||||
let user2Assets: AssetMediaResponseDto[];
|
||||
|
||||
beforeAll(async () => {
|
||||
await utils.resetDatabase();
|
||||
admin = await utils.adminSetup({ onboarding: false });
|
||||
[user, timeBucketUser] = await Promise.all([
|
||||
utils.userSetup(admin.accessToken, createUserDto.create('1')),
|
||||
utils.userSetup(admin.accessToken, createUserDto.create('time-bucket')),
|
||||
]);
|
||||
|
||||
user1Assets = await Promise.all([
|
||||
utils.createAsset(user.accessToken),
|
||||
utils.createAsset(user.accessToken),
|
||||
utils.createAsset(user.accessToken, {
|
||||
isFavorite: true,
|
||||
fileCreatedAt: yesterday.toISO(),
|
||||
fileModifiedAt: yesterday.toISO(),
|
||||
assetData: { filename: 'example.mp4' },
|
||||
}),
|
||||
utils.createAsset(user.accessToken),
|
||||
utils.createAsset(user.accessToken),
|
||||
]);
|
||||
|
||||
user2Assets = await Promise.all([
|
||||
utils.createAsset(timeBucketUser.accessToken, { fileCreatedAt: new Date('1970-01-01').toISOString() }),
|
||||
utils.createAsset(timeBucketUser.accessToken, { fileCreatedAt: new Date('1970-02-10').toISOString() }),
|
||||
utils.createAsset(timeBucketUser.accessToken, { fileCreatedAt: new Date('1970-02-11').toISOString() }),
|
||||
utils.createAsset(timeBucketUser.accessToken, { fileCreatedAt: new Date('1970-02-11').toISOString() }),
|
||||
utils.createAsset(timeBucketUser.accessToken, { fileCreatedAt: new Date('1970-02-12').toISOString() }),
|
||||
]);
|
||||
|
||||
await utils.deleteAssets(timeBucketUser.accessToken, [user2Assets[4].id]);
|
||||
});
|
||||
|
||||
describe('GET /timeline/buckets', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get('/timeline/buckets');
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should get time buckets by month', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/timeline/buckets')
|
||||
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`);
|
||||
|
||||
expect(status).toBe(200);
|
||||
expect(body).toEqual(
|
||||
expect.arrayContaining([
|
||||
{ count: 3, timeBucket: '1970-02-01' },
|
||||
{ count: 1, timeBucket: '1970-01-01' },
|
||||
]),
|
||||
);
|
||||
});
|
||||
|
||||
it('should not allow access for unrelated shared links', async () => {
|
||||
const sharedLink = await utils.createSharedLink(user.accessToken, {
|
||||
type: SharedLinkType.Individual,
|
||||
assetIds: user1Assets.map(({ id }) => id),
|
||||
});
|
||||
|
||||
const { status, body } = await request(app).get('/timeline/buckets').query({ key: sharedLink.key });
|
||||
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.noPermission);
|
||||
});
|
||||
|
||||
it('should return error if time bucket is requested with partners asset and archived', async () => {
|
||||
const req1 = await request(app)
|
||||
.get('/timeline/buckets')
|
||||
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
|
||||
.query({ withPartners: true, visibility: AssetVisibility.Archive });
|
||||
|
||||
expect(req1.status).toBe(400);
|
||||
expect(req1.body).toEqual(errorDto.badRequest());
|
||||
|
||||
const req2 = await request(app)
|
||||
.get('/timeline/buckets')
|
||||
.set('Authorization', `Bearer ${user.accessToken}`)
|
||||
.query({ withPartners: true, visibility: undefined });
|
||||
|
||||
expect(req2.status).toBe(400);
|
||||
expect(req2.body).toEqual(errorDto.badRequest());
|
||||
});
|
||||
|
||||
it('should return error if time bucket is requested with partners asset and favorite', async () => {
|
||||
const req1 = await request(app)
|
||||
.get('/timeline/buckets')
|
||||
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
|
||||
.query({ withPartners: true, isFavorite: true });
|
||||
|
||||
expect(req1.status).toBe(400);
|
||||
expect(req1.body).toEqual(errorDto.badRequest());
|
||||
|
||||
const req2 = await request(app)
|
||||
.get('/timeline/buckets')
|
||||
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
|
||||
.query({ withPartners: true, isFavorite: false });
|
||||
|
||||
expect(req2.status).toBe(400);
|
||||
expect(req2.body).toEqual(errorDto.badRequest());
|
||||
});
|
||||
|
||||
it('should return error if time bucket is requested with partners asset and trash', async () => {
|
||||
const req = await request(app)
|
||||
.get('/timeline/buckets')
|
||||
.set('Authorization', `Bearer ${user.accessToken}`)
|
||||
.query({ withPartners: true, isTrashed: true });
|
||||
|
||||
expect(req.status).toBe(400);
|
||||
expect(req.body).toEqual(errorDto.badRequest());
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /timeline/bucket', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get('/timeline/bucket').query({
|
||||
timeBucket: '1900-01-01',
|
||||
});
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should handle 5 digit years', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/timeline/bucket')
|
||||
.query({ timeBucket: '012345-01-01' })
|
||||
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`);
|
||||
|
||||
expect(status).toBe(200);
|
||||
expect(body).toEqual({
|
||||
city: [],
|
||||
country: [],
|
||||
duration: [],
|
||||
id: [],
|
||||
visibility: [],
|
||||
isFavorite: [],
|
||||
isImage: [],
|
||||
isTrashed: [],
|
||||
livePhotoVideoId: [],
|
||||
fileCreatedAt: [],
|
||||
localOffsetHours: [],
|
||||
ownerId: [],
|
||||
projectionType: [],
|
||||
ratio: [],
|
||||
status: [],
|
||||
thumbhash: [],
|
||||
});
|
||||
});
|
||||
|
||||
// TODO enable date string validation while still accepting 5 digit years
|
||||
// it('should fail if time bucket is invalid', async () => {
|
||||
// const { status, body } = await request(app)
|
||||
// .get('/timeline/bucket')
|
||||
// .set('Authorization', `Bearer ${user.accessToken}`)
|
||||
// .query({ timeBucket: 'foo' });
|
||||
|
||||
// expect(status).toBe(400);
|
||||
// expect(body).toEqual(errorDto.badRequest);
|
||||
// });
|
||||
|
||||
it('should return time bucket', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/timeline/bucket')
|
||||
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
|
||||
.query({ timeBucket: '1970-02-10' });
|
||||
|
||||
expect(status).toBe(200);
|
||||
expect(body).toEqual({
|
||||
city: [],
|
||||
country: [],
|
||||
duration: [],
|
||||
id: [],
|
||||
visibility: [],
|
||||
isFavorite: [],
|
||||
isImage: [],
|
||||
isTrashed: [],
|
||||
livePhotoVideoId: [],
|
||||
fileCreatedAt: [],
|
||||
localOffsetHours: [],
|
||||
ownerId: [],
|
||||
projectionType: [],
|
||||
ratio: [],
|
||||
status: [],
|
||||
thumbhash: [],
|
||||
});
|
||||
});
|
||||
|
||||
it('should return time bucket in trash', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/timeline/bucket')
|
||||
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
|
||||
.query({ timeBucket: '1970-02-01T00:00:00.000Z', isTrashed: true });
|
||||
|
||||
expect(status).toBe(200);
|
||||
|
||||
const timeBucket: TimeBucketAssetResponseDto = body;
|
||||
expect(timeBucket.isTrashed).toEqual([true]);
|
||||
});
|
||||
});
|
||||
});
|
||||
e2e/src/generate-date-tag-test-images.ts (new file, 178 lines)
@@ -0,0 +1,178 @@
#!/usr/bin/env node

/**
 * Script to generate test images with additional EXIF date tags
 * This creates actual JPEG images with embedded metadata for testing
 * Images are generated into e2e/test-assets/metadata/dates/
 */

import { execSync } from 'node:child_process';
import { writeFileSync } from 'node:fs';
import { dirname, join } from 'node:path';
import { fileURLToPath } from 'node:url';
import sharp from 'sharp';

interface TestImage {
  filename: string;
  description: string;
  exifTags: Record<string, string>;
}

const testImages: TestImage[] = [
  {
    filename: 'time-created.jpg',
    description: 'Image with TimeCreated tag',
    exifTags: {
      TimeCreated: '2023:11:15 14:30:00',
      Make: 'Canon',
      Model: 'EOS R5',
    },
  },
  {
    filename: 'gps-datetime.jpg',
    description: 'Image with GPSDateTime and coordinates',
    exifTags: {
      GPSDateTime: '2023:11:15 12:30:00Z',
      GPSLatitude: '37.7749',
      GPSLongitude: '-122.4194',
      GPSLatitudeRef: 'N',
      GPSLongitudeRef: 'W',
    },
  },
  {
    filename: 'datetime-utc.jpg',
    description: 'Image with DateTimeUTC tag',
    exifTags: {
      DateTimeUTC: '2023:11:15 10:30:00',
      Make: 'Nikon',
      Model: 'D850',
    },
  },
  {
    filename: 'gps-datestamp.jpg',
    description: 'Image with GPSDateStamp and GPSTimeStamp',
    exifTags: {
      GPSDateStamp: '2023:11:15',
      GPSTimeStamp: '08:30:00',
      GPSLatitude: '51.5074',
      GPSLongitude: '-0.1278',
      GPSLatitudeRef: 'N',
      GPSLongitudeRef: 'W',
    },
  },
  {
    filename: 'sony-datetime2.jpg',
    description: 'Sony camera image with SonyDateTime2 tag',
    exifTags: {
      SonyDateTime2: '2023:11:15 06:30:00',
      Make: 'SONY',
      Model: 'ILCE-7RM5',
    },
  },
  {
    filename: 'date-priority-test.jpg',
    description: 'Image with multiple date tags to test priority',
    exifTags: {
      SubSecDateTimeOriginal: '2023:01:01 01:00:00',
      DateTimeOriginal: '2023:02:02 02:00:00',
      SubSecCreateDate: '2023:03:03 03:00:00',
      CreateDate: '2023:04:04 04:00:00',
      CreationDate: '2023:05:05 05:00:00',
      DateTimeCreated: '2023:06:06 06:00:00',
      TimeCreated: '2023:07:07 07:00:00',
      GPSDateTime: '2023:08:08 08:00:00',
      DateTimeUTC: '2023:09:09 09:00:00',
      GPSDateStamp: '2023:10:10',
      SonyDateTime2: '2023:11:11 11:00:00',
    },
  },
  {
    filename: 'new-tags-only.jpg',
    description: 'Image with only additional date tags (no standard tags)',
    exifTags: {
      TimeCreated: '2023:12:01 15:45:30',
      GPSDateTime: '2023:12:01 13:45:30Z',
      DateTimeUTC: '2023:12:01 13:45:30',
      GPSDateStamp: '2023:12:01',
      SonyDateTime2: '2023:12:01 08:45:30',
      GPSLatitude: '40.7128',
      GPSLongitude: '-74.0060',
      GPSLatitudeRef: 'N',
      GPSLongitudeRef: 'W',
    },
  },
];

const generateTestImages = async (): Promise<void> => {
  // Target directory: e2e/test-assets/metadata/dates/
  // Current file is in: e2e/src/
  const __filename = fileURLToPath(import.meta.url);
  const __dirname = dirname(__filename);
  const targetDir = join(__dirname, '..', 'test-assets', 'metadata', 'dates');

  console.log('Generating test images with additional EXIF date tags...');
  console.log(`Target directory: ${targetDir}`);

  for (const image of testImages) {
    try {
      const imagePath = join(targetDir, image.filename);

      // Create unique JPEG file using Sharp
      const r = Math.floor(Math.random() * 256);
      const g = Math.floor(Math.random() * 256);
      const b = Math.floor(Math.random() * 256);

      const jpegData = await sharp({
        create: {
          width: 100,
          height: 100,
          channels: 3,
          background: { r, g, b },
        },
      })
        .jpeg({ quality: 90 })
        .toBuffer();

      writeFileSync(imagePath, jpegData);

      // Build exiftool command to add EXIF data
      const exifArgs = Object.entries(image.exifTags)
        .map(([tag, value]) => `-${tag}="${value}"`)
        .join(' ');

      const command = `exiftool ${exifArgs} -overwrite_original "${imagePath}"`;

      console.log(`Creating ${image.filename}: ${image.description}`);
      execSync(command, { stdio: 'pipe' });

      // Verify the tags were written
      const verifyCommand = `exiftool -json "${imagePath}"`;
      const result = execSync(verifyCommand, { encoding: 'utf8' });
      const metadata = JSON.parse(result)[0];

      console.log(`  ✓ Created with ${Object.keys(image.exifTags).length} EXIF tags`);

      // Log first date tag found for verification
      const firstDateTag = Object.keys(image.exifTags).find(
        (tag) => tag.includes('Date') || tag.includes('Time') || tag.includes('Created'),
      );
      if (firstDateTag && metadata[firstDateTag]) {
        console.log(`  ✓ Verified ${firstDateTag}: ${metadata[firstDateTag]}`);
      }
    } catch (error) {
      console.error(`Failed to create ${image.filename}:`, (error as Error).message);
    }
  }

  console.log('\nTest image generation complete!');
  console.log('Files created in:', targetDir);
  console.log('\nTo test these images:');
  console.log(`cd ${targetDir} && exiftool -time:all -gps:all *.jpg`);
};

export { generateTestImages };

// Run the generator if this file is executed directly
if (import.meta.url === `file://${process.argv[1]}`) {
  generateTestImages().catch(console.error);
}
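A minimal sketch of how this generator could be invoked from a pretest hook, assuming the e2e package can execute TypeScript directly (for example through tsx or a vitest globalSetup file) and that exiftool and the sharp dependency are available; only the file name and its export come from the diff, the rest is illustrative:

// Illustrative only: regenerate the date-tag fixtures before metadata e2e runs.
// Assumes exiftool is on PATH and sharp is installed in the e2e workspace.
import { generateTestImages } from './generate-date-tag-test-images';

export const setup = async (): Promise<void> => {
  // The generator logs progress and catches per-image failures itself,
  // so a rejection here points at a broken environment rather than one bad image.
  await generateTestImages();
};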
@@ -60,6 +60,7 @@ import { io, type Socket } from 'socket.io-client';
import { loginDto, signupDto } from 'src/fixtures';
import { makeRandomImage } from 'src/generators';
import request from 'supertest';
export type { Emitter } from '@socket.io/component-emitter';

type CommandResponse = { stdout: string; stderr: string; exitCode: number | null };
type EventType = 'assetUpload' | 'assetUpdate' | 'assetDelete' | 'userDelete' | 'assetHidden';
@@ -78,16 +79,16 @@ export const tempDir = tmpdir();
export const asBearerAuth = (accessToken: string) => ({ Authorization: `Bearer ${accessToken}` });
export const asKeyAuth = (key: string) => ({ 'x-api-key': key });
export const immichCli = (args: string[]) =>
  executeCommand('node', ['node_modules/.bin/immich', '-d', `/${tempDir}/immich/`, ...args]).promise;
  executeCommand('pnpm', ['exec', 'immich', '-d', `/${tempDir}/immich/`, ...args], { cwd: '../cli' }).promise;
export const immichAdmin = (args: string[]) =>
  executeCommand('docker', ['exec', '-i', 'immich-e2e-server', '/bin/bash', '-c', `immich-admin ${args.join(' ')}`]);
export const specialCharStrings = ["'", '"', ',', '{', '}', '*'];
export const TEN_TIMES = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];

const executeCommand = (command: string, args: string[]) => {
const executeCommand = (command: string, args: string[], options?: { cwd?: string }) => {
  let _resolve: (value: CommandResponse) => void;
  const promise = new Promise<CommandResponse>((resolve) => (_resolve = resolve));
  const child = spawn(command, args, { stdio: 'pipe' });
  const child = spawn(command, args, { stdio: 'pipe', cwd: options?.cwd });

  let stdout = '';
  let stderr = '';
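For context, a hedged sketch of how the reworked CLI helper might be exercised from a spec; the import path and the --version subcommand are assumptions, and the call only works because of the cwd: '../cli' option added above, which lets pnpm exec immich resolve the CLI from its own workspace:

// Illustrative usage of the updated helper (not part of the diff).
import { describe, expect, it } from 'vitest';
import { immichCli } from 'src/utils';

describe('immich CLI wrapper', () => {
  it('runs the CLI through pnpm from the cli workspace', async () => {
    // immichCli returns the promise side of executeCommand, which resolves
    // once the spawned process exits and its output has been collected.
    const { stdout, exitCode } = await immichCli(['--version']);
    expect(exitCode).toBe(0);
    expect(stdout.length).toBeGreaterThan(0);
  });
});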
Submodule e2e/test-assets updated: 8885d6d01c...18736fc27a
@@ -427,6 +427,7 @@
  "app_settings": "App Settings",
  "appears_in": "Appears in",
  "archive": "Archive",
  "archive_action_prompt": "{count} added to Archive",
  "archive_or_unarchive_photo": "Archive or unarchive photo",
  "archive_page_no_archived_assets": "No archived assets found",
  "archive_page_title": "Archive ({count})",
@@ -702,7 +703,7 @@
  "daily_title_text_date": "E, MMM dd",
  "daily_title_text_date_year": "E, MMM dd, yyyy",
  "dark": "Dark",
  "darkTheme": "Toggle dark theme",
  "dark_theme": "Toggle dark theme",
  "date_after": "Date after",
  "date_and_time": "Date and Time",
  "date_before": "Date before",
@@ -798,6 +799,7 @@
  "edit_key": "Edit key",
  "edit_link": "Edit link",
  "edit_location": "Edit location",
  "edit_location_action_prompt": "{count} location edited",
  "edit_location_dialog_title": "Location",
  "edit_name": "Edit name",
  "edit_people": "Edit people",
@@ -983,6 +985,7 @@
  "failed_to_load_assets": "Failed to load assets",
  "failed_to_load_folder": "Failed to load folder",
  "favorite": "Favorite",
  "favorite_action_prompt": "{count} added to Favorites",
  "favorite_or_unfavorite_photo": "Favorite or unfavorite photo",
  "favorites": "Favorites",
  "favorites_page_no_favorites": "No favorite assets found",
@@ -1245,6 +1248,7 @@
  "more": "More",
  "move": "Move",
  "move_off_locked_folder": "Move out of locked folder",
  "move_to_lock_folder_action_prompt": "{count} added to the locked folder",
  "move_to_locked_folder": "Move to locked folder",
  "move_to_locked_folder_confirmation": "These photos and video will be removed from all albums, and only viewable from the locked folder",
  "moved_to_archive": "Moved {count, plural, one {# asset} other {# assets}} to archive",
@@ -1495,6 +1499,7 @@
  "remove_deleted_assets": "Remove Deleted Assets",
  "remove_from_album": "Remove from album",
  "remove_from_favorites": "Remove from favorites",
  "remove_from_lock_folder_action_prompt": "{count} removed from the locked folder",
  "remove_from_locked_folder": "Remove from locked folder",
  "remove_from_locked_folder_confirmation": "Are you sure you want to move these photos and videos out of the locked folder? They will be visible in your library.",
  "remove_from_shared_link": "Remove from shared link",
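The added *_action_prompt strings reuse the ICU MessageFormat placeholders already present in this file ({count}, plus plural blocks such as the moved_to_archive entry). A standalone sketch of how such a message resolves, using the intl-messageformat package purely for illustration rather than the i18n layer the web app actually uses:

// Illustration of ICU plural formatting like the strings shown above.
import { IntlMessageFormat } from 'intl-messageformat';

const movedToArchive = new IntlMessageFormat(
  'Moved {count, plural, one {# asset} other {# assets}} to archive',
  'en',
);

console.log(movedToArchive.format({ count: 1 })); // Moved 1 asset to archive
console.log(movedToArchive.format({ count: 5 })); // Moved 5 assets to archive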
@@ -1,6 +1,6 @@
ARG DEVICE=cpu

FROM python:3.11-bookworm@sha256:d2621a9f74d31a8a60af19f97b09cc3ac54382c8680b6544018713a12ef6c048 AS builder-cpu
FROM python:3.11-bookworm@sha256:ce3b954c9285a7a145cba620bae03db836ab890b6b9e0d05a3ca522ea00dfbc9 AS builder-cpu

FROM builder-cpu AS builder-openvino

@@ -59,7 +59,7 @@ ENV PYTHONDONTWRITEBYTECODE=1 \

RUN apt-get update && apt-get install -y --no-install-recommends g++

COPY --from=ghcr.io/astral-sh/uv:latest@sha256:4faec156e35a5f345d57804d8858c6ba1cf6352ce5f4bffc11b7fdebdef46a38 /uv /uvx /bin/
COPY --from=ghcr.io/astral-sh/uv:latest@sha256:9653efd4380d5a0e5511e337dcfc3b8ba5bc4e6ea7fa3be7716598261d5503fa /uv /uvx /bin/
RUN --mount=type=cache,target=/root/.cache/uv \
    --mount=type=bind,source=uv.lock,target=uv.lock \
    --mount=type=bind,source=pyproject.toml,target=pyproject.toml \
@@ -68,11 +68,11 @@ RUN if [ "$DEVICE" = "rocm" ]; then \
    uv pip install /opt/onnxruntime_rocm-*.whl; \
    fi

FROM python:3.11-slim-bookworm@sha256:7a3ed1226224bcc1fe5443262363d42f48cf832a540c1836ba8ccbeaadf8637c AS prod-cpu
FROM python:3.11-slim-bookworm@sha256:9e1912aab0a30bbd9488eb79063f68f42a68ab0946cbe98fecf197fe5b085506 AS prod-cpu

ENV LD_PRELOAD=/usr/lib/libmimalloc.so.2

FROM python:3.11-slim-bookworm@sha256:7a3ed1226224bcc1fe5443262363d42f48cf832a540c1836ba8ccbeaadf8637c AS prod-openvino
FROM python:3.11-slim-bookworm@sha256:9e1912aab0a30bbd9488eb79063f68f42a68ab0946cbe98fecf197fe5b085506 AS prod-openvino

RUN apt-get update && \
    apt-get install --no-install-recommends -yqq ocl-icd-libopencl1 wget && \
141  machine-learning/uv.lock  generated
@@ -517,16 +517,16 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "fastapi"
|
||||
version = "0.115.12"
|
||||
version = "0.115.13"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "pydantic" },
|
||||
{ name = "starlette" },
|
||||
{ name = "typing-extensions" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/f4/55/ae499352d82338331ca1e28c7f4a63bfd09479b16395dce38cf50a39e2c2/fastapi-0.115.12.tar.gz", hash = "sha256:1e2c2a2646905f9e83d32f04a3f86aff4a286669c6c950ca95b5fd68c2602681", size = 295236, upload-time = "2025-03-23T22:55:43.822Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/20/64/ec0788201b5554e2a87c49af26b77a4d132f807a0fa9675257ac92c6aa0e/fastapi-0.115.13.tar.gz", hash = "sha256:55d1d25c2e1e0a0a50aceb1c8705cd932def273c102bff0b1c1da88b3c6eb307", size = 295680, upload-time = "2025-06-17T11:49:45.575Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/50/b3/b51f09c2ba432a576fe63758bddc81f78f0c6309d9e5c10d194313bf021e/fastapi-0.115.12-py3-none-any.whl", hash = "sha256:e94613d6c05e27be7ffebdd6ea5f388112e5e430c8f7d6494a9d1d88d43e814d", size = 95164, upload-time = "2025-03-23T22:55:42.101Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/59/4a/e17764385382062b0edbb35a26b7cf76d71e27e456546277a42ba6545c6e/fastapi-0.115.13-py3-none-any.whl", hash = "sha256:0a0cab59afa7bab22f5eb347f8c9864b681558c278395e94035a741fc10cd865", size = 95315, upload-time = "2025-06-17T11:49:44.106Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -900,7 +900,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "huggingface-hub"
|
||||
version = "0.32.4"
|
||||
version = "0.33.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "filelock" },
|
||||
@@ -912,9 +912,9 @@ dependencies = [
|
||||
{ name = "tqdm" },
|
||||
{ name = "typing-extensions" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/60/c8/4f7d270285c46324fd66f62159eb16739aa5696f422dba57678a8c6b78e9/huggingface_hub-0.32.4.tar.gz", hash = "sha256:f61d45cd338736f59fb0e97550b74c24ee771bcc92c05ae0766b9116abe720be", size = 424494, upload-time = "2025-06-03T09:59:46.105Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/91/8a/1362d565fefabaa4185cf3ae842a98dbc5b35146f5694f7080f043a6952f/huggingface_hub-0.33.0.tar.gz", hash = "sha256:aa31f70d29439d00ff7a33837c03f1f9dd83971ce4e29ad664d63ffb17d3bb97", size = 426179, upload-time = "2025-06-11T17:08:07.913Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/67/8b/222140f3cfb6f17b0dd8c4b9a0b36bd4ebefe9fb0098ba35d6960abcda0f/huggingface_hub-0.32.4-py3-none-any.whl", hash = "sha256:37abf8826b38d971f60d3625229221c36e53fe58060286db9baf619cfbf39767", size = 512101, upload-time = "2025-06-03T09:59:44.099Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/33/fb/53587a89fbc00799e4179796f51b3ad713c5de6bb680b2becb6d37c94649/huggingface_hub-0.33.0-py3-none-any.whl", hash = "sha256:e8668875b40c68f9929150d99727d39e5ebb8a05a98e4191b908dc7ded9074b3", size = 514799, upload-time = "2025-06-11T17:08:05.757Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1225,7 +1225,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "locust"
|
||||
version = "2.37.9"
|
||||
version = "2.37.11"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "configargparse" },
|
||||
@@ -1245,14 +1245,14 @@ dependencies = [
|
||||
{ name = "typing-extensions", marker = "python_full_version < '3.11'" },
|
||||
{ name = "werkzeug" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/90/05/2bfdf19756c6a12f6f9513f75340ecf0595d83cab4d9fc91162225908e3d/locust-2.37.9.tar.gz", hash = "sha256:e43673b594ec5ecde4f9ba6e0d5c66c00d7c0ae93591951abe83e8d186c67175", size = 2252507, upload-time = "2025-06-05T09:26:58.186Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/4f/19/66cdab585f7d4385be615d3792402fc75a1bed7519e5283adbe7133dbc78/locust-2.37.11.tar.gz", hash = "sha256:89c79bc599aa57160bd41dd3876e35d8b9dee5abded78e35008d01fd8f1640ed", size = 2252602, upload-time = "2025-06-23T08:22:23.922Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/33/1c/0ece4176231c578e819d970ec08d124492833e50aafd171c582bcc414446/locust-2.37.9-py3-none-any.whl", hash = "sha256:e17da439f3a252d1fb6d4c34daf00d7e8b87e99d833a32e8a79f4f8ebb07767d", size = 2269084, upload-time = "2025-06-05T09:26:56.257Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3a/2d/e5ae05911521bf84113be349d51b16d54589e986837d2d518f63434ea3ec/locust-2.37.11-py3-none-any.whl", hash = "sha256:b826f95fbfd5d9a32df6ab1b74672b88e65bbc33ec99fdc10af98079952ad517", size = 2269179, upload-time = "2025-06-23T08:22:21.067Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "locust-cloud"
|
||||
version = "1.23.1"
|
||||
version = "1.24.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "configargparse" },
|
||||
@@ -1262,9 +1262,9 @@ dependencies = [
|
||||
{ name = "python-socketio", extra = ["client"] },
|
||||
{ name = "tomli", marker = "python_full_version < '3.11'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/bd/7c/d9cbbd051490aeedfbd6ddda8ad48f77dd848ee490f6ebd166d20db5911e/locust_cloud-1.23.1.tar.gz", hash = "sha256:a09161752b8c9a9205e97cef5223ee3ad967bc2d91c52d61952aaa3da6802a55", size = 450937, upload-time = "2025-06-05T06:07:53.773Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/d9/77/bda24167a2b763ba5d3cad1f3fa2a938f5273e51a61bffdbc8dc2e3ba24d/locust_cloud-1.24.2.tar.gz", hash = "sha256:a2656537ff367e6d4d4673477ba9e81ed73a8423a71573cd2512248740eded77", size = 451122, upload-time = "2025-06-23T11:08:00.558Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/7e/01/5af43edee540e38ba0ee0a2e3beb72c50073e0f646bb543a8b34650315e3/locust_cloud-1.23.1-py3-none-any.whl", hash = "sha256:11677895c6ed6d0beef1b425a6f04f10ea2cfcaeaefbf00a97fb3c9134296e54", size = 408323, upload-time = "2025-06-05T06:07:51.947Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f4/38/8cda8aa1c6dfe5c5abbf69a9b10c03585c37eff64ca92733a291806052ac/locust_cloud-1.24.2-py3-none-any.whl", hash = "sha256:64a5e6f2bf0a1a012d9805291d44fb57e57535c2b5c0fa5bc87ba0d7cce9ef9c", size = 408594, upload-time = "2025-06-23T11:07:59.092Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1415,7 +1415,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "mypy"
|
||||
version = "1.16.0"
|
||||
version = "1.16.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "mypy-extensions" },
|
||||
@@ -1423,33 +1423,33 @@ dependencies = [
|
||||
{ name = "tomli", marker = "python_full_version < '3.11'" },
|
||||
{ name = "typing-extensions" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/d4/38/13c2f1abae94d5ea0354e146b95a1be9b2137a0d506728e0da037c4276f6/mypy-1.16.0.tar.gz", hash = "sha256:84b94283f817e2aa6350a14b4a8fb2a35a53c286f97c9d30f53b63620e7af8ab", size = 3323139, upload-time = "2025-05-29T13:46:12.532Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/81/69/92c7fa98112e4d9eb075a239caa4ef4649ad7d441545ccffbd5e34607cbb/mypy-1.16.1.tar.gz", hash = "sha256:6bd00a0a2094841c5e47e7374bb42b83d64c527a502e3334e1173a0c24437bab", size = 3324747, upload-time = "2025-06-16T16:51:35.145Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/64/5e/a0485f0608a3d67029d3d73cec209278b025e3493a3acfda3ef3a88540fd/mypy-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7909541fef256527e5ee9c0a7e2aeed78b6cda72ba44298d1334fe7881b05c5c", size = 10967416, upload-time = "2025-05-29T13:34:17.783Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4b/53/5837c221f74c0d53a4bfc3003296f8179c3a2a7f336d7de7bbafbe96b688/mypy-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e71d6f0090c2256c713ed3d52711d01859c82608b5d68d4fa01a3fe30df95571", size = 10087654, upload-time = "2025-05-29T13:32:37.878Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/29/59/5fd2400352c3093bed4c09017fe671d26bc5bb7e6ef2d4bf85f2a2488104/mypy-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:936ccfdd749af4766be824268bfe22d1db9eb2f34a3ea1d00ffbe5b5265f5491", size = 11875192, upload-time = "2025-05-29T13:34:54.281Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ad/3e/4bfec74663a64c2012f3e278dbc29ffe82b121bc551758590d1b6449ec0c/mypy-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4086883a73166631307fdd330c4a9080ce24913d4f4c5ec596c601b3a4bdd777", size = 12612939, upload-time = "2025-05-29T13:33:14.766Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/88/1f/fecbe3dcba4bf2ca34c26ca016383a9676711907f8db4da8354925cbb08f/mypy-1.16.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:feec38097f71797da0231997e0de3a58108c51845399669ebc532c815f93866b", size = 12874719, upload-time = "2025-05-29T13:21:52.09Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f3/51/c2d280601cd816c43dfa512a759270d5a5ef638d7ac9bea9134c8305a12f/mypy-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:09a8da6a0ee9a9770b8ff61b39c0bb07971cda90e7297f4213741b48a0cc8d93", size = 9487053, upload-time = "2025-05-29T13:33:29.797Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/24/c4/ff2f79db7075c274fe85b5fff8797d29c6b61b8854c39e3b7feb556aa377/mypy-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9f826aaa7ff8443bac6a494cf743f591488ea940dd360e7dd330e30dd772a5ab", size = 10884498, upload-time = "2025-05-29T13:18:54.066Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/02/07/12198e83006235f10f6a7808917376b5d6240a2fd5dce740fe5d2ebf3247/mypy-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:82d056e6faa508501af333a6af192c700b33e15865bda49611e3d7d8358ebea2", size = 10011755, upload-time = "2025-05-29T13:34:00.851Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f1/9b/5fd5801a72b5d6fb6ec0105ea1d0e01ab2d4971893076e558d4b6d6b5f80/mypy-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:089bedc02307c2548eb51f426e085546db1fa7dd87fbb7c9fa561575cf6eb1ff", size = 11800138, upload-time = "2025-05-29T13:32:55.082Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2e/81/a117441ea5dfc3746431e51d78a4aca569c677aa225bca2cc05a7c239b61/mypy-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6a2322896003ba66bbd1318c10d3afdfe24e78ef12ea10e2acd985e9d684a666", size = 12533156, upload-time = "2025-05-29T13:19:12.963Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3f/38/88ec57c6c86014d3f06251e00f397b5a7daa6888884d0abf187e4f5f587f/mypy-1.16.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:021a68568082c5b36e977d54e8f1de978baf401a33884ffcea09bd8e88a98f4c", size = 12742426, upload-time = "2025-05-29T13:20:22.72Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bd/53/7e9d528433d56e6f6f77ccf24af6ce570986c2d98a5839e4c2009ef47283/mypy-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:54066fed302d83bf5128632d05b4ec68412e1f03ef2c300434057d66866cea4b", size = 9478319, upload-time = "2025-05-29T13:21:17.582Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/70/cf/158e5055e60ca2be23aec54a3010f89dcffd788732634b344fc9cb1e85a0/mypy-1.16.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c5436d11e89a3ad16ce8afe752f0f373ae9620841c50883dc96f8b8805620b13", size = 11062927, upload-time = "2025-05-29T13:35:52.328Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/94/34/cfff7a56be1609f5d10ef386342ce3494158e4d506516890142007e6472c/mypy-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f2622af30bf01d8fc36466231bdd203d120d7a599a6d88fb22bdcb9dbff84090", size = 10083082, upload-time = "2025-05-29T13:35:33.378Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b3/7f/7242062ec6288c33d8ad89574df87c3903d394870e5e6ba1699317a65075/mypy-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d045d33c284e10a038f5e29faca055b90eee87da3fc63b8889085744ebabb5a1", size = 11828306, upload-time = "2025-05-29T13:21:02.164Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6f/5f/b392f7b4f659f5b619ce5994c5c43caab3d80df2296ae54fa888b3d17f5a/mypy-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b4968f14f44c62e2ec4a038c8797a87315be8df7740dc3ee8d3bfe1c6bf5dba8", size = 12702764, upload-time = "2025-05-29T13:20:42.826Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9b/c0/7646ef3a00fa39ac9bc0938626d9ff29d19d733011be929cfea59d82d136/mypy-1.16.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:eb14a4a871bb8efb1e4a50360d4e3c8d6c601e7a31028a2c79f9bb659b63d730", size = 12896233, upload-time = "2025-05-29T13:18:37.446Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6d/38/52f4b808b3fef7f0ef840ee8ff6ce5b5d77381e65425758d515cdd4f5bb5/mypy-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:bd4e1ebe126152a7bbaa4daedd781c90c8f9643c79b9748caa270ad542f12bec", size = 9565547, upload-time = "2025-05-29T13:20:02.836Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/97/9c/ca03bdbefbaa03b264b9318a98950a9c683e06472226b55472f96ebbc53d/mypy-1.16.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a9e056237c89f1587a3be1a3a70a06a698d25e2479b9a2f57325ddaaffc3567b", size = 11059753, upload-time = "2025-05-29T13:18:18.167Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/36/92/79a969b8302cfe316027c88f7dc6fee70129490a370b3f6eb11d777749d0/mypy-1.16.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0b07e107affb9ee6ce1f342c07f51552d126c32cd62955f59a7db94a51ad12c0", size = 10073338, upload-time = "2025-05-29T13:19:48.079Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/14/9b/a943f09319167da0552d5cd722104096a9c99270719b1afeea60d11610aa/mypy-1.16.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c6fb60cbd85dc65d4d63d37cb5c86f4e3a301ec605f606ae3a9173e5cf34997b", size = 11827764, upload-time = "2025-05-29T13:46:04.47Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ec/64/ff75e71c65a0cb6ee737287c7913ea155845a556c64144c65b811afdb9c7/mypy-1.16.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a7e32297a437cc915599e0578fa6bc68ae6a8dc059c9e009c628e1c47f91495d", size = 12701356, upload-time = "2025-05-29T13:35:13.553Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0a/ad/0e93c18987a1182c350f7a5fab70550852f9fabe30ecb63bfbe51b602074/mypy-1.16.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:afe420c9380ccec31e744e8baff0d406c846683681025db3531b32db56962d52", size = 12900745, upload-time = "2025-05-29T13:17:24.409Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/28/5d/036c278d7a013e97e33f08c047fe5583ab4f1fc47c9a49f985f1cdd2a2d7/mypy-1.16.0-cp313-cp313-win_amd64.whl", hash = "sha256:55f9076c6ce55dd3f8cd0c6fff26a008ca8e5131b89d5ba6d86bd3f47e736eeb", size = 9572200, upload-time = "2025-05-29T13:33:44.92Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/99/a3/6ed10530dec8e0fdc890d81361260c9ef1f5e5c217ad8c9b21ecb2b8366b/mypy-1.16.0-py3-none-any.whl", hash = "sha256:29e1499864a3888bca5c1542f2d7232c6e586295183320caa95758fc84034031", size = 2265773, upload-time = "2025-05-29T13:35:18.762Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8e/12/2bf23a80fcef5edb75de9a1e295d778e0f46ea89eb8b115818b663eff42b/mypy-1.16.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b4f0fed1022a63c6fec38f28b7fc77fca47fd490445c69d0a66266c59dd0b88a", size = 10958644, upload-time = "2025-06-16T16:51:11.649Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/08/50/bfe47b3b278eacf348291742fd5e6613bbc4b3434b72ce9361896417cfe5/mypy-1.16.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:86042bbf9f5a05ea000d3203cf87aa9d0ccf9a01f73f71c58979eb9249f46d72", size = 10087033, upload-time = "2025-06-16T16:35:30.089Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/21/de/40307c12fe25675a0776aaa2cdd2879cf30d99eec91b898de00228dc3ab5/mypy-1.16.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ea7469ee5902c95542bea7ee545f7006508c65c8c54b06dc2c92676ce526f3ea", size = 11875645, upload-time = "2025-06-16T16:35:48.49Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a6/d8/85bdb59e4a98b7a31495bd8f1a4445d8ffc86cde4ab1f8c11d247c11aedc/mypy-1.16.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:352025753ef6a83cb9e7f2427319bb7875d1fdda8439d1e23de12ab164179574", size = 12616986, upload-time = "2025-06-16T16:48:39.526Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0e/d0/bb25731158fa8f8ee9e068d3e94fcceb4971fedf1424248496292512afe9/mypy-1.16.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ff9fa5b16e4c1364eb89a4d16bcda9987f05d39604e1e6c35378a2987c1aac2d", size = 12878632, upload-time = "2025-06-16T16:36:08.195Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2d/11/822a9beb7a2b825c0cb06132ca0a5183f8327a5e23ef89717c9474ba0bc6/mypy-1.16.1-cp310-cp310-win_amd64.whl", hash = "sha256:1256688e284632382f8f3b9e2123df7d279f603c561f099758e66dd6ed4e8bd6", size = 9484391, upload-time = "2025-06-16T16:37:56.151Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9a/61/ec1245aa1c325cb7a6c0f8570a2eee3bfc40fa90d19b1267f8e50b5c8645/mypy-1.16.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:472e4e4c100062488ec643f6162dd0d5208e33e2f34544e1fc931372e806c0cc", size = 10890557, upload-time = "2025-06-16T16:37:21.421Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6b/bb/6eccc0ba0aa0c7a87df24e73f0ad34170514abd8162eb0c75fd7128171fb/mypy-1.16.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea16e2a7d2714277e349e24d19a782a663a34ed60864006e8585db08f8ad1782", size = 10012921, upload-time = "2025-06-16T16:51:28.659Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5f/80/b337a12e2006715f99f529e732c5f6a8c143bb58c92bb142d5ab380963a5/mypy-1.16.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:08e850ea22adc4d8a4014651575567b0318ede51e8e9fe7a68f25391af699507", size = 11802887, upload-time = "2025-06-16T16:50:53.627Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d9/59/f7af072d09793d581a745a25737c7c0a945760036b16aeb620f658a017af/mypy-1.16.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22d76a63a42619bfb90122889b903519149879ddbf2ba4251834727944c8baca", size = 12531658, upload-time = "2025-06-16T16:33:55.002Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/82/c4/607672f2d6c0254b94a646cfc45ad589dd71b04aa1f3d642b840f7cce06c/mypy-1.16.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2c7ce0662b6b9dc8f4ed86eb7a5d505ee3298c04b40ec13b30e572c0e5ae17c4", size = 12732486, upload-time = "2025-06-16T16:37:03.301Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b6/5e/136555ec1d80df877a707cebf9081bd3a9f397dedc1ab9750518d87489ec/mypy-1.16.1-cp311-cp311-win_amd64.whl", hash = "sha256:211287e98e05352a2e1d4e8759c5490925a7c784ddc84207f4714822f8cf99b6", size = 9479482, upload-time = "2025-06-16T16:47:37.48Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b4/d6/39482e5fcc724c15bf6280ff5806548c7185e0c090712a3736ed4d07e8b7/mypy-1.16.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:af4792433f09575d9eeca5c63d7d90ca4aeceda9d8355e136f80f8967639183d", size = 11066493, upload-time = "2025-06-16T16:47:01.683Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e6/e5/26c347890efc6b757f4d5bb83f4a0cf5958b8cf49c938ac99b8b72b420a6/mypy-1.16.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:66df38405fd8466ce3517eda1f6640611a0b8e70895e2a9462d1d4323c5eb4b9", size = 10081687, upload-time = "2025-06-16T16:48:19.367Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/44/c7/b5cb264c97b86914487d6a24bd8688c0172e37ec0f43e93b9691cae9468b/mypy-1.16.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:44e7acddb3c48bd2713994d098729494117803616e116032af192871aed80b79", size = 11839723, upload-time = "2025-06-16T16:49:20.912Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/15/f8/491997a9b8a554204f834ed4816bda813aefda31cf873bb099deee3c9a99/mypy-1.16.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0ab5eca37b50188163fa7c1b73c685ac66c4e9bdee4a85c9adac0e91d8895e15", size = 12722980, upload-time = "2025-06-16T16:37:40.929Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/df/f0/2bd41e174b5fd93bc9de9a28e4fb673113633b8a7f3a607fa4a73595e468/mypy-1.16.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb6229b2c9086247e21a83c309754b9058b438704ad2f6807f0d8227f6ebdd", size = 12903328, upload-time = "2025-06-16T16:34:35.099Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/61/81/5572108a7bec2c46b8aff7e9b524f371fe6ab5efb534d38d6b37b5490da8/mypy-1.16.1-cp312-cp312-win_amd64.whl", hash = "sha256:1f0435cf920e287ff68af3d10a118a73f212deb2ce087619eb4e648116d1fe9b", size = 9562321, upload-time = "2025-06-16T16:48:58.823Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/28/e3/96964af4a75a949e67df4b95318fe2b7427ac8189bbc3ef28f92a1c5bc56/mypy-1.16.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ddc91eb318c8751c69ddb200a5937f1232ee8efb4e64e9f4bc475a33719de438", size = 11063480, upload-time = "2025-06-16T16:47:56.205Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f5/4d/cd1a42b8e5be278fab7010fb289d9307a63e07153f0ae1510a3d7b703193/mypy-1.16.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:87ff2c13d58bdc4bbe7dc0dedfe622c0f04e2cb2a492269f3b418df2de05c536", size = 10090538, upload-time = "2025-06-16T16:46:43.92Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c9/4f/c3c6b4b66374b5f68bab07c8cabd63a049ff69796b844bc759a0ca99bb2a/mypy-1.16.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a7cfb0fe29fe5a9841b7c8ee6dffb52382c45acdf68f032145b75620acfbd6f", size = 11836839, upload-time = "2025-06-16T16:36:28.039Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b4/7e/81ca3b074021ad9775e5cb97ebe0089c0f13684b066a750b7dc208438403/mypy-1.16.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:051e1677689c9d9578b9c7f4d206d763f9bbd95723cd1416fad50db49d52f359", size = 12715634, upload-time = "2025-06-16T16:50:34.441Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e9/95/bdd40c8be346fa4c70edb4081d727a54d0a05382d84966869738cfa8a497/mypy-1.16.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d5d2309511cc56c021b4b4e462907c2b12f669b2dbeb68300110ec27723971be", size = 12895584, upload-time = "2025-06-16T16:34:54.857Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5a/fd/d486a0827a1c597b3b48b1bdef47228a6e9ee8102ab8c28f944cb83b65dc/mypy-1.16.1-cp313-cp313-win_amd64.whl", hash = "sha256:4f58ac32771341e38a853c5d0ec0dfe27e18e27da9cdb8bbc882d2249c71a3ee", size = 9573886, upload-time = "2025-06-16T16:36:43.589Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cf/d3/53e684e78e07c1a2bf7105715e5edd09ce951fc3f47cf9ed095ec1b7a037/mypy-1.16.1-py3-none-any.whl", hash = "sha256:5fc2ac4027d0ef28d6ba69a0343737a23c4d1b83672bf38d1fe237bdc0643b37", size = 2265923, upload-time = "2025-06-16T16:48:02.366Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1834,7 +1834,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "pydantic"
|
||||
version = "2.11.5"
|
||||
version = "2.11.7"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "annotated-types" },
|
||||
@@ -1842,9 +1842,9 @@ dependencies = [
|
||||
{ name = "typing-extensions" },
|
||||
{ name = "typing-inspection" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/f0/86/8ce9040065e8f924d642c58e4a344e33163a07f6b57f836d0d734e0ad3fb/pydantic-2.11.5.tar.gz", hash = "sha256:7f853db3d0ce78ce8bbb148c401c2cdd6431b3473c0cdff2755c7690952a7b7a", size = 787102, upload-time = "2025-05-22T21:18:08.761Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", size = 788350, upload-time = "2025-06-14T08:33:17.137Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/b5/69/831ed22b38ff9b4b64b66569f0e5b7b97cf3638346eb95a2147fdb49ad5f/pydantic-2.11.5-py3-none-any.whl", hash = "sha256:f9c26ba06f9747749ca1e5c94d6a85cb84254577553c8785576fd38fa64dc0f7", size = 444229, upload-time = "2025-05-22T21:18:06.329Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782, upload-time = "2025-06-14T08:33:14.905Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1977,7 +1977,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "pytest"
|
||||
version = "8.4.0"
|
||||
version = "8.4.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "colorama", marker = "sys_platform == 'win32'" },
|
||||
@@ -1988,9 +1988,9 @@ dependencies = [
|
||||
{ name = "pygments" },
|
||||
{ name = "tomli", marker = "python_full_version < '3.11'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/fb/aa/405082ce2749be5398045152251ac69c0f3578c7077efc53431303af97ce/pytest-8.4.0.tar.gz", hash = "sha256:14d920b48472ea0dbf68e45b96cd1ffda4705f33307dcc86c676c1b5104838a6", size = 1515232, upload-time = "2025-06-02T17:36:30.03Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/08/ba/45911d754e8eba3d5a841a5ce61a65a685ff1798421ac054f85aa8747dfb/pytest-8.4.1.tar.gz", hash = "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c", size = 1517714, upload-time = "2025-06-18T05:48:06.109Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/2f/de/afa024cbe022b1b318a3d224125aa24939e99b4ff6f22e0ba639a2eaee47/pytest-8.4.0-py3-none-any.whl", hash = "sha256:f40f825768ad76c0977cbacdf1fd37c6f7a468e460ea6a0636078f8972d4517e", size = 363797, upload-time = "2025-06-02T17:36:27.859Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/29/16/c8a903f4c4dffe7a12843191437d7cd8e32751d5de349d45d3fe69544e87/pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7", size = 365474, upload-time = "2025-06-18T05:48:03.955Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2007,15 +2007,16 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "pytest-cov"
|
||||
version = "6.1.1"
|
||||
version = "6.2.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "coverage", extra = ["toml"] },
|
||||
{ name = "pluggy" },
|
||||
{ name = "pytest" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/25/69/5f1e57f6c5a39f81411b550027bf72842c4567ff5fd572bed1edc9e4b5d9/pytest_cov-6.1.1.tar.gz", hash = "sha256:46935f7aaefba760e716c2ebfbe1c216240b9592966e7da99ea8292d4d3e2a0a", size = 66857, upload-time = "2025-04-05T14:07:51.592Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/18/99/668cade231f434aaa59bbfbf49469068d2ddd945000621d3d165d2e7dd7b/pytest_cov-6.2.1.tar.gz", hash = "sha256:25cc6cc0a5358204b8108ecedc51a9b57b34cc6b8c967cc2c01a4e00d8a67da2", size = 69432, upload-time = "2025-06-12T10:47:47.684Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/28/d0/def53b4a790cfb21483016430ed828f64830dd981ebe1089971cd10cab25/pytest_cov-6.1.1-py3-none-any.whl", hash = "sha256:bddf29ed2d0ab6f4df17b4c55b0a657287db8684af9c42ea546b21b1041b3dde", size = 23841, upload-time = "2025-04-05T14:07:49.641Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/16/4ea354101abb1287856baa4af2732be351c7bee728065aed451b678153fd/pytest_cov-6.2.1-py3-none-any.whl", hash = "sha256:f5bc4c23f42f1cdd23c70b1dab1bbaef4fc505ba950d53e0081d0730dd7e86d5", size = 24644, upload-time = "2025-06-12T10:47:45.932Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2303,27 +2304,27 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.11.13"
|
||||
version = "0.12.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/ed/da/9c6f995903b4d9474b39da91d2d626659af3ff1eeb43e9ae7c119349dba6/ruff-0.11.13.tar.gz", hash = "sha256:26fa247dc68d1d4e72c179e08889a25ac0c7ba4d78aecfc835d49cbfd60bf514", size = 4282054, upload-time = "2025-06-05T21:00:15.721Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/24/90/5255432602c0b196a0da6720f6f76b93eb50baef46d3c9b0025e2f9acbf3/ruff-0.12.0.tar.gz", hash = "sha256:4d047db3662418d4a848a3fdbfaf17488b34b62f527ed6f10cb8afd78135bc5c", size = 4376101, upload-time = "2025-06-17T15:19:26.217Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/7d/ce/a11d381192966e0b4290842cc8d4fac7dc9214ddf627c11c1afff87da29b/ruff-0.11.13-py3-none-linux_armv6l.whl", hash = "sha256:4bdfbf1240533f40042ec00c9e09a3aade6f8c10b6414cf11b519488d2635d46", size = 10292516, upload-time = "2025-06-05T20:59:32.944Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/78/db/87c3b59b0d4e753e40b6a3b4a2642dfd1dcaefbff121ddc64d6c8b47ba00/ruff-0.11.13-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:aef9c9ed1b5ca28bb15c7eac83b8670cf3b20b478195bd49c8d756ba0a36cf48", size = 11106083, upload-time = "2025-06-05T20:59:37.03Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/77/79/d8cec175856ff810a19825d09ce700265f905c643c69f45d2b737e4a470a/ruff-0.11.13-py3-none-macosx_11_0_arm64.whl", hash = "sha256:53b15a9dfdce029c842e9a5aebc3855e9ab7771395979ff85b7c1dedb53ddc2b", size = 10436024, upload-time = "2025-06-05T20:59:39.741Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8b/5b/f6d94f2980fa1ee854b41568368a2e1252681b9238ab2895e133d303538f/ruff-0.11.13-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab153241400789138d13f362c43f7edecc0edfffce2afa6a68434000ecd8f69a", size = 10646324, upload-time = "2025-06-05T20:59:42.185Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6c/9c/b4c2acf24ea4426016d511dfdc787f4ce1ceb835f3c5fbdbcb32b1c63bda/ruff-0.11.13-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c51f93029d54a910d3d24f7dd0bb909e31b6cd989a5e4ac513f4eb41629f0dc", size = 10174416, upload-time = "2025-06-05T20:59:44.319Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f3/10/e2e62f77c65ede8cd032c2ca39c41f48feabedb6e282bfd6073d81bb671d/ruff-0.11.13-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1808b3ed53e1a777c2ef733aca9051dc9bf7c99b26ece15cb59a0320fbdbd629", size = 11724197, upload-time = "2025-06-05T20:59:46.935Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bb/f0/466fe8469b85c561e081d798c45f8a1d21e0b4a5ef795a1d7f1a9a9ec182/ruff-0.11.13-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:d28ce58b5ecf0f43c1b71edffabe6ed7f245d5336b17805803312ec9bc665933", size = 12511615, upload-time = "2025-06-05T20:59:49.534Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/17/0e/cefe778b46dbd0cbcb03a839946c8f80a06f7968eb298aa4d1a4293f3448/ruff-0.11.13-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55e4bc3a77842da33c16d55b32c6cac1ec5fb0fbec9c8c513bdce76c4f922165", size = 12117080, upload-time = "2025-06-05T20:59:51.654Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5d/2c/caaeda564cbe103bed145ea557cb86795b18651b0f6b3ff6a10e84e5a33f/ruff-0.11.13-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:633bf2c6f35678c56ec73189ba6fa19ff1c5e4807a78bf60ef487b9dd272cc71", size = 11326315, upload-time = "2025-06-05T20:59:54.469Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/75/f0/782e7d681d660eda8c536962920c41309e6dd4ebcea9a2714ed5127d44bd/ruff-0.11.13-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ffbc82d70424b275b089166310448051afdc6e914fdab90e08df66c43bb5ca9", size = 11555640, upload-time = "2025-06-05T20:59:56.986Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5d/d4/3d580c616316c7f07fb3c99dbecfe01fbaea7b6fd9a82b801e72e5de742a/ruff-0.11.13-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4a9ddd3ec62a9a89578c85842b836e4ac832d4a2e0bfaad3b02243f930ceafcc", size = 10507364, upload-time = "2025-06-05T20:59:59.154Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5a/dc/195e6f17d7b3ea6b12dc4f3e9de575db7983db187c378d44606e5d503319/ruff-0.11.13-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d237a496e0778d719efb05058c64d28b757c77824e04ffe8796c7436e26712b7", size = 10141462, upload-time = "2025-06-05T21:00:01.481Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f4/8e/39a094af6967faa57ecdeacb91bedfb232474ff8c3d20f16a5514e6b3534/ruff-0.11.13-py3-none-musllinux_1_2_i686.whl", hash = "sha256:26816a218ca6ef02142343fd24c70f7cd8c5aa6c203bca284407adf675984432", size = 11121028, upload-time = "2025-06-05T21:00:04.06Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5a/c0/b0b508193b0e8a1654ec683ebab18d309861f8bd64e3a2f9648b80d392cb/ruff-0.11.13-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:51c3f95abd9331dc5b87c47ac7f376db5616041173826dfd556cfe3d4977f492", size = 11602992, upload-time = "2025-06-05T21:00:06.249Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7c/91/263e33ab93ab09ca06ce4f8f8547a858cc198072f873ebc9be7466790bae/ruff-0.11.13-py3-none-win32.whl", hash = "sha256:96c27935418e4e8e77a26bb05962817f28b8ef3843a6c6cc49d8783b5507f250", size = 10474944, upload-time = "2025-06-05T21:00:08.459Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/46/f4/7c27734ac2073aae8efb0119cae6931b6fb48017adf048fdf85c19337afc/ruff-0.11.13-py3-none-win_amd64.whl", hash = "sha256:29c3189895a8a6a657b7af4e97d330c8a3afd2c9c8f46c81e2fc5a31866517e3", size = 11548669, upload-time = "2025-06-05T21:00:11.147Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ec/bf/b273dd11673fed8a6bd46032c0ea2a04b2ac9bfa9c628756a5856ba113b0/ruff-0.11.13-py3-none-win_arm64.whl", hash = "sha256:b4385285e9179d608ff1d2fb9922062663c658605819a6876d8beef0c30b7f3b", size = 10683928, upload-time = "2025-06-05T21:00:13.758Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e6/fd/b46bb20e14b11ff49dbc74c61de352e0dc07fb650189513631f6fb5fc69f/ruff-0.12.0-py3-none-linux_armv6l.whl", hash = "sha256:5652a9ecdb308a1754d96a68827755f28d5dfb416b06f60fd9e13f26191a8848", size = 10311554, upload-time = "2025-06-17T15:18:45.792Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e7/d3/021dde5a988fa3e25d2468d1dadeea0ae89dc4bc67d0140c6e68818a12a1/ruff-0.12.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:05ed0c914fabc602fc1f3b42c53aa219e5736cb030cdd85640c32dbc73da74a6", size = 11118435, upload-time = "2025-06-17T15:18:49.064Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/07/a2/01a5acf495265c667686ec418f19fd5c32bcc326d4c79ac28824aecd6a32/ruff-0.12.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:07a7aa9b69ac3fcfda3c507916d5d1bca10821fe3797d46bad10f2c6de1edda0", size = 10466010, upload-time = "2025-06-17T15:18:51.341Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4c/57/7caf31dd947d72e7aa06c60ecb19c135cad871a0a8a251723088132ce801/ruff-0.12.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7731c3eec50af71597243bace7ec6104616ca56dda2b99c89935fe926bdcd48", size = 10661366, upload-time = "2025-06-17T15:18:53.29Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e9/ba/aa393b972a782b4bc9ea121e0e358a18981980856190d7d2b6187f63e03a/ruff-0.12.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:952d0630eae628250ab1c70a7fffb641b03e6b4a2d3f3ec6c1d19b4ab6c6c807", size = 10173492, upload-time = "2025-06-17T15:18:55.262Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d7/50/9349ee777614bc3062fc6b038503a59b2034d09dd259daf8192f56c06720/ruff-0.12.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c021f04ea06966b02614d442e94071781c424ab8e02ec7af2f037b4c1e01cc82", size = 11761739, upload-time = "2025-06-17T15:18:58.906Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/04/8f/ad459de67c70ec112e2ba7206841c8f4eb340a03ee6a5cabc159fe558b8e/ruff-0.12.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:7d235618283718ee2fe14db07f954f9b2423700919dc688eacf3f8797a11315c", size = 12537098, upload-time = "2025-06-17T15:19:01.316Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ed/50/15ad9c80ebd3c4819f5bd8883e57329f538704ed57bac680d95cb6627527/ruff-0.12.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c0758038f81beec8cc52ca22de9685b8ae7f7cc18c013ec2050012862cc9165", size = 12154122, upload-time = "2025-06-17T15:19:03.727Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/76/e6/79b91e41bc8cc3e78ee95c87093c6cacfa275c786e53c9b11b9358026b3d/ruff-0.12.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:139b3d28027987b78fc8d6cfb61165447bdf3740e650b7c480744873688808c2", size = 11363374, upload-time = "2025-06-17T15:19:05.875Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/db/c3/82b292ff8a561850934549aa9dc39e2c4e783ab3c21debe55a495ddf7827/ruff-0.12.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68853e8517b17bba004152aebd9dd77d5213e503a5f2789395b25f26acac0da4", size = 11587647, upload-time = "2025-06-17T15:19:08.246Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2b/42/d5760d742669f285909de1bbf50289baccb647b53e99b8a3b4f7ce1b2001/ruff-0.12.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3a9512af224b9ac4757f7010843771da6b2b0935a9e5e76bb407caa901a1a514", size = 10527284, upload-time = "2025-06-17T15:19:10.37Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/19/f6/fcee9935f25a8a8bba4adbae62495c39ef281256693962c2159e8b284c5f/ruff-0.12.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b08df3d96db798e5beb488d4df03011874aff919a97dcc2dd8539bb2be5d6a88", size = 10158609, upload-time = "2025-06-17T15:19:12.286Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/37/fb/057febf0eea07b9384787bfe197e8b3384aa05faa0d6bd844b94ceb29945/ruff-0.12.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6a315992297a7435a66259073681bb0d8647a826b7a6de45c6934b2ca3a9ed51", size = 11141462, upload-time = "2025-06-17T15:19:15.195Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/10/7c/1be8571011585914b9d23c95b15d07eec2d2303e94a03df58294bc9274d4/ruff-0.12.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1e55e44e770e061f55a7dbc6e9aed47feea07731d809a3710feda2262d2d4d8a", size = 11641616, upload-time = "2025-06-17T15:19:17.6Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6a/ef/b960ab4818f90ff59e571d03c3f992828d4683561095e80f9ef31f3d58b7/ruff-0.12.0-py3-none-win32.whl", hash = "sha256:7162a4c816f8d1555eb195c46ae0bd819834d2a3f18f98cc63819a7b46f474fb", size = 10525289, upload-time = "2025-06-17T15:19:19.688Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/34/93/8b16034d493ef958a500f17cda3496c63a537ce9d5a6479feec9558f1695/ruff-0.12.0-py3-none-win_amd64.whl", hash = "sha256:d00b7a157b8fb6d3827b49d3324da34a1e3f93492c1f97b08e222ad7e9b291e0", size = 11598311, upload-time = "2025-06-17T15:19:21.785Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d0/33/4d3e79e4a84533d6cd526bfb42c020a23256ae5e4265d858bd1287831f7d/ruff-0.12.0-py3-none-win_arm64.whl", hash = "sha256:8cd24580405ad8c1cc64d61725bca091d6b6da7eb3d36f72cc605467069d7e8b", size = 10724946, upload-time = "2025-06-17T15:19:23.952Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2558,14 +2559,14 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "types-requests"
|
||||
version = "2.32.0.20250602"
|
||||
version = "2.32.4.20250611"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "urllib3" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/48/b0/5321e6eeba5d59e4347fcf9bf06a5052f085c3aa0f4876230566d6a4dc97/types_requests-2.32.0.20250602.tar.gz", hash = "sha256:ee603aeefec42051195ae62ca7667cd909a2f8128fdf8aad9e8a5219ecfab3bf", size = 23042, upload-time = "2025-06-02T03:15:02.958Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/6d/7f/73b3a04a53b0fd2a911d4ec517940ecd6600630b559e4505cc7b68beb5a0/types_requests-2.32.4.20250611.tar.gz", hash = "sha256:741c8777ed6425830bf51e54d6abe245f79b4dcb9019f1622b773463946bf826", size = 23118, upload-time = "2025-06-11T03:11:41.272Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/da/18/9b782980e575c6581d5c0c1c99f4c6f89a1d7173dad072ee96b2756c02e6/types_requests-2.32.0.20250602-py3-none-any.whl", hash = "sha256:f4f335f87779b47ce10b8b8597b409130299f6971ead27fead4fe7ba6ea3e726", size = 20638, upload-time = "2025-06-02T03:15:01.959Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3d/ea/0be9258c5a4fa1ba2300111aa5a0767ee6d18eb3fd20e91616c12082284d/types_requests-2.32.4.20250611-py3-none-any.whl", hash = "sha256:ad2fe5d3b0cb3c2c902c8815a70e7fb2302c4b8c1f77bdcd738192cdb3878072", size = 20643, upload-time = "2025-06-11T03:11:40.186Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
||||
@@ -146,6 +146,7 @@ dart_code_metrics:
    # - no-empty-block
    # - no-equal-then-else
    # - prefer-correct-test-file-name
    - prefer-const-border-radius
    # - prefer-match-file-name
    # - prefer-return-await
    # - avoid-self-assignment
@@ -290,7 +291,8 @@ dart_code_metrics:
    # Style
    # - prefer-trailing-comma
    # - unnecessary-trailing-comma
    # - prefer-declaring-const-constructor
    - prefer-declaring-const-constructor
    # - prefer-single-widget-per-file
    - prefer-switch-expression
    # - prefer-prefixed-global-constants
    # - prefer-correct-callback-field-name
@@ -90,6 +90,35 @@
                <data android:mimeType="video/*" />
            </intent-filter>

            <!-- immich:// URL scheme handling -->
            <intent-filter>
                <action android:name="android.intent.action.VIEW" />
                <category android:name="android.intent.category.DEFAULT" />
                <category android:name="android.intent.category.BROWSABLE" />
                <data android:scheme="immich" />
            </intent-filter>

            <!-- my.immich.app deep link -->
            <intent-filter android:autoVerify="true">
                <action android:name="android.intent.action.VIEW" />
                <category android:name="android.intent.category.DEFAULT" />
                <category android:name="android.intent.category.BROWSABLE" />

                <data android:scheme="https" />

                <data
                    android:host="my.immich.app"
                    android:path="/" />
                <data
                    android:host="my.immich.app"
                    android:pathPrefix="/albums/" />
                <data
                    android:host="my.immich.app"
                    android:pathPrefix="/memories/" />
                <data
                    android:host="my.immich.app"
                    android:pathPrefix="/photos/" />
            </intent-filter>
        </activity>
@@ -2,4 +2,6 @@ org.gradle.jvmargs=-Xmx4096M
android.useAndroidX=true
android.enableJetifier=true
android.nonTransitiveRClass=false
android.nonFinalResIds=false
org.gradle.caching=true
org.gradle.parallel=true
2  mobile/drift_schemas/main/drift_schema_v1.json  generated
File diff suppressed because one or more lines are too long
@@ -7,7 +7,7 @@ import 'general_helper.dart';
class ImmichTestLoginHelper {
  final WidgetTester tester;

  ImmichTestLoginHelper(this.tester);
  const ImmichTestLoginHelper(this.tester);

  Future<void> waitForLoginScreen() async {
    await pumpUntilFound(tester, find.text("Login"));
@@ -60,11 +60,11 @@ class ImmichTestLoginHelper {
    await tester.tap(button);
  }

  Future<void> assertLoginSuccess({int timeoutSeconds = 15}) async {
  Future<void> assertLoginSuccess() async {
    await pumpUntilFound(tester, find.text("home_page_building_timeline".tr()));
  }

  Future<void> assertLoginFailed({int timeoutSeconds = 15}) async {
  Future<void> assertLoginFailed() async {
    await pumpUntilFound(tester, find.text("login_form_failed_login".tr()));
  }
}
@@ -86,11 +86,23 @@
    <dict>
      <key>CFBundleTypeRole</key>
      <string>Editor</string>
      <key>CFBundleURLName</key>
      <string>Share Extension</string>
      <key>CFBundleURLSchemes</key>
      <array>
        <string>ShareMedia-$(PRODUCT_BUNDLE_IDENTIFIER)</string>
      </array>
    </dict>
    <dict>
      <key>CFBundleTypeRole</key>
      <string>Editor</string>
      <key>CFBundleURLName</key>
      <string>Deep Link</string>
      <key>CFBundleURLSchemes</key>
      <array>
        <string>immich</string>
      </array>
    </dict>
  </array>
  <key>CFBundleVersion</key>
  <string>210</string>
@@ -120,6 +132,8 @@
  </array>
  <key>NSCameraUsageDescription</key>
  <string>We need to access the camera to let you take beautiful video using this app</string>
  <key>NSFaceIDUsageDescription</key>
  <string>We need to use FaceID to allow access to your locked folder</string>
  <key>NSLocationAlwaysAndWhenInUseUsageDescription</key>
  <string>We require this permission to access the local WiFi name for background upload mechanism</string>
  <key>NSLocationUsageDescription</key>
@@ -166,8 +180,6 @@
  <true />
  <key>io.flutter.embedded_views_preview</key>
  <true />
  <key>NSFaceIDUsageDescription</key>
  <string>We need to use FaceID to allow access to your locked folder</string>
  <key>NSLocalNetworkUsageDescription</key>
  <string>We need local network permission to connect to the local server using IP address and
    allow the casting feature to work</string>
@@ -2,6 +2,10 @@
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
  <key>com.apple.developer.associated-domains</key>
  <array>
    <string>applinks:my.immich.app</string>
  </array>
  <key>com.apple.developer.networking.wifi-info</key>
  <true/>
  <key>com.apple.security.application-groups</key>
@@ -4,6 +4,10 @@
<dict>
  <key>aps-environment</key>
  <string>development</string>
  <key>com.apple.developer.associated-domains</key>
  <array>
    <string>applinks:my.immich.app</string>
  </array>
  <key>com.apple.developer.networking.wifi-info</key>
  <true/>
  <key>com.apple.security.application-groups</key>
@@ -3,7 +3,7 @@ import WidgetKit

func buildEntry(
  api: ImmichAPI,
  asset: SearchResult,
  asset: Asset,
  dateOffset: Int,
  subtitle: String? = nil
)
@@ -15,7 +15,8 @@ func buildEntry(
    to: Date.now
  )!
  let image = try await api.fetchImage(asset: asset)
  return ImageEntry(date: entryDate, image: image, subtitle: subtitle)

  return ImageEntry(date: entryDate, image: image, subtitle: subtitle, deepLink: asset.deepLink)
}

func generateRandomEntries(

@@ -6,6 +6,7 @@ struct ImageEntry: TimelineEntry {
  var image: UIImage?
  var subtitle: String? = nil
  var error: WidgetError? = nil
  var deepLink: URL? = nil

  // Resizes the stored image to a maximum width of 450 pixels
  mutating func resize() {
@@ -24,27 +25,11 @@ struct ImageEntry: TimelineEntry {
struct ImmichWidgetView: View {
  var entry: ImageEntry

  func getErrorText(_ error: WidgetError?) -> String {
    switch error {
    case .noLogin:
      return "Login to Immich"

    case .fetchFailed:
      return "Unable to connect to your Immich instance"

    case .albumNotFound:
      return "Album not found"

    default:
      return "An unknown error occured"
    }
  }

  var body: some View {
    if entry.image == nil {
      VStack {
        Image("LaunchImage")
        Text(getErrorText(entry.error))
        Text(entry.error?.errorDescription ?? "")
          .minimumScaleFactor(0.25)
          .multilineTextAlignment(.center)
          .foregroundStyle(.secondary)
@@ -70,6 +55,7 @@ struct ImmichWidgetView: View {
      }
      .padding(16)
    }
    .widgetURL(entry.deepLink)
  }
}
}

@@ -8,6 +8,32 @@ enum WidgetError: Error {
  case unknown
  case albumNotFound
  case unableToResize
  case invalidImage
  case invalidURL
}

extension WidgetError: LocalizedError {
  public var errorDescription: String? {
    switch self {
    case .noLogin:
      return "Login to Immich"

    case .fetchFailed:
      return "Unable to connect to your Immich instance"

    case .albumNotFound:
      return "Album not found"

    case .invalidURL:
      return "An invalid URL was used"

    case .invalidImage:
      return "An invalid image was received"

    default:
      return "An unknown error occured"
    }
  }
}

enum AssetType: String, Codable {
@@ -17,9 +43,13 @@ enum AssetType: String, Codable {
  case other = "OTHER"
}

struct SearchResult: Codable {
struct Asset: Codable {
  let id: String
  let type: AssetType

  var deepLink: URL? {
    return URL(string: "immich://asset?id=\(id)")
  }
}

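The new Asset.deepLink ties into the immich:// URL scheme registered in Info.plist above. A hedged Dart sketch of how the Flutter side could pull the asset id out of such a link; the app's actual deep-link routing is not shown in this diff and may be wired differently:

// Parses immich://asset?id=<asset-id> links like the one the widget emits.
// Illustrative only; not the app's real routing code.
String? assetIdFromDeepLink(Uri uri) {
  if (uri.scheme != 'immich' || uri.host != 'asset') {
    return null;
  }
  return uri.queryParameters['id'];
}

void main() {
  final uri = Uri.parse('immich://asset?id=9f3c2a10-1234-5678-9abc-def012345678');
  print(assetIdFromDeepLink(uri)); // 9f3c2a10-1234-5678-9abc-def012345678
}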
struct SearchFilters: Codable {
@@ -30,7 +60,7 @@ struct SearchFilters: Codable {

struct MemoryResult: Codable {
  let id: String
  var assets: [SearchResult]
  var assets: [Asset]
  let type: String

struct MemoryData: Codable {
@@ -101,7 +131,7 @@ class ImmichAPI {
  }

  func fetchSearchResults(with filters: SearchFilters) async throws
    -> [SearchResult]
    -> [Asset]
  {
    // get URL
    guard
@@ -121,7 +151,7 @@ class ImmichAPI {
    let (data, _) = try await URLSession.shared.data(for: request)

    // decode data
    return try JSONDecoder().decode([SearchResult].self, from: data)
    return try JSONDecoder().decode([Asset].self, from: data)
  }

  func fetchMemory(for date: Date) async throws -> [MemoryResult] {
@@ -146,7 +176,7 @@ class ImmichAPI {
    return try JSONDecoder().decode([MemoryResult].self, from: data)
  }

  func fetchImage(asset: SearchResult) async throws -> UIImage {
  func fetchImage(asset: Asset) async throws(WidgetError) -> UIImage {
    let thumbnailParams = [URLQueryItem(name: "size", value: "preview")]
    let assetEndpoint = "/assets/" + asset.id + "/thumbnail"

@@ -157,16 +187,24 @@ class ImmichAPI {
      params: thumbnailParams
    )
    else {
      throw URLError(.badURL)
      throw .invalidURL
    }

    guard let imageSource = CGImageSourceCreateWithURL(fetchURL as CFURL, nil) else {
      throw .invalidURL
    }

    let (data, _) = try await URLSession.shared.data(from: fetchURL)

    guard let img = UIImage(data: data) else {
      throw URLError(.badServerResponse)
    let decodeOptions: [NSString: Any] = [
      kCGImageSourceCreateThumbnailFromImageAlways: true,
      kCGImageSourceThumbnailMaxPixelSize: 400,
      kCGImageSourceCreateThumbnailWithTransform: true
    ]

    guard let thumbnail = CGImageSourceCreateThumbnailAtIndex(imageSource, 0, decodeOptions as CFDictionary) else {
      throw .fetchFailed
    }

    return img
    return UIImage(cgImage: thumbnail)
  }

  func fetchAlbums() async throws -> [Album] {

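For reference, the same preview thumbnail endpoint that fetchImage uses above, requested from Dart with package:http. Only the /assets/<id>/thumbnail path and the size=preview parameter come from the diff; the server URL and the x-api-key header name are assumptions for illustration:

import 'dart:typed_data';

import 'package:http/http.dart' as http;

// Fetches the preview-sized thumbnail bytes for one asset. Server URL and
// auth header are placeholders, not taken from this diff.
Future<Uint8List> fetchPreviewThumbnail({
  required Uri serverUrl,
  required String apiKey,
  required String assetId,
}) async {
  final url = serverUrl
      .resolve('assets/$assetId/thumbnail')
      .replace(queryParameters: {'size': 'preview'});
  final response = await http.get(url, headers: {'x-api-key': apiKey});
  if (response.statusCode != 200) {
    throw Exception('Thumbnail request failed: HTTP ${response.statusCode}');
  }
  return response.bodyBytes;
}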
@@ -12,3 +12,5 @@ enum TextSearchType {
enum AssetVisibilityEnum { timeline, hidden, archive, locked }

enum SortUserBy { id }

enum ActionSource { timeline, viewer }

@@ -4,6 +4,8 @@ sealed class ImmichErrors {
}

class NoResponseDtoError extends ImmichErrors implements Exception {
  const NoResponseDtoError();

  @override
  String toString() => "Response Dto is null";
}

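A condensed, self-contained sketch of the pattern introduced above: a sealed error hierarchy with a const NoResponseDtoError, plus a hypothetical ensureResponse guard showing where such an error would be thrown (the guard is illustrative, not part of the diff):

// Condensed restatement of the sealed error hierarchy above; the const
// superclass constructor and the ensureResponse helper are assumptions.
sealed class ImmichErrors {
  const ImmichErrors();
}

class NoResponseDtoError extends ImmichErrors implements Exception {
  const NoResponseDtoError();

  @override
  String toString() => "Response Dto is null";
}

T ensureResponse<T>(T? dto) {
  if (dto == null) {
    throw const NoResponseDtoError();
  }
  return dto;
}

void main() {
  print(ensureResponse<String>('ok')); // ok
}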
89
mobile/lib/domain/models/album/album.model.dart
Normal file
@@ -0,0 +1,89 @@
|
||||
enum AlbumAssetOrder {
|
||||
// do not change this order!
|
||||
asc,
|
||||
desc,
|
||||
}
|
||||
|
||||
enum AlbumUserRole {
|
||||
// do not change this order!
|
||||
editor,
|
||||
viewer,
|
||||
}
|
||||
|
||||
// Model for an album stored in the server
|
||||
class Album {
|
||||
final String id;
|
||||
final String name;
|
||||
final String ownerId;
|
||||
final String description;
|
||||
final DateTime createdAt;
|
||||
final DateTime updatedAt;
|
||||
final String? thumbnailAssetId;
|
||||
final bool isActivityEnabled;
|
||||
final AlbumAssetOrder order;
|
||||
final int assetCount;
|
||||
final String ownerName;
|
||||
|
||||
const Album({
|
||||
required this.id,
|
||||
required this.name,
|
||||
required this.ownerId,
|
||||
required this.description,
|
||||
required this.createdAt,
|
||||
required this.updatedAt,
|
||||
this.thumbnailAssetId,
|
||||
required this.isActivityEnabled,
|
||||
required this.order,
|
||||
required this.assetCount,
|
||||
required this.ownerName,
|
||||
});
|
||||
|
||||
@override
|
||||
String toString() {
|
||||
return '''Album {
|
||||
id: $id,
|
||||
name: $name,
|
||||
ownerId: $ownerId,
|
||||
description: $description,
|
||||
createdAt: $createdAt,
|
||||
updatedAt: $updatedAt,
|
||||
isActivityEnabled: $isActivityEnabled,
|
||||
order: $order,
|
||||
thumbnailAssetId: ${thumbnailAssetId ?? "<NA>"}
|
||||
assetCount: $assetCount
|
||||
ownerName: $ownerName
|
||||
}''';
|
||||
}
|
||||
|
||||
@override
|
||||
bool operator ==(Object other) {
|
||||
if (other is! Album) return false;
|
||||
if (identical(this, other)) return true;
|
||||
return id == other.id &&
|
||||
name == other.name &&
|
||||
ownerId == other.ownerId &&
|
||||
description == other.description &&
|
||||
createdAt == other.createdAt &&
|
||||
updatedAt == other.updatedAt &&
|
||||
thumbnailAssetId == other.thumbnailAssetId &&
|
||||
isActivityEnabled == other.isActivityEnabled &&
|
||||
order == other.order &&
|
||||
assetCount == other.assetCount &&
|
||||
ownerName == other.ownerName;
|
||||
}
|
||||
|
||||
@override
|
||||
int get hashCode {
|
||||
return id.hashCode ^
|
||||
name.hashCode ^
|
||||
ownerId.hashCode ^
|
||||
description.hashCode ^
|
||||
createdAt.hashCode ^
|
||||
updatedAt.hashCode ^
|
||||
thumbnailAssetId.hashCode ^
|
||||
isActivityEnabled.hashCode ^
|
||||
order.hashCode ^
|
||||
assetCount.hashCode ^
|
||||
ownerName.hashCode;
|
||||
}
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
part 'asset.model.dart';
|
||||
part 'remote_asset.model.dart';
|
||||
part 'local_asset.model.dart';
|
||||
|
||||
enum AssetType {
|
||||
|
||||
@@ -8,16 +8,18 @@ enum AssetVisibility {
|
||||
}
|
||||
|
||||
// Model for an asset stored in the server
|
||||
class Asset extends BaseAsset {
|
||||
class RemoteAsset extends BaseAsset {
|
||||
final String id;
|
||||
final String? localId;
|
||||
final String? thumbHash;
|
||||
final AssetVisibility visibility;
|
||||
final String ownerId;
|
||||
|
||||
const Asset({
|
||||
const RemoteAsset({
|
||||
required this.id,
|
||||
this.localId,
|
||||
required super.name,
|
||||
required this.ownerId,
|
||||
required super.checksum,
|
||||
required super.type,
|
||||
required super.createdAt,
|
||||
@@ -37,16 +39,17 @@ class Asset extends BaseAsset {
|
||||
@override
|
||||
String toString() {
|
||||
return '''Asset {
|
||||
id: $id,
|
||||
name: $name,
|
||||
type: $type,
|
||||
createdAt: $createdAt,
|
||||
updatedAt: $updatedAt,
|
||||
width: ${width ?? "<NA>"},
|
||||
height: ${height ?? "<NA>"},
|
||||
durationInSeconds: ${durationInSeconds ?? "<NA>"},
|
||||
localId: ${localId ?? "<NA>"},
|
||||
isFavorite: $isFavorite,
|
||||
id: $id,
|
||||
name: $name,
|
||||
ownerId: $ownerId,
|
||||
type: $type,
|
||||
createdAt: $createdAt,
|
||||
updatedAt: $updatedAt,
|
||||
width: ${width ?? "<NA>"},
|
||||
height: ${height ?? "<NA>"},
|
||||
durationInSeconds: ${durationInSeconds ?? "<NA>"},
|
||||
localId: ${localId ?? "<NA>"},
|
||||
isFavorite: $isFavorite,
|
||||
thumbHash: ${thumbHash ?? "<NA>"},
|
||||
visibility: $visibility,
|
||||
}''';
|
||||
@@ -54,10 +57,11 @@ class Asset extends BaseAsset {
|
||||
|
||||
@override
|
||||
bool operator ==(Object other) {
|
||||
if (other is! Asset) return false;
|
||||
if (other is! RemoteAsset) return false;
|
||||
if (identical(this, other)) return true;
|
||||
return super == other &&
|
||||
id == other.id &&
|
||||
ownerId == other.ownerId &&
|
||||
localId == other.localId &&
|
||||
thumbHash == other.thumbHash &&
|
||||
visibility == other.visibility;
|
||||
@@ -67,6 +71,7 @@ class Asset extends BaseAsset {
|
||||
int get hashCode =>
|
||||
super.hashCode ^
|
||||
id.hashCode ^
|
||||
ownerId.hashCode ^
|
||||
localId.hashCode ^
|
||||
thumbHash.hashCode ^
|
||||
visibility.hashCode;
|
||||
@@ -1,7 +1,7 @@
import 'dart:convert';

class Person {
  Person({
  const Person({
    required this.id,
    this.birthDate,
    required this.isHidden,

@@ -2,10 +2,8 @@ import 'dart:async';
|
||||
|
||||
import 'package:collection/collection.dart';
|
||||
import 'package:flutter/widgets.dart';
|
||||
import 'package:immich_mobile/domain/models/album/local_album.model.dart';
|
||||
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
|
||||
import 'package:immich_mobile/domain/models/local_album.model.dart';
|
||||
import 'package:immich_mobile/domain/models/store.model.dart';
|
||||
import 'package:immich_mobile/domain/services/store.service.dart';
|
||||
import 'package:immich_mobile/infrastructure/repositories/local_album.repository.dart';
|
||||
import 'package:immich_mobile/platform/native_sync_api.g.dart';
|
||||
import 'package:immich_mobile/presentation/pages/dev/dev_logger.dart';
|
||||
@@ -17,22 +15,16 @@ class LocalSyncService {
|
||||
final DriftLocalAlbumRepository _localAlbumRepository;
|
||||
final NativeSyncApi _nativeSyncApi;
|
||||
final Platform _platform;
|
||||
final StoreService _storeService;
|
||||
final Logger _log = Logger("DeviceSyncService");
|
||||
|
||||
LocalSyncService({
|
||||
required DriftLocalAlbumRepository localAlbumRepository,
|
||||
required NativeSyncApi nativeSyncApi,
|
||||
required StoreService storeService,
|
||||
Platform? platform,
|
||||
}) : _localAlbumRepository = localAlbumRepository,
|
||||
_nativeSyncApi = nativeSyncApi,
|
||||
_storeService = storeService,
|
||||
_platform = platform ?? const LocalPlatform();
|
||||
|
||||
bool get _ignoreIcloudAssets =>
|
||||
_storeService.get(StoreKey.ignoreIcloudAssets, false) == true;
|
||||
|
||||
Future<void> sync({bool full = false}) async {
|
||||
final Stopwatch stopwatch = Stopwatch()..start();
|
||||
try {
|
||||
@@ -84,11 +76,7 @@ class LocalSyncService {
|
||||
);
|
||||
continue;
|
||||
}
|
||||
if (_ignoreIcloudAssets) {
|
||||
await removeAlbum(dbAlbum);
|
||||
} else {
|
||||
await updateAlbum(dbAlbum, album);
|
||||
}
|
||||
await updateAlbum(dbAlbum, album);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -106,12 +94,7 @@ class LocalSyncService {
|
||||
try {
|
||||
final Stopwatch stopwatch = Stopwatch()..start();
|
||||
|
||||
List<PlatformAlbum> deviceAlbums =
|
||||
List.of(await _nativeSyncApi.getAlbums());
|
||||
if (_platform.isIOS && _ignoreIcloudAssets) {
|
||||
deviceAlbums.removeWhere((album) => album.isCloud);
|
||||
}
|
||||
|
||||
final deviceAlbums = await _nativeSyncApi.getAlbums();
|
||||
final dbAlbums =
|
||||
await _localAlbumRepository.getAll(sortBy: {SortLocalAlbumsBy.id});
|
||||
|
||||
|
||||
60
mobile/lib/domain/services/remote_album.service.dart
Normal file
@@ -0,0 +1,60 @@
|
||||
import 'package:immich_mobile/domain/models/album/album.model.dart';
|
||||
import 'package:immich_mobile/infrastructure/repositories/remote_album.repository.dart';
|
||||
import 'package:immich_mobile/models/albums/album_search.model.dart';
|
||||
import 'package:immich_mobile/utils/remote_album.utils.dart';
|
||||
|
||||
class RemoteAlbumService {
|
||||
final DriftRemoteAlbumRepository _repository;
|
||||
|
||||
const RemoteAlbumService(this._repository);
|
||||
|
||||
Future<List<Album>> getAll() {
|
||||
return _repository.getAll();
|
||||
}
|
||||
|
||||
List<Album> sortAlbums(
|
||||
List<Album> albums,
|
||||
RemoteAlbumSortMode sortMode, {
|
||||
bool isReverse = false,
|
||||
}) {
|
||||
return sortMode.sortFn(albums, isReverse);
|
||||
}
|
||||
|
||||
List<Album> searchAlbums(
|
||||
List<Album> albums,
|
||||
String query,
|
||||
String? userId, [
|
||||
QuickFilterMode filterMode = QuickFilterMode.all,
|
||||
]) {
|
||||
final lowerQuery = query.toLowerCase();
|
||||
List<Album> filtered = albums;
|
||||
|
||||
// Apply text search filter
|
||||
if (query.isNotEmpty) {
|
||||
filtered = filtered
|
||||
.where(
|
||||
(album) =>
|
||||
album.name.toLowerCase().contains(lowerQuery) ||
|
||||
album.description.toLowerCase().contains(lowerQuery),
|
||||
)
|
||||
.toList();
|
||||
}
|
||||
|
||||
if (userId != null) {
|
||||
switch (filterMode) {
|
||||
case QuickFilterMode.myAlbums:
|
||||
filtered =
|
||||
filtered.where((album) => album.ownerId == userId).toList();
|
||||
break;
|
||||
case QuickFilterMode.sharedWithMe:
|
||||
filtered =
|
||||
filtered.where((album) => album.ownerId != userId).toList();
|
||||
break;
|
||||
case QuickFilterMode.all:
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return filtered;
|
||||
}
|
||||
}
|
||||
@@ -76,11 +76,76 @@ class SyncStreamService {
|
||||
case SyncEntityType.assetExifV1:
|
||||
return _syncStreamRepository.updateAssetsExifV1(data.cast());
|
||||
case SyncEntityType.partnerAssetV1:
|
||||
return _syncStreamRepository.updatePartnerAssetsV1(data.cast());
|
||||
return _syncStreamRepository.updateAssetsV1(
|
||||
data.cast(),
|
||||
debugLabel: 'partner',
|
||||
);
|
||||
case SyncEntityType.partnerAssetBackfillV1:
|
||||
return _syncStreamRepository.updateAssetsV1(
|
||||
data.cast(),
|
||||
debugLabel: 'partner backfill',
|
||||
);
|
||||
case SyncEntityType.partnerAssetDeleteV1:
|
||||
return _syncStreamRepository.deletePartnerAssetsV1(data.cast());
|
||||
return _syncStreamRepository.deleteAssetsV1(
|
||||
data.cast(),
|
||||
debugLabel: "partner",
|
||||
);
|
||||
case SyncEntityType.partnerAssetExifV1:
|
||||
return _syncStreamRepository.updatePartnerAssetsExifV1(data.cast());
|
||||
return _syncStreamRepository.updateAssetsExifV1(
|
||||
data.cast(),
|
||||
debugLabel: 'partner',
|
||||
);
|
||||
case SyncEntityType.partnerAssetExifBackfillV1:
|
||||
return _syncStreamRepository.updateAssetsExifV1(
|
||||
data.cast(),
|
||||
debugLabel: 'partner backfill',
|
||||
);
|
||||
case SyncEntityType.albumV1:
|
||||
return _syncStreamRepository.updateAlbumsV1(data.cast());
|
||||
case SyncEntityType.albumDeleteV1:
|
||||
return _syncStreamRepository.deleteAlbumsV1(data.cast());
|
||||
case SyncEntityType.albumUserV1:
|
||||
return _syncStreamRepository.updateAlbumUsersV1(data.cast());
|
||||
case SyncEntityType.albumUserBackfillV1:
|
||||
return _syncStreamRepository.updateAlbumUsersV1(
|
||||
data.cast(),
|
||||
debugLabel: 'backfill',
|
||||
);
|
||||
case SyncEntityType.albumUserDeleteV1:
|
||||
return _syncStreamRepository.deleteAlbumUsersV1(data.cast());
|
||||
case SyncEntityType.albumAssetV1:
|
||||
return _syncStreamRepository.updateAssetsV1(
|
||||
data.cast(),
|
||||
debugLabel: 'album',
|
||||
);
|
||||
case SyncEntityType.albumAssetBackfillV1:
|
||||
return _syncStreamRepository.updateAssetsV1(
|
||||
data.cast(),
|
||||
debugLabel: 'album backfill',
|
||||
);
|
||||
case SyncEntityType.albumAssetExifV1:
|
||||
return _syncStreamRepository.updateAssetsExifV1(
|
||||
data.cast(),
|
||||
debugLabel: 'album',
|
||||
);
|
||||
case SyncEntityType.albumAssetExifBackfillV1:
|
||||
return _syncStreamRepository.updateAssetsExifV1(
|
||||
data.cast(),
|
||||
debugLabel: 'album backfill',
|
||||
);
|
||||
case SyncEntityType.albumToAssetV1:
|
||||
return _syncStreamRepository.updateAlbumToAssetsV1(data.cast());
|
||||
case SyncEntityType.albumToAssetBackfillV1:
|
||||
return _syncStreamRepository.updateAlbumToAssetsV1(
|
||||
data.cast(),
|
||||
debugLabel: 'backfill',
|
||||
);
|
||||
case SyncEntityType.albumToAssetDeleteV1:
|
||||
return _syncStreamRepository.deleteAlbumToAssetsV1(data.cast());
|
||||
// No-op. SyncAckV1 entities are checkpoints in the sync stream
|
||||
// to acknowledge that the client has processed all the backfill events
|
||||
case SyncEntityType.syncAckV1:
|
||||
return;
|
||||
default:
|
||||
_logger.warning("Unknown sync data type: $type");
|
||||
}
|
||||
|
||||
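The SyncStreamService hunk above collapses the per-type partner/album handlers into shared updateAssetsV1 / updateAssetsExifV1 calls that differ only by a debugLabel. A minimal Dart sketch of that pattern, with illustrative names rather than the real repository API:

// Illustrative only: one shared handler reused by several sync entity
// types, where debugLabel affects logging but not behaviour.
Future<void> upsertAssets(List<String> ids, {String debugLabel = 'main'}) async {
  // ...write the rows to the local database here...
  print('[$debugLabel] upserted ${ids.length} assets');
}

Future<void> main() async {
  await upsertAssets(['a1', 'a2'], debugLabel: 'partner backfill');
  await upsertAssets(['b1'], debugLabel: 'album');
}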
@@ -45,6 +45,13 @@ class TimelineFactory {
        bucketSource: () =>
            _timelineRepository.watchLocalBucket(albumId, groupBy: groupBy),
      );

  TimelineService remoteAlbum({required String albumId}) => TimelineService(
        assetSource: (offset, count) => _timelineRepository
            .getRemoteBucketAssets(albumId, offset: offset, count: count),
        bucketSource: () =>
            _timelineRepository.watchRemoteBucket(albumId, groupBy: groupBy),
      );
}

class TimelineService {
@@ -57,7 +64,7 @@ class TimelineService {
  }) : _assetSource = assetSource,
       _bucketSource = bucketSource {
    _bucketSubscription =
        _bucketSource().listen((_) => unawaited(_reloadBucket()));
        _bucketSource().listen((_) => unawaited(reloadBucket()));
  }

  final AsyncMutex _mutex = AsyncMutex();
@@ -67,7 +74,7 @@ class TimelineService {

  Stream<List<Bucket>> Function() get watchBuckets => _bucketSource;

  Future<void> _reloadBucket() => _mutex.run(() async {
  Future<void> reloadBucket() => _mutex.run(() async {
        _buffer = await _assetSource(_bufferOffset, _buffer.length);
      });

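reloadBucket serialises its work through an AsyncMutex so overlapping bucket reloads cannot interleave. A hedged sketch of that idea; the app's actual AsyncMutex implementation may differ:

// Minimal async mutex: each run() waits for the previous task to settle
// before starting, so reloads execute one at a time.
class AsyncMutex {
  Future<void> _last = Future.value();

  Future<T> run<T>(Future<T> Function() task) {
    final result = _last.then((_) => task());
    // Keep the chain alive even if the task throws, so later runs still execute.
    _last = result.then((_) {}, onError: (_) {});
    return result;
  }
}

Future<void> main() async {
  final mutex = AsyncMutex();
  await Future.wait([
    mutex.run(() async => print('first')),
    mutex.run(() async => print('second')), // always runs after 'first'
  ]);
}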
@@ -554,18 +554,12 @@ class Asset {
}""";
  }

  static getVisibility(AssetVisibility visibility) {
    switch (visibility) {
      case AssetVisibility.timeline:
        return AssetVisibilityEnum.timeline;
      case AssetVisibility.archive:
        return AssetVisibilityEnum.archive;
      case AssetVisibility.hidden:
        return AssetVisibilityEnum.hidden;
      case AssetVisibility.locked:
        return AssetVisibilityEnum.locked;
    }
  }
  static getVisibility(AssetVisibility visibility) => switch (visibility) {
        AssetVisibility.archive => AssetVisibilityEnum.archive,
        AssetVisibility.hidden => AssetVisibilityEnum.hidden,
        AssetVisibility.locked => AssetVisibilityEnum.locked,
        AssetVisibility.timeline || _ => AssetVisibilityEnum.timeline,
      };
}

enum AssetType {

@@ -11,7 +11,7 @@ class SSLClientCertStoreVal {
  final Uint8List data;
  final String? password;

  SSLClientCertStoreVal(this.data, this.password);
  const SSLClientCertStoreVal(this.data, this.password);

  void save() {
    final b64Str = base64Encode(data);

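save() persists the certificate bytes through base64Encode, as shown above. A small Dart illustration of that round trip; the store-key handling is omitted and this is not the real persistence code:

import 'dart:convert';
import 'dart:typed_data';

void main() {
  // Stand-in for the client certificate bytes held in `data`.
  final Uint8List data = Uint8List.fromList([0x30, 0x82, 0x01, 0x0a]);
  final b64Str = base64Encode(data);     // what save() writes as a string
  final restored = base64Decode(b64Str); // what a matching load would read back
  print(restored); // [48, 130, 1, 10]
}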
@@ -116,15 +116,15 @@ class RemoteExifEntity extends Table with DriftDefaultsMixin {

  TextColumn get exposureTime => text().nullable()();

  IntColumn get fNumber => integer().nullable()();
  RealColumn get fNumber => real().nullable()();

  IntColumn get fileSize => integer().nullable()();

  IntColumn get focalLength => integer().nullable()();
  RealColumn get focalLength => real().nullable()();

  IntColumn get latitude => integer().nullable()();
  RealColumn get latitude => real().nullable()();

  IntColumn get longitude => integer().nullable()();
  RealColumn get longitude => real().nullable()();

  IntColumn get iso => integer().nullable()();

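These columns move from integer to real because the underlying EXIF fields are fractional: f-numbers, focal lengths and GPS coordinates. A plain-Dart illustration of values an integer column would have truncated; the field names mirror the table above, the sample values are invented:

// Sample values only; an int column would store 1, 26, 48 and 2 here.
class ExifSample {
  final double fNumber;
  final double focalLength;
  final double latitude;
  final double longitude;

  const ExifSample(this.fNumber, this.focalLength, this.latitude, this.longitude);
}

void main() {
  const sample = ExifSample(1.8, 26.0, 48.8566, 2.3522);
  print('f/${sample.fNumber} @ ${sample.focalLength}mm '
      '(${sample.latitude}, ${sample.longitude})');
}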
@@ -19,11 +19,11 @@ typedef $$RemoteExifEntityTableCreateCompanionBuilder
|
||||
i0.Value<int?> height,
|
||||
i0.Value<int?> width,
|
||||
i0.Value<String?> exposureTime,
|
||||
i0.Value<int?> fNumber,
|
||||
i0.Value<double?> fNumber,
|
||||
i0.Value<int?> fileSize,
|
||||
i0.Value<int?> focalLength,
|
||||
i0.Value<int?> latitude,
|
||||
i0.Value<int?> longitude,
|
||||
i0.Value<double?> focalLength,
|
||||
i0.Value<double?> latitude,
|
||||
i0.Value<double?> longitude,
|
||||
i0.Value<int?> iso,
|
||||
i0.Value<String?> make,
|
||||
i0.Value<String?> model,
|
||||
@@ -43,11 +43,11 @@ typedef $$RemoteExifEntityTableUpdateCompanionBuilder
|
||||
i0.Value<int?> height,
|
||||
i0.Value<int?> width,
|
||||
i0.Value<String?> exposureTime,
|
||||
i0.Value<int?> fNumber,
|
||||
i0.Value<double?> fNumber,
|
||||
i0.Value<int?> fileSize,
|
||||
i0.Value<int?> focalLength,
|
||||
i0.Value<int?> latitude,
|
||||
i0.Value<int?> longitude,
|
||||
i0.Value<double?> focalLength,
|
||||
i0.Value<double?> latitude,
|
||||
i0.Value<double?> longitude,
|
||||
i0.Value<int?> iso,
|
||||
i0.Value<String?> make,
|
||||
i0.Value<String?> model,
|
||||
@@ -125,20 +125,20 @@ class $$RemoteExifEntityTableFilterComposer
|
||||
column: $table.exposureTime,
|
||||
builder: (column) => i0.ColumnFilters(column));
|
||||
|
||||
i0.ColumnFilters<int> get fNumber => $composableBuilder(
|
||||
i0.ColumnFilters<double> get fNumber => $composableBuilder(
|
||||
column: $table.fNumber, builder: (column) => i0.ColumnFilters(column));
|
||||
|
||||
i0.ColumnFilters<int> get fileSize => $composableBuilder(
|
||||
column: $table.fileSize, builder: (column) => i0.ColumnFilters(column));
|
||||
|
||||
i0.ColumnFilters<int> get focalLength => $composableBuilder(
|
||||
i0.ColumnFilters<double> get focalLength => $composableBuilder(
|
||||
column: $table.focalLength,
|
||||
builder: (column) => i0.ColumnFilters(column));
|
||||
|
||||
i0.ColumnFilters<int> get latitude => $composableBuilder(
|
||||
i0.ColumnFilters<double> get latitude => $composableBuilder(
|
||||
column: $table.latitude, builder: (column) => i0.ColumnFilters(column));
|
||||
|
||||
i0.ColumnFilters<int> get longitude => $composableBuilder(
|
||||
i0.ColumnFilters<double> get longitude => $composableBuilder(
|
||||
column: $table.longitude, builder: (column) => i0.ColumnFilters(column));
|
||||
|
||||
i0.ColumnFilters<int> get iso => $composableBuilder(
|
||||
@@ -223,20 +223,20 @@ class $$RemoteExifEntityTableOrderingComposer
|
||||
column: $table.exposureTime,
|
||||
builder: (column) => i0.ColumnOrderings(column));
|
||||
|
||||
i0.ColumnOrderings<int> get fNumber => $composableBuilder(
|
||||
i0.ColumnOrderings<double> get fNumber => $composableBuilder(
|
||||
column: $table.fNumber, builder: (column) => i0.ColumnOrderings(column));
|
||||
|
||||
i0.ColumnOrderings<int> get fileSize => $composableBuilder(
|
||||
column: $table.fileSize, builder: (column) => i0.ColumnOrderings(column));
|
||||
|
||||
i0.ColumnOrderings<int> get focalLength => $composableBuilder(
|
||||
i0.ColumnOrderings<double> get focalLength => $composableBuilder(
|
||||
column: $table.focalLength,
|
||||
builder: (column) => i0.ColumnOrderings(column));
|
||||
|
||||
i0.ColumnOrderings<int> get latitude => $composableBuilder(
|
||||
i0.ColumnOrderings<double> get latitude => $composableBuilder(
|
||||
column: $table.latitude, builder: (column) => i0.ColumnOrderings(column));
|
||||
|
||||
i0.ColumnOrderings<int> get longitude => $composableBuilder(
|
||||
i0.ColumnOrderings<double> get longitude => $composableBuilder(
|
||||
column: $table.longitude,
|
||||
builder: (column) => i0.ColumnOrderings(column));
|
||||
|
||||
@@ -321,19 +321,19 @@ class $$RemoteExifEntityTableAnnotationComposer
|
||||
i0.GeneratedColumn<String> get exposureTime => $composableBuilder(
|
||||
column: $table.exposureTime, builder: (column) => column);
|
||||
|
||||
i0.GeneratedColumn<int> get fNumber =>
|
||||
i0.GeneratedColumn<double> get fNumber =>
|
||||
$composableBuilder(column: $table.fNumber, builder: (column) => column);
|
||||
|
||||
i0.GeneratedColumn<int> get fileSize =>
|
||||
$composableBuilder(column: $table.fileSize, builder: (column) => column);
|
||||
|
||||
i0.GeneratedColumn<int> get focalLength => $composableBuilder(
|
||||
i0.GeneratedColumn<double> get focalLength => $composableBuilder(
|
||||
column: $table.focalLength, builder: (column) => column);
|
||||
|
||||
i0.GeneratedColumn<int> get latitude =>
|
||||
i0.GeneratedColumn<double> get latitude =>
|
||||
$composableBuilder(column: $table.latitude, builder: (column) => column);
|
||||
|
||||
i0.GeneratedColumn<int> get longitude =>
|
||||
i0.GeneratedColumn<double> get longitude =>
|
||||
$composableBuilder(column: $table.longitude, builder: (column) => column);
|
||||
|
||||
i0.GeneratedColumn<int> get iso =>
|
||||
@@ -416,11 +416,11 @@ class $$RemoteExifEntityTableTableManager extends i0.RootTableManager<
|
||||
i0.Value<int?> height = const i0.Value.absent(),
|
||||
i0.Value<int?> width = const i0.Value.absent(),
|
||||
i0.Value<String?> exposureTime = const i0.Value.absent(),
|
||||
i0.Value<int?> fNumber = const i0.Value.absent(),
|
||||
i0.Value<double?> fNumber = const i0.Value.absent(),
|
||||
i0.Value<int?> fileSize = const i0.Value.absent(),
|
||||
i0.Value<int?> focalLength = const i0.Value.absent(),
|
||||
i0.Value<int?> latitude = const i0.Value.absent(),
|
||||
i0.Value<int?> longitude = const i0.Value.absent(),
|
||||
i0.Value<double?> focalLength = const i0.Value.absent(),
|
||||
i0.Value<double?> latitude = const i0.Value.absent(),
|
||||
i0.Value<double?> longitude = const i0.Value.absent(),
|
||||
i0.Value<int?> iso = const i0.Value.absent(),
|
||||
i0.Value<String?> make = const i0.Value.absent(),
|
||||
i0.Value<String?> model = const i0.Value.absent(),
|
||||
@@ -462,11 +462,11 @@ class $$RemoteExifEntityTableTableManager extends i0.RootTableManager<
|
||||
i0.Value<int?> height = const i0.Value.absent(),
|
||||
i0.Value<int?> width = const i0.Value.absent(),
|
||||
i0.Value<String?> exposureTime = const i0.Value.absent(),
|
||||
i0.Value<int?> fNumber = const i0.Value.absent(),
|
||||
i0.Value<double?> fNumber = const i0.Value.absent(),
|
||||
i0.Value<int?> fileSize = const i0.Value.absent(),
|
||||
i0.Value<int?> focalLength = const i0.Value.absent(),
|
||||
i0.Value<int?> latitude = const i0.Value.absent(),
|
||||
i0.Value<int?> longitude = const i0.Value.absent(),
|
||||
i0.Value<double?> focalLength = const i0.Value.absent(),
|
||||
i0.Value<double?> latitude = const i0.Value.absent(),
|
||||
i0.Value<double?> longitude = const i0.Value.absent(),
|
||||
i0.Value<int?> iso = const i0.Value.absent(),
|
||||
i0.Value<String?> make = const i0.Value.absent(),
|
||||
i0.Value<String?> model = const i0.Value.absent(),
|
||||
@@ -622,9 +622,9 @@ class $RemoteExifEntityTable extends i2.RemoteExifEntity
|
||||
static const i0.VerificationMeta _fNumberMeta =
|
||||
const i0.VerificationMeta('fNumber');
|
||||
@override
|
||||
late final i0.GeneratedColumn<int> fNumber = i0.GeneratedColumn<int>(
|
||||
late final i0.GeneratedColumn<double> fNumber = i0.GeneratedColumn<double>(
|
||||
'f_number', aliasedName, true,
|
||||
type: i0.DriftSqlType.int, requiredDuringInsert: false);
|
||||
type: i0.DriftSqlType.double, requiredDuringInsert: false);
|
||||
static const i0.VerificationMeta _fileSizeMeta =
|
||||
const i0.VerificationMeta('fileSize');
|
||||
@override
|
||||
@@ -634,21 +634,21 @@ class $RemoteExifEntityTable extends i2.RemoteExifEntity
|
||||
static const i0.VerificationMeta _focalLengthMeta =
|
||||
const i0.VerificationMeta('focalLength');
|
||||
@override
|
||||
late final i0.GeneratedColumn<int> focalLength = i0.GeneratedColumn<int>(
|
||||
'focal_length', aliasedName, true,
|
||||
type: i0.DriftSqlType.int, requiredDuringInsert: false);
|
||||
late final i0.GeneratedColumn<double> focalLength =
|
||||
i0.GeneratedColumn<double>('focal_length', aliasedName, true,
|
||||
type: i0.DriftSqlType.double, requiredDuringInsert: false);
|
||||
static const i0.VerificationMeta _latitudeMeta =
|
||||
const i0.VerificationMeta('latitude');
|
||||
@override
|
||||
late final i0.GeneratedColumn<int> latitude = i0.GeneratedColumn<int>(
|
||||
late final i0.GeneratedColumn<double> latitude = i0.GeneratedColumn<double>(
|
||||
'latitude', aliasedName, true,
|
||||
type: i0.DriftSqlType.int, requiredDuringInsert: false);
|
||||
type: i0.DriftSqlType.double, requiredDuringInsert: false);
|
||||
static const i0.VerificationMeta _longitudeMeta =
|
||||
const i0.VerificationMeta('longitude');
|
||||
@override
|
||||
late final i0.GeneratedColumn<int> longitude = i0.GeneratedColumn<int>(
|
||||
late final i0.GeneratedColumn<double> longitude = i0.GeneratedColumn<double>(
|
||||
'longitude', aliasedName, true,
|
||||
type: i0.DriftSqlType.int, requiredDuringInsert: false);
|
||||
type: i0.DriftSqlType.double, requiredDuringInsert: false);
|
||||
static const i0.VerificationMeta _isoMeta = const i0.VerificationMeta('iso');
|
||||
@override
|
||||
late final i0.GeneratedColumn<int> iso = i0.GeneratedColumn<int>(
|
||||
@@ -853,15 +853,15 @@ class $RemoteExifEntityTable extends i2.RemoteExifEntity
|
||||
exposureTime: attachedDatabase.typeMapping.read(
|
||||
i0.DriftSqlType.string, data['${effectivePrefix}exposure_time']),
|
||||
fNumber: attachedDatabase.typeMapping
|
||||
.read(i0.DriftSqlType.int, data['${effectivePrefix}f_number']),
|
||||
.read(i0.DriftSqlType.double, data['${effectivePrefix}f_number']),
|
||||
fileSize: attachedDatabase.typeMapping
|
||||
.read(i0.DriftSqlType.int, data['${effectivePrefix}file_size']),
|
||||
focalLength: attachedDatabase.typeMapping
|
||||
.read(i0.DriftSqlType.int, data['${effectivePrefix}focal_length']),
|
||||
.read(i0.DriftSqlType.double, data['${effectivePrefix}focal_length']),
|
||||
latitude: attachedDatabase.typeMapping
|
||||
.read(i0.DriftSqlType.int, data['${effectivePrefix}latitude']),
|
||||
.read(i0.DriftSqlType.double, data['${effectivePrefix}latitude']),
|
||||
longitude: attachedDatabase.typeMapping
|
||||
.read(i0.DriftSqlType.int, data['${effectivePrefix}longitude']),
|
||||
.read(i0.DriftSqlType.double, data['${effectivePrefix}longitude']),
|
||||
iso: attachedDatabase.typeMapping
|
||||
.read(i0.DriftSqlType.int, data['${effectivePrefix}iso']),
|
||||
make: attachedDatabase.typeMapping
|
||||
@@ -901,11 +901,11 @@ class RemoteExifEntityData extends i0.DataClass
|
||||
final int? height;
|
||||
final int? width;
|
||||
final String? exposureTime;
|
||||
final int? fNumber;
|
||||
final double? fNumber;
|
||||
final int? fileSize;
|
||||
final int? focalLength;
|
||||
final int? latitude;
|
||||
final int? longitude;
|
||||
final double? focalLength;
|
||||
final double? latitude;
|
||||
final double? longitude;
|
||||
final int? iso;
|
||||
final String? make;
|
||||
final String? model;
|
||||
@@ -964,19 +964,19 @@ class RemoteExifEntityData extends i0.DataClass
|
||||
map['exposure_time'] = i0.Variable<String>(exposureTime);
|
||||
}
|
||||
if (!nullToAbsent || fNumber != null) {
|
||||
map['f_number'] = i0.Variable<int>(fNumber);
|
||||
map['f_number'] = i0.Variable<double>(fNumber);
|
||||
}
|
||||
if (!nullToAbsent || fileSize != null) {
|
||||
map['file_size'] = i0.Variable<int>(fileSize);
|
||||
}
|
||||
if (!nullToAbsent || focalLength != null) {
|
||||
map['focal_length'] = i0.Variable<int>(focalLength);
|
||||
map['focal_length'] = i0.Variable<double>(focalLength);
|
||||
}
|
||||
if (!nullToAbsent || latitude != null) {
|
||||
map['latitude'] = i0.Variable<int>(latitude);
|
||||
map['latitude'] = i0.Variable<double>(latitude);
|
||||
}
|
||||
if (!nullToAbsent || longitude != null) {
|
||||
map['longitude'] = i0.Variable<int>(longitude);
|
||||
map['longitude'] = i0.Variable<double>(longitude);
|
||||
}
|
||||
if (!nullToAbsent || iso != null) {
|
||||
map['iso'] = i0.Variable<int>(iso);
|
||||
@@ -1016,11 +1016,11 @@ class RemoteExifEntityData extends i0.DataClass
|
||||
height: serializer.fromJson<int?>(json['height']),
|
||||
width: serializer.fromJson<int?>(json['width']),
|
||||
exposureTime: serializer.fromJson<String?>(json['exposureTime']),
|
||||
fNumber: serializer.fromJson<int?>(json['fNumber']),
|
||||
fNumber: serializer.fromJson<double?>(json['fNumber']),
|
||||
fileSize: serializer.fromJson<int?>(json['fileSize']),
|
||||
focalLength: serializer.fromJson<int?>(json['focalLength']),
|
||||
latitude: serializer.fromJson<int?>(json['latitude']),
|
||||
longitude: serializer.fromJson<int?>(json['longitude']),
|
||||
focalLength: serializer.fromJson<double?>(json['focalLength']),
|
||||
latitude: serializer.fromJson<double?>(json['latitude']),
|
||||
longitude: serializer.fromJson<double?>(json['longitude']),
|
||||
iso: serializer.fromJson<int?>(json['iso']),
|
||||
make: serializer.fromJson<String?>(json['make']),
|
||||
model: serializer.fromJson<String?>(json['model']),
|
||||
@@ -1043,11 +1043,11 @@ class RemoteExifEntityData extends i0.DataClass
|
||||
'height': serializer.toJson<int?>(height),
|
||||
'width': serializer.toJson<int?>(width),
|
||||
'exposureTime': serializer.toJson<String?>(exposureTime),
|
||||
'fNumber': serializer.toJson<int?>(fNumber),
|
||||
'fNumber': serializer.toJson<double?>(fNumber),
|
||||
'fileSize': serializer.toJson<int?>(fileSize),
|
||||
'focalLength': serializer.toJson<int?>(focalLength),
|
||||
'latitude': serializer.toJson<int?>(latitude),
|
||||
'longitude': serializer.toJson<int?>(longitude),
|
||||
'focalLength': serializer.toJson<double?>(focalLength),
|
||||
'latitude': serializer.toJson<double?>(latitude),
|
||||
'longitude': serializer.toJson<double?>(longitude),
|
||||
'iso': serializer.toJson<int?>(iso),
|
||||
'make': serializer.toJson<String?>(make),
|
||||
'model': serializer.toJson<String?>(model),
|
||||
@@ -1068,11 +1068,11 @@ class RemoteExifEntityData extends i0.DataClass
|
||||
i0.Value<int?> height = const i0.Value.absent(),
|
||||
i0.Value<int?> width = const i0.Value.absent(),
|
||||
i0.Value<String?> exposureTime = const i0.Value.absent(),
|
||||
i0.Value<int?> fNumber = const i0.Value.absent(),
|
||||
i0.Value<double?> fNumber = const i0.Value.absent(),
|
||||
i0.Value<int?> fileSize = const i0.Value.absent(),
|
||||
i0.Value<int?> focalLength = const i0.Value.absent(),
|
||||
i0.Value<int?> latitude = const i0.Value.absent(),
|
||||
i0.Value<int?> longitude = const i0.Value.absent(),
|
||||
i0.Value<double?> focalLength = const i0.Value.absent(),
|
||||
i0.Value<double?> latitude = const i0.Value.absent(),
|
||||
i0.Value<double?> longitude = const i0.Value.absent(),
|
||||
i0.Value<int?> iso = const i0.Value.absent(),
|
||||
i0.Value<String?> make = const i0.Value.absent(),
|
||||
i0.Value<String?> model = const i0.Value.absent(),
|
||||
@@ -1232,11 +1232,11 @@ class RemoteExifEntityCompanion
|
||||
final i0.Value<int?> height;
|
||||
final i0.Value<int?> width;
|
||||
final i0.Value<String?> exposureTime;
|
||||
final i0.Value<int?> fNumber;
|
||||
final i0.Value<double?> fNumber;
|
||||
final i0.Value<int?> fileSize;
|
||||
final i0.Value<int?> focalLength;
|
||||
final i0.Value<int?> latitude;
|
||||
final i0.Value<int?> longitude;
|
||||
final i0.Value<double?> focalLength;
|
||||
final i0.Value<double?> latitude;
|
||||
final i0.Value<double?> longitude;
|
||||
final i0.Value<int?> iso;
|
||||
final i0.Value<String?> make;
|
||||
final i0.Value<String?> model;
|
||||
@@ -1300,11 +1300,11 @@ class RemoteExifEntityCompanion
|
||||
i0.Expression<int>? height,
|
||||
i0.Expression<int>? width,
|
||||
i0.Expression<String>? exposureTime,
|
||||
i0.Expression<int>? fNumber,
|
||||
i0.Expression<double>? fNumber,
|
||||
i0.Expression<int>? fileSize,
|
||||
i0.Expression<int>? focalLength,
|
||||
i0.Expression<int>? latitude,
|
||||
i0.Expression<int>? longitude,
|
||||
i0.Expression<double>? focalLength,
|
||||
i0.Expression<double>? latitude,
|
||||
i0.Expression<double>? longitude,
|
||||
i0.Expression<int>? iso,
|
||||
i0.Expression<String>? make,
|
||||
i0.Expression<String>? model,
|
||||
@@ -1348,11 +1348,11 @@ class RemoteExifEntityCompanion
|
||||
i0.Value<int?>? height,
|
||||
i0.Value<int?>? width,
|
||||
i0.Value<String?>? exposureTime,
|
||||
i0.Value<int?>? fNumber,
|
||||
i0.Value<double?>? fNumber,
|
||||
i0.Value<int?>? fileSize,
|
||||
i0.Value<int?>? focalLength,
|
||||
i0.Value<int?>? latitude,
|
||||
i0.Value<int?>? longitude,
|
||||
i0.Value<double?>? focalLength,
|
||||
i0.Value<double?>? latitude,
|
||||
i0.Value<double?>? longitude,
|
||||
i0.Value<int?>? iso,
|
||||
i0.Value<String?>? make,
|
||||
i0.Value<String?>? model,
|
||||
@@ -1416,19 +1416,19 @@ class RemoteExifEntityCompanion
|
||||
map['exposure_time'] = i0.Variable<String>(exposureTime.value);
|
||||
}
|
||||
if (fNumber.present) {
|
||||
map['f_number'] = i0.Variable<int>(fNumber.value);
|
||||
map['f_number'] = i0.Variable<double>(fNumber.value);
|
||||
}
|
||||
if (fileSize.present) {
|
||||
map['file_size'] = i0.Variable<int>(fileSize.value);
|
||||
}
|
||||
if (focalLength.present) {
|
||||
map['focal_length'] = i0.Variable<int>(focalLength.value);
|
||||
map['focal_length'] = i0.Variable<double>(focalLength.value);
|
||||
}
|
||||
if (latitude.present) {
|
||||
map['latitude'] = i0.Variable<int>(latitude.value);
|
||||
map['latitude'] = i0.Variable<double>(latitude.value);
|
||||
}
|
||||
if (longitude.present) {
|
||||
map['longitude'] = i0.Variable<int>(longitude.value);
|
||||
map['longitude'] = i0.Variable<double>(longitude.value);
|
||||
}
|
||||
if (iso.present) {
|
||||
map['iso'] = i0.Variable<int>(iso.value);
|
||||
|
||||
@@ -1,5 +1,5 @@
import 'package:drift/drift.dart';
import 'package:immich_mobile/domain/models/local_album.model.dart';
import 'package:immich_mobile/domain/models/album/local_album.model.dart';
import 'package:immich_mobile/infrastructure/utils/drift_default.mixin.dart';

class LocalAlbumEntity extends Table with DriftDefaultsMixin {

@@ -3,7 +3,7 @@
import 'package:drift/drift.dart' as i0;
import 'package:immich_mobile/infrastructure/entities/local_album.entity.drift.dart'
    as i1;
import 'package:immich_mobile/domain/models/local_album.model.dart' as i2;
import 'package:immich_mobile/domain/models/album/local_album.model.dart' as i2;
import 'package:immich_mobile/infrastructure/entities/local_album.entity.dart'
    as i3;
import 'package:drift/src/runtime/query_builder/query_builder.dart' as i4;

34
mobile/lib/infrastructure/entities/remote_album.entity.dart
Normal file
@@ -0,0 +1,34 @@
import 'package:drift/drift.dart';
import 'package:immich_mobile/domain/models/album/album.model.dart';
import 'package:immich_mobile/infrastructure/entities/remote_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/user.entity.dart';
import 'package:immich_mobile/infrastructure/utils/drift_default.mixin.dart';

class RemoteAlbumEntity extends Table with DriftDefaultsMixin {
  const RemoteAlbumEntity();

  TextColumn get id => text()();

  TextColumn get name => text()();

  TextColumn get description => text().withDefault(const Constant(''))();

  DateTimeColumn get createdAt => dateTime().withDefault(currentDateAndTime)();

  DateTimeColumn get updatedAt => dateTime().withDefault(currentDateAndTime)();

  TextColumn get ownerId =>
      text().references(UserEntity, #id, onDelete: KeyAction.cascade)();

  TextColumn get thumbnailAssetId => text()
      .references(RemoteAssetEntity, #id, onDelete: KeyAction.setNull)
      .nullable()();

  BoolColumn get isActivityEnabled =>
      boolean().withDefault(const Constant(true))();

  IntColumn get order => intEnum<AlbumAssetOrder>()();

  @override
  Set<Column> get primaryKey => {id};
}
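The order column is stored with intEnum<AlbumAssetOrder>(), i.e. by enum index, which is why the album model file in this diff warns "do not change this order!". A short illustration of the round trip in plain Dart, no Drift required:

// Reordering the enum would silently remap every stored row, because only
// the index is persisted in the column.
enum AlbumAssetOrder { asc, desc }

void main() {
  final stored = AlbumAssetOrder.desc.index;       // 1 is written to the column
  final restored = AlbumAssetOrder.values[stored]; // read back as desc
  print(restored); // AlbumAssetOrder.desc
}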
946
mobile/lib/infrastructure/entities/remote_album.entity.drift.dart
generated
Normal file
@@ -0,0 +1,946 @@
|
||||
// dart format width=80
|
||||
// ignore_for_file: type=lint
|
||||
import 'package:drift/drift.dart' as i0;
|
||||
import 'package:immich_mobile/infrastructure/entities/remote_album.entity.drift.dart'
|
||||
as i1;
|
||||
import 'package:immich_mobile/domain/models/album/album.model.dart' as i2;
|
||||
import 'package:immich_mobile/infrastructure/entities/remote_album.entity.dart'
|
||||
as i3;
|
||||
import 'package:drift/src/runtime/query_builder/query_builder.dart' as i4;
|
||||
import 'package:immich_mobile/infrastructure/entities/user.entity.drift.dart'
|
||||
as i5;
|
||||
import 'package:drift/internal/modular.dart' as i6;
|
||||
import 'package:immich_mobile/infrastructure/entities/remote_asset.entity.drift.dart'
|
||||
as i7;
|
||||
|
||||
typedef $$RemoteAlbumEntityTableCreateCompanionBuilder
|
||||
= i1.RemoteAlbumEntityCompanion Function({
|
||||
required String id,
|
||||
required String name,
|
||||
i0.Value<String> description,
|
||||
i0.Value<DateTime> createdAt,
|
||||
i0.Value<DateTime> updatedAt,
|
||||
required String ownerId,
|
||||
i0.Value<String?> thumbnailAssetId,
|
||||
i0.Value<bool> isActivityEnabled,
|
||||
required i2.AlbumAssetOrder order,
|
||||
});
|
||||
typedef $$RemoteAlbumEntityTableUpdateCompanionBuilder
|
||||
= i1.RemoteAlbumEntityCompanion Function({
|
||||
i0.Value<String> id,
|
||||
i0.Value<String> name,
|
||||
i0.Value<String> description,
|
||||
i0.Value<DateTime> createdAt,
|
||||
i0.Value<DateTime> updatedAt,
|
||||
i0.Value<String> ownerId,
|
||||
i0.Value<String?> thumbnailAssetId,
|
||||
i0.Value<bool> isActivityEnabled,
|
||||
i0.Value<i2.AlbumAssetOrder> order,
|
||||
});
|
||||
|
||||
final class $$RemoteAlbumEntityTableReferences extends i0.BaseReferences<
|
||||
i0.GeneratedDatabase,
|
||||
i1.$RemoteAlbumEntityTable,
|
||||
i1.RemoteAlbumEntityData> {
|
||||
$$RemoteAlbumEntityTableReferences(
|
||||
super.$_db, super.$_table, super.$_typedResult);
|
||||
|
||||
static i5.$UserEntityTable _ownerIdTable(i0.GeneratedDatabase db) =>
|
||||
i6.ReadDatabaseContainer(db)
|
||||
.resultSet<i5.$UserEntityTable>('user_entity')
|
||||
.createAlias(i0.$_aliasNameGenerator(
|
||||
i6.ReadDatabaseContainer(db)
|
||||
.resultSet<i1.$RemoteAlbumEntityTable>('remote_album_entity')
|
||||
.ownerId,
|
||||
i6.ReadDatabaseContainer(db)
|
||||
.resultSet<i5.$UserEntityTable>('user_entity')
|
||||
.id));
|
||||
|
||||
i5.$$UserEntityTableProcessedTableManager get ownerId {
|
||||
final $_column = $_itemColumn<String>('owner_id')!;
|
||||
|
||||
final manager = i5
|
||||
.$$UserEntityTableTableManager(
|
||||
$_db,
|
||||
i6.ReadDatabaseContainer($_db)
|
||||
.resultSet<i5.$UserEntityTable>('user_entity'))
|
||||
.filter((f) => f.id.sqlEquals($_column));
|
||||
final item = $_typedResult.readTableOrNull(_ownerIdTable($_db));
|
||||
if (item == null) return manager;
|
||||
return i0.ProcessedTableManager(
|
||||
manager.$state.copyWith(prefetchedData: [item]));
|
||||
}
|
||||
|
||||
static i7.$RemoteAssetEntityTable _thumbnailAssetIdTable(
|
||||
i0.GeneratedDatabase db) =>
|
||||
i6.ReadDatabaseContainer(db)
|
||||
.resultSet<i7.$RemoteAssetEntityTable>('remote_asset_entity')
|
||||
.createAlias(i0.$_aliasNameGenerator(
|
||||
i6.ReadDatabaseContainer(db)
|
||||
.resultSet<i1.$RemoteAlbumEntityTable>('remote_album_entity')
|
||||
.thumbnailAssetId,
|
||||
i6.ReadDatabaseContainer(db)
|
||||
.resultSet<i7.$RemoteAssetEntityTable>('remote_asset_entity')
|
||||
.id));
|
||||
|
||||
i7.$$RemoteAssetEntityTableProcessedTableManager? get thumbnailAssetId {
|
||||
final $_column = $_itemColumn<String>('thumbnail_asset_id');
|
||||
if ($_column == null) return null;
|
||||
final manager = i7
|
||||
.$$RemoteAssetEntityTableTableManager(
|
||||
$_db,
|
||||
i6.ReadDatabaseContainer($_db)
|
||||
.resultSet<i7.$RemoteAssetEntityTable>('remote_asset_entity'))
|
||||
.filter((f) => f.id.sqlEquals($_column));
|
||||
final item = $_typedResult.readTableOrNull(_thumbnailAssetIdTable($_db));
|
||||
if (item == null) return manager;
|
||||
return i0.ProcessedTableManager(
|
||||
manager.$state.copyWith(prefetchedData: [item]));
|
||||
}
|
||||
}
|
||||
|
||||
class $$RemoteAlbumEntityTableFilterComposer
|
||||
extends i0.Composer<i0.GeneratedDatabase, i1.$RemoteAlbumEntityTable> {
|
||||
$$RemoteAlbumEntityTableFilterComposer({
|
||||
required super.$db,
|
||||
required super.$table,
|
||||
super.joinBuilder,
|
||||
super.$addJoinBuilderToRootComposer,
|
||||
super.$removeJoinBuilderFromRootComposer,
|
||||
});
|
||||
i0.ColumnFilters<String> get id => $composableBuilder(
|
||||
column: $table.id, builder: (column) => i0.ColumnFilters(column));
|
||||
|
||||
i0.ColumnFilters<String> get name => $composableBuilder(
|
||||
column: $table.name, builder: (column) => i0.ColumnFilters(column));
|
||||
|
||||
i0.ColumnFilters<String> get description => $composableBuilder(
|
||||
column: $table.description,
|
||||
builder: (column) => i0.ColumnFilters(column));
|
||||
|
||||
i0.ColumnFilters<DateTime> get createdAt => $composableBuilder(
|
||||
column: $table.createdAt, builder: (column) => i0.ColumnFilters(column));
|
||||
|
||||
i0.ColumnFilters<DateTime> get updatedAt => $composableBuilder(
|
||||
column: $table.updatedAt, builder: (column) => i0.ColumnFilters(column));
|
||||
|
||||
i0.ColumnFilters<bool> get isActivityEnabled => $composableBuilder(
|
||||
column: $table.isActivityEnabled,
|
||||
builder: (column) => i0.ColumnFilters(column));
|
||||
|
||||
i0.ColumnWithTypeConverterFilters<i2.AlbumAssetOrder, i2.AlbumAssetOrder, int>
|
||||
get order => $composableBuilder(
|
||||
column: $table.order,
|
||||
builder: (column) => i0.ColumnWithTypeConverterFilters(column));
|
||||
|
||||
i5.$$UserEntityTableFilterComposer get ownerId {
|
||||
final i5.$$UserEntityTableFilterComposer composer = $composerBuilder(
|
||||
composer: this,
|
||||
getCurrentColumn: (t) => t.ownerId,
|
||||
referencedTable: i6.ReadDatabaseContainer($db)
|
||||
.resultSet<i5.$UserEntityTable>('user_entity'),
|
||||
getReferencedColumn: (t) => t.id,
|
||||
builder: (joinBuilder,
|
||||
{$addJoinBuilderToRootComposer,
|
||||
$removeJoinBuilderFromRootComposer}) =>
|
||||
i5.$$UserEntityTableFilterComposer(
|
||||
$db: $db,
|
||||
$table: i6.ReadDatabaseContainer($db)
|
||||
.resultSet<i5.$UserEntityTable>('user_entity'),
|
||||
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
|
||||
joinBuilder: joinBuilder,
|
||||
$removeJoinBuilderFromRootComposer:
|
||||
$removeJoinBuilderFromRootComposer,
|
||||
));
|
||||
return composer;
|
||||
}
|
||||
|
||||
i7.$$RemoteAssetEntityTableFilterComposer get thumbnailAssetId {
|
||||
final i7.$$RemoteAssetEntityTableFilterComposer composer = $composerBuilder(
|
||||
composer: this,
|
||||
getCurrentColumn: (t) => t.thumbnailAssetId,
|
||||
referencedTable: i6.ReadDatabaseContainer($db)
|
||||
.resultSet<i7.$RemoteAssetEntityTable>('remote_asset_entity'),
|
||||
getReferencedColumn: (t) => t.id,
|
||||
builder: (joinBuilder,
|
||||
{$addJoinBuilderToRootComposer,
|
||||
$removeJoinBuilderFromRootComposer}) =>
|
||||
i7.$$RemoteAssetEntityTableFilterComposer(
|
||||
$db: $db,
|
||||
$table: i6.ReadDatabaseContainer($db)
|
||||
.resultSet<i7.$RemoteAssetEntityTable>('remote_asset_entity'),
|
||||
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
|
||||
joinBuilder: joinBuilder,
|
||||
$removeJoinBuilderFromRootComposer:
|
||||
$removeJoinBuilderFromRootComposer,
|
||||
));
|
||||
return composer;
|
||||
}
|
||||
}
|
||||
|
||||
class $$RemoteAlbumEntityTableOrderingComposer
|
||||
extends i0.Composer<i0.GeneratedDatabase, i1.$RemoteAlbumEntityTable> {
|
||||
$$RemoteAlbumEntityTableOrderingComposer({
|
||||
required super.$db,
|
||||
required super.$table,
|
||||
super.joinBuilder,
|
||||
super.$addJoinBuilderToRootComposer,
|
||||
super.$removeJoinBuilderFromRootComposer,
|
||||
});
|
||||
i0.ColumnOrderings<String> get id => $composableBuilder(
|
||||
column: $table.id, builder: (column) => i0.ColumnOrderings(column));
|
||||
|
||||
i0.ColumnOrderings<String> get name => $composableBuilder(
|
||||
column: $table.name, builder: (column) => i0.ColumnOrderings(column));
|
||||
|
||||
i0.ColumnOrderings<String> get description => $composableBuilder(
|
||||
column: $table.description,
|
||||
builder: (column) => i0.ColumnOrderings(column));
|
||||
|
||||
i0.ColumnOrderings<DateTime> get createdAt => $composableBuilder(
|
||||
column: $table.createdAt,
|
||||
builder: (column) => i0.ColumnOrderings(column));
|
||||
|
||||
i0.ColumnOrderings<DateTime> get updatedAt => $composableBuilder(
|
||||
column: $table.updatedAt,
|
||||
builder: (column) => i0.ColumnOrderings(column));
|
||||
|
||||
i0.ColumnOrderings<bool> get isActivityEnabled => $composableBuilder(
|
||||
column: $table.isActivityEnabled,
|
||||
builder: (column) => i0.ColumnOrderings(column));
|
||||
|
||||
i0.ColumnOrderings<int> get order => $composableBuilder(
|
||||
column: $table.order, builder: (column) => i0.ColumnOrderings(column));
|
||||
|
||||
i5.$$UserEntityTableOrderingComposer get ownerId {
|
||||
final i5.$$UserEntityTableOrderingComposer composer = $composerBuilder(
|
||||
composer: this,
|
||||
getCurrentColumn: (t) => t.ownerId,
|
||||
referencedTable: i6.ReadDatabaseContainer($db)
|
||||
.resultSet<i5.$UserEntityTable>('user_entity'),
|
||||
getReferencedColumn: (t) => t.id,
|
||||
builder: (joinBuilder,
|
||||
{$addJoinBuilderToRootComposer,
|
||||
$removeJoinBuilderFromRootComposer}) =>
|
||||
i5.$$UserEntityTableOrderingComposer(
|
||||
$db: $db,
|
||||
$table: i6.ReadDatabaseContainer($db)
|
||||
.resultSet<i5.$UserEntityTable>('user_entity'),
|
||||
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
|
||||
joinBuilder: joinBuilder,
|
||||
$removeJoinBuilderFromRootComposer:
|
||||
$removeJoinBuilderFromRootComposer,
|
||||
));
|
||||
return composer;
|
||||
}
|
||||
|
||||
i7.$$RemoteAssetEntityTableOrderingComposer get thumbnailAssetId {
|
||||
final i7.$$RemoteAssetEntityTableOrderingComposer composer =
|
||||
$composerBuilder(
|
||||
composer: this,
|
||||
getCurrentColumn: (t) => t.thumbnailAssetId,
|
||||
referencedTable: i6.ReadDatabaseContainer($db)
|
||||
.resultSet<i7.$RemoteAssetEntityTable>('remote_asset_entity'),
|
||||
getReferencedColumn: (t) => t.id,
|
||||
builder: (joinBuilder,
|
||||
{$addJoinBuilderToRootComposer,
|
||||
$removeJoinBuilderFromRootComposer}) =>
|
||||
i7.$$RemoteAssetEntityTableOrderingComposer(
|
||||
$db: $db,
|
||||
$table: i6.ReadDatabaseContainer($db)
|
||||
.resultSet<i7.$RemoteAssetEntityTable>(
|
||||
'remote_asset_entity'),
|
||||
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
|
||||
joinBuilder: joinBuilder,
|
||||
$removeJoinBuilderFromRootComposer:
|
||||
$removeJoinBuilderFromRootComposer,
|
||||
));
|
||||
return composer;
|
||||
}
|
||||
}
|
||||
|
||||
class $$RemoteAlbumEntityTableAnnotationComposer
|
||||
extends i0.Composer<i0.GeneratedDatabase, i1.$RemoteAlbumEntityTable> {
|
||||
$$RemoteAlbumEntityTableAnnotationComposer({
|
||||
required super.$db,
|
||||
required super.$table,
|
||||
super.joinBuilder,
|
||||
super.$addJoinBuilderToRootComposer,
|
||||
super.$removeJoinBuilderFromRootComposer,
|
||||
});
|
||||
i0.GeneratedColumn<String> get id =>
|
||||
$composableBuilder(column: $table.id, builder: (column) => column);
|
||||
|
||||
i0.GeneratedColumn<String> get name =>
|
||||
$composableBuilder(column: $table.name, builder: (column) => column);
|
||||
|
||||
i0.GeneratedColumn<String> get description => $composableBuilder(
|
||||
column: $table.description, builder: (column) => column);
|
||||
|
||||
i0.GeneratedColumn<DateTime> get createdAt =>
|
||||
$composableBuilder(column: $table.createdAt, builder: (column) => column);
|
||||
|
||||
i0.GeneratedColumn<DateTime> get updatedAt =>
|
||||
$composableBuilder(column: $table.updatedAt, builder: (column) => column);
|
||||
|
||||
i0.GeneratedColumn<bool> get isActivityEnabled => $composableBuilder(
|
||||
column: $table.isActivityEnabled, builder: (column) => column);
|
||||
|
||||
i0.GeneratedColumnWithTypeConverter<i2.AlbumAssetOrder, int> get order =>
|
||||
$composableBuilder(column: $table.order, builder: (column) => column);
|
||||
|
||||
i5.$$UserEntityTableAnnotationComposer get ownerId {
|
||||
final i5.$$UserEntityTableAnnotationComposer composer = $composerBuilder(
|
||||
composer: this,
|
||||
getCurrentColumn: (t) => t.ownerId,
|
||||
referencedTable: i6.ReadDatabaseContainer($db)
|
||||
.resultSet<i5.$UserEntityTable>('user_entity'),
|
||||
getReferencedColumn: (t) => t.id,
|
||||
builder: (joinBuilder,
|
||||
{$addJoinBuilderToRootComposer,
|
||||
$removeJoinBuilderFromRootComposer}) =>
|
||||
i5.$$UserEntityTableAnnotationComposer(
|
||||
$db: $db,
|
||||
$table: i6.ReadDatabaseContainer($db)
|
||||
.resultSet<i5.$UserEntityTable>('user_entity'),
|
||||
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
|
||||
joinBuilder: joinBuilder,
|
||||
$removeJoinBuilderFromRootComposer:
|
||||
$removeJoinBuilderFromRootComposer,
|
||||
));
|
||||
return composer;
|
||||
}
|
||||
|
||||
i7.$$RemoteAssetEntityTableAnnotationComposer get thumbnailAssetId {
|
||||
final i7.$$RemoteAssetEntityTableAnnotationComposer composer =
|
||||
$composerBuilder(
|
||||
composer: this,
|
||||
getCurrentColumn: (t) => t.thumbnailAssetId,
|
||||
referencedTable: i6.ReadDatabaseContainer($db)
|
||||
.resultSet<i7.$RemoteAssetEntityTable>('remote_asset_entity'),
|
||||
getReferencedColumn: (t) => t.id,
|
||||
builder: (joinBuilder,
|
||||
{$addJoinBuilderToRootComposer,
|
||||
$removeJoinBuilderFromRootComposer}) =>
|
||||
i7.$$RemoteAssetEntityTableAnnotationComposer(
|
||||
$db: $db,
|
||||
$table: i6.ReadDatabaseContainer($db)
|
||||
.resultSet<i7.$RemoteAssetEntityTable>(
|
||||
'remote_asset_entity'),
|
||||
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
|
||||
joinBuilder: joinBuilder,
|
||||
$removeJoinBuilderFromRootComposer:
|
||||
$removeJoinBuilderFromRootComposer,
|
||||
));
|
||||
return composer;
|
||||
}
|
||||
}
|
||||
|
||||
class $$RemoteAlbumEntityTableTableManager extends i0.RootTableManager<
|
||||
i0.GeneratedDatabase,
|
||||
i1.$RemoteAlbumEntityTable,
|
||||
i1.RemoteAlbumEntityData,
|
||||
i1.$$RemoteAlbumEntityTableFilterComposer,
|
||||
i1.$$RemoteAlbumEntityTableOrderingComposer,
|
||||
i1.$$RemoteAlbumEntityTableAnnotationComposer,
|
||||
$$RemoteAlbumEntityTableCreateCompanionBuilder,
|
||||
$$RemoteAlbumEntityTableUpdateCompanionBuilder,
|
||||
(i1.RemoteAlbumEntityData, i1.$$RemoteAlbumEntityTableReferences),
|
||||
i1.RemoteAlbumEntityData,
|
||||
i0.PrefetchHooks Function({bool ownerId, bool thumbnailAssetId})> {
|
||||
$$RemoteAlbumEntityTableTableManager(
|
||||
i0.GeneratedDatabase db, i1.$RemoteAlbumEntityTable table)
|
||||
: super(i0.TableManagerState(
|
||||
db: db,
|
||||
table: table,
|
||||
createFilteringComposer: () =>
|
||||
i1.$$RemoteAlbumEntityTableFilterComposer($db: db, $table: table),
|
||||
createOrderingComposer: () => i1
|
||||
.$$RemoteAlbumEntityTableOrderingComposer($db: db, $table: table),
|
||||
createComputedFieldComposer: () =>
|
||||
i1.$$RemoteAlbumEntityTableAnnotationComposer(
|
||||
$db: db, $table: table),
|
||||
updateCompanionCallback: ({
|
||||
i0.Value<String> id = const i0.Value.absent(),
|
||||
i0.Value<String> name = const i0.Value.absent(),
|
||||
i0.Value<String> description = const i0.Value.absent(),
|
||||
i0.Value<DateTime> createdAt = const i0.Value.absent(),
|
||||
i0.Value<DateTime> updatedAt = const i0.Value.absent(),
|
||||
i0.Value<String> ownerId = const i0.Value.absent(),
|
||||
i0.Value<String?> thumbnailAssetId = const i0.Value.absent(),
|
||||
i0.Value<bool> isActivityEnabled = const i0.Value.absent(),
|
||||
i0.Value<i2.AlbumAssetOrder> order = const i0.Value.absent(),
|
||||
}) =>
|
||||
i1.RemoteAlbumEntityCompanion(
|
||||
id: id,
|
||||
name: name,
|
||||
description: description,
|
||||
createdAt: createdAt,
|
||||
updatedAt: updatedAt,
|
||||
ownerId: ownerId,
|
||||
thumbnailAssetId: thumbnailAssetId,
|
||||
isActivityEnabled: isActivityEnabled,
|
||||
order: order,
|
||||
),
|
||||
createCompanionCallback: ({
|
||||
required String id,
|
||||
required String name,
|
||||
i0.Value<String> description = const i0.Value.absent(),
|
||||
i0.Value<DateTime> createdAt = const i0.Value.absent(),
|
||||
i0.Value<DateTime> updatedAt = const i0.Value.absent(),
|
||||
required String ownerId,
|
||||
i0.Value<String?> thumbnailAssetId = const i0.Value.absent(),
|
||||
i0.Value<bool> isActivityEnabled = const i0.Value.absent(),
|
||||
required i2.AlbumAssetOrder order,
|
||||
}) =>
|
||||
i1.RemoteAlbumEntityCompanion.insert(
|
||||
id: id,
|
||||
name: name,
|
||||
description: description,
|
||||
createdAt: createdAt,
|
||||
updatedAt: updatedAt,
|
||||
ownerId: ownerId,
|
||||
thumbnailAssetId: thumbnailAssetId,
|
||||
isActivityEnabled: isActivityEnabled,
|
||||
order: order,
|
||||
),
|
||||
withReferenceMapper: (p0) => p0
|
||||
.map((e) => (
|
||||
e.readTable(table),
|
||||
i1.$$RemoteAlbumEntityTableReferences(db, table, e)
|
||||
))
|
||||
.toList(),
|
||||
prefetchHooksCallback: ({ownerId = false, thumbnailAssetId = false}) {
|
||||
return i0.PrefetchHooks(
|
||||
db: db,
|
||||
explicitlyWatchedTables: [],
|
||||
addJoins: <
|
||||
T extends i0.TableManagerState<
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic>>(state) {
|
||||
if (ownerId) {
|
||||
state = state.withJoin(
|
||||
currentTable: table,
|
||||
currentColumn: table.ownerId,
|
||||
referencedTable:
|
||||
i1.$$RemoteAlbumEntityTableReferences._ownerIdTable(db),
|
||||
referencedColumn: i1.$$RemoteAlbumEntityTableReferences
|
||||
._ownerIdTable(db)
|
||||
.id,
|
||||
) as T;
|
||||
}
|
||||
if (thumbnailAssetId) {
|
||||
state = state.withJoin(
|
||||
currentTable: table,
|
||||
currentColumn: table.thumbnailAssetId,
|
||||
referencedTable: i1.$$RemoteAlbumEntityTableReferences
|
||||
._thumbnailAssetIdTable(db),
|
||||
referencedColumn: i1.$$RemoteAlbumEntityTableReferences
|
||||
._thumbnailAssetIdTable(db)
|
||||
.id,
|
||||
) as T;
|
||||
}
|
||||
|
||||
return state;
|
||||
},
|
||||
getPrefetchedDataCallback: (items) async {
|
||||
return [];
|
||||
},
|
||||
);
|
||||
},
|
||||
));
|
||||
}
|
||||
|
||||
typedef $$RemoteAlbumEntityTableProcessedTableManager
|
||||
= i0.ProcessedTableManager<
|
||||
i0.GeneratedDatabase,
|
||||
i1.$RemoteAlbumEntityTable,
|
||||
i1.RemoteAlbumEntityData,
|
||||
i1.$$RemoteAlbumEntityTableFilterComposer,
|
||||
i1.$$RemoteAlbumEntityTableOrderingComposer,
|
||||
i1.$$RemoteAlbumEntityTableAnnotationComposer,
|
||||
$$RemoteAlbumEntityTableCreateCompanionBuilder,
|
||||
$$RemoteAlbumEntityTableUpdateCompanionBuilder,
|
||||
(i1.RemoteAlbumEntityData, i1.$$RemoteAlbumEntityTableReferences),
|
||||
i1.RemoteAlbumEntityData,
|
||||
i0.PrefetchHooks Function({bool ownerId, bool thumbnailAssetId})>;
|
||||
|
||||
class $RemoteAlbumEntityTable extends i3.RemoteAlbumEntity
|
||||
with i0.TableInfo<$RemoteAlbumEntityTable, i1.RemoteAlbumEntityData> {
|
||||
@override
|
||||
final i0.GeneratedDatabase attachedDatabase;
|
||||
final String? _alias;
|
||||
$RemoteAlbumEntityTable(this.attachedDatabase, [this._alias]);
|
||||
static const i0.VerificationMeta _idMeta = const i0.VerificationMeta('id');
|
||||
@override
|
||||
late final i0.GeneratedColumn<String> id = i0.GeneratedColumn<String>(
|
||||
'id', aliasedName, false,
|
||||
type: i0.DriftSqlType.string, requiredDuringInsert: true);
|
||||
static const i0.VerificationMeta _nameMeta =
|
||||
const i0.VerificationMeta('name');
|
||||
@override
|
||||
late final i0.GeneratedColumn<String> name = i0.GeneratedColumn<String>(
|
||||
'name', aliasedName, false,
|
||||
type: i0.DriftSqlType.string, requiredDuringInsert: true);
|
||||
static const i0.VerificationMeta _descriptionMeta =
|
||||
const i0.VerificationMeta('description');
|
||||
@override
|
||||
late final i0.GeneratedColumn<String> description =
|
||||
i0.GeneratedColumn<String>('description', aliasedName, false,
|
||||
type: i0.DriftSqlType.string,
|
||||
requiredDuringInsert: false,
|
||||
defaultValue: const i4.Constant(''));
|
||||
static const i0.VerificationMeta _createdAtMeta =
|
||||
const i0.VerificationMeta('createdAt');
|
||||
@override
|
||||
late final i0.GeneratedColumn<DateTime> createdAt =
|
||||
i0.GeneratedColumn<DateTime>('created_at', aliasedName, false,
|
||||
type: i0.DriftSqlType.dateTime,
|
||||
requiredDuringInsert: false,
|
||||
defaultValue: i4.currentDateAndTime);
|
||||
static const i0.VerificationMeta _updatedAtMeta =
|
||||
const i0.VerificationMeta('updatedAt');
|
||||
@override
|
||||
late final i0.GeneratedColumn<DateTime> updatedAt =
|
||||
i0.GeneratedColumn<DateTime>('updated_at', aliasedName, false,
|
||||
type: i0.DriftSqlType.dateTime,
|
||||
requiredDuringInsert: false,
|
||||
defaultValue: i4.currentDateAndTime);
|
||||
static const i0.VerificationMeta _ownerIdMeta =
|
||||
const i0.VerificationMeta('ownerId');
|
||||
@override
|
||||
late final i0.GeneratedColumn<String> ownerId = i0.GeneratedColumn<String>(
|
||||
'owner_id', aliasedName, false,
|
||||
type: i0.DriftSqlType.string,
|
||||
requiredDuringInsert: true,
|
||||
defaultConstraints: i0.GeneratedColumn.constraintIsAlways(
|
||||
'REFERENCES user_entity (id) ON DELETE CASCADE'));
|
||||
static const i0.VerificationMeta _thumbnailAssetIdMeta =
|
||||
const i0.VerificationMeta('thumbnailAssetId');
|
||||
@override
|
||||
late final i0.GeneratedColumn<String> thumbnailAssetId =
|
||||
i0.GeneratedColumn<String>('thumbnail_asset_id', aliasedName, true,
|
||||
type: i0.DriftSqlType.string,
|
||||
requiredDuringInsert: false,
|
||||
defaultConstraints: i0.GeneratedColumn.constraintIsAlways(
|
||||
'REFERENCES remote_asset_entity (id) ON DELETE SET NULL'));
|
||||
static const i0.VerificationMeta _isActivityEnabledMeta =
|
||||
const i0.VerificationMeta('isActivityEnabled');
|
||||
@override
|
||||
late final i0.GeneratedColumn<bool> isActivityEnabled =
|
||||
i0.GeneratedColumn<bool>('is_activity_enabled', aliasedName, false,
|
||||
type: i0.DriftSqlType.bool,
|
||||
requiredDuringInsert: false,
|
||||
defaultConstraints: i0.GeneratedColumn.constraintIsAlways(
|
||||
'CHECK ("is_activity_enabled" IN (0, 1))'),
|
||||
defaultValue: const i4.Constant(true));
|
||||
@override
|
||||
late final i0.GeneratedColumnWithTypeConverter<i2.AlbumAssetOrder, int>
|
||||
order = i0.GeneratedColumn<int>('order', aliasedName, false,
|
||||
type: i0.DriftSqlType.int, requiredDuringInsert: true)
|
||||
.withConverter<i2.AlbumAssetOrder>(
|
||||
i1.$RemoteAlbumEntityTable.$converterorder);
|
||||
@override
|
||||
List<i0.GeneratedColumn> get $columns => [
|
||||
id,
|
||||
name,
|
||||
description,
|
||||
createdAt,
|
||||
updatedAt,
|
||||
ownerId,
|
||||
thumbnailAssetId,
|
||||
isActivityEnabled,
|
||||
order
|
||||
];
|
||||
@override
|
||||
String get aliasedName => _alias ?? actualTableName;
|
||||
@override
|
||||
String get actualTableName => $name;
|
||||
static const String $name = 'remote_album_entity';
|
||||
@override
|
||||
i0.VerificationContext validateIntegrity(
|
||||
i0.Insertable<i1.RemoteAlbumEntityData> instance,
|
||||
{bool isInserting = false}) {
|
||||
final context = i0.VerificationContext();
|
||||
final data = instance.toColumns(true);
|
||||
if (data.containsKey('id')) {
|
||||
context.handle(_idMeta, id.isAcceptableOrUnknown(data['id']!, _idMeta));
|
||||
} else if (isInserting) {
|
||||
context.missing(_idMeta);
|
||||
}
|
||||
if (data.containsKey('name')) {
|
||||
context.handle(
|
||||
_nameMeta, name.isAcceptableOrUnknown(data['name']!, _nameMeta));
|
||||
} else if (isInserting) {
|
||||
context.missing(_nameMeta);
|
||||
}
|
||||
if (data.containsKey('description')) {
|
||||
context.handle(
|
||||
_descriptionMeta,
|
||||
description.isAcceptableOrUnknown(
|
||||
data['description']!, _descriptionMeta));
|
||||
}
|
||||
if (data.containsKey('created_at')) {
|
||||
context.handle(_createdAtMeta,
|
||||
createdAt.isAcceptableOrUnknown(data['created_at']!, _createdAtMeta));
|
||||
}
|
||||
if (data.containsKey('updated_at')) {
|
||||
context.handle(_updatedAtMeta,
|
||||
updatedAt.isAcceptableOrUnknown(data['updated_at']!, _updatedAtMeta));
|
||||
}
|
||||
if (data.containsKey('owner_id')) {
|
||||
context.handle(_ownerIdMeta,
|
||||
ownerId.isAcceptableOrUnknown(data['owner_id']!, _ownerIdMeta));
|
||||
} else if (isInserting) {
|
||||
context.missing(_ownerIdMeta);
|
||||
}
|
||||
if (data.containsKey('thumbnail_asset_id')) {
|
||||
context.handle(
|
||||
_thumbnailAssetIdMeta,
|
||||
thumbnailAssetId.isAcceptableOrUnknown(
|
||||
data['thumbnail_asset_id']!, _thumbnailAssetIdMeta));
|
||||
}
|
||||
if (data.containsKey('is_activity_enabled')) {
|
||||
context.handle(
|
||||
_isActivityEnabledMeta,
|
||||
isActivityEnabled.isAcceptableOrUnknown(
|
||||
data['is_activity_enabled']!, _isActivityEnabledMeta));
|
||||
}
|
||||
return context;
|
||||
}
|
||||
|
||||
@override
|
||||
Set<i0.GeneratedColumn> get $primaryKey => {id};
|
||||
@override
|
||||
i1.RemoteAlbumEntityData map(Map<String, dynamic> data,
|
||||
{String? tablePrefix}) {
|
||||
final effectivePrefix = tablePrefix != null ? '$tablePrefix.' : '';
|
||||
return i1.RemoteAlbumEntityData(
|
||||
id: attachedDatabase.typeMapping
|
||||
.read(i0.DriftSqlType.string, data['${effectivePrefix}id'])!,
|
||||
name: attachedDatabase.typeMapping
|
||||
.read(i0.DriftSqlType.string, data['${effectivePrefix}name'])!,
|
||||
description: attachedDatabase.typeMapping
|
||||
.read(i0.DriftSqlType.string, data['${effectivePrefix}description'])!,
|
||||
createdAt: attachedDatabase.typeMapping.read(
|
||||
i0.DriftSqlType.dateTime, data['${effectivePrefix}created_at'])!,
|
||||
updatedAt: attachedDatabase.typeMapping.read(
|
||||
i0.DriftSqlType.dateTime, data['${effectivePrefix}updated_at'])!,
|
||||
ownerId: attachedDatabase.typeMapping
|
||||
.read(i0.DriftSqlType.string, data['${effectivePrefix}owner_id'])!,
|
||||
thumbnailAssetId: attachedDatabase.typeMapping.read(
|
||||
i0.DriftSqlType.string, data['${effectivePrefix}thumbnail_asset_id']),
|
||||
isActivityEnabled: attachedDatabase.typeMapping.read(
|
||||
i0.DriftSqlType.bool, data['${effectivePrefix}is_activity_enabled'])!,
|
||||
order: i1.$RemoteAlbumEntityTable.$converterorder.fromSql(attachedDatabase
|
||||
.typeMapping
|
||||
.read(i0.DriftSqlType.int, data['${effectivePrefix}order'])!),
|
||||
);
|
||||
}
|
||||
|
||||
@override
|
||||
$RemoteAlbumEntityTable createAlias(String alias) {
|
||||
return $RemoteAlbumEntityTable(attachedDatabase, alias);
|
||||
}
|
||||
|
||||
static i0.JsonTypeConverter2<i2.AlbumAssetOrder, int, int> $converterorder =
|
||||
const i0.EnumIndexConverter<i2.AlbumAssetOrder>(
|
||||
i2.AlbumAssetOrder.values);
|
||||
@override
|
||||
bool get withoutRowId => true;
|
||||
@override
|
||||
bool get isStrict => true;
|
||||
}
|
||||
|
||||
class RemoteAlbumEntityData extends i0.DataClass
|
||||
implements i0.Insertable<i1.RemoteAlbumEntityData> {
|
||||
final String id;
|
||||
final String name;
|
||||
final String description;
|
||||
final DateTime createdAt;
|
||||
final DateTime updatedAt;
|
||||
final String ownerId;
|
||||
final String? thumbnailAssetId;
|
||||
final bool isActivityEnabled;
|
||||
final i2.AlbumAssetOrder order;
|
||||
const RemoteAlbumEntityData(
|
||||
{required this.id,
|
||||
required this.name,
|
||||
required this.description,
|
||||
required this.createdAt,
|
||||
required this.updatedAt,
|
||||
required this.ownerId,
|
||||
this.thumbnailAssetId,
|
||||
required this.isActivityEnabled,
|
||||
required this.order});
|
||||
@override
|
||||
Map<String, i0.Expression> toColumns(bool nullToAbsent) {
|
||||
final map = <String, i0.Expression>{};
|
||||
map['id'] = i0.Variable<String>(id);
|
||||
map['name'] = i0.Variable<String>(name);
|
||||
map['description'] = i0.Variable<String>(description);
|
||||
map['created_at'] = i0.Variable<DateTime>(createdAt);
|
||||
map['updated_at'] = i0.Variable<DateTime>(updatedAt);
|
||||
map['owner_id'] = i0.Variable<String>(ownerId);
|
||||
if (!nullToAbsent || thumbnailAssetId != null) {
|
||||
map['thumbnail_asset_id'] = i0.Variable<String>(thumbnailAssetId);
|
||||
}
|
||||
map['is_activity_enabled'] = i0.Variable<bool>(isActivityEnabled);
|
||||
{
|
||||
map['order'] = i0.Variable<int>(
|
||||
i1.$RemoteAlbumEntityTable.$converterorder.toSql(order));
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
factory RemoteAlbumEntityData.fromJson(Map<String, dynamic> json,
|
||||
{i0.ValueSerializer? serializer}) {
|
||||
serializer ??= i0.driftRuntimeOptions.defaultSerializer;
|
||||
return RemoteAlbumEntityData(
|
||||
id: serializer.fromJson<String>(json['id']),
|
||||
name: serializer.fromJson<String>(json['name']),
|
||||
description: serializer.fromJson<String>(json['description']),
|
||||
createdAt: serializer.fromJson<DateTime>(json['createdAt']),
|
||||
updatedAt: serializer.fromJson<DateTime>(json['updatedAt']),
|
||||
ownerId: serializer.fromJson<String>(json['ownerId']),
|
||||
thumbnailAssetId: serializer.fromJson<String?>(json['thumbnailAssetId']),
|
||||
isActivityEnabled: serializer.fromJson<bool>(json['isActivityEnabled']),
|
||||
order: i1.$RemoteAlbumEntityTable.$converterorder
|
||||
.fromJson(serializer.fromJson<int>(json['order'])),
|
||||
);
|
||||
}
|
||||
@override
|
||||
Map<String, dynamic> toJson({i0.ValueSerializer? serializer}) {
|
||||
serializer ??= i0.driftRuntimeOptions.defaultSerializer;
|
||||
return <String, dynamic>{
|
||||
'id': serializer.toJson<String>(id),
|
||||
'name': serializer.toJson<String>(name),
|
||||
'description': serializer.toJson<String>(description),
|
||||
'createdAt': serializer.toJson<DateTime>(createdAt),
|
||||
'updatedAt': serializer.toJson<DateTime>(updatedAt),
|
||||
'ownerId': serializer.toJson<String>(ownerId),
|
||||
'thumbnailAssetId': serializer.toJson<String?>(thumbnailAssetId),
|
||||
'isActivityEnabled': serializer.toJson<bool>(isActivityEnabled),
|
||||
'order': serializer.toJson<int>(
|
||||
i1.$RemoteAlbumEntityTable.$converterorder.toJson(order)),
|
||||
};
|
||||
}
|
||||
|
||||
i1.RemoteAlbumEntityData copyWith(
|
||||
{String? id,
|
||||
String? name,
|
||||
String? description,
|
||||
DateTime? createdAt,
|
||||
DateTime? updatedAt,
|
||||
String? ownerId,
|
||||
i0.Value<String?> thumbnailAssetId = const i0.Value.absent(),
|
||||
bool? isActivityEnabled,
|
||||
i2.AlbumAssetOrder? order}) =>
|
||||
i1.RemoteAlbumEntityData(
|
||||
id: id ?? this.id,
|
||||
name: name ?? this.name,
|
||||
description: description ?? this.description,
|
||||
createdAt: createdAt ?? this.createdAt,
|
||||
updatedAt: updatedAt ?? this.updatedAt,
|
||||
ownerId: ownerId ?? this.ownerId,
|
||||
thumbnailAssetId: thumbnailAssetId.present
|
||||
? thumbnailAssetId.value
|
||||
: this.thumbnailAssetId,
|
||||
isActivityEnabled: isActivityEnabled ?? this.isActivityEnabled,
|
||||
order: order ?? this.order,
|
||||
);
|
||||
RemoteAlbumEntityData copyWithCompanion(i1.RemoteAlbumEntityCompanion data) {
|
||||
return RemoteAlbumEntityData(
|
||||
id: data.id.present ? data.id.value : this.id,
|
||||
name: data.name.present ? data.name.value : this.name,
|
||||
description:
|
||||
data.description.present ? data.description.value : this.description,
|
||||
createdAt: data.createdAt.present ? data.createdAt.value : this.createdAt,
|
||||
updatedAt: data.updatedAt.present ? data.updatedAt.value : this.updatedAt,
|
||||
ownerId: data.ownerId.present ? data.ownerId.value : this.ownerId,
|
||||
thumbnailAssetId: data.thumbnailAssetId.present
|
||||
? data.thumbnailAssetId.value
|
||||
: this.thumbnailAssetId,
|
||||
isActivityEnabled: data.isActivityEnabled.present
|
||||
? data.isActivityEnabled.value
|
||||
: this.isActivityEnabled,
|
||||
order: data.order.present ? data.order.value : this.order,
|
||||
);
|
||||
}
|
||||
|
||||
@override
|
||||
String toString() {
|
||||
return (StringBuffer('RemoteAlbumEntityData(')
|
||||
..write('id: $id, ')
|
||||
..write('name: $name, ')
|
||||
..write('description: $description, ')
|
||||
..write('createdAt: $createdAt, ')
|
||||
..write('updatedAt: $updatedAt, ')
|
||||
..write('ownerId: $ownerId, ')
|
||||
..write('thumbnailAssetId: $thumbnailAssetId, ')
|
||||
..write('isActivityEnabled: $isActivityEnabled, ')
|
||||
..write('order: $order')
|
||||
..write(')'))
|
||||
.toString();
|
||||
}
|
||||
|
||||
@override
|
||||
int get hashCode => Object.hash(id, name, description, createdAt, updatedAt,
|
||||
ownerId, thumbnailAssetId, isActivityEnabled, order);
|
||||
@override
|
||||
bool operator ==(Object other) =>
|
||||
identical(this, other) ||
|
||||
(other is i1.RemoteAlbumEntityData &&
|
||||
other.id == this.id &&
|
||||
other.name == this.name &&
|
||||
other.description == this.description &&
|
||||
other.createdAt == this.createdAt &&
|
||||
other.updatedAt == this.updatedAt &&
|
||||
other.ownerId == this.ownerId &&
|
||||
other.thumbnailAssetId == this.thumbnailAssetId &&
|
||||
other.isActivityEnabled == this.isActivityEnabled &&
|
||||
other.order == this.order);
|
||||
}
|
||||
|
||||
class RemoteAlbumEntityCompanion
|
||||
extends i0.UpdateCompanion<i1.RemoteAlbumEntityData> {
|
||||
final i0.Value<String> id;
|
||||
final i0.Value<String> name;
|
||||
final i0.Value<String> description;
|
||||
final i0.Value<DateTime> createdAt;
|
||||
final i0.Value<DateTime> updatedAt;
|
||||
final i0.Value<String> ownerId;
|
||||
final i0.Value<String?> thumbnailAssetId;
|
||||
final i0.Value<bool> isActivityEnabled;
|
||||
final i0.Value<i2.AlbumAssetOrder> order;
|
||||
const RemoteAlbumEntityCompanion({
|
||||
this.id = const i0.Value.absent(),
|
||||
this.name = const i0.Value.absent(),
|
||||
this.description = const i0.Value.absent(),
|
||||
this.createdAt = const i0.Value.absent(),
|
||||
this.updatedAt = const i0.Value.absent(),
|
||||
this.ownerId = const i0.Value.absent(),
|
||||
this.thumbnailAssetId = const i0.Value.absent(),
|
||||
this.isActivityEnabled = const i0.Value.absent(),
|
||||
this.order = const i0.Value.absent(),
|
||||
});
|
||||
RemoteAlbumEntityCompanion.insert({
|
||||
required String id,
|
||||
required String name,
|
||||
this.description = const i0.Value.absent(),
|
||||
this.createdAt = const i0.Value.absent(),
|
||||
this.updatedAt = const i0.Value.absent(),
|
||||
required String ownerId,
|
||||
this.thumbnailAssetId = const i0.Value.absent(),
|
||||
this.isActivityEnabled = const i0.Value.absent(),
|
||||
required i2.AlbumAssetOrder order,
|
||||
}) : id = i0.Value(id),
|
||||
name = i0.Value(name),
|
||||
ownerId = i0.Value(ownerId),
|
||||
order = i0.Value(order);
|
||||
static i0.Insertable<i1.RemoteAlbumEntityData> custom({
|
||||
i0.Expression<String>? id,
|
||||
i0.Expression<String>? name,
|
||||
i0.Expression<String>? description,
|
||||
i0.Expression<DateTime>? createdAt,
|
||||
i0.Expression<DateTime>? updatedAt,
|
||||
i0.Expression<String>? ownerId,
|
||||
i0.Expression<String>? thumbnailAssetId,
|
||||
i0.Expression<bool>? isActivityEnabled,
|
||||
i0.Expression<int>? order,
|
||||
}) {
|
||||
return i0.RawValuesInsertable({
|
||||
if (id != null) 'id': id,
|
||||
if (name != null) 'name': name,
|
||||
if (description != null) 'description': description,
|
||||
if (createdAt != null) 'created_at': createdAt,
|
||||
if (updatedAt != null) 'updated_at': updatedAt,
|
||||
if (ownerId != null) 'owner_id': ownerId,
|
||||
if (thumbnailAssetId != null) 'thumbnail_asset_id': thumbnailAssetId,
|
||||
if (isActivityEnabled != null) 'is_activity_enabled': isActivityEnabled,
|
||||
if (order != null) 'order': order,
|
||||
});
|
||||
}
|
||||
|
||||
i1.RemoteAlbumEntityCompanion copyWith(
|
||||
{i0.Value<String>? id,
|
||||
i0.Value<String>? name,
|
||||
i0.Value<String>? description,
|
||||
i0.Value<DateTime>? createdAt,
|
||||
i0.Value<DateTime>? updatedAt,
|
||||
i0.Value<String>? ownerId,
|
||||
i0.Value<String?>? thumbnailAssetId,
|
||||
i0.Value<bool>? isActivityEnabled,
|
||||
i0.Value<i2.AlbumAssetOrder>? order}) {
|
||||
return i1.RemoteAlbumEntityCompanion(
|
||||
id: id ?? this.id,
|
||||
name: name ?? this.name,
|
||||
description: description ?? this.description,
|
||||
createdAt: createdAt ?? this.createdAt,
|
||||
updatedAt: updatedAt ?? this.updatedAt,
|
||||
ownerId: ownerId ?? this.ownerId,
|
||||
thumbnailAssetId: thumbnailAssetId ?? this.thumbnailAssetId,
|
||||
isActivityEnabled: isActivityEnabled ?? this.isActivityEnabled,
|
||||
order: order ?? this.order,
|
||||
);
|
||||
}
|
||||
|
||||
@override
|
||||
Map<String, i0.Expression> toColumns(bool nullToAbsent) {
|
||||
final map = <String, i0.Expression>{};
|
||||
if (id.present) {
|
||||
map['id'] = i0.Variable<String>(id.value);
|
||||
}
|
||||
if (name.present) {
|
||||
map['name'] = i0.Variable<String>(name.value);
|
||||
}
|
||||
if (description.present) {
|
||||
map['description'] = i0.Variable<String>(description.value);
|
||||
}
|
||||
if (createdAt.present) {
|
||||
map['created_at'] = i0.Variable<DateTime>(createdAt.value);
|
||||
}
|
||||
if (updatedAt.present) {
|
||||
map['updated_at'] = i0.Variable<DateTime>(updatedAt.value);
|
||||
}
|
||||
if (ownerId.present) {
|
||||
map['owner_id'] = i0.Variable<String>(ownerId.value);
|
||||
}
|
||||
if (thumbnailAssetId.present) {
|
||||
map['thumbnail_asset_id'] = i0.Variable<String>(thumbnailAssetId.value);
|
||||
}
|
||||
if (isActivityEnabled.present) {
|
||||
map['is_activity_enabled'] = i0.Variable<bool>(isActivityEnabled.value);
|
||||
}
|
||||
if (order.present) {
|
||||
map['order'] = i0.Variable<int>(
|
||||
i1.$RemoteAlbumEntityTable.$converterorder.toSql(order.value));
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
@override
|
||||
String toString() {
|
||||
return (StringBuffer('RemoteAlbumEntityCompanion(')
|
||||
..write('id: $id, ')
|
||||
..write('name: $name, ')
|
||||
..write('description: $description, ')
|
||||
..write('createdAt: $createdAt, ')
|
||||
..write('updatedAt: $updatedAt, ')
|
||||
..write('ownerId: $ownerId, ')
|
||||
..write('thumbnailAssetId: $thumbnailAssetId, ')
|
||||
..write('isActivityEnabled: $isActivityEnabled, ')
|
||||
..write('order: $order')
|
||||
..write(')'))
|
||||
.toString();
|
||||
}
|
||||
}
|
||||
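A minimal usage sketch (not part of this diff) of the companion generated above: inserting an album row through drift. The database class `MyDatabase`, its `remoteAlbumEntity` accessor, and the `AlbumAssetOrder.desc` value are assumptions for illustration; columns with defaults (description, createdAt, updatedAt, isActivityEnabled) are simply omitted.

import 'package:drift/drift.dart';
// Plus the generated entity imports from this diff and the app database (assumed).

// Hypothetical sketch only; names outside the generated classes are assumed.
Future<void> insertAlbum(MyDatabase db) {
  return db.into(db.remoteAlbumEntity).insert(
        RemoteAlbumEntityCompanion.insert(
          id: 'album-uuid',
          name: 'Holiday 2024',
          ownerId: 'owner-uuid',
          // Stored as an int via EnumIndexConverter, per $converterorder above.
          order: AlbumAssetOrder.desc,
        ),
      );
}
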
@@ -0,0 +1,17 @@
import 'package:drift/drift.dart';
import 'package:immich_mobile/infrastructure/entities/remote_album.entity.dart';
import 'package:immich_mobile/infrastructure/entities/remote_asset.entity.dart';
import 'package:immich_mobile/infrastructure/utils/drift_default.mixin.dart';

class RemoteAlbumAssetEntity extends Table with DriftDefaultsMixin {
  const RemoteAlbumAssetEntity();

  TextColumn get assetId =>
      text().references(RemoteAssetEntity, #id, onDelete: KeyAction.cascade)();

  TextColumn get albumId =>
      text().references(RemoteAlbumEntity, #id, onDelete: KeyAction.cascade)();

  @override
  Set<Column> get primaryKey => {assetId, albumId};
}
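A short sketch of how this junction table could be used to link an asset to an album; `MyDatabase` and the `remoteAlbumAssetEntity` accessor are assumed names, not part of this diff. Because the pair (assetId, albumId) is the primary key, `insertOrIgnore` makes re-adding an existing link a no-op.

import 'package:drift/drift.dart';
// Plus the generated entity imports from this diff and the app database (assumed).

// Hypothetical sketch only; the database class and accessor names are assumed.
Future<void> addAssetToAlbum(MyDatabase db, String assetId, String albumId) {
  return db.into(db.remoteAlbumAssetEntity).insert(
        RemoteAlbumAssetEntityCompanion.insert(
          assetId: assetId,
          albumId: albumId,
        ),
        mode: InsertMode.insertOrIgnore, // composite PK already enforces uniqueness
      );
}
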
565 mobile/lib/infrastructure/entities/remote_album_asset.entity.drift.dart (generated, new file)
@@ -0,0 +1,565 @@
// dart format width=80
// ignore_for_file: type=lint
import 'package:drift/drift.dart' as i0;
import 'package:immich_mobile/infrastructure/entities/remote_album_asset.entity.drift.dart'
    as i1;
import 'package:immich_mobile/infrastructure/entities/remote_album_asset.entity.dart'
    as i2;
import 'package:immich_mobile/infrastructure/entities/remote_asset.entity.drift.dart'
    as i3;
import 'package:drift/internal/modular.dart' as i4;
import 'package:immich_mobile/infrastructure/entities/remote_album.entity.drift.dart'
    as i5;

typedef $$RemoteAlbumAssetEntityTableCreateCompanionBuilder
    = i1.RemoteAlbumAssetEntityCompanion Function({
  required String assetId,
  required String albumId,
});
typedef $$RemoteAlbumAssetEntityTableUpdateCompanionBuilder
    = i1.RemoteAlbumAssetEntityCompanion Function({
  i0.Value<String> assetId,
  i0.Value<String> albumId,
});

final class $$RemoteAlbumAssetEntityTableReferences extends i0.BaseReferences<
|
||||
i0.GeneratedDatabase,
|
||||
i1.$RemoteAlbumAssetEntityTable,
|
||||
i1.RemoteAlbumAssetEntityData> {
|
||||
$$RemoteAlbumAssetEntityTableReferences(
|
||||
super.$_db, super.$_table, super.$_typedResult);
|
||||
|
||||
static i3.$RemoteAssetEntityTable _assetIdTable(i0.GeneratedDatabase db) =>
|
||||
i4.ReadDatabaseContainer(db)
|
||||
.resultSet<i3.$RemoteAssetEntityTable>('remote_asset_entity')
|
||||
.createAlias(i0.$_aliasNameGenerator(
|
||||
i4.ReadDatabaseContainer(db)
|
||||
.resultSet<i1.$RemoteAlbumAssetEntityTable>(
|
||||
'remote_album_asset_entity')
|
||||
.assetId,
|
||||
i4.ReadDatabaseContainer(db)
|
||||
.resultSet<i3.$RemoteAssetEntityTable>('remote_asset_entity')
|
||||
.id));
|
||||
|
||||
i3.$$RemoteAssetEntityTableProcessedTableManager get assetId {
|
||||
final $_column = $_itemColumn<String>('asset_id')!;
|
||||
|
||||
final manager = i3
|
||||
.$$RemoteAssetEntityTableTableManager(
|
||||
$_db,
|
||||
i4.ReadDatabaseContainer($_db)
|
||||
.resultSet<i3.$RemoteAssetEntityTable>('remote_asset_entity'))
|
||||
.filter((f) => f.id.sqlEquals($_column));
|
||||
final item = $_typedResult.readTableOrNull(_assetIdTable($_db));
|
||||
if (item == null) return manager;
|
||||
return i0.ProcessedTableManager(
|
||||
manager.$state.copyWith(prefetchedData: [item]));
|
||||
}
|
||||
|
||||
static i5.$RemoteAlbumEntityTable _albumIdTable(i0.GeneratedDatabase db) =>
|
||||
i4.ReadDatabaseContainer(db)
|
||||
.resultSet<i5.$RemoteAlbumEntityTable>('remote_album_entity')
|
||||
.createAlias(i0.$_aliasNameGenerator(
|
||||
i4.ReadDatabaseContainer(db)
|
||||
.resultSet<i1.$RemoteAlbumAssetEntityTable>(
|
||||
'remote_album_asset_entity')
|
||||
.albumId,
|
||||
i4.ReadDatabaseContainer(db)
|
||||
.resultSet<i5.$RemoteAlbumEntityTable>('remote_album_entity')
|
||||
.id));
|
||||
|
||||
i5.$$RemoteAlbumEntityTableProcessedTableManager get albumId {
|
||||
final $_column = $_itemColumn<String>('album_id')!;
|
||||
|
||||
final manager = i5
|
||||
.$$RemoteAlbumEntityTableTableManager(
|
||||
$_db,
|
||||
i4.ReadDatabaseContainer($_db)
|
||||
.resultSet<i5.$RemoteAlbumEntityTable>('remote_album_entity'))
|
||||
.filter((f) => f.id.sqlEquals($_column));
|
||||
final item = $_typedResult.readTableOrNull(_albumIdTable($_db));
|
||||
if (item == null) return manager;
|
||||
return i0.ProcessedTableManager(
|
||||
manager.$state.copyWith(prefetchedData: [item]));
|
||||
}
|
||||
}
|
||||
|
||||
class $$RemoteAlbumAssetEntityTableFilterComposer
|
||||
extends i0.Composer<i0.GeneratedDatabase, i1.$RemoteAlbumAssetEntityTable> {
|
||||
$$RemoteAlbumAssetEntityTableFilterComposer({
|
||||
required super.$db,
|
||||
required super.$table,
|
||||
super.joinBuilder,
|
||||
super.$addJoinBuilderToRootComposer,
|
||||
super.$removeJoinBuilderFromRootComposer,
|
||||
});
|
||||
i3.$$RemoteAssetEntityTableFilterComposer get assetId {
|
||||
final i3.$$RemoteAssetEntityTableFilterComposer composer = $composerBuilder(
|
||||
composer: this,
|
||||
getCurrentColumn: (t) => t.assetId,
|
||||
referencedTable: i4.ReadDatabaseContainer($db)
|
||||
.resultSet<i3.$RemoteAssetEntityTable>('remote_asset_entity'),
|
||||
getReferencedColumn: (t) => t.id,
|
||||
builder: (joinBuilder,
|
||||
{$addJoinBuilderToRootComposer,
|
||||
$removeJoinBuilderFromRootComposer}) =>
|
||||
i3.$$RemoteAssetEntityTableFilterComposer(
|
||||
$db: $db,
|
||||
$table: i4.ReadDatabaseContainer($db)
|
||||
.resultSet<i3.$RemoteAssetEntityTable>('remote_asset_entity'),
|
||||
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
|
||||
joinBuilder: joinBuilder,
|
||||
$removeJoinBuilderFromRootComposer:
|
||||
$removeJoinBuilderFromRootComposer,
|
||||
));
|
||||
return composer;
|
||||
}
|
||||
|
||||
i5.$$RemoteAlbumEntityTableFilterComposer get albumId {
|
||||
final i5.$$RemoteAlbumEntityTableFilterComposer composer = $composerBuilder(
|
||||
composer: this,
|
||||
getCurrentColumn: (t) => t.albumId,
|
||||
referencedTable: i4.ReadDatabaseContainer($db)
|
||||
.resultSet<i5.$RemoteAlbumEntityTable>('remote_album_entity'),
|
||||
getReferencedColumn: (t) => t.id,
|
||||
builder: (joinBuilder,
|
||||
{$addJoinBuilderToRootComposer,
|
||||
$removeJoinBuilderFromRootComposer}) =>
|
||||
i5.$$RemoteAlbumEntityTableFilterComposer(
|
||||
$db: $db,
|
||||
$table: i4.ReadDatabaseContainer($db)
|
||||
.resultSet<i5.$RemoteAlbumEntityTable>('remote_album_entity'),
|
||||
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
|
||||
joinBuilder: joinBuilder,
|
||||
$removeJoinBuilderFromRootComposer:
|
||||
$removeJoinBuilderFromRootComposer,
|
||||
));
|
||||
return composer;
|
||||
}
|
||||
}
|
||||
|
||||
class $$RemoteAlbumAssetEntityTableOrderingComposer
|
||||
extends i0.Composer<i0.GeneratedDatabase, i1.$RemoteAlbumAssetEntityTable> {
|
||||
$$RemoteAlbumAssetEntityTableOrderingComposer({
|
||||
required super.$db,
|
||||
required super.$table,
|
||||
super.joinBuilder,
|
||||
super.$addJoinBuilderToRootComposer,
|
||||
super.$removeJoinBuilderFromRootComposer,
|
||||
});
|
||||
i3.$$RemoteAssetEntityTableOrderingComposer get assetId {
|
||||
final i3.$$RemoteAssetEntityTableOrderingComposer composer =
|
||||
$composerBuilder(
|
||||
composer: this,
|
||||
getCurrentColumn: (t) => t.assetId,
|
||||
referencedTable: i4.ReadDatabaseContainer($db)
|
||||
.resultSet<i3.$RemoteAssetEntityTable>('remote_asset_entity'),
|
||||
getReferencedColumn: (t) => t.id,
|
||||
builder: (joinBuilder,
|
||||
{$addJoinBuilderToRootComposer,
|
||||
$removeJoinBuilderFromRootComposer}) =>
|
||||
i3.$$RemoteAssetEntityTableOrderingComposer(
|
||||
$db: $db,
|
||||
$table: i4.ReadDatabaseContainer($db)
|
||||
.resultSet<i3.$RemoteAssetEntityTable>(
|
||||
'remote_asset_entity'),
|
||||
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
|
||||
joinBuilder: joinBuilder,
|
||||
$removeJoinBuilderFromRootComposer:
|
||||
$removeJoinBuilderFromRootComposer,
|
||||
));
|
||||
return composer;
|
||||
}
|
||||
|
||||
i5.$$RemoteAlbumEntityTableOrderingComposer get albumId {
|
||||
final i5.$$RemoteAlbumEntityTableOrderingComposer composer =
|
||||
$composerBuilder(
|
||||
composer: this,
|
||||
getCurrentColumn: (t) => t.albumId,
|
||||
referencedTable: i4.ReadDatabaseContainer($db)
|
||||
.resultSet<i5.$RemoteAlbumEntityTable>('remote_album_entity'),
|
||||
getReferencedColumn: (t) => t.id,
|
||||
builder: (joinBuilder,
|
||||
{$addJoinBuilderToRootComposer,
|
||||
$removeJoinBuilderFromRootComposer}) =>
|
||||
i5.$$RemoteAlbumEntityTableOrderingComposer(
|
||||
$db: $db,
|
||||
$table: i4.ReadDatabaseContainer($db)
|
||||
.resultSet<i5.$RemoteAlbumEntityTable>(
|
||||
'remote_album_entity'),
|
||||
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
|
||||
joinBuilder: joinBuilder,
|
||||
$removeJoinBuilderFromRootComposer:
|
||||
$removeJoinBuilderFromRootComposer,
|
||||
));
|
||||
return composer;
|
||||
}
|
||||
}
|
||||
|
||||
class $$RemoteAlbumAssetEntityTableAnnotationComposer
|
||||
extends i0.Composer<i0.GeneratedDatabase, i1.$RemoteAlbumAssetEntityTable> {
|
||||
$$RemoteAlbumAssetEntityTableAnnotationComposer({
|
||||
required super.$db,
|
||||
required super.$table,
|
||||
super.joinBuilder,
|
||||
super.$addJoinBuilderToRootComposer,
|
||||
super.$removeJoinBuilderFromRootComposer,
|
||||
});
|
||||
i3.$$RemoteAssetEntityTableAnnotationComposer get assetId {
|
||||
final i3.$$RemoteAssetEntityTableAnnotationComposer composer =
|
||||
$composerBuilder(
|
||||
composer: this,
|
||||
getCurrentColumn: (t) => t.assetId,
|
||||
referencedTable: i4.ReadDatabaseContainer($db)
|
||||
.resultSet<i3.$RemoteAssetEntityTable>('remote_asset_entity'),
|
||||
getReferencedColumn: (t) => t.id,
|
||||
builder: (joinBuilder,
|
||||
{$addJoinBuilderToRootComposer,
|
||||
$removeJoinBuilderFromRootComposer}) =>
|
||||
i3.$$RemoteAssetEntityTableAnnotationComposer(
|
||||
$db: $db,
|
||||
$table: i4.ReadDatabaseContainer($db)
|
||||
.resultSet<i3.$RemoteAssetEntityTable>(
|
||||
'remote_asset_entity'),
|
||||
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
|
||||
joinBuilder: joinBuilder,
|
||||
$removeJoinBuilderFromRootComposer:
|
||||
$removeJoinBuilderFromRootComposer,
|
||||
));
|
||||
return composer;
|
||||
}
|
||||
|
||||
i5.$$RemoteAlbumEntityTableAnnotationComposer get albumId {
|
||||
final i5.$$RemoteAlbumEntityTableAnnotationComposer composer =
|
||||
$composerBuilder(
|
||||
composer: this,
|
||||
getCurrentColumn: (t) => t.albumId,
|
||||
referencedTable: i4.ReadDatabaseContainer($db)
|
||||
.resultSet<i5.$RemoteAlbumEntityTable>('remote_album_entity'),
|
||||
getReferencedColumn: (t) => t.id,
|
||||
builder: (joinBuilder,
|
||||
{$addJoinBuilderToRootComposer,
|
||||
$removeJoinBuilderFromRootComposer}) =>
|
||||
i5.$$RemoteAlbumEntityTableAnnotationComposer(
|
||||
$db: $db,
|
||||
$table: i4.ReadDatabaseContainer($db)
|
||||
.resultSet<i5.$RemoteAlbumEntityTable>(
|
||||
'remote_album_entity'),
|
||||
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
|
||||
joinBuilder: joinBuilder,
|
||||
$removeJoinBuilderFromRootComposer:
|
||||
$removeJoinBuilderFromRootComposer,
|
||||
));
|
||||
return composer;
|
||||
}
|
||||
}
|
||||
|
||||
class $$RemoteAlbumAssetEntityTableTableManager extends i0.RootTableManager<
|
||||
i0.GeneratedDatabase,
|
||||
i1.$RemoteAlbumAssetEntityTable,
|
||||
i1.RemoteAlbumAssetEntityData,
|
||||
i1.$$RemoteAlbumAssetEntityTableFilterComposer,
|
||||
i1.$$RemoteAlbumAssetEntityTableOrderingComposer,
|
||||
i1.$$RemoteAlbumAssetEntityTableAnnotationComposer,
|
||||
$$RemoteAlbumAssetEntityTableCreateCompanionBuilder,
|
||||
$$RemoteAlbumAssetEntityTableUpdateCompanionBuilder,
|
||||
(i1.RemoteAlbumAssetEntityData, i1.$$RemoteAlbumAssetEntityTableReferences),
|
||||
i1.RemoteAlbumAssetEntityData,
|
||||
i0.PrefetchHooks Function({bool assetId, bool albumId})> {
|
||||
$$RemoteAlbumAssetEntityTableTableManager(
|
||||
i0.GeneratedDatabase db, i1.$RemoteAlbumAssetEntityTable table)
|
||||
: super(i0.TableManagerState(
|
||||
db: db,
|
||||
table: table,
|
||||
createFilteringComposer: () =>
|
||||
i1.$$RemoteAlbumAssetEntityTableFilterComposer(
|
||||
$db: db, $table: table),
|
||||
createOrderingComposer: () =>
|
||||
i1.$$RemoteAlbumAssetEntityTableOrderingComposer(
|
||||
$db: db, $table: table),
|
||||
createComputedFieldComposer: () =>
|
||||
i1.$$RemoteAlbumAssetEntityTableAnnotationComposer(
|
||||
$db: db, $table: table),
|
||||
updateCompanionCallback: ({
|
||||
i0.Value<String> assetId = const i0.Value.absent(),
|
||||
i0.Value<String> albumId = const i0.Value.absent(),
|
||||
}) =>
|
||||
i1.RemoteAlbumAssetEntityCompanion(
|
||||
assetId: assetId,
|
||||
albumId: albumId,
|
||||
),
|
||||
createCompanionCallback: ({
|
||||
required String assetId,
|
||||
required String albumId,
|
||||
}) =>
|
||||
i1.RemoteAlbumAssetEntityCompanion.insert(
|
||||
assetId: assetId,
|
||||
albumId: albumId,
|
||||
),
|
||||
withReferenceMapper: (p0) => p0
|
||||
.map((e) => (
|
||||
e.readTable(table),
|
||||
i1.$$RemoteAlbumAssetEntityTableReferences(db, table, e)
|
||||
))
|
||||
.toList(),
|
||||
prefetchHooksCallback: ({assetId = false, albumId = false}) {
|
||||
return i0.PrefetchHooks(
|
||||
db: db,
|
||||
explicitlyWatchedTables: [],
|
||||
addJoins: <
|
||||
T extends i0.TableManagerState<
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic,
|
||||
dynamic>>(state) {
|
||||
if (assetId) {
|
||||
state = state.withJoin(
|
||||
currentTable: table,
|
||||
currentColumn: table.assetId,
|
||||
referencedTable: i1.$$RemoteAlbumAssetEntityTableReferences
|
||||
._assetIdTable(db),
|
||||
referencedColumn: i1.$$RemoteAlbumAssetEntityTableReferences
|
||||
._assetIdTable(db)
|
||||
.id,
|
||||
) as T;
|
||||
}
|
||||
if (albumId) {
|
||||
state = state.withJoin(
|
||||
currentTable: table,
|
||||
currentColumn: table.albumId,
|
||||
referencedTable: i1.$$RemoteAlbumAssetEntityTableReferences
|
||||
._albumIdTable(db),
|
||||
referencedColumn: i1.$$RemoteAlbumAssetEntityTableReferences
|
||||
._albumIdTable(db)
|
||||
.id,
|
||||
) as T;
|
||||
}
|
||||
|
||||
return state;
|
||||
},
|
||||
getPrefetchedDataCallback: (items) async {
|
||||
return [];
|
||||
},
|
||||
);
|
||||
},
|
||||
));
|
||||
}
|
||||
|
||||
typedef $$RemoteAlbumAssetEntityTableProcessedTableManager
|
||||
= i0.ProcessedTableManager<
|
||||
i0.GeneratedDatabase,
|
||||
i1.$RemoteAlbumAssetEntityTable,
|
||||
i1.RemoteAlbumAssetEntityData,
|
||||
i1.$$RemoteAlbumAssetEntityTableFilterComposer,
|
||||
i1.$$RemoteAlbumAssetEntityTableOrderingComposer,
|
||||
i1.$$RemoteAlbumAssetEntityTableAnnotationComposer,
|
||||
$$RemoteAlbumAssetEntityTableCreateCompanionBuilder,
|
||||
$$RemoteAlbumAssetEntityTableUpdateCompanionBuilder,
|
||||
(
|
||||
i1.RemoteAlbumAssetEntityData,
|
||||
i1.$$RemoteAlbumAssetEntityTableReferences
|
||||
),
|
||||
i1.RemoteAlbumAssetEntityData,
|
||||
i0.PrefetchHooks Function({bool assetId, bool albumId})>;
|
||||
|
||||
class $RemoteAlbumAssetEntityTable extends i2.RemoteAlbumAssetEntity
    with
        i0.TableInfo<$RemoteAlbumAssetEntityTable,
            i1.RemoteAlbumAssetEntityData> {
  @override
  final i0.GeneratedDatabase attachedDatabase;
  final String? _alias;
  $RemoteAlbumAssetEntityTable(this.attachedDatabase, [this._alias]);
  static const i0.VerificationMeta _assetIdMeta =
      const i0.VerificationMeta('assetId');
  @override
  late final i0.GeneratedColumn<String> assetId = i0.GeneratedColumn<String>(
      'asset_id', aliasedName, false,
      type: i0.DriftSqlType.string,
      requiredDuringInsert: true,
      defaultConstraints: i0.GeneratedColumn.constraintIsAlways(
          'REFERENCES remote_asset_entity (id) ON DELETE CASCADE'));
  static const i0.VerificationMeta _albumIdMeta =
      const i0.VerificationMeta('albumId');
  @override
  late final i0.GeneratedColumn<String> albumId = i0.GeneratedColumn<String>(
      'album_id', aliasedName, false,
      type: i0.DriftSqlType.string,
      requiredDuringInsert: true,
      defaultConstraints: i0.GeneratedColumn.constraintIsAlways(
          'REFERENCES remote_album_entity (id) ON DELETE CASCADE'));
  @override
  List<i0.GeneratedColumn> get $columns => [assetId, albumId];
  @override
  String get aliasedName => _alias ?? actualTableName;
  @override
  String get actualTableName => $name;
  static const String $name = 'remote_album_asset_entity';
  @override
  i0.VerificationContext validateIntegrity(
      i0.Insertable<i1.RemoteAlbumAssetEntityData> instance,
      {bool isInserting = false}) {
    final context = i0.VerificationContext();
    final data = instance.toColumns(true);
    if (data.containsKey('asset_id')) {
      context.handle(_assetIdMeta,
          assetId.isAcceptableOrUnknown(data['asset_id']!, _assetIdMeta));
    } else if (isInserting) {
      context.missing(_assetIdMeta);
    }
    if (data.containsKey('album_id')) {
      context.handle(_albumIdMeta,
          albumId.isAcceptableOrUnknown(data['album_id']!, _albumIdMeta));
    } else if (isInserting) {
      context.missing(_albumIdMeta);
    }
    return context;
  }

  @override
  Set<i0.GeneratedColumn> get $primaryKey => {assetId, albumId};
  @override
  i1.RemoteAlbumAssetEntityData map(Map<String, dynamic> data,
      {String? tablePrefix}) {
    final effectivePrefix = tablePrefix != null ? '$tablePrefix.' : '';
    return i1.RemoteAlbumAssetEntityData(
      assetId: attachedDatabase.typeMapping
          .read(i0.DriftSqlType.string, data['${effectivePrefix}asset_id'])!,
      albumId: attachedDatabase.typeMapping
          .read(i0.DriftSqlType.string, data['${effectivePrefix}album_id'])!,
    );
  }

  @override
  $RemoteAlbumAssetEntityTable createAlias(String alias) {
    return $RemoteAlbumAssetEntityTable(attachedDatabase, alias);
  }

  @override
  bool get withoutRowId => true;
  @override
  bool get isStrict => true;
}
|
||||
class RemoteAlbumAssetEntityData extends i0.DataClass
    implements i0.Insertable<i1.RemoteAlbumAssetEntityData> {
  final String assetId;
  final String albumId;
  const RemoteAlbumAssetEntityData(
      {required this.assetId, required this.albumId});
  @override
  Map<String, i0.Expression> toColumns(bool nullToAbsent) {
    final map = <String, i0.Expression>{};
    map['asset_id'] = i0.Variable<String>(assetId);
    map['album_id'] = i0.Variable<String>(albumId);
    return map;
  }

  factory RemoteAlbumAssetEntityData.fromJson(Map<String, dynamic> json,
      {i0.ValueSerializer? serializer}) {
    serializer ??= i0.driftRuntimeOptions.defaultSerializer;
    return RemoteAlbumAssetEntityData(
      assetId: serializer.fromJson<String>(json['assetId']),
      albumId: serializer.fromJson<String>(json['albumId']),
    );
  }
  @override
  Map<String, dynamic> toJson({i0.ValueSerializer? serializer}) {
    serializer ??= i0.driftRuntimeOptions.defaultSerializer;
    return <String, dynamic>{
      'assetId': serializer.toJson<String>(assetId),
      'albumId': serializer.toJson<String>(albumId),
    };
  }

  i1.RemoteAlbumAssetEntityData copyWith({String? assetId, String? albumId}) =>
      i1.RemoteAlbumAssetEntityData(
        assetId: assetId ?? this.assetId,
        albumId: albumId ?? this.albumId,
      );
  RemoteAlbumAssetEntityData copyWithCompanion(
      i1.RemoteAlbumAssetEntityCompanion data) {
    return RemoteAlbumAssetEntityData(
      assetId: data.assetId.present ? data.assetId.value : this.assetId,
      albumId: data.albumId.present ? data.albumId.value : this.albumId,
    );
  }

  @override
  String toString() {
    return (StringBuffer('RemoteAlbumAssetEntityData(')
          ..write('assetId: $assetId, ')
          ..write('albumId: $albumId')
          ..write(')'))
        .toString();
  }

  @override
  int get hashCode => Object.hash(assetId, albumId);
  @override
  bool operator ==(Object other) =>
      identical(this, other) ||
      (other is i1.RemoteAlbumAssetEntityData &&
          other.assetId == this.assetId &&
          other.albumId == this.albumId);
}
|
||||
class RemoteAlbumAssetEntityCompanion
    extends i0.UpdateCompanion<i1.RemoteAlbumAssetEntityData> {
  final i0.Value<String> assetId;
  final i0.Value<String> albumId;
  const RemoteAlbumAssetEntityCompanion({
    this.assetId = const i0.Value.absent(),
    this.albumId = const i0.Value.absent(),
  });
  RemoteAlbumAssetEntityCompanion.insert({
    required String assetId,
    required String albumId,
  })  : assetId = i0.Value(assetId),
        albumId = i0.Value(albumId);
  static i0.Insertable<i1.RemoteAlbumAssetEntityData> custom({
    i0.Expression<String>? assetId,
    i0.Expression<String>? albumId,
  }) {
    return i0.RawValuesInsertable({
      if (assetId != null) 'asset_id': assetId,
      if (albumId != null) 'album_id': albumId,
    });
  }

  i1.RemoteAlbumAssetEntityCompanion copyWith(
      {i0.Value<String>? assetId, i0.Value<String>? albumId}) {
    return i1.RemoteAlbumAssetEntityCompanion(
      assetId: assetId ?? this.assetId,
      albumId: albumId ?? this.albumId,
    );
  }

  @override
  Map<String, i0.Expression> toColumns(bool nullToAbsent) {
    final map = <String, i0.Expression>{};
    if (assetId.present) {
      map['asset_id'] = i0.Variable<String>(assetId.value);
    }
    if (albumId.present) {
      map['album_id'] = i0.Variable<String>(albumId.value);
    }
    return map;
  }

  @override
  String toString() {
    return (StringBuffer('RemoteAlbumAssetEntityCompanion(')
          ..write('assetId: $assetId, ')
          ..write('albumId: $albumId')
          ..write(')'))
        .toString();
  }
}
@@ -0,0 +1,20 @@
import 'package:drift/drift.dart';
import 'package:immich_mobile/domain/models/album/album.model.dart';
import 'package:immich_mobile/infrastructure/entities/remote_album.entity.dart';
import 'package:immich_mobile/infrastructure/entities/user.entity.dart';
import 'package:immich_mobile/infrastructure/utils/drift_default.mixin.dart';

class RemoteAlbumUserEntity extends Table with DriftDefaultsMixin {
  const RemoteAlbumUserEntity();

  TextColumn get albumId =>
      text().references(RemoteAlbumEntity, #id, onDelete: KeyAction.cascade)();

  TextColumn get userId =>
      text().references(UserEntity, #id, onDelete: KeyAction.cascade)();

  IntColumn get role => intEnum<AlbumUserRole>()();

  @override
  Set<Column> get primaryKey => {albumId, userId};
}
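A sketch of the kind of join this table enables: listing the users an album is shared with, together with their role. `MyDatabase` and the `remoteAlbumUserEntity`/`userEntity` accessors are assumed names, and the generated row type for UserEntity is assumed to be `UserEntityData`; none of these are part of this diff.

import 'package:drift/drift.dart';
// Plus the generated entity imports from this diff and the app database (assumed).

// Hypothetical sketch only; accessor and data-class names are assumed.
Future<List<(UserEntityData, AlbumUserRole)>> albumUsers(
    MyDatabase db, String albumId) async {
  final query = db.select(db.remoteAlbumUserEntity).join([
    innerJoin(db.userEntity,
        db.userEntity.id.equalsExp(db.remoteAlbumUserEntity.userId)),
  ])
    ..where(db.remoteAlbumUserEntity.albumId.equals(albumId));

  final rows = await query.get();
  return rows
      .map((row) => (
            row.readTable(db.userEntity),
            row.readTable(db.remoteAlbumUserEntity).role,
          ))
      .toList();
}
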
618 mobile/lib/infrastructure/entities/remote_album_user.entity.drift.dart (generated, new file)
@@ -0,0 +1,618 @@
// dart format width=80
// ignore_for_file: type=lint
import 'package:drift/drift.dart' as i0;
import 'package:immich_mobile/infrastructure/entities/remote_album_user.entity.drift.dart'
    as i1;
import 'package:immich_mobile/domain/models/album/album.model.dart' as i2;
import 'package:immich_mobile/infrastructure/entities/remote_album_user.entity.dart'
    as i3;
import 'package:immich_mobile/infrastructure/entities/remote_album.entity.drift.dart'
    as i4;
import 'package:drift/internal/modular.dart' as i5;
import 'package:immich_mobile/infrastructure/entities/user.entity.drift.dart'
    as i6;

typedef $$RemoteAlbumUserEntityTableCreateCompanionBuilder
    = i1.RemoteAlbumUserEntityCompanion Function({
  required String albumId,
  required String userId,
  required i2.AlbumUserRole role,
});
typedef $$RemoteAlbumUserEntityTableUpdateCompanionBuilder
    = i1.RemoteAlbumUserEntityCompanion Function({
  i0.Value<String> albumId,
  i0.Value<String> userId,
  i0.Value<i2.AlbumUserRole> role,
});

final class $$RemoteAlbumUserEntityTableReferences extends i0.BaseReferences<
|
||||
i0.GeneratedDatabase,
|
||||
i1.$RemoteAlbumUserEntityTable,
|
||||
i1.RemoteAlbumUserEntityData> {
|
||||
$$RemoteAlbumUserEntityTableReferences(
|
||||
super.$_db, super.$_table, super.$_typedResult);
|
||||
|
||||
static i4.$RemoteAlbumEntityTable _albumIdTable(i0.GeneratedDatabase db) =>
|
||||
i5.ReadDatabaseContainer(db)
|
||||
.resultSet<i4.$RemoteAlbumEntityTable>('remote_album_entity')
|
||||
.createAlias(i0.$_aliasNameGenerator(
|
||||
i5.ReadDatabaseContainer(db)
|
||||
.resultSet<i1.$RemoteAlbumUserEntityTable>(
|
||||
'remote_album_user_entity')
|
||||
.albumId,
|
||||
i5.ReadDatabaseContainer(db)
|
||||
.resultSet<i4.$RemoteAlbumEntityTable>('remote_album_entity')
|
||||
.id));
|
||||
|
||||
i4.$$RemoteAlbumEntityTableProcessedTableManager get albumId {
|
||||
final $_column = $_itemColumn<String>('album_id')!;
|
||||
|
||||
final manager = i4
|
||||
.$$RemoteAlbumEntityTableTableManager(
|
||||
$_db,
|
||||
i5.ReadDatabaseContainer($_db)
|
||||
.resultSet<i4.$RemoteAlbumEntityTable>('remote_album_entity'))
|
||||
.filter((f) => f.id.sqlEquals($_column));
|
||||
final item = $_typedResult.readTableOrNull(_albumIdTable($_db));
|
||||
if (item == null) return manager;
|
||||
return i0.ProcessedTableManager(
|
||||
manager.$state.copyWith(prefetchedData: [item]));
|
||||
}
|
||||
|
||||
static i6.$UserEntityTable _userIdTable(i0.GeneratedDatabase db) =>
|
||||
i5.ReadDatabaseContainer(db)
|
||||
.resultSet<i6.$UserEntityTable>('user_entity')
|
||||
.createAlias(i0.$_aliasNameGenerator(
|
||||
i5.ReadDatabaseContainer(db)
|
||||
.resultSet<i1.$RemoteAlbumUserEntityTable>(
|
||||
'remote_album_user_entity')
|
||||
.userId,
|
||||
i5.ReadDatabaseContainer(db)
|
||||
.resultSet<i6.$UserEntityTable>('user_entity')
|
||||
.id));
|
||||
|
||||
i6.$$UserEntityTableProcessedTableManager get userId {
|
||||
final $_column = $_itemColumn<String>('user_id')!;
|
||||
|
||||
final manager = i6
|
||||
.$$UserEntityTableTableManager(
|
||||
$_db,
|
||||
i5.ReadDatabaseContainer($_db)
|
||||
.resultSet<i6.$UserEntityTable>('user_entity'))
|
||||
.filter((f) => f.id.sqlEquals($_column));
|
||||
final item = $_typedResult.readTableOrNull(_userIdTable($_db));
|
||||
if (item == null) return manager;
|
||||
return i0.ProcessedTableManager(
|
||||
manager.$state.copyWith(prefetchedData: [item]));
|
||||
}
|
||||
}
|
||||
|
||||
class $$RemoteAlbumUserEntityTableFilterComposer
|
||||
extends i0.Composer<i0.GeneratedDatabase, i1.$RemoteAlbumUserEntityTable> {
|
||||
$$RemoteAlbumUserEntityTableFilterComposer({
|
||||
required super.$db,
|
||||
required super.$table,
|
||||
super.joinBuilder,
|
||||
super.$addJoinBuilderToRootComposer,
|
||||
super.$removeJoinBuilderFromRootComposer,
|
||||
});
|
||||
i0.ColumnWithTypeConverterFilters<i2.AlbumUserRole, i2.AlbumUserRole, int>
|
||||
get role => $composableBuilder(
|
||||
column: $table.role,
|
||||
builder: (column) => i0.ColumnWithTypeConverterFilters(column));
|
||||
|
||||
i4.$$RemoteAlbumEntityTableFilterComposer get albumId {
|
||||
final i4.$$RemoteAlbumEntityTableFilterComposer composer = $composerBuilder(
|
||||
composer: this,
|
||||
getCurrentColumn: (t) => t.albumId,
|
||||
referencedTable: i5.ReadDatabaseContainer($db)
|
||||
.resultSet<i4.$RemoteAlbumEntityTable>('remote_album_entity'),
|
||||
getReferencedColumn: (t) => t.id,
|
||||
builder: (joinBuilder,
|
||||
{$addJoinBuilderToRootComposer,
|
||||
$removeJoinBuilderFromRootComposer}) =>
|
||||
i4.$$RemoteAlbumEntityTableFilterComposer(
|
||||
$db: $db,
|
||||
$table: i5.ReadDatabaseContainer($db)
|
||||
.resultSet<i4.$RemoteAlbumEntityTable>('remote_album_entity'),
|
||||
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
|
||||
joinBuilder: joinBuilder,
|
||||
$removeJoinBuilderFromRootComposer:
|
||||
$removeJoinBuilderFromRootComposer,
|
||||
));
|
||||
return composer;
|
||||
}
|
||||
|
||||
i6.$$UserEntityTableFilterComposer get userId {
|
||||
final i6.$$UserEntityTableFilterComposer composer = $composerBuilder(
|
||||
composer: this,
|
||||
getCurrentColumn: (t) => t.userId,
|
||||
referencedTable: i5.ReadDatabaseContainer($db)
|
||||
.resultSet<i6.$UserEntityTable>('user_entity'),
|
||||
getReferencedColumn: (t) => t.id,
|
||||
builder: (joinBuilder,
|
||||
{$addJoinBuilderToRootComposer,
|
||||
$removeJoinBuilderFromRootComposer}) =>
|
||||
i6.$$UserEntityTableFilterComposer(
|
||||
$db: $db,
|
||||
$table: i5.ReadDatabaseContainer($db)
|
||||
.resultSet<i6.$UserEntityTable>('user_entity'),
|
||||
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
|
||||
joinBuilder: joinBuilder,
|
||||
$removeJoinBuilderFromRootComposer:
|
||||
$removeJoinBuilderFromRootComposer,
|
||||
));
|
||||
    return composer;
  }
}

class $$RemoteAlbumUserEntityTableOrderingComposer
    extends i0.Composer<i0.GeneratedDatabase, i1.$RemoteAlbumUserEntityTable> {
  $$RemoteAlbumUserEntityTableOrderingComposer({
    required super.$db,
    required super.$table,
    super.joinBuilder,
    super.$addJoinBuilderToRootComposer,
    super.$removeJoinBuilderFromRootComposer,
  });
  i0.ColumnOrderings<int> get role => $composableBuilder(
      column: $table.role, builder: (column) => i0.ColumnOrderings(column));

  i4.$$RemoteAlbumEntityTableOrderingComposer get albumId {
    final i4.$$RemoteAlbumEntityTableOrderingComposer composer =
        $composerBuilder(
            composer: this,
            getCurrentColumn: (t) => t.albumId,
            referencedTable: i5.ReadDatabaseContainer($db)
                .resultSet<i4.$RemoteAlbumEntityTable>('remote_album_entity'),
            getReferencedColumn: (t) => t.id,
            builder: (joinBuilder,
                    {$addJoinBuilderToRootComposer,
                    $removeJoinBuilderFromRootComposer}) =>
                i4.$$RemoteAlbumEntityTableOrderingComposer(
                  $db: $db,
                  $table: i5.ReadDatabaseContainer($db)
                      .resultSet<i4.$RemoteAlbumEntityTable>(
                          'remote_album_entity'),
                  $addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
                  joinBuilder: joinBuilder,
                  $removeJoinBuilderFromRootComposer:
                      $removeJoinBuilderFromRootComposer,
                ));
    return composer;
  }

  i6.$$UserEntityTableOrderingComposer get userId {
    final i6.$$UserEntityTableOrderingComposer composer = $composerBuilder(
        composer: this,
        getCurrentColumn: (t) => t.userId,
        referencedTable: i5.ReadDatabaseContainer($db)
            .resultSet<i6.$UserEntityTable>('user_entity'),
        getReferencedColumn: (t) => t.id,
        builder: (joinBuilder,
                {$addJoinBuilderToRootComposer,
                $removeJoinBuilderFromRootComposer}) =>
            i6.$$UserEntityTableOrderingComposer(
              $db: $db,
              $table: i5.ReadDatabaseContainer($db)
                  .resultSet<i6.$UserEntityTable>('user_entity'),
              $addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
              joinBuilder: joinBuilder,
              $removeJoinBuilderFromRootComposer:
                  $removeJoinBuilderFromRootComposer,
            ));
    return composer;
  }
}

class $$RemoteAlbumUserEntityTableAnnotationComposer
    extends i0.Composer<i0.GeneratedDatabase, i1.$RemoteAlbumUserEntityTable> {
  $$RemoteAlbumUserEntityTableAnnotationComposer({
    required super.$db,
    required super.$table,
    super.joinBuilder,
    super.$addJoinBuilderToRootComposer,
    super.$removeJoinBuilderFromRootComposer,
  });
  i0.GeneratedColumnWithTypeConverter<i2.AlbumUserRole, int> get role =>
      $composableBuilder(column: $table.role, builder: (column) => column);

  i4.$$RemoteAlbumEntityTableAnnotationComposer get albumId {
    final i4.$$RemoteAlbumEntityTableAnnotationComposer composer =
        $composerBuilder(
            composer: this,
            getCurrentColumn: (t) => t.albumId,
            referencedTable: i5.ReadDatabaseContainer($db)
                .resultSet<i4.$RemoteAlbumEntityTable>('remote_album_entity'),
            getReferencedColumn: (t) => t.id,
            builder: (joinBuilder,
                    {$addJoinBuilderToRootComposer,
                    $removeJoinBuilderFromRootComposer}) =>
                i4.$$RemoteAlbumEntityTableAnnotationComposer(
                  $db: $db,
                  $table: i5.ReadDatabaseContainer($db)
                      .resultSet<i4.$RemoteAlbumEntityTable>(
                          'remote_album_entity'),
                  $addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
                  joinBuilder: joinBuilder,
                  $removeJoinBuilderFromRootComposer:
                      $removeJoinBuilderFromRootComposer,
                ));
    return composer;
  }

  i6.$$UserEntityTableAnnotationComposer get userId {
    final i6.$$UserEntityTableAnnotationComposer composer = $composerBuilder(
        composer: this,
        getCurrentColumn: (t) => t.userId,
        referencedTable: i5.ReadDatabaseContainer($db)
            .resultSet<i6.$UserEntityTable>('user_entity'),
        getReferencedColumn: (t) => t.id,
        builder: (joinBuilder,
                {$addJoinBuilderToRootComposer,
                $removeJoinBuilderFromRootComposer}) =>
            i6.$$UserEntityTableAnnotationComposer(
              $db: $db,
              $table: i5.ReadDatabaseContainer($db)
                  .resultSet<i6.$UserEntityTable>('user_entity'),
              $addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
              joinBuilder: joinBuilder,
              $removeJoinBuilderFromRootComposer:
                  $removeJoinBuilderFromRootComposer,
            ));
    return composer;
  }
}

class $$RemoteAlbumUserEntityTableTableManager extends i0.RootTableManager<
    i0.GeneratedDatabase,
    i1.$RemoteAlbumUserEntityTable,
    i1.RemoteAlbumUserEntityData,
    i1.$$RemoteAlbumUserEntityTableFilterComposer,
    i1.$$RemoteAlbumUserEntityTableOrderingComposer,
    i1.$$RemoteAlbumUserEntityTableAnnotationComposer,
    $$RemoteAlbumUserEntityTableCreateCompanionBuilder,
    $$RemoteAlbumUserEntityTableUpdateCompanionBuilder,
    (i1.RemoteAlbumUserEntityData, i1.$$RemoteAlbumUserEntityTableReferences),
    i1.RemoteAlbumUserEntityData,
    i0.PrefetchHooks Function({bool albumId, bool userId})> {
  $$RemoteAlbumUserEntityTableTableManager(
      i0.GeneratedDatabase db, i1.$RemoteAlbumUserEntityTable table)
      : super(i0.TableManagerState(
          db: db,
          table: table,
          createFilteringComposer: () =>
              i1.$$RemoteAlbumUserEntityTableFilterComposer(
                  $db: db, $table: table),
          createOrderingComposer: () =>
              i1.$$RemoteAlbumUserEntityTableOrderingComposer(
                  $db: db, $table: table),
          createComputedFieldComposer: () =>
              i1.$$RemoteAlbumUserEntityTableAnnotationComposer(
                  $db: db, $table: table),
          updateCompanionCallback: ({
            i0.Value<String> albumId = const i0.Value.absent(),
            i0.Value<String> userId = const i0.Value.absent(),
            i0.Value<i2.AlbumUserRole> role = const i0.Value.absent(),
          }) =>
              i1.RemoteAlbumUserEntityCompanion(
            albumId: albumId,
            userId: userId,
            role: role,
          ),
          createCompanionCallback: ({
            required String albumId,
            required String userId,
            required i2.AlbumUserRole role,
          }) =>
              i1.RemoteAlbumUserEntityCompanion.insert(
            albumId: albumId,
            userId: userId,
            role: role,
          ),
          withReferenceMapper: (p0) => p0
              .map((e) => (
                    e.readTable(table),
                    i1.$$RemoteAlbumUserEntityTableReferences(db, table, e)
                  ))
              .toList(),
          prefetchHooksCallback: ({albumId = false, userId = false}) {
            return i0.PrefetchHooks(
              db: db,
              explicitlyWatchedTables: [],
              addJoins: <
                  T extends i0.TableManagerState<
                      dynamic,
                      dynamic,
                      dynamic,
                      dynamic,
                      dynamic,
                      dynamic,
                      dynamic,
                      dynamic,
                      dynamic,
                      dynamic,
                      dynamic>>(state) {
                if (albumId) {
                  state = state.withJoin(
                    currentTable: table,
                    currentColumn: table.albumId,
                    referencedTable: i1.$$RemoteAlbumUserEntityTableReferences
                        ._albumIdTable(db),
                    referencedColumn: i1.$$RemoteAlbumUserEntityTableReferences
                        ._albumIdTable(db)
                        .id,
                  ) as T;
                }
                if (userId) {
                  state = state.withJoin(
                    currentTable: table,
                    currentColumn: table.userId,
                    referencedTable: i1.$$RemoteAlbumUserEntityTableReferences
                        ._userIdTable(db),
                    referencedColumn: i1.$$RemoteAlbumUserEntityTableReferences
                        ._userIdTable(db)
                        .id,
                  ) as T;
                }

                return state;
              },
              getPrefetchedDataCallback: (items) async {
                return [];
              },
            );
          },
        ));
}

typedef $$RemoteAlbumUserEntityTableProcessedTableManager
    = i0.ProcessedTableManager<
        i0.GeneratedDatabase,
        i1.$RemoteAlbumUserEntityTable,
        i1.RemoteAlbumUserEntityData,
        i1.$$RemoteAlbumUserEntityTableFilterComposer,
        i1.$$RemoteAlbumUserEntityTableOrderingComposer,
        i1.$$RemoteAlbumUserEntityTableAnnotationComposer,
        $$RemoteAlbumUserEntityTableCreateCompanionBuilder,
        $$RemoteAlbumUserEntityTableUpdateCompanionBuilder,
        (
          i1.RemoteAlbumUserEntityData,
          i1.$$RemoteAlbumUserEntityTableReferences
        ),
        i1.RemoteAlbumUserEntityData,
        i0.PrefetchHooks Function({bool albumId, bool userId})>;

class $RemoteAlbumUserEntityTable extends i3.RemoteAlbumUserEntity
    with
        i0
        .TableInfo<$RemoteAlbumUserEntityTable, i1.RemoteAlbumUserEntityData> {
  @override
  final i0.GeneratedDatabase attachedDatabase;
  final String? _alias;
  $RemoteAlbumUserEntityTable(this.attachedDatabase, [this._alias]);
  static const i0.VerificationMeta _albumIdMeta =
      const i0.VerificationMeta('albumId');
  @override
  late final i0.GeneratedColumn<String> albumId = i0.GeneratedColumn<String>(
      'album_id', aliasedName, false,
      type: i0.DriftSqlType.string,
      requiredDuringInsert: true,
      defaultConstraints: i0.GeneratedColumn.constraintIsAlways(
          'REFERENCES remote_album_entity (id) ON DELETE CASCADE'));
  static const i0.VerificationMeta _userIdMeta =
      const i0.VerificationMeta('userId');
  @override
  late final i0.GeneratedColumn<String> userId = i0.GeneratedColumn<String>(
      'user_id', aliasedName, false,
      type: i0.DriftSqlType.string,
      requiredDuringInsert: true,
      defaultConstraints: i0.GeneratedColumn.constraintIsAlways(
          'REFERENCES user_entity (id) ON DELETE CASCADE'));
  @override
  late final i0.GeneratedColumnWithTypeConverter<i2.AlbumUserRole, int> role =
      i0.GeneratedColumn<int>('role', aliasedName, false,
              type: i0.DriftSqlType.int, requiredDuringInsert: true)
          .withConverter<i2.AlbumUserRole>(
              i1.$RemoteAlbumUserEntityTable.$converterrole);
  @override
  List<i0.GeneratedColumn> get $columns => [albumId, userId, role];
  @override
  String get aliasedName => _alias ?? actualTableName;
  @override
  String get actualTableName => $name;
  static const String $name = 'remote_album_user_entity';
  @override
  i0.VerificationContext validateIntegrity(
      i0.Insertable<i1.RemoteAlbumUserEntityData> instance,
      {bool isInserting = false}) {
    final context = i0.VerificationContext();
    final data = instance.toColumns(true);
    if (data.containsKey('album_id')) {
      context.handle(_albumIdMeta,
          albumId.isAcceptableOrUnknown(data['album_id']!, _albumIdMeta));
    } else if (isInserting) {
      context.missing(_albumIdMeta);
    }
    if (data.containsKey('user_id')) {
      context.handle(_userIdMeta,
          userId.isAcceptableOrUnknown(data['user_id']!, _userIdMeta));
    } else if (isInserting) {
      context.missing(_userIdMeta);
    }
    return context;
  }

  @override
  Set<i0.GeneratedColumn> get $primaryKey => {albumId, userId};
  @override
  i1.RemoteAlbumUserEntityData map(Map<String, dynamic> data,
      {String? tablePrefix}) {
    final effectivePrefix = tablePrefix != null ? '$tablePrefix.' : '';
    return i1.RemoteAlbumUserEntityData(
      albumId: attachedDatabase.typeMapping
          .read(i0.DriftSqlType.string, data['${effectivePrefix}album_id'])!,
      userId: attachedDatabase.typeMapping
          .read(i0.DriftSqlType.string, data['${effectivePrefix}user_id'])!,
      role: i1.$RemoteAlbumUserEntityTable.$converterrole.fromSql(
          attachedDatabase.typeMapping
              .read(i0.DriftSqlType.int, data['${effectivePrefix}role'])!),
    );
  }

  @override
  $RemoteAlbumUserEntityTable createAlias(String alias) {
    return $RemoteAlbumUserEntityTable(attachedDatabase, alias);
  }

  static i0.JsonTypeConverter2<i2.AlbumUserRole, int, int> $converterrole =
      const i0.EnumIndexConverter<i2.AlbumUserRole>(i2.AlbumUserRole.values);
  @override
  bool get withoutRowId => true;
  @override
  bool get isStrict => true;
}

class RemoteAlbumUserEntityData extends i0.DataClass
    implements i0.Insertable<i1.RemoteAlbumUserEntityData> {
  final String albumId;
  final String userId;
  final i2.AlbumUserRole role;
  const RemoteAlbumUserEntityData(
      {required this.albumId, required this.userId, required this.role});
  @override
  Map<String, i0.Expression> toColumns(bool nullToAbsent) {
    final map = <String, i0.Expression>{};
    map['album_id'] = i0.Variable<String>(albumId);
    map['user_id'] = i0.Variable<String>(userId);
    {
      map['role'] = i0.Variable<int>(
          i1.$RemoteAlbumUserEntityTable.$converterrole.toSql(role));
    }
    return map;
  }

  factory RemoteAlbumUserEntityData.fromJson(Map<String, dynamic> json,
      {i0.ValueSerializer? serializer}) {
    serializer ??= i0.driftRuntimeOptions.defaultSerializer;
    return RemoteAlbumUserEntityData(
      albumId: serializer.fromJson<String>(json['albumId']),
      userId: serializer.fromJson<String>(json['userId']),
      role: i1.$RemoteAlbumUserEntityTable.$converterrole
          .fromJson(serializer.fromJson<int>(json['role'])),
    );
  }
  @override
  Map<String, dynamic> toJson({i0.ValueSerializer? serializer}) {
    serializer ??= i0.driftRuntimeOptions.defaultSerializer;
    return <String, dynamic>{
      'albumId': serializer.toJson<String>(albumId),
      'userId': serializer.toJson<String>(userId),
      'role': serializer.toJson<int>(
          i1.$RemoteAlbumUserEntityTable.$converterrole.toJson(role)),
    };
  }

  i1.RemoteAlbumUserEntityData copyWith(
          {String? albumId, String? userId, i2.AlbumUserRole? role}) =>
      i1.RemoteAlbumUserEntityData(
        albumId: albumId ?? this.albumId,
        userId: userId ?? this.userId,
        role: role ?? this.role,
      );
  RemoteAlbumUserEntityData copyWithCompanion(
      i1.RemoteAlbumUserEntityCompanion data) {
    return RemoteAlbumUserEntityData(
      albumId: data.albumId.present ? data.albumId.value : this.albumId,
      userId: data.userId.present ? data.userId.value : this.userId,
      role: data.role.present ? data.role.value : this.role,
    );
  }

  @override
  String toString() {
    return (StringBuffer('RemoteAlbumUserEntityData(')
          ..write('albumId: $albumId, ')
          ..write('userId: $userId, ')
          ..write('role: $role')
          ..write(')'))
        .toString();
  }

  @override
  int get hashCode => Object.hash(albumId, userId, role);
  @override
  bool operator ==(Object other) =>
      identical(this, other) ||
      (other is i1.RemoteAlbumUserEntityData &&
          other.albumId == this.albumId &&
          other.userId == this.userId &&
          other.role == this.role);
}

class RemoteAlbumUserEntityCompanion
    extends i0.UpdateCompanion<i1.RemoteAlbumUserEntityData> {
  final i0.Value<String> albumId;
  final i0.Value<String> userId;
  final i0.Value<i2.AlbumUserRole> role;
  const RemoteAlbumUserEntityCompanion({
    this.albumId = const i0.Value.absent(),
    this.userId = const i0.Value.absent(),
    this.role = const i0.Value.absent(),
  });
  RemoteAlbumUserEntityCompanion.insert({
    required String albumId,
    required String userId,
    required i2.AlbumUserRole role,
  }) : albumId = i0.Value(albumId),
       userId = i0.Value(userId),
       role = i0.Value(role);
  static i0.Insertable<i1.RemoteAlbumUserEntityData> custom({
    i0.Expression<String>? albumId,
    i0.Expression<String>? userId,
    i0.Expression<int>? role,
  }) {
    return i0.RawValuesInsertable({
      if (albumId != null) 'album_id': albumId,
      if (userId != null) 'user_id': userId,
      if (role != null) 'role': role,
    });
  }

  i1.RemoteAlbumUserEntityCompanion copyWith(
      {i0.Value<String>? albumId,
      i0.Value<String>? userId,
      i0.Value<i2.AlbumUserRole>? role}) {
    return i1.RemoteAlbumUserEntityCompanion(
      albumId: albumId ?? this.albumId,
      userId: userId ?? this.userId,
      role: role ?? this.role,
    );
  }

  @override
  Map<String, i0.Expression> toColumns(bool nullToAbsent) {
    final map = <String, i0.Expression>{};
    if (albumId.present) {
      map['album_id'] = i0.Variable<String>(albumId.value);
    }
    if (userId.present) {
      map['user_id'] = i0.Variable<String>(userId.value);
    }
    if (role.present) {
      map['role'] = i0.Variable<int>(
          i1.$RemoteAlbumUserEntityTable.$converterrole.toSql(role.value));
    }
    return map;
  }

  @override
  String toString() {
    return (StringBuffer('RemoteAlbumUserEntityCompanion(')
          ..write('albumId: $albumId, ')
          ..write('userId: $userId, ')
          ..write('role: $role')
          ..write(')'))
        .toString();
  }
}
@@ -1,5 +1,6 @@
import 'package:drift/drift.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/infrastructure/entities/remote_asset.entity.drift.dart';
import 'package:immich_mobile/infrastructure/entities/user.entity.dart';
import 'package:immich_mobile/infrastructure/utils/asset.mixin.dart';
import 'package:immich_mobile/infrastructure/utils/drift_default.mixin.dart';
@@ -34,3 +35,22 @@ class RemoteAssetEntity extends Table
  @override
  Set<Column> get primaryKey => {id};
}

extension RemoteAssetEntityDataDomainEx on RemoteAssetEntityData {
  RemoteAsset toDto() => RemoteAsset(
        id: id,
        name: name,
        ownerId: ownerId,
        checksum: checksum,
        type: type,
        createdAt: createdAt,
        updatedAt: updatedAt,
        durationInSeconds: durationInSeconds,
        isFavorite: isFavorite,
        height: height,
        width: width,
        thumbHash: thumbHash,
        visibility: visibility,
        localId: null,
      );
}
@@ -5,7 +5,7 @@ class ApiRepository {

  Future<T> checkNull<T>(Future<T?> future) async {
    final response = await future;
    if (response == null) throw NoResponseDtoError();
    if (response == null) throw const NoResponseDtoError();
    return response;
  }
}
@@ -8,6 +8,9 @@ import 'package:immich_mobile/infrastructure/entities/local_album.entity.dart';
import 'package:immich_mobile/infrastructure/entities/local_album_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/local_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/partner.entity.dart';
import 'package:immich_mobile/infrastructure/entities/remote_album.entity.dart';
import 'package:immich_mobile/infrastructure/entities/remote_album_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/remote_album_user.entity.dart';
import 'package:immich_mobile/infrastructure/entities/remote_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/user.entity.dart';
import 'package:immich_mobile/infrastructure/entities/user_metadata.entity.dart';
@@ -40,6 +43,9 @@ class IsarDatabaseRepository implements IDatabaseRepository {
    LocalAlbumAssetEntity,
    RemoteAssetEntity,
    RemoteExifEntity,
    RemoteAlbumEntity,
    RemoteAlbumAssetEntity,
    RemoteAlbumUserEntity,
  ],
  include: {
    'package:immich_mobile/infrastructure/entities/merged_asset.drift',
@@ -17,9 +17,15 @@ import 'package:immich_mobile/infrastructure/entities/local_album_asset.entity.d
    as i7;
import 'package:immich_mobile/infrastructure/entities/exif.entity.drift.dart'
    as i8;
import 'package:immich_mobile/infrastructure/entities/merged_asset.drift.dart'
import 'package:immich_mobile/infrastructure/entities/remote_album.entity.drift.dart'
    as i9;
import 'package:drift/internal/modular.dart' as i10;
import 'package:immich_mobile/infrastructure/entities/remote_album_asset.entity.drift.dart'
    as i10;
import 'package:immich_mobile/infrastructure/entities/remote_album_user.entity.drift.dart'
    as i11;
import 'package:immich_mobile/infrastructure/entities/merged_asset.drift.dart'
    as i12;
import 'package:drift/internal/modular.dart' as i13;

abstract class $Drift extends i0.GeneratedDatabase {
  $Drift(i0.QueryExecutor e) : super(e);
@@ -39,8 +45,14 @@ abstract class $Drift extends i0.GeneratedDatabase {
      i7.$LocalAlbumAssetEntityTable(this);
  late final i8.$RemoteExifEntityTable remoteExifEntity =
      i8.$RemoteExifEntityTable(this);
  i9.MergedAssetDrift get mergedAssetDrift => i10.ReadDatabaseContainer(this)
      .accessor<i9.MergedAssetDrift>(i9.MergedAssetDrift.new);
  late final i9.$RemoteAlbumEntityTable remoteAlbumEntity =
      i9.$RemoteAlbumEntityTable(this);
  late final i10.$RemoteAlbumAssetEntityTable remoteAlbumAssetEntity =
      i10.$RemoteAlbumAssetEntityTable(this);
  late final i11.$RemoteAlbumUserEntityTable remoteAlbumUserEntity =
      i11.$RemoteAlbumUserEntityTable(this);
  i12.MergedAssetDrift get mergedAssetDrift => i13.ReadDatabaseContainer(this)
      .accessor<i12.MergedAssetDrift>(i12.MergedAssetDrift.new);
  @override
  Iterable<i0.TableInfo<i0.Table, Object?>> get allTables =>
      allSchemaEntities.whereType<i0.TableInfo<i0.Table, Object?>>();
@@ -56,7 +68,10 @@ abstract class $Drift extends i0.GeneratedDatabase {
        partnerEntity,
        localAlbumEntity,
        localAlbumAssetEntity,
        remoteExifEntity
        remoteExifEntity,
        remoteAlbumEntity,
        remoteAlbumAssetEntity,
        remoteAlbumUserEntity
      ];
  @override
  i0.StreamQueryUpdateRules get streamUpdateRules =>
@@ -114,6 +129,52 @@ abstract class $Drift extends i0.GeneratedDatabase {
              i0.TableUpdate('remote_exif_entity', kind: i0.UpdateKind.delete),
            ],
          ),
          i0.WritePropagation(
            on: i0.TableUpdateQuery.onTableName('user_entity',
                limitUpdateKind: i0.UpdateKind.delete),
            result: [
              i0.TableUpdate('remote_album_entity', kind: i0.UpdateKind.delete),
            ],
          ),
          i0.WritePropagation(
            on: i0.TableUpdateQuery.onTableName('remote_asset_entity',
                limitUpdateKind: i0.UpdateKind.delete),
            result: [
              i0.TableUpdate('remote_album_entity', kind: i0.UpdateKind.update),
            ],
          ),
          i0.WritePropagation(
            on: i0.TableUpdateQuery.onTableName('remote_asset_entity',
                limitUpdateKind: i0.UpdateKind.delete),
            result: [
              i0.TableUpdate('remote_album_asset_entity',
                  kind: i0.UpdateKind.delete),
            ],
          ),
          i0.WritePropagation(
            on: i0.TableUpdateQuery.onTableName('remote_album_entity',
                limitUpdateKind: i0.UpdateKind.delete),
            result: [
              i0.TableUpdate('remote_album_asset_entity',
                  kind: i0.UpdateKind.delete),
            ],
          ),
          i0.WritePropagation(
            on: i0.TableUpdateQuery.onTableName('remote_album_entity',
                limitUpdateKind: i0.UpdateKind.delete),
            result: [
              i0.TableUpdate('remote_album_user_entity',
                  kind: i0.UpdateKind.delete),
            ],
          ),
          i0.WritePropagation(
            on: i0.TableUpdateQuery.onTableName('user_entity',
                limitUpdateKind: i0.UpdateKind.delete),
            result: [
              i0.TableUpdate('remote_album_user_entity',
                  kind: i0.UpdateKind.delete),
            ],
          ),
        ],
      );
  @override
@@ -140,4 +201,11 @@ class $DriftManager {
      .$$LocalAlbumAssetEntityTableTableManager(_db, _db.localAlbumAssetEntity);
  i8.$$RemoteExifEntityTableTableManager get remoteExifEntity =>
      i8.$$RemoteExifEntityTableTableManager(_db, _db.remoteExifEntity);
  i9.$$RemoteAlbumEntityTableTableManager get remoteAlbumEntity =>
      i9.$$RemoteAlbumEntityTableTableManager(_db, _db.remoteAlbumEntity);
  i10.$$RemoteAlbumAssetEntityTableTableManager get remoteAlbumAssetEntity =>
      i10.$$RemoteAlbumAssetEntityTableTableManager(
          _db, _db.remoteAlbumAssetEntity);
  i11.$$RemoteAlbumUserEntityTableTableManager get remoteAlbumUserEntity => i11
      .$$RemoteAlbumUserEntityTableTableManager(_db, _db.remoteAlbumUserEntity);
}
@@ -1,6 +1,8 @@
import 'package:drift/drift.dart';
import 'package:immich_mobile/domain/models/exif.model.dart';
import 'package:immich_mobile/infrastructure/entities/exif.entity.dart'
    as entity;
import 'package:immich_mobile/infrastructure/entities/exif.entity.drift.dart';
import 'package:immich_mobile/infrastructure/repositories/db.repository.dart';
import 'package:isar/isar.dart';

@@ -41,3 +43,36 @@ class IsarExifRepository extends IsarDatabaseRepository {
    });
  }
}

class DriftRemoteExifRepository extends DriftDatabaseRepository {
  final Drift _db;
  const DriftRemoteExifRepository(this._db) : super(_db);

  Future<ExifInfo?> get(String assetId) {
    final query = _db.remoteExifEntity.select()
      ..where((exif) => exif.assetId.equals(assetId));

    return query.map((asset) => asset.toDto()).getSingleOrNull();
  }
}

extension on RemoteExifEntityData {
  ExifInfo toDto() {
    return ExifInfo(
      fileSize: fileSize,
      description: description,
      orientation: orientation,
      timeZone: timeZone,
      dateTimeOriginal: dateTimeOriginal,
      latitude: latitude,
      longitude: longitude,
      city: city,
      state: state,
      country: country,
      make: make,
      model: model,
      f: fNumber,
      iso: iso,
    );
  }
}
@@ -1,6 +1,6 @@
import 'package:drift/drift.dart';
import 'package:immich_mobile/domain/models/album/local_album.model.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/domain/models/local_album.model.dart';
import 'package:immich_mobile/infrastructure/entities/local_album.entity.drift.dart';
import 'package:immich_mobile/infrastructure/entities/local_album_asset.entity.drift.dart';
import 'package:immich_mobile/infrastructure/entities/local_asset.entity.drift.dart';
Some files were not shown because too many files have changed in this diff