Compare commits


1 Commit

Author: Jason Rasmussen | SHA1: d892624197 | Message: chore: run on windows-2025 | Date: 2025-09-03 13:23:10 -04:00
105 changed files with 986 additions and 2151 deletions

.github/.nvmrc (vendored, new file)

@@ -0,0 +1 @@
22.18.0


@@ -1,5 +1,8 @@
{
"name": "github",
"scripts": {
"format": "prettier --check .",
"format:fix": "prettier --write ."
},
"devDependencies": {
"prettier": "^3.5.3"
}


@@ -33,20 +33,24 @@ jobs:
with:
persist-credentials: false
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './cli/.nvmrc'
registry-url: 'https://registry.npmjs.org'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Setup typescript-sdk
run: mise run sdk:install && mise run sdk:build
run: pnpm install && pnpm run build
working-directory: ./open-api/typescript-sdk
- name: Install dependencies
run: mise run cli:install
- name: Run build
run: mise run cli:build
- name: Publish package
run: pnpm publish
- run: pnpm install --frozen-lockfile
- run: pnpm build
- run: pnpm publish
if: ${{ github.event_name == 'release' }}
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}


@@ -55,17 +55,24 @@ jobs:
with:
persist-credentials: false
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './docs/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run install
run: mise run docs:install
run: pnpm install
- name: Check formatting
run: mise run docs:format-fix
run: pnpm format
- name: Run build
run: mise run docs:build
run: pnpm build
- name: Upload build output
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2


@@ -28,11 +28,15 @@ jobs:
token: ${{ steps.generate-token.outputs.token }}
persist-credentials: true
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Fix formatting
run: mise run server:format-fix && mise run web:format-fix && mise run docs:format-fix
run: make install-all && make format-all
- name: Commit and push
uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9.1.4


@@ -46,8 +46,15 @@ jobs:
- name: Install uv
uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Bump version
env:


@@ -20,15 +20,20 @@ jobs:
with:
persist-credentials: false
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
# Setup .npmrc file to publish to npm
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './open-api/typescript-sdk/.nvmrc'
registry-url: 'https://registry.npmjs.org'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Install deps
run: mise run sdk:install
run: pnpm install --frozen-lockfile
- name: Build
run: mise run sdk:build
run: pnpm build
- name: Publish
run: pnpm publish
env:


@@ -72,21 +72,27 @@ jobs:
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run package manager install
run: mise run server:install
run: pnpm install
- name: Run linter
run: mise run server:lint
run: pnpm lint
if: ${{ !cancelled() }}
- name: Run formatter
run: mise run server:format
run: pnpm format
if: ${{ !cancelled() }}
- name: Run tsc
run: mise run server:check
run: pnpm check
if: ${{ !cancelled() }}
- name: Run small tests & coverage
run: mise run server:test
run: pnpm test
if: ${{ !cancelled() }}
cli-unit-tests:
name: Unit Test CLI
@@ -103,29 +109,36 @@ jobs:
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './cli/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Setup typescript-sdk
run: mise run sdk:install && mise run sdk:build
run: pnpm install && pnpm run build
working-directory: ./open-api/typescript-sdk
- name: Install deps
run: mise run cli:install
run: pnpm install
- name: Run linter
run: mise run cli:lint
run: pnpm lint
if: ${{ !cancelled() }}
- name: Run formatter
run: mise run cli:format
run: pnpm format
if: ${{ !cancelled() }}
- name: Run tsc
run: mise run cli:check
run: pnpm check
if: ${{ !cancelled() }}
- name: Run unit tests & coverage
run: mise run cli:test
run: pnpm test
if: ${{ !cancelled() }}
cli-unit-tests-win:
name: Unit Test CLI (Windows)
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_cli == 'true' }}
runs-on: windows-latest
runs-on: windows-2025
permissions:
contents: read
defaults:
@@ -136,18 +149,25 @@ jobs:
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './cli/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Setup typescript-sdk
run: mise run sdk:install && mise run sdk:build
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk
- name: Install deps
run: mise run cli:install
run: pnpm install --frozen-lockfile
# Skip linter & formatter in Windows test.
- name: Run tsc
run: mise run cli:check
run: pnpm check
if: ${{ !cancelled() }}
- name: Run unit tests & coverage
run: mise run cli:test
run: pnpm test
if: ${{ !cancelled() }}
web-lint:
name: Lint Web
@@ -156,25 +176,35 @@ jobs:
runs-on: mich
permissions:
contents: read
defaults:
run:
working-directory: ./web
steps:
- name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './web/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run setup typescript-sdk
run: mise run sdk:install && mise run sdk:build
- name: Run install
run: mise run web:install
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk
- name: Run pnpm install
run: pnpm rebuild && pnpm install --frozen-lockfile
- name: Run linter
run: mise run web:lint-p
run: pnpm lint:p
if: ${{ !cancelled() }}
- name: Run formatter
run: mise run web:format
run: pnpm format
if: ${{ !cancelled() }}
- name: Run svelte checks
run: mise run web:check-svelte
run: pnpm check:svelte
if: ${{ !cancelled() }}
web-unit-tests:
name: Test Web
@@ -183,22 +213,32 @@ jobs:
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./web
steps:
- name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './web/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run setup typescript-sdk
run: mise run sdk:install && mise run sdk:build
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk
- name: Run npm install
run: mise run web:install
run: pnpm install --frozen-lockfile
- name: Run tsc
run: mise run web:check
run: pnpm check:typescript
if: ${{ !cancelled() }}
- name: Run unit tests & coverage
run: mise run web:test
run: pnpm test
if: ${{ !cancelled() }}
i18n-tests:
name: Test i18n
@@ -212,12 +252,18 @@ jobs:
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './web/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Install dependencies
run: mise run web:install
run: pnpm --filter=immich-web install --frozen-lockfile
- name: Format
run: mise run i18n:format-fix
run: pnpm --filter=immich-web format:i18n
- name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
id: verify-changed-files
@@ -247,22 +293,29 @@ jobs:
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './e2e/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run setup typescript-sdk
run: mise run sdk:install && mise run sdk:build
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk
if: ${{ !cancelled() }}
- name: Install dependencies
run: mise run e2e:install
run: pnpm install --frozen-lockfile
if: ${{ !cancelled() }}
- name: Run linter
run: mise run e2e:lint
run: pnpm lint
if: ${{ !cancelled() }}
- name: Run formatter
run: mise run e2e:format
run: pnpm format
if: ${{ !cancelled() }}
- name: Run tsc
run: mise run e2e:check
run: pnpm check
if: ${{ !cancelled() }}
server-medium-tests:
name: Medium Tests (Server)
@@ -271,17 +324,26 @@ jobs:
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./server
steps:
- name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Run install
run: SHARP_IGNORE_GLOBAL_LIBVIPS=true mise run server:install
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run pnpm install
run: SHARP_IGNORE_GLOBAL_LIBVIPS=true pnpm install --frozen-lockfile
- name: Run medium tests
run: mise run server:test-medium
run: pnpm test:medium
if: ${{ !cancelled() }}
e2e-tests-server-cli:
name: End-to-End Tests (Server & CLI)
@@ -290,6 +352,9 @@ jobs:
runs-on: ${{ matrix.runner }}
permissions:
contents: read
defaults:
run:
working-directory: ./e2e
strategy:
matrix:
runner: [ubuntu-latest, ubuntu-24.04-arm]
@@ -299,25 +364,34 @@ jobs:
with:
persist-credentials: false
submodules: 'recursive'
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './e2e/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run setup typescript-sdk
run: mise run sdk:install && mise run sdk:build
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk
if: ${{ !cancelled() }}
- name: Run setup web
run: mise run web:install && mise run web:svelte-kit-sync
run: pnpm install --frozen-lockfile && pnpm exec svelte-kit sync
working-directory: ./web
if: ${{ !cancelled() }}
- name: Run setup cli
run: mise run cli:install && mise run cli:build
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./cli
if: ${{ !cancelled() }}
- name: Install dependencies
run: mise run e2e:install
run: pnpm install --frozen-lockfile
if: ${{ !cancelled() }}
- name: Docker build
run: docker compose -f e2e/docker-compose.yml build
run: docker compose build
if: ${{ !cancelled() }}
- name: Run e2e tests (api & cli)
run: mise run e2e:test
run: pnpm test
if: ${{ !cancelled() }}
e2e-tests-web:
name: End-to-End Tests (Web)
@@ -326,6 +400,9 @@ jobs:
runs-on: ${{ matrix.runner }}
permissions:
contents: read
defaults:
run:
working-directory: ./e2e
strategy:
matrix:
runner: [ubuntu-latest, ubuntu-24.04-arm]
@@ -335,26 +412,29 @@ jobs:
with:
persist-credentials: false
submodules: 'recursive'
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './e2e/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run setup typescript-sdk
run: mise run sdk:install && mise run sdk:build
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk
if: ${{ !cancelled() }}
- name: Run setup web
run: mise run web:install && mise run web:svelte-kit-sync
if: ${{ !cancelled() }}
- name: Run setup cli
run: mise run cli:install && mise run cli:build
- name: Install dependencies
run: pnpm install --frozen-lockfile
if: ${{ !cancelled() }}
- name: Install Playwright Browsers
run: npx playwright install chromium --only-shell
working-directory: e2e
if: ${{ !cancelled() }}
- name: Docker build
run: docker compose -f e2e/docker-compose.yml build
run: docker compose build
if: ${{ !cancelled() }}
- name: Run e2e tests (web)
run: mise run e2e:test-web
run: npx playwright test
if: ${{ !cancelled() }}
success-check-e2e:
name: End-to-End Tests Success
@@ -439,12 +519,18 @@ jobs:
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Run install
run: mise run github:install
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './.github/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run pnpm install
run: pnpm install --frozen-lockfile
- name: Run formatter
run: mise run github:format
run: pnpm format
if: ${{ !cancelled() }}
shellcheck:
name: ShellCheck
@@ -470,12 +556,18 @@ jobs:
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Install server dependencies
run: SHARP_IGNORE_GLOBAL_LIBVIPS=true mise run server:install
run: SHARP_IGNORE_GLOBAL_LIBVIPS=true pnpm --filter immich install --frozen-lockfile
- name: Build the app
run: mise run server:build
run: pnpm --filter immich build
- name: Run API generation
run: ./bin/generate-open-api.sh
working-directory: open-api
@@ -519,19 +611,25 @@ jobs:
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Install server dependencies
run: SHARP_IGNORE_GLOBAL_LIBVIPS=true pnpm install --frozen-lockfile
- name: Build the app
run: mise run server:build
run: pnpm build
- name: Run existing migrations
run: mise run server:migrations run
run: pnpm migrations:run
- name: Test npm run schema:reset command works
run: mise run server:schema-reset
run: pnpm schema:reset
- name: Generate new migrations
continue-on-error: true
run: mise run server:migrations generate src/TestMigration
run: pnpm migrations:generate src/TestMigration
- name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
id: verify-changed-files
@@ -548,7 +646,7 @@ jobs:
cat ./src/*-TestMigration.ts
exit 1
- name: Run SQL generation
run: mise run server:sql
run: pnpm sync:sql
env:
DB_URL: postgres://postgres:postgres@localhost:5432/immich
- name: Find file changes


@@ -10,14 +10,14 @@ dev-update: prepare-volumes
dev-scale: prepare-volumes
@trap 'make dev-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.dev.yml up --build -V --scale immich-server=3 --remove-orphans
dev-docs:
dev-docs: prepare-volumes
npm --prefix docs run start
.PHONY: e2e
e2e:
e2e: prepare-volumes
@trap 'make e2e-down' EXIT; COMPOSE_BAKE=true docker compose -f ./e2e/docker-compose.yml up --remove-orphans
e2e-update:
e2e-update: prepare-volumes
@trap 'make e2e-down' EXIT; COMPOSE_BAKE=true docker compose -f ./e2e/docker-compose.yml up --build -V --remove-orphans
e2e-down:
@@ -73,8 +73,6 @@ define safe_chown
if chown $(2) $(or $(UID),1000):$(or $(GID),1000) "$(1)" 2>/dev/null; then \
true; \
else \
STATUS=$$?; echo "Exit code: $$STATUS $(1)"; \
echo "$$STATUS $(1)"; \
echo "Permission denied when changing owner of volumes and upload location. Try running 'sudo make prepare-volumes' first."; \
exit 1; \
fi;
@@ -85,13 +83,11 @@ prepare-volumes:
@$(foreach dir,$(VOLUME_DIRS),$(call safe_chown,$(dir),-R))
ifneq ($(UPLOAD_LOCATION),)
ifeq ($(filter /%,$(UPLOAD_LOCATION)),)
@mkdir -p "docker/$(UPLOAD_LOCATION)/photos/upload"
@mkdir -p "docker/$(UPLOAD_LOCATION)"
@$(call safe_chown,docker/$(UPLOAD_LOCATION),)
@$(call safe_chown,docker/$(UPLOAD_LOCATION)/photos,-R)
else
@mkdir -p "$(UPLOAD_LOCATION)/photos/upload"
@mkdir -p "$(UPLOAD_LOCATION)"
@$(call safe_chown,$(UPLOAD_LOCATION),)
@$(call safe_chown,$(UPLOAD_LOCATION)/photos,-R)
endif
endif
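For context, a minimal sketch of driving the `prepare-volumes` target shown above from the repository root. `UPLOAD_LOCATION`, `UID`, and `GID` are the variables this Makefile already consults (owner falls back to 1000:1000); the specific values here are assumptions for the example:

```bash
# hypothetical invocation; a relative UPLOAD_LOCATION is created under docker/
make prepare-volumes UPLOAD_LOCATION=./library UID=$(id -u) GID=$(id -g)
```

If ownership cannot be changed, the target prints the hint from `safe_chown` suggesting `sudo make prepare-volumes`.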

cli/.nvmrc (new file)

@@ -0,0 +1 @@
22.18.0


@@ -42,6 +42,17 @@
"vitest-fetch-mock": "^0.4.0",
"yaml": "^2.3.1"
},
"scripts": {
"build": "vite build",
"lint": "eslint \"src/**/*.ts\" --max-warnings 0",
"lint:fix": "npm run lint -- --fix",
"prepack": "npm run build",
"test": "vitest",
"test:cov": "vitest --coverage",
"format": "prettier --check .",
"format:fix": "prettier --write .",
"check": "tsc --noEmit"
},
"repository": {
"type": "git",
"url": "git+https://github.com/immich-app/immich.git",
@@ -56,5 +67,8 @@
"fastq": "^1.17.1",
"lodash-es": "^4.17.21",
"micromatch": "^4.0.8"
},
"volta": {
"node": "22.18.0"
}
}

docs/.nvmrc (new file)

@@ -0,0 +1 @@
22.18.0


@@ -5,7 +5,7 @@ After making any changes in the `server/src/schema`, a database migration need t
1. Run the command
```bash
mise run server:migrations generate <migration-name>
pnpm run migrations:generate <migration-name>
```
2. Check if the migration file makes sense.
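For illustration, this mirrors how the CI workflow elsewhere in this diff drives the same command, passing a path-prefixed migration name:

```bash
# generates a timestamped file such as src/<timestamp>-TestMigration.ts
pnpm run migrations:generate src/TestMigration
```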


@@ -8,11 +8,11 @@ When contributing code through a pull request, please check the following:
## Web Checks
- [ ] `mise run web:lint` (linting via ESLint)
- [ ] `mise run web:format` (formatting via Prettier)
- [ ] `mise run web:check` (check typescript)
- [ ] `mise run web:check-svelte` (Type checking via SvelteKit)
- [ ] `mise run web:test` (unit tests)
- [ ] `pnpm run lint` (linting via ESLint)
- [ ] `pnpm run format` (formatting via Prettier)
- [ ] `pnpm run check:svelte` (Type checking via SvelteKit)
- [ ] `pnpm run check:typescript` (check typescript)
- [ ] `pnpm test` (unit tests)
## Documentation
@@ -25,18 +25,17 @@ Run all web checks with `pnpm run check:all`
## Server Checks
- [ ] `mise run server:lint` (linting via ESLint)
- [ ] `mise run server:format` (formatting via Prettier)
- [ ] `mise run server:check` (type checking via `tsc`)
- [ ] `mise run server:test` (unit tests)
- [ ] `mise run server:test-medium` (medium tests)
- [ ] `pnpm run lint` (linting via ESLint)
- [ ] `pnpm run format` (formatting via Prettier)
- [ ] `pnpm run check` (Type checking via `tsc`)
- [ ] `pnpm test` (unit tests)
:::tip AIO
Run all server checks with `pnpm run check:all`
:::
:::info Auto Fix
You can use `mise run server:lint-fix` and `mise run server:format-fix` to potentially correct some issues automatically.
You can use `pnpm run __:fix` to potentially correct some issues automatically for `pnpm run format` and `lint`.
:::
## Mobile Checks


@@ -2,6 +2,20 @@
"name": "documentation",
"version": "0.0.0",
"private": true,
"scripts": {
"docusaurus": "docusaurus",
"format": "prettier --check .",
"format:fix": "prettier --write .",
"start": "docusaurus start --port 3005",
"copy:openapi": "jq -c < ../open-api/immich-openapi-specs.json > ./static/openapi.json || exit 0",
"build": "npm run copy:openapi && docusaurus build",
"swizzle": "docusaurus swizzle",
"deploy": "docusaurus deploy",
"clear": "docusaurus clear",
"serve": "docusaurus serve",
"write-translations": "docusaurus write-translations",
"write-heading-ids": "docusaurus write-heading-ids"
},
"dependencies": {
"@docusaurus/core": "~3.8.0",
"@docusaurus/preset-classic": "~3.8.0",
@@ -44,5 +58,8 @@
},
"engines": {
"node": ">=20"
},
"volta": {
"node": "22.18.0"
}
}

e2e/.nvmrc (new file)

@@ -0,0 +1 @@
22.18.0


@@ -4,6 +4,17 @@
"description": "",
"main": "index.js",
"type": "module",
"scripts": {
"test": "vitest --run",
"test:watch": "vitest",
"test:web": "npx playwright test",
"start:web": "npx playwright test --ui",
"format": "prettier --check .",
"format:fix": "prettier --write .",
"lint": "eslint \"src/**/*.ts\" --max-warnings 0",
"lint:fix": "npm run lint -- --fix",
"check": "tsc --noEmit"
},
"keywords": [],
"author": "",
"license": "GNU Affero General Public License version 3",
@@ -41,5 +52,8 @@
"typescript-eslint": "^8.28.0",
"utimes": "^5.2.1",
"vitest": "^3.0.0"
},
"volta": {
"node": "22.18.0"
}
}


@@ -1557,7 +1557,6 @@
"purchase_server_description_2": "Supporter status",
"purchase_server_title": "Server",
"purchase_settings_server_activated": "The server product key is managed by the admin",
"query_asset_id": "Query Asset ID",
"queue_status": "Queuing {count}/{total}",
"rating": "Star rating",
"rating_clear": "Clear rating",
@@ -1736,7 +1735,7 @@
"select_user_for_sharing_page_err_album": "Failed to create album",
"selected": "Selected",
"selected_count": "{count, plural, other {# selected}}",
"selected_gps_coordinates": "Selected GPS Coordinates",
"selected_gps_coordinates": "selected gps coordinates",
"send_message": "Send message",
"send_welcome_email": "Send welcome email",
"server_endpoint": "Server Endpoint",
@@ -2078,7 +2077,6 @@
"view_next_asset": "View next asset",
"view_previous_asset": "View previous asset",
"view_qr_code": "View QR code",
"view_similar_photos": "View similar photos",
"view_stack": "View Stack",
"view_user": "View User",
"viewer_remove_from_stack": "Remove from Stack",


@@ -1,34 +0,0 @@
[tools.dart]
version = "3.8.2"
backend = "asdf:dart"
[tools.flutter]
version = "3.32.8-stable"
backend = "asdf:flutter"
[tools."github:CQLabs/homebrew-dcm"]
version = "1.31.4"
backend = "github:CQLabs/homebrew-dcm"
[tools."github:CQLabs/homebrew-dcm".platforms.linux-x64]
checksum = "blake3:e9df5b765df327e1248fccf2c6165a89d632a065667f99c01765bf3047b94955"
size = 8821083
url = "https://github.com/CQLabs/homebrew-dcm/releases/download/1.31.4/dcm-linux-x64-release.zip"
[tools.node]
version = "22.18.0"
backend = "core:node"
[tools.node.platforms.linux-x64]
checksum = "sha256:a2e703725d8683be86bb5da967bf8272f4518bdaf10f21389e2b2c9eaeae8c8a"
size = 54824343
url = "https://nodejs.org/dist/v22.18.0/node-v22.18.0-linux-x64.tar.gz"
[tools.pnpm]
version = "10.14.0"
backend = "aqua:pnpm/pnpm"
[tools.pnpm.platforms.linux-x64]
checksum = "blake3:13dfa46b7173d3cad3bad60a756a492ecf0bce48b23eb9f793e7ccec5a09b46d"
size = 66231525
url = "https://github.com/pnpm/pnpm/releases/download/v10.14.0/pnpm-linux-x64"

mise.toml (deleted, 312 lines)

@@ -1,312 +0,0 @@
[tools]
node = "22.18.0"
flutter = "3.32.8"
pnpm = "10.14.0"
dart = "3.8.2"
[tools."github:CQLabs/homebrew-dcm"]
version = "1.31.4"
bin = "dcm"
postinstall = "chmod +x $MISE_TOOL_INSTALL_PATH/dcm"
[settings]
experimental = true
lockfile = true
pin = true
# .github
[tasks."github:install"]
run = "pnpm install --filter github --frozen-lockfile"
[tasks."github:format"]
env._.path = "./.github/node_modules/.bin"
dir = ".github"
run = "prettier --check ."
[tasks."github:format-fix"]
env._.path = "./.github/node_modules/.bin"
dir = ".github"
run = "prettier --write ."
# @immich/cli
[tasks."cli:install"]
run = "pnpm install --filter @immich/cli --frozen-lockfile"
[tasks."cli:build"]
env._.path = "./cli/node_modules/.bin"
dir = "cli"
run = "vite build"
[tasks."cli:test"]
env._.path = "./cli/node_modules/.bin"
dir = "cli"
run = "vite"
[tasks."cli:lint"]
env._.path = "./cli/node_modules/.bin"
dir = "cli"
run = "eslint \"src/**/*.ts\" --max-warnings 0"
[tasks."cli:lint-fix"]
run = "mise run cli:lint --fix"
[tasks."cli:format"]
env._.path = "./cli/node_modules/.bin"
dir = "cli"
run = "prettier --check ."
[tasks."cli:format-fix"]
env._.path = "./cli/node_modules/.bin"
dir = "cli"
run = "prettier --write ."
[tasks."cli:check"]
env._.path = "./cli/node_modules/.bin"
dir = "cli"
run = "tsc --noEmit"
# @immich/sdk
[tasks."sdk:install"]
run = "pnpm install --filter @immich/sdk --frozen-lockfile"
[tasks."sdk:build"]
env._.path = "./open-api/typescript-sdk/node_modules/.bin"
dir = "./open-api/typescript-sdk"
run = "tsc"
# docs
[tasks."docs:install"]
run = "pnpm install --filter documentation --frozen-lockfile"
[tasks."docs:start"]
env._.path = "./docs/node_modules/.bin"
dir = "docs"
run = "docusaurus --port 3005"
[tasks."docs:build"]
env._.path = "./docs/node_modules/.bin"
dir = "docs"
run = [
"jq -c < ../open-api/immich-openapi-specs.json > ./static/openapi.json || exit 0",
"docusaurus build",
]
[tasks."docs:preview"]
env._.path = "./docs/node_modules/.bin"
dir = "docs"
run = "docusaurus serve"
[tasks."docs:format"]
env._.path = "./docs/node_modules/.bin"
dir = "docs"
run = "prettier --check ."
[tasks."docs:format-fix"]
env._.path = "./docs/node_modules/.bin"
dir = "docs"
run = "prettier --write ."
# e2e
[tasks."e2e:install"]
run = "pnpm install --filter immich-e2e --frozen-lockfile"
[tasks."e2e:test"]
env._.path = "./e2e/node_modules/.bin"
dir = "e2e"
run = "vitest --run"
[tasks."e2e:test-web"]
env._.path = "./e2e/node_modules/.bin"
dir = "e2e"
run = "playwright test"
[tasks."e2e:format"]
env._.path = "./e2e/node_modules/.bin"
dir = "e2e"
run = "prettier --check ."
[tasks."e2e:format-fix"]
env._.path = "./e2e/node_modules/.bin"
dir = "e2e"
run = "prettier --write ."
[tasks."e2e:lint"]
env._.path = "./e2e/node_modules/.bin"
dir = "e2e"
run = "eslint \"src/**/*.ts\" --max-warnings 0"
[tasks."e2e:lint-fix"]
run = "mise run e2e:lint --fix"
[tasks."e2e:check"]
env._.path = "./e2e/node_modules/.bin"
dir = "e2e"
run = "tsc --noEmit"
# i18n
[tasks."i18n:format"]
run = "mise run i18n:format-fix"
[tasks."i18n:format-fix"]
run = "pnpm dlx sort-json ./i18n/*.json"
# server
[tasks."server:install"]
run = "pnpm install --filter immich --frozen-lockfile"
[tasks."server:build"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "nest build"
[tasks."server:test"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "vitest --config test/vitest.config.mjs"
[tasks."server:test-medium"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "vitest --config test/vitest.config.medium.mjs"
[tasks."server:format"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "prettier --check ."
[tasks."server:format-fix"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "prettier --write ."
[tasks."server:lint"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "eslint \"src/**/*.ts\" \"test/**/*.ts\" --max-warnings 0"
[tasks."server:lint-fix"]
run = "mise run server:lint --fix"
[tasks."server:check"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "tsc --noEmit"
[tasks."server:sql"]
dir = "server"
run = "node ./dist/bin/sync-open-api.js"
[tasks."server:open-api"]
dir = "server"
run = "node ./dist/bin/sync-open-api.js"
[tasks."server:migrations"]
dir = "server"
run = "node ./dist/bin/migrations.js"
description = "Run database migration commands (create, generate, run, debug, or query)"
[tasks."server:schema-drop"]
run = "mise run server:migrations query 'DROP schema public cascade; CREATE schema public;'"
[tasks."server:schema-reset"]
run = "mise run server:schema-drop && mise run server:migrations run"
[tasks."server:email-dev"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "email dev -p 3050 --dir src/emails"
[tasks."server:checklist"]
run = [
"mise run server:install",
"mise run server:format",
"mise run server:lint",
"mise run server:check",
"mise run server:test-medium --run",
"mise run server:test --run",
]
# web
[tasks."web:install"]
run = "pnpm install --filter immich-web --frozen-lockfile"
[tasks."web:svelte-kit-sync"]
env._.path = "./web/node_modules/.bin"
dir = "web"
run = "svelte-kit sync"
[tasks."web:build"]
env._.path = "./web/node_modules/.bin"
dir = "web"
run = "vite build"
[tasks."web:build-stats"]
env.BUILD_STATS = "true"
env._.path = "./web/node_modules/.bin"
dir = "web"
run = "vite build"
[tasks."web:preview"]
env._.path = "./web/node_modules/.bin"
dir = "web"
run = "vite preview"
[tasks."web:start"]
env._.path = "web/node_modules/.bin"
dir = "web"
run = "vite dev --host 0.0.0.0 --port 3000"
[tasks."web:test"]
depends = "web:svelte-kit-sync"
env._.path = "web/node_modules/.bin"
dir = "web"
run = "vitest"
[tasks."web:format"]
env._.path = "web/node_modules/.bin"
dir = "web"
run = "prettier --check ."
[tasks."web:format-fix"]
env._.path = "web/node_modules/.bin"
dir = "web"
run = "prettier --write ."
[tasks."web:lint"]
env._.path = "web/node_modules/.bin"
dir = "web"
run = "eslint . --max-warnings 0"
[tasks."web:lint-p"]
env._.path = "web/node_modules/.bin"
dir = "web"
run = "eslint-p . --max-warnings 0 --concurrency=4"
[tasks."web:lint-fix"]
run = "mise run web:lint --fix"
[tasks."web:check"]
depends = "web:svelte-kit-sync"
env._.path = "web/node_modules/.bin"
dir = "web"
run = "tsc --noEmit"
[tasks."web:check-svelte"]
depends = "web:svelte-kit-sync"
env._.path = "web/node_modules/.bin"
dir = "web"
run = "svelte-check --no-tsconfig --fail-on-warnings --compiler-warnings 'reactive_declaration_non_reactive_property:ignore' --ignore src/lib/components/photos-page/asset-grid.svelte"
[tasks."web:checklist"]
run = [
"mise run web:install",
"mise run web:format",
"mise run web:check",
"mise run web:test --run",
"mise run web:lint",
]


@@ -130,10 +130,8 @@ class BackgroundWorker(context: Context, params: WorkerParameters) :
* - Parameter success: Indicates whether the background task completed successfully
*/
private fun complete(success: Result) {
Log.d(TAG, "About to complete BackupWorker with result: $success")
isComplete = true
engine?.destroy()
engine = null
flutterApi = null
completionHandler.set(success)
}


@@ -3,7 +3,7 @@
archiveVersion = 1;
classes = {
};
objectVersion = 54;
objectVersion = 77;
objects = {
/* Begin PBXBuildFile section */
@@ -507,10 +507,14 @@
inputFileListPaths = (
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources-${CONFIGURATION}-input-files.xcfilelist",
);
inputPaths = (
);
name = "[CP] Copy Pods Resources";
outputFileListPaths = (
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources-${CONFIGURATION}-output-files.xcfilelist",
);
outputPaths = (
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources.sh\"\n";
@@ -539,10 +543,14 @@
inputFileListPaths = (
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-input-files.xcfilelist",
);
inputPaths = (
);
name = "[CP] Embed Pods Frameworks";
outputFileListPaths = (
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-output-files.xcfilelist",
);
outputPaths = (
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh\"\n";


@@ -118,7 +118,7 @@ class BackgroundWorker: BackgroundWorkerBgHostApi {
self.handleHostResult(result: result)
})
}
/**
* Cancels the currently running background task, either due to timeout or external request.
* Sends a cancel signal to the Flutter side and sets up a fallback timer to ensure
@@ -140,7 +140,6 @@ class BackgroundWorker: BackgroundWorkerBgHostApi {
self.complete(success: false)
}
}
/**
* Handles the result from Flutter API calls and determines the success/failure status.


@@ -46,23 +46,6 @@ class ThumbnailApiImpl: ThumbnailApi {
assetCache.countLimit = 10000
return assetCache
}()
private static let activitySemaphore = DispatchSemaphore(value: 1)
private static let willResignActiveObserver = NotificationCenter.default.addObserver(
forName: UIApplication.willResignActiveNotification,
object: nil,
queue: .main
) { _ in
processingQueue.suspend()
activitySemaphore.wait()
}
private static let didBecomeActiveObserver = NotificationCenter.default.addObserver(
forName: UIApplication.didBecomeActiveNotification,
object: nil,
queue: .main
) { _ in
processingQueue.resume()
activitySemaphore.signal()
}
func getThumbhash(thumbhash: String, completion: @escaping (Result<[String : Int64], any Error>) -> Void) {
Self.processingQueue.async {
@@ -70,7 +53,6 @@ class ThumbnailApiImpl: ThumbnailApi {
else { return completion(.failure(PigeonError(code: "", message: "Invalid base64 string: \(thumbhash)", details: nil)))}
let (width, height, pointer) = thumbHashToRGBA(hash: data)
self.waitForActiveState()
completion(.success(["pointer": Int64(Int(bitPattern: pointer.baseAddress)), "width": Int64(width), "height": Int64(height)]))
}
}
@@ -160,7 +142,6 @@ class ThumbnailApiImpl: ThumbnailApi {
return completion(Self.cancelledResult)
}
self.waitForActiveState()
completion(.success(["pointer": Int64(Int(bitPattern: pointer)), "width": Int64(cgImage.width), "height": Int64(cgImage.height)]))
Self.removeRequest(requestId: requestId)
}
@@ -203,9 +184,4 @@ class ThumbnailApiImpl: ThumbnailApi {
assetQueue.async { assetCache.setObject(asset, forKey: assetId as NSString) }
return asset
}
func waitForActiveState() {
Self.activitySemaphore.wait()
Self.activitySemaphore.signal()
}
}


@@ -5,7 +5,6 @@ import 'package:background_downloader/background_downloader.dart';
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/constants/constants.dart';
import 'package:immich_mobile/domain/utils/isolate_lock_manager.dart';
import 'package:immich_mobile/infrastructure/repositories/db.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/logger_db.repository.dart';
import 'package:immich_mobile/platform/background_worker_api.g.dart';
@@ -42,8 +41,7 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
final Drift _drift;
final DriftLogger _driftLogger;
final BackgroundWorkerBgHostApi _backgroundHostApi;
final Logger _logger = Logger('BackgroundUploadBgService');
late final IsolateLockManager _lockManager;
final Logger _logger = Logger('BackgroundWorkerBgService');
bool _isCleanedUp = false;
@@ -59,7 +57,6 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
driftProvider.overrideWith(driftOverride(drift)),
],
);
_lockManager = IsolateLockManager(onCloseRequest: _cleanup);
BackgroundWorkerFlutterApi.setUp(this);
}
@@ -83,25 +80,11 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
await FileDownloader().trackTasksInGroup(kDownloadGroupLivePhoto, markDownloadedComplete: false);
await FileDownloader().trackTasks();
configureFileDownloaderNotifications();
await _ref.read(fileMediaRepositoryProvider).enableBackgroundAccess();
// Notify the host that the background upload service has been initialized and is ready to use
debugPrint("Acquiring background worker lock");
if (await _lockManager.acquireLock().timeout(
const Duration(seconds: 5),
onTimeout: () {
_lockManager.cancel();
return false;
},
)) {
_logger.info("Acquired background worker lock");
await _backgroundHostApi.onInitialized();
return;
}
_logger.warning("Failed to acquire background worker lock");
await _cleanup();
await _backgroundHostApi.close();
// Notify the host that the background worker service has been initialized and is ready to use
_backgroundHostApi.onInitialized();
} catch (error, stack) {
_logger.severe("Failed to initialize background worker", error, stack);
_backgroundHostApi.close();
@@ -177,8 +160,7 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
await _drift.close();
await _driftLogger.close();
_ref.dispose();
_lockManager.releaseLock();
_logger.info("Background worker resources cleaned up");
debugPrint("Background worker cleaned up");
} catch (error, stack) {
debugPrint('Failed to cleanup background worker: $error with stack: $stack');
}


@@ -1,235 +0,0 @@
import 'dart:isolate';
import 'dart:ui';
import 'package:flutter/foundation.dart';
import 'package:logging/logging.dart';
const String kIsolateLockManagerPort = "immich://isolate_mutex";
enum _LockStatus { active, released }
class _IsolateRequest {
const _IsolateRequest();
}
class _HeartbeatRequest extends _IsolateRequest {
// Port for the receiver to send replies back
final SendPort sendPort;
const _HeartbeatRequest(this.sendPort);
Map<String, dynamic> toJson() {
return {'type': 'heartbeat', 'sendPort': sendPort};
}
}
class _CloseRequest extends _IsolateRequest {
const _CloseRequest();
Map<String, dynamic> toJson() {
return {'type': 'close'};
}
}
class _IsolateResponse {
const _IsolateResponse();
}
class _HeartbeatResponse extends _IsolateResponse {
final _LockStatus status;
const _HeartbeatResponse(this.status);
Map<String, dynamic> toJson() {
return {'type': 'heartbeat', 'status': status.index};
}
}
typedef OnCloseLockHolderRequest = void Function();
class IsolateLockManager {
final String _portName;
bool _hasLock = false;
ReceivePort? _receivePort;
final OnCloseLockHolderRequest? _onCloseRequest;
final Set<SendPort> _waitingIsolates = {};
// Token object - a new one is created for each acquisition attempt
Object? _currentAcquisitionToken;
IsolateLockManager({String? portName, OnCloseLockHolderRequest? onCloseRequest})
: _portName = portName ?? kIsolateLockManagerPort,
_onCloseRequest = onCloseRequest;
Future<bool> acquireLock() async {
if (_hasLock) {
Logger('BackgroundWorkerLockManager').warning("WARNING: [acquireLock] called more than once");
return true;
}
// Create a new token - this invalidates any previous attempt
final token = _currentAcquisitionToken = Object();
final ReceivePort rp = _receivePort = ReceivePort(_portName);
final SendPort sp = rp.sendPort;
while (!IsolateNameServer.registerPortWithName(sp, _portName)) {
// This attempt was superseded by a newer one in the same isolate
if (_currentAcquisitionToken != token) {
return false;
}
await _lockReleasedByHolder(token);
}
_hasLock = true;
rp.listen(_onRequest);
return true;
}
Future<void> _lockReleasedByHolder(Object token) async {
SendPort? holder = IsolateNameServer.lookupPortByName(_portName);
debugPrint("Found lock holder: $holder");
if (holder == null) {
// No holder, try and acquire lock
return;
}
final ReceivePort tempRp = ReceivePort();
final SendPort tempSp = tempRp.sendPort;
final bs = tempRp.asBroadcastStream();
try {
while (true) {
// Send a heartbeat request with the send port to receive reply from the holder
debugPrint("Sending heartbeat request to lock holder");
holder.send(_HeartbeatRequest(tempSp).toJson());
dynamic answer = await bs.first.timeout(const Duration(seconds: 3), onTimeout: () => null);
debugPrint("Received heartbeat response from lock holder: $answer");
// This attempt was superseded by a newer one in the same isolate
if (_currentAcquisitionToken != token) {
break;
}
if (answer == null) {
// Holder failed, most likely killed without calling releaseLock
// Check if a different waiting isolate took the lock
if (holder == IsolateNameServer.lookupPortByName(_portName)) {
// No, remove the stale lock
IsolateNameServer.removePortNameMapping(_portName);
}
break;
}
// Unknown message type received for heartbeat request. Try again
_IsolateResponse? response = _parseResponse(answer);
if (response == null || response is! _HeartbeatResponse) {
break;
}
if (response.status == _LockStatus.released) {
// Holder has released the lock
break;
}
// If the _LockStatus is active, we check again if the task completed
// by sending a released messaged again, if not, send a new heartbeat again
// Check if the holder completed its task after the heartbeat
answer = await bs.first.timeout(
const Duration(seconds: 3),
onTimeout: () => const _HeartbeatResponse(_LockStatus.active).toJson(),
);
response = _parseResponse(answer);
if (response is _HeartbeatResponse && response.status == _LockStatus.released) {
break;
}
}
} catch (e) {
// Timeout or error
} finally {
tempRp.close();
}
return;
}
_IsolateRequest? _parseRequest(dynamic msg) {
if (msg is! Map<String, dynamic>) {
return null;
}
return switch (msg['type']) {
'heartbeat' => _HeartbeatRequest(msg['sendPort']),
'close' => const _CloseRequest(),
_ => null,
};
}
_IsolateResponse? _parseResponse(dynamic msg) {
if (msg is! Map<String, dynamic>) {
return null;
}
return switch (msg['type']) {
'heartbeat' => _HeartbeatResponse(_LockStatus.values[msg['status']]),
_ => null,
};
}
// Executed in the isolate with the lock
void _onRequest(dynamic msg) {
final request = _parseRequest(msg);
if (request == null) {
return;
}
if (request is _HeartbeatRequest) {
// Add the send port to the list of waiting isolates
_waitingIsolates.add(request.sendPort);
request.sendPort.send(const _HeartbeatResponse(_LockStatus.active).toJson());
return;
}
if (request is _CloseRequest) {
_onCloseRequest?.call();
return;
}
}
void releaseLock() {
if (_hasLock) {
IsolateNameServer.removePortNameMapping(_portName);
// Notify waiting isolates
for (final port in _waitingIsolates) {
port.send(const _HeartbeatResponse(_LockStatus.released).toJson());
}
_waitingIsolates.clear();
_hasLock = false;
}
_receivePort?.close();
_receivePort = null;
}
void cancel() {
if (_hasLock) {
return;
}
debugPrint("Cancelling ongoing acquire lock attempts");
// Create a new token to invalidate ongoing acquire lock attempts
_currentAcquisitionToken = Object();
}
void requestHolderToClose() {
if (_hasLock) {
return;
}
IsolateNameServer.lookupPortByName(_portName)?.send(const _CloseRequest().toJson());
}
}


@@ -2,10 +2,8 @@ import 'package:auto_route/auto_route.dart';
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/store.model.dart';
import 'package:immich_mobile/domain/utils/isolate_lock_manager.dart';
import 'package:immich_mobile/entities/store.entity.dart';
import 'package:immich_mobile/providers/auth.provider.dart';
import 'package:immich_mobile/providers/background_sync.provider.dart';
import 'package:immich_mobile/providers/backup/backup.provider.dart';
import 'package:immich_mobile/providers/gallery_permission.provider.dart';
import 'package:immich_mobile/providers/server_info.provider.dart';
@@ -23,23 +21,14 @@ class SplashScreenPage extends StatefulHookConsumerWidget {
class SplashScreenPageState extends ConsumerState<SplashScreenPage> {
final log = Logger("SplashScreenPage");
@override
void initState() {
super.initState();
final lockManager = ref.read(isolateLockManagerProvider(kIsolateLockManagerPort));
lockManager.requestHolderToClose();
lockManager
.acquireLock()
.timeout(const Duration(seconds: 5))
.whenComplete(
() => ref
.read(authProvider.notifier)
.setOpenApiServiceEndpoint()
.then(logConnectionInfo)
.whenComplete(() => resumeSession()),
);
ref
.read(authProvider.notifier)
.setOpenApiServiceEndpoint()
.then(logConnectionInfo)
.whenComplete(() => resumeSession());
}
void logConnectionInfo(String? endpoint) {


@@ -1,6 +1,5 @@
import 'package:auto_route/auto_route.dart';
import 'package:flutter/material.dart';
import 'package:flutter_hooks/flutter_hooks.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/extensions/translate_extensions.dart';
@@ -39,14 +38,14 @@ class DriftPlacePage extends StatelessWidget {
}
}
class _PlaceSliverAppBar extends HookWidget {
class _PlaceSliverAppBar extends StatelessWidget {
const _PlaceSliverAppBar({required this.search});
final ValueNotifier<String?> search;
@override
Widget build(BuildContext context) {
final searchFocusNode = useFocusNode();
final searchFocusNode = FocusNode();
return SliverAppBar(
floating: true,


@@ -19,7 +19,6 @@ import 'package:immich_mobile/providers/infrastructure/current_album.provider.da
import 'package:immich_mobile/providers/timeline/multiselect.provider.dart';
import 'package:immich_mobile/providers/user.provider.dart';
import 'package:immich_mobile/routing/router.dart';
import 'package:immich_mobile/utils/album_filter.utils.dart';
import 'package:immich_mobile/widgets/common/confirm_dialog.dart';
import 'package:immich_mobile/widgets/common/immich_toast.dart';
import 'package:immich_mobile/widgets/common/search_field.dart';
@@ -40,12 +39,8 @@ class AlbumSelector extends ConsumerStatefulWidget {
class _AlbumSelectorState extends ConsumerState<AlbumSelector> {
bool isGrid = false;
final searchController = TextEditingController();
QuickFilterMode filterMode = QuickFilterMode.all;
final searchFocusNode = FocusNode();
List<RemoteAlbum> sortedAlbums = [];
List<RemoteAlbum> shownAlbums = [];
AlbumFilter filter = AlbumFilter(query: "", mode: QuickFilterMode.all);
AlbumSort sort = AlbumSort(mode: RemoteAlbumSortMode.lastModified, isReverse: true);
@override
void initState() {
@@ -57,7 +52,7 @@ class _AlbumSelectorState extends ConsumerState<AlbumSelector> {
});
searchController.addListener(() {
onSearch(searchController.text, filter.mode);
onSearch(searchController.text, filterMode);
});
searchFocusNode.addListener(() {
@@ -67,11 +62,9 @@ class _AlbumSelectorState extends ConsumerState<AlbumSelector> {
});
}
void onSearch(String searchTerm, QuickFilterMode filterMode) {
void onSearch(String searchTerm, QuickFilterMode sortMode) {
final userId = ref.watch(currentUserProvider)?.id;
filter = filter.copyWith(query: searchTerm, userId: userId, mode: filterMode);
filterAlbums();
ref.read(remoteAlbumProvider.notifier).searchAlbums(searchTerm, userId, sortMode);
}
Future<void> onRefresh() async {
@@ -84,60 +77,17 @@ class _AlbumSelectorState extends ConsumerState<AlbumSelector> {
});
}
void changeFilter(QuickFilterMode mode) {
void changeFilter(QuickFilterMode sortMode) {
setState(() {
filter = filter.copyWith(mode: mode);
filterMode = sortMode;
});
filterAlbums();
}
Future<void> changeSort(AlbumSort sort) async {
setState(() {
this.sort = sort;
});
await sortAlbums();
}
void clearSearch() {
setState(() {
filter = filter.copyWith(mode: QuickFilterMode.all, query: null);
filterMode = QuickFilterMode.all;
searchController.clear();
});
filterAlbums();
}
Future<void> sortAlbums() async {
final sorted = await ref
.read(remoteAlbumProvider.notifier)
.sortAlbums(ref.read(remoteAlbumProvider).albums, sort.mode, isReverse: sort.isReverse);
setState(() {
sortedAlbums = sorted;
});
// we need to re-filter the albums after sorting
// so shownAlbums gets updated
filterAlbums();
}
Future<void> filterAlbums() async {
if (filter.query == null) {
setState(() {
shownAlbums = sortedAlbums;
});
return;
}
final filteredAlbums = ref
.read(remoteAlbumProvider.notifier)
.searchAlbums(sortedAlbums, filter.query!, filter.userId, filter.mode);
setState(() {
shownAlbums = filteredAlbums;
ref.read(remoteAlbumProvider.notifier).clearSearch();
});
}
@@ -150,12 +100,9 @@ class _AlbumSelectorState extends ConsumerState<AlbumSelector> {
@override
Widget build(BuildContext context) {
final userId = ref.watch(currentUserProvider)?.id;
final albums = ref.watch(remoteAlbumProvider.select((s) => s.filteredAlbums));
// refilter and sort when albums change
ref.listen(remoteAlbumProvider.select((state) => state.albums), (_, _) async {
await sortAlbums();
});
final userId = ref.watch(currentUserProvider)?.id;
return MultiSliver(
children: [
@@ -163,28 +110,26 @@ class _AlbumSelectorState extends ConsumerState<AlbumSelector> {
searchController: searchController,
searchFocusNode: searchFocusNode,
onSearch: onSearch,
filterMode: filter.mode,
filterMode: filterMode,
onClearSearch: clearSearch,
),
_QuickFilterButtonRow(
filterMode: filter.mode,
filterMode: filterMode,
onChangeFilter: changeFilter,
onSearch: onSearch,
searchController: searchController,
),
_QuickSortAndViewMode(isGrid: isGrid, onToggleViewMode: toggleViewMode, onSortChanged: changeSort),
_QuickSortAndViewMode(isGrid: isGrid, onToggleViewMode: toggleViewMode),
isGrid
? _AlbumGrid(albums: shownAlbums, userId: userId, onAlbumSelected: widget.onAlbumSelected)
: _AlbumList(albums: shownAlbums, userId: userId, onAlbumSelected: widget.onAlbumSelected),
? _AlbumGrid(albums: albums, userId: userId, onAlbumSelected: widget.onAlbumSelected)
: _AlbumList(albums: albums, userId: userId, onAlbumSelected: widget.onAlbumSelected),
],
);
}
}
class _SortButton extends ConsumerStatefulWidget {
const _SortButton(this.onSortChanged);
final Future<void> Function(AlbumSort) onSortChanged;
const _SortButton();
@override
ConsumerState<_SortButton> createState() => _SortButtonState();
@@ -203,15 +148,15 @@ class _SortButtonState extends ConsumerState<_SortButton> {
albumSortIsReverse = !albumSortIsReverse;
isSorting = true;
});
await ref.read(remoteAlbumProvider.notifier).sortFilteredAlbums(sortMode, isReverse: albumSortIsReverse);
} else {
setState(() {
albumSortOption = sortMode;
isSorting = true;
});
await ref.read(remoteAlbumProvider.notifier).sortFilteredAlbums(sortMode, isReverse: albumSortIsReverse);
}
await widget.onSortChanged.call(AlbumSort(mode: albumSortOption, isReverse: albumSortIsReverse));
setState(() {
isSorting = false;
});
@@ -449,11 +394,10 @@ class _QuickFilterButton extends StatelessWidget {
}
class _QuickSortAndViewMode extends StatelessWidget {
const _QuickSortAndViewMode({required this.isGrid, required this.onToggleViewMode, required this.onSortChanged});
const _QuickSortAndViewMode({required this.isGrid, required this.onToggleViewMode});
final bool isGrid;
final VoidCallback onToggleViewMode;
final Future<void> Function(AlbumSort) onSortChanged;
@override
Widget build(BuildContext context) {
@@ -463,7 +407,7 @@ class _QuickSortAndViewMode extends StatelessWidget {
child: Row(
mainAxisAlignment: MainAxisAlignment.spaceBetween,
children: [
_SortButton(onSortChanged),
const _SortButton(),
IconButton(
icon: Icon(isGrid ? Icons.view_list_outlined : Icons.grid_view_outlined, size: 24),
onPressed: onToggleViewMode,


@@ -3,7 +3,6 @@ import 'dart:async';
import 'package:auto_route/auto_route.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/domain/models/timeline.model.dart';
@@ -130,7 +129,6 @@ class _AssetViewerState extends ConsumerState<AssetViewer> {
reloadSubscription?.cancel();
_prevPreCacheStream?.removeListener(_dummyListener);
_nextPreCacheStream?.removeListener(_dummyListener);
SystemChrome.setEnabledSystemUIMode(SystemUiMode.edgeToEdge);
super.dispose();
}
@@ -598,7 +596,6 @@ class _AssetViewerState extends ConsumerState<AssetViewer> {
// Rebuild the widget when the asset viewer state changes
// Using multiple selectors to avoid unnecessary rebuilds for other state changes
ref.watch(assetViewerProvider.select((s) => s.showingBottomSheet));
ref.watch(assetViewerProvider.select((s) => s.showingControls));
ref.watch(assetViewerProvider.select((s) => s.backgroundOpacity));
ref.watch(assetViewerProvider.select((s) => s.stackIndex));
ref.watch(isPlayingMotionVideoProvider);
@@ -615,15 +612,6 @@ class _AssetViewerState extends ConsumerState<AssetViewer> {
});
});
// Listen for control visibility changes and change system UI mode accordingly
ref.listen(assetViewerProvider.select((value) => value.showingControls), (_, showingControls) async {
if (showingControls) {
SystemChrome.setEnabledSystemUIMode(SystemUiMode.edgeToEdge);
} else {
SystemChrome.setEnabledSystemUIMode(SystemUiMode.immersiveSticky);
}
});
// Currently it is not possible to scroll the asset when the bottom sheet is open all the way.
// Issue: https://github.com/flutter/flutter/issues/109037
// TODO: Add a custom scrum builder once the fix lands on stable


@@ -62,7 +62,7 @@ class ViewerBottomBar extends ConsumerWidget {
duration: Durations.short2,
child: AnimatedSwitcher(
duration: Durations.short4,
child: isSheetOpen
child: isSheetOpen || isReadonlyModeEnabled
? const SizedBox.shrink()
: Theme(
data: context.themeData.copyWith(
@@ -72,14 +72,14 @@ class ViewerBottomBar extends ConsumerWidget {
),
),
child: Container(
height: context.padding.bottom + (asset.isVideo ? 160 : 90),
color: Colors.black.withAlpha(125),
padding: EdgeInsets.only(bottom: context.padding.bottom, top: 16),
padding: EdgeInsets.only(bottom: context.padding.bottom),
child: Column(
mainAxisAlignment: MainAxisAlignment.end,
children: [
if (asset.isVideo) const VideoControls(),
if (!isInLockedView && !isReadonlyModeEnabled)
Row(mainAxisAlignment: MainAxisAlignment.spaceEvenly, children: actions),
if (!isInLockedView) Row(mainAxisAlignment: MainAxisAlignment.spaceEvenly, children: actions),
],
),
),


@@ -1,9 +1,7 @@
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/extensions/translate_extensions.dart';
import 'package:immich_mobile/constants/enums.dart';
import 'package:immich_mobile/domain/models/album/album.model.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/presentation/widgets/action_buttons/archive_action_button.widget.dart';
import 'package:immich_mobile/presentation/widgets/action_buttons/delete_permanent_action_button.widget.dart';
import 'package:immich_mobile/presentation/widgets/action_buttons/delete_local_action_button.widget.dart';
@@ -18,74 +16,22 @@ import 'package:immich_mobile/presentation/widgets/action_buttons/share_link_act
import 'package:immich_mobile/presentation/widgets/action_buttons/stack_action_button.widget.dart';
import 'package:immich_mobile/presentation/widgets/action_buttons/trash_action_button.widget.dart';
import 'package:immich_mobile/presentation/widgets/action_buttons/upload_action_button.widget.dart';
import 'package:immich_mobile/presentation/widgets/album/album_selector.widget.dart';
import 'package:immich_mobile/presentation/widgets/bottom_sheet/base_bottom_sheet.widget.dart';
import 'package:immich_mobile/providers/infrastructure/album.provider.dart';
import 'package:immich_mobile/providers/server_info.provider.dart';
import 'package:immich_mobile/providers/timeline/multiselect.provider.dart';
import 'package:immich_mobile/widgets/common/immich_toast.dart';
class RemoteAlbumBottomSheet extends ConsumerStatefulWidget {
class RemoteAlbumBottomSheet extends ConsumerWidget {
final RemoteAlbum album;
const RemoteAlbumBottomSheet({super.key, required this.album});
@override
ConsumerState<RemoteAlbumBottomSheet> createState() => _RemoteAlbumBottomSheetState();
}
class _RemoteAlbumBottomSheetState extends ConsumerState<RemoteAlbumBottomSheet> {
late DraggableScrollableController sheetController;
@override
void initState() {
super.initState();
sheetController = DraggableScrollableController();
}
@override
void dispose() {
sheetController.dispose();
super.dispose();
}
@override
Widget build(BuildContext context) {
Widget build(BuildContext context, WidgetRef ref) {
final multiselect = ref.watch(multiSelectProvider);
final isTrashEnable = ref.watch(serverInfoProvider.select((state) => state.serverFeatures.trash));
Future<void> addAssetsToAlbum(RemoteAlbum album) async {
final selectedAssets = multiselect.selectedAssets;
if (selectedAssets.isEmpty) {
return;
}
final addedCount = await ref
.read(remoteAlbumProvider.notifier)
.addAssets(album.id, selectedAssets.map((e) => (e as RemoteAsset).id).toList());
if (addedCount != selectedAssets.length) {
ImmichToast.show(
context: context,
msg: 'add_to_album_bottom_sheet_already_exists'.t(context: context, args: {"album": album.name}),
);
} else {
ImmichToast.show(
context: context,
msg: 'add_to_album_bottom_sheet_added'.t(context: context, args: {"album": album.name}),
);
}
ref.read(multiSelectProvider.notifier).reset();
}
Future<void> onKeyboardExpand() {
return sheetController.animateTo(0.85, duration: const Duration(milliseconds: 200), curve: Curves.easeInOut);
}
return BaseBottomSheet(
controller: sheetController,
initialChildSize: 0.45,
maxChildSize: 0.85,
initialChildSize: 0.25,
maxChildSize: 0.4,
shouldCloseOnMinExtent: false,
actions: [
const ShareActionButton(source: ActionSource.timeline),
@@ -106,11 +52,7 @@ class _RemoteAlbumBottomSheetState extends ConsumerState<RemoteAlbumBottomSheet>
const DeleteLocalActionButton(source: ActionSource.timeline),
const UploadActionButton(source: ActionSource.timeline),
],
RemoveFromAlbumActionButton(source: ActionSource.timeline, albumId: widget.album.id),
],
slivers: [
const AddToAlbumHeader(),
AlbumSelector(onAlbumSelected: addAssetsToAlbum, onKeyboardExpanded: onKeyboardExpand),
RemoveFromAlbumActionButton(source: ActionSource.timeline, albumId: album.id),
],
);
}

View File

@@ -10,7 +10,6 @@ import 'package:immich_mobile/presentation/widgets/timeline/constants.dart';
import 'package:immich_mobile/presentation/widgets/timeline/segment.model.dart';
import 'package:immich_mobile/presentation/widgets/timeline/timeline.state.dart';
import 'package:intl/intl.dart' hide TextDirection;
import 'package:immich_mobile/providers/haptic_feedback.provider.dart';
/// A widget that will display a BoxScrollView with a ScrollThumb that can be dragged
/// for quick navigation of the BoxScrollView.
@@ -75,7 +74,6 @@ List<_Segment> _buildSegments({required List<Segment> layoutSegments, required d
}
class ScrubberState extends ConsumerState<Scrubber> with TickerProviderStateMixin {
String? _lastLabel;
double _thumbTopOffset = 0.0;
bool _isDragging = false;
List<_Segment> _segments = [];
@@ -174,7 +172,6 @@ class ScrubberState extends ConsumerState<Scrubber> with TickerProviderStateMixi
_isDragging = true;
_labelAnimationController.forward();
_fadeOutTimer?.cancel();
_lastLabel = null;
});
}
@@ -192,11 +189,6 @@ class ScrubberState extends ConsumerState<Scrubber> with TickerProviderStateMixi
if (nearestMonthSegment != null) {
_snapToSegment(nearestMonthSegment);
final label = nearestMonthSegment.scrollLabel;
if (_lastLabel != label) {
ref.read(hapticFeedbackProvider.notifier).selectionClick();
_lastLabel = label;
}
}
}

View File

@@ -3,7 +3,6 @@ import 'dart:async';
import 'package:flutter/foundation.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/services/log.service.dart';
import 'package:immich_mobile/domain/utils/isolate_lock_manager.dart';
import 'package:immich_mobile/entities/store.entity.dart';
import 'package:immich_mobile/models/backup/backup_state.model.dart';
import 'package:immich_mobile/providers/album/album.provider.dart';
@@ -82,12 +81,6 @@ class AppLifeCycleNotifier extends StateNotifier<AppLifeCycleEnum> {
}
} else {
_ref.read(backupProvider.notifier).cancelBackup();
final lockManager = _ref.read(isolateLockManagerProvider(kIsolateLockManagerPort));
lockManager.requestHolderToClose();
debugPrint("Requested lock holder to close on resume");
await lockManager.acquireLock();
debugPrint("Lock acquired for background sync on resume");
final backgroundManager = _ref.read(backgroundSyncProvider);
// Ensure proper cleanup before starting new background tasks
@@ -137,7 +130,7 @@ class AppLifeCycleNotifier extends StateNotifier<AppLifeCycleEnum> {
// do not stop/clean up anything on inactivity: issued on every orientation change
}
Future<void> handleAppPause() async {
void handleAppPause() {
state = AppLifeCycleEnum.paused;
_wasPaused = true;
@@ -147,12 +140,6 @@ class AppLifeCycleNotifier extends StateNotifier<AppLifeCycleEnum> {
if (_ref.read(backupProvider.notifier).backupProgress != BackUpProgressEnum.manualInProgress) {
_ref.read(backupProvider.notifier).cancelBackup();
}
} else {
final backgroundManager = _ref.read(backgroundSyncProvider);
await backgroundManager.cancel();
await backgroundManager.cancelLocal();
_ref.read(isolateLockManagerProvider(kIsolateLockManagerPort)).releaseLock();
debugPrint("Lock released on app pause");
}
_ref.read(websocketProvider.notifier).disconnect();
@@ -186,7 +173,6 @@ class AppLifeCycleNotifier extends StateNotifier<AppLifeCycleEnum> {
}
if (Store.isBetaTimelineEnabled) {
_ref.read(isolateLockManagerProvider(kIsolateLockManagerPort)).releaseLock();
return;
}

View File

@@ -1,6 +1,5 @@
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/utils/background_sync.dart';
import 'package:immich_mobile/domain/utils/isolate_lock_manager.dart';
import 'package:immich_mobile/providers/sync_status.provider.dart';
final backgroundSyncProvider = Provider<BackgroundSyncManager>((ref) {
@@ -19,7 +18,3 @@ final backgroundSyncProvider = Provider<BackgroundSyncManager>((ref) {
ref.onDispose(manager.cancel);
return manager;
});
final isolateLockManagerProvider = Provider.family<IsolateLockManager, String>((ref, name) {
return IsolateLockManager(portName: name);
});

View File

@@ -12,42 +12,43 @@ import 'album.provider.dart';
class RemoteAlbumState {
final List<RemoteAlbum> albums;
final List<RemoteAlbum> filteredAlbums;
const RemoteAlbumState({required this.albums});
const RemoteAlbumState({required this.albums, List<RemoteAlbum>? filteredAlbums})
: filteredAlbums = filteredAlbums ?? albums;
RemoteAlbumState copyWith({List<RemoteAlbum>? albums}) {
return RemoteAlbumState(albums: albums ?? this.albums);
RemoteAlbumState copyWith({List<RemoteAlbum>? albums, List<RemoteAlbum>? filteredAlbums}) {
return RemoteAlbumState(albums: albums ?? this.albums, filteredAlbums: filteredAlbums ?? this.filteredAlbums);
}
@override
String toString() => 'RemoteAlbumState(albums: ${albums.length})';
String toString() => 'RemoteAlbumState(albums: ${albums.length}, filteredAlbums: ${filteredAlbums.length})';
@override
bool operator ==(covariant RemoteAlbumState other) {
if (identical(this, other)) return true;
final listEquals = const DeepCollectionEquality().equals;
return listEquals(other.albums, albums);
return listEquals(other.albums, albums) && listEquals(other.filteredAlbums, filteredAlbums);
}
@override
int get hashCode => albums.hashCode;
int get hashCode => albums.hashCode ^ filteredAlbums.hashCode;
}
class RemoteAlbumNotifier extends Notifier<RemoteAlbumState> {
late RemoteAlbumService _remoteAlbumService;
final _logger = Logger('RemoteAlbumNotifier');
@override
RemoteAlbumState build() {
_remoteAlbumService = ref.read(remoteAlbumServiceProvider);
return const RemoteAlbumState(albums: []);
return const RemoteAlbumState(albums: [], filteredAlbums: []);
}
Future<List<RemoteAlbum>> _getAll() async {
try {
final albums = await _remoteAlbumService.getAll();
state = state.copyWith(albums: albums);
state = state.copyWith(albums: albums, filteredAlbums: albums);
return albums;
} catch (error, stack) {
_logger.severe('Failed to fetch albums', error, stack);
@@ -59,21 +60,19 @@ class RemoteAlbumNotifier extends Notifier<RemoteAlbumState> {
await _getAll();
}
List<RemoteAlbum> searchAlbums(
List<RemoteAlbum> albums,
String query,
String? userId, [
QuickFilterMode filterMode = QuickFilterMode.all,
]) {
return _remoteAlbumService.searchAlbums(albums, query, userId, filterMode);
void searchAlbums(String query, String? userId, [QuickFilterMode filterMode = QuickFilterMode.all]) {
final filtered = _remoteAlbumService.searchAlbums(state.albums, query, userId, filterMode);
state = state.copyWith(filteredAlbums: filtered);
}
Future<List<RemoteAlbum>> sortAlbums(
List<RemoteAlbum> albums,
RemoteAlbumSortMode sortMode, {
bool isReverse = false,
}) async {
return await _remoteAlbumService.sortAlbums(albums, sortMode, isReverse: isReverse);
void clearSearch() {
state = state.copyWith(filteredAlbums: state.albums);
}
Future<void> sortFilteredAlbums(RemoteAlbumSortMode sortMode, {bool isReverse = false}) async {
final sortedAlbums = await _remoteAlbumService.sortAlbums(state.filteredAlbums, sortMode, isReverse: isReverse);
state = state.copyWith(filteredAlbums: sortedAlbums);
}
Future<RemoteAlbum?> createAlbum({
@@ -84,7 +83,7 @@ class RemoteAlbumNotifier extends Notifier<RemoteAlbumState> {
try {
final album = await _remoteAlbumService.createAlbum(title: title, description: description, assetIds: assetIds);
state = state.copyWith(albums: [...state.albums, album]);
state = state.copyWith(albums: [...state.albums, album], filteredAlbums: [...state.filteredAlbums, album]);
return album;
} catch (error, stack) {
@@ -115,7 +114,11 @@ class RemoteAlbumNotifier extends Notifier<RemoteAlbumState> {
return album.id == albumId ? updatedAlbum : album;
}).toList();
state = state.copyWith(albums: updatedAlbums);
final updatedFilteredAlbums = state.filteredAlbums.map((album) {
return album.id == albumId ? updatedAlbum : album;
}).toList();
state = state.copyWith(albums: updatedAlbums, filteredAlbums: updatedFilteredAlbums);
return updatedAlbum;
} catch (error, stack) {
@@ -136,7 +139,9 @@ class RemoteAlbumNotifier extends Notifier<RemoteAlbumState> {
await _remoteAlbumService.deleteAlbum(albumId);
final updatedAlbums = state.albums.where((album) => album.id != albumId).toList();
state = state.copyWith(albums: updatedAlbums);
final updatedFilteredAlbums = state.filteredAlbums.where((album) => album.id != albumId).toList();
state = state.copyWith(albums: updatedAlbums, filteredAlbums: updatedFilteredAlbums);
}
Future<List<RemoteAsset>> getAssets(String albumId) {
@@ -159,7 +164,9 @@ class RemoteAlbumNotifier extends Notifier<RemoteAlbumState> {
await _remoteAlbumService.removeUser(albumId, userId: userId);
final updatedAlbums = state.albums.where((album) => album.id != albumId).toList();
state = state.copyWith(albums: updatedAlbums);
final updatedFilteredAlbums = state.filteredAlbums.where((album) => album.id != albumId).toList();
state = state.copyWith(albums: updatedAlbums, filteredAlbums: updatedFilteredAlbums);
}
Future<void> setActivityStatus(String albumId, bool enabled) {

View File

@@ -1,25 +0,0 @@
import 'package:immich_mobile/domain/services/remote_album.service.dart';
import 'package:immich_mobile/models/albums/album_search.model.dart';
class AlbumFilter {
String? userId;
String? query;
QuickFilterMode mode;
AlbumFilter({required this.mode, this.userId, this.query});
AlbumFilter copyWith({String? userId, String? query, QuickFilterMode? mode}) {
return AlbumFilter(userId: userId ?? this.userId, query: query ?? this.query, mode: mode ?? this.mode);
}
}
class AlbumSort {
RemoteAlbumSortMode mode;
bool isReverse;
AlbumSort({required this.mode, this.isReverse = false});
AlbumSort copyWith({RemoteAlbumSortMode? mode, bool? isReverse}) {
return AlbumSort(mode: mode ?? this.mode, isReverse: isReverse ?? this.isReverse);
}
}

View File

@@ -8,14 +8,12 @@ import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/providers/album/album.provider.dart';
import 'package:immich_mobile/providers/routes.provider.dart';
import 'package:immich_mobile/widgets/album/add_to_album_sliverlist.dart';
import 'package:immich_mobile/widgets/album/add_to_album_bottom_sheet.dart';
import 'package:immich_mobile/models/asset_selection_state.dart';
import 'package:immich_mobile/widgets/asset_grid/delete_dialog.dart';
import 'package:immich_mobile/widgets/asset_grid/upload_dialog.dart';
import 'package:immich_mobile/providers/server_info.provider.dart';
import 'package:immich_mobile/widgets/common/drag_sheet.dart';
import 'package:immich_mobile/entities/album.entity.dart';
import 'package:immich_mobile/entities/asset.entity.dart';
import 'package:immich_mobile/utils/draggable_scroll_controller.dart';
final controlBottomAppBarNotifier = ControlBottomAppBarNotifier();
@@ -47,7 +45,6 @@ class ControlBottomAppBar extends HookConsumerWidget {
final bool unfavorite;
final bool unarchive;
final AssetSelectionState selectionAssetState;
final List<Asset> selectedAssets;
const ControlBottomAppBar({
super.key,
@@ -67,7 +64,6 @@ class ControlBottomAppBar extends HookConsumerWidget {
this.onRemoveFromAlbum,
this.onToggleLocked,
this.selectionAssetState = const AssetSelectionState(),
this.selectedAssets = const [],
this.enabled = true,
this.unarchive = false,
this.unfavorite = false,
@@ -104,18 +100,6 @@ class ControlBottomAppBar extends HookConsumerWidget {
);
}
/// Show existing AddToAlbumBottomSheet
void showAddToAlbumBottomSheet() {
showModalBottomSheet(
elevation: 0,
shape: const RoundedRectangleBorder(borderRadius: BorderRadius.all(Radius.circular(15.0))),
context: context,
builder: (BuildContext _) {
return AddToAlbumBottomSheet(assets: selectedAssets);
},
);
}
void handleRemoteDelete(bool force, Function(bool) deleteCb, {String? alertMsg}) {
if (!force) {
deleteCb(force);
@@ -137,15 +121,6 @@ class ControlBottomAppBar extends HookConsumerWidget {
label: "share_link".tr(),
onPressed: enabled ? () => onShare(false) : null,
),
if (!isInLockedView && hasRemote && albums.isNotEmpty)
ConstrainedBox(
constraints: const BoxConstraints(maxWidth: 100),
child: ControlBoxButton(
iconData: Icons.photo_album,
label: "add_to_album".tr(),
onPressed: enabled ? showAddToAlbumBottomSheet : null,
),
),
if (hasRemote && onArchive != null)
ControlBoxButton(
iconData: unarchive ? Icons.unarchive_outlined : Icons.archive_outlined,

View File

@@ -440,7 +440,6 @@ class MultiselectGrid extends HookConsumerWidget {
onUpload: onUpload,
enabled: !processing.value,
selectionAssetState: selectionAssetState.value,
selectedAssets: selection.value.toList(),
onStack: stackEnabled ? onStack : null,
onEditTime: editEnabled ? onEditTime : null,
onEditLocation: editEnabled ? onEditLocation : null,

View File

@@ -1,8 +1,7 @@
import 'package:auto_route/auto_route.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
import 'package:flutter_hooks/flutter_hooks.dart' hide Store;
import 'package:immich_mobile/entities/store.entity.dart';
import 'package:flutter_hooks/flutter_hooks.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/models/backup/backup_state.model.dart';
@@ -260,7 +259,7 @@ class ImmichAppBarDialog extends HookConsumerWidget {
const AppBarProfileInfoBox(),
buildStorageInformation(),
const AppBarServerInfo(),
if (Store.isBetaTimelineEnabled && isReadonlyModeEnabled) buildReadonlyMessage(),
if (isReadonlyModeEnabled) buildReadonlyMessage(),
buildAppLogButton(),
buildSettingButton(),
buildSignOutButton(),

View File

@@ -121,6 +121,7 @@ class PhotoViewCore extends StatefulWidget {
class PhotoViewCoreState extends State<PhotoViewCore>
with TickerProviderStateMixin, PhotoViewControllerDelegate, HitCornersDetector {
Offset? _normalizedPosition;
double? _scaleBefore;
double? _rotationBefore;
@@ -153,6 +154,7 @@ class PhotoViewCoreState extends State<PhotoViewCore>
void onScaleStart(ScaleStartDetails details) {
_rotationBefore = controller.rotation;
_scaleBefore = scale;
_normalizedPosition = details.focalPoint - controller.position;
_scaleAnimationController.stop();
_positionAnimationController.stop();
_rotationAnimationController.stop();
@@ -164,14 +166,8 @@ class PhotoViewCoreState extends State<PhotoViewCore>
};
void onScaleUpdate(ScaleUpdateDetails details) {
final centeredFocalPoint = Offset(
details.focalPoint.dx - scaleBoundaries.outerSize.width / 2,
details.focalPoint.dy - scaleBoundaries.outerSize.height / 2,
);
final double newScale = _scaleBefore! * details.scale;
final double scaleDelta = newScale / scale;
final Offset newPosition =
(controller.position + details.focalPointDelta) * scaleDelta - centeredFocalPoint * (scaleDelta - 1);
Offset delta = details.focalPoint - _normalizedPosition!;
updateScaleStateFromNewScale(newScale);
@@ -180,7 +176,7 @@ class PhotoViewCoreState extends State<PhotoViewCore>
updateMultiple(
scale: newScale,
position: panEnabled ? newPosition : clampPosition(position: newPosition),
position: panEnabled ? delta : clampPosition(position: delta * details.scale),
rotation: rotationEnabled ? _rotationBefore! + details.rotation : null,
rotationFocusPoint: rotationEnabled ? details.focalPoint : null,
);
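
For reference, the position formula in the removed branch keeps the gesture's focal point visually fixed while scaling: the pan delta is applied first, then the result is scaled by the relative scale change and corrected by the focal point's offset from the viewport center. A minimal TypeScript sketch of that arithmetic, reconstructed from the removed lines (the Point type and function names here are illustrative, not part of the diff):

type Point = { x: number; y: number };

const add = (a: Point, b: Point): Point => ({ x: a.x + b.x, y: a.y + b.y });
const sub = (a: Point, b: Point): Point => ({ x: a.x - b.x, y: a.y - b.y });
const mul = (a: Point, k: number): Point => ({ x: a.x * k, y: a.y * k });

// Position update from the removed onScaleUpdate branch:
// newPosition = (position + focalPointDelta) * scaleDelta
//             - centeredFocalPoint * (scaleDelta - 1)
function scaleAroundFocalPoint(
  position: Point,        // current content offset
  focalPoint: Point,      // gesture focal point in viewport coordinates
  focalPointDelta: Point, // focal point movement since the last update
  outerSize: Point,       // viewport width/height
  scaleBefore: number,    // scale when the gesture started
  currentScale: number,   // scale before this update
  detailsScale: number,   // cumulative gesture scale factor
): { scale: number; position: Point } {
  const centeredFocalPoint = sub(focalPoint, mul(outerSize, 0.5));
  const newScale = scaleBefore * detailsScale;
  const scaleDelta = newScale / currentScale;
  const panned = add(position, focalPointDelta);
  const newPosition = sub(mul(panned, scaleDelta), mul(centeredFocalPoint, scaleDelta - 1));
  return { scale: newScale, position: newPosition };
}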

View File

@@ -31,8 +31,7 @@ class SmartSearchDto {
this.model,
this.page,
this.personIds = const [],
this.query,
this.queryAssetId,
required this.query,
this.rating,
this.size,
this.state,
@@ -152,21 +151,7 @@ class SmartSearchDto {
List<String> personIds;
///
/// Please note: This property should have been non-nullable! Since the specification file
/// does not include a default value (using the "default:" property), however, the generated
/// source code must fall back to having a nullable type.
/// Consider adding a "default:" property in the specification file to hide this note.
///
String? query;
///
/// Please note: This property should have been non-nullable! Since the specification file
/// does not include a default value (using the "default:" property), however, the generated
/// source code must fall back to having a nullable type.
/// Consider adding a "default:" property in the specification file to hide this note.
///
String? queryAssetId;
String query;
/// Minimum value: -1
/// Maximum value: 5
@@ -293,7 +278,6 @@ class SmartSearchDto {
other.page == page &&
_deepEquality.equals(other.personIds, personIds) &&
other.query == query &&
other.queryAssetId == queryAssetId &&
other.rating == rating &&
other.size == size &&
other.state == state &&
@@ -330,8 +314,7 @@ class SmartSearchDto {
(model == null ? 0 : model!.hashCode) +
(page == null ? 0 : page!.hashCode) +
(personIds.hashCode) +
(query == null ? 0 : query!.hashCode) +
(queryAssetId == null ? 0 : queryAssetId!.hashCode) +
(query.hashCode) +
(rating == null ? 0 : rating!.hashCode) +
(size == null ? 0 : size!.hashCode) +
(state == null ? 0 : state!.hashCode) +
@@ -348,7 +331,7 @@ class SmartSearchDto {
(withExif == null ? 0 : withExif!.hashCode);
@override
String toString() => 'SmartSearchDto[albumIds=$albumIds, city=$city, country=$country, createdAfter=$createdAfter, createdBefore=$createdBefore, deviceId=$deviceId, isEncoded=$isEncoded, isFavorite=$isFavorite, isMotion=$isMotion, isNotInAlbum=$isNotInAlbum, isOffline=$isOffline, language=$language, lensModel=$lensModel, libraryId=$libraryId, make=$make, model=$model, page=$page, personIds=$personIds, query=$query, queryAssetId=$queryAssetId, rating=$rating, size=$size, state=$state, tagIds=$tagIds, takenAfter=$takenAfter, takenBefore=$takenBefore, trashedAfter=$trashedAfter, trashedBefore=$trashedBefore, type=$type, updatedAfter=$updatedAfter, updatedBefore=$updatedBefore, visibility=$visibility, withDeleted=$withDeleted, withExif=$withExif]';
String toString() => 'SmartSearchDto[albumIds=$albumIds, city=$city, country=$country, createdAfter=$createdAfter, createdBefore=$createdBefore, deviceId=$deviceId, isEncoded=$isEncoded, isFavorite=$isFavorite, isMotion=$isMotion, isNotInAlbum=$isNotInAlbum, isOffline=$isOffline, language=$language, lensModel=$lensModel, libraryId=$libraryId, make=$make, model=$model, page=$page, personIds=$personIds, query=$query, rating=$rating, size=$size, state=$state, tagIds=$tagIds, takenAfter=$takenAfter, takenBefore=$takenBefore, trashedAfter=$trashedAfter, trashedBefore=$trashedBefore, type=$type, updatedAfter=$updatedAfter, updatedBefore=$updatedBefore, visibility=$visibility, withDeleted=$withDeleted, withExif=$withExif]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
@@ -434,16 +417,7 @@ class SmartSearchDto {
// json[r'page'] = null;
}
json[r'personIds'] = this.personIds;
if (this.query != null) {
json[r'query'] = this.query;
} else {
// json[r'query'] = null;
}
if (this.queryAssetId != null) {
json[r'queryAssetId'] = this.queryAssetId;
} else {
// json[r'queryAssetId'] = null;
}
if (this.rating != null) {
json[r'rating'] = this.rating;
} else {
@@ -548,8 +522,7 @@ class SmartSearchDto {
personIds: json[r'personIds'] is Iterable
? (json[r'personIds'] as Iterable).cast<String>().toList(growable: false)
: const [],
query: mapValueOfType<String>(json, r'query'),
queryAssetId: mapValueOfType<String>(json, r'queryAssetId'),
query: mapValueOfType<String>(json, r'query')!,
rating: num.parse('${json[r'rating']}'),
size: num.parse('${json[r'size']}'),
state: mapValueOfType<String>(json, r'state'),
@@ -613,6 +586,7 @@ class SmartSearchDto {
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'query',
};
}

View File

@@ -69,7 +69,6 @@ class SyncEntityType {
static const userMetadataDeleteV1 = SyncEntityType._(r'UserMetadataDeleteV1');
static const syncAckV1 = SyncEntityType._(r'SyncAckV1');
static const syncResetV1 = SyncEntityType._(r'SyncResetV1');
static const syncCompleteV1 = SyncEntityType._(r'SyncCompleteV1');
/// List of all possible values in this [enum][SyncEntityType].
static const values = <SyncEntityType>[
@@ -119,7 +118,6 @@ class SyncEntityType {
userMetadataDeleteV1,
syncAckV1,
syncResetV1,
syncCompleteV1,
];
static SyncEntityType? fromJson(dynamic value) => SyncEntityTypeTypeTransformer().decode(value);
@@ -204,7 +202,6 @@ class SyncEntityTypeTypeTransformer {
case r'UserMetadataDeleteV1': return SyncEntityType.userMetadataDeleteV1;
case r'SyncAckV1': return SyncEntityType.syncAckV1;
case r'SyncResetV1': return SyncEntityType.syncResetV1;
case r'SyncCompleteV1': return SyncEntityType.syncCompleteV1;
default:
if (!allowNull) {
throw ArgumentError('Unknown enum value to decode: $data');

View File

@@ -24,7 +24,6 @@ abstract class BackgroundWorkerBgHostApi {
// required platform channels to notify the native side to start the background upload
void onInitialized();
// Called from the background flutter engine to request the native side to cleanup
void close();
}

View File

@@ -15,7 +15,7 @@ function dart {
patch --no-backup-if-mismatch -u api.mustache <api.mustache.patch
cd ../../
pnpm dlx @openapitools/openapi-generator-cli generate -g dart -i ./immich-openapi-specs.json -o ../mobile/openapi -t ./templates/mobile
pnpx @openapitools/openapi-generator-cli generate -g dart -i ./immich-openapi-specs.json -o ../mobile/openapi -t ./templates/mobile
# Post generate patches
patch --no-backup-if-mismatch -u ../mobile/openapi/lib/api_client.dart <./patch/api_client.dart.patch
@@ -27,7 +27,7 @@ function dart {
}
function typescript {
pnpm dlx oazapfts --optimistic --argumentStyle=object --useEnumType immich-openapi-specs.json typescript-sdk/src/fetch-client.ts
pnpx oazapfts --optimistic --argumentStyle=object --useEnumType immich-openapi-specs.json typescript-sdk/src/fetch-client.ts
pnpm --filter @immich/sdk install --frozen-lockfile
pnpm --filter @immich/sdk build
}
@@ -35,8 +35,8 @@ function typescript {
# requires server to be built
(
cd ..
SHARP_IGNORE_GLOBAL_LIBVIPS=true mise run server:build
mise run server:open-api
SHARP_IGNORE_GLOBAL_LIBVIPS=true pnpm --filter immich build
pnpm --filter immich sync:open-api
)
if [[ $1 == 'dart' ]]; then

View File

@@ -14571,10 +14571,6 @@
"query": {
"type": "string"
},
"queryAssetId": {
"format": "uuid",
"type": "string"
},
"rating": {
"maximum": 5,
"minimum": -1,
@@ -14642,6 +14638,9 @@
"type": "boolean"
}
},
"required": [
"query"
],
"type": "object"
},
"SourceType": {
@@ -15417,10 +15416,6 @@
],
"type": "object"
},
"SyncCompleteV1": {
"properties": {},
"type": "object"
},
"SyncEntityType": {
"enum": [
"AuthUserV1",
@@ -15468,8 +15463,7 @@
"UserMetadataV1",
"UserMetadataDeleteV1",
"SyncAckV1",
"SyncResetV1",
"SyncCompleteV1"
"SyncResetV1"
],
"type": "string"
},

View File

@@ -0,0 +1 @@
22.18.0

View File

@@ -11,6 +11,9 @@
"default": "./build/index.js"
}
},
"scripts": {
"build": "tsc"
},
"license": "GNU Affero General Public License version 3",
"dependencies": {
"@oazapfts/runtime": "^1.0.2"
@@ -23,5 +26,8 @@
"type": "git",
"url": "git+https://github.com/immich-app/immich.git",
"directory": "open-api/typescript-sdk"
},
"volta": {
"node": "22.18.0"
}
}

View File

@@ -1014,8 +1014,7 @@ export type SmartSearchDto = {
model?: string | null;
page?: number;
personIds?: string[];
query?: string;
queryAssetId?: string;
query: string;
rating?: number;
size?: number;
state?: string | null;
@@ -4922,8 +4921,7 @@ export enum SyncEntityType {
UserMetadataV1 = "UserMetadataV1",
UserMetadataDeleteV1 = "UserMetadataDeleteV1",
SyncAckV1 = "SyncAckV1",
SyncResetV1 = "SyncResetV1",
SyncCompleteV1 = "SyncCompleteV1"
SyncResetV1 = "SyncResetV1"
}
export enum SyncRequestType {
AlbumsV1 = "AlbumsV1",

1
server/.nvmrc Normal file
View File

@@ -0,0 +1 @@
22.18.0

View File

@@ -5,6 +5,34 @@
"author": "",
"private": true,
"license": "GNU Affero General Public License version 3",
"scripts": {
"build": "nest build",
"format": "prettier --check .",
"format:fix": "prettier --write .",
"start": "npm run start:dev",
"nest": "nest",
"start:dev": "nest start --watch --",
"start:debug": "nest start --debug 0.0.0.0:9230 --watch --",
"lint": "eslint \"src/**/*.ts\" \"test/**/*.ts\" --max-warnings 0",
"lint:fix": "npm run lint -- --fix",
"check": "tsc --noEmit",
"check:code": "npm run format && npm run lint && npm run check",
"check:all": "npm run check:code && npm run test:cov",
"test": "vitest --config test/vitest.config.mjs",
"test:cov": "vitest --config test/vitest.config.mjs --coverage",
"test:medium": "vitest --config test/vitest.config.medium.mjs",
"typeorm": "typeorm",
"lifecycle": "node ./dist/utils/lifecycle.js",
"migrations:debug": "node ./dist/bin/migrations.js debug",
"migrations:generate": "node ./dist/bin/migrations.js generate",
"migrations:create": "node ./dist/bin/migrations.js create",
"migrations:run": "node ./dist/bin/migrations.js run",
"schema:drop": "node ./dist/bin/migrations.js query 'DROP schema public cascade; CREATE schema public;'",
"schema:reset": "npm run schema:drop && npm run migrations:run",
"sync:open-api": "node ./dist/bin/sync-open-api.js",
"sync:sql": "node ./dist/bin/sync-sql.js",
"email:dev": "email dev -p 3050 --dir src/emails"
},
"dependencies": {
"@nestjs/bullmq": "^11.0.1",
"@nestjs/common": "^11.0.4",
@@ -144,6 +172,9 @@
"vite-tsconfig-paths": "^5.0.0",
"vitest": "^3.0.0"
},
"volta": {
"node": "22.18.0"
},
"overrides": {
"sharp": "^0.34.2"
}

View File

@@ -128,6 +128,12 @@ describe(SearchController.name, () => {
await request(ctx.getHttpServer()).post('/search/smart');
expect(ctx.authenticate).toHaveBeenCalled();
});
it('should require a query', async () => {
const { status, body } = await request(ctx.getHttpServer()).post('/search/smart').send({});
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['query should not be empty', 'query must be a string']));
});
});
describe('GET /search/explore', () => {

View File

@@ -199,12 +199,7 @@ export class StatisticsSearchDto extends BaseSearchDto {
export class SmartSearchDto extends BaseSearchWithResultsDto {
@IsString()
@IsNotEmpty()
@Optional()
query?: string;
@ValidateUUID({ optional: true })
@Optional()
queryAssetId?: string;
query!: string;
@IsString()
@IsNotEmpty()

View File

@@ -336,9 +336,6 @@ export class SyncAckV1 {}
@ExtraModel()
export class SyncResetV1 {}
@ExtraModel()
export class SyncCompleteV1 {}
export type SyncItem = {
[SyncEntityType.AuthUserV1]: SyncAuthUserV1;
[SyncEntityType.UserV1]: SyncUserV1;
@@ -385,7 +382,6 @@ export type SyncItem = {
[SyncEntityType.UserMetadataV1]: SyncUserMetadataV1;
[SyncEntityType.UserMetadataDeleteV1]: SyncUserMetadataDeleteV1;
[SyncEntityType.SyncAckV1]: SyncAckV1;
[SyncEntityType.SyncCompleteV1]: SyncCompleteV1;
[SyncEntityType.SyncResetV1]: SyncResetV1;
};

View File

@@ -530,7 +530,6 @@ export enum JobName {
AssetGenerateThumbnails = 'AssetGenerateThumbnails',
AuditLogCleanup = 'AuditLogCleanup',
AuditTableCleanup = 'AuditTableCleanup',
DatabaseBackup = 'DatabaseBackup',
@@ -571,7 +570,8 @@ export enum JobName {
SendMail = 'SendMail',
SidecarQueueAll = 'SidecarQueueAll',
SidecarCheck = 'SidecarCheck',
SidecarDiscovery = 'SidecarDiscovery',
SidecarSync = 'SidecarSync',
SidecarWrite = 'SidecarWrite',
SmartSearchQueueAll = 'SmartSearchQueueAll',
@@ -708,7 +708,6 @@ export enum SyncEntityType {
SyncAckV1 = 'SyncAckV1',
SyncResetV1 = 'SyncResetV1',
SyncCompleteV1 = 'SyncCompleteV1',
}
export enum NotificationLevel {

View File

@@ -43,18 +43,6 @@ where
limit
$2
-- AssetJobRepository.getForSidecarCheckJob
select
"id",
"sidecarPath",
"originalPath"
from
"asset"
where
"asset"."id" = $1::uuid
limit
$2
-- AssetJobRepository.streamForThumbnailJob
select
"asset"."id",

View File

@@ -123,14 +123,6 @@ offset
$8
commit
-- SearchRepository.getEmbedding
select
*
from
"smart_search"
where
"assetId" = $1
-- SearchRepository.searchFaces
begin
set

View File

@@ -957,7 +957,7 @@ where
order by
"stack"."updateId" asc
-- SyncRepository.person.getDeletes
-- SyncRepository.people.getDeletes
select
"id",
"personId"
@@ -970,7 +970,7 @@ where
order by
"person_audit"."id" asc
-- SyncRepository.person.getUpserts
-- SyncRepository.people.getUpserts
select
"id",
"createdAt",

View File

@@ -39,8 +39,10 @@ export class AssetJobRepository {
return this.db
.selectFrom('asset')
.where('asset.id', '=', asUuid(id))
.select(['id', 'sidecarPath', 'originalPath'])
.select((eb) =>
.select((eb) => [
'id',
'sidecarPath',
'originalPath',
jsonArrayFrom(
eb
.selectFrom('tag')
@@ -48,17 +50,7 @@ export class AssetJobRepository {
.innerJoin('tag_asset', 'tag.id', 'tag_asset.tagsId')
.whereRef('asset.id', '=', 'tag_asset.assetsId'),
).as('tags'),
)
.limit(1)
.executeTakeFirst();
}
@GenerateSql({ params: [DummyValue.UUID] })
getForSidecarCheckJob(id: string) {
return this.db
.selectFrom('asset')
.where('asset.id', '=', asUuid(id))
.select(['id', 'sidecarPath', 'originalPath'])
])
.limit(1)
.executeTakeFirst();
}

View File

@@ -293,13 +293,6 @@ export class SearchRepository {
});
}
@GenerateSql({
params: [DummyValue.UUID],
})
async getEmbedding(assetId: string) {
return this.db.selectFrom('smart_search').selectAll().where('assetId', '=', assetId).executeTakeFirst();
}
@GenerateSql({
params: [
{

View File

@@ -1,5 +1,5 @@
import { Injectable } from '@nestjs/common';
import { Kysely, sql } from 'kysely';
import { Kysely } from 'kysely';
import { InjectKysely } from 'nestjs-kysely';
import { columns } from 'src/database';
import { DummyValue, GenerateSql } from 'src/decorators';
@@ -62,7 +62,7 @@ export class SyncRepository {
partnerAsset: PartnerAssetsSync;
partnerAssetExif: PartnerAssetExifsSync;
partnerStack: PartnerStackSync;
person: PersonSync;
people: PersonSync;
stack: StackSync;
user: UserSync;
userMetadata: UserMetadataSync;
@@ -84,7 +84,7 @@ export class SyncRepository {
this.partnerAsset = new PartnerAssetsSync(this.db);
this.partnerAssetExif = new PartnerAssetExifsSync(this.db);
this.partnerStack = new PartnerStackSync(this.db);
this.person = new PersonSync(this.db);
this.people = new PersonSync(this.db);
this.stack = new StackSync(this.db);
this.user = new UserSync(this.db);
this.userMetadata = new UserMetadataSync(this.db);
@@ -117,15 +117,6 @@ class BaseSync {
.orderBy(idRef, 'asc');
}
protected auditCleanup<T extends keyof DB>(t: T, days: number) {
const { table, ref } = this.db.dynamic;
return this.db
.deleteFrom(table(t).as(t))
.where(ref(`${t}.deletedAt`), '<', sql.raw(`now() - interval '${days} days'`))
.execute();
}
protected upsertQuery<T extends keyof DB>(t: T, { nowId, ack }: SyncQueryOptions) {
const { table, ref } = this.db.dynamic;
const updateIdRef = ref(`${t}.updateId`);
@@ -159,10 +150,6 @@ class AlbumSync extends BaseSync {
.stream();
}
cleanupAuditTable(daysAgo: number) {
return this.auditCleanup('album_audit', daysAgo);
}
@GenerateSql({ params: [dummyQueryOptions], stream: true })
getUpserts(options: SyncQueryOptions) {
const userId = options.userId;
@@ -299,10 +286,6 @@ class AlbumToAssetSync extends BaseSync {
.stream();
}
cleanupAuditTable(daysAgo: number) {
return this.auditCleanup('album_asset_audit', daysAgo);
}
@GenerateSql({ params: [dummyQueryOptions], stream: true })
getUpserts(options: SyncQueryOptions) {
const userId = options.userId;
@@ -351,10 +334,6 @@ class AlbumUserSync extends BaseSync {
.stream();
}
cleanupAuditTable(daysAgo: number) {
return this.auditCleanup('album_user_audit', daysAgo);
}
@GenerateSql({ params: [dummyQueryOptions], stream: true })
getUpserts(options: SyncQueryOptions) {
const userId = options.userId;
@@ -392,10 +371,6 @@ class AssetSync extends BaseSync {
.stream();
}
cleanupAuditTable(daysAgo: number) {
return this.auditCleanup('asset_audit', daysAgo);
}
@GenerateSql({ params: [dummyQueryOptions], stream: true })
getUpserts(options: SyncQueryOptions) {
return this.upsertQuery('asset', options)
@@ -425,10 +400,6 @@ class PersonSync extends BaseSync {
.stream();
}
cleanupAuditTable(daysAgo: number) {
return this.auditCleanup('person_audit', daysAgo);
}
@GenerateSql({ params: [dummyQueryOptions], stream: true })
getUpserts(options: SyncQueryOptions) {
return this.upsertQuery('person', options)
@@ -460,10 +431,6 @@ class AssetFaceSync extends BaseSync {
.stream();
}
cleanupAuditTable(daysAgo: number) {
return this.auditCleanup('asset_face_audit', daysAgo);
}
@GenerateSql({ params: [dummyQueryOptions], stream: true })
getUpserts(options: SyncQueryOptions) {
return this.upsertQuery('asset_face', options)
@@ -506,10 +473,6 @@ class MemorySync extends BaseSync {
.stream();
}
cleanupAuditTable(daysAgo: number) {
return this.auditCleanup('memory_audit', daysAgo);
}
@GenerateSql({ params: [dummyQueryOptions], stream: true })
getUpserts(options: SyncQueryOptions) {
return this.upsertQuery('memory', options)
@@ -542,10 +505,6 @@ class MemoryToAssetSync extends BaseSync {
.stream();
}
cleanupAuditTable(daysAgo: number) {
return this.auditCleanup('memory_asset_audit', daysAgo);
}
@GenerateSql({ params: [dummyQueryOptions], stream: true })
getUpserts(options: SyncQueryOptions) {
return this.upsertQuery('memory_asset', options)
@@ -578,10 +537,6 @@ class PartnerSync extends BaseSync {
.stream();
}
cleanupAuditTable(daysAgo: number) {
return this.auditCleanup('partner_audit', daysAgo);
}
@GenerateSql({ params: [dummyQueryOptions], stream: true })
getUpserts(options: SyncQueryOptions) {
const userId = options.userId;
@@ -661,10 +616,6 @@ class StackSync extends BaseSync {
.stream();
}
cleanupAuditTable(daysAgo: number) {
return this.auditCleanup('stack_audit', daysAgo);
}
@GenerateSql({ params: [dummyQueryOptions], stream: true })
getUpserts(options: SyncQueryOptions) {
return this.upsertQuery('stack', options)
@@ -713,10 +664,6 @@ class UserSync extends BaseSync {
return this.auditQuery('user_audit', options).select(['id', 'userId']).stream();
}
cleanupAuditTable(daysAgo: number) {
return this.auditCleanup('user_audit', daysAgo);
}
@GenerateSql({ params: [dummyQueryOptions], stream: true })
getUpserts(options: SyncQueryOptions) {
return this.upsertQuery('user', options).select(columns.syncUser).stream();
@@ -732,10 +679,6 @@ class UserMetadataSync extends BaseSync {
.stream();
}
cleanupAuditTable(daysAgo: number) {
return this.auditCleanup('user_metadata_audit', daysAgo);
}
@GenerateSql({ params: [dummyQueryOptions], stream: true })
getUpserts(options: SyncQueryOptions) {
return this.upsertQuery('user_metadata', options)
@@ -755,10 +698,6 @@ class AssetMetadataSync extends BaseSync {
.stream();
}
cleanupAuditTable(daysAgo: number) {
return this.auditCleanup('asset_metadata_audit', daysAgo);
}
@GenerateSql({ params: [dummyQueryOptions, DummyValue.UUID], stream: true })
getUpserts(options: SyncQueryOptions, userId: string) {
return this.upsertQuery('asset_metadata', options)

View File

@@ -166,7 +166,6 @@ export interface DB {
api_key: ApiKeyTable;
asset: AssetTable;
asset_audit: AssetAuditTable;
asset_exif: AssetExifTable;
asset_face: AssetFaceTable;
asset_face_audit: AssetFaceAuditTable;
@@ -174,6 +173,7 @@ export interface DB {
asset_metadata: AssetMetadataTable;
asset_metadata_audit: AssetMetadataAuditTable;
asset_job_status: AssetJobStatusTable;
asset_audit: AssetAuditTable;
audit: AuditTable;

View File

@@ -1,11 +1,11 @@
import { PrimaryGeneratedUuidV7Column } from 'src/decorators';
import { MemoryTable } from 'src/schema/tables/memory.table';
import { Column, CreateDateColumn, ForeignKeyColumn, Generated, Table, Timestamp } from 'src/sql-tools';
import { Column, CreateDateColumn, ForeignKeyColumn, Table } from 'src/sql-tools';
@Table('memory_asset_audit')
export class MemoryAssetAuditTable {
@PrimaryGeneratedUuidV7Column()
id!: Generated<string>;
id!: string;
@ForeignKeyColumn(() => MemoryTable, { type: 'uuid', onDelete: 'CASCADE', onUpdate: 'CASCADE' })
memoryId!: string;
@@ -14,5 +14,5 @@ export class MemoryAssetAuditTable {
assetId!: string;
@CreateDateColumn({ default: () => 'clock_timestamp()', index: true })
deletedAt!: Generated<Timestamp>;
deletedAt!: Date;
}

View File

@@ -42,7 +42,6 @@ describe(JobService.name, () => {
{ name: JobName.PersonCleanup },
{ name: JobName.MemoryCleanup },
{ name: JobName.SessionCleanup },
{ name: JobName.AuditTableCleanup },
{ name: JobName.AuditLogCleanup },
{ name: JobName.MemoryGenerate },
{ name: JobName.UserSyncUsage },
@@ -239,11 +238,11 @@ describe(JobService.name, () => {
const tests: Array<{ item: JobItem; jobs: JobName[]; stub?: any }> = [
{
item: { name: JobName.SidecarCheck, data: { id: 'asset-1' } },
item: { name: JobName.SidecarSync, data: { id: 'asset-1' } },
jobs: [JobName.AssetExtractMetadata],
},
{
item: { name: JobName.SidecarCheck, data: { id: 'asset-1' } },
item: { name: JobName.SidecarDiscovery, data: { id: 'asset-1' } },
jobs: [JobName.AssetExtractMetadata],
},
{

View File

@@ -281,7 +281,6 @@ export class JobService extends BaseService {
{ name: JobName.PersonCleanup },
{ name: JobName.MemoryCleanup },
{ name: JobName.SessionCleanup },
{ name: JobName.AuditTableCleanup },
{ name: JobName.AuditLogCleanup },
);
}
@@ -310,7 +309,8 @@ export class JobService extends BaseService {
*/
private async onDone(item: JobItem) {
switch (item.name) {
case JobName.SidecarCheck: {
case JobName.SidecarSync:
case JobName.SidecarDiscovery: {
await this.jobRepository.queue({ name: JobName.AssetExtractMetadata, data: item.data });
break;
}

View File

@@ -527,7 +527,7 @@ describe(LibraryService.name, () => {
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.SidecarCheck,
name: JobName.SidecarDiscovery,
data: {
id: assetStub.external.id,
source: 'upload',
@@ -573,7 +573,7 @@ describe(LibraryService.name, () => {
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.SidecarCheck,
name: JobName.SidecarDiscovery,
data: {
id: assetStub.image.id,
source: 'upload',

View File

@@ -414,7 +414,7 @@ export class LibraryService extends BaseService {
// We queue a sidecar discovery which, in turn, queues metadata extraction
await this.jobRepository.queueAll(
assetIds.map((assetId) => ({
name: JobName.SidecarCheck,
name: JobName.SidecarDiscovery,
data: { id: assetId, source: 'upload' },
})),
);

View File

@@ -1,6 +1,7 @@
import { BinaryField, ExifDateTime } from 'exiftool-vendored';
import { randomBytes } from 'node:crypto';
import { Stats } from 'node:fs';
import { constants } from 'node:fs/promises';
import { defaults } from 'src/config';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { AssetType, AssetVisibility, ExifOrientation, ImmichWorker, JobName, JobStatus, SourceType } from 'src/enum';
@@ -14,21 +15,6 @@ import { tagStub } from 'test/fixtures/tag.stub';
import { factory } from 'test/small.factory';
import { makeStream, newTestService, ServiceMocks } from 'test/utils';
const forSidecarJob = (
asset: {
id?: string;
originalPath?: string;
sidecarPath?: string | null;
} = {},
) => {
return {
id: factory.uuid(),
originalPath: '/path/to/IMG_123.jpg',
sidecarPath: null,
...asset,
};
};
const makeFaceTags = (face: Partial<{ Name: string }> = {}, orientation?: ImmichTags['Orientation']) => ({
Orientation: orientation,
RegionInfo: {
@@ -1471,7 +1457,7 @@ describe(MetadataService.name, () => {
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.SidecarCheck,
name: JobName.SidecarSync,
data: { id: assetStub.sidecar.id },
},
]);
@@ -1485,65 +1471,133 @@ describe(MetadataService.name, () => {
expect(mocks.assetJob.streamForSidecar).toHaveBeenCalledWith(false);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.SidecarCheck,
name: JobName.SidecarDiscovery,
data: { id: assetStub.image.id },
},
]);
});
});
describe('handleSidecarCheck', () => {
describe('handleSidecarSync', () => {
it('should do nothing if asset could not be found', async () => {
mocks.assetJob.getForSidecarCheckJob.mockResolvedValue(void 0);
await expect(sut.handleSidecarCheck({ id: assetStub.image.id })).resolves.toBeUndefined();
mocks.asset.getByIds.mockResolvedValue([]);
await expect(sut.handleSidecarSync({ id: assetStub.image.id })).resolves.toBe(JobStatus.Failed);
expect(mocks.asset.update).not.toHaveBeenCalled();
});
it('should detect a new sidecar at .jpg.xmp', async () => {
const asset = forSidecarJob({ originalPath: '/path/to/IMG_123.jpg' });
mocks.assetJob.getForSidecarCheckJob.mockResolvedValue(asset);
mocks.storage.checkFileExists.mockResolvedValueOnce(true);
await expect(sut.handleSidecarCheck({ id: asset.id })).resolves.toBe(JobStatus.Success);
expect(mocks.asset.update).toHaveBeenCalledWith({ id: asset.id, sidecarPath: `/path/to/IMG_123.jpg.xmp` });
it('should do nothing if asset has no sidecar path', async () => {
mocks.asset.getByIds.mockResolvedValue([assetStub.image]);
await expect(sut.handleSidecarSync({ id: assetStub.image.id })).resolves.toBe(JobStatus.Failed);
expect(mocks.asset.update).not.toHaveBeenCalled();
});
it('should detect a new sidecar at .xmp', async () => {
const asset = forSidecarJob({ originalPath: '/path/to/IMG_123.jpg' });
it('should set sidecar path if exists (sidecar named photo.ext.xmp)', async () => {
mocks.asset.getByIds.mockResolvedValue([assetStub.sidecar]);
mocks.storage.checkFileExists.mockResolvedValue(true);
mocks.assetJob.getForSidecarCheckJob.mockResolvedValue(asset);
await expect(sut.handleSidecarSync({ id: assetStub.sidecar.id })).resolves.toBe(JobStatus.Success);
expect(mocks.storage.checkFileExists).toHaveBeenCalledWith(
`${assetStub.sidecar.originalPath}.xmp`,
constants.R_OK,
);
expect(mocks.asset.update).toHaveBeenCalledWith({
id: assetStub.sidecar.id,
sidecarPath: assetStub.sidecar.sidecarPath,
});
});
it('should set sidecar path if exists (sidecar named photo.xmp)', async () => {
mocks.asset.getByIds.mockResolvedValue([assetStub.sidecarWithoutExt as any]);
mocks.storage.checkFileExists.mockResolvedValueOnce(false);
mocks.storage.checkFileExists.mockResolvedValueOnce(true);
await expect(sut.handleSidecarCheck({ id: asset.id })).resolves.toBe(JobStatus.Success);
await expect(sut.handleSidecarSync({ id: assetStub.sidecarWithoutExt.id })).resolves.toBe(JobStatus.Success);
expect(mocks.storage.checkFileExists).toHaveBeenNthCalledWith(
2,
assetStub.sidecarWithoutExt.sidecarPath,
constants.R_OK,
);
expect(mocks.asset.update).toHaveBeenCalledWith({
id: assetStub.sidecarWithoutExt.id,
sidecarPath: assetStub.sidecarWithoutExt.sidecarPath,
});
});
expect(mocks.asset.update).toHaveBeenCalledWith({ id: asset.id, sidecarPath: '/path/to/IMG_123.xmp' });
it('should set sidecar path if exists (two sidecars named photo.ext.xmp and photo.xmp, should pick photo.ext.xmp)', async () => {
mocks.asset.getByIds.mockResolvedValue([assetStub.sidecar]);
mocks.storage.checkFileExists.mockResolvedValueOnce(true);
mocks.storage.checkFileExists.mockResolvedValueOnce(true);
await expect(sut.handleSidecarSync({ id: assetStub.sidecar.id })).resolves.toBe(JobStatus.Success);
expect(mocks.storage.checkFileExists).toHaveBeenNthCalledWith(1, assetStub.sidecar.sidecarPath, constants.R_OK);
expect(mocks.storage.checkFileExists).toHaveBeenNthCalledWith(
2,
assetStub.sidecarWithoutExt.sidecarPath,
constants.R_OK,
);
expect(mocks.asset.update).toHaveBeenCalledWith({
id: assetStub.sidecar.id,
sidecarPath: assetStub.sidecar.sidecarPath,
});
});
it('should unset sidecar path if file does not exist anymore', async () => {
const asset = forSidecarJob({ originalPath: '/path/to/IMG_123.jpg', sidecarPath: '/path/to/IMG_123.jpg.xmp' });
mocks.assetJob.getForSidecarCheckJob.mockResolvedValue(asset);
mocks.asset.getByIds.mockResolvedValue([assetStub.sidecar]);
mocks.storage.checkFileExists.mockResolvedValue(false);
await expect(sut.handleSidecarCheck({ id: asset.id })).resolves.toBe(JobStatus.Success);
await expect(sut.handleSidecarSync({ id: assetStub.sidecar.id })).resolves.toBe(JobStatus.Success);
expect(mocks.storage.checkFileExists).toHaveBeenCalledWith(
`${assetStub.sidecar.originalPath}.xmp`,
constants.R_OK,
);
expect(mocks.asset.update).toHaveBeenCalledWith({
id: assetStub.sidecar.id,
sidecarPath: null,
});
});
});
expect(mocks.asset.update).toHaveBeenCalledWith({ id: asset.id, sidecarPath: null });
describe('handleSidecarDiscovery', () => {
it('should skip hidden assets', async () => {
mocks.asset.getByIds.mockResolvedValue([assetStub.livePhotoMotionAsset as any]);
await sut.handleSidecarDiscovery({ id: assetStub.livePhotoMotionAsset.id });
expect(mocks.storage.checkFileExists).not.toHaveBeenCalled();
});
it('should do nothing if the sidecar file still exists', async () => {
const asset = forSidecarJob({ originalPath: '/path/to/IMG_123.jpg', sidecarPath: '/path/to/IMG_123.jpg' });
mocks.assetJob.getForSidecarCheckJob.mockResolvedValue(asset);
mocks.storage.checkFileExists.mockResolvedValueOnce(true);
await expect(sut.handleSidecarCheck({ id: asset.id })).resolves.toBe(JobStatus.Skipped);
it('should skip assets with a sidecar path', async () => {
mocks.asset.getByIds.mockResolvedValue([assetStub.sidecar]);
await sut.handleSidecarDiscovery({ id: assetStub.sidecar.id });
expect(mocks.storage.checkFileExists).not.toHaveBeenCalled();
});
it('should do nothing when a sidecar is not found', async () => {
mocks.asset.getByIds.mockResolvedValue([assetStub.image]);
mocks.storage.checkFileExists.mockResolvedValue(false);
await sut.handleSidecarDiscovery({ id: assetStub.image.id });
expect(mocks.asset.update).not.toHaveBeenCalled();
});
it('should update a image asset when a sidecar is found', async () => {
mocks.asset.getByIds.mockResolvedValue([assetStub.image]);
mocks.storage.checkFileExists.mockResolvedValue(true);
await sut.handleSidecarDiscovery({ id: assetStub.image.id });
expect(mocks.storage.checkFileExists).toHaveBeenCalledWith('/original/path.jpg.xmp', constants.R_OK);
expect(mocks.asset.update).toHaveBeenCalledWith({
id: assetStub.image.id,
sidecarPath: '/original/path.jpg.xmp',
});
});
it('should update a video asset when a sidecar is found', async () => {
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
mocks.storage.checkFileExists.mockResolvedValue(true);
await sut.handleSidecarDiscovery({ id: assetStub.video.id });
expect(mocks.storage.checkFileExists).toHaveBeenCalledWith('/original/path.ext.xmp', constants.R_OK);
expect(mocks.asset.update).toHaveBeenCalledWith({
id: assetStub.video.id,
sidecarPath: '/original/path.ext.xmp',
});
});
});
describe('handleSidecarWrite', () => {

View File

@@ -5,7 +5,7 @@ import _ from 'lodash';
import { Duration } from 'luxon';
import { Stats } from 'node:fs';
import { constants } from 'node:fs/promises';
import { join, parse } from 'node:path';
import path from 'node:path';
import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { StorageCore } from 'src/cores/storage.core';
import { Asset, AssetFace } from 'src/database';
@@ -331,7 +331,7 @@ export class MetadataService extends BaseService {
const assets = this.assetJobRepository.streamForSidecar(force);
for await (const asset of assets) {
jobs.push({ name: JobName.SidecarCheck, data: { id: asset.id } });
jobs.push({ name: force ? JobName.SidecarSync : JobName.SidecarDiscovery, data: { id: asset.id } });
if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
await queueAll();
}
@@ -342,37 +342,14 @@ export class MetadataService extends BaseService {
return JobStatus.Success;
}
@OnJob({ name: JobName.SidecarCheck, queue: QueueName.Sidecar })
async handleSidecarCheck({ id }: JobOf<JobName.SidecarCheck>): Promise<JobStatus | undefined> {
const asset = await this.assetJobRepository.getForSidecarCheckJob(id);
if (!asset) {
return;
}
@OnJob({ name: JobName.SidecarSync, queue: QueueName.Sidecar })
handleSidecarSync({ id }: JobOf<JobName.SidecarSync>): Promise<JobStatus> {
return this.processSidecar(id, true);
}
let sidecarPath = null;
for (const candidate of this.getSidecarCandidates(asset)) {
const exists = await this.storageRepository.checkFileExists(candidate, constants.R_OK);
if (!exists) {
continue;
}
sidecarPath = candidate;
break;
}
const isChanged = sidecarPath !== asset.sidecarPath;
this.logger.debug(
`Sidecar check found old=${asset.sidecarPath}, new=${sidecarPath} will ${isChanged ? 'update' : 'do nothing for'} asset ${asset.id}: ${asset.originalPath}`,
);
if (!isChanged) {
return JobStatus.Skipped;
}
await this.assetRepository.update({ id: asset.id, sidecarPath });
return JobStatus.Success;
@OnJob({ name: JobName.SidecarDiscovery, queue: QueueName.Sidecar })
handleSidecarDiscovery({ id }: JobOf<JobName.SidecarDiscovery>): Promise<JobStatus> {
return this.processSidecar(id, false);
}
@OnEvent({ name: 'AssetTag' })
@@ -422,25 +399,6 @@ export class MetadataService extends BaseService {
return JobStatus.Success;
}
private getSidecarCandidates({ sidecarPath, originalPath }: { sidecarPath: string | null; originalPath: string }) {
const candidates: string[] = [];
if (sidecarPath) {
candidates.push(sidecarPath);
}
const assetPath = parse(originalPath);
candidates.push(
// IMG_123.jpg.xmp
`${originalPath}.xmp`,
// IMG_123.xmp
`${join(assetPath.dir, assetPath.name)}.xmp`,
);
return candidates;
}
private getImageDimensions(exifTags: ImmichTags): { width?: number; height?: number } {
/*
* The "true" values for width and height are a bit hidden, depending on the camera model and file format.
@@ -606,7 +564,7 @@ export class MetadataService extends BaseService {
checksum,
ownerId: asset.ownerId,
originalPath: StorageCore.getAndroidMotionPath(asset, motionAssetId),
originalFileName: `${parse(asset.originalFileName).name}.mp4`,
originalFileName: `${path.parse(asset.originalFileName).name}.mp4`,
visibility: AssetVisibility.Hidden,
deviceAssetId: 'NONE',
deviceId: 'NONE',
@@ -947,4 +905,60 @@ export class MetadataService extends BaseService {
return tags;
}
private async processSidecar(id: string, isSync: boolean): Promise<JobStatus> {
const [asset] = await this.assetRepository.getByIds([id]);
if (!asset) {
return JobStatus.Failed;
}
if (isSync && !asset.sidecarPath) {
return JobStatus.Failed;
}
if (!isSync && (asset.visibility === AssetVisibility.Hidden || asset.sidecarPath) && !asset.isExternal) {
return JobStatus.Failed;
}
// XMP sidecars can come in two filename formats. For a photo named photo.ext, the filenames are photo.ext.xmp and photo.xmp
const assetPath = path.parse(asset.originalPath);
const assetPathWithoutExt = path.join(assetPath.dir, assetPath.name);
const sidecarPathWithoutExt = `${assetPathWithoutExt}.xmp`;
const sidecarPathWithExt = `${asset.originalPath}.xmp`;
const [sidecarPathWithExtExists, sidecarPathWithoutExtExists] = await Promise.all([
this.storageRepository.checkFileExists(sidecarPathWithExt, constants.R_OK),
this.storageRepository.checkFileExists(sidecarPathWithoutExt, constants.R_OK),
]);
let sidecarPath = null;
if (sidecarPathWithExtExists) {
sidecarPath = sidecarPathWithExt;
} else if (sidecarPathWithoutExtExists) {
sidecarPath = sidecarPathWithoutExt;
}
if (asset.isExternal) {
if (sidecarPath !== asset.sidecarPath) {
await this.assetRepository.update({ id: asset.id, sidecarPath });
}
return JobStatus.Success;
}
if (sidecarPath) {
this.logger.debug(`Detected sidecar at '${sidecarPath}' for asset ${asset.id}: ${asset.originalPath}`);
await this.assetRepository.update({ id: asset.id, sidecarPath });
return JobStatus.Success;
}
if (!isSync) {
return JobStatus.Failed;
}
this.logger.debug(`No sidecar found for asset ${asset.id}: ${asset.originalPath}`);
await this.assetRepository.update({ id: asset.id, sidecarPath: null });
return JobStatus.Success;
}
}
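
As the comment in processSidecar notes, an XMP sidecar for photo.ext may live at either photo.ext.xmp or photo.xmp, and the extension-preserving form wins when both exist. A minimal Node/TypeScript sketch of that candidate ordering; checkFileExists here stands in for the storage repository and is an assumption, not the repository API:

import { access, constants } from 'node:fs/promises';
import path from 'node:path';

// Returns true if the file exists and is readable.
const checkFileExists = (p: string) =>
  access(p, constants.R_OK).then(() => true, () => false);

// For /photos/IMG_123.jpg this checks /photos/IMG_123.jpg.xmp first,
// then /photos/IMG_123.xmp, mirroring the priority in processSidecar.
async function findSidecar(originalPath: string): Promise<string | null> {
  const parsed = path.parse(originalPath);
  const withExt = `${originalPath}.xmp`;
  const withoutExt = `${path.join(parsed.dir, parsed.name)}.xmp`;
  for (const candidate of [withExt, withoutExt]) {
    if (await checkFileExists(candidate)) {
      return candidate;
    }
  }
  return null;
}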

View File

@@ -18,7 +18,7 @@ import {
SmartSearchDto,
StatisticsSearchDto,
} from 'src/dtos/search.dto';
import { AssetOrder, AssetVisibility, Permission } from 'src/enum';
import { AssetOrder, AssetVisibility } from 'src/enum';
import { BaseService } from 'src/services/base.service';
import { requireElevatedPermission } from 'src/utils/access';
import { getMyPartnerIds } from 'src/utils/asset.util';
@@ -113,27 +113,14 @@ export class SearchService extends BaseService {
}
const userIds = this.getUserIdsToSearch(auth);
let embedding;
if (dto.query) {
const key = machineLearning.clip.modelName + dto.query + dto.language;
embedding = this.embeddingCache.get(key);
if (!embedding) {
embedding = await this.machineLearningRepository.encodeText(machineLearning.urls, dto.query, {
modelName: machineLearning.clip.modelName,
language: dto.language,
});
this.embeddingCache.set(key, embedding);
}
} else if (dto.queryAssetId) {
await this.requireAccess({ auth, permission: Permission.AssetRead, ids: [dto.queryAssetId] });
const getEmbeddingResponse = await this.searchRepository.getEmbedding(dto.queryAssetId);
const assetEmbedding = getEmbeddingResponse?.embedding;
if (!assetEmbedding) {
throw new BadRequestException(`Asset ${dto.queryAssetId} has no embedding`);
}
embedding = assetEmbedding;
} else {
throw new BadRequestException('Either `query` or `queryAssetId` must be set');
const key = machineLearning.clip.modelName + dto.query + dto.language;
let embedding = this.embeddingCache.get(key);
if (!embedding) {
embedding = await this.machineLearningRepository.encodeText(machineLearning.urls, dto.query, {
modelName: machineLearning.clip.modelName,
language: dto.language,
});
this.embeddingCache.set(key, embedding);
}
const page = dto.page ?? 1;
const size = dto.size || 100;
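
The simplified branch always derives the cache key from the CLIP model name, the query text, and the language, so repeated searches skip the machine-learning round trip. A stripped-down sketch of that memoization, assuming a plain Map as the cache and an encodeText function with this shape (the real embeddingCache and repository signatures may differ):

type EncodeText = (
  query: string,
  opts: { modelName: string; language?: string },
) => Promise<number[]>;

// Cache key mirrors the service: modelName + query + language.
function makeCachedEncoder(encodeText: EncodeText) {
  const cache = new Map<string, number[]>();
  return async (query: string, modelName: string, language?: string): Promise<number[]> => {
    const key = modelName + query + language;
    let embedding = cache.get(key);
    if (!embedding) {
      embedding = await encodeText(query, { modelName, language });
      cache.set(key, embedding);
    }
    return embedding;
  };
}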

View File

@@ -1,9 +1,8 @@
import { BadRequestException, ForbiddenException, Injectable } from '@nestjs/common';
import { Insertable } from 'kysely';
import { DateTime, Duration } from 'luxon';
import { DateTime } from 'luxon';
import { Writable } from 'node:stream';
import { AUDIT_LOG_MAX_DURATION } from 'src/constants';
import { OnJob } from 'src/decorators';
import { AssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import {
@@ -16,16 +15,7 @@ import {
SyncItem,
SyncStreamDto,
} from 'src/dtos/sync.dto';
import {
AssetVisibility,
DatabaseAction,
EntityType,
JobName,
Permission,
QueueName,
SyncEntityType,
SyncRequestType,
} from 'src/enum';
import { AssetVisibility, DatabaseAction, EntityType, Permission, SyncEntityType, SyncRequestType } from 'src/enum';
import { SyncQueryOptions } from 'src/repositories/sync.repository';
import { SessionSyncCheckpointTable } from 'src/schema/tables/sync-checkpoint.table';
import { BaseService } from 'src/services/base.service';
@@ -42,8 +32,6 @@ type AssetLike = Omit<SyncAssetV1, 'checksum' | 'thumbhash'> & {
};
const COMPLETE_ID = 'complete';
const MAX_DAYS = 30;
const MAX_DURATION = Duration.fromObject({ days: MAX_DAYS });
const mapSyncAssetV1 = ({ checksum, thumbhash, ...data }: AssetLike): SyncAssetV1 => ({
...data,
@@ -149,24 +137,19 @@ export class SyncService extends BaseService {
}
const isPendingSyncReset = await this.sessionRepository.isPendingSyncReset(session.id);
if (isPendingSyncReset) {
send(response, { type: SyncEntityType.SyncResetV1, ids: ['reset'], data: {} });
response.end();
return;
}
const checkpoints = await this.syncCheckpointRepository.getAll(session.id);
const checkpointMap: CheckpointMap = Object.fromEntries(checkpoints.map(({ type, ack }) => [type, fromAck(ack)]));
if (this.needsFullSync(checkpointMap)) {
send(response, { type: SyncEntityType.SyncResetV1, ids: ['reset'], data: {} });
response.end();
return;
}
const { nowId } = await this.syncCheckpointRepository.getNow();
const options: SyncQueryOptions = { nowId, userId: auth.user.id };
const checkpoints = await this.syncCheckpointRepository.getAll(session.id);
const checkpointMap: CheckpointMap = Object.fromEntries(checkpoints.map(({ type, ack }) => [type, fromAck(ack)]));
const handlers: Record<SyncRequestType, () => Promise<void>> = {
[SyncRequestType.AuthUsersV1]: () => this.syncAuthUsersV1(options, response, checkpointMap),
[SyncRequestType.UsersV1]: () => this.syncUsersV1(options, response, checkpointMap),
@@ -197,41 +180,9 @@ export class SyncService extends BaseService {
await handler();
}
send(response, { type: SyncEntityType.SyncCompleteV1, ids: [nowId], data: {} });
response.end();
}
@OnJob({ name: JobName.AuditTableCleanup, queue: QueueName.BackgroundTask })
async onAuditTableCleanup() {
const pruneThreshold = MAX_DAYS + 1;
await this.syncRepository.album.cleanupAuditTable(pruneThreshold);
await this.syncRepository.albumUser.cleanupAuditTable(pruneThreshold);
await this.syncRepository.albumToAsset.cleanupAuditTable(pruneThreshold);
await this.syncRepository.asset.cleanupAuditTable(pruneThreshold);
await this.syncRepository.assetFace.cleanupAuditTable(pruneThreshold);
await this.syncRepository.assetMetadata.cleanupAuditTable(pruneThreshold);
await this.syncRepository.memory.cleanupAuditTable(pruneThreshold);
await this.syncRepository.memoryToAsset.cleanupAuditTable(pruneThreshold);
await this.syncRepository.partner.cleanupAuditTable(pruneThreshold);
await this.syncRepository.person.cleanupAuditTable(pruneThreshold);
await this.syncRepository.stack.cleanupAuditTable(pruneThreshold);
await this.syncRepository.user.cleanupAuditTable(pruneThreshold);
await this.syncRepository.userMetadata.cleanupAuditTable(pruneThreshold);
}
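// A client must resync from scratch when its last completed sync is older than MAX_DAYS.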
private needsFullSync(checkpointMap: CheckpointMap) {
const completeAck = checkpointMap[SyncEntityType.SyncCompleteV1];
if (!completeAck) {
return false;
}
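// UUIDv7 ids embed a 48-bit Unix-millisecond timestamp in their first 12 hex digits; parse it to age the checkpoint.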
const milliseconds = Number.parseInt(completeAck.updateId.replaceAll('-', '').slice(0, 12), 16);
return DateTime.fromMillis(milliseconds) < DateTime.now().minus(MAX_DURATION);
}
private async syncAuthUsersV1(options: SyncQueryOptions, response: Writable, checkpointMap: CheckpointMap) {
const upsertType = SyncEntityType.AuthUserV1;
const upserts = this.syncRepository.authUser.getUpserts({ ...options, ack: checkpointMap[upsertType] });
@@ -768,13 +719,13 @@ export class SyncService extends BaseService {
private async syncPeopleV1(options: SyncQueryOptions, response: Writable, checkpointMap: CheckpointMap) {
const deleteType = SyncEntityType.PersonDeleteV1;
const deletes = this.syncRepository.person.getDeletes({ ...options, ack: checkpointMap[deleteType] });
const deletes = this.syncRepository.people.getDeletes({ ...options, ack: checkpointMap[deleteType] });
for await (const { id, ...data } of deletes) {
send(response, { type: deleteType, ids: [id], data });
}
const upsertType = SyncEntityType.PersonV1;
const upserts = this.syncRepository.person.getUpserts({ ...options, ack: checkpointMap[upsertType] });
const upserts = this.syncRepository.people.getUpserts({ ...options, ack: checkpointMap[upsertType] });
for await (const { updateId, ...data } of upserts) {
send(response, { type: upsertType, ids: [updateId], data });
}

View File

@@ -275,9 +275,6 @@ export interface QueueStatus {
}
export type JobItem =
// Audit
| { name: JobName.AuditTableCleanup; data?: IBaseJob }
// Backups
| { name: JobName.DatabaseBackup; data?: IBaseJob }
@@ -312,7 +309,8 @@ export type JobItem =
// Sidecar Scanning
| { name: JobName.SidecarQueueAll; data: IBaseJob }
| { name: JobName.SidecarCheck; data: IEntityJob }
| { name: JobName.SidecarDiscovery; data: IEntityJob }
| { name: JobName.SidecarSync; data: IEntityJob }
| { name: JobName.SidecarWrite; data: ISidecarWriteJob }
// Facial Recognition
@@ -399,8 +397,8 @@ export interface VectorUpdateResult {
}
export interface ImmichFile extends Express.Multer.File {
uuid: string;
/** sha1 hash of file */
uuid: string;
checksum: Buffer;
}

View File

@@ -35,7 +35,7 @@ export const stackStub = (stackId: string, assets: (MapAsset & { exifInfo: Exif
primaryAssetId: assets[0].id,
createdAt: new Date('2023-02-23T05:06:29.716Z'),
updatedAt: new Date('2023-02-23T05:06:29.716Z'),
updateId: expect.any(String),
updateId: 'uuid-v7',
};
};

View File

@@ -1,6 +1,5 @@
import { Tag } from 'src/database';
import { TagResponseDto } from 'src/dtos/tag.dto';
import { newUuidV7 } from 'test/small.factory';
const parent = Object.freeze<Tag>({
id: 'tag-parent',
@@ -38,10 +37,7 @@ const color = {
parentId: null,
};
const upsert = {
userId: 'tag-user',
updateId: newUuidV7(),
};
const upsert = { userId: 'tag-user', updateId: 'uuid-v7' };
export const tagStub = {
tag,

View File

@@ -258,12 +258,6 @@ export class SyncTestContext extends MediumTestContext<SyncService> {
return stream.getResponse();
}
async assertSyncIsComplete(auth: AuthDto, types: SyncRequestType[]) {
await expect(this.syncStream(auth, types)).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
}
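// Collects the most recent ack per entity type from a sync response, then acknowledges them in bulk.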
async syncAckAll(auth: AuthDto, response: Array<{ type: string; ack: string }>) {
const acks: Record<string, string> = {};
const syncAcks: string[] = [];

View File

@@ -1,226 +0,0 @@
import { Kysely } from 'kysely';
import { DateTime } from 'luxon';
import { AssetMetadataKey, UserMetadataKey } from 'src/enum';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { SyncRepository } from 'src/repositories/sync.repository';
import { DB } from 'src/schema';
import { SyncService } from 'src/services/sync.service';
import { newMediumService } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
import { v4 } from 'uuid';
let defaultDatabase: Kysely<DB>;
const setup = (db?: Kysely<DB>) => {
return newMediumService(SyncService, {
database: db || defaultDatabase,
real: [DatabaseRepository, SyncRepository],
mock: [LoggingRepository],
});
};
beforeAll(async () => {
defaultDatabase = await getKyselyDB();
});
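// 35 days is safely past the 30-day sync window (MAX_DAYS), so these rows are eligible for pruning.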
const deletedLongAgo = DateTime.now().minus({ days: 35 }).toISO();
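// Assert that the given table contains exactly `count` rows, using Kysely's dynamic table builder.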
const assertTableCount = async <T extends keyof DB>(db: Kysely<DB>, t: T, count: number) => {
const { table } = db.dynamic;
const results = await db.selectFrom(table(t).as(t)).selectAll().execute();
expect(results).toHaveLength(count);
};
describe(SyncService.name, () => {
describe('onAuditTableCleanup', () => {
it('should work', async () => {
const { sut } = setup();
await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
});
it('should cleanup the album_audit table', async () => {
const { sut, ctx } = setup();
const tableName = 'album_audit';
await ctx.database
.insertInto(tableName)
.values({ albumId: v4(), userId: v4(), deletedAt: deletedLongAgo })
.execute();
await assertTableCount(ctx.database, tableName, 1);
await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
await assertTableCount(ctx.database, tableName, 0);
});
it('should cleanup the album_asset_audit table', async () => {
const { sut, ctx } = setup();
const tableName = 'album_asset_audit';
const { user } = await ctx.newUser();
const { album } = await ctx.newAlbum({ ownerId: user.id });
await ctx.database
.insertInto(tableName)
.values({ albumId: album.id, assetId: v4(), deletedAt: deletedLongAgo })
.execute();
await assertTableCount(ctx.database, tableName, 1);
await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
await assertTableCount(ctx.database, tableName, 0);
});
it('should cleanup the album_user_audit table', async () => {
const { sut, ctx } = setup();
const tableName = 'album_user_audit';
await ctx.database
.insertInto(tableName)
.values({ albumId: v4(), userId: v4(), deletedAt: deletedLongAgo })
.execute();
await assertTableCount(ctx.database, tableName, 1);
await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
await assertTableCount(ctx.database, tableName, 0);
});
it('should cleanup the asset_audit table', async () => {
const { sut, ctx } = setup();
await ctx.database
.insertInto('asset_audit')
.values({ assetId: v4(), ownerId: v4(), deletedAt: deletedLongAgo })
.execute();
await assertTableCount(ctx.database, 'asset_audit', 1);
await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
await assertTableCount(ctx.database, 'asset_audit', 0);
});
it('should cleanup the asset_face_audit table', async () => {
const { sut, ctx } = setup();
const tableName = 'asset_face_audit';
await ctx.database
.insertInto(tableName)
.values({ assetFaceId: v4(), assetId: v4(), deletedAt: deletedLongAgo })
.execute();
await assertTableCount(ctx.database, tableName, 1);
await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
await assertTableCount(ctx.database, tableName, 0);
});
it('should cleanup the asset_metadata_audit table', async () => {
const { sut, ctx } = setup();
const tableName = 'asset_metadata_audit';
await ctx.database
.insertInto(tableName)
.values({ assetId: v4(), key: AssetMetadataKey.MobileApp, deletedAt: deletedLongAgo })
.execute();
await assertTableCount(ctx.database, tableName, 1);
await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
await assertTableCount(ctx.database, tableName, 0);
});
it('should cleanup the memory_audit table', async () => {
const { sut, ctx } = setup();
const tableName = 'memory_audit';
await ctx.database
.insertInto(tableName)
.values({ memoryId: v4(), userId: v4(), deletedAt: deletedLongAgo })
.execute();
await assertTableCount(ctx.database, tableName, 1);
await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
await assertTableCount(ctx.database, tableName, 0);
});
it('should cleanup the memory_asset_audit table', async () => {
const { sut, ctx } = setup();
const tableName = 'memory_asset_audit';
const { user } = await ctx.newUser();
const { memory } = await ctx.newMemory({ ownerId: user.id });
await ctx.database
.insertInto(tableName)
.values({ memoryId: memory.id, assetId: v4(), deletedAt: deletedLongAgo })
.execute();
await assertTableCount(ctx.database, tableName, 1);
await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
await assertTableCount(ctx.database, tableName, 0);
});
it('should cleanup the partner_audit table', async () => {
const { sut, ctx } = setup();
const tableName = 'partner_audit';
await ctx.database
.insertInto(tableName)
.values({ sharedById: v4(), sharedWithId: v4(), deletedAt: deletedLongAgo })
.execute();
await assertTableCount(ctx.database, tableName, 1);
await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
await assertTableCount(ctx.database, tableName, 0);
});
it('should cleanup the stack_audit table', async () => {
const { sut, ctx } = setup();
const tableName = 'stack_audit';
await ctx.database
.insertInto(tableName)
.values({ stackId: v4(), userId: v4(), deletedAt: deletedLongAgo })
.execute();
await assertTableCount(ctx.database, tableName, 1);
await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
await assertTableCount(ctx.database, tableName, 0);
});
it('should cleanup the user_audit table', async () => {
const { sut, ctx } = setup();
const tableName = 'user_audit';
await ctx.database.insertInto(tableName).values({ userId: v4(), deletedAt: deletedLongAgo }).execute();
await assertTableCount(ctx.database, tableName, 1);
await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
await assertTableCount(ctx.database, tableName, 0);
});
it('should cleanup the user_metadata_audit table', async () => {
const { sut, ctx } = setup();
const tableName = 'user_metadata_audit';
await ctx.database
.insertInto(tableName)
.values({ userId: v4(), key: UserMetadataKey.Onboarding, deletedAt: deletedLongAgo })
.execute();
await assertTableCount(ctx.database, tableName, 1);
await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
await assertTableCount(ctx.database, tableName, 0);
});
it('should skip recent records', async () => {
const { sut, ctx } = setup();
const keep = {
id: v4(),
assetId: v4(),
ownerId: v4(),
deletedAt: DateTime.now().minus({ days: 25 }).toISO(),
};
const remove = {
id: v4(),
assetId: v4(),
ownerId: v4(),
deletedAt: DateTime.now().minus({ days: 35 }).toISO(),
};
await ctx.database.insertInto('asset_audit').values([keep, remove]).execute();
await assertTableCount(ctx.database, 'asset_audit', 2);
await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
const after = await ctx.database.selectFrom('asset_audit').select(['id']).execute();
expect(after).toHaveLength(1);
expect(after[0].id).toBe(keep.id);
});
});
});
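For reference, a pruning query of the shape these tests exercise, as a minimal Kysely sketch; the helper name and SQL are assumptions, since the repository implementation is not part of this diff:

import { Kysely, sql } from 'kysely';

// Hypothetical pruner: delete audit rows whose deletedAt is older than `days` days.
// The removed onAuditTableCleanup above calls one such method per audit table with
// pruneThreshold = MAX_DAYS + 1 (31 days).
const cleanupAuditTable = (db: Kysely<any>, table: 'asset_audit' | 'album_audit', days: number) =>
  db
    .deleteFrom(table)
    .where('deletedAt', '<', sql<Date>`now() - make_interval(days => ${days})`)
    .execute();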

View File

@@ -74,11 +74,11 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
},
type: SyncEntityType.AlbumAssetExifCreateV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
expect(response).toHaveLength(2);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumAssetExifsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1])).resolves.toEqual([]);
});
it('should sync album asset exif for own user', async () => {
@@ -88,15 +88,8 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
await expect(ctx.syncStream(auth, [SyncRequestType.AssetExifsV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.AssetExifV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.SyncAckV1 }),
expect.objectContaining({ type: SyncEntityType.AlbumAssetExifCreateV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await expect(ctx.syncStream(auth, [SyncRequestType.AssetExifsV1])).resolves.toHaveLength(1);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1])).resolves.toHaveLength(2);
});
it('should not sync album asset exif for unrelated user', async () => {
@@ -111,11 +104,8 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
const { session } = await ctx.newSession({ userId: user3.id });
const authUser3 = factory.auth({ session, user: user3 });
await expect(ctx.syncStream(authUser3, [SyncRequestType.AssetExifsV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.AssetExifV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumAssetExifsV1]);
await expect(ctx.syncStream(authUser3, [SyncRequestType.AssetExifsV1])).resolves.toHaveLength(1);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1])).resolves.toHaveLength(0);
});
it('should backfill album assets exif when a user shares an album with you', async () => {
@@ -149,8 +139,8 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
}),
type: SyncEntityType.AlbumAssetExifCreateV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
expect(response).toHaveLength(2);
// ack initial album asset exif sync
await ctx.syncAckAll(auth, response);
@@ -184,11 +174,11 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
}),
type: SyncEntityType.AlbumAssetExifCreateV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
expect(newResponse).toHaveLength(5);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumAssetExifsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1])).resolves.toEqual([]);
});
it('should sync old asset exif when a user adds them to an album they share with you', async () => {
@@ -217,8 +207,8 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
}),
type: SyncEntityType.AlbumAssetExifCreateV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
expect(firstAlbumResponse).toHaveLength(2);
await ctx.syncAckAll(auth, firstAlbumResponse);
@@ -234,8 +224,8 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
type: SyncEntityType.AlbumAssetExifBackfillV1,
},
backfillSyncAck,
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
expect(response).toHaveLength(2);
// ack initial album asset sync
await ctx.syncAckAll(auth, response);
@@ -254,11 +244,11 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
}),
type: SyncEntityType.AlbumAssetExifCreateV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
expect(newResponse).toHaveLength(2);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumAssetExifsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1])).resolves.toEqual([]);
});
it('should sync asset exif updates for an album shared with you', async () => {
@@ -272,6 +262,7 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1]);
expect(response).toHaveLength(2);
expect(response).toEqual([
updateSyncAck,
{
@@ -281,7 +272,6 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
}),
type: SyncEntityType.AlbumAssetExifCreateV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
@@ -293,7 +283,9 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
city: 'New City',
});
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1])).resolves.toEqual([
const updateResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1]);
expect(updateResponse).toHaveLength(1);
expect(updateResponse).toEqual([
{
ack: expect.any(String),
data: expect.objectContaining({
@@ -302,7 +294,6 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
}),
type: SyncEntityType.AlbumAssetExifUpdateV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
});
@@ -339,8 +330,8 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
}),
type: SyncEntityType.AlbumAssetExifCreateV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
expect(response).toHaveLength(3);
await ctx.syncAckAll(auth, response);
@@ -351,7 +342,8 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
city: 'Delayed Exif',
});
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1])).resolves.toEqual([
const updateResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1]);
expect(updateResponse).toEqual([
{
ack: expect.any(String),
data: expect.objectContaining({
@@ -360,7 +352,7 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
}),
type: SyncEntityType.AlbumAssetExifUpdateV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
expect(updateResponse).toHaveLength(1);
});
});

View File

@@ -58,6 +58,7 @@ describe(SyncRequestType.AlbumAssetsV1, () => {
await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1]);
expect(response).toHaveLength(2);
expect(response).toEqual([
updateSyncAck,
{
@@ -82,11 +83,10 @@ describe(SyncRequestType.AlbumAssetsV1, () => {
},
type: SyncEntityType.AlbumAssetCreateV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumAssetsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1])).resolves.toEqual([]);
});
it('should sync album asset for own user', async () => {
@@ -95,15 +95,8 @@ describe(SyncRequestType.AlbumAssetsV1, () => {
const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
await expect(ctx.syncStream(auth, [SyncRequestType.AssetsV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.AssetV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.SyncAckV1 }),
expect.objectContaining({ type: SyncEntityType.AlbumAssetCreateV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await expect(ctx.syncStream(auth, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1])).resolves.toHaveLength(2);
});
it('should not sync album asset for unrelated user', async () => {
@@ -117,11 +110,8 @@ describe(SyncRequestType.AlbumAssetsV1, () => {
const { session } = await ctx.newSession({ userId: user3.id });
const authUser3 = factory.auth({ session, user: user3 });
await expect(ctx.syncStream(authUser3, [SyncRequestType.AssetsV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.AssetV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumAssetsV1]);
await expect(ctx.syncStream(authUser3, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1])).resolves.toHaveLength(0);
});
it('should backfill album assets when a user shares an album with you', async () => {
@@ -143,6 +133,7 @@ describe(SyncRequestType.AlbumAssetsV1, () => {
await ctx.newAlbumUser({ albumId: album1.id, userId: auth.user.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1]);
expect(response).toHaveLength(2);
expect(response).toEqual([
updateSyncAck,
{
@@ -152,7 +143,6 @@ describe(SyncRequestType.AlbumAssetsV1, () => {
}),
type: SyncEntityType.AlbumAssetCreateV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
// ack initial album asset sync
@@ -186,11 +176,10 @@ describe(SyncRequestType.AlbumAssetsV1, () => {
}),
type: SyncEntityType.AlbumAssetCreateV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumAssetsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1])).resolves.toEqual([]);
});
it('should sync old assets when a user adds them to an album they share with you', async () => {
@@ -207,6 +196,7 @@ describe(SyncRequestType.AlbumAssetsV1, () => {
await ctx.newAlbumUser({ albumId: album1.id, userId: auth.user.id, role: AlbumUserRole.Editor });
const firstAlbumResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1]);
expect(firstAlbumResponse).toHaveLength(2);
expect(firstAlbumResponse).toEqual([
updateSyncAck,
{
@@ -216,7 +206,6 @@ describe(SyncRequestType.AlbumAssetsV1, () => {
}),
type: SyncEntityType.AlbumAssetCreateV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, firstAlbumResponse);
@@ -224,6 +213,7 @@ describe(SyncRequestType.AlbumAssetsV1, () => {
await ctx.newAlbumUser({ albumId: album2.id, userId: auth.user.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1]);
// expect(response).toHaveLength(2);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -233,7 +223,6 @@ describe(SyncRequestType.AlbumAssetsV1, () => {
type: SyncEntityType.AlbumAssetBackfillV1,
},
backfillSyncAck,
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
// ack initial album asset sync
@@ -253,11 +242,10 @@ describe(SyncRequestType.AlbumAssetsV1, () => {
}),
type: SyncEntityType.AlbumAssetCreateV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumAssetsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1])).resolves.toEqual([]);
});
it('should sync asset updates for an album shared with you', async () => {
@@ -270,6 +258,7 @@ describe(SyncRequestType.AlbumAssetsV1, () => {
await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1]);
expect(response).toHaveLength(2);
expect(response).toEqual([
updateSyncAck,
{
@@ -279,7 +268,6 @@ describe(SyncRequestType.AlbumAssetsV1, () => {
}),
type: SyncEntityType.AlbumAssetCreateV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
@@ -292,6 +280,7 @@ describe(SyncRequestType.AlbumAssetsV1, () => {
});
const updateResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1]);
expect(updateResponse).toHaveLength(1);
expect(updateResponse).toEqual([
{
ack: expect.any(String),
@@ -301,7 +290,6 @@ describe(SyncRequestType.AlbumAssetsV1, () => {
}),
type: SyncEntityType.AlbumAssetUpdateV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
});
});

View File

@@ -28,6 +28,7 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -37,11 +38,10 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
},
type: SyncEntityType.AlbumToAssetV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumToAssetsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1])).resolves.toEqual([]);
});
it('should sync album to asset for owned albums', async () => {
@@ -51,6 +51,7 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -60,11 +61,10 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
},
type: SyncEntityType.AlbumToAssetV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumToAssetsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1])).resolves.toEqual([]);
});
it('should detect and sync the album to asset for shared albums', async () => {
@@ -76,6 +76,7 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -85,11 +86,10 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
},
type: SyncEntityType.AlbumToAssetV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumToAssetsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1])).resolves.toEqual([]);
});
it('should not sync album to asset for an album owned by another user', async () => {
@@ -98,7 +98,7 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
const { asset } = await ctx.newAsset({ ownerId: user2.id });
const { album } = await ctx.newAlbum({ ownerId: user2.id });
await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumToAssetsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1])).resolves.toEqual([]);
});
it('should backfill album to assets when a user shares an album with you', async () => {
@@ -114,6 +114,7 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
await ctx.newAlbumAsset({ albumId: album1.id, assetId: album1Asset.id });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -123,7 +124,6 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
},
type: SyncEntityType.AlbumToAssetV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
// ack initial album to asset sync
@@ -148,11 +148,10 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
data: {},
type: SyncEntityType.SyncAckV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumToAssetsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1])).resolves.toEqual([]);
});
it('should detect and sync a deleted album to asset relation', async () => {
@@ -163,6 +162,7 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -172,7 +172,6 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
},
type: SyncEntityType.AlbumToAssetV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
@@ -180,6 +179,7 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
await wait(2);
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
expect(newResponse).toHaveLength(1);
expect(newResponse).toEqual([
{
ack: expect.any(String),
@@ -189,11 +189,10 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
},
type: SyncEntityType.AlbumToAssetDeleteV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumToAssetsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1])).resolves.toEqual([]);
});
it('should detect and sync a deleted album to asset relation when an asset is deleted', async () => {
@@ -204,6 +203,7 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -213,7 +213,6 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
},
type: SyncEntityType.AlbumToAssetV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
@@ -221,6 +220,7 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
await wait(2);
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
expect(newResponse).toHaveLength(1);
expect(newResponse).toEqual([
{
ack: expect.any(String),
@@ -230,11 +230,10 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
},
type: SyncEntityType.AlbumToAssetDeleteV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumToAssetsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1])).resolves.toEqual([]);
});
it('should not sync a deleted album to asset relation when the album is deleted', async () => {
@@ -245,6 +244,7 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -254,12 +254,11 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
},
type: SyncEntityType.AlbumToAssetV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await albumRepo.delete(album.id);
await wait(2);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumToAssetsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1])).resolves.toEqual([]);
});
});

View File

@@ -34,7 +34,6 @@ describe(SyncRequestType.AlbumUsersV1, () => {
}),
type: SyncEntityType.AlbumUserV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
});
@@ -46,6 +45,7 @@ describe(SyncRequestType.AlbumUsersV1, () => {
const { albumUser } = await ctx.newAlbumUser({ albumId: album.id, userId: user1.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -56,11 +56,10 @@ describe(SyncRequestType.AlbumUsersV1, () => {
}),
type: SyncEntityType.AlbumUserV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([]);
});
it('should detect and sync an updated shared user', async () => {
@@ -72,10 +71,11 @@ describe(SyncRequestType.AlbumUsersV1, () => {
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([]);
await albumUserRepo.update({ albumsId: album.id, usersId: user1.id }, { role: AlbumUserRole.Viewer });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
expect(newResponse).toHaveLength(1);
expect(newResponse).toEqual([
{
ack: expect.any(String),
@@ -86,11 +86,10 @@ describe(SyncRequestType.AlbumUsersV1, () => {
}),
type: SyncEntityType.AlbumUserV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([]);
});
it('should detect and sync a deleted shared user', async () => {
@@ -101,8 +100,9 @@ describe(SyncRequestType.AlbumUsersV1, () => {
const { albumUser } = await ctx.newAlbumUser({ albumId: album.id, userId: user1.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
expect(response).toHaveLength(1);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([]);
await albumUserRepo.delete({ albumsId: album.id, usersId: user1.id });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
@@ -115,11 +115,10 @@ describe(SyncRequestType.AlbumUsersV1, () => {
}),
type: SyncEntityType.AlbumUserDeleteV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([]);
});
});
@@ -135,6 +134,7 @@ describe(SyncRequestType.AlbumUsersV1, () => {
});
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -145,11 +145,10 @@ describe(SyncRequestType.AlbumUsersV1, () => {
}),
type: SyncEntityType.AlbumUserV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([]);
});
it('should detect and sync an updated shared user', async () => {
@@ -162,14 +161,10 @@ describe(SyncRequestType.AlbumUsersV1, () => {
await ctx.newAlbumUser({ albumId: album.id, userId: user.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
expect(response).toEqual([
expect.objectContaining({ type: SyncEntityType.AlbumUserV1 }),
expect.objectContaining({ type: SyncEntityType.AlbumUserV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
expect(response).toHaveLength(2);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([]);
await albumUserRepo.update({ albumsId: album.id, usersId: user.id }, { role: AlbumUserRole.Viewer });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
@@ -183,11 +178,10 @@ describe(SyncRequestType.AlbumUsersV1, () => {
}),
type: SyncEntityType.AlbumUserV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([]);
});
it('should detect and sync a deleted shared user', async () => {
@@ -200,14 +194,10 @@ describe(SyncRequestType.AlbumUsersV1, () => {
await ctx.newAlbumUser({ albumId: album.id, userId: user.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
expect(response).toEqual([
expect.objectContaining({ type: SyncEntityType.AlbumUserV1 }),
expect.objectContaining({ type: SyncEntityType.AlbumUserV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
expect(response).toHaveLength(2);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([]);
await albumUserRepo.delete({ albumsId: album.id, usersId: user.id });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
@@ -220,11 +210,10 @@ describe(SyncRequestType.AlbumUsersV1, () => {
}),
type: SyncEntityType.AlbumUserDeleteV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([]);
});
it('should backfill album users when a user shares an album with you', async () => {
@@ -243,6 +232,7 @@ describe(SyncRequestType.AlbumUsersV1, () => {
await ctx.newAlbumUser({ albumId: album1.id, userId: user2.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -253,7 +243,6 @@ describe(SyncRequestType.AlbumUsersV1, () => {
}),
type: SyncEntityType.AlbumUserV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
// ack initial user
@@ -296,11 +285,10 @@ describe(SyncRequestType.AlbumUsersV1, () => {
}),
type: SyncEntityType.AlbumUserV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([]);
});
});
});

View File

@@ -24,6 +24,7 @@ describe(SyncRequestType.AlbumsV1, () => {
const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -34,11 +35,10 @@ describe(SyncRequestType.AlbumsV1, () => {
}),
type: SyncEntityType.AlbumV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumsV1])).resolves.toEqual([]);
});
it('should detect and sync a new album', async () => {
@@ -46,6 +46,7 @@ describe(SyncRequestType.AlbumsV1, () => {
const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -54,11 +55,10 @@ describe(SyncRequestType.AlbumsV1, () => {
}),
type: SyncEntityType.AlbumV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumsV1])).resolves.toEqual([]);
});
it('should detect and sync an album delete', async () => {
@@ -67,6 +67,7 @@ describe(SyncRequestType.AlbumsV1, () => {
const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -75,12 +76,12 @@ describe(SyncRequestType.AlbumsV1, () => {
}),
type: SyncEntityType.AlbumV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await albumRepo.delete(album.id);
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
expect(newResponse).toHaveLength(1);
expect(newResponse).toEqual([
{
ack: expect.any(String),
@@ -89,11 +90,10 @@ describe(SyncRequestType.AlbumsV1, () => {
},
type: SyncEntityType.AlbumDeleteV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumsV1])).resolves.toEqual([]);
});
describe('shared albums', () => {
@@ -104,17 +104,17 @@ describe(SyncRequestType.AlbumsV1, () => {
await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
data: expect.objectContaining({ id: album.id }),
type: SyncEntityType.AlbumV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumsV1])).resolves.toEqual([]);
});
it('should detect and sync an album share (share before sync)', async () => {
@@ -124,17 +124,17 @@ describe(SyncRequestType.AlbumsV1, () => {
await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
data: expect.objectContaining({ id: album.id }),
type: SyncEntityType.AlbumV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumsV1])).resolves.toEqual([]);
});
it('should detect and sync an album share (share after sync)', async () => {
@@ -150,24 +150,23 @@ describe(SyncRequestType.AlbumsV1, () => {
data: expect.objectContaining({ id: userAlbum.id }),
type: SyncEntityType.AlbumV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.newAlbumUser({ userId: auth.user.id, albumId: user2Album.id, role: AlbumUserRole.Editor });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
expect(newResponse).toHaveLength(1);
expect(newResponse).toEqual([
{
ack: expect.any(String),
data: expect.objectContaining({ id: user2Album.id }),
type: SyncEntityType.AlbumV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumsV1])).resolves.toEqual([]);
});
it('should detect and sync an album delete', async () => {
@@ -178,27 +177,24 @@ describe(SyncRequestType.AlbumsV1, () => {
await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
expect(response).toEqual([
expect.objectContaining({ type: SyncEntityType.AlbumV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
expect(response).toHaveLength(1);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumsV1])).resolves.toEqual([]);
await albumRepo.delete(album.id);
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
expect(newResponse).toHaveLength(1);
expect(newResponse).toEqual([
{
ack: expect.any(String),
data: { albumId: album.id },
type: SyncEntityType.AlbumDeleteV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumsV1])).resolves.toEqual([]);
});
it('should detect and sync an album unshare as an album delete', async () => {
@@ -209,13 +205,10 @@ describe(SyncRequestType.AlbumsV1, () => {
await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
expect(response).toEqual([
expect.objectContaining({ type: SyncEntityType.AlbumV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
expect(response).toHaveLength(1);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumsV1])).resolves.toEqual([]);
await albumUserRepo.delete({ albumsId: album.id, usersId: auth.user.id });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
@@ -225,11 +218,10 @@ describe(SyncRequestType.AlbumsV1, () => {
data: { albumId: album.id },
type: SyncEntityType.AlbumDeleteV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumsV1])).resolves.toEqual([]);
});
});
});

View File

@@ -24,6 +24,7 @@ describe(SyncRequestType.AssetExifsV1, () => {
await ctx.newExif({ assetId: asset.id, make: 'Canon' });
const response = await ctx.syncStream(auth, [SyncRequestType.AssetExifsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -56,11 +57,10 @@ describe(SyncRequestType.AssetExifsV1, () => {
},
type: SyncEntityType.AssetExifV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetExifsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AssetExifsV1])).resolves.toEqual([]);
});
it('should only sync asset exif for own user', async () => {
@@ -72,10 +72,7 @@ describe(SyncRequestType.AssetExifsV1, () => {
const { session } = await ctx.newSession({ userId: user2.id });
const auth2 = factory.auth({ session, user: user2 });
await expect(ctx.syncStream(auth2, [SyncRequestType.AssetExifsV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.AssetExifV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetExifsV1]);
await expect(ctx.syncStream(auth2, [SyncRequestType.AssetExifsV1])).resolves.toHaveLength(1);
await expect(ctx.syncStream(auth, [SyncRequestType.AssetExifsV1])).resolves.toHaveLength(0);
});
});

View File

@@ -26,6 +26,7 @@ describe(SyncEntityType.AssetFaceV1, () => {
const { assetFace } = await ctx.newAssetFace({ assetId: asset.id, personId: person.id });
const response = await ctx.syncStream(auth, [SyncRequestType.AssetFacesV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -43,11 +44,10 @@ describe(SyncEntityType.AssetFaceV1, () => {
}),
type: 'AssetFaceV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetFacesV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AssetFacesV1])).resolves.toEqual([]);
});
it('should detect and sync a deleted asset face', async () => {
@@ -58,6 +58,7 @@ describe(SyncEntityType.AssetFaceV1, () => {
await personRepo.deleteAssetFace(assetFace.id);
const response = await ctx.syncStream(auth, [SyncRequestType.AssetFacesV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -66,11 +67,10 @@ describe(SyncEntityType.AssetFaceV1, () => {
},
type: 'AssetFaceDeleteV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetFacesV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AssetFacesV1])).resolves.toEqual([]);
});
it('should not sync an asset face or asset face delete for an unrelated user', async () => {
@@ -82,18 +82,11 @@ describe(SyncEntityType.AssetFaceV1, () => {
const { assetFace } = await ctx.newAssetFace({ assetId: asset.id });
const auth2 = factory.auth({ session, user: user2 });
expect(await ctx.syncStream(auth2, [SyncRequestType.AssetFacesV1])).toEqual([
expect.objectContaining({ type: SyncEntityType.AssetFaceV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetFacesV1]);
expect(await ctx.syncStream(auth2, [SyncRequestType.AssetFacesV1])).toHaveLength(1);
expect(await ctx.syncStream(auth, [SyncRequestType.AssetFacesV1])).toHaveLength(0);
await personRepo.deleteAssetFace(assetFace.id);
expect(await ctx.syncStream(auth2, [SyncRequestType.AssetFacesV1])).toEqual([
expect.objectContaining({ type: SyncEntityType.AssetFaceDeleteV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetFacesV1]);
expect(await ctx.syncStream(auth2, [SyncRequestType.AssetFacesV1])).toHaveLength(1);
expect(await ctx.syncStream(auth, [SyncRequestType.AssetFacesV1])).toHaveLength(0);
});
});

View File

@@ -26,6 +26,7 @@ describe(SyncEntityType.AssetMetadataV1, () => {
await assetRepo.upsertMetadata(asset.id, [{ key: AssetMetadataKey.MobileApp, value: { iCloudId: 'abc123' } }]);
const response = await ctx.syncStream(auth, [SyncRequestType.AssetMetadataV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -36,11 +37,10 @@ describe(SyncEntityType.AssetMetadataV1, () => {
},
type: 'AssetMetadataV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetMetadataV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AssetMetadataV1])).resolves.toEqual([]);
});
it('should update asset metadata', async () => {
@@ -51,6 +51,7 @@ describe(SyncEntityType.AssetMetadataV1, () => {
await assetRepo.upsertMetadata(asset.id, [{ key: AssetMetadataKey.MobileApp, value: { iCloudId: 'abc123' } }]);
const response = await ctx.syncStream(auth, [SyncRequestType.AssetMetadataV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -61,7 +62,6 @@ describe(SyncEntityType.AssetMetadataV1, () => {
},
type: 'AssetMetadataV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
@@ -79,11 +79,10 @@ describe(SyncEntityType.AssetMetadataV1, () => {
},
type: 'AssetMetadataV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, updatedResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetMetadataV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AssetMetadataV1])).resolves.toEqual([]);
});
});
@@ -96,6 +95,7 @@ describe(SyncEntityType.AssetMetadataDeleteV1, () => {
await assetRepo.upsertMetadata(asset.id, [{ key: AssetMetadataKey.MobileApp, value: { iCloudId: 'abc123' } }]);
const response = await ctx.syncStream(auth, [SyncRequestType.AssetMetadataV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -106,7 +106,6 @@ describe(SyncEntityType.AssetMetadataDeleteV1, () => {
},
type: 'AssetMetadataV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
@@ -122,7 +121,6 @@ describe(SyncEntityType.AssetMetadataDeleteV1, () => {
},
type: 'AssetMetadataDeleteV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
});
});

View File

@@ -40,6 +40,7 @@ describe(SyncEntityType.AssetV1, () => {
});
const response = await ctx.syncStream(auth, [SyncRequestType.AssetsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -63,11 +64,10 @@ describe(SyncEntityType.AssetV1, () => {
},
type: 'AssetV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AssetsV1])).resolves.toEqual([]);
});
it('should detect and sync a deleted asset', async () => {
@@ -77,6 +77,7 @@ describe(SyncEntityType.AssetV1, () => {
await assetRepo.remove(asset);
const response = await ctx.syncStream(auth, [SyncRequestType.AssetsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -85,11 +86,10 @@ describe(SyncEntityType.AssetV1, () => {
},
type: 'AssetDeleteV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AssetsV1])).resolves.toEqual([]);
});
it('should not sync an asset or asset delete for an unrelated user', async () => {
@@ -100,17 +100,11 @@ describe(SyncEntityType.AssetV1, () => {
const { asset } = await ctx.newAsset({ ownerId: user2.id });
const auth2 = factory.auth({ session, user: user2 });
expect(await ctx.syncStream(auth2, [SyncRequestType.AssetsV1])).toEqual([
expect.objectContaining({ type: SyncEntityType.AssetV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetsV1]);
expect(await ctx.syncStream(auth2, [SyncRequestType.AssetsV1])).toHaveLength(1);
expect(await ctx.syncStream(auth, [SyncRequestType.AssetsV1])).toHaveLength(0);
await assetRepo.remove(asset);
expect(await ctx.syncStream(auth2, [SyncRequestType.AssetsV1])).toEqual([
expect.objectContaining({ type: SyncEntityType.AssetDeleteV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetsV1]);
expect(await ctx.syncStream(auth2, [SyncRequestType.AssetsV1])).toHaveLength(1);
expect(await ctx.syncStream(auth, [SyncRequestType.AssetsV1])).toHaveLength(0);
});
});

View File

@@ -22,6 +22,7 @@ describe(SyncEntityType.AuthUserV1, () => {
const { auth, user, ctx } = await setup(await getKyselyDB());
const response = await ctx.syncStream(auth, [SyncRequestType.AuthUsersV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -42,11 +43,10 @@ describe(SyncEntityType.AuthUserV1, () => {
},
type: 'AuthUserV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AuthUsersV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AuthUsersV1])).resolves.toEqual([]);
});
it('should sync a change and then another change to that same user', async () => {
@@ -55,6 +55,7 @@ describe(SyncEntityType.AuthUserV1, () => {
const userRepo = ctx.get(UserRepository);
const response = await ctx.syncStream(auth, [SyncRequestType.AuthUsersV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -64,7 +65,6 @@ describe(SyncEntityType.AuthUserV1, () => {
}),
type: 'AuthUserV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
@@ -72,6 +72,7 @@ describe(SyncEntityType.AuthUserV1, () => {
await userRepo.update(user.id, { isAdmin: true });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AuthUsersV1]);
expect(newResponse).toHaveLength(1);
expect(newResponse).toEqual([
{
ack: expect.any(String),
@@ -81,7 +82,6 @@ describe(SyncEntityType.AuthUserV1, () => {
}),
type: 'AuthUserV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
});
});

View File

@@ -1,60 +0,0 @@
import { Kysely } from 'kysely';
import { DateTime } from 'luxon';
import { SyncEntityType, SyncRequestType } from 'src/enum';
import { SyncCheckpointRepository } from 'src/repositories/sync-checkpoint.repository';
import { DB } from 'src/schema';
import { toAck } from 'src/utils/sync';
import { SyncTestContext } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
import { v7 } from 'uuid';
let defaultDatabase: Kysely<DB>;
const setup = async (db?: Kysely<DB>) => {
const ctx = new SyncTestContext(db || defaultDatabase);
const { auth, user, session } = await ctx.newSyncAuthUser();
return { auth, user, session, ctx };
};
beforeAll(async () => {
defaultDatabase = await getKyselyDB();
});
describe(SyncEntityType.SyncCompleteV1, () => {
it('should work', async () => {
const { auth, ctx } = await setup();
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetsV1]);
});
it('should detect an old checkpoint and send back a reset', async () => {
const { auth, session, ctx } = await setup();
const updateId = v7({ msecs: DateTime.now().minus({ days: 60 }).toMillis() });
await ctx.get(SyncCheckpointRepository).upsertAll([
{
type: SyncEntityType.SyncCompleteV1,
sessionId: session.id,
ack: toAck({ type: SyncEntityType.SyncCompleteV1, updateId }),
},
]);
const response = await ctx.syncStream(auth, [SyncRequestType.AssetsV1]);
expect(response).toEqual([{ type: SyncEntityType.SyncResetV1, data: {}, ack: 'SyncResetV1|reset' }]);
});
it('should not send back a reset if the checkpoint is recent', async () => {
const { auth, session, ctx } = await setup();
const updateId = v7({ msecs: DateTime.now().minus({ days: 7 }).toMillis() });
await ctx.get(SyncCheckpointRepository).upsertAll([
{
type: SyncEntityType.SyncCompleteV1,
sessionId: session.id,
ack: toAck({ type: SyncEntityType.SyncCompleteV1, updateId }),
},
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetsV1]);
});
});
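
The deleted sync-complete spec above builds stale checkpoints by minting a UUIDv7 at a chosen timestamp: per RFC 9562, the first 48 bits of a v7 id are the unix timestamp in milliseconds, which is what lets the server judge how old a checkpoint is. A standalone illustration (the extraction helper below is mine, not repository code):

import { v7 } from 'uuid';

// The first 12 hex digits of a UUIDv7 encode unix milliseconds.
const uuidV7ToMillis = (id: string): number => Number.parseInt(id.replaceAll('-', '').slice(0, 12), 16);

const sixtyDaysAgo = Date.now() - 60 * 24 * 60 * 60 * 1000;
const stale = v7({ msecs: sixtyDaysAgo });
const ageInDays = (Date.now() - uuidV7ToMillis(stale)) / (24 * 60 * 60 * 1000);
console.log(ageInDays >= 59); // true: old enough for the stream to answer with SyncResetV1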

View File

@@ -25,6 +25,7 @@ describe(SyncEntityType.MemoryToAssetV1, () => {
await ctx.newMemoryAsset({ memoryId: memory.id, assetId: asset.id });
const response = await ctx.syncStream(auth, [SyncRequestType.MemoryToAssetsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -34,11 +35,10 @@ describe(SyncEntityType.MemoryToAssetV1, () => {
},
type: 'MemoryToAssetV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoryToAssetsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.MemoryToAssetsV1])).resolves.toEqual([]);
});
it('should detect and sync a deleted memory to asset relation', async () => {
@@ -50,6 +50,7 @@ describe(SyncEntityType.MemoryToAssetV1, () => {
await memoryRepo.removeAssetIds(memory.id, [asset.id]);
const response = await ctx.syncStream(auth, [SyncRequestType.MemoryToAssetsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -59,11 +60,10 @@ describe(SyncEntityType.MemoryToAssetV1, () => {
},
type: 'MemoryToAssetDeleteV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoryToAssetsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.MemoryToAssetsV1])).resolves.toEqual([]);
});
it('should not sync a memory to asset relation or delete for an unrelated user', async () => {
@@ -74,18 +74,11 @@ describe(SyncEntityType.MemoryToAssetV1, () => {
const { memory } = await ctx.newMemory({ ownerId: user2.id });
await ctx.newMemoryAsset({ memoryId: memory.id, assetId: asset.id });
expect(await ctx.syncStream(auth2, [SyncRequestType.MemoryToAssetsV1])).toEqual([
expect.objectContaining({ type: SyncEntityType.MemoryToAssetV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoryToAssetsV1]);
expect(await ctx.syncStream(auth, [SyncRequestType.MemoryToAssetsV1])).toHaveLength(0);
expect(await ctx.syncStream(auth2, [SyncRequestType.MemoryToAssetsV1])).toHaveLength(1);
await memoryRepo.removeAssetIds(memory.id, [asset.id]);
expect(await ctx.syncStream(auth2, [SyncRequestType.MemoryToAssetsV1])).toEqual([
expect.objectContaining({ type: SyncEntityType.MemoryToAssetDeleteV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoryToAssetsV1]);
expect(await ctx.syncStream(auth, [SyncRequestType.MemoryToAssetsV1])).toHaveLength(0);
expect(await ctx.syncStream(auth2, [SyncRequestType.MemoryToAssetsV1])).toHaveLength(1);
});
});

View File

@@ -23,6 +23,7 @@ describe(SyncEntityType.MemoryV1, () => {
const { memory } = await ctx.newMemory({ ownerId: user1.id });
const response = await ctx.syncStream(auth, [SyncRequestType.MemoriesV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -42,11 +43,10 @@ describe(SyncEntityType.MemoryV1, () => {
},
type: 'MemoryV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoriesV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.MemoriesV1])).resolves.toEqual([]);
});
it('should detect and sync a deleted memory', async () => {
@@ -56,6 +56,7 @@ describe(SyncEntityType.MemoryV1, () => {
await memoryRepo.delete(memory.id);
const response = await ctx.syncStream(auth, [SyncRequestType.MemoriesV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -64,11 +65,10 @@ describe(SyncEntityType.MemoryV1, () => {
},
type: 'MemoryDeleteV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoriesV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.MemoriesV1])).resolves.toEqual([]);
});
it('should sync a memory and then an update to that same memory', async () => {
@@ -77,29 +77,29 @@ describe(SyncEntityType.MemoryV1, () => {
const { memory } = await ctx.newMemory({ ownerId: user.id });
const response = await ctx.syncStream(auth, [SyncRequestType.MemoriesV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
data: expect.objectContaining({ id: memory.id }),
type: 'MemoryV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await memoryRepo.update(memory.id, { seenAt: new Date() });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.MemoriesV1]);
expect(newResponse).toHaveLength(1);
expect(newResponse).toEqual([
{
ack: expect.any(String),
data: expect.objectContaining({ id: memory.id }),
type: 'MemoryV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoriesV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.MemoriesV1])).resolves.toEqual([]);
});
it('should not sync a memory or a memory delete for an unrelated user', async () => {
@@ -108,8 +108,8 @@ describe(SyncEntityType.MemoryV1, () => {
const { user: user2 } = await ctx.newUser();
const { memory } = await ctx.newMemory({ ownerId: user2.id });
await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoriesV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.MemoriesV1])).resolves.toEqual([]);
await memoryRepo.delete(memory.id);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoriesV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.MemoriesV1])).resolves.toEqual([]);
});
});

View File

@@ -26,6 +26,7 @@ describe(SyncRequestType.PartnerAssetExifsV1, () => {
await ctx.newExif({ assetId: asset.id, make: 'Canon' });
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -58,11 +59,10 @@ describe(SyncRequestType.PartnerAssetExifsV1, () => {
},
type: SyncEntityType.PartnerAssetExifV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetExifsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1])).resolves.toEqual([]);
});
it('should not sync partner asset exif for own user', async () => {
@@ -72,11 +72,8 @@ describe(SyncRequestType.PartnerAssetExifsV1, () => {
const { asset } = await ctx.newAsset({ ownerId: auth.user.id });
await ctx.newExif({ assetId: asset.id, make: 'Canon' });
await expect(ctx.syncStream(auth, [SyncRequestType.AssetExifsV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.AssetExifV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetExifsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AssetExifsV1])).resolves.toHaveLength(1);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1])).resolves.toHaveLength(0);
});
it('should not sync partner asset exif for unrelated user', async () => {
@@ -89,11 +86,8 @@ describe(SyncRequestType.PartnerAssetExifsV1, () => {
const { session } = await ctx.newSession({ userId: user3.id });
const authUser3 = factory.auth({ session, user: user3 });
await expect(ctx.syncStream(authUser3, [SyncRequestType.AssetExifsV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.AssetExifV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetExifsV1]);
await expect(ctx.syncStream(authUser3, [SyncRequestType.AssetExifsV1])).resolves.toHaveLength(1);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1])).resolves.toHaveLength(0);
});
it('should backfill partner asset exif when a partner shared their library with you', async () => {
@@ -108,6 +102,7 @@ describe(SyncRequestType.PartnerAssetExifsV1, () => {
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual(
expect.arrayContaining([
{
@@ -117,7 +112,6 @@ describe(SyncRequestType.PartnerAssetExifsV1, () => {
}),
type: SyncEntityType.PartnerAssetExifV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]),
);
@@ -125,6 +119,7 @@ describe(SyncRequestType.PartnerAssetExifsV1, () => {
await ctx.newPartner({ sharedById: user3.id, sharedWithId: auth.user.id });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1]);
expect(newResponse).toHaveLength(2);
expect(newResponse).toEqual([
{
ack: expect.any(String),
@@ -138,11 +133,10 @@ describe(SyncRequestType.PartnerAssetExifsV1, () => {
data: {},
type: SyncEntityType.SyncAckV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetExifsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1])).resolves.toEqual([]);
});
it('should handle partners with user ids lower than a uuidv7', async () => {
@@ -157,6 +151,7 @@ describe(SyncRequestType.PartnerAssetExifsV1, () => {
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -165,15 +160,15 @@ describe(SyncRequestType.PartnerAssetExifsV1, () => {
}),
type: SyncEntityType.PartnerAssetExifV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
// This checks that our ack upsert is correct
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetExifsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1])).resolves.toEqual([]);
await ctx.newPartner({ sharedById: user3.id, sharedWithId: auth.user.id });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1]);
expect(newResponse).toHaveLength(2);
expect(newResponse).toEqual([
{
ack: expect.stringMatching(new RegExp(`${SyncEntityType.PartnerAssetExifBackfillV1}\\|.+?\\|.+`)),
@@ -187,11 +182,10 @@ describe(SyncRequestType.PartnerAssetExifsV1, () => {
data: {},
type: SyncEntityType.SyncAckV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetExifsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1])).resolves.toEqual([]);
});
it('should only backfill partner assets created prior to the current partner asset checkpoint', async () => {
@@ -209,6 +203,7 @@ describe(SyncRequestType.PartnerAssetExifsV1, () => {
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -217,13 +212,13 @@ describe(SyncRequestType.PartnerAssetExifsV1, () => {
}),
type: SyncEntityType.PartnerAssetExifV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.newPartner({ sharedById: user3.id, sharedWithId: auth.user.id });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1]);
expect(newResponse).toHaveLength(3);
expect(newResponse).toEqual([
{
ack: expect.stringMatching(new RegExp(`${SyncEntityType.PartnerAssetExifBackfillV1}\\|.+?\\|.+`)),
@@ -244,10 +239,9 @@ describe(SyncRequestType.PartnerAssetExifsV1, () => {
}),
type: SyncEntityType.PartnerAssetExifV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetExifsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1])).resolves.toEqual([]);
});
});

View File

@@ -46,6 +46,7 @@ describe(SyncRequestType.PartnerAssetsV1, () => {
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -69,11 +70,10 @@ describe(SyncRequestType.PartnerAssetsV1, () => {
},
type: SyncEntityType.PartnerAssetV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toEqual([]);
});
it('should detect and sync a deleted partner asset', async () => {
@@ -86,6 +86,7 @@ describe(SyncRequestType.PartnerAssetsV1, () => {
await assetRepo.remove(asset);
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -94,11 +95,10 @@ describe(SyncRequestType.PartnerAssetsV1, () => {
},
type: SyncEntityType.PartnerAssetDeleteV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toEqual([]);
});
it('should not sync a deleted partner asset due to a user delete', async () => {
@@ -109,7 +109,7 @@ describe(SyncRequestType.PartnerAssetsV1, () => {
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
await ctx.newAsset({ ownerId: user2.id });
await userRepo.delete({ id: user2.id }, true);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toEqual([]);
});
it('should not sync a deleted partner asset due to a partner delete (unshare)', async () => {
@@ -119,12 +119,9 @@ describe(SyncRequestType.PartnerAssetsV1, () => {
const { user: user2 } = await ctx.newUser();
await ctx.newAsset({ ownerId: user2.id });
const { partner } = await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.PartnerAssetV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(1);
await partnerRepo.remove(partner);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toEqual([]);
});
it('should not sync an asset or asset delete for own user', async () => {
@@ -135,19 +132,13 @@ describe(SyncRequestType.PartnerAssetsV1, () => {
const { asset } = await ctx.newAsset({ ownerId: auth.user.id });
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
await expect(ctx.syncStream(auth, [SyncRequestType.AssetsV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.AssetV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(0);
await assetRepo.remove(asset);
await expect(ctx.syncStream(auth, [SyncRequestType.AssetsV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.AssetDeleteV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(0);
});
it('should not sync an asset or asset delete for unrelated user', async () => {
@@ -159,19 +150,13 @@ describe(SyncRequestType.PartnerAssetsV1, () => {
const { asset } = await ctx.newAsset({ ownerId: user2.id });
const auth2 = factory.auth({ session, user: user2 });
await expect(ctx.syncStream(auth2, [SyncRequestType.AssetsV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.AssetV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
await expect(ctx.syncStream(auth2, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(0);
await assetRepo.remove(asset);
await expect(ctx.syncStream(auth2, [SyncRequestType.AssetsV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.AssetDeleteV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
await expect(ctx.syncStream(auth2, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(0);
});
it('should backfill partner assets when a partner shared their library with you', async () => {
@@ -185,6 +170,7 @@ describe(SyncRequestType.PartnerAssetsV1, () => {
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -193,13 +179,13 @@ describe(SyncRequestType.PartnerAssetsV1, () => {
}),
type: SyncEntityType.PartnerAssetV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.newPartner({ sharedById: user3.id, sharedWithId: auth.user.id });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1]);
expect(newResponse).toHaveLength(2);
expect(newResponse).toEqual([
{
ack: expect.any(String),
@@ -213,11 +199,10 @@ describe(SyncRequestType.PartnerAssetsV1, () => {
data: {},
type: SyncEntityType.SyncAckV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toEqual([]);
});
it('should only backfill partner assets created prior to the current partner asset checkpoint', async () => {
@@ -233,6 +218,7 @@ describe(SyncRequestType.PartnerAssetsV1, () => {
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -241,12 +227,12 @@ describe(SyncRequestType.PartnerAssetsV1, () => {
}),
type: SyncEntityType.PartnerAssetV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.newPartner({ sharedById: user3.id, sharedWithId: auth.user.id });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1]);
expect(newResponse).toHaveLength(3);
expect(newResponse).toEqual([
{
ack: expect.any(String),
@@ -267,10 +253,9 @@ describe(SyncRequestType.PartnerAssetsV1, () => {
}),
type: SyncEntityType.PartnerAssetV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toEqual([]);
});
});
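
The backfill assertions above share one shape: rows that predate the current checkpoint replay as PartnerAssetBackfillV1 items, a SyncAckV1 closes the round, and anything newer follows as an ordinary PartnerAssetV1 item. A rough type for that round, reconstructed from the assertions rather than the server's own definitions:

type SyncItem<T extends string> = { type: T; ack: string; data: Record<string, unknown> };

// Zero or more replayed rows, closed by a SyncAckV1 checkpoint marker;
// newer rows then arrive as regular PartnerAssetV1 items after the round.
type BackfillRound = [...SyncItem<'PartnerAssetBackfillV1'>[], SyncItem<'SyncAckV1'>];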

View File

@@ -29,6 +29,7 @@ describe(SyncRequestType.PartnerStacksV1, () => {
const { stack } = await ctx.newStack({ ownerId: user2.id }, [asset.id]);
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -41,11 +42,10 @@ describe(SyncRequestType.PartnerStacksV1, () => {
},
type: SyncEntityType.PartnerStackV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1])).resolves.toEqual([]);
});
it('should detect and sync a deleted partner stack', async () => {
@@ -58,6 +58,7 @@ describe(SyncRequestType.PartnerStacksV1, () => {
await stackRepo.delete(stack.id);
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.stringContaining('PartnerStackDeleteV1'),
@@ -66,11 +67,10 @@ describe(SyncRequestType.PartnerStacksV1, () => {
},
type: SyncEntityType.PartnerStackDeleteV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1])).resolves.toEqual([]);
});
it('should not sync a deleted partner stack due to a user delete', async () => {
@@ -81,7 +81,7 @@ describe(SyncRequestType.PartnerStacksV1, () => {
const { asset } = await ctx.newAsset({ ownerId: user2.id });
await ctx.newStack({ ownerId: user2.id }, [asset.id]);
await userRepo.delete({ id: user2.id }, true);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1])).resolves.toEqual([]);
});
it('should not sync a deleted partner stack due to a partner delete (unshare)', async () => {
@@ -91,12 +91,9 @@ describe(SyncRequestType.PartnerStacksV1, () => {
const { asset } = await ctx.newAsset({ ownerId: user2.id });
await ctx.newStack({ ownerId: user2.id }, [asset.id]);
const { partner } = await ctx.newPartner({ sharedById: user2.id, sharedWithId: user.id });
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.PartnerStackV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1])).resolves.toHaveLength(1);
await partnerRepo.remove(partner);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1])).resolves.toEqual([]);
});
it('should not sync a stack or stack delete for own user', async () => {
@@ -106,17 +103,11 @@ describe(SyncRequestType.PartnerStacksV1, () => {
const { asset } = await ctx.newAsset({ ownerId: user.id });
const { stack } = await ctx.newStack({ ownerId: user.id }, [asset.id]);
await ctx.newPartner({ sharedById: user2.id, sharedWithId: user.id });
await expect(ctx.syncStream(auth, [SyncRequestType.StacksV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.StackV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.StacksV1])).resolves.toHaveLength(1);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1])).resolves.toHaveLength(0);
await stackRepo.delete(stack.id);
await expect(ctx.syncStream(auth, [SyncRequestType.StacksV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.StackDeleteV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.StacksV1])).resolves.toHaveLength(1);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1])).resolves.toHaveLength(0);
});
it('should not sync a stack or stack delete for unrelated user', async () => {
@@ -128,19 +119,13 @@ describe(SyncRequestType.PartnerStacksV1, () => {
const { stack } = await ctx.newStack({ ownerId: user2.id }, [asset.id]);
const auth2 = factory.auth({ session, user: user2 });
await expect(ctx.syncStream(auth2, [SyncRequestType.StacksV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.StackV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
await expect(ctx.syncStream(auth2, [SyncRequestType.StacksV1])).resolves.toHaveLength(1);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1])).resolves.toHaveLength(0);
await stackRepo.delete(stack.id);
await expect(ctx.syncStream(auth2, [SyncRequestType.StacksV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.StackDeleteV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
await expect(ctx.syncStream(auth2, [SyncRequestType.StacksV1])).resolves.toHaveLength(1);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1])).resolves.toHaveLength(0);
});
it('should backfill partner stacks when a partner shared their library with you', async () => {
@@ -155,6 +140,7 @@ describe(SyncRequestType.PartnerStacksV1, () => {
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.stringContaining('PartnerStackV1'),
@@ -163,12 +149,12 @@ describe(SyncRequestType.PartnerStacksV1, () => {
}),
type: SyncEntityType.PartnerStackV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.newPartner({ sharedById: user3.id, sharedWithId: user.id });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1]);
expect(newResponse).toHaveLength(2);
expect(newResponse).toEqual([
{
ack: expect.stringContaining(SyncEntityType.PartnerStackBackfillV1),
@@ -182,11 +168,10 @@ describe(SyncRequestType.PartnerStacksV1, () => {
data: {},
type: SyncEntityType.SyncAckV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1])).resolves.toEqual([]);
});
it('should only backfill partner stacks created prior to the current partner stack checkpoint', async () => {
@@ -204,6 +189,7 @@ describe(SyncRequestType.PartnerStacksV1, () => {
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.stringContaining(SyncEntityType.PartnerStackV1),
@@ -212,12 +198,12 @@ describe(SyncRequestType.PartnerStacksV1, () => {
}),
type: SyncEntityType.PartnerStackV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.newPartner({ sharedById: user3.id, sharedWithId: auth.user.id });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1]);
expect(newResponse).toHaveLength(3);
expect(newResponse).toEqual([
{
ack: expect.any(String),
@@ -238,10 +224,9 @@ describe(SyncRequestType.PartnerStacksV1, () => {
}),
type: SyncEntityType.PartnerStackV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1])).resolves.toEqual([]);
});
});

View File

@@ -26,6 +26,7 @@ describe(SyncEntityType.PartnerV1, () => {
const { partner } = await ctx.newPartner({ sharedById: user2.id, sharedWithId: user1.id });
const response = await ctx.syncStream(auth, [SyncRequestType.PartnersV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -36,11 +37,10 @@ describe(SyncEntityType.PartnerV1, () => {
},
type: 'PartnerV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnersV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnersV1])).resolves.toEqual([]);
});
it('should detect and sync a deleted partner', async () => {
@@ -53,20 +53,22 @@ describe(SyncEntityType.PartnerV1, () => {
await partnerRepo.remove(partner);
const response = await ctx.syncStream(auth, [SyncRequestType.PartnersV1]);
expect(response).toEqual([
{
ack: expect.any(String),
data: {
sharedById: partner.sharedById,
sharedWithId: partner.sharedWithId,
expect(response).toHaveLength(1);
expect(response).toEqual(
expect.arrayContaining([
{
ack: expect.any(String),
data: {
sharedById: partner.sharedById,
sharedWithId: partner.sharedWithId,
},
type: 'PartnerDeleteV1',
},
type: 'PartnerDeleteV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
]),
);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnersV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnersV1])).resolves.toEqual([]);
});
it('should detect and sync a partner share both to and from another user', async () => {
@@ -77,30 +79,32 @@ describe(SyncEntityType.PartnerV1, () => {
const { partner: partner2 } = await ctx.newPartner({ sharedById: user1.id, sharedWithId: user2.id });
const response = await ctx.syncStream(auth, [SyncRequestType.PartnersV1]);
expect(response).toEqual([
{
ack: expect.any(String),
data: {
inTimeline: partner1.inTimeline,
sharedById: partner1.sharedById,
sharedWithId: partner1.sharedWithId,
expect(response).toHaveLength(2);
expect(response).toEqual(
expect.arrayContaining([
{
ack: expect.any(String),
data: {
inTimeline: partner1.inTimeline,
sharedById: partner1.sharedById,
sharedWithId: partner1.sharedWithId,
},
type: 'PartnerV1',
},
type: 'PartnerV1',
},
{
ack: expect.any(String),
data: {
inTimeline: partner2.inTimeline,
sharedById: partner2.sharedById,
sharedWithId: partner2.sharedWithId,
{
ack: expect.any(String),
data: {
inTimeline: partner2.inTimeline,
sharedById: partner2.sharedById,
sharedWithId: partner2.sharedWithId,
},
type: 'PartnerV1',
},
type: 'PartnerV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
]),
);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnersV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnersV1])).resolves.toEqual([]);
});
it('should sync a partner and then an update to that same partner', async () => {
@@ -112,6 +116,7 @@ describe(SyncEntityType.PartnerV1, () => {
const { partner } = await ctx.newPartner({ sharedById: user2.id, sharedWithId: user1.id });
const response = await ctx.syncStream(auth, [SyncRequestType.PartnersV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -122,7 +127,6 @@ describe(SyncEntityType.PartnerV1, () => {
},
type: 'PartnerV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
@@ -133,6 +137,7 @@ describe(SyncEntityType.PartnerV1, () => {
);
const newResponse = await ctx.syncStream(auth, [SyncRequestType.PartnersV1]);
expect(newResponse).toHaveLength(1);
expect(newResponse).toEqual([
{
ack: expect.any(String),
@@ -143,11 +148,10 @@ describe(SyncEntityType.PartnerV1, () => {
},
type: 'PartnerV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnersV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnersV1])).resolves.toEqual([]);
});
it('should not sync a partner or partner delete for an unrelated user', async () => {
@@ -159,9 +163,9 @@ describe(SyncEntityType.PartnerV1, () => {
const { user: user3 } = await ctx.newUser();
const { partner } = await ctx.newPartner({ sharedById: user2.id, sharedWithId: user3.id });
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnersV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnersV1])).resolves.toEqual([]);
await partnerRepo.remove(partner);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnersV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnersV1])).resolves.toEqual([]);
});
it('should not sync a partner delete after a user is deleted', async () => {
@@ -173,6 +177,6 @@ describe(SyncEntityType.PartnerV1, () => {
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
await userRepo.delete({ id: user2.id }, true);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnersV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnersV1])).resolves.toEqual([]);
});
});

View File

@@ -24,6 +24,7 @@ describe(SyncEntityType.PersonV1, () => {
const { person } = await ctx.newPerson({ ownerId: auth.user.id });
const response = await ctx.syncStream(auth, [SyncRequestType.PeopleV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -39,11 +40,10 @@ describe(SyncEntityType.PersonV1, () => {
}),
type: 'PersonV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PeopleV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.PeopleV1])).resolves.toEqual([]);
});
it('should detect and sync a deleted person', async () => {
@@ -53,6 +53,7 @@ describe(SyncEntityType.PersonV1, () => {
await personRepo.delete([person.id]);
const response = await ctx.syncStream(auth, [SyncRequestType.PeopleV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -61,11 +62,10 @@ describe(SyncEntityType.PersonV1, () => {
},
type: 'PersonDeleteV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PeopleV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.PeopleV1])).resolves.toEqual([]);
});
it('should not sync a person or person delete for an unrelated user', async () => {
@@ -76,18 +76,11 @@ describe(SyncEntityType.PersonV1, () => {
const { person } = await ctx.newPerson({ ownerId: user2.id });
const auth2 = factory.auth({ session, user: user2 });
expect(await ctx.syncStream(auth2, [SyncRequestType.PeopleV1])).toEqual([
expect.objectContaining({ type: SyncEntityType.PersonV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PeopleV1]);
expect(await ctx.syncStream(auth2, [SyncRequestType.PeopleV1])).toHaveLength(1);
expect(await ctx.syncStream(auth, [SyncRequestType.PeopleV1])).toHaveLength(0);
await personRepo.delete([person.id]);
expect(await ctx.syncStream(auth2, [SyncRequestType.PeopleV1])).toEqual([
expect.objectContaining({ type: SyncEntityType.PersonDeleteV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PeopleV1]);
expect(await ctx.syncStream(auth2, [SyncRequestType.PeopleV1])).toHaveLength(1);
expect(await ctx.syncStream(auth, [SyncRequestType.PeopleV1])).toHaveLength(0);
});
});

View File

@@ -21,7 +21,8 @@ describe(SyncEntityType.SyncResetV1, () => {
it('should work', async () => {
const { auth, ctx } = await setup();
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetsV1]);
const response = await ctx.syncStream(auth, [SyncRequestType.AssetsV1]);
expect(response).toEqual([]);
});
it('should detect a pending sync reset', async () => {
@@ -40,10 +41,7 @@ describe(SyncEntityType.SyncResetV1, () => {
await ctx.newAsset({ ownerId: user.id });
await expect(ctx.syncStream(auth, [SyncRequestType.AssetsV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.AssetV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await expect(ctx.syncStream(auth, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1);
await ctx.get(SessionRepository).update(auth.session!.id, {
isPendingSyncReset: true,
@@ -64,8 +62,9 @@ describe(SyncEntityType.SyncResetV1, () => {
});
await expect(ctx.syncStream(auth, [SyncRequestType.AssetsV1], true)).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.AssetV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
expect.objectContaining({
type: SyncEntityType.AssetV1,
}),
]);
});
@@ -87,8 +86,9 @@ describe(SyncEntityType.SyncResetV1, () => {
const postResetResponse = await ctx.syncStream(auth, [SyncRequestType.AssetsV1]);
expect(postResetResponse).toEqual([
expect.objectContaining({ type: SyncEntityType.AssetV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
expect.objectContaining({
type: SyncEntityType.AssetV1,
}),
]);
});
});
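
The reset spec above drives syncStream with a trailing boolean to confirm a pending reset: once a stream yields SyncResetV1, local rows and checkpoints are no longer trustworthy, so the client wipes them and retries with the confirmation flag set. A hypothetical client-side treatment of that flow (names are assumptions, not the real SDK surface):

type SyncItem = { type: string; ack: string; data: Record<string, unknown> };

// Hypothetical reset handling; the second call's `true` mirrors the flag
// passed to ctx.syncStream in the spec above.
const pullWithResetHandling = async (
  syncStream: (confirmReset?: boolean) => Promise<SyncItem[]>,
  clearLocalState: () => Promise<void>,
): Promise<SyncItem[]> => {
  const items = await syncStream();
  if (!items.some((item) => item.type === 'SyncResetV1')) {
    return items;
  }
  await clearLocalState(); // drop rows and checkpoints; they predate the reset
  return syncStream(true); // confirm the reset to resume a full stream
};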

View File

@@ -25,6 +25,7 @@ describe(SyncEntityType.StackV1, () => {
const { stack } = await ctx.newStack({ ownerId: user.id }, [asset1.id, asset2.id]);
const response = await ctx.syncStream(auth, [SyncRequestType.StacksV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.stringContaining('StackV1'),
@@ -37,11 +38,10 @@ describe(SyncEntityType.StackV1, () => {
},
type: 'StackV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.StacksV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.StacksV1])).resolves.toEqual([]);
});
it('should detect and sync a deleted stack', async () => {
@@ -53,17 +53,17 @@ describe(SyncEntityType.StackV1, () => {
await stackRepo.delete(stack.id);
const response = await ctx.syncStream(auth, [SyncRequestType.StacksV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.stringContaining('StackDeleteV1'),
data: { stackId: stack.id },
type: 'StackDeleteV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.StacksV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.StacksV1])).resolves.toEqual([]);
});
it('should sync a stack and then an update to that same stack', async () => {
@@ -74,29 +74,22 @@ describe(SyncEntityType.StackV1, () => {
const { stack } = await ctx.newStack({ ownerId: user.id }, [asset1.id, asset2.id]);
const response = await ctx.syncStream(auth, [SyncRequestType.StacksV1]);
expect(response).toEqual([
expect.objectContaining({ type: SyncEntityType.StackV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
expect(response).toHaveLength(1);
await ctx.syncAckAll(auth, response);
await stackRepo.update(stack.id, { primaryAssetId: asset2.id });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.StacksV1]);
expect(newResponse).toEqual([
expect.objectContaining({ type: SyncEntityType.StackV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
expect(newResponse).toHaveLength(1);
expect(newResponse).toEqual([
{
ack: expect.stringContaining('StackV1'),
data: expect.objectContaining({ id: stack.id, primaryAssetId: asset2.id }),
type: 'StackV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.StacksV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.StacksV1])).resolves.toEqual([]);
});
it('should not sync a stack or stack delete for an unrelated user', async () => {
@@ -107,8 +100,8 @@ describe(SyncEntityType.StackV1, () => {
const { asset: asset2 } = await ctx.newAsset({ ownerId: user2.id });
const { stack } = await ctx.newStack({ ownerId: user2.id }, [asset1.id, asset2.id]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.StacksV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.StacksV1])).resolves.toEqual([]);
await stackRepo.delete(stack.id);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.StacksV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.StacksV1])).resolves.toEqual([]);
});
});

View File

@@ -25,6 +25,7 @@ describe(SyncEntityType.UserMetadataV1, () => {
await userRepo.upsertMetadata(user.id, { key: UserMetadataKey.Onboarding, value: { isOnboarded: true } });
const response = await ctx.syncStream(auth, [SyncRequestType.UserMetadataV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -35,11 +36,10 @@ describe(SyncEntityType.UserMetadataV1, () => {
},
type: 'UserMetadataV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.UserMetadataV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.UserMetadataV1])).resolves.toEqual([]);
});
it('should update user metadata', async () => {
@@ -49,6 +49,7 @@ describe(SyncEntityType.UserMetadataV1, () => {
await userRepo.upsertMetadata(user.id, { key: UserMetadataKey.Onboarding, value: { isOnboarded: true } });
const response = await ctx.syncStream(auth, [SyncRequestType.UserMetadataV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -59,7 +60,6 @@ describe(SyncEntityType.UserMetadataV1, () => {
},
type: 'UserMetadataV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
@@ -77,11 +77,10 @@ describe(SyncEntityType.UserMetadataV1, () => {
},
type: 'UserMetadataV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, updatedResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.UserMetadataV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.UserMetadataV1])).resolves.toEqual([]);
});
});
@@ -93,6 +92,7 @@ describe(SyncEntityType.UserMetadataDeleteV1, () => {
await userRepo.upsertMetadata(user.id, { key: UserMetadataKey.Onboarding, value: { isOnboarded: true } });
const response = await ctx.syncStream(auth, [SyncRequestType.UserMetadataV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -103,7 +103,6 @@ describe(SyncEntityType.UserMetadataDeleteV1, () => {
},
type: 'UserMetadataV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
@@ -119,7 +118,6 @@ describe(SyncEntityType.UserMetadataDeleteV1, () => {
},
type: 'UserMetadataDeleteV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
});
});

View File

@@ -28,6 +28,7 @@ describe(SyncEntityType.UserV1, () => {
}
const response = await ctx.syncStream(auth, [SyncRequestType.UsersV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -42,11 +43,10 @@ describe(SyncEntityType.UserV1, () => {
},
type: 'UserV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.UsersV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.UsersV1])).resolves.toEqual([]);
});
it('should detect and sync a soft deleted user', async () => {
@@ -56,6 +56,7 @@ describe(SyncEntityType.UserV1, () => {
const response = await ctx.syncStream(auth, [SyncRequestType.UsersV1]);
expect(response).toHaveLength(2);
expect(response).toEqual(
expect.arrayContaining([
{
@@ -68,12 +69,11 @@ describe(SyncEntityType.UserV1, () => {
data: expect.objectContaining({ id: deleted.id }),
type: 'UserV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]),
);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.UsersV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.UsersV1])).resolves.toEqual([]);
});
it('should detect and sync a deleted user', async () => {
@@ -85,6 +85,7 @@ describe(SyncEntityType.UserV1, () => {
await userRepo.delete({ id: user.id }, true);
const response = await ctx.syncStream(auth, [SyncRequestType.UsersV1]);
expect(response).toHaveLength(2);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -98,11 +99,10 @@ describe(SyncEntityType.UserV1, () => {
data: expect.objectContaining({ id: authUser.id }),
type: 'UserV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.UsersV1]);
await expect(ctx.syncStream(auth, [SyncRequestType.UsersV1])).resolves.toEqual([]);
});
it('should sync a user and then an update to that same user', async () => {
@@ -111,13 +111,13 @@ describe(SyncEntityType.UserV1, () => {
const userRepo = ctx.get(UserRepository);
const response = await ctx.syncStream(auth, [SyncRequestType.UsersV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
data: expect.objectContaining({ id: user.id }),
type: 'UserV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
@@ -125,13 +125,13 @@ describe(SyncEntityType.UserV1, () => {
const updated = await userRepo.update(auth.user.id, { name: 'new name' });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.UsersV1]);
expect(newResponse).toHaveLength(1);
expect(newResponse).toEqual([
{
ack: expect.any(String),
data: expect.objectContaining({ id: user.id, name: updated.name }),
type: 'UserV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
});
});
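
Where two rows can legitimately arrive in either order, as with the soft-deleted user above, the specs pair toHaveLength with expect.arrayContaining instead of a plain array literal, trading order-sensitivity for set membership. A self-contained vitest illustration with made-up sample data, separate from the repository code:

import { describe, expect, it } from 'vitest';

describe('order-insensitive matching', () => {
  it('accepts either ordering', () => {
    const response = [{ id: 'b' }, { id: 'a' }];
    // arrayContaining checks membership only, so toHaveLength is still
    // needed to rule out extra items.
    expect(response).toHaveLength(2);
    expect(response).toEqual(
      expect.arrayContaining([expect.objectContaining({ id: 'a' }), expect.objectContaining({ id: 'b' })]),
    );
  });
});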

View File

@@ -1,3 +1,4 @@
import { randomUUID } from 'node:crypto';
import {
Activity,
ApiKey,
@@ -16,15 +17,14 @@ import { MapAsset } from 'src/dtos/asset-response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { AssetStatus, AssetType, AssetVisibility, MemoryType, Permission, UserMetadataKey, UserStatus } from 'src/enum';
import { OnThisDayData, UserMetadataItem } from 'src/types';
import { v4, v7 } from 'uuid';
export const newUuid = () => v4();
export const newUuid = () => randomUUID() as string;
export const newUuids = () =>
Array.from({ length: 100 })
.fill(0)
.map(() => newUuid());
export const newDate = () => new Date();
export const newUuidV7 = () => v7();
export const newUuidV7 = () => 'uuid-v7';
export const newSha1 = () => Buffer.from('this is a fake hash');
export const newEmbedding = () => {
const embedding = Array.from({ length: 512 })

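On the factory change above: node:crypto's randomUUID() emits RFC 4122 version-4 ids natively, so it can replace uuid's v4() for test factories without changing the shape of the generated values. A quick check, independent of the repository code:

import { randomUUID } from 'node:crypto';

// randomUUID() returns a v4 UUID string with no third-party dependency.
const id = randomUUID();
console.log(/^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/.test(id)); // true
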
1
web/.nvmrc Normal file
View File

@@ -0,0 +1 @@
22.18.0

Some files were not shown because too many files have changed in this diff.