Compare commits

...

16 Commits

Author SHA1 Message Date
Jason Rasmussen
4202dd6c9a feat: mise in CI 2025-09-04 13:17:55 -04:00
bo0tzz
7bd79b551c feat: use mise for core dev tools (#21566)
* feat: use mise for core tools

* feat: mise handle dart

* feat: install dcm through mise

* fix: enable experimental in mise config

* feat: use mise.lock

* chore: always pin mise use

---------

Co-authored-by: bwees <brandonwees@gmail.com>
2025-09-04 12:58:42 -04:00
shenlong
5fe954b3c9 fix: use lock to synchronise foreground and background backup (#21522)
* fix: use lock to synchronise foreground and background backup

# Conflicts:
#	mobile/lib/domain/services/background_worker.service.dart
#	mobile/lib/platform/background_worker_api.g.dart
#	mobile/pigeon/background_worker_api.dart

* add timeout to the splash-screen acquire lock

* fix: null check on created date

---------

Co-authored-by: shenlong-tanwen <139912620+shalong-tanwen@users.noreply.github.com>
Co-authored-by: Alex <alex.tran1502@gmail.com>
2025-09-04 11:44:33 -05:00
Jason Rasmussen
7f81a5bd6f fix: sidecar check job (#21312) 2025-09-04 16:23:58 +00:00
Arthur Normand
37a79292c0 feat: view similar photos (#21108)
* Enable filtering by example

* Drop `@GenerateSql` for `getEmbedding`?

* Improve error message

* PR Feedback

* Sort en.json

* Add SQL

* Fix lint

* Drop test that is no longer valid

* Fix i18n file sorting

* Fix TS error

* Add a `requireAccess` before pulling the embedding

* Fix decorators

* Run `make open-api`

---------

Co-authored-by: Alex <alex.tran1502@gmail.com>
2025-09-04 09:22:09 -05:00
Brandon Wees
bf6211776f fix: retain filter and sort options when pulling to refresh (#21452)
* fix: retain filter and sort options when pulling to refresh

* chore: use classes to manage state

* chore: format

* chore: refactor to keep local state of filter/sorted albums instead of a global filteredAlbums

* fix: keep sort when page is navigated away and returned

* chore: lint

* chore: format

why is autoformat not working

* fix: default sort direction state

* fix: search clears sorting

We have to cache our sorted albums since sorting is very computationally expensive and cannot be run on every keystroke. For searches, instead of pulling from the full list of albums, we now pull from the cached sorted list and then filter it; the filtered result is what is shown to the user.
2025-09-04 09:08:17 -05:00
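A minimal sketch of the caching strategy this commit describes, assuming a simplified `Album` type; the names, fields, and sort key here are illustrative stand-ins, not the PR's actual code:

```dart
// Album and its fields are hypothetical stand-ins for illustration only.
class Album {
  final String name;
  final DateTime modified;
  const Album(this.name, this.modified);
}

// Cached result of the expensive sort; recomputed only when the album list or
// the selected sort option changes, never per keystroke.
List<Album> sortedAlbums = [];

void resort(List<Album> albums) {
  sortedAlbums = [...albums]..sort((a, b) => b.modified.compareTo(a.modified));
}

// Cheap per-keystroke step: filter the cached sorted list and show the result.
List<Album> onSearchKeystroke(String query) {
  final q = query.toLowerCase();
  return sortedAlbums.where((a) => a.name.toLowerCase().contains(q)).toList();
}
```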
waclaw66
6c178a04dc fix(mobile): pinch + move scale (#21332)
* fix: pinch + move scale

* added lost changes from #18744
2025-09-04 09:01:39 -05:00
Snowknight26
036d314cb6 fix(web): Make Manage location utility header responsive (#21480)
* fix(web): Make Manage location utility header responsive

* Consolidate <p> into <Text>
2025-09-04 08:59:26 -05:00
Noel S
1fc5da398a fix(mobile): Hide system UI when entering immersive mode in asset viewer (#21539)
Implement hiding system ui in asset viewer
2025-09-04 08:57:34 -05:00
Sudheer Reddy Puthana
4d84338086 fix(mobile): readonly mode fixes (#21545)
* fix: Enables video timeline in readonly mode

- Enables only the video controls in the bottom bar when readonlyMode is enabled.
- Fixes the message on the app profile bar when readOnlyMode is enabled **but** betaTimeline is not enabled.

Fixes https://github.com/immich-app/immich/issues/21441

Signed-off-by: Sudheer Puthana <Sud-Puth@users.noreply.github.com>

* cleanup bottom bar handling

---------

Signed-off-by: Sudheer Puthana <Sud-Puth@users.noreply.github.com>
Co-authored-by: bwees <brandonwees@gmail.com>
2025-09-04 08:50:38 -05:00
Yaros
0ac49b00ee feat(mobile): scrubber haptics (beta timeline) (#21351)
* feat(mobile): scrubber haptics beta timeline

* changed haptic to selectionClick
2025-09-04 08:47:16 -05:00
Mert
e427778a96 fix(mobile): pause image loading on inactive state (#21543)
* pause image loading

* make thumbhashes wait too
2025-09-04 08:40:38 -05:00
Pedro Simão
b82e29fbb4 feat(mobile): add to albums from existing albums (#21554)
* feat(mobile): add to albums from existing albums

* formatted files

* used the new t() method for translation

* removed unused import
2025-09-04 08:39:10 -05:00
shenlong
ff19aea4ac fix: keyboard not dismissed in places page (#21583)
Co-authored-by: shenlong-tanwen <139912620+shalong-tanwen@users.noreply.github.com>
2025-09-04 08:38:44 -05:00
Jason Rasmussen
28179a3a1d feat: audit cleanup (#21567) 2025-09-03 22:50:27 +00:00
Min Idzelis
af1e18d07e fix: docker upload_location perm fix for dev (#21501) 2025-09-03 18:27:30 +01:00
105 changed files with 2150 additions and 985 deletions

1
.github/.nvmrc vendored

@@ -1 +0,0 @@
22.18.0


@@ -1,8 +1,5 @@
{
"scripts": {
"format": "prettier --check .",
"format:fix": "prettier --write ."
},
"name": "github",
"devDependencies": {
"prettier": "^3.5.3"
}


@@ -33,24 +33,20 @@ jobs:
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './cli/.nvmrc'
registry-url: 'https://registry.npmjs.org'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Setup typescript-sdk
run: pnpm install && pnpm run build
working-directory: ./open-api/typescript-sdk
run: mise run sdk:install && mise run sdk:build
- run: pnpm install --frozen-lockfile
- run: pnpm build
- run: pnpm publish
- name: Install dependencies
run: mise run cli:install
- name: Run build
run: mise run cli:build
- name: Publish package
run: pnpm publish
if: ${{ github.event_name == 'release' }}
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}


@@ -55,24 +55,17 @@ jobs:
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './docs/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Run install
run: pnpm install
run: mise run docs:install
- name: Check formatting
run: pnpm format
run: mise run docs:format-fix
- name: Run build
run: pnpm build
run: mise run docs:build
- name: Upload build output
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2


@@ -28,15 +28,11 @@ jobs:
token: ${{ steps.generate-token.outputs.token }}
persist-credentials: true
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Fix formatting
run: make install-all && make format-all
run: mise run server:format-fix && mise run web:format-fix && mise run docs:format-fix
- name: Commit and push
uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9.1.4


@@ -46,15 +46,8 @@ jobs:
- name: Install uv
uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Bump version
env:


@@ -20,20 +20,15 @@ jobs:
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
# Setup .npmrc file to publish to npm
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './open-api/typescript-sdk/.nvmrc'
registry-url: 'https://registry.npmjs.org'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Install deps
run: pnpm install --frozen-lockfile
run: mise run sdk:install
- name: Build
run: pnpm build
run: mise run sdk:build
- name: Publish
run: pnpm publish
env:


@@ -72,27 +72,21 @@ jobs:
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Run package manager install
run: pnpm install
run: mise run server:install
- name: Run linter
run: pnpm lint
run: mise run server:lint
if: ${{ !cancelled() }}
- name: Run formatter
run: pnpm format
run: mise run server:format
if: ${{ !cancelled() }}
- name: Run tsc
run: pnpm check
run: mise run server:check
if: ${{ !cancelled() }}
- name: Run small tests & coverage
run: pnpm test
run: mise run server:test
if: ${{ !cancelled() }}
cli-unit-tests:
name: Unit Test CLI
@@ -109,30 +103,23 @@ jobs:
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './cli/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Setup typescript-sdk
run: pnpm install && pnpm run build
working-directory: ./open-api/typescript-sdk
run: mise run sdk:install && mise run sdk:build
- name: Install deps
run: pnpm install
run: mise run cli:install
- name: Run linter
run: pnpm lint
run: mise run cli:lint
if: ${{ !cancelled() }}
- name: Run formatter
run: pnpm format
run: mise run cli:format
if: ${{ !cancelled() }}
- name: Run tsc
run: pnpm check
run: mise run cli:check
if: ${{ !cancelled() }}
- name: Run unit tests & coverage
run: pnpm test
run: mise run cli:test
if: ${{ !cancelled() }}
cli-unit-tests-win:
name: Unit Test CLI (Windows)
@@ -149,25 +136,18 @@ jobs:
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './cli/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Setup typescript-sdk
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk
run: mise run sdk:install && mise run sdk:build
- name: Install deps
run: pnpm install --frozen-lockfile
run: mise run cli:install
# Skip linter & formatter in Windows test.
- name: Run tsc
run: pnpm check
run: mise run cli:check
if: ${{ !cancelled() }}
- name: Run unit tests & coverage
run: pnpm test
run: mise run cli:test
if: ${{ !cancelled() }}
web-lint:
name: Lint Web
@@ -176,35 +156,25 @@ jobs:
runs-on: mich
permissions:
contents: read
defaults:
run:
working-directory: ./web
steps:
- name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './web/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Run setup typescript-sdk
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk
- name: Run pnpm install
run: pnpm rebuild && pnpm install --frozen-lockfile
run: mise run sdk:install && mise run sdk:build
- name: Run install
run: mise run web:install
- name: Run linter
run: pnpm lint:p
run: mise run web:lint-p
if: ${{ !cancelled() }}
- name: Run formatter
run: pnpm format
run: mise run web:format
if: ${{ !cancelled() }}
- name: Run svelte checks
run: pnpm check:svelte
run: mise run web:check-svelte
if: ${{ !cancelled() }}
web-unit-tests:
name: Test Web
@@ -213,32 +183,22 @@ jobs:
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./web
steps:
- name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './web/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Run setup typescript-sdk
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk
run: mise run sdk:install && mise run sdk:build
- name: Run npm install
run: pnpm install --frozen-lockfile
run: mise run web:install
- name: Run tsc
run: pnpm check:typescript
run: mise run web:check
if: ${{ !cancelled() }}
- name: Run unit tests & coverage
run: pnpm test
run: mise run web:test
if: ${{ !cancelled() }}
i18n-tests:
name: Test i18n
@@ -252,18 +212,12 @@ jobs:
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './web/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Install dependencies
run: pnpm --filter=immich-web install --frozen-lockfile
run: mise run web:install
- name: Format
run: pnpm --filter=immich-web format:i18n
run: mise run i18n:format-fix
- name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
id: verify-changed-files
@@ -293,29 +247,22 @@ jobs:
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './e2e/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Run setup typescript-sdk
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk
run: mise run sdk:install && mise run sdk:build
if: ${{ !cancelled() }}
- name: Install dependencies
run: pnpm install --frozen-lockfile
run: mise run e2e:install
if: ${{ !cancelled() }}
- name: Run linter
run: pnpm lint
run: mise run e2e:lint
if: ${{ !cancelled() }}
- name: Run formatter
run: pnpm format
run: mise run e2e:format
if: ${{ !cancelled() }}
- name: Run tsc
run: pnpm check
run: mise run e2e:check
if: ${{ !cancelled() }}
server-medium-tests:
name: Medium Tests (Server)
@@ -324,26 +271,17 @@ jobs:
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./server
steps:
- name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run pnpm install
run: SHARP_IGNORE_GLOBAL_LIBVIPS=true pnpm install --frozen-lockfile
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Run install
run: SHARP_IGNORE_GLOBAL_LIBVIPS=true mise run server:install
- name: Run medium tests
run: pnpm test:medium
run: mise run server:test-medium
if: ${{ !cancelled() }}
e2e-tests-server-cli:
name: End-to-End Tests (Server & CLI)
@@ -352,9 +290,6 @@ jobs:
runs-on: ${{ matrix.runner }}
permissions:
contents: read
defaults:
run:
working-directory: ./e2e
strategy:
matrix:
runner: [ubuntu-latest, ubuntu-24.04-arm]
@@ -364,34 +299,25 @@ jobs:
with:
persist-credentials: false
submodules: 'recursive'
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './e2e/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Run setup typescript-sdk
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk
run: mise run sdk:install && mise run sdk:build
if: ${{ !cancelled() }}
- name: Run setup web
run: pnpm install --frozen-lockfile && pnpm exec svelte-kit sync
working-directory: ./web
run: mise run web:install && mise run web:svelte-kit-sync
if: ${{ !cancelled() }}
- name: Run setup cli
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./cli
run: mise run cli:install && mise run cli:build
if: ${{ !cancelled() }}
- name: Install dependencies
run: pnpm install --frozen-lockfile
run: mise run e2e:install
if: ${{ !cancelled() }}
- name: Docker build
run: docker compose build
run: docker compose -f e2e/docker-compose.yml build
if: ${{ !cancelled() }}
- name: Run e2e tests (api & cli)
run: pnpm test
run: mise run e2e:test
if: ${{ !cancelled() }}
e2e-tests-web:
name: End-to-End Tests (Web)
@@ -400,9 +326,6 @@ jobs:
runs-on: ${{ matrix.runner }}
permissions:
contents: read
defaults:
run:
working-directory: ./e2e
strategy:
matrix:
runner: [ubuntu-latest, ubuntu-24.04-arm]
@@ -412,29 +335,26 @@ jobs:
with:
persist-credentials: false
submodules: 'recursive'
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './e2e/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Run setup typescript-sdk
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk
run: mise run sdk:install && mise run sdk:build
if: ${{ !cancelled() }}
- name: Install dependencies
run: pnpm install --frozen-lockfile
- name: Run setup web
run: mise run web:install && mise run web:svelte-kit-sync
if: ${{ !cancelled() }}
- name: Run setup cli
run: mise run cli:install && mise run cli:build
if: ${{ !cancelled() }}
- name: Install Playwright Browsers
run: npx playwright install chromium --only-shell
working-directory: e2e
if: ${{ !cancelled() }}
- name: Docker build
run: docker compose build
run: docker compose -f e2e/docker-compose.yml build
if: ${{ !cancelled() }}
- name: Run e2e tests (web)
run: npx playwright test
run: mise run e2e:test-web
if: ${{ !cancelled() }}
success-check-e2e:
name: End-to-End Tests Success
@@ -519,18 +439,12 @@ jobs:
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './.github/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run pnpm install
run: pnpm install --frozen-lockfile
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Run install
run: mise run github:install
- name: Run formatter
run: pnpm format
run: mise run github:format
if: ${{ !cancelled() }}
shellcheck:
name: ShellCheck
@@ -556,18 +470,12 @@ jobs:
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Install server dependencies
run: SHARP_IGNORE_GLOBAL_LIBVIPS=true pnpm --filter immich install --frozen-lockfile
run: SHARP_IGNORE_GLOBAL_LIBVIPS=true mise run server:install
- name: Build the app
run: pnpm --filter immich build
run: mise run server:build
- name: Run API generation
run: ./bin/generate-open-api.sh
working-directory: open-api
@@ -611,25 +519,19 @@ jobs:
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Setup mise
uses: jdx/mise-action@5ac50f778e26fac95da98d50503682459e86d566 # v3.2.0
- name: Install server dependencies
run: SHARP_IGNORE_GLOBAL_LIBVIPS=true pnpm install --frozen-lockfile
- name: Build the app
run: pnpm build
run: mise run server:build
- name: Run existing migrations
run: pnpm migrations:run
run: mise run server:migrations run
- name: Test npm run schema:reset command works
run: pnpm schema:reset
run: mise run server:schema-reset
- name: Generate new migrations
continue-on-error: true
run: pnpm migrations:generate src/TestMigration
run: mise run server:migrations generate src/TestMigration
- name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
id: verify-changed-files
@@ -646,7 +548,7 @@ jobs:
cat ./src/*-TestMigration.ts
exit 1
- name: Run SQL generation
run: pnpm sync:sql
run: mise run server:sql
env:
DB_URL: postgres://postgres:postgres@localhost:5432/immich
- name: Find file changes


@@ -10,14 +10,14 @@ dev-update: prepare-volumes
dev-scale: prepare-volumes
@trap 'make dev-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.dev.yml up --build -V --scale immich-server=3 --remove-orphans
dev-docs: prepare-volumes
dev-docs:
npm --prefix docs run start
.PHONY: e2e
e2e: prepare-volumes
e2e:
@trap 'make e2e-down' EXIT; COMPOSE_BAKE=true docker compose -f ./e2e/docker-compose.yml up --remove-orphans
e2e-update: prepare-volumes
e2e-update:
@trap 'make e2e-down' EXIT; COMPOSE_BAKE=true docker compose -f ./e2e/docker-compose.yml up --build -V --remove-orphans
e2e-down:
@@ -73,6 +73,8 @@ define safe_chown
if chown $(2) $(or $(UID),1000):$(or $(GID),1000) "$(1)" 2>/dev/null; then \
true; \
else \
STATUS=$$?; echo "Exit code: $$STATUS $(1)"; \
echo "$$STATUS $(1)"; \
echo "Permission denied when changing owner of volumes and upload location. Try running 'sudo make prepare-volumes' first."; \
exit 1; \
fi;
@@ -83,11 +85,13 @@ prepare-volumes:
@$(foreach dir,$(VOLUME_DIRS),$(call safe_chown,$(dir),-R))
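# If UPLOAD_LOCATION is relative (no leading /), it is resolved under docker/;
# create the photos/upload tree there and fix its ownership, otherwise operate
# on the absolute path directly.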
ifneq ($(UPLOAD_LOCATION),)
ifeq ($(filter /%,$(UPLOAD_LOCATION)),)
@mkdir -p "docker/$(UPLOAD_LOCATION)"
@mkdir -p "docker/$(UPLOAD_LOCATION)/photos/upload"
@$(call safe_chown,docker/$(UPLOAD_LOCATION),)
@$(call safe_chown,docker/$(UPLOAD_LOCATION)/photos,-R)
else
@mkdir -p "$(UPLOAD_LOCATION)"
@mkdir -p "$(UPLOAD_LOCATION)/photos/upload"
@$(call safe_chown,$(UPLOAD_LOCATION),)
@$(call safe_chown,$(UPLOAD_LOCATION)/photos,-R)
endif
endif


@@ -1 +0,0 @@
22.18.0


@@ -42,17 +42,6 @@
"vitest-fetch-mock": "^0.4.0",
"yaml": "^2.3.1"
},
"scripts": {
"build": "vite build",
"lint": "eslint \"src/**/*.ts\" --max-warnings 0",
"lint:fix": "npm run lint -- --fix",
"prepack": "npm run build",
"test": "vitest",
"test:cov": "vitest --coverage",
"format": "prettier --check .",
"format:fix": "prettier --write .",
"check": "tsc --noEmit"
},
"repository": {
"type": "git",
"url": "git+https://github.com/immich-app/immich.git",
@@ -67,8 +56,5 @@
"fastq": "^1.17.1",
"lodash-es": "^4.17.21",
"micromatch": "^4.0.8"
},
"volta": {
"node": "22.18.0"
}
}


@@ -1 +0,0 @@
22.18.0


@@ -5,7 +5,7 @@ After making any changes in the `server/src/schema`, a database migration needs to be generated:
1. Run the command
```bash
pnpm run migrations:generate <migration-name>
mise run server:migrations generate <migration-name>
```
2. Check if the migration file makes sense.


@@ -8,11 +8,11 @@ When contributing code through a pull request, please check the following:
## Web Checks
- [ ] `pnpm run lint` (linting via ESLint)
- [ ] `pnpm run format` (formatting via Prettier)
- [ ] `pnpm run check:svelte` (Type checking via SvelteKit)
- [ ] `pnpm run check:typescript` (check typescript)
- [ ] `pnpm test` (unit tests)
- [ ] `mise run web:lint` (linting via ESLint)
- [ ] `mise run web:format` (formatting via Prettier)
- [ ] `mise run web:check` (check typescript)
- [ ] `mise run web:check-svelte` (Type checking via SvelteKit)
- [ ] `mise run web:test` (unit tests)
## Documentation
@@ -25,17 +25,18 @@ Run all web checks with `pnpm run check:all`
## Server Checks
- [ ] `pnpm run lint` (linting via ESLint)
- [ ] `pnpm run format` (formatting via Prettier)
- [ ] `pnpm run check` (Type checking via `tsc`)
- [ ] `pnpm test` (unit tests)
- [ ] `mise run server:lint` (linting via ESLint)
- [ ] `mise run server:format` (formatting via Prettier)
- [ ] `mise run server:check` (type checking via `tsc`)
- [ ] `mise run server:test` (unit tests)
- [ ] `mise run server:test-medium` (medium tests)
:::tip AIO
Run all server checks with `pnpm run check:all`
:::
:::info Auto Fix
You can use `pnpm run __:fix` to potentially correct some issues automatically for `pnpm run format` and `lint`.
You can use `mise run server:lint-fix` and `mise run server:format-fix` to potentially correct some issues automatically.
:::
## Mobile Checks


@@ -2,20 +2,6 @@
"name": "documentation",
"version": "0.0.0",
"private": true,
"scripts": {
"docusaurus": "docusaurus",
"format": "prettier --check .",
"format:fix": "prettier --write .",
"start": "docusaurus start --port 3005",
"copy:openapi": "jq -c < ../open-api/immich-openapi-specs.json > ./static/openapi.json || exit 0",
"build": "npm run copy:openapi && docusaurus build",
"swizzle": "docusaurus swizzle",
"deploy": "docusaurus deploy",
"clear": "docusaurus clear",
"serve": "docusaurus serve",
"write-translations": "docusaurus write-translations",
"write-heading-ids": "docusaurus write-heading-ids"
},
"dependencies": {
"@docusaurus/core": "~3.8.0",
"@docusaurus/preset-classic": "~3.8.0",
@@ -58,8 +44,5 @@
},
"engines": {
"node": ">=20"
},
"volta": {
"node": "22.18.0"
}
}


@@ -1 +0,0 @@
22.18.0


@@ -4,17 +4,6 @@
"description": "",
"main": "index.js",
"type": "module",
"scripts": {
"test": "vitest --run",
"test:watch": "vitest",
"test:web": "npx playwright test",
"start:web": "npx playwright test --ui",
"format": "prettier --check .",
"format:fix": "prettier --write .",
"lint": "eslint \"src/**/*.ts\" --max-warnings 0",
"lint:fix": "npm run lint -- --fix",
"check": "tsc --noEmit"
},
"keywords": [],
"author": "",
"license": "GNU Affero General Public License version 3",
@@ -52,8 +41,5 @@
"typescript-eslint": "^8.28.0",
"utimes": "^5.2.1",
"vitest": "^3.0.0"
},
"volta": {
"node": "22.18.0"
}
}


@@ -1557,6 +1557,7 @@
"purchase_server_description_2": "Supporter status",
"purchase_server_title": "Server",
"purchase_settings_server_activated": "The server product key is managed by the admin",
"query_asset_id": "Query Asset ID",
"queue_status": "Queuing {count}/{total}",
"rating": "Star rating",
"rating_clear": "Clear rating",
@@ -1735,7 +1736,7 @@
"select_user_for_sharing_page_err_album": "Failed to create album",
"selected": "Selected",
"selected_count": "{count, plural, other {# selected}}",
"selected_gps_coordinates": "selected gps coordinates",
"selected_gps_coordinates": "Selected GPS Coordinates",
"send_message": "Send message",
"send_welcome_email": "Send welcome email",
"server_endpoint": "Server Endpoint",
@@ -2077,6 +2078,7 @@
"view_next_asset": "View next asset",
"view_previous_asset": "View previous asset",
"view_qr_code": "View QR code",
"view_similar_photos": "View similar photos",
"view_stack": "View Stack",
"view_user": "View User",
"viewer_remove_from_stack": "Remove from Stack",

34
mise.lock Normal file

@@ -0,0 +1,34 @@
[tools.dart]
version = "3.8.2"
backend = "asdf:dart"
[tools.flutter]
version = "3.32.8-stable"
backend = "asdf:flutter"
[tools."github:CQLabs/homebrew-dcm"]
version = "1.31.4"
backend = "github:CQLabs/homebrew-dcm"
[tools."github:CQLabs/homebrew-dcm".platforms.linux-x64]
checksum = "blake3:e9df5b765df327e1248fccf2c6165a89d632a065667f99c01765bf3047b94955"
size = 8821083
url = "https://github.com/CQLabs/homebrew-dcm/releases/download/1.31.4/dcm-linux-x64-release.zip"
[tools.node]
version = "22.18.0"
backend = "core:node"
[tools.node.platforms.linux-x64]
checksum = "sha256:a2e703725d8683be86bb5da967bf8272f4518bdaf10f21389e2b2c9eaeae8c8a"
size = 54824343
url = "https://nodejs.org/dist/v22.18.0/node-v22.18.0-linux-x64.tar.gz"
[tools.pnpm]
version = "10.14.0"
backend = "aqua:pnpm/pnpm"
[tools.pnpm.platforms.linux-x64]
checksum = "blake3:13dfa46b7173d3cad3bad60a756a492ecf0bce48b23eb9f793e7ccec5a09b46d"
size = 66231525
url = "https://github.com/pnpm/pnpm/releases/download/v10.14.0/pnpm-linux-x64"

312
mise.toml Normal file

@@ -0,0 +1,312 @@
[tools]
node = "22.18.0"
flutter = "3.32.8"
pnpm = "10.14.0"
dart = "3.8.2"
[tools."github:CQLabs/homebrew-dcm"]
version = "1.31.4"
bin = "dcm"
postinstall = "chmod +x $MISE_TOOL_INSTALL_PATH/dcm"
[settings]
experimental = true
lockfile = true
pin = true
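# Tasks below use `env._.path` to prepend each package's node_modules/.bin to
# PATH, so locally installed tools (prettier, vite, tsc, eslint, ...) can be
# invoked by bare name from the task's working directory.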
# .github
[tasks."github:install"]
run = "pnpm install --filter github --frozen-lockfile"
[tasks."github:format"]
env._.path = "./.github/node_modules/.bin"
dir = ".github"
run = "prettier --check ."
[tasks."github:format-fix"]
env._.path = "./.github/node_modules/.bin"
dir = ".github"
run = "prettier --write ."
# @immich/cli
[tasks."cli:install"]
run = "pnpm install --filter @immich/cli --frozen-lockfile"
[tasks."cli:build"]
env._.path = "./cli/node_modules/.bin"
dir = "cli"
run = "vite build"
[tasks."cli:test"]
env._.path = "./cli/node_modules/.bin"
dir = "cli"
run = "vite"
[tasks."cli:lint"]
env._.path = "./cli/node_modules/.bin"
dir = "cli"
run = "eslint \"src/**/*.ts\" --max-warnings 0"
[tasks."cli:lint-fix"]
run = "mise run cli:lint --fix"
[tasks."cli:format"]
env._.path = "./cli/node_modules/.bin"
dir = "cli"
run = "prettier --check ."
[tasks."cli:format-fix"]
env._.path = "./cli/node_modules/.bin"
dir = "cli"
run = "prettier --write ."
[tasks."cli:check"]
env._.path = "./cli/node_modules/.bin"
dir = "cli"
run = "tsc --noEmit"
# @immich/sdk
[tasks."sdk:install"]
run = "pnpm install --filter @immich/sdk --frozen-lockfile"
[tasks."sdk:build"]
env._.path = "./open-api/typescript-sdk/node_modules/.bin"
dir = "./open-api/typescript-sdk"
run = "tsc"
# docs
[tasks."docs:install"]
run = "pnpm install --filter documentation --frozen-lockfile"
[tasks."docs:start"]
env._.path = "./docs/node_modules/.bin"
dir = "docs"
run = "docusaurus --port 3005"
[tasks."docs:build"]
env._.path = "./docs/node_modules/.bin"
dir = "docs"
run = [
"jq -c < ../open-api/immich-openapi-specs.json > ./static/openapi.json || exit 0",
"docusaurus build",
]
[tasks."docs:preview"]
env._.path = "./docs/node_modules/.bin"
dir = "docs"
run = "docusaurus serve"
[tasks."docs:format"]
env._.path = "./docs/node_modules/.bin"
dir = "docs"
run = "prettier --check ."
[tasks."docs:format-fix"]
env._.path = "./docs/node_modules/.bin"
dir = "docs"
run = "prettier --write ."
# e2e
[tasks."e2e:install"]
run = "pnpm install --filter immich-e2e --frozen-lockfile"
[tasks."e2e:test"]
env._.path = "./e2e/node_modules/.bin"
dir = "e2e"
run = "vitest --run"
[tasks."e2e:test-web"]
env._.path = "./e2e/node_modules/.bin"
dir = "e2e"
run = "playwright test"
[tasks."e2e:format"]
env._.path = "./e2e/node_modules/.bin"
dir = "e2e"
run = "prettier --check ."
[tasks."e2e:format-fix"]
env._.path = "./e2e/node_modules/.bin"
dir = "e2e"
run = "prettier --write ."
[tasks."e2e:lint"]
env._.path = "./e2e/node_modules/.bin"
dir = "e2e"
run = "eslint \"src/**/*.ts\" --max-warnings 0"
[tasks."e2e:lint-fix"]
run = "mise run e2e:lint --fix"
[tasks."e2e:check"]
env._.path = "./e2e/node_modules/.bin"
dir = "e2e"
run = "tsc --noEmit"
# i18n
[tasks."i18n:format"]
run = "mise run i18n:format-fix"
[tasks."i18n:format-fix"]
run = "pnpm dlx sort-json ./i18n/*.json"
# server
[tasks."server:install"]
run = "pnpm install --filter immich --frozen-lockfile"
[tasks."server:build"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "nest build"
[tasks."server:test"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "vitest --config test/vitest.config.mjs"
[tasks."server:test-medium"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "vitest --config test/vitest.config.medium.mjs"
[tasks."server:format"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "prettier --check ."
[tasks."server:format-fix"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "prettier --write ."
[tasks."server:lint"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "eslint \"src/**/*.ts\" \"test/**/*.ts\" --max-warnings 0"
[tasks."server:lint-fix"]
run = "mise run server:lint --fix"
[tasks."server:check"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "tsc --noEmit"
[tasks."server:sql"]
dir = "server"
run = "node ./dist/bin/sync-open-api.js"
[tasks."server:open-api"]
dir = "server"
run = "node ./dist/bin/sync-open-api.js"
[tasks."server:migrations"]
dir = "server"
run = "node ./dist/bin/migrations.js"
description = "Run database migration commands (create, generate, run, debug, or query)"
[tasks."server:schema-drop"]
run = "mise run server:migrations query 'DROP schema public cascade; CREATE schema public;'"
[tasks."server:schema-reset"]
run = "mise run server:schema-drop && mise run server:migrations run"
[tasks."server:email-dev"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "email dev -p 3050 --dir src/emails"
[tasks."server:checklist"]
run = [
"mise run server:install",
"mise run server:format",
"mise run server:lint",
"mise run server:check",
"mise run server:test-medium --run",
"mise run server:test --run",
]
# web
[tasks."web:install"]
run = "pnpm install --filter immich-web --frozen-lockfile"
[tasks."web:svelte-kit-sync"]
env._.path = "./web/node_modules/.bin"
dir = "web"
run = "svelte-kit sync"
[tasks."web:build"]
env._.path = "./web/node_modules/.bin"
dir = "web"
run = "vite build"
[tasks."web:build-stats"]
env.BUILD_STATS = "true"
env._.path = "./web/node_modules/.bin"
dir = "web"
run = "vite build"
[tasks."web:preview"]
env._.path = "./web/node_modules/.bin"
dir = "web"
run = "vite preview"
[tasks."web:start"]
env._.path = "web/node_modules/.bin"
dir = "web"
run = "vite dev --host 0.0.0.0 --port 3000"
[tasks."web:test"]
depends = "web:svelte-kit-sync"
env._.path = "web/node_modules/.bin"
dir = "web"
run = "vitest"
[tasks."web:format"]
env._.path = "web/node_modules/.bin"
dir = "web"
run = "prettier --check ."
[tasks."web:format-fix"]
env._.path = "web/node_modules/.bin"
dir = "web"
run = "prettier --write ."
[tasks."web:lint"]
env._.path = "web/node_modules/.bin"
dir = "web"
run = "eslint . --max-warnings 0"
[tasks."web:lint-p"]
env._.path = "web/node_modules/.bin"
dir = "web"
run = "eslint-p . --max-warnings 0 --concurrency=4"
[tasks."web:lint-fix"]
run = "mise run web:lint --fix"
[tasks."web:check"]
depends = "web:svelte-kit-sync"
env._.path = "web/node_modules/.bin"
dir = "web"
run = "tsc --noEmit"
[tasks."web:check-svelte"]
depends = "web:svelte-kit-sync"
env._.path = "web/node_modules/.bin"
dir = "web"
run = "svelte-check --no-tsconfig --fail-on-warnings --compiler-warnings 'reactive_declaration_non_reactive_property:ignore' --ignore src/lib/components/photos-page/asset-grid.svelte"
[tasks."web:checklist"]
run = [
"mise run web:install",
"mise run web:format",
"mise run web:check",
"mise run web:test --run",
"mise run web:lint",
]


@@ -130,8 +130,10 @@ class BackgroundWorker(context: Context, params: WorkerParameters) :
* - Parameter success: Indicates whether the background task completed successfully
*/
private fun complete(success: Result) {
Log.d(TAG, "About to complete BackupWorker with result: $success")
isComplete = true
engine?.destroy()
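// Clear references so the destroyed engine and the Flutter API cannot be
// reused and become eligible for garbage collection.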
engine = null
flutterApi = null
completionHandler.set(success)
}


@@ -3,7 +3,7 @@
archiveVersion = 1;
classes = {
};
objectVersion = 77;
objectVersion = 54;
objects = {
/* Begin PBXBuildFile section */
@@ -507,14 +507,10 @@
inputFileListPaths = (
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources-${CONFIGURATION}-input-files.xcfilelist",
);
inputPaths = (
);
name = "[CP] Copy Pods Resources";
outputFileListPaths = (
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources-${CONFIGURATION}-output-files.xcfilelist",
);
outputPaths = (
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources.sh\"\n";
@@ -543,14 +539,10 @@
inputFileListPaths = (
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-input-files.xcfilelist",
);
inputPaths = (
);
name = "[CP] Embed Pods Frameworks";
outputFileListPaths = (
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-output-files.xcfilelist",
);
outputPaths = (
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh\"\n";


@@ -118,7 +118,7 @@ class BackgroundWorker: BackgroundWorkerBgHostApi {
self.handleHostResult(result: result)
})
}
/**
* Cancels the currently running background task, either due to timeout or external request.
* Sends a cancel signal to the Flutter side and sets up a fallback timer to ensure
@@ -140,6 +140,7 @@ class BackgroundWorker: BackgroundWorkerBgHostApi {
self.complete(success: false)
}
}
/**
* Handles the result from Flutter API calls and determines the success/failure status.


@@ -46,6 +46,23 @@ class ThumbnailApiImpl: ThumbnailApi {
assetCache.countLimit = 10000
return assetCache
}()
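// Pause thumbnail work while the app is inactive: on willResignActive the
// processing queue is suspended and the semaphore is taken, so in-flight work
// blocks in waitForActiveState(); on didBecomeActive both are released.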
private static let activitySemaphore = DispatchSemaphore(value: 1)
private static let willResignActiveObserver = NotificationCenter.default.addObserver(
forName: UIApplication.willResignActiveNotification,
object: nil,
queue: .main
) { _ in
processingQueue.suspend()
activitySemaphore.wait()
}
private static let didBecomeActiveObserver = NotificationCenter.default.addObserver(
forName: UIApplication.didBecomeActiveNotification,
object: nil,
queue: .main
) { _ in
processingQueue.resume()
activitySemaphore.signal()
}
func getThumbhash(thumbhash: String, completion: @escaping (Result<[String : Int64], any Error>) -> Void) {
Self.processingQueue.async {
@@ -53,6 +70,7 @@ class ThumbnailApiImpl: ThumbnailApi {
else { return completion(.failure(PigeonError(code: "", message: "Invalid base64 string: \(thumbhash)", details: nil)))}
let (width, height, pointer) = thumbHashToRGBA(hash: data)
self.waitForActiveState()
completion(.success(["pointer": Int64(Int(bitPattern: pointer.baseAddress)), "width": Int64(width), "height": Int64(height)]))
}
}
@@ -142,6 +160,7 @@ class ThumbnailApiImpl: ThumbnailApi {
return completion(Self.cancelledResult)
}
self.waitForActiveState()
completion(.success(["pointer": Int64(Int(bitPattern: pointer)), "width": Int64(cgImage.width), "height": Int64(cgImage.height)]))
Self.removeRequest(requestId: requestId)
}
@@ -184,4 +203,9 @@ class ThumbnailApiImpl: ThumbnailApi {
assetQueue.async { assetCache.setObject(asset, forKey: assetId as NSString) }
return asset
}
func waitForActiveState() {
Self.activitySemaphore.wait()
Self.activitySemaphore.signal()
}
}


@@ -5,6 +5,7 @@ import 'package:background_downloader/background_downloader.dart';
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/constants/constants.dart';
import 'package:immich_mobile/domain/utils/isolate_lock_manager.dart';
import 'package:immich_mobile/infrastructure/repositories/db.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/logger_db.repository.dart';
import 'package:immich_mobile/platform/background_worker_api.g.dart';
@@ -41,7 +42,8 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
final Drift _drift;
final DriftLogger _driftLogger;
final BackgroundWorkerBgHostApi _backgroundHostApi;
final Logger _logger = Logger('BackgroundWorkerBgService');
final Logger _logger = Logger('BackgroundUploadBgService');
late final IsolateLockManager _lockManager;
bool _isCleanedUp = false;
@@ -57,6 +59,7 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
driftProvider.overrideWith(driftOverride(drift)),
],
);
_lockManager = IsolateLockManager(onCloseRequest: _cleanup);
BackgroundWorkerFlutterApi.setUp(this);
}
@@ -80,11 +83,25 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
await FileDownloader().trackTasksInGroup(kDownloadGroupLivePhoto, markDownloadedComplete: false);
await FileDownloader().trackTasks();
configureFileDownloaderNotifications();
await _ref.read(fileMediaRepositoryProvider).enableBackgroundAccess();
// Notify the host that the background worker service has been initialized and is ready to use
_backgroundHostApi.onInitialized();
// Notify the host that the background upload service has been initialized and is ready to use
debugPrint("Acquiring background worker lock");
if (await _lockManager.acquireLock().timeout(
const Duration(seconds: 5),
onTimeout: () {
_lockManager.cancel();
return false;
},
)) {
_logger.info("Acquired background worker lock");
await _backgroundHostApi.onInitialized();
return;
}
_logger.warning("Failed to acquire background worker lock");
await _cleanup();
await _backgroundHostApi.close();
} catch (error, stack) {
_logger.severe("Failed to initialize background worker", error, stack);
_backgroundHostApi.close();
@@ -160,7 +177,8 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
await _drift.close();
await _driftLogger.close();
_ref.dispose();
debugPrint("Background worker cleaned up");
_lockManager.releaseLock();
_logger.info("Background worker resources cleaned up");
} catch (error, stack) {
debugPrint('Failed to cleanup background worker: $error with stack: $stack');
}


@@ -0,0 +1,235 @@
import 'dart:isolate';
import 'dart:ui';
import 'package:flutter/foundation.dart';
import 'package:logging/logging.dart';
const String kIsolateLockManagerPort = "immich://isolate_mutex";
enum _LockStatus { active, released }
class _IsolateRequest {
const _IsolateRequest();
}
class _HeartbeatRequest extends _IsolateRequest {
// Port for the receiver to send replies back
final SendPort sendPort;
const _HeartbeatRequest(this.sendPort);
Map<String, dynamic> toJson() {
return {'type': 'heartbeat', 'sendPort': sendPort};
}
}
class _CloseRequest extends _IsolateRequest {
const _CloseRequest();
Map<String, dynamic> toJson() {
return {'type': 'close'};
}
}
class _IsolateResponse {
const _IsolateResponse();
}
class _HeartbeatResponse extends _IsolateResponse {
final _LockStatus status;
const _HeartbeatResponse(this.status);
Map<String, dynamic> toJson() {
return {'type': 'heartbeat', 'status': status.index};
}
}
typedef OnCloseLockHolderRequest = void Function();
class IsolateLockManager {
final String _portName;
bool _hasLock = false;
ReceivePort? _receivePort;
final OnCloseLockHolderRequest? _onCloseRequest;
final Set<SendPort> _waitingIsolates = {};
// Token object - a new one is created for each acquisition attempt
Object? _currentAcquisitionToken;
IsolateLockManager({String? portName, OnCloseLockHolderRequest? onCloseRequest})
: _portName = portName ?? kIsolateLockManagerPort,
_onCloseRequest = onCloseRequest;
Future<bool> acquireLock() async {
if (_hasLock) {
Logger('BackgroundWorkerLockManager').warning("WARNING: [acquireLock] called more than once");
return true;
}
// Create a new token - this invalidates any previous attempt
final token = _currentAcquisitionToken = Object();
final ReceivePort rp = _receivePort = ReceivePort(_portName);
final SendPort sp = rp.sendPort;
while (!IsolateNameServer.registerPortWithName(sp, _portName)) {
// This attempt was superseded by a newer one in the same isolate
if (_currentAcquisitionToken != token) {
return false;
}
await _lockReleasedByHolder(token);
}
_hasLock = true;
rp.listen(_onRequest);
return true;
}
Future<void> _lockReleasedByHolder(Object token) async {
SendPort? holder = IsolateNameServer.lookupPortByName(_portName);
debugPrint("Found lock holder: $holder");
if (holder == null) {
// No holder, try and acquire lock
return;
}
final ReceivePort tempRp = ReceivePort();
final SendPort tempSp = tempRp.sendPort;
final bs = tempRp.asBroadcastStream();
try {
while (true) {
// Send a heartbeat request with the send port to receive reply from the holder
debugPrint("Sending heartbeat request to lock holder");
holder.send(_HeartbeatRequest(tempSp).toJson());
dynamic answer = await bs.first.timeout(const Duration(seconds: 3), onTimeout: () => null);
debugPrint("Received heartbeat response from lock holder: $answer");
// This attempt was superseded by a newer one in the same isolate
if (_currentAcquisitionToken != token) {
break;
}
if (answer == null) {
// Holder failed, most likely killed without calling releaseLock
// Check if a different waiting isolate took the lock
if (holder == IsolateNameServer.lookupPortByName(_portName)) {
// No, remove the stale lock
IsolateNameServer.removePortNameMapping(_portName);
}
break;
}
// Unknown message type received for heartbeat request. Try again
_IsolateResponse? response = _parseResponse(answer);
if (response == null || response is! _HeartbeatResponse) {
break;
}
if (response.status == _LockStatus.released) {
// Holder has released the lock
break;
}
// If the _LockStatus is active, we check again if the task completed
// by sending a released message again; if not, send a new heartbeat again
// Check if the holder completed its task after the heartbeat
answer = await bs.first.timeout(
const Duration(seconds: 3),
onTimeout: () => const _HeartbeatResponse(_LockStatus.active).toJson(),
);
response = _parseResponse(answer);
if (response is _HeartbeatResponse && response.status == _LockStatus.released) {
break;
}
}
} catch (e) {
// Timeout or error
} finally {
tempRp.close();
}
return;
}
_IsolateRequest? _parseRequest(dynamic msg) {
if (msg is! Map<String, dynamic>) {
return null;
}
return switch (msg['type']) {
'heartbeat' => _HeartbeatRequest(msg['sendPort']),
'close' => const _CloseRequest(),
_ => null,
};
}
_IsolateResponse? _parseResponse(dynamic msg) {
if (msg is! Map<String, dynamic>) {
return null;
}
return switch (msg['type']) {
'heartbeat' => _HeartbeatResponse(_LockStatus.values[msg['status']]),
_ => null,
};
}
// Executed in the isolate with the lock
void _onRequest(dynamic msg) {
final request = _parseRequest(msg);
if (request == null) {
return;
}
if (request is _HeartbeatRequest) {
// Add the send port to the list of waiting isolates
_waitingIsolates.add(request.sendPort);
request.sendPort.send(const _HeartbeatResponse(_LockStatus.active).toJson());
return;
}
if (request is _CloseRequest) {
_onCloseRequest?.call();
return;
}
}
void releaseLock() {
if (_hasLock) {
IsolateNameServer.removePortNameMapping(_portName);
// Notify waiting isolates
for (final port in _waitingIsolates) {
port.send(const _HeartbeatResponse(_LockStatus.released).toJson());
}
_waitingIsolates.clear();
_hasLock = false;
}
_receivePort?.close();
_receivePort = null;
}
void cancel() {
if (_hasLock) {
return;
}
debugPrint("Cancelling ongoing acquire lock attempts");
// Create a new token to invalidate ongoing acquire lock attempts
_currentAcquisitionToken = Object();
}
void requestHolderToClose() {
if (_hasLock) {
return;
}
IsolateNameServer.lookupPortByName(_portName)?.send(const _CloseRequest().toJson());
}
}
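For context, a minimal usage sketch of the lock manager above, as one isolate might call it; `runExclusively` and `doExclusiveWork` are hypothetical stand-ins, not part of the PR:

```dart
import 'package:immich_mobile/domain/utils/isolate_lock_manager.dart';

Future<void> runExclusively(Future<void> Function() doExclusiveWork) async {
  // One manager per isolate; all participants share the same port name.
  final lock = IsolateLockManager(
    onCloseRequest: () {
      // Another isolate called requestHolderToClose(); shut down early.
    },
  );

  // acquireLock() heartbeats the current holder and completes once the holder
  // releases the lock or stops answering (i.e. was killed without releasing).
  if (await lock.acquireLock()) {
    try {
      await doExclusiveWork();
    } finally {
      lock.releaseLock(); // notifies any waiting isolates
    }
  }
}
```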


@@ -2,8 +2,10 @@ import 'package:auto_route/auto_route.dart';
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/store.model.dart';
import 'package:immich_mobile/domain/utils/isolate_lock_manager.dart';
import 'package:immich_mobile/entities/store.entity.dart';
import 'package:immich_mobile/providers/auth.provider.dart';
import 'package:immich_mobile/providers/background_sync.provider.dart';
import 'package:immich_mobile/providers/backup/backup.provider.dart';
import 'package:immich_mobile/providers/gallery_permission.provider.dart';
import 'package:immich_mobile/providers/server_info.provider.dart';
@@ -21,14 +23,23 @@ class SplashScreenPage extends StatefulHookConsumerWidget {
class SplashScreenPageState extends ConsumerState<SplashScreenPage> {
final log = Logger("SplashScreenPage");
@override
void initState() {
super.initState();
ref
.read(authProvider.notifier)
.setOpenApiServiceEndpoint()
.then(logConnectionInfo)
.whenComplete(() => resumeSession());
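// Ask any lock-holding background isolate to shut down, then wait up to five
// seconds to acquire the lock before restoring the session.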
final lockManager = ref.read(isolateLockManagerProvider(kIsolateLockManagerPort));
lockManager.requestHolderToClose();
lockManager
.acquireLock()
.timeout(const Duration(seconds: 5))
.whenComplete(
() => ref
.read(authProvider.notifier)
.setOpenApiServiceEndpoint()
.then(logConnectionInfo)
.whenComplete(() => resumeSession()),
);
}
void logConnectionInfo(String? endpoint) {


@@ -1,5 +1,6 @@
import 'package:auto_route/auto_route.dart';
import 'package:flutter/material.dart';
import 'package:flutter_hooks/flutter_hooks.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/extensions/translate_extensions.dart';
@@ -38,14 +39,14 @@ class DriftPlacePage extends StatelessWidget {
}
}
class _PlaceSliverAppBar extends StatelessWidget {
class _PlaceSliverAppBar extends HookWidget {
const _PlaceSliverAppBar({required this.search});
final ValueNotifier<String?> search;
@override
Widget build(BuildContext context) {
final searchFocusNode = FocusNode();
final searchFocusNode = useFocusNode();
return SliverAppBar(
floating: true,


@@ -19,6 +19,7 @@ import 'package:immich_mobile/providers/infrastructure/current_album.provider.da
import 'package:immich_mobile/providers/timeline/multiselect.provider.dart';
import 'package:immich_mobile/providers/user.provider.dart';
import 'package:immich_mobile/routing/router.dart';
import 'package:immich_mobile/utils/album_filter.utils.dart';
import 'package:immich_mobile/widgets/common/confirm_dialog.dart';
import 'package:immich_mobile/widgets/common/immich_toast.dart';
import 'package:immich_mobile/widgets/common/search_field.dart';
@@ -39,8 +40,12 @@ class AlbumSelector extends ConsumerStatefulWidget {
class _AlbumSelectorState extends ConsumerState<AlbumSelector> {
bool isGrid = false;
final searchController = TextEditingController();
QuickFilterMode filterMode = QuickFilterMode.all;
final searchFocusNode = FocusNode();
List<RemoteAlbum> sortedAlbums = [];
List<RemoteAlbum> shownAlbums = [];
AlbumFilter filter = AlbumFilter(query: "", mode: QuickFilterMode.all);
AlbumSort sort = AlbumSort(mode: RemoteAlbumSortMode.lastModified, isReverse: true);
@override
void initState() {
@@ -52,7 +57,7 @@ class _AlbumSelectorState extends ConsumerState<AlbumSelector> {
});
searchController.addListener(() {
onSearch(searchController.text, filterMode);
onSearch(searchController.text, filter.mode);
});
searchFocusNode.addListener(() {
@@ -62,9 +67,11 @@ class _AlbumSelectorState extends ConsumerState<AlbumSelector> {
});
}
void onSearch(String searchTerm, QuickFilterMode sortMode) {
void onSearch(String searchTerm, QuickFilterMode filterMode) {
final userId = ref.watch(currentUserProvider)?.id;
ref.read(remoteAlbumProvider.notifier).searchAlbums(searchTerm, userId, sortMode);
filter = filter.copyWith(query: searchTerm, userId: userId, mode: filterMode);
filterAlbums();
}
Future<void> onRefresh() async {
@@ -77,17 +84,60 @@ class _AlbumSelectorState extends ConsumerState<AlbumSelector> {
});
}
void changeFilter(QuickFilterMode sortMode) {
void changeFilter(QuickFilterMode mode) {
setState(() {
filterMode = sortMode;
filter = filter.copyWith(mode: mode);
});
filterAlbums();
}
Future<void> changeSort(AlbumSort sort) async {
setState(() {
this.sort = sort;
});
await sortAlbums();
}
void clearSearch() {
setState(() {
filterMode = QuickFilterMode.all;
filter = filter.copyWith(mode: QuickFilterMode.all, query: null);
searchController.clear();
ref.read(remoteAlbumProvider.notifier).clearSearch();
});
filterAlbums();
}
Future<void> sortAlbums() async {
final sorted = await ref
.read(remoteAlbumProvider.notifier)
.sortAlbums(ref.read(remoteAlbumProvider).albums, sort.mode, isReverse: sort.isReverse);
setState(() {
sortedAlbums = sorted;
});
// we need to re-filter the albums after sorting
// so shownAlbums gets updated
filterAlbums();
}
Future<void> filterAlbums() async {
if (filter.query == null) {
setState(() {
shownAlbums = sortedAlbums;
});
return;
}
final filteredAlbums = ref
.read(remoteAlbumProvider.notifier)
.searchAlbums(sortedAlbums, filter.query!, filter.userId, filter.mode);
setState(() {
shownAlbums = filteredAlbums;
});
}
@@ -100,36 +150,41 @@ class _AlbumSelectorState extends ConsumerState<AlbumSelector> {
@override
Widget build(BuildContext context) {
final albums = ref.watch(remoteAlbumProvider.select((s) => s.filteredAlbums));
final userId = ref.watch(currentUserProvider)?.id;
// refilter and sort when albums change
ref.listen(remoteAlbumProvider.select((state) => state.albums), (_, _) async {
await sortAlbums();
});
return MultiSliver(
children: [
_SearchBar(
searchController: searchController,
searchFocusNode: searchFocusNode,
onSearch: onSearch,
filterMode: filterMode,
filterMode: filter.mode,
onClearSearch: clearSearch,
),
_QuickFilterButtonRow(
filterMode: filterMode,
filterMode: filter.mode,
onChangeFilter: changeFilter,
onSearch: onSearch,
searchController: searchController,
),
_QuickSortAndViewMode(isGrid: isGrid, onToggleViewMode: toggleViewMode),
_QuickSortAndViewMode(isGrid: isGrid, onToggleViewMode: toggleViewMode, onSortChanged: changeSort),
isGrid
? _AlbumGrid(albums: albums, userId: userId, onAlbumSelected: widget.onAlbumSelected)
: _AlbumList(albums: albums, userId: userId, onAlbumSelected: widget.onAlbumSelected),
? _AlbumGrid(albums: shownAlbums, userId: userId, onAlbumSelected: widget.onAlbumSelected)
: _AlbumList(albums: shownAlbums, userId: userId, onAlbumSelected: widget.onAlbumSelected),
],
);
}
}
class _SortButton extends ConsumerStatefulWidget {
const _SortButton();
const _SortButton(this.onSortChanged);
final Future<void> Function(AlbumSort) onSortChanged;
@override
ConsumerState<_SortButton> createState() => _SortButtonState();
@@ -148,15 +203,15 @@ class _SortButtonState extends ConsumerState<_SortButton> {
albumSortIsReverse = !albumSortIsReverse;
isSorting = true;
});
await ref.read(remoteAlbumProvider.notifier).sortFilteredAlbums(sortMode, isReverse: albumSortIsReverse);
} else {
setState(() {
albumSortOption = sortMode;
isSorting = true;
});
await ref.read(remoteAlbumProvider.notifier).sortFilteredAlbums(sortMode, isReverse: albumSortIsReverse);
}
await widget.onSortChanged.call(AlbumSort(mode: albumSortOption, isReverse: albumSortIsReverse));
setState(() {
isSorting = false;
});
@@ -394,10 +449,11 @@ class _QuickFilterButton extends StatelessWidget {
}
class _QuickSortAndViewMode extends StatelessWidget {
const _QuickSortAndViewMode({required this.isGrid, required this.onToggleViewMode});
const _QuickSortAndViewMode({required this.isGrid, required this.onToggleViewMode, required this.onSortChanged});
final bool isGrid;
final VoidCallback onToggleViewMode;
final Future<void> Function(AlbumSort) onSortChanged;
@override
Widget build(BuildContext context) {
@@ -407,7 +463,7 @@ class _QuickSortAndViewMode extends StatelessWidget {
child: Row(
mainAxisAlignment: MainAxisAlignment.spaceBetween,
children: [
const _SortButton(),
_SortButton(onSortChanged),
IconButton(
icon: Icon(isGrid ? Icons.view_list_outlined : Icons.grid_view_outlined, size: 24),
onPressed: onToggleViewMode,

View File

@@ -3,6 +3,7 @@ import 'dart:async';
import 'package:auto_route/auto_route.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/domain/models/timeline.model.dart';
@@ -129,6 +130,7 @@ class _AssetViewerState extends ConsumerState<AssetViewer> {
reloadSubscription?.cancel();
_prevPreCacheStream?.removeListener(_dummyListener);
_nextPreCacheStream?.removeListener(_dummyListener);
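// Restore the system bars on exit so immersive mode cannot leak out of the viewer.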
SystemChrome.setEnabledSystemUIMode(SystemUiMode.edgeToEdge);
super.dispose();
}
@@ -596,6 +598,7 @@ class _AssetViewerState extends ConsumerState<AssetViewer> {
// Rebuild the widget when the asset viewer state changes
// Using multiple selectors to avoid unnecessary rebuilds for other state changes
ref.watch(assetViewerProvider.select((s) => s.showingBottomSheet));
ref.watch(assetViewerProvider.select((s) => s.showingControls));
ref.watch(assetViewerProvider.select((s) => s.backgroundOpacity));
ref.watch(assetViewerProvider.select((s) => s.stackIndex));
ref.watch(isPlayingMotionVideoProvider);
@@ -612,6 +615,15 @@ class _AssetViewerState extends ConsumerState<AssetViewer> {
});
});
// Listen for control visibility changes and change system UI mode accordingly
ref.listen(assetViewerProvider.select((value) => value.showingControls), (_, showingControls) async {
if (showingControls) {
SystemChrome.setEnabledSystemUIMode(SystemUiMode.edgeToEdge);
} else {
SystemChrome.setEnabledSystemUIMode(SystemUiMode.immersiveSticky);
}
});
// Currently it is not possible to scroll the asset when the bottom sheet is open all the way.
// Issue: https://github.com/flutter/flutter/issues/109037
// TODO: Add a custom scroll builder once the fix lands on stable

View File

@@ -62,7 +62,7 @@ class ViewerBottomBar extends ConsumerWidget {
duration: Durations.short2,
child: AnimatedSwitcher(
duration: Durations.short4,
child: isSheetOpen || isReadonlyModeEnabled
child: isSheetOpen
? const SizedBox.shrink()
: Theme(
data: context.themeData.copyWith(
@@ -72,14 +72,14 @@ class ViewerBottomBar extends ConsumerWidget {
),
),
child: Container(
height: context.padding.bottom + (asset.isVideo ? 160 : 90),
color: Colors.black.withAlpha(125),
padding: EdgeInsets.only(bottom: context.padding.bottom),
padding: EdgeInsets.only(bottom: context.padding.bottom, top: 16),
child: Column(
mainAxisAlignment: MainAxisAlignment.end,
children: [
if (asset.isVideo) const VideoControls(),
if (!isInLockedView) Row(mainAxisAlignment: MainAxisAlignment.spaceEvenly, children: actions),
if (!isInLockedView && !isReadonlyModeEnabled)
Row(mainAxisAlignment: MainAxisAlignment.spaceEvenly, children: actions),
],
),
),

View File

@@ -1,7 +1,9 @@
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/extensions/translate_extensions.dart';
import 'package:immich_mobile/constants/enums.dart';
import 'package:immich_mobile/domain/models/album/album.model.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/presentation/widgets/action_buttons/archive_action_button.widget.dart';
import 'package:immich_mobile/presentation/widgets/action_buttons/delete_permanent_action_button.widget.dart';
import 'package:immich_mobile/presentation/widgets/action_buttons/delete_local_action_button.widget.dart';
@@ -16,22 +18,74 @@ import 'package:immich_mobile/presentation/widgets/action_buttons/share_link_act
import 'package:immich_mobile/presentation/widgets/action_buttons/stack_action_button.widget.dart';
import 'package:immich_mobile/presentation/widgets/action_buttons/trash_action_button.widget.dart';
import 'package:immich_mobile/presentation/widgets/action_buttons/upload_action_button.widget.dart';
import 'package:immich_mobile/presentation/widgets/album/album_selector.widget.dart';
import 'package:immich_mobile/presentation/widgets/bottom_sheet/base_bottom_sheet.widget.dart';
import 'package:immich_mobile/providers/infrastructure/album.provider.dart';
import 'package:immich_mobile/providers/server_info.provider.dart';
import 'package:immich_mobile/providers/timeline/multiselect.provider.dart';
import 'package:immich_mobile/widgets/common/immich_toast.dart';
class RemoteAlbumBottomSheet extends ConsumerWidget {
class RemoteAlbumBottomSheet extends ConsumerStatefulWidget {
final RemoteAlbum album;
const RemoteAlbumBottomSheet({super.key, required this.album});
@override
Widget build(BuildContext context, WidgetRef ref) {
ConsumerState<RemoteAlbumBottomSheet> createState() => _RemoteAlbumBottomSheetState();
}
class _RemoteAlbumBottomSheetState extends ConsumerState<RemoteAlbumBottomSheet> {
late DraggableScrollableController sheetController;
@override
void initState() {
super.initState();
sheetController = DraggableScrollableController();
}
@override
void dispose() {
sheetController.dispose();
super.dispose();
}
@override
Widget build(BuildContext context) {
final multiselect = ref.watch(multiSelectProvider);
final isTrashEnable = ref.watch(serverInfoProvider.select((state) => state.serverFeatures.trash));
Future<void> addAssetsToAlbum(RemoteAlbum album) async {
final selectedAssets = multiselect.selectedAssets;
if (selectedAssets.isEmpty) {
return;
}
final addedCount = await ref
.read(remoteAlbumProvider.notifier)
.addAssets(album.id, selectedAssets.map((e) => (e as RemoteAsset).id).toList());
if (addedCount != selectedAssets.length) {
ImmichToast.show(
context: context,
msg: 'add_to_album_bottom_sheet_already_exists'.t(context: context, args: {"album": album.name}),
);
} else {
ImmichToast.show(
context: context,
msg: 'add_to_album_bottom_sheet_added'.t(context: context, args: {"album": album.name}),
);
}
ref.read(multiSelectProvider.notifier).reset();
}
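// Grow the sheet toward its maximum extent (0.85, matching maxChildSize below) so the album search field stays visible above the keyboard.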
Future<void> onKeyboardExpand() {
return sheetController.animateTo(0.85, duration: const Duration(milliseconds: 200), curve: Curves.easeInOut);
}
return BaseBottomSheet(
initialChildSize: 0.25,
maxChildSize: 0.4,
controller: sheetController,
initialChildSize: 0.45,
maxChildSize: 0.85,
shouldCloseOnMinExtent: false,
actions: [
const ShareActionButton(source: ActionSource.timeline),
@@ -52,7 +106,11 @@ class RemoteAlbumBottomSheet extends ConsumerWidget {
const DeleteLocalActionButton(source: ActionSource.timeline),
const UploadActionButton(source: ActionSource.timeline),
],
RemoveFromAlbumActionButton(source: ActionSource.timeline, albumId: album.id),
RemoveFromAlbumActionButton(source: ActionSource.timeline, albumId: widget.album.id),
],
slivers: [
const AddToAlbumHeader(),
AlbumSelector(onAlbumSelected: addAssetsToAlbum, onKeyboardExpanded: onKeyboardExpand),
],
);
}

View File

@@ -10,6 +10,7 @@ import 'package:immich_mobile/presentation/widgets/timeline/constants.dart';
import 'package:immich_mobile/presentation/widgets/timeline/segment.model.dart';
import 'package:immich_mobile/presentation/widgets/timeline/timeline.state.dart';
import 'package:intl/intl.dart' hide TextDirection;
import 'package:immich_mobile/providers/haptic_feedback.provider.dart';
/// A widget that will display a BoxScrollView with a ScrollThumb that can be dragged
/// for quick navigation of the BoxScrollView.
@@ -74,6 +75,7 @@ List<_Segment> _buildSegments({required List<Segment> layoutSegments, required d
}
class ScrubberState extends ConsumerState<Scrubber> with TickerProviderStateMixin {
String? _lastLabel;
double _thumbTopOffset = 0.0;
bool _isDragging = false;
List<_Segment> _segments = [];
@@ -172,6 +174,7 @@ class ScrubberState extends ConsumerState<Scrubber> with TickerProviderStateMixi
_isDragging = true;
_labelAnimationController.forward();
_fadeOutTimer?.cancel();
_lastLabel = null;
});
}
@@ -189,6 +192,11 @@ class ScrubberState extends ConsumerState<Scrubber> with TickerProviderStateMixi
if (nearestMonthSegment != null) {
_snapToSegment(nearestMonthSegment);
final label = nearestMonthSegment.scrollLabel;
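// Fire the haptic only when the thumb crosses into a new month label; repeating it for the same label would buzz continuously while dragging.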
if (_lastLabel != label) {
ref.read(hapticFeedbackProvider.notifier).selectionClick();
_lastLabel = label;
}
}
}

View File

@@ -3,6 +3,7 @@ import 'dart:async';
import 'package:flutter/foundation.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/services/log.service.dart';
import 'package:immich_mobile/domain/utils/isolate_lock_manager.dart';
import 'package:immich_mobile/entities/store.entity.dart';
import 'package:immich_mobile/models/backup/backup_state.model.dart';
import 'package:immich_mobile/providers/album/album.provider.dart';
@@ -81,6 +82,12 @@ class AppLifeCycleNotifier extends StateNotifier<AppLifeCycleEnum> {
}
} else {
_ref.read(backupProvider.notifier).cancelBackup();
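// Hand the shared isolate lock over to the foreground: ask the current holder to shut down, then block until the lock is acquired.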
final lockManager = _ref.read(isolateLockManagerProvider(kIsolateLockManagerPort));
lockManager.requestHolderToClose();
debugPrint("Requested lock holder to close on resume");
await lockManager.acquireLock();
debugPrint("Lock acquired for background sync on resume");
final backgroundManager = _ref.read(backgroundSyncProvider);
// Ensure proper cleanup before starting new background tasks
@@ -130,7 +137,7 @@ class AppLifeCycleNotifier extends StateNotifier<AppLifeCycleEnum> {
// do not stop/clean up anything on inactivity: issued on every orientation change
}
void handleAppPause() {
Future<void> handleAppPause() async {
state = AppLifeCycleEnum.paused;
_wasPaused = true;
@@ -140,6 +147,12 @@ class AppLifeCycleNotifier extends StateNotifier<AppLifeCycleEnum> {
if (_ref.read(backupProvider.notifier).backupProgress != BackUpProgressEnum.manualInProgress) {
_ref.read(backupProvider.notifier).cancelBackup();
}
} else {
final backgroundManager = _ref.read(backgroundSyncProvider);
await backgroundManager.cancel();
await backgroundManager.cancelLocal();
_ref.read(isolateLockManagerProvider(kIsolateLockManagerPort)).releaseLock();
debugPrint("Lock released on app pause");
}
_ref.read(websocketProvider.notifier).disconnect();
@@ -173,6 +186,7 @@ class AppLifeCycleNotifier extends StateNotifier<AppLifeCycleEnum> {
}
if (Store.isBetaTimelineEnabled) {
_ref.read(isolateLockManagerProvider(kIsolateLockManagerPort)).releaseLock();
return;
}

View File

@@ -1,5 +1,6 @@
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/utils/background_sync.dart';
import 'package:immich_mobile/domain/utils/isolate_lock_manager.dart';
import 'package:immich_mobile/providers/sync_status.provider.dart';
final backgroundSyncProvider = Provider<BackgroundSyncManager>((ref) {
@@ -18,3 +19,7 @@ final backgroundSyncProvider = Provider<BackgroundSyncManager>((ref) {
ref.onDispose(manager.cancel);
return manager;
});
final isolateLockManagerProvider = Provider.family<IsolateLockManager, String>((ref, name) {
return IsolateLockManager(portName: name);
});
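// A minimal usage sketch, assuming only the API visible in this diff
// (acquireLock() returning a Future, plus releaseLock() and
// requestHolderToClose()); hypothetical helper, not part of this commit:
Future<void> runWithLock(IsolateLockManager lock, Future<void> Function() task) async {
  lock.requestHolderToClose(); // ask the current holder (e.g. a background isolate) to let go
  await lock.acquireLock(); // wait until this isolate owns the lock
  try {
    await task();
  } finally {
    lock.releaseLock(); // always release so the other isolate can resume
  }
}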

View File

@@ -12,43 +12,42 @@ import 'album.provider.dart';
class RemoteAlbumState {
final List<RemoteAlbum> albums;
final List<RemoteAlbum> filteredAlbums;
const RemoteAlbumState({required this.albums, List<RemoteAlbum>? filteredAlbums})
: filteredAlbums = filteredAlbums ?? albums;
const RemoteAlbumState({required this.albums});
RemoteAlbumState copyWith({List<RemoteAlbum>? albums, List<RemoteAlbum>? filteredAlbums}) {
return RemoteAlbumState(albums: albums ?? this.albums, filteredAlbums: filteredAlbums ?? this.filteredAlbums);
RemoteAlbumState copyWith({List<RemoteAlbum>? albums}) {
return RemoteAlbumState(albums: albums ?? this.albums);
}
@override
String toString() => 'RemoteAlbumState(albums: ${albums.length}, filteredAlbums: ${filteredAlbums.length})';
String toString() => 'RemoteAlbumState(albums: ${albums.length})';
@override
bool operator ==(covariant RemoteAlbumState other) {
if (identical(this, other)) return true;
final listEquals = const DeepCollectionEquality().equals;
return listEquals(other.albums, albums) && listEquals(other.filteredAlbums, filteredAlbums);
return listEquals(other.albums, albums);
}
@override
int get hashCode => albums.hashCode ^ filteredAlbums.hashCode;
int get hashCode => albums.hashCode;
}
class RemoteAlbumNotifier extends Notifier<RemoteAlbumState> {
late RemoteAlbumService _remoteAlbumService;
final _logger = Logger('RemoteAlbumNotifier');
@override
RemoteAlbumState build() {
_remoteAlbumService = ref.read(remoteAlbumServiceProvider);
return const RemoteAlbumState(albums: [], filteredAlbums: []);
return const RemoteAlbumState(albums: []);
}
Future<List<RemoteAlbum>> _getAll() async {
try {
final albums = await _remoteAlbumService.getAll();
state = state.copyWith(albums: albums, filteredAlbums: albums);
state = state.copyWith(albums: albums);
return albums;
} catch (error, stack) {
_logger.severe('Failed to fetch albums', error, stack);
@@ -60,19 +59,21 @@ class RemoteAlbumNotifier extends Notifier<RemoteAlbumState> {
await _getAll();
}
void searchAlbums(String query, String? userId, [QuickFilterMode filterMode = QuickFilterMode.all]) {
final filtered = _remoteAlbumService.searchAlbums(state.albums, query, userId, filterMode);
state = state.copyWith(filteredAlbums: filtered);
List<RemoteAlbum> searchAlbums(
List<RemoteAlbum> albums,
String query,
String? userId, [
QuickFilterMode filterMode = QuickFilterMode.all,
]) {
return _remoteAlbumService.searchAlbums(albums, query, userId, filterMode);
}
void clearSearch() {
state = state.copyWith(filteredAlbums: state.albums);
}
Future<void> sortFilteredAlbums(RemoteAlbumSortMode sortMode, {bool isReverse = false}) async {
final sortedAlbums = await _remoteAlbumService.sortAlbums(state.filteredAlbums, sortMode, isReverse: isReverse);
state = state.copyWith(filteredAlbums: sortedAlbums);
Future<List<RemoteAlbum>> sortAlbums(
List<RemoteAlbum> albums,
RemoteAlbumSortMode sortMode, {
bool isReverse = false,
}) async {
return await _remoteAlbumService.sortAlbums(albums, sortMode, isReverse: isReverse);
}
Future<RemoteAlbum?> createAlbum({
@@ -83,7 +84,7 @@ class RemoteAlbumNotifier extends Notifier<RemoteAlbumState> {
try {
final album = await _remoteAlbumService.createAlbum(title: title, description: description, assetIds: assetIds);
state = state.copyWith(albums: [...state.albums, album], filteredAlbums: [...state.filteredAlbums, album]);
state = state.copyWith(albums: [...state.albums, album]);
return album;
} catch (error, stack) {
@@ -114,11 +115,7 @@ class RemoteAlbumNotifier extends Notifier<RemoteAlbumState> {
return album.id == albumId ? updatedAlbum : album;
}).toList();
final updatedFilteredAlbums = state.filteredAlbums.map((album) {
return album.id == albumId ? updatedAlbum : album;
}).toList();
state = state.copyWith(albums: updatedAlbums, filteredAlbums: updatedFilteredAlbums);
state = state.copyWith(albums: updatedAlbums);
return updatedAlbum;
} catch (error, stack) {
@@ -139,9 +136,7 @@ class RemoteAlbumNotifier extends Notifier<RemoteAlbumState> {
await _remoteAlbumService.deleteAlbum(albumId);
final updatedAlbums = state.albums.where((album) => album.id != albumId).toList();
final updatedFilteredAlbums = state.filteredAlbums.where((album) => album.id != albumId).toList();
state = state.copyWith(albums: updatedAlbums, filteredAlbums: updatedFilteredAlbums);
state = state.copyWith(albums: updatedAlbums);
}
Future<List<RemoteAsset>> getAssets(String albumId) {
@@ -164,9 +159,7 @@ class RemoteAlbumNotifier extends Notifier<RemoteAlbumState> {
await _remoteAlbumService.removeUser(albumId, userId: userId);
final updatedAlbums = state.albums.where((album) => album.id != albumId).toList();
final updatedFilteredAlbums = state.filteredAlbums.where((album) => album.id != albumId).toList();
state = state.copyWith(albums: updatedAlbums, filteredAlbums: updatedFilteredAlbums);
state = state.copyWith(albums: updatedAlbums);
}
Future<void> setActivityStatus(String albumId, bool enabled) {

View File

@@ -0,0 +1,25 @@
import 'package:immich_mobile/domain/services/remote_album.service.dart';
import 'package:immich_mobile/models/albums/album_search.model.dart';
class AlbumFilter {
String? userId;
String? query;
QuickFilterMode mode;
AlbumFilter({required this.mode, this.userId, this.query});
AlbumFilter copyWith({String? userId, String? query, QuickFilterMode? mode}) {
return AlbumFilter(userId: userId ?? this.userId, query: query ?? this.query, mode: mode ?? this.mode);
}
}
class AlbumSort {
RemoteAlbumSortMode mode;
bool isReverse;
AlbumSort({required this.mode, this.isReverse = false});
AlbumSort copyWith({RemoteAlbumSortMode? mode, bool? isReverse}) {
return AlbumSort(mode: mode ?? this.mode, isReverse: isReverse ?? this.isReverse);
}
}
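// Usage note: copyWith keeps any field that is not passed, which is what lets
// the selector retain its filter and sort across a pull-to-refresh. Because it
// uses `query ?? this.query`, passing query: null cannot clear an existing
// query; a reset needs a fresh instance, e.g. (hypothetical helper):
AlbumFilter resetFilter(AlbumFilter current) =>
    AlbumFilter(mode: QuickFilterMode.all, userId: current.userId); // query starts out null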

View File

@@ -8,12 +8,14 @@ import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/providers/album/album.provider.dart';
import 'package:immich_mobile/providers/routes.provider.dart';
import 'package:immich_mobile/widgets/album/add_to_album_sliverlist.dart';
import 'package:immich_mobile/widgets/album/add_to_album_bottom_sheet.dart';
import 'package:immich_mobile/models/asset_selection_state.dart';
import 'package:immich_mobile/widgets/asset_grid/delete_dialog.dart';
import 'package:immich_mobile/widgets/asset_grid/upload_dialog.dart';
import 'package:immich_mobile/providers/server_info.provider.dart';
import 'package:immich_mobile/widgets/common/drag_sheet.dart';
import 'package:immich_mobile/entities/album.entity.dart';
import 'package:immich_mobile/entities/asset.entity.dart';
import 'package:immich_mobile/utils/draggable_scroll_controller.dart';
final controlBottomAppBarNotifier = ControlBottomAppBarNotifier();
@@ -45,6 +47,7 @@ class ControlBottomAppBar extends HookConsumerWidget {
final bool unfavorite;
final bool unarchive;
final AssetSelectionState selectionAssetState;
final List<Asset> selectedAssets;
const ControlBottomAppBar({
super.key,
@@ -64,6 +67,7 @@ class ControlBottomAppBar extends HookConsumerWidget {
this.onRemoveFromAlbum,
this.onToggleLocked,
this.selectionAssetState = const AssetSelectionState(),
this.selectedAssets = const [],
this.enabled = true,
this.unarchive = false,
this.unfavorite = false,
@@ -100,6 +104,18 @@ class ControlBottomAppBar extends HookConsumerWidget {
);
}
/// Show existing AddToAlbumBottomSheet
void showAddToAlbumBottomSheet() {
showModalBottomSheet(
elevation: 0,
shape: const RoundedRectangleBorder(borderRadius: BorderRadius.all(Radius.circular(15.0))),
context: context,
builder: (BuildContext _) {
return AddToAlbumBottomSheet(assets: selectedAssets);
},
);
}
void handleRemoteDelete(bool force, Function(bool) deleteCb, {String? alertMsg}) {
if (!force) {
deleteCb(force);
@@ -121,6 +137,15 @@ class ControlBottomAppBar extends HookConsumerWidget {
label: "share_link".tr(),
onPressed: enabled ? () => onShare(false) : null,
),
if (!isInLockedView && hasRemote && albums.isNotEmpty)
ConstrainedBox(
constraints: const BoxConstraints(maxWidth: 100),
child: ControlBoxButton(
iconData: Icons.photo_album,
label: "add_to_album".tr(),
onPressed: enabled ? showAddToAlbumBottomSheet : null,
),
),
if (hasRemote && onArchive != null)
ControlBoxButton(
iconData: unarchive ? Icons.unarchive_outlined : Icons.archive_outlined,

View File

@@ -440,6 +440,7 @@ class MultiselectGrid extends HookConsumerWidget {
onUpload: onUpload,
enabled: !processing.value,
selectionAssetState: selectionAssetState.value,
selectedAssets: selection.value.toList(),
onStack: stackEnabled ? onStack : null,
onEditTime: editEnabled ? onEditTime : null,
onEditLocation: editEnabled ? onEditLocation : null,

View File

@@ -1,7 +1,8 @@
import 'package:auto_route/auto_route.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
import 'package:flutter_hooks/flutter_hooks.dart';
import 'package:flutter_hooks/flutter_hooks.dart' hide Store;
import 'package:immich_mobile/entities/store.entity.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/models/backup/backup_state.model.dart';
@@ -259,7 +260,7 @@ class ImmichAppBarDialog extends HookConsumerWidget {
const AppBarProfileInfoBox(),
buildStorageInformation(),
const AppBarServerInfo(),
if (isReadonlyModeEnabled) buildReadonlyMessage(),
if (Store.isBetaTimelineEnabled && isReadonlyModeEnabled) buildReadonlyMessage(),
buildAppLogButton(),
buildSettingButton(),
buildSignOutButton(),

View File

@@ -121,7 +121,6 @@ class PhotoViewCore extends StatefulWidget {
class PhotoViewCoreState extends State<PhotoViewCore>
with TickerProviderStateMixin, PhotoViewControllerDelegate, HitCornersDetector {
Offset? _normalizedPosition;
double? _scaleBefore;
double? _rotationBefore;
@@ -154,7 +153,6 @@ class PhotoViewCoreState extends State<PhotoViewCore>
void onScaleStart(ScaleStartDetails details) {
_rotationBefore = controller.rotation;
_scaleBefore = scale;
_normalizedPosition = details.focalPoint - controller.position;
_scaleAnimationController.stop();
_positionAnimationController.stop();
_rotationAnimationController.stop();
@@ -166,8 +164,14 @@ class PhotoViewCoreState extends State<PhotoViewCore>
};
void onScaleUpdate(ScaleUpdateDetails details) {
final centeredFocalPoint = Offset(
details.focalPoint.dx - scaleBoundaries.outerSize.width / 2,
details.focalPoint.dy - scaleBoundaries.outerSize.height / 2,
);
final double newScale = _scaleBefore! * details.scale;
Offset delta = details.focalPoint - _normalizedPosition!;
final double scaleDelta = newScale / scale;
final Offset newPosition =
(controller.position + details.focalPointDelta) * scaleDelta - centeredFocalPoint * (scaleDelta - 1);
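// Zoom about the focal point: a position p maps to p * scaleDelta - f * (scaleDelta - 1), which keeps the point f under the fingers fixed; focalPointDelta contributes the pan component.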
updateScaleStateFromNewScale(newScale);
@@ -176,7 +180,7 @@ class PhotoViewCoreState extends State<PhotoViewCore>
updateMultiple(
scale: newScale,
position: panEnabled ? delta : clampPosition(position: delta * details.scale),
position: panEnabled ? newPosition : clampPosition(position: newPosition),
rotation: rotationEnabled ? _rotationBefore! + details.rotation : null,
rotationFocusPoint: rotationEnabled ? details.focalPoint : null,
);

View File

@@ -31,7 +31,8 @@ class SmartSearchDto {
this.model,
this.page,
this.personIds = const [],
required this.query,
this.query,
this.queryAssetId,
this.rating,
this.size,
this.state,
@@ -151,7 +152,21 @@ class SmartSearchDto {
List<String> personIds;
String query;
///
/// Please note: This property should have been non-nullable! Since the specification file
/// does not include a default value (using the "default:" property), however, the generated
/// source code must fall back to having a nullable type.
/// Consider adding a "default:" property in the specification file to hide this note.
///
String? query;
///
/// Please note: This property should have been non-nullable! Since the specification file
/// does not include a default value (using the "default:" property), however, the generated
/// source code must fall back to having a nullable type.
/// Consider adding a "default:" property in the specification file to hide this note.
///
String? queryAssetId;
/// Minimum value: -1
/// Maximum value: 5
@@ -278,6 +293,7 @@ class SmartSearchDto {
other.page == page &&
_deepEquality.equals(other.personIds, personIds) &&
other.query == query &&
other.queryAssetId == queryAssetId &&
other.rating == rating &&
other.size == size &&
other.state == state &&
@@ -314,7 +330,8 @@ class SmartSearchDto {
(model == null ? 0 : model!.hashCode) +
(page == null ? 0 : page!.hashCode) +
(personIds.hashCode) +
(query.hashCode) +
(query == null ? 0 : query!.hashCode) +
(queryAssetId == null ? 0 : queryAssetId!.hashCode) +
(rating == null ? 0 : rating!.hashCode) +
(size == null ? 0 : size!.hashCode) +
(state == null ? 0 : state!.hashCode) +
@@ -331,7 +348,7 @@ class SmartSearchDto {
(withExif == null ? 0 : withExif!.hashCode);
@override
String toString() => 'SmartSearchDto[albumIds=$albumIds, city=$city, country=$country, createdAfter=$createdAfter, createdBefore=$createdBefore, deviceId=$deviceId, isEncoded=$isEncoded, isFavorite=$isFavorite, isMotion=$isMotion, isNotInAlbum=$isNotInAlbum, isOffline=$isOffline, language=$language, lensModel=$lensModel, libraryId=$libraryId, make=$make, model=$model, page=$page, personIds=$personIds, query=$query, rating=$rating, size=$size, state=$state, tagIds=$tagIds, takenAfter=$takenAfter, takenBefore=$takenBefore, trashedAfter=$trashedAfter, trashedBefore=$trashedBefore, type=$type, updatedAfter=$updatedAfter, updatedBefore=$updatedBefore, visibility=$visibility, withDeleted=$withDeleted, withExif=$withExif]';
String toString() => 'SmartSearchDto[albumIds=$albumIds, city=$city, country=$country, createdAfter=$createdAfter, createdBefore=$createdBefore, deviceId=$deviceId, isEncoded=$isEncoded, isFavorite=$isFavorite, isMotion=$isMotion, isNotInAlbum=$isNotInAlbum, isOffline=$isOffline, language=$language, lensModel=$lensModel, libraryId=$libraryId, make=$make, model=$model, page=$page, personIds=$personIds, query=$query, queryAssetId=$queryAssetId, rating=$rating, size=$size, state=$state, tagIds=$tagIds, takenAfter=$takenAfter, takenBefore=$takenBefore, trashedAfter=$trashedAfter, trashedBefore=$trashedBefore, type=$type, updatedAfter=$updatedAfter, updatedBefore=$updatedBefore, visibility=$visibility, withDeleted=$withDeleted, withExif=$withExif]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
@@ -417,7 +434,16 @@ class SmartSearchDto {
// json[r'page'] = null;
}
json[r'personIds'] = this.personIds;
if (this.query != null) {
json[r'query'] = this.query;
} else {
// json[r'query'] = null;
}
if (this.queryAssetId != null) {
json[r'queryAssetId'] = this.queryAssetId;
} else {
// json[r'queryAssetId'] = null;
}
if (this.rating != null) {
json[r'rating'] = this.rating;
} else {
@@ -522,7 +548,8 @@ class SmartSearchDto {
personIds: json[r'personIds'] is Iterable
? (json[r'personIds'] as Iterable).cast<String>().toList(growable: false)
: const [],
query: mapValueOfType<String>(json, r'query')!,
query: mapValueOfType<String>(json, r'query'),
queryAssetId: mapValueOfType<String>(json, r'queryAssetId'),
rating: num.parse('${json[r'rating']}'),
size: num.parse('${json[r'size']}'),
state: mapValueOfType<String>(json, r'state'),
@@ -586,7 +613,6 @@ class SmartSearchDto {
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'query',
};
}
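// A minimal usage sketch for "view similar photos", assuming the generated
// SearchApi exposes a searchSmart(SmartSearchDto) call (only the DTO fields
// are confirmed by this diff): send a reference asset id instead of a text query.
SmartSearchDto similarTo(String assetId) => SmartSearchDto(queryAssetId: assetId, size: 24);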

View File

@@ -69,6 +69,7 @@ class SyncEntityType {
static const userMetadataDeleteV1 = SyncEntityType._(r'UserMetadataDeleteV1');
static const syncAckV1 = SyncEntityType._(r'SyncAckV1');
static const syncResetV1 = SyncEntityType._(r'SyncResetV1');
static const syncCompleteV1 = SyncEntityType._(r'SyncCompleteV1');
/// List of all possible values in this [enum][SyncEntityType].
static const values = <SyncEntityType>[
@@ -118,6 +119,7 @@ class SyncEntityType {
userMetadataDeleteV1,
syncAckV1,
syncResetV1,
syncCompleteV1,
];
static SyncEntityType? fromJson(dynamic value) => SyncEntityTypeTypeTransformer().decode(value);
@@ -202,6 +204,7 @@ class SyncEntityTypeTypeTransformer {
case r'UserMetadataDeleteV1': return SyncEntityType.userMetadataDeleteV1;
case r'SyncAckV1': return SyncEntityType.syncAckV1;
case r'SyncResetV1': return SyncEntityType.syncResetV1;
case r'SyncCompleteV1': return SyncEntityType.syncCompleteV1;
default:
if (!allowNull) {
throw ArgumentError('Unknown enum value to decode: $data');

View File

@@ -24,6 +24,7 @@ abstract class BackgroundWorkerBgHostApi {
// required platform channels to notify the native side to start the background upload
void onInitialized();
// Called from the background flutter engine to request the native side to cleanup
void close();
}

View File

@@ -15,7 +15,7 @@ function dart {
patch --no-backup-if-mismatch -u api.mustache <api.mustache.patch
cd ../../
pnpx @openapitools/openapi-generator-cli generate -g dart -i ./immich-openapi-specs.json -o ../mobile/openapi -t ./templates/mobile
pnpm dlx @openapitools/openapi-generator-cli generate -g dart -i ./immich-openapi-specs.json -o ../mobile/openapi -t ./templates/mobile
# Post generate patches
patch --no-backup-if-mismatch -u ../mobile/openapi/lib/api_client.dart <./patch/api_client.dart.patch
@@ -27,7 +27,7 @@ function dart {
}
function typescript {
pnpx oazapfts --optimistic --argumentStyle=object --useEnumType immich-openapi-specs.json typescript-sdk/src/fetch-client.ts
pnpm dlx oazapfts --optimistic --argumentStyle=object --useEnumType immich-openapi-specs.json typescript-sdk/src/fetch-client.ts
pnpm --filter @immich/sdk install --frozen-lockfile
pnpm --filter @immich/sdk build
}
@@ -35,8 +35,8 @@ function typescript {
# requires server to be built
(
cd ..
SHARP_IGNORE_GLOBAL_LIBVIPS=true pnpm --filter immich build
pnpm --filter immich sync:open-api
SHARP_IGNORE_GLOBAL_LIBVIPS=true mise run server:build
mise run server:open-api
)
if [[ $1 == 'dart' ]]; then

View File

@@ -14571,6 +14571,10 @@
"query": {
"type": "string"
},
"queryAssetId": {
"format": "uuid",
"type": "string"
},
"rating": {
"maximum": 5,
"minimum": -1,
@@ -14638,9 +14642,6 @@
"type": "boolean"
}
},
"required": [
"query"
],
"type": "object"
},
"SourceType": {
@@ -15416,6 +15417,10 @@
],
"type": "object"
},
"SyncCompleteV1": {
"properties": {},
"type": "object"
},
"SyncEntityType": {
"enum": [
"AuthUserV1",
@@ -15463,7 +15468,8 @@
"UserMetadataV1",
"UserMetadataDeleteV1",
"SyncAckV1",
"SyncResetV1"
"SyncResetV1",
"SyncCompleteV1"
],
"type": "string"
},

View File

@@ -1 +0,0 @@
22.18.0

View File

@@ -11,9 +11,6 @@
"default": "./build/index.js"
}
},
"scripts": {
"build": "tsc"
},
"license": "GNU Affero General Public License version 3",
"dependencies": {
"@oazapfts/runtime": "^1.0.2"
@@ -26,8 +23,5 @@
"type": "git",
"url": "git+https://github.com/immich-app/immich.git",
"directory": "open-api/typescript-sdk"
},
"volta": {
"node": "22.18.0"
}
}

View File

@@ -1014,7 +1014,8 @@ export type SmartSearchDto = {
model?: string | null;
page?: number;
personIds?: string[];
query: string;
query?: string;
queryAssetId?: string;
rating?: number;
size?: number;
state?: string | null;
@@ -4921,7 +4922,8 @@ export enum SyncEntityType {
UserMetadataV1 = "UserMetadataV1",
UserMetadataDeleteV1 = "UserMetadataDeleteV1",
SyncAckV1 = "SyncAckV1",
SyncResetV1 = "SyncResetV1"
SyncResetV1 = "SyncResetV1",
SyncCompleteV1 = "SyncCompleteV1"
}
export enum SyncRequestType {
AlbumsV1 = "AlbumsV1",

View File

@@ -1 +0,0 @@
22.18.0

View File

@@ -5,34 +5,6 @@
"author": "",
"private": true,
"license": "GNU Affero General Public License version 3",
"scripts": {
"build": "nest build",
"format": "prettier --check .",
"format:fix": "prettier --write .",
"start": "npm run start:dev",
"nest": "nest",
"start:dev": "nest start --watch --",
"start:debug": "nest start --debug 0.0.0.0:9230 --watch --",
"lint": "eslint \"src/**/*.ts\" \"test/**/*.ts\" --max-warnings 0",
"lint:fix": "npm run lint -- --fix",
"check": "tsc --noEmit",
"check:code": "npm run format && npm run lint && npm run check",
"check:all": "npm run check:code && npm run test:cov",
"test": "vitest --config test/vitest.config.mjs",
"test:cov": "vitest --config test/vitest.config.mjs --coverage",
"test:medium": "vitest --config test/vitest.config.medium.mjs",
"typeorm": "typeorm",
"lifecycle": "node ./dist/utils/lifecycle.js",
"migrations:debug": "node ./dist/bin/migrations.js debug",
"migrations:generate": "node ./dist/bin/migrations.js generate",
"migrations:create": "node ./dist/bin/migrations.js create",
"migrations:run": "node ./dist/bin/migrations.js run",
"schema:drop": "node ./dist/bin/migrations.js query 'DROP schema public cascade; CREATE schema public;'",
"schema:reset": "npm run schema:drop && npm run migrations:run",
"sync:open-api": "node ./dist/bin/sync-open-api.js",
"sync:sql": "node ./dist/bin/sync-sql.js",
"email:dev": "email dev -p 3050 --dir src/emails"
},
"dependencies": {
"@nestjs/bullmq": "^11.0.1",
"@nestjs/common": "^11.0.4",
@@ -172,9 +144,6 @@
"vite-tsconfig-paths": "^5.0.0",
"vitest": "^3.0.0"
},
"volta": {
"node": "22.18.0"
},
"overrides": {
"sharp": "^0.34.2"
}

View File

@@ -128,12 +128,6 @@ describe(SearchController.name, () => {
await request(ctx.getHttpServer()).post('/search/smart');
expect(ctx.authenticate).toHaveBeenCalled();
});
it('should require a query', async () => {
const { status, body } = await request(ctx.getHttpServer()).post('/search/smart').send({});
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['query should not be empty', 'query must be a string']));
});
});
describe('GET /search/explore', () => {

View File

@@ -199,7 +199,12 @@ export class StatisticsSearchDto extends BaseSearchDto {
export class SmartSearchDto extends BaseSearchWithResultsDto {
@IsString()
@IsNotEmpty()
query!: string;
@Optional()
query?: string;
@ValidateUUID({ optional: true })
@Optional()
queryAssetId?: string;
@IsString()
@IsNotEmpty()

View File

@@ -336,6 +336,9 @@ export class SyncAckV1 {}
@ExtraModel()
export class SyncResetV1 {}
@ExtraModel()
export class SyncCompleteV1 {}
export type SyncItem = {
[SyncEntityType.AuthUserV1]: SyncAuthUserV1;
[SyncEntityType.UserV1]: SyncUserV1;
@@ -382,6 +385,7 @@ export type SyncItem = {
[SyncEntityType.UserMetadataV1]: SyncUserMetadataV1;
[SyncEntityType.UserMetadataDeleteV1]: SyncUserMetadataDeleteV1;
[SyncEntityType.SyncAckV1]: SyncAckV1;
[SyncEntityType.SyncCompleteV1]: SyncCompleteV1;
[SyncEntityType.SyncResetV1]: SyncResetV1;
};

View File

@@ -530,6 +530,7 @@ export enum JobName {
AssetGenerateThumbnails = 'AssetGenerateThumbnails',
AuditLogCleanup = 'AuditLogCleanup',
AuditTableCleanup = 'AuditTableCleanup',
DatabaseBackup = 'DatabaseBackup',
@@ -570,8 +571,7 @@ export enum JobName {
SendMail = 'SendMail',
SidecarQueueAll = 'SidecarQueueAll',
SidecarDiscovery = 'SidecarDiscovery',
SidecarSync = 'SidecarSync',
SidecarCheck = 'SidecarCheck',
SidecarWrite = 'SidecarWrite',
SmartSearchQueueAll = 'SmartSearchQueueAll',
@@ -708,6 +708,7 @@ export enum SyncEntityType {
SyncAckV1 = 'SyncAckV1',
SyncResetV1 = 'SyncResetV1',
SyncCompleteV1 = 'SyncCompleteV1',
}
export enum NotificationLevel {

View File

@@ -43,6 +43,18 @@ where
limit
$2
-- AssetJobRepository.getForSidecarCheckJob
select
"id",
"sidecarPath",
"originalPath"
from
"asset"
where
"asset"."id" = $1::uuid
limit
$2
-- AssetJobRepository.streamForThumbnailJob
select
"asset"."id",

View File

@@ -123,6 +123,14 @@ offset
$8
commit
-- SearchRepository.getEmbedding
select
*
from
"smart_search"
where
"assetId" = $1
-- SearchRepository.searchFaces
begin
set

View File

@@ -957,7 +957,7 @@ where
order by
"stack"."updateId" asc
-- SyncRepository.people.getDeletes
-- SyncRepository.person.getDeletes
select
"id",
"personId"
@@ -970,7 +970,7 @@ where
order by
"person_audit"."id" asc
-- SyncRepository.people.getUpserts
-- SyncRepository.person.getUpserts
select
"id",
"createdAt",

View File

@@ -39,10 +39,8 @@ export class AssetJobRepository {
return this.db
.selectFrom('asset')
.where('asset.id', '=', asUuid(id))
.select((eb) => [
'id',
'sidecarPath',
'originalPath',
.select(['id', 'sidecarPath', 'originalPath'])
.select((eb) =>
jsonArrayFrom(
eb
.selectFrom('tag')
@@ -50,7 +48,17 @@ export class AssetJobRepository {
.innerJoin('tag_asset', 'tag.id', 'tag_asset.tagsId')
.whereRef('asset.id', '=', 'tag_asset.assetsId'),
).as('tags'),
])
)
.limit(1)
.executeTakeFirst();
}
@GenerateSql({ params: [DummyValue.UUID] })
getForSidecarCheckJob(id: string) {
return this.db
.selectFrom('asset')
.where('asset.id', '=', asUuid(id))
.select(['id', 'sidecarPath', 'originalPath'])
.limit(1)
.executeTakeFirst();
}

View File

@@ -293,6 +293,13 @@ export class SearchRepository {
});
}
@GenerateSql({
params: [DummyValue.UUID],
})
async getEmbedding(assetId: string) {
return this.db.selectFrom('smart_search').selectAll().where('assetId', '=', assetId).executeTakeFirst();
}
@GenerateSql({
params: [
{

View File

@@ -1,5 +1,5 @@
import { Injectable } from '@nestjs/common';
import { Kysely } from 'kysely';
import { Kysely, sql } from 'kysely';
import { InjectKysely } from 'nestjs-kysely';
import { columns } from 'src/database';
import { DummyValue, GenerateSql } from 'src/decorators';
@@ -62,7 +62,7 @@ export class SyncRepository {
partnerAsset: PartnerAssetsSync;
partnerAssetExif: PartnerAssetExifsSync;
partnerStack: PartnerStackSync;
people: PersonSync;
person: PersonSync;
stack: StackSync;
user: UserSync;
userMetadata: UserMetadataSync;
@@ -84,7 +84,7 @@ export class SyncRepository {
this.partnerAsset = new PartnerAssetsSync(this.db);
this.partnerAssetExif = new PartnerAssetExifsSync(this.db);
this.partnerStack = new PartnerStackSync(this.db);
this.people = new PersonSync(this.db);
this.person = new PersonSync(this.db);
this.stack = new StackSync(this.db);
this.user = new UserSync(this.db);
this.userMetadata = new UserMetadataSync(this.db);
@@ -117,6 +117,15 @@ class BaseSync {
.orderBy(idRef, 'asc');
}
protected auditCleanup<T extends keyof DB>(t: T, days: number) {
const { table, ref } = this.db.dynamic;
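// sql.raw interpolates the interval directly; safe here only because callers pass a hard-coded day count, never user input.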
return this.db
.deleteFrom(table(t).as(t))
.where(ref(`${t}.deletedAt`), '<', sql.raw(`now() - interval '${days} days'`))
.execute();
}
protected upsertQuery<T extends keyof DB>(t: T, { nowId, ack }: SyncQueryOptions) {
const { table, ref } = this.db.dynamic;
const updateIdRef = ref(`${t}.updateId`);
@@ -150,6 +159,10 @@ class AlbumSync extends BaseSync {
.stream();
}
cleanupAuditTable(daysAgo: number) {
return this.auditCleanup('album_audit', daysAgo);
}
@GenerateSql({ params: [dummyQueryOptions], stream: true })
getUpserts(options: SyncQueryOptions) {
const userId = options.userId;
@@ -286,6 +299,10 @@ class AlbumToAssetSync extends BaseSync {
.stream();
}
cleanupAuditTable(daysAgo: number) {
return this.auditCleanup('album_asset_audit', daysAgo);
}
@GenerateSql({ params: [dummyQueryOptions], stream: true })
getUpserts(options: SyncQueryOptions) {
const userId = options.userId;
@@ -334,6 +351,10 @@ class AlbumUserSync extends BaseSync {
.stream();
}
cleanupAuditTable(daysAgo: number) {
return this.auditCleanup('album_user_audit', daysAgo);
}
@GenerateSql({ params: [dummyQueryOptions], stream: true })
getUpserts(options: SyncQueryOptions) {
const userId = options.userId;
@@ -371,6 +392,10 @@ class AssetSync extends BaseSync {
.stream();
}
cleanupAuditTable(daysAgo: number) {
return this.auditCleanup('asset_audit', daysAgo);
}
@GenerateSql({ params: [dummyQueryOptions], stream: true })
getUpserts(options: SyncQueryOptions) {
return this.upsertQuery('asset', options)
@@ -400,6 +425,10 @@ class PersonSync extends BaseSync {
.stream();
}
cleanupAuditTable(daysAgo: number) {
return this.auditCleanup('person_audit', daysAgo);
}
@GenerateSql({ params: [dummyQueryOptions], stream: true })
getUpserts(options: SyncQueryOptions) {
return this.upsertQuery('person', options)
@@ -431,6 +460,10 @@ class AssetFaceSync extends BaseSync {
.stream();
}
cleanupAuditTable(daysAgo: number) {
return this.auditCleanup('asset_face_audit', daysAgo);
}
@GenerateSql({ params: [dummyQueryOptions], stream: true })
getUpserts(options: SyncQueryOptions) {
return this.upsertQuery('asset_face', options)
@@ -473,6 +506,10 @@ class MemorySync extends BaseSync {
.stream();
}
cleanupAuditTable(daysAgo: number) {
return this.auditCleanup('memory_audit', daysAgo);
}
@GenerateSql({ params: [dummyQueryOptions], stream: true })
getUpserts(options: SyncQueryOptions) {
return this.upsertQuery('memory', options)
@@ -505,6 +542,10 @@ class MemoryToAssetSync extends BaseSync {
.stream();
}
cleanupAuditTable(daysAgo: number) {
return this.auditCleanup('memory_asset_audit', daysAgo);
}
@GenerateSql({ params: [dummyQueryOptions], stream: true })
getUpserts(options: SyncQueryOptions) {
return this.upsertQuery('memory_asset', options)
@@ -537,6 +578,10 @@ class PartnerSync extends BaseSync {
.stream();
}
cleanupAuditTable(daysAgo: number) {
return this.auditCleanup('partner_audit', daysAgo);
}
@GenerateSql({ params: [dummyQueryOptions], stream: true })
getUpserts(options: SyncQueryOptions) {
const userId = options.userId;
@@ -616,6 +661,10 @@ class StackSync extends BaseSync {
.stream();
}
cleanupAuditTable(daysAgo: number) {
return this.auditCleanup('stack_audit', daysAgo);
}
@GenerateSql({ params: [dummyQueryOptions], stream: true })
getUpserts(options: SyncQueryOptions) {
return this.upsertQuery('stack', options)
@@ -664,6 +713,10 @@ class UserSync extends BaseSync {
return this.auditQuery('user_audit', options).select(['id', 'userId']).stream();
}
cleanupAuditTable(daysAgo: number) {
return this.auditCleanup('user_audit', daysAgo);
}
@GenerateSql({ params: [dummyQueryOptions], stream: true })
getUpserts(options: SyncQueryOptions) {
return this.upsertQuery('user', options).select(columns.syncUser).stream();
@@ -679,6 +732,10 @@ class UserMetadataSync extends BaseSync {
.stream();
}
cleanupAuditTable(daysAgo: number) {
return this.auditCleanup('user_metadata_audit', daysAgo);
}
@GenerateSql({ params: [dummyQueryOptions], stream: true })
getUpserts(options: SyncQueryOptions) {
return this.upsertQuery('user_metadata', options)
@@ -698,6 +755,10 @@ class AssetMetadataSync extends BaseSync {
.stream();
}
cleanupAuditTable(daysAgo: number) {
return this.auditCleanup('asset_metadata_audit', daysAgo);
}
@GenerateSql({ params: [dummyQueryOptions, DummyValue.UUID], stream: true })
getUpserts(options: SyncQueryOptions, userId: string) {
return this.upsertQuery('asset_metadata', options)

View File

@@ -166,6 +166,7 @@ export interface DB {
api_key: ApiKeyTable;
asset: AssetTable;
asset_audit: AssetAuditTable;
asset_exif: AssetExifTable;
asset_face: AssetFaceTable;
asset_face_audit: AssetFaceAuditTable;
@@ -173,7 +174,6 @@ export interface DB {
asset_metadata: AssetMetadataTable;
asset_metadata_audit: AssetMetadataAuditTable;
asset_job_status: AssetJobStatusTable;
asset_audit: AssetAuditTable;
audit: AuditTable;

View File

@@ -1,11 +1,11 @@
import { PrimaryGeneratedUuidV7Column } from 'src/decorators';
import { MemoryTable } from 'src/schema/tables/memory.table';
import { Column, CreateDateColumn, ForeignKeyColumn, Table } from 'src/sql-tools';
import { Column, CreateDateColumn, ForeignKeyColumn, Generated, Table, Timestamp } from 'src/sql-tools';
@Table('memory_asset_audit')
export class MemoryAssetAuditTable {
@PrimaryGeneratedUuidV7Column()
id!: string;
id!: Generated<string>;
@ForeignKeyColumn(() => MemoryTable, { type: 'uuid', onDelete: 'CASCADE', onUpdate: 'CASCADE' })
memoryId!: string;
@@ -14,5 +14,5 @@ export class MemoryAssetAuditTable {
assetId!: string;
@CreateDateColumn({ default: () => 'clock_timestamp()', index: true })
deletedAt!: Date;
deletedAt!: Generated<Timestamp>;
}

View File

@@ -42,6 +42,7 @@ describe(JobService.name, () => {
{ name: JobName.PersonCleanup },
{ name: JobName.MemoryCleanup },
{ name: JobName.SessionCleanup },
{ name: JobName.AuditTableCleanup },
{ name: JobName.AuditLogCleanup },
{ name: JobName.MemoryGenerate },
{ name: JobName.UserSyncUsage },
@@ -238,11 +239,11 @@ describe(JobService.name, () => {
const tests: Array<{ item: JobItem; jobs: JobName[]; stub?: any }> = [
{
item: { name: JobName.SidecarSync, data: { id: 'asset-1' } },
item: { name: JobName.SidecarCheck, data: { id: 'asset-1' } },
jobs: [JobName.AssetExtractMetadata],
},
{
item: { name: JobName.SidecarDiscovery, data: { id: 'asset-1' } },
item: { name: JobName.SidecarCheck, data: { id: 'asset-1' } },
jobs: [JobName.AssetExtractMetadata],
},
{

View File

@@ -281,6 +281,7 @@ export class JobService extends BaseService {
{ name: JobName.PersonCleanup },
{ name: JobName.MemoryCleanup },
{ name: JobName.SessionCleanup },
{ name: JobName.AuditTableCleanup },
{ name: JobName.AuditLogCleanup },
);
}
@@ -309,8 +310,7 @@ export class JobService extends BaseService {
*/
private async onDone(item: JobItem) {
switch (item.name) {
case JobName.SidecarSync:
case JobName.SidecarDiscovery: {
case JobName.SidecarCheck: {
await this.jobRepository.queue({ name: JobName.AssetExtractMetadata, data: item.data });
break;
}

View File

@@ -527,7 +527,7 @@ describe(LibraryService.name, () => {
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.SidecarDiscovery,
name: JobName.SidecarCheck,
data: {
id: assetStub.external.id,
source: 'upload',
@@ -573,7 +573,7 @@ describe(LibraryService.name, () => {
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.SidecarDiscovery,
name: JobName.SidecarCheck,
data: {
id: assetStub.image.id,
source: 'upload',

View File

@@ -414,7 +414,7 @@ export class LibraryService extends BaseService {
// We queue a sidecar check which, in turn, queues metadata extraction
await this.jobRepository.queueAll(
assetIds.map((assetId) => ({
name: JobName.SidecarDiscovery,
name: JobName.SidecarCheck,
data: { id: assetId, source: 'upload' },
})),
);

View File

@@ -1,7 +1,6 @@
import { BinaryField, ExifDateTime } from 'exiftool-vendored';
import { randomBytes } from 'node:crypto';
import { Stats } from 'node:fs';
import { constants } from 'node:fs/promises';
import { defaults } from 'src/config';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { AssetType, AssetVisibility, ExifOrientation, ImmichWorker, JobName, JobStatus, SourceType } from 'src/enum';
@@ -15,6 +14,21 @@ import { tagStub } from 'test/fixtures/tag.stub';
import { factory } from 'test/small.factory';
import { makeStream, newTestService, ServiceMocks } from 'test/utils';
const forSidecarJob = (
asset: {
id?: string;
originalPath?: string;
sidecarPath?: string | null;
} = {},
) => {
return {
id: factory.uuid(),
originalPath: '/path/to/IMG_123.jpg',
sidecarPath: null,
...asset,
};
};
const makeFaceTags = (face: Partial<{ Name: string }> = {}, orientation?: ImmichTags['Orientation']) => ({
Orientation: orientation,
RegionInfo: {
@@ -1457,7 +1471,7 @@ describe(MetadataService.name, () => {
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.SidecarSync,
name: JobName.SidecarCheck,
data: { id: assetStub.sidecar.id },
},
]);
@@ -1471,133 +1485,65 @@ describe(MetadataService.name, () => {
expect(mocks.assetJob.streamForSidecar).toHaveBeenCalledWith(false);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.SidecarDiscovery,
name: JobName.SidecarCheck,
data: { id: assetStub.image.id },
},
]);
});
});
describe('handleSidecarSync', () => {
describe('handleSidecarCheck', () => {
it('should do nothing if asset could not be found', async () => {
mocks.asset.getByIds.mockResolvedValue([]);
await expect(sut.handleSidecarSync({ id: assetStub.image.id })).resolves.toBe(JobStatus.Failed);
mocks.assetJob.getForSidecarCheckJob.mockResolvedValue(void 0);
await expect(sut.handleSidecarCheck({ id: assetStub.image.id })).resolves.toBeUndefined();
expect(mocks.asset.update).not.toHaveBeenCalled();
});
it('should do nothing if asset has no sidecar path', async () => {
mocks.asset.getByIds.mockResolvedValue([assetStub.image]);
await expect(sut.handleSidecarSync({ id: assetStub.image.id })).resolves.toBe(JobStatus.Failed);
expect(mocks.asset.update).not.toHaveBeenCalled();
it('should detect a new sidecar at .jpg.xmp', async () => {
const asset = forSidecarJob({ originalPath: '/path/to/IMG_123.jpg' });
mocks.assetJob.getForSidecarCheckJob.mockResolvedValue(asset);
mocks.storage.checkFileExists.mockResolvedValueOnce(true);
await expect(sut.handleSidecarCheck({ id: asset.id })).resolves.toBe(JobStatus.Success);
expect(mocks.asset.update).toHaveBeenCalledWith({ id: asset.id, sidecarPath: `/path/to/IMG_123.jpg.xmp` });
});
it('should set sidecar path if exists (sidecar named photo.ext.xmp)', async () => {
mocks.asset.getByIds.mockResolvedValue([assetStub.sidecar]);
mocks.storage.checkFileExists.mockResolvedValue(true);
it('should detect a new sidecar at .xmp', async () => {
const asset = forSidecarJob({ originalPath: '/path/to/IMG_123.jpg' });
await expect(sut.handleSidecarSync({ id: assetStub.sidecar.id })).resolves.toBe(JobStatus.Success);
expect(mocks.storage.checkFileExists).toHaveBeenCalledWith(
`${assetStub.sidecar.originalPath}.xmp`,
constants.R_OK,
);
expect(mocks.asset.update).toHaveBeenCalledWith({
id: assetStub.sidecar.id,
sidecarPath: assetStub.sidecar.sidecarPath,
});
});
it('should set sidecar path if exists (sidecar named photo.xmp)', async () => {
mocks.asset.getByIds.mockResolvedValue([assetStub.sidecarWithoutExt as any]);
mocks.assetJob.getForSidecarCheckJob.mockResolvedValue(asset);
mocks.storage.checkFileExists.mockResolvedValueOnce(false);
mocks.storage.checkFileExists.mockResolvedValueOnce(true);
await expect(sut.handleSidecarSync({ id: assetStub.sidecarWithoutExt.id })).resolves.toBe(JobStatus.Success);
expect(mocks.storage.checkFileExists).toHaveBeenNthCalledWith(
2,
assetStub.sidecarWithoutExt.sidecarPath,
constants.R_OK,
);
expect(mocks.asset.update).toHaveBeenCalledWith({
id: assetStub.sidecarWithoutExt.id,
sidecarPath: assetStub.sidecarWithoutExt.sidecarPath,
});
});
await expect(sut.handleSidecarCheck({ id: asset.id })).resolves.toBe(JobStatus.Success);
it('should set sidecar path if exists (two sidecars named photo.ext.xmp and photo.xmp, should pick photo.ext.xmp)', async () => {
mocks.asset.getByIds.mockResolvedValue([assetStub.sidecar]);
mocks.storage.checkFileExists.mockResolvedValueOnce(true);
mocks.storage.checkFileExists.mockResolvedValueOnce(true);
await expect(sut.handleSidecarSync({ id: assetStub.sidecar.id })).resolves.toBe(JobStatus.Success);
expect(mocks.storage.checkFileExists).toHaveBeenNthCalledWith(1, assetStub.sidecar.sidecarPath, constants.R_OK);
expect(mocks.storage.checkFileExists).toHaveBeenNthCalledWith(
2,
assetStub.sidecarWithoutExt.sidecarPath,
constants.R_OK,
);
expect(mocks.asset.update).toHaveBeenCalledWith({
id: assetStub.sidecar.id,
sidecarPath: assetStub.sidecar.sidecarPath,
});
expect(mocks.asset.update).toHaveBeenCalledWith({ id: asset.id, sidecarPath: '/path/to/IMG_123.xmp' });
});
it('should unset sidecar path if file does not exist anymore', async () => {
mocks.asset.getByIds.mockResolvedValue([assetStub.sidecar]);
const asset = forSidecarJob({ originalPath: '/path/to/IMG_123.jpg', sidecarPath: '/path/to/IMG_123.jpg.xmp' });
mocks.assetJob.getForSidecarCheckJob.mockResolvedValue(asset);
mocks.storage.checkFileExists.mockResolvedValue(false);
await expect(sut.handleSidecarSync({ id: assetStub.sidecar.id })).resolves.toBe(JobStatus.Success);
expect(mocks.storage.checkFileExists).toHaveBeenCalledWith(
`${assetStub.sidecar.originalPath}.xmp`,
constants.R_OK,
);
expect(mocks.asset.update).toHaveBeenCalledWith({
id: assetStub.sidecar.id,
sidecarPath: null,
});
});
});
await expect(sut.handleSidecarCheck({ id: asset.id })).resolves.toBe(JobStatus.Success);
describe('handleSidecarDiscovery', () => {
it('should skip hidden assets', async () => {
mocks.asset.getByIds.mockResolvedValue([assetStub.livePhotoMotionAsset as any]);
await sut.handleSidecarDiscovery({ id: assetStub.livePhotoMotionAsset.id });
expect(mocks.storage.checkFileExists).not.toHaveBeenCalled();
expect(mocks.asset.update).toHaveBeenCalledWith({ id: asset.id, sidecarPath: null });
});
it('should skip assets with a sidecar path', async () => {
mocks.asset.getByIds.mockResolvedValue([assetStub.sidecar]);
await sut.handleSidecarDiscovery({ id: assetStub.sidecar.id });
expect(mocks.storage.checkFileExists).not.toHaveBeenCalled();
});
it('should do nothing if the sidecar file still exists', async () => {
const asset = forSidecarJob({ originalPath: '/path/to/IMG_123.jpg', sidecarPath: '/path/to/IMG_123.jpg' });
mocks.assetJob.getForSidecarCheckJob.mockResolvedValue(asset);
mocks.storage.checkFileExists.mockResolvedValueOnce(true);
await expect(sut.handleSidecarCheck({ id: asset.id })).resolves.toBe(JobStatus.Skipped);
it('should do nothing when a sidecar is not found ', async () => {
mocks.asset.getByIds.mockResolvedValue([assetStub.image]);
mocks.storage.checkFileExists.mockResolvedValue(false);
await sut.handleSidecarDiscovery({ id: assetStub.image.id });
expect(mocks.asset.update).not.toHaveBeenCalled();
});
it('should update a image asset when a sidecar is found', async () => {
mocks.asset.getByIds.mockResolvedValue([assetStub.image]);
mocks.storage.checkFileExists.mockResolvedValue(true);
await sut.handleSidecarDiscovery({ id: assetStub.image.id });
expect(mocks.storage.checkFileExists).toHaveBeenCalledWith('/original/path.jpg.xmp', constants.R_OK);
expect(mocks.asset.update).toHaveBeenCalledWith({
id: assetStub.image.id,
sidecarPath: '/original/path.jpg.xmp',
});
});
it('should update a video asset when a sidecar is found', async () => {
mocks.asset.getByIds.mockResolvedValue([assetStub.video]);
mocks.storage.checkFileExists.mockResolvedValue(true);
await sut.handleSidecarDiscovery({ id: assetStub.video.id });
expect(mocks.storage.checkFileExists).toHaveBeenCalledWith('/original/path.ext.xmp', constants.R_OK);
expect(mocks.asset.update).toHaveBeenCalledWith({
id: assetStub.image.id,
sidecarPath: '/original/path.ext.xmp',
});
});
});
describe('handleSidecarWrite', () => {

View File

@@ -5,7 +5,7 @@ import _ from 'lodash';
import { Duration } from 'luxon';
import { Stats } from 'node:fs';
import { constants } from 'node:fs/promises';
import path from 'node:path';
import { join, parse } from 'node:path';
import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { StorageCore } from 'src/cores/storage.core';
import { Asset, AssetFace } from 'src/database';
@@ -331,7 +331,7 @@ export class MetadataService extends BaseService {
const assets = this.assetJobRepository.streamForSidecar(force);
for await (const asset of assets) {
jobs.push({ name: force ? JobName.SidecarSync : JobName.SidecarDiscovery, data: { id: asset.id } });
jobs.push({ name: JobName.SidecarCheck, data: { id: asset.id } });
if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
await queueAll();
}
@@ -342,14 +342,37 @@ export class MetadataService extends BaseService {
return JobStatus.Success;
}
@OnJob({ name: JobName.SidecarSync, queue: QueueName.Sidecar })
handleSidecarSync({ id }: JobOf<JobName.SidecarSync>): Promise<JobStatus> {
return this.processSidecar(id, true);
}
@OnJob({ name: JobName.SidecarCheck, queue: QueueName.Sidecar })
async handleSidecarCheck({ id }: JobOf<JobName.SidecarCheck>): Promise<JobStatus | undefined> {
const asset = await this.assetJobRepository.getForSidecarCheckJob(id);
if (!asset) {
return;
}
@OnJob({ name: JobName.SidecarDiscovery, queue: QueueName.Sidecar })
handleSidecarDiscovery({ id }: JobOf<JobName.SidecarDiscovery>): Promise<JobStatus> {
return this.processSidecar(id, false);
let sidecarPath = null;
for (const candidate of this.getSidecarCandidates(asset)) {
const exists = await this.storageRepository.checkFileExists(candidate, constants.R_OK);
if (!exists) {
continue;
}
sidecarPath = candidate;
break;
}
const isChanged = sidecarPath !== asset.sidecarPath;
this.logger.debug(
`Sidecar check found old=${asset.sidecarPath}, new=${sidecarPath}; will ${isChanged ? 'update' : 'do nothing for'} asset ${asset.id}: ${asset.originalPath}`,
);
if (!isChanged) {
return JobStatus.Skipped;
}
await this.assetRepository.update({ id: asset.id, sidecarPath });
return JobStatus.Success;
}
@OnEvent({ name: 'AssetTag' })
@@ -399,6 +422,25 @@ export class MetadataService extends BaseService {
return JobStatus.Success;
}
private getSidecarCandidates({ sidecarPath, originalPath }: { sidecarPath: string | null; originalPath: string }) {
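// Candidate order matters: the check job takes the first path that exists, so the currently recorded sidecar wins, then photo.ext.xmp, then photo.xmp.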
const candidates: string[] = [];
if (sidecarPath) {
candidates.push(sidecarPath);
}
const assetPath = parse(originalPath);
candidates.push(
// IMG_123.jpg.xmp
`${originalPath}.xmp`,
// IMG_123.xmp
`${join(assetPath.dir, assetPath.name)}.xmp`,
);
return candidates;
}
private getImageDimensions(exifTags: ImmichTags): { width?: number; height?: number } {
/*
* The "true" values for width and height are a bit hidden, depending on the camera model and file format.
@@ -564,7 +606,7 @@ export class MetadataService extends BaseService {
checksum,
ownerId: asset.ownerId,
originalPath: StorageCore.getAndroidMotionPath(asset, motionAssetId),
originalFileName: `${path.parse(asset.originalFileName).name}.mp4`,
originalFileName: `${parse(asset.originalFileName).name}.mp4`,
visibility: AssetVisibility.Hidden,
deviceAssetId: 'NONE',
deviceId: 'NONE',
@@ -905,60 +947,4 @@ export class MetadataService extends BaseService {
return tags;
}
- private async processSidecar(id: string, isSync: boolean): Promise<JobStatus> {
-   const [asset] = await this.assetRepository.getByIds([id]);
-   if (!asset) {
-     return JobStatus.Failed;
-   }
-   if (isSync && !asset.sidecarPath) {
-     return JobStatus.Failed;
-   }
-   if (!isSync && (asset.visibility === AssetVisibility.Hidden || asset.sidecarPath) && !asset.isExternal) {
-     return JobStatus.Failed;
-   }
-   // XMP sidecars can come in two filename formats. For a photo named photo.ext, the filenames are photo.ext.xmp and photo.xmp
-   const assetPath = path.parse(asset.originalPath);
-   const assetPathWithoutExt = path.join(assetPath.dir, assetPath.name);
-   const sidecarPathWithoutExt = `${assetPathWithoutExt}.xmp`;
-   const sidecarPathWithExt = `${asset.originalPath}.xmp`;
-   const [sidecarPathWithExtExists, sidecarPathWithoutExtExists] = await Promise.all([
-     this.storageRepository.checkFileExists(sidecarPathWithExt, constants.R_OK),
-     this.storageRepository.checkFileExists(sidecarPathWithoutExt, constants.R_OK),
-   ]);
-   let sidecarPath = null;
-   if (sidecarPathWithExtExists) {
-     sidecarPath = sidecarPathWithExt;
-   } else if (sidecarPathWithoutExtExists) {
-     sidecarPath = sidecarPathWithoutExt;
-   }
-   if (asset.isExternal) {
-     if (sidecarPath !== asset.sidecarPath) {
-       await this.assetRepository.update({ id: asset.id, sidecarPath });
-     }
-     return JobStatus.Success;
-   }
-   if (sidecarPath) {
-     this.logger.debug(`Detected sidecar at '${sidecarPath}' for asset ${asset.id}: ${asset.originalPath}`);
-     await this.assetRepository.update({ id: asset.id, sidecarPath });
-     return JobStatus.Success;
-   }
-   if (!isSync) {
-     return JobStatus.Failed;
-   }
-   this.logger.debug(`No sidecar found for asset ${asset.id}: ${asset.originalPath}`);
-   await this.assetRepository.update({ id: asset.id, sidecarPath: null });
-   return JobStatus.Success;
- }
}
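Note: the new getSidecarCandidates above replaces the removed processSidecar lookup. As a standalone sketch (TypeScript, using only node:path; the function name is illustrative), the candidate derivation is:

import { join, parse } from 'node:path';

// For '/photos/IMG_123.jpg' this yields ['/photos/IMG_123.jpg.xmp', '/photos/IMG_123.xmp'],
// with any previously recorded sidecar path checked first, mirroring the diff above.
const sidecarCandidates = (originalPath: string, sidecarPath: string | null): string[] => {
  const { dir, name } = parse(originalPath);
  const candidates = sidecarPath ? [sidecarPath] : [];
  return [...candidates, `${originalPath}.xmp`, `${join(dir, name)}.xmp`];
};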

View File

@@ -18,7 +18,7 @@ import {
SmartSearchDto,
StatisticsSearchDto,
} from 'src/dtos/search.dto';
- import { AssetOrder, AssetVisibility } from 'src/enum';
+ import { AssetOrder, AssetVisibility, Permission } from 'src/enum';
import { BaseService } from 'src/services/base.service';
import { requireElevatedPermission } from 'src/utils/access';
import { getMyPartnerIds } from 'src/utils/asset.util';
@@ -113,14 +113,27 @@ export class SearchService extends BaseService {
}
const userIds = this.getUserIdsToSearch(auth);
- const key = machineLearning.clip.modelName + dto.query + dto.language;
- let embedding = this.embeddingCache.get(key);
- if (!embedding) {
-   embedding = await this.machineLearningRepository.encodeText(machineLearning.urls, dto.query, {
-     modelName: machineLearning.clip.modelName,
-     language: dto.language,
-   });
-   this.embeddingCache.set(key, embedding);
+ let embedding;
+ if (dto.query) {
+   const key = machineLearning.clip.modelName + dto.query + dto.language;
+   embedding = this.embeddingCache.get(key);
+   if (!embedding) {
+     embedding = await this.machineLearningRepository.encodeText(machineLearning.urls, dto.query, {
+       modelName: machineLearning.clip.modelName,
+       language: dto.language,
+     });
+     this.embeddingCache.set(key, embedding);
+   }
+ } else if (dto.queryAssetId) {
+   await this.requireAccess({ auth, permission: Permission.AssetRead, ids: [dto.queryAssetId] });
+   const getEmbeddingResponse = await this.searchRepository.getEmbedding(dto.queryAssetId);
+   const assetEmbedding = getEmbeddingResponse?.embedding;
+   if (!assetEmbedding) {
+     throw new BadRequestException(`Asset ${dto.queryAssetId} has no embedding`);
+   }
+   embedding = assetEmbedding;
+ } else {
+   throw new BadRequestException('Either `query` or `queryAssetId` must be set');
+ }
const page = dto.page ?? 1;
const size = dto.size || 100;
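Note: with this change a smart search is seeded either by encoding dto.query (memoized per model/query/language) or by reusing the stored CLIP embedding of dto.queryAssetId after an access check. A minimal sketch of that resolution order, with encodeText and getEmbedding as stand-ins for the repository calls above:

type Embedding = number[];

// Sketch of the embedding resolution introduced above; the two callbacks stand in
// for machineLearningRepository.encodeText and searchRepository.getEmbedding.
const resolveEmbedding = async (
  query: string | undefined,
  queryAssetId: string | undefined,
  cache: Map<string, Embedding>,
  encodeText: (text: string) => Promise<Embedding>,
  getEmbedding: (assetId: string) => Promise<Embedding | undefined>,
): Promise<Embedding> => {
  if (query) {
    const cached = cache.get(query);
    if (cached) {
      return cached;
    }
    const embedding = await encodeText(query);
    cache.set(query, embedding);
    return embedding;
  }
  if (queryAssetId) {
    const embedding = await getEmbedding(queryAssetId);
    if (!embedding) {
      throw new Error(`Asset ${queryAssetId} has no embedding`);
    }
    return embedding;
  }
  throw new Error('Either `query` or `queryAssetId` must be set');
};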

View File

@@ -1,8 +1,9 @@
import { BadRequestException, ForbiddenException, Injectable } from '@nestjs/common';
import { Insertable } from 'kysely';
- import { DateTime } from 'luxon';
+ import { DateTime, Duration } from 'luxon';
import { Writable } from 'node:stream';
import { AUDIT_LOG_MAX_DURATION } from 'src/constants';
+ import { OnJob } from 'src/decorators';
import { AssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import {
@@ -15,7 +16,16 @@ import {
SyncItem,
SyncStreamDto,
} from 'src/dtos/sync.dto';
- import { AssetVisibility, DatabaseAction, EntityType, Permission, SyncEntityType, SyncRequestType } from 'src/enum';
+ import {
+   AssetVisibility,
+   DatabaseAction,
+   EntityType,
+   JobName,
+   Permission,
+   QueueName,
+   SyncEntityType,
+   SyncRequestType,
+ } from 'src/enum';
import { SyncQueryOptions } from 'src/repositories/sync.repository';
import { SessionSyncCheckpointTable } from 'src/schema/tables/sync-checkpoint.table';
import { BaseService } from 'src/services/base.service';
@@ -32,6 +42,8 @@ type AssetLike = Omit<SyncAssetV1, 'checksum' | 'thumbhash'> & {
};
const COMPLETE_ID = 'complete';
const MAX_DAYS = 30;
const MAX_DURATION = Duration.fromObject({ days: MAX_DAYS });
const mapSyncAssetV1 = ({ checksum, thumbhash, ...data }: AssetLike): SyncAssetV1 => ({
...data,
@@ -137,19 +149,24 @@ export class SyncService extends BaseService {
}
const isPendingSyncReset = await this.sessionRepository.isPendingSyncReset(session.id);
if (isPendingSyncReset) {
send(response, { type: SyncEntityType.SyncResetV1, ids: ['reset'], data: {} });
response.end();
return;
}
+ const checkpoints = await this.syncCheckpointRepository.getAll(session.id);
+ const checkpointMap: CheckpointMap = Object.fromEntries(checkpoints.map(({ type, ack }) => [type, fromAck(ack)]));
+ if (this.needsFullSync(checkpointMap)) {
+   send(response, { type: SyncEntityType.SyncResetV1, ids: ['reset'], data: {} });
+   response.end();
+   return;
+ }
const { nowId } = await this.syncCheckpointRepository.getNow();
const options: SyncQueryOptions = { nowId, userId: auth.user.id };
- const checkpoints = await this.syncCheckpointRepository.getAll(session.id);
- const checkpointMap: CheckpointMap = Object.fromEntries(checkpoints.map(({ type, ack }) => [type, fromAck(ack)]));
const handlers: Record<SyncRequestType, () => Promise<void>> = {
[SyncRequestType.AuthUsersV1]: () => this.syncAuthUsersV1(options, response, checkpointMap),
[SyncRequestType.UsersV1]: () => this.syncUsersV1(options, response, checkpointMap),
@@ -180,9 +197,41 @@ export class SyncService extends BaseService {
await handler();
}
send(response, { type: SyncEntityType.SyncCompleteV1, ids: [nowId], data: {} });
response.end();
}
@OnJob({ name: JobName.AuditTableCleanup, queue: QueueName.BackgroundTask })
async onAuditTableCleanup() {
const pruneThreshold = MAX_DAYS + 1;
await this.syncRepository.album.cleanupAuditTable(pruneThreshold);
await this.syncRepository.albumUser.cleanupAuditTable(pruneThreshold);
await this.syncRepository.albumToAsset.cleanupAuditTable(pruneThreshold);
await this.syncRepository.asset.cleanupAuditTable(pruneThreshold);
await this.syncRepository.assetFace.cleanupAuditTable(pruneThreshold);
await this.syncRepository.assetMetadata.cleanupAuditTable(pruneThreshold);
await this.syncRepository.memory.cleanupAuditTable(pruneThreshold);
await this.syncRepository.memoryToAsset.cleanupAuditTable(pruneThreshold);
await this.syncRepository.partner.cleanupAuditTable(pruneThreshold);
await this.syncRepository.person.cleanupAuditTable(pruneThreshold);
await this.syncRepository.stack.cleanupAuditTable(pruneThreshold);
await this.syncRepository.user.cleanupAuditTable(pruneThreshold);
await this.syncRepository.userMetadata.cleanupAuditTable(pruneThreshold);
}
private needsFullSync(checkpointMap: CheckpointMap) {
const completeAck = checkpointMap[SyncEntityType.SyncCompleteV1];
if (!completeAck) {
return false;
}
const milliseconds = Number.parseInt(completeAck.updateId.replaceAll('-', '').slice(0, 12), 16);
return DateTime.fromMillis(milliseconds) < DateTime.now().minus(MAX_DURATION);
}
private async syncAuthUsersV1(options: SyncQueryOptions, response: Writable, checkpointMap: CheckpointMap) {
const upsertType = SyncEntityType.AuthUserV1;
const upserts = this.syncRepository.authUser.getUpserts({ ...options, ack: checkpointMap[upsertType] });
@@ -719,13 +768,13 @@ export class SyncService extends BaseService {
private async syncPeopleV1(options: SyncQueryOptions, response: Writable, checkpointMap: CheckpointMap) {
const deleteType = SyncEntityType.PersonDeleteV1;
- const deletes = this.syncRepository.people.getDeletes({ ...options, ack: checkpointMap[deleteType] });
+ const deletes = this.syncRepository.person.getDeletes({ ...options, ack: checkpointMap[deleteType] });
for await (const { id, ...data } of deletes) {
send(response, { type: deleteType, ids: [id], data });
}
const upsertType = SyncEntityType.PersonV1;
- const upserts = this.syncRepository.people.getUpserts({ ...options, ack: checkpointMap[upsertType] });
+ const upserts = this.syncRepository.person.getUpserts({ ...options, ack: checkpointMap[upsertType] });
for await (const { updateId, ...data } of upserts) {
send(response, { type: upsertType, ids: [updateId], data });
}
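Note: needsFullSync above relies on update IDs being UUIDv7 values, whose first 48 bits (the first 12 hex characters) encode the creation time as Unix milliseconds. A standalone sketch of the staleness check:

import { DateTime, Duration } from 'luxon';

// Recover the timestamp embedded in a UUIDv7, exactly as needsFullSync parses it above.
const uuidV7ToDateTime = (uuid: string): DateTime =>
  DateTime.fromMillis(Number.parseInt(uuid.replaceAll('-', '').slice(0, 12), 16));

// A SyncCompleteV1 checkpoint older than MAX_DAYS forces a SyncResetV1 instead of an
// incremental stream.
const isStale = (updateId: string, maxDays = 30): boolean =>
  uuidV7ToDateTime(updateId) < DateTime.now().minus(Duration.fromObject({ days: maxDays }));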

View File

@@ -275,6 +275,9 @@ export interface QueueStatus {
}
export type JobItem =
// Audit
| { name: JobName.AuditTableCleanup; data?: IBaseJob }
// Backups
| { name: JobName.DatabaseBackup; data?: IBaseJob }
@@ -309,8 +312,7 @@ export type JobItem =
// Sidecar Scanning
| { name: JobName.SidecarQueueAll; data: IBaseJob }
- | { name: JobName.SidecarDiscovery; data: IEntityJob }
- | { name: JobName.SidecarSync; data: IEntityJob }
+ | { name: JobName.SidecarCheck; data: IEntityJob }
| { name: JobName.SidecarWrite; data: ISidecarWriteJob }
// Facial Recognition
@@ -397,8 +399,8 @@ export interface VectorUpdateResult {
}
export interface ImmichFile extends Express.Multer.File {
/** sha1 hash of file */
uuid: string;
/** sha1 hash of file */
checksum: Buffer;
}

View File

@@ -35,7 +35,7 @@ export const stackStub = (stackId: string, assets: (MapAsset & { exifInfo: Exif
primaryAssetId: assets[0].id,
createdAt: new Date('2023-02-23T05:06:29.716Z'),
updatedAt: new Date('2023-02-23T05:06:29.716Z'),
- updateId: 'uuid-v7',
+ updateId: expect.any(String),
};
};

View File

@@ -1,5 +1,6 @@
import { Tag } from 'src/database';
import { TagResponseDto } from 'src/dtos/tag.dto';
import { newUuidV7 } from 'test/small.factory';
const parent = Object.freeze<Tag>({
id: 'tag-parent',
@@ -37,7 +38,10 @@ const color = {
parentId: null,
};
- const upsert = { userId: 'tag-user', updateId: 'uuid-v7' };
+ const upsert = {
+   userId: 'tag-user',
+   updateId: newUuidV7(),
+ };
export const tagStub = {
tag,

View File

@@ -258,6 +258,12 @@ export class SyncTestContext extends MediumTestContext<SyncService> {
return stream.getResponse();
}
async assertSyncIsComplete(auth: AuthDto, types: SyncRequestType[]) {
await expect(this.syncStream(auth, types)).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
}
async syncAckAll(auth: AuthDto, response: Array<{ type: string; ack: string }>) {
const acks: Record<string, string> = {};
const syncAcks: string[] = [];

View File

@@ -0,0 +1,226 @@
import { Kysely } from 'kysely';
import { DateTime } from 'luxon';
import { AssetMetadataKey, UserMetadataKey } from 'src/enum';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { SyncRepository } from 'src/repositories/sync.repository';
import { DB } from 'src/schema';
import { SyncService } from 'src/services/sync.service';
import { newMediumService } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
import { v4 } from 'uuid';
let defaultDatabase: Kysely<DB>;
const setup = (db?: Kysely<DB>) => {
return newMediumService(SyncService, {
database: db || defaultDatabase,
real: [DatabaseRepository, SyncRepository],
mock: [LoggingRepository],
});
};
beforeAll(async () => {
defaultDatabase = await getKyselyDB();
});
const deletedLongAgo = DateTime.now().minus({ days: 35 }).toISO();
const assertTableCount = async <T extends keyof DB>(db: Kysely<DB>, t: T, count: number) => {
const { table } = db.dynamic;
const results = await db.selectFrom(table(t).as(t)).selectAll().execute();
expect(results).toHaveLength(count);
};
describe(SyncService.name, () => {
describe('onAuditTableCleanup', () => {
it('should work', async () => {
const { sut } = setup();
await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
});
it('should cleanup the album_audit table', async () => {
const { sut, ctx } = setup();
const tableName = 'album_audit';
await ctx.database
.insertInto(tableName)
.values({ albumId: v4(), userId: v4(), deletedAt: deletedLongAgo })
.execute();
await assertTableCount(ctx.database, tableName, 1);
await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
await assertTableCount(ctx.database, tableName, 0);
});
it('should cleanup the album_asset_audit table', async () => {
const { sut, ctx } = setup();
const tableName = 'album_asset_audit';
const { user } = await ctx.newUser();
const { album } = await ctx.newAlbum({ ownerId: user.id });
await ctx.database
.insertInto(tableName)
.values({ albumId: album.id, assetId: v4(), deletedAt: deletedLongAgo })
.execute();
await assertTableCount(ctx.database, tableName, 1);
await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
await assertTableCount(ctx.database, tableName, 0);
});
it('should cleanup the album_user_audit table', async () => {
const { sut, ctx } = setup();
const tableName = 'album_user_audit';
await ctx.database
.insertInto(tableName)
.values({ albumId: v4(), userId: v4(), deletedAt: deletedLongAgo })
.execute();
await assertTableCount(ctx.database, tableName, 1);
await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
await assertTableCount(ctx.database, tableName, 0);
});
it('should cleanup the asset_audit table', async () => {
const { sut, ctx } = setup();
await ctx.database
.insertInto('asset_audit')
.values({ assetId: v4(), ownerId: v4(), deletedAt: deletedLongAgo })
.execute();
await assertTableCount(ctx.database, 'asset_audit', 1);
await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
await assertTableCount(ctx.database, 'asset_audit', 0);
});
it('should cleanup the asset_face_audit table', async () => {
const { sut, ctx } = setup();
const tableName = 'asset_face_audit';
await ctx.database
.insertInto(tableName)
.values({ assetFaceId: v4(), assetId: v4(), deletedAt: deletedLongAgo })
.execute();
await assertTableCount(ctx.database, tableName, 1);
await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
await assertTableCount(ctx.database, tableName, 0);
});
it('should cleanup the asset_metadata_audit table', async () => {
const { sut, ctx } = setup();
const tableName = 'asset_metadata_audit';
await ctx.database
.insertInto(tableName)
.values({ assetId: v4(), key: AssetMetadataKey.MobileApp, deletedAt: deletedLongAgo })
.execute();
await assertTableCount(ctx.database, tableName, 1);
await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
await assertTableCount(ctx.database, tableName, 0);
});
it('should cleanup the memory_audit table', async () => {
const { sut, ctx } = setup();
const tableName = 'memory_audit';
await ctx.database
.insertInto(tableName)
.values({ memoryId: v4(), userId: v4(), deletedAt: deletedLongAgo })
.execute();
await assertTableCount(ctx.database, tableName, 1);
await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
await assertTableCount(ctx.database, tableName, 0);
});
it('should cleanup the memory_asset_audit table', async () => {
const { sut, ctx } = setup();
const tableName = 'memory_asset_audit';
const { user } = await ctx.newUser();
const { memory } = await ctx.newMemory({ ownerId: user.id });
await ctx.database
.insertInto(tableName)
.values({ memoryId: memory.id, assetId: v4(), deletedAt: deletedLongAgo })
.execute();
await assertTableCount(ctx.database, tableName, 1);
await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
await assertTableCount(ctx.database, tableName, 0);
});
it('should cleanup the partner_audit table', async () => {
const { sut, ctx } = setup();
const tableName = 'partner_audit';
await ctx.database
.insertInto(tableName)
.values({ sharedById: v4(), sharedWithId: v4(), deletedAt: deletedLongAgo })
.execute();
await assertTableCount(ctx.database, tableName, 1);
await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
await assertTableCount(ctx.database, tableName, 0);
});
it('should cleanup the stack_audit table', async () => {
const { sut, ctx } = setup();
const tableName = 'stack_audit';
await ctx.database
.insertInto(tableName)
.values({ stackId: v4(), userId: v4(), deletedAt: deletedLongAgo })
.execute();
await assertTableCount(ctx.database, tableName, 1);
await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
await assertTableCount(ctx.database, tableName, 0);
});
it('should cleanup the user_audit table', async () => {
const { sut, ctx } = setup();
const tableName = 'user_audit';
await ctx.database.insertInto(tableName).values({ userId: v4(), deletedAt: deletedLongAgo }).execute();
await assertTableCount(ctx.database, tableName, 1);
await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
await assertTableCount(ctx.database, tableName, 0);
});
it('should cleanup the user_metadata_audit table', async () => {
const { sut, ctx } = setup();
const tableName = 'user_metadata_audit';
await ctx.database
.insertInto(tableName)
.values({ userId: v4(), key: UserMetadataKey.Onboarding, deletedAt: deletedLongAgo })
.execute();
await assertTableCount(ctx.database, tableName, 1);
await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
await assertTableCount(ctx.database, tableName, 0);
});
it('should skip recent records', async () => {
const { sut, ctx } = setup();
const keep = {
id: v4(),
assetId: v4(),
ownerId: v4(),
deletedAt: DateTime.now().minus({ days: 25 }).toISO(),
};
const remove = {
id: v4(),
assetId: v4(),
ownerId: v4(),
deletedAt: DateTime.now().minus({ days: 35 }).toISO(),
};
await ctx.database.insertInto('asset_audit').values([keep, remove]).execute();
await assertTableCount(ctx.database, 'asset_audit', 2);
await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
const after = await ctx.database.selectFrom('asset_audit').select(['id']).execute();
expect(after).toHaveLength(1);
expect(after[0].id).toBe(keep.id);
});
});
});
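Note: these tests drive onAuditTableCleanup end to end; the per-table deletion behind cleanupAuditTable is not part of this diff. A hedged sketch of what each call presumably issues through Kysely, assuming the audit tables share a deletedAt column (the real SyncRepository internals may differ):

import { Kysely } from 'kysely';
import { DateTime } from 'luxon';
import { DB } from 'src/schema';

// Hypothetical shape of one cleanup call: delete audit rows whose deletedAt is older
// than the prune threshold (MAX_DAYS + 1 in the service above).
const cleanupAuditTable = (db: Kysely<DB>, days: number) =>
  db
    .deleteFrom('asset_audit')
    .where('deletedAt', '<', DateTime.now().minus({ days }).toJSDate())
    .execute();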

View File

@@ -74,11 +74,11 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
},
type: SyncEntityType.AlbumAssetExifCreateV1,
},
+   expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
- expect(response).toHaveLength(2);
await ctx.syncAckAll(auth, response);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumAssetExifsV1]);
});
it('should sync album asset exif for own user', async () => {
@@ -88,8 +88,15 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
- await expect(ctx.syncStream(auth, [SyncRequestType.AssetExifsV1])).resolves.toHaveLength(1);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1])).resolves.toHaveLength(2);
+ await expect(ctx.syncStream(auth, [SyncRequestType.AssetExifsV1])).resolves.toEqual([
+   expect.objectContaining({ type: SyncEntityType.AssetExifV1 }),
+   expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
+ ]);
+ await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1])).resolves.toEqual([
+   expect.objectContaining({ type: SyncEntityType.SyncAckV1 }),
+   expect.objectContaining({ type: SyncEntityType.AlbumAssetExifCreateV1 }),
+   expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
+ ]);
});
it('should not sync album asset exif for unrelated user', async () => {
@@ -104,8 +111,11 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
const { session } = await ctx.newSession({ userId: user3.id });
const authUser3 = factory.auth({ session, user: user3 });
- await expect(ctx.syncStream(authUser3, [SyncRequestType.AssetExifsV1])).resolves.toHaveLength(1);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1])).resolves.toHaveLength(0);
+ await expect(ctx.syncStream(authUser3, [SyncRequestType.AssetExifsV1])).resolves.toEqual([
+   expect.objectContaining({ type: SyncEntityType.AssetExifV1 }),
+   expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
+ ]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumAssetExifsV1]);
});
it('should backfill album assets exif when a user shares an album with you', async () => {
@@ -139,8 +149,8 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
}),
type: SyncEntityType.AlbumAssetExifCreateV1,
},
+   expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
- expect(response).toHaveLength(2);
// ack initial album asset exif sync
await ctx.syncAckAll(auth, response);
@@ -174,11 +184,11 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
}),
type: SyncEntityType.AlbumAssetExifCreateV1,
},
+   expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
- expect(newResponse).toHaveLength(5);
await ctx.syncAckAll(auth, newResponse);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumAssetExifsV1]);
});
it('should sync old asset exif when a user adds them to an album they share you', async () => {
@@ -207,8 +217,8 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
}),
type: SyncEntityType.AlbumAssetExifCreateV1,
},
+   expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
- expect(firstAlbumResponse).toHaveLength(2);
await ctx.syncAckAll(auth, firstAlbumResponse);
@@ -224,8 +234,8 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
type: SyncEntityType.AlbumAssetExifBackfillV1,
},
backfillSyncAck,
+   expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
- expect(response).toHaveLength(2);
// ack initial album asset sync
await ctx.syncAckAll(auth, response);
@@ -244,11 +254,11 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
}),
type: SyncEntityType.AlbumAssetExifCreateV1,
},
+   expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
- expect(newResponse).toHaveLength(2);
await ctx.syncAckAll(auth, newResponse);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumAssetExifsV1]);
});
it('should sync asset exif updates for an album shared with you', async () => {
@@ -262,7 +272,6 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1]);
- expect(response).toHaveLength(2);
expect(response).toEqual([
updateSyncAck,
{
@@ -272,6 +281,7 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
}),
type: SyncEntityType.AlbumAssetExifCreateV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
@@ -283,9 +293,7 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
city: 'New City',
});
- const updateResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1]);
- expect(updateResponse).toHaveLength(1);
- expect(updateResponse).toEqual([
+ await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1])).resolves.toEqual([
{
ack: expect.any(String),
data: expect.objectContaining({
@@ -294,6 +302,7 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
}),
type: SyncEntityType.AlbumAssetExifUpdateV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
});
@@ -330,8 +339,8 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
}),
type: SyncEntityType.AlbumAssetExifCreateV1,
},
+   expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
- expect(response).toHaveLength(3);
await ctx.syncAckAll(auth, response);
@@ -342,8 +351,7 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
city: 'Delayed Exif',
});
- const updateResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1]);
- expect(updateResponse).toEqual([
+ await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1])).resolves.toEqual([
{
ack: expect.any(String),
data: expect.objectContaining({
@@ -352,7 +360,7 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
}),
type: SyncEntityType.AlbumAssetExifUpdateV1,
},
+   expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
- expect(updateResponse).toHaveLength(1);
});
});
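Note: the recurring rewrite in this and the following test files encodes the new stream contract: every sync stream now terminates with a SyncCompleteV1 item, so bare length assertions were replaced with shape assertions and assertSyncIsComplete. "Nothing left to sync" is therefore no longer an empty array but a stream containing only the terminator; roughly:

// Sketch of the invariant behind assertSyncIsComplete: after acking everything,
// the stream contains exactly one item, the SyncCompleteV1 terminator.
const isFullySynced = (stream: Array<{ type: string }>): boolean =>
  stream.length === 1 && stream[0].type === 'SyncCompleteV1';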

View File

@@ -58,7 +58,6 @@ describe(SyncRequestType.AlbumAssetsV1, () => {
await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1]);
- expect(response).toHaveLength(2);
expect(response).toEqual([
updateSyncAck,
{
@@ -83,10 +82,11 @@ describe(SyncRequestType.AlbumAssetsV1, () => {
},
type: SyncEntityType.AlbumAssetCreateV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumAssetsV1]);
});
it('should sync album asset for own user', async () => {
@@ -95,8 +95,15 @@ describe(SyncRequestType.AlbumAssetsV1, () => {
const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
- await expect(ctx.syncStream(auth, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1])).resolves.toHaveLength(2);
+ await expect(ctx.syncStream(auth, [SyncRequestType.AssetsV1])).resolves.toEqual([
+   expect.objectContaining({ type: SyncEntityType.AssetV1 }),
+   expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
+ ]);
+ await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1])).resolves.toEqual([
+   expect.objectContaining({ type: SyncEntityType.SyncAckV1 }),
+   expect.objectContaining({ type: SyncEntityType.AlbumAssetCreateV1 }),
+   expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
+ ]);
});
it('should not sync album asset for unrelated user', async () => {
@@ -110,8 +117,11 @@ describe(SyncRequestType.AlbumAssetsV1, () => {
const { session } = await ctx.newSession({ userId: user3.id });
const authUser3 = factory.auth({ session, user: user3 });
- await expect(ctx.syncStream(authUser3, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1])).resolves.toHaveLength(0);
+ await expect(ctx.syncStream(authUser3, [SyncRequestType.AssetsV1])).resolves.toEqual([
+   expect.objectContaining({ type: SyncEntityType.AssetV1 }),
+   expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
+ ]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumAssetsV1]);
});
it('should backfill album assets when a user shares an album with you', async () => {
@@ -133,7 +143,6 @@ describe(SyncRequestType.AlbumAssetsV1, () => {
await ctx.newAlbumUser({ albumId: album1.id, userId: auth.user.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1]);
- expect(response).toHaveLength(2);
expect(response).toEqual([
updateSyncAck,
{
@@ -143,6 +152,7 @@ describe(SyncRequestType.AlbumAssetsV1, () => {
}),
type: SyncEntityType.AlbumAssetCreateV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
// ack initial album asset sync
@@ -176,10 +186,11 @@ describe(SyncRequestType.AlbumAssetsV1, () => {
}),
type: SyncEntityType.AlbumAssetCreateV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumAssetsV1]);
});
it('should sync old assets when a user adds them to an album they share you', async () => {
@@ -196,7 +207,6 @@ describe(SyncRequestType.AlbumAssetsV1, () => {
await ctx.newAlbumUser({ albumId: album1.id, userId: auth.user.id, role: AlbumUserRole.Editor });
const firstAlbumResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1]);
- expect(firstAlbumResponse).toHaveLength(2);
expect(firstAlbumResponse).toEqual([
updateSyncAck,
{
@@ -206,6 +216,7 @@ describe(SyncRequestType.AlbumAssetsV1, () => {
}),
type: SyncEntityType.AlbumAssetCreateV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, firstAlbumResponse);
@@ -213,7 +224,6 @@ describe(SyncRequestType.AlbumAssetsV1, () => {
await ctx.newAlbumUser({ albumId: album2.id, userId: auth.user.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1]);
- // expect(response).toHaveLength(2);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -223,6 +233,7 @@ describe(SyncRequestType.AlbumAssetsV1, () => {
type: SyncEntityType.AlbumAssetBackfillV1,
},
backfillSyncAck,
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
// ack initial album asset sync
@@ -242,10 +253,11 @@ describe(SyncRequestType.AlbumAssetsV1, () => {
}),
type: SyncEntityType.AlbumAssetCreateV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumAssetsV1]);
});
it('should sync asset updates for an album shared with you', async () => {
@@ -258,7 +270,6 @@ describe(SyncRequestType.AlbumAssetsV1, () => {
await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1]);
- expect(response).toHaveLength(2);
expect(response).toEqual([
updateSyncAck,
{
@@ -268,6 +279,7 @@ describe(SyncRequestType.AlbumAssetsV1, () => {
}),
type: SyncEntityType.AlbumAssetCreateV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
@@ -280,7 +292,6 @@ describe(SyncRequestType.AlbumAssetsV1, () => {
});
const updateResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1]);
- expect(updateResponse).toHaveLength(1);
expect(updateResponse).toEqual([
{
ack: expect.any(String),
@@ -290,6 +301,7 @@ describe(SyncRequestType.AlbumAssetsV1, () => {
}),
type: SyncEntityType.AlbumAssetUpdateV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
});
});

View File

@@ -28,7 +28,6 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
- expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -38,10 +37,11 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
},
type: SyncEntityType.AlbumToAssetV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumToAssetsV1]);
});
it('should sync album to asset for owned albums', async () => {
@@ -51,7 +51,6 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
- expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -61,10 +60,11 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
},
type: SyncEntityType.AlbumToAssetV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumToAssetsV1]);
});
it('should detect and sync the album to asset for shared albums', async () => {
@@ -76,7 +76,6 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
- expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -86,10 +85,11 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
},
type: SyncEntityType.AlbumToAssetV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumToAssetsV1]);
});
it('should not sync album to asset for an album owned by another user', async () => {
@@ -98,7 +98,7 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
const { asset } = await ctx.newAsset({ ownerId: user2.id });
const { album } = await ctx.newAlbum({ ownerId: user2.id });
await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumToAssetsV1]);
});
it('should backfill album to assets when a user shares an album with you', async () => {
@@ -114,7 +114,6 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
await ctx.newAlbumAsset({ albumId: album1.id, assetId: album1Asset.id });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
- expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -124,6 +123,7 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
},
type: SyncEntityType.AlbumToAssetV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
// ack initial album to asset sync
@@ -148,10 +148,11 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
data: {},
type: SyncEntityType.SyncAckV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumToAssetsV1]);
});
it('should detect and sync a deleted album to asset relation', async () => {
@@ -162,7 +163,6 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
- expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -172,6 +172,7 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
},
type: SyncEntityType.AlbumToAssetV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
@@ -179,7 +180,6 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
await wait(2);
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
- expect(newResponse).toHaveLength(1);
expect(newResponse).toEqual([
{
ack: expect.any(String),
@@ -189,10 +189,11 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
},
type: SyncEntityType.AlbumToAssetDeleteV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumToAssetsV1]);
});
it('should detect and sync a deleted album to asset relation when an asset is deleted', async () => {
@@ -203,7 +204,6 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
- expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -213,6 +213,7 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
},
type: SyncEntityType.AlbumToAssetV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
@@ -220,7 +221,6 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
await wait(2);
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
- expect(newResponse).toHaveLength(1);
expect(newResponse).toEqual([
{
ack: expect.any(String),
@@ -230,10 +230,11 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
},
type: SyncEntityType.AlbumToAssetDeleteV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumToAssetsV1]);
});
it('should not sync a deleted album to asset relation when the album is deleted', async () => {
@@ -244,7 +245,6 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
- expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -254,11 +254,12 @@ describe(SyncRequestType.AlbumToAssetsV1, () => {
},
type: SyncEntityType.AlbumToAssetV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await albumRepo.delete(album.id);
await wait(2);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumToAssetsV1]);
});
});

View File

@@ -34,6 +34,7 @@ describe(SyncRequestType.AlbumUsersV1, () => {
}),
type: SyncEntityType.AlbumUserV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
});
@@ -45,7 +46,6 @@ describe(SyncRequestType.AlbumUsersV1, () => {
const { albumUser } = await ctx.newAlbumUser({ albumId: album.id, userId: user1.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
- expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -56,10 +56,11 @@ describe(SyncRequestType.AlbumUsersV1, () => {
}),
type: SyncEntityType.AlbumUserV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
});
it('should detect and sync an updated shared user', async () => {
@@ -71,11 +72,10 @@ describe(SyncRequestType.AlbumUsersV1, () => {
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
await ctx.syncAckAll(auth, response);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
await albumUserRepo.update({ albumsId: album.id, usersId: user1.id }, { role: AlbumUserRole.Viewer });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
- expect(newResponse).toHaveLength(1);
expect(newResponse).toEqual([
{
ack: expect.any(String),
@@ -86,10 +86,11 @@ describe(SyncRequestType.AlbumUsersV1, () => {
}),
type: SyncEntityType.AlbumUserV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
});
it('should detect and sync a deleted shared user', async () => {
@@ -100,9 +101,8 @@ describe(SyncRequestType.AlbumUsersV1, () => {
const { albumUser } = await ctx.newAlbumUser({ albumId: album.id, userId: user1.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
- expect(response).toHaveLength(1);
await ctx.syncAckAll(auth, response);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
await albumUserRepo.delete({ albumsId: album.id, usersId: user1.id });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
@@ -115,10 +115,11 @@ describe(SyncRequestType.AlbumUsersV1, () => {
}),
type: SyncEntityType.AlbumUserDeleteV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
});
});
@@ -134,7 +135,6 @@ describe(SyncRequestType.AlbumUsersV1, () => {
});
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
- expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -145,10 +145,11 @@ describe(SyncRequestType.AlbumUsersV1, () => {
}),
type: SyncEntityType.AlbumUserV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
});
it('should detect and sync an updated shared user', async () => {
@@ -161,10 +162,14 @@ describe(SyncRequestType.AlbumUsersV1, () => {
await ctx.newAlbumUser({ albumId: album.id, userId: user.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
- expect(response).toHaveLength(2);
+ expect(response).toEqual([
+   expect.objectContaining({ type: SyncEntityType.AlbumUserV1 }),
+   expect.objectContaining({ type: SyncEntityType.AlbumUserV1 }),
+   expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
+ ]);
await ctx.syncAckAll(auth, response);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
await albumUserRepo.update({ albumsId: album.id, usersId: user.id }, { role: AlbumUserRole.Viewer });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
@@ -178,10 +183,11 @@ describe(SyncRequestType.AlbumUsersV1, () => {
}),
type: SyncEntityType.AlbumUserV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
});
it('should detect and sync a deleted shared user', async () => {
@@ -194,10 +200,14 @@ describe(SyncRequestType.AlbumUsersV1, () => {
await ctx.newAlbumUser({ albumId: album.id, userId: user.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
- expect(response).toHaveLength(2);
+ expect(response).toEqual([
+   expect.objectContaining({ type: SyncEntityType.AlbumUserV1 }),
+   expect.objectContaining({ type: SyncEntityType.AlbumUserV1 }),
+   expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
+ ]);
await ctx.syncAckAll(auth, response);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
await albumUserRepo.delete({ albumsId: album.id, usersId: user.id });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
@@ -210,10 +220,11 @@ describe(SyncRequestType.AlbumUsersV1, () => {
}),
type: SyncEntityType.AlbumUserDeleteV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
});
it('should backfill album users when a user shares an album with you', async () => {
@@ -232,7 +243,6 @@ describe(SyncRequestType.AlbumUsersV1, () => {
await ctx.newAlbumUser({ albumId: album1.id, userId: user2.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
- expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -243,6 +253,7 @@ describe(SyncRequestType.AlbumUsersV1, () => {
}),
type: SyncEntityType.AlbumUserV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
// ack initial user
@@ -285,10 +296,11 @@ describe(SyncRequestType.AlbumUsersV1, () => {
}),
type: SyncEntityType.AlbumUserV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
});
});
});

View File

@@ -24,7 +24,6 @@ describe(SyncRequestType.AlbumsV1, () => {
const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
- expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -35,10 +34,11 @@ describe(SyncRequestType.AlbumsV1, () => {
}),
type: SyncEntityType.AlbumV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumsV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
});
it('should detect and sync a new album', async () => {
@@ -46,7 +46,6 @@ describe(SyncRequestType.AlbumsV1, () => {
const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
- expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -55,10 +54,11 @@ describe(SyncRequestType.AlbumsV1, () => {
}),
type: SyncEntityType.AlbumV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumsV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
});
it('should detect and sync an album delete', async () => {
@@ -67,7 +67,6 @@ describe(SyncRequestType.AlbumsV1, () => {
const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
- expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -76,12 +75,12 @@ describe(SyncRequestType.AlbumsV1, () => {
}),
type: SyncEntityType.AlbumV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await albumRepo.delete(album.id);
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
- expect(newResponse).toHaveLength(1);
expect(newResponse).toEqual([
{
ack: expect.any(String),
@@ -90,10 +89,11 @@ describe(SyncRequestType.AlbumsV1, () => {
},
type: SyncEntityType.AlbumDeleteV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumsV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
});
describe('shared albums', () => {
@@ -104,17 +104,17 @@ describe(SyncRequestType.AlbumsV1, () => {
await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
- expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
data: expect.objectContaining({ id: album.id }),
type: SyncEntityType.AlbumV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumsV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
});
it('should detect and sync an album share (share before sync)', async () => {
@@ -124,17 +124,17 @@ describe(SyncRequestType.AlbumsV1, () => {
await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
- expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
data: expect.objectContaining({ id: album.id }),
type: SyncEntityType.AlbumV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumsV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
});
it('should detect and sync an album share (share after sync)', async () => {
@@ -150,23 +150,24 @@ describe(SyncRequestType.AlbumsV1, () => {
data: expect.objectContaining({ id: userAlbum.id }),
type: SyncEntityType.AlbumV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.newAlbumUser({ userId: auth.user.id, albumId: user2Album.id, role: AlbumUserRole.Editor });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
- expect(newResponse).toHaveLength(1);
expect(newResponse).toEqual([
{
ack: expect.any(String),
data: expect.objectContaining({ id: user2Album.id }),
type: SyncEntityType.AlbumV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumsV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
});
it('should detect and sync an album delete`', async () => {
@@ -177,24 +178,27 @@ describe(SyncRequestType.AlbumsV1, () => {
await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
- expect(response).toHaveLength(1);
+ expect(response).toEqual([
+   expect.objectContaining({ type: SyncEntityType.AlbumV1 }),
+   expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
+ ]);
await ctx.syncAckAll(auth, response);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumsV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
await albumRepo.delete(album.id);
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
- expect(newResponse).toHaveLength(1);
expect(newResponse).toEqual([
{
ack: expect.any(String),
data: { albumId: album.id },
type: SyncEntityType.AlbumDeleteV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumsV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
});
it('should detect and sync an album unshare as an album delete', async () => {
@@ -205,10 +209,13 @@ describe(SyncRequestType.AlbumsV1, () => {
await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
- expect(response).toHaveLength(1);
+ expect(response).toEqual([
+   expect.objectContaining({ type: SyncEntityType.AlbumV1 }),
+   expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
+ ]);
await ctx.syncAckAll(auth, response);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumsV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
await albumUserRepo.delete({ albumsId: album.id, usersId: auth.user.id });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
@@ -218,10 +225,11 @@ describe(SyncRequestType.AlbumsV1, () => {
data: { albumId: album.id },
type: SyncEntityType.AlbumDeleteV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
- await expect(ctx.syncStream(auth, [SyncRequestType.AlbumsV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
});
});
});

View File

@@ -24,7 +24,6 @@ describe(SyncRequestType.AssetExifsV1, () => {
await ctx.newExif({ assetId: asset.id, make: 'Canon' });
const response = await ctx.syncStream(auth, [SyncRequestType.AssetExifsV1]);
- expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -57,10 +56,11 @@ describe(SyncRequestType.AssetExifsV1, () => {
},
type: SyncEntityType.AssetExifV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
- await expect(ctx.syncStream(auth, [SyncRequestType.AssetExifsV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetExifsV1]);
});
it('should only sync asset exif for own user', async () => {
@@ -72,7 +72,10 @@ describe(SyncRequestType.AssetExifsV1, () => {
const { session } = await ctx.newSession({ userId: user2.id });
const auth2 = factory.auth({ session, user: user2 });
- await expect(ctx.syncStream(auth2, [SyncRequestType.AssetExifsV1])).resolves.toHaveLength(1);
- await expect(ctx.syncStream(auth, [SyncRequestType.AssetExifsV1])).resolves.toHaveLength(0);
+ await expect(ctx.syncStream(auth2, [SyncRequestType.AssetExifsV1])).resolves.toEqual([
+   expect.objectContaining({ type: SyncEntityType.AssetExifV1 }),
+   expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
+ ]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetExifsV1]);
});
});

View File

@@ -26,7 +26,6 @@ describe(SyncEntityType.AssetFaceV1, () => {
const { assetFace } = await ctx.newAssetFace({ assetId: asset.id, personId: person.id });
const response = await ctx.syncStream(auth, [SyncRequestType.AssetFacesV1]);
- expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -44,10 +43,11 @@ describe(SyncEntityType.AssetFaceV1, () => {
}),
type: 'AssetFaceV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
- await expect(ctx.syncStream(auth, [SyncRequestType.AssetFacesV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetFacesV1]);
});
it('should detect and sync a deleted asset face', async () => {
@@ -58,7 +58,6 @@ describe(SyncEntityType.AssetFaceV1, () => {
await personRepo.deleteAssetFace(assetFace.id);
const response = await ctx.syncStream(auth, [SyncRequestType.AssetFacesV1]);
- expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -67,10 +66,11 @@ describe(SyncEntityType.AssetFaceV1, () => {
},
type: 'AssetFaceDeleteV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
- await expect(ctx.syncStream(auth, [SyncRequestType.AssetFacesV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetFacesV1]);
});
it('should not sync an asset face or asset face delete for an unrelated user', async () => {
@@ -82,11 +82,18 @@ describe(SyncEntityType.AssetFaceV1, () => {
const { assetFace } = await ctx.newAssetFace({ assetId: asset.id });
const auth2 = factory.auth({ session, user: user2 });
- expect(await ctx.syncStream(auth2, [SyncRequestType.AssetFacesV1])).toHaveLength(1);
- expect(await ctx.syncStream(auth, [SyncRequestType.AssetFacesV1])).toHaveLength(0);
+ expect(await ctx.syncStream(auth2, [SyncRequestType.AssetFacesV1])).toEqual([
+   expect.objectContaining({ type: SyncEntityType.AssetFaceV1 }),
+   expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
+ ]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetFacesV1]);
await personRepo.deleteAssetFace(assetFace.id);
- expect(await ctx.syncStream(auth2, [SyncRequestType.AssetFacesV1])).toHaveLength(1);
- expect(await ctx.syncStream(auth, [SyncRequestType.AssetFacesV1])).toHaveLength(0);
+ expect(await ctx.syncStream(auth2, [SyncRequestType.AssetFacesV1])).toEqual([
+   expect.objectContaining({ type: SyncEntityType.AssetFaceDeleteV1 }),
+   expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
+ ]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetFacesV1]);
});
});

View File

@@ -26,7 +26,6 @@ describe(SyncEntityType.AssetMetadataV1, () => {
await assetRepo.upsertMetadata(asset.id, [{ key: AssetMetadataKey.MobileApp, value: { iCloudId: 'abc123' } }]);
const response = await ctx.syncStream(auth, [SyncRequestType.AssetMetadataV1]);
- expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -37,10 +36,11 @@ describe(SyncEntityType.AssetMetadataV1, () => {
},
type: 'AssetMetadataV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
- await expect(ctx.syncStream(auth, [SyncRequestType.AssetMetadataV1])).resolves.toEqual([]);
+ await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetMetadataV1]);
});
it('should update asset metadata', async () => {
@@ -51,7 +51,6 @@ describe(SyncEntityType.AssetMetadataV1, () => {
await assetRepo.upsertMetadata(asset.id, [{ key: AssetMetadataKey.MobileApp, value: { iCloudId: 'abc123' } }]);
const response = await ctx.syncStream(auth, [SyncRequestType.AssetMetadataV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -62,6 +61,7 @@ describe(SyncEntityType.AssetMetadataV1, () => {
},
type: 'AssetMetadataV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
@@ -79,10 +79,11 @@ describe(SyncEntityType.AssetMetadataV1, () => {
},
type: 'AssetMetadataV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, updatedResponse);
await expect(ctx.syncStream(auth, [SyncRequestType.AssetMetadataV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetMetadataV1]);
});
});
@@ -95,7 +96,6 @@ describe(SyncEntityType.AssetMetadataDeleteV1, () => {
await assetRepo.upsertMetadata(asset.id, [{ key: AssetMetadataKey.MobileApp, value: { iCloudId: 'abc123' } }]);
const response = await ctx.syncStream(auth, [SyncRequestType.AssetMetadataV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -106,6 +106,7 @@ describe(SyncEntityType.AssetMetadataDeleteV1, () => {
},
type: 'AssetMetadataV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
@@ -121,6 +122,7 @@ describe(SyncEntityType.AssetMetadataDeleteV1, () => {
},
type: 'AssetMetadataDeleteV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
});
});

View File

@@ -40,7 +40,6 @@ describe(SyncEntityType.AssetV1, () => {
});
const response = await ctx.syncStream(auth, [SyncRequestType.AssetsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -64,10 +63,11 @@ describe(SyncEntityType.AssetV1, () => {
},
type: 'AssetV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await expect(ctx.syncStream(auth, [SyncRequestType.AssetsV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetsV1]);
});
it('should detect and sync a deleted asset', async () => {
@@ -77,7 +77,6 @@ describe(SyncEntityType.AssetV1, () => {
await assetRepo.remove(asset);
const response = await ctx.syncStream(auth, [SyncRequestType.AssetsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -86,10 +85,11 @@ describe(SyncEntityType.AssetV1, () => {
},
type: 'AssetDeleteV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await expect(ctx.syncStream(auth, [SyncRequestType.AssetsV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetsV1]);
});
it('should not sync an asset or asset delete for an unrelated user', async () => {
@@ -100,11 +100,17 @@ describe(SyncEntityType.AssetV1, () => {
const { asset } = await ctx.newAsset({ ownerId: user2.id });
const auth2 = factory.auth({ session, user: user2 });
expect(await ctx.syncStream(auth2, [SyncRequestType.AssetsV1])).toHaveLength(1);
expect(await ctx.syncStream(auth, [SyncRequestType.AssetsV1])).toHaveLength(0);
expect(await ctx.syncStream(auth2, [SyncRequestType.AssetsV1])).toEqual([
expect.objectContaining({ type: SyncEntityType.AssetV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetsV1]);
await assetRepo.remove(asset);
expect(await ctx.syncStream(auth2, [SyncRequestType.AssetsV1])).toHaveLength(1);
expect(await ctx.syncStream(auth, [SyncRequestType.AssetsV1])).toHaveLength(0);
expect(await ctx.syncStream(auth2, [SyncRequestType.AssetsV1])).toEqual([
expect.objectContaining({ type: SyncEntityType.AssetDeleteV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetsV1]);
});
});

View File

@@ -22,7 +22,6 @@ describe(SyncEntityType.AuthUserV1, () => {
const { auth, user, ctx } = await setup(await getKyselyDB());
const response = await ctx.syncStream(auth, [SyncRequestType.AuthUsersV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -43,10 +42,11 @@ describe(SyncEntityType.AuthUserV1, () => {
},
type: 'AuthUserV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await expect(ctx.syncStream(auth, [SyncRequestType.AuthUsersV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AuthUsersV1]);
});
it('should sync a change and then another change to that same user', async () => {
@@ -55,7 +55,6 @@ describe(SyncEntityType.AuthUserV1, () => {
const userRepo = ctx.get(UserRepository);
const response = await ctx.syncStream(auth, [SyncRequestType.AuthUsersV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -65,6 +64,7 @@ describe(SyncEntityType.AuthUserV1, () => {
}),
type: 'AuthUserV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
@@ -72,7 +72,6 @@ describe(SyncEntityType.AuthUserV1, () => {
await userRepo.update(user.id, { isAdmin: true });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AuthUsersV1]);
expect(newResponse).toHaveLength(1);
expect(newResponse).toEqual([
{
ack: expect.any(String),
@@ -82,6 +81,7 @@ describe(SyncEntityType.AuthUserV1, () => {
}),
type: 'AuthUserV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
});
});

View File

@@ -0,0 +1,60 @@
import { Kysely } from 'kysely';
import { DateTime } from 'luxon';
import { SyncEntityType, SyncRequestType } from 'src/enum';
import { SyncCheckpointRepository } from 'src/repositories/sync-checkpoint.repository';
import { DB } from 'src/schema';
import { toAck } from 'src/utils/sync';
import { SyncTestContext } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
import { v7 } from 'uuid';
let defaultDatabase: Kysely<DB>;
const setup = async (db?: Kysely<DB>) => {
const ctx = new SyncTestContext(db || defaultDatabase);
const { auth, user, session } = await ctx.newSyncAuthUser();
return { auth, user, session, ctx };
};
beforeAll(async () => {
defaultDatabase = await getKyselyDB();
});
describe(SyncEntityType.SyncCompleteV1, () => {
it('should work', async () => {
const { auth, ctx } = await setup();
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetsV1]);
});
it('should detect an old checkpoint and send back a reset', async () => {
const { auth, session, ctx } = await setup();
const updateId = v7({ msecs: DateTime.now().minus({ days: 60 }).toMillis() });
await ctx.get(SyncCheckpointRepository).upsertAll([
{
type: SyncEntityType.SyncCompleteV1,
sessionId: session.id,
ack: toAck({ type: SyncEntityType.SyncCompleteV1, updateId }),
},
]);
const response = await ctx.syncStream(auth, [SyncRequestType.AssetsV1]);
expect(response).toEqual([{ type: SyncEntityType.SyncResetV1, data: {}, ack: 'SyncResetV1|reset' }]);
});
it('should not send back a reset if the checkpoint is recent', async () => {
const { auth, session, ctx } = await setup();
const updateId = v7({ msecs: DateTime.now().minus({ days: 7 }).toMillis() });
await ctx.get(SyncCheckpointRepository).upsertAll([
{
type: SyncEntityType.SyncCompleteV1,
sessionId: session.id,
ack: toAck({ type: SyncEntityType.SyncCompleteV1, updateId }),
},
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetsV1]);
});
});
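The two checkpoint tests above lean on the fact that a v7 UUID embeds its creation time: per RFC 9562, the leading 48 bits (the first 12 hex digits) are a unix millisecond timestamp, so the server can judge a checkpoint's age from the `updateId` alone. A rough sketch of such a staleness check; the tests only pin the cutoff to somewhere between 7 and 60 days, so the 30-day default here is an assumed placeholder, as is the helper naming:

import { DateTime } from 'luxon';

// A v7 UUID's first 48 bits are a unix timestamp in milliseconds (RFC 9562).
const uuidV7ToMillis = (id: string) => Number.parseInt(id.replaceAll('-', '').slice(0, 12), 16);

// Hypothetical staleness rule: stale checkpoints trigger a SyncResetV1 response.
const isStaleCheckpoint = (updateId: string, maxAgeDays = 30) =>
  DateTime.fromMillis(uuidV7ToMillis(updateId)) < DateTime.now().minus({ days: maxAgeDays });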

View File

@@ -25,7 +25,6 @@ describe(SyncEntityType.MemoryToAssetV1, () => {
await ctx.newMemoryAsset({ memoryId: memory.id, assetId: asset.id });
const response = await ctx.syncStream(auth, [SyncRequestType.MemoryToAssetsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -35,10 +34,11 @@ describe(SyncEntityType.MemoryToAssetV1, () => {
},
type: 'MemoryToAssetV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await expect(ctx.syncStream(auth, [SyncRequestType.MemoryToAssetsV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoryToAssetsV1]);
});
it('should detect and sync a deleted memory to asset relation', async () => {
@@ -50,7 +50,6 @@ describe(SyncEntityType.MemoryToAssetV1, () => {
await memoryRepo.removeAssetIds(memory.id, [asset.id]);
const response = await ctx.syncStream(auth, [SyncRequestType.MemoryToAssetsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -60,10 +59,11 @@ describe(SyncEntityType.MemoryToAssetV1, () => {
},
type: 'MemoryToAssetDeleteV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await expect(ctx.syncStream(auth, [SyncRequestType.MemoryToAssetsV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoryToAssetsV1]);
});
it('should not sync a memory to asset relation or delete for an unrelated user', async () => {
@@ -74,11 +74,18 @@ describe(SyncEntityType.MemoryToAssetV1, () => {
const { memory } = await ctx.newMemory({ ownerId: user2.id });
await ctx.newMemoryAsset({ memoryId: memory.id, assetId: asset.id });
expect(await ctx.syncStream(auth, [SyncRequestType.MemoryToAssetsV1])).toHaveLength(0);
expect(await ctx.syncStream(auth2, [SyncRequestType.MemoryToAssetsV1])).toHaveLength(1);
expect(await ctx.syncStream(auth2, [SyncRequestType.MemoryToAssetsV1])).toEqual([
expect.objectContaining({ type: SyncEntityType.MemoryToAssetV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoryToAssetsV1]);
await memoryRepo.removeAssetIds(memory.id, [asset.id]);
expect(await ctx.syncStream(auth, [SyncRequestType.MemoryToAssetsV1])).toHaveLength(0);
expect(await ctx.syncStream(auth2, [SyncRequestType.MemoryToAssetsV1])).toHaveLength(1);
expect(await ctx.syncStream(auth2, [SyncRequestType.MemoryToAssetsV1])).toEqual([
expect.objectContaining({ type: SyncEntityType.MemoryToAssetDeleteV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoryToAssetsV1]);
});
});

View File

@@ -23,7 +23,6 @@ describe(SyncEntityType.MemoryV1, () => {
const { memory } = await ctx.newMemory({ ownerId: user1.id });
const response = await ctx.syncStream(auth, [SyncRequestType.MemoriesV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -43,10 +42,11 @@ describe(SyncEntityType.MemoryV1, () => {
},
type: 'MemoryV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await expect(ctx.syncStream(auth, [SyncRequestType.MemoriesV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoriesV1]);
});
it('should detect and sync a deleted memory', async () => {
@@ -56,7 +56,6 @@ describe(SyncEntityType.MemoryV1, () => {
await memoryRepo.delete(memory.id);
const response = await ctx.syncStream(auth, [SyncRequestType.MemoriesV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -65,10 +64,11 @@ describe(SyncEntityType.MemoryV1, () => {
},
type: 'MemoryDeleteV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await expect(ctx.syncStream(auth, [SyncRequestType.MemoriesV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoriesV1]);
});
it('should sync a memory and then an update to that same memory', async () => {
@@ -77,29 +77,29 @@ describe(SyncEntityType.MemoryV1, () => {
const { memory } = await ctx.newMemory({ ownerId: user.id });
const response = await ctx.syncStream(auth, [SyncRequestType.MemoriesV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
data: expect.objectContaining({ id: memory.id }),
type: 'MemoryV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await memoryRepo.update(memory.id, { seenAt: new Date() });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.MemoriesV1]);
expect(newResponse).toHaveLength(1);
expect(newResponse).toEqual([
{
ack: expect.any(String),
data: expect.objectContaining({ id: memory.id }),
type: 'MemoryV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await expect(ctx.syncStream(auth, [SyncRequestType.MemoriesV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoriesV1]);
});
it('should not sync a memory or a memory delete for an unrelated user', async () => {
@@ -108,8 +108,8 @@ describe(SyncEntityType.MemoryV1, () => {
const { user: user2 } = await ctx.newUser();
const { memory } = await ctx.newMemory({ ownerId: user2.id });
await expect(ctx.syncStream(auth, [SyncRequestType.MemoriesV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoriesV1]);
await memoryRepo.delete(memory.id);
await expect(ctx.syncStream(auth, [SyncRequestType.MemoriesV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoriesV1]);
});
});

View File

@@ -26,7 +26,6 @@ describe(SyncRequestType.PartnerAssetExifsV1, () => {
await ctx.newExif({ assetId: asset.id, make: 'Canon' });
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -59,10 +58,11 @@ describe(SyncRequestType.PartnerAssetExifsV1, () => {
},
type: SyncEntityType.PartnerAssetExifV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetExifsV1]);
});
it('should not sync partner asset exif for own user', async () => {
@@ -72,8 +72,11 @@ describe(SyncRequestType.PartnerAssetExifsV1, () => {
const { asset } = await ctx.newAsset({ ownerId: auth.user.id });
await ctx.newExif({ assetId: asset.id, make: 'Canon' });
await expect(ctx.syncStream(auth, [SyncRequestType.AssetExifsV1])).resolves.toHaveLength(1);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1])).resolves.toHaveLength(0);
await expect(ctx.syncStream(auth, [SyncRequestType.AssetExifsV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.AssetExifV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetExifsV1]);
});
it('should not sync partner asset exif for unrelated user', async () => {
@@ -86,8 +89,11 @@ describe(SyncRequestType.PartnerAssetExifsV1, () => {
const { session } = await ctx.newSession({ userId: user3.id });
const authUser3 = factory.auth({ session, user: user3 });
await expect(ctx.syncStream(authUser3, [SyncRequestType.AssetExifsV1])).resolves.toHaveLength(1);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1])).resolves.toHaveLength(0);
await expect(ctx.syncStream(authUser3, [SyncRequestType.AssetExifsV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.AssetExifV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetExifsV1]);
});
it('should backfill partner asset exif when a partner shared their library with you', async () => {
@@ -102,7 +108,6 @@ describe(SyncRequestType.PartnerAssetExifsV1, () => {
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual(
expect.arrayContaining([
{
@@ -112,6 +117,7 @@ describe(SyncRequestType.PartnerAssetExifsV1, () => {
}),
type: SyncEntityType.PartnerAssetExifV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]),
);
@@ -119,7 +125,6 @@ describe(SyncRequestType.PartnerAssetExifsV1, () => {
await ctx.newPartner({ sharedById: user3.id, sharedWithId: auth.user.id });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1]);
expect(newResponse).toHaveLength(2);
expect(newResponse).toEqual([
{
ack: expect.any(String),
@@ -133,10 +138,11 @@ describe(SyncRequestType.PartnerAssetExifsV1, () => {
data: {},
type: SyncEntityType.SyncAckV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetExifsV1]);
});
it('should handle partners with users ids lower than a uuidv7', async () => {
@@ -151,7 +157,6 @@ describe(SyncRequestType.PartnerAssetExifsV1, () => {
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -160,15 +165,15 @@ describe(SyncRequestType.PartnerAssetExifsV1, () => {
}),
type: SyncEntityType.PartnerAssetExifV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
// This checks that our ack upsert is correct
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetExifsV1]);
await ctx.newPartner({ sharedById: user3.id, sharedWithId: auth.user.id });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1]);
expect(newResponse).toHaveLength(2);
expect(newResponse).toEqual([
{
ack: expect.stringMatching(new RegExp(`${SyncEntityType.PartnerAssetExifBackfillV1}\\|.+?\\|.+`)),
@@ -182,10 +187,11 @@ describe(SyncRequestType.PartnerAssetExifsV1, () => {
data: {},
type: SyncEntityType.SyncAckV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetExifsV1]);
});
it('should only backfill partner assets created prior to the current partner asset checkpoint', async () => {
@@ -203,7 +209,6 @@ describe(SyncRequestType.PartnerAssetExifsV1, () => {
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -212,13 +217,13 @@ describe(SyncRequestType.PartnerAssetExifsV1, () => {
}),
type: SyncEntityType.PartnerAssetExifV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.newPartner({ sharedById: user3.id, sharedWithId: auth.user.id });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1]);
expect(newResponse).toHaveLength(3);
expect(newResponse).toEqual([
{
ack: expect.stringMatching(new RegExp(`${SyncEntityType.PartnerAssetExifBackfillV1}\\|.+?\\|.+`)),
@@ -239,9 +244,10 @@ describe(SyncRequestType.PartnerAssetExifsV1, () => {
}),
type: SyncEntityType.PartnerAssetExifV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetExifsV1]);
});
});
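The `stringMatching` assertions above (`PartnerAssetExifBackfillV1\|.+?\|.+`) imply that an ack is a pipe-delimited string: the entity type, the updateId, and an optional extra segment for backfill entities. A hedged sketch of what `toAck` from `src/utils/sync` might look like under that reading; the exact signature and the optional third field are assumptions:

// Assumed shape: 'Type|updateId' for normal entities, 'Type|updateId|extraId' for backfills.
const toAck = ({ type, updateId, extraId }: { type: string; updateId: string; extraId?: string }) =>
  [type, updateId, extraId].filter((part) => part !== undefined).join('|');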

View File

@@ -46,7 +46,6 @@ describe(SyncRequestType.PartnerAssetsV1, () => {
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -70,10 +69,11 @@ describe(SyncRequestType.PartnerAssetsV1, () => {
},
type: SyncEntityType.PartnerAssetV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
});
it('should detect and sync a deleted partner asset', async () => {
@@ -86,7 +86,6 @@ describe(SyncRequestType.PartnerAssetsV1, () => {
await assetRepo.remove(asset);
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -95,10 +94,11 @@ describe(SyncRequestType.PartnerAssetsV1, () => {
},
type: SyncEntityType.PartnerAssetDeleteV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
});
it('should not sync a deleted partner asset due to a user delete', async () => {
@@ -109,7 +109,7 @@ describe(SyncRequestType.PartnerAssetsV1, () => {
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
await ctx.newAsset({ ownerId: user2.id });
await userRepo.delete({ id: user2.id }, true);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
});
it('should not sync a deleted partner asset due to a partner delete (unshare)', async () => {
@@ -119,9 +119,12 @@ describe(SyncRequestType.PartnerAssetsV1, () => {
const { user: user2 } = await ctx.newUser();
await ctx.newAsset({ ownerId: user2.id });
const { partner } = await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(1);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.PartnerAssetV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await partnerRepo.remove(partner);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
});
it('should not sync an asset or asset delete for own user', async () => {
@@ -132,13 +135,19 @@ describe(SyncRequestType.PartnerAssetsV1, () => {
const { asset } = await ctx.newAsset({ ownerId: auth.user.id });
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
await expect(ctx.syncStream(auth, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(0);
await expect(ctx.syncStream(auth, [SyncRequestType.AssetsV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.AssetV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
await assetRepo.remove(asset);
await expect(ctx.syncStream(auth, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(0);
await expect(ctx.syncStream(auth, [SyncRequestType.AssetsV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.AssetDeleteV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
});
it('should not sync an asset or asset delete for unrelated user', async () => {
@@ -150,13 +159,19 @@ describe(SyncRequestType.PartnerAssetsV1, () => {
const { asset } = await ctx.newAsset({ ownerId: user2.id });
const auth2 = factory.auth({ session, user: user2 });
await expect(ctx.syncStream(auth2, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(0);
await expect(ctx.syncStream(auth2, [SyncRequestType.AssetsV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.AssetV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
await assetRepo.remove(asset);
await expect(ctx.syncStream(auth2, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(0);
await expect(ctx.syncStream(auth2, [SyncRequestType.AssetsV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.AssetDeleteV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
});
it('should backfill partner assets when a partner shared their library with you', async () => {
@@ -170,7 +185,6 @@ describe(SyncRequestType.PartnerAssetsV1, () => {
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -179,13 +193,13 @@ describe(SyncRequestType.PartnerAssetsV1, () => {
}),
type: SyncEntityType.PartnerAssetV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.newPartner({ sharedById: user3.id, sharedWithId: auth.user.id });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1]);
expect(newResponse).toHaveLength(2);
expect(newResponse).toEqual([
{
ack: expect.any(String),
@@ -199,10 +213,11 @@ describe(SyncRequestType.PartnerAssetsV1, () => {
data: {},
type: SyncEntityType.SyncAckV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
});
it('should only backfill partner assets created prior to the current partner asset checkpoint', async () => {
@@ -218,7 +233,6 @@ describe(SyncRequestType.PartnerAssetsV1, () => {
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -227,12 +241,12 @@ describe(SyncRequestType.PartnerAssetsV1, () => {
}),
type: SyncEntityType.PartnerAssetV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.newPartner({ sharedById: user3.id, sharedWithId: auth.user.id });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1]);
expect(newResponse).toHaveLength(3);
expect(newResponse).toEqual([
{
ack: expect.any(String),
@@ -253,9 +267,10 @@ describe(SyncRequestType.PartnerAssetsV1, () => {
}),
type: SyncEntityType.PartnerAssetV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
});
});
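The two backfill tests encode an ordering contract: when a new partner is added after a checkpoint already exists, that partner's pre-existing rows arrive first as backfill entities, a `SyncAckV1` then closes the backfill phase, and only afterwards do regular entities resume. Schematically (the backfill type name below is inferred by analogy with `PartnerAssetExifBackfillV1` and `PartnerStackBackfillV1` elsewhere in this diff, not confirmed by these hunks):

// Schematic of the entity order both backfill tests assert:
const expectedOrder = [
  'PartnerAssetBackfillV1', // partner rows created before the current checkpoint
  'SyncAckV1',              // closes out the backfill phase
  'PartnerAssetV1',         // incremental sync resumes for newer rows
  'SyncCompleteV1',         // stream is fully caught up
];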

View File

@@ -29,7 +29,6 @@ describe(SyncRequestType.PartnerStacksV1, () => {
const { stack } = await ctx.newStack({ ownerId: user2.id }, [asset.id]);
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -42,10 +41,11 @@ describe(SyncRequestType.PartnerStacksV1, () => {
},
type: SyncEntityType.PartnerStackV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
});
it('should detect and sync a deleted partner stack', async () => {
@@ -58,7 +58,6 @@ describe(SyncRequestType.PartnerStacksV1, () => {
await stackRepo.delete(stack.id);
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.stringContaining('PartnerStackDeleteV1'),
@@ -67,10 +66,11 @@ describe(SyncRequestType.PartnerStacksV1, () => {
},
type: SyncEntityType.PartnerStackDeleteV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
});
it('should not sync a deleted partner stack due to a user delete', async () => {
@@ -81,7 +81,7 @@ describe(SyncRequestType.PartnerStacksV1, () => {
const { asset } = await ctx.newAsset({ ownerId: user2.id });
await ctx.newStack({ ownerId: user2.id }, [asset.id]);
await userRepo.delete({ id: user2.id }, true);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
});
it('should not sync a deleted partner stack due to a partner delete (unshare)', async () => {
@@ -91,9 +91,12 @@ describe(SyncRequestType.PartnerStacksV1, () => {
const { asset } = await ctx.newAsset({ ownerId: user2.id });
await ctx.newStack({ ownerId: user2.id }, [asset.id]);
const { partner } = await ctx.newPartner({ sharedById: user2.id, sharedWithId: user.id });
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1])).resolves.toHaveLength(1);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.PartnerStackV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await partnerRepo.remove(partner);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
});
it('should not sync a stack or stack delete for own user', async () => {
@@ -103,11 +106,17 @@ describe(SyncRequestType.PartnerStacksV1, () => {
const { asset } = await ctx.newAsset({ ownerId: user.id });
const { stack } = await ctx.newStack({ ownerId: user.id }, [asset.id]);
await ctx.newPartner({ sharedById: user2.id, sharedWithId: user.id });
await expect(ctx.syncStream(auth, [SyncRequestType.StacksV1])).resolves.toHaveLength(1);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1])).resolves.toHaveLength(0);
await expect(ctx.syncStream(auth, [SyncRequestType.StacksV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.StackV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
await stackRepo.delete(stack.id);
await expect(ctx.syncStream(auth, [SyncRequestType.StacksV1])).resolves.toHaveLength(1);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1])).resolves.toHaveLength(0);
await expect(ctx.syncStream(auth, [SyncRequestType.StacksV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.StackDeleteV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
});
it('should not sync a stack or stack delete for unrelated user', async () => {
@@ -119,13 +128,19 @@ describe(SyncRequestType.PartnerStacksV1, () => {
const { stack } = await ctx.newStack({ ownerId: user2.id }, [asset.id]);
const auth2 = factory.auth({ session, user: user2 });
await expect(ctx.syncStream(auth2, [SyncRequestType.StacksV1])).resolves.toHaveLength(1);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1])).resolves.toHaveLength(0);
await expect(ctx.syncStream(auth2, [SyncRequestType.StacksV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.StackV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
await stackRepo.delete(stack.id);
await expect(ctx.syncStream(auth2, [SyncRequestType.StacksV1])).resolves.toHaveLength(1);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1])).resolves.toHaveLength(0);
await expect(ctx.syncStream(auth2, [SyncRequestType.StacksV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.StackDeleteV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
});
it('should backfill partner stacks when a partner shared their library with you', async () => {
@@ -140,7 +155,6 @@ describe(SyncRequestType.PartnerStacksV1, () => {
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.stringContaining('PartnerStackV1'),
@@ -149,12 +163,12 @@ describe(SyncRequestType.PartnerStacksV1, () => {
}),
type: SyncEntityType.PartnerStackV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.newPartner({ sharedById: user3.id, sharedWithId: user.id });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1]);
expect(newResponse).toHaveLength(2);
expect(newResponse).toEqual([
{
ack: expect.stringContaining(SyncEntityType.PartnerStackBackfillV1),
@@ -168,10 +182,11 @@ describe(SyncRequestType.PartnerStacksV1, () => {
data: {},
type: SyncEntityType.SyncAckV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
});
it('should only backfill partner stacks created prior to the current partner stack checkpoint', async () => {
@@ -189,7 +204,6 @@ describe(SyncRequestType.PartnerStacksV1, () => {
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.stringContaining(SyncEntityType.PartnerStackV1),
@@ -198,12 +212,12 @@ describe(SyncRequestType.PartnerStacksV1, () => {
}),
type: SyncEntityType.PartnerStackV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.newPartner({ sharedById: user3.id, sharedWithId: auth.user.id });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1]);
expect(newResponse).toHaveLength(3);
expect(newResponse).toEqual([
{
ack: expect.any(String),
@@ -224,9 +238,10 @@ describe(SyncRequestType.PartnerStacksV1, () => {
}),
type: SyncEntityType.PartnerStackV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
});
});

View File

@@ -26,7 +26,6 @@ describe(SyncEntityType.PartnerV1, () => {
const { partner } = await ctx.newPartner({ sharedById: user2.id, sharedWithId: user1.id });
const response = await ctx.syncStream(auth, [SyncRequestType.PartnersV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -37,10 +36,11 @@ describe(SyncEntityType.PartnerV1, () => {
},
type: 'PartnerV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnersV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnersV1]);
});
it('should detect and sync a deleted partner', async () => {
@@ -53,22 +53,20 @@ describe(SyncEntityType.PartnerV1, () => {
await partnerRepo.remove(partner);
const response = await ctx.syncStream(auth, [SyncRequestType.PartnersV1]);
expect(response).toHaveLength(1);
expect(response).toEqual(
  expect.arrayContaining([
    {
      ack: expect.any(String),
      data: {
        sharedById: partner.sharedById,
        sharedWithId: partner.sharedWithId,
      },
      type: 'PartnerDeleteV1',
    },
  ]),
);
expect(response).toEqual([
  {
    ack: expect.any(String),
    data: {
      sharedById: partner.sharedById,
      sharedWithId: partner.sharedWithId,
    },
    type: 'PartnerDeleteV1',
  },
  expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnersV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnersV1]);
});
it('should detect and sync a partner share both to and from another user', async () => {
@@ -79,32 +77,30 @@ describe(SyncEntityType.PartnerV1, () => {
const { partner: partner2 } = await ctx.newPartner({ sharedById: user1.id, sharedWithId: user2.id });
const response = await ctx.syncStream(auth, [SyncRequestType.PartnersV1]);
expect(response).toHaveLength(2);
expect(response).toEqual(
  expect.arrayContaining([
    {
      ack: expect.any(String),
      data: {
        inTimeline: partner1.inTimeline,
        sharedById: partner1.sharedById,
        sharedWithId: partner1.sharedWithId,
      },
      type: 'PartnerV1',
    },
    {
      ack: expect.any(String),
      data: {
        inTimeline: partner2.inTimeline,
        sharedById: partner2.sharedById,
        sharedWithId: partner2.sharedWithId,
      },
      type: 'PartnerV1',
    },
  ]),
);
expect(response).toEqual([
  {
    ack: expect.any(String),
    data: {
      inTimeline: partner1.inTimeline,
      sharedById: partner1.sharedById,
      sharedWithId: partner1.sharedWithId,
    },
    type: 'PartnerV1',
  },
  {
    ack: expect.any(String),
    data: {
      inTimeline: partner2.inTimeline,
      sharedById: partner2.sharedById,
      sharedWithId: partner2.sharedWithId,
    },
    type: 'PartnerV1',
  },
  expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnersV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnersV1]);
});
it('should sync a partner and then an update to that same partner', async () => {
@@ -116,7 +112,6 @@ describe(SyncEntityType.PartnerV1, () => {
const { partner } = await ctx.newPartner({ sharedById: user2.id, sharedWithId: user1.id });
const response = await ctx.syncStream(auth, [SyncRequestType.PartnersV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -127,6 +122,7 @@ describe(SyncEntityType.PartnerV1, () => {
},
type: 'PartnerV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
@@ -137,7 +133,6 @@ describe(SyncEntityType.PartnerV1, () => {
);
const newResponse = await ctx.syncStream(auth, [SyncRequestType.PartnersV1]);
expect(newResponse).toHaveLength(1);
expect(newResponse).toEqual([
{
ack: expect.any(String),
@@ -148,10 +143,11 @@ describe(SyncEntityType.PartnerV1, () => {
},
type: 'PartnerV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnersV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnersV1]);
});
it('should not sync a partner or partner delete for an unrelated user', async () => {
@@ -163,9 +159,9 @@ describe(SyncEntityType.PartnerV1, () => {
const { user: user3 } = await ctx.newUser();
const { partner } = await ctx.newPartner({ sharedById: user2.id, sharedWithId: user3.id });
await expect(ctx.syncStream(auth, [SyncRequestType.PartnersV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnersV1]);
await partnerRepo.remove(partner);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnersV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnersV1]);
});
it('should not sync a partner delete after a user is deleted', async () => {
@@ -177,6 +173,6 @@ describe(SyncEntityType.PartnerV1, () => {
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
await userRepo.delete({ id: user2.id }, true);
await expect(ctx.syncStream(auth, [SyncRequestType.PartnersV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnersV1]);
});
});

View File

@@ -24,7 +24,6 @@ describe(SyncEntityType.PersonV1, () => {
const { person } = await ctx.newPerson({ ownerId: auth.user.id });
const response = await ctx.syncStream(auth, [SyncRequestType.PeopleV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -40,10 +39,11 @@ describe(SyncEntityType.PersonV1, () => {
}),
type: 'PersonV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await expect(ctx.syncStream(auth, [SyncRequestType.PeopleV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PeopleV1]);
});
it('should detect and sync a deleted person', async () => {
@@ -53,7 +53,6 @@ describe(SyncEntityType.PersonV1, () => {
await personRepo.delete([person.id]);
const response = await ctx.syncStream(auth, [SyncRequestType.PeopleV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -62,10 +61,11 @@ describe(SyncEntityType.PersonV1, () => {
},
type: 'PersonDeleteV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await expect(ctx.syncStream(auth, [SyncRequestType.PeopleV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PeopleV1]);
});
it('should not sync a person or person delete for an unrelated user', async () => {
@@ -76,11 +76,18 @@ describe(SyncEntityType.PersonV1, () => {
const { person } = await ctx.newPerson({ ownerId: user2.id });
const auth2 = factory.auth({ session, user: user2 });
expect(await ctx.syncStream(auth2, [SyncRequestType.PeopleV1])).toHaveLength(1);
expect(await ctx.syncStream(auth, [SyncRequestType.PeopleV1])).toHaveLength(0);
expect(await ctx.syncStream(auth2, [SyncRequestType.PeopleV1])).toEqual([
expect.objectContaining({ type: SyncEntityType.PersonV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PeopleV1]);
await personRepo.delete([person.id]);
expect(await ctx.syncStream(auth2, [SyncRequestType.PeopleV1])).toHaveLength(1);
expect(await ctx.syncStream(auth, [SyncRequestType.PeopleV1])).toHaveLength(0);
expect(await ctx.syncStream(auth2, [SyncRequestType.PeopleV1])).toEqual([
expect.objectContaining({ type: SyncEntityType.PersonDeleteV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PeopleV1]);
});
});

View File

@@ -21,8 +21,7 @@ describe(SyncEntityType.SyncResetV1, () => {
it('should work', async () => {
const { auth, ctx } = await setup();
const response = await ctx.syncStream(auth, [SyncRequestType.AssetsV1]);
expect(response).toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetsV1]);
});
it('should detect a pending sync reset', async () => {
@@ -41,7 +40,10 @@ describe(SyncEntityType.SyncResetV1, () => {
await ctx.newAsset({ ownerId: user.id });
await expect(ctx.syncStream(auth, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1);
await expect(ctx.syncStream(auth, [SyncRequestType.AssetsV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.AssetV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.get(SessionRepository).update(auth.session!.id, {
isPendingSyncReset: true,
@@ -62,9 +64,8 @@ describe(SyncEntityType.SyncResetV1, () => {
});
await expect(ctx.syncStream(auth, [SyncRequestType.AssetsV1], true)).resolves.toEqual([
expect.objectContaining({
type: SyncEntityType.AssetV1,
}),
expect.objectContaining({ type: SyncEntityType.AssetV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
});
@@ -86,9 +87,8 @@ describe(SyncEntityType.SyncResetV1, () => {
const postResetResponse = await ctx.syncStream(auth, [SyncRequestType.AssetsV1]);
expect(postResetResponse).toEqual([
expect.objectContaining({
type: SyncEntityType.AssetV1,
}),
expect.objectContaining({ type: SyncEntityType.AssetV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
});
});
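These reset tests pin down the contract a client has to honor: when a `SyncResetV1` entity arrives (its ack is the literal `'SyncResetV1|reset'`, per the earlier spec), local sync state must be discarded and acknowledged before streaming resumes. A rough client-side sketch; `clearLocalSyncState` is a hypothetical helper, and acking the reset via `syncAckAll` is an assumption about the flow, not something these hunks show:

declare function clearLocalSyncState(): Promise<void>; // hypothetical helper

const items = await ctx.syncStream(auth, [SyncRequestType.AssetsV1]);
for (const item of items) {
  if (item.type === SyncEntityType.SyncResetV1) {
    await clearLocalSyncState(); // drop everything synced so far
    await ctx.syncAckAll(auth, [item]); // confirm the reset ('SyncResetV1|reset') before restarting
    break;
  }
  // otherwise: apply the entity to local state, then ack it
}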

View File

@@ -25,7 +25,6 @@ describe(SyncEntityType.StackV1, () => {
const { stack } = await ctx.newStack({ ownerId: user.id }, [asset1.id, asset2.id]);
const response = await ctx.syncStream(auth, [SyncRequestType.StacksV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.stringContaining('StackV1'),
@@ -38,10 +37,11 @@ describe(SyncEntityType.StackV1, () => {
},
type: 'StackV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await expect(ctx.syncStream(auth, [SyncRequestType.StacksV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.StacksV1]);
});
it('should detect and sync a deleted stack', async () => {
@@ -53,17 +53,17 @@ describe(SyncEntityType.StackV1, () => {
await stackRepo.delete(stack.id);
const response = await ctx.syncStream(auth, [SyncRequestType.StacksV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.stringContaining('StackDeleteV1'),
data: { stackId: stack.id },
type: 'StackDeleteV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await expect(ctx.syncStream(auth, [SyncRequestType.StacksV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.StacksV1]);
});
it('should sync a stack and then an update to that same stack', async () => {
@@ -74,22 +74,29 @@ describe(SyncEntityType.StackV1, () => {
const { stack } = await ctx.newStack({ ownerId: user.id }, [asset1.id, asset2.id]);
const response = await ctx.syncStream(auth, [SyncRequestType.StacksV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
expect.objectContaining({ type: SyncEntityType.StackV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await stackRepo.update(stack.id, { primaryAssetId: asset2.id });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.StacksV1]);
expect(newResponse).toHaveLength(1);
expect(newResponse).toEqual([
expect.objectContaining({ type: SyncEntityType.StackV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
expect(newResponse).toEqual([
{
ack: expect.stringContaining('StackV1'),
data: expect.objectContaining({ id: stack.id, primaryAssetId: asset2.id }),
type: 'StackV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await expect(ctx.syncStream(auth, [SyncRequestType.StacksV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.StacksV1]);
});
it('should not sync a stack or stack delete for an unrelated user', async () => {
@@ -100,8 +107,8 @@ describe(SyncEntityType.StackV1, () => {
const { asset: asset2 } = await ctx.newAsset({ ownerId: user2.id });
const { stack } = await ctx.newStack({ ownerId: user2.id }, [asset1.id, asset2.id]);
await expect(ctx.syncStream(auth, [SyncRequestType.StacksV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.StacksV1]);
await stackRepo.delete(stack.id);
await expect(ctx.syncStream(auth, [SyncRequestType.StacksV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.StacksV1]);
});
});

View File

@@ -25,7 +25,6 @@ describe(SyncEntityType.UserMetadataV1, () => {
await userRepo.upsertMetadata(user.id, { key: UserMetadataKey.Onboarding, value: { isOnboarded: true } });
const response = await ctx.syncStream(auth, [SyncRequestType.UserMetadataV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -36,10 +35,11 @@ describe(SyncEntityType.UserMetadataV1, () => {
},
type: 'UserMetadataV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await expect(ctx.syncStream(auth, [SyncRequestType.UserMetadataV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.UserMetadataV1]);
});
it('should update user metadata', async () => {
@@ -49,7 +49,6 @@ describe(SyncEntityType.UserMetadataV1, () => {
await userRepo.upsertMetadata(user.id, { key: UserMetadataKey.Onboarding, value: { isOnboarded: true } });
const response = await ctx.syncStream(auth, [SyncRequestType.UserMetadataV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -60,6 +59,7 @@ describe(SyncEntityType.UserMetadataV1, () => {
},
type: 'UserMetadataV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
@@ -77,10 +77,11 @@ describe(SyncEntityType.UserMetadataV1, () => {
},
type: 'UserMetadataV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, updatedResponse);
await expect(ctx.syncStream(auth, [SyncRequestType.UserMetadataV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.UserMetadataV1]);
});
});
@@ -92,7 +93,6 @@ describe(SyncEntityType.UserMetadataDeleteV1, () => {
await userRepo.upsertMetadata(user.id, { key: UserMetadataKey.Onboarding, value: { isOnboarded: true } });
const response = await ctx.syncStream(auth, [SyncRequestType.UserMetadataV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -103,6 +103,7 @@ describe(SyncEntityType.UserMetadataDeleteV1, () => {
},
type: 'UserMetadataV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
@@ -118,6 +119,7 @@ describe(SyncEntityType.UserMetadataDeleteV1, () => {
},
type: 'UserMetadataDeleteV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
});
});

View File

@@ -28,7 +28,6 @@ describe(SyncEntityType.UserV1, () => {
}
const response = await ctx.syncStream(auth, [SyncRequestType.UsersV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -43,10 +42,11 @@ describe(SyncEntityType.UserV1, () => {
},
type: 'UserV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await expect(ctx.syncStream(auth, [SyncRequestType.UsersV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.UsersV1]);
});
it('should detect and sync a soft deleted user', async () => {
@@ -56,7 +56,6 @@ describe(SyncEntityType.UserV1, () => {
const response = await ctx.syncStream(auth, [SyncRequestType.UsersV1]);
expect(response).toHaveLength(2);
expect(response).toEqual(
expect.arrayContaining([
{
@@ -69,11 +68,12 @@ describe(SyncEntityType.UserV1, () => {
data: expect.objectContaining({ id: deleted.id }),
type: 'UserV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]),
);
await ctx.syncAckAll(auth, response);
await expect(ctx.syncStream(auth, [SyncRequestType.UsersV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.UsersV1]);
});
it('should detect and sync a deleted user', async () => {
@@ -85,7 +85,6 @@ describe(SyncEntityType.UserV1, () => {
await userRepo.delete({ id: user.id }, true);
const response = await ctx.syncStream(auth, [SyncRequestType.UsersV1]);
expect(response).toHaveLength(2);
expect(response).toEqual([
{
ack: expect.any(String),
@@ -99,10 +98,11 @@ describe(SyncEntityType.UserV1, () => {
data: expect.objectContaining({ id: authUser.id }),
type: 'UserV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await expect(ctx.syncStream(auth, [SyncRequestType.UsersV1])).resolves.toEqual([]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.UsersV1]);
});
it('should sync a user and then an update to that same user', async () => {
@@ -111,13 +111,13 @@ describe(SyncEntityType.UserV1, () => {
const userRepo = ctx.get(UserRepository);
const response = await ctx.syncStream(auth, [SyncRequestType.UsersV1]);
expect(response).toHaveLength(1);
expect(response).toEqual([
{
ack: expect.any(String),
data: expect.objectContaining({ id: user.id }),
type: 'UserV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
@@ -125,13 +125,13 @@ describe(SyncEntityType.UserV1, () => {
const updated = await userRepo.update(auth.user.id, { name: 'new name' });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.UsersV1]);
expect(newResponse).toHaveLength(1);
expect(newResponse).toEqual([
{
ack: expect.any(String),
data: expect.objectContaining({ id: user.id, name: updated.name }),
type: 'UserV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
});
});

View File

@@ -1,4 +1,3 @@
import { randomUUID } from 'node:crypto';
import {
Activity,
ApiKey,
@@ -17,14 +16,15 @@ import { MapAsset } from 'src/dtos/asset-response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { AssetStatus, AssetType, AssetVisibility, MemoryType, Permission, UserMetadataKey, UserStatus } from 'src/enum';
import { OnThisDayData, UserMetadataItem } from 'src/types';
import { v4, v7 } from 'uuid';
export const newUuid = () => randomUUID() as string;
export const newUuid = () => v4();
export const newUuids = () =>
Array.from({ length: 100 })
.fill(0)
.map(() => newUuid());
export const newDate = () => new Date();
export const newUuidV7 = () => 'uuid-v7';
export const newUuidV7 = () => v7();
export const newSha1 = () => Buffer.from('this is a fake hash');
export const newEmbedding = () => {
const embedding = Array.from({ length: 512 })
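Replacing the `'uuid-v7'` string stub with a real `v7()` is what lets the new checkpoint tests mint ids at chosen timestamps and keeps ack comparisons meaningful; a constant stub would defeat both. A small illustration, where the ordering claim follows from the v7 layout rather than from this codebase:

import { v7 } from 'uuid';

// v7 ids are time-ordered: the leading 48 bits are a millisecond timestamp, so ids
// minted later compare greater, and tests can mint ids "in the past" via `msecs`.
const older = v7({ msecs: Date.now() - 60_000 });
const newer = v7();
console.assert(older < newer); // lexicographic order tracks creation time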

View File

@@ -1 +0,0 @@
22.18.0

Some files were not shown because too many files have changed in this diff.