Compare commits
3 Commits
feat/corem...refactor/a

| Author | SHA1 | Date |
| --- | --- | --- |
| | 2d309e91da | |
| | a2c0ecfc2a | |
| | 085f39dea6 | |

.github/workflows/build-mobile.yml (29 changes)

@@ -7,15 +7,6 @@ on:
ref:
required: false
type: string
secrets:
KEY_JKS:
required: true
ALIAS:
required: true
ANDROID_KEY_PASSWORD:
required: true
ANDROID_STORE_PASSWORD:
required: true
pull_request:
push:
branches: [main]

@@ -24,21 +15,14 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

permissions: {}

jobs:
pre-job:
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
should_run: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false

- id: found_paths
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
with:

@@ -54,17 +38,22 @@ jobs:
build-sign-android:
name: Build and sign Android
needs: pre-job
permissions:
contents: read
# Skip when PR from a fork
if: ${{ !github.event.pull_request.head.repo.fork && github.actor != 'dependabot[bot]' && needs.pre-job.outputs.should_run == 'true' }}
runs-on: macos-14

steps:
- name: Determine ref
id: get-ref
run: |
input_ref="${{ inputs.ref }}"
github_ref="${{ github.sha }}"
ref="${input_ref:-$github_ref}"
echo "ref=$ref" >> $GITHUB_OUTPUT

- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
ref: ${{ inputs.ref || github.sha }}
persist-credentials: false
ref: ${{ steps.get-ref.outputs.ref }}

- uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4
with:
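
The `Determine ref` step above relies on shell default expansion: `${input_ref:-$github_ref}` resolves to the workflow-call input when it is non-empty and falls back to the triggering commit SHA otherwise. A minimal standalone sketch of the same pattern (variable names and values here are illustrative):

```bash
#!/usr/bin/env bash
# ${var:-fallback} substitutes the fallback when var is unset OR empty.
input_ref=""          # e.g. an optional workflow input left blank
github_ref="0123abc"  # e.g. the triggering commit SHA
ref="${input_ref:-$github_ref}"
echo "$ref"           # prints 0123abc because input_ref is empty
```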

.github/workflows/cache-cleanup.yml (17 changes)

@@ -8,38 +8,31 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

permissions: {}

jobs:
cleanup:
name: Cleanup
runs-on: ubuntu-latest
permissions:
contents: read
actions: write
steps:
- name: Check out code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false

- name: Cleanup
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
REF: ${{ github.ref }}
run: |
gh extension install actions/gh-actions-cache

REPO=${{ github.repository }}
BRANCH=${{ github.ref }}

echo "Fetching list of cache keys"
cacheKeysForPR=$(gh actions-cache list -R $REPO -B ${REF} -L 100 | cut -f 1 )
cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH -L 100 | cut -f 1 )

## Setting this to not fail the workflow while deleting cache keys.
set +e
echo "Deleting caches..."
for cacheKey in $cacheKeysForPR
do
gh actions-cache delete $cacheKey -R "$REPO" -B "${REF}" --confirm
gh actions-cache delete $cacheKey -R $REPO -B $BRANCH --confirm
done
echo "Done"
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

.github/workflows/cli.yml (13 changes)

@@ -16,23 +16,19 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

permissions: {}
permissions:
packages: write

jobs:
publish:
name: CLI Publish
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./cli

steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false

# Setup .npmrc file to publish to npm
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
with:

@@ -52,16 +48,11 @@ jobs:
docker:
name: Docker
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
needs: publish

steps:
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false

- name: Set up QEMU
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0

.github/workflows/codeql-analysis.yml (4 changes)

@@ -24,8 +24,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

permissions: {}

jobs:
analyze:
name: Analyze

@@ -45,8 +43,6 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL

.github/workflows/docker.yml (85 changes)

@@ -12,21 +12,18 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

permissions: {}
permissions:
packages: write

jobs:
pre-job:
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
should_run_server: ${{ steps.found_paths.outputs.server == 'true' || steps.should_force.outputs.should_force == 'true' }}
should_run_ml: ${{ steps.found_paths.outputs.machine-learning == 'true' || steps.should_force.outputs.should_force == 'true' }}
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- id: found_paths
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
with:

@@ -48,9 +45,6 @@ jobs:
retag_ml:
name: Re-Tag ML
needs: pre-job
permissions:
contents: read
packages: write
if: ${{ needs.pre-job.outputs.should_run_ml == 'false' && !github.event.pull_request.head.repo.fork }}
runs-on: ubuntu-latest
strategy:

@@ -64,22 +58,18 @@ jobs:
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Re-tag image
env:
REGISTRY_NAME: 'ghcr.io'
REPOSITORY: ${{ github.repository_owner }}/immich-machine-learning
TAG_OLD: main${{ matrix.suffix }}
TAG_PR: ${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
TAG_COMMIT: commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
run: |
docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_PR}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_COMMIT}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
REGISTRY_NAME="ghcr.io"
REPOSITORY=${{ github.repository_owner }}/immich-machine-learning
TAG_OLD=main${{ matrix.suffix }}
TAG_PR=${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
TAG_COMMIT=commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_PR $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_COMMIT $REGISTRY_NAME/$REPOSITORY:$TAG_OLD

retag_server:
name: Re-Tag Server
needs: pre-job
permissions:
contents: read
packages: write
if: ${{ needs.pre-job.outputs.should_run_server == 'false' && !github.event.pull_request.head.repo.fork }}
runs-on: ubuntu-latest
strategy:

@@ -93,22 +83,18 @@ jobs:
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Re-tag image
env:
REGISTRY_NAME: 'ghcr.io'
REPOSITORY: ${{ github.repository_owner }}/immich-server
TAG_OLD: main${{ matrix.suffix }}
TAG_PR: ${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
TAG_COMMIT: commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
run: |
docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_PR}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_COMMIT}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
REGISTRY_NAME="ghcr.io"
REPOSITORY=${{ github.repository_owner }}/immich-server
TAG_OLD=main${{ matrix.suffix }}
TAG_PR=${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
TAG_COMMIT=commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_PR $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_COMMIT $REGISTRY_NAME/$REPOSITORY:$TAG_OLD

build_and_push_ml:
name: Build and Push ML
needs: pre-job
permissions:
contents: read
packages: write
if: ${{ needs.pre-job.outputs.should_run_ml == 'true' }}
runs-on: ${{ matrix.runner }}
env:

@@ -162,8 +148,6 @@ jobs:

- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0

@@ -177,14 +161,11 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}

- name: Generate cache key suffix
env:
REF: ${{ github.ref_name }}
run: |
if [[ "${{ github.event_name }}" == "pull_request" ]]; then
echo "CACHE_KEY_SUFFIX=pr-${{ github.event.number }}" >> $GITHUB_ENV
else
SUFFIX=$(echo "${REF}" | sed 's/[^a-zA-Z0-9]/-/g')
echo "CACHE_KEY_SUFFIX=${SUFFIX}" >> $GITHUB_ENV
echo "CACHE_KEY_SUFFIX=$(echo ${{ github.ref_name }} | sed 's/[^a-zA-Z0-9]/-/g')" >> $GITHUB_ENV
fi

- name: Generate cache target

@@ -194,7 +175,7 @@ jobs:
# Essentially just ignore the cache output (forks can't write to registry cache)
echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
else
echo "cache-to=type=registry,ref=${GHCR_REPO}-build-cache:${PLATFORM_PAIR}-${{ matrix.device }}-${CACHE_KEY_SUFFIX},mode=max,compression=zstd" >> $GITHUB_OUTPUT
echo "cache-to=type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-${{ matrix.device }}-${{ env.CACHE_KEY_SUFFIX }},mode=max,compression=zstd" >> $GITHUB_OUTPUT
fi

- name: Generate docker image tags

@@ -240,10 +221,6 @@ jobs:
merge_ml:
name: Merge & Push ML
runs-on: ubuntu-latest
permissions:
contents: read
actions: read
packages: write
if: ${{ needs.pre-job.outputs.should_run_ml == 'true' && !github.event.pull_request.head.repo.fork }}
env:
GHCR_REPO: ghcr.io/${{ github.repository_owner }}/immich-machine-learning

@@ -331,16 +308,15 @@ jobs:
fi

TAGS=$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
SOURCE_ARGS=$(printf "${GHCR_REPO}@sha256:%s " *)
SOURCE_ARGS=$(printf '${{ env.GHCR_REPO }}@sha256:%s ' *)

echo "docker buildx imagetools create $TAGS "${ANNOTATIONS[@]}" $SOURCE_ARGS"

docker buildx imagetools create $TAGS "${ANNOTATIONS[@]}" $SOURCE_ARGS

build_and_push_server:
name: Build and Push Server
runs-on: ${{ matrix.runner }}
permissions:
contents: read
packages: write
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
env:

@@ -364,8 +340,6 @@ jobs:

- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3

@@ -379,14 +353,11 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}

- name: Generate cache key suffix
env:
REF: ${{ github.ref_name }}
run: |
if [[ "${{ github.event_name }}" == "pull_request" ]]; then
echo "CACHE_KEY_SUFFIX=pr-${{ github.event.number }}" >> $GITHUB_ENV
else
SUFFIX=$(echo "${REF}" | sed 's/[^a-zA-Z0-9]/-/g')
echo "CACHE_KEY_SUFFIX=${SUFFIX}" >> $GITHUB_ENV
echo "CACHE_KEY_SUFFIX=$(echo ${{ github.ref_name }} | sed 's/[^a-zA-Z0-9]/-/g')" >> $GITHUB_ENV
fi

- name: Generate cache target

@@ -396,7 +367,7 @@ jobs:
# Essentially just ignore the cache output (forks can't write to registry cache)
echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
else
echo "cache-to=type=registry,ref=${GHCR_REPO}-build-cache:${PLATFORM_PAIR}-${CACHE_KEY_SUFFIX},mode=max,compression=zstd" >> $GITHUB_OUTPUT
echo "cache-to=type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-${{ env.CACHE_KEY_SUFFIX }},mode=max,compression=zstd" >> $GITHUB_OUTPUT
fi

- name: Generate docker image tags

@@ -442,10 +413,6 @@ jobs:
merge_server:
name: Merge & Push Server
runs-on: ubuntu-latest
permissions:
contents: read
actions: read
packages: write
if: ${{ needs.pre-job.outputs.should_run_server == 'true' && !github.event.pull_request.head.repo.fork }}
env:
GHCR_REPO: ghcr.io/${{ github.repository_owner }}/immich-server

@@ -519,14 +486,15 @@ jobs:
fi

TAGS=$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
SOURCE_ARGS=$(printf "${GHCR_REPO}@sha256:%s " *)
SOURCE_ARGS=$(printf '${{ env.GHCR_REPO }}@sha256:%s ' *)

echo "docker buildx imagetools create $TAGS "${ANNOTATIONS[@]}" $SOURCE_ARGS"

docker buildx imagetools create $TAGS "${ANNOTATIONS[@]}" $SOURCE_ARGS

success-check-server:
name: Docker Build & Push Server Success
needs: [merge_server, retag_server]
permissions: {}
runs-on: ubuntu-latest
if: always()
steps:

@@ -540,7 +508,6 @@ jobs:
success-check-ml:
name: Docker Build & Push ML Success
needs: [merge_ml, retag_ml]
permissions: {}
runs-on: ubuntu-latest
if: always()
steps:

.github/workflows/docs-build.yml (10 changes)

@@ -10,20 +10,14 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

permissions: {}

jobs:
pre-job:
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
should_run: ${{ steps.found_paths.outputs.docs == 'true' || steps.should_force.outputs.should_force == 'true' }}
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- id: found_paths
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
with:

@@ -39,8 +33,6 @@ jobs:
build:
name: Docs Build
needs: pre-job
permissions:
contents: read
if: ${{ needs.pre-job.outputs.should_run == 'true' }}
runs-on: ubuntu-latest
defaults:

@@ -50,8 +42,6 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false

- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4

.github/workflows/docs-deploy.yml (20 changes)

@@ -9,9 +9,6 @@ jobs:
checks:
name: Docs Deploy Checks
runs-on: ubuntu-latest
permissions:
actions: read
pull-requests: read
outputs:
parameters: ${{ steps.parameters.outputs.result }}
artifact: ${{ steps.get-artifact.outputs.result }}

@@ -39,8 +36,6 @@ jobs:
- name: Determine deploy parameters
id: parameters
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
env:
HEAD_SHA: ${{ github.event.workflow_run.head_sha }}
with:
script: |
const eventType = context.payload.workflow_run.event;

@@ -62,8 +57,7 @@ jobs:
} else if (eventType == "pull_request") {
let pull_number = context.payload.workflow_run.pull_requests[0]?.number;
if(!pull_number) {
const {HEAD_SHA} = process.env;
const response = await github.rest.search.issuesAndPullRequests({q: `repo:${{ github.repository }} is:pr sha:${HEAD_SHA}`,per_page: 1,})
const response = await github.rest.search.issuesAndPullRequests({q: 'repo:${{ github.repository }} is:pr sha:${{ github.event.workflow_run.head_sha }}',per_page: 1,})
const items = response.data.items
if (items.length < 1) {
throw new Error("No pull request found for the commit")

@@ -101,16 +95,10 @@ jobs:
name: Docs Deploy
runs-on: ubuntu-latest
needs: checks
permissions:
contents: read
actions: read
pull-requests: write
if: ${{ fromJson(needs.checks.outputs.artifact).found && fromJson(needs.checks.outputs.parameters).shouldDeploy }}
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false

- name: Load parameters
id: parameters

@@ -174,11 +162,9 @@ jobs:

- name: Output Cleaning
id: clean
env:
TG_OUTPUT: ${{ steps.docs-output.outputs.tg_action_output }}
run: |
CLEANED=$(echo "$TG_OUTPUT" | sed 's|%0A|\n|g ; s|%3C|<|g' | jq -c .)
echo "output=$CLEANED" >> $GITHUB_OUTPUT
TG_OUT=$(echo '${{ steps.docs-output.outputs.tg_action_output }}' | sed 's|%0A|\n|g ; s|%3C|<|g' | jq -c .)
echo "output=$TG_OUT" >> $GITHUB_OUTPUT

- name: Publish to Cloudflare Pages
uses: cloudflare/pages-action@f0a1cd58cd66095dee69bfa18fa5efd1dde93bca # v1

.github/workflows/docs-destroy.yml (7 changes)

@@ -3,20 +3,13 @@ on:
pull_request_target:
types: [closed]

permissions: {}

jobs:
deploy:
name: Docs Destroy
runs-on: ubuntu-latest
permissions:
contents: read
pull-requests: write
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false

- name: Destroy Docs Subdomain
env:

.github/workflows/fix-format.yml (4 changes)

@@ -4,14 +4,11 @@ on:
pull_request:
types: [labeled]

permissions: {}

jobs:
fix-formatting:
runs-on: ubuntu-latest
if: ${{ github.event.label.name == 'fix:formatting' }}
permissions:
contents: write
pull-requests: write
steps:
- name: Generate a token

@@ -26,7 +23,6 @@ jobs:
with:
ref: ${{ github.event.pull_request.head.ref }}
token: ${{ steps.generate-token.outputs.token }}
persist-credentials: true

- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4

.github/workflows/pr-label-validation.yml (2 changes)

@@ -4,8 +4,6 @@ on:
pull_request_target:
types: [opened, labeled, unlabeled, synchronize]

permissions: {}

jobs:
validate-release-label:
runs-on: ubuntu-latest

.github/workflows/pr-labeler.yml (2 changes)

@@ -2,8 +2,6 @@ name: 'Pull Request Labeler'
on:
- pull_request_target

permissions: {}

jobs:
labeler:
permissions:

@@ -4,13 +4,9 @@ on:
pull_request:
types: [opened, synchronize, reopened, edited]

permissions: {}

jobs:
validate-pr-title:
runs-on: ubuntu-latest
permissions:
pull-requests: write
steps:
- name: PR Conventional Commit Validation
uses: ytanikin/PRConventionalCommits@b628c5a234cc32513014b7bfdd1e47b532124d98 # 1.3.0

.github/workflows/prepare-release.yml (18 changes)

@@ -21,14 +21,13 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-root
cancel-in-progress: true

permissions: {}

jobs:
bump_version:
runs-on: ubuntu-latest

outputs:
ref: ${{ steps.push-tag.outputs.commit_long_sha }}
permissions: {} # No job-level permissions are needed because it uses the app-token

steps:
- name: Generate a token
id: generate-token

@@ -41,7 +40,6 @@ jobs:
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
token: ${{ steps.generate-token.outputs.token }}
persist-credentials: true

- name: Install uv
uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 # v5

@@ -61,20 +59,14 @@ jobs:
build_mobile:
uses: ./.github/workflows/build-mobile.yml
needs: bump_version
secrets:
KEY_JKS: ${{ secrets.KEY_JKS }}
ALIAS: ${{ secrets.ALIAS }}
ANDROID_KEY_PASSWORD: ${{ secrets.ANDROID_KEY_PASSWORD }}
ANDROID_STORE_PASSWORD: ${{ secrets.ANDROID_STORE_PASSWORD }}
secrets: inherit
with:
ref: ${{ needs.bump_version.outputs.ref }}

prepare_release:
runs-on: ubuntu-latest
needs: build_mobile
permissions:
actions: read # To download the app artifact
# No content permissions are needed because it uses the app-token

steps:
- name: Generate a token
id: generate-token

@@ -87,7 +79,6 @@ jobs:
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
token: ${{ steps.generate-token.outputs.token }}
persist-credentials: false

- name: Download APK
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4

@@ -99,7 +90,6 @@ jobs:
with:
draft: true
tag_name: ${{ env.IMMICH_VERSION }}
token: ${{ steps.generate-token.outputs.token }}
generate_release_notes: true
body_path: misc/release/notes.tmpl
files: |

.github/workflows/preview-label.yaml (2 changes)

@@ -4,8 +4,6 @@ on:
pull_request:
types: [labeled, closed]

permissions: {}

jobs:
comment-status:
runs-on: ubuntu-latest

.github/workflows/sdk.yml (8 changes)

@@ -4,22 +4,18 @@ on:
release:
types: [published]

permissions: {}
permissions:
packages: write

jobs:
publish:
name: Publish `@immich/sdk`
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./open-api/typescript-sdk
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false

# Setup .npmrc file to publish to npm
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
with:

.github/workflows/static_analysis.yml (16 changes)

@@ -9,20 +9,14 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

permissions: {}

jobs:
pre-job:
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
should_run: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- id: found_paths
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
with:

@@ -39,14 +33,12 @@ jobs:
name: Run Dart Code Analysis
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run == 'true' }}

runs-on: ubuntu-latest
permissions:
contents: read

steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false

- name: Setup Flutter SDK
uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2

@@ -77,11 +69,9 @@ jobs:

- name: Verify files have not changed
if: steps.verify-changed-files.outputs.files_changed == 'true'
env:
CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
run: |
echo "ERROR: Generated files not up to date! Run make_build inside the mobile directory"
echo "Changed files: ${CHANGED_FILES}"
echo "Changed files: ${{ steps.verify-changed-files.outputs.changed_files }}"
exit 1

- name: Run dart analyze

.github/workflows/test.yml (80 changes)

@@ -9,13 +9,9 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

permissions: {}

jobs:
pre-job:
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
should_run_web: ${{ steps.found_paths.outputs.web == 'true' || steps.should_force.outputs.should_force == 'true' }}
should_run_server: ${{ steps.found_paths.outputs.server == 'true' || steps.should_force.outputs.should_force == 'true' }}

@@ -29,9 +25,6 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false

- id: found_paths
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
with:

@@ -65,8 +58,6 @@ jobs:
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./server

@@ -74,8 +65,6 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false

- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4

@@ -106,8 +95,6 @@ jobs:
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_cli == 'true' }}
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./cli

@@ -115,8 +102,6 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false

- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4

@@ -151,8 +136,6 @@ jobs:
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_cli == 'true' }}
runs-on: windows-latest
permissions:
contents: read
defaults:
run:
working-directory: ./cli

@@ -160,8 +143,6 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false

- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4

@@ -189,8 +170,6 @@ jobs:
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_web == 'true' }}
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./web

@@ -198,8 +177,6 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false

- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4

@@ -238,8 +215,6 @@ jobs:
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_e2e == 'true' }}
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./e2e

@@ -247,8 +222,6 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false

- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4

@@ -281,8 +254,6 @@ jobs:
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./server

@@ -290,8 +261,6 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false

- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4

@@ -310,8 +279,6 @@ jobs:
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_e2e_server_cli == 'true' }}
runs-on: mich
permissions:
contents: read
defaults:
run:
working-directory: ./e2e

@@ -320,7 +287,6 @@ jobs:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
submodules: 'recursive'

- name: Setup Node

@@ -355,8 +321,6 @@ jobs:
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_e2e_web == 'true' }}
runs-on: mich
permissions:
contents: read
defaults:
run:
working-directory: ./e2e

@@ -365,7 +329,6 @@ jobs:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
submodules: 'recursive'

- name: Setup Node

@@ -399,13 +362,8 @@ jobs:
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_mobile == 'true' }}
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false

- name: Setup Flutter SDK
uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2
with:

@@ -420,16 +378,11 @@ jobs:
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_ml == 'true' }}
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./machine-learning
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false

- name: Install uv
uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 # v5
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5

@@ -458,8 +411,6 @@ jobs:
needs: pre-job
if: ${{ needs.pre-job.outputs['should_run_.github'] == 'true' }}
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./.github

@@ -467,8 +418,6 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false

- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4

@@ -485,31 +434,22 @@ jobs:
shellcheck:
name: ShellCheck
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false

- name: Run ShellCheck
uses: ludeeus/action-shellcheck@master
with:
ignore_paths: >-
**/open-api/**
**/openapi**
**/openapi/**
**/node_modules/**

generated-api-up-to-date:
name: OpenAPI Clients
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false

- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4

@@ -536,18 +476,14 @@ jobs:

- name: Verify files have not changed
if: steps.verify-changed-files.outputs.files_changed == 'true'
env:
CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
run: |
echo "ERROR: Generated files not up to date!"
echo "Changed files: ${CHANGED_FILES}"
echo "Changed files: ${{ steps.verify-changed-files.outputs.changed_files }}"
exit 1

generated-typeorm-migrations-up-to-date:
name: TypeORM Checks
runs-on: ubuntu-latest
permissions:
contents: read
services:
postgres:
image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52

@@ -569,8 +505,6 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false

- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4

@@ -587,7 +521,7 @@ jobs:
run: npm run migrations:run

- name: Test npm run schema:reset command works
run: npm run schema:reset
run: npm run typeorm:schema:reset

- name: Generate new migrations
continue-on-error: true

@@ -601,11 +535,9 @@ jobs:
server/src
- name: Verify migration files have not changed
if: steps.verify-changed-files.outputs.files_changed == 'true'
env:
CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
run: |
echo "ERROR: Generated migration files not up to date!"
echo "Changed files: ${CHANGED_FILES}"
echo "Changed files: ${{ steps.verify-changed-files.outputs.changed_files }}"
cat ./src/*-TestMigration.ts
exit 1

@@ -623,11 +555,9 @@ jobs:

- name: Verify SQL files have not changed
if: steps.verify-changed-sql-files.outputs.files_changed == 'true'
env:
CHANGED_FILES: ${{ steps.verify-changed-sql-files.outputs.changed_files }}
run: |
echo "ERROR: Generated SQL files not up to date!"
echo "Changed files: ${CHANGED_FILES}"
echo "Changed files: ${{ steps.verify-changed-sql-files.outputs.changed_files }}"
exit 1

# mobile-integration-tests:

.github/workflows/weblate-lock.yml (13 changes)

@@ -4,32 +4,30 @@ on:
pull_request:
branches: [main]

permissions: {}

jobs:
pre-job:
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
should_run: ${{ steps.found_paths.outputs.i18n == 'true' && github.head_ref != 'chore/translations'}}
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- id: found_paths
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
with:
filters: |
i18n:
- 'i18n/!(en)**\.json'
- name: Debug
run: |
echo "Should run: ${{ steps.found_paths.outputs.i18n == 'true' && github.head_ref != 'chore/translations'}}"
echo "Found i18n paths: ${{ steps.found_paths.outputs.i18n }}"
echo "Head ref: ${{ github.head_ref }}"

enforce-lock:
name: Check Weblate Lock
needs: [pre-job]
runs-on: ubuntu-latest
permissions: {}
if: ${{ needs.pre-job.outputs.should_run == 'true' }}
steps:
- name: Check weblate lock

@@ -49,7 +47,6 @@ jobs:
name: Weblate Lock Check Success
needs: [enforce-lock]
runs-on: ubuntu-latest
permissions: {}
if: always()
steps:
- name: Any jobs failed?
@@ -1,14 +1,14 @@
# Database Migrations

After making any changes in the `server/src/schema`, a database migration needs to run in order to register the changes in the database. Follow the steps below to create a new migration.
After making any changes in the `server/src/entities`, a database migration needs to run in order to register the changes in the database. Follow the steps below to create a new migration.

1. Run the command

```bash
npm run migrations:generate <migration-name>
npm run typeorm:migrations:generate <migration-name>
```

2. Check if the migration file makes sense.
3. Move the migration file to the folder `./server/src/schema/migrations` in your code editor.
3. Move the migration file to the folder `./server/src/migrations` in your code editor.

The server will automatically detect `*.ts` file changes and restart. Part of the server start-up process includes running any new migrations, so they will be applied immediately.
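
Taken together, the steps above amount to a short shell session; the migration name and the generated-file location below are illustrative, not prescribed by the docs:

```bash
# Run from the server/ directory; "AddExampleColumn" is a hypothetical name.
npm run migrations:generate AddExampleColumn

# Review the generated file, then move it next to the other migrations
# (where the tool writes the file may vary by setup).
mv ./src/*AddExampleColumn*.ts ./src/schema/migrations/
```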
@@ -148,31 +148,30 @@ Redis (Sentinel) URL example JSON before encoding:

## Machine Learning

| Variable | Description | Default | Containers |
| :--- | :--- | :---: | :--- |
| `MACHINE_LEARNING_MODEL_TTL` | Inactivity time (s) before a model is unloaded (disabled if \<= 0) | `300` | machine learning |
| `MACHINE_LEARNING_MODEL_TTL_POLL_S` | Interval (s) between checks for the model TTL (disabled if \<= 0) | `10` | machine learning |
| `MACHINE_LEARNING_CACHE_FOLDER` | Directory where models are downloaded | `/cache` | machine learning |
| `MACHINE_LEARNING_REQUEST_THREADS`<sup>\*1</sup> | Thread count of the request thread pool (disabled if \<= 0) | number of CPU cores | machine learning |
| `MACHINE_LEARNING_MODEL_INTER_OP_THREADS` | Number of parallel model operations | `1` | machine learning |
| `MACHINE_LEARNING_MODEL_INTRA_OP_THREADS` | Number of threads for each model operation | `2` | machine learning |
| `MACHINE_LEARNING_MODEL_ARENA` | Pre-allocates CPU memory to avoid memory fragmentation | true | machine learning |
| `MACHINE_LEARNING_WORKERS`<sup>\*2</sup> | Number of worker processes to spawn | `1` | machine learning |
| `MACHINE_LEARNING_HTTP_KEEPALIVE_TIMEOUT_S`<sup>\*3</sup> | HTTP Keep-alive time in seconds | `2` | machine learning |
| `MACHINE_LEARNING_WORKER_TIMEOUT` | Maximum time (s) of unresponsiveness before a worker is killed | `300` | machine learning |
| `MACHINE_LEARNING_PRELOAD__CLIP__TEXTUAL` | Comma-separated list of (textual) CLIP model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_PRELOAD__CLIP__VISUAL` | Comma-separated list of (visual) CLIP model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__RECOGNITION` | Comma-separated list of (recognition) facial recognition model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__DETECTION` | Comma-separated list of (detection) facial recognition model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_ANN` | Enable ARM-NN hardware acceleration if supported | `True` | machine learning |
| `MACHINE_LEARNING_ANN_FP16_TURBO` | Execute operations in FP16 precision: increasing speed, reducing precision (applies only to ARM-NN) | `False` | machine learning |
| `MACHINE_LEARNING_ANN_TUNING_LEVEL` | ARM-NN GPU tuning level (1: rapid, 2: normal, 3: exhaustive) | `2` | machine learning |
| `MACHINE_LEARNING_DEVICE_IDS`<sup>\*4</sup> | Device IDs to use in multi-GPU environments | `0` | machine learning |
| `MACHINE_LEARNING_MAX_BATCH_SIZE__FACIAL_RECOGNITION` | Set the maximum number of faces that will be processed at once by the facial recognition model | None (`1` if using OpenVINO) | machine learning |
| `MACHINE_LEARNING_PING_TIMEOUT` | How long (ms) to wait for a PING response when checking if an ML server is available | `2000` | server |
| `MACHINE_LEARNING_AVAILABILITY_BACKOFF_TIME` | How long to ignore ML servers that are offline before trying again | `30000` | server |
| `MACHINE_LEARNING_RKNN` | Enable RKNN hardware acceleration if supported | `True` | machine learning |
| `MACHINE_LEARNING_RKNN_THREADS` | How many threads of RKNN runtime should be spun up while inferencing | `1` | machine learning |

| Variable | Description | Default | Containers |
| :--- | :--- | :---: | :--- |
| `MACHINE_LEARNING_MODEL_TTL` | Inactivity time (s) before a model is unloaded (disabled if \<= 0) | `300` | machine learning |
| `MACHINE_LEARNING_MODEL_TTL_POLL_S` | Interval (s) between checks for the model TTL (disabled if \<= 0) | `10` | machine learning |
| `MACHINE_LEARNING_CACHE_FOLDER` | Directory where models are downloaded | `/cache` | machine learning |
| `MACHINE_LEARNING_REQUEST_THREADS`<sup>\*1</sup> | Thread count of the request thread pool (disabled if \<= 0) | number of CPU cores | machine learning |
| `MACHINE_LEARNING_MODEL_INTER_OP_THREADS` | Number of parallel model operations | `1` | machine learning |
| `MACHINE_LEARNING_MODEL_INTRA_OP_THREADS` | Number of threads for each model operation | `2` | machine learning |
| `MACHINE_LEARNING_WORKERS`<sup>\*2</sup> | Number of worker processes to spawn | `1` | machine learning |
| `MACHINE_LEARNING_HTTP_KEEPALIVE_TIMEOUT_S`<sup>\*3</sup> | HTTP Keep-alive time in seconds | `2` | machine learning |
| `MACHINE_LEARNING_WORKER_TIMEOUT` | Maximum time (s) of unresponsiveness before a worker is killed | `120` (`300` if using OpenVINO) | machine learning |
| `MACHINE_LEARNING_PRELOAD__CLIP__TEXTUAL` | Comma-separated list of (textual) CLIP model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_PRELOAD__CLIP__VISUAL` | Comma-separated list of (visual) CLIP model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__RECOGNITION` | Comma-separated list of (recognition) facial recognition model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__DETECTION` | Comma-separated list of (detection) facial recognition model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_ANN` | Enable ARM-NN hardware acceleration if supported | `True` | machine learning |
| `MACHINE_LEARNING_ANN_FP16_TURBO` | Execute operations in FP16 precision: increasing speed, reducing precision (applies only to ARM-NN) | `False` | machine learning |
| `MACHINE_LEARNING_ANN_TUNING_LEVEL` | ARM-NN GPU tuning level (1: rapid, 2: normal, 3: exhaustive) | `2` | machine learning |
| `MACHINE_LEARNING_DEVICE_IDS`<sup>\*4</sup> | Device IDs to use in multi-GPU environments | `0` | machine learning |
| `MACHINE_LEARNING_MAX_BATCH_SIZE__FACIAL_RECOGNITION` | Set the maximum number of faces that will be processed at once by the facial recognition model | None (`1` if using OpenVINO) | machine learning |
| `MACHINE_LEARNING_PING_TIMEOUT` | How long (ms) to wait for a PING response when checking if an ML server is available | `2000` | server |
| `MACHINE_LEARNING_AVAILABILITY_BACKOFF_TIME` | How long to ignore ML servers that are offline before trying again | `30000` | server |
| `MACHINE_LEARNING_RKNN` | Enable RKNN hardware acceleration if supported | `True` | machine learning |
| `MACHINE_LEARNING_RKNN_THREADS` | How many threads of RKNN runtime should be spun up while inferencing | `1` | machine learning |

\*1: It is recommended to begin with this parameter when changing the concurrency levels of the machine learning service and then tune the other ones.
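
As footnote \*1 suggests, tuning usually starts with the request thread pool before touching the other knobs. A minimal sketch of overriding a few of these variables on a standalone container (the image tag and the chosen values are illustrative, not recommendations):

```bash
docker run --rm \
  -e MACHINE_LEARNING_REQUEST_THREADS=8 \
  -e MACHINE_LEARNING_MODEL_INTER_OP_THREADS=1 \
  -e MACHINE_LEARNING_MODEL_INTRA_OP_THREADS=2 \
  -e MACHINE_LEARNING_WORKERS=1 \
  ghcr.io/immich-app/immich-machine-learning:release
```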
@@ -1,5 +0,0 @@
# Errors

## TypeORM Upgrade

The upgrade to Immich `v2.x.x` has a required upgrade path to `v1.132.0+`. This means it is required to start up the application at least once on version `1.132.0` (or later). Doing so will complete database schema upgrades that are required for `v2.0.0`. After Immich has successfully booted on this version, shut the system down and try the `v2.x.x` upgrade again.
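
In practice, the upgrade path described in the removed note means booting the intermediate version once before moving on. A sketch for a compose-based install, assuming the compose file reads an `IMMICH_VERSION` variable (the tags and variable name are illustrative):

```bash
# Boot v1.132.0 once so its schema migrations run to completion.
export IMMICH_VERSION=v1.132.0
docker compose pull && docker compose up -d

# After a clean start, shut down and move to v2.
docker compose down
export IMMICH_VERSION=v2.0.0
docker compose pull && docker compose up -d
```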
@@ -996,7 +996,6 @@
"filetype": "Filetype",
"filter": "Filter",
"filter_people": "Filter people",
"filter_places": "Filter places",
"find_them_fast": "Find them fast by name with search",
"fix_incorrect_match": "Fix incorrect match",
"folder": "Folder",

@@ -1432,6 +1431,8 @@
"recent_searches": "Recent searches",
"recently_added": "Recently added",
"recently_added_page_title": "Recently Added",
"recently_taken": "Recently taken",
"recently_taken_page_title": "Recently Taken",
"refresh": "Refresh",
"refresh_encoded_videos": "Refresh encoded videos",
"refresh_faces": "Refresh faces",
@@ -65,8 +65,7 @@ RUN if [ "$DEVICE" = "rocm" ]; then \

FROM python:3.11-slim-bookworm@sha256:49d73c49616929b0a4f37c50fee0056eb4b0f15de624591e8d9bf84b4dfdd3ce AS prod-cpu

ENV LD_PRELOAD=/usr/lib/libmimalloc.so.2 \
MACHINE_LEARNING_MODEL_ARENA=false
ENV LD_PRELOAD=/usr/lib/libmimalloc.so.2

FROM python:3.11-slim-bookworm@sha256:49d73c49616929b0a4f37c50fee0056eb4b0f15de624591e8d9bf84b4dfdd3ce AS prod-openvino

@@ -83,8 +82,7 @@ RUN apt-get update && \

FROM nvidia/cuda:12.2.2-runtime-ubuntu22.04@sha256:94c1577b2cd9dd6c0312dc04dff9cb2fdce2b268018abc3d7c2dbcacf1155000 AS prod-cuda

ENV LD_PRELOAD=/usr/lib/libmimalloc.so.2 \
MACHINE_LEARNING_MODEL_ARENA=false
ENV LD_PRELOAD=/usr/lib/libmimalloc.so.2

RUN apt-get update && \
apt-get install --no-install-recommends -yqq libcudnn9-cuda-12 && \

@@ -100,8 +98,7 @@ FROM rocm/dev-ubuntu-22.04:6.3.4-complete@sha256:1f7e92ca7e3a3785680473329ed1091
FROM prod-cpu AS prod-armnn

ENV LD_LIBRARY_PATH=/opt/armnn \
LD_PRELOAD=/usr/lib/libmimalloc.so.2 \
MACHINE_LEARNING_MODEL_ARENA=false
LD_PRELOAD=/usr/lib/libmimalloc.so.2

RUN apt-get update && apt-get install -y --no-install-recommends ocl-icd-libopencl1 mesa-opencl-icd libgomp1 && \
rm -rf /var/lib/apt/lists/* && \

@@ -121,8 +118,7 @@ COPY --from=builder-armnn \

FROM prod-cpu AS prod-rknn

ENV LD_PRELOAD=/usr/lib/libmimalloc.so.2 \
MACHINE_LEARNING_MODEL_ARENA=false
ENV LD_PRELOAD=/usr/lib/libmimalloc.so.2

ADD --checksum=sha256:73993ed4b440460825f21611731564503cc1d5a0c123746477da6cd574f34885 https://github.com/airockchip/rknn-toolkit2/raw/refs/tags/v2.3.0/rknpu2/runtime/Linux/librknn_api/aarch64/librknnrt.so /usr/lib/
@@ -61,7 +61,6 @@ class Settings(BaseSettings):
request_threads: int = os.cpu_count() or 4
model_inter_op_threads: int = 0
model_intra_op_threads: int = 0
model_arena: bool = True
ann: bool = True
ann_fp16_turbo: bool = False
ann_tuning_level: int = 2
@@ -79,7 +79,6 @@ SUPPORTED_PROVIDERS = [
"CUDAExecutionProvider",
"ROCMExecutionProvider",
"OpenVINOExecutionProvider",
"CoreMLExecutionProvider",
"CPUExecutionProvider",
]
@@ -96,14 +96,6 @@ class OrtSession:
"precision": "FP32",
"cache_dir": (self.model_path.parent / "openvino").as_posix(),
}
case "CoreMLExecutionProvider":
options = {
"ModelFormat": "MLProgram",
"MLComputeUnits": "ALL",
"SpecializationStrategy": "FastPrediction",
"AllowLowPrecisionAccumulationOnGPU": "1",
"ModelCacheDirectory": (self.model_path.parent / "coreml").as_posix(),
}
case _:
options = {}
provider_options.append(options)

@@ -123,7 +115,7 @@ class OrtSession:
@property
def _sess_options_default(self) -> ort.SessionOptions:
sess_options = ort.SessionOptions()
sess_options.enable_cpu_mem_arena = settings.model_arena
sess_options.enable_cpu_mem_arena = False

# avoid thread contention between models
if settings.model_inter_op_threads > 0:
@@ -180,7 +180,6 @@ class TestOrtSession:
CUDA_EP_OUT_OF_ORDER = ["CPUExecutionProvider", "CUDAExecutionProvider"]
TRT_EP = ["TensorrtExecutionProvider", "CUDAExecutionProvider", "CPUExecutionProvider"]
ROCM_EP = ["ROCMExecutionProvider", "CPUExecutionProvider"]
COREML_EP = ["CoreMLExecutionProvider", "CPUExecutionProvider"]

@pytest.mark.providers(CPU_EP)
def test_sets_cpu_provider(self, providers: list[str]) -> None:

@@ -226,12 +225,6 @@ class TestOrtSession:

assert session.providers == self.ROCM_EP

@pytest.mark.providers(COREML_EP)
def test_uses_coreml(self, providers: list[str]) -> None:
session = OrtSession("ViT-B-32__openai")

assert session.providers == self.COREML_EP

def test_sets_provider_kwarg(self) -> None:
providers = ["CUDAExecutionProvider"]
session = OrtSession("ViT-B-32__openai", providers=providers)

@@ -291,6 +284,7 @@ class TestOrtSession:
assert session.sess_options.execution_mode == ort.ExecutionMode.ORT_SEQUENTIAL
assert session.sess_options.inter_op_num_threads == 1
assert session.sess_options.intra_op_num_threads == 2
assert session.sess_options.enable_cpu_mem_arena is False

def test_sets_default_sess_options_does_not_set_threads_if_non_cpu_and_default_threads(self) -> None:
session = OrtSession("ViT-B-32__openai", providers=["CUDAExecutionProvider", "CPUExecutionProvider"])

@@ -308,26 +302,6 @@ class TestOrtSession:
assert session.sess_options.inter_op_num_threads == 2
assert session.sess_options.intra_op_num_threads == 4

def test_uses_arena_if_enabled(self, mocker: MockerFixture) -> None:
mock_settings = mocker.patch("immich_ml.sessions.ort.settings", autospec=True)
mock_settings.model_inter_op_threads = 0
mock_settings.model_intra_op_threads = 0
mock_settings.model_arena = True

session = OrtSession("ViT-B-32__openai", providers=["CPUExecutionProvider"])

assert session.sess_options.enable_cpu_mem_arena

def test_does_not_use_arena_if_disabled(self, mocker: MockerFixture) -> None:
mock_settings = mocker.patch("immich_ml.sessions.ort.settings", autospec=True)
mock_settings.model_inter_op_threads = 0
mock_settings.model_intra_op_threads = 0
mock_settings.model_arena = False

session = OrtSession("ViT-B-32__openai", providers=["CPUExecutionProvider"])

assert not session.sess_options.enable_cpu_mem_arena

def test_sets_sess_options_kwarg(self) -> None:
sess_options = ort.SessionOptions()
session = OrtSession(
@@ -1,12 +1,8 @@
import 'package:http/http.dart' as http;
import 'package:immich_mobile/domain/models/sync_event.model.dart';
import 'package:openapi/api.dart';

abstract interface class ISyncApiRepository {
Future<void> ack(List<String> data);

Future<void> streamChanges(
Function(List<SyncEvent>, Function() abort) onData, {
int batchSize,
http.Client? httpClient,
});
Stream<List<SyncEvent>> getSyncEvents(List<SyncRequestType> type);
}
@@ -2,17 +2,9 @@ import 'package:immich_mobile/domain/interfaces/db.interface.dart';
import 'package:openapi/api.dart';

abstract interface class ISyncStreamRepository implements IDatabaseRepository {
Future<void> updateUsersV1(Iterable<SyncUserV1> data);
Future<void> deleteUsersV1(Iterable<SyncUserDeleteV1> data);
Future<bool> updateUsersV1(Iterable<SyncUserV1> data);
Future<bool> deleteUsersV1(Iterable<SyncUserDeleteV1> data);

Future<void> updatePartnerV1(Iterable<SyncPartnerV1> data);
Future<void> deletePartnerV1(Iterable<SyncPartnerDeleteV1> data);

Future<void> updateAssetsV1(Iterable<SyncAssetV1> data);
Future<void> deleteAssetsV1(Iterable<SyncAssetDeleteV1> data);
Future<void> updateAssetsExifV1(Iterable<SyncAssetExifV1> data);

Future<void> updatePartnerAssetsV1(Iterable<SyncAssetV1> data);
Future<void> deletePartnerAssetsV1(Iterable<SyncAssetDeleteV1> data);
Future<void> updatePartnerAssetsExifV1(Iterable<SyncAssetExifV1> data);
Future<bool> updatePartnerV1(Iterable<SyncPartnerV1> data);
Future<bool> deletePartnerV1(Iterable<SyncPartnerDeleteV1> data);
}
@@ -2,11 +2,25 @@

import 'dart:async';

import 'package:collection/collection.dart';
import 'package:immich_mobile/domain/interfaces/sync_api.interface.dart';
import 'package:immich_mobile/domain/interfaces/sync_stream.interface.dart';
import 'package:immich_mobile/domain/models/sync_event.model.dart';
import 'package:logging/logging.dart';
import 'package:openapi/api.dart';
import 'package:worker_manager/worker_manager.dart';

const _kSyncTypeOrder = [
  SyncEntityType.userDeleteV1,
  SyncEntityType.userV1,
  SyncEntityType.partnerDeleteV1,
  SyncEntityType.partnerV1,
  SyncEntityType.assetDeleteV1,
  SyncEntityType.assetV1,
  SyncEntityType.assetExifV1,
  SyncEntityType.partnerAssetDeleteV1,
  SyncEntityType.partnerAssetV1,
  SyncEntityType.partnerAssetExifV1,
];

class SyncStreamService {
  final Logger _logger = Logger('SyncStreamService');
@@ -23,70 +37,164 @@ class SyncStreamService {
        _syncStreamRepository = syncStreamRepository,
        _cancelChecker = cancelChecker;

  bool get isCancelled => _cancelChecker?.call() ?? false;

  Future<void> sync() => _syncApiRepository.streamChanges(_handleEvents);

  Future<void> _handleEvents(List<SyncEvent> events, Function() abort) async {
    List<SyncEvent> items = [];
    for (final event in events) {
      if (isCancelled) {
        _logger.warning("Sync stream cancelled");
        abort();
        return;
      }

      if (event.type != items.firstOrNull?.type) {
        await _processBatch(items);
      }

      items.add(event);
    }

    await _processBatch(items);
  }

  Future<void> _processBatch(List<SyncEvent> batch) async {
    if (batch.isEmpty) {
      return;
    }

    final type = batch.first.type;
    await _handleSyncData(type, batch.map((e) => e.data));
    await _syncApiRepository.ack([batch.last.ack]);
    batch.clear();
  }

  Future<void> _handleSyncData(
  Future<bool> _handleSyncData(
    SyncEntityType type,
    // ignore: avoid-dynamic
    Iterable<dynamic> data,
  ) async {
    _logger.fine("Processing sync data for $type of length ${data.length}");
    // ignore: prefer-switch-expression
    switch (type) {
      case SyncEntityType.userV1:
        return _syncStreamRepository.updateUsersV1(data.cast());
      case SyncEntityType.userDeleteV1:
        return _syncStreamRepository.deleteUsersV1(data.cast());
      case SyncEntityType.partnerV1:
        return _syncStreamRepository.updatePartnerV1(data.cast());
      case SyncEntityType.partnerDeleteV1:
        return _syncStreamRepository.deletePartnerV1(data.cast());
      case SyncEntityType.assetV1:
        return _syncStreamRepository.updateAssetsV1(data.cast());
      case SyncEntityType.assetDeleteV1:
        return _syncStreamRepository.deleteAssetsV1(data.cast());
      case SyncEntityType.assetExifV1:
        return _syncStreamRepository.updateAssetsExifV1(data.cast());
      case SyncEntityType.partnerAssetV1:
        return _syncStreamRepository.updatePartnerAssetsV1(data.cast());
      case SyncEntityType.partnerAssetDeleteV1:
        return _syncStreamRepository.deletePartnerAssetsV1(data.cast());
      case SyncEntityType.partnerAssetExifV1:
        return _syncStreamRepository.updatePartnerAssetsExifV1(data.cast());
      default:
        _logger.warning("Unknown sync data type: $type");
    if (data.isEmpty) {
      _logger.warning("Received empty sync data for $type");
      return false;
    }

    _logger.fine("Processing sync data for $type of length ${data.length}");

    try {
      if (type == SyncEntityType.partnerV1) {
        return await _syncStreamRepository.updatePartnerV1(data.cast());
      }

      if (type == SyncEntityType.partnerDeleteV1) {
        return await _syncStreamRepository.deletePartnerV1(data.cast());
      }

      if (type == SyncEntityType.userV1) {
        return await _syncStreamRepository.updateUsersV1(data.cast());
      }

      if (type == SyncEntityType.userDeleteV1) {
        return await _syncStreamRepository.deleteUsersV1(data.cast());
      }
    } catch (error, stack) {
      _logger.severe("Error processing sync data for $type", error, stack);
      return false;
    }

    _logger.warning("Unknown sync data type: $type");
    return false;
  }

  Future<void> _syncEvent(List<SyncRequestType> types) {
    _logger.info("Syncing Events: $types");
    final streamCompleter = Completer();
    bool shouldComplete = false;
    // the onDone callback might fire before the events are processed
    // the following flag ensures that the onDone callback is not called
    // before the events are processed and also that events are processed sequentially
    Completer? mutex;
    StreamSubscription? subscription;
    try {
      subscription = _syncApiRepository.getSyncEvents(types).listen(
        (events) async {
          if (events.isEmpty) {
            _logger.warning("Received empty sync events");
            return;
          }

          // If previous events are still being processed, wait for them to finish
          if (mutex != null) {
            await mutex!.future;
          }

          if (_cancelChecker?.call() ?? false) {
            _logger.info("Sync cancelled, stopping stream");
            subscription?.cancel();
            if (!streamCompleter.isCompleted) {
              streamCompleter.completeError(
                CanceledError(),
                StackTrace.current,
              );
            }
            return;
          }

          // Take control of the mutex and process the events
          mutex = Completer();

          try {
            final eventsMap = events.groupListsBy((event) => event.type);
            final Map<SyncEntityType, String> acks = {};

            for (final type in _kSyncTypeOrder) {
              final data = eventsMap[type];
              if (data == null) {
                continue;
              }

              if (_cancelChecker?.call() ?? false) {
                _logger.info("Sync cancelled, stopping stream");
                mutex?.complete();
                mutex = null;
                if (!streamCompleter.isCompleted) {
                  streamCompleter.completeError(
                    CanceledError(),
                    StackTrace.current,
                  );
                }

                return;
              }

              if (data.isEmpty) {
                _logger.warning("Received empty sync events for $type");
                continue;
              }

              if (await _handleSyncData(type, data.map((e) => e.data))) {
                // ignore: avoid-unsafe-collection-methods
                acks[type] = data.last.ack;
              } else {
                _logger.warning("Failed to handle sync events for $type");
              }
            }

            if (acks.isNotEmpty) {
              await _syncApiRepository.ack(acks.values.toList());
            }
            _logger.info("$types events processed");
          } catch (error, stack) {
            _logger.warning("Error handling sync events", error, stack);
          } finally {
            mutex?.complete();
            mutex = null;
          }

          if (shouldComplete) {
            _logger.info("Sync done, completing stream");
            if (!streamCompleter.isCompleted) streamCompleter.complete();
          }
        },
        onError: (error, stack) {
          _logger.warning("Error in sync stream for $types", error, stack);
          // Do not proceed if the stream errors
          if (!streamCompleter.isCompleted) {
            // ignore: avoid-missing-completer-stack-trace
            streamCompleter.completeError(error, stack);
          }
        },
        onDone: () {
          _logger.info("$types stream done");
          if (mutex == null && !streamCompleter.isCompleted) {
            streamCompleter.complete();
          } else {
            // Marks the stream as done but does not complete the completer
            // until the events are processed
            shouldComplete = true;
          }
        },
      );
    } catch (error, stack) {
      _logger.severe("Error starting sync stream", error, stack);
      if (!streamCompleter.isCompleted) {
        streamCompleter.completeError(error, stack);
      }
    }
    return streamCompleter.future.whenComplete(() {
      _logger.info("Sync stream completed");
      return subscription?.cancel();
    });
  }

  Future<void> syncUsers() =>
      _syncEvent([SyncRequestType.usersV1, SyncRequestType.partnersV1]);
}
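Note: _syncEvent serializes batch processing with a nullable Completer used as a lightweight mutex: each onData callback first awaits the in-flight batch's completer, then installs its own, and releases it in a finally block. A stripped-down sketch of the same pattern in isolation (names are illustrative; the service above uses a single if check, which suffices for a single-subscriber stream, while the while loop here also covers multiple waiters):

import 'dart:async';

class SerialProcessor<T> {
  Completer<void>? _mutex;

  Future<void> process(T batch, Future<void> Function(T) handle) async {
    // Wait for whichever batch is currently in flight, if any.
    while (_mutex != null) {
      await _mutex!.future;
    }
    _mutex = Completer<void>(); // take the lock
    try {
      await handle(batch);
    } finally {
      _mutex!.complete(); // release the lock, waking any waiter
      _mutex = null;
    }
  }
}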
@@ -7,33 +7,31 @@ import 'package:immich_mobile/utils/isolate.dart';
import 'package:worker_manager/worker_manager.dart';

class BackgroundSyncManager {
  Cancelable<void>? _syncTask;
  Cancelable<void>? _userSyncTask;

  BackgroundSyncManager();

  Future<void> cancel() {
    final futures = <Future>[];

    if (_syncTask != null) {
      futures.add(_syncTask!.future);
    if (_userSyncTask != null) {
      futures.add(_userSyncTask!.future);
    }
    _syncTask?.cancel();
    _syncTask = null;

    _userSyncTask?.cancel();
    _userSyncTask = null;
    return Future.wait(futures);
  }

  Future<void> sync() {
    if (_syncTask != null) {
      return _syncTask!.future;
  Future<void> syncUsers() {
    if (_userSyncTask != null) {
      return _userSyncTask!.future;
    }

    _syncTask = runInIsolateGentle(
      computation: (ref) => ref.read(syncStreamServiceProvider).sync(),
    _userSyncTask = runInIsolateGentle(
      computation: (ref) => ref.read(syncStreamServiceProvider).syncUsers(),
    );
    _syncTask!.whenComplete(() {
      _syncTask = null;
    _userSyncTask!.whenComplete(() {
      _userSyncTask = null;
    });
    return _syncTask!.future;
    return _userSyncTask!.future;
  }
}
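Note: BackgroundSyncManager applies a single-flight guard: while a task is running, every further call gets the same future instead of spawning a second isolate. A reduced sketch of just that guard (generic and illustrative; the real class caches a worker_manager Cancelable rather than a bare Future):

// Repeated calls while a task is in flight all share one future.
class SingleFlight<T> {
  Future<T>? _inFlight;

  Future<T> run(Future<T> Function() task) {
    final existing = _inFlight;
    if (existing != null) {
      return existing; // reuse the running task
    }
    final future = task().whenComplete(() => _inFlight = null);
    _inFlight = future;
    return future;
  }
}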
@@ -12,22 +12,22 @@ import 'package:openapi/api.dart';
class SyncApiRepository implements ISyncApiRepository {
  final Logger _logger = Logger('SyncApiRepository');
  final ApiService _api;
  SyncApiRepository(this._api);
  final int _batchSize;
  SyncApiRepository(this._api, {int batchSize = kSyncEventBatchSize})
      : _batchSize = batchSize;

  @override
  Stream<List<SyncEvent>> getSyncEvents(List<SyncRequestType> type) {
    return _getSyncStream(SyncStreamDto(types: type));
  }

  @override
  Future<void> ack(List<String> data) {
    return _api.syncApi.sendSyncAck(SyncAckSetDto(acks: data));
  }

  @override
  Future<void> streamChanges(
    Function(List<SyncEvent>, Function() abort) onData, {
    int batchSize = kSyncEventBatchSize,
    http.Client? httpClient,
  }) async {
    // ignore: avoid-unused-assignment
    final stopwatch = Stopwatch()..start();
    final client = httpClient ?? http.Client();
  Stream<List<SyncEvent>> _getSyncStream(SyncStreamDto dto) async* {
    final client = http.Client();
    final endpoint = "${_api.apiClient.basePath}/sync/stream";

    final headers = {
@@ -35,38 +35,20 @@ class SyncApiRepository implements ISyncApiRepository {
      'Accept': 'application/jsonlines+json',
    };

    final queryParams = <QueryParam>[];
    final headerParams = <String, String>{};
    await _api.applyToParams([], headerParams);
    await _api.applyToParams(queryParams, headerParams);
    headers.addAll(headerParams);

    final request = http.Request('POST', Uri.parse(endpoint));
    request.headers.addAll(headers);
    request.body = jsonEncode(
      SyncStreamDto(
        types: [
          SyncRequestType.usersV1,
          SyncRequestType.partnersV1,
          SyncRequestType.assetsV1,
          SyncRequestType.partnerAssetsV1,
          SyncRequestType.assetExifsV1,
          SyncRequestType.partnerAssetExifsV1,
        ],
      ).toJson(),
    );
    request.body = jsonEncode(dto.toJson());

    String previousChunk = '';
    List<String> lines = [];

    bool shouldAbort = false;

    void abort() {
      _logger.warning("Abort requested, stopping sync stream");
      shouldAbort = true;
    }

    try {
      final response =
          await client.send(request).timeout(const Duration(seconds: 20));
      final response = await client.send(request);

      if (response.statusCode != 200) {
        final errorBody = await response.stream.bytesToString();
@@ -77,38 +59,27 @@ class SyncApiRepository implements ISyncApiRepository {
      }

      await for (final chunk in response.stream.transform(utf8.decoder)) {
        if (shouldAbort) {
          break;
        }

        previousChunk += chunk;
        final parts = previousChunk.toString().split('\n');
        previousChunk = parts.removeLast();
        lines.addAll(parts);

        if (lines.length < batchSize) {
        if (lines.length < _batchSize) {
          continue;
        }

        await onData(_parseLines(lines), abort);
        yield _parseSyncResponse(lines);
        lines.clear();
      }

      if (lines.isNotEmpty && !shouldAbort) {
        await onData(_parseLines(lines), abort);
      }
    } catch (error, stack) {
      _logger.severe("error processing stream", error, stack);
      return Future.error(error, stack);
    } finally {
      if (lines.isNotEmpty) {
        yield _parseSyncResponse(lines);
      }
      client.close();
    }
    stopwatch.stop();
    _logger
        .info("Remote Sync completed in ${stopwatch.elapsed.inMilliseconds}ms");
  }

  List<SyncEvent> _parseLines(List<String> lines) {
  List<SyncEvent> _parseSyncResponse(List<String> lines) {
    final List<SyncEvent> data = [];

    for (final line in lines) {
@@ -139,10 +110,4 @@ const _kResponseMap = <SyncEntityType, Function(dynamic)>{
  SyncEntityType.userDeleteV1: SyncUserDeleteV1.fromJson,
  SyncEntityType.partnerV1: SyncPartnerV1.fromJson,
  SyncEntityType.partnerDeleteV1: SyncPartnerDeleteV1.fromJson,
  SyncEntityType.assetV1: SyncAssetV1.fromJson,
  SyncEntityType.assetDeleteV1: SyncAssetDeleteV1.fromJson,
  SyncEntityType.assetExifV1: SyncAssetExifV1.fromJson,
  SyncEntityType.partnerAssetV1: SyncAssetV1.fromJson,
  SyncEntityType.partnerAssetDeleteV1: SyncAssetDeleteV1.fromJson,
  SyncEntityType.partnerAssetExifV1: SyncAssetExifV1.fromJson,
};
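Note: _getSyncStream frames the response as newline-delimited JSON. Each decoded chunk is appended to a carry-over buffer, complete lines are split off on '\n', and the trailing partial line is kept for the next chunk; leftovers are flushed when the stream closes. The same framing in isolation, as a hedged sketch (jsonDecode stands in for the typed _kResponseMap lookup above; batchSize is illustrative):

import 'dart:convert';

// Split on '\n', keep the last (possibly incomplete) part as the buffer.
Stream<List<Map<String, dynamic>>> parseNdjson(
  Stream<String> chunks, {
  int batchSize = 3,
}) async* {
  var buffer = '';
  final lines = <String>[];

  await for (final chunk in chunks) {
    buffer += chunk;
    final parts = buffer.split('\n');
    buffer = parts.removeLast(); // partial line, wait for more data
    lines.addAll(parts.where((l) => l.isNotEmpty));

    if (lines.length < batchSize) continue;
    yield [for (final l in lines) jsonDecode(l) as Map<String, dynamic>];
    lines.clear();
  }

  // Flush whatever remained when the stream closed.
  if (lines.isNotEmpty) {
    yield [for (final l in lines) jsonDecode(l) as Map<String, dynamic>];
  }
}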
@@ -1,5 +1,4 @@
import 'package:drift/drift.dart';
import 'package:flutter/foundation.dart';
import 'package:immich_mobile/domain/interfaces/sync_stream.interface.dart';
import 'package:immich_mobile/extensions/string_extensions.dart';
import 'package:immich_mobile/infrastructure/entities/partner.entity.drift.dart';
@@ -16,7 +15,7 @@ class DriftSyncStreamRepository extends DriftDatabaseRepository
  DriftSyncStreamRepository(super.db) : _db = db;

  @override
  Future<void> deleteUsersV1(Iterable<SyncUserDeleteV1> data) async {
  Future<bool> deleteUsersV1(Iterable<SyncUserDeleteV1> data) async {
    try {
      await _db.batch((batch) {
        for (final user in data) {
@@ -26,14 +25,15 @@ class DriftSyncStreamRepository extends DriftDatabaseRepository
          );
        }
      });
    } catch (error, stack) {
      _logger.severe('Error while processing SyncUserDeleteV1', error, stack);
      rethrow;
      return true;
    } catch (e, s) {
      _logger.severe('Error while processing SyncUserDeleteV1', e, s);
      return false;
    }
  }

  @override
  Future<void> updateUsersV1(Iterable<SyncUserV1> data) async {
  Future<bool> updateUsersV1(Iterable<SyncUserV1> data) async {
    try {
      await _db.batch((batch) {
        for (final user in data) {
@@ -49,14 +49,15 @@ class DriftSyncStreamRepository extends DriftDatabaseRepository
          );
        }
      });
    } catch (error, stack) {
      _logger.severe('Error while processing SyncUserV1', error, stack);
      rethrow;
      return true;
    } catch (e, s) {
      _logger.severe('Error while processing SyncUserV1', e, s);
      return false;
    }
  }

  @override
  Future<void> deletePartnerV1(Iterable<SyncPartnerDeleteV1> data) async {
  Future<bool> deletePartnerV1(Iterable<SyncPartnerDeleteV1> data) async {
    try {
      await _db.batch((batch) {
        for (final partner in data) {
@@ -69,14 +70,15 @@ class DriftSyncStreamRepository extends DriftDatabaseRepository
          );
        }
      });
      return true;
    } catch (e, s) {
      _logger.severe('Error while processing SyncPartnerDeleteV1', e, s);
      rethrow;
      return false;
    }
  }

  @override
  Future<void> updatePartnerV1(Iterable<SyncPartnerV1> data) async {
  Future<bool> updatePartnerV1(Iterable<SyncPartnerV1> data) async {
    try {
      await _db.batch((batch) {
        for (final partner in data) {
@@ -93,42 +95,10 @@ class DriftSyncStreamRepository extends DriftDatabaseRepository
          );
        }
      });
      return true;
    } catch (e, s) {
      _logger.severe('Error while processing SyncPartnerV1', e, s);
      rethrow;
      return false;
    }
  }

  // Assets
  @override
  Future<void> updateAssetsV1(Iterable<SyncAssetV1> data) async {
    debugPrint("updateAssetsV1 - ${data.length}");
  }

  @override
  Future<void> deleteAssetsV1(Iterable<SyncAssetDeleteV1> data) async {
    debugPrint("deleteAssetsV1 - ${data.length}");
  }

  // Partner Assets
  @override
  Future<void> updatePartnerAssetsV1(Iterable<SyncAssetV1> data) async {
    debugPrint("updatePartnerAssetsV1 - ${data.length}");
  }

  @override
  Future<void> deletePartnerAssetsV1(Iterable<SyncAssetDeleteV1> data) async {
    debugPrint("deletePartnerAssetsV1 - ${data.length}");
  }

  // EXIF
  @override
  Future<void> updateAssetsExifV1(Iterable<SyncAssetExifV1> data) async {
    debugPrint("updateAssetsExifV1 - ${data.length}");
  }

  @override
  Future<void> updatePartnerAssetsExifV1(Iterable<SyncAssetExifV1> data) async {
    debugPrint("updatePartnerAssetsExifV1 - ${data.length}");
  }
}
@@ -61,7 +61,7 @@ abstract interface class IAssetRepository implements IDatabaseRepository {

  Future<List<Asset>> getTrashAssets(String userId);

  Future<List<Asset>> getRecentlyAddedAssets(String userId);
  Future<List<Asset>> getRecentlyTakenAssets(String userId);
  Future<List<Asset>> getMotionAssets(String userId);
}
@@ -4,11 +4,11 @@ import 'package:flutter/material.dart';
import 'package:flutter_hooks/flutter_hooks.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/extensions/theme_extensions.dart';
import 'package:immich_mobile/providers/search/people.provider.dart';
import 'package:immich_mobile/routing/router.dart';
import 'package:immich_mobile/services/api.service.dart';
import 'package:immich_mobile/utils/image_url_builder.dart';
import 'package:immich_mobile/widgets/common/search_field.dart';
import 'package:immich_mobile/widgets/search/person_name_edit_form.dart';

@RoutePage()
@@ -42,12 +42,47 @@ class PeopleCollectionPage extends HookConsumerWidget {
      appBar: AppBar(
        automaticallyImplyLeading: search.value == null,
        title: search.value != null
            ? SearchField(
            ? TextField(
                focusNode: formFocus,
                onTapOutside: (_) => formFocus.unfocus(),
                onChanged: (value) => search.value = value,
                filled: true,
                hintText: 'filter_people'.tr(),
                decoration: InputDecoration(
                  contentPadding: const EdgeInsets.only(left: 24),
                  filled: true,
                  fillColor: context.primaryColor.withValues(alpha: 0.1),
                  hintStyle: context.textTheme.bodyLarge?.copyWith(
                    color: context.themeData.colorScheme.onSurfaceSecondary,
                  ),
                  border: OutlineInputBorder(
                    borderRadius: BorderRadius.circular(25),
                    borderSide: BorderSide(
                      color: context.colorScheme.surfaceContainerHighest,
                    ),
                  ),
                  enabledBorder: OutlineInputBorder(
                    borderRadius: BorderRadius.circular(25),
                    borderSide: BorderSide(
                      color: context.colorScheme.surfaceContainerHighest,
                    ),
                  ),
                  disabledBorder: OutlineInputBorder(
                    borderRadius: BorderRadius.circular(25),
                    borderSide: BorderSide(
                      color: context.colorScheme.surfaceContainerHighest,
                    ),
                  ),
                  focusedBorder: OutlineInputBorder(
                    borderRadius: BorderRadius.circular(25),
                    borderSide: BorderSide(
                      color: context.colorScheme.primary.withAlpha(150),
                    ),
                  ),
                  prefixIcon: Icon(
                    Icons.search_rounded,
                    color: context.colorScheme.primary,
                  ),
                  hintText: 'filter_people'.tr(),
                ),
                autofocus: true,
              )
            : Text('people'.tr()),
@@ -2,7 +2,6 @@ import 'package:auto_route/auto_route.dart';
import 'package:cached_network_image/cached_network_image.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
import 'package:flutter_hooks/flutter_hooks.dart' hide Store;
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/store.model.dart';
import 'package:immich_mobile/entities/asset.entity.dart';
@@ -13,7 +12,6 @@ import 'package:immich_mobile/pages/common/large_leading_tile.dart';
import 'package:immich_mobile/providers/search/search_page_state.provider.dart';
import 'package:immich_mobile/routing/router.dart';
import 'package:immich_mobile/services/api.service.dart';
import 'package:immich_mobile/widgets/common/search_field.dart';
import 'package:immich_mobile/widgets/map/map_thumbnail.dart';
import 'package:maplibre_gl/maplibre_gl.dart';

@@ -23,62 +21,34 @@ class PlacesCollectionPage extends HookConsumerWidget {
  @override
  Widget build(BuildContext context, WidgetRef ref) {
    final places = ref.watch(getAllPlacesProvider);
    final formFocus = useFocusNode();
    final ValueNotifier<String?> search = useState(null);

    return Scaffold(
      appBar: AppBar(
        automaticallyImplyLeading: search.value == null,
        title: search.value != null
            ? SearchField(
                autofocus: true,
                filled: true,
                focusNode: formFocus,
                onChanged: (value) => search.value = value,
                onTapOutside: (_) => formFocus.unfocus(),
                hintText: 'filter_places'.tr(),
              )
            : Text('places'.tr()),
        actions: [
          IconButton(
            icon: Icon(search.value != null ? Icons.close : Icons.search),
            onPressed: () {
              search.value = search.value == null ? '' : null;
            },
          ),
        ],
        title: Text('places'.tr()),
      ),
      body: ListView(
        shrinkWrap: true,
        children: [
          if (search.value == null)
            Padding(
              padding: const EdgeInsets.all(16.0),
              child: SizedBox(
                height: 200,
                width: context.width,
                child: MapThumbnail(
                  onTap: (_, __) => context.pushRoute(const MapRoute()),
                  zoom: 8,
                  centre: const LatLng(
                    21.44950,
                    -157.91959,
                  ),
                  showAttribution: false,
                  themeMode:
                      context.isDarkTheme ? ThemeMode.dark : ThemeMode.light,
          Padding(
            padding: const EdgeInsets.all(16.0),
            child: SizedBox(
              height: 200,
              width: context.width,
              child: MapThumbnail(
                onTap: (_, __) => context.pushRoute(const MapRoute()),
                zoom: 8,
                centre: const LatLng(
                  21.44950,
                  -157.91959,
                ),
                showAttribution: false,
                themeMode:
                    context.isDarkTheme ? ThemeMode.dark : ThemeMode.light,
              ),
            ),
          ),
          places.when(
            data: (places) {
              if (search.value != null) {
                places = places.where((place) {
                  return place.label
                      .toLowerCase()
                      .contains(search.value!.toLowerCase());
                }).toList();
              }
              return ListView.builder(
                shrinkWrap: true,
                physics: const NeverScrollableScrollPhysics(),
@@ -4,19 +4,19 @@ import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/extensions/asyncvalue_extensions.dart';
import 'package:immich_mobile/widgets/asset_grid/immich_asset_grid.dart';
import 'package:immich_mobile/providers/search/recently_added_asset.provider.dart';
import 'package:immich_mobile/providers/search/recently_taken_asset.provider.dart';

@RoutePage()
class RecentlyAddedPage extends HookConsumerWidget {
  const RecentlyAddedPage({super.key});
class RecentlyTakenPage extends HookConsumerWidget {
  const RecentlyTakenPage({super.key});

  @override
  Widget build(BuildContext context, WidgetRef ref) {
    final recents = ref.watch(recentlyAddedAssetProvider);
    final recents = ref.watch(recentlyTakenAssetProvider);

    return Scaffold(
      appBar: AppBar(
        title: const Text('recently_added_page_title').tr(),
        title: const Text('recently_taken_page_title').tr(),
        leading: IconButton(
          onPressed: () => context.maybePop(),
          icon: const Icon(Icons.arrow_back_ios_rounded),
@@ -843,10 +843,10 @@ class QuickLinkList extends StatelessWidget {
      physics: const NeverScrollableScrollPhysics(),
      children: [
        QuickLink(
          title: 'recently_added'.tr(),
          title: 'recently_taken'.tr(),
          icon: Icons.schedule_outlined,
          isTop: true,
          onTap: () => context.pushRoute(const RecentlyAddedRoute()),
          onTap: () => context.pushRoute(const RecentlyTakenRoute()),
        ),
        QuickLink(
          title: 'videos'.tr(),
@@ -2,8 +2,8 @@ import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/entities/asset.entity.dart';
import 'package:immich_mobile/services/asset.service.dart';

final recentlyAddedAssetProvider = FutureProvider<List<Asset>>((ref) async {
final recentlyTakenAssetProvider = FutureProvider<List<Asset>>((ref) async {
  final assetService = ref.read(assetServiceProvider);

  return assetService.getRecentlyAddedAssets();
  return assetService.getRecentlyTakenAssets();
});
@@ -225,7 +225,7 @@ class AssetRepository extends DatabaseRepository implements IAssetRepository {
  }

  @override
  Future<List<Asset>> getRecentlyAddedAssets(String userId) {
  Future<List<Asset>> getRecentlyTakenAssets(String userId) {
    return db.assets
        .where()
        .ownerIdEqualToAnyChecksum(fastHash(userId))
@@ -58,7 +58,7 @@ import 'package:immich_mobile/pages/search/all_videos.page.dart';
import 'package:immich_mobile/pages/search/map/map.page.dart';
import 'package:immich_mobile/pages/search/map/map_location_picker.page.dart';
import 'package:immich_mobile/pages/search/person_result.page.dart';
import 'package:immich_mobile/pages/search/recently_added.page.dart';
import 'package:immich_mobile/pages/search/recently_taken.page.dart';
import 'package:immich_mobile/pages/search/search.page.dart';
import 'package:immich_mobile/pages/share_intent/share_intent.page.dart';
import 'package:immich_mobile/providers/api.provider.dart';
@@ -160,7 +160,7 @@ class AppRouter extends RootStackRouter {
        guards: [_authGuard, _duplicateGuard],
      ),
      AutoRoute(
        page: RecentlyAddedRoute.page,
        page: RecentlyTakenRoute.page,
        guards: [_authGuard, _duplicateGuard],
      ),
      CustomRoute(
@@ -1351,20 +1351,20 @@ class PlacesCollectionRoute extends PageRouteInfo<void> {
}

/// generated route for
/// [RecentlyAddedPage]
class RecentlyAddedRoute extends PageRouteInfo<void> {
  const RecentlyAddedRoute({List<PageRouteInfo>? children})
/// [RecentlyTakenPage]
class RecentlyTakenRoute extends PageRouteInfo<void> {
  const RecentlyTakenRoute({List<PageRouteInfo>? children})
      : super(
          RecentlyAddedRoute.name,
          RecentlyTakenRoute.name,
          initialChildren: children,
        );

  static const String name = 'RecentlyAddedRoute';
  static const String name = 'RecentlyTakenRoute';

  static PageInfo page = PageInfo(
    name,
    builder: (data) {
      return const RecentlyAddedPage();
      return const RecentlyTakenPage();
    },
  );
}
@@ -514,9 +514,9 @@ class AssetService {
    return _assetRepository.watchAsset(id, fireImmediately: fireImmediately);
  }

  Future<List<Asset>> getRecentlyAddedAssets() {
  Future<List<Asset>> getRecentlyTakenAssets() {
    final me = _userService.getMyUser();
    return _assetRepository.getRecentlyAddedAssets(me.id);
    return _assetRepository.getRecentlyTakenAssets(me.id);
  }

  Future<List<Asset>> getMotionAssets() {
@@ -1,13 +1,11 @@
import 'package:auto_route/auto_route.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:flutter_svg/svg.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/models/backup/backup_state.model.dart';
import 'package:immich_mobile/models/server_info/server_info.model.dart';
import 'package:immich_mobile/providers/background_sync.provider.dart';
import 'package:immich_mobile/providers/backup/backup.provider.dart';
import 'package:immich_mobile/providers/server_info.provider.dart';
import 'package:immich_mobile/providers/user.provider.dart';
@@ -180,11 +178,6 @@ class ImmichAppBar extends ConsumerWidget implements PreferredSizeWidget {
              child: action,
            ),
          ),
        if (kDebugMode)
          IconButton(
            onPressed: () => ref.read(backgroundSyncProvider).sync(),
            icon: const Icon(Icons.sync),
          ),
        if (showUploadButton)
          Padding(
            padding: const EdgeInsets.only(right: 20),
@@ -2,5 +2,3 @@ import 'package:mocktail/mocktail.dart';
import 'package:openapi/api.dart';

class MockAssetsApi extends Mock implements AssetsApi {}

class MockSyncApi extends Mock implements SyncApi {}
@@ -1,4 +1,4 @@
// ignore_for_file: avoid-declaring-call-method, avoid-unnecessary-futures
// ignore_for_file: avoid-unnecessary-futures, avoid-async-call-in-sync-function

import 'dart:async';

@@ -8,22 +8,16 @@ import 'package:immich_mobile/domain/interfaces/sync_stream.interface.dart';
import 'package:immich_mobile/domain/models/sync_event.model.dart';
import 'package:immich_mobile/domain/services/sync_stream.service.dart';
import 'package:mocktail/mocktail.dart';
import 'package:openapi/api.dart';
import 'package:worker_manager/worker_manager.dart';

import '../../fixtures/sync_stream.stub.dart';
import '../../infrastructure/repository.mock.dart';

class _AbortCallbackWrapper {
  const _AbortCallbackWrapper();

  bool call() => false;
}

class _MockAbortCallbackWrapper extends Mock implements _AbortCallbackWrapper {}

class _CancellationWrapper {
  const _CancellationWrapper();

  bool call() => false;
  bool isCancelled() => false;
}

class _MockCancellationWrapper extends Mock implements _CancellationWrapper {}
@@ -32,26 +26,35 @@ void main() {
  late SyncStreamService sut;
  late ISyncStreamRepository mockSyncStreamRepo;
  late ISyncApiRepository mockSyncApiRepo;
  late Function(List<SyncEvent>, Function()) handleEventsCallback;
  late _MockAbortCallbackWrapper mockAbortCallbackWrapper;
  late StreamController<List<SyncEvent>> streamController;

  successHandler(Invocation _) async => true;
  failureHandler(Invocation _) async => false;

  setUp(() {
    mockSyncStreamRepo = MockSyncStreamRepository();
    mockSyncApiRepo = MockSyncApiRepository();
    mockAbortCallbackWrapper = _MockAbortCallbackWrapper();
    streamController = StreamController<List<SyncEvent>>.broadcast();

    when(() => mockAbortCallbackWrapper()).thenReturn(false);
    sut = SyncStreamService(
      syncApiRepository: mockSyncApiRepo,
      syncStreamRepository: mockSyncStreamRepo,
    );

    when(() => mockSyncApiRepo.streamChanges(any()))
        .thenAnswer((invocation) async {
      // ignore: avoid-unsafe-collection-methods
      handleEventsCallback = invocation.positionalArguments.first;
    });
    // Default stream setup - emits one batch and closes
    when(() => mockSyncApiRepo.getSyncEvents(any()))
        .thenAnswer((_) => streamController.stream);

    // Default ack setup
    when(() => mockSyncApiRepo.ack(any())).thenAnswer((_) async => {});

    // Register fallbacks for mocktail verification
    registerFallbackValue(<SyncUserV1>[]);
    registerFallbackValue(<SyncPartnerV1>[]);
    registerFallbackValue(<SyncUserDeleteV1>[]);
    registerFallbackValue(<SyncPartnerDeleteV1>[]);

    // Default successful repository calls
    when(() => mockSyncStreamRepo.updateUsersV1(any()))
        .thenAnswer(successHandler);
    when(() => mockSyncStreamRepo.deleteUsersV1(any()))
@@ -60,163 +63,381 @@ void main() {
        .thenAnswer(successHandler);
    when(() => mockSyncStreamRepo.deletePartnerV1(any()))
        .thenAnswer(successHandler);
    when(() => mockSyncStreamRepo.updateAssetsV1(any()))
        .thenAnswer(successHandler);
    when(() => mockSyncStreamRepo.deleteAssetsV1(any()))
        .thenAnswer(successHandler);
    when(() => mockSyncStreamRepo.updateAssetsExifV1(any()))
        .thenAnswer(successHandler);
    when(() => mockSyncStreamRepo.updatePartnerAssetsV1(any()))
        .thenAnswer(successHandler);
    when(() => mockSyncStreamRepo.deletePartnerAssetsV1(any()))
        .thenAnswer(successHandler);
    when(() => mockSyncStreamRepo.updatePartnerAssetsExifV1(any()))
        .thenAnswer(successHandler);

    sut = SyncStreamService(
      syncApiRepository: mockSyncApiRepo,
      syncStreamRepository: mockSyncStreamRepo,
    );
  });

  Future<void> simulateEvents(List<SyncEvent> events) async {
    await sut.sync();
    await handleEventsCallback(events, mockAbortCallbackWrapper.call);
  tearDown(() async {
    if (!streamController.isClosed) {
      await streamController.close();
    }
  });

  // Helper to trigger sync and add events to the stream
  Future<void> triggerSyncAndEmit(List<SyncEvent> events) async {
    final future = sut.syncUsers(); // Start listening
    await Future.delayed(Duration.zero); // Allow listener to attach
    if (!streamController.isClosed) {
      streamController.add(events);
      await streamController.close(); // Close after emitting
    }
    await future; // Wait for processing to complete
  }

  group("SyncStreamService - _handleEvents", () {
  group("SyncStreamService", () {
    test(
      "processes events and acks successfully when handlers succeed",
      "completes successfully when stream emits data and handlers succeed",
      () async {
        final events = [
          SyncStreamStub.userDeleteV1,
          SyncStreamStub.userV1Admin,
          SyncStreamStub.userV1User,
          SyncStreamStub.partnerDeleteV1,
          SyncStreamStub.partnerV1,
          ...SyncStreamStub.userEvents,
          ...SyncStreamStub.partnerEvents,
        ];

        await simulateEvents(events);

        verifyInOrder([
          () => mockSyncStreamRepo.deleteUsersV1(any()),
          () => mockSyncApiRepo.ack(["2"]),
          () => mockSyncStreamRepo.updateUsersV1(any()),
          () => mockSyncApiRepo.ack(["5"]),
          () => mockSyncStreamRepo.deletePartnerV1(any()),
          () => mockSyncApiRepo.ack(["4"]),
          () => mockSyncStreamRepo.updatePartnerV1(any()),
          () => mockSyncApiRepo.ack(["3"]),
        ]);
        verifyNever(() => mockAbortCallbackWrapper());
        final future = triggerSyncAndEmit(events);
        await expectLater(future, completes);
        // Verify ack includes last ack from each successfully handled type
        verify(
          () =>
              mockSyncApiRepo.ack(any(that: containsAll(["5", "2", "4", "3"]))),
        ).called(1);
      },
    );

    test("processes final batch correctly", () async {
      final events = [
        SyncStreamStub.userDeleteV1,
        SyncStreamStub.userV1Admin,
      ];

      await simulateEvents(events);

      verifyInOrder([
        () => mockSyncStreamRepo.deleteUsersV1(any()),
        () => mockSyncApiRepo.ack(["2"]),
        () => mockSyncStreamRepo.updateUsersV1(any()),
        () => mockSyncApiRepo.ack(["1"]),
      ]);
      verifyNever(() => mockAbortCallbackWrapper());
    test("completes successfully when stream emits an error", () async {
      when(() => mockSyncApiRepo.getSyncEvents(any()))
          .thenAnswer((_) => Stream.error(Exception("Stream Error")));
      // Should complete gracefully without throwing
      await expectLater(sut.syncUsers(), throwsException);
      verifyNever(() => mockSyncApiRepo.ack(any())); // No ack on stream error
    });

    test("does not process or ack when event list is empty", () async {
      await simulateEvents([]);
    test("throws when initial getSyncEvents call fails", () async {
      final apiException = Exception("API Error");
      when(() => mockSyncApiRepo.getSyncEvents(any())).thenThrow(apiException);
      // Should rethrow the exception from the initial call
      await expectLater(sut.syncUsers(), throwsA(apiException));
      verifyNever(() => mockSyncApiRepo.ack(any()));
    });

    test(
      "completes successfully when a repository handler throws an exception",
      () async {
        when(() => mockSyncStreamRepo.updateUsersV1(any()))
            .thenThrow(Exception("Repo Error"));
        final events = [
          ...SyncStreamStub.userEvents,
          ...SyncStreamStub.partnerEvents,
        ];
        // Should complete, but ack only for the successful types
        await triggerSyncAndEmit(events);
        // Only partner delete was successful by default setup
        verify(() => mockSyncApiRepo.ack(["2", "4", "3"])).called(1);
      },
    );

    test(
      "completes successfully but sends no ack when all handlers fail",
      () async {
        when(() => mockSyncStreamRepo.updateUsersV1(any()))
            .thenAnswer(failureHandler);
        when(() => mockSyncStreamRepo.deleteUsersV1(any()))
            .thenAnswer(failureHandler);
        when(() => mockSyncStreamRepo.updatePartnerV1(any()))
            .thenAnswer(failureHandler);
        when(() => mockSyncStreamRepo.deletePartnerV1(any()))
            .thenAnswer(failureHandler);

        final events = [
          ...SyncStreamStub.userEvents,
          ...SyncStreamStub.partnerEvents,
        ];
        await triggerSyncAndEmit(events);
        verifyNever(() => mockSyncApiRepo.ack(any()));
      },
    );

    test("sends ack only for types where handler returns true", () async {
      // Mock specific handlers: user update fails, user delete succeeds
      when(() => mockSyncStreamRepo.updateUsersV1(any()))
          .thenAnswer(failureHandler);
      when(() => mockSyncStreamRepo.deleteUsersV1(any()))
          .thenAnswer(successHandler);
      // partner update fails, partner delete succeeds
      when(() => mockSyncStreamRepo.updatePartnerV1(any()))
          .thenAnswer(failureHandler);

      final events = [
        ...SyncStreamStub.userEvents,
        ...SyncStreamStub.partnerEvents,
      ];
      await triggerSyncAndEmit(events);

      // Expect ack only for userDeleteV1 (ack: "2") and partnerDeleteV1 (ack: "4")
      verify(() => mockSyncApiRepo.ack(any(that: containsAll(["2", "4"]))))
          .called(1);
    });

    test("does not process or ack when stream emits an empty list", () async {
      final future = sut.syncUsers();
      streamController.add([]); // Emit empty list
      await streamController.close();
      await future; // Wait for completion

      verifyNever(() => mockSyncStreamRepo.updateUsersV1(any()));
      verifyNever(() => mockSyncStreamRepo.deleteUsersV1(any()));
      verifyNever(() => mockSyncStreamRepo.updatePartnerV1(any()));
      verifyNever(() => mockSyncStreamRepo.deletePartnerV1(any()));
      verifyNever(() => mockAbortCallbackWrapper());
      verifyNever(() => mockSyncApiRepo.ack(any()));
    });

    test("aborts and stops processing if cancelled during iteration", () async {
      final cancellationChecker = _MockCancellationWrapper();
      when(() => cancellationChecker()).thenReturn(false);
    test("processes multiple batches sequentially using mutex", () async {
      final completer1 = Completer<void>();
      final completer2 = Completer<void>();
      int callOrder = 0;
      int handler1StartOrder = -1;
      int handler2StartOrder = -1;
      int handler1Calls = 0;
      int handler2Calls = 0;

      sut = SyncStreamService(
        syncApiRepository: mockSyncApiRepo,
        syncStreamRepository: mockSyncStreamRepo,
        cancelChecker: cancellationChecker.call,
      );
      await sut.sync();

      final events = [
        SyncStreamStub.userDeleteV1,
        SyncStreamStub.userV1Admin,
        SyncStreamStub.partnerDeleteV1,
      ];

      when(() => mockSyncStreamRepo.deleteUsersV1(any())).thenAnswer((_) async {
        when(() => cancellationChecker()).thenReturn(true);
      when(() => mockSyncStreamRepo.updateUsersV1(any())).thenAnswer((_) async {
        handler1Calls++;
        handler1StartOrder = ++callOrder;
        await completer1.future;
        return true;
      });
      when(() => mockSyncStreamRepo.updatePartnerV1(any()))
          .thenAnswer((_) async {
        handler2Calls++;
        handler2StartOrder = ++callOrder;
        await completer2.future;
        return true;
      });

      await handleEventsCallback(events, mockAbortCallbackWrapper.call);
      final batch1 = SyncStreamStub.userEvents;
      final batch2 = SyncStreamStub.partnerEvents;

      verify(() => mockSyncStreamRepo.deleteUsersV1(any())).called(1);
      verifyNever(() => mockSyncStreamRepo.updateUsersV1(any()));
      verifyNever(() => mockSyncStreamRepo.deletePartnerV1(any()));
      final syncFuture = sut.syncUsers();
      await pumpEventQueue();

      verify(() => mockAbortCallbackWrapper()).called(1);
      streamController.add(batch1);
      await pumpEventQueue();
      // Small delay to ensure the first handler starts
      await Future.delayed(const Duration(milliseconds: 20));

      verify(() => mockSyncApiRepo.ack(["2"])).called(1);
      expect(handler1StartOrder, 1, reason: "Handler 1 should start first");
      expect(handler1Calls, 1);

      streamController.add(batch2);
      await pumpEventQueue();
      // Small delay
      await Future.delayed(const Duration(milliseconds: 20));

      expect(handler2StartOrder, -1, reason: "Handler 2 should wait");
      expect(handler2Calls, 0);

      completer1.complete();
      await pumpEventQueue(times: 40);
      // Small delay to ensure the second handler starts
      await Future.delayed(const Duration(milliseconds: 20));

      expect(handler2StartOrder, 2, reason: "Handler 2 should start after H1");
      expect(handler2Calls, 1);

      completer2.complete();
      await pumpEventQueue(times: 40);
      // Small delay before closing the stream
      await Future.delayed(const Duration(milliseconds: 20));

      if (!streamController.isClosed) {
        await streamController.close();
      }
      await pumpEventQueue(times: 40);
      // Small delay to ensure the sync completes
      await Future.delayed(const Duration(milliseconds: 20));

      await syncFuture;

      verify(() => mockSyncStreamRepo.updateUsersV1(any())).called(1);
      verify(() => mockSyncStreamRepo.updatePartnerV1(any())).called(1);
      verify(() => mockSyncApiRepo.ack(any())).called(2);
    });

    test(
      "aborts and stops processing if cancelled before processing batch",
      "stops processing and ack when cancel checker is completed",
      () async {
        final cancellationChecker = _MockCancellationWrapper();
        when(() => cancellationChecker()).thenReturn(false);

        final processingCompleter = Completer<void>();
        bool handler1Started = false;
        when(() => mockSyncStreamRepo.deleteUsersV1(any()))
            .thenAnswer((_) async {
          handler1Started = true;
          return processingCompleter.future;
        });
        when(() => cancellationChecker.isCancelled()).thenAnswer((_) => false);

        sut = SyncStreamService(
          syncApiRepository: mockSyncApiRepo,
          syncStreamRepository: mockSyncStreamRepo,
          cancelChecker: cancellationChecker.call,
          cancelChecker: cancellationChecker.isCancelled,
        );

        await sut.sync();
        final processingCompleter = Completer<void>();
        bool handlerStarted = false;

        final events = [
          SyncStreamStub.userDeleteV1,
          SyncStreamStub.userV1Admin,
          SyncStreamStub.partnerDeleteV1,
        ];
        // Make handler wait so we can cancel it mid-flight
        when(() => mockSyncStreamRepo.deleteUsersV1(any()))
            .thenAnswer((_) async {
          handlerStarted = true;
          await processingCompleter
              .future; // Wait indefinitely until test completes it
          return true;
        });

        final processingFuture =
            handleEventsCallback(events, mockAbortCallbackWrapper.call);
        await pumpEventQueue();
        final syncFuture = sut.syncUsers();
        await pumpEventQueue(times: 30);

        expect(handler1Started, isTrue);
        streamController.add(SyncStreamStub.userEvents);
        // Ensure processing starts
        await Future.delayed(const Duration(milliseconds: 10));

        // Signal cancellation while handler 1 is waiting
        when(() => cancellationChecker()).thenReturn(true);
        await pumpEventQueue();
        expect(handlerStarted, isTrue, reason: "Handler should have started");

        when(() => cancellationChecker.isCancelled()).thenAnswer((_) => true);

        // Allow cancellation logic to propagate
        await Future.delayed(const Duration(milliseconds: 10));

        // Complete the handler's completer after cancellation signal
        // to ensure the cancellation logic itself isn't blocked by the handler.
        processingCompleter.complete();
        await processingFuture;

        verifyNever(() => mockSyncStreamRepo.updateUsersV1(any()));
        await expectLater(syncFuture, throwsA(isA<CanceledError>()));

        verify(() => mockSyncApiRepo.ack(["2"])).called(1);
        // Verify that ack was NOT called because processing was cancelled
        verifyNever(() => mockSyncApiRepo.ack(any()));
      },
    );

    test("completes successfully when ack call throws an exception", () async {
      when(() => mockSyncApiRepo.ack(any())).thenThrow(Exception("Ack Error"));
      final events = [
        ...SyncStreamStub.userEvents,
        ...SyncStreamStub.partnerEvents,
      ];

      // Should still complete even if ack fails
      await triggerSyncAndEmit(events);
      verify(() => mockSyncApiRepo.ack(any()))
          .called(1); // Verify ack was attempted
    });

    test("waits for processing to finish if onDone called early", () async {
      final processingCompleter = Completer<void>();
      bool handlerFinished = false;

      when(() => mockSyncStreamRepo.updateUsersV1(any())).thenAnswer((_) async {
        await processingCompleter.future; // Wait inside handler
        handlerFinished = true;
        return true;
      });

      final syncFuture = sut.syncUsers();
      // Allow listener to attach
      // This is necessary to ensure the stream is ready to receive events
      await Future.delayed(Duration.zero);

      streamController.add(SyncStreamStub.userEvents); // Emit batch
      await Future.delayed(
        const Duration(milliseconds: 10),
      ); // Ensure processing starts

      await streamController
          .close(); // Close stream (triggers onDone internally)
      await Future.delayed(
        const Duration(milliseconds: 10),
      ); // Give onDone a chance to fire

      // At this point, onDone was called, but processing is blocked
      expect(handlerFinished, isFalse);

      processingCompleter.complete(); // Allow processing to finish
      await syncFuture; // Now the main future should complete

      expect(handlerFinished, isTrue);
      verify(() => mockSyncApiRepo.ack(any())).called(1);
    });

    test("processes events in the defined _kSyncTypeOrder", () async {
      final future = sut.syncUsers();
      await pumpEventQueue();
      if (!streamController.isClosed) {
        final events = [
          SyncEvent(
            type: SyncEntityType.partnerV1,
            data: SyncStreamStub.partnerV1,
            ack: "1",
          ), // Should be processed last
          SyncEvent(
            type: SyncEntityType.userV1,
            data: SyncStreamStub.userV1Admin,
            ack: "2",
          ), // Should be processed second
          SyncEvent(
            type: SyncEntityType.partnerDeleteV1,
            data: SyncStreamStub.partnerDeleteV1,
            ack: "3",
          ), // Should be processed third
          SyncEvent(
            type: SyncEntityType.userDeleteV1,
            data: SyncStreamStub.userDeleteV1,
            ack: "4",
          ), // Should be processed first
        ];

        streamController.add(events);
        await streamController.close();
      }
      await future;

      verifyInOrder([
        () => mockSyncStreamRepo.deleteUsersV1(any()),
        () => mockSyncStreamRepo.updateUsersV1(any()),
        () => mockSyncStreamRepo.deletePartnerV1(any()),
        () => mockSyncStreamRepo.updatePartnerV1(any()),
        // Verify ack happens after all processing
        () => mockSyncApiRepo.ack(any()),
      ]);
    });
  });

  group("syncUsers", () {
    test("calls getSyncEvents with correct types", () async {
      // Need to close the stream for the future to complete
      final future = sut.syncUsers();
      await streamController.close();
      await future;

      verify(
        () => mockSyncApiRepo.getSyncEvents([
          SyncRequestType.usersV1,
          SyncRequestType.partnersV1,
        ]),
      ).called(1);
    });

    test("calls repository methods with correctly grouped data", () async {
      final events = [
        ...SyncStreamStub.userEvents,
        ...SyncStreamStub.partnerEvents,
      ];
      await triggerSyncAndEmit(events);

      // Verify each handler was called with the correct list of data payloads
      verify(
        () => mockSyncStreamRepo.updateUsersV1(
          [SyncStreamStub.userV1Admin, SyncStreamStub.userV1User],
        ),
      ).called(1);
      verify(
        () => mockSyncStreamRepo.deleteUsersV1([SyncStreamStub.userDeleteV1]),
      ).called(1);
      verify(
        () => mockSyncStreamRepo.updatePartnerV1([SyncStreamStub.partnerV1]),
      ).called(1);
      verify(
        () => mockSyncStreamRepo
            .deletePartnerV1([SyncStreamStub.partnerDeleteV1]),
      ).called(1);
    });
  });
}
74 mobile/test/fixtures/sync_stream.stub.dart vendored
@@ -2,44 +2,44 @@ import 'package:immich_mobile/domain/models/sync_event.model.dart';
import 'package:openapi/api.dart';

abstract final class SyncStreamStub {
  static final userV1Admin = SyncEvent(
    type: SyncEntityType.userV1,
    data: SyncUserV1(
      deletedAt: DateTime(2020),
      email: "admin@admin",
      id: "1",
      name: "Admin",
  static final userV1Admin = SyncUserV1(
    deletedAt: DateTime(2020),
    email: "admin@admin",
    id: "1",
    name: "Admin",
  );
  static final userV1User = SyncUserV1(
    deletedAt: DateTime(2021),
    email: "user@user",
    id: "2",
    name: "User",
  );
  static final userDeleteV1 = SyncUserDeleteV1(userId: "2");
  static final userEvents = [
    SyncEvent(type: SyncEntityType.userV1, data: userV1Admin, ack: "1"),
    SyncEvent(
      type: SyncEntityType.userDeleteV1,
      data: userDeleteV1,
      ack: "2",
    ),
    ack: "1",
  );
  static final userV1User = SyncEvent(
    type: SyncEntityType.userV1,
    data: SyncUserV1(
      deletedAt: DateTime(2021),
      email: "user@user",
      id: "5",
      name: "User",
    ),
    ack: "5",
  );
  static final userDeleteV1 = SyncEvent(
    type: SyncEntityType.userDeleteV1,
    data: SyncUserDeleteV1(userId: "2"),
    ack: "2",
  );
    SyncEvent(type: SyncEntityType.userV1, data: userV1User, ack: "5"),
  ];

  static final partnerV1 = SyncEvent(
    type: SyncEntityType.partnerV1,
    data: SyncPartnerV1(
      inTimeline: true,
      sharedById: "1",
      sharedWithId: "2",
  static final partnerV1 = SyncPartnerV1(
    inTimeline: true,
    sharedById: "1",
    sharedWithId: "2",
  );
  static final partnerDeleteV1 = SyncPartnerDeleteV1(
    sharedById: "3",
    sharedWithId: "4",
  );
  static final partnerEvents = [
    SyncEvent(
      type: SyncEntityType.partnerDeleteV1,
      data: partnerDeleteV1,
      ack: "4",
    ),
    ack: "3",
  );
  static final partnerDeleteV1 = SyncEvent(
    type: SyncEntityType.partnerDeleteV1,
    data: SyncPartnerDeleteV1(sharedById: "3", sharedWithId: "4"),
    ack: "4",
  );
    SyncEvent(type: SyncEntityType.partnerV1, data: partnerV1, ack: "3"),
  ];
}
@@ -1,299 +0,0 @@
import 'dart:async';
import 'dart:convert';

import 'package:flutter_test/flutter_test.dart';
import 'package:http/http.dart' as http;
import 'package:immich_mobile/domain/models/sync_event.model.dart';
import 'package:immich_mobile/infrastructure/repositories/sync_api.repository.dart';
import 'package:mocktail/mocktail.dart';
import 'package:openapi/api.dart';

import '../../api.mocks.dart';
import '../../service.mocks.dart';

class MockHttpClient extends Mock implements http.Client {}

class MockApiClient extends Mock implements ApiClient {}

class MockStreamedResponse extends Mock implements http.StreamedResponse {}

class FakeBaseRequest extends Fake implements http.BaseRequest {}

String _createJsonLine(String type, Map<String, dynamic> data, String ack) {
  return '${jsonEncode({'type': type, 'data': data, 'ack': ack})}\n';
}

void main() {
  late SyncApiRepository sut;
  late MockApiService mockApiService;
  late MockApiClient mockApiClient;
  late MockSyncApi mockSyncApi;
  late MockHttpClient mockHttpClient;
  late MockStreamedResponse mockStreamedResponse;
  late StreamController<List<int>> responseStreamController;
  late int testBatchSize = 3;

  setUp(() {
    mockApiService = MockApiService();
    mockApiClient = MockApiClient();
    mockSyncApi = MockSyncApi();
    mockHttpClient = MockHttpClient();
    mockStreamedResponse = MockStreamedResponse();
    responseStreamController =
        StreamController<List<int>>.broadcast(sync: true);

    registerFallbackValue(FakeBaseRequest());

    when(() => mockApiService.apiClient).thenReturn(mockApiClient);
    when(() => mockApiService.syncApi).thenReturn(mockSyncApi);
    when(() => mockApiClient.basePath).thenReturn('http://demo.immich.app/api');
    when(() => mockApiService.applyToParams(any(), any()))
        .thenAnswer((_) async => {});

    // Mock HTTP client behavior
    when(() => mockHttpClient.send(any()))
        .thenAnswer((_) async => mockStreamedResponse);
    when(() => mockStreamedResponse.statusCode).thenReturn(200);
    when(() => mockStreamedResponse.stream)
        .thenAnswer((_) => http.ByteStream(responseStreamController.stream));
    when(() => mockHttpClient.close()).thenAnswer((_) => {});

    sut = SyncApiRepository(mockApiService);
  });

  tearDown(() async {
    if (!responseStreamController.isClosed) {
      await responseStreamController.close();
    }
  });

  Future<void> streamChanges(
    Function(List<SyncEvent>, Function() abort) onDataCallback,
  ) {
    return sut.streamChanges(
      onDataCallback,
      batchSize: testBatchSize,
      httpClient: mockHttpClient,
    );
  }

  test('streamChanges stops processing stream when abort is called', () async {
    int onDataCallCount = 0;
    bool abortWasCalledInCallback = false;
    List<SyncEvent> receivedEventsBatch1 = [];

    onDataCallback(List<SyncEvent> events, Function() abort) {
      onDataCallCount++;
      if (onDataCallCount == 1) {
        receivedEventsBatch1 = events;
        abort();
        abortWasCalledInCallback = true;
      } else {
        fail("onData called more than once after abort was invoked");
      }
    }

    final streamChangesFuture = streamChanges(onDataCallback);

    await pumpEventQueue();

    for (int i = 0; i < testBatchSize; i++) {
      responseStreamController.add(
        utf8.encode(
          _createJsonLine(
            SyncEntityType.userDeleteV1.toString(),
            SyncUserDeleteV1(userId: "user$i").toJson(),
            'ack$i',
          ),
        ),
      );
    }

    for (int i = testBatchSize; i < testBatchSize * 2; i++) {
      responseStreamController.add(
        utf8.encode(
          _createJsonLine(
            SyncEntityType.userDeleteV1.toString(),
            SyncUserDeleteV1(userId: "user$i").toJson(),
            'ack$i',
          ),
        ),
      );
    }

    await responseStreamController.close();
    await expectLater(streamChangesFuture, completes);

    expect(onDataCallCount, 1);
    expect(abortWasCalledInCallback, isTrue);
    expect(receivedEventsBatch1.length, testBatchSize);
    verify(() => mockHttpClient.close()).called(1);
  });

  test(
    'streamChanges does not process remaining lines in finally block if aborted',
    () async {
      int onDataCallCount = 0;
      bool abortWasCalledInCallback = false;

      onDataCallback(List<SyncEvent> events, Function() abort) {
        onDataCallCount++;
        if (onDataCallCount == 1) {
          abort();
          abortWasCalledInCallback = true;
        } else {
          fail("onData called more than once after abort was invoked");
        }
      }

      final streamChangesFuture = streamChanges(onDataCallback);

      await pumpEventQueue();

      for (int i = 0; i < testBatchSize; i++) {
        responseStreamController.add(
          utf8.encode(
            _createJsonLine(
              SyncEntityType.userDeleteV1.toString(),
              SyncUserDeleteV1(userId: "user$i").toJson(),
              'ack$i',
            ),
          ),
        );
      }

      // emit a single event to skip batching and trigger finally
      responseStreamController.add(
        utf8.encode(
          _createJsonLine(
            SyncEntityType.userDeleteV1.toString(),
            SyncUserDeleteV1(userId: "user100").toJson(),
            'ack100',
          ),
        ),
      );

      await responseStreamController.close();
      await expectLater(streamChangesFuture, completes);

      expect(onDataCallCount, 1);
      expect(abortWasCalledInCallback, isTrue);
      verify(() => mockHttpClient.close()).called(1);
    },
  );

  test(
    'streamChanges processes remaining lines in finally block if not aborted',
    () async {
      int onDataCallCount = 0;
      List<SyncEvent> receivedEventsBatch1 = [];
      List<SyncEvent> receivedEventsBatch2 = [];

      onDataCallback(List<SyncEvent> events, Function() _) {
        onDataCallCount++;
        if (onDataCallCount == 1) {
          receivedEventsBatch1 = events;
        } else if (onDataCallCount == 2) {
          receivedEventsBatch2 = events;
        } else {
          fail("onData called more than expected");
        }
      }

      final streamChangesFuture = streamChanges(onDataCallback);

      await pumpEventQueue();

      // Batch 1
      for (int i = 0; i < testBatchSize; i++) {
        responseStreamController.add(
          utf8.encode(
            _createJsonLine(
              SyncEntityType.userDeleteV1.toString(),
              SyncUserDeleteV1(userId: "user$i").toJson(),
              'ack$i',
            ),
          ),
        );
      }

      // Partial Batch 2
      responseStreamController.add(
        utf8.encode(
          _createJsonLine(
            SyncEntityType.userDeleteV1.toString(),
            SyncUserDeleteV1(userId: "user100").toJson(),
            'ack100',
          ),
        ),
      );

      await responseStreamController.close();
      await expectLater(streamChangesFuture, completes);

      expect(onDataCallCount, 2);
      expect(receivedEventsBatch1.length, testBatchSize);
      expect(receivedEventsBatch2.length, 1);
      verify(() => mockHttpClient.close()).called(1);
    },
  );

  test('streamChanges handles stream error gracefully', () async {
    final streamError = Exception("Network Error");
    int onDataCallCount = 0;

    onDataCallback(List<SyncEvent> events, Function() _) {
      onDataCallCount++;
    }

    final streamChangesFuture = streamChanges(onDataCallback);

    await pumpEventQueue();

    responseStreamController.add(
      utf8.encode(
        _createJsonLine(
          SyncEntityType.userDeleteV1.toString(),
          SyncUserDeleteV1(userId: "user1").toJson(),
          'ack1',
        ),
      ),
    );

    responseStreamController.addError(streamError);
    await expectLater(streamChangesFuture, throwsA(streamError));

    expect(onDataCallCount, 0);
    verify(() => mockHttpClient.close()).called(1);
  });

  test('streamChanges throws ApiException on non-200 status code', () async {
    when(() => mockStreamedResponse.statusCode).thenReturn(401);
    final errorBodyController = StreamController<List<int>>(sync: true);
    when(() => mockStreamedResponse.stream)
        .thenAnswer((_) => http.ByteStream(errorBodyController.stream));

    int onDataCallCount = 0;

    onDataCallback(List<SyncEvent> events, Function() _) {
      onDataCallCount++;
    }

    final future = streamChanges(onDataCallback);

    errorBodyController.add(utf8.encode('{"error":"Unauthorized"}'));
    await errorBodyController.close();

    await expectLater(
      future,
      throwsA(
        isA<ApiException>()
            .having((e) => e.code, 'code', 401)
            .having((e) => e.message, 'message', contains('Unauthorized')),
      ),
    );

    expect(onDataCallCount, 0);
    verify(() => mockHttpClient.close()).called(1);
  });
}
@@ -26,8 +26,9 @@
|
||||
"migrations:generate": "node ./dist/bin/migrations.js generate",
|
||||
"migrations:create": "node ./dist/bin/migrations.js create",
|
||||
"migrations:run": "node ./dist/bin/migrations.js run",
|
||||
"schema:drop": "node ./dist/bin/migrations.js query 'DROP schema public cascade; CREATE schema public;'",
|
||||
"schema:reset": "npm run schema:drop && npm run migrations:run",
|
||||
"typeorm:migrations:revert": "typeorm migration:revert -d ./dist/bin/database.js",
|
||||
"typeorm:schema:drop": "typeorm query -d ./dist/bin/database.js 'DROP schema public cascade; CREATE schema public;'",
|
||||
"typeorm:schema:reset": "npm run typeorm:schema:drop && npm run migrations:run",
|
||||
"kysely:codegen": "npx kysely-codegen --include-pattern=\"(public|vectors).*\" --dialect postgres --url postgres://postgres:postgres@localhost/immich --log-level debug --out-file=./src/db.d.ts",
|
||||
"sync:open-api": "node ./dist/bin/sync-open-api.js",
|
||||
"sync:sql": "node ./dist/bin/sync-sql.js",
|
||||
|
||||
11
server/src/bin/database.ts
Normal file
11
server/src/bin/database.ts
Normal file
@@ -0,0 +1,11 @@
|
||||
import { ConfigRepository } from 'src/repositories/config.repository';
|
||||
import { DataSource } from 'typeorm';
|
||||
|
||||
const { database } = new ConfigRepository().getEnv();
|
||||
|
||||
/**
|
||||
* @deprecated - DO NOT USE THIS
|
||||
*
|
||||
* this export is ONLY to be used for TypeORM commands in package.json#scripts
|
||||
*/
|
||||
export const dataSource = new DataSource({ ...database.config.typeorm, host: 'localhost' });
|
||||
@@ -1,8 +1,8 @@
|
||||
#!/usr/bin/env node
|
||||
process.env.DB_URL = process.env.DB_URL || 'postgres://postgres:postgres@localhost:5432/immich';
|
||||
|
||||
import { Kysely, sql } from 'kysely';
|
||||
import { mkdirSync, writeFileSync } from 'node:fs';
|
||||
import { Kysely } from 'kysely';
|
||||
import { writeFileSync } from 'node:fs';
|
||||
import { basename, dirname, extname, join } from 'node:path';
|
||||
import postgres from 'postgres';
|
||||
import { ConfigRepository } from 'src/repositories/config.repository';
|
||||
@@ -23,13 +23,8 @@ const main = async () => {
|
||||
}
|
||||
|
||||
case 'run': {
|
||||
await runMigrations();
|
||||
return;
|
||||
}
|
||||
|
||||
case 'query': {
|
||||
const query = process.argv[3];
|
||||
await runQuery(query);
|
||||
const only = process.argv[3] as 'kysely' | 'typeorm' | undefined;
|
||||
await run(only);
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -53,25 +48,14 @@ const main = async () => {
|
||||
}
|
||||
};
|
||||
|
||||
const getDatabaseClient = () => {
|
||||
const run = async (only?: 'kysely' | 'typeorm') => {
|
||||
const configRepository = new ConfigRepository();
|
||||
const { database } = configRepository.getEnv();
|
||||
return new Kysely<any>(getKyselyConfig(database.config.kysely));
|
||||
};
|
||||
|
||||
const runQuery = async (query: string) => {
|
||||
const db = getDatabaseClient();
|
||||
await sql.raw(query).execute(db);
|
||||
await db.destroy();
|
||||
};
|
||||
|
||||
const runMigrations = async () => {
|
||||
const configRepository = new ConfigRepository();
|
||||
const logger = new LoggingRepository(undefined, configRepository);
|
||||
const db = getDatabaseClient();
|
||||
const db = new Kysely<any>(getKyselyConfig(database.config.kysely));
|
||||
const databaseRepository = new DatabaseRepository(db, logger, configRepository);
|
||||
await databaseRepository.runMigrations();
|
||||
await db.destroy();
|
||||
|
||||
await databaseRepository.runMigrations({ only });
|
||||
};
|
||||
|
||||
const debug = async () => {
|
||||
@@ -97,8 +81,7 @@ const create = (path: string, up: string[], down: string[]) => {
|
||||
const filename = `${timestamp}-${name}.ts`;
|
||||
const folder = dirname(path);
|
||||
const fullPath = join(folder, filename);
|
||||
mkdirSync(folder, { recursive: true });
|
||||
writeFileSync(fullPath, asMigration('kysely', { name, timestamp, up, down }));
|
||||
writeFileSync(fullPath, asMigration('typeorm', { name, timestamp, up, down }));
|
||||
console.log(`Wrote ${fullPath}`);
|
||||
};
|
||||
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
import { Selectable } from 'kysely';
|
||||
import { Albums, Exif as DatabaseExif } from 'src/db';
|
||||
import { MapAsset } from 'src/dtos/asset-response.dto';
|
||||
import { AssetJobStatus as DatabaseAssetJobStatus, Exif as DatabaseExif } from 'src/db';
|
||||
import { AssetEntity } from 'src/entities/asset.entity';
|
||||
import {
|
||||
AlbumUserRole,
|
||||
AssetFileType,
|
||||
AssetStatus,
|
||||
AssetType,
|
||||
MemoryType,
|
||||
Permission,
|
||||
SharedLinkType,
|
||||
SourceType,
|
||||
UserStatus,
|
||||
} from 'src/enum';
|
||||
@@ -44,7 +44,7 @@ export type Library = {
|
||||
exclusionPatterns: string[];
|
||||
deletedAt: Date | null;
|
||||
refreshedAt: Date | null;
|
||||
assets?: MapAsset[];
|
||||
assets?: Asset[];
|
||||
};
|
||||
|
||||
export type AuthApiKey = {
|
||||
@@ -96,26 +96,7 @@ export type Memory = {
|
||||
data: OnThisDayData;
|
||||
ownerId: string;
|
||||
isSaved: boolean;
|
||||
assets: MapAsset[];
|
||||
};
|
||||
|
||||
export type Asset = {
|
||||
id: string;
|
||||
checksum: Buffer<ArrayBufferLike>;
|
||||
deviceAssetId: string;
|
||||
deviceId: string;
|
||||
fileCreatedAt: Date;
|
||||
fileModifiedAt: Date;
|
||||
isExternal: boolean;
|
||||
isVisible: boolean;
|
||||
libraryId: string | null;
|
||||
livePhotoVideoId: string | null;
|
||||
localDateTime: Date;
|
||||
originalFileName: string;
|
||||
originalPath: string;
|
||||
ownerId: string;
|
||||
sidecarPath: string | null;
|
||||
type: AssetType;
|
||||
assets: Asset[];
|
||||
};
|
||||
|
||||
export type User = {
|
||||
@@ -147,6 +128,39 @@ export type StorageAsset = {
|
||||
encodedVideoPath: string | null;
|
||||
};
|
||||
|
||||
export type Asset = {
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
deletedAt: Date | null;
|
||||
id: string;
|
||||
updateId: string;
|
||||
status: AssetStatus;
|
||||
checksum: Buffer<ArrayBufferLike>;
|
||||
deviceAssetId: string;
|
||||
deviceId: string;
|
||||
duplicateId: string | null;
|
||||
duration: string | null;
|
||||
encodedVideoPath: string | null;
|
||||
fileCreatedAt: Date | null;
|
||||
fileModifiedAt: Date | null;
|
||||
isArchived: boolean;
|
||||
isExternal: boolean;
|
||||
isFavorite: boolean;
|
||||
isOffline: boolean;
|
||||
isVisible: boolean;
|
||||
libraryId: string | null;
|
||||
livePhotoVideoId: string | null;
|
||||
localDateTime: Date | null;
|
||||
originalFileName: string;
|
||||
originalPath: string;
|
||||
ownerId: string;
|
||||
sidecarPath: string | null;
|
||||
stack?: Stack | null;
|
||||
stackId: string | null;
|
||||
thumbhash: Buffer<ArrayBufferLike> | null;
|
||||
type: AssetType;
|
||||
};
|
||||
|
||||
export type SidecarWriteAsset = {
|
||||
id: string;
|
||||
sidecarPath: string | null;
|
||||
@@ -159,7 +173,7 @@ export type Stack = {
|
||||
primaryAssetId: string;
|
||||
owner?: User;
|
||||
ownerId: string;
|
||||
assets: MapAsset[];
|
||||
assets: AssetEntity[];
|
||||
assetCount?: number;
|
||||
};
|
||||
|
||||
@@ -173,28 +187,6 @@ export type AuthSharedLink = {
|
||||
password: string | null;
|
||||
};
|
||||
|
||||
export type SharedLink = {
|
||||
id: string;
|
||||
album?: Album | null;
|
||||
albumId: string | null;
|
||||
allowDownload: boolean;
|
||||
allowUpload: boolean;
|
||||
assets: MapAsset[];
|
||||
createdAt: Date;
|
||||
description: string | null;
|
||||
expiresAt: Date | null;
|
||||
key: Buffer;
|
||||
password: string | null;
|
||||
showExif: boolean;
|
||||
type: SharedLinkType;
|
||||
userId: string;
|
||||
};
|
||||
|
||||
export type Album = Selectable<Albums> & {
|
||||
owner: User;
|
||||
assets: MapAsset[];
|
||||
};
|
||||
|
||||
export type AuthSession = {
|
||||
id: string;
|
||||
};
|
||||
@@ -264,6 +256,10 @@ export type AssetFace = {
|
||||
person?: Person | null;
|
||||
};
|
||||
|
||||
export type AssetJobStatus = Selectable<DatabaseAssetJobStatus> & {
|
||||
asset: AssetEntity;
|
||||
};
|
||||
|
||||
const userColumns = ['id', 'name', 'email', 'profileImagePath', 'profileChangedAt'] as const;
|
||||
|
||||
export const columns = {
|
||||
|
||||
6
server/src/db.d.ts
vendored
6
server/src/db.d.ts
vendored
@@ -143,8 +143,8 @@ export interface Assets {
|
||||
duplicateId: string | null;
|
||||
duration: string | null;
|
||||
encodedVideoPath: Generated<string | null>;
|
||||
fileCreatedAt: Timestamp;
|
||||
fileModifiedAt: Timestamp;
|
||||
fileCreatedAt: Timestamp | null;
|
||||
fileModifiedAt: Timestamp | null;
|
||||
id: Generated<string>;
|
||||
isArchived: Generated<boolean>;
|
||||
isExternal: Generated<boolean>;
|
||||
@@ -153,7 +153,7 @@ export interface Assets {
|
||||
isVisible: Generated<boolean>;
|
||||
libraryId: string | null;
|
||||
livePhotoVideoId: string | null;
|
||||
localDateTime: Timestamp;
|
||||
localDateTime: Timestamp | null;
|
||||
originalFileName: string;
|
||||
originalPath: string;
|
||||
ownerId: string;
|
||||
|
||||
@@ -11,8 +11,7 @@ import { setUnion } from 'src/utils/set';
|
||||
const GeneratedUuidV7Column = (options: Omit<ColumnOptions, 'type' | 'default' | 'nullable'> = {}) =>
|
||||
Column({ ...options, type: 'uuid', nullable: false, default: () => `${immich_uuid_v7.name}()` });
|
||||
|
||||
export const UpdateIdColumn = (options: Omit<ColumnOptions, 'type' | 'default' | 'nullable'> = {}) =>
|
||||
GeneratedUuidV7Column(options);
|
||||
export const UpdateIdColumn = () => GeneratedUuidV7Column();
|
||||
|
||||
export const PrimaryGeneratedUuidV7Column = () => GeneratedUuidV7Column({ primary: true });
|
||||
|
||||
|
||||
@@ -2,10 +2,10 @@ import { ApiProperty } from '@nestjs/swagger';
|
||||
import { Type } from 'class-transformer';
|
||||
import { ArrayNotEmpty, IsArray, IsEnum, IsString, ValidateNested } from 'class-validator';
|
||||
import _ from 'lodash';
|
||||
import { AlbumUser, AuthSharedLink, User } from 'src/database';
|
||||
import { AssetResponseDto, MapAsset, mapAsset } from 'src/dtos/asset-response.dto';
|
||||
import { AssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
|
||||
import { AuthDto } from 'src/dtos/auth.dto';
|
||||
import { UserResponseDto, mapUser } from 'src/dtos/user.dto';
|
||||
import { AlbumEntity } from 'src/entities/album.entity';
|
||||
import { AlbumUserRole, AssetOrder } from 'src/enum';
|
||||
import { Optional, ValidateBoolean, ValidateUUID } from 'src/validation';
|
||||
|
||||
@@ -142,23 +142,7 @@ export class AlbumResponseDto {
|
||||
order?: AssetOrder;
|
||||
}
|
||||
|
||||
export type MapAlbumDto = {
|
||||
albumUsers?: AlbumUser[];
|
||||
assets?: MapAsset[];
|
||||
sharedLinks?: AuthSharedLink[];
|
||||
albumName: string;
|
||||
description: string;
|
||||
albumThumbnailAssetId: string | null;
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
id: string;
|
||||
ownerId: string;
|
||||
owner: User;
|
||||
isActivityEnabled: boolean;
|
||||
order: AssetOrder;
|
||||
};
|
||||
|
||||
export const mapAlbum = (entity: MapAlbumDto, withAssets: boolean, auth?: AuthDto): AlbumResponseDto => {
|
||||
export const mapAlbum = (entity: AlbumEntity, withAssets: boolean, auth?: AuthDto): AlbumResponseDto => {
|
||||
const albumUsers: AlbumUserResponseDto[] = [];
|
||||
|
||||
if (entity.albumUsers) {
|
||||
@@ -175,7 +159,7 @@ export const mapAlbum = (entity: MapAlbumDto, withAssets: boolean, auth?: AuthDt
|
||||
|
||||
const assets = entity.assets || [];
|
||||
|
||||
const hasSharedLink = !!entity.sharedLinks && entity.sharedLinks.length > 0;
|
||||
const hasSharedLink = entity.sharedLinks?.length > 0;
|
||||
const hasSharedUser = albumUsers.length > 0;
|
||||
|
||||
let startDate = assets.at(0)?.localDateTime;
|
||||
@@ -206,5 +190,5 @@ export const mapAlbum = (entity: MapAlbumDto, withAssets: boolean, auth?: AuthDt
|
||||
};
|
||||
};
|
||||
|
||||
export const mapAlbumWithAssets = (entity: MapAlbumDto) => mapAlbum(entity, true);
|
||||
export const mapAlbumWithoutAssets = (entity: MapAlbumDto) => mapAlbum(entity, false);
|
||||
export const mapAlbumWithAssets = (entity: AlbumEntity) => mapAlbum(entity, true);
|
||||
export const mapAlbumWithoutAssets = (entity: AlbumEntity) => mapAlbum(entity, false);
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { ApiProperty } from '@nestjs/swagger';
|
||||
import { Selectable } from 'kysely';
|
||||
import { AssetFace, AssetFile, Exif, Stack, Tag, User } from 'src/database';
|
||||
import { AssetFace } from 'src/database';
|
||||
import { PropertyLifecycle } from 'src/decorators';
|
||||
import { AuthDto } from 'src/dtos/auth.dto';
|
||||
import { ExifResponseDto, mapExif } from 'src/dtos/exif.dto';
|
||||
@@ -12,7 +11,8 @@ import {
|
||||
} from 'src/dtos/person.dto';
|
||||
import { TagResponseDto, mapTag } from 'src/dtos/tag.dto';
|
||||
import { UserResponseDto, mapUser } from 'src/dtos/user.dto';
|
||||
import { AssetStatus, AssetType } from 'src/enum';
|
||||
import { AssetEntity } from 'src/entities/asset.entity';
|
||||
import { AssetType } from 'src/enum';
|
||||
import { mimeTypes } from 'src/utils/mime-types';
|
||||
|
||||
export class SanitizedAssetResponseDto {
|
||||
@@ -56,44 +56,6 @@ export class AssetResponseDto extends SanitizedAssetResponseDto {
|
||||
resized?: boolean;
|
||||
}
|
||||
|
||||
export type MapAsset = {
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
deletedAt: Date | null;
|
||||
id: string;
|
||||
updateId: string;
|
||||
status: AssetStatus;
|
||||
checksum: Buffer<ArrayBufferLike>;
|
||||
deviceAssetId: string;
|
||||
deviceId: string;
|
||||
duplicateId: string | null;
|
||||
duration: string | null;
|
||||
encodedVideoPath: string | null;
|
||||
exifInfo?: Selectable<Exif> | null;
|
||||
faces?: AssetFace[];
|
||||
fileCreatedAt: Date;
|
||||
fileModifiedAt: Date;
|
||||
files?: AssetFile[];
|
||||
isArchived: boolean;
|
||||
isExternal: boolean;
|
||||
isFavorite: boolean;
|
||||
isOffline: boolean;
|
||||
isVisible: boolean;
|
||||
libraryId: string | null;
|
||||
livePhotoVideoId: string | null;
|
||||
localDateTime: Date;
|
||||
originalFileName: string;
|
||||
originalPath: string;
|
||||
owner?: User | null;
|
||||
ownerId: string;
|
||||
sidecarPath: string | null;
|
||||
stack?: Stack | null;
|
||||
stackId: string | null;
|
||||
tags?: Tag[];
|
||||
thumbhash: Buffer<ArrayBufferLike> | null;
|
||||
type: AssetType;
|
||||
};
|
||||
|
||||
export class AssetStackResponseDto {
|
||||
id!: string;
|
||||
|
||||
@@ -110,7 +72,7 @@ export type AssetMapOptions = {
|
||||
};
|
||||
|
||||
// TODO: this is inefficient
|
||||
const peopleWithFaces = (faces?: AssetFace[]): PersonWithFacesResponseDto[] => {
|
||||
const peopleWithFaces = (faces: AssetFace[]): PersonWithFacesResponseDto[] => {
|
||||
const result: PersonWithFacesResponseDto[] = [];
|
||||
if (faces) {
|
||||
for (const face of faces) {
|
||||
@@ -128,7 +90,7 @@ const peopleWithFaces = (faces?: AssetFace[]): PersonWithFacesResponseDto[] => {
|
||||
return result;
|
||||
};
|
||||
|
||||
const mapStack = (entity: { stack?: Stack | null }) => {
|
||||
const mapStack = (entity: AssetEntity) => {
|
||||
if (!entity.stack) {
|
||||
return null;
|
||||
}
|
||||
@@ -149,7 +111,7 @@ export const hexOrBufferToBase64 = (encoded: string | Buffer) => {
|
||||
return encoded.toString('base64');
|
||||
};
|
||||
|
||||
export function mapAsset(entity: MapAsset, options: AssetMapOptions = {}): AssetResponseDto {
|
||||
export function mapAsset(entity: AssetEntity, options: AssetMapOptions = {}): AssetResponseDto {
|
||||
const { stripMetadata = false, withStack = false } = options;
|
||||
|
||||
if (stripMetadata) {
|
||||
|
||||
@@ -4,6 +4,7 @@ import { IsEnum, IsInt, IsObject, IsPositive, ValidateNested } from 'class-valid
|
||||
import { Memory } from 'src/database';
|
||||
import { AssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
|
||||
import { AuthDto } from 'src/dtos/auth.dto';
|
||||
import { AssetEntity } from 'src/entities/asset.entity';
|
||||
import { MemoryType } from 'src/enum';
|
||||
import { Optional, ValidateBoolean, ValidateDate, ValidateUUID } from 'src/validation';
|
||||
|
||||
@@ -102,6 +103,6 @@ export const mapMemory = (entity: Memory, auth: AuthDto): MemoryResponseDto => {
|
||||
type: entity.type as MemoryType,
|
||||
data: entity.data as unknown as MemoryData,
|
||||
isSaved: entity.isSaved,
|
||||
assets: ('assets' in entity ? entity.assets : []).map((asset) => mapAsset(asset, { auth })),
|
||||
assets: ('assets' in entity ? entity.assets : []).map((asset) => mapAsset(asset as AssetEntity, { auth })),
|
||||
};
|
||||
};
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
import { ApiProperty } from '@nestjs/swagger';
|
||||
import { IsEnum, IsString } from 'class-validator';
|
||||
import _ from 'lodash';
|
||||
import { SharedLink } from 'src/database';
|
||||
import { AlbumResponseDto, mapAlbumWithoutAssets } from 'src/dtos/album.dto';
|
||||
import { AssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
|
||||
import { SharedLinkEntity } from 'src/entities/shared-link.entity';
|
||||
import { SharedLinkType } from 'src/enum';
|
||||
import { Optional, ValidateBoolean, ValidateDate, ValidateUUID } from 'src/validation';
|
||||
|
||||
@@ -102,7 +102,7 @@ export class SharedLinkResponseDto {
|
||||
showMetadata!: boolean;
|
||||
}
|
||||
|
||||
export function mapSharedLink(sharedLink: SharedLink): SharedLinkResponseDto {
|
||||
export function mapSharedLink(sharedLink: SharedLinkEntity): SharedLinkResponseDto {
|
||||
const linkAssets = sharedLink.assets || [];
|
||||
|
||||
return {
|
||||
@@ -122,7 +122,7 @@ export function mapSharedLink(sharedLink: SharedLink): SharedLinkResponseDto {
|
||||
};
|
||||
}
|
||||
|
||||
export function mapSharedLinkWithoutMetadata(sharedLink: SharedLink): SharedLinkResponseDto {
|
||||
export function mapSharedLinkWithoutMetadata(sharedLink: SharedLinkEntity): SharedLinkResponseDto {
|
||||
const linkAssets = sharedLink.assets || [];
|
||||
const albumAssets = (sharedLink?.album?.assets || []).map((asset) => asset);
|
||||
|
||||
@@ -137,7 +137,7 @@ export function mapSharedLinkWithoutMetadata(sharedLink: SharedLink): SharedLink
|
||||
type: sharedLink.type,
|
||||
createdAt: sharedLink.createdAt,
|
||||
expiresAt: sharedLink.expiresAt,
|
||||
assets: assets.map((asset) => mapAsset(asset, { stripMetadata: true })),
|
||||
assets: assets.map((asset) => mapAsset(asset, { stripMetadata: true })) as AssetResponseDto[],
|
||||
album: sharedLink.album ? mapAlbumWithoutAssets(sharedLink.album) : undefined,
|
||||
allowUpload: sharedLink.allowUpload,
|
||||
allowDownload: sharedLink.allowDownload,
|
||||
|
||||
23
server/src/entities/album.entity.ts
Normal file
23
server/src/entities/album.entity.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
import { AlbumUser, User } from 'src/database';
|
||||
import { AssetEntity } from 'src/entities/asset.entity';
|
||||
import { SharedLinkEntity } from 'src/entities/shared-link.entity';
|
||||
import { AssetOrder } from 'src/enum';
|
||||
|
||||
export class AlbumEntity {
|
||||
id!: string;
|
||||
owner!: User;
|
||||
ownerId!: string;
|
||||
albumName!: string;
|
||||
description!: string;
|
||||
createdAt!: Date;
|
||||
updatedAt!: Date;
|
||||
updateId?: string;
|
||||
deletedAt!: Date | null;
|
||||
albumThumbnailAsset!: AssetEntity | null;
|
||||
albumThumbnailAssetId!: string | null;
|
||||
albumUsers!: AlbumUser[];
|
||||
assets!: AssetEntity[];
|
||||
sharedLinks!: SharedLinkEntity[];
|
||||
isActivityEnabled!: boolean;
|
||||
order!: AssetOrder;
|
||||
}
|
||||
270
server/src/entities/asset.entity.ts
Normal file
270
server/src/entities/asset.entity.ts
Normal file
@@ -0,0 +1,270 @@
|
||||
import { DeduplicateJoinsPlugin, ExpressionBuilder, Kysely, SelectQueryBuilder, sql } from 'kysely';
|
||||
import { jsonArrayFrom, jsonObjectFrom } from 'kysely/helpers/postgres';
|
||||
import { AssetFace, AssetFile, AssetJobStatus, columns, Exif, Stack, Tag, User } from 'src/database';
|
||||
import { DB } from 'src/db';
|
||||
import { SharedLinkEntity } from 'src/entities/shared-link.entity';
|
||||
import { AssetFileType, AssetStatus, AssetType } from 'src/enum';
|
||||
import { TimeBucketSize } from 'src/repositories/asset.repository';
|
||||
import { AssetSearchBuilderOptions } from 'src/repositories/search.repository';
|
||||
import { anyUuid, asUuid } from 'src/utils/database';
|
||||
|
||||
export const ASSET_CHECKSUM_CONSTRAINT = 'UQ_assets_owner_checksum';
|
||||
|
||||
export class AssetEntity {
|
||||
id!: string;
|
||||
deviceAssetId!: string;
|
||||
owner!: User;
|
||||
ownerId!: string;
|
||||
libraryId?: string | null;
|
||||
deviceId!: string;
|
||||
type!: AssetType;
|
||||
status!: AssetStatus;
|
||||
originalPath!: string;
|
||||
files!: AssetFile[];
|
||||
thumbhash!: Buffer | null;
|
||||
encodedVideoPath!: string | null;
|
||||
createdAt!: Date;
|
||||
updatedAt!: Date;
|
||||
updateId?: string;
|
||||
deletedAt!: Date | null;
|
||||
fileCreatedAt!: Date;
|
||||
localDateTime!: Date;
|
||||
fileModifiedAt!: Date;
|
||||
isFavorite!: boolean;
|
||||
isArchived!: boolean;
|
||||
isExternal!: boolean;
|
||||
isOffline!: boolean;
|
||||
checksum!: Buffer; // sha1 checksum
|
||||
duration!: string | null;
|
||||
isVisible!: boolean;
|
||||
livePhotoVideo!: AssetEntity | null;
|
||||
livePhotoVideoId!: string | null;
|
||||
originalFileName!: string;
|
||||
sidecarPath!: string | null;
|
||||
exifInfo?: Exif;
|
||||
tags?: Tag[];
|
||||
sharedLinks!: SharedLinkEntity[];
|
||||
faces!: AssetFace[];
|
||||
stackId?: string | null;
|
||||
stack?: Stack | null;
|
||||
jobStatus?: AssetJobStatus;
|
||||
duplicateId!: string | null;
|
||||
}
|
||||
|
||||
export function withExif<O>(qb: SelectQueryBuilder<DB, 'assets', O>) {
|
||||
return qb
|
||||
.leftJoin('exif', 'assets.id', 'exif.assetId')
|
||||
.select((eb) => eb.fn.toJson(eb.table('exif')).$castTo<Exif | null>().as('exifInfo'));
|
||||
}
|
||||
|
||||
export function withExifInner<O>(qb: SelectQueryBuilder<DB, 'assets', O>) {
|
||||
return qb
|
||||
.innerJoin('exif', 'assets.id', 'exif.assetId')
|
||||
.select((eb) => eb.fn.toJson(eb.table('exif')).$castTo<Exif>().as('exifInfo'));
|
||||
}
|
||||
|
||||
export function withSmartSearch<O>(qb: SelectQueryBuilder<DB, 'assets', O>) {
|
||||
return qb
|
||||
.leftJoin('smart_search', 'assets.id', 'smart_search.assetId')
|
||||
.select((eb) => eb.fn.toJson(eb.table('smart_search')).as('smartSearch'));
|
||||
}
|
||||
|
||||
export function withFaces(eb: ExpressionBuilder<DB, 'assets'>, withDeletedFace?: boolean) {
|
||||
return jsonArrayFrom(
|
||||
eb
|
||||
.selectFrom('asset_faces')
|
||||
.selectAll('asset_faces')
|
||||
.whereRef('asset_faces.assetId', '=', 'assets.id')
|
||||
.$if(!withDeletedFace, (qb) => qb.where('asset_faces.deletedAt', 'is', null)),
|
||||
).as('faces');
|
||||
}
|
||||
|
||||
export function withFiles(eb: ExpressionBuilder<DB, 'assets'>, type?: AssetFileType) {
|
||||
return jsonArrayFrom(
|
||||
eb
|
||||
.selectFrom('asset_files')
|
||||
.select(columns.assetFiles)
|
||||
.whereRef('asset_files.assetId', '=', 'assets.id')
|
||||
.$if(!!type, (qb) => qb.where('asset_files.type', '=', type!)),
|
||||
).as('files');
|
||||
}
|
||||
|
||||
export function withFacesAndPeople(eb: ExpressionBuilder<DB, 'assets'>, withDeletedFace?: boolean) {
|
||||
return jsonArrayFrom(
|
||||
eb
|
||||
.selectFrom('asset_faces')
|
||||
.leftJoinLateral(
|
||||
(eb) =>
|
||||
eb.selectFrom('person').selectAll('person').whereRef('asset_faces.personId', '=', 'person.id').as('person'),
|
||||
(join) => join.onTrue(),
|
||||
)
|
||||
.selectAll('asset_faces')
|
||||
.select((eb) => eb.table('person').as('person'))
|
||||
.whereRef('asset_faces.assetId', '=', 'assets.id')
|
||||
.$if(!withDeletedFace, (qb) => qb.where('asset_faces.deletedAt', 'is', null)),
|
||||
).as('faces');
|
||||
}
|
||||
|
||||
export function hasPeople<O>(qb: SelectQueryBuilder<DB, 'assets', O>, personIds: string[]) {
|
||||
return qb.innerJoin(
|
||||
(eb) =>
|
||||
eb
|
||||
.selectFrom('asset_faces')
|
||||
.select('assetId')
|
||||
.where('personId', '=', anyUuid(personIds!))
|
||||
.where('deletedAt', 'is', null)
|
||||
.groupBy('assetId')
|
||||
.having((eb) => eb.fn.count('personId').distinct(), '=', personIds.length)
|
||||
.as('has_people'),
|
||||
(join) => join.onRef('has_people.assetId', '=', 'assets.id'),
|
||||
);
|
||||
}
|
||||
|
||||
export function hasTags<O>(qb: SelectQueryBuilder<DB, 'assets', O>, tagIds: string[]) {
|
||||
return qb.innerJoin(
|
||||
(eb) =>
|
||||
eb
|
||||
.selectFrom('tag_asset')
|
||||
.select('assetsId')
|
||||
.innerJoin('tags_closure', 'tag_asset.tagsId', 'tags_closure.id_descendant')
|
||||
.where('tags_closure.id_ancestor', '=', anyUuid(tagIds))
|
||||
.groupBy('assetsId')
|
||||
.having((eb) => eb.fn.count('tags_closure.id_ancestor').distinct(), '>=', tagIds.length)
|
||||
.as('has_tags'),
|
||||
(join) => join.onRef('has_tags.assetsId', '=', 'assets.id'),
|
||||
);
|
||||
}
|
||||
|
||||
export function withOwner(eb: ExpressionBuilder<DB, 'assets'>) {
|
||||
return jsonObjectFrom(eb.selectFrom('users').selectAll().whereRef('users.id', '=', 'assets.ownerId')).as('owner');
|
||||
}
|
||||
|
||||
export function withLibrary(eb: ExpressionBuilder<DB, 'assets'>) {
|
||||
return jsonObjectFrom(eb.selectFrom('libraries').selectAll().whereRef('libraries.id', '=', 'assets.libraryId')).as(
|
||||
'library',
|
||||
);
|
||||
}
|
||||
|
||||
export function withTags(eb: ExpressionBuilder<DB, 'assets'>) {
|
||||
return jsonArrayFrom(
|
||||
eb
|
||||
.selectFrom('tags')
|
||||
.select(columns.tag)
|
||||
.innerJoin('tag_asset', 'tags.id', 'tag_asset.tagsId')
|
||||
.whereRef('assets.id', '=', 'tag_asset.assetsId'),
|
||||
).as('tags');
|
||||
}
|
||||
|
||||
export function truncatedDate<O>(size: TimeBucketSize) {
|
||||
return sql<O>`date_trunc(${size}, "localDateTime" at time zone 'UTC') at time zone 'UTC'`;
|
||||
}
|
||||
|
||||
export function withTagId<O>(qb: SelectQueryBuilder<DB, 'assets', O>, tagId: string) {
|
||||
return qb.where((eb) =>
|
||||
eb.exists(
|
||||
eb
|
||||
.selectFrom('tags_closure')
|
||||
.innerJoin('tag_asset', 'tag_asset.tagsId', 'tags_closure.id_descendant')
|
||||
.whereRef('tag_asset.assetsId', '=', 'assets.id')
|
||||
.where('tags_closure.id_ancestor', '=', tagId),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
const joinDeduplicationPlugin = new DeduplicateJoinsPlugin();
|
||||
|
||||
/** TODO: This should only be used for search-related queries, not as a general purpose query builder */
|
||||
export function searchAssetBuilder(kysely: Kysely<DB>, options: AssetSearchBuilderOptions) {
|
||||
options.isArchived ??= options.withArchived ? undefined : false;
|
||||
options.withDeleted ||= !!(options.trashedAfter || options.trashedBefore || options.isOffline);
|
||||
return kysely
|
||||
.withPlugin(joinDeduplicationPlugin)
|
||||
.selectFrom('assets')
|
||||
.selectAll('assets')
|
||||
.$if(!!options.tagIds && options.tagIds.length > 0, (qb) => hasTags(qb, options.tagIds!))
|
||||
.$if(!!options.personIds && options.personIds.length > 0, (qb) => hasPeople(qb, options.personIds!))
|
||||
.$if(!!options.createdBefore, (qb) => qb.where('assets.createdAt', '<=', options.createdBefore!))
|
||||
.$if(!!options.createdAfter, (qb) => qb.where('assets.createdAt', '>=', options.createdAfter!))
|
||||
.$if(!!options.updatedBefore, (qb) => qb.where('assets.updatedAt', '<=', options.updatedBefore!))
|
||||
.$if(!!options.updatedAfter, (qb) => qb.where('assets.updatedAt', '>=', options.updatedAfter!))
|
||||
.$if(!!options.trashedBefore, (qb) => qb.where('assets.deletedAt', '<=', options.trashedBefore!))
|
||||
.$if(!!options.trashedAfter, (qb) => qb.where('assets.deletedAt', '>=', options.trashedAfter!))
|
||||
.$if(!!options.takenBefore, (qb) => qb.where('assets.fileCreatedAt', '<=', options.takenBefore!))
|
||||
.$if(!!options.takenAfter, (qb) => qb.where('assets.fileCreatedAt', '>=', options.takenAfter!))
|
||||
.$if(options.city !== undefined, (qb) =>
|
||||
qb
|
||||
.innerJoin('exif', 'assets.id', 'exif.assetId')
|
||||
.where('exif.city', options.city === null ? 'is' : '=', options.city!),
|
||||
)
|
||||
.$if(options.state !== undefined, (qb) =>
|
||||
qb
|
||||
.innerJoin('exif', 'assets.id', 'exif.assetId')
|
||||
.where('exif.state', options.state === null ? 'is' : '=', options.state!),
|
||||
)
|
||||
.$if(options.country !== undefined, (qb) =>
|
||||
qb
|
||||
.innerJoin('exif', 'assets.id', 'exif.assetId')
|
||||
.where('exif.country', options.country === null ? 'is' : '=', options.country!),
|
||||
)
|
||||
.$if(options.make !== undefined, (qb) =>
|
||||
qb
|
||||
.innerJoin('exif', 'assets.id', 'exif.assetId')
|
||||
.where('exif.make', options.make === null ? 'is' : '=', options.make!),
|
||||
)
|
||||
.$if(options.model !== undefined, (qb) =>
|
||||
qb
|
||||
.innerJoin('exif', 'assets.id', 'exif.assetId')
|
||||
.where('exif.model', options.model === null ? 'is' : '=', options.model!),
|
||||
)
|
||||
.$if(options.lensModel !== undefined, (qb) =>
|
||||
qb
|
||||
.innerJoin('exif', 'assets.id', 'exif.assetId')
|
||||
.where('exif.lensModel', options.lensModel === null ? 'is' : '=', options.lensModel!),
|
||||
)
|
||||
.$if(options.rating !== undefined, (qb) =>
|
||||
qb
|
||||
.innerJoin('exif', 'assets.id', 'exif.assetId')
|
||||
.where('exif.rating', options.rating === null ? 'is' : '=', options.rating!),
|
||||
)
|
||||
.$if(!!options.checksum, (qb) => qb.where('assets.checksum', '=', options.checksum!))
|
||||
.$if(!!options.deviceAssetId, (qb) => qb.where('assets.deviceAssetId', '=', options.deviceAssetId!))
|
||||
.$if(!!options.deviceId, (qb) => qb.where('assets.deviceId', '=', options.deviceId!))
|
||||
.$if(!!options.id, (qb) => qb.where('assets.id', '=', asUuid(options.id!)))
|
||||
.$if(!!options.libraryId, (qb) => qb.where('assets.libraryId', '=', asUuid(options.libraryId!)))
|
||||
.$if(!!options.userIds, (qb) => qb.where('assets.ownerId', '=', anyUuid(options.userIds!)))
|
||||
.$if(!!options.encodedVideoPath, (qb) => qb.where('assets.encodedVideoPath', '=', options.encodedVideoPath!))
|
||||
.$if(!!options.originalPath, (qb) =>
|
||||
qb.where(sql`f_unaccent(assets."originalPath")`, 'ilike', sql`'%' || f_unaccent(${options.originalPath}) || '%'`),
|
||||
)
|
||||
.$if(!!options.originalFileName, (qb) =>
|
||||
qb.where(
|
||||
sql`f_unaccent(assets."originalFileName")`,
|
||||
'ilike',
|
||||
sql`'%' || f_unaccent(${options.originalFileName}) || '%'`,
|
||||
),
|
||||
)
|
||||
.$if(!!options.description, (qb) =>
|
||||
qb
|
||||
.innerJoin('exif', 'assets.id', 'exif.assetId')
|
||||
.where(sql`f_unaccent(exif.description)`, 'ilike', sql`'%' || f_unaccent(${options.description}) || '%'`),
|
||||
)
|
||||
.$if(!!options.type, (qb) => qb.where('assets.type', '=', options.type!))
|
||||
.$if(options.isFavorite !== undefined, (qb) => qb.where('assets.isFavorite', '=', options.isFavorite!))
|
||||
.$if(options.isOffline !== undefined, (qb) => qb.where('assets.isOffline', '=', options.isOffline!))
|
||||
.$if(options.isVisible !== undefined, (qb) => qb.where('assets.isVisible', '=', options.isVisible!))
|
||||
.$if(options.isArchived !== undefined, (qb) => qb.where('assets.isArchived', '=', options.isArchived!))
|
||||
.$if(options.isEncoded !== undefined, (qb) =>
|
||||
qb.where('assets.encodedVideoPath', options.isEncoded ? 'is not' : 'is', null),
|
||||
)
|
||||
.$if(options.isMotion !== undefined, (qb) =>
|
||||
qb.where('assets.livePhotoVideoId', options.isMotion ? 'is not' : 'is', null),
|
||||
)
|
||||
.$if(!!options.isNotInAlbum, (qb) =>
|
||||
qb.where((eb) =>
|
||||
eb.not(eb.exists((eb) => eb.selectFrom('albums_assets_assets').whereRef('assetsId', '=', 'assets.id'))),
|
||||
),
|
||||
)
|
||||
.$if(!!options.withExif, withExifInner)
|
||||
.$if(!!(options.withFaces || options.withPeople || options.personIds), (qb) => qb.select(withFacesAndPeople))
|
||||
.$if(!options.withDeleted, (qb) => qb.where('assets.deletedAt', 'is', null));
|
||||
}
|
||||
20
server/src/entities/shared-link.entity.ts
Normal file
20
server/src/entities/shared-link.entity.ts
Normal file
@@ -0,0 +1,20 @@
|
||||
import { AlbumEntity } from 'src/entities/album.entity';
|
||||
import { AssetEntity } from 'src/entities/asset.entity';
|
||||
import { SharedLinkType } from 'src/enum';
|
||||
|
||||
export class SharedLinkEntity {
|
||||
id!: string;
|
||||
description!: string | null;
|
||||
password!: string | null;
|
||||
userId!: string;
|
||||
key!: Buffer; // use to access the inidividual asset
|
||||
type!: SharedLinkType;
|
||||
createdAt!: Date;
|
||||
expiresAt!: Date | null;
|
||||
allowUpload!: boolean;
|
||||
allowDownload!: boolean;
|
||||
showExif!: boolean;
|
||||
assets!: AssetEntity[];
|
||||
album?: AlbumEntity;
|
||||
albumId!: string | null;
|
||||
}
|
||||
@@ -1,51 +0,0 @@
|
||||
import { MigrationInterface, QueryRunner } from 'typeorm';
|
||||
|
||||
export class AddForeignKeyIndexes1744900200559 implements MigrationInterface {
|
||||
public async up(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(`CREATE INDEX "IDX_0f6fc2fb195f24d19b0fb0d57c" ON "libraries" ("ownerId")`);
|
||||
await queryRunner.query(`CREATE INDEX "IDX_91704e101438fd0653f582426d" ON "asset_stack" ("primaryAssetId")`);
|
||||
await queryRunner.query(`CREATE INDEX "IDX_c05079e542fd74de3b5ecb5c1c" ON "asset_stack" ("ownerId")`);
|
||||
await queryRunner.query(`CREATE INDEX "IDX_2c5ac0d6fb58b238fd2068de67" ON "assets" ("ownerId")`);
|
||||
await queryRunner.query(`CREATE INDEX "IDX_16294b83fa8c0149719a1f631e" ON "assets" ("livePhotoVideoId")`);
|
||||
await queryRunner.query(`CREATE INDEX "IDX_9977c3c1de01c3d848039a6b90" ON "assets" ("libraryId")`);
|
||||
await queryRunner.query(`CREATE INDEX "IDX_f15d48fa3ea5e4bda05ca8ab20" ON "assets" ("stackId")`);
|
||||
await queryRunner.query(`CREATE INDEX "IDX_b22c53f35ef20c28c21637c85f" ON "albums" ("ownerId")`);
|
||||
await queryRunner.query(`CREATE INDEX "IDX_05895aa505a670300d4816debc" ON "albums" ("albumThumbnailAssetId")`);
|
||||
await queryRunner.query(`CREATE INDEX "IDX_1af8519996fbfb3684b58df280" ON "activity" ("albumId")`);
|
||||
await queryRunner.query(`CREATE INDEX "IDX_3571467bcbe021f66e2bdce96e" ON "activity" ("userId")`);
|
||||
await queryRunner.query(`CREATE INDEX "IDX_8091ea76b12338cb4428d33d78" ON "activity" ("assetId")`);
|
||||
await queryRunner.query(`CREATE INDEX "IDX_6c2e267ae764a9413b863a2934" ON "api_keys" ("userId")`);
|
||||
await queryRunner.query(`CREATE INDEX "IDX_5527cc99f530a547093f9e577b" ON "person" ("ownerId")`);
|
||||
await queryRunner.query(`CREATE INDEX "IDX_2bbabe31656b6778c6b87b6102" ON "person" ("faceAssetId")`);
|
||||
await queryRunner.query(`CREATE INDEX "IDX_575842846f0c28fa5da46c99b1" ON "memories" ("ownerId")`);
|
||||
await queryRunner.query(`CREATE INDEX "IDX_d7e875c6c60e661723dbf372fd" ON "partners" ("sharedWithId")`);
|
||||
await queryRunner.query(`CREATE INDEX "IDX_57de40bc620f456c7311aa3a1e" ON "sessions" ("userId")`);
|
||||
await queryRunner.query(`CREATE INDEX "IDX_66fe3837414c5a9f1c33ca4934" ON "shared_links" ("userId")`);
|
||||
await queryRunner.query(`CREATE INDEX "IDX_d8ddd9d687816cc490432b3d4b" ON "session_sync_checkpoints" ("sessionId")`);
|
||||
await queryRunner.query(`CREATE INDEX "IDX_9f9590cc11561f1f48ff034ef9" ON "tags" ("parentId")`);
|
||||
}
|
||||
|
||||
public async down(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(`DROP INDEX "IDX_66fe3837414c5a9f1c33ca4934";`);
|
||||
await queryRunner.query(`DROP INDEX "IDX_91704e101438fd0653f582426d";`);
|
||||
await queryRunner.query(`DROP INDEX "IDX_c05079e542fd74de3b5ecb5c1c";`);
|
||||
await queryRunner.query(`DROP INDEX "IDX_5527cc99f530a547093f9e577b";`);
|
||||
await queryRunner.query(`DROP INDEX "IDX_2bbabe31656b6778c6b87b6102";`);
|
||||
await queryRunner.query(`DROP INDEX "IDX_0f6fc2fb195f24d19b0fb0d57c";`);
|
||||
await queryRunner.query(`DROP INDEX "IDX_9f9590cc11561f1f48ff034ef9";`);
|
||||
await queryRunner.query(`DROP INDEX "IDX_2c5ac0d6fb58b238fd2068de67";`);
|
||||
await queryRunner.query(`DROP INDEX "IDX_16294b83fa8c0149719a1f631e";`);
|
||||
await queryRunner.query(`DROP INDEX "IDX_9977c3c1de01c3d848039a6b90";`);
|
||||
await queryRunner.query(`DROP INDEX "IDX_f15d48fa3ea5e4bda05ca8ab20";`);
|
||||
await queryRunner.query(`DROP INDEX "IDX_b22c53f35ef20c28c21637c85f";`);
|
||||
await queryRunner.query(`DROP INDEX "IDX_05895aa505a670300d4816debc";`);
|
||||
await queryRunner.query(`DROP INDEX "IDX_57de40bc620f456c7311aa3a1e";`);
|
||||
await queryRunner.query(`DROP INDEX "IDX_d8ddd9d687816cc490432b3d4b";`);
|
||||
await queryRunner.query(`DROP INDEX "IDX_d7e875c6c60e661723dbf372fd";`);
|
||||
await queryRunner.query(`DROP INDEX "IDX_575842846f0c28fa5da46c99b1";`);
|
||||
await queryRunner.query(`DROP INDEX "IDX_6c2e267ae764a9413b863a2934";`);
|
||||
await queryRunner.query(`DROP INDEX "IDX_1af8519996fbfb3684b58df280";`);
|
||||
await queryRunner.query(`DROP INDEX "IDX_3571467bcbe021f66e2bdce96e";`);
|
||||
await queryRunner.query(`DROP INDEX "IDX_8091ea76b12338cb4428d33d78";`);
|
||||
}
|
||||
}
|
||||
@@ -1,13 +0,0 @@
|
||||
import { MigrationInterface, QueryRunner } from 'typeorm';
|
||||
|
||||
export class AddMissingIndex1744910873956 implements MigrationInterface {
|
||||
public async up(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(
|
||||
`CREATE INDEX IF NOT EXISTS "IDX_geodata_gist_earthcoord" ON "geodata_places" (ll_to_earth_public(latitude, longitude))`,
|
||||
);
|
||||
}
|
||||
|
||||
public async down(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(`DROP INDEX "IDX_geodata_gist_earthcoord";`);
|
||||
}
|
||||
}
|
||||
@@ -82,7 +82,7 @@ from
|
||||
where
|
||||
"assets"."id" = any ($1::uuid[])
|
||||
|
||||
-- AssetRepository.getByIdsWithAllRelationsButStacks
|
||||
-- AssetRepository.getByIdsWithAllRelations
|
||||
select
|
||||
"assets".*,
|
||||
(
|
||||
@@ -127,13 +127,28 @@ select
|
||||
"assets"."id" = "tag_asset"."assetsId"
|
||||
) as agg
|
||||
) as "tags",
|
||||
to_json("exif") as "exifInfo"
|
||||
to_json("exif") as "exifInfo",
|
||||
to_json("stacked_assets") as "stack"
|
||||
from
|
||||
"assets"
|
||||
left join "exif" on "assets"."id" = "exif"."assetId"
|
||||
left join "asset_stack" on "asset_stack"."id" = "assets"."stackId"
|
||||
left join lateral (
|
||||
select
|
||||
"asset_stack".*,
|
||||
array_agg("stacked") as "assets"
|
||||
from
|
||||
"assets" as "stacked"
|
||||
where
|
||||
"stacked"."stackId" = "asset_stack"."id"
|
||||
and "stacked"."id" != "asset_stack"."primaryAssetId"
|
||||
and "stacked"."deletedAt" is null
|
||||
and "stacked"."isArchived" = $1
|
||||
group by
|
||||
"asset_stack"."id"
|
||||
) as "stacked_assets" on "asset_stack"."id" is not null
|
||||
where
|
||||
"assets"."id" = any ($1::uuid[])
|
||||
"assets"."id" = any ($2::uuid[])
|
||||
|
||||
-- AssetRepository.deleteAll
|
||||
delete from "assets"
|
||||
|
||||
@@ -1,11 +1,12 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { ExpressionBuilder, Insertable, Kysely, NotNull, sql, Updateable } from 'kysely';
|
||||
import { ExpressionBuilder, Insertable, Kysely, sql, Updateable } from 'kysely';
|
||||
import { jsonArrayFrom, jsonObjectFrom } from 'kysely/helpers/postgres';
|
||||
import { InjectKysely } from 'nestjs-kysely';
|
||||
import { columns, Exif } from 'src/database';
|
||||
import { columns } from 'src/database';
|
||||
import { Albums, DB } from 'src/db';
|
||||
import { Chunked, ChunkedArray, ChunkedSet, DummyValue, GenerateSql } from 'src/decorators';
|
||||
import { AlbumUserCreateDto } from 'src/dtos/album.dto';
|
||||
import { AlbumEntity } from 'src/entities/album.entity';
|
||||
|
||||
export interface AlbumAssetCount {
|
||||
albumId: string;
|
||||
@@ -20,9 +21,9 @@ export interface AlbumInfoOptions {
|
||||
}
|
||||
|
||||
const withOwner = (eb: ExpressionBuilder<DB, 'albums'>) => {
|
||||
return jsonObjectFrom(eb.selectFrom('users').select(columns.user).whereRef('users.id', '=', 'albums.ownerId'))
|
||||
.$notNull()
|
||||
.as('owner');
|
||||
return jsonObjectFrom(eb.selectFrom('users').select(columns.user).whereRef('users.id', '=', 'albums.ownerId')).as(
|
||||
'owner',
|
||||
);
|
||||
};
|
||||
|
||||
const withAlbumUsers = (eb: ExpressionBuilder<DB, 'albums'>) => {
|
||||
@@ -31,14 +32,12 @@ const withAlbumUsers = (eb: ExpressionBuilder<DB, 'albums'>) => {
|
||||
.selectFrom('albums_shared_users_users as album_users')
|
||||
.select('album_users.role')
|
||||
.select((eb) =>
|
||||
jsonObjectFrom(eb.selectFrom('users').select(columns.user).whereRef('users.id', '=', 'album_users.usersId'))
|
||||
.$notNull()
|
||||
.as('user'),
|
||||
jsonObjectFrom(eb.selectFrom('users').select(columns.user).whereRef('users.id', '=', 'album_users.usersId')).as(
|
||||
'user',
|
||||
),
|
||||
)
|
||||
.whereRef('album_users.albumsId', '=', 'albums.id'),
|
||||
)
|
||||
.$notNull()
|
||||
.as('albumUsers');
|
||||
).as('albumUsers');
|
||||
};
|
||||
|
||||
const withSharedLink = (eb: ExpressionBuilder<DB, 'albums'>) => {
|
||||
@@ -54,7 +53,7 @@ const withAssets = (eb: ExpressionBuilder<DB, 'albums'>) => {
|
||||
.selectFrom('assets')
|
||||
.selectAll('assets')
|
||||
.leftJoin('exif', 'assets.id', 'exif.assetId')
|
||||
.select((eb) => eb.table('exif').$castTo<Exif>().as('exifInfo'))
|
||||
.select((eb) => eb.table('exif').as('exifInfo'))
|
||||
.innerJoin('albums_assets_assets', 'albums_assets_assets.assetsId', 'assets.id')
|
||||
.whereRef('albums_assets_assets.albumsId', '=', 'albums.id')
|
||||
.where('assets.deletedAt', 'is', null)
|
||||
@@ -70,7 +69,7 @@ export class AlbumRepository {
|
||||
constructor(@InjectKysely() private db: Kysely<DB>) {}
|
||||
|
||||
@GenerateSql({ params: [DummyValue.UUID, { withAssets: true }] })
|
||||
async getById(id: string, options: AlbumInfoOptions) {
|
||||
async getById(id: string, options: AlbumInfoOptions): Promise<AlbumEntity | undefined> {
|
||||
return this.db
|
||||
.selectFrom('albums')
|
||||
.selectAll('albums')
|
||||
@@ -80,12 +79,11 @@ export class AlbumRepository {
|
||||
.select(withAlbumUsers)
|
||||
.select(withSharedLink)
|
||||
.$if(options.withAssets, (eb) => eb.select(withAssets))
|
||||
.$narrowType<{ assets: NotNull }>()
|
||||
.executeTakeFirst();
|
||||
.executeTakeFirst() as Promise<AlbumEntity | undefined>;
|
||||
}
|
||||
|
||||
@GenerateSql({ params: [DummyValue.UUID, DummyValue.UUID] })
|
||||
async getByAssetId(ownerId: string, assetId: string) {
|
||||
async getByAssetId(ownerId: string, assetId: string): Promise<AlbumEntity[]> {
|
||||
return this.db
|
||||
.selectFrom('albums')
|
||||
.selectAll('albums')
|
||||
@@ -107,7 +105,7 @@ export class AlbumRepository {
|
||||
.select(withOwner)
|
||||
.select(withAlbumUsers)
|
||||
.orderBy('albums.createdAt', 'desc')
|
||||
.execute();
|
||||
.execute() as unknown as Promise<AlbumEntity[]>;
|
||||
}
|
||||
|
||||
@GenerateSql({ params: [[DummyValue.UUID]] })
|
||||
@@ -136,7 +134,7 @@ export class AlbumRepository {
|
||||
}
|
||||
|
||||
@GenerateSql({ params: [DummyValue.UUID] })
|
||||
async getOwned(ownerId: string) {
|
||||
async getOwned(ownerId: string): Promise<AlbumEntity[]> {
|
||||
return this.db
|
||||
.selectFrom('albums')
|
||||
.selectAll('albums')
|
||||
@@ -146,14 +144,14 @@ export class AlbumRepository {
|
||||
.where('albums.ownerId', '=', ownerId)
|
||||
.where('albums.deletedAt', 'is', null)
|
||||
.orderBy('albums.createdAt', 'desc')
|
||||
.execute();
|
||||
.execute() as unknown as Promise<AlbumEntity[]>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get albums shared with and shared by owner.
|
||||
*/
|
||||
@GenerateSql({ params: [DummyValue.UUID] })
|
||||
async getShared(ownerId: string) {
|
||||
async getShared(ownerId: string): Promise<AlbumEntity[]> {
|
||||
return this.db
|
||||
.selectFrom('albums')
|
||||
.selectAll('albums')
|
||||
@@ -178,14 +176,14 @@ export class AlbumRepository {
|
||||
.select(withOwner)
|
||||
.select(withSharedLink)
|
||||
.orderBy('albums.createdAt', 'desc')
|
||||
.execute();
|
||||
.execute() as unknown as Promise<AlbumEntity[]>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get albums of owner that are _not_ shared
|
||||
*/
|
||||
@GenerateSql({ params: [DummyValue.UUID] })
|
||||
async getNotShared(ownerId: string) {
|
||||
async getNotShared(ownerId: string): Promise<AlbumEntity[]> {
|
||||
return this.db
|
||||
.selectFrom('albums')
|
||||
.selectAll('albums')
|
||||
@@ -205,7 +203,7 @@ export class AlbumRepository {
|
||||
)
|
||||
.select(withOwner)
|
||||
.orderBy('albums.createdAt', 'desc')
|
||||
.execute();
|
||||
.execute() as unknown as Promise<AlbumEntity[]>;
|
||||
}
|
||||
|
||||
async restoreAll(userId: string): Promise<void> {
|
||||
@@ -264,7 +262,7 @@ export class AlbumRepository {
|
||||
await this.addAssets(this.db, albumId, assetIds);
|
||||
}
|
||||
|
||||
create(album: Insertable<Albums>, assetIds: string[], albumUsers: AlbumUserCreateDto[]) {
|
||||
create(album: Insertable<Albums>, assetIds: string[], albumUsers: AlbumUserCreateDto[]): Promise<AlbumEntity> {
|
||||
return this.db.transaction().execute(async (tx) => {
|
||||
const newAlbum = await tx.insertInto('albums').values(album).returning('albums.id').executeTakeFirst();
|
||||
|
||||
@@ -292,12 +290,11 @@ export class AlbumRepository {
|
||||
.select(withOwner)
|
||||
.select(withAssets)
|
||||
.select(withAlbumUsers)
|
||||
.$narrowType<{ assets: NotNull }>()
|
||||
.executeTakeFirstOrThrow();
|
||||
.executeTakeFirst() as unknown as Promise<AlbumEntity>;
|
||||
});
|
||||
}
|
||||
|
||||
update(id: string, album: Updateable<Albums>) {
|
||||
update(id: string, album: Updateable<Albums>): Promise<AlbumEntity> {
|
||||
return this.db
|
||||
.updateTable('albums')
|
||||
.set(album)
|
||||
@@ -306,7 +303,7 @@ export class AlbumRepository {
|
||||
.returning(withOwner)
|
||||
.returning(withSharedLink)
|
||||
.returning(withAlbumUsers)
|
||||
.executeTakeFirstOrThrow();
|
||||
.executeTakeFirst() as unknown as Promise<AlbumEntity>;
|
||||
}
|
||||
|
||||
async delete(id: string): Promise<void> {
|
||||
|
||||
@@ -5,9 +5,10 @@ import { InjectKysely } from 'nestjs-kysely';
|
||||
import { columns } from 'src/database';
|
||||
import { DB } from 'src/db';
|
||||
import { DummyValue, GenerateSql } from 'src/decorators';
|
||||
import { withExifInner, withFaces, withFiles } from 'src/entities/asset.entity';
|
||||
import { AssetFileType } from 'src/enum';
|
||||
import { StorageAsset } from 'src/types';
|
||||
import { anyUuid, asUuid, withExifInner, withFaces, withFiles } from 'src/utils/database';
|
||||
import { asUuid } from 'src/utils/database';
|
||||
|
||||
@Injectable()
|
||||
export class AssetJobRepository {
|
||||
@@ -148,21 +149,6 @@ export class AssetJobRepository {
|
||||
.executeTakeFirst();
|
||||
}
|
||||
|
||||
getForSyncAssets(ids: string[]) {
|
||||
return this.db
|
||||
.selectFrom('assets')
|
||||
.select([
|
||||
'assets.id',
|
||||
'assets.isOffline',
|
||||
'assets.libraryId',
|
||||
'assets.originalPath',
|
||||
'assets.status',
|
||||
'assets.fileModifiedAt',
|
||||
])
|
||||
.where('assets.id', '=', anyUuid(ids))
|
||||
.execute();
|
||||
}
|
||||
|
||||
private storageTemplateAssetQuery() {
|
||||
return this.db
|
||||
.selectFrom('assets')
|
||||
|
||||
@@ -1,21 +1,14 @@
import { Injectable } from '@nestjs/common';
import { Insertable, Kysely, NotNull, Selectable, UpdateResult, Updateable, sql } from 'kysely';
import { Insertable, Kysely, Selectable, UpdateResult, Updateable, sql } from 'kysely';
import { isEmpty, isUndefined, omitBy } from 'lodash';
import { InjectKysely } from 'nestjs-kysely';
import { Stack } from 'src/database';
import { AssetFiles, AssetJobStatus, Assets, DB, Exif } from 'src/db';
import { Chunked, ChunkedArray, DummyValue, GenerateSql } from 'src/decorators';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { AssetFileType, AssetOrder, AssetStatus, AssetType } from 'src/enum';
import { AssetSearchOptions, SearchExploreItem, SearchExploreItemSet } from 'src/repositories/search.repository';
import {
  anyUuid,
  asUuid,
  AssetEntity,
  hasPeople,
  removeUndefinedKeys,
  searchAssetBuilder,
  truncatedDate,
  unnest,
  withExif,
  withFaces,
  withFacesAndPeople,
@@ -25,9 +18,12 @@ import {
  withSmartSearch,
  withTagId,
  withTags,
} from 'src/utils/database';
} from 'src/entities/asset.entity';
import { AssetFileType, AssetOrder, AssetStatus, AssetType } from 'src/enum';
import { AssetSearchOptions, SearchExploreItem, SearchExploreItemSet } from 'src/repositories/search.repository';
import { anyUuid, asUuid, removeUndefinedKeys, unnest } from 'src/utils/database';
import { globToSqlPattern } from 'src/utils/misc';
import { PaginationOptions, paginationHelper } from 'src/utils/pagination';
import { Paginated, PaginationOptions, paginationHelper } from 'src/utils/pagination';

export type AssetStats = Record<AssetType, number>;

@@ -130,6 +126,8 @@ export interface AssetGetByChecksumOptions {
  libraryId?: string;
}

export type AssetPathEntity = Pick<AssetEntity, 'id' | 'originalPath' | 'isOffline'>;

export interface GetByIdsRelations {
  exifInfo?: boolean;
  faces?: { person?: boolean; withDeleted?: boolean };
@@ -143,12 +141,12 @@ export interface GetByIdsRelations {

export interface DuplicateGroup {
  duplicateId: string;
  assets: MapAsset[];
  assets: AssetEntity[];
}

export interface DayOfYearAssets {
  yearsAgo: number;
  assets: MapAsset[];
  assets: AssetEntity[];
}

@Injectable()
@@ -236,12 +234,12 @@ export class AssetRepository {
      .execute();
  }

  create(asset: Insertable<Assets>) {
    return this.db.insertInto('assets').values(asset).returningAll().executeTakeFirstOrThrow();
  create(asset: Insertable<Assets>): Promise<AssetEntity> {
    return this.db.insertInto('assets').values(asset).returningAll().executeTakeFirst() as any as Promise<AssetEntity>;
  }

  createAll(assets: Insertable<Assets>[]) {
    return this.db.insertInto('assets').values(assets).returningAll().execute();
  createAll(assets: Insertable<Assets>[]): Promise<AssetEntity[]> {
    return this.db.insertInto('assets').values(assets).returningAll().execute() as any as Promise<AssetEntity[]>;
  }

  @GenerateSql({ params: [DummyValue.UUID, { day: 1, month: 1 }] })
@@ -301,13 +299,56 @@ export class AssetRepository {

  @GenerateSql({ params: [[DummyValue.UUID]] })
  @ChunkedArray()
  getByIds(ids: string[]) {
    return this.db.selectFrom('assets').selectAll('assets').where('assets.id', '=', anyUuid(ids)).execute();
  async getByIds(
    ids: string[],
    { exifInfo, faces, files, library, owner, smartSearch, stack, tags }: GetByIdsRelations = {},
  ): Promise<AssetEntity[]> {
    const res = await this.db
      .selectFrom('assets')
      .selectAll('assets')
      .where('assets.id', '=', anyUuid(ids))
      .$if(!!exifInfo, withExif)
      .$if(!!faces, (qb) =>
        qb.select((eb) =>
          faces?.person ? withFacesAndPeople(eb, faces.withDeleted) : withFaces(eb, faces?.withDeleted),
        ),
      )
      .$if(!!files, (qb) => qb.select(withFiles))
      .$if(!!library, (qb) => qb.select(withLibrary))
      .$if(!!owner, (qb) => qb.select(withOwner))
      .$if(!!smartSearch, withSmartSearch)
      .$if(!!stack, (qb) =>
        qb
          .leftJoin('asset_stack', 'asset_stack.id', 'assets.stackId')
          .$if(!stack!.assets, (qb) => qb.select((eb) => eb.fn.toJson(eb.table('asset_stack')).as('stack')))
          .$if(!!stack!.assets, (qb) =>
            qb
              .leftJoinLateral(
                (eb) =>
                  eb
                    .selectFrom('assets as stacked')
                    .selectAll('asset_stack')
                    .select((eb) => eb.fn('array_agg', [eb.table('stacked')]).as('assets'))
                    .whereRef('stacked.stackId', '=', 'asset_stack.id')
                    .whereRef('stacked.id', '!=', 'asset_stack.primaryAssetId')
                    .where('stacked.deletedAt', 'is', null)
                    .where('stacked.isArchived', '=', false)
                    .groupBy('asset_stack.id')
                    .as('stacked_assets'),
                (join) => join.on('asset_stack.id', 'is not', null),
              )
              .select((eb) => eb.fn.toJson(eb.table('stacked_assets')).as('stack')),
          ),
      )
      .$if(!!tags, (qb) => qb.select(withTags))
      .execute();

    return res as any as AssetEntity[];
  }

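The rewritten getByIds drives all of its optional relations through `$if`. A toy, non-Kysely sketch of the pattern, just to show how optional flags map onto conditional builder transforms:

// Toy builder, not Kysely: $if applies a transform only when the flag is set,
// so the chain never branches out of method-call style.
type Builder = { selects: string[] };

const $if = (qb: Builder, condition: boolean, fn: (qb: Builder) => Builder): Builder =>
  condition ? fn(qb) : qb;

const withFiles = (qb: Builder): Builder => ({ selects: [...qb.selects, 'files'] });
const withTags = (qb: Builder): Builder => ({ selects: [...qb.selects, 'tags'] });

// Relations map 1:1 onto optional flags, mirroring GetByIdsRelations.
const build = (relations: { files?: boolean; tags?: boolean }): Builder => {
  let qb: Builder = { selects: ['assets.*'] };
  qb = $if(qb, !!relations.files, withFiles);
  qb = $if(qb, !!relations.tags, withTags);
  return qb;
};

console.log(build({ files: true }).selects); // [ 'assets.*', 'files' ]
console.log(build({}).selects);              // [ 'assets.*' ]
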
  @GenerateSql({ params: [[DummyValue.UUID]] })
  @ChunkedArray()
  getByIdsWithAllRelationsButStacks(ids: string[]) {
  getByIdsWithAllRelations(ids: string[]): Promise<AssetEntity[]> {
    return this.db
      .selectFrom('assets')
      .selectAll('assets')
@@ -315,8 +356,23 @@ export class AssetRepository {
      .select(withTags)
      .$call(withExif)
      .leftJoin('asset_stack', 'asset_stack.id', 'assets.stackId')
      .leftJoinLateral(
        (eb) =>
          eb
            .selectFrom('assets as stacked')
            .selectAll('asset_stack')
            .select((eb) => eb.fn('array_agg', [eb.table('stacked')]).as('assets'))
            .whereRef('stacked.stackId', '=', 'asset_stack.id')
            .whereRef('stacked.id', '!=', 'asset_stack.primaryAssetId')
            .where('stacked.deletedAt', 'is', null)
            .where('stacked.isArchived', '=', false)
            .groupBy('asset_stack.id')
            .as('stacked_assets'),
        (join) => join.on('asset_stack.id', 'is not', null),
      )
      .select((eb) => eb.fn.toJson(eb.table('stacked_assets')).as('stack'))
      .where('assets.id', '=', anyUuid(ids))
      .execute();
      .execute() as any as Promise<AssetEntity[]>;
  }

  @GenerateSql({ params: [DummyValue.UUID] })
@@ -336,29 +392,36 @@ export class AssetRepository {
    return assets.map((asset) => asset.deviceAssetId);
  }

  getByUserId(pagination: PaginationOptions, userId: string, options: Omit<AssetSearchOptions, 'userIds'> = {}) {
  getByUserId(
    pagination: PaginationOptions,
    userId: string,
    options: Omit<AssetSearchOptions, 'userIds'> = {},
  ): Paginated<AssetEntity> {
    return this.getAll(pagination, { ...options, userIds: [userId] });
  }

  @GenerateSql({ params: [DummyValue.UUID, DummyValue.STRING] })
  getByLibraryIdAndOriginalPath(libraryId: string, originalPath: string) {
  getByLibraryIdAndOriginalPath(libraryId: string, originalPath: string): Promise<AssetEntity | undefined> {
    return this.db
      .selectFrom('assets')
      .selectAll('assets')
      .where('libraryId', '=', asUuid(libraryId))
      .where('originalPath', '=', originalPath)
      .limit(1)
      .executeTakeFirst();
      .executeTakeFirst() as any as Promise<AssetEntity | undefined>;
  }

  async getAll(pagination: PaginationOptions, { orderDirection, ...options }: AssetSearchOptions = {}) {
  async getAll(
    pagination: PaginationOptions,
    { orderDirection, ...options }: AssetSearchOptions = {},
  ): Paginated<AssetEntity> {
    const builder = searchAssetBuilder(this.db, options)
      .select(withFiles)
      .orderBy('assets.createdAt', orderDirection ?? 'asc')
      .limit(pagination.take + 1)
      .offset(pagination.skip ?? 0);
    const items = await builder.execute();
    return paginationHelper(items, pagination.take);
    return paginationHelper(items as any as AssetEntity[], pagination.take);
  }

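getAll fetches `take + 1` rows and hands them to paginationHelper. A minimal sketch of the helper's assumed contract (the real one in src/utils/pagination wraps the result differently in places, so take this as an illustration only):

// Assumed result shape: the query over-fetches by one row, and that extra row
// is only a "has next page" signal, never returned to the caller.
interface PaginatedResult<T> {
  items: T[];
  hasNextPage: boolean;
}

const paginationHelper = <T>(rows: T[], take: number): PaginatedResult<T> => ({
  items: rows.slice(0, take),
  hasNextPage: rows.length > take,
});

const fetched = ['a', 'b', 'c']; // LIMIT take + 1 with take = 2
console.log(paginationHelper(fetched, 2)); // { items: [ 'a', 'b' ], hasNextPage: true }
console.log(paginationHelper(['a'], 2));   // { items: [ 'a' ], hasNextPage: false }
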
  /**
@@ -393,22 +456,23 @@ export class AssetRepository {
  }

  @GenerateSql({ params: [DummyValue.UUID] })
  getById(id: string, { exifInfo, faces, files, library, owner, smartSearch, stack, tags }: GetByIdsRelations = {}) {
  getById(
    id: string,
    { exifInfo, faces, files, library, owner, smartSearch, stack, tags }: GetByIdsRelations = {},
  ): Promise<AssetEntity | undefined> {
    return this.db
      .selectFrom('assets')
      .selectAll('assets')
      .where('assets.id', '=', asUuid(id))
      .$if(!!exifInfo, withExif)
      .$if(!!faces, (qb) => qb.select(faces?.person ? withFacesAndPeople : withFaces).$narrowType<{ faces: NotNull }>())
      .$if(!!faces, (qb) => qb.select(faces?.person ? withFacesAndPeople : withFaces))
      .$if(!!library, (qb) => qb.select(withLibrary))
      .$if(!!owner, (qb) => qb.select(withOwner))
      .$if(!!smartSearch, withSmartSearch)
      .$if(!!stack, (qb) =>
        qb
          .leftJoin('asset_stack', 'asset_stack.id', 'assets.stackId')
          .$if(!stack!.assets, (qb) =>
            qb.select((eb) => eb.fn.toJson(eb.table('asset_stack')).$castTo<Stack | null>().as('stack')),
          )
          .$if(!stack!.assets, (qb) => qb.select((eb) => eb.fn.toJson(eb.table('asset_stack')).as('stack')))
          .$if(!!stack!.assets, (qb) =>
            qb
              .leftJoinLateral(
@@ -425,13 +489,13 @@ export class AssetRepository {
                  .as('stacked_assets'),
                (join) => join.on('asset_stack.id', 'is not', null),
              )
              .select((eb) => eb.fn.toJson(eb.table('stacked_assets')).$castTo<Stack | null>().as('stack')),
              .select((eb) => eb.fn.toJson(eb.table('stacked_assets')).as('stack')),
          ),
      )
      .$if(!!files, (qb) => qb.select(withFiles))
      .$if(!!tags, (qb) => qb.select(withTags))
      .limit(1)
      .executeTakeFirst();
      .executeTakeFirst() as any as Promise<AssetEntity | undefined>;
  }

  @GenerateSql({ params: [[DummyValue.UUID], { deviceId: DummyValue.STRING }] })
@@ -460,7 +524,7 @@ export class AssetRepository {
      .execute();
  }

  async update(asset: Updateable<Assets> & { id: string }) {
  async update(asset: Updateable<Assets> & { id: string }): Promise<AssetEntity> {
    const value = omitBy(asset, isUndefined);
    delete value.id;
    if (!isEmpty(value)) {
@@ -470,10 +534,10 @@ export class AssetRepository {
        .selectAll('assets')
        .$call(withExif)
        .$call((qb) => qb.select(withFacesAndPeople))
        .executeTakeFirst();
        .executeTakeFirst() as Promise<AssetEntity>;
    }

    return this.getById(asset.id, { exifInfo: true, faces: { person: true } });
    return this.getById(asset.id, { exifInfo: true, faces: { person: true } }) as Promise<AssetEntity>;
  }

  async remove(asset: { id: string }): Promise<void> {
@@ -481,7 +545,7 @@ export class AssetRepository {
  }

  @GenerateSql({ params: [{ ownerId: DummyValue.UUID, libraryId: DummyValue.UUID, checksum: DummyValue.BUFFER }] })
  getByChecksum({ ownerId, libraryId, checksum }: AssetGetByChecksumOptions) {
  getByChecksum({ ownerId, libraryId, checksum }: AssetGetByChecksumOptions): Promise<AssetEntity | undefined> {
    return this.db
      .selectFrom('assets')
      .selectAll('assets')
@@ -489,17 +553,17 @@ export class AssetRepository {
      .where('checksum', '=', checksum)
      .$call((qb) => (libraryId ? qb.where('libraryId', '=', asUuid(libraryId)) : qb.where('libraryId', 'is', null)))
      .limit(1)
      .executeTakeFirst();
      .executeTakeFirst() as Promise<AssetEntity | undefined>;
  }

  @GenerateSql({ params: [DummyValue.UUID, [DummyValue.BUFFER]] })
  getByChecksums(userId: string, checksums: Buffer[]) {
  getByChecksums(userId: string, checksums: Buffer[]): Promise<AssetEntity[]> {
    return this.db
      .selectFrom('assets')
      .select(['id', 'checksum', 'deletedAt'])
      .where('ownerId', '=', asUuid(userId))
      .where('checksum', 'in', checksums)
      .execute();
      .execute() as any as Promise<AssetEntity[]>;
  }

  @GenerateSql({ params: [DummyValue.UUID, DummyValue.BUFFER] })
@@ -516,7 +580,7 @@ export class AssetRepository {
    return asset?.id;
  }

  findLivePhotoMatch(options: LivePhotoSearchOptions) {
  findLivePhotoMatch(options: LivePhotoSearchOptions): Promise<AssetEntity | undefined> {
    const { ownerId, otherAssetId, livePhotoCID, type } = options;
    return this.db
      .selectFrom('assets')
@@ -527,7 +591,7 @@ export class AssetRepository {
      .where('type', '=', type)
      .where('exif.livePhotoCID', '=', livePhotoCID)
      .limit(1)
      .executeTakeFirst();
      .executeTakeFirst() as Promise<AssetEntity | undefined>;
  }

  @GenerateSql(
@@ -536,7 +600,7 @@ export class AssetRepository {
      params: [DummyValue.PAGINATION, property],
    })),
  )
  async getWithout(pagination: PaginationOptions, property: WithoutProperty) {
  async getWithout(pagination: PaginationOptions, property: WithoutProperty): Paginated<AssetEntity> {
    const items = await this.db
      .selectFrom('assets')
      .selectAll('assets')
@@ -598,7 +662,7 @@ export class AssetRepository {
      .orderBy('createdAt')
      .execute();

    return paginationHelper(items, pagination.take);
    return paginationHelper(items as any as AssetEntity[], pagination.take);
  }

  getStatistics(ownerId: string, { isArchived, isFavorite, isTrashed }: AssetStatsOptions): Promise<AssetStats> {
@@ -617,7 +681,7 @@ export class AssetRepository {
      .executeTakeFirstOrThrow();
  }

  getRandom(userIds: string[], take: number) {
  getRandom(userIds: string[], take: number): Promise<AssetEntity[]> {
    return this.db
      .selectFrom('assets')
      .selectAll('assets')
@@ -627,7 +691,7 @@ export class AssetRepository {
      .where('deletedAt', 'is', null)
      .orderBy((eb) => eb.fn('random'))
      .limit(take)
      .execute();
      .execute() as any as Promise<AssetEntity[]>;
  }

  @GenerateSql({ params: [{ size: TimeBucketSize.MONTH }] })
@@ -680,7 +744,7 @@ export class AssetRepository {
  }

  @GenerateSql({ params: [DummyValue.TIME_BUCKET, { size: TimeBucketSize.MONTH, withStacked: true }] })
  async getTimeBucket(timeBucket: string, options: TimeBucketOptions) {
  async getTimeBucket(timeBucket: string, options: TimeBucketOptions): Promise<AssetEntity[]> {
    return this.db
      .selectFrom('assets')
      .selectAll('assets')
@@ -713,7 +777,7 @@ export class AssetRepository {
            .as('stacked_assets'),
          (join) => join.on('asset_stack.id', 'is not', null),
        )
        .select((eb) => eb.fn.toJson(eb.table('stacked_assets').$castTo<Stack | null>()).as('stack')),
        .select((eb) => eb.fn.toJson(eb.table('stacked_assets')).as('stack')),
      )
      .$if(!!options.assetType, (qb) => qb.where('assets.type', '=', options.assetType!))
      .$if(options.isDuplicate !== undefined, (qb) =>
@@ -725,11 +789,11 @@ export class AssetRepository {
      .where('assets.isVisible', '=', true)
      .where(truncatedDate(options.size), '=', timeBucket.replace(/^[+-]/, ''))
      .orderBy('assets.localDateTime', options.order ?? 'desc')
      .execute();
      .execute() as any as Promise<AssetEntity[]>;
  }

  @GenerateSql({ params: [DummyValue.UUID] })
  getDuplicates(userId: string) {
  getDuplicates(userId: string): Promise<DuplicateGroup[]> {
    return (
      this.db
        .with('duplicates', (qb) =>
@@ -746,15 +810,9 @@ export class AssetRepository {
            (join) => join.onTrue(),
          )
          .select('assets.duplicateId')
          .select((eb) =>
            eb
              .fn('jsonb_agg', [eb.table('asset')])
              .$castTo<MapAsset[]>()
              .as('assets'),
          )
          .select((eb) => eb.fn('jsonb_agg', [eb.table('asset')]).as('assets'))
          .where('assets.ownerId', '=', asUuid(userId))
          .where('assets.duplicateId', 'is not', null)
          .$narrowType<{ duplicateId: NotNull }>()
          .where('assets.deletedAt', 'is', null)
          .where('assets.isVisible', '=', true)
          .where('assets.stackId', 'is', null)
@@ -779,7 +837,7 @@ export class AssetRepository {
        .where(({ not, exists }) =>
          not(exists((eb) => eb.selectFrom('unique').whereRef('unique.duplicateId', '=', 'duplicates.duplicateId'))),
        )
        .execute()
        .execute() as any as Promise<DuplicateGroup[]>
    );
  }

@@ -823,7 +881,7 @@ export class AssetRepository {
      },
    ],
  })
  getAllForUserFullSync(options: AssetFullSyncOptions) {
  getAllForUserFullSync(options: AssetFullSyncOptions): Promise<AssetEntity[]> {
    const { ownerId, lastId, updatedUntil, limit } = options;
    return this.db
      .selectFrom('assets')
@@ -841,18 +899,18 @@ export class AssetRepository {
          .as('stacked_assets'),
        (join) => join.on('asset_stack.id', 'is not', null),
      )
      .select((eb) => eb.fn.toJson(eb.table('stacked_assets')).$castTo<Stack | null>().as('stack'))
      .select((eb) => eb.fn.toJson(eb.table('stacked_assets')).as('stack'))
      .where('assets.ownerId', '=', asUuid(ownerId))
      .where('assets.isVisible', '=', true)
      .where('assets.updatedAt', '<=', updatedUntil)
      .$if(!!lastId, (qb) => qb.where('assets.id', '>', lastId!))
      .orderBy('assets.id')
      .limit(limit)
      .execute();
      .execute() as any as Promise<AssetEntity[]>;
  }

  @GenerateSql({ params: [{ userIds: [DummyValue.UUID], updatedAfter: DummyValue.DATE, limit: 100 }] })
  async getChangedDeltaSync(options: AssetDeltaSyncOptions) {
  async getChangedDeltaSync(options: AssetDeltaSyncOptions): Promise<AssetEntity[]> {
    return this.db
      .selectFrom('assets')
      .selectAll('assets')
@@ -869,12 +927,12 @@ export class AssetRepository {
          .as('stacked_assets'),
        (join) => join.on('asset_stack.id', 'is not', null),
      )
      .select((eb) => eb.fn.toJson(eb.table('stacked_assets').$castTo<Stack | null>()).as('stack'))
      .select((eb) => eb.fn.toJson(eb.table('stacked_assets')).as('stack'))
      .where('assets.ownerId', '=', anyUuid(options.userIds))
      .where('assets.isVisible', '=', true)
      .where('assets.updatedAt', '>', options.updatedAfter)
      .limit(options.limit)
      .execute();
      .execute() as any as Promise<AssetEntity[]>;
  }

  async upsertFile(file: Pick<Insertable<AssetFiles>, 'assetId' | 'path' | 'type'>): Promise<void> {

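getDuplicates above relies on `jsonb_agg` plus a group on duplicateId to fold rows into DuplicateGroup values. A pure-TypeScript analogue of that aggregation, with toy data:

// Toy data; the real rows carry full asset columns, not just ids.
type Row = { id: string; duplicateId: string };
type Group = { duplicateId: string; assets: Row[] };

const rows: Row[] = [
  { id: 'a', duplicateId: 'd1' },
  { id: 'b', duplicateId: 'd1' },
  { id: 'c', duplicateId: 'd2' },
];

const groups = new Map<string, Group>();
for (const row of rows) {
  const group = groups.get(row.duplicateId) ?? { duplicateId: row.duplicateId, assets: [] };
  group.assets.push(row); // jsonb_agg(asset)
  groups.set(row.duplicateId, group);
}
console.log([...groups.values()]);
// [ { duplicateId: 'd1', assets: [ ... two rows ... ] }, { duplicateId: 'd2', assets: [ ... ] } ]
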
@@ -2,6 +2,7 @@ import { Injectable } from '@nestjs/common';
import AsyncLock from 'async-lock';
import { FileMigrationProvider, Kysely, Migrator, sql, Transaction } from 'kysely';
import { InjectKysely } from 'nestjs-kysely';
import { existsSync } from 'node:fs';
import { readdir } from 'node:fs/promises';
import { join } from 'node:path';
import semver from 'semver';
@@ -196,54 +197,62 @@ export class DatabaseRepository {
    return dimSize;
  }

  async runMigrations(options?: { transaction?: 'all' | 'none' | 'each' }): Promise<void> {
  async runMigrations(options?: { transaction?: 'all' | 'none' | 'each'; only?: 'kysely' | 'typeorm' }): Promise<void> {
    const { database } = this.configRepository.getEnv();

    this.logger.log('Running migrations, this may take a while');

    const tableExists = sql<{ result: string | null }>`select to_regclass('migrations') as "result"`;
    const { rows } = await tableExists.execute(this.db);
    const hasTypeOrmMigrations = !!rows[0]?.result;
    if (hasTypeOrmMigrations) {
      this.logger.debug('Running typeorm migrations');
    if (options?.only !== 'kysely') {
      const dataSource = new DataSource(database.config.typeorm);

      this.logger.log('Running migrations, this may take a while');

      this.logger.debug('Running typeorm migrations');

      await dataSource.initialize();
      await dataSource.runMigrations(options);
      await dataSource.destroy();

      this.logger.debug('Finished running typeorm migrations');
    }

    this.logger.debug('Running kysely migrations');
    const migrator = new Migrator({
      db: this.db,
      migrationLockTableName: 'kysely_migrations_lock',
      migrationTableName: 'kysely_migrations',
      provider: new FileMigrationProvider({
        fs: { readdir },
        path: { join },
        // eslint-disable-next-line unicorn/prefer-module
        migrationFolder: join(__dirname, '..', 'schema/migrations'),
      }),
    });
    if (options?.only !== 'typeorm') {
      // eslint-disable-next-line unicorn/prefer-module
      const migrationFolder = join(__dirname, '..', 'schema/migrations');

    const { error, results } = await migrator.migrateToLatest();

    for (const result of results ?? []) {
      if (result.status === 'Success') {
        this.logger.log(`Migration "${result.migrationName}" succeeded`);
      // TODO remove after we have at least one kysely migration
      if (!existsSync(migrationFolder)) {
        return;
      }

      if (result.status === 'Error') {
        this.logger.warn(`Migration "${result.migrationName}" failed`);
      this.logger.debug('Running kysely migrations');
      const migrator = new Migrator({
        db: this.db,
        migrationLockTableName: 'kysely_migrations_lock',
        migrationTableName: 'kysely_migrations',
        provider: new FileMigrationProvider({
          fs: { readdir },
          path: { join },
          migrationFolder,
        }),
      });

      const { error, results } = await migrator.migrateToLatest();

      for (const result of results ?? []) {
        if (result.status === 'Success') {
          this.logger.log(`Migration "${result.migrationName}" succeeded`);
        }

        if (result.status === 'Error') {
          this.logger.warn(`Migration "${result.migrationName}" failed`);
        }
      }
    }

    if (error) {
      this.logger.error(`Kysely migrations failed: ${error}`);
      throw error;
    }
    if (error) {
      this.logger.error(`Kysely migrations failed: ${error}`);
      throw error;
    }

    this.logger.debug('Finished running kysely migrations');
    this.logger.debug('Finished running kysely migrations');
  }
  }

  async withLock<R>(lock: DatabaseLock, callback: () => Promise<R>): Promise<R> {

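The reworked runMigrations gates each engine behind the new `only` option. A control-flow sketch with both engines stubbed out (names follow the diff; everything else is elided):

// Sketch only: the real method builds a DataSource and a Migrator; here the
// two phases are stand-in functions so the gating itself is visible.
type RunOptions = { only?: 'kysely' | 'typeorm' };

const runTypeOrmMigrations = async () => console.log('typeorm migrations');
const runKyselyMigrations = async () => console.log('kysely migrations');

const runMigrations = async (options: RunOptions = {}): Promise<void> => {
  if (options.only !== 'kysely') {
    await runTypeOrmMigrations();
  }
  if (options.only !== 'typeorm') {
    await runKyselyMigrations();
  }
};

void (async () => {
  await runMigrations();                   // both engines, TypeORM first
  await runMigrations({ only: 'kysely' }); // Kysely only
})();
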
@@ -74,21 +74,11 @@ export class MyConsoleLogger extends ConsoleLogger {
export class LoggingRepository {
  private logger: MyConsoleLogger;

  constructor(
    @Inject(ClsService) cls: ClsService | undefined,
    @Inject(ConfigRepository) configRepository: ConfigRepository | undefined,
  ) {
    let noColor = false;
    if (configRepository) {
      noColor = configRepository.getEnv().noColor;
    }
  constructor(@Inject(ClsService) cls: ClsService | undefined, configRepository: ConfigRepository) {
    const { noColor } = configRepository.getEnv();
    this.logger = new MyConsoleLogger(cls, { context: LoggingRepository.name, color: !noColor });
  }

  static create() {
    return new LoggingRepository(undefined, undefined);
  }

  setAppName(name: string): void {
    appName = name.charAt(0).toUpperCase() + name.slice(1);
  }

@@ -1,12 +1,13 @@
import { Injectable } from '@nestjs/common';
import { Kysely, OrderByDirection, Selectable, sql } from 'kysely';
import { Kysely, OrderByDirection, sql } from 'kysely';
import { InjectKysely } from 'nestjs-kysely';
import { randomUUID } from 'node:crypto';
import { DB, Exif } from 'src/db';
import { DB } from 'src/db';
import { DummyValue, GenerateSql } from 'src/decorators';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { AssetEntity, searchAssetBuilder } from 'src/entities/asset.entity';
import { AssetStatus, AssetType } from 'src/enum';
import { anyUuid, asUuid, searchAssetBuilder } from 'src/utils/database';
import { anyUuid, asUuid } from 'src/utils/database';
import { Paginated } from 'src/utils/pagination';
import { isValidInteger } from 'src/validation';

export interface SearchResult<T> {
@@ -215,7 +216,7 @@ export class SearchRepository {
      },
    ],
  })
  async searchMetadata(pagination: SearchPaginationOptions, options: AssetSearchOptions) {
  async searchMetadata(pagination: SearchPaginationOptions, options: AssetSearchOptions): Paginated<AssetEntity> {
    const orderDirection = (options.orderDirection?.toLowerCase() || 'desc') as OrderByDirection;
    const items = await searchAssetBuilder(this.db, options)
      .orderBy('assets.fileCreatedAt', orderDirection)
@@ -224,7 +225,7 @@ export class SearchRepository {
      .execute();
    const hasNextPage = items.length > pagination.size;
    items.splice(pagination.size);
    return { items, hasNextPage };
    return { items: items as any as AssetEntity[], hasNextPage };
  }

  @GenerateSql({
@@ -239,7 +240,7 @@ export class SearchRepository {
      },
    ],
  })
  async searchRandom(size: number, options: AssetSearchOptions) {
  async searchRandom(size: number, options: AssetSearchOptions): Promise<AssetEntity[]> {
    const uuid = randomUUID();
    const builder = searchAssetBuilder(this.db, options);
    const lessThan = builder
@@ -250,8 +251,8 @@ export class SearchRepository {
      .where('assets.id', '>', uuid)
      .orderBy(sql`random()`)
      .limit(size);
    const { rows } = await sql<MapAsset>`${lessThan} union all ${greaterThan} limit ${size}`.execute(this.db);
    return rows;
    const { rows } = await sql`${lessThan} union all ${greaterThan} limit ${size}`.execute(this.db);
    return rows as any as AssetEntity[];
  }

  @GenerateSql({
@@ -267,17 +268,17 @@ export class SearchRepository {
      },
    ],
  })
  async searchSmart(pagination: SearchPaginationOptions, options: SmartSearchOptions) {
  async searchSmart(pagination: SearchPaginationOptions, options: SmartSearchOptions): Paginated<AssetEntity> {
    if (!isValidInteger(pagination.size, { min: 1, max: 1000 })) {
      throw new Error(`Invalid value for 'size': ${pagination.size}`);
    }

    const items = await searchAssetBuilder(this.db, options)
    const items = (await searchAssetBuilder(this.db, options)
      .innerJoin('smart_search', 'assets.id', 'smart_search.assetId')
      .orderBy(sql`smart_search.embedding <=> ${options.embedding}`)
      .limit(pagination.size + 1)
      .offset((pagination.page - 1) * pagination.size)
      .execute();
      .execute()) as any as AssetEntity[];

    const hasNextPage = items.length > pagination.size;
    items.splice(pagination.size);
@@ -391,7 +392,7 @@ export class SearchRepository {
  }

  @GenerateSql({ params: [[DummyValue.UUID]] })
  getAssetsByCity(userIds: string[]) {
  getAssetsByCity(userIds: string[]): Promise<AssetEntity[]> {
    return this.db
      .withRecursive('cte', (qb) => {
        const base = qb
@@ -433,14 +434,9 @@ export class SearchRepository {
      .innerJoin('exif', 'assets.id', 'exif.assetId')
      .innerJoin('cte', 'assets.id', 'cte.assetId')
      .selectAll('assets')
      .select((eb) =>
        eb
          .fn('to_jsonb', [eb.table('exif')])
          .$castTo<Selectable<Exif>>()
          .as('exifInfo'),
      )
      .select((eb) => eb.fn('to_jsonb', [eb.table('exif')]).as('exifInfo'))
      .orderBy('exif.city')
      .execute();
      .execute() as any as Promise<AssetEntity[]>;
  }

  async upsert(assetId: string, embedding: string): Promise<void> {

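searchRandom splits the keyspace at a random uuid and stitches the two halves back together with the sql tag. A compile-only sketch of that composition (DummyDriver dialect, hypothetical two-column schema, fixed pivot standing in for randomUUID()):

import {
  DummyDriver,
  Kysely,
  PostgresAdapter,
  PostgresIntrospector,
  PostgresQueryCompiler,
  sql,
} from 'kysely';

// Hypothetical schema; no database is needed, the dialect only compiles SQL.
interface DB {
  assets: { id: string; ownerId: string };
}

const db = new Kysely<DB>({
  dialect: {
    createAdapter: () => new PostgresAdapter(),
    createDriver: () => new DummyDriver(),
    createIntrospector: (inner) => new PostgresIntrospector(inner),
    createQueryCompiler: () => new PostgresQueryCompiler(),
  },
});

// Any fixed uuid keeps the sketch deterministic.
const pivot = '9a7b6c5d-0000-4000-8000-000000000000';
const base = db.selectFrom('assets').selectAll();

// Query builders interpolate into the sql tag as sub-expressions, which is
// what lets the two halves be glued together with `union all`.
const lessThan = base.where('id', '<', pivot).orderBy(sql`random()`);
const greaterThan = base.where('id', '>', pivot).orderBy(sql`random()`);
const union = sql`${lessThan} union all ${greaterThan} limit ${10}`;

console.log(union.compile(db).sql);
// roughly: (select * from "assets" where "id" < $1 order by random())
//          union all (select * from "assets" where "id" > $2 order by random()) limit $3
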
@@ -1,12 +1,12 @@
import { Injectable } from '@nestjs/common';
import { Insertable, Kysely, NotNull, sql, Updateable } from 'kysely';
import { Insertable, Kysely, sql, Updateable } from 'kysely';
import { jsonObjectFrom } from 'kysely/helpers/postgres';
import _ from 'lodash';
import { InjectKysely } from 'nestjs-kysely';
import { Album, columns } from 'src/database';
import { columns } from 'src/database';
import { DB, SharedLinks } from 'src/db';
import { DummyValue, GenerateSql } from 'src/decorators';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { SharedLinkEntity } from 'src/entities/shared-link.entity';
import { SharedLinkType } from 'src/enum';

export type SharedLinkSearchOptions = {
@@ -19,7 +19,7 @@ export class SharedLinkRepository {
  constructor(@InjectKysely() private db: Kysely<DB>) {}

  @GenerateSql({ params: [DummyValue.UUID, DummyValue.UUID] })
  get(userId: string, id: string) {
  get(userId: string, id: string): Promise<SharedLinkEntity | undefined> {
    return this.db
      .selectFrom('shared_links')
      .selectAll('shared_links')
@@ -87,23 +87,18 @@ export class SharedLinkRepository {
          .as('album'),
        (join) => join.onTrue(),
      )
      .select((eb) =>
        eb.fn
          .coalesce(eb.fn.jsonAgg('a').filterWhere('a.id', 'is not', null), sql`'[]'`)
          .$castTo<MapAsset[]>()
          .as('assets'),
      )
      .select((eb) => eb.fn.coalesce(eb.fn.jsonAgg('a').filterWhere('a.id', 'is not', null), sql`'[]'`).as('assets'))
      .groupBy(['shared_links.id', sql`"album".*`])
      .select((eb) => eb.fn.toJson('album').$castTo<Album | null>().as('album'))
      .select((eb) => eb.fn.toJson('album').as('album'))
      .where('shared_links.id', '=', id)
      .where('shared_links.userId', '=', userId)
      .where((eb) => eb.or([eb('shared_links.type', '=', SharedLinkType.INDIVIDUAL), eb('album.id', 'is not', null)]))
      .orderBy('shared_links.createdAt', 'desc')
      .executeTakeFirst();
      .executeTakeFirst() as Promise<SharedLinkEntity | undefined>;
  }

  @GenerateSql({ params: [{ userId: DummyValue.UUID, albumId: DummyValue.UUID }] })
  getAll({ userId, albumId }: SharedLinkSearchOptions) {
  getAll({ userId, albumId }: SharedLinkSearchOptions): Promise<SharedLinkEntity[]> {
    return this.db
      .selectFrom('shared_links')
      .selectAll('shared_links')
@@ -120,7 +115,6 @@ export class SharedLinkRepository {
        (join) => join.onTrue(),
      )
      .select('assets.assets')
      .$narrowType<{ assets: NotNull }>()
      .leftJoinLateral(
        (eb) =>
          eb
@@ -158,12 +152,12 @@ export class SharedLinkRepository {
          .as('album'),
        (join) => join.onTrue(),
      )
      .select((eb) => eb.fn.toJson('album').$castTo<Album | null>().as('album'))
      .select((eb) => eb.fn.toJson('album').as('album'))
      .where((eb) => eb.or([eb('shared_links.type', '=', SharedLinkType.INDIVIDUAL), eb('album.id', 'is not', null)]))
      .$if(!!albumId, (eb) => eb.where('shared_links.albumId', '=', albumId!))
      .orderBy('shared_links.createdAt', 'desc')
      .distinctOn(['shared_links.createdAt'])
      .execute();
      .execute() as unknown as Promise<SharedLinkEntity[]>;
  }

  @GenerateSql({ params: [DummyValue.BUFFER] })
@@ -183,7 +177,7 @@ export class SharedLinkRepository {
      .executeTakeFirst();
  }

  async create(entity: Insertable<SharedLinks> & { assetIds?: string[] }) {
  async create(entity: Insertable<SharedLinks> & { assetIds?: string[] }): Promise<SharedLinkEntity> {
    const { id } = await this.db
      .insertInto('shared_links')
      .values(_.omit(entity, 'assetIds'))
@@ -200,7 +194,7 @@ export class SharedLinkRepository {
    return this.getSharedLinks(id);
  }

  async update(entity: Updateable<SharedLinks> & { id: string; assetIds?: string[] }) {
  async update(entity: Updateable<SharedLinks> & { id: string; assetIds?: string[] }): Promise<SharedLinkEntity> {
    const { id } = await this.db
      .updateTable('shared_links')
      .set(_.omit(entity, 'assets', 'album', 'assetIds'))
@@ -218,8 +212,8 @@ export class SharedLinkRepository {
    return this.getSharedLinks(id);
  }

  async remove(id: string): Promise<void> {
    await this.db.deleteFrom('shared_links').where('shared_links.id', '=', id).execute();
  async remove(entity: SharedLinkEntity): Promise<void> {
    await this.db.deleteFrom('shared_links').where('shared_links.id', '=', entity.id).execute();
  }

  private getSharedLinks(id: string) {
@@ -242,12 +236,9 @@ export class SharedLinkRepository {
        (join) => join.onTrue(),
      )
      .select((eb) =>
        eb.fn
          .coalesce(eb.fn.jsonAgg('assets').filterWhere('assets.id', 'is not', null), sql`'[]'`)
          .$castTo<MapAsset[]>()
          .as('assets'),
        eb.fn.coalesce(eb.fn.jsonAgg('assets').filterWhere('assets.id', 'is not', null), sql`'[]'`).as('assets'),
      )
      .groupBy('shared_links.id')
      .executeTakeFirstOrThrow();
      .executeTakeFirstOrThrow() as Promise<SharedLinkEntity>;
  }
}

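The jsonAgg().filterWhere(...) wrapped in coalesce(..., '[]') above keeps a link's assets as an array even when the left join matches nothing. A pure-TypeScript analogue of that aggregation:

// Toy rows: a left join emits one row with a null child when nothing matches.
type Row = { linkId: string; assetId: string | null };
const rows: Row[] = [
  { linkId: 'L1', assetId: 'a' },
  { linkId: 'L1', assetId: 'b' },
  { linkId: 'L2', assetId: null }, // left join matched nothing
];

const grouped = new Map<string, string[]>();
for (const { linkId, assetId } of rows) {
  const assets = grouped.get(linkId) ?? [];
  if (assetId !== null) {
    assets.push(assetId); // filterWhere('id', 'is not', null)
  }
  grouped.set(linkId, assets); // coalesce(..., '[]'): always an array, never null
}
console.log(grouped); // Map { 'L1' => [ 'a', 'b' ], 'L2' => [] }
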
@@ -5,6 +5,7 @@ import { InjectKysely } from 'nestjs-kysely';
import { columns } from 'src/database';
import { AssetStack, DB } from 'src/db';
import { DummyValue, GenerateSql } from 'src/decorators';
import { AssetEntity } from 'src/entities/asset.entity';
import { asUuid } from 'src/utils/database';

export interface StackSearch {
@@ -35,7 +36,9 @@ const withAssets = (eb: ExpressionBuilder<DB, 'asset_stack'>, withTags = false)
      .select((eb) => eb.fn.toJson('exifInfo').as('exifInfo'))
      .where('assets.deletedAt', 'is', null)
      .whereRef('assets.stackId', '=', 'asset_stack.id'),
  ).as('assets');
  )
    .$castTo<AssetEntity[]>()
    .as('assets');
};

@Injectable()

@@ -2,7 +2,8 @@ import { Kysely } from 'kysely';
import { InjectKysely } from 'nestjs-kysely';
import { DB } from 'src/db';
import { DummyValue, GenerateSql } from 'src/decorators';
import { asUuid, withExif } from 'src/utils/database';
import { withExif } from 'src/entities/asset.entity';
import { asUuid } from 'src/utils/database';

export class ViewRepository {
  constructor(@InjectKysely() private db: Kysely<DB>) {}

@@ -1,410 +0,0 @@
import { Kysely, sql } from 'kysely';
import { DatabaseExtension } from 'src/enum';
import { ConfigRepository } from 'src/repositories/config.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';

const vectorExtension = new ConfigRepository().getEnv().database.vectorExtension;
const lastMigrationSql = sql<{ name: string }>`SELECT "name" FROM "migrations" ORDER BY "timestamp" DESC LIMIT 1;`;
const tableExists = sql<{ result: string | null }>`select to_regclass('migrations') as "result"`;
const logger = LoggingRepository.create();

export async function up(db: Kysely<any>): Promise<void> {
  const { rows } = await tableExists.execute(db);
  const hasTypeOrmMigrations = !!rows[0]?.result;
  if (hasTypeOrmMigrations) {
    const {
      rows: [lastMigration],
    } = await lastMigrationSql.execute(db);
    if (lastMigration?.name !== 'AddMissingIndex1744910873956') {
      throw new Error(
        'Invalid upgrade path. For more information, see https://immich.app/errors#typeorm-upgrade',
      );
    }
    logger.log('Database has up to date TypeORM migrations, skipping initial Kysely migration');
    return;
  }

  await sql`CREATE EXTENSION IF NOT EXISTS "uuid-ossp";`.execute(db);
  await sql`CREATE EXTENSION IF NOT EXISTS "unaccent";`.execute(db);
  await sql`CREATE EXTENSION IF NOT EXISTS "cube";`.execute(db);
  await sql`CREATE EXTENSION IF NOT EXISTS "earthdistance";`.execute(db);
  await sql`CREATE EXTENSION IF NOT EXISTS "pg_trgm";`.execute(db);
  await sql`CREATE EXTENSION IF NOT EXISTS "vectors";`.execute(db);
  await sql`CREATE OR REPLACE FUNCTION immich_uuid_v7(p_timestamp timestamp with time zone default clock_timestamp())
    RETURNS uuid
    VOLATILE LANGUAGE SQL
    AS $$
      select encode(
        set_bit(
          set_bit(
            overlay(uuid_send(gen_random_uuid())
              placing substring(int8send(floor(extract(epoch from p_timestamp) * 1000)::bigint) from 3)
              from 1 for 6
            ),
            52, 1
          ),
          53, 1
        ),
      'hex')::uuid;
    $$;`.execute(db);
  await sql`CREATE OR REPLACE FUNCTION updated_at()
    RETURNS TRIGGER
    LANGUAGE PLPGSQL
    AS $$
    DECLARE
      clock_timestamp TIMESTAMP := clock_timestamp();
    BEGIN
      new."updatedAt" = clock_timestamp;
      new."updateId" = immich_uuid_v7(clock_timestamp);
      return new;
    END;
    $$;`.execute(db);
  await sql`CREATE OR REPLACE FUNCTION f_concat_ws(text, text[])
    RETURNS text
    PARALLEL SAFE IMMUTABLE LANGUAGE SQL
    AS $$SELECT array_to_string($2, $1)$$;`.execute(db);
  await sql`CREATE OR REPLACE FUNCTION f_unaccent(text)
    RETURNS text
    PARALLEL SAFE STRICT IMMUTABLE LANGUAGE SQL
    RETURN unaccent('unaccent', $1)`.execute(db);
  await sql`CREATE OR REPLACE FUNCTION ll_to_earth_public(latitude double precision, longitude double precision)
    RETURNS public.earth
    PARALLEL SAFE STRICT IMMUTABLE LANGUAGE SQL
    AS $$
      SELECT public.cube(public.cube(public.cube(public.earth()*cos(radians(latitude))*cos(radians(longitude))),public.earth()*cos(radians(latitude))*sin(radians(longitude))),public.earth()*sin(radians(latitude)))::public.earth
    $$;`.execute(db);
  await sql`CREATE OR REPLACE FUNCTION users_delete_audit()
    RETURNS TRIGGER
    LANGUAGE PLPGSQL
    AS $$
    BEGIN
      INSERT INTO users_audit ("userId")
      SELECT "id"
      FROM OLD;
      RETURN NULL;
    END;
    $$;`.execute(db);
  await sql`CREATE OR REPLACE FUNCTION partners_delete_audit()
    RETURNS TRIGGER
    LANGUAGE PLPGSQL
    AS $$
    BEGIN
      INSERT INTO partners_audit ("sharedById", "sharedWithId")
      SELECT "sharedById", "sharedWithId"
      FROM OLD;
      RETURN NULL;
    END;
    $$;`.execute(db);
  await sql`CREATE OR REPLACE FUNCTION assets_delete_audit()
    RETURNS TRIGGER
    LANGUAGE PLPGSQL
    AS $$
    BEGIN
      INSERT INTO assets_audit ("assetId", "ownerId")
      SELECT "id", "ownerId"
      FROM OLD;
      RETURN NULL;
    END;
    $$;`.execute(db);
  if (vectorExtension === DatabaseExtension.VECTORS) {
    await sql`SET search_path TO "$user", public, vectors`.execute(db);
    await sql`SET vectors.pgvector_compatibility=on`.execute(db);
  }
  await sql`CREATE TYPE "assets_status_enum" AS ENUM ('active','trashed','deleted');`.execute(db);
  await sql`CREATE TYPE "sourcetype" AS ENUM ('machine-learning','exif','manual');`.execute(db);
await sql`CREATE TABLE "users" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "email" character varying NOT NULL, "password" character varying NOT NULL DEFAULT '', "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "profileImagePath" character varying NOT NULL DEFAULT '', "isAdmin" boolean NOT NULL DEFAULT false, "shouldChangePassword" boolean NOT NULL DEFAULT true, "deletedAt" timestamp with time zone, "oauthId" character varying NOT NULL DEFAULT '', "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "storageLabel" character varying, "name" character varying NOT NULL DEFAULT '', "quotaSizeInBytes" bigint, "quotaUsageInBytes" bigint NOT NULL DEFAULT 0, "status" character varying NOT NULL DEFAULT 'active', "profileChangedAt" timestamp with time zone NOT NULL DEFAULT now(), "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
await sql`CREATE TABLE "libraries" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "name" character varying NOT NULL, "ownerId" uuid NOT NULL, "importPaths" text[] NOT NULL, "exclusionPatterns" text[] NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "deletedAt" timestamp with time zone, "refreshedAt" timestamp with time zone, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
await sql`CREATE TABLE "asset_stack" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "primaryAssetId" uuid NOT NULL, "ownerId" uuid NOT NULL);`.execute(db);
await sql`CREATE TABLE "assets" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "deviceAssetId" character varying NOT NULL, "ownerId" uuid NOT NULL, "deviceId" character varying NOT NULL, "type" character varying NOT NULL, "originalPath" character varying NOT NULL, "fileCreatedAt" timestamp with time zone NOT NULL, "fileModifiedAt" timestamp with time zone NOT NULL, "isFavorite" boolean NOT NULL DEFAULT false, "duration" character varying, "encodedVideoPath" character varying DEFAULT '', "checksum" bytea NOT NULL, "isVisible" boolean NOT NULL DEFAULT true, "livePhotoVideoId" uuid, "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "isArchived" boolean NOT NULL DEFAULT false, "originalFileName" character varying NOT NULL, "sidecarPath" character varying, "thumbhash" bytea, "isOffline" boolean NOT NULL DEFAULT false, "libraryId" uuid, "isExternal" boolean NOT NULL DEFAULT false, "deletedAt" timestamp with time zone, "localDateTime" timestamp with time zone NOT NULL, "stackId" uuid, "duplicateId" uuid, "status" assets_status_enum NOT NULL DEFAULT 'active', "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
await sql`CREATE TABLE "albums" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "ownerId" uuid NOT NULL, "albumName" character varying NOT NULL DEFAULT 'Untitled Album', "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "albumThumbnailAssetId" uuid, "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "description" text NOT NULL DEFAULT '', "deletedAt" timestamp with time zone, "isActivityEnabled" boolean NOT NULL DEFAULT true, "order" character varying NOT NULL DEFAULT 'desc', "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
await sql`COMMENT ON COLUMN "albums"."albumThumbnailAssetId" IS 'Asset ID to be used as thumbnail';`.execute(db);
await sql`CREATE TABLE "activity" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "albumId" uuid NOT NULL, "userId" uuid NOT NULL, "assetId" uuid, "comment" text, "isLiked" boolean NOT NULL DEFAULT false, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
await sql`CREATE TABLE "albums_assets_assets" ("albumsId" uuid NOT NULL, "assetsId" uuid NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now());`.execute(db);
await sql`CREATE TABLE "albums_shared_users_users" ("albumsId" uuid NOT NULL, "usersId" uuid NOT NULL, "role" character varying NOT NULL DEFAULT 'editor');`.execute(db);
await sql`CREATE TABLE "api_keys" ("name" character varying NOT NULL, "key" character varying NOT NULL, "userId" uuid NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "id" uuid NOT NULL DEFAULT uuid_generate_v4(), "permissions" character varying[] NOT NULL, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
await sql`CREATE TABLE "assets_audit" ("id" uuid NOT NULL DEFAULT immich_uuid_v7(), "assetId" uuid NOT NULL, "ownerId" uuid NOT NULL, "deletedAt" timestamp with time zone NOT NULL DEFAULT clock_timestamp());`.execute(db);
await sql`CREATE TABLE "person" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "ownerId" uuid NOT NULL, "name" character varying NOT NULL DEFAULT '', "thumbnailPath" character varying NOT NULL DEFAULT '', "isHidden" boolean NOT NULL DEFAULT false, "birthDate" date, "faceAssetId" uuid, "isFavorite" boolean NOT NULL DEFAULT false, "color" character varying, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
await sql`CREATE TABLE "asset_faces" ("assetId" uuid NOT NULL, "personId" uuid, "imageWidth" integer NOT NULL DEFAULT 0, "imageHeight" integer NOT NULL DEFAULT 0, "boundingBoxX1" integer NOT NULL DEFAULT 0, "boundingBoxY1" integer NOT NULL DEFAULT 0, "boundingBoxX2" integer NOT NULL DEFAULT 0, "boundingBoxY2" integer NOT NULL DEFAULT 0, "id" uuid NOT NULL DEFAULT uuid_generate_v4(), "sourceType" sourcetype NOT NULL DEFAULT 'machine-learning', "deletedAt" timestamp with time zone);`.execute(db);
await sql`CREATE TABLE "asset_files" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "assetId" uuid NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "type" character varying NOT NULL, "path" character varying NOT NULL, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
await sql`CREATE TABLE "asset_job_status" ("assetId" uuid NOT NULL, "facesRecognizedAt" timestamp with time zone, "metadataExtractedAt" timestamp with time zone, "duplicatesDetectedAt" timestamp with time zone, "previewAt" timestamp with time zone, "thumbnailAt" timestamp with time zone);`.execute(db);
await sql`CREATE TABLE "audit" ("id" serial NOT NULL, "entityType" character varying NOT NULL, "entityId" uuid NOT NULL, "action" character varying NOT NULL, "ownerId" uuid NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now());`.execute(db);
await sql`CREATE TABLE "exif" ("assetId" uuid NOT NULL, "make" character varying, "model" character varying, "exifImageWidth" integer, "exifImageHeight" integer, "fileSizeInByte" bigint, "orientation" character varying, "dateTimeOriginal" timestamp with time zone, "modifyDate" timestamp with time zone, "lensModel" character varying, "fNumber" double precision, "focalLength" double precision, "iso" integer, "latitude" double precision, "longitude" double precision, "city" character varying, "state" character varying, "country" character varying, "description" text NOT NULL DEFAULT '', "fps" double precision, "exposureTime" character varying, "livePhotoCID" character varying, "timeZone" character varying, "projectionType" character varying, "profileDescription" character varying, "colorspace" character varying, "bitsPerSample" integer, "autoStackId" character varying, "rating" integer, "updatedAt" timestamp with time zone NOT NULL DEFAULT clock_timestamp(), "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
await sql`CREATE TABLE "face_search" ("faceId" uuid NOT NULL, "embedding" vector(512) NOT NULL);`.execute(db);
await sql`CREATE TABLE "geodata_places" ("id" integer NOT NULL, "name" character varying(200) NOT NULL, "longitude" double precision NOT NULL, "latitude" double precision NOT NULL, "countryCode" character(2) NOT NULL, "admin1Code" character varying(20), "admin2Code" character varying(80), "modificationDate" date NOT NULL, "admin1Name" character varying, "admin2Name" character varying, "alternateNames" character varying);`.execute(db);
await sql`CREATE TABLE "memories" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "deletedAt" timestamp with time zone, "ownerId" uuid NOT NULL, "type" character varying NOT NULL, "data" jsonb NOT NULL, "isSaved" boolean NOT NULL DEFAULT false, "memoryAt" timestamp with time zone NOT NULL, "seenAt" timestamp with time zone, "showAt" timestamp with time zone, "hideAt" timestamp with time zone, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
await sql`CREATE TABLE "memories_assets_assets" ("memoriesId" uuid NOT NULL, "assetsId" uuid NOT NULL);`.execute(db);
await sql`CREATE TABLE "move_history" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "entityId" uuid NOT NULL, "pathType" character varying NOT NULL, "oldPath" character varying NOT NULL, "newPath" character varying NOT NULL);`.execute(db);
await sql`CREATE TABLE "naturalearth_countries" ("id" integer NOT NULL GENERATED ALWAYS AS IDENTITY, "admin" character varying(50) NOT NULL, "admin_a3" character varying(3) NOT NULL, "type" character varying(50) NOT NULL, "coordinates" polygon NOT NULL);`.execute(db);
await sql`CREATE TABLE "partners_audit" ("id" uuid NOT NULL DEFAULT immich_uuid_v7(), "sharedById" uuid NOT NULL, "sharedWithId" uuid NOT NULL, "deletedAt" timestamp with time zone NOT NULL DEFAULT clock_timestamp());`.execute(db);
await sql`CREATE TABLE "partners" ("sharedById" uuid NOT NULL, "sharedWithId" uuid NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "inTimeline" boolean NOT NULL DEFAULT false, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
await sql`CREATE TABLE "sessions" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "token" character varying NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "userId" uuid NOT NULL, "deviceType" character varying NOT NULL DEFAULT '', "deviceOS" character varying NOT NULL DEFAULT '', "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
await sql`CREATE TABLE "shared_links" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "description" character varying, "userId" uuid NOT NULL, "key" bytea NOT NULL, "type" character varying NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "expiresAt" timestamp with time zone, "allowUpload" boolean NOT NULL DEFAULT false, "albumId" uuid, "allowDownload" boolean NOT NULL DEFAULT true, "showExif" boolean NOT NULL DEFAULT true, "password" character varying);`.execute(db);
await sql`CREATE TABLE "shared_link__asset" ("assetsId" uuid NOT NULL, "sharedLinksId" uuid NOT NULL);`.execute(db);
await sql`CREATE TABLE "smart_search" ("assetId" uuid NOT NULL, "embedding" vector(512) NOT NULL);`.execute(db);
await sql`ALTER TABLE "smart_search" ALTER COLUMN "embedding" SET STORAGE EXTERNAL;`.execute(db);
await sql`CREATE TABLE "session_sync_checkpoints" ("sessionId" uuid NOT NULL, "type" character varying NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "ack" character varying NOT NULL, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
await sql`CREATE TABLE "system_metadata" ("key" character varying NOT NULL, "value" jsonb NOT NULL);`.execute(db);
await sql`CREATE TABLE "tags" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "userId" uuid NOT NULL, "value" character varying NOT NULL, "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "updatedAt" timestamp with time zone NOT NULL DEFAULT now(), "color" character varying, "parentId" uuid, "updateId" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
await sql`CREATE TABLE "tag_asset" ("assetsId" uuid NOT NULL, "tagsId" uuid NOT NULL);`.execute(db);
await sql`CREATE TABLE "tags_closure" ("id_ancestor" uuid NOT NULL, "id_descendant" uuid NOT NULL);`.execute(db);
await sql`CREATE TABLE "users_audit" ("userId" uuid NOT NULL, "deletedAt" timestamp with time zone NOT NULL DEFAULT clock_timestamp(), "id" uuid NOT NULL DEFAULT immich_uuid_v7());`.execute(db);
await sql`CREATE TABLE "user_metadata" ("userId" uuid NOT NULL, "key" character varying NOT NULL, "value" jsonb NOT NULL);`.execute(db);
await sql`CREATE TABLE "version_history" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "createdAt" timestamp with time zone NOT NULL DEFAULT now(), "version" character varying NOT NULL);`.execute(db);
await sql`ALTER TABLE "users" ADD CONSTRAINT "PK_a3ffb1c0c8416b9fc6f907b7433" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "libraries" ADD CONSTRAINT "PK_505fedfcad00a09b3734b4223de" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "asset_stack" ADD CONSTRAINT "PK_74a27e7fcbd5852463d0af3034b" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "assets" ADD CONSTRAINT "PK_da96729a8b113377cfb6a62439c" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "albums" ADD CONSTRAINT "PK_7f71c7b5bc7c87b8f94c9a93a00" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "activity" ADD CONSTRAINT "PK_24625a1d6b1b089c8ae206fe467" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "albums_assets_assets" ADD CONSTRAINT "PK_c67bc36fa845fb7b18e0e398180" PRIMARY KEY ("albumsId", "assetsId");`.execute(db);
await sql`ALTER TABLE "albums_shared_users_users" ADD CONSTRAINT "PK_7df55657e0b2e8b626330a0ebc8" PRIMARY KEY ("albumsId", "usersId");`.execute(db);
await sql`ALTER TABLE "api_keys" ADD CONSTRAINT "PK_5c8a79801b44bd27b79228e1dad" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "assets_audit" ADD CONSTRAINT "PK_99bd5c015f81a641927a32b4212" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "person" ADD CONSTRAINT "PK_5fdaf670315c4b7e70cce85daa3" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "asset_faces" ADD CONSTRAINT "PK_6df76ab2eb6f5b57b7c2f1fc684" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "asset_files" ADD CONSTRAINT "PK_c41dc3e9ef5e1c57ca5a08a0004" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "asset_job_status" ADD CONSTRAINT "PK_420bec36fc02813bddf5c8b73d4" PRIMARY KEY ("assetId");`.execute(db);
await sql`ALTER TABLE "audit" ADD CONSTRAINT "PK_1d3d120ddaf7bc9b1ed68ed463a" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "exif" ADD CONSTRAINT "PK_c0117fdbc50b917ef9067740c44" PRIMARY KEY ("assetId");`.execute(db);
await sql`ALTER TABLE "face_search" ADD CONSTRAINT "face_search_pkey" PRIMARY KEY ("faceId");`.execute(db);
await sql`ALTER TABLE "geodata_places" ADD CONSTRAINT "PK_c29918988912ef4036f3d7fbff4" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "memories" ADD CONSTRAINT "PK_aaa0692d9496fe827b0568612f8" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "memories_assets_assets" ADD CONSTRAINT "PK_fcaf7112a013d1703c011c6793d" PRIMARY KEY ("memoriesId", "assetsId");`.execute(db);
await sql`ALTER TABLE "move_history" ADD CONSTRAINT "PK_af608f132233acf123f2949678d" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "naturalearth_countries" ADD CONSTRAINT "PK_21a6d86d1ab5d841648212e5353" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "partners_audit" ADD CONSTRAINT "PK_952b50217ff78198a7e380f0359" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "partners" ADD CONSTRAINT "PK_f1cc8f73d16b367f426261a8736" PRIMARY KEY ("sharedById", "sharedWithId");`.execute(db);
await sql`ALTER TABLE "sessions" ADD CONSTRAINT "PK_48cb6b5c20faa63157b3c1baf7f" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "shared_links" ADD CONSTRAINT "PK_642e2b0f619e4876e5f90a43465" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "shared_link__asset" ADD CONSTRAINT "PK_9b4f3687f9b31d1e311336b05e3" PRIMARY KEY ("assetsId", "sharedLinksId");`.execute(db);
await sql`ALTER TABLE "smart_search" ADD CONSTRAINT "smart_search_pkey" PRIMARY KEY ("assetId");`.execute(db);
await sql`ALTER TABLE "session_sync_checkpoints" ADD CONSTRAINT "PK_b846ab547a702863ef7cd9412fb" PRIMARY KEY ("sessionId", "type");`.execute(db);
await sql`ALTER TABLE "system_metadata" ADD CONSTRAINT "PK_fa94f6857470fb5b81ec6084465" PRIMARY KEY ("key");`.execute(db);
await sql`ALTER TABLE "tags" ADD CONSTRAINT "PK_e7dc17249a1148a1970748eda99" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "tag_asset" ADD CONSTRAINT "PK_ef5346fe522b5fb3bc96454747e" PRIMARY KEY ("assetsId", "tagsId");`.execute(db);
await sql`ALTER TABLE "tags_closure" ADD CONSTRAINT "PK_eab38eb12a3ec6df8376c95477c" PRIMARY KEY ("id_ancestor", "id_descendant");`.execute(db);
await sql`ALTER TABLE "users_audit" ADD CONSTRAINT "PK_e9b2bdfd90e7eb5961091175180" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "user_metadata" ADD CONSTRAINT "PK_5931462150b3438cbc83277fe5a" PRIMARY KEY ("userId", "key");`.execute(db);
await sql`ALTER TABLE "version_history" ADD CONSTRAINT "PK_5db259cbb09ce82c0d13cfd1b23" PRIMARY KEY ("id");`.execute(db);
|
||||
await sql`ALTER TABLE "libraries" ADD CONSTRAINT "FK_0f6fc2fb195f24d19b0fb0d57c1" FOREIGN KEY ("ownerId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "asset_stack" ADD CONSTRAINT "FK_91704e101438fd0653f582426dc" FOREIGN KEY ("primaryAssetId") REFERENCES "assets" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION;`.execute(db);
|
||||
await sql`ALTER TABLE "asset_stack" ADD CONSTRAINT "FK_c05079e542fd74de3b5ecb5c1c8" FOREIGN KEY ("ownerId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "assets" ADD CONSTRAINT "FK_2c5ac0d6fb58b238fd2068de67d" FOREIGN KEY ("ownerId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "assets" ADD CONSTRAINT "FK_16294b83fa8c0149719a1f631ef" FOREIGN KEY ("livePhotoVideoId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE SET NULL;`.execute(db);
|
||||
await sql`ALTER TABLE "assets" ADD CONSTRAINT "FK_9977c3c1de01c3d848039a6b90c" FOREIGN KEY ("libraryId") REFERENCES "libraries" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "assets" ADD CONSTRAINT "FK_f15d48fa3ea5e4bda05ca8ab207" FOREIGN KEY ("stackId") REFERENCES "asset_stack" ("id") ON UPDATE CASCADE ON DELETE SET NULL;`.execute(db);
|
||||
await sql`ALTER TABLE "albums" ADD CONSTRAINT "FK_b22c53f35ef20c28c21637c85f4" FOREIGN KEY ("ownerId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "albums" ADD CONSTRAINT "FK_05895aa505a670300d4816debce" FOREIGN KEY ("albumThumbnailAssetId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE SET NULL;`.execute(db);
|
||||
await sql`ALTER TABLE "activity" ADD CONSTRAINT "FK_1af8519996fbfb3684b58df280b" FOREIGN KEY ("albumId") REFERENCES "albums" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "activity" ADD CONSTRAINT "FK_3571467bcbe021f66e2bdce96ea" FOREIGN KEY ("userId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "activity" ADD CONSTRAINT "FK_8091ea76b12338cb4428d33d782" FOREIGN KEY ("assetId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "albums_assets_assets" ADD CONSTRAINT "FK_e590fa396c6898fcd4a50e40927" FOREIGN KEY ("albumsId") REFERENCES "albums" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "albums_assets_assets" ADD CONSTRAINT "FK_4bd1303d199f4e72ccdf998c621" FOREIGN KEY ("assetsId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "albums_shared_users_users" ADD CONSTRAINT "FK_427c350ad49bd3935a50baab737" FOREIGN KEY ("albumsId") REFERENCES "albums" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "albums_shared_users_users" ADD CONSTRAINT "FK_f48513bf9bccefd6ff3ad30bd06" FOREIGN KEY ("usersId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "api_keys" ADD CONSTRAINT "FK_6c2e267ae764a9413b863a29342" FOREIGN KEY ("userId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "person" ADD CONSTRAINT "FK_5527cc99f530a547093f9e577b6" FOREIGN KEY ("ownerId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "person" ADD CONSTRAINT "FK_2bbabe31656b6778c6b87b61023" FOREIGN KEY ("faceAssetId") REFERENCES "asset_faces" ("id") ON UPDATE NO ACTION ON DELETE SET NULL;`.execute(db);
|
||||
await sql`ALTER TABLE "asset_faces" ADD CONSTRAINT "FK_02a43fd0b3c50fb6d7f0cb7282c" FOREIGN KEY ("assetId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "asset_faces" ADD CONSTRAINT "FK_95ad7106dd7b484275443f580f9" FOREIGN KEY ("personId") REFERENCES "person" ("id") ON UPDATE CASCADE ON DELETE SET NULL;`.execute(db);
|
||||
await sql`ALTER TABLE "asset_files" ADD CONSTRAINT "FK_e3e103a5f1d8bc8402999286040" FOREIGN KEY ("assetId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "asset_job_status" ADD CONSTRAINT "FK_420bec36fc02813bddf5c8b73d4" FOREIGN KEY ("assetId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "exif" ADD CONSTRAINT "FK_c0117fdbc50b917ef9067740c44" FOREIGN KEY ("assetId") REFERENCES "assets" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "face_search" ADD CONSTRAINT "face_search_faceId_fkey" FOREIGN KEY ("faceId") REFERENCES "asset_faces" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "memories" ADD CONSTRAINT "FK_575842846f0c28fa5da46c99b19" FOREIGN KEY ("ownerId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "memories_assets_assets" ADD CONSTRAINT "FK_984e5c9ab1f04d34538cd32334e" FOREIGN KEY ("memoriesId") REFERENCES "memories" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "memories_assets_assets" ADD CONSTRAINT "FK_6942ecf52d75d4273de19d2c16f" FOREIGN KEY ("assetsId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "partners" ADD CONSTRAINT "FK_7e077a8b70b3530138610ff5e04" FOREIGN KEY ("sharedById") REFERENCES "users" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "partners" ADD CONSTRAINT "FK_d7e875c6c60e661723dbf372fd3" FOREIGN KEY ("sharedWithId") REFERENCES "users" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "sessions" ADD CONSTRAINT "FK_57de40bc620f456c7311aa3a1e6" FOREIGN KEY ("userId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "shared_links" ADD CONSTRAINT "FK_66fe3837414c5a9f1c33ca49340" FOREIGN KEY ("userId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "shared_links" ADD CONSTRAINT "FK_0c6ce9058c29f07cdf7014eac66" FOREIGN KEY ("albumId") REFERENCES "albums" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "shared_link__asset" ADD CONSTRAINT "FK_5b7decce6c8d3db9593d6111a66" FOREIGN KEY ("assetsId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "shared_link__asset" ADD CONSTRAINT "FK_c9fab4aa97ffd1b034f3d6581ab" FOREIGN KEY ("sharedLinksId") REFERENCES "shared_links" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "smart_search" ADD CONSTRAINT "smart_search_assetId_fkey" FOREIGN KEY ("assetId") REFERENCES "assets" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "session_sync_checkpoints" ADD CONSTRAINT "FK_d8ddd9d687816cc490432b3d4bc" FOREIGN KEY ("sessionId") REFERENCES "sessions" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "tags" ADD CONSTRAINT "FK_92e67dc508c705dd66c94615576" FOREIGN KEY ("userId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "tags" ADD CONSTRAINT "FK_9f9590cc11561f1f48ff034ef99" FOREIGN KEY ("parentId") REFERENCES "tags" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "tag_asset" ADD CONSTRAINT "FK_f8e8a9e893cb5c54907f1b798e9" FOREIGN KEY ("assetsId") REFERENCES "assets" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "tag_asset" ADD CONSTRAINT "FK_e99f31ea4cdf3a2c35c7287eb42" FOREIGN KEY ("tagsId") REFERENCES "tags" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "tags_closure" ADD CONSTRAINT "FK_15fbcbc67663c6bfc07b354c22c" FOREIGN KEY ("id_ancestor") REFERENCES "tags" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "tags_closure" ADD CONSTRAINT "FK_b1a2a7ed45c29179b5ad51548a1" FOREIGN KEY ("id_descendant") REFERENCES "tags" ("id") ON UPDATE NO ACTION ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "user_metadata" ADD CONSTRAINT "FK_6afb43681a21cf7815932bc38ac" FOREIGN KEY ("userId") REFERENCES "users" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(db);
|
||||
await sql`ALTER TABLE "users" ADD CONSTRAINT "UQ_97672ac88f789774dd47f7c8be3" UNIQUE ("email");`.execute(db);
|
||||
await sql`ALTER TABLE "users" ADD CONSTRAINT "UQ_b309cf34fa58137c416b32cea3a" UNIQUE ("storageLabel");`.execute(db);
|
||||
await sql`ALTER TABLE "asset_stack" ADD CONSTRAINT "REL_91704e101438fd0653f582426d" UNIQUE ("primaryAssetId");`.execute(db);
|
||||
await sql`ALTER TABLE "asset_files" ADD CONSTRAINT "UQ_assetId_type" UNIQUE ("assetId", "type");`.execute(db);
|
||||
await sql`ALTER TABLE "move_history" ADD CONSTRAINT "UQ_newPath" UNIQUE ("newPath");`.execute(db);
|
||||
await sql`ALTER TABLE "move_history" ADD CONSTRAINT "UQ_entityId_pathType" UNIQUE ("entityId", "pathType");`.execute(db);
|
||||
await sql`ALTER TABLE "shared_links" ADD CONSTRAINT "UQ_sharedlink_key" UNIQUE ("key");`.execute(db);
|
||||
await sql`ALTER TABLE "tags" ADD CONSTRAINT "UQ_79d6f16e52bb2c7130375246793" UNIQUE ("userId", "value");`.execute(db);
|
||||
await sql`ALTER TABLE "activity" ADD CONSTRAINT "CHK_2ab1e70f113f450eb40c1e3ec8" CHECK (("comment" IS NULL AND "isLiked" = true) OR ("comment" IS NOT NULL AND "isLiked" = false));`.execute(db);
|
||||
await sql`ALTER TABLE "person" ADD CONSTRAINT "CHK_b0f82b0ed662bfc24fbb58bb45" CHECK ("birthDate" <= CURRENT_DATE);`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_users_updated_at_asc_id_asc" ON "users" ("updatedAt", "id")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_users_update_id" ON "users" ("updateId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_0f6fc2fb195f24d19b0fb0d57c" ON "libraries" ("ownerId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_libraries_update_id" ON "libraries" ("updateId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_91704e101438fd0653f582426d" ON "asset_stack" ("primaryAssetId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_c05079e542fd74de3b5ecb5c1c" ON "asset_stack" ("ownerId")`.execute(db);
|
||||
await sql`CREATE INDEX "idx_originalfilename_trigram" ON "assets" USING gin (f_unaccent("originalFileName") gin_trgm_ops)`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_asset_id_stackId" ON "assets" ("id", "stackId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_originalPath_libraryId" ON "assets" ("originalPath", "libraryId")`.execute(db);
|
||||
await sql`CREATE INDEX "idx_local_date_time_month" ON "assets" ((date_trunc('MONTH'::text, ("localDateTime" AT TIME ZONE 'UTC'::text)) AT TIME ZONE 'UTC'::text))`.execute(db);
|
||||
await sql`CREATE INDEX "idx_local_date_time" ON "assets" ((("localDateTime" at time zone 'UTC')::date))`.execute(db);
|
||||
await sql`CREATE UNIQUE INDEX "UQ_assets_owner_library_checksum" ON "assets" ("ownerId", "libraryId", "checksum") WHERE ("libraryId" IS NOT NULL)`.execute(db);
|
||||
await sql`CREATE UNIQUE INDEX "UQ_assets_owner_checksum" ON "assets" ("ownerId", "checksum") WHERE ("libraryId" IS NULL)`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_2c5ac0d6fb58b238fd2068de67" ON "assets" ("ownerId")`.execute(db);
|
||||
await sql`CREATE INDEX "idx_asset_file_created_at" ON "assets" ("fileCreatedAt")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_8d3efe36c0755849395e6ea866" ON "assets" ("checksum")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_16294b83fa8c0149719a1f631e" ON "assets" ("livePhotoVideoId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_4d66e76dada1ca180f67a205dc" ON "assets" ("originalFileName")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_9977c3c1de01c3d848039a6b90" ON "assets" ("libraryId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_f15d48fa3ea5e4bda05ca8ab20" ON "assets" ("stackId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_assets_duplicateId" ON "assets" ("duplicateId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_assets_update_id" ON "assets" ("updateId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_b22c53f35ef20c28c21637c85f" ON "albums" ("ownerId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_05895aa505a670300d4816debc" ON "albums" ("albumThumbnailAssetId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_albums_update_id" ON "albums" ("updateId")`.execute(db);
|
||||
await sql`CREATE UNIQUE INDEX "IDX_activity_like" ON "activity" ("assetId", "userId", "albumId") WHERE ("isLiked" = true)`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_1af8519996fbfb3684b58df280" ON "activity" ("albumId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_3571467bcbe021f66e2bdce96e" ON "activity" ("userId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_8091ea76b12338cb4428d33d78" ON "activity" ("assetId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_activity_update_id" ON "activity" ("updateId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_e590fa396c6898fcd4a50e4092" ON "albums_assets_assets" ("albumsId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_4bd1303d199f4e72ccdf998c62" ON "albums_assets_assets" ("assetsId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_f48513bf9bccefd6ff3ad30bd0" ON "albums_shared_users_users" ("usersId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_427c350ad49bd3935a50baab73" ON "albums_shared_users_users" ("albumsId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_6c2e267ae764a9413b863a2934" ON "api_keys" ("userId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_api_keys_update_id" ON "api_keys" ("updateId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_assets_audit_asset_id" ON "assets_audit" ("assetId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_assets_audit_owner_id" ON "assets_audit" ("ownerId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_assets_audit_deleted_at" ON "assets_audit" ("deletedAt")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_5527cc99f530a547093f9e577b" ON "person" ("ownerId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_2bbabe31656b6778c6b87b6102" ON "person" ("faceAssetId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_person_update_id" ON "person" ("updateId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_bf339a24070dac7e71304ec530" ON "asset_faces" ("personId", "assetId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_asset_faces_assetId_personId" ON "asset_faces" ("assetId", "personId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_02a43fd0b3c50fb6d7f0cb7282" ON "asset_faces" ("assetId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_95ad7106dd7b484275443f580f" ON "asset_faces" ("personId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_asset_files_assetId" ON "asset_files" ("assetId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_asset_files_update_id" ON "asset_files" ("updateId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_ownerId_createdAt" ON "audit" ("ownerId", "createdAt")`.execute(db);
|
||||
await sql`CREATE INDEX "exif_city" ON "exif" ("city")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_live_photo_cid" ON "exif" ("livePhotoCID")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_auto_stack_id" ON "exif" ("autoStackId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_asset_exif_update_id" ON "exif" ("updateId")`.execute(db);
|
||||
await sql`CREATE INDEX "face_index" ON "face_search" USING hnsw (embedding vector_cosine_ops) WITH (ef_construction = 300, m = 16)`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_geodata_gist_earthcoord" ON "geodata_places" (ll_to_earth_public(latitude, longitude))`.execute(db);
|
||||
await sql`CREATE INDEX "idx_geodata_places_name" ON "geodata_places" USING gin (f_unaccent("name") gin_trgm_ops)`.execute(db);
|
||||
await sql`CREATE INDEX "idx_geodata_places_admin2_name" ON "geodata_places" USING gin (f_unaccent("admin2Name") gin_trgm_ops)`.execute(db);
|
||||
await sql`CREATE INDEX "idx_geodata_places_admin1_name" ON "geodata_places" USING gin (f_unaccent("admin1Name") gin_trgm_ops)`.execute(db);
|
||||
await sql`CREATE INDEX "idx_geodata_places_alternate_names" ON "geodata_places" USING gin (f_unaccent("alternateNames") gin_trgm_ops)`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_575842846f0c28fa5da46c99b1" ON "memories" ("ownerId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_memories_update_id" ON "memories" ("updateId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_984e5c9ab1f04d34538cd32334" ON "memories_assets_assets" ("memoriesId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_6942ecf52d75d4273de19d2c16" ON "memories_assets_assets" ("assetsId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_partners_audit_shared_by_id" ON "partners_audit" ("sharedById")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_partners_audit_shared_with_id" ON "partners_audit" ("sharedWithId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_partners_audit_deleted_at" ON "partners_audit" ("deletedAt")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_7e077a8b70b3530138610ff5e0" ON "partners" ("sharedById")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_d7e875c6c60e661723dbf372fd" ON "partners" ("sharedWithId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_partners_update_id" ON "partners" ("updateId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_57de40bc620f456c7311aa3a1e" ON "sessions" ("userId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_sessions_update_id" ON "sessions" ("updateId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_66fe3837414c5a9f1c33ca4934" ON "shared_links" ("userId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_sharedlink_key" ON "shared_links" ("key")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_sharedlink_albumId" ON "shared_links" ("albumId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_5b7decce6c8d3db9593d6111a6" ON "shared_link__asset" ("assetsId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_c9fab4aa97ffd1b034f3d6581a" ON "shared_link__asset" ("sharedLinksId")`.execute(db);
|
||||
await sql`CREATE INDEX "clip_index" ON "smart_search" USING hnsw (embedding vector_cosine_ops) WITH (ef_construction = 300, m = 16)`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_d8ddd9d687816cc490432b3d4b" ON "session_sync_checkpoints" ("sessionId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_session_sync_checkpoints_update_id" ON "session_sync_checkpoints" ("updateId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_92e67dc508c705dd66c9461557" ON "tags" ("userId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_9f9590cc11561f1f48ff034ef9" ON "tags" ("parentId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_tags_update_id" ON "tags" ("updateId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_tag_asset_assetsId_tagsId" ON "tag_asset" ("assetsId", "tagsId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_f8e8a9e893cb5c54907f1b798e" ON "tag_asset" ("assetsId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_e99f31ea4cdf3a2c35c7287eb4" ON "tag_asset" ("tagsId")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_15fbcbc67663c6bfc07b354c22" ON "tags_closure" ("id_ancestor")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_b1a2a7ed45c29179b5ad51548a" ON "tags_closure" ("id_descendant")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_users_audit_deleted_at" ON "users_audit" ("deletedAt")`.execute(db);
|
||||
await sql`CREATE INDEX "IDX_6afb43681a21cf7815932bc38a" ON "user_metadata" ("userId")`.execute(db);
|
||||
await sql`CREATE OR REPLACE TRIGGER "users_delete_audit"
  AFTER DELETE ON "users"
  REFERENCING OLD TABLE AS "old"
  FOR EACH STATEMENT
  WHEN (pg_trigger_depth() = 0)
  EXECUTE FUNCTION users_delete_audit();`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "users_updated_at"
  BEFORE UPDATE ON "users"
  FOR EACH ROW
  EXECUTE FUNCTION updated_at();`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "libraries_updated_at"
  BEFORE UPDATE ON "libraries"
  FOR EACH ROW
  EXECUTE FUNCTION updated_at();`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "assets_delete_audit"
  AFTER DELETE ON "assets"
  REFERENCING OLD TABLE AS "old"
  FOR EACH STATEMENT
  WHEN (pg_trigger_depth() = 0)
  EXECUTE FUNCTION assets_delete_audit();`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "assets_updated_at"
  BEFORE UPDATE ON "assets"
  FOR EACH ROW
  EXECUTE FUNCTION updated_at();`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "albums_updated_at"
  BEFORE UPDATE ON "albums"
  FOR EACH ROW
  EXECUTE FUNCTION updated_at();`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "activity_updated_at"
  BEFORE UPDATE ON "activity"
  FOR EACH ROW
  EXECUTE FUNCTION updated_at();`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "api_keys_updated_at"
  BEFORE UPDATE ON "api_keys"
  FOR EACH ROW
  EXECUTE FUNCTION updated_at();`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "person_updated_at"
  BEFORE UPDATE ON "person"
  FOR EACH ROW
  EXECUTE FUNCTION updated_at();`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "asset_files_updated_at"
  BEFORE UPDATE ON "asset_files"
  FOR EACH ROW
  EXECUTE FUNCTION updated_at();`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "asset_exif_updated_at"
  BEFORE UPDATE ON "exif"
  FOR EACH ROW
  EXECUTE FUNCTION updated_at();`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "memories_updated_at"
  BEFORE UPDATE ON "memories"
  FOR EACH ROW
  EXECUTE FUNCTION updated_at();`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "partners_delete_audit"
  AFTER DELETE ON "partners"
  REFERENCING OLD TABLE AS "old"
  FOR EACH STATEMENT
  WHEN (pg_trigger_depth() = 0)
  EXECUTE FUNCTION partners_delete_audit();`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "partners_updated_at"
  BEFORE UPDATE ON "partners"
  FOR EACH ROW
  EXECUTE FUNCTION updated_at();`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "sessions_updated_at"
  BEFORE UPDATE ON "sessions"
  FOR EACH ROW
  EXECUTE FUNCTION updated_at();`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "session_sync_checkpoints_updated_at"
  BEFORE UPDATE ON "session_sync_checkpoints"
  FOR EACH ROW
  EXECUTE FUNCTION updated_at();`.execute(db);
await sql`CREATE OR REPLACE TRIGGER "tags_updated_at"
  BEFORE UPDATE ON "tags"
  FOR EACH ROW
  EXECUTE FUNCTION updated_at();`.execute(db);
}

export async function down(): Promise<void> {
  // not implemented
}
@@ -5,6 +5,7 @@ import { UserTable } from 'src/schema/tables/user.table';
import {
Check,
Column,
ColumnIndex,
CreateDateColumn,
ForeignKeyColumn,
Index,
@@ -50,6 +51,7 @@ export class ActivityTable {
@Column({ type: 'boolean', default: false })
isLiked!: boolean;

@UpdateIdColumn({ indexName: 'IDX_activity_update_id' })
@ColumnIndex('IDX_activity_update_id')
@UpdateIdColumn()
updateId!: string;
}

@@ -1,13 +1,25 @@
import { AlbumTable } from 'src/schema/tables/album.table';
import { AssetTable } from 'src/schema/tables/asset.table';
import { CreateDateColumn, ForeignKeyColumn, Table } from 'src/sql-tools';
import { ColumnIndex, CreateDateColumn, ForeignKeyColumn, Table } from 'src/sql-tools';

@Table({ name: 'albums_assets_assets', primaryConstraintName: 'PK_c67bc36fa845fb7b18e0e398180' })
export class AlbumAssetTable {
@ForeignKeyColumn(() => AlbumTable, { onDelete: 'CASCADE', onUpdate: 'CASCADE', nullable: false, primary: true })
@ForeignKeyColumn(() => AlbumTable, {
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
nullable: false,
primary: true,
})
@ColumnIndex()
albumsId!: string;

@ForeignKeyColumn(() => AssetTable, { onDelete: 'CASCADE', onUpdate: 'CASCADE', nullable: false, primary: true })
@ForeignKeyColumn(() => AssetTable, {
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
nullable: false,
primary: true,
})
@ColumnIndex()
assetsId!: string;

@CreateDateColumn()

@@ -4,6 +4,7 @@ import { AssetTable } from 'src/schema/tables/asset.table';
import { UserTable } from 'src/schema/tables/user.table';
import {
Column,
ColumnIndex,
CreateDateColumn,
DeleteDateColumn,
ForeignKeyColumn,
@@ -50,6 +51,7 @@ export class AlbumTable {
@Column({ default: AssetOrder.DESC })
order!: AssetOrder;

@UpdateIdColumn({ indexName: 'IDX_albums_update_id' })
@ColumnIndex('IDX_albums_update_id')
@UpdateIdColumn()
updateId?: string;
}

@@ -3,6 +3,7 @@ import { Permission } from 'src/enum';
import { UserTable } from 'src/schema/tables/user.table';
import {
Column,
ColumnIndex,
CreateDateColumn,
ForeignKeyColumn,
PrimaryGeneratedColumn,
@@ -34,6 +35,7 @@ export class APIKeyTable {
@Column({ array: true, type: 'character varying' })
permissions!: Permission[];

@UpdateIdColumn({ indexName: 'IDX_api_keys_update_id' })
@ColumnIndex({ name: 'IDX_api_keys_update_id' })
@UpdateIdColumn()
updateId?: string;
}

@@ -1,17 +1,20 @@
import { PrimaryGeneratedUuidV7Column } from 'src/decorators';
import { Column, CreateDateColumn, Table } from 'src/sql-tools';
import { Column, ColumnIndex, CreateDateColumn, Table } from 'src/sql-tools';

@Table('assets_audit')
export class AssetAuditTable {
@PrimaryGeneratedUuidV7Column()
id!: string;

@Column({ type: 'uuid', indexName: 'IDX_assets_audit_asset_id' })
@ColumnIndex('IDX_assets_audit_asset_id')
@Column({ type: 'uuid' })
assetId!: string;

@Column({ type: 'uuid', indexName: 'IDX_assets_audit_owner_id' })
@ColumnIndex('IDX_assets_audit_owner_id')
@Column({ type: 'uuid' })
ownerId!: string;

@CreateDateColumn({ default: () => 'clock_timestamp()', indexName: 'IDX_assets_audit_deleted_at' })
@ColumnIndex('IDX_assets_audit_deleted_at')
@CreateDateColumn({ default: () => 'clock_timestamp()' })
deletedAt!: Date;
}

@@ -8,21 +8,10 @@ import { Column, DeleteDateColumn, ForeignKeyColumn, Index, PrimaryGeneratedColu
@Index({ name: 'IDX_asset_faces_assetId_personId', columns: ['assetId', 'personId'] })
@Index({ columns: ['personId', 'assetId'] })
export class AssetFaceTable {
@ForeignKeyColumn(() => AssetTable, {
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
// [assetId, personId] is the PK constraint
index: false,
})
@ForeignKeyColumn(() => AssetTable, { onDelete: 'CASCADE', onUpdate: 'CASCADE' })
assetId!: string;

@ForeignKeyColumn(() => PersonTable, {
onDelete: 'SET NULL',
onUpdate: 'CASCADE',
nullable: true,
// [personId, assetId] makes this redundant
index: false,
})
@ForeignKeyColumn(() => PersonTable, { onDelete: 'SET NULL', onUpdate: 'CASCADE', nullable: true })
personId!: string | null;

@Column({ default: 0, type: 'integer' })

@@ -3,6 +3,7 @@ import { AssetFileType } from 'src/enum';
import { AssetTable } from 'src/schema/tables/asset.table';
import {
Column,
ColumnIndex,
CreateDateColumn,
ForeignKeyColumn,
PrimaryGeneratedColumn,
@@ -18,11 +19,8 @@ export class AssetFileTable {
@PrimaryGeneratedColumn()
id!: string;

@ForeignKeyColumn(() => AssetTable, {
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
indexName: 'IDX_asset_files_assetId',
})
@ColumnIndex('IDX_asset_files_assetId')
@ForeignKeyColumn(() => AssetTable, { onDelete: 'CASCADE', onUpdate: 'CASCADE' })
assetId?: string;

@CreateDateColumn()
@@ -37,6 +35,7 @@ export class AssetFileTable {
@Column()
path!: string;

@UpdateIdColumn({ indexName: 'IDX_asset_files_update_id' })
@ColumnIndex('IDX_asset_files_update_id')
@UpdateIdColumn()
updateId?: string;
}

@@ -1,4 +1,5 @@
import { UpdatedAtTrigger, UpdateIdColumn } from 'src/decorators';
import { ASSET_CHECKSUM_CONSTRAINT } from 'src/entities/asset.entity';
import { AssetStatus, AssetType } from 'src/enum';
import { assets_status_enum } from 'src/schema/enums';
import { assets_delete_audit } from 'src/schema/functions';
@@ -8,6 +9,7 @@ import { UserTable } from 'src/schema/tables/user.table';
import {
AfterDeleteTrigger,
Column,
ColumnIndex,
CreateDateColumn,
DeleteDateColumn,
ForeignKeyColumn,
@@ -16,7 +18,6 @@ import {
Table,
UpdateDateColumn,
} from 'src/sql-tools';
import { ASSET_CHECKSUM_CONSTRAINT } from 'src/utils/database';

@Table('assets')
@UpdatedAtTrigger('assets_updated_at')
@@ -77,7 +78,8 @@ export class AssetTable {
@Column()
originalPath!: string;

@Column({ type: 'timestamp with time zone', indexName: 'idx_asset_file_created_at' })
@ColumnIndex('idx_asset_file_created_at')
@Column({ type: 'timestamp with time zone' })
fileCreatedAt!: Date;

@Column({ type: 'timestamp with time zone' })
@@ -92,7 +94,8 @@ export class AssetTable {
@Column({ type: 'character varying', nullable: true, default: '' })
encodedVideoPath!: string | null;

@Column({ type: 'bytea', index: true })
@Column({ type: 'bytea' })
@ColumnIndex()
checksum!: Buffer; // sha1 checksum

@Column({ type: 'boolean', default: true })
@@ -110,7 +113,8 @@ export class AssetTable {
@Column({ type: 'boolean', default: false })
isArchived!: boolean;

@Column({ index: true })
@Column()
@ColumnIndex()
originalFileName!: string;

@Column({ nullable: true })
@@ -137,12 +141,14 @@ export class AssetTable {
@ForeignKeyColumn(() => StackTable, { nullable: true, onDelete: 'SET NULL', onUpdate: 'CASCADE' })
stackId?: string | null;

@Column({ type: 'uuid', nullable: true, indexName: 'IDX_assets_duplicateId' })
@ColumnIndex('IDX_assets_duplicateId')
@Column({ type: 'uuid', nullable: true })
duplicateId!: string | null;

@Column({ enum: assets_status_enum, default: AssetStatus.ACTIVE })
status!: AssetStatus;

@UpdateIdColumn({ indexName: 'IDX_assets_update_id' })
@ColumnIndex('IDX_assets_update_id')
@UpdateIdColumn()
updateId?: string;
}

@@ -1,6 +1,6 @@
import { UpdatedAtTrigger, UpdateIdColumn } from 'src/decorators';
import { AssetTable } from 'src/schema/tables/asset.table';
import { Column, ForeignKeyColumn, Table, UpdateDateColumn } from 'src/sql-tools';
import { Column, ColumnIndex, ForeignKeyColumn, Table, UpdateDateColumn } from 'src/sql-tools';

@Table('exif')
@UpdatedAtTrigger('asset_exif_updated_at')
@@ -50,7 +50,8 @@ export class ExifTable {
@Column({ type: 'double precision', nullable: true })
longitude!: number | null;

@Column({ type: 'character varying', nullable: true, indexName: 'exif_city' })
@ColumnIndex('exif_city')
@Column({ type: 'character varying', nullable: true })
city!: string | null;

@Column({ type: 'character varying', nullable: true })
@@ -68,7 +69,8 @@ export class ExifTable {
@Column({ type: 'character varying', nullable: true })
exposureTime!: string | null;

@Column({ type: 'character varying', nullable: true, indexName: 'IDX_live_photo_cid' })
@ColumnIndex('IDX_live_photo_cid')
@Column({ type: 'character varying', nullable: true })
livePhotoCID!: string | null;

@Column({ type: 'character varying', nullable: true })
@@ -86,7 +88,8 @@ export class ExifTable {
@Column({ type: 'integer', nullable: true })
bitsPerSample!: number | null;

@Column({ type: 'character varying', nullable: true, indexName: 'IDX_auto_stack_id' })
@ColumnIndex('IDX_auto_stack_id')
@Column({ type: 'character varying', nullable: true })
autoStackId!: string | null;

@Column({ type: 'integer', nullable: true })
@@ -95,6 +98,7 @@ export class ExifTable {
@UpdateDateColumn({ default: () => 'clock_timestamp()' })
updatedAt?: Date;

@UpdateIdColumn({ indexName: 'IDX_asset_exif_update_id' })
@ColumnIndex('IDX_asset_exif_update_id')
@UpdateIdColumn()
updateId?: string;
}

@@ -1,6 +1,6 @@
import { Column, Index, PrimaryColumn, Table } from 'src/sql-tools';

@Table({ name: 'geodata_places', synchronize: false })
@Table({ name: 'geodata_places' })
@Index({
name: 'idx_geodata_places_alternate_names',
using: 'gin',
@@ -26,10 +26,11 @@ import { Column, Index, PrimaryColumn, Table } from 'src/sql-tools';
synchronize: false,
})
@Index({
name: 'IDX_geodata_gist_earthcoord',
name: 'idx_geodata_places_gist_earthcoord',
expression: 'll_to_earth_public(latitude, longitude)',
synchronize: false,
})
@Table({ name: 'idx_geodata_places', synchronize: false })
export class GeodataPlacesTable {
@PrimaryColumn({ type: 'integer' })
id!: number;

@@ -2,6 +2,7 @@ import { UpdatedAtTrigger, UpdateIdColumn } from 'src/decorators';
import { UserTable } from 'src/schema/tables/user.table';
import {
Column,
ColumnIndex,
CreateDateColumn,
DeleteDateColumn,
ForeignKeyColumn,
@@ -40,6 +41,7 @@ export class LibraryTable {
@Column({ type: 'timestamp with time zone', nullable: true })
refreshedAt!: Date | null;

@UpdateIdColumn({ indexName: 'IDX_libraries_update_id' })
@ColumnIndex('IDX_libraries_update_id')
@UpdateIdColumn()
updateId?: string;
}

@@ -3,6 +3,7 @@ import { MemoryType } from 'src/enum';
import { UserTable } from 'src/schema/tables/user.table';
import {
Column,
ColumnIndex,
CreateDateColumn,
DeleteDateColumn,
ForeignKeyColumn,
@@ -54,6 +55,7 @@ export class MemoryTable<T extends MemoryType = MemoryType> {
@Column({ type: 'timestamp with time zone', nullable: true })
hideAt?: Date;

@UpdateIdColumn({ indexName: 'IDX_memories_update_id' })
@ColumnIndex('IDX_memories_update_id')
@UpdateIdColumn()
updateId?: string;
}

@@ -1,12 +1,14 @@
import { AssetTable } from 'src/schema/tables/asset.table';
import { MemoryTable } from 'src/schema/tables/memory.table';
import { ForeignKeyColumn, Table } from 'src/sql-tools';
import { ColumnIndex, ForeignKeyColumn, Table } from 'src/sql-tools';

@Table('memories_assets_assets')
export class MemoryAssetTable {
@ColumnIndex()
@ForeignKeyColumn(() => MemoryTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE', primary: true })
memoriesId!: string;

@ColumnIndex()
@ForeignKeyColumn(() => AssetTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE', primary: true })
assetsId!: string;
}

@@ -1,17 +1,20 @@
import { PrimaryGeneratedUuidV7Column } from 'src/decorators';
import { Column, CreateDateColumn, Table } from 'src/sql-tools';
import { Column, ColumnIndex, CreateDateColumn, Table } from 'src/sql-tools';

@Table('partners_audit')
export class PartnerAuditTable {
@PrimaryGeneratedUuidV7Column()
id!: string;

@Column({ type: 'uuid', indexName: 'IDX_partners_audit_shared_by_id' })
@ColumnIndex('IDX_partners_audit_shared_by_id')
@Column({ type: 'uuid' })
sharedById!: string;

@Column({ type: 'uuid', indexName: 'IDX_partners_audit_shared_with_id' })
@ColumnIndex('IDX_partners_audit_shared_with_id')
@Column({ type: 'uuid' })
sharedWithId!: string;

@CreateDateColumn({ default: () => 'clock_timestamp()', indexName: 'IDX_partners_audit_deleted_at' })
@ColumnIndex('IDX_partners_audit_deleted_at')
@CreateDateColumn({ default: () => 'clock_timestamp()' })
deletedAt!: Date;
}

@@ -1,7 +1,15 @@
import { UpdatedAtTrigger, UpdateIdColumn } from 'src/decorators';
import { partners_delete_audit } from 'src/schema/functions';
import { UserTable } from 'src/schema/tables/user.table';
import { AfterDeleteTrigger, Column, CreateDateColumn, ForeignKeyColumn, Table, UpdateDateColumn } from 'src/sql-tools';
import {
AfterDeleteTrigger,
Column,
ColumnIndex,
CreateDateColumn,
ForeignKeyColumn,
Table,
UpdateDateColumn,
} from 'src/sql-tools';

@Table('partners')
@UpdatedAtTrigger('partners_updated_at')
@@ -13,12 +21,7 @@ import { AfterDeleteTrigger, Column, CreateDateColumn, ForeignKeyColumn, Table,
when: 'pg_trigger_depth() = 0',
})
export class PartnerTable {
@ForeignKeyColumn(() => UserTable, {
onDelete: 'CASCADE',
primary: true,
// [sharedById, sharedWithId] is the PK constraint
index: false,
})
@ForeignKeyColumn(() => UserTable, { onDelete: 'CASCADE', primary: true })
sharedById!: string;

@ForeignKeyColumn(() => UserTable, { onDelete: 'CASCADE', primary: true })
@@ -33,6 +36,7 @@ export class PartnerTable {
@Column({ type: 'boolean', default: false })
inTimeline!: boolean;

@UpdateIdColumn({ indexName: 'IDX_partners_update_id' })
@ColumnIndex('IDX_partners_update_id')
@UpdateIdColumn()
updateId!: string;
}

@@ -4,6 +4,7 @@ import { UserTable } from 'src/schema/tables/user.table';
import {
Check,
Column,
ColumnIndex,
CreateDateColumn,
ForeignKeyColumn,
PrimaryGeneratedColumn,
@@ -48,6 +49,7 @@ export class PersonTable {
@Column({ type: 'character varying', nullable: true, default: null })
color?: string | null;

@UpdateIdColumn({ indexName: 'IDX_person_update_id' })
@ColumnIndex('IDX_person_update_id')
@UpdateIdColumn()
updateId!: string;
}

@@ -2,6 +2,7 @@ import { UpdatedAtTrigger, UpdateIdColumn } from 'src/decorators';
import { UserTable } from 'src/schema/tables/user.table';
import {
Column,
ColumnIndex,
CreateDateColumn,
ForeignKeyColumn,
PrimaryGeneratedColumn,
@@ -34,6 +35,7 @@ export class SessionTable {
@Column({ default: '' })
deviceOS!: string;

@UpdateIdColumn({ indexName: 'IDX_sessions_update_id' })
@ColumnIndex('IDX_sessions_update_id')
@UpdateIdColumn()
updateId!: string;
}

@@ -1,12 +1,14 @@
import { AssetTable } from 'src/schema/tables/asset.table';
import { SharedLinkTable } from 'src/schema/tables/shared-link.table';
import { ForeignKeyColumn, Table } from 'src/sql-tools';
import { ColumnIndex, ForeignKeyColumn, Table } from 'src/sql-tools';

@Table('shared_link__asset')
export class SharedLinkAssetTable {
@ColumnIndex()
@ForeignKeyColumn(() => AssetTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE', primary: true })
assetsId!: string;

@ColumnIndex()
@ForeignKeyColumn(() => SharedLinkTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE', primary: true })
sharedLinksId!: string;
}

@@ -1,7 +1,15 @@
import { SharedLinkType } from 'src/enum';
import { AlbumTable } from 'src/schema/tables/album.table';
import { UserTable } from 'src/schema/tables/user.table';
import { Column, CreateDateColumn, ForeignKeyColumn, PrimaryGeneratedColumn, Table, Unique } from 'src/sql-tools';
import {
Column,
ColumnIndex,
CreateDateColumn,
ForeignKeyColumn,
PrimaryGeneratedColumn,
Table,
Unique,
} from 'src/sql-tools';

@Table('shared_links')
@Unique({ name: 'UQ_sharedlink_key', columns: ['key'] })
@@ -15,7 +23,8 @@ export class SharedLinkTable {
@ForeignKeyColumn(() => UserTable, { onDelete: 'CASCADE', onUpdate: 'CASCADE' })
userId!: string;

@Column({ type: 'bytea', indexName: 'IDX_sharedlink_key' })
@ColumnIndex('IDX_sharedlink_key')
@Column({ type: 'bytea' })
key!: Buffer; // used to access the individual asset

@Column()
@@ -30,12 +39,8 @@ export class SharedLinkTable {
@Column({ type: 'boolean', default: false })
allowUpload!: boolean;

@ForeignKeyColumn(() => AlbumTable, {
nullable: true,
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
indexName: 'IDX_sharedlink_albumId',
})
@ColumnIndex('IDX_sharedlink_albumId')
@ForeignKeyColumn(() => AlbumTable, { nullable: true, onDelete: 'CASCADE', onUpdate: 'CASCADE' })
albumId!: string;

@Column({ type: 'boolean', default: true })

@@ -1,7 +1,15 @@
import { UpdatedAtTrigger, UpdateIdColumn } from 'src/decorators';
import { SyncEntityType } from 'src/enum';
import { SessionTable } from 'src/schema/tables/session.table';
import { Column, CreateDateColumn, ForeignKeyColumn, PrimaryColumn, Table, UpdateDateColumn } from 'src/sql-tools';
import {
Column,
ColumnIndex,
CreateDateColumn,
ForeignKeyColumn,
PrimaryColumn,
Table,
UpdateDateColumn,
} from 'src/sql-tools';

@Table('session_sync_checkpoints')
@UpdatedAtTrigger('session_sync_checkpoints_updated_at')
@@ -21,6 +29,7 @@ export class SessionSyncCheckpointTable {
@Column()
ack!: string;

@UpdateIdColumn({ indexName: 'IDX_session_sync_checkpoints_update_id' })
@ColumnIndex('IDX_session_sync_checkpoints_update_id')
@UpdateIdColumn()
updateId!: string;
}

@@ -1,13 +1,15 @@
import { AssetTable } from 'src/schema/tables/asset.table';
import { TagTable } from 'src/schema/tables/tag.table';
import { ForeignKeyColumn, Index, Table } from 'src/sql-tools';
import { ColumnIndex, ForeignKeyColumn, Index, Table } from 'src/sql-tools';

@Index({ name: 'IDX_tag_asset_assetsId_tagsId', columns: ['assetsId', 'tagsId'] })
@Table('tag_asset')
export class TagAssetTable {
@ForeignKeyColumn(() => AssetTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE', primary: true, index: true })
@ColumnIndex()
@ForeignKeyColumn(() => AssetTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE', primary: true })
assetsId!: string;

@ForeignKeyColumn(() => TagTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE', primary: true, index: true })
@ColumnIndex()
@ForeignKeyColumn(() => TagTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE', primary: true })
tagsId!: string;
}

@@ -1,11 +1,13 @@
import { TagTable } from 'src/schema/tables/tag.table';
import { ForeignKeyColumn, Table } from 'src/sql-tools';
import { ColumnIndex, ForeignKeyColumn, Table } from 'src/sql-tools';

@Table('tags_closure')
export class TagClosureTable {
@ForeignKeyColumn(() => TagTable, { primary: true, onDelete: 'CASCADE', onUpdate: 'NO ACTION', index: true })
@ColumnIndex()
@ForeignKeyColumn(() => TagTable, { primary: true, onDelete: 'CASCADE', onUpdate: 'NO ACTION' })
id_ancestor!: string;

@ForeignKeyColumn(() => TagTable, { primary: true, onDelete: 'CASCADE', onUpdate: 'NO ACTION', index: true })
@ColumnIndex()
@ForeignKeyColumn(() => TagTable, { primary: true, onDelete: 'CASCADE', onUpdate: 'NO ACTION' })
id_descendant!: string;
}

@@ -2,6 +2,7 @@ import { UpdatedAtTrigger, UpdateIdColumn } from 'src/decorators';
import { UserTable } from 'src/schema/tables/user.table';
import {
Column,
ColumnIndex,
CreateDateColumn,
ForeignKeyColumn,
PrimaryGeneratedColumn,
@@ -17,12 +18,7 @@ export class TagTable {
@PrimaryGeneratedColumn()
id!: string;

@ForeignKeyColumn(() => UserTable, {
onUpdate: 'CASCADE',
onDelete: 'CASCADE',
// [userId, value] makes this redundant
index: false,
})
@ForeignKeyColumn(() => UserTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE' })
userId!: string;

@Column()
@@ -40,6 +36,7 @@ export class TagTable {
@ForeignKeyColumn(() => TagTable, { nullable: true, onDelete: 'CASCADE' })
parentId?: string;

@UpdateIdColumn({ indexName: 'IDX_tags_update_id' })
@ColumnIndex('IDX_tags_update_id')
@UpdateIdColumn()
updateId!: string;
}

@@ -1,12 +1,13 @@
import { PrimaryGeneratedUuidV7Column } from 'src/decorators';
import { Column, CreateDateColumn, Table } from 'src/sql-tools';
import { Column, ColumnIndex, CreateDateColumn, Table } from 'src/sql-tools';

@Table('users_audit')
export class UserAuditTable {
@Column({ type: 'uuid' })
userId!: string;

@CreateDateColumn({ default: () => 'clock_timestamp()', indexName: 'IDX_users_audit_deleted_at' })
@ColumnIndex('IDX_users_audit_deleted_at')
@CreateDateColumn({ default: () => 'clock_timestamp()' })
deletedAt!: Date;

@PrimaryGeneratedUuidV7Column()

@@ -5,13 +5,7 @@ import { UserMetadata, UserMetadataItem } from 'src/types';

@Table('user_metadata')
export class UserMetadataTable<T extends keyof UserMetadata = UserMetadataKey> implements UserMetadataItem<T> {
@ForeignKeyColumn(() => UserTable, {
onUpdate: 'CASCADE',
onDelete: 'CASCADE',
primary: true,
// [userId, key] is the PK constraint
index: false,
})
@ForeignKeyColumn(() => UserTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE', primary: true })
userId!: string;

@PrimaryColumn({ type: 'character varying' })

@@ -5,6 +5,7 @@ import { users_delete_audit } from 'src/schema/functions';
import {
AfterDeleteTrigger,
Column,
ColumnIndex,
CreateDateColumn,
DeleteDateColumn,
Index,
@@ -76,6 +77,7 @@ export class UserTable {
@Column({ type: 'timestamp with time zone', default: () => 'now()' })
profileChangedAt!: Generated<Timestamp>;

@UpdateIdColumn({ indexName: 'IDX_users_update_id' })
@ColumnIndex({ name: 'IDX_users_update_id' })
@UpdateIdColumn()
updateId!: Generated<string>;
}
Some files were not shown because too many files have changed in this diff