Compare commits
13 commits — v1.107.2...feat/serve
| Author | SHA1 | Date |
|---|---|---|
| | ea33cd9515 | |
| | e9ceb8b017 | |
| | 94030d809f | |
| | 4db0717a08 | |
| | b6808c1675 | |
| | a2b7403978 | |
| | 1556d978ed | |
| | 48a71ac4d9 | |
| | adf620331c | |
| | c92637df80 | |
| | 3356b023c5 | |
| | a2e8b657e6 | |
| | f420befc15 | |
@@ -4,7 +4,6 @@
design/
docker/
!docker/scripts
docs/
e2e/
fastlane/
2  .gitattributes  vendored
@@ -2,6 +2,8 @@ mobile/openapi/**/*.md -diff -merge
mobile/openapi/**/*.md linguist-generated=true
mobile/openapi/**/*.dart -diff -merge
mobile/openapi/**/*.dart linguist-generated=true
mobile/openapi/.openapi-generator/FILES -diff -merge
mobile/openapi/.openapi-generator/FILES linguist-generated=true

mobile/lib/**/*.g.dart -diff -merge
mobile/lib/**/*.g.dart linguist-generated=true
4  .github/ISSUE_TEMPLATE/config.yml  vendored
@@ -1,11 +1,11 @@
blank_issues_enabled: false
contact_links:
- name: I have a question or need support
url: https://discord.immich.app
url: https://discord.gg/D8JsnBEuKb
about: We use GitHub for tracking bugs, please check out our Discord channel for freaky fast support.
- name: Feature Request
url: https://github.com/immich-app/immich/discussions/new?category=feature-request
about: Please use our GitHub Discussion for making feature requests.
- name: I'm unsure where to go
url: https://discord.immich.app
url: https://discord.gg/D8JsnBEuKb
about: If you are unsure where to go, then joining our Discord is recommended; Just ask!
35  .github/labeler.yml  vendored
@@ -1,35 +0,0 @@
cli:
- changed-files:
- any-glob-to-any-file:
- cli/src/**

documentation:
- changed-files:
- any-glob-to-any-file:
- docs/blob/**
- docs/docs/**
- docs/src/**
- docs/static/**

🖥️web:
- changed-files:
- any-glob-to-any-file:
- web/src/**
- web/static/**

📱mobile:
- changed-files:
- any-glob-to-any-file:
- mobile/lib/**
- mobile/test/**

🗄️server:
- changed-files:
- any-glob-to-any-file:
- server/src/**
- server/test/**

🧠machine-learning:
- changed-files:
- any-glob-to-any-file:
- machine-learning/app/**
2  .github/workflows/build-mobile.yml  vendored
@@ -45,7 +45,7 @@ jobs:
uses: subosito/flutter-action@v2
with:
channel: 'stable'
flutter-version-file: ./mobile/pubspec.yaml
flutter-version: '3.19.3'
cache: true

- name: Create the Keystore
25  .github/workflows/cli.yml  vendored
@@ -1,17 +1,16 @@
name: CLI Build
on:
workflow_dispatch:
push:
branches: [main]
paths:
- 'cli/**'
- '.github/workflows/cli.yml'
- "cli/**"
- ".github/workflows/cli.yml"
pull_request:
branches: [main]
paths:
- 'cli/**'
- '.github/workflows/cli.yml'
release:
types: [published]
- "cli/**"
- ".github/workflows/cli.yml"

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
@@ -33,8 +32,8 @@ jobs:
# Setup .npmrc file to publish to npm
- uses: actions/setup-node@v4
with:
node-version-file: './cli/.nvmrc'
registry-url: 'https://registry.npmjs.org'
node-version: "20.x"
registry-url: "https://registry.npmjs.org"
- name: Prepare SDK
run: npm ci --prefix ../open-api/typescript-sdk/
- name: Build SDK
@@ -42,7 +41,7 @@ jobs:
- run: npm ci
- run: npm run build
- run: npm publish
if: ${{ github.event_name == 'release' }}
if: ${{ github.event_name == 'workflow_dispatch' }}
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

@@ -84,15 +83,15 @@ jobs:
images: |
name=ghcr.io/${{ github.repository_owner }}/immich-cli
tags: |
type=raw,value=${{ steps.package-version.outputs.version }},enable=${{ github.event_name == 'release' }}
type=raw,value=latest,enable=${{ github.event_name == 'release' }}
type=raw,value=${{ steps.package-version.outputs.version }},enable=${{ github.event_name == 'workflow_dispatch' }}
type=raw,value=latest,enable=${{ github.event_name == 'workflow_dispatch' }}

- name: Build and push image
uses: docker/build-push-action@v6.2.0
uses: docker/build-push-action@v5.3.0
with:
file: cli/Dockerfile
platforms: linux/amd64,linux/arm64
push: ${{ github.event_name == 'release' }}
push: ${{ github.event_name == 'workflow_dispatch' }}
cache-from: type=gha
cache-to: type=gha,mode=max
tags: ${{ steps.metadata.outputs.tags }}
4  .github/workflows/docker-cleanup.yml  vendored
@@ -35,7 +35,7 @@ jobs:
steps:
- name: Clean temporary images
if: "${{ env.TOKEN != '' }}"
uses: stumpylog/image-cleaner-action/ephemeral@v0.7.0
uses: stumpylog/image-cleaner-action/ephemeral@v0.6.0
with:
token: "${{ env.TOKEN }}"
owner: "immich-app"
@@ -64,7 +64,7 @@ jobs:
steps:
- name: Clean untagged images
if: "${{ env.TOKEN != '' }}"
uses: stumpylog/image-cleaner-action/untagged@v0.7.0
uses: stumpylog/image-cleaner-action/untagged@v0.6.0
with:
token: "${{ env.TOKEN }}"
owner: "immich-app"
10  .github/workflows/docker.yml  vendored
@@ -115,7 +115,7 @@ jobs:
fi

- name: Build and push image
uses: docker/build-push-action@v6.2.0
uses: docker/build-push-action@v5.3.0
with:
context: ${{ matrix.context }}
file: ${{ matrix.file }}
@@ -124,11 +124,7 @@ jobs:
push: ${{ !github.event.pull_request.head.repo.fork }}
cache-from: type=registry,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{matrix.image}}
cache-to: ${{ steps.cache-target.outputs.cache-to }}
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}
build-args: |
DEVICE=${{ matrix.device }}
BUILD_ID=${{ github.run_id }}
BUILD_IMAGE=${{ github.event_name == 'release' && github.ref_name || steps.metadata.outputs.tags }}
BUILD_SOURCE_REF=${{ github.ref_name }}
BUILD_SOURCE_COMMIT=${{ github.sha }}
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}
48  .github/workflows/docs-build.yml  vendored
@@ -1,48 +0,0 @@
name: Docs build
on:
push:
branches: [main]
paths:
- "docs/**"
pull_request:
branches: [main]
paths:
- "docs/**"
release:
types: [published]

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

jobs:
build:
runs-on: ubuntu-latest
defaults:
run:
working-directory: ./docs

steps:
- name: Checkout code
uses: actions/checkout@v4

- name: Setup Node
uses: actions/setup-node@v4
with:
node-version-file: './docs/.nvmrc'

- name: Run npm install
run: npm ci

- name: Check formatting
run: npm run format

- name: Run build
run: npm run build

- name: Upload build output
uses: actions/upload-artifact@v4
with:
name: docs-build-output
path: docs/build/
retention-days: 1
189  .github/workflows/docs-deploy.yml  vendored
@@ -1,189 +0,0 @@
name: Docs deploy
|
||||
on:
|
||||
workflow_run:
|
||||
workflows: ["Docs build"]
|
||||
types:
|
||||
- completed
|
||||
|
||||
jobs:
|
||||
checks:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
parameters: ${{ steps.parameters.outputs.result }}
|
||||
steps:
|
||||
- if: ${{ github.event.workflow_run.conclusion == 'failure' }}
|
||||
run: echo 'The triggering workflow failed' && exit 1
|
||||
|
||||
- name: Determine deploy parameters
|
||||
id: parameters
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const eventType = context.payload.workflow_run.event;
|
||||
const isFork = context.payload.workflow_run.repository.fork;
|
||||
|
||||
let parameters;
|
||||
|
||||
console.log({eventType, isFork});
|
||||
|
||||
if (eventType == "push") {
|
||||
const branch = context.payload.workflow_run.head_branch;
|
||||
console.log({branch});
|
||||
const shouldDeploy = !isFork && branch == "main";
|
||||
parameters = {
|
||||
event: "branch",
|
||||
name: "main",
|
||||
shouldDeploy
|
||||
};
|
||||
} else if (eventType == "pull_request") {
|
||||
let pull_number = context.payload.workflow_run.pull_requests[0]?.number;
|
||||
if(!pull_number) {
|
||||
const response = await github.rest.search.issuesAndPullRequests({q: 'repo:${{ github.repository }} is:pr sha:${{ github.event.workflow_run.head_sha }}',per_page: 1,})
|
||||
const items = response.data.items
|
||||
if (items.length < 1) {
|
||||
throw new Error("No pull request found for the commit")
|
||||
}
|
||||
const pullRequestNumber = items[0].number
|
||||
console.info("Pull request number is", pullRequestNumber)
|
||||
pull_number = pullRequestNumber
|
||||
}
|
||||
const {data: pr} = await github.rest.pulls.get({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
pull_number
|
||||
});
|
||||
|
||||
console.log({pull_number});
|
||||
|
||||
parameters = {
|
||||
event: "pr",
|
||||
name: `pr-${pull_number}`,
|
||||
pr_number: pull_number,
|
||||
shouldDeploy: true
|
||||
};
|
||||
} else if (eventType == "release") {
|
||||
parameters = {
|
||||
event: "release",
|
||||
name: context.payload.workflow_run.head_branch,
|
||||
shouldDeploy: !isFork
|
||||
};
|
||||
}
|
||||
|
||||
console.log(parameters);
|
||||
return parameters;
|
||||
|
||||
deploy:
|
||||
runs-on: ubuntu-latest
|
||||
needs: checks
|
||||
if: ${{ fromJson(needs.checks.outputs.parameters).shouldDeploy }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Load parameters
|
||||
id: parameters
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const json = `${{ needs.checks.outputs.parameters }}`;
|
||||
const parameters = JSON.parse(json);
|
||||
core.setOutput("event", parameters.event);
|
||||
core.setOutput("name", parameters.name);
|
||||
core.setOutput("shouldDeploy", parameters.shouldDeploy);
|
||||
|
||||
- run: |
|
||||
echo "Starting docs deployment for ${{ steps.parameters.outputs.event }} ${{ steps.parameters.outputs.name }}"
|
||||
|
||||
- name: Download artifact
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
run_id: context.payload.workflow_run.id,
|
||||
});
|
||||
let matchArtifact = allArtifacts.data.artifacts.filter((artifact) => {
|
||||
return artifact.name == "docs-build-output"
|
||||
})[0];
|
||||
let download = await github.rest.actions.downloadArtifact({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
artifact_id: matchArtifact.id,
|
||||
archive_format: 'zip',
|
||||
});
|
||||
let fs = require('fs');
|
||||
fs.writeFileSync(`${process.env.GITHUB_WORKSPACE}/docs-build-output.zip`, Buffer.from(download.data));
|
||||
|
||||
- name: Unzip artifact
|
||||
run: unzip "${{ github.workspace }}/docs-build-output.zip" -d "${{ github.workspace }}/docs/build"
|
||||
|
||||
- name: Deploy Docs Subdomain
|
||||
env:
|
||||
TF_VAR_prefix_name: ${{ steps.parameters.outputs.name}}
|
||||
TF_VAR_prefix_event_type: ${{ steps.parameters.outputs.event }}
|
||||
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
||||
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
|
||||
uses: gruntwork-io/terragrunt-action@v2
|
||||
with:
|
||||
tg_version: "0.58.12"
|
||||
tofu_version: "1.7.1"
|
||||
tg_dir: "deployment/modules/cloudflare/docs"
|
||||
tg_command: "apply"
|
||||
|
||||
- name: Deploy Docs Subdomain Output
|
||||
id: docs-output
|
||||
env:
|
||||
TF_VAR_prefix_name: ${{ steps.parameters.outputs.name}}
|
||||
TF_VAR_prefix_event_type: ${{ steps.parameters.outputs.event }}
|
||||
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
||||
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
|
||||
uses: gruntwork-io/terragrunt-action@v2
|
||||
with:
|
||||
tg_version: "0.58.12"
|
||||
tofu_version: "1.7.1"
|
||||
tg_dir: "deployment/modules/cloudflare/docs"
|
||||
tg_command: "output -json"
|
||||
|
||||
- name: Output Cleaning
|
||||
id: clean
|
||||
run: |
|
||||
TG_OUT=$(echo '${{ steps.docs-output.outputs.tg_action_output }}' | sed 's|%0A|\n|g ; s|%3C|<|g' | jq -c .)
|
||||
echo "output=$TG_OUT" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Publish to Cloudflare Pages
|
||||
uses: cloudflare/pages-action@v1
|
||||
with:
|
||||
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN_PAGES_UPLOAD }}
|
||||
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
||||
projectName: ${{ fromJson(steps.clean.outputs.output).pages_project_name.value }}
|
||||
workingDirectory: "docs"
|
||||
directory: "build"
|
||||
branch: ${{ steps.parameters.outputs.name }}
|
||||
wranglerVersion: '3'
|
||||
|
||||
- name: Deploy Docs Release Domain
|
||||
if: ${{ steps.parameters.outputs.event == 'release' }}
|
||||
env:
|
||||
TF_VAR_prefix_name: ${{ steps.parameters.outputs.name}}
|
||||
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
||||
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
|
||||
uses: gruntwork-io/terragrunt-action@v2
|
||||
with:
|
||||
tg_version: '0.58.12'
|
||||
tofu_version: '1.7.1'
|
||||
tg_dir: 'deployment/modules/cloudflare/docs-release'
|
||||
tg_command: 'apply'
|
||||
|
||||
- name: Comment
|
||||
uses: actions-cool/maintain-one-comment@v3
|
||||
if: ${{ steps.parameters.outputs.event == 'pr' }}
|
||||
with:
|
||||
number: ${{ fromJson(needs.checks.outputs.parameters).pr_number }}
|
||||
body: |
|
||||
📖 Documentation deployed to [${{ fromJson(steps.clean.outputs.output).immich_app_branch_subdomain.value }}](https://${{ fromJson(steps.clean.outputs.output).immich_app_branch_subdomain.value }})
|
||||
emojis: 'rocket'
|
||||
body-include: '<!-- Docs PR URL -->'
|
||||
32  .github/workflows/docs-destroy.yml  vendored
@@ -1,32 +0,0 @@
name: Docs destroy
on:
pull_request_target:
types: [closed]

jobs:
deploy:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4

- name: Destroy Docs Subdomain
env:
TF_VAR_prefix_name: "pr-${{ github.event.number }}"
TF_VAR_prefix_event_type: "pr"
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
uses: gruntwork-io/terragrunt-action@v2
with:
tg_version: "0.58.12"
tofu_version: "1.7.1"
tg_dir: "deployment/modules/cloudflare/docs"
tg_command: "destroy"

- name: Comment
uses: actions-cool/maintain-one-comment@v3
with:
number: ${{ github.event.number }}
delete: true
body-include: '<!-- Docs PR URL -->'
12  .github/workflows/pr-labeler.yml  vendored
@@ -1,12 +0,0 @@
name: "Pull Request Labeler"
on:
- pull_request_target

jobs:
labeler:
permissions:
contents: read
pull-requests: write
runs-on: ubuntu-latest
steps:
- uses: actions/labeler@v5
@@ -1,15 +0,0 @@
name: PR Conventional Commit Validation

on:
pull_request:
types: [opened, synchronize, reopened, edited]

jobs:
validate-pr-title:
runs-on: ubuntu-latest
steps:
- name: PR Conventional Commit Validation
uses: ytanikin/PRConventionalCommits@1.2.0
with:
task_types: '["feat","fix","docs","test","ci","refactor","perf","chore","revert"]'
add_label: 'false'
13  .github/workflows/pr-require-label.yml  vendored  Normal file
@@ -0,0 +1,13 @@
name: Enforce PR labels

on:
pull_request:
types: [labeled, unlabeled, opened, edited, synchronize]
jobs:
enforce-label:
name: Enforce label
runs-on: ubuntu-latest
steps:
- if: toJson(github.event.pull_request.labels) == '[]'
run: exit 1
2  .github/workflows/sdk.yml  vendored
@@ -19,7 +19,7 @@ jobs:
# Setup .npmrc file to publish to npm
- uses: actions/setup-node@v4
with:
node-version-file: './open-api/typescript-sdk/.nvmrc'
node-version: '20.x'
registry-url: 'https://registry.npmjs.org'
- name: Install deps
run: npm ci
4  .github/workflows/static_analysis.yml  vendored
@@ -22,8 +22,8 @@ jobs:
- name: Setup Flutter SDK
uses: subosito/flutter-action@v2
with:
channel: 'stable'
flutter-version-file: ./mobile/pubspec.yaml
channel: "stable"
flutter-version: "3.19.3"

- name: Install dependencies
run: dart pub get
95  .github/workflows/test.yml  vendored
@@ -10,6 +10,28 @@ concurrency:
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
doc-tests:
|
||||
name: Docs
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./docs
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Run npm install
|
||||
run: npm ci
|
||||
|
||||
- name: Run formatter
|
||||
run: npm run format
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run build
|
||||
run: npm run build
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
server-unit-tests:
|
||||
name: Server
|
||||
runs-on: ubuntu-latest
|
||||
@@ -21,11 +43,6 @@ jobs:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: './server/.nvmrc'
|
||||
|
||||
- name: Run npm install
|
||||
run: npm ci
|
||||
|
||||
@@ -59,7 +76,7 @@ jobs:
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: './cli/.nvmrc'
|
||||
node-version: 20
|
||||
|
||||
- name: Setup typescript-sdk
|
||||
run: npm ci && npm run build
|
||||
@@ -84,38 +101,6 @@ jobs:
|
||||
run: npm run test:cov
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
cli-unit-tests-win:
|
||||
name: CLI (Windows)
|
||||
runs-on: windows-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./cli
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: './cli/.nvmrc'
|
||||
|
||||
- name: Setup typescript-sdk
|
||||
run: npm ci && npm run build
|
||||
working-directory: ./open-api/typescript-sdk
|
||||
|
||||
- name: Install deps
|
||||
run: npm ci
|
||||
|
||||
# Skip linter & formatter in Windows test.
|
||||
- name: Run tsc
|
||||
run: npm run check
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run unit tests & coverage
|
||||
run: npm run test:cov
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
web-unit-tests:
|
||||
name: Web
|
||||
runs-on: ubuntu-latest
|
||||
@@ -127,11 +112,6 @@ jobs:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: './web/.nvmrc'
|
||||
|
||||
- name: Run setup typescript-sdk
|
||||
run: npm ci && npm run build
|
||||
working-directory: ./open-api/typescript-sdk
|
||||
@@ -175,7 +155,7 @@ jobs:
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: './e2e/.nvmrc'
|
||||
node-version: 20
|
||||
|
||||
- name: Run setup typescript-sdk
|
||||
run: npm ci && npm run build
|
||||
@@ -228,7 +208,7 @@ jobs:
|
||||
uses: subosito/flutter-action@v2
|
||||
with:
|
||||
channel: 'stable'
|
||||
flutter-version-file: ./mobile/pubspec.yaml
|
||||
flutter-version: '3.19.3'
|
||||
- name: Run tests
|
||||
working-directory: ./mobile
|
||||
run: flutter test -j 1
|
||||
@@ -280,23 +260,9 @@ jobs:
|
||||
name: OpenAPI Clients
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: './server/.nvmrc'
|
||||
|
||||
- name: Install server dependencies
|
||||
run: npm --prefix=server ci
|
||||
|
||||
- name: Build the app
|
||||
run: npm --prefix=server run build
|
||||
|
||||
- uses: actions/checkout@v4
|
||||
- name: Run API generation
|
||||
run: make open-api
|
||||
|
||||
- name: Find file changes
|
||||
uses: tj-actions/verify-changed-files@v20
|
||||
id: verify-changed-files
|
||||
@@ -304,8 +270,6 @@ jobs:
|
||||
files: |
|
||||
mobile/openapi
|
||||
open-api/typescript-sdk
|
||||
open-api/immich-openapi-specs.json
|
||||
|
||||
- name: Verify files have not changed
|
||||
if: steps.verify-changed-files.outputs.files_changed == 'true'
|
||||
run: |
|
||||
@@ -338,11 +302,6 @@ jobs:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: './server/.nvmrc'
|
||||
|
||||
- name: Install server dependencies
|
||||
run: npm ci
|
||||
|
||||
@@ -373,7 +332,7 @@ jobs:
|
||||
exit 1
|
||||
|
||||
- name: Run SQL generation
|
||||
run: npm run sync:sql
|
||||
run: npm run sql:generate
|
||||
env:
|
||||
DB_URL: postgres://postgres:postgres@localhost:5432/immich
|
||||
|
||||
|
||||
5  .gitignore  vendored
@@ -14,10 +14,7 @@ mobile/gradle.properties
mobile/openapi/pubspec.lock
mobile/*.jks
mobile/libisar.dylib
mobile/openapi/test
mobile/openapi/doc
mobile/openapi/.openapi-generator/FILES

open-api/typescript-sdk/build
mobile/android/fastlane/report.xml
mobile/ios/fastlane/report.xml
mobile/ios/fastlane/report.xml
14  .vscode/settings.json  vendored
@@ -1,16 +1,6 @@
{
"editor.formatOnSave": true,
"[javascript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.tabSize": 2,
"editor.formatOnSave": true
},
"[typescript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.tabSize": 2,
"editor.formatOnSave": true
},
"[css]": {
"[javascript][typescript][css]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.tabSize": 2,
"editor.formatOnSave": true
@@ -41,4 +31,4 @@
"explorer.fileNesting.patterns": {
"*.ts": "${capture}.spec.ts,${capture}.mock.ts"
}
}
}
56  Makefile
@@ -10,6 +10,12 @@ dev-update:
dev-scale:
docker compose -f ./docker/docker-compose.dev.yml up --build -V --scale immich-server=3 --remove-orphans

stage:
docker compose -f ./docker/docker-compose.staging.yml up --build -V --remove-orphans

pull-stage:
docker compose -f ./docker/docker-compose.staging.yml pull

.PHONY: e2e
e2e:
docker compose -f ./e2e/docker-compose.yml up --build -V --remove-orphans
@@ -31,55 +37,7 @@ open-api-typescript:
cd ./open-api && bash ./bin/generate-open-api.sh typescript

sql:
npm --prefix server run sync:sql
npm --prefix server run sql:generate

attach-server:
docker exec -it docker_immich-server_1 sh

renovate:
LOG_LEVEL=debug npx renovate --platform=local --repository-cache=reset

MODULES = e2e server web cli sdk

audit-%:
npm --prefix $(subst sdk,open-api/typescript-sdk,$*) audit fix
install-%:
npm --prefix $(subst sdk,open-api/typescript-sdk,$*) i
build-cli: build-sdk
build-web: build-sdk
build-%: install-%
npm --prefix $(subst sdk,open-api/typescript-sdk,$*) run | grep 'build' >/dev/null \
&& npm --prefix $(subst sdk,open-api/typescript-sdk,$*) run build || true
format-%:
npm --prefix $(subst sdk,open-api/typescript-sdk,$*) run | grep 'format:fix' >/dev/null \
&& npm --prefix $(subst sdk,open-api/typescript-sdk,$*) run format:fix || true
lint-%:
npm --prefix $* run lint:fix
check-%:
npm --prefix $* run check
check-web:
npm --prefix web run check:typescript
npm --prefix web run check:svelte
test-%:
npm --prefix $* run test
test-e2e:
docker compose -f ./e2e/docker-compose.yml build
npm --prefix e2e run test
npm --prefix e2e run test:web

build-all: $(foreach M,$(MODULES),build-$M) ;
install-all: $(foreach M,$(MODULES),install-$M) ;
check-all: $(foreach M,$(MODULES),check-$M) ;
lint-all: $(foreach M,$(MODULES),lint-$M) ;
format-all: $(foreach M,$(MODULES),format-$M) ;
audit-all: $(foreach M,$(MODULES),audit-$M) ;
hygiene-all: lint-all format-all check-all sql audit-all;
test-all: $(foreach M,$(MODULES),test-$M) ;

clean:
find . -name "node_modules" -type d -prune -exec rm -rf '{}' +
find . -name "dist" -type d -prune -exec rm -rf '{}' +
find . -name "build" -type d -prune -exec rm -rf '{}' +
find . -name "svelte-kit" -type d -prune -exec rm -rf '{}' +
docker compose -f ./docker/docker-compose.dev.yml rm -v -f || true
docker compose -f ./e2e/docker-compose.yml rm -v -f || true
91  README.md
@@ -1,7 +1,7 @@
<p align="center">
<br/>
<a href="https://opensource.org/license/agpl-v3"><img src="https://img.shields.io/badge/License-AGPL_v3-blue.svg?color=3F51B5&style=for-the-badge&label=License&logoColor=000000&labelColor=ececec" alt="License: AGPLv3"></a>
<a href="https://discord.immich.app">
<a href="https://discord.gg/D8JsnBEuKb">
<img src="https://img.shields.io/discord/979116623879368755.svg?label=Discord&logo=Discord&style=for-the-badge&logoColor=000000&labelColor=ececec" alt="Discord"/>
</a>
<br/>
@@ -19,21 +19,19 @@
<br/>
<p align="center">

<a href="readme_i18n/README_ca_ES.md">Català</a>
<a href="readme_i18n/README_es_ES.md">Español</a>
<a href="readme_i18n/README_fr_FR.md">Français</a>
<a href="readme_i18n/README_it_IT.md">Italiano</a>
<a href="readme_i18n/README_ja_JP.md">日本語</a>
<a href="readme_i18n/README_ko_KR.md">한국어</a>
<a href="readme_i18n/README_de_DE.md">Deutsch</a>
<a href="readme_i18n/README_nl_NL.md">Nederlands</a>
<a href="readme_i18n/README_tr_TR.md">Türkçe</a>
<a href="readme_i18n/README_zh_CN.md">中文</a>
<a href="readme_i18n/README_ru_RU.md">Русский</a>
<a href="readme_i18n/README_pt_BR.md">Português Brasileiro</a>
<a href="readme_i18n/README_sv_SE.md">Svenska</a>
<a href="readme_i18n/README_ar_JO.md">العربية</a>

<a href="readme_i18n/README_ca_ES.md">Català</a>
<a href="readme_i18n/README_es_ES.md">Español</a>
<a href="readme_i18n/README_fr_FR.md">Français</a>
<a href="readme_i18n/README_it_IT.md">Italiano</a>
<a href="readme_i18n/README_ja_JP.md">日本語</a>
<a href="readme_i18n/README_ko_KR.md">한국어</a>
<a href="readme_i18n/README_de_DE.md">Deutsch</a>
<a href="readme_i18n/README_nl_NL.md">Nederlands</a>
<a href="readme_i18n/README_tr_TR.md">Türkçe</a>
<a href="readme_i18n/README_zh_CN.md">中文</a>
<a href="readme_i18n/README_ru_RU.md">Русский</a>
<a href="readme_i18n/README_pt_BR.md">Português Brasileiro</a>
<a href="readme_i18n/README_ar_JO.md">العربية</a>
</p>

## Disclaimer
@@ -43,36 +41,45 @@
- ⚠️ **Do not use the app as the only way to store your photos and videos.**
- ⚠️ Always follow [3-2-1](https://www.backblaze.com/blog/the-3-2-1-backup-strategy/) backup plan for your precious photos and videos!

> [!NOTE]
> You can find the main documentation, including installation guides, at https://immich.app/.
## Content

## Links

- [Documentation](https://immich.app/docs)
- [About](https://immich.app/docs/overview/introduction)
- [Installation](https://immich.app/docs/install/requirements)
- [Roadmap](https://immich.app/roadmap)
- [Official Documentation](https://immich.app/docs)
- [Roadmap](https://github.com/orgs/immich-app/projects/1)
- [Demo](#demo)
- [Features](#features)
- [Translations](https://immich.app/docs/developer/translations)
- [Contributing](https://immich.app/docs/overview/support-the-project)
- [Introduction](https://immich.app/docs/overview/introduction)
- [Installation](https://immich.app/docs/install/requirements)
- [Contribution Guidelines](https://immich.app/docs/overview/support-the-project)

## Documentation

You can find the main documentation, including installation guides, at https://immich.app/.

## Demo

Access the demo [here](https://demo.immich.app). The demo is running on a Free-tier Oracle VM in Amsterdam with a 2.4Ghz quad-core ARM64 CPU and 24GB RAM.
You can access the web demo at https://demo.immich.app

For the mobile app, you can use `https://demo.immich.app/api` for the `Server Endpoint URL`

### Login credentials
```bash title="Demo Credential"
The credential
email: demo@immich.app
password: demo
```

| Email | Password |
| --------------- | -------- |
| demo@immich.app | demo |
```
Spec: Free-tier Oracle VM - Amsterdam - 2.4Ghz quad-core ARM64 CPU, 24GB RAM
```

## Activities



## Features

| Features | Mobile | Web |
| :------------------------------------------- | ------ | --- |
| :--------------------------------------------- | -------- | ----- |
| Upload and view videos and photos | Yes | Yes |
| Auto backup when the app is opened | Yes | N/A |
| Prevent duplication of assets | Yes | Yes |
@@ -102,19 +109,13 @@ For the mobile app, you can use `https://demo.immich.app/api` for the `Server En
| Read-only gallery | Yes | Yes |
| Stacked Photos | Yes | Yes |

## Translations
## Contributors

Read more about translations [here](https://immich.app/docs/developer/translations).

<a href="https://hosted.weblate.org/engage/immich/">
<img src="https://hosted.weblate.org/widget/immich/immich/multi-auto.svg" alt="Translation status" />
<a href="https://github.com/alextran1502/immich/graphs/contributors">
<img src="https://contrib.rocks/image?repo=immich-app/immich" width="100%"/>
</a>

## Repository activity



## Star history
## Star History

<a href="https://star-history.com/#immich-app/immich&Date">
<picture>
@@ -123,9 +124,3 @@ Read more about translations [here](https://immich.app/docs/developer/translatio
<img alt="Star History Chart" src="https://api.star-history.com/svg?repos=immich-app/immich&type=Date" width="100%" />
</picture>
</a>

## Contributors

<a href="https://github.com/alextran1502/immich/graphs/contributors">
<img src="https://contrib.rocks/image?repo=immich-app/immich" width="100%"/>
</a>
@@ -2,4 +2,4 @@

## Reporting a Vulnerability

Please report security issues to `security@immich.app`
Please report security issues to `alex.tran1502@gmail.com`
1  base-images  Submodule
@@ -1 +1 @@
20.15
20.13
@@ -1,4 +1,4 @@
FROM node:20.15.0-alpine3.20@sha256:df01469346db2bf1cfc1f7261aeab86b2960efa840fe2bd46d83ff339f463665 as core
FROM node:20-alpine3.19@sha256:291e84d956f1aff38454bbd3da38941461ad569a185c20aa289f71f37ea08e23 as core

WORKDIR /usr/src/open-api/typescript-sdk
COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./
@@ -16,4 +16,4 @@ RUN npm run build

WORKDIR /import

ENTRYPOINT ["node", "/usr/src/app/dist"]
ENTRYPOINT ["node", "/usr/src/app/dist"]
549  cli/package-lock.json  generated
@@ -1,6 +1,6 @@
{
"name": "@immich/cli",
"version": "2.2.7",
"version": "2.2.0",
"description": "Command Line Interface (CLI) for Immich",
"type": "module",
"exports": "./dist/index.js",
@@ -18,7 +18,7 @@
"@types/cli-progress": "^3.11.0",
"@types/lodash-es": "^4.17.12",
"@types/mock-fs": "^4.13.1",
"@types/node": "^20.14.9",
"@types/node": "^20.3.1",
"@typescript-eslint/eslint-plugin": "^7.0.0",
"@typescript-eslint/parser": "^7.0.0",
"@vitest/coverage-v8": "^1.2.2",
@@ -28,7 +28,7 @@
"eslint": "^8.56.0",
"eslint-config-prettier": "^9.1.0",
"eslint-plugin-prettier": "^5.1.3",
"eslint-plugin-unicorn": "^54.0.0",
"eslint-plugin-unicorn": "^52.0.0",
"mock-fs": "^5.2.0",
"prettier": "^3.2.5",
"prettier-plugin-organize-imports": "^3.2.4",
@@ -62,6 +62,6 @@
"lodash-es": "^4.17.21"
},
"volta": {
"node": "20.15.0"
"node": "20.13.1"
}
}
@@ -1,19 +0,0 @@
import { platform } from 'node:os';
import { UploadOptionsDto, getAlbumName } from 'src/commands/asset';
import { describe, expect, it } from 'vitest';

describe('Unit function tests', () => {
it('should return a non-undefined value', () => {
if (platform() === 'win32') {
// This is meaningless for Unix systems.
expect(getAlbumName(String.raw`D:\test\Filename.txt`, {} as UploadOptionsDto)).toBe('test');
}
expect(getAlbumName('D:/parentfolder/test/Filename.txt', {} as UploadOptionsDto)).toBe('test');
});

it('has higher priority to return `albumName` in `options`', () => {
expect(getAlbumName('/parentfolder/test/Filename.txt', { albumName: 'example' } as UploadOptionsDto)).toBe(
'example',
);
});
});
@@ -1,8 +1,7 @@
import {
Action,
AssetBulkUploadCheckResult,
AssetMediaResponseDto,
AssetMediaStatus,
AssetFileUploadResponseDto,
addAssetsToAlbum,
checkBulkUpload,
createAlbum,
@@ -15,6 +14,7 @@ import { Presets, SingleBar } from 'cli-progress';
import { chunk } from 'lodash-es';
import { Stats, createReadStream } from 'node:fs';
import { stat, unlink } from 'node:fs/promises';
import os from 'node:os';
import path, { basename } from 'node:path';
import { BaseOptions, authenticate, crawl, sha1 } from 'src/utils';

@@ -24,7 +24,7 @@ const s = (count: number) => (count === 1 ? '' : 's');
type AssetBulkUploadCheckResults = Array<AssetBulkUploadCheckResult & { id: string }>;
type Asset = { id: string; filepath: string };

export interface UploadOptionsDto {
interface UploadOptionsDto {
recursive?: boolean;
ignore?: string;
dryRun?: boolean;
@@ -167,7 +167,7 @@ const uploadFiles = async (files: string[], { dryRun, concurrency }: UploadOptio

newAssets.push({ id: response.id, filepath });

if (response.status === AssetMediaStatus.Duplicate) {
if (response.duplicate) {
duplicateCount++;
duplicateSize += stats.size ?? 0;
} else {
@@ -192,7 +192,7 @@ const uploadFiles = async (files: string[], { dryRun, concurrency }: UploadOptio
return newAssets;
};

const uploadFile = async (input: string, stats: Stats): Promise<AssetMediaResponseDto> => {
const uploadFile = async (input: string, stats: Stats): Promise<AssetFileUploadResponseDto> => {
const { baseUrl, headers } = defaults;

const assetPath = path.parse(input);
@@ -225,7 +225,7 @@ const uploadFile = async (input: string, stats: Stats): Promise<AssetMediaRespon
formData.append('sidecarData', sidecarData);
}

const response = await fetch(`${baseUrl}/assets`, {
const response = await fetch(`${baseUrl}/asset/upload`, {
method: 'post',
redirect: 'error',
headers: headers as Record<string, string>,
@@ -345,9 +345,7 @@ const updateAlbums = async (assets: Asset[], options: UploadOptionsDto) => {
}
};

// `filepath` valid format:
// - Windows: `D:\\test\\Filename.txt` or `D:/test/Filename.txt`
// - Unix: `/test/Filename.txt`
export const getAlbumName = (filepath: string, options: UploadOptionsDto) => {
return options.albumName ?? path.basename(path.dirname(filepath));
const getAlbumName = (filepath: string, options: UploadOptionsDto) => {
const folderName = os.platform() === 'win32' ? filepath.split('\\').at(-2) : filepath.split('/').at(-2);
return options.albumName ?? folderName;
};
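Editor's note: the hunk above swaps two implementations of the CLI's `getAlbumName` helper — one derives the album name with `path.basename(path.dirname(filepath))`, the other splits on a platform-dependent separator. A minimal TypeScript sketch of the two variants side by side; the `UploadOptionsDto` stub here is a hypothetical reduction to the single field the helper reads.

```typescript
import os from 'node:os';
import path from 'node:path';

// Hypothetical minimal stand-in for the CLI's UploadOptionsDto.
interface UploadOptionsDto {
  albumName?: string;
}

// Variant on one side of the hunk: path-module based, separator-agnostic.
const getAlbumNameByPath = (filepath: string, options: UploadOptionsDto) =>
  options.albumName ?? path.basename(path.dirname(filepath));

// Variant on the other side: manual split on the platform separator.
const getAlbumNameBySplit = (filepath: string, options: UploadOptionsDto) => {
  const folderName = os.platform() === 'win32' ? filepath.split('\\').at(-2) : filepath.split('/').at(-2);
  return options.albumName ?? folderName;
};

// Both resolve the parent folder name; on Windows the path-based variant also
// accepts forward-slash paths (e.g. 'D:/test/Filename.txt' -> 'test'), which
// the split-based variant does not.
console.log(getAlbumNameByPath('D:/test/Filename.txt', {})); // 'test'
console.log(getAlbumNameBySplit('/parentfolder/test/Filename.txt', {})); // 'test'
```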
@@ -1,4 +1,4 @@
import { getMyUser } from '@immich/sdk';
import { getMyUserInfo } from '@immich/sdk';
import { existsSync } from 'node:fs';
import { mkdir, unlink } from 'node:fs/promises';
import { BaseOptions, connect, getAuthFilePath, logError, withError, writeAuthFile } from 'src/utils';
@@ -10,13 +10,13 @@ export const login = async (url: string, key: string, options: BaseOptions) => {

await connect(url, key);

const [error, user] = await withError(getMyUser());
const [error, userInfo] = await withError(getMyUserInfo());
if (error) {
logError(error, 'Failed to load user info');
process.exit(1);
}

console.log(`Logged in as ${user.email}`);
console.log(`Logged in as ${userInfo.email}`);

if (!existsSync(configDir)) {
// Create config folder if it doesn't exist
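Editor's note: the login hunk renames the SDK call (`getMyUserInfo` vs `getMyUser`) but keeps the `[error, value]` tuple pattern from `withError`. The real `withError` lives in `src/utils` and is not part of this diff; the sketch below is a hypothetical version showing the shape the hunk assumes.

```typescript
// Hypothetical sketch of the tuple-style error wrapper the login command uses.
const withError = async <T>(promise: Promise<T>): Promise<[Error | undefined, T | undefined]> => {
  try {
    return [undefined, await promise];
  } catch (error) {
    return [error as Error, undefined];
  }
};

// Usage mirroring the hunk: destructure the error first, bail early, then use the value.
const demo = async (getMyUser: () => Promise<{ email: string }>) => {
  const [error, user] = await withError(getMyUser());
  if (error) {
    console.error('Failed to load user info');
    return;
  }
  console.log(`Logged in as ${user?.email}`);
};
```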
@@ -1,4 +1,4 @@
import { getAssetStatistics, getMyUser, getServerVersion, getSupportedMediaTypes } from '@immich/sdk';
import { getAssetStatistics, getMyUserInfo, getServerVersion, getSupportedMediaTypes } from '@immich/sdk';
import { BaseOptions, authenticate } from 'src/utils';

export const serverInfo = async (options: BaseOptions) => {
@@ -8,7 +8,7 @@ export const serverInfo = async (options: BaseOptions) => {
getServerVersion(),
getSupportedMediaTypes(),
getAssetStatistics({}),
getMyUser(),
getMyUserInfo(),
]);

console.log(`Server Info (via ${userInfo.email})`);
@@ -1,5 +1,4 @@
import mockfs from 'mock-fs';
import { readFileSync } from 'node:fs';
import { CrawlOptions, crawl } from 'src/utils';

interface Test {
@@ -10,10 +9,6 @@ interface Test {

const cwd = process.cwd();

const readContent = (path: string) => {
return readFileSync(path).toString();
};

const extensions = [
'.jpg',
'.jpeg',
@@ -261,8 +256,7 @@ const tests: Test[] = [
{
test: 'should support ignoring absolute paths',
options: {
// Currently, fast-glob has some caveat when dealing with `/`.
pathsToCrawl: ['/*s'],
pathsToCrawl: ['/'],
recursive: true,
exclusionPattern: '/images/**',
},
@@ -282,16 +276,14 @@ describe('crawl', () => {
describe('crawl', () => {
for (const { test, options, files } of tests) {
it(test, async () => {
// The file contents is the same as the path.
mockfs(Object.fromEntries(Object.keys(files).map((file) => [file, file])));
mockfs(Object.fromEntries(Object.keys(files).map((file) => [file, ''])));

const actual = await crawl({ ...options, extensions });
const expected = Object.entries(files)
.filter((entry) => entry[1])
.map(([file]) => file);

// Compare file's content instead of path since a file can be represent in multiple ways.
expect(actual.map((path) => readContent(path)).sort()).toEqual(expected.sort());
expect(actual.sort()).toEqual(expected.sort());
});
}
});
@@ -1,9 +1,8 @@
import { getMyUser, init, isHttpError } from '@immich/sdk';
import { convertPathToPattern, glob } from 'fast-glob';
import { defaults, getMyUserInfo, isHttpError } from '@immich/sdk';
import { glob } from 'fast-glob';
import { createHash } from 'node:crypto';
import { createReadStream } from 'node:fs';
import { readFile, stat, writeFile } from 'node:fs/promises';
import { platform } from 'node:os';
import { join, resolve } from 'node:path';
import yaml from 'yaml';

@@ -47,9 +46,10 @@ export const connect = async (url: string, key: string) => {
// noop
}

init({ baseUrl: url, apiKey: key });
defaults.baseUrl = url;
defaults.headers = { 'x-api-key': key };

const [error] = await withError(getMyUser());
const [error] = await withError(getMyUserInfo());
if (isHttpError(error)) {
logError(error, 'Failed to connect to server');
process.exit(1);
@@ -107,11 +107,6 @@ export interface CrawlOptions {
exclusionPattern?: string;
extensions: string[];
}

const convertPathToPatternOnWin = (path: string) => {
return platform() === 'win32' ? convertPathToPattern(path) : path;
};

export const crawl = async (options: CrawlOptions): Promise<string[]> => {
const { extensions: extensionsWithPeriod, recursive, pathsToCrawl, exclusionPattern, includeHidden } = options;
const extensions = extensionsWithPeriod.map((extension) => extension.replace('.', ''));
@@ -130,11 +125,11 @@ export const crawl = async (options: CrawlOptions): Promise<string[]> => {
if (stats.isFile() || stats.isSymbolicLink()) {
crawledFiles.push(absolutePath);
} else {
patterns.push(convertPathToPatternOnWin(absolutePath));
patterns.push(absolutePath);
}
} catch (error: any) {
if (error.code === 'ENOENT') {
patterns.push(convertPathToPatternOnWin(currentPath));
patterns.push(currentPath);
} else {
throw error;
}
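Editor's note: one side of the utils.ts hunks routes directory paths through `convertPathToPatternOnWin`, which wraps fast-glob's `convertPathToPattern` so Windows backslash paths become valid glob patterns before being pushed into the pattern list; the other side passes raw paths. A short sketch of that helper in isolation, assuming a fast-glob version that exports `convertPathToPattern` and `glob` (as the diff's own import line does); the `listImages` wrapper and its extension list are illustrative only.

```typescript
import { convertPathToPattern, glob } from 'fast-glob';
import { platform } from 'node:os';

// Helper as shown in the hunk: only Windows paths need conversion, because
// backslashes would otherwise be interpreted as glob escape characters.
const convertPathToPatternOnWin = (path: string) => {
  return platform() === 'win32' ? convertPathToPattern(path) : path;
};

// Illustrative use: turn a directory into a recursive glob for a few image types.
const listImages = async (directory: string): Promise<string[]> => {
  const base = convertPathToPatternOnWin(directory);
  return glob(`${base}/**/*.{jpg,jpeg,png}`, { caseSensitiveMatch: false });
};
```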
37  cli/src/version.ts  Normal file
@@ -0,0 +1,37 @@
import { version } from '../package.json';

export interface ICliVersion {
major: number;
minor: number;
patch: number;
}

export class CliVersion implements ICliVersion {
constructor(
public readonly major: number,
public readonly minor: number,
public readonly patch: number,
) {}

toString() {
return `${this.major}.${this.minor}.${this.patch}`;
}

toJSON() {
const { major, minor, patch } = this;
return { major, minor, patch };
}

static fromString(version: string): CliVersion {
const regex = /v?(?<major>\d+)\.(?<minor>\d+)\.(?<patch>\d+)/i;
const matchResult = version.match(regex);
if (matchResult) {
const [, major, minor, patch] = matchResult.map(Number);
return new CliVersion(major, minor, patch);
} else {
throw new Error(`Invalid version format: ${version}`);
}
}
}

export const cliVersion = CliVersion.fromString(version);
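Editor's note: `cli/src/version.ts` is added on this side of the comparison. A brief usage sketch of the parser it introduces; the `src/version` import path assumes the CLI's `src` alias from its Vite/tsconfig setup.

```typescript
import { CliVersion, cliVersion } from 'src/version';

// fromString tolerates an optional leading "v" via the named-group regex.
const parsed = CliVersion.fromString('v2.2.0');
console.log(parsed.toString()); // "2.2.0"
console.log(JSON.stringify(parsed)); // {"major":2,"minor":2,"patch":0} via toJSON()

// cliVersion is pre-built from the version field of the CLI's package.json.
console.log(cliVersion.major);

// Invalid input throws, so user-supplied strings should be guarded.
try {
  CliVersion.fromString('not-a-version');
} catch (error) {
  console.error((error as Error).message); // "Invalid version format: not-a-version"
}
```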
@@ -2,7 +2,6 @@ import { defineConfig } from 'vite';
import tsconfigPaths from 'vite-tsconfig-paths';

export default defineConfig({
resolve: { alias: { src: '/src' } },
build: {
rollupOptions: {
input: 'src/index.ts',
38  deployment/.gitignore  vendored
@@ -1,38 +0,0 @@
# OpenTofu
|
||||
|
||||
# Local .terraform directories
|
||||
**/.terraform/*
|
||||
|
||||
# .tfstate files
|
||||
*.tfstate
|
||||
*.tfstate.*
|
||||
|
||||
# Crash log files
|
||||
crash.log
|
||||
crash.*.log
|
||||
|
||||
# Ignore override files as they are usually used to override resources locally and so
|
||||
# are not checked in
|
||||
override.tf
|
||||
override.tf.json
|
||||
*_override.tf
|
||||
*_override.tf.json
|
||||
|
||||
# Include override files you do wish to add to version control using negated pattern
|
||||
# !example_override.tf
|
||||
|
||||
# Include tfplan files to ignore the plan output of command: terraform plan -out=tfplan
|
||||
# example: *tfplan*
|
||||
|
||||
# Ignore CLI configuration files
|
||||
.terraformrc
|
||||
terraform.rc
|
||||
|
||||
# Terragrunt
|
||||
|
||||
# terragrunt cache directories
|
||||
**/.terragrunt-cache/*
|
||||
|
||||
# Terragrunt debug output file (when using `--terragrunt-debug` option)
|
||||
# See: https://terragrunt.gruntwork.io/docs/reference/cli-options/#terragrunt-debug
|
||||
terragrunt-debug.tfvars.json
|
||||
@@ -1,38 +0,0 @@
|
||||
# This file is maintained automatically by "tofu init".
|
||||
# Manual edits may be lost in future updates.
|
||||
|
||||
provider "registry.opentofu.org/cloudflare/cloudflare" {
|
||||
version = "4.36.0"
|
||||
constraints = "4.36.0"
|
||||
hashes = [
|
||||
"h1:00/Y+l17VV4RquGSfwDnYsGYzyf2ZmdQwUgeIzXC7eg=",
|
||||
"h1:489GpKItA/VRIUA5S4+F8MsnurGVciRvUFyIV81MJTU=",
|
||||
"h1:7cnczyKGj3+gvaJ0r5JIVWLXPbQfkHYejac76MJx+I8=",
|
||||
"h1:8rmr1PjJc14Xmor2eEvo5/WBojylt1eYdx6VbSU3Ulo=",
|
||||
"h1:HjgphNjtgny5tkcUAQoGgBdcuQ+0IyhL8yLsiBqWAP0=",
|
||||
"h1:LH3umxdBnJcAyeVoBLVn+PC0F0CzN6v9UN6lb6CqQPE=",
|
||||
"h1:Xx6WUD/zB8fM9SjkFx06Fgx2K7aGJIVvsJS2pwqALEM=",
|
||||
"h1:YizL5YN9zQ8YkSR6V/G201YrCVdnkF9EUIK4lpROWiA=",
|
||||
"h1:aPcXVGjYcCJdqvWSzc/dEjwj05LnbWZje8IanygVjcI=",
|
||||
"h1:eKCvfashdCqfDcFGXE2gq+XxAURD5SzuaQ9Brs3zLos=",
|
||||
"h1:gpKcBYkBcfn/uF1A8W7MD/OysMZW7EU4QVYvPEEnxGc=",
|
||||
"h1:kCkcxZZnkKAnMz9scUQHb19d9/l9FPOHovAyrvtA618=",
|
||||
"h1:t8mXXnICTeKqoD29uvyLFHVWMfMzTUrJuHje8lpI0zU=",
|
||||
"h1:zjzavjIdLDGRYsWd3v0HJz6ul12Cewj9RW/cqAQ4DxI=",
|
||||
"zh:02665712b3893307596b3caab99cf1f2502d5caca18e22d4b37bb535e628e102",
|
||||
"zh:1514b0d3ef62934484ac471113ee68cddec0c21e56b4f710922741fe9b6e6fdf",
|
||||
"zh:1fab4dfcecbcea13267b42e5ff05ba0692aa2dcb247b8e633fea0daf49feb156",
|
||||
"zh:24d8367295fe1f1b2be37802aecb96edf32f743364663ffe781d1bb92438395d",
|
||||
"zh:34e84e7940c99dcf65663cfd25afac22bf5c8a5ff2cd21900c67180d3a072be9",
|
||||
"zh:3d71d63204a329acf1d1de8638f2c725243cb94cf444d2d7acde54b3d1ac1696",
|
||||
"zh:57831ba88e779a762bcfa224ba9eac8bc22ef9cd70cd541d848b351e0ba6a75c",
|
||||
"zh:6407560f2e548afcb4852c91efc664627a9ee565c31a9c81fc9ea1806fca0567",
|
||||
"zh:738ddbc664d75f4859aa09444a27809bc398795a8ea8f5be8531040690287712",
|
||||
"zh:841ca2b2d78b6f8d33ec3435bc090c5e04a3a7d85c80df11227a7ea00d36f6b1",
|
||||
"zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f",
|
||||
"zh:8b3d3d63354032ab9b2403c50728e9aa4e83c7367eaad2d18794221addeafc0f",
|
||||
"zh:9e293443fe3127e488f540229983c1b9688268185f87567bb3d18e794697acd2",
|
||||
"zh:b3a22439156e46461213db183e2e89569cd2e8d7cbcfc4b9f90469090e105807",
|
||||
"zh:f430feb5d51891e84028459e57039045dea4f1f5fcf671161d8ac2d8f28763f3",
|
||||
]
|
||||
}
|
||||
@@ -1,11 +0,0 @@
|
||||
terraform {
|
||||
backend "pg" {}
|
||||
required_version = "~> 1.7"
|
||||
|
||||
required_providers {
|
||||
cloudflare = {
|
||||
source = "cloudflare/cloudflare"
|
||||
version = "4.36.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,14 +0,0 @@
|
||||
resource "cloudflare_pages_domain" "immich_app_release_domain" {
|
||||
account_id = var.cloudflare_account_id
|
||||
project_name = data.terraform_remote_state.cloudflare_account.outputs.immich_app_archive_pages_project_name
|
||||
domain = "immich.app"
|
||||
}
|
||||
|
||||
resource "cloudflare_record" "immich_app_release_domain" {
|
||||
name = "immich.app"
|
||||
proxied = true
|
||||
ttl = 1
|
||||
type = "CNAME"
|
||||
value = data.terraform_remote_state.cloudflare_immich_app_docs.outputs.immich_app_branch_pages_hostname
|
||||
zone_id = data.terraform_remote_state.cloudflare_account.outputs.immich_app_zone_id
|
||||
}
|
||||
@@ -1,3 +0,0 @@
|
||||
provider "cloudflare" {
|
||||
api_token = data.terraform_remote_state.api_keys_state.outputs.terraform_key_cloudflare_docs
|
||||
}
|
||||
@@ -1,27 +0,0 @@
|
||||
data "terraform_remote_state" "api_keys_state" {
|
||||
backend = "pg"
|
||||
|
||||
config = {
|
||||
conn_str = var.tf_state_postgres_conn_str
|
||||
schema_name = "prod_cloudflare_api_keys"
|
||||
}
|
||||
}
|
||||
|
||||
data "terraform_remote_state" "cloudflare_account" {
|
||||
backend = "pg"
|
||||
|
||||
config = {
|
||||
conn_str = var.tf_state_postgres_conn_str
|
||||
schema_name = "prod_cloudflare_account"
|
||||
}
|
||||
}
|
||||
|
||||
data "terraform_remote_state" "cloudflare_immich_app_docs" {
|
||||
backend = "pg"
|
||||
|
||||
config = {
|
||||
conn_str = var.tf_state_postgres_conn_str
|
||||
schema_name = "prod_cloudflare_immich_app_docs_${var.prefix_name}"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,20 +0,0 @@
|
||||
terraform {
|
||||
source = "."
|
||||
|
||||
extra_arguments custom_vars {
|
||||
commands = get_terraform_commands_that_need_vars()
|
||||
}
|
||||
}
|
||||
|
||||
include {
|
||||
path = find_in_parent_folders("state.hcl")
|
||||
}
|
||||
|
||||
remote_state {
|
||||
backend = "pg"
|
||||
|
||||
config = {
|
||||
conn_str = get_env("TF_STATE_POSTGRES_CONN_STR")
|
||||
schema_name = "prod_cloudflare_immich_app_docs_release"
|
||||
}
|
||||
}
|
||||
@@ -1,4 +0,0 @@
|
||||
variable "cloudflare_account_id" {}
|
||||
variable "tf_state_postgres_conn_str" {}
|
||||
|
||||
variable "prefix_name" {}
|
||||
@@ -1,38 +0,0 @@
|
||||
# This file is maintained automatically by "tofu init".
|
||||
# Manual edits may be lost in future updates.
|
||||
|
||||
provider "registry.opentofu.org/cloudflare/cloudflare" {
|
||||
version = "4.36.0"
|
||||
constraints = "4.36.0"
|
||||
hashes = [
|
||||
"h1:00/Y+l17VV4RquGSfwDnYsGYzyf2ZmdQwUgeIzXC7eg=",
|
||||
"h1:489GpKItA/VRIUA5S4+F8MsnurGVciRvUFyIV81MJTU=",
|
||||
"h1:7cnczyKGj3+gvaJ0r5JIVWLXPbQfkHYejac76MJx+I8=",
|
||||
"h1:8rmr1PjJc14Xmor2eEvo5/WBojylt1eYdx6VbSU3Ulo=",
|
||||
"h1:HjgphNjtgny5tkcUAQoGgBdcuQ+0IyhL8yLsiBqWAP0=",
|
||||
"h1:LH3umxdBnJcAyeVoBLVn+PC0F0CzN6v9UN6lb6CqQPE=",
|
||||
"h1:Xx6WUD/zB8fM9SjkFx06Fgx2K7aGJIVvsJS2pwqALEM=",
|
||||
"h1:YizL5YN9zQ8YkSR6V/G201YrCVdnkF9EUIK4lpROWiA=",
|
||||
"h1:aPcXVGjYcCJdqvWSzc/dEjwj05LnbWZje8IanygVjcI=",
|
||||
"h1:eKCvfashdCqfDcFGXE2gq+XxAURD5SzuaQ9Brs3zLos=",
|
||||
"h1:gpKcBYkBcfn/uF1A8W7MD/OysMZW7EU4QVYvPEEnxGc=",
|
||||
"h1:kCkcxZZnkKAnMz9scUQHb19d9/l9FPOHovAyrvtA618=",
|
||||
"h1:t8mXXnICTeKqoD29uvyLFHVWMfMzTUrJuHje8lpI0zU=",
|
||||
"h1:zjzavjIdLDGRYsWd3v0HJz6ul12Cewj9RW/cqAQ4DxI=",
|
||||
"zh:02665712b3893307596b3caab99cf1f2502d5caca18e22d4b37bb535e628e102",
|
||||
"zh:1514b0d3ef62934484ac471113ee68cddec0c21e56b4f710922741fe9b6e6fdf",
|
||||
"zh:1fab4dfcecbcea13267b42e5ff05ba0692aa2dcb247b8e633fea0daf49feb156",
|
||||
"zh:24d8367295fe1f1b2be37802aecb96edf32f743364663ffe781d1bb92438395d",
|
||||
"zh:34e84e7940c99dcf65663cfd25afac22bf5c8a5ff2cd21900c67180d3a072be9",
|
||||
"zh:3d71d63204a329acf1d1de8638f2c725243cb94cf444d2d7acde54b3d1ac1696",
|
||||
"zh:57831ba88e779a762bcfa224ba9eac8bc22ef9cd70cd541d848b351e0ba6a75c",
|
||||
"zh:6407560f2e548afcb4852c91efc664627a9ee565c31a9c81fc9ea1806fca0567",
|
||||
"zh:738ddbc664d75f4859aa09444a27809bc398795a8ea8f5be8531040690287712",
|
||||
"zh:841ca2b2d78b6f8d33ec3435bc090c5e04a3a7d85c80df11227a7ea00d36f6b1",
|
||||
"zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f",
|
||||
"zh:8b3d3d63354032ab9b2403c50728e9aa4e83c7367eaad2d18794221addeafc0f",
|
||||
"zh:9e293443fe3127e488f540229983c1b9688268185f87567bb3d18e794697acd2",
|
||||
"zh:b3a22439156e46461213db183e2e89569cd2e8d7cbcfc4b9f90469090e105807",
|
||||
"zh:f430feb5d51891e84028459e57039045dea4f1f5fcf671161d8ac2d8f28763f3",
|
||||
]
|
||||
}
|
||||
@@ -1,11 +0,0 @@
|
||||
terraform {
|
||||
backend "pg" {}
|
||||
required_version = "~> 1.7"
|
||||
|
||||
required_providers {
|
||||
cloudflare = {
|
||||
source = "cloudflare/cloudflare"
|
||||
version = "4.36.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,26 +0,0 @@
|
||||
resource "cloudflare_pages_domain" "immich_app_branch_domain" {
|
||||
account_id = var.cloudflare_account_id
|
||||
project_name = local.is_release ? data.terraform_remote_state.cloudflare_account.outputs.immich_app_archive_pages_project_name : data.terraform_remote_state.cloudflare_account.outputs.immich_app_preview_pages_project_name
|
||||
domain = "${var.prefix_name}.${local.deploy_domain_prefix}.immich.app"
|
||||
}
|
||||
|
||||
resource "cloudflare_record" "immich_app_branch_subdomain" {
|
||||
name = "${var.prefix_name}.${local.deploy_domain_prefix}.immich.app"
|
||||
proxied = true
|
||||
ttl = 1
|
||||
type = "CNAME"
|
||||
value = "${replace(var.prefix_name, "/\\/|\\./", "-")}.${local.is_release ? data.terraform_remote_state.cloudflare_account.outputs.immich_app_archive_pages_project_subdomain : data.terraform_remote_state.cloudflare_account.outputs.immich_app_preview_pages_project_subdomain}"
|
||||
zone_id = data.terraform_remote_state.cloudflare_account.outputs.immich_app_zone_id
|
||||
}
|
||||
|
||||
output "immich_app_branch_subdomain" {
|
||||
value = cloudflare_record.immich_app_branch_subdomain.hostname
|
||||
}
|
||||
|
||||
output "immich_app_branch_pages_hostname" {
|
||||
value = cloudflare_record.immich_app_branch_subdomain.value
|
||||
}
|
||||
|
||||
output "pages_project_name" {
|
||||
value = cloudflare_pages_domain.immich_app_branch_domain.project_name
|
||||
}
|
||||
@@ -1,7 +0,0 @@
|
||||
locals {
|
||||
domain_name = "immich.app"
|
||||
preview_prefix = contains(["branch", "pr"], var.prefix_event_type) ? "preview" : ""
|
||||
archive_prefix = contains(["release"], var.prefix_event_type) ? "archive" : ""
|
||||
deploy_domain_prefix = coalesce(local.preview_prefix, local.archive_prefix)
|
||||
is_release = contains(["release"], var.prefix_event_type)
|
||||
}
|
||||
@@ -1,3 +0,0 @@
|
||||
provider "cloudflare" {
|
||||
api_token = data.terraform_remote_state.api_keys_state.outputs.terraform_key_cloudflare_docs
|
||||
}
|
||||
@@ -1,17 +0,0 @@
|
||||
data "terraform_remote_state" "api_keys_state" {
|
||||
backend = "pg"
|
||||
|
||||
config = {
|
||||
conn_str = var.tf_state_postgres_conn_str
|
||||
schema_name = "prod_cloudflare_api_keys"
|
||||
}
|
||||
}
|
||||
|
||||
data "terraform_remote_state" "cloudflare_account" {
|
||||
backend = "pg"
|
||||
|
||||
config = {
|
||||
conn_str = var.tf_state_postgres_conn_str
|
||||
schema_name = "prod_cloudflare_account"
|
||||
}
|
||||
}
|
||||
@@ -1,24 +0,0 @@
|
||||
terraform {
|
||||
source = "."
|
||||
|
||||
extra_arguments custom_vars {
|
||||
commands = get_terraform_commands_that_need_vars()
|
||||
}
|
||||
}
|
||||
|
||||
include {
|
||||
path = find_in_parent_folders("state.hcl")
|
||||
}
|
||||
|
||||
locals {
|
||||
prefix_name = get_env("TF_VAR_prefix_name")
|
||||
}
|
||||
|
||||
remote_state {
|
||||
backend = "pg"
|
||||
|
||||
config = {
|
||||
conn_str = get_env("TF_STATE_POSTGRES_CONN_STR")
|
||||
schema_name = "prod_cloudflare_immich_app_docs_${local.prefix_name}"
|
||||
}
|
||||
}
|
||||
@@ -1,5 +0,0 @@
|
||||
variable "cloudflare_account_id" {}
|
||||
variable "tf_state_postgres_conn_str" {}
|
||||
|
||||
variable "prefix_name" {}
|
||||
variable "prefix_event_type" {}
|
||||
@@ -1,20 +0,0 @@
|
||||
locals {
|
||||
cloudflare_account_id = get_env("CLOUDFLARE_ACCOUNT_ID")
|
||||
cloudflare_api_token = get_env("CLOUDFLARE_API_TOKEN")
|
||||
|
||||
tf_state_postgres_conn_str = get_env("TF_STATE_POSTGRES_CONN_STR")
|
||||
}
|
||||
|
||||
remote_state {
|
||||
backend = "pg"
|
||||
|
||||
config = {
|
||||
conn_str = local.tf_state_postgres_conn_str
|
||||
}
|
||||
}
|
||||
|
||||
inputs = {
|
||||
cloudflare_account_id = local.cloudflare_account_id
|
||||
cloudflare_api_token = local.cloudflare_api_token
|
||||
tf_state_postgres_conn_str = local.tf_state_postgres_conn_str
|
||||
}
|
||||
@@ -4,42 +4,32 @@
|
||||
|
||||
name: immich-dev
|
||||
|
||||
x-server-build: &server-common
|
||||
image: immich-server-dev:latest
|
||||
build:
|
||||
context: ../
|
||||
dockerfile: server/Dockerfile
|
||||
target: dev
|
||||
restart: always
|
||||
volumes:
|
||||
- ../server:/usr/src/app
|
||||
- ../open-api:/usr/src/open-api
|
||||
- ${UPLOAD_LOCATION}/photos:/usr/src/app/upload
|
||||
- ${UPLOAD_LOCATION}/photos/upload:/usr/src/app/upload/upload
|
||||
- /usr/src/app/node_modules
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
env_file:
|
||||
- .env
|
||||
ulimits:
|
||||
nofile:
|
||||
soft: 1048576
|
||||
hard: 1048576
|
||||
|
||||
services:
|
||||
immich-server:
|
||||
container_name: immich_server
|
||||
command: ['/usr/src/app/bin/immich-dev']
|
||||
image: immich-server-dev:latest
|
||||
# extends:
|
||||
# file: hwaccel.transcoding.yml
|
||||
# service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
|
||||
build:
|
||||
context: ../
|
||||
dockerfile: server/Dockerfile
|
||||
target: dev
|
||||
restart: always
|
||||
volumes:
|
||||
- ../server:/usr/src/app
|
||||
- ../open-api:/usr/src/open-api
|
||||
- ${UPLOAD_LOCATION}/photos:/usr/src/app/upload
|
||||
- ${UPLOAD_LOCATION}/photos/upload:/usr/src/app/upload/upload
|
||||
- /usr/src/app/node_modules
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
env_file:
|
||||
- .env
|
||||
environment:
|
||||
IMMICH_REPOSITORY: immich-app/immich
|
||||
IMMICH_REPOSITORY_URL: https://github.com/immich-app/immich
|
||||
IMMICH_SOURCE_REF: local
|
||||
IMMICH_SOURCE_COMMIT: af2efbdbbddc27cd06142f22253ccbbbbeec1f55
|
||||
IMMICH_SOURCE_URL: https://github.com/immich-app/immich/commit/af2efbdbbddc27cd06142f22253ccbbbbeec1f55
|
||||
IMMICH_BUILD: '9654404849'
|
||||
IMMICH_BUILD_URL: https://github.com/immich-app/immich/actions/runs/9654404849
|
||||
IMMICH_BUILD_IMAGE: development
|
||||
IMMICH_BUILD_IMAGE_URL: https://github.com/immich-app/immich/pkgs/container/immich-server
|
||||
ulimits:
|
||||
nofile:
|
||||
soft: 1048576
|
||||
hard: 1048576
|
||||
command: ['/usr/src/app/bin/immich-dev', 'immich']
|
||||
<<: *server-common
|
||||
ports:
|
||||
- 3001:3001
|
||||
- 9230:9230
|
||||
@@ -47,6 +37,19 @@ services:
|
||||
- redis
|
||||
- database
|
||||
|
||||
immich-microservices:
|
||||
container_name: immich_microservices
|
||||
command: ['/usr/src/app/bin/immich-dev', 'microservices']
|
||||
<<: *server-common
|
||||
# extends:
|
||||
# file: hwaccel.transcoding.yml
|
||||
# service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
|
||||
ports:
|
||||
- 9231:9230
|
||||
depends_on:
|
||||
- database
|
||||
- immich-server
|
||||
|
||||
immich-web:
|
||||
container_name: immich_web
|
||||
image: immich-web-dev:latest
|
||||
@@ -94,9 +97,7 @@ services:
|
||||
|
||||
redis:
|
||||
container_name: immich_redis
|
||||
image: redis:6.2-alpine@sha256:328fe6a5822256d065debb36617a8169dbfbd77b797c525288e465f56c1d392b
|
||||
healthcheck:
|
||||
test: redis-cli ping || exit 1
|
||||
image: redis:6.2-alpine@sha256:84882e87b54734154586e5f8abd4dce69fe7311315e2fc6d67c29614c8de2672
|
||||
|
||||
database:
|
||||
container_name: immich_postgres
|
||||
@@ -112,27 +113,7 @@ services:
|
||||
- ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
|
||||
ports:
|
||||
- 5432:5432
|
||||
healthcheck:
|
||||
test: pg_isready --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' || exit 1; Chksum="$$(psql --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' --tuples-only --no-align --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
|
||||
interval: 5m
|
||||
start_interval: 30s
|
||||
start_period: 5m
|
||||
command:
|
||||
[
|
||||
'postgres',
|
||||
'-c',
|
||||
'shared_preload_libraries=vectors.so',
|
||||
'-c',
|
||||
'search_path="$$user", public, vectors',
|
||||
'-c',
|
||||
'logging_collector=on',
|
||||
'-c',
|
||||
'max_wal_size=2GB',
|
||||
'-c',
|
||||
'shared_buffers=512MB',
|
||||
'-c',
|
||||
'wal_compression=on',
|
||||
]
|
||||
command: ["postgres", "-c" ,"shared_preload_libraries=vectors.so", "-c", 'search_path="$$user", public, vectors', "-c", "logging_collector=on", "-c", "max_wal_size=2GB", "-c", "shared_buffers=512MB", "-c", "wal_compression=on"]
|
||||
|
||||
# set IMMICH_METRICS=true in .env to enable metrics
|
||||
# immich-prometheus:
|
||||
|
||||
@@ -1,26 +1,39 @@
|
||||
name: immich-prod
|
||||
|
||||
x-server-build: &server-common
|
||||
image: immich-server:latest
|
||||
build:
|
||||
context: ../
|
||||
dockerfile: server/Dockerfile
|
||||
volumes:
|
||||
- ${UPLOAD_LOCATION}/photos:/usr/src/app/upload
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
env_file:
|
||||
- .env
|
||||
restart: always
|
||||
|
||||
services:
|
||||
immich-server:
|
||||
container_name: immich_server
|
||||
image: immich-server:latest
|
||||
# extends:
|
||||
# file: hwaccel.transcoding.yml
|
||||
# service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
|
||||
build:
|
||||
context: ../
|
||||
dockerfile: server/Dockerfile
|
||||
volumes:
|
||||
- ${UPLOAD_LOCATION}/photos:/usr/src/app/upload
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
env_file:
|
||||
- .env
|
||||
command: ['start.sh', 'immich']
|
||||
<<: *server-common
|
||||
ports:
|
||||
- 2283:3001
|
||||
depends_on:
|
||||
- redis
|
||||
- database
|
||||
restart: always
|
||||
|
||||
immich-microservices:
|
||||
container_name: immich_microservices
|
||||
command: ['start.sh', 'microservices']
|
||||
<<: *server-common
|
||||
# extends:
|
||||
# file: hwaccel.transcoding.yml
|
||||
# service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
|
||||
depends_on:
|
||||
- redis
|
||||
- database
|
||||
- immich-server
|
||||
|
||||
immich-machine-learning:
|
||||
container_name: immich_machine_learning
|
||||
@@ -41,9 +54,7 @@ services:
|
||||
|
||||
redis:
|
||||
container_name: immich_redis
|
||||
image: redis:6.2-alpine@sha256:328fe6a5822256d065debb36617a8169dbfbd77b797c525288e465f56c1d392b
|
||||
healthcheck:
|
||||
test: redis-cli ping || exit 1
|
||||
image: redis:6.2-alpine@sha256:84882e87b54734154586e5f8abd4dce69fe7311315e2fc6d67c29614c8de2672
|
||||
restart: always
|
||||
|
||||
database:
|
||||
@@ -60,20 +71,14 @@ services:
|
||||
- ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
|
||||
ports:
|
||||
- 5432:5432
|
||||
healthcheck:
|
||||
test: pg_isready --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' || exit 1; Chksum="$$(psql --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' --tuples-only --no-align --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
|
||||
interval: 5m
|
||||
start_interval: 30s
|
||||
start_period: 5m
|
||||
command: ["postgres", "-c" ,"shared_preload_libraries=vectors.so", "-c", 'search_path="$$user", public, vectors', "-c", "logging_collector=on", "-c", "max_wal_size=2GB", "-c", "shared_buffers=512MB", "-c", "wal_compression=on"]
|
||||
restart: always
|
||||
|
||||
# set IMMICH_METRICS=true in .env to enable metrics
|
||||
immich-prometheus:
|
||||
container_name: immich_prometheus
|
||||
ports:
|
||||
- 9090:9090
|
||||
image: prom/prometheus@sha256:075b1ba2c4ebb04bc3a6ab86c06ec8d8099f8fda1c96ef6d104d9bb1def1d8bc
|
||||
image: prom/prometheus@sha256:5c435642ca4d8427ca26f4901c11114023004709037880cd7860d5b7176aa731
|
||||
volumes:
|
||||
- ./prometheus.yml:/etc/prometheus/prometheus.yml
|
||||
- prometheus-data:/prometheus
|
||||
@@ -85,7 +90,7 @@ services:
|
||||
command: ['./run.sh', '-disable-reporting']
|
||||
ports:
|
||||
- 3000:3000
|
||||
image: grafana/grafana:11.1.0-ubuntu@sha256:c7fc29ec783d5e7fc1bdfaad6f92345a345cffbc5d21c388ca228175006fc107
|
||||
image: grafana/grafana:10.4.2-ubuntu@sha256:4f55071b556fb03f12b41423c98a185ed6695ed9ff2558e35805f0dd765fd958
|
||||
volumes:
|
||||
- grafana-data:/var/lib/grafana
|
||||
|
||||
|
||||
@@ -12,9 +12,7 @@ services:
|
||||
immich-server:
|
||||
container_name: immich_server
|
||||
image: ghcr.io/immich-app/immich-server:${IMMICH_VERSION:-release}
|
||||
# extends:
|
||||
# file: hwaccel.transcoding.yml
|
||||
# service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
|
||||
command: ['start.sh', 'immich']
|
||||
volumes:
|
||||
- ${UPLOAD_LOCATION}:/usr/src/app/upload
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
@@ -27,6 +25,23 @@ services:
|
||||
- database
|
||||
restart: always
|
||||
|
||||
immich-microservices:
|
||||
container_name: immich_microservices
|
||||
image: ghcr.io/immich-app/immich-server:${IMMICH_VERSION:-release}
|
||||
# extends: # uncomment this section for hardware acceleration - see https://immich.app/docs/features/hardware-transcoding
|
||||
# file: hwaccel.transcoding.yml
|
||||
# service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
|
||||
command: ['start.sh', 'microservices']
|
||||
volumes:
|
||||
- ${UPLOAD_LOCATION}:/usr/src/app/upload
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
env_file:
|
||||
- .env
|
||||
depends_on:
|
||||
- redis
|
||||
- database
|
||||
restart: always
|
||||
|
||||
immich-machine-learning:
|
||||
container_name: immich_machine_learning
|
||||
# For hardware acceleration, add one of -[armnn, cuda, openvino] to the image tag.
|
||||
@@ -43,14 +58,12 @@ services:
|
||||
|
||||
redis:
|
||||
container_name: immich_redis
|
||||
image: docker.io/redis:6.2-alpine@sha256:328fe6a5822256d065debb36617a8169dbfbd77b797c525288e465f56c1d392b
|
||||
healthcheck:
|
||||
test: redis-cli ping || exit 1
|
||||
image: registry.hub.docker.com/library/redis:6.2-alpine@sha256:84882e87b54734154586e5f8abd4dce69fe7311315e2fc6d67c29614c8de2672
|
||||
restart: always
|
||||
|
||||
database:
|
||||
container_name: immich_postgres
|
||||
image: docker.io/tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
|
||||
image: registry.hub.docker.com/tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
|
||||
environment:
|
||||
POSTGRES_PASSWORD: ${DB_PASSWORD}
|
||||
POSTGRES_USER: ${DB_USERNAME}
|
||||
@@ -58,13 +71,8 @@ services:
|
||||
POSTGRES_INITDB_ARGS: '--data-checksums'
|
||||
volumes:
|
||||
- ${DB_DATA_LOCATION}:/var/lib/postgresql/data
|
||||
healthcheck:
|
||||
test: pg_isready --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' || exit 1; Chksum="$$(psql --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' --tuples-only --no-align --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
|
||||
interval: 5m
|
||||
start_interval: 30s
|
||||
start_period: 5m
|
||||
command: ["postgres", "-c" ,"shared_preload_libraries=vectors.so", "-c", 'search_path="$$user", public, vectors', "-c", "logging_collector=on", "-c", "max_wal_size=2GB", "-c", "shared_buffers=512MB", "-c", "wal_compression=on"]
|
||||
restart: always
|
||||
command: ["postgres", "-c" ,"shared_preload_libraries=vectors.so", "-c", 'search_path="$$user", public, vectors', "-c", "logging_collector=on", "-c", "max_wal_size=2GB", "-c", "shared_buffers=512MB", "-c", "wal_compression=on"]
|
||||
|
||||
volumes:
|
||||
model-cache:
|
||||
|
||||
@@ -5,9 +5,6 @@ UPLOAD_LOCATION=./library
|
||||
# The location where your database files are stored
|
||||
DB_DATA_LOCATION=./postgres
|
||||
|
||||
# To set a timezone, uncomment the next line and change Etc/UTC to a TZ identifier from this list: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones#List
|
||||
# TZ=Etc/UTC
|
||||
|
||||
# The Immich version to use. You can pin this to a specific version like "v1.71.0"
|
||||
IMMICH_VERSION=release
|
||||
|
||||
|
||||
@@ -10,6 +10,9 @@ services:
|
||||
cpu: {}
|
||||
|
||||
nvenc:
|
||||
runtime: nvidia
|
||||
environment:
|
||||
- DISPLAY:$DISPLAY
|
||||
deploy:
|
||||
resources:
|
||||
reservations:
|
||||
@@ -20,6 +23,8 @@ services:
|
||||
- gpu
|
||||
- compute
|
||||
- video
|
||||
- display
|
||||
- graphics
|
||||
|
||||
quicksync:
|
||||
devices:
|
||||
|
||||
@@ -3,10 +3,10 @@ global:
|
||||
evaluation_interval: 15s
|
||||
|
||||
scrape_configs:
|
||||
- job_name: immich_api
|
||||
- job_name: immich_server
|
||||
static_configs:
|
||||
- targets: ['immich-server:8081']
|
||||
|
||||
|
||||
- job_name: immich_microservices
|
||||
static_configs:
|
||||
- targets: ['immich-server:8082']
|
||||
- targets: ['immich-microservices:8081']
|
||||
|
||||
@@ -1,49 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -eu
|
||||
|
||||
LOG_LEVEL="${IMMICH_LOG_LEVEL:='info'}"
|
||||
|
||||
logDebug() {
|
||||
if [ "$LOG_LEVEL" = "debug" ] || [ "$LOG_LEVEL" = "verbose" ]; then
|
||||
echo "DEBUG: $1" >&2
|
||||
fi
|
||||
}
|
||||
|
||||
if [ -f /sys/fs/cgroup/cgroup.controllers ]; then
|
||||
logDebug "cgroup v2 detected."
|
||||
if [ -f /sys/fs/cgroup/cpu.max ]; then
|
||||
read -r quota period </sys/fs/cgroup/cpu.max
|
||||
if [ "$quota" = "max" ]; then
|
||||
logDebug "No CPU limits set."
|
||||
unset quota period
|
||||
fi
|
||||
else
|
||||
logDebug "/sys/fs/cgroup/cpu.max not found."
|
||||
fi
|
||||
else
|
||||
logDebug "cgroup v1 detected."
|
||||
|
||||
if [ -f /sys/fs/cgroup/cpu/cpu.cfs_quota_us ] && [ -f /sys/fs/cgroup/cpu/cpu.cfs_period_us ]; then
|
||||
quota=$(cat /sys/fs/cgroup/cpu/cpu.cfs_quota_us)
|
||||
period=$(cat /sys/fs/cgroup/cpu/cpu.cfs_period_us)
|
||||
|
||||
if [ "$quota" = "-1" ]; then
|
||||
logDebug "No CPU limits set."
|
||||
unset quota period
|
||||
fi
|
||||
else
|
||||
logDebug "/sys/fs/cgroup/cpu/cpu.cfs_quota_us or /sys/fs/cgroup/cpu/cpu.cfs_period_us not found."
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -n "${quota:-}" ] && [ -n "${period:-}" ]; then
|
||||
cpus=$((quota / period))
|
||||
if [ "$cpus" -eq 0 ]; then
|
||||
cpus=1
|
||||
fi
|
||||
else
|
||||
cpus=$(grep -c ^processor /proc/cpuinfo)
|
||||
fi
|
||||
|
||||
echo "$cpus"
|
||||
@@ -1 +1 @@
|
||||
20.15
|
||||
20.13
|
||||
|
||||
@@ -5,13 +5,13 @@ This website is built using [Docusaurus](https://docusaurus.io/), a modern stati
|
||||
### Installation
|
||||
|
||||
```
|
||||
$ npm install
|
||||
$ yarn
|
||||
```
|
||||
|
||||
### Local Development
|
||||
|
||||
```
|
||||
$ npm run start
|
||||
$ yarn start
|
||||
```
|
||||
|
||||
This command starts a local development server and opens up a browser window. Most changes are reflected live without having to restart the server.
|
||||
@@ -19,7 +19,7 @@ This command starts a local development server and opens up a browser window. Mo
|
||||
### Build
|
||||
|
||||
```
|
||||
$ npm run build
|
||||
$ yarn build
|
||||
```
|
||||
|
||||
This command generates static content into the `build` directory and can be served using any static contents hosting service.
|
||||
@@ -29,13 +29,13 @@ This command generates static content into the `build` directory and can be serv
|
||||
Using SSH:
|
||||
|
||||
```
|
||||
$ USE_SSH=true npm run deploy
|
||||
$ USE_SSH=true yarn deploy
|
||||
```
|
||||
|
||||
Not using SSH:
|
||||
|
||||
```
|
||||
$ GIT_USER=<Your GitHub username> npm run deploy
|
||||
$ GIT_USER=<Your GitHub username> yarn deploy
|
||||
```
|
||||
|
||||
If you are using GitHub pages for hosting, this command is a convenient way to build the website and push to the `gh-pages` branch.
|
||||
|
||||
@@ -94,7 +94,7 @@ Thank you, and I am asking for your support for the project. I hope to be a full
|
||||
- Bitcoin: 3QVAb9dCHutquVejeNXitPqZX26Yg5kxb7
|
||||
- Give a project a star - the contributors love gazing at the stars and seeing their creations shining in the sky.
|
||||
|
||||
Join our friendly [Discord](https://discord.immich.app) to talk and discuss Immich, tech, or anything
|
||||
Join our friendly [Discord](https://discord.gg/D8JsnBEuKb) to talk and discuss Immich, tech, or anything
|
||||
|
||||
Cheer!
|
||||
|
||||
|
||||
@@ -142,7 +142,7 @@ Thank you, and I am asking for your support for the project. I hope to be a full
|
||||
- Bitcoin: 3QVAb9dCHutquVejeNXitPqZX26Yg5kxb7
|
||||
- Give a project a star - the contributors love gazing at the stars and seeing their creations shining in the sky.
|
||||
|
||||
Join our friendly [Discord](https://discord.immich.app) to talk and discuss Immich, tech, or anything
|
||||
Join our friendly [Discord](https://discord.gg/D8JsnBEuKb) to talk and discuss Immich, tech, or anything
|
||||
|
||||
Cheer!
|
||||
|
||||
|
||||
@@ -1,77 +0,0 @@
|
||||
---
|
||||
title: Immich Update - July 2024
|
||||
authors: [alextran]
|
||||
tags: [update, v1.106.0]
|
||||
---
|
||||
|
||||
Hello everybody! Alex from Immich here and I am back with another development progress update for the project.
|
||||
|
||||
Summer has returned once again, and the night sky is filled with stars; thank you for the **38,000 shining stars** you have sent to our [GitHub repo](https://github.com/immich-app/immich)! Since the last announcement, several core contributors have started full time. Everything is going great with development: PRs get merged at a _brrrrrrr_ rate, conversation between team members is at a new high, and we have met and are working with the great engineers at FUTO. The spirit is high and we have a lot of things brewing that we think you will like.
|
||||
Let's go over some of the updates we had since the last post.
|
||||
|
||||
### Container consolidation
|
||||
|
||||
We reduced the total number of containers from 5 to 4 by spawning the microservices worker directly inside the server container. Woohoo, remember when Immich had 7 containers?
|
||||
### Email notifications
|
||||
|
||||

|
||||
|
||||
We added email notifications to the app with SMTP settings that you can configure for the following events
|
||||
|
||||
- A new account is created for you.
|
||||
- You are added to a shared album.
|
||||
- New media is added to an album.
|
||||
|
||||
### Versioned docs
|
||||
|
||||
You can now jump back into the past or take a peek at the unreleased version of the documentation by selecting the version on the website.
|
||||
|
||||

|
||||
|
||||
### Similarity deduplication
|
||||
|
||||
With more machine learning and CLIP magic, we now have similarity deduplication built into the application: it searches for closely similar images and lets you decide what to do with them, i.e. keep or trash.
|
||||

|
||||
|
||||
### Permanent URL for asset on the web
|
||||
|
||||
The detail view for an asset now has a permanent URL, so you can easily share it with your loved ones.
|
||||
### Web app translations
|
||||
|
||||
We now have a public Weblate project which the community can use to translate the web app to their native languages. We are planning to port the mobile app translation to this platform as well. If you would like to contribute, you can take a look [here](https://hosted.weblate.org/projects/immich/immich/). We're already close to 50% translated -- we really appreciate everyone contributing to that!
|
||||

|
||||
|
||||
### Read-only/Editor mode on shared album
|
||||
|
||||
As the owner of the album, you can choose whether a shared user can edit the album or only view its content without any modification.
|
||||

|
||||
|
||||
### Better video thumbnails
|
||||
|
||||
Immich now tries to find a descriptive video thumbnail instead of simply using the first frame. No more black images for thumbnails!
|
||||
|
||||
### Public Roadmap
|
||||
|
||||
We now have a [public roadmap](https://immich.app/roadmap), giving you a high-level overview of things the team is working on. The first goal of this roadmap is to bring Immich to a stable release, which is expected sometime later this year. Some of the highlights include
|
||||
|
||||
- Auto stacking - Auto stacking of burst photos
|
||||
- Basic editor - Basic photo editing capabilities
|
||||
- Workflows - Automate tasks with workflows
|
||||
- Fine grained access controls - Granular access controls for users and api keys
|
||||
- Better background backups - Rework background backups to be more reliable
|
||||
- Private/locked photos - Private assets with extra protections
|
||||
|
||||
Beyond the items in the roadmap, we have _many many_ more ideas for Immich. The team and I hope that you are enjoying the application and finding it helpful in your life; we have nothing but the intention of building out great software for you all!
|
||||
Have an amazing Summer or Winter for those in the southern hemisphere! :D
|
||||
|
||||
Until next time,
|
||||
|
||||
Cheers!
|
||||
Alex
|
||||
@@ -67,7 +67,7 @@ Yes, with an [External Library](/docs/features/libraries.md).
|
||||
|
||||
### What happens to existing files after I choose a new [Storage Template](/docs/administration/storage-template.mdx)?
|
||||
|
||||
Template changes will only apply to _new_ assets. To retroactively apply the template to previously uploaded assets, run the Storage Migration Job, available on the [Jobs](/docs/administration/jobs-workers/#jobs) page.
|
||||
Template changes will only apply to _new_ assets. To retroactively apply the template to previously uploaded assets, run the Storage Migration Job, available on the [Jobs](/docs/administration/jobs.md) page.
|
||||
|
||||
### Why are only photos and not videos being uploaded to Immich?
|
||||
|
||||
@@ -133,6 +133,40 @@ For example, say you have existing transcodes with the policy "Videos higher tha
|
||||
|
||||
No. Our design principle is that the original assets should always be untouched.
|
||||
|
||||
### How can I move all data (photos, persons, albums, libraries) from one user to another?
|
||||
|
||||
This is not officially supported but can be accomplished with some database updates. You can do this on the command line (in the PostgreSQL container using the `psql` command), or you can add, for example, an [Adminer](https://www.adminer.org/) container to the `docker-compose.yml` file so that you can use a web interface.
|
||||
|
||||
<details>
|
||||
<summary>Steps</summary>
|
||||
|
||||
1. **MAKE A BACKUP** - See [backup and restore](/docs/administration/backup-and-restore.md).
|
||||
|
||||
2. Find the ID of both the 'source' and the 'destination' user (it's the id column in the `users` table)
|
||||
|
||||
3. Four tables need to be updated:
|
||||
|
||||
```sql
|
||||
BEGIN;
|
||||
-- reassign albums
|
||||
UPDATE albums SET "ownerId" = '<destinationId>' WHERE "ownerId" = '<sourceId>';
|
||||
|
||||
-- reassign people
|
||||
UPDATE person SET "ownerId" = '<destinationId>' WHERE "ownerId" = '<sourceId>';
|
||||
|
||||
-- reassign assets
|
||||
UPDATE assets SET "ownerId" = '<destinationId>' WHERE "ownerId" = '<sourceId>'
|
||||
AND CHECKSUM NOT IN (SELECT CHECKSUM FROM assets WHERE "ownerId" = '<destinationId>');
|
||||
|
||||
-- reassign external libraries
|
||||
UPDATE libraries SET "ownerId" = '<destinationId>' WHERE "ownerId" = '<sourceId>';
|
||||
COMMIT;
|
||||
```
|
||||
|
||||
4. There might be left-over assets in the 'source' user's library if they were skipped by the assets query because of duplicate checksums. These are most likely duplicates and can usually be removed; a query for finding them is sketched after this section.
|
||||
</details>
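For step 4, a rough sketch of a query to find those left-over assets (not an official Immich query; the `originalPath` column name is assumed here for illustration):

```sql
-- Assets still owned by the source user whose checksum already exists for the
-- destination user, i.e. the likely duplicates skipped by the reassignment above.
SELECT a."id", a."originalPath"
FROM assets a
WHERE a."ownerId" = '<sourceId>'
  AND a."checksum" IN (SELECT "checksum" FROM assets WHERE "ownerId" = '<destinationId>');
```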
|
||||
|
||||
---
|
||||
|
||||
## Albums
|
||||
@@ -408,11 +442,4 @@ docker exec -it immich_postgres psql --dbname=immich --username=<DB_USERNAME> --
|
||||
|
||||
</details>
|
||||
|
||||
If corruption is detected, you should immediately make a backup before performing any other work in the database.
To do so, you may need to set the `zero_damaged_pages=on` flag for the database server to allow `pg_dumpall` to succeed.
After taking a backup, the recommended next step is to restore the database from a healthy backup taken before corruption was detected.
The damaged database dump can be used to manually recover any changes made since the last backup, if needed.

The causes of possible corruption are many, but can include unexpected poweroffs or unmounts, use of a network share for Postgres data, or a poor storage medium such as an SD card or a failing HDD/SSD.
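As a rough sketch of taking that backup (assuming the default `immich_postgres` container name and `postgres` superuser from the example compose files; adjust to your setup):

```bash
# Let the dump skip over damaged pages, then reload the server configuration.
docker exec -it immich_postgres psql --username=postgres --command="ALTER SYSTEM SET zero_damaged_pages = on;"
docker exec -it immich_postgres psql --username=postgres --command="SELECT pg_reload_conf();"

# Dump everything to a file on the host for later manual recovery.
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres > damaged_db_dump.sql
```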
[huggingface]: https://huggingface.co/immich-app
|
||||
|
||||
@@ -76,7 +76,6 @@ services:
|
||||
backup:
|
||||
container_name: immich_db_dumper
|
||||
image: prodrigestivill/postgres-backup-local:14
|
||||
restart: always
|
||||
env_file:
|
||||
- .env
|
||||
environment:
|
||||
@@ -192,6 +191,6 @@ When you turn off the storage template engine, it will leave the assets in `UPLO
|
||||
</Tabs>
|
||||
|
||||
:::danger
|
||||
Do not touch the files inside these folders under any circumstances except taking a backup. Changing or removing an asset can cause untracked and missing files.
|
||||
Do not touch the files inside these folders under any circumstances except taking a backup, changing or removing an asset can cause untracked and missing files.
|
||||
You can think of it as the App-Which-Must-Not-Be-Named; the only way to view, change, or delete assets is through the mobile or browser interface.
:::
|
||||
|
||||
@@ -1,23 +0,0 @@
|
||||
# Email Notifications
|
||||
|
||||
Immich supports the option to send notifications via Email for the following events:
|
||||
|
||||
- Creating a new user
|
||||
- Notifying a user when they get added to a shared album
|
||||
- Informing other users about the addition of new assets to a shared album
|
||||
|
||||
## SMTP settings
|
||||
|
||||
You can access the settings panel from the web at `Administration -> Settings -> Notification settings`
|
||||
|
||||
Under Email, enter the following details to connect with SMTP servers.
|
||||
|
||||
You can use the following [guide](/docs/guides/smtp-gmail) to use Gmail's SMTP server.
|
||||
|
||||
<img src={require('./img/email-settings.png').default} width="80%" title="SMTP settings" />
|
||||
|
||||
## User's notifications settings
|
||||
|
||||
Users can manage their email notification settings from their account settings page on the web. They can choose to turn email notifications on or off for the following events:
|
||||
|
||||
<img src={require('./img/user-notifications-settings.png').default} width="80%" title="User notification settings" />
|
||||
|
(binary) docs/docs/administration/img/oauth-settings.png added (57 KiB); six other administration images removed (55 KiB, 218 KiB, 11 KiB, 18 KiB, 2.7 KiB, 109 KiB)
@@ -1,55 +0,0 @@
|
||||
# Jobs and Workers
|
||||
|
||||
## Workers
|
||||
|
||||
### Architecture
|
||||
|
||||
The `immich-server` container contains multiple workers:
|
||||
|
||||
- `api`: responds to API requests for data and files for the web and mobile app.
|
||||
- `microservices`: handles most other work, such as thumbnail generation and video encoding, in the form of _jobs_. Simply put, a job is a request to process data in the background.
|
||||
|
||||
## Split workers
|
||||
|
||||
If you prefer to throttle or distribute the workers, you can do this using the [environment variables](/docs/install/environment-variables) to specify which container should pick up which tasks.
|
||||
|
||||
For example, for a simple setup with one container for the Web/API and one for all other microservices, you can do the following:
|
||||
|
||||
Copy the entire `immich-server` block as a new service and make the following changes to the **copy**:
|
||||
|
||||
```diff
|
||||
- immich-server:
|
||||
- container_name: immich_server
|
||||
...
|
||||
- ports:
|
||||
- - 2283:3001
|
||||
+ immich-microservices:
|
||||
+ container_name: immich_microservices
|
||||
```
|
||||
|
||||
Once you have two copies of the immich-server service, make the following changes to each one. This will allow one container to only serve the web UI and API, and the other one to handle all other tasks.
|
||||
|
||||
```diff
|
||||
services:
|
||||
immich-server:
|
||||
...
|
||||
+ environment:
|
||||
+ IMMICH_WORKERS_INCLUDE: 'api'
|
||||
|
||||
immich-microservices:
|
||||
...
|
||||
+ environment:
|
||||
+ IMMICH_WORKERS_EXCLUDE: 'api'
|
||||
```
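Put together, the relevant parts of the two services might end up looking like the sketch below. This is only an illustration assembled from the diffs above; image, port, and container names follow the example compose file, and any keys not shown stay as they were.

```yaml
services:
  immich-server:
    container_name: immich_server
    image: ghcr.io/immich-app/immich-server:${IMMICH_VERSION:-release}
    environment:
      IMMICH_WORKERS_INCLUDE: 'api'
    ports:
      - 2283:3001

  immich-microservices:
    container_name: immich_microservices
    image: ghcr.io/immich-app/immich-server:${IMMICH_VERSION:-release}
    environment:
      IMMICH_WORKERS_EXCLUDE: 'api'
```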
|
||||
|
||||
## Jobs
|
||||
|
||||
When a new asset is uploaded it kicks off a series of jobs, which include metadata extraction, thumbnail generation, machine learning tasks, and storage template migration, if enabled. To view the status of a job navigate to the Administration -> Jobs page.
|
||||
|
||||
Additionally, some jobs run on a schedule, which is every night at midnight. This schedule, with the exception of [External Libraries](/docs/features/libraries) scanning, cannot be changed.
|
||||
|
||||
:::info
|
||||
Storage Migration job can be run after changing the [Storage Template](/docs/administration/storage-template.mdx), in order to apply the change to the existing library.
|
||||
:::
|
||||
|
||||
<img src={require('./img/admin-jobs.png').default} width="80%" title="Admin jobs" />
|
||||
docs/docs/administration/jobs.md (new file, 13 lines)
@@ -0,0 +1,13 @@
# Jobs
|
||||
|
||||
The `immich-server` responds to API requests for data and files for the web and mobile app. To do this quickly and reliably, it offloads most other work to `immich-microservices` in the form of _jobs_. Simply put, a job is a request to process data in the background. Jobs are picked up automatically by microservices containers.
|
||||
|
||||
When a new asset is uploaded it kicks off a series of jobs, which include metadata extraction, thumbnail generation, machine learning tasks, and storage template migration, if enabled. To view the status of a job navigate to the Administration -> Jobs page.
|
||||
|
||||
Additionally, some jobs run on a schedule, which is every night at midnight. This schedule, with the exception of [External Libraries](/docs/features/libraries) scanning, cannot be changed.
|
||||
|
||||
:::info
|
||||
Storage Migration job can be run after changing the [Storage Template](/docs/administration/storage-template.mdx), in order to apply the change to the existing library.
|
||||
:::
|
||||
|
||||
<img src={require('./img/admin-jobs.png').default} width="80%" title="Admin jobs" />
|
||||
@@ -110,66 +110,8 @@ Immich has a route (`/api/oauth/mobile-redirect`) that is already configured to
|
||||
|
||||
## Example Configuration
|
||||
|
||||
<details>
|
||||
<summary>Authentik Example</summary>
|
||||
|
||||
### Authentik Example
|
||||
|
||||
Here's an example of OAuth configured for Authentik:
|
||||
|
||||
Configuration of Authorised redirect URIs (Authentik OAuth2/OpenID Provider)
|
||||
|
||||
<img src={require('./img/authentik-redirect-uris-example.webp').default} width='70%' title="Authentik authorised redirect URIs" />
|
||||
|
||||
Configuration of OAuth in Immich System Settings
|
||||
|
||||
| Setting | Value |
|
||||
| ---------------------------- | ---------------------------------------------------------------------------------- |
|
||||
| Issuer URL | `https://example.immich.app/application/o/immich/.well-known/openid-configuration` |
|
||||
| Client ID | AFCj2rM1f4rps**\*\*\*\***\***\*\*\*\***lCLEum6hH9... |
|
||||
| Client Secret | 0v89FXkQOWO\***\*\*\*\*\***\*\*\***\*\*\*\*\***mprbvXD549HH6s1iw... |
|
||||
| Scope | openid email profile |
|
||||
| Signing Algorithm | RS256 |
|
||||
| Storage Label Claim | preferred_username |
|
||||
| Storage Quota Claim | immich_quota |
|
||||
| Default Storage Quota (GiB) | 0 (0 for unlimited quota) |
|
||||
| Button Text | Sign in with Authentik (optional) |
|
||||
| Auto Register | Enabled (optional) |
|
||||
| Auto Launch | Enabled (optional) |
|
||||
| Mobile Redirect URI Override | Disable |
|
||||
| Mobile Redirect URI | |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Google Example</summary>
|
||||
|
||||
### Google Example
|
||||
|
||||
Here's an example of OAuth configured for Google:
|
||||
|
||||
Configuration of Authorised redirect URIs (Google Console)
|
||||
|
||||
<img src={require('./img/google-redirect-uris-example.webp').default} width='50%' title="Google authorised redirect URIs" />
|
||||
|
||||
Configuration of OAuth in Immich System Settings
|
||||
|
||||
| Setting | Value |
|
||||
| ---------------------------- | ------------------------------------------------------------------------------------------------------ |
|
||||
| Issuer URL | [https://accounts.google.com](https://accounts.google.com) |
|
||||
| Client ID | 7\***\*\*\*\*\*\*\***\*\*\***\*\*\*\*\*\*\***vuls.apps.googleusercontent.com |
|
||||
| Client Secret | G\***\*\*\*\*\*\*\***\*\*\***\*\*\*\*\*\*\***OO |
|
||||
| Scope | openid email profile |
|
||||
| Signing Algorithm | RS256 |
|
||||
| Storage Label Claim | preferred_username |
|
||||
| Storage Quota Claim | immich_quota |
|
||||
| Default Storage Quota (GiB) | 0 (0 for unlimited quota) |
|
||||
| Button Text | Sign in with Google (optional) |
|
||||
| Auto Register | Enabled (optional) |
|
||||
| Auto Launch | Enabled |
|
||||
| Mobile Redirect URI Override | Enabled (required) |
|
||||
| Mobile Redirect URI | [https://demo.immich.app/api/oauth/mobile-redirect](https://demo.immich.app/api/oauth/mobile-redirect) |
|
||||
|
||||
</details>
|
||||

|
||||
|
||||
[oidc]: https://openid.net/connect/
|
||||
|
||||
@@ -18,7 +18,7 @@ In any other situation, there are 3 different options that can appear:
|
||||
|
||||
- MATCHES - These files are matched by their checksums.
|
||||
|
||||
- OFFLINE PATHS - These files are the result of manually deleting files from Immich or a failed file move in the past (losing track of a file).
- OFFLINE PATHS - These files are the result of manually deleting files in the upload library or a failed file move in the past (losing track of a file).
|
||||
|
||||
- UNTRACKED FILES - These files are not tracked by the application. They can be the result of failed moves, interrupted uploads, or left behind due to a bug.
|
||||
|
||||
|
||||
@@ -77,6 +77,7 @@ immich-admin list-users
|
||||
deletedAt: null,
|
||||
updatedAt: 2023-09-21T15:42:28.129Z,
|
||||
oauthId: '',
|
||||
memoriesEnabled: true
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
@@ -10,59 +10,6 @@ Viewing and modifying the system settings is restricted to the Administrator.
|
||||
You can always return to the default settings by clicking the `Reset to default` button.
|
||||
:::
|
||||
|
||||
## Authentication Settings
|
||||
|
||||
Manage password, OAuth, and other authentication settings
|
||||
|
||||
### OAuth Authentication
|
||||
|
||||
Immich supports OAuth Authentication. Read more about this feature and its configuration [here](/docs/administration/oauth).
|
||||
|
||||
### Password Authentication
|
||||
|
||||
The administrator can choose to disable login with username and password for the entire instance. This means that **no one**, including the system administrator, will be able to log in using this method. If [OAuth Authentication](/docs/administration/oauth) is also disabled, no users will be able to log in using **any** method. Changing this setting does not affect existing sessions, just new login attempts.
|
||||
:::tip
|
||||
You can always use the [Server CLI](/docs/administration/server-commands) to re-enable password login.
|
||||
:::
|
||||
|
||||
## Image Settings (thumbnails and previews)
|
||||
|
||||
- Thumbnails - Used in the main timeline.
|
||||
- Previews - Used in the asset viewer.
|
||||
|
||||
By default, Immich creates 3 images for each asset:
Blurred (thumbhash), Small - thumbnails (webp), and Large - previews (jpeg/webp). Using these settings, you can change the quality of the thumbnail and preview files that are created.
|
||||
|
||||
**Thumbnail format**
|
||||
Allows you to choose the format for Thumbnail images. WebP produces smaller files than JPEG, but is slower to encode.
|
||||
:::tip
|
||||
You can read in detail about the advantages and disadvantages of using webp over jpeg on [Adobe's website](https://www.adobe.com/creativecloud/file-types/image/raster/webp-file.html)
|
||||
:::
|
||||
|
||||
**Thumbnail resolution**
|
||||
Used when viewing groups of photos (main timeline, album view, etc.). Higher resolutions can preserve more detail but take longer to encode, have larger file sizes, and can reduce app responsiveness.
|
||||
|
||||
**Preview format**
|
||||
Allows you to choose the format for Preview images. WebP produces smaller files than JPEG, but is slower to encode.
|
||||
**Preview resolution**
|
||||
Used when viewing a single photo and for machine learning. Higher resolutions can preserve more detail but take longer to encode, have larger file sizes, and can reduce app responsiveness.
|
||||
|
||||
**Quality**
|
||||
Image quality from 1-100. Higher values give better quality but produce larger files; this option affects both the Preview and Thumbnail images.
|
||||
**Prefer wide gamut**
|
||||
Use Display P3 for thumbnails. This better preserves the vibrance of images with wide colorspaces, but images may appear differently on old devices with an old browser version. sRGB images are kept as sRGB to avoid color shifts.
|
||||
|
||||
**Prefer embedded preview**
|
||||
Use embedded previews in RAW photos as the input to image processing when available. This can produce more accurate colors for some images, but the quality of the preview is camera-dependent and the image may have more compression artifacts.
|
||||
|
||||
:::tip
|
||||
The default resolution for Large thumbnails can be lowered from 1440p (default) to 1080p or 720p to save storage space.
|
||||
:::
|
||||
|
||||
## Job Settings
|
||||
|
||||
Using these settings, you can determine the amount of work that will run concurrently for each task in microservices. Some tasks can be set to higher values on computers with powerful hardware and storage with good I/O capabilities.
|
||||
@@ -72,11 +19,6 @@ this advice improves throughput, not latency, for example, it will make Smart Se
|
||||
|
||||
It is important to remember that jobs like Smart Search, Face Detection, Facial Recognition, and Transcode Videos require a **lot** of processing power, so do not set the concurrency too high or you will likely overload the server.
|
||||
:::danger IMPORTANT
|
||||
If you increase the concurrency from the defaults we set, especially for thumbnail generation, make sure you do not increase them past the amount of CPU cores you have available.
|
||||
Doing so can impact API responsiveness with no gain in thumbnail generation speed.
|
||||
:::
|
||||
|
||||
:::info Facial Recognition Concurrency
|
||||
The Facial Recognition Concurrency value cannot be changed because
|
||||
[DBSCAN](https://www.youtube.com/watch?v=RDZUdRSDOok) is traditionally sequential, but there are parallel implementations of it out there. Our implementation isn't parallel.
|
||||
@@ -145,9 +87,17 @@ The map can be adjusted via [OpenMapTiles](https://openmaptiles.org/styles/) for
|
||||
|
||||
Immich supports [Reverse Geocoding](/docs/features/reverse-geocoding) using data from the [GeoNames](https://www.geonames.org/) geographical database.
|
||||
|
||||
## Notification Settings
|
||||
## OAuth Authentication
|
||||
|
||||
SMTP server setup for user creation notifications, new albums, etc. More information can be found [here](/docs/administration/email-notification).
Immich supports OAuth Authentication. Read more about this feature and its configuration [here](/docs/administration/oauth).
|
||||
|
||||
## Password Authentication
|
||||
|
||||
The administrator can choose to disable login with username and password for the entire instance. This means that **no one**, including the system administrator, will be able to log in using this method. If [OAuth Authentication](/docs/administration/oauth) is also disabled, no users will be able to log in using **any** method. Changing this setting does not affect existing sessions, just new login attempts.
|
||||
:::tip
|
||||
You can always use the [Server CLI](/docs/administration/server-commands) to re-enable password login.
|
||||
:::
|
||||
|
||||
## Server Settings
|
||||
|
||||
@@ -175,6 +125,27 @@ p {
|
||||
}
|
||||
```
|
||||
|
||||
## Thumbnail Settings
|
||||
|
||||
By default, Immich creates 3 thumbnails for each asset:
Blurred (thumbhash), Small (webp), and Large (jpeg). Using these settings, you can change the quality of the thumbnail files that are created.
|
||||
|
||||
**Small thumbnail resolution**
|
||||
Used when viewing groups of photos (main timeline, album view, etc.). Higher resolutions can preserve more detail but take longer to encode, have larger file sizes, and can reduce app responsiveness.
|
||||
|
||||
**Large thumbnail resolution**
|
||||
Used when viewing a single photo and for machine learning. Higher resolutions can preserve more detail but take longer to encode, have larger file sizes, and can reduce app responsiveness.
|
||||
|
||||
**Quality**
|
||||
Thumbnail quality from 1-100. Higher is better for quality but produces larger files.
|
||||
|
||||
**Prefer wide gamut**
|
||||
Use Display P3 for thumbnails. This better preserves the vibrance of images with wide color spaces, but images may appear differently on old devices with an old browser version. sRGB images are kept as sRGB to avoid color shifts.
|
||||
:::tip
|
||||
The default resolution for Large thumbnails can be lowered from 1440p (default) to 1080p or 720p to save storage space.
|
||||
:::
|
||||
|
||||
## Trash Settings
|
||||
|
||||
The system administrator can enable a trash for deleted files; deleted files remain in the trash for 30 days (default) or for the period defined by the system administrator.
|
||||
@@ -13,20 +13,6 @@ Immich supports multiple users, each with their own library.
|
||||
|
||||
<UserCreate />
|
||||
|
||||
## Send new user email notification
|
||||
|
||||
:::note
|
||||
This option is only available if an SMTP server has been configured in the administrator settings.
|
||||
:::
|
||||
|
||||
The admin can send a welcome email if the Email option is set; see the notification settings documentation for how to set up the SMTP server in Immich.
|
||||
<img
|
||||
src={require('./img/send-user-email-notification.webp').default}
|
||||
width="40%"
|
||||
title="Send user email notification"
|
||||
/>
|
||||
|
||||
## Set Storage Quota For User
|
||||
|
||||
The instance admin can specify a storage quota for each user; once the limit is reached, the user won't be able to upload to the instance anymore.
|
||||
@@ -80,7 +80,7 @@ The Immich Microservices image uses the same `Dockerfile` as the Immich Server,
|
||||
- Background jobs (file deletion, user deletion)
|
||||
|
||||
:::info
|
||||
This list closely matches what is available on the [Administration > Jobs](/docs/administration/jobs-workers/#jobs) page, which provides some remote queue management capabilities.
|
||||
This list closely matches what is available on the [Administration > Jobs](/docs/administration/jobs.md) page, which provides some remote queue management capabilities.
|
||||
:::
|
||||
|
||||
### Machine Learning
|
||||
|
||||
@@ -1,21 +0,0 @@
|
||||
# Translations
|
||||
|
||||
:::tip
|
||||
You can request a new language [here](https://hosted.weblate.org/new-lang/immich/immich/).
|
||||
:::
|
||||
|
||||
## Weblate
|
||||
|
||||
[Weblate](https://weblate.org/) is a "libre software web-based continuous localization system". Immich localization efforts are managed on their [hosted platform](https://hosted.weblate.org/projects/immich/immich/).
|
||||
|
||||
## International message format
|
||||
|
||||
Plurals, numbers, dates and other locale specific message formats can be handled by using the [ICU message format](https://unicode-org.github.io/icu/userguide/format_parse/messages/). Internally, this is handled by the [intl-messageformat](https://www.npmjs.com/package/intl-messageformat) library. Their [documentation](https://formatjs.io/docs/intl-messageformat/) includes common, editable examples via a "live editor" feature, which can be useful to test and debug message formats.
|
||||
|
||||
## Progress
|
||||
|
||||
Immich currently supports the following languages:
|
||||
|
||||
<a href="https://hosted.weblate.org/engage/immich/">
|
||||
<img src="https://hosted.weblate.org/widget/immich/immich/multi-auto.svg" alt="Translation status" />
|
||||
</a>
|
||||
@@ -1,7 +1,7 @@
|
||||
# Troubleshooting
|
||||
|
||||
:::tip
|
||||
A great option to get assistance with troubleshooting is to join our [Discord](https://discord.immich.app) server, where we have a dedicated channel for `#contributing`.
|
||||
A great option to get assistance with troubleshooting is to join our [Discord](https://discord.gg/D8JsnBEuKb) server, where we have a dedicated channel for `#contributing`.
|
||||
:::
|
||||
|
||||
## Known Issues
|
||||
|
||||
@@ -22,8 +22,7 @@ You do not need to redo any transcoding jobs after enabling hardware acceleratio
|
||||
- WSL2 does not support Quick Sync.
|
||||
- Raspberry Pi is currently not supported.
|
||||
- Two-pass mode is only supported for NVENC. Other APIs will ignore this setting.
|
||||
- By default, only encoding is currently hardware accelerated. This means the CPU is still used for software decoding and tone-mapping.
|
||||
- NVENC and RKMPP can be fully accelerated by enabling hardware decoding in the video transcoding settings.
|
||||
- Only encoding is currently hardware accelerated, so the CPU is still used for software decoding and tone-mapping.
|
||||
- Hardware dependent
|
||||
- Codec support varies, but H.264 and HEVC are usually supported.
|
||||
- Notably, NVIDIA and AMD GPUs do not support VP9 encoding.
|
||||
@@ -60,17 +59,16 @@ For RKMPP to work:
|
||||
#### Basic Setup
|
||||
|
||||
1. If you do not already have it, download the latest [`hwaccel.transcoding.yml`][hw-file] file and ensure it's in the same folder as the `docker-compose.yml`.
|
||||
2. In the `docker-compose.yml` under `immich-server`, uncomment the `extends` section and change `cpu` to the appropriate backend (see the sketch after this list).
|
||||
2. In the `docker-compose.yml` under `immich-microservices`, uncomment the `extends` section and change `cpu` to the appropriate backend.
|
||||
|
||||
- For VAAPI on WSL2, be sure to use `vaapi-wsl` rather than `vaapi`
|
||||
|
||||
3. Redeploy the `immich-server` container with these updated settings.
|
||||
3. Redeploy the `immich-microservices` container with these updated settings.
|
||||
4. In the Admin page under `Video transcoding settings`, change the hardware acceleration setting to the appropriate option and save.
|
||||
5. (Optional) If using a compatible backend, you may enable hardware decoding for optimal performance.
|
||||
|
||||
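As a sketch of step 2, the uncommented section might look like this when choosing, for example, Quick Sync (the `file` and `service` keys come from the commented block in the example compose file; use `immich-server` or `immich-microservices` depending on which side of this change you follow):

```yaml
  immich-server:
    extends:
      file: hwaccel.transcoding.yml
      service: quicksync
```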
#### Single Compose File
|
||||
|
||||
Some platforms, including Unraid and Portainer, do not support multiple Compose files as of writing. As an alternative, you can "inline" the relevant contents of the [`hwaccel.transcoding.yml`][hw-file] file into the `immich-server` service directly.
|
||||
Some platforms, including Unraid and Portainer, do not support multiple Compose files as of writing. As an alternative, you can "inline" the relevant contents of the [`hwaccel.transcoding.yml`][hw-file] file into the `immich-microservices` service directly.
|
||||
|
||||
For example, the `qsv` section in this file is:
|
||||
|
||||
@@ -79,22 +77,21 @@ devices:
|
||||
- /dev/dri:/dev/dri
|
||||
```
|
||||
|
||||
You can add this to the `immich-server` service instead of extending from `hwaccel.transcoding.yml`:
|
||||
You can add this to the `immich-microservices` service instead of extending from `hwaccel.transcoding.yml`:
|
||||
|
||||
```yaml
|
||||
immich-server:
|
||||
container_name: immich_server
|
||||
immich-microservices:
|
||||
container_name: immich_microservices
|
||||
image: ghcr.io/immich-app/immich-server:${IMMICH_VERSION:-release}
|
||||
# Note the lack of an `extends` section
|
||||
devices:
|
||||
- /dev/dri:/dev/dri
|
||||
command: ['start.sh', 'microservices']
|
||||
volumes:
|
||||
- ${UPLOAD_LOCATION}:/usr/src/app/upload
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
env_file:
|
||||
- .env
|
||||
ports:
|
||||
- 2283:3001
|
||||
depends_on:
|
||||
- redis
|
||||
- database
|
||||
|
||||
@@ -4,6 +4,10 @@
|
||||
|
||||
Immich supports the creation of libraries, which are top-level asset containers. Currently, there are two types of libraries: traditional upload libraries that can sync with a mobile device, and external libraries that keep up to date with files on disk. Libraries are different from albums in that an asset can belong to multiple albums but only one library, and deleting a library deletes all assets contained within. As of August 2023, this is a new feature and libraries have a lot of potential for future development beyond what is documented here. This document attempts to describe the current state of libraries.
|
||||
## The Upload Library
|
||||
|
||||
Immich comes preconfigured with an upload library for each user. All assets uploaded to Immich are added to this library. This library can be renamed, but not deleted. The upload library is the only library that can be synced with a mobile device. To prevent duplicates, no item in an upload library is allowed to have the same SHA-1 hash as another item in the same library.
|
||||
## External Libraries
|
||||
|
||||
External libraries track assets stored outside of Immich, i.e. in the file system. When an external library is scanned, Immich will read the metadata from each image or video file and create an asset in the library for it. These items will then be shown in the main timeline, and they will look and behave like any other asset, including viewing on the map, adding to albums, etc.
@@ -44,16 +48,16 @@ If the import paths are edited in a way that an external file is no longer in an
|
||||
|
||||
### Troubleshooting
|
||||
|
||||
Sometimes, an external library will not scan correctly. This can happen if Immich can't access the files. Here are some things to check:
|
||||
Sometimes, an external library will not scan correctly. This can happen if immich_server or immich_microservices can't access the files. Here are some things to check:
|
||||
|
||||
- In the docker-compose file, are the volumes mounted correctly?
|
||||
- Are the volumes also mounted to any worker containers?
|
||||
- Are the volumes identical between the `server` and `microservices` container?
|
||||
- Are the import paths set correctly, and do they match the path set in docker-compose file?
|
||||
- Make sure you don't use symlinks in your import libraries, and that you aren't linking across docker mounts.
|
||||
- Are the permissions set correctly?
|
||||
- Make sure you are using forward slashes (`/`) and not backward slashes.
|
||||
|
||||
To validate that Immich can reach your external library, start a shell inside the container. Run `docker exec -it immich_server bash` to a bash shell. If your import path is `/data/import/photos`, check it with `ls /data/import/photos`. Do the same check for the same in any microservices containers.
|
||||
To validate that Immich can reach your external library, start a shell inside the container. Run `docker exec -it immich_microservices /bin/bash` to a bash shell. If your import path is `/data/import/photos`, check it with `ls /data/import/photos`. Do the same check for the `immich_server` container. If you cannot access this directory in both the `microservices` and `server` containers, Immich won't be able to import files.
|
||||
|
||||
### Exclusion Patterns
|
||||
|
||||
@@ -98,7 +102,7 @@ First, we need to plan how we want to organize the libraries. The christmas trip
|
||||
|
||||
### Mount Docker Volumes
|
||||
|
||||
The `immich-server` container will need access to the gallery. Modify your docker compose file as follows
|
||||
`immich-server` and `immich-microservices` containers will need access to the gallery. Modify your docker compose file as follows
|
||||
|
||||
```diff title="docker-compose.yml"
|
||||
immich-server:
|
||||
@@ -107,6 +111,15 @@ The `immich-server` container will need access to the gallery. Modify your docke
|
||||
+ - /mnt/nas/christmas-trip:/mnt/media/christmas-trip:ro
|
||||
+ - /home/user/old-pics:/mnt/media/old-pics:ro
|
||||
+ - /mnt/media/videos:/mnt/media/videos:ro
|
||||
+ - "C:/Users/user_name/Desktop/my media:/mnt/media/my-media:ro" # import path in Windows system.
|
||||
|
||||
|
||||
immich-microservices:
|
||||
volumes:
|
||||
- ${UPLOAD_LOCATION}:/usr/src/app/upload
|
||||
+ - /mnt/nas/christmas-trip:/mnt/media/christmas-trip:ro
|
||||
+ - /home/user/old-pics:/mnt/media/old-pics:ro
|
||||
+ - /mnt/media/videos:/mnt/media/videos:ro
|
||||
+ - "C:/Users/user_name/Desktop/my media:/mnt/media/my-media:ro" # import path in Windows system.
|
||||
```
|
||||
|
||||
|
||||
@@ -95,7 +95,7 @@ immich-machine-learning:
|
||||
Once this is done, you can redeploy the `immich-machine-learning` container.
|
||||
|
||||
:::info
|
||||
You can confirm the device is being recognized and used by checking its utilization (via `nvtop` for CUDA, `intel_gpu_top` for OpenVINO, etc.). You can also enable debug logging by setting `IMMICH_LOG_LEVEL=debug` in the `.env` file and restarting the `immich-machine-learning` container. When a Smart Search or Face Detection job begins, you should see a log for `Available ORT providers` containing the relevant provider. In the case of ARM NN, the absence of a `Could not load ANN shared libraries` log entry means it loaded successfully.
|
||||
You can confirm the device is being recognized and used by checking its utilization (via `nvtop` for CUDA, `intel_gpu_top` for OpenVINO, etc.). You can also enable debug logging by setting `LOG_LEVEL=debug` in the `.env` file and restarting the `immich-machine-learning` container. When a Smart Search or Face Detection job begins, you should see a log for `Available ORT providers` containing the relevant provider. In the case of ARM NN, the absence of a `Could not load ANN shared libraries` log entry means it loaded successfully.
|
||||
:::
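As a rough sketch of that check (container and service names come from the example compose file; use `IMMICH_LOG_LEVEL` or `LOG_LEVEL` to match the version you are running):

```bash
# Enable debug logging for machine learning and watch for the provider log line.
echo "IMMICH_LOG_LEVEL=debug" >> .env
docker compose up -d immich-machine-learning
docker logs -f immich_machine_learning 2>&1 | grep -i "Available ORT providers"
```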
|
||||
|
||||
[hw-file]: https://github.com/immich-app/immich/releases/latest/download/hwaccel.ml.yml
|
||||
|
||||
@@ -9,7 +9,7 @@ It is important to remember to update the backup settings after following the gu
|
||||
In our `.env` file, we will define variables that will help us later, when we want to move to a more advanced server.
|
||||
```diff title=".env"
|
||||
# You can find documentation for all the supported env variables [here](/docs/install/environment-variables)
|
||||
# You can find documentation for all the supported env variables at https://immich.app/docs/install/environment-variables
|
||||
|
||||
# Custom location where your uploaded, thumbnails, and transcoded video files are stored
|
||||
- UPLOAD_LOCATION=./library
|
||||
|
||||
@@ -2,52 +2,48 @@
|
||||
|
||||
A short guide on connecting [pgAdmin](https://www.pgadmin.org/) to Immich.
|
||||
|
||||
:::note
|
||||
|
||||
In order to connect to the database the immich_postgres container **must be running**.
|
||||
|
||||
The passwords and usernames used below match the ones specified in the example `.env` file. If changed, please use actual values instead.
|
||||
|
||||
**Optional:** To connect to the database **outside** of your Docker's network:
|
||||
|
||||
- Expose port 5432 in your `docker-compose.yml` file.
|
||||
- Edit the PostgreSQL [`pg_hba.conf`](https://www.postgresql.org/docs/current/auth-pg-hba-conf.html) file.
|
||||
- Make sure your firewall does not block access to port 5432.
|
||||
Note that exposing the database port increases the risk of outside attacks.
Make sure to remove the port binding after finishing the database tasks.
|
||||
|
||||
:::
|
||||
|
||||
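If you do expose the port, a quick reachability check from another machine saves time before blaming pgAdmin. `pg_isready` ships with the PostgreSQL client tools; this is a generic check, not something this guide requires:

```bash
# Replace your-server-ip with the address of the machine running Immich
pg_isready -h your-server-ip -p 5432
# When reachable, it reports: "your-server-ip:5432 - accepting connections"
```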
## 1. Install pgAdmin
Add a file `docker-compose-pgadmin.yml` next to your `docker-compose.yml` with the following content:

```yaml
name: immich

services:
  pgadmin:
    image: dpage/pgadmin4
    container_name: pgadmin4_container
    restart: always
    ports:
      - "8888:80"
    environment:
      PGADMIN_DEFAULT_EMAIL: user-name@domain-name.com
      PGADMIN_DEFAULT_PASSWORD: strong-password
    volumes:
      - pgadmin-data:/var/lib/pgadmin

volumes:
  pgadmin-data:
```

Change the values of `PGADMIN_DEFAULT_EMAIL` and `PGADMIN_DEFAULT_PASSWORD` in this file.

Run `docker compose -f docker-compose.yml -f docker-compose-pgadmin.yml up` to start Immich along with pgAdmin.
Download and install [pgAdmin](https://www.pgadmin.org/download/) following the official documentation.
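If you would rather keep pgAdmin running in the background, the same command works detached, and you can confirm the container is up afterwards. A small sketch using the container name from the compose file above:

```bash
docker compose -f docker-compose.yml -f docker-compose-pgadmin.yml up -d
docker ps --filter name=pgadmin4_container
```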
## 2. Add a Server
Open [localhost:8888](http://localhost:8888) and log in with the default credentials from above.
Open pgAdmin and click "Add New Server".

Right-click on `Servers` and click on `Register >> Server...`, then enter the values below in the `Connection` tab.
<img src={require('./img/add-new-server-option.png').default} width="50%" title="new server option" />

<img src={require('./img/pgadmin-add-new-server.png').default} width="50%" title="new server option" />
## 3. Enter Connection Details
:::note
The parameters used here match those specified in the example `.env` file. If you have changed your `.env` file, you'll need to adjust accordingly.
:::

| Name                 | Value       |
| -------------------- | ----------- |
| Host name/address    | `localhost` |
| Port                 | `5432`      |
| Maintenance database | `immich`    |
| Username             | `postgres`  |
| Password             | `postgres`  |

| Name                 | Value             |
| -------------------- | ----------------- |
| Host name/address    | `immich_postgres` |
| Port                 | `5432`            |
| Maintenance database | `immich`          |
| Username             | `postgres`        |
| Password             | `postgres`        |

<img src={require('./img/Connection-Pgadmin.png').default} width="75%" title="Connection" />

## 4. Save Connection

Click on "Save" to connect to the Immich database.

:::tip
View [Database Queries](/docs/guides/database-queries/) for common database queries.
:::
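As a sanity check that the host name and credentials are right, you can also run a query directly against the `immich_postgres` container without going through pgAdmin. A minimal sketch, assuming the defaults from the example `.env`; the `assets` table used here is the one referenced in the Database Queries guide linked above:

```bash
# Any result (even 0) means the connection and credentials work
docker exec -it immich_postgres psql -U postgres -d immich -c 'SELECT COUNT(*) FROM "assets";'
```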
@@ -96,7 +96,7 @@ SELECT * FROM "users";
## System Config

```sql title="Custom settings"
SELECT "key", "value" FROM "system_metadata" WHERE "key" = 'system-config';
SELECT "key", "value" FROM "system_config";
```

(Only used when not using the [config file](/docs/install/config-file))
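If you prefer the command line over pgAdmin, the same query can be run through the postgres container. A minimal sketch assuming the default container name and credentials; use whichever of the two statements above matches your version:

```bash
# Print the stored system-config JSON directly from the database
docker exec -it immich_postgres psql -U postgres -d immich \
  -c "SELECT \"key\", \"value\" FROM \"system_metadata\" WHERE \"key\" = 'system-config';"
```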
@@ -11,8 +11,9 @@ docker ps -a # see a list of running and stopped containers
```bash
docker exec -it <id or name> <command> # attach to a container with a command
docker exec -it immich_server bash
docker exec -it immich_machine_learning bash
docker exec -it immich_server sh
docker exec -it immich_microservices sh
docker exec -it immich_machine_learning sh
```

## Logs

@@ -21,6 +22,7 @@ docker exec -it immich_machine_learning bash
docker logs <id or name> # see the logs for a specific container (by id or name)

docker logs immich_server
docker logs immich_microservices
docker logs immich_machine_learning
```
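When a container is chatty, it is usually enough to follow only the most recent output. These are standard Docker CLI flags rather than anything Immich-specific:

```bash
# Follow the last 100 lines of the server log as new entries arrive
docker logs --tail 100 --follow immich_server
```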
@@ -6,14 +6,20 @@ in a directory on the same machine.
# Mount the directory into the containers.

Edit `docker-compose.yml` to add two new mount points in the section `immich-server:` under `volumes:`
Edit `docker-compose.yml` to add two new mount points in the sections `immich-server:` and `immich-microservices:` under `volumes:`

```diff
 immich-server:
   volumes:
+    - ${EXTERNAL_PATH}:/usr/src/app/external

 immich-microservices:
   volumes:
+    - ${EXTERNAL_PATH}:/usr/src/app/external
```

Be sure to add exactly the same path to both services.

Edit `.env` to define `EXTERNAL_PATH`, substituting in the correct path for your computer:

```
BIN  docs/docs/guides/img/Connection-Pgadmin.png (new file, 36 KiB)
BIN  docs/docs/guides/img/add-new-server-option.png (new file, 39 KiB)
BIN  (image removed, 218 KiB)
BIN  (image removed, 17 KiB)
BIN  (image removed, 68 KiB)
@@ -32,7 +32,7 @@ def upload(file):
    }

    response = requests.post(
        f'{BASE_URL}/assets', headers=headers, data=data, files=files)
        f'{BASE_URL}/asset/upload', headers=headers, data=data, files=files)

    print(response.json())
    # {'id': 'ef96f635-61c7-4639-9e60-61a11c4bbfba', 'duplicate': False}
@@ -7,7 +7,7 @@ To alleviate [performance issues on low-memory systems](/docs/FAQ.mdx#why-is-imm
- Start the container by running `docker compose up -d`.

:::info
Starting with version v1.93.0, face detection and facial recognition were split. From now on, face detection is done in the immich_machine_learning container, but facial recognition is done in the `microservices` worker.
Starting with version v1.93.0, face detection and facial recognition were split. From now on, face detection is done in the immich_machine_learning service, but facial recognition is done in the immich_microservices service.
:::
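Before pointing the server at the remote machine, it is worth confirming the machine-learning container answers over the network. A minimal sketch, assuming the default port 3003 and that your release exposes the `/ping` health route (check your version if it does not respond):

```bash
# Run from the Immich server host; replace remote-ml-host with the ML machine's address
curl http://remote-ml-host:3003/ping
# A healthy container replies with: pong
```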
:::note

@@ -15,7 +15,7 @@ The [hwaccel.ml.yml](https://github.com/immich-app/immich/releases/latest/downlo
:::

```yaml
name: immich_remote_ml
version: '3.8'

services:
  immich-machine-learning:
@@ -1,20 +0,0 @@
# SMTP settings using Gmail

This guide walks you through how to get the information you need to set up your Immich instance to send emails using Gmail's SMTP server.

## Create an app password

From your Google account settings:

- Add [2-Step Verification](https://support.google.com/accounts/answer/185839) to your Google account (required).
- [Create an app password](https://myaccount.google.com/apppasswords).

Once the app password has been created, it will be displayed; save it, as it will be used for the password field when setting up the SMTP server in Immich.

<img src={require('./img/google-app-password.webp').default} title="Authorised redirect URIs" />

## Entering the SMTP credentials in Immich

Enter your credentials in Immich's email notification settings at `Administration -> Settings -> Notification Settings`.

<img src={require('./img/email-settings.png').default} width="80%" title="SMTP settings" />
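If sending a test email fails, it can help to first confirm the host running Immich can reach Gmail's SMTP endpoint at all. A minimal sketch, assuming the standard `smtp.gmail.com:587` STARTTLS endpoint (an assumption, not something this guide specifies):

```bash
# Check basic connectivity and STARTTLS negotiation against Gmail's SMTP server
openssl s_client -starttls smtp -connect smtp.gmail.com:587 -quiet </dev/null
```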