Compare commits

6 commits: feat/ml-ar ... apeman76/m

| Author | SHA1 | Date |
|---|---|---|
|  | 27fa817ea6 |  |
|  | 4bf2ded729 |  |
|  | fed7d0464a |  |
|  | 3ab67886b0 |  |
|  | 2b06d4b284 |  |
|  | 34bea0190e |  |
@@ -1 +1 @@
-20.15.1
+20.15
@@ -1,4 +1,4 @@
-FROM node:20.15.1-alpine3.20@sha256:34b7aa411056c85dbf71d240d26516949b3f72b318d796c26b57caaa1df5639a as core
+FROM node:20.15.0-alpine3.20@sha256:df01469346db2bf1cfc1f7261aeab86b2960efa840fe2bd46d83ff339f463665 as core
 
 WORKDIR /usr/src/open-api/typescript-sdk
 COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./
cli/package-lock.json (generated, 16 changed lines)
@@ -1,12 +1,12 @@
 {
   "name": "@immich/cli",
-  "version": "2.2.8",
+  "version": "2.2.7",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "@immich/cli",
-      "version": "2.2.8",
+      "version": "2.2.7",
       "license": "GNU Affero General Public License version 3",
       "dependencies": {
         "fast-glob": "^3.3.2",
@@ -22,7 +22,7 @@
         "@types/cli-progress": "^3.11.0",
         "@types/lodash-es": "^4.17.12",
         "@types/mock-fs": "^4.13.1",
-        "@types/node": "^20.14.10",
+        "@types/node": "^20.14.9",
         "@typescript-eslint/eslint-plugin": "^7.0.0",
         "@typescript-eslint/parser": "^7.0.0",
         "@vitest/coverage-v8": "^1.2.2",
@@ -49,14 +49,14 @@
     },
     "../open-api/typescript-sdk": {
       "name": "@immich/sdk",
-      "version": "1.108.0",
+      "version": "1.107.2",
       "dev": true,
       "license": "GNU Affero General Public License version 3",
       "dependencies": {
         "@oazapfts/runtime": "^1.0.2"
       },
       "devDependencies": {
-        "@types/node": "^20.14.10",
+        "@types/node": "^20.14.9",
         "typescript": "^5.3.3"
       }
     },
@@ -1163,9 +1163,9 @@
       }
     },
     "node_modules/@types/node": {
-      "version": "20.14.10",
-      "resolved": "https://registry.npmjs.org/@types/node/-/node-20.14.10.tgz",
-      "integrity": "sha512-MdiXf+nDuMvY0gJKxyfZ7/6UFsETO7mGKF54MVD/ekJS6HdFtpZFBgrh6Pseu64XTb2MLyFPlbW6hj8HYRQNOQ==",
+      "version": "20.14.9",
+      "resolved": "https://registry.npmjs.org/@types/node/-/node-20.14.9.tgz",
+      "integrity": "sha512-06OCtnTXtWOZBJlRApleWndH4JsRVs1pDCc8dLSQp+7PpUpX3ePdHyeNSFTeSe7FtKyQkrlPvHwJOW3SLd8Oyg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1,6 +1,6 @@
 {
   "name": "@immich/cli",
-  "version": "2.2.8",
+  "version": "2.2.7",
   "description": "Command Line Interface (CLI) for Immich",
   "type": "module",
   "exports": "./dist/index.js",
@@ -18,7 +18,7 @@
     "@types/cli-progress": "^3.11.0",
     "@types/lodash-es": "^4.17.12",
     "@types/mock-fs": "^4.13.1",
-    "@types/node": "^20.14.10",
+    "@types/node": "^20.14.9",
     "@typescript-eslint/eslint-plugin": "^7.0.0",
     "@typescript-eslint/parser": "^7.0.0",
     "@vitest/coverage-v8": "^1.2.2",
@@ -64,6 +64,6 @@
     "lodash-es": "^4.17.21"
   },
   "volta": {
-    "node": "20.15.1"
+    "node": "20.15.0"
   }
 }
@@ -73,7 +73,7 @@ services:
     container_name: immich_prometheus
     ports:
      - 9090:9090
-    image: prom/prometheus@sha256:f20d3127bf2876f4a1df76246fca576b41ddf1125ed1c546fbd8b16ea55117e6
+    image: prom/prometheus@sha256:075b1ba2c4ebb04bc3a6ab86c06ec8d8099f8fda1c96ef6d104d9bb1def1d8bc
     volumes:
       - ./prometheus.yml:/etc/prometheus/prometheus.yml
       - prometheus-data:/prometheus
@@ -1 +1 @@
-20.15.1
+20.15
@@ -4,7 +4,7 @@ This page gives a few pointers on how to access your Immich instance from outsid
 You can read the [full discussion in Discord](https://discord.com/channels/979116623879368755/1122615710846308484)
 
 :::danger
-Never forward port 2283 directly to the internet without additional configuration. This will expose the web interface via http to the internet, making you susceptible to [man in the middle](https://en.wikipedia.org/wiki/Man-in-the-middle_attack) attacks.
+Never forward port 2283 directly to the internet without additional configuration. This will expose the web interface via http to the internet, making you succeptible to [man in the middle](https://en.wikipedia.org/wiki/Man-in-the-middle_attack) attacks.
 :::
 
 ## Option 1: VPN to home network
@@ -92,7 +92,6 @@ const config = {
           alt: 'Immich Logo',
           src: 'img/immich-logo-inline-light.png',
           srcDark: 'img/immich-logo-inline-dark.png',
-          className: 'rounded-none',
         },
         items: [
           {
@@ -56,6 +56,6 @@
     "node": ">=20"
   },
   "volta": {
-    "node": "20.15.1"
+    "node": "20.15.0"
   }
 }
@@ -49,7 +49,7 @@ export function Timeline({ items }: Props): JSX.Element {
           <div className="flex flex-col flex-grow justify-between gap-2">
             <div className="flex gap-2 items-center">
               {cardIcon === 'immich' ? (
-                <img src="img/immich-logo.svg" height="30" className="rounded-none" />
+                <img src="img/immich-logo.svg" height="30" />
               ) : (
                 <Icon path={cardIcon} size={1} color={item.iconColor} />
               )}
@@ -49,7 +49,7 @@ const items: Item[] = [
     iconColor: 'greenyellow',
     title: 'JavaScript Date objects are cursed',
     description: 'JavaScript date objects are 1 indexed for years and days, but 0 indexed for months.',
-    link: { url: 'https://github.com/immich-app/immich/pull/6787', text: '#6787' },
+    link: { url: 'https://github.com/immich-app/immich/pulls/6787', text: '#6787' },
     date: new Date(2024, 0, 31),
   },
 ];
@@ -10,7 +10,7 @@ function HomepageHeader() {
     <section className="text-center m-6 p-12 border border-red-400 rounded-[50px] bg-slate-200 dark:bg-immich-dark-gray">
       <img
         src={isDarkTheme ? 'img/immich-logo-stacked-dark.svg' : 'img/immich-logo-stacked-light.svg'}
-        className="md:h-60 h-44 mb-2 antialiased rounded-none"
+        className="md:h-60 h-44 mb-2 antialiased"
        alt="Immich logo"
       />
       <div className="sm:text-2xl text-lg md:text-4xl mb-12 sm:leading-tight">
docs/static/archived-versions.json (vendored, 4 changed lines)
@@ -1,8 +1,4 @@
 [
-  {
-    "label": "v1.108.0",
-    "url": "https://v1.108.0.archive.immich.app"
-  },
   {
     "label": "v1.107.2",
     "url": "https://v1.107.2.archive.immich.app"
@@ -1 +1 @@
-20.15.1
+20.15
@@ -26,8 +26,6 @@ services:
     volumes:
       - upload:/usr/src/app/upload
       - ./test-assets:/test-assets
-    extra_hosts:
-      - 'auth-server:host-gateway'
     depends_on:
       - redis
       - database
e2e/package-lock.json (generated, 1045 changed lines)
File diff suppressed because it is too large.
@@ -1,6 +1,6 @@
 {
   "name": "immich-e2e",
-  "version": "1.108.0",
+  "version": "1.107.2",
   "description": "",
   "main": "index.js",
   "type": "module",
@@ -23,8 +23,7 @@
     "@immich/sdk": "file:../open-api/typescript-sdk",
     "@playwright/test": "^1.44.1",
     "@types/luxon": "^3.4.2",
-    "@types/node": "^20.14.10",
-    "@types/oidc-provider": "^8.5.1",
+    "@types/node": "^20.14.9",
     "@types/pg": "^8.11.0",
     "@types/pngjs": "^6.0.4",
     "@types/supertest": "^6.0.2",
@@ -36,9 +35,7 @@
     "eslint-plugin-prettier": "^5.1.3",
     "eslint-plugin-unicorn": "^54.0.0",
     "exiftool-vendored": "^27.0.0",
-    "jose": "^5.6.3",
     "luxon": "^3.4.4",
-    "oidc-provider": "^8.5.1",
     "pg": "^8.11.3",
     "pngjs": "^7.0.0",
     "prettier": "^3.2.5",
@@ -50,6 +47,6 @@
     "vitest": "^1.3.0"
   },
   "volta": {
-    "node": "20.15.1"
+    "node": "20.15.0"
   }
 }
@@ -100,12 +100,6 @@ describe('/auth/*', () => {
       expect(status).toBe(400);
       expect(body).toEqual(errorDto.badRequest());
     });
 
-      it('should reject an invalid email', async () => {
-        const { status, body } = await request(app).post('/auth/login').send({ email: [], password });
-        expect(status).toBe(400);
-        expect(body).toEqual(errorDto.invalidEmail);
-      });
-
     }
 
     it('should accept a correct password', async () => {
@@ -1,85 +1,12 @@
 import {
   LoginResponseDto,
-  SystemConfigOAuthDto,
-  getConfigDefaults,
-  getMyUser,
-  startOAuth,
-  updateConfig,
 } from '@immich/sdk';
 import { errorDto } from 'src/responses';
-import { OAuthClient, OAuthUser } from 'src/setup/auth-server';
-import { app, asBearerAuth, baseUrl, utils } from 'src/utils';
+import { app, utils } from 'src/utils';
 import request from 'supertest';
 import { beforeAll, describe, expect, it } from 'vitest';
 
-const authServer = {
-  internal: 'http://auth-server:3000',
-  external: 'http://127.0.0.1:3000',
-};
-
-const redirect = async (url: string, cookies?: string[]) => {
-  const { headers } = await request(url)
-    .get('/')
-    .set('Cookie', cookies || []);
-  return { cookies: (headers['set-cookie'] as unknown as string[]) || [], location: headers.location };
-};
-
-const loginWithOAuth = async (sub: OAuthUser | string) => {
-  const { url } = await startOAuth({ oAuthConfigDto: { redirectUri: `${baseUrl}/auth/login` } });
-
-  // login
-  const response1 = await redirect(url.replace(authServer.internal, authServer.external));
-  const response2 = await request(authServer.external + response1.location)
-    .post('/')
-    .set('Cookie', response1.cookies)
-    .type('form')
-    .send({ prompt: 'login', login: sub, password: 'password' });
-
-  // approve
-  const response3 = await redirect(response2.header.location, response1.cookies);
-  const response4 = await request(authServer.external + response3.location)
-    .post('/')
-    .type('form')
-    .set('Cookie', response3.cookies)
-    .send({ prompt: 'consent' });
-
-  const response5 = await redirect(response4.header.location, response3.cookies.slice(1));
-  const redirectUrl = response5.location;
-
-  expect(redirectUrl).toBeDefined();
-  const params = new URL(redirectUrl).searchParams;
-  expect(params.get('code')).toBeDefined();
-  expect(params.get('state')).toBeDefined();
-
-  return redirectUrl;
-};
-
-const setupOAuth = async (token: string, dto: Partial<SystemConfigOAuthDto>) => {
-  const options = { headers: asBearerAuth(token) };
-  const defaults = await getConfigDefaults(options);
-  const merged = {
-    ...defaults.oauth,
-    buttonText: 'Login with Immich',
-    issuerUrl: `${authServer.internal}/.well-known/openid-configuration`,
-    ...dto,
-  };
-  await updateConfig({ systemConfigDto: { ...defaults, oauth: merged } }, options);
-};
-
 describe(`/oauth`, () => {
   let admin: LoginResponseDto;
 
   beforeAll(async () => {
     await utils.resetDatabase();
-    admin = await utils.adminSetup();
-
-    await setupOAuth(admin.accessToken, {
-      enabled: true,
-      clientId: OAuthClient.DEFAULT,
-      clientSecret: OAuthClient.DEFAULT,
-      buttonText: 'Login with Immich',
-      storageLabelClaim: 'immich_username',
-    });
+    await utils.adminSetup();
   });
 
   describe('POST /oauth/authorize', () => {
@@ -88,171 +15,5 @@ describe(`/oauth`, () => {
       expect(status).toBe(400);
       expect(body).toEqual(errorDto.badRequest(['redirectUri must be a string', 'redirectUri should not be empty']));
     });
-
-    it('should return a redirect uri', async () => {
-      const { status, body } = await request(app)
-        .post('/oauth/authorize')
-        .send({ redirectUri: 'http://127.0.0.1:2283/auth/login' });
-      expect(status).toBe(201);
-      expect(body).toEqual({ url: expect.stringContaining(`${authServer.internal}/auth?`) });
-
-      const params = new URL(body.url).searchParams;
-      expect(params.get('client_id')).toBe('client-default');
-      expect(params.get('response_type')).toBe('code');
-      expect(params.get('redirect_uri')).toBe('http://127.0.0.1:2283/auth/login');
-      expect(params.get('state')).toBeDefined();
-    });
-  });
-
-  describe('POST /oauth/callback', () => {
-    it(`should throw an error if a url is not provided`, async () => {
-      const { status, body } = await request(app).post('/oauth/callback').send({});
-      expect(status).toBe(400);
-      expect(body).toEqual(errorDto.badRequest(['url must be a string', 'url should not be empty']));
-    });
-
-    it(`should throw an error if the url is empty`, async () => {
-      const { status, body } = await request(app).post('/oauth/callback').send({ url: '' });
-      expect(status).toBe(400);
-      expect(body).toEqual(errorDto.badRequest(['url should not be empty']));
-    });
-
-    it('should auto register the user by default', async () => {
-      const url = await loginWithOAuth('oauth-auto-register');
-      const { status, body } = await request(app).post('/oauth/callback').send({ url });
-      expect(status).toBe(201);
-      expect(body).toMatchObject({
-        accessToken: expect.any(String),
-        isAdmin: false,
-        name: 'OAuth User',
-        userEmail: 'oauth-auto-register@immich.app',
-        userId: expect.any(String),
-      });
-    });
-
-    it('should handle a user without an email', async () => {
-      const url = await loginWithOAuth(OAuthUser.NO_EMAIL);
-      const { status, body } = await request(app).post('/oauth/callback').send({ url });
-      expect(status).toBe(400);
-      expect(body).toEqual(errorDto.badRequest('OAuth profile does not have an email address'));
-    });
-
-    it('should set the quota from a claim', async () => {
-      const url = await loginWithOAuth(OAuthUser.WITH_QUOTA);
-      const { status, body } = await request(app).post('/oauth/callback').send({ url });
-      expect(status).toBe(201);
-      expect(body).toMatchObject({
-        accessToken: expect.any(String),
-        userId: expect.any(String),
-        userEmail: 'oauth-with-quota@immich.app',
-      });
-
-      const user = await getMyUser({ headers: asBearerAuth(body.accessToken) });
-      expect(user.quotaSizeInBytes).toBe(25 * 2 ** 30); // 25 GiB;
-    });
-
-    it('should set the storage label from a claim', async () => {
-      const url = await loginWithOAuth(OAuthUser.WITH_USERNAME);
-      const { status, body } = await request(app).post('/oauth/callback').send({ url });
-      expect(status).toBe(201);
-      expect(body).toMatchObject({
-        accessToken: expect.any(String),
-        userId: expect.any(String),
-        userEmail: 'oauth-with-username@immich.app',
-      });
-
-      const user = await getMyUser({ headers: asBearerAuth(body.accessToken) });
-      expect(user.storageLabel).toBe('user-username');
-    });
-
-    it('should work with RS256 signed tokens', async () => {
-      await setupOAuth(admin.accessToken, {
-        enabled: true,
-        clientId: OAuthClient.RS256_TOKENS,
-        clientSecret: OAuthClient.RS256_TOKENS,
-        autoRegister: true,
-        buttonText: 'Login with Immich',
-        signingAlgorithm: 'RS256',
-      });
-      const url = await loginWithOAuth('oauth-RS256-token');
-      const { status, body } = await request(app).post('/oauth/callback').send({ url });
-      expect(status).toBe(201);
-      expect(body).toMatchObject({
-        accessToken: expect.any(String),
-        isAdmin: false,
-        name: 'OAuth User',
-        userEmail: 'oauth-RS256-token@immich.app',
-        userId: expect.any(String),
-      });
-    });
-
-    it('should work with RS256 signed user profiles', async () => {
-      await setupOAuth(admin.accessToken, {
-        enabled: true,
-        clientId: OAuthClient.RS256_PROFILE,
-        clientSecret: OAuthClient.RS256_PROFILE,
-        buttonText: 'Login with Immich',
-        profileSigningAlgorithm: 'RS256',
-      });
-      const url = await loginWithOAuth('oauth-signed-profile');
-      const { status, body } = await request(app).post('/oauth/callback').send({ url });
-      expect(status).toBe(201);
-      expect(body).toMatchObject({
-        userId: expect.any(String),
-        userEmail: 'oauth-signed-profile@immich.app',
-      });
-    });
-
-    it('should throw an error for an invalid token algorithm', async () => {
-      await setupOAuth(admin.accessToken, {
-        enabled: true,
-        clientId: OAuthClient.DEFAULT,
-        clientSecret: OAuthClient.DEFAULT,
-        buttonText: 'Login with Immich',
-        signingAlgorithm: 'something-that-does-not-work',
-      });
-      const url = await loginWithOAuth('oauth-signed-bad');
-      const { status, body } = await request(app).post('/oauth/callback').send({ url });
-      expect(status).toBe(500);
-      expect(body).toMatchObject({
-        error: 'Internal Server Error',
-        message: 'Failed to finish oauth',
-        statusCode: 500,
-      });
-    });
-
-    describe('autoRegister: false', () => {
-      beforeAll(async () => {
-        await setupOAuth(admin.accessToken, {
-          enabled: true,
-          clientId: OAuthClient.DEFAULT,
-          clientSecret: OAuthClient.DEFAULT,
-          autoRegister: false,
-          buttonText: 'Login with Immich',
-        });
-      });
-
-      it('should not auto register the user', async () => {
-        const url = await loginWithOAuth('oauth-no-auto-register');
-        const { status, body } = await request(app).post('/oauth/callback').send({ url });
-        expect(status).toBe(400);
-        expect(body).toEqual(errorDto.badRequest('User does not exist and auto registering is disabled.'));
-      });
-
-      it('should link to an existing user by email', async () => {
-        const { userId } = await utils.userSetup(admin.accessToken, {
-          name: 'OAuth User 3',
-          email: 'oauth-user3@immich.app',
-          password: 'password',
-        });
-        const url = await loginWithOAuth('oauth-user3');
-        const { status, body } = await request(app).post('/oauth/callback').send({ url });
-        expect(status).toBe(201);
-        expect(body).toMatchObject({
-          userId,
-          userEmail: 'oauth-user3@immich.app',
-        });
-      });
-    });
-  });
   });
 });
@@ -61,12 +61,6 @@ export const errorDto = {
     message: 'The server already has an admin',
     correlationId: expect.any(String),
   },
-  invalidEmail: {
-    error: 'Bad Request',
-    statusCode: 400,
-    message: ['email must be an email'],
-    correlationId: expect.any(String),
-  },
 };
 
 export const signupResponseDto = {
@@ -1,117 +0,0 @@
-import { exportJWK, generateKeyPair } from 'jose';
-import Provider from 'oidc-provider';
-
-export enum OAuthClient {
-  DEFAULT = 'client-default',
-  RS256_TOKENS = 'client-RS256-tokens',
-  RS256_PROFILE = 'client-RS256-profile',
-}
-
-export enum OAuthUser {
-  NO_EMAIL = 'no-email',
-  NO_NAME = 'no-name',
-  WITH_QUOTA = 'with-quota',
-  WITH_USERNAME = 'with-username',
-}
-
-const claims = [
-  { sub: OAuthUser.NO_EMAIL },
-  {
-    sub: OAuthUser.NO_NAME,
-    email: 'oauth-no-name@immich.app',
-    email_verified: true,
-  },
-  {
-    sub: OAuthUser.WITH_USERNAME,
-    email: 'oauth-with-username@immich.app',
-    email_verified: true,
-    immich_username: 'user-username',
-  },
-  {
-    sub: OAuthUser.WITH_QUOTA,
-    email: 'oauth-with-quota@immich.app',
-    email_verified: true,
-    preferred_username: 'user-quota',
-    immich_quota: 25,
-  },
-];
-
-const withDefaultClaims = (sub: string) => ({
-  sub,
-  email: `${sub}@immich.app`,
-  name: 'OAuth User',
-  given_name: `OAuth`,
-  family_name: 'User',
-  email_verified: true,
-});
-
-const getClaims = (sub: string) => claims.find((user) => user.sub === sub) || withDefaultClaims(sub);
-
-const setup = async () => {
-  const { privateKey, publicKey } = await generateKeyPair('RS256');
-
-  const port = 3000;
-  const host = '0.0.0.0';
-  const oidc = new Provider(`http://${host}:${port}`, {
-    renderError: async (ctx, out, error) => {
-      console.error(out);
-      console.error(error);
-      ctx.body = 'Internal Server Error';
-    },
-    findAccount: (ctx, sub) => ({ accountId: sub, claims: () => getClaims(sub) }),
-    scopes: ['openid', 'email', 'profile'],
-    claims: {
-      openid: ['sub'],
-      email: ['email', 'email_verified'],
-      profile: ['name', 'given_name', 'family_name', 'preferred_username', 'immich_quota', 'immich_username'],
-    },
-    features: {
-      jwtUserinfo: {
-        enabled: true,
-      },
-    },
-    cookies: {
-      names: {
-        session: 'oidc.session',
-        interaction: 'oidc.interaction',
-        resume: 'oidc.resume',
-        state: 'oidc.state',
-      },
-    },
-    pkce: {
-      required: () => false,
-    },
-    jwks: { keys: [await exportJWK(privateKey)] },
-    clients: [
-      {
-        client_id: OAuthClient.DEFAULT,
-        client_secret: OAuthClient.DEFAULT,
-        redirect_uris: ['http://127.0.0.1:2283/auth/login'],
-        grant_types: ['authorization_code'],
-        response_types: ['code'],
-      },
-      {
-        client_id: OAuthClient.RS256_TOKENS,
-        client_secret: OAuthClient.RS256_TOKENS,
-        redirect_uris: ['http://127.0.0.1:2283/auth/login'],
-        grant_types: ['authorization_code'],
-        id_token_signed_response_alg: 'RS256',
-        jwks: { keys: [await exportJWK(publicKey)] },
-      },
-      {
-        client_id: OAuthClient.RS256_PROFILE,
-        client_secret: OAuthClient.RS256_PROFILE,
-        redirect_uris: ['http://127.0.0.1:2283/auth/login'],
-        grant_types: ['authorization_code'],
-        userinfo_signed_response_alg: 'RS256',
-        jwks: { keys: [await exportJWK(publicKey)] },
-      },
-    ],
-  });
-
-  const onStart = () => console.log(`[auth-server] http://${host}:${port}/.well-known/openid-configuration`);
-  const app = oidc.listen(port, host, onStart);
-  return () => app.close();
-};
-
-export default setup;
@@ -53,7 +53,8 @@ type AdminSetupOptions = { onboarding?: boolean };
 type AssetData = { bytes?: Buffer; filename: string };
 
 const dbUrl = 'postgres://postgres:postgres@127.0.0.1:5433/immich';
-export const baseUrl = 'http://127.0.0.1:2283';
+const baseUrl = 'http://127.0.0.1:2283';
+
 export const shareUrl = `${baseUrl}/share`;
 export const app = `${baseUrl}/api`;
 // TODO move test assets into e2e/assets
@@ -1,11 +1,11 @@
 import { defineConfig } from 'vitest/config';
 
 // skip `docker compose up` if `make e2e` was already run
-const globalSetup: string[] = ['src/setup/auth-server.ts'];
+const globalSetup: string[] = [];
 try {
   await fetch('http://127.0.0.1:2283/api/server-info/ping');
 } catch {
-  globalSetup.push('src/setup/docker-compose.ts');
+  globalSetup.push('src/setup.ts');
 }
 
 export default defineConfig({
machine-learning/ann/build.sh (new file, 3 lines)
@@ -0,0 +1,3 @@
+#!/usr/bin/env sh
+
+g++ -shared -O3 -o libann.so -fuse-ld=gold -std=c++17 -I"$ARMNN_PATH"/include -larmnn -larmnnDeserializer -larmnnTfLiteParser -larmnnOnnxParser -L"$ARMNN_PATH" ann.cpp
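For context, build.sh only works if `$ARMNN_PATH` points at an extracted Arm NN SDK before it runs. A minimal invocation sketch; the SDK path below is an assumed example, not something the diff specifies:

```python
# Hedged sketch: run build.sh with ARMNN_PATH set; /opt/armnn is a
# hypothetical Arm NN SDK location, not taken from the diff.
import os
import subprocess

env = {**os.environ, "ARMNN_PATH": "/opt/armnn"}  # assumed path
subprocess.run(["sh", "machine-learning/ann/build.sh"], env=env, check=True)
```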
machine-learning/ann/export/build-converter.sh (new executable file, 4 lines)
@@ -0,0 +1,4 @@
+#!/usr/bin/env sh
+
+cd armnn-23.11/ || exit
+g++ -o ../armnnconverter -O1 -DARMNN_ONNX_PARSER -DARMNN_SERIALIZER -DARMNN_TF_LITE_PARSER -fuse-ld=gold -std=c++17 -Iinclude -Isrc/armnnUtils -Ithird-party -larmnn -larmnnDeserializer -larmnnTfLiteParser -larmnnOnnxParser -larmnnSerializer -L../armnn src/armnnConverter/ArmnnConverter.cpp
machine-learning/ann/export/env.yaml (new file, 201 lines)
@@ -0,0 +1,201 @@
+name: annexport
+channels:
+  - pytorch
+  - nvidia
+  - conda-forge
+dependencies:
+  - _libgcc_mutex=0.1=conda_forge
+  - _openmp_mutex=4.5=2_kmp_llvm
+  - aiohttp=3.9.1=py310h2372a71_0
+  - aiosignal=1.3.1=pyhd8ed1ab_0
+  - arpack=3.8.0=nompi_h0baa96a_101
+  - async-timeout=4.0.3=pyhd8ed1ab_0
+  - attrs=23.1.0=pyh71513ae_1
+  - aws-c-auth=0.7.3=h28f7589_1
+  - aws-c-cal=0.6.1=hc309b26_1
+  - aws-c-common=0.9.0=hd590300_0
+  - aws-c-compression=0.2.17=h4d4d85c_2
+  - aws-c-event-stream=0.3.1=h2e3709c_4
+  - aws-c-http=0.7.11=h00aa349_4
+  - aws-c-io=0.13.32=he9a53bd_1
+  - aws-c-mqtt=0.9.3=hb447be9_1
+  - aws-c-s3=0.3.14=hf3aad02_1
+  - aws-c-sdkutils=0.1.12=h4d4d85c_1
+  - aws-checksums=0.1.17=h4d4d85c_1
+  - aws-crt-cpp=0.21.0=hb942446_5
+  - aws-sdk-cpp=1.10.57=h85b1a90_19
+  - blas=2.120=openblas
+  - blas-devel=3.9.0=20_linux64_openblas
+  - brotli-python=1.0.9=py310hd8f1fbe_9
+  - bzip2=1.0.8=hd590300_5
+  - c-ares=1.23.0=hd590300_0
+  - ca-certificates=2023.11.17=hbcca054_0
+  - certifi=2023.11.17=pyhd8ed1ab_0
+  - charset-normalizer=3.3.2=pyhd8ed1ab_0
+  - click=8.1.7=unix_pyh707e725_0
+  - colorama=0.4.6=pyhd8ed1ab_0
+  - coloredlogs=15.0.1=pyhd8ed1ab_3
+  - cuda-cudart=11.7.99=0
+  - cuda-cupti=11.7.101=0
+  - cuda-libraries=11.7.1=0
+  - cuda-nvrtc=11.7.99=0
+  - cuda-nvtx=11.7.91=0
+  - cuda-runtime=11.7.1=0
+  - dataclasses=0.8=pyhc8e2a94_3
+  - datasets=2.14.7=pyhd8ed1ab_0
+  - dill=0.3.7=pyhd8ed1ab_0
+  - filelock=3.13.1=pyhd8ed1ab_0
+  - flatbuffers=23.5.26=h59595ed_1
+  - freetype=2.12.1=h267a509_2
+  - frozenlist=1.4.0=py310h2372a71_1
+  - fsspec=2023.10.0=pyhca7485f_0
+  - ftfy=6.1.3=pyhd8ed1ab_0
+  - gflags=2.2.2=he1b5a44_1004
+  - glog=0.6.0=h6f12383_0
+  - glpk=5.0=h445213a_0
+  - gmp=6.3.0=h59595ed_0
+  - gmpy2=2.1.2=py310h3ec546c_1
+  - huggingface_hub=0.17.3=pyhd8ed1ab_0
+  - humanfriendly=10.0=pyhd8ed1ab_6
+  - icu=73.2=h59595ed_0
+  - idna=3.6=pyhd8ed1ab_0
+  - importlib-metadata=7.0.0=pyha770c72_0
+  - importlib_metadata=7.0.0=hd8ed1ab_0
+  - joblib=1.3.2=pyhd8ed1ab_0
+  - keyutils=1.6.1=h166bdaf_0
+  - krb5=1.21.2=h659d440_0
+  - lcms2=2.15=h7f713cb_2
+  - ld_impl_linux-64=2.40=h41732ed_0
+  - lerc=4.0.0=h27087fc_0
+  - libabseil=20230125.3=cxx17_h59595ed_0
+  - libarrow=12.0.1=hb87d912_8_cpu
+  - libblas=3.9.0=20_linux64_openblas
+  - libbrotlicommon=1.0.9=h166bdaf_9
+  - libbrotlidec=1.0.9=h166bdaf_9
+  - libbrotlienc=1.0.9=h166bdaf_9
+  - libcblas=3.9.0=20_linux64_openblas
+  - libcrc32c=1.1.2=h9c3ff4c_0
+  - libcublas=11.10.3.66=0
+  - libcufft=10.7.2.124=h4fbf590_0
+  - libcufile=1.8.1.2=0
+  - libcurand=10.3.4.101=0
+  - libcurl=8.5.0=hca28451_0
+  - libcusolver=11.4.0.1=0
+  - libcusparse=11.7.4.91=0
+  - libdeflate=1.19=hd590300_0
+  - libedit=3.1.20191231=he28a2e2_2
+  - libev=4.33=hd590300_2
+  - libevent=2.1.12=hf998b51_1
+  - libffi=3.4.2=h7f98852_5
+  - libgcc-ng=13.2.0=h807b86a_3
+  - libgfortran-ng=13.2.0=h69a702a_3
+  - libgfortran5=13.2.0=ha4646dd_3
+  - libgoogle-cloud=2.12.0=hac9eb74_1
+  - libgrpc=1.54.3=hb20ce57_0
+  - libhwloc=2.9.3=default_h554bfaf_1009
+  - libiconv=1.17=hd590300_1
+  - libjpeg-turbo=2.1.5.1=hd590300_1
+  - liblapack=3.9.0=20_linux64_openblas
+  - liblapacke=3.9.0=20_linux64_openblas
+  - libnghttp2=1.58.0=h47da74e_1
+  - libnpp=11.7.4.75=0
+  - libnsl=2.0.1=hd590300_0
+  - libnuma=2.0.16=h0b41bf4_1
+  - libnvjpeg=11.8.0.2=0
+  - libopenblas=0.3.25=pthreads_h413a1c8_0
+  - libpng=1.6.39=h753d276_0
+  - libprotobuf=3.21.12=hfc55251_2
+  - libsentencepiece=0.1.99=h180e1df_0
+  - libsqlite=3.44.2=h2797004_0
+  - libssh2=1.11.0=h0841786_0
+  - libstdcxx-ng=13.2.0=h7e041cc_3
+  - libthrift=0.18.1=h8fd135c_2
+  - libtiff=4.6.0=h29866fb_1
+  - libutf8proc=2.8.0=h166bdaf_0
+  - libuuid=2.38.1=h0b41bf4_0
+  - libwebp-base=1.3.2=hd590300_0
+  - libxcb=1.15=h0b41bf4_0
+  - libxml2=2.11.6=h232c23b_0
+  - libzlib=1.2.13=hd590300_5
+  - llvm-openmp=17.0.6=h4dfa4b3_0
+  - lz4-c=1.9.4=hcb278e6_0
+  - mkl=2022.2.1=h84fe81f_16997
+  - mkl-devel=2022.2.1=ha770c72_16998
+  - mkl-include=2022.2.1=h84fe81f_16997
+  - mpc=1.3.1=hfe3b2da_0
+  - mpfr=4.2.1=h9458935_0
+  - mpmath=1.3.0=pyhd8ed1ab_0
+  - multidict=6.0.4=py310h2372a71_1
+  - multiprocess=0.70.15=py310h2372a71_1
+  - ncurses=6.4=h59595ed_2
+  - numpy=1.26.2=py310hb13e2d6_0
+  - onnx=1.14.0=py310ha3deec4_1
+  - onnx2torch=1.5.13=pyhd8ed1ab_0
+  - onnxruntime=1.16.3=py310hd4b7fbc_1_cpu
+  - open-clip-torch=2.23.0=pyhd8ed1ab_1
+  - openblas=0.3.25=pthreads_h7a3da1a_0
+  - openjpeg=2.5.0=h488ebb8_3
+  - openssl=3.2.0=hd590300_1
+  - orc=1.9.0=h2f23424_1
+  - packaging=23.2=pyhd8ed1ab_0
+  - pandas=2.1.4=py310hcc13569_0
+  - pillow=10.0.1=py310h29da1c1_1
+  - pip=23.3.1=pyhd8ed1ab_0
+  - protobuf=4.21.12=py310heca2aa9_0
+  - pthread-stubs=0.4=h36c2ea0_1001
+  - pyarrow=12.0.1=py310h0576679_8_cpu
+  - pyarrow-hotfix=0.6=pyhd8ed1ab_0
+  - pysocks=1.7.1=pyha2e5f31_6
+  - python=3.10.13=hd12c33a_0_cpython
+  - python-dateutil=2.8.2=pyhd8ed1ab_0
+  - python-flatbuffers=23.5.26=pyhd8ed1ab_0
+  - python-tzdata=2023.3=pyhd8ed1ab_0
+  - python-xxhash=3.4.1=py310h2372a71_0
+  - python_abi=3.10=4_cp310
+  - pytorch=1.13.1=cpu_py310hd11e9c7_1
+  - pytorch-cuda=11.7=h778d358_5
+  - pytorch-mutex=1.0=cuda
+  - pytz=2023.3.post1=pyhd8ed1ab_0
+  - pyyaml=6.0.1=py310h2372a71_1
+  - rdma-core=28.9=h59595ed_1
+  - re2=2023.03.02=h8c504da_0
+  - readline=8.2=h8228510_1
+  - regex=2023.10.3=py310h2372a71_0
+  - requests=2.31.0=pyhd8ed1ab_0
+  - s2n=1.3.49=h06160fa_0
+  - sacremoses=0.0.53=pyhd8ed1ab_0
+  - safetensors=0.3.3=py310hcb5633a_1
+  - sentencepiece=0.1.99=hff52083_0
+  - sentencepiece-python=0.1.99=py310hebdb9f0_0
+  - sentencepiece-spm=0.1.99=h180e1df_0
+  - setuptools=68.2.2=pyhd8ed1ab_0
+  - six=1.16.0=pyh6c4a22f_0
+  - sleef=3.5.1=h9b69904_2
+  - snappy=1.1.10=h9fff704_0
+  - sympy=1.12=pypyh9d50eac_103
+  - tbb=2021.11.0=h00ab1b0_0
+  - texttable=1.7.0=pyhd8ed1ab_0
+  - timm=0.9.12=pyhd8ed1ab_0
+  - tk=8.6.13=noxft_h4845f30_101
+  - tokenizers=0.14.1=py310h320607d_2
+  - torchvision=0.14.1=cpu_py310hd3d2ac3_1
+  - tqdm=4.66.1=pyhd8ed1ab_0
+  - transformers=4.35.2=pyhd8ed1ab_0
+  - typing-extensions=4.9.0=hd8ed1ab_0
+  - typing_extensions=4.9.0=pyha770c72_0
+  - tzdata=2023c=h71feb2d_0
+  - ucx=1.14.1=h64cca9d_5
+  - urllib3=2.1.0=pyhd8ed1ab_0
+  - wcwidth=0.2.12=pyhd8ed1ab_0
+  - wheel=0.42.0=pyhd8ed1ab_0
+  - xorg-libxau=1.0.11=hd590300_0
+  - xorg-libxdmcp=1.1.3=h7f98852_0
+  - xxhash=0.8.2=hd590300_0
+  - xz=5.2.6=h166bdaf_0
+  - yaml=0.2.5=h7f98852_2
+  - yarl=1.9.3=py310h2372a71_0
+  - zipp=3.17.0=pyhd8ed1ab_0
+  - zlib=1.2.13=hd590300_5
+  - zstd=1.5.5=hfc55251_0
+  - pip:
+      - git+https://github.com/fyfrey/TinyNeuralNetwork.git
machine-learning/ann/export/run.py (new file, 157 lines)
@@ -0,0 +1,157 @@
+import logging
+import os
+import platform
+import subprocess
+from abc import abstractmethod
+
+import onnx
+import open_clip
+import torch
+from onnx2torch import convert
+from onnxruntime.tools.onnx_model_utils import fix_output_shapes, make_input_shape_fixed
+from tinynn.converter import TFLiteConverter
+
+
+class ExportBase(torch.nn.Module):
+    input_shape: tuple[int, ...]
+
+    def __init__(self, device: torch.device, name: str):
+        super().__init__()
+        self.device = device
+        self.name = name
+        self.optimize = 5
+        self.nchw_transpose = False
+
+    @abstractmethod
+    def forward(self, input_tensor: torch.Tensor) -> torch.Tensor | tuple[torch.Tensor]:
+        pass
+
+    def dummy_input(self) -> torch.FloatTensor:
+        return torch.rand((1, 3, 224, 224), device=self.device)
+
+
+class ArcFace(ExportBase):
+    input_shape = (1, 3, 112, 112)
+
+    def __init__(self, onnx_model_path: str, device: torch.device):
+        name, _ = os.path.splitext(os.path.basename(onnx_model_path))
+        super().__init__(device, name)
+        onnx_model = onnx.load_model(onnx_model_path)
+        make_input_shape_fixed(onnx_model.graph, onnx_model.graph.input[0].name, self.input_shape)
+        fix_output_shapes(onnx_model)
+        self.model = convert(onnx_model).to(device)
+        if self.device.type == "cuda":
+            self.model = self.model.half()
+
+    def forward(self, input_tensor: torch.Tensor) -> torch.FloatTensor:
+        embedding: torch.FloatTensor = self.model(
+            input_tensor.half() if self.device.type == "cuda" else input_tensor
+        ).float()
+        assert isinstance(embedding, torch.FloatTensor)
+        return embedding
+
+    def dummy_input(self) -> torch.FloatTensor:
+        return torch.rand(self.input_shape, device=self.device)
+
+
+class RetinaFace(ExportBase):
+    input_shape = (1, 3, 640, 640)
+
+    def __init__(self, onnx_model_path: str, device: torch.device):
+        name, _ = os.path.splitext(os.path.basename(onnx_model_path))
+        super().__init__(device, name)
+        self.optimize = 3
+        self.model = convert(onnx_model_path).eval().to(device)
+        if self.device.type == "cuda":
+            self.model = self.model.half()
+
+    def forward(self, input_tensor: torch.Tensor) -> tuple[torch.FloatTensor]:
+        out: torch.Tensor = self.model(input_tensor.half() if self.device.type == "cuda" else input_tensor)
+        return tuple(o.float() for o in out)
+
+    def dummy_input(self) -> torch.FloatTensor:
+        return torch.rand(self.input_shape, device=self.device)
+
+
+class ClipVision(ExportBase):
+    input_shape = (1, 3, 224, 224)
+
+    def __init__(self, model_name: str, weights: str, device: torch.device):
+        super().__init__(device, model_name + "__" + weights)
+        self.model = open_clip.create_model(
+            model_name,
+            weights,
+            precision="fp16" if device.type == "cuda" else "fp32",
+            jit=False,
+            require_pretrained=True,
+            device=device,
+        )
+
+    def forward(self, input_tensor: torch.Tensor) -> torch.FloatTensor:
+        embedding: torch.Tensor = self.model.encode_image(
+            input_tensor.half() if self.device.type == "cuda" else input_tensor,
+            normalize=True,
+        ).float()
+        return embedding
+
+
+def export(model: ExportBase) -> None:
+    model.eval()
+    for param in model.parameters():
+        param.requires_grad = False
+    dummy_input = model.dummy_input()
+    model(dummy_input)
+    jit = torch.jit.trace(model, dummy_input)  # type: ignore[no-untyped-call,attr-defined]
+    tflite_model_path = f"output/{model.name}.tflite"
+    os.makedirs("output", exist_ok=True)
+
+    converter = TFLiteConverter(
+        jit,
+        dummy_input,
+        tflite_model_path,
+        optimize=model.optimize,
+        nchw_transpose=model.nchw_transpose,
+    )
+    # segfaults on ARM, must run on x86_64 / AMD64
+    converter.convert()
+
+    armnn_model_path = f"output/{model.name}.armnn"
+    os.environ["LD_LIBRARY_PATH"] = "armnn"
+    subprocess.run(
+        [
+            "./armnnconverter",
+            "-f",
+            "tflite-binary",
+            "-m",
+            tflite_model_path,
+            "-i",
+            "input_tensor",
+            "-o",
+            "output_tensor",
+            "-p",
+            armnn_model_path,
+        ]
+    )
+
+
+def main() -> None:
+    if platform.machine() not in ("x86_64", "AMD64"):
+        raise RuntimeError(f"Can only run on x86_64 / AMD64, not {platform.machine()}")
+
+    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+    if device.type != "cuda":
+        logging.warning(
+            "No CUDA available, cannot create fp16 model! proceeding to create a fp32 model (use only for testing)"
+        )
+    models = [
+        ClipVision("ViT-B-32", "openai", device),
+        ArcFace("buffalo_l_rec.onnx", device),
+        RetinaFace("buffalo_l_det.onnx", device),
+    ]
+    for model in models:
+        export(model)
+
+
+if __name__ == "__main__":
+    with torch.no_grad():
+        main()
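run.py chains three conversions per model: torch.jit.trace on a dummy input, TinyNN's TFLiteConverter to produce a .tflite file, and a shell-out to armnnconverter for the final .armnn artifact. A minimal sketch of driving a single export, assuming the script is importable as a module named `run` and the buffalo_l_rec.onnx weights sit in the working directory (both assumptions):

```python
# Hedged sketch under the stated assumptions; mirrors what main() does for
# one model instead of all three.
import torch

from run import ArcFace, export  # module name assumed from the file path

with torch.no_grad():
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    # Writes output/buffalo_l_rec.tflite, then output/buffalo_l_rec.armnn
    export(ArcFace("buffalo_l_rec.onnx", device))
```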
@@ -168,12 +168,6 @@ def warning() -> Iterator[mock.Mock]:
         yield mocked
 
 
-@pytest.fixture(scope="function")
-def exception() -> Iterator[mock.Mock]:
-    with mock.patch.object(log, "exception") as mocked:
-        yield mocked
-
-
 @pytest.fixture(scope="function")
 def snapshot_download() -> Iterator[mock.Mock]:
     with mock.patch("app.models.base.snapshot_download") as mocked:
@@ -29,7 +29,6 @@ from .schemas import (
     InferenceEntry,
     InferenceResponse,
     MessageResponse,
-    ModelFormat,
     ModelIdentity,
     ModelTask,
     ModelType,
@@ -196,17 +195,7 @@ async def load(model: InferenceModel) -> InferenceModel:
         if model.load_attempts > 1:
             raise HTTPException(500, f"Failed to load model '{model.model_name}'")
         with lock:
-            try:
-                model.load()
-            except FileNotFoundError as e:
-                if model.model_format == ModelFormat.ONNX:
-                    raise e
-                log.exception(e)
-                log.warning(
-                    f"{model.model_format.upper()} is available, but model '{model.model_name}' does not support it."
-                )
-                model.model_format = ModelFormat.ONNX
-                model.load()
+            model.load()
         return model
 
     try:
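The deleted block implemented a one-shot format fallback: try the configured format, and on FileNotFoundError for a non-ONNX format, log and retry as ONNX. A distilled, standalone paraphrase of that control flow, with simplified names rather than the project's actual classes:

```python
# Standalone paraphrase of the removed fallback; `model` stands in for any
# object with load() and a model_format attribute, not the real InferenceModel.
def load_with_fallback(model) -> None:
    try:
        model.load()
    except FileNotFoundError:
        if model.model_format == "onnx":
            raise  # nothing left to fall back to
        model.model_format = "onnx"  # downgrade the format and retry once
        model.load()
```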
@@ -23,7 +23,7 @@ class InferenceModel(ABC):
         self,
         model_name: str,
         cache_dir: Path | str | None = None,
-        model_format: ModelFormat | None = None,
+        preferred_format: ModelFormat | None = None,
         session: ModelSession | None = None,
         **model_kwargs: Any,
     ) -> None:
@@ -31,7 +31,7 @@ class InferenceModel(ABC):
         self.load_attempts = 0
         self.model_name = clean_name(model_name)
         self.cache_dir = Path(cache_dir) if cache_dir is not None else self._cache_dir_default
-        self.model_format = model_format if model_format is not None else self._model_format_default
+        self.model_format = preferred_format if preferred_format is not None else self._model_format_default
         if session is not None:
             self.session = session
@@ -48,7 +48,7 @@ class InferenceModel(ABC):
         self.load_attempts += 1
 
         self.download()
-        attempt = f"Attempt #{self.load_attempts} to load" if self.load_attempts > 1 else "Loading"
+        attempt = f"Attempt #{self.load_attempts + 1} to load" if self.load_attempts else "Loading"
         log.info(f"{attempt} {self.model_type.replace('-', ' ')} model '{self.model_name}' to memory")
         self.session = self._load()
         self.loaded = True
@@ -101,9 +101,6 @@ class InferenceModel(ABC):
         self.cache_dir.mkdir(parents=True, exist_ok=True)
 
     def _make_session(self, model_path: Path) -> ModelSession:
-        if not model_path.is_file():
-            raise FileNotFoundError(f"Model file not found: {model_path}")
-
         match model_path.suffix:
             case ".armnn":
                 session: ModelSession = AnnSession(model_path)
@@ -147,13 +144,17 @@ class InferenceModel(ABC):
 
     @property
     def model_format(self) -> ModelFormat:
-        return self._model_format
+        return self._preferred_format
 
     @model_format.setter
-    def model_format(self, model_format: ModelFormat) -> None:
-        log.debug(f"Setting model format to {model_format}")
-        self._model_format = model_format
+    def model_format(self, preferred_format: ModelFormat) -> None:
+        log.debug(f"Setting preferred format to {preferred_format}")
+        self._preferred_format = preferred_format
 
     @property
     def _model_format_default(self) -> ModelFormat:
-        return ModelFormat.ARMNN if ann.ann.is_available and settings.ann else ModelFormat.ONNX
+        prefer_ann = ann.ann.is_available and settings.ann
+        ann_exists = (self.model_dir / "model.armnn").is_file()
+        if prefer_ann and not ann_exists:
+            log.warning(f"ARM NN is available, but '{self.model_name}' does not support ARM NN. Falling back to ONNX.")
+        return ModelFormat.ARMNN if prefer_ann and ann_exists else ModelFormat.ONNX
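The added lines in the last hunk make the default format check the cache before choosing ARM NN: the format argument becomes a preference rather than a mandate, and ARM NN is only selected when a model.armnn file actually exists on disk. A distilled sketch of that decision; this is a standalone paraphrase with simplified names, not the project's actual class:

```python
# Hedged paraphrase of the new _model_format_default logic; model_dir and the
# two flags stand in for the real attributes and settings.
from pathlib import Path

def default_format(model_dir: Path, ann_available: bool, ann_enabled: bool) -> str:
    prefer_ann = ann_available and ann_enabled
    ann_exists = (model_dir / "model.armnn").is_file()
    if prefer_ann and not ann_exists:
        print("ARM NN preferred, but model.armnn is missing; falling back to ONNX")
    return "armnn" if prefer_ann and ann_exists else "onnx"
```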
@@ -22,12 +22,11 @@ class BaseCLIPTextualEncoder(InferenceModel):
         return res
 
     def _load(self) -> ModelSession:
-        session = super()._load()
         log.debug(f"Loading tokenizer for CLIP model '{self.model_name}'")
         self.tokenizer = self._load_tokenizer()
         log.debug(f"Loaded tokenizer for CLIP model '{self.model_name}'")
 
-        return session
+        return super()._load()
 
     @abstractmethod
     def _load_tokenizer(self) -> Tokenizer:
@@ -1,3 +1,4 @@
+from pathlib import Path
 from typing import Any
 
 import numpy as np
@@ -13,9 +14,15 @@ class FaceDetector(InferenceModel):
     depends = []
     identity = (ModelType.DETECTION, ModelTask.FACIAL_RECOGNITION)
 
-    def __init__(self, model_name: str, min_score: float = 0.7, **model_kwargs: Any) -> None:
+    def __init__(
+        self,
+        model_name: str,
+        min_score: float = 0.7,
+        cache_dir: Path | str | None = None,
+        **model_kwargs: Any,
+    ) -> None:
         self.min_score = model_kwargs.pop("minScore", min_score)
-        super().__init__(model_name, **model_kwargs)
+        super().__init__(model_name, cache_dir, **model_kwargs)
 
     def _load(self) -> ModelSession:
         session = self._make_session(self.model_path)
@@ -9,7 +9,7 @@ from numpy.typing import NDArray
 from onnx.tools.update_model_dims import update_inputs_outputs_dims
 from PIL import Image
 
-from app.config import log
+from app.config import clean_name, log
 from app.models.base import InferenceModel
 from app.models.transforms import decode_cv2
 from app.schemas import FaceDetectionOutput, FacialRecognitionOutput, ModelFormat, ModelSession, ModelTask, ModelType
@@ -20,14 +20,20 @@ class FaceRecognizer(InferenceModel):
     depends = [(ModelType.DETECTION, ModelTask.FACIAL_RECOGNITION)]
     identity = (ModelType.RECOGNITION, ModelTask.FACIAL_RECOGNITION)
 
-    def __init__(self, model_name: str, min_score: float = 0.7, **model_kwargs: Any) -> None:
-        super().__init__(model_name, **model_kwargs)
+    def __init__(
+        self,
+        model_name: str,
+        min_score: float = 0.7,
+        cache_dir: Path | str | None = None,
+        **model_kwargs: Any,
+    ) -> None:
+        super().__init__(clean_name(model_name), cache_dir, **model_kwargs)
         self.min_score = model_kwargs.pop("minScore", min_score)
-        self.batch = self.model_format == ModelFormat.ONNX
 
     def _load(self) -> ModelSession:
         session = self._make_session(self.model_path)
-        if self.batch and not has_batch_axis(session):
+        if self.model_format == ModelFormat.ONNX and not has_batch_axis(session):
             self._add_batch_axis(self.model_path)
             session = self._make_session(self.model_path)
         self.model = ArcFaceONNX(
@@ -43,7 +43,7 @@ class TestBase:
 
         assert encoder.cache_dir == cache_dir
 
-    def test_sets_default_model_format(self, mocker: MockerFixture) -> None:
+    def test_sets_default_preferred_format(self, mocker: MockerFixture) -> None:
         mocker.patch.object(settings, "ann", True)
         mocker.patch("ann.ann.is_available", False)
 
@@ -51,7 +51,7 @@ class TestBase:
 
         assert encoder.model_format == ModelFormat.ONNX
 
-    def test_sets_default_model_format_to_armnn_if_available(self, path: mock.Mock, mocker: MockerFixture) -> None:
+    def test_sets_default_preferred_format_to_armnn_if_available(self, path: mock.Mock, mocker: MockerFixture) -> None:
         mocker.patch.object(settings, "ann", True)
         mocker.patch("ann.ann.is_available", True)
         path.suffix = ".armnn"
@@ -60,11 +60,11 @@ class TestBase:
 
         assert encoder.model_format == ModelFormat.ARMNN
 
-    def test_sets_model_format_kwarg(self, mocker: MockerFixture) -> None:
+    def test_sets_preferred_format_kwarg(self, mocker: MockerFixture) -> None:
         mocker.patch.object(settings, "ann", False)
         mocker.patch("ann.ann.is_available", False)
 
-        encoder = OpenClipTextualEncoder("ViT-B-32__openai", model_format=ModelFormat.ARMNN)
+        encoder = OpenClipTextualEncoder("ViT-B-32__openai", preferred_format=ModelFormat.ARMNN)
 
         assert encoder.model_format == ModelFormat.ARMNN
 
@@ -129,7 +129,7 @@ class TestBase:
         )
 
     def test_download_downloads_armnn_if_preferred_format(self, snapshot_download: mock.Mock) -> None:
-        encoder = OpenClipTextualEncoder("ViT-B-32__openai", model_format=ModelFormat.ARMNN)
+        encoder = OpenClipTextualEncoder("ViT-B-32__openai", preferred_format=ModelFormat.ARMNN)
         encoder.download()
 
         snapshot_download.assert_called_once_with(
@@ -140,19 +140,6 @@ class TestBase:
             ignore_patterns=[],
         )
 
-    def test_throws_exception_if_model_path_does_not_exist(
-        self, snapshot_download: mock.Mock, ort_session: mock.Mock, path: mock.Mock
-    ) -> None:
-        path.return_value.__truediv__.return_value.__truediv__.return_value.is_file.return_value = False
-
-        encoder = OpenClipTextualEncoder("ViT-B-32__openai", cache_dir=path)
-
-        with pytest.raises(FileNotFoundError):
-            encoder.load()
-
-        snapshot_download.assert_called_once()
-        ort_session.assert_not_called()
-
 
 @pytest.mark.usefixtures("ort_session")
 class TestOrtSession:
@@ -480,18 +467,16 @@ class TestFaceRecognition:
         assert isinstance(call_args[0][0], np.ndarray)
         assert call_args[0][0].shape == (112, 112, 3)
 
-    def test_recognition_adds_batch_axis_for_ort(
-        self, ort_session: mock.Mock, path: mock.Mock, mocker: MockerFixture
-    ) -> None:
+    def test_recognition_adds_batch_axis_for_ort(self, ort_session: mock.Mock, mocker: MockerFixture) -> None:
         onnx = mocker.patch("app.models.facial_recognition.recognition.onnx", autospec=True)
         update_dims = mocker.patch(
             "app.models.facial_recognition.recognition.update_inputs_outputs_dims", autospec=True
         )
         mocker.patch("app.models.base.InferenceModel.download")
         mocker.patch("app.models.facial_recognition.recognition.ArcFaceONNX")
 
         ort_session.return_value.get_inputs.return_value = [SimpleNamespace(name="input.1", shape=(1, 3, 224, 224))]
         ort_session.return_value.get_outputs.return_value = [SimpleNamespace(name="output.1", shape=(1, 800))]
-        path.return_value.__truediv__.return_value.__truediv__.return_value.suffix = ".onnx"
 
         proto = mock.Mock()
@@ -507,30 +492,27 @@ class TestFaceRecognition:
 
         onnx.load.return_value = proto
 
-        face_recognizer = FaceRecognizer("buffalo_s", cache_dir=path)
+        face_recognizer = FaceRecognizer("buffalo_s")
         face_recognizer.load()
 
         assert face_recognizer.batch is True
         update_dims.assert_called_once_with(proto, {"input.1": ["batch", 3, 224, 224]}, {"output.1": ["batch", 800]})
         onnx.save.assert_called_once_with(update_dims.return_value, face_recognizer.model_path)
 
-    def test_recognition_does_not_add_batch_axis_if_exists(
-        self, ort_session: mock.Mock, path: mock.Mock, mocker: MockerFixture
-    ) -> None:
+    def test_recognition_does_not_add_batch_axis_if_exists(self, ort_session: mock.Mock, mocker: MockerFixture) -> None:
         onnx = mocker.patch("app.models.facial_recognition.recognition.onnx", autospec=True)
         update_dims = mocker.patch(
             "app.models.facial_recognition.recognition.update_inputs_outputs_dims", autospec=True
         )
         mocker.patch("app.models.base.InferenceModel.download")
         mocker.patch("app.models.facial_recognition.recognition.ArcFaceONNX")
-        path.return_value.__truediv__.return_value.__truediv__.return_value.suffix = ".onnx"
 
         inputs = [SimpleNamespace(name="input.1", shape=("batch", 3, 224, 224))]
         outputs = [SimpleNamespace(name="output.1", shape=("batch", 800))]
         ort_session.return_value.get_inputs.return_value = inputs
         ort_session.return_value.get_outputs.return_value = outputs
 
-        face_recognizer = FaceRecognizer("buffalo_s", cache_dir=path)
+        face_recognizer = FaceRecognizer("buffalo_s")
         face_recognizer.load()
 
         assert face_recognizer.batch is True
@@ -538,30 +520,6 @@ class TestFaceRecognition:
         onnx.load.assert_not_called()
         onnx.save.assert_not_called()
 
-    def test_recognition_does_not_add_batch_axis_for_armnn(
-        self, ann_session: mock.Mock, path: mock.Mock, mocker: MockerFixture
-    ) -> None:
-        onnx = mocker.patch("app.models.facial_recognition.recognition.onnx", autospec=True)
-        update_dims = mocker.patch(
-            "app.models.facial_recognition.recognition.update_inputs_outputs_dims", autospec=True
-        )
-        mocker.patch("app.models.base.InferenceModel.download")
-        mocker.patch("app.models.facial_recognition.recognition.ArcFaceONNX")
-        path.return_value.__truediv__.return_value.__truediv__.return_value.suffix = ".armnn"
-
-        inputs = [SimpleNamespace(name="input.1", shape=("batch", 3, 224, 224))]
-        outputs = [SimpleNamespace(name="output.1", shape=("batch", 800))]
-        ann_session.return_value.get_inputs.return_value = inputs
-        ann_session.return_value.get_outputs.return_value = outputs
-
-        face_recognizer = FaceRecognizer("buffalo_s", model_format=ModelFormat.ARMNN, cache_dir=path)
-        face_recognizer.load()
-
-        assert face_recognizer.batch is False
-        update_dims.assert_not_called()
-        onnx.load.assert_not_called()
-        onnx.save.assert_not_called()
-
 
 @pytest.mark.asyncio
 class TestCache:
@@ -735,7 +693,7 @@ class TestLoad:
         mock_model.clear_cache.assert_called_once()
         assert mock_model.load.call_count == 2
 
-    async def test_load_raises_if_os_error_and_already_retried(self) -> None:
+    async def test_load_clears_cache_and_raises_if_os_error_and_already_retried(self) -> None:
         mock_model = mock.Mock(spec=InferenceModel)
         mock_model.model_name = "test_model_name"
         mock_model.model_type = ModelType.VISUAL
@@ -749,27 +707,6 @@ class TestLoad:
         mock_model.clear_cache.assert_not_called()
         mock_model.load.assert_not_called()
 
-    async def test_falls_back_to_onnx_if_other_format_does_not_exist(
-        self, exception: mock.Mock, warning: mock.Mock
-    ) -> None:
-        mock_model = mock.Mock(spec=InferenceModel)
-        mock_model.model_name = "test_model_name"
-        mock_model.model_type = ModelType.VISUAL
-        mock_model.model_task = ModelTask.SEARCH
-        mock_model.model_format = ModelFormat.ARMNN
-        mock_model.loaded = False
-        mock_model.load_attempts = 0
-        error = FileNotFoundError()
-        mock_model.load.side_effect = [error, None]
-
-        await load(mock_model)
-
-        mock_model.clear_cache.assert_not_called()
-        assert mock_model.load.call_count == 2
-        exception.assert_called_once_with(error)
-        warning.assert_called_once_with("ARMNN is available, but model 'test_model_name' does not support it.")
-        mock_model.model_format = ModelFormat.ONNX
-
 
 @pytest.mark.skipif(
     not settings.test_full,
@@ -1,35 +0,0 @@
-FROM mambaorg/micromamba:bookworm-slim@sha256:333f7598ff2c2400fb10bfe057709c68b7daab5d847143af85abcf224a07271a as builder
-
-USER root
-RUN apt-get update && apt-get install -y --no-install-recommends \
-    build-essential \
-    cmake \
-    curl \
-    git
-USER $MAMBA_USER
-
-WORKDIR /home/mambauser
-ENV ARMNN_PATH=armnn
-COPY --chown=$MAMBA_USER:$MAMBA_USER scripts/* .
-RUN ./download-armnn.sh && \
-    ./build-converter.sh && \
-    ./build.sh
-
-COPY --chown=$MAMBA_USER:$MAMBA_USER conda-lock.yml .
-RUN micromamba create -y -p /home/mambauser/venv -f conda-lock.yml && \
-    micromamba clean --all --yes
-ENV PATH="/home/mambauser/venv/bin:${PATH}"
-
-FROM gcr.io/distroless/base-debian12
-# FROM mambaorg/micromamba:bookworm-slim@sha256:333f7598ff2c2400fb10bfe057709c68b7daab5d847143af85abcf224a07271a
-
-WORKDIR /export/ann
-ENV PYTHONDONTWRITEBYTECODE=1 \
-    LD_LIBRARY_PATH=/export/ann/armnn \
-    PATH="/opt/venv/bin:${PATH}"
-
-COPY --from=builder /home/mambauser/armnnconverter /home/mambauser/armnn ./
-COPY --from=builder /home/mambauser/venv /opt/venv
-COPY --chown=$MAMBA_USER:$MAMBA_USER onnx2ann onnx2ann
-
-ENTRYPOINT ["python", "-m", "onnx2ann"]

File diff suppressed because it is too large.
@@ -1,21 +0,0 @@
-name: onnx2ann
-channels:
-  - conda-forge
-dependencies:
-  - python>=3.11,<4.0
-  - onnx>=1.16.1
-  # - onnxruntime>=1.18.1 # conda only has gpu version
-  - psutil>=6.0.0
-  - flatbuffers>=24.3.25
-  - ml_dtypes>=0.3.1
-  - typer-slim>=0.12.3
-  - huggingface_hub>=0.23.4
-  - pip
-  - pip:
-      - onnxruntime>=1.18.1 # conda only has gpu version
-      - onnxsim>=0.4.36
-      - onnx2tf>=1.24.1
-      - onnx_graphsurgeon>=0.5.2
-      - simple_onnx_processing_tools>=1.1.32
-      - tf_keras>=2.16.0
-      - git+https://github.com/microsoft/onnxconverter-common.git
@@ -1,99 +0,0 @@
import os
import platform
from typing import Annotated, Optional

import typer

from onnx2ann.export import Exporter, ModelType, Precision

app = typer.Typer(add_completion=False, pretty_exceptions_show_locals=False)


@app.command()
def export(
    model_name: Annotated[
        str, typer.Argument(..., help="The name of the model to be exported as it exists in Hugging Face.")
    ],
    model_type: Annotated[ModelType, typer.Option(..., "--type", "-t", help="The type of model to be exported.")],
    input_shapes: Annotated[
        list[str],
        typer.Option(
            ...,
            "--input-shape",
            "-s",
            help="The shape of an input tensor to the model, each dimension separated by commas. "
            "Multiple shapes can be provided for multiple inputs.",
        ),
    ],
    precision: Annotated[
        Precision,
        typer.Option(
            ...,
            "--precision",
            "-p",
            help="The precision of the exported model. `float16` requires a GPU.",
        ),
    ] = Precision.FLOAT32,
    cache_dir: Annotated[
        str,
        typer.Option(
            ...,
            "--cache-dir",
            "-c",
            help="Directory where pre-export models will be stored.",
            envvar="CACHE_DIR",
            show_envvar=True,
        ),
    ] = "~/.cache/huggingface",
    output_dir: Annotated[
        str,
        typer.Option(
            ...,
            "--output-dir",
            "-o",
            help="Directory where exported models will be stored.",
        ),
    ] = "output",
    auth_token: Annotated[
        Optional[str],
        typer.Option(
            ...,
            "--auth-token",
            "-a",  # was "-t", which clashes with the "-t" shorthand of --type
            help="If uploading models to Hugging Face, the auth token of the user or organisation.",
            envvar="HF_AUTH_TOKEN",
            show_envvar=True,
        ),
    ] = None,
    force_export: Annotated[
        bool,
        typer.Option(
            ...,
            "--force-export",
            "-f",
            help="Export the model even if an exported model already exists in the output directory.",
        ),
    ] = False,
) -> None:
    if platform.machine() not in ("x86_64", "AMD64"):
        msg = f"Can only run on x86_64 / AMD64, not {platform.machine()}"
        raise RuntimeError(msg)
    os.environ.setdefault("LD_LIBRARY_PATH", "armnn")
    parsed_input_shapes = [tuple(map(int, shape.split(","))) for shape in input_shapes]
    model = Exporter(
        model_name, model_type, input_shapes=parsed_input_shapes, cache_dir=cache_dir, force_export=force_export
    )
    model_dir = os.path.join(output_dir, model_name)  # was hardcoded to "output", ignoring --output-dir
    export_dir = os.path.join(model_dir, model_type)
    armnn_model = model.to_armnn(export_dir, precision)

    if not auth_token:
        return

    from huggingface_hub import upload_file

    relative_path = os.path.relpath(armnn_model, start=model_dir)
    upload_file(path_or_fileobj=armnn_model, path_in_repo=relative_path, repo_id=model.repo_name, token=auth_token)


app()
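As an editorial aside (not part of the change itself): the command wiring above can be smoke-tested with Typer's test runner. The model name and input shape below are illustrative placeholders.

```python
# Hypothetical smoke test for the export command above; the model name and
# shape are placeholders, not values taken from this change.
from typer.testing import CliRunner

runner = CliRunner()
result = runner.invoke(app, ["ViT-B-32__openai", "--type", "visual", "--input-shape", "1,3,224,224"])
print(result.exit_code)
```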
@@ -1,129 +0,0 @@
import os
import subprocess
from enum import StrEnum

from onnx2ann.helpers import onnx_make_armnn_compatible, onnx_make_inputs_fixed


class ModelType(StrEnum):
    VISUAL = "visual"
    TEXTUAL = "textual"
    RECOGNITION = "recognition"
    DETECTION = "detection"


class Precision(StrEnum):
    FLOAT16 = "float16"
    FLOAT32 = "float32"


class Exporter:
    def __init__(
        self,
        model_name: str,
        model_type: str,
        input_shapes: list[tuple[int, ...]],
        optimization_level: int = 5,
        cache_dir: str = os.environ.get("CACHE_DIR", "~/.cache/huggingface"),
        force_export: bool = False,
    ):
        self.model_name = model_name.split("/")[-1]
        self.model_type = model_type
        self.optimize = optimization_level
        self.input_shapes = input_shapes
        self.cache_dir = os.path.join(cache_dir, self.repo_name)
        self.force_export = force_export

    def download(self) -> str:
        model_path = os.path.join(self.cache_dir, self.model_type, "model.onnx")
        if os.path.isfile(model_path):
            print(f"Model is already downloaded at {model_path}")
            return model_path
        from huggingface_hub import snapshot_download

        snapshot_download(
            self.repo_name, cache_dir=self.cache_dir, local_dir=self.cache_dir, local_dir_use_symlinks=False
        )
        return model_path

    def to_onnx_static(self, precision: Precision) -> str:
        import onnx
        from onnxconverter_common import float16

        onnx_path_original = self.download()
        static_dir = os.path.join(self.cache_dir, self.model_type, "static")

        static_path = os.path.join(static_dir, "model.onnx")  # was an f-string with no placeholder
        if self.force_export or not os.path.isfile(static_path):  # was `and`, which skipped fresh exports
            print(f"Making {self} static")
            os.makedirs(static_dir, exist_ok=True)
            onnx_make_inputs_fixed(onnx_path_original, static_path, self.input_shapes)
            onnx_make_armnn_compatible(static_path)
            print(f"Finished making {self} static")

        model = onnx.load(static_path)
        self.inputs = [input_.name for input_ in model.graph.input]
        self.outputs = [output_.name for output_ in model.graph.output]
        if precision == Precision.FLOAT16:
            static_path = os.path.join(static_dir, f"model_{precision}.onnx")
            print(f"Converting {self} to {precision} precision")
            model = float16.convert_float_to_float16(model, keep_io_types=True, disable_shape_infer=True)
            onnx.save(model, static_path)
            print(f"Finished converting {self} to {precision} precision")
        # self.inputs, self.outputs = onnx_get_inputs_outputs(static_path)
        return static_path

    def to_tflite(self, output_dir: str, precision: Precision) -> str:
        onnx_model = self.to_onnx_static(precision)
        tflite_dir = os.path.join(output_dir, precision)
        tflite_model = os.path.join(tflite_dir, f"model_{precision}.tflite")
        if self.force_export or not os.path.isfile(tflite_model):
            import onnx2tf

            print(f"Exporting {self} to TFLite with {precision} precision (this might take a few minutes)")
            onnx2tf.convert(
                input_onnx_file_path=onnx_model,
                output_folder_path=tflite_dir,
                keep_shape_absolutely_input_names=self.inputs,
                # verbosity="warn",
                copy_onnx_input_output_names_to_tflite=True,
                output_signaturedefs=True,
                not_use_onnxsim=True,
            )
            print(f"Finished exporting {self} to TFLite with {precision} precision")

        return tflite_model

    def to_armnn(self, output_dir: str, precision: Precision) -> str:  # returns a single path, not a tuple
        armnn_model = os.path.join(output_dir, "model.armnn")
        if not self.force_export and os.path.isfile(armnn_model):
            return armnn_model

        tflite_model_dir = os.path.join(output_dir, "tflite")
        tflite_model = self.to_tflite(tflite_model_dir, precision)

        args = ["./armnnconverter", "-f", "tflite-binary", "-m", tflite_model, "-p", armnn_model]
        args.append("-i")
        args.extend(self.inputs)
        args.append("-o")
        args.extend(self.outputs)

        print(f"Exporting {self} to ARM NN with {precision} precision")
        try:
            if (stdout := subprocess.check_output(args, stderr=subprocess.STDOUT).decode()):
                print(stdout)
            print(f"Finished exporting {self} to ARM NN with {precision} precision")
        except subprocess.CalledProcessError as e:
            print(e.output.decode())
            try:
                from shutil import rmtree

                rmtree(tflite_model_dir, ignore_errors=True)
            finally:
                raise e
        return armnn_model  # the success path previously fell through and returned None

    @property
    def repo_name(self) -> str:
        return f"immich-app/{self.model_name}"

    def __repr__(self) -> str:
        return f"{self.model_name} ({self.model_type})"
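For orientation, a minimal sketch of how this class chains ONNX → TFLite → ARM NN, assuming an x86_64 host with `armnnconverter` in the working directory; the model name, type, and shape are illustrative, not taken from the change:

```python
# Illustrative usage only; the model name and input shape are placeholders.
exporter = Exporter("immich-app/ViT-B-32__openai", ModelType.VISUAL, input_shapes=[(1, 3, 224, 224)])
armnn_model = exporter.to_armnn("output/ViT-B-32__openai/visual", Precision.FLOAT32)
# to_armnn() calls to_tflite(), which calls to_onnx_static(), which calls download()
print(armnn_model)  # output/ViT-B-32__openai/visual/model.armnn
```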
@@ -1,260 +0,0 @@
from typing import Any


def onnx_make_armnn_compatible(model_path: str) -> None:
    """
    I can explain:
    ARM NN only supports up to 4D transposes, but the model has a 5D transpose due to a redundant unsqueeze.
    This function folds the unsqueeze + transpose + squeeze into a single 4D transpose.
    It also switches from gather ops to slices, since ARM NN has different dimension semantics for gathers,
    and it fixes batch normalization being left in training mode.
    """

    import numpy as np
    import onnx
    from onnx_graphsurgeon import Constant, Node, Variable, export_onnx, import_onnx

    proto = onnx.load(model_path)
    graph = import_onnx(proto)

    gather_idx = 1
    squeeze_idx = 1
    for node in graph.nodes:
        for link1 in node.outputs:
            if "Unsqueeze" in link1.name:
                for node1 in link1.outputs:
                    for link2 in node1.outputs:
                        if "Transpose" in link2.name:
                            for node2 in link2.outputs:
                                if node2.attrs.get("perm") == [3, 1, 2, 0, 4]:
                                    node2.attrs["perm"] = [2, 0, 1, 3]
                                    link2.shape = link1.shape
                                    for link3 in node2.outputs:
                                        if "Squeeze" in link3.name:
                                            link3.shape = [link3.shape[x] for x in [0, 1, 2, 4]]
                                            for node3 in link3.outputs:
                                                for link4 in node3.outputs:
                                                    link4.shape = link3.shape
                                                    try:
                                                        idx = link2.inputs.index(node1)
                                                        link2.inputs[idx] = node
                                                    except ValueError:
                                                        pass

                                                    node.outputs = [link2]
                                                    if "Gather" in link4.name:
                                                        for node4 in link4.outputs:
                                                            axis = node1.attrs.get("axis", 0)
                                                            index = node4.inputs[1].values
                                                            slice_link = Variable(
                                                                f"onnx::Slice_123{gather_idx}",
                                                                dtype=link4.dtype,
                                                                shape=[1] + link3.shape[1:],
                                                            )
                                                            slice_node = Node(
                                                                op="Slice",
                                                                inputs=[
                                                                    link3,
                                                                    Constant(
                                                                        f"SliceStart_123{gather_idx}",
                                                                        np.array([index]),
                                                                    ),
                                                                    Constant(
                                                                        f"SliceEnd_123{gather_idx}",
                                                                        np.array([index + 1]),
                                                                    ),
                                                                    Constant(
                                                                        f"SliceAxis_123{gather_idx}",
                                                                        np.array([axis]),
                                                                    ),
                                                                ],
                                                                outputs=[slice_link],
                                                                name=f"Slice_123{gather_idx}",
                                                            )
                                                            graph.nodes.append(slice_node)
                                                            gather_idx += 1

                                                            for link5 in node4.outputs:
                                                                for node5 in link5.outputs:
                                                                    try:
                                                                        idx = node5.inputs.index(link5)
                                                                        node5.inputs[idx] = slice_link
                                                                    except ValueError:
                                                                        pass
            elif node.op == "LayerNormalization":
                for node1 in link1.outputs:
                    if node1.op == "Gather":
                        for link2 in node1.outputs:
                            for node2 in link2.outputs:
                                axis = node1.attrs.get("axis", 0)
                                index = node1.inputs[1].values
                                slice_link = Variable(
                                    f"onnx::Slice_123{gather_idx}",
                                    dtype=link2.dtype,
                                    shape=[1, *link2.shape],
                                )
                                slice_node = Node(
                                    op="Slice",
                                    inputs=[
                                        node1.inputs[0],
                                        Constant(
                                            f"SliceStart_123{gather_idx}",
                                            np.array([index]),
                                        ),
                                        Constant(
                                            f"SliceEnd_123{gather_idx}",
                                            np.array([index + 1]),
                                        ),
                                        Constant(
                                            f"SliceAxis_123{gather_idx}",
                                            np.array([axis]),
                                        ),
                                    ],
                                    outputs=[slice_link],
                                    name=f"Slice_123{gather_idx}",
                                )
                                graph.nodes.append(slice_node)
                                gather_idx += 1

                                squeeze_link = Variable(
                                    f"onnx::Squeeze_123{squeeze_idx}",
                                    dtype=link2.dtype,
                                    shape=link2.shape,
                                )
                                squeeze_node = Node(
                                    op="Squeeze",
                                    inputs=[
                                        slice_link,
                                        Constant(
                                            f"SqueezeAxis_123{squeeze_idx}",
                                            np.array([0]),
                                        ),
                                    ],
                                    outputs=[squeeze_link],
                                    name=f"Squeeze_123{squeeze_idx}",
                                )
                                graph.nodes.append(squeeze_node)
                                squeeze_idx += 1
                                try:
                                    idx = node2.inputs.index(link2)
                                    node2.inputs[idx] = squeeze_link
                                except ValueError:
                                    pass
            elif node.op == "Reshape":
                for node1 in link1.outputs:
                    if node1.op == "Gather":
                        node2s = [n for link in node1.outputs for n in link.outputs]
                        if any(n.op == "Abs" for n in node2s):
                            axis = node1.attrs.get("axis", 0)
                            index = node1.inputs[1].values
                            slice_link = Variable(
                                f"onnx::Slice_123{gather_idx}",
                                dtype=node1.outputs[0].dtype,
                                shape=[1, *node1.outputs[0].shape],
                            )
                            slice_node = Node(
                                op="Slice",
                                inputs=[
                                    node1.inputs[0],
                                    Constant(
                                        f"SliceStart_123{gather_idx}",
                                        np.array([index]),
                                    ),
                                    Constant(
                                        f"SliceEnd_123{gather_idx}",
                                        np.array([index + 1]),
                                    ),
                                    Constant(
                                        f"SliceAxis_123{gather_idx}",
                                        np.array([axis]),
                                    ),
                                ],
                                outputs=[slice_link],
                                name=f"Slice_123{gather_idx}",
                            )
                            graph.nodes.append(slice_node)
                            gather_idx += 1

                            squeeze_link = Variable(
                                f"onnx::Squeeze_123{squeeze_idx}",
                                dtype=node1.outputs[0].dtype,
                                shape=node1.outputs[0].shape,
                            )
                            squeeze_node = Node(
                                op="Squeeze",
                                inputs=[
                                    slice_link,
                                    Constant(
                                        f"SqueezeAxis_123{squeeze_idx}",
                                        np.array([0]),
                                    ),
                                ],
                                outputs=[squeeze_link],
                                name=f"Squeeze_123{squeeze_idx}",
                            )
                            graph.nodes.append(squeeze_node)
                            squeeze_idx += 1
                            for node2 in node2s:
                                node2.inputs[0] = squeeze_link
            elif node.op == "BatchNormalization" and node.attrs.get("training_mode") == 1:
                node.attrs["training_mode"] = 0
                node.outputs = node.outputs[:1]

    graph.cleanup(remove_unused_node_outputs=True, recurse_subgraphs=True, recurse_functions=True)
    graph.toposort()
    graph.fold_constants()
    updated = export_onnx(graph)
    onnx_save(updated, model_path)

    # for some reason, reloading the model is necessary to apply the correct shape
    proto = onnx.load(model_path)
    graph = import_onnx(proto)
    for node in graph.nodes:
        if node.op == "Slice":
            for link in node.outputs:
                if "Slice_123" in link.name and link.shape[0] == 3:  # noqa: PLR2004
                    link.shape[0] = 1

    graph.cleanup(remove_unused_node_outputs=True, recurse_subgraphs=True, recurse_functions=True)
    graph.toposort()
    graph.fold_constants()
    updated = export_onnx(graph)
    onnx_save(updated, model_path)
    onnx.shape_inference.infer_shapes_path(model_path, check_type=True, strict_mode=True, data_prop=True)
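The fold described in the docstring can be verified directly with numpy (an editorial check, not part of the original file): an unsqueeze at axis 0, a 5D transpose with perm [3, 1, 2, 0, 4], and a squeeze of axis 3 compose to the single 4D transpose [2, 0, 1, 3].

```python
# Editorial sanity check of the transpose folding described above.
import numpy as np

x = np.random.rand(2, 3, 4, 5)
folded = x.transpose(2, 0, 1, 3)
unfolded = np.squeeze(np.expand_dims(x, 0).transpose(3, 1, 2, 0, 4), axis=3)
assert np.array_equal(folded, unfolded)
```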
def onnx_make_inputs_fixed(input_path: str, output_path: str, input_shapes: list[tuple[int, ...]]) -> None:
    import onnx
    import onnxsim
    from onnxruntime.tools.onnx_model_utils import fix_output_shapes, make_input_shape_fixed

    model, success = onnxsim.simplify(input_path)
    if not success:
        msg = f"Failed to simplify {input_path}"
        raise RuntimeError(msg)
    onnx_save(model, output_path)
    onnx.shape_inference.infer_shapes_path(output_path, check_type=True, strict_mode=True, data_prop=True)
    model = onnx.load_model(output_path)
    for input_node, shape in zip(model.graph.input, input_shapes, strict=False):
        make_input_shape_fixed(model.graph, input_node.name, shape)
    fix_output_shapes(model)
    onnx_save(model, output_path)
    onnx.shape_inference.infer_shapes_path(output_path, check_type=True, strict_mode=True, data_prop=True)


def onnx_get_inputs_outputs(model_path: str) -> tuple[list[str], list[str]]:
    import onnx

    model = onnx.load(model_path)
    inputs = [input_.name for input_ in model.graph.input]
    outputs = [output_.name for output_ in model.graph.output]
    return inputs, outputs


def onnx_save(model: Any, output_path: str) -> None:
    import onnx

    try:
        onnx.save(model, output_path)
    except Exception:  # was a bare `except`; models over the 2GiB protobuf limit fall back to external data
        onnx.save(model, output_path, save_as_external_data=True, all_tensors_to_one_file=False, size_threshold=1_000_000)
@@ -1,56 +0,0 @@
[project]
name = "onnx2ann"
version = "1.107.2"
dependencies = [
  "onnx>=1.16.1",
  "psutil>=6.0.0",
  "flatbuffers>=24.3.25",
  "ml_dtypes>=0.3.1,<1.0.0",
  "typer-slim>=0.12.3,<1.0.0",
  "huggingface_hub>=0.23.4,<1.0.0",
  "onnxruntime>=1.18.1",
  "onnxsim>=0.4.36,<1.0.0",
  "onnx2tf>=1.24.0",
  "onnx_graphsurgeon>=0.5.2,<1.0.0",
  "simple_onnx_processing_tools>=1.1.32",
  "tf_keras>=2.16.0",
  "onnxconverter-common @ git+https://github.com/microsoft/onnxconverter-common"
]
requires-python = ">=3.11"

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[tool.hatch.build.targets.sdist]
only-include = ["onnx2ann"]

[tool.hatch.metadata]
allow-direct-references = true

[tool.mypy]
python_version = "3.12"
follow_imports = "silent"
warn_redundant_casts = true
disallow_any_generics = true
check_untyped_defs = true
disallow_untyped_defs = true
ignore_missing_imports = true

[tool.pydantic-mypy]
init_forbid_extra = true
init_typed = true
warn_required_dynamic_aliases = true
warn_untyped_fields = true

[tool.ruff]
line-length = 120
target-version = "py312"

[tool.ruff.lint]
extend-select = ["E", "F", "I"]
extend-ignore = ["FBT001", "FBT002"]

[tool.black]
line-length = 120
target-version = ['py312']
@@ -1,281 +0,0 @@
#include <fstream>
#include <mutex>
#include <atomic>
#include <map>    // added: std::map is used for ioInfos and mutexes
#include <memory> // added: std::unique_ptr is used for the per-network mutexes

#include "armnn/IRuntime.hpp"
#include "armnn/INetwork.hpp"
#include "armnn/Types.hpp"
#include "armnnDeserializer/IDeserializer.hpp"
#include "armnnTfLiteParser/ITfLiteParser.hpp"
#include "armnnOnnxParser/IOnnxParser.hpp"

using namespace armnn;

struct IOInfos
{
    std::vector<BindingPointInfo> inputInfos;
    std::vector<BindingPointInfo> outputInfos;
};

// from https://rigtorp.se/spinlock/
struct SpinLock
{
    std::atomic<bool> lock_ = {false};

    void lock()
    {
        for (;;)
        {
            if (!lock_.exchange(true, std::memory_order_acquire))
            {
                break;
            }
            while (lock_.load(std::memory_order_relaxed))
                ;
        }
    }

    void unlock() { lock_.store(false, std::memory_order_release); }
};

class Ann
{

public:
    int load(const char *modelPath,
             bool fastMath,
             bool fp16,
             bool saveCachedNetwork,
             const char *cachedNetworkPath)
    {
        INetworkPtr network = loadModel(modelPath);
        IOptimizedNetworkPtr optNet = OptimizeNetwork(network.get(), fastMath, fp16, saveCachedNetwork, cachedNetworkPath);
        const IOInfos infos = getIOInfos(optNet.get());
        NetworkId netId;
        mutex.lock();
        Status status = runtime->LoadNetwork(netId, std::move(optNet));
        mutex.unlock();
        if (status != Status::Success)
        {
            return -1;
        }
        spinLock.lock();
        ioInfos[netId] = infos;
        mutexes.emplace(netId, std::make_unique<std::mutex>());
        spinLock.unlock();
        return netId;
    }

    void execute(NetworkId netId, const void **inputData, void **outputData)
    {
        spinLock.lock();
        const IOInfos *infos = &ioInfos[netId];
        auto m = mutexes[netId].get();
        spinLock.unlock();
        InputTensors inputTensors;
        inputTensors.reserve(infos->inputInfos.size());
        size_t i = 0;
        for (const BindingPointInfo &info : infos->inputInfos)
            inputTensors.emplace_back(info.first, ConstTensor(info.second, inputData[i++]));
        OutputTensors outputTensors;
        outputTensors.reserve(infos->outputInfos.size());
        i = 0;
        for (const BindingPointInfo &info : infos->outputInfos)
            outputTensors.emplace_back(info.first, Tensor(info.second, outputData[i++]));
        m->lock();
        runtime->EnqueueWorkload(netId, inputTensors, outputTensors);
        m->unlock();
    }

    void unload(NetworkId netId)
    {
        mutex.lock();
        runtime->UnloadNetwork(netId);
        mutex.unlock();
    }

    int tensors(NetworkId netId, bool isInput = false)
    {
        spinLock.lock();
        const IOInfos *infos = &ioInfos[netId];
        spinLock.unlock();
        return (int)(isInput ? infos->inputInfos.size() : infos->outputInfos.size());
    }

    unsigned long shape(NetworkId netId, bool isInput = false, int index = 0)
    {
        spinLock.lock();
        const IOInfos *infos = &ioInfos[netId];
        spinLock.unlock();
        const TensorShape shape = (isInput ? infos->inputInfos : infos->outputInfos)[index].second.GetShape();
        unsigned long s = 0;
        for (unsigned int d = 0; d < shape.GetNumDimensions(); d++)
            s |= ((unsigned long)shape[d]) << (d * 16); // stores up to 4 16-bit values in a 64-bit value
        return s;
    }

    Ann(int tuningLevel, const char *tuningFile)
    {
        IRuntime::CreationOptions runtimeOptions;
        BackendOptions backendOptions{"GpuAcc",
                                      {
                                          {"TuningLevel", tuningLevel},
                                          {"MemoryOptimizerStrategy", "ConstantMemoryStrategy"}, // SingleAxisPriorityList or ConstantMemoryStrategy
                                      }};
        if (tuningFile)
            backendOptions.AddOption({"TuningFile", tuningFile});
        runtimeOptions.m_BackendOptions.emplace_back(backendOptions);
        runtime = IRuntime::CreateRaw(runtimeOptions);
    };
    ~Ann()
    {
        IRuntime::Destroy(runtime);
    };

private:
    INetworkPtr loadModel(const char *modelPath)
    {
        const auto path = std::string(modelPath);
        if (path.rfind(".tflite") == path.length() - 7) // endsWith()
        {
            auto parser = armnnTfLiteParser::ITfLiteParser::CreateRaw();
            return parser->CreateNetworkFromBinaryFile(modelPath);
        }
        else if (path.rfind(".onnx") == path.length() - 5) // endsWith()
        {
            auto parser = armnnOnnxParser::IOnnxParser::CreateRaw();
            return parser->CreateNetworkFromBinaryFile(modelPath);
        }
        else
        {
            std::ifstream ifs(path, std::ifstream::in | std::ifstream::binary);
            auto parser = armnnDeserializer::IDeserializer::CreateRaw();
            return parser->CreateNetworkFromBinary(ifs);
        }
    }

    static BindingPointInfo getInputTensorInfo(LayerBindingId inputBindingId, TensorInfo info)
    {
        const auto newInfo = TensorInfo{info.GetShape(), info.GetDataType(),
                                        info.GetQuantizationScale(),
                                        info.GetQuantizationOffset(),
                                        true};
        return {inputBindingId, newInfo};
    }

    IOptimizedNetworkPtr OptimizeNetwork(INetwork *network, bool fastMath, bool fp16, bool saveCachedNetwork, const char *cachedNetworkPath)
    {
        const bool allowExpandedDims = false;
        const ShapeInferenceMethod shapeInferenceMethod = ShapeInferenceMethod::ValidateOnly;

        OptimizerOptionsOpaque options;
        options.SetReduceFp32ToFp16(fp16);
        options.SetShapeInferenceMethod(shapeInferenceMethod);
        options.SetAllowExpandedDims(allowExpandedDims);

        BackendOptions gpuAcc("GpuAcc", {{"FastMathEnabled", fastMath}});
        if (cachedNetworkPath)
        {
            gpuAcc.AddOption({"SaveCachedNetwork", saveCachedNetwork});
            gpuAcc.AddOption({"CachedNetworkFilePath", cachedNetworkPath});
        }
        options.AddModelOption(gpuAcc);

        // No point in using ARMNN for CPU, use ONNX (quantized) instead.
        // BackendOptions cpuAcc("CpuAcc",
        //                       {
        //                           {"FastMathEnabled", fastMath},
        //                           {"NumberOfThreads", 0},
        //                       });
        // options.AddModelOption(cpuAcc);

        BackendOptions allowExDimOpt("AllowExpandedDims",
                                     {{"AllowExpandedDims", allowExpandedDims}});
        options.AddModelOption(allowExDimOpt);
        BackendOptions shapeInferOpt("ShapeInferenceMethod",
                                     {{"InferAndValidate", shapeInferenceMethod == ShapeInferenceMethod::InferAndValidate}});
        options.AddModelOption(shapeInferOpt);

        std::vector<BackendId> backends = {
            BackendId("GpuAcc"),
            // BackendId("CpuAcc"),
            // BackendId("CpuRef"),
        };
        return Optimize(*network, backends, runtime->GetDeviceSpec(), options);
    }

    IOInfos getIOInfos(IOptimizedNetwork *optNet)
    {
        struct InfoStrategy : IStrategy
        {
            void ExecuteStrategy(const IConnectableLayer *layer,
                                 const BaseDescriptor &descriptor,
                                 const std::vector<ConstTensor> &constants,
                                 const char *name,
                                 const LayerBindingId id = 0) override
            {
                IgnoreUnused(descriptor, constants, id);
                const LayerType lt = layer->GetType();
                if (lt == LayerType::Input)
                    ioInfos.inputInfos.push_back(getInputTensorInfo(id, layer->GetOutputSlot(0).GetTensorInfo()));
                else if (lt == LayerType::Output)
                    ioInfos.outputInfos.push_back({id, layer->GetInputSlot(0).GetTensorInfo()});
            }
            IOInfos ioInfos;
        };

        InfoStrategy infoStrategy;
        optNet->ExecuteStrategy(infoStrategy);
        return infoStrategy.ioInfos;
    }

    IRuntime *runtime;
    std::map<NetworkId, IOInfos> ioInfos;
    std::map<NetworkId, std::unique_ptr<std::mutex>> mutexes; // mutex per network so the same network is not executed concurrently
    std::mutex mutex;                                         // global mutex for load/unload calls to the runtime
    SpinLock spinLock;                                        // fast spin lock to guard access to the ioInfos and mutexes maps
};

extern "C" void *init(int logLevel, int tuningLevel, const char *tuningFile)
{
    LogSeverity level = static_cast<LogSeverity>(logLevel);
    ConfigureLogging(true, true, level);

    Ann *ann = new Ann(tuningLevel, tuningFile);
    return ann;
}

extern "C" void destroy(void *ann)
{
    delete ((Ann *)ann);
}

extern "C" int load(void *ann,
                    const char *path,
                    bool fastMath,
                    bool fp16,
                    bool saveCachedNetwork,
                    const char *cachedNetworkPath)
{
    return ((Ann *)ann)->load(path, fastMath, fp16, saveCachedNetwork, cachedNetworkPath);
}

extern "C" void unload(void *ann, NetworkId netId)
{
    ((Ann *)ann)->unload(netId);
}

extern "C" void execute(void *ann, NetworkId netId, const void **inputData, void **outputData)
{
    ((Ann *)ann)->execute(netId, inputData, outputData);
}

extern "C" unsigned long shape(void *ann, NetworkId netId, bool isInput, int index)
{
    return ((Ann *)ann)->shape(netId, isInput, index);
}

extern "C" int tensors(void *ann, NetworkId netId, bool isInput)
{
    return ((Ann *)ann)->tensors(netId, isInput);
}
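On the caller's side, the 64-bit value returned by shape() unpacks into up to four 16-bit dimensions, as the comment in shape() notes. A self-contained editorial sketch of the decoding (the packed value below is made up for illustration):

```python
def unpack_shape(packed: int) -> list[int]:
    """Unpack up to four 16-bit dims from the 64-bit value returned by shape()."""
    dims = [(packed >> (d * 16)) & 0xFFFF for d in range(4)]
    return [d for d in dims if d != 0]  # unused high dims stay zero

# e.g. a 1x3x224x224 input packs to this value
packed = 1 | (3 << 16) | (224 << 32) | (224 << 48)
assert unpack_shape(packed) == [1, 3, 224, 224]
```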
@@ -1,4 +0,0 @@
#!/usr/bin/env sh

cd armnn-23.11/ || exit
g++ -o ../armnnconverter -fPIC -O1 -DARMNN_ONNX_PARSER -DARMNN_SERIALIZER -DARMNN_TF_LITE_PARSER -fuse-ld=gold -std=c++17 -Iinclude -Isrc/armnnUtils -Ithird-party -larmnn -larmnnDeserializer -larmnnTfLiteParser -larmnnOnnxParser -larmnnSerializer -L../armnn src/armnnConverter/ArmnnConverter.cpp
@@ -1,3 +0,0 @@
#!/usr/bin/env sh

g++ -shared -O3 -fPIC -o libann.so -fuse-ld=gold -std=c++17 -I"$ARMNN_PATH"/include -larmnn -larmnnDeserializer -larmnnTfLiteParser -larmnnOnnxParser -L"$ARMNN_PATH" ann.cpp
@@ -19,44 +19,37 @@ _MCLIP_TO_OPENCLIP = {
}


def forward(self: MultilingualCLIP, input_ids: torch.Tensor, attention_mask: torch.Tensor) -> torch.Tensor:
    embs = self.transformer(input_ids, attention_mask)[0]
    embs = (embs * attention_mask.unsqueeze(2)).sum(dim=1) / attention_mask.sum(dim=1)[:, None]
    embs = self.LinearTransformation(embs)
    return torch.nn.functional.normalize(embs, dim=-1)


# unfortunately need to monkeypatch for tracing to work here
# otherwise it hits the 2GiB protobuf serialization limit
MultilingualCLIP.forward = forward


def to_torchscript(model_name: str) -> torch.jit.ScriptModule:
    with tempfile.TemporaryDirectory() as tmpdir:
        model = MultilingualCLIP.from_pretrained(model_name, cache_dir=tmpdir)

    model.eval()
    for param in model.parameters():
        param.requires_grad_(False)

    return model


def to_onnx(
    model_name: str,
    output_dir_visual: Path | str,
    output_dir_textual: Path | str,
) -> None:
    textual_path = get_model_path(output_dir_textual)
    model = to_torchscript(model_name)
    AutoTokenizer.from_pretrained(model_name).save_pretrained(output_dir_textual)
    with tempfile.TemporaryDirectory() as tmpdir:
        model = MultilingualCLIP.from_pretrained(model_name, cache_dir=tmpdir)
        AutoTokenizer.from_pretrained(model_name).save_pretrained(output_dir_textual)

    _text_encoder_to_onnx(model, textual_path)
    openclip_to_onnx(_MCLIP_TO_OPENCLIP[model_name], output_dir_visual)
    optimize(textual_path)
        for param in model.parameters():
            param.requires_grad_(False)

        export_text_encoder(model, textual_path)
        openclip_to_onnx(_MCLIP_TO_OPENCLIP[model_name], output_dir_visual)
        optimize(textual_path)


def _text_encoder_to_onnx(model: MultilingualCLIP, output_path: Path | str) -> None:
def export_text_encoder(model: MultilingualCLIP, output_path: Path | str) -> None:
    output_path = Path(output_path)

    def forward(self: MultilingualCLIP, input_ids: torch.Tensor, attention_mask: torch.Tensor) -> torch.Tensor:
        embs = self.transformer(input_ids, attention_mask)[0]
        embs = (embs * attention_mask.unsqueeze(2)).sum(dim=1) / attention_mask.sum(dim=1)[:, None]
        embs = self.LinearTransformation(embs)
        return torch.nn.functional.normalize(embs, dim=-1)

    # unfortunately need to monkeypatch for tracing to work here
    # otherwise it hits the 2GiB protobuf serialization limit
    MultilingualCLIP.forward = forward

    args = (torch.ones(1, 77, dtype=torch.int32), torch.ones(1, 77, dtype=torch.int32))
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", UserWarning)
@@ -26,17 +26,6 @@ class OpenCLIPModelConfig:
        self.sequence_length = open_clip_cfg["text_cfg"]["context_length"]


def to_torchscript(model_name: str) -> torch.jit.ScriptModule:
    with tempfile.TemporaryDirectory() as tmpdir:
        model = MultilingualCLIP.from_pretrained(model_name, cache_dir=tmpdir)

    model.eval()
    for param in model.parameters():
        param.requires_grad_(False)

    return model


def to_onnx(
    model_cfg: OpenCLIPModelConfig,
    output_dir_visual: Path | str | None = None,
@@ -62,7 +51,7 @@ def to_onnx(

        save_config(open_clip.get_model_preprocess_cfg(model), output_dir_visual / "preprocess_cfg.json")
        save_config(text_vision_cfg, output_dir_visual.parent / "config.json")
        _image_encoder_to_onnx(model, model_cfg, visual_path)
        export_image_encoder(model, model_cfg, visual_path)

        optimize(visual_path)

@@ -72,11 +61,11 @@ def to_onnx(

        tokenizer_name = text_vision_cfg["text_cfg"].get("hf_tokenizer_name", "openai/clip-vit-base-patch32")
        AutoTokenizer.from_pretrained(tokenizer_name).save_pretrained(output_dir_textual)
        _text_encoder_to_onnx(model, model_cfg, textual_path)
        export_text_encoder(model, model_cfg, textual_path)
        optimize(textual_path)


def _image_encoder_to_onnx(model: open_clip.CLIP, model_cfg: OpenCLIPModelConfig, output_path: Path | str) -> None:
def export_image_encoder(model: open_clip.CLIP, model_cfg: OpenCLIPModelConfig, output_path: Path | str) -> None:
    output_path = Path(output_path)

    def encode_image(image: torch.Tensor) -> torch.Tensor:
@@ -100,7 +89,7 @@ def _image_encoder_to_onnx(model: open_clip.CLIP, model_cfg: OpenCLIPModelConfig
    )


def _text_encoder_to_onnx(model: open_clip.CLIP, model_cfg: OpenCLIPModelConfig, output_path: Path | str) -> None:
def export_text_encoder(model: open_clip.CLIP, model_cfg: OpenCLIPModelConfig, output_path: Path | str) -> None:
    output_path = Path(output_path)

    def encode_text(text: torch.Tensor) -> torch.Tensor:
@@ -1,6 +1,6 @@
[tool.poetry]
name = "machine-learning"
version = "1.108.0"
version = "1.107.2"
description = ""
authors = ["Hau Tran <alex.tran1502@gmail.com>"]
readme = "README.md"
@@ -1,7 +1,7 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android" package="app.alextran.immich"
    xmlns:tools="http://schemas.android.com/tools">
    <application android:label="Immich" android:name=".ImmichApp" android:usesCleartextTraffic="true"
        android:icon="@mipmap/ic_launcher" android:requestLegacyExternalStorage="true" android:largeHeap="true">
        android:icon="@mipmap/ic_launcher" android:requestLegacyExternalStorage="true">

        <meta-data
            android:name="io.flutter.embedding.android.EnableImpeller"
@@ -35,8 +35,8 @@ platform :android do
      task: 'bundle',
      build_type: 'Release',
      properties: {
        "android.injected.version.code" => 148,
        "android.injected.version.name" => "1.108.0",
        "android.injected.version.code" => 147,
        "android.injected.version.name" => "1.107.2",
      }
    )
    upload_to_play_store(skip_upload_apk: true, skip_upload_images: true, skip_upload_screenshots: true, aab: '../build/app/outputs/bundle/release/app-release.aab')
@@ -167,7 +167,7 @@ SPEC CHECKSUMS:
  path_provider_foundation: 29f094ae23ebbca9d3d0cec13889cd9060c0e943
  path_provider_ios: 14f3d2fd28c4fdb42f44e0f751d12861c43cee02
  permission_handler_apple: e76247795d700c14ea09e3a2d8855d41ee80a2e6
  photo_manager: ff695c7a1dd5bc379974953a2b5c0a293f7c4c8a
  photo_manager: 4f6810b7dfc4feb03b461ac1a70dacf91fba7604
  ReachabilitySwift: 985039c6f7b23a1da463388634119492ff86c825
  SAMKeychain: 483e1c9f32984d50ca961e26818a534283b4cd5c
  share_plus: c3fef564749587fc939ef86ffb283ceac0baf9f5
@@ -383,7 +383,7 @@
CODE_SIGN_ENTITLEMENTS = Runner/RunnerProfile.entitlements;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 163;
CURRENT_PROJECT_VERSION = 162;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_BITCODE = NO;
INFOPLIST_FILE = Runner/Info.plist;
@@ -525,7 +525,7 @@
CLANG_ENABLE_MODULES = YES;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 163;
CURRENT_PROJECT_VERSION = 162;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_BITCODE = NO;
INFOPLIST_FILE = Runner/Info.plist;
@@ -553,7 +553,7 @@
CLANG_ENABLE_MODULES = YES;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 163;
CURRENT_PROJECT_VERSION = 162;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_BITCODE = NO;
INFOPLIST_FILE = Runner/Info.plist;
@@ -58,11 +58,11 @@
	<key>CFBundlePackageType</key>
	<string>APPL</string>
	<key>CFBundleShortVersionString</key>
	<string>1.108.0</string>
	<string>1.107.1</string>
	<key>CFBundleSignature</key>
	<string>????</string>
	<key>CFBundleVersion</key>
	<string>163</string>
	<string>162</string>
	<key>FLTEnableImpeller</key>
	<true/>
	<key>ITSAppUsesNonExemptEncryption</key>
@@ -16,10 +16,10 @@
default_platform(:ios)

platform :ios do
  desc "iOS Release"
  lane :release do
  desc "iOS Beta"
  lane :beta do
    increment_version_number(
      version_number: "1.108.0"
      version_number: "1.107.2"
    )
    increment_build_number(
      build_number: latest_testflight_build_number + 1,
2
mobile/openapi/README.md
generated
@@ -3,7 +3,7 @@ Immich API

This Dart package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project:

- API version: 1.108.0
- API version: 1.107.2
- Generator version: 7.5.0
- Build package: org.openapitools.codegen.languages.DartClientCodegen

@@ -23,7 +23,6 @@ class SystemConfigOAuthDto {
    required this.issuerUrl,
    required this.mobileOverrideEnabled,
    required this.mobileRedirectUri,
    required this.profileSigningAlgorithm,
    required this.scope,
    required this.signingAlgorithm,
    required this.storageLabelClaim,
@@ -51,8 +50,6 @@ class SystemConfigOAuthDto {

  String mobileRedirectUri;

  String profileSigningAlgorithm;

  String scope;

  String signingAlgorithm;
@@ -73,7 +70,6 @@ class SystemConfigOAuthDto {
    other.issuerUrl == issuerUrl &&
    other.mobileOverrideEnabled == mobileOverrideEnabled &&
    other.mobileRedirectUri == mobileRedirectUri &&
    other.profileSigningAlgorithm == profileSigningAlgorithm &&
    other.scope == scope &&
    other.signingAlgorithm == signingAlgorithm &&
    other.storageLabelClaim == storageLabelClaim &&
@@ -92,14 +88,13 @@ class SystemConfigOAuthDto {
    (issuerUrl.hashCode) +
    (mobileOverrideEnabled.hashCode) +
    (mobileRedirectUri.hashCode) +
    (profileSigningAlgorithm.hashCode) +
    (scope.hashCode) +
    (signingAlgorithm.hashCode) +
    (storageLabelClaim.hashCode) +
    (storageQuotaClaim.hashCode);

  @override
  String toString() => 'SystemConfigOAuthDto[autoLaunch=$autoLaunch, autoRegister=$autoRegister, buttonText=$buttonText, clientId=$clientId, clientSecret=$clientSecret, defaultStorageQuota=$defaultStorageQuota, enabled=$enabled, issuerUrl=$issuerUrl, mobileOverrideEnabled=$mobileOverrideEnabled, mobileRedirectUri=$mobileRedirectUri, profileSigningAlgorithm=$profileSigningAlgorithm, scope=$scope, signingAlgorithm=$signingAlgorithm, storageLabelClaim=$storageLabelClaim, storageQuotaClaim=$storageQuotaClaim]';
  String toString() => 'SystemConfigOAuthDto[autoLaunch=$autoLaunch, autoRegister=$autoRegister, buttonText=$buttonText, clientId=$clientId, clientSecret=$clientSecret, defaultStorageQuota=$defaultStorageQuota, enabled=$enabled, issuerUrl=$issuerUrl, mobileOverrideEnabled=$mobileOverrideEnabled, mobileRedirectUri=$mobileRedirectUri, scope=$scope, signingAlgorithm=$signingAlgorithm, storageLabelClaim=$storageLabelClaim, storageQuotaClaim=$storageQuotaClaim]';

  Map<String, dynamic> toJson() {
    final json = <String, dynamic>{};
@@ -113,7 +108,6 @@ class SystemConfigOAuthDto {
    json[r'issuerUrl'] = this.issuerUrl;
    json[r'mobileOverrideEnabled'] = this.mobileOverrideEnabled;
    json[r'mobileRedirectUri'] = this.mobileRedirectUri;
    json[r'profileSigningAlgorithm'] = this.profileSigningAlgorithm;
    json[r'scope'] = this.scope;
    json[r'signingAlgorithm'] = this.signingAlgorithm;
    json[r'storageLabelClaim'] = this.storageLabelClaim;
@@ -139,7 +133,6 @@ class SystemConfigOAuthDto {
      issuerUrl: mapValueOfType<String>(json, r'issuerUrl')!,
      mobileOverrideEnabled: mapValueOfType<bool>(json, r'mobileOverrideEnabled')!,
      mobileRedirectUri: mapValueOfType<String>(json, r'mobileRedirectUri')!,
      profileSigningAlgorithm: mapValueOfType<String>(json, r'profileSigningAlgorithm')!,
      scope: mapValueOfType<String>(json, r'scope')!,
      signingAlgorithm: mapValueOfType<String>(json, r'signingAlgorithm')!,
      storageLabelClaim: mapValueOfType<String>(json, r'storageLabelClaim')!,
@@ -201,7 +194,6 @@ class SystemConfigOAuthDto {
    'issuerUrl',
    'mobileOverrideEnabled',
    'mobileRedirectUri',
    'profileSigningAlgorithm',
    'scope',
    'signingAlgorithm',
    'storageLabelClaim',
@@ -1162,10 +1162,10 @@ packages:
    dependency: "direct main"
    description:
      name: photo_manager
      sha256: "68d6099d07ce5033170f8368af8128a4555cf1d590a97242f83669552de989b1"
      sha256: "8cf79918f6de9843b394a1670fe1aec54ebcac852b4b4c9ef88211894547dc61"
      url: "https://pub.dev"
    source: hosted
    version: "3.2.0"
    version: "3.0.0-dev.5"
  photo_manager_image_provider:
    dependency: "direct main"
    description:
@@ -2,7 +2,7 @@ name: immich_mobile
description: Immich - selfhosted backup media file on mobile phone

publish_to: 'none'
version: 1.108.0+148
version: 1.107.2+147

environment:
  sdk: '>=3.3.0 <4.0.0'
@@ -15,7 +15,7 @@ dependencies:
  path_provider_ios:
  # TODO: upgrade to stable after 3.0.1 is released. 3.0.0 is broken
  # https://github.com/fluttercandies/flutter_photo_manager/pull/990#issuecomment-2058066427
  photo_manager: ^3.2.0
  photo_manager: ^3.0.0-dev.5
  photo_manager_image_provider: ^2.1.0
  flutter_hooks: ^0.20.4
  hooks_riverpod: ^2.4.9
@@ -7007,7 +7007,7 @@
  "info": {
    "title": "Immich",
    "description": "Immich API",
    "version": "1.108.0",
    "version": "1.107.2",
    "contact": {}
  },
  "tags": [],
@@ -11030,9 +11030,6 @@
        "mobileRedirectUri": {
          "type": "string"
        },
        "profileSigningAlgorithm": {
          "type": "string"
        },
        "scope": {
          "type": "string"
        },
@@ -11057,7 +11054,6 @@
        "issuerUrl",
        "mobileOverrideEnabled",
        "mobileRedirectUri",
        "profileSigningAlgorithm",
        "scope",
        "signingAlgorithm",
        "storageLabelClaim",
@@ -1 +1 @@
20.15.1
20.15.0
12
open-api/typescript-sdk/package-lock.json
generated
@@ -1,18 +1,18 @@
{
  "name": "@immich/sdk",
  "version": "1.108.0",
  "version": "1.107.2",
  "lockfileVersion": 3,
  "requires": true,
  "packages": {
    "": {
      "name": "@immich/sdk",
      "version": "1.108.0",
      "version": "1.107.2",
      "license": "GNU Affero General Public License version 3",
      "dependencies": {
        "@oazapfts/runtime": "^1.0.2"
      },
      "devDependencies": {
        "@types/node": "^20.14.10",
        "@types/node": "^20.14.9",
        "typescript": "^5.3.3"
      }
    },
@@ -22,9 +22,9 @@
      "integrity": "sha512-8tKiYffhwTGHSHYGnZ3oneLGCjX0po/XAXQ5Ng9fqKkvIdl/xz8+Vh8i+6xjzZqvZ2pLVpUcuSfnvNI/x67L0g=="
    },
    "node_modules/@types/node": {
      "version": "20.14.10",
      "resolved": "https://registry.npmjs.org/@types/node/-/node-20.14.10.tgz",
      "integrity": "sha512-MdiXf+nDuMvY0gJKxyfZ7/6UFsETO7mGKF54MVD/ekJS6HdFtpZFBgrh6Pseu64XTb2MLyFPlbW6hj8HYRQNOQ==",
      "version": "20.14.9",
      "resolved": "https://registry.npmjs.org/@types/node/-/node-20.14.9.tgz",
      "integrity": "sha512-06OCtnTXtWOZBJlRApleWndH4JsRVs1pDCc8dLSQp+7PpUpX3ePdHyeNSFTeSe7FtKyQkrlPvHwJOW3SLd8Oyg==",
      "dev": true,
      "license": "MIT",
      "dependencies": {
@@ -1,6 +1,6 @@
{
  "name": "@immich/sdk",
  "version": "1.108.0",
  "version": "1.107.2",
  "description": "Auto-generated TypeScript SDK for the Immich API",
  "type": "module",
  "main": "./build/index.js",
@@ -19,7 +19,7 @@
    "@oazapfts/runtime": "^1.0.2"
  },
  "devDependencies": {
    "@types/node": "^20.14.10",
    "@types/node": "^20.14.9",
    "typescript": "^5.3.3"
  },
  "repository": {
@@ -28,6 +28,6 @@
    "directory": "open-api/typescript-sdk"
  },
  "volta": {
    "node": "20.15.1"
    "node": "20.15.0"
  }
}
@@ -1,6 +1,6 @@
/**
 * Immich
 * 1.108.0
 * 1.107.2
 * DO NOT MODIFY - This file has been generated using oazapfts.
 * See https://www.npmjs.com/package/oazapfts
 */
@@ -1063,7 +1063,6 @@ export type SystemConfigOAuthDto = {
  issuerUrl: string;
  mobileOverrideEnabled: boolean;
  mobileRedirectUri: string;
  profileSigningAlgorithm: string;
  scope: string;
  signingAlgorithm: string;
  storageLabelClaim: string;
@@ -37,96 +37,72 @@
## Warnung

- ⚠️ Das Projekt befindet sich in **sehr aktiver** Entwicklung.
- ⚠️ Gehe von möglichen Fehlern und von Änderungen mit Breaking-Changes aus.
- ⚠️ Erwarte Fehler und Änderungen mit Breaking-Changes.
- ⚠️ **Nutze die App auf keinen Fall als einziges Speichermedium für deine Fotos und Videos.**
- ⚠️ Befolge immer die [3-2-1](https://www.backblaze.com/blog/the-3-2-1-backup-strategy/) Backup-Regel für deine wertvollen Fotos und Videos!

> [!NOTE]
> Die Hauptdokumentation, einschließlich der Installationsanleitungen, befinden sich unter https://immich.app/.


## Inhalt

- [Offizielle Dokumentation](https://immich.app/docs)
- [Über Immich](https://immich.app/docs/overview/introduction)
- [Installation](https://immich.app/docs/install/requirements)
- [Roadmap](https://github.com/orgs/immich-app/projects/1)
- [Demo](#demo)
- [Funktionen](#funktionen)
- [Übersetzungen](https://immich.app/docs/developer/translations)
- [Einführung](https://immich.app/docs/overview/introduction)
- [Installation](https://immich.app/docs/install/requirements)
- [Beitragsrichtlinien](https://immich.app/docs/overview/support-the-project)

## Dokumentation

Die Hauptdokumentation, inklusive Installationsanleitungen, ist unter https://immich.app zu finden.

## Demo

Die Web-Demo kannst Du unter https://demo.immich.app finden.
Die Demo läuft auf einer Free Tier Oracle VM in Amsterdam mit einer 2.4Ghz Quad-Core ARM64 CPU und 24GB RAM.

Für die Handy-App kannst Du `https://demo.immich.app/api` als `Server Endpoint URL` angeben.

### Login Daten
```bash title="Demo Credential"
Die Anmeldedaten
email: demo@immich.app
passwort: demo
```

| Email | Password |
| --------------- | -------- |
| demo@immich.app | demo |
```
Spec: Free-tier Oracle VM - Amsterdam - 2.4Ghz quad-core ARM64 CPU, 24GB RAM
```

## Funktionen

| Funktionen | Mobil | Web |
| ---------------------------------------------------- | ------ | ----- |
| Fotos & Videos hochladen und ansehen | Ja | Ja |
| Automatische Sicherung beim öffnen der App | Ja | k. A |
| Selektive Auswahl von Alben zum Sichern | Ja | k. A |
| Vermeidung von Duplikaten | Ja | Ja |
| Automatisches Backup wenn die App geöffnet ist | Ja | n. a. |
| Selektive Auswahl von Alben zum Sichern | Ja | n. a. |
| Fotos und Videos auf das Gerät herunterladen | Ja | Ja |
| Unterstützung mehrerer Benutzer | Ja | Ja |
| Alben und geteilte Alben | Ja | Ja |
| Scrollbar mit Scrubbing-/Drag-Funktion | Ja | Ja |
| Unterstützung für RAW Formate | Ja | Ja |
| Unterstützt mehrere Benutzer | Ja | Ja |
| Album und geteilte Alben | Ja | Ja |
| Scrollleiste | Ja | Ja |
| Unterstützt RAW Formate | Ja | Ja |
| Metadaten anzeigen (EXIF, Karte) | Ja | Ja |
| Suchen nach Metadaten, Objekten, Gesichtern und CLIP | Ja | Ja |
| Administrative Funktionen (Benutzerverwaltung) | Nein | Ja |
| Hintergrundsicherung | Ja | k. A |
| Backup im Hintergrund | Ja | n. a. |
| Virtuelles Scrollen | Ja | Ja |
| OAuth Unterstützung | Ja | Ja |
| API-Schlüssel | k. A | Ja |
| LivePhoto/MotionPhoto Sicherung und Wiedergabe | Ja | Ja |
| Unterstützung für 360-Grad-Bilder | Nein | Ja |
| API-Schlüssel | n. a. | Ja |
| LivePhoto/MotionPhoto Backup und Wiedergabe | Ja | Ja |
| Benutzerdefinierte Speicherstruktur | Ja | Ja |
| Öffentliches Teilen | Nein | Ja |
| Archiv und Favoriten | Ja | Ja |
| Archive und Favoriten | Ja | Ja |
| Globale Karte | Ja | Ja |
| Partnerfreigabe (Teilen) | Ja | Ja |
| Gesichtserkennung und -gruppierung | Ja | Ja |
| Teilen mit Partner | Ja | Ja |
| Gesichtserkennung und Gruppierung | Ja | Ja |
| Rückblicke (heute vor x Jahren) | Ja | Ja |
| Offline Unterstützung | Ja | Nein |
| Schreibgeschützte Gallerie | Ja | Ja |
| Gestapelte Bilder | Ja | Ja |


## Übersetzungen

Mehr zum Thema Übersetzungen kannst du [hier](https://immich.app/docs/developer/translations) erfahren.

<a href="https://hosted.weblate.org/engage/immich/">
  <img src="https://hosted.weblate.org/widget/immich/immich/multi-auto.svg" alt="Translation status" />
</a>

## Repository-Aktivität

![Activities](https://repobeats.axiom.co/api/embed/9e86d9dc3ddd137161f2f45d2e717c12decf9290.svg "Repobeats analytics image")

## Github Sterne

<a href="https://star-history.com/#immich-app/immich&Date">
  <picture>
    <source media="(prefers-color-scheme: dark)" srcset="https://api.star-history.com/svg?repos=immich-app/immich&type=Date&theme=dark" />
    <source media="(prefers-color-scheme: light)" srcset="https://api.star-history.com/svg?repos=immich-app/immich&type=Date" />
    <img alt="Star History Chart" src="https://api.star-history.com/svg?repos=immich-app/immich&type=Date" width="100%" />
  </picture>
</a>

## Mitwirkende

<a href="https://github.com/alextran1502/immich/graphs/contributors">
  <img src="https://contrib.rocks/image?repo=immich-app/immich" width="100%"/>
</a>
@@ -1 +1 @@
20.15.1
20.15
@@ -1,5 +1,5 @@
# dev build
FROM ghcr.io/immich-app/base-server-dev:20240710@sha256:5944dac5d73dc54d733461db555786c823460e730f6793ea5ad4ee3843a75c7d as dev
FROM ghcr.io/immich-app/base-server-dev:20240708@sha256:2a9e3231c34493cb861299d475c84c031e7f04519dbc895bbebb5017d479a3cb as dev

RUN apt-get install --no-install-recommends -yqq tini
WORKDIR /usr/src/app
@@ -25,7 +25,7 @@ COPY --from=dev /usr/src/app/node_modules/@img ./node_modules/@img
COPY --from=dev /usr/src/app/node_modules/exiftool-vendored.pl ./node_modules/exiftool-vendored.pl

# web build
FROM node:20.15.1-alpine3.20@sha256:34b7aa411056c85dbf71d240d26516949b3f72b318d796c26b57caaa1df5639a as web
FROM node:20.15.0-alpine3.20@sha256:df01469346db2bf1cfc1f7261aeab86b2960efa840fe2bd46d83ff339f463665 as web

WORKDIR /usr/src/open-api/typescript-sdk
COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./
@@ -41,7 +41,7 @@ RUN npm run build


# prod build
FROM ghcr.io/immich-app/base-server-prod:20240710@sha256:bc3cda314634467d5a92b78c09ba566246e00ee8495aac398f5c1000c5e26fa9
FROM ghcr.io/immich-app/base-server-prod:20240708@sha256:0af3a5bb036c9a4b6a5a51becaa6e94fe182f6bc97480d57e8f2e6f994bfa453

WORKDIR /usr/src/app
ENV NODE_ENV=production \
994
server/package-lock.json
generated
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
{
  "name": "immich",
  "version": "1.108.0",
  "version": "1.107.2",
  "description": "",
  "author": "",
  "private": true,
@@ -46,7 +46,7 @@
    "@nestjs/swagger": "^7.1.8",
    "@nestjs/typeorm": "^10.0.0",
    "@nestjs/websockets": "^10.2.2",
    "@opentelemetry/auto-instrumentations-node": "^0.48.0",
    "@opentelemetry/auto-instrumentations-node": "^0.47.0",
    "@opentelemetry/context-async-hooks": "^1.24.0",
    "@opentelemetry/exporter-prometheus": "^0.52.0",
    "@opentelemetry/sdk-node": "^0.52.0",
@@ -106,7 +106,7 @@
    "@types/lodash": "^4.14.197",
    "@types/mock-fs": "^4.13.1",
    "@types/multer": "^1.4.7",
    "@types/node": "^20.14.10",
    "@types/node": "^20.14.9",
    "@types/nodemailer": "^6.4.14",
    "@types/picomatch": "^2.3.3",
    "@types/semver": "^7.5.8",
@@ -132,6 +132,6 @@
    "vitest": "^1.5.0"
  },
  "volta": {
    "node": "20.15.1"
    "node": "20.15.0"
  }
}
@@ -146,7 +146,6 @@ export interface SystemConfig {
    mobileRedirectUri: string;
    scope: string;
    signingAlgorithm: string;
    profileSigningAlgorithm: string;
    storageLabelClaim: string;
    storageQuotaClaim: string;
  };
@@ -290,7 +289,6 @@ export const defaults = Object.freeze<SystemConfig>({
    mobileRedirectUri: '',
    scope: 'openid email profile',
    signingAlgorithm: 'RS256',
    profileSigningAlgorithm: 'none',
    storageLabelClaim: 'preferred_username',
    storageQuotaClaim: 'immich_quota',
  },
@@ -5,7 +5,6 @@ import { APIKeyEntity } from 'src/entities/api-key.entity';
import { SessionEntity } from 'src/entities/session.entity';
import { SharedLinkEntity } from 'src/entities/shared-link.entity';
import { UserEntity } from 'src/entities/user.entity';
import { toEmail } from 'src/validation';

export enum ImmichCookie {
  ACCESS_TOKEN = 'immich_access_token',
@@ -42,7 +41,7 @@ export class AuthDto {

export class LoginCredentialDto {
  @IsEmail({ require_tld: false })
  @Transform(toEmail)
  @Transform(({ value }) => value?.toLowerCase())
  @IsNotEmpty()
  @ApiProperty({ example: 'testuser@email.com' })
  email!: string;
@@ -349,10 +349,6 @@ class SystemConfigOAuthDto {
  @IsNotEmpty()
  signingAlgorithm!: string;

  @IsString()
  @IsNotEmpty()
  profileSigningAlgorithm!: string;

  @IsString()
  storageLabelClaim!: string;

@@ -38,22 +38,6 @@ describe('create user DTO', () => {
    expect(errors).toHaveLength(0);
  });

  it('validates invalid email type', async () => {
    let dto = plainToInstance(UserAdminCreateDto, {
      email: [],
      password: 'some password',
      name: 'some name',
    });
    expect(await validate(dto)).toHaveLength(1);

    dto = plainToInstance(UserAdminCreateDto, {
      email: {},
      password: 'some password',
      name: 'some name',
    });
    expect(await validate(dto)).toHaveLength(1);
  });

  it('should allow emails without a tld', async () => {
    const someEmail = 'test@test';

@@ -1,10 +1,11 @@
import { Injectable } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { Chunked, ChunkedArray, DummyValue, GenerateSql } from 'src/decorators';
import { AssetOrder } from 'src/entities/album.entity';
import { AlbumEntity, AssetOrder } from 'src/entities/album.entity';
import { AssetJobStatusEntity } from 'src/entities/asset-job-status.entity';
import { AssetEntity, AssetType } from 'src/entities/asset.entity';
import { ExifEntity } from 'src/entities/exif.entity';
import { PartnerEntity } from 'src/entities/partner.entity';
import { SmartInfoEntity } from 'src/entities/smart-info.entity';
import {
  AssetBuilderOptions,
@@ -62,6 +63,8 @@ export class AssetRepository implements IAssetRepository {
    @InjectRepository(ExifEntity) private exifRepository: Repository<ExifEntity>,
    @InjectRepository(AssetJobStatusEntity) private jobStatusRepository: Repository<AssetJobStatusEntity>,
    @InjectRepository(SmartInfoEntity) private smartInfoRepository: Repository<SmartInfoEntity>,
    @InjectRepository(PartnerEntity) private partnerRepository: Repository<PartnerEntity>,
    @InjectRepository(AlbumEntity) private albumRepository: Repository<AlbumEntity>,
  ) {}

  async upsertExif(exif: Partial<ExifEntity>): Promise<void> {
@@ -1,19 +1,22 @@
|
||||
import { Inject, Injectable } from '@nestjs/common';
|
||||
import { InjectRepository } from '@nestjs/typeorm';
|
||||
import { InjectDataSource, InjectRepository } from '@nestjs/typeorm';
|
||||
import { DefaultReadTaskOptions, ExifTool, Tags } from 'exiftool-vendored';
|
||||
import geotz from 'geo-tz';
|
||||
import { DummyValue, GenerateSql } from 'src/decorators';
|
||||
import { ExifEntity } from 'src/entities/exif.entity';
|
||||
import { GeodataPlacesEntity } from 'src/entities/geodata-places.entity';
|
||||
import { ILoggerRepository } from 'src/interfaces/logger.interface';
|
||||
import { IMetadataRepository, ImmichTags } from 'src/interfaces/metadata.interface';
|
||||
import { Instrumentation } from 'src/utils/instrumentation';
|
||||
import { Repository } from 'typeorm';
|
||||
import { DataSource, Repository } from 'typeorm';
|
||||
|
||||
@Instrumentation()
|
||||
@Injectable()
|
||||
export class MetadataRepository implements IMetadataRepository {
|
||||
constructor(
|
||||
@InjectRepository(ExifEntity) private exifRepository: Repository<ExifEntity>,
|
||||
@InjectRepository(GeodataPlacesEntity) private geodataPlacesRepository: Repository<GeodataPlacesEntity>,
|
||||
@InjectDataSource() private dataSource: DataSource,
|
||||
@Inject(ILoggerRepository) private logger: ILoggerRepository,
|
||||
) {
|
||||
this.logger.setContext(MetadataRepository.name);
|
||||
|
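The second hunk above threads TypeORM's raw `DataSource` into the repository next to the entity-scoped repositories, the usual NestJS pattern when a class needs SQL that does not map onto a single entity. A minimal sketch of that wiring (the `ExampleRepository` class and its `ping` query are illustrative, not from this diff):

    import { Injectable } from '@nestjs/common';
    import { InjectDataSource, InjectRepository } from '@nestjs/typeorm';
    import { DataSource, Repository } from 'typeorm';
    import { ExifEntity } from 'src/entities/exif.entity';

    @Injectable()
    export class ExampleRepository {
      constructor(
        // entity-scoped repository for ordinary finds and saves
        @InjectRepository(ExifEntity) private exifRepository: Repository<ExifEntity>,
        // raw connection for queries that span entities or hit database features directly
        @InjectDataSource() private dataSource: DataSource,
      ) {}

      async ping(): Promise<unknown> {
        return this.dataSource.query('SELECT 1');
      }
    }
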
@@ -9,7 +9,7 @@ import { isNumber, isString } from 'class-validator';
import cookieParser from 'cookie';
import { DateTime } from 'luxon';
import { IncomingHttpHeaders } from 'node:http';
import { Issuer, UserinfoResponse, custom, generators } from 'openid-client';
import { ClientMetadata, Issuer, UserinfoResponse, custom, generators } from 'openid-client';
import { SystemConfig } from 'src/config';
import { AuthType, LOGIN_URL, MOBILE_REDIRECT, SALT_ROUNDS } from 'src/constants';
import { SystemConfigCore } from 'src/cores/system-config.core';
@@ -45,7 +45,9 @@ export interface LoginDetails {
deviceOS: string;
}

type OAuthProfile = UserinfoResponse;
interface OAuthProfile extends UserinfoResponse {
email: string;
}

interface ClaimOptions<T> {
key: string;
@@ -190,35 +192,34 @@ export class AuthService {
async callback(dto: OAuthCallbackDto, loginDetails: LoginDetails) {
const config = await this.configCore.getConfig({ withCache: false });
const profile = await this.getOAuthProfile(config, dto.url);
const { autoRegister, defaultStorageQuota, storageLabelClaim, storageQuotaClaim } = config.oauth;
this.logger.debug(`Logging in with OAuth: ${JSON.stringify(profile)}`);
let user = await this.userRepository.getByOAuthId(profile.sub);

// link by email
if (!user && profile.email) {
// link existing user
if (!user) {
const emailUser = await this.userRepository.getByEmail(profile.email);
if (emailUser) {
if (emailUser.oauthId) {
throw new BadRequestException('User already exists, but is linked to another account.');
} else {
user = await this.userRepository.update(emailUser.id, { oauthId: profile.sub });
}
user = await this.userRepository.update(emailUser.id, { oauthId: profile.sub });
}
}

const { autoRegister, defaultStorageQuota, storageLabelClaim, storageQuotaClaim } = config.oauth;

// register new user
if (!user) {
if (!autoRegister) {
this.logger.warn(
`Unable to register ${profile.sub}/${profile.email || '(no email)'}. To enable set OAuth Auto Register to true in admin settings.`,
`Unable to register ${profile.email}. To enable set OAuth Auto Register to true in admin settings.`,
);
throw new BadRequestException(`User does not exist and auto registering is disabled.`);
}

if (!profile.email) {
throw new BadRequestException('OAuth profile does not have an email address');
}

this.logger.log(`Registering new user: ${profile.sub}/${profile.email}`);
this.logger.log(`Registering new user: ${profile.email}/${profile.sub}`);
this.logger.verbose(`OAuth Profile: ${JSON.stringify(profile)}`);

const storageLabel = this.getClaim(profile, {
key: storageLabelClaim,
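
One behavioral note on the linking hunk above: the variant that checks `emailUser.oauthId` refuses to link by email when the matching account is already bound to an OAuth identity, which prevents a second identity-provider account sharing the same email from silently taking over the user; the variant without the check links unconditionally. The guard, restated in isolation (the `linkByEmail` helper is illustrative; `IUserRepository` is the interface used elsewhere in this diff):

    import { BadRequestException } from '@nestjs/common';
    import { IUserRepository } from 'src/interfaces/user.interface';

    const linkByEmail = async (repo: IUserRepository, profile: { sub: string; email: string }) => {
      const emailUser = await repo.getByEmail(profile.email);
      if (!emailUser) {
        return null; // no match: fall through to auto-registration
      }
      if (emailUser.oauthId) {
        // email already bound to a different OAuth identity: refuse to re-link
        throw new BadRequestException('User already exists, but is linked to another account.');
      }
      return repo.update(emailUser.id, { oauthId: profile.sub });
    };
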
@@ -298,21 +299,23 @@ export class AuthService {
}

private async getOAuthClient(config: SystemConfig) {
const { enabled, clientId, clientSecret, issuerUrl, signingAlgorithm, profileSigningAlgorithm } = config.oauth;
const { enabled, clientId, clientSecret, issuerUrl, signingAlgorithm } = config.oauth;

if (!enabled) {
throw new BadRequestException('OAuth2 is not enabled');
}

const metadata: ClientMetadata = {
client_id: clientId,
client_secret: clientSecret,
response_types: ['code'],
};

try {
const issuer = await Issuer.discover(issuerUrl);
return new issuer.Client({
client_id: clientId,
client_secret: clientSecret,
response_types: ['code'],
userinfo_signed_response_alg: profileSigningAlgorithm === 'none' ? undefined : profileSigningAlgorithm,
id_token_signed_response_alg: signingAlgorithm,
});
metadata.id_token_signed_response_alg = signingAlgorithm;

return new issuer.Client(metadata);
} catch (error: any | AggregateError) {
this.logger.error(`Error in OAuth discovery: ${error}`, error?.stack, error?.errors);
throw new InternalServerErrorException(`Error in OAuth discovery: ${error}`, { cause: error });

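Both versions of `getOAuthClient` build the client via OIDC discovery; they differ mainly in whether `userinfo_signed_response_alg` is set, that is, whether the userinfo endpoint is expected to return a signed JWT or plain JSON. A condensed sketch of the openid-client v5 calls involved (issuer URL and credentials are placeholders):

    import { Issuer, type ClientMetadata } from 'openid-client';

    // discovery fetches <issuer>/.well-known/openid-configuration
    const issuer = await Issuer.discover('https://idp.example.com');

    const metadata: ClientMetadata = {
      client_id: 'immich',
      client_secret: 'top-secret',
      response_types: ['code'],
    };
    metadata.id_token_signed_response_alg = 'RS256';
    // optionally: metadata.userinfo_signed_response_alg = 'RS256';

    const client = new issuer.Client(metadata);
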
@@ -1,117 +0,0 @@
import { defaults, SystemConfig } from 'src/config';
import { IAlbumRepository } from 'src/interfaces/album.interface';
import { IAssetRepository } from 'src/interfaces/asset.interface';
import { IJobRepository } from 'src/interfaces/job.interface';
import { ILoggerRepository } from 'src/interfaces/logger.interface';
import { INotificationRepository } from 'src/interfaces/notification.interface';
import { ISystemMetadataRepository } from 'src/interfaces/system-metadata.interface';
import { IUserRepository } from 'src/interfaces/user.interface';
import { NotificationService } from 'src/services/notification.service';
import { newAlbumRepositoryMock } from 'test/repositories/album.repository.mock';
import { newAssetRepositoryMock } from 'test/repositories/asset.repository.mock';
import { newJobRepositoryMock } from 'test/repositories/job.repository.mock';
import { newLoggerRepositoryMock } from 'test/repositories/logger.repository.mock';
import { newNotificationRepositoryMock } from 'test/repositories/notification.repository.mock';
import { newSystemMetadataRepositoryMock } from 'test/repositories/system-metadata.repository.mock';
import { newUserRepositoryMock } from 'test/repositories/user.repository.mock';
import { Mocked } from 'vitest';

const configs = {
smtpDisabled: Object.freeze<SystemConfig>({
...defaults,
notifications: {
smtp: {
...defaults.notifications.smtp,
enabled: false,
},
},
}),
smtpEnabled: Object.freeze<SystemConfig>({
...defaults,
notifications: {
smtp: {
...defaults.notifications.smtp,
enabled: true,
},
},
}),
smtpTransport: Object.freeze<SystemConfig>({
...defaults,
notifications: {
smtp: {
...defaults.notifications.smtp,
enabled: true,
transport: {
ignoreCert: false,
host: 'localhost',
port: 587,
username: 'test',
password: 'test',
},
},
},
}),
};

describe(NotificationService.name, () => {
let sut: NotificationService;
let systemMock: Mocked<ISystemMetadataRepository>;
let notificationMock: Mocked<INotificationRepository>;
let userMock: Mocked<IUserRepository>;
let jobMock: Mocked<IJobRepository>;
let loggerMock: Mocked<ILoggerRepository>;
let assetMock: Mocked<IAssetRepository>;
let albumMock: Mocked<IAlbumRepository>;

beforeEach(() => {
systemMock = newSystemMetadataRepositoryMock();
notificationMock = newNotificationRepositoryMock();
userMock = newUserRepositoryMock();
jobMock = newJobRepositoryMock();
loggerMock = newLoggerRepositoryMock();
assetMock = newAssetRepositoryMock();
albumMock = newAlbumRepositoryMock();

sut = new NotificationService(systemMock, notificationMock, userMock, jobMock, loggerMock, assetMock, albumMock);
});

it('should work', () => {
expect(sut).toBeDefined();
});

describe('onConfigValidateEvent', () => {
it('validates smtp config when enabling smtp', async () => {
const oldConfig = configs.smtpDisabled;
const newConfig = configs.smtpEnabled;

notificationMock.verifySmtp.mockResolvedValue(true);
await expect(sut.onConfigValidateEvent({ oldConfig, newConfig })).resolves.not.toThrow();
expect(notificationMock.verifySmtp).toHaveBeenCalledWith(newConfig.notifications.smtp.transport);
});

it('validates smtp config when transport changes', async () => {
const oldConfig = configs.smtpEnabled;
const newConfig = configs.smtpTransport;

notificationMock.verifySmtp.mockResolvedValue(true);
await expect(sut.onConfigValidateEvent({ oldConfig, newConfig })).resolves.not.toThrow();
expect(notificationMock.verifySmtp).toHaveBeenCalledWith(newConfig.notifications.smtp.transport);
});

it('skips smtp validation when there are no changes', async () => {
const oldConfig = { ...configs.smtpEnabled };
const newConfig = { ...configs.smtpEnabled };

await expect(sut.onConfigValidateEvent({ oldConfig, newConfig })).resolves.not.toThrow();
expect(notificationMock.verifySmtp).not.toHaveBeenCalled();
});

it('skips smtp validation when smtp is disabled', async () => {
const oldConfig = { ...configs.smtpEnabled };
const newConfig = { ...configs.smtpDisabled };

await expect(sut.onConfigValidateEvent({ oldConfig, newConfig })).resolves.not.toThrow();
expect(notificationMock.verifySmtp).not.toHaveBeenCalled();
});
});
});
@@ -1,5 +1,4 @@
import { HttpException, HttpStatus, Inject, Injectable } from '@nestjs/common';
import { isEqual } from 'lodash';
import { DEFAULT_EXTERNAL_DOMAIN } from 'src/constants';
import { SystemConfigCore } from 'src/cores/system-config.core';
import { SystemConfigSmtpDto } from 'src/dtos/system-config.dto';
@@ -45,12 +44,9 @@ export class NotificationService implements OnEvents {
this.configCore = SystemConfigCore.create(systemMetadataRepository, logger);
}

async onConfigValidateEvent({ oldConfig, newConfig }: SystemConfigUpdateEvent) {
async onConfigValidateEvent({ newConfig }: SystemConfigUpdateEvent) {
try {
if (
newConfig.notifications.smtp.enabled &&
!isEqual(oldConfig.notifications.smtp, newConfig.notifications.smtp)
) {
if (newConfig.notifications.smtp.enabled) {
await this.notificationRepository.verifySmtp(newConfig.notifications.smtp.transport);
}
} catch (error: Error | any) {

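The dropped `isEqual` guard is exactly what the deleted spec above pinned down ('skips smtp validation when there are no changes'): verify SMTP only when it is enabled and its settings actually changed, so an unrelated config save does not block on a mail-server round trip. The predicate in isolation (the `shouldVerifySmtp` name is illustrative):

    import { isEqual } from 'lodash';
    import { SystemConfig } from 'src/config';

    // true only when SMTP is on AND its slice of the config differs from the saved one
    const shouldVerifySmtp = (oldConfig: SystemConfig, newConfig: SystemConfig): boolean =>
      newConfig.notifications.smtp.enabled &&
      !isEqual(oldConfig.notifications.smtp, newConfig.notifications.smtp);
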
@@ -112,7 +112,6 @@ const updatedConfig = Object.freeze<SystemConfig>({
mobileRedirectUri: '',
scope: 'openid email profile',
signingAlgorithm: 'RS256',
profileSigningAlgorithm: 'none',
storageLabelClaim: 'preferred_username',
storageQuotaClaim: 'immich_quota',
},

@@ -152,14 +152,11 @@ export function validateCronExpression(expression: string) {
return true;
}

type IValue = { value: unknown };
type IValue = { value: string };

export const toEmail = ({ value }: IValue) => (typeof value === 'string' ? value.toLowerCase() : value);
export const toEmail = ({ value }: IValue) => (value ? value.toLowerCase() : value);

export const toSanitized = ({ value }: IValue) => {
const input = typeof value === 'string' ? value : '';
return sanitize(input.replaceAll('.', ''));
};
export const toSanitized = ({ value }: IValue) => sanitize((value || '').replaceAll('.', ''));

export const isValidInteger = (value: number, options: { min?: number; max?: number }): value is number => {
const { min = Number.MIN_SAFE_INTEGER, max = Number.MAX_SAFE_INTEGER } = options;

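The two `toEmail`/`toSanitized` variants differ in how defensively they treat non-string input. With `value: unknown` plus a `typeof` check, a payload such as `{ email: [] }` passes through unchanged and is then rejected by `@IsEmail` (exactly what the removed 'validates invalid email type' tests above assert), whereas with `value: string`, an array arriving at runtime makes `value.toLowerCase()` throw. A standalone illustration:

    type IValue = { value: unknown };

    const toEmail = ({ value }: IValue) => (typeof value === 'string' ? value.toLowerCase() : value);

    console.log(toEmail({ value: 'Admin@Example.COM' })); // 'admin@example.com'
    console.log(toEmail({ value: [] })); // [] is left intact for @IsEmail to reject
    // By contrast, (value ? value.toLowerCase() : value) typechecks when value is
    // declared as string, but throws a TypeError once an array or object sneaks in.
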
@@ -1 +1 @@
20.15.1
20.15

@@ -1,4 +1,4 @@
FROM node:20.15.1-alpine3.20@sha256:34b7aa411056c85dbf71d240d26516949b3f72b318d796c26b57caaa1df5639a
FROM node:20.15.0-alpine3.20@sha256:df01469346db2bf1cfc1f7261aeab86b2960efa840fe2bd46d83ff339f463665

RUN apk add --no-cache tini
USER node

81 web/package-lock.json generated
@@ -1,12 +1,12 @@
{
"name": "immich-web",
"version": "1.108.0",
"version": "1.107.2",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "immich-web",
"version": "1.108.0",
"version": "1.107.2",
"license": "GNU Affero General Public License version 3",
"dependencies": {
"@formatjs/icu-messageformat-parser": "^2.7.8",
@@ -16,6 +16,7 @@
"@photo-sphere-viewer/equirectangular-video-adapter": "^5.7.2",
"@photo-sphere-viewer/video-plugin": "^5.7.2",
"@zoom-image/svelte": "^0.2.6",
"buffer": "^6.0.3",
"copy-image-clipboard": "^2.1.2",
"dom-to-image": "^2.6.0",
"handlebars": "^4.7.8",
@@ -69,13 +70,13 @@
},
"../open-api/typescript-sdk": {
"name": "@immich/sdk",
"version": "1.108.0",
"version": "1.107.2",
"license": "GNU Affero General Public License version 3",
"dependencies": {
"@oazapfts/runtime": "^1.0.2"
},
"devDependencies": {
"@types/node": "^20.14.10",
"@types/node": "^20.14.9",
"typescript": "^5.3.3"
}
},
@@ -1657,30 +1658,30 @@
}
},
"node_modules/@photo-sphere-viewer/core": {
"version": "5.8.2",
"resolved": "https://registry.npmjs.org/@photo-sphere-viewer/core/-/core-5.8.2.tgz",
"integrity": "sha512-7Ex8OLk5ihywT/WpYz/+No6BlGzo/XDbW8M3pe2diBEYU7xXfxQjhQ7WKFRuaKasNrCUNks8r6jM+pUkl4MOtg==",
"version": "5.8.1",
"resolved": "https://registry.npmjs.org/@photo-sphere-viewer/core/-/core-5.8.1.tgz",
"integrity": "sha512-mxkVNtXSWHGtvPMtBJ+uXA/cxzo4dEZm74rffg14VvF1lmIR7wyqDGCXtdXjFYLRGHjiNuimhDhfaqbM/SoSYQ==",
"license": "MIT",
"dependencies": {
"three": "^0.166.1"
"three": "^0.165.0"
}
},
"node_modules/@photo-sphere-viewer/equirectangular-video-adapter": {
"version": "5.8.2",
"resolved": "https://registry.npmjs.org/@photo-sphere-viewer/equirectangular-video-adapter/-/equirectangular-video-adapter-5.8.2.tgz",
"integrity": "sha512-HaT7GsI0xydp9vaeZnWQy2jNa0TDb0CohecdlyfQNFtvG4WhpaLnibJgMQSc8m1GtsydK3cGN7HArD0fhAEyIA==",
"version": "5.8.1",
"resolved": "https://registry.npmjs.org/@photo-sphere-viewer/equirectangular-video-adapter/-/equirectangular-video-adapter-5.8.1.tgz",
"integrity": "sha512-JUqRhLN+f4RtYmvFtZBpRH4y80DCY5y00HAc26rLAh6eGCs8s4By73n5+j0ICxoaAgARuvt+kw0cdcf18AhrIQ==",
"license": "MIT",
"peerDependencies": {
"@photo-sphere-viewer/core": "5.8.2"
"@photo-sphere-viewer/core": "5.8.1"
}
},
"node_modules/@photo-sphere-viewer/video-plugin": {
"version": "5.8.2",
"resolved": "https://registry.npmjs.org/@photo-sphere-viewer/video-plugin/-/video-plugin-5.8.2.tgz",
"integrity": "sha512-HKDRkIbGqj4/k0csLRVrLXebkreHINqnb4Os+70VAjSuaK4VxRlmFy5R/LYy6nA7SDxrJR57Nq4//n75DBBDkg==",
"version": "5.8.1",
"resolved": "https://registry.npmjs.org/@photo-sphere-viewer/video-plugin/-/video-plugin-5.8.1.tgz",
"integrity": "sha512-EW8CAxAw3kyAyWvTZJFCsE+HzALYGXfcYYrbJjQLBuWObi8b1VSyT63zzzmPs9HwO9o745hyyeEOWbH5g1q5QA==",
"license": "MIT",
"peerDependencies": {
"@photo-sphere-viewer/core": "5.8.2"
"@photo-sphere-viewer/core": "5.8.1"
}
},
"node_modules/@polka/url": {
@@ -3035,6 +3036,25 @@
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
"dev": true
},
"node_modules/base64-js": {
"version": "1.5.1",
"resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz",
"integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
]
},
"node_modules/binary-extensions": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz",
@@ -3098,6 +3118,29 @@
"node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7"
}
},
"node_modules/buffer": {
"version": "6.0.3",
"resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz",
"integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
],
"dependencies": {
"base64-js": "^1.3.1",
"ieee754": "^1.2.1"
}
},
"node_modules/buffer-crc32": {
"version": "0.2.13",
"resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz",
@@ -8404,9 +8447,9 @@
}
},
"node_modules/three": {
"version": "0.166.1",
"resolved": "https://registry.npmjs.org/three/-/three-0.166.1.tgz",
"integrity": "sha512-LtuafkKHHzm61AQA1be2MAYIw1IjmhOUxhBa0prrLpEMWbV7ijvxCRHjSgHPGp2493wLBzwKV46tA9nivLEgKg==",
"version": "0.165.0",
"resolved": "https://registry.npmjs.org/three/-/three-0.165.0.tgz",
"integrity": "sha512-cc96IlVYGydeceu0e5xq70H8/yoVT/tXBxV/W8A/U6uOq7DXc4/s1Mkmnu6SqoYGhSRWWYFOhVwvq6V0VtbplA==",
"license": "MIT"
},
"node_modules/thumbhash": {

@@ -1,6 +1,6 @@
{
"name": "immich-web",
"version": "1.108.0",
"version": "1.107.2",
"license": "GNU Affero General Public License version 3",
"scripts": {
"dev": "vite dev --host 0.0.0.0 --port 3000",
@@ -68,6 +68,7 @@
"@photo-sphere-viewer/equirectangular-video-adapter": "^5.7.2",
"@photo-sphere-viewer/video-plugin": "^5.7.2",
"@zoom-image/svelte": "^0.2.6",
"buffer": "^6.0.3",
"copy-image-clipboard": "^2.1.2",
"dom-to-image": "^2.6.0",
"handlebars": "^4.7.8",
@@ -82,6 +83,6 @@
"thumbhash": "^0.1.1"
},
"volta": {
"node": "20.15.1"
"node": "20.15.0"
}
}

@@ -9,8 +9,8 @@
import { getConfig, getConfigDefaults, updateConfig, type SystemConfigDto } from '@immich/sdk';
import { loadConfig } from '$lib/stores/server-config.store';
import { cloneDeep } from 'lodash-es';
import { onMount } from 'svelte';
import type { SettingsResetOptions } from './admin-settings';
import { createEventDispatcher, onMount } from 'svelte';
import type { SettingsEventType } from './admin-settings';
import { t } from 'svelte-i18n';

export let config: SystemConfigDto;
@@ -18,8 +18,10 @@
let savedConfig: SystemConfigDto;
let defaultConfig: SystemConfigDto;

const handleReset = async (options: SettingsResetOptions) => {
await (options.default ? resetToDefault(options.configKeys) : reset(options.configKeys));
const dispatch = createEventDispatcher<{ save: void }>();

const handleReset = async (detail: SettingsEventType['reset']) => {
await (detail.default ? resetToDefault(detail.configKeys) : reset(detail.configKeys));
};

export const handleSave = async (update: Partial<SystemConfigDto>) => {
@@ -36,6 +38,8 @@
notificationController.show({ message: $t('settings_saved'), type: NotificationType.Info });

await loadConfig();

dispatch('save');
} catch (error) {
handleError(error, $t('errors.unable_to_save_settings'));
}

@@ -1,15 +1,7 @@
import type { ResetOptions } from '$lib/utils/dipatch';
import type { SystemConfigDto } from '@immich/sdk';

export type SettingsResetOptions = ResetOptions & { configKeys: Array<keyof SystemConfigDto> };
export type SettingsResetEvent = (options: SettingsResetOptions) => void;
export type SettingsSaveEvent = (config: Partial<SystemConfigDto>) => void;

export type SettingsComponentProps = {
disabled?: boolean;
defaultConfig: SystemConfigDto;
config: SystemConfigDto;
savedConfig: SystemConfigDto;
onReset: SettingsResetEvent;
onSave: SettingsSaveEvent;
export type SettingsEventType = {
reset: ResetOptions & { configKeys: Array<keyof SystemConfigDto> };
save: Partial<SystemConfigDto>;
};

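The web-side changes in this compare swap callback props (`onReset`/`onSave`) for Svelte component events: each settings panel creates a typed dispatcher from the `SettingsEventType` map above, and the parent listens with `on:reset`/`on:save`. A minimal sketch of the dispatching side (Svelte 4 `createEventDispatcher`; the button and component name are illustrative):

    <script lang="ts">
      import { createEventDispatcher } from 'svelte';
      import type { SettingsEventType } from './admin-settings';

      // dispatch('save', detail) is type-checked against SettingsEventType['save']
      const dispatch = createEventDispatcher<SettingsEventType>();
    </script>

    <!-- parent side: <SettingsPanel on:save={({ detail }) => handleSave(detail)} /> -->
    <button on:click={() => dispatch('save', {})}>Save</button>
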
@@ -8,8 +8,9 @@
import SettingSwitch from '$lib/components/shared-components/settings/setting-switch.svelte';
import { type SystemConfigDto } from '@immich/sdk';
import { isEqual } from 'lodash-es';
import { createEventDispatcher } from 'svelte';
import { fade } from 'svelte/transition';
import type { SettingsResetEvent, SettingsSaveEvent } from '../admin-settings';
import type { SettingsEventType } from '../admin-settings';
import { t } from 'svelte-i18n';
import FormatMessage from '$lib/components/i18n/format-message.svelte';

@@ -17,8 +18,8 @@
export let defaultConfig: SystemConfigDto;
export let config: SystemConfigDto; // this is the config that is being edited
export let disabled = false;
export let onReset: SettingsResetEvent;
export let onSave: SettingsSaveEvent;

const dispatch = createEventDispatcher<SettingsEventType>();

let isConfirmOpen = false;

@@ -38,7 +39,7 @@
}

isConfirmOpen = false;
onSave({ passwordLogin: config.passwordLogin, oauth: config.oauth });
dispatch('save', { passwordLogin: config.passwordLogin, oauth: config.oauth });
};
</script>

@@ -144,16 +145,6 @@
isEdited={!(config.oauth.signingAlgorithm == savedConfig.oauth.signingAlgorithm)}
/>

<SettingInputField
inputType={SettingInputFieldType.TEXT}
label={$t('admin.oauth_profile_signing_algorithm').toUpperCase()}
desc={$t('admin.oauth_profile_signing_algorithm_description')}
bind:value={config.oauth.profileSigningAlgorithm}
required={true}
disabled={disabled || !config.oauth.enabled}
isEdited={!(config.oauth.profileSigningAlgorithm == savedConfig.oauth.profileSigningAlgorithm)}
/>

<SettingInputField
inputType={SettingInputFieldType.TEXT}
label={$t('admin.oauth_storage_label_claim').toUpperCase()}
@@ -249,8 +240,8 @@
showResetToDefault={!isEqual(savedConfig.passwordLogin, defaultConfig.passwordLogin) ||
!isEqual(savedConfig.oauth, defaultConfig.oauth)}
{disabled}
onReset={(options) => onReset({ ...options, configKeys: ['passwordLogin', 'oauth'] })}
onSave={() => handleSave(false)}
on:reset={({ detail }) => dispatch('reset', { ...detail, configKeys: ['passwordLogin', 'oauth'] })}
on:save={() => handleSave(false)}
/>
</div>
</form>

@@ -11,8 +11,9 @@
} from '@immich/sdk';
import { mdiHelpCircleOutline } from '@mdi/js';
import { isEqual, sortBy } from 'lodash-es';
import { createEventDispatcher } from 'svelte';
import { fade } from 'svelte/transition';
import type { SettingsResetEvent, SettingsSaveEvent } from '../admin-settings';
import type { SettingsEventType } from '../admin-settings';
import SettingAccordion from '$lib/components/shared-components/settings/setting-accordion.svelte';
import SettingInputField, {
SettingInputFieldType,
@@ -28,8 +29,8 @@
export let defaultConfig: SystemConfigDto;
export let config: SystemConfigDto; // this is the config that is being edited
export let disabled = false;
export let onReset: SettingsResetEvent;
export let onSave: SettingsSaveEvent;

const dispatch = createEventDispatcher<SettingsEventType>();
</script>

<div>
@@ -367,8 +368,8 @@

<div class="ml-4">
<SettingButtonsRow
onReset={(options) => onReset({ ...options, configKeys: ['ffmpeg'] })}
onSave={() => onSave({ ffmpeg: config.ffmpeg })}
on:reset={({ detail }) => dispatch('reset', { ...detail, configKeys: ['ffmpeg'] })}
on:save={() => dispatch('save', { ffmpeg: config.ffmpeg })}
showResetToDefault={!isEqual(savedConfig.ffmpeg, defaultConfig.ffmpeg)}
{disabled}
/>

@@ -1,8 +1,9 @@
<script lang="ts">
import { Colorspace, ImageFormat, type SystemConfigDto } from '@immich/sdk';
import { isEqual } from 'lodash-es';
import { createEventDispatcher } from 'svelte';
import { fade } from 'svelte/transition';
import type { SettingsResetEvent, SettingsSaveEvent } from '../admin-settings';
import type { SettingsEventType } from '../admin-settings';
import SettingSelect from '$lib/components/shared-components/settings/setting-select.svelte';

import SettingSwitch from '$lib/components/shared-components/settings/setting-switch.svelte';
@@ -16,8 +17,8 @@
export let defaultConfig: SystemConfigDto;
export let config: SystemConfigDto; // this is the config that is being edited
export let disabled = false;
export let onReset: SettingsResetEvent;
export let onSave: SettingsSaveEvent;

const dispatch = createEventDispatcher<SettingsEventType>();
</script>

<div>
@@ -113,8 +114,8 @@

<div class="ml-4">
<SettingButtonsRow
onReset={(options) => onReset({ ...options, configKeys: ['image'] })}
onSave={() => onSave({ image: config.image })}
on:reset={({ detail }) => dispatch('reset', { ...detail, configKeys: ['image'] })}
on:save={() => dispatch('save', { image: config.image })}
showResetToDefault={!isEqual(savedConfig.image, defaultConfig.image)}
{disabled}
/>

@@ -2,8 +2,9 @@
import { getJobName } from '$lib/utils';
import { JobName, type SystemConfigDto, type SystemConfigJobDto } from '@immich/sdk';
import { isEqual } from 'lodash-es';
import { createEventDispatcher } from 'svelte';
import { fade } from 'svelte/transition';
import type { SettingsResetEvent, SettingsSaveEvent } from '../admin-settings';
import type { SettingsEventType } from '../admin-settings';
import SettingButtonsRow from '$lib/components/shared-components/settings/setting-buttons-row.svelte';
import SettingInputField, {
SettingInputFieldType,
@@ -14,8 +15,8 @@
export let defaultConfig: SystemConfigDto;
export let config: SystemConfigDto; // this is the config that is being edited
export let disabled = false;
export let onReset: SettingsResetEvent;
export let onSave: SettingsSaveEvent;

const dispatch = createEventDispatcher<SettingsEventType>();

const jobNames = [
JobName.ThumbnailGeneration,
@@ -66,8 +67,8 @@

<div class="ml-4">
<SettingButtonsRow
onReset={(options) => onReset({ ...options, configKeys: ['job'] })}
onSave={() => onSave({ job: config.job })}
on:reset={({ detail }) => dispatch('reset', { ...detail, configKeys: ['job'] })}
on:save={() => dispatch('save', { job: config.job })}
showResetToDefault={!isEqual(savedConfig.job, defaultConfig.job)}
{disabled}
/>

Some files were not shown because too many files have changed in this diff.