Compare commits
706 Commits
v1.99.0
...
dev/paymen
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
ad3e92fff0 | ||
|
|
588860455f | ||
|
|
643309b27f | ||
|
|
e0ec75119f | ||
|
|
f446bc8caa | ||
|
|
a2bccf23c9 | ||
|
|
6937440772 | ||
|
|
69bce6680f | ||
|
|
454c995e90 | ||
|
|
bcff21f72b | ||
|
|
47ec6c41ec | ||
|
|
471cf5eaf7 | ||
|
|
b3ee394fdc | ||
|
|
15474e81b2 | ||
|
|
bb9e18247b | ||
|
|
e7dc1f7968 | ||
|
|
1323c7ee88 | ||
|
|
d1135db8cf | ||
|
|
5af67d159f | ||
|
|
203cbbbfdb | ||
|
|
01f52c9021 | ||
|
|
4e16e2520d | ||
|
|
21718cc343 | ||
|
|
7524c746a6 | ||
|
|
69d2fcb43e | ||
|
|
66fced40e7 | ||
|
|
380ed966d8 | ||
|
|
afa10ebcb2 | ||
|
|
5ef2553bca | ||
|
|
53562e8439 | ||
|
|
0a53dc412b | ||
|
|
371a5ce0aa | ||
|
|
9fc0a0d935 | ||
|
|
8fc4ce14ab | ||
|
|
4376104e3a | ||
|
|
77d1b9ace6 | ||
|
|
802b4ef190 | ||
|
|
9d0aceb768 | ||
|
|
8315488b99 | ||
|
|
61051ba479 | ||
|
|
30e18aba69 | ||
|
|
12cf116798 | ||
|
|
416399499b | ||
|
|
5463660746 | ||
|
|
5ef144bf79 | ||
|
|
db4c66094c | ||
|
|
d400925aeb | ||
|
|
e459d524a4 | ||
|
|
3f6e61d073 | ||
|
|
cc013158d1 | ||
|
|
ce524256da | ||
|
|
771df7f09f | ||
|
|
ee3530b34c | ||
|
|
8812c3afcf | ||
|
|
fbc3790cb6 | ||
|
|
0fc6d69824 | ||
|
|
1f9158c545 | ||
|
|
832084687d | ||
|
|
bce916e4c8 | ||
|
|
f020d29ab6 | ||
|
|
dedf1ecc9d | ||
|
|
4c7347d653 | ||
|
|
dca420ef70 | ||
|
|
21bd20fd75 | ||
|
|
351dd647a9 | ||
|
|
298370b7be | ||
|
|
e3d39837d0 | ||
|
|
38f4a02a14 | ||
|
|
fc5615eff6 | ||
|
|
6879bcb7a4 | ||
|
|
11152f9b3d | ||
|
|
75830a4878 | ||
|
|
e7c8501930 | ||
|
|
50f9b2d44e | ||
|
|
9628ea2d24 | ||
|
|
4d4bb8b6a7 | ||
|
|
99f0aa868a | ||
|
|
459fee9ee4 | ||
|
|
871f3ea468 | ||
|
|
98c4c683ae | ||
|
|
8a7b0f66a4 | ||
|
|
9e71256191 | ||
|
|
d5cf8e4bfe | ||
|
|
95012dc19b | ||
|
|
f197f5d530 | ||
|
|
7168707395 | ||
|
|
847cb90038 | ||
|
|
602f0a3499 | ||
|
|
fdaa0e5413 | ||
|
|
39d2c4f37b | ||
|
|
1f5d82e9d9 | ||
|
|
e98744f222 | ||
|
|
56ea07bcba | ||
|
|
b3b258f32f | ||
|
|
69b5eb005f | ||
|
|
3f44a33eac | ||
|
|
b2a0422efb | ||
|
|
562c43b6f5 | ||
|
|
4f21f6a2e1 | ||
|
|
76fdcc9863 | ||
|
|
57d94bce68 | ||
|
|
832d728940 | ||
|
|
8bfa6769a5 | ||
|
|
e7aa50425c | ||
|
|
a5e8b451b2 | ||
|
|
13cbdf6851 | ||
|
|
967d195a05 | ||
|
|
8f37784eae | ||
|
|
ecd018a826 | ||
|
|
202745f14b | ||
|
|
6a4c2e97c0 | ||
|
|
f6f82a5662 | ||
|
|
ae21781442 | ||
|
|
06ce8247cc | ||
|
|
a4887bfa7e | ||
|
|
27a02c75dc | ||
|
|
f8ee977b9e | ||
|
|
a3e7e8cc31 | ||
|
|
a341ab0050 | ||
|
|
61b850f0ce | ||
|
|
a3489d604b | ||
|
|
00b5ad3421 | ||
|
|
2ada4a8426 | ||
|
|
924e9f08cd | ||
|
|
91b835cfeb | ||
|
|
bb79df655d | ||
|
|
02e755bd92 | ||
|
|
0963a32a95 | ||
|
|
6119618ae2 | ||
|
|
4d044658a0 | ||
|
|
67fa598f44 | ||
|
|
9222b9d130 | ||
|
|
d6757fc2dd | ||
|
|
4f838eabbe | ||
|
|
143b9d6828 | ||
|
|
1df7be8436 | ||
|
|
5f25f28c42 | ||
|
|
ae92422df7 | ||
|
|
84d824d6a7 | ||
|
|
4353153fe6 | ||
|
|
c37bf9d5d0 | ||
|
|
5c8c0b2f5b | ||
|
|
39129721fa | ||
|
|
451416ec88 | ||
|
|
e39ee8a16f | ||
|
|
1e56352b04 | ||
|
|
470c95d614 | ||
|
|
1ad04f0b17 | ||
|
|
60427f18ce | ||
|
|
9aed736911 | ||
|
|
6b369e84fc | ||
|
|
136bb69bd0 | ||
|
|
975f2351ec | ||
|
|
2e62c7b417 | ||
|
|
2689178a35 | ||
|
|
d61418886f | ||
|
|
c03981ac1d | ||
|
|
4807fc40a6 | ||
|
|
101bc290f9 | ||
|
|
df42352f84 | ||
|
|
c8aa6a62c2 | ||
|
|
85aca2bb54 | ||
|
|
ff52300624 | ||
|
|
936a46b4ed | ||
|
|
d8eca168ca | ||
|
|
64636c0618 | ||
|
|
673e97e71d | ||
|
|
984aa8fb41 | ||
|
|
7f0f016f2e | ||
|
|
0589575154 | ||
|
|
73bf8f343a | ||
|
|
581b467b4b | ||
|
|
efb844c6cd | ||
|
|
88d4338348 | ||
|
|
ce7bbe88f9 | ||
|
|
d62e90424e | ||
|
|
0f129cae4a | ||
|
|
5583635947 | ||
|
|
b7715305b3 | ||
|
|
6c008176c9 | ||
|
|
72b1d582ba | ||
|
|
7b1112f3e3 | ||
|
|
42d0fc85ca | ||
|
|
d32b621750 | ||
|
|
d551003311 | ||
|
|
dd64379a4e | ||
|
|
c1cdda0ac4 | ||
|
|
596ab39293 | ||
|
|
31e57d27a7 | ||
|
|
f28b4e7c99 | ||
|
|
f01cf63c70 | ||
|
|
e55c5091d9 | ||
|
|
e8cdf1c300 | ||
|
|
116043b2b4 | ||
|
|
acc611a3d9 | ||
|
|
4d7aa7effd | ||
|
|
77b8c2f330 | ||
|
|
09e9e91b6a | ||
|
|
ad915ccd64 | ||
|
|
1ea55d642e | ||
|
|
3d5e55bdaa | ||
|
|
46868b3336 | ||
|
|
b1ca5455b5 | ||
|
|
462f0f76a4 | ||
|
|
3d4ae9c210 | ||
|
|
f2be310aec | ||
|
|
6fd6a8ba15 | ||
|
|
e479e556bc | ||
|
|
bf036f2f58 | ||
|
|
6d575e692b | ||
|
|
4e6aeeda4d | ||
|
|
a05c990718 | ||
|
|
844f5a16a1 | ||
|
|
1bebc7368c | ||
|
|
b7ebf3152f | ||
|
|
5985f72643 | ||
|
|
a6f557c24c | ||
|
|
1045957add | ||
|
|
540e568e9d | ||
|
|
9c5a2b97bf | ||
|
|
06402aa9fb | ||
|
|
45316f985b | ||
|
|
48927f5fb9 | ||
|
|
d121903b38 | ||
|
|
e2a31323bb | ||
|
|
aa1dc68867 | ||
|
|
2ae44022c2 | ||
|
|
a97e0caeb9 | ||
|
|
f667c9597b | ||
|
|
8fdcabaf70 | ||
|
|
55a7e54011 | ||
|
|
6f82f220b8 | ||
|
|
dd8d7732de | ||
|
|
bb4843747b | ||
|
|
f3fbb9b588 | ||
|
|
35ef82ab6b | ||
|
|
fa4cd74dfd | ||
|
|
fed8d11fb8 | ||
|
|
3a68190b99 | ||
|
|
757840c2fd | ||
|
|
a8abf2753e | ||
|
|
8743e17528 | ||
|
|
34d8879d32 | ||
|
|
55031cc117 | ||
|
|
f9dc870166 | ||
|
|
87053c8c0d | ||
|
|
57429ddc76 | ||
|
|
13a62715e4 | ||
|
|
f3407860a1 | ||
|
|
004074b25a | ||
|
|
98f87b275b | ||
|
|
8e9895df27 | ||
|
|
1167f0f2b7 | ||
|
|
81e4b69caf | ||
|
|
e4b777ecef | ||
|
|
4e7966c8e8 | ||
|
|
f10fb0723d | ||
|
|
ae08abde24 | ||
|
|
3790d8fcbc | ||
|
|
535c7a8618 | ||
|
|
725d594027 | ||
|
|
8c56c1c22b | ||
|
|
6a5bc156a6 | ||
|
|
5eaf489ecf | ||
|
|
8c54c13307 | ||
|
|
675fdc1d93 | ||
|
|
e79d1b1ec2 | ||
|
|
e9f99673b9 | ||
|
|
ee6995783f | ||
|
|
78b5990d2a | ||
|
|
f13100e261 | ||
|
|
21fe829a2c | ||
|
|
ad404d79d4 | ||
|
|
5806a3ce25 | ||
|
|
7520ffd6c3 | ||
|
|
bbb9453e1a | ||
|
|
8ff397527f | ||
|
|
a2c5eaa73e | ||
|
|
065f1410f8 | ||
|
|
bc31404909 | ||
|
|
f08e9a4447 | ||
|
|
cf79bc9ed7 | ||
|
|
090592e5ae | ||
|
|
398d99a052 | ||
|
|
c18beddef8 | ||
|
|
dc4f7bef69 | ||
|
|
48b490f5e9 | ||
|
|
5b87abb021 | ||
|
|
d26ac431b8 | ||
|
|
d7f53d93a6 | ||
|
|
ec4e6a143e | ||
|
|
7961d00e56 | ||
|
|
c1253663b7 | ||
|
|
ec4eb7cd19 | ||
|
|
16706f7f49 | ||
|
|
bf100dcde1 | ||
|
|
c2a525170e | ||
|
|
0bf923feb4 | ||
|
|
42f03af2dc | ||
|
|
9bce3417e9 | ||
|
|
4b86c7a298 | ||
|
|
a2c040a47f | ||
|
|
959b3f05d2 | ||
|
|
a0c43a2b5a | ||
|
|
b7d0bc16bb | ||
|
|
ef09fc4157 | ||
|
|
84d638645d | ||
|
|
64e9791a3f | ||
|
|
0aa3b29eeb | ||
|
|
0a598ae1b8 | ||
|
|
c0495ca23f | ||
|
|
f057fe045e | ||
|
|
eba42b245d | ||
|
|
2cf63eeeab | ||
|
|
926de96ce6 | ||
|
|
374a9b557b | ||
|
|
9e79a23bbe | ||
|
|
3212069eec | ||
|
|
c1636ef7ab | ||
|
|
0b0ab99016 | ||
|
|
4f097d9106 | ||
|
|
461f2595b5 | ||
|
|
5722c830ff | ||
|
|
72ce81f0c2 | ||
|
|
0c9bf2835d | ||
|
|
ba18776fc2 | ||
|
|
fdae0dcbe5 | ||
|
|
25262b644f | ||
|
|
4291b38769 | ||
|
|
ee4877b090 | ||
|
|
b4f6184aa6 | ||
|
|
87de809e3d | ||
|
|
59caf1fce4 | ||
|
|
6eb5d2e95e | ||
|
|
32e7cfea3d | ||
|
|
fc2e709ad4 | ||
|
|
3c7b8d560f | ||
|
|
4bb7d2df49 | ||
|
|
a2cf8c7fc7 | ||
|
|
56a42dad17 | ||
|
|
13e093b3c2 | ||
|
|
2648032163 | ||
|
|
19aa97da02 | ||
|
|
034c928d9e | ||
|
|
953896a35a | ||
|
|
833a78181b | ||
|
|
cf01ec1eb0 | ||
|
|
90882a9b26 | ||
|
|
0b68cc2da6 | ||
|
|
7ea539b753 | ||
|
|
1e34b01986 | ||
|
|
0d8a04b43c | ||
|
|
5a49de5592 | ||
|
|
0c60aaf557 | ||
|
|
0571901288 | ||
|
|
557f9d8e5f | ||
|
|
8d5729c3b2 | ||
|
|
0ff0b891a7 | ||
|
|
53d571d29e | ||
|
|
00d186ec52 | ||
|
|
69f8bfe874 | ||
|
|
1d15cfb5f3 | ||
|
|
3e03f5348f | ||
|
|
59537f8f1b | ||
|
|
f1083a4c73 | ||
|
|
52bcb46b42 | ||
|
|
d52ed51aab | ||
|
|
c9dcb5c624 | ||
|
|
40899f6137 | ||
|
|
912d0c4d74 | ||
|
|
adb607c3ee | ||
|
|
c14a2eda5d | ||
|
|
a0d03925e0 | ||
|
|
732bd1e652 | ||
|
|
2943f93098 | ||
|
|
0b3373c552 | ||
|
|
a90138e42e | ||
|
|
466451abc9 | ||
|
|
f0f9053115 | ||
|
|
dc9b51ad02 | ||
|
|
0dbe44cb78 | ||
|
|
a78260296c | ||
|
|
1e004611e4 | ||
|
|
2593110219 | ||
|
|
56ce58c718 | ||
|
|
aac789f788 | ||
|
|
99ccf28bc6 | ||
|
|
48b0b7e8bd | ||
|
|
70c78a09a4 | ||
|
|
4458cc4370 | ||
|
|
0466da03ae | ||
|
|
7a2c1bab23 | ||
|
|
0435de50f8 | ||
|
|
661540c886 | ||
|
|
f5cf057e84 | ||
|
|
a91fd772e4 | ||
|
|
7f1651df71 | ||
|
|
c9a079201a | ||
|
|
be4a783845 | ||
|
|
c30cd3b378 | ||
|
|
0d3cc28f45 | ||
|
|
f004487be0 | ||
|
|
21231d53a5 | ||
|
|
a99862120d | ||
|
|
776023b149 | ||
|
|
7d4187962a | ||
|
|
a93534fc3c | ||
|
|
cef84f6ced | ||
|
|
a2180a467d | ||
|
|
1e3dceea4d | ||
|
|
fd4514711f | ||
|
|
2dd7c13b88 | ||
|
|
40931b5668 | ||
|
|
25549b87c9 | ||
|
|
7ec62f12b5 | ||
|
|
caf76f0713 | ||
|
|
6778653825 | ||
|
|
c858b43717 | ||
|
|
6eb1b82541 | ||
|
|
71b6d8b569 | ||
|
|
3abfe3c99e | ||
|
|
171b6bb0a6 | ||
|
|
78c7ff855d | ||
|
|
57be9182d4 | ||
|
|
886e07604e | ||
|
|
431ffebddd | ||
|
|
74c921148b | ||
|
|
eaf9e5e477 | ||
|
|
4478e524f8 | ||
|
|
e72e41a7aa | ||
|
|
efd8f0d648 | ||
|
|
d2b5cc6a4a | ||
|
|
596c35dc00 | ||
|
|
112d6d60ec | ||
|
|
c50241369a | ||
|
|
b74f8273c2 | ||
|
|
8573c84605 | ||
|
|
a4f805e99b | ||
|
|
7db07bbe61 | ||
|
|
3a9df6dae8 | ||
|
|
c227f9893e | ||
|
|
a3feca2580 | ||
|
|
b21566c2fc | ||
|
|
1071396a4a | ||
|
|
f58886514d | ||
|
|
17dc12cf7d | ||
|
|
6d4d0f86cf | ||
|
|
14b1425e98 | ||
|
|
c70d9f9055 | ||
|
|
18fa6018c0 | ||
|
|
47fb9bd213 | ||
|
|
6e6deec40c | ||
|
|
877207a2e6 | ||
|
|
64cfd017b4 | ||
|
|
4c4ebf769f | ||
|
|
28d081338b | ||
|
|
50c9bc0336 | ||
|
|
ed2e4e5217 | ||
|
|
1aa8707b8a | ||
|
|
103cb60a57 | ||
|
|
58e516c766 | ||
|
|
bcdec25843 | ||
|
|
28f591d01b | ||
|
|
dba365634a | ||
|
|
1c1e461936 | ||
|
|
2db76034b1 | ||
|
|
95e67a7b1d | ||
|
|
3deaaf14c0 | ||
|
|
084a97a77a | ||
|
|
ed74213c63 | ||
|
|
7ce1662b05 | ||
|
|
f959f2de85 | ||
|
|
07716bbff7 | ||
|
|
0f74b17000 | ||
|
|
3c7f70ec30 | ||
|
|
85df3f1e99 | ||
|
|
a903898781 | ||
|
|
25e1887939 | ||
|
|
9c696e4c28 | ||
|
|
87a36846f4 | ||
|
|
ded01401f8 | ||
|
|
8aff392275 | ||
|
|
14b798fcc4 | ||
|
|
97c099e26d | ||
|
|
3eb61a9d53 | ||
|
|
e65b3a8ea0 | ||
|
|
1fdbc949d6 | ||
|
|
605da89425 | ||
|
|
0d062b32a8 | ||
|
|
a4267ed60f | ||
|
|
58346465aa | ||
|
|
ec76e5ef23 | ||
|
|
37eea2d353 | ||
|
|
8c9a092561 | ||
|
|
e421fe9860 | ||
|
|
e13d4c9c13 | ||
|
|
640f53fe0a | ||
|
|
1bca1b8bde | ||
|
|
c902c93082 | ||
|
|
f1ca1794a1 | ||
|
|
ad5d115abe | ||
|
|
56079527ef | ||
|
|
c77b9f359f | ||
|
|
7f504ec5fc | ||
|
|
b1bcd67f5a | ||
|
|
2a26574808 | ||
|
|
1d427d0581 | ||
|
|
321868963d | ||
|
|
1529b67e41 | ||
|
|
190e4b55eb | ||
|
|
9e122764e7 | ||
|
|
327b9bd59c | ||
|
|
301c217303 | ||
|
|
9883473376 | ||
|
|
6631e6eedc | ||
|
|
933b6b67f5 | ||
|
|
56e0e5d6ad | ||
|
|
369bd17c8b | ||
|
|
9681f5b360 | ||
|
|
b107894976 | ||
|
|
796c933fb8 | ||
|
|
d43daaee81 | ||
|
|
b6cdffa509 | ||
|
|
7b1562c050 | ||
|
|
20583d5334 | ||
|
|
2d03d7c373 | ||
|
|
dd15d33bce | ||
|
|
c5e8f38e1e | ||
|
|
db45ec7434 | ||
|
|
4f4bceec94 | ||
|
|
7a16233584 | ||
|
|
fff12e3d78 | ||
|
|
da750ed838 | ||
|
|
e49512896f | ||
|
|
0075243ed5 | ||
|
|
29e47dd7c1 | ||
|
|
105a74caca | ||
|
|
55b9acca78 | ||
|
|
0d130b8957 | ||
|
|
0aa5d3daeb | ||
|
|
4b622e6cfa | ||
|
|
82aeb3292a | ||
|
|
335c03d0b8 | ||
|
|
4681ff88d0 | ||
|
|
33fd27f113 | ||
|
|
527fd7d472 | ||
|
|
3a69e5e819 | ||
|
|
7e611fa99c | ||
|
|
71d346207d | ||
|
|
56d27bc1b4 | ||
|
|
e1f8e96e28 | ||
|
|
ab97f03cb5 | ||
|
|
a2e38270e4 | ||
|
|
8f981b6052 | ||
|
|
939e91f9ed | ||
|
|
22c3d26604 | ||
|
|
7aaf48cb0c | ||
|
|
afd7815420 | ||
|
|
e5fe68cbf6 | ||
|
|
3b0fff3b3d | ||
|
|
ec7015be88 | ||
|
|
19fafd8c10 | ||
|
|
e47a89b274 | ||
|
|
66650f5944 | ||
|
|
0529076ed7 | ||
|
|
7f854432ae | ||
|
|
15a2e6feeb | ||
|
|
4ed68cf673 | ||
|
|
8337da183c | ||
|
|
6dfa9e1146 | ||
|
|
282bccaca5 | ||
|
|
62d307321a | ||
|
|
f7afc0334e | ||
|
|
28e8e539f6 | ||
|
|
7cc19b50fc | ||
|
|
97c340b8a4 | ||
|
|
7b1d4a6787 | ||
|
|
0714d119d7 | ||
|
|
700622e521 | ||
|
|
3682e76dee | ||
|
|
cd0e537e3e | ||
|
|
0849dbd1af | ||
|
|
4ab4a35eba | ||
|
|
e5d9372708 | ||
|
|
8edc2fb46f | ||
|
|
e520c0d1f5 | ||
|
|
506f9f6fb9 | ||
|
|
3cb8f54307 | ||
|
|
ee4d9fff16 | ||
|
|
27be813011 | ||
|
|
861b72ef04 | ||
|
|
fd83280b70 | ||
|
|
169d9d18b0 | ||
|
|
245535ee04 | ||
|
|
5bc9158724 | ||
|
|
6a4bc777a2 | ||
|
|
34cbb18ecd | ||
|
|
e2d5a8c0bb | ||
|
|
94cd806675 | ||
|
|
b6af7788e1 | ||
|
|
c4bb9f49ff | ||
|
|
8e5695f06d | ||
|
|
395c28f5fa | ||
|
|
3e5183606c | ||
|
|
6a36bbd1d1 | ||
|
|
4b39d37cae | ||
|
|
25c9b779e4 | ||
|
|
fcc3b81745 | ||
|
|
6f677b4fae | ||
|
|
78f202603c | ||
|
|
b8c5363a15 | ||
|
|
b8b3c487d4 | ||
|
|
ec48fccb30 | ||
|
|
3f61019ca1 | ||
|
|
16513b4a6e | ||
|
|
9b705e4450 | ||
|
|
e1c2135850 | ||
|
|
9fe80c25eb | ||
|
|
13b11a39a9 | ||
|
|
8bf571bf48 | ||
|
|
613b544bf0 | ||
|
|
916603d2d4 | ||
|
|
e30eecba2c | ||
|
|
3a94be0212 | ||
|
|
6295edcdb7 | ||
|
|
335b4937ed | ||
|
|
06da0469c4 | ||
|
|
1ad893ded4 | ||
|
|
636f5fb933 | ||
|
|
c45e28ab53 | ||
|
|
c56c04a82b | ||
|
|
d431d37454 | ||
|
|
1694dd146e | ||
|
|
4a6a0aa142 | ||
|
|
c788160532 | ||
|
|
cc66159f04 | ||
|
|
c58a70ac8f | ||
|
|
1855aaea99 | ||
|
|
3901b5da44 | ||
|
|
5dc59b591d | ||
|
|
96a5710932 | ||
|
|
d36d32d07b | ||
|
|
727b3b9f53 | ||
|
|
c85563da50 | ||
|
|
a771c563ba | ||
|
|
3cc800f93a | ||
|
|
b449feb3e1 | ||
|
|
b07a565e34 | ||
|
|
787eebcf1e | ||
|
|
604b8ff17c | ||
|
|
6e93ddf2f1 | ||
|
|
b6e4be72f0 | ||
|
|
75aa8e6621 | ||
|
|
5b7417bf64 | ||
|
|
db744f500b | ||
|
|
a56cf35d8c | ||
|
|
d1e6843f3e | ||
|
|
d18868873e | ||
|
|
827014fa4b | ||
|
|
944b33983c | ||
|
|
2641185af2 | ||
|
|
64aac239f0 | ||
|
|
d6823b128c | ||
|
|
508f32c08a | ||
|
|
8ed6ed4d2b | ||
|
|
1abb0bdae8 | ||
|
|
5ef6215546 | ||
|
|
95fb9c4365 | ||
|
|
fa0a5107c2 | ||
|
|
dc3c329431 | ||
|
|
2a9f2b4515 | ||
|
|
793049388b | ||
|
|
382b63954c | ||
|
|
87ccba7f9d | ||
|
|
e21c96c0ef | ||
|
|
4de0b2f44e | ||
|
|
b588a87d4a | ||
|
|
44ed1f0919 | ||
|
|
16d0df796c | ||
|
|
9fd5d2ad9c | ||
|
|
28ad004b01 | ||
|
|
ef4a492cb1 | ||
|
|
6d9e7694b1 | ||
|
|
0c13c63bb6 | ||
|
|
907eb869bc | ||
|
|
c1402eee8e | ||
|
|
84f7ca855a | ||
|
|
2dcce03352 | ||
|
|
96a22ec3c1 | ||
|
|
4b29bccc7c | ||
|
|
40e079a247 | ||
|
|
81f0265095 | ||
|
|
92cc647cf6 | ||
|
|
048d437b0b | ||
|
|
ec9a6bca14 | ||
|
|
bd5952b943 | ||
|
|
3f0d54c752 | ||
|
|
dab4595a4e | ||
|
|
6d9ca82b19 | ||
|
|
373a03e819 | ||
|
|
d97b0259fa | ||
|
|
2267ca1949 | ||
|
|
29be53e70d | ||
|
|
851fe4a49f | ||
|
|
30f499cf2e | ||
|
|
591a641d8d |
@@ -23,7 +23,6 @@ server/node_modules/
|
|||||||
server/upload/
|
server/upload/
|
||||||
server/dist/
|
server/dist/
|
||||||
server/www/
|
server/www/
|
||||||
server/test/assets/
|
|
||||||
|
|
||||||
web/node_modules/
|
web/node_modules/
|
||||||
web/coverage/
|
web/coverage/
|
||||||
|
|||||||
2
.gitattributes
vendored
@@ -2,8 +2,6 @@ mobile/openapi/**/*.md -diff -merge
|
|||||||
mobile/openapi/**/*.md linguist-generated=true
|
mobile/openapi/**/*.md linguist-generated=true
|
||||||
mobile/openapi/**/*.dart -diff -merge
|
mobile/openapi/**/*.dart -diff -merge
|
||||||
mobile/openapi/**/*.dart linguist-generated=true
|
mobile/openapi/**/*.dart linguist-generated=true
|
||||||
mobile/openapi/.openapi-generator/FILES -diff -merge
|
|
||||||
mobile/openapi/.openapi-generator/FILES linguist-generated=true
|
|
||||||
|
|
||||||
mobile/lib/**/*.g.dart -diff -merge
|
mobile/lib/**/*.g.dart -diff -merge
|
||||||
mobile/lib/**/*.g.dart linguist-generated=true
|
mobile/lib/**/*.g.dart linguist-generated=true
|
||||||
|
|||||||
@@ -6,6 +6,14 @@ body:
|
|||||||
attributes:
|
attributes:
|
||||||
value: |
|
value: |
|
||||||
Please use this form to request new feature for Immich
|
Please use this form to request new feature for Immich
|
||||||
|
|
||||||
|
- type: checkboxes
|
||||||
|
attributes:
|
||||||
|
label: I have searched the existing feature requests to make sure this is not a duplicate request.
|
||||||
|
options:
|
||||||
|
- label: "Yes"
|
||||||
|
required: true
|
||||||
|
|
||||||
- type: textarea
|
- type: textarea
|
||||||
id: feature
|
id: feature
|
||||||
attributes:
|
attributes:
|
||||||
|
|||||||
5
.github/FUNDING.yml
vendored
@@ -1,5 +0,0 @@
|
|||||||
# These are supported funding model platforms
|
|
||||||
|
|
||||||
github: immich-app
|
|
||||||
liberapay: alex.tran1502
|
|
||||||
custom: https://www.buymeacoffee.com/altran1502
|
|
||||||
10
.github/ISSUE_TEMPLATE/bug_report.yaml
vendored
@@ -87,6 +87,16 @@ body:
|
|||||||
validations:
|
validations:
|
||||||
required: true
|
required: true
|
||||||
|
|
||||||
|
- type: textarea
|
||||||
|
id: logs
|
||||||
|
attributes:
|
||||||
|
label: Relevant log output
|
||||||
|
description: Please copy and paste any relevant logs below. (code formatting is
|
||||||
|
enabled, no need for backticks)
|
||||||
|
render: shell
|
||||||
|
validations:
|
||||||
|
required: false
|
||||||
|
|
||||||
- type: textarea
|
- type: textarea
|
||||||
attributes:
|
attributes:
|
||||||
label: Additional information
|
label: Additional information
|
||||||
|
|||||||
10
.github/workflows/build-mobile.yml
vendored
@@ -37,15 +37,15 @@ jobs:
|
|||||||
|
|
||||||
- uses: actions/setup-java@v4
|
- uses: actions/setup-java@v4
|
||||||
with:
|
with:
|
||||||
distribution: "zulu"
|
distribution: 'zulu'
|
||||||
java-version: "11.0.21+9"
|
java-version: '17'
|
||||||
cache: "gradle"
|
cache: 'gradle'
|
||||||
|
|
||||||
- name: Setup Flutter SDK
|
- name: Setup Flutter SDK
|
||||||
uses: subosito/flutter-action@v2
|
uses: subosito/flutter-action@v2
|
||||||
with:
|
with:
|
||||||
channel: "stable"
|
channel: 'stable'
|
||||||
flutter-version: "3.16.9"
|
flutter-version-file: ./mobile/pubspec.yaml
|
||||||
cache: true
|
cache: true
|
||||||
|
|
||||||
- name: Create the Keystore
|
- name: Create the Keystore
|
||||||
|
|||||||
2
.github/workflows/cli.yml
vendored
@@ -58,7 +58,7 @@ jobs:
|
|||||||
uses: docker/setup-qemu-action@v3.0.0
|
uses: docker/setup-qemu-action@v3.0.0
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
uses: docker/setup-buildx-action@v3.2.0
|
uses: docker/setup-buildx-action@v3.3.0
|
||||||
|
|
||||||
- name: Login to GitHub Container Registry
|
- name: Login to GitHub Container Registry
|
||||||
uses: docker/login-action@v3
|
uses: docker/login-action@v3
|
||||||
|
|||||||
4
.github/workflows/docker-cleanup.yml
vendored
@@ -35,7 +35,7 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
- name: Clean temporary images
|
- name: Clean temporary images
|
||||||
if: "${{ env.TOKEN != '' }}"
|
if: "${{ env.TOKEN != '' }}"
|
||||||
uses: stumpylog/image-cleaner-action/ephemeral@v0.5.0
|
uses: stumpylog/image-cleaner-action/ephemeral@v0.7.0
|
||||||
with:
|
with:
|
||||||
token: "${{ env.TOKEN }}"
|
token: "${{ env.TOKEN }}"
|
||||||
owner: "immich-app"
|
owner: "immich-app"
|
||||||
@@ -64,7 +64,7 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
- name: Clean untagged images
|
- name: Clean untagged images
|
||||||
if: "${{ env.TOKEN != '' }}"
|
if: "${{ env.TOKEN != '' }}"
|
||||||
uses: stumpylog/image-cleaner-action/untagged@v0.5.0
|
uses: stumpylog/image-cleaner-action/untagged@v0.7.0
|
||||||
with:
|
with:
|
||||||
token: "${{ env.TOKEN }}"
|
token: "${{ env.TOKEN }}"
|
||||||
owner: "immich-app"
|
owner: "immich-app"
|
||||||
|
|||||||
2
.github/workflows/docker.yml
vendored
@@ -66,7 +66,7 @@ jobs:
|
|||||||
uses: docker/setup-qemu-action@v3.0.0
|
uses: docker/setup-qemu-action@v3.0.0
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
uses: docker/setup-buildx-action@v3.2.0
|
uses: docker/setup-buildx-action@v3.3.0
|
||||||
|
|
||||||
- name: Login to Docker Hub
|
- name: Login to Docker Hub
|
||||||
# Only push to Docker Hub when making a release
|
# Only push to Docker Hub when making a release
|
||||||
|
|||||||
43
.github/workflows/docs-build.yml
vendored
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
name: Docs build
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [main]
|
||||||
|
paths:
|
||||||
|
- "docs/**"
|
||||||
|
pull_request:
|
||||||
|
branches: [main]
|
||||||
|
paths:
|
||||||
|
- "docs/**"
|
||||||
|
release:
|
||||||
|
types: [published]
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.ref }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
working-directory: ./docs
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout code
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Run npm install
|
||||||
|
run: npm ci
|
||||||
|
|
||||||
|
- name: Check formatting
|
||||||
|
run: npm run format
|
||||||
|
|
||||||
|
- name: Run build
|
||||||
|
run: npm run build
|
||||||
|
|
||||||
|
- name: Upload build output
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: docs-build-output
|
||||||
|
path: docs/build/
|
||||||
|
retention-days: 1
|
||||||
189
.github/workflows/docs-deploy.yml
vendored
Normal file
@@ -0,0 +1,189 @@
|
|||||||
|
name: Docs deploy
|
||||||
|
on:
|
||||||
|
workflow_run:
|
||||||
|
workflows: ["Docs build"]
|
||||||
|
types:
|
||||||
|
- completed
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
checks:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
parameters: ${{ steps.parameters.outputs.result }}
|
||||||
|
steps:
|
||||||
|
- if: ${{ github.event.workflow_run.conclusion == 'failure' }}
|
||||||
|
run: echo 'The triggering workflow failed' && exit 1
|
||||||
|
|
||||||
|
- name: Determine deploy parameters
|
||||||
|
id: parameters
|
||||||
|
uses: actions/github-script@v7
|
||||||
|
with:
|
||||||
|
script: |
|
||||||
|
const eventType = context.payload.workflow_run.event;
|
||||||
|
const isFork = context.payload.workflow_run.repository.fork;
|
||||||
|
|
||||||
|
let parameters;
|
||||||
|
|
||||||
|
console.log({eventType, isFork});
|
||||||
|
|
||||||
|
if (eventType == "push") {
|
||||||
|
const branch = context.payload.workflow_run.head_branch;
|
||||||
|
console.log({branch});
|
||||||
|
const shouldDeploy = !isFork && branch == "main";
|
||||||
|
parameters = {
|
||||||
|
event: "branch",
|
||||||
|
name: "main",
|
||||||
|
shouldDeploy
|
||||||
|
};
|
||||||
|
} else if (eventType == "pull_request") {
|
||||||
|
let pull_number = context.payload.workflow_run.pull_requests[0]?.number;
|
||||||
|
if(!pull_number) {
|
||||||
|
const response = await github.rest.search.issuesAndPullRequests({q: 'repo:${{ github.repository }} is:pr sha:${{ github.event.workflow_run.head_sha }}',per_page: 1,})
|
||||||
|
const items = response.data.items
|
||||||
|
if (items.length < 1) {
|
||||||
|
throw new Error("No pull request found for the commit")
|
||||||
|
}
|
||||||
|
const pullRequestNumber = items[0].number
|
||||||
|
console.info("Pull request number is", pullRequestNumber)
|
||||||
|
pull_number = pullRequestNumber
|
||||||
|
}
|
||||||
|
const {data: pr} = await github.rest.pulls.get({
|
||||||
|
owner: context.repo.owner,
|
||||||
|
repo: context.repo.repo,
|
||||||
|
pull_number
|
||||||
|
});
|
||||||
|
|
||||||
|
console.log({pull_number});
|
||||||
|
|
||||||
|
parameters = {
|
||||||
|
event: "pr",
|
||||||
|
name: `pr-${pull_number}`,
|
||||||
|
pr_number: pull_number,
|
||||||
|
shouldDeploy: true
|
||||||
|
};
|
||||||
|
} else if (eventType == "release") {
|
||||||
|
parameters = {
|
||||||
|
event: "release",
|
||||||
|
name: context.payload.workflow_run.head_branch,
|
||||||
|
shouldDeploy: !isFork
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(parameters);
|
||||||
|
return parameters;
|
||||||
|
|
||||||
|
deploy:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: checks
|
||||||
|
if: ${{ fromJson(needs.checks.outputs.parameters).shouldDeploy }}
|
||||||
|
steps:
|
||||||
|
- name: Checkout code
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Load parameters
|
||||||
|
id: parameters
|
||||||
|
uses: actions/github-script@v7
|
||||||
|
with:
|
||||||
|
script: |
|
||||||
|
const json = `${{ needs.checks.outputs.parameters }}`;
|
||||||
|
const parameters = JSON.parse(json);
|
||||||
|
core.setOutput("event", parameters.event);
|
||||||
|
core.setOutput("name", parameters.name);
|
||||||
|
core.setOutput("shouldDeploy", parameters.shouldDeploy);
|
||||||
|
|
||||||
|
- run: |
|
||||||
|
echo "Starting docs deployment for ${{ steps.parameters.outputs.event }} ${{ steps.parameters.outputs.name }}"
|
||||||
|
|
||||||
|
- name: Download artifact
|
||||||
|
uses: actions/github-script@v7
|
||||||
|
with:
|
||||||
|
script: |
|
||||||
|
let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({
|
||||||
|
owner: context.repo.owner,
|
||||||
|
repo: context.repo.repo,
|
||||||
|
run_id: context.payload.workflow_run.id,
|
||||||
|
});
|
||||||
|
let matchArtifact = allArtifacts.data.artifacts.filter((artifact) => {
|
||||||
|
return artifact.name == "docs-build-output"
|
||||||
|
})[0];
|
||||||
|
let download = await github.rest.actions.downloadArtifact({
|
||||||
|
owner: context.repo.owner,
|
||||||
|
repo: context.repo.repo,
|
||||||
|
artifact_id: matchArtifact.id,
|
||||||
|
archive_format: 'zip',
|
||||||
|
});
|
||||||
|
let fs = require('fs');
|
||||||
|
fs.writeFileSync(`${process.env.GITHUB_WORKSPACE}/docs-build-output.zip`, Buffer.from(download.data));
|
||||||
|
|
||||||
|
- name: Unzip artifact
|
||||||
|
run: unzip "${{ github.workspace }}/docs-build-output.zip" -d "${{ github.workspace }}/docs/build"
|
||||||
|
|
||||||
|
- name: Deploy Docs Subdomain
|
||||||
|
env:
|
||||||
|
TF_VAR_prefix_name: ${{ steps.parameters.outputs.name}}
|
||||||
|
TF_VAR_prefix_event_type: ${{ steps.parameters.outputs.event }}
|
||||||
|
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||||
|
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
||||||
|
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
|
||||||
|
uses: gruntwork-io/terragrunt-action@v2
|
||||||
|
with:
|
||||||
|
tg_version: "0.58.12"
|
||||||
|
tofu_version: "1.7.1"
|
||||||
|
tg_dir: "deployment/modules/cloudflare/docs"
|
||||||
|
tg_command: "apply"
|
||||||
|
|
||||||
|
- name: Deploy Docs Subdomain Output
|
||||||
|
id: docs-output
|
||||||
|
env:
|
||||||
|
TF_VAR_prefix_name: ${{ steps.parameters.outputs.name}}
|
||||||
|
TF_VAR_prefix_event_type: ${{ steps.parameters.outputs.event }}
|
||||||
|
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||||
|
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
||||||
|
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
|
||||||
|
uses: gruntwork-io/terragrunt-action@v2
|
||||||
|
with:
|
||||||
|
tg_version: "0.58.12"
|
||||||
|
tofu_version: "1.7.1"
|
||||||
|
tg_dir: "deployment/modules/cloudflare/docs"
|
||||||
|
tg_command: "output -json"
|
||||||
|
|
||||||
|
- name: Output Cleaning
|
||||||
|
id: clean
|
||||||
|
run: |
|
||||||
|
TG_OUT=$(echo '${{ steps.docs-output.outputs.tg_action_output }}' | sed 's|%0A|\n|g ; s|%3C|<|g' | jq -c .)
|
||||||
|
echo "output=$TG_OUT" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: Publish to Cloudflare Pages
|
||||||
|
uses: cloudflare/pages-action@v1
|
||||||
|
with:
|
||||||
|
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN_PAGES_UPLOAD }}
|
||||||
|
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
||||||
|
projectName: ${{ fromJson(steps.clean.outputs.output).pages_project_name.value }}
|
||||||
|
workingDirectory: "docs"
|
||||||
|
directory: "build"
|
||||||
|
branch: ${{ steps.parameters.outputs.name }}
|
||||||
|
wranglerVersion: '3'
|
||||||
|
|
||||||
|
- name: Deploy Docs Release Domain
|
||||||
|
if: ${{ steps.parameters.outputs.event == 'release' }}
|
||||||
|
env:
|
||||||
|
TF_VAR_prefix_name: ${{ steps.parameters.outputs.name}}
|
||||||
|
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||||
|
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
||||||
|
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
|
||||||
|
uses: gruntwork-io/terragrunt-action@v2
|
||||||
|
with:
|
||||||
|
tg_version: '0.58.12'
|
||||||
|
tofu_version: '1.7.1'
|
||||||
|
tg_dir: 'deployment/modules/cloudflare/docs-release'
|
||||||
|
tg_command: 'apply'
|
||||||
|
|
||||||
|
- name: Comment
|
||||||
|
uses: actions-cool/maintain-one-comment@v3
|
||||||
|
if: ${{ steps.parameters.outputs.event == 'pr' }}
|
||||||
|
with:
|
||||||
|
number: ${{ fromJson(needs.checks.outputs.parameters).pr_number }}
|
||||||
|
body: |
|
||||||
|
📖 Documentation deployed to [${{ fromJson(steps.clean.outputs.output).immich_app_branch_subdomain.value }}](https://${{ fromJson(steps.clean.outputs.output).immich_app_branch_subdomain.value }})
|
||||||
|
emojis: 'rocket'
|
||||||
|
body-include: '<!-- Docs PR URL -->'
|
||||||
32
.github/workflows/docs-destroy.yml
vendored
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
name: Docs destroy
|
||||||
|
on:
|
||||||
|
pull_request_target:
|
||||||
|
types: [closed]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
deploy:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout code
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Destroy Docs Subdomain
|
||||||
|
env:
|
||||||
|
TF_VAR_prefix_name: "pr-${{ github.event.number }}"
|
||||||
|
TF_VAR_prefix_event_type: "pr"
|
||||||
|
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||||
|
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
||||||
|
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
|
||||||
|
uses: gruntwork-io/terragrunt-action@v2
|
||||||
|
with:
|
||||||
|
tg_version: "0.58.12"
|
||||||
|
tofu_version: "1.7.1"
|
||||||
|
tg_dir: "deployment/modules/cloudflare/docs"
|
||||||
|
tg_command: "destroy"
|
||||||
|
|
||||||
|
- name: Comment
|
||||||
|
uses: actions-cool/maintain-one-comment@v3
|
||||||
|
with:
|
||||||
|
number: ${{ github.event.number }}
|
||||||
|
delete: true
|
||||||
|
body-include: '<!-- Docs PR URL -->'
|
||||||
15
.github/workflows/pr-require-conventional-commit.yml
vendored
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
name: PR Conventional Commit Validation
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
types: [opened, synchronize, reopened, edited]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
validate-pr-title:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: PR Conventional Commit Validation
|
||||||
|
uses: ytanikin/PRConventionalCommits@1.2.0
|
||||||
|
with:
|
||||||
|
task_types: '["feat","fix","docs","test","ci","refactor","perf","chore","revert"]'
|
||||||
|
add_label: 'false'
|
||||||
4
.github/workflows/static_analysis.yml
vendored
@@ -22,8 +22,8 @@ jobs:
|
|||||||
- name: Setup Flutter SDK
|
- name: Setup Flutter SDK
|
||||||
uses: subosito/flutter-action@v2
|
uses: subosito/flutter-action@v2
|
||||||
with:
|
with:
|
||||||
channel: "stable"
|
channel: 'stable'
|
||||||
flutter-version: "3.16.9"
|
flutter-version-file: ./mobile/pubspec.yaml
|
||||||
|
|
||||||
- name: Install dependencies
|
- name: Install dependencies
|
||||||
run: dart pub get
|
run: dart pub get
|
||||||
|
|||||||
64
.github/workflows/test.yml
vendored
@@ -10,41 +10,6 @@ concurrency:
|
|||||||
cancel-in-progress: true
|
cancel-in-progress: true
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
server-e2e-jobs:
|
|
||||||
name: Server (e2e-jobs)
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout code
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
submodules: 'recursive'
|
|
||||||
|
|
||||||
- name: Run e2e tests
|
|
||||||
run: make server-e2e-jobs
|
|
||||||
|
|
||||||
doc-tests:
|
|
||||||
name: Docs
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
defaults:
|
|
||||||
run:
|
|
||||||
working-directory: ./docs
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout code
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Run npm install
|
|
||||||
run: npm ci
|
|
||||||
|
|
||||||
- name: Run formatter
|
|
||||||
run: npm run format
|
|
||||||
if: ${{ !cancelled() }}
|
|
||||||
|
|
||||||
- name: Run build
|
|
||||||
run: npm run build
|
|
||||||
if: ${{ !cancelled() }}
|
|
||||||
|
|
||||||
server-unit-tests:
|
server-unit-tests:
|
||||||
name: Server
|
name: Server
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
@@ -221,7 +186,7 @@ jobs:
|
|||||||
uses: subosito/flutter-action@v2
|
uses: subosito/flutter-action@v2
|
||||||
with:
|
with:
|
||||||
channel: 'stable'
|
channel: 'stable'
|
||||||
flutter-version: '3.16.9'
|
flutter-version-file: ./mobile/pubspec.yaml
|
||||||
- name: Run tests
|
- name: Run tests
|
||||||
working-directory: ./mobile
|
working-directory: ./mobile
|
||||||
run: flutter test -j 1
|
run: flutter test -j 1
|
||||||
@@ -273,16 +238,27 @@ jobs:
|
|||||||
name: OpenAPI Clients
|
name: OpenAPI Clients
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- name: Checkout code
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Install server dependencies
|
||||||
|
run: npm --prefix=server ci
|
||||||
|
|
||||||
|
- name: Build the app
|
||||||
|
run: npm --prefix=server run build
|
||||||
|
|
||||||
- name: Run API generation
|
- name: Run API generation
|
||||||
run: make open-api
|
run: make open-api
|
||||||
|
|
||||||
- name: Find file changes
|
- name: Find file changes
|
||||||
uses: tj-actions/verify-changed-files@v19
|
uses: tj-actions/verify-changed-files@v20
|
||||||
id: verify-changed-files
|
id: verify-changed-files
|
||||||
with:
|
with:
|
||||||
files: |
|
files: |
|
||||||
mobile/openapi
|
mobile/openapi
|
||||||
open-api/typescript-sdk
|
open-api/typescript-sdk
|
||||||
|
open-api/immich-openapi-specs.json
|
||||||
|
|
||||||
- name: Verify files have not changed
|
- name: Verify files have not changed
|
||||||
if: steps.verify-changed-files.outputs.files_changed == 'true'
|
if: steps.verify-changed-files.outputs.files_changed == 'true'
|
||||||
run: |
|
run: |
|
||||||
@@ -329,14 +305,14 @@ jobs:
|
|||||||
|
|
||||||
- name: Generate new migrations
|
- name: Generate new migrations
|
||||||
continue-on-error: true
|
continue-on-error: true
|
||||||
run: npm run typeorm:migrations:generate ./src/infra/migrations/TestMigration
|
run: npm run typeorm:migrations:generate ./src/migrations/TestMigration
|
||||||
|
|
||||||
- name: Find file changes
|
- name: Find file changes
|
||||||
uses: tj-actions/verify-changed-files@v19
|
uses: tj-actions/verify-changed-files@v20
|
||||||
id: verify-changed-files
|
id: verify-changed-files
|
||||||
with:
|
with:
|
||||||
files: |
|
files: |
|
||||||
server/src/infra/migrations/
|
server/src/migrations/
|
||||||
- name: Verify migration files have not changed
|
- name: Verify migration files have not changed
|
||||||
if: steps.verify-changed-files.outputs.files_changed == 'true'
|
if: steps.verify-changed-files.outputs.files_changed == 'true'
|
||||||
run: |
|
run: |
|
||||||
@@ -345,16 +321,16 @@ jobs:
|
|||||||
exit 1
|
exit 1
|
||||||
|
|
||||||
- name: Run SQL generation
|
- name: Run SQL generation
|
||||||
run: npm run sql:generate
|
run: npm run sync:sql
|
||||||
env:
|
env:
|
||||||
DB_URL: postgres://postgres:postgres@localhost:5432/immich
|
DB_URL: postgres://postgres:postgres@localhost:5432/immich
|
||||||
|
|
||||||
- name: Find file changes
|
- name: Find file changes
|
||||||
uses: tj-actions/verify-changed-files@v19
|
uses: tj-actions/verify-changed-files@v20
|
||||||
id: verify-changed-sql-files
|
id: verify-changed-sql-files
|
||||||
with:
|
with:
|
||||||
files: |
|
files: |
|
||||||
server/src/infra/sql
|
server/src/queries
|
||||||
|
|
||||||
- name: Verify SQL files have not changed
|
- name: Verify SQL files have not changed
|
||||||
if: steps.verify-changed-sql-files.outputs.files_changed == 'true'
|
if: steps.verify-changed-sql-files.outputs.files_changed == 'true'
|
||||||
|
|||||||
5
.gitignore
vendored
@@ -14,7 +14,10 @@ mobile/gradle.properties
|
|||||||
mobile/openapi/pubspec.lock
|
mobile/openapi/pubspec.lock
|
||||||
mobile/*.jks
|
mobile/*.jks
|
||||||
mobile/libisar.dylib
|
mobile/libisar.dylib
|
||||||
|
mobile/openapi/test
|
||||||
|
mobile/openapi/doc
|
||||||
|
mobile/openapi/.openapi-generator/FILES
|
||||||
|
|
||||||
open-api/typescript-sdk/build
|
open-api/typescript-sdk/build
|
||||||
mobile/android/fastlane/report.xml
|
mobile/android/fastlane/report.xml
|
||||||
mobile/ios/fastlane/report.xml
|
mobile/ios/fastlane/report.xml
|
||||||
|
|||||||
2
.gitmodules
vendored
@@ -2,5 +2,5 @@
|
|||||||
path = mobile/.isar
|
path = mobile/.isar
|
||||||
url = https://github.com/isar/isar
|
url = https://github.com/isar/isar
|
||||||
[submodule "server/test/assets"]
|
[submodule "server/test/assets"]
|
||||||
path = server/test/assets
|
path = e2e/test-assets
|
||||||
url = https://github.com/immich-app/test-assets
|
url = https://github.com/immich-app/test-assets
|
||||||
|
|||||||
44
.vscode/settings.json
vendored
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
{
|
||||||
|
"editor.formatOnSave": true,
|
||||||
|
"[javascript]": {
|
||||||
|
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||||
|
"editor.tabSize": 2,
|
||||||
|
"editor.formatOnSave": true
|
||||||
|
},
|
||||||
|
"[typescript]": {
|
||||||
|
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||||
|
"editor.tabSize": 2,
|
||||||
|
"editor.formatOnSave": true
|
||||||
|
},
|
||||||
|
"[css]": {
|
||||||
|
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||||
|
"editor.tabSize": 2,
|
||||||
|
"editor.formatOnSave": true
|
||||||
|
},
|
||||||
|
"[svelte]": {
|
||||||
|
"editor.defaultFormatter": "svelte.svelte-vscode",
|
||||||
|
"editor.tabSize": 2
|
||||||
|
},
|
||||||
|
"svelte.enable-ts-plugin": true,
|
||||||
|
"eslint.validate": [
|
||||||
|
"javascript",
|
||||||
|
"svelte"
|
||||||
|
],
|
||||||
|
"typescript.preferences.importModuleSpecifier": "non-relative",
|
||||||
|
"[dart]": {
|
||||||
|
"editor.formatOnSave": true,
|
||||||
|
"editor.selectionHighlight": false,
|
||||||
|
"editor.suggest.snippetsPreventQuickSuggestions": false,
|
||||||
|
"editor.suggestSelection": "first",
|
||||||
|
"editor.tabCompletion": "onlySnippets",
|
||||||
|
"editor.wordBasedSuggestions": "off",
|
||||||
|
"editor.defaultFormatter": "Dart-Code.dart-code"
|
||||||
|
},
|
||||||
|
"cSpell.words": [
|
||||||
|
"immich"
|
||||||
|
],
|
||||||
|
"explorer.fileNesting.enabled": true,
|
||||||
|
"explorer.fileNesting.patterns": {
|
||||||
|
"*.ts": "${capture}.spec.ts,${capture}.mock.ts"
|
||||||
|
}
|
||||||
|
}
|
||||||
5
CODEOWNERS
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
/.github/ @bo0tzz
|
||||||
|
/docker/ @bo0tzz
|
||||||
|
/server/ @danieldietzler
|
||||||
|
/machine-learning/ @mertalev
|
||||||
|
/e2e/ @danieldietzler
|
||||||
5
Makefile
@@ -16,9 +16,6 @@ stage:
|
|||||||
pull-stage:
|
pull-stage:
|
||||||
docker compose -f ./docker/docker-compose.staging.yml pull
|
docker compose -f ./docker/docker-compose.staging.yml pull
|
||||||
|
|
||||||
server-e2e-jobs:
|
|
||||||
docker compose -f ./server/e2e/docker-compose.server-e2e.yml up --renew-anon-volumes --abort-on-container-exit --exit-code-from immich-server --remove-orphans --build
|
|
||||||
|
|
||||||
.PHONY: e2e
|
.PHONY: e2e
|
||||||
e2e:
|
e2e:
|
||||||
docker compose -f ./e2e/docker-compose.yml up --build -V --remove-orphans
|
docker compose -f ./e2e/docker-compose.yml up --build -V --remove-orphans
|
||||||
@@ -40,7 +37,7 @@ open-api-typescript:
|
|||||||
cd ./open-api && bash ./bin/generate-open-api.sh typescript
|
cd ./open-api && bash ./bin/generate-open-api.sh typescript
|
||||||
|
|
||||||
sql:
|
sql:
|
||||||
npm --prefix server run sql:generate
|
npm --prefix server run sync:sql
|
||||||
|
|
||||||
attach-server:
|
attach-server:
|
||||||
docker exec -it docker_immich-server_1 sh
|
docker exec -it docker_immich-server_1 sh
|
||||||
|
|||||||
52
README.md
@@ -18,17 +18,20 @@
|
|||||||
</a>
|
</a>
|
||||||
<br/>
|
<br/>
|
||||||
<p align="center">
|
<p align="center">
|
||||||
<a href="README_ca_ES.md">Català</a>
|
|
||||||
<a href="README_es_ES.md">Español</a>
|
<a href="readme_i18n/README_ca_ES.md">Català</a>
|
||||||
<a href="README_fr_FR.md">Français</a>
|
<a href="readme_i18n/README_es_ES.md">Español</a>
|
||||||
<a href="README_it_IT.md">Italiano</a>
|
<a href="readme_i18n/README_fr_FR.md">Français</a>
|
||||||
<a href="README_ja_JP.md">日本語</a>
|
<a href="readme_i18n/README_it_IT.md">Italiano</a>
|
||||||
<a href="README_ko_KR.md">한국어</a>
|
<a href="readme_i18n/README_ja_JP.md">日本語</a>
|
||||||
<a href="README_de_DE.md">Deutsch</a>
|
<a href="readme_i18n/README_ko_KR.md">한국어</a>
|
||||||
<a href="README_nl_NL.md">Nederlands</a>
|
<a href="readme_i18n/README_de_DE.md">Deutsch</a>
|
||||||
<a href="README_tr_TR.md">Türkçe</a>
|
<a href="readme_i18n/README_nl_NL.md">Nederlands</a>
|
||||||
<a href="README_zh_CN.md">中文</a>
|
<a href="readme_i18n/README_tr_TR.md">Türkçe</a>
|
||||||
<a href="README_ru_RU.md">Русский</a>
|
<a href="readme_i18n/README_zh_CN.md">中文</a>
|
||||||
|
<a href="readme_i18n/README_ru_RU.md">Русский</a>
|
||||||
|
<a href="readme_i18n/README_pt_BR.md">Português Brasileiro</a>
|
||||||
|
<a href="readme_i18n/README_ar_JO.md">العربية</a>
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||
## Disclaimer
|
## Disclaimer
|
||||||
@@ -47,7 +50,6 @@
|
|||||||
- [Introduction](https://immich.app/docs/overview/introduction)
|
- [Introduction](https://immich.app/docs/overview/introduction)
|
||||||
- [Installation](https://immich.app/docs/install/requirements)
|
- [Installation](https://immich.app/docs/install/requirements)
|
||||||
- [Contribution Guidelines](https://immich.app/docs/overview/support-the-project)
|
- [Contribution Guidelines](https://immich.app/docs/overview/support-the-project)
|
||||||
- [Support The Project](#support-the-project)
|
|
||||||
|
|
||||||
## Documentation
|
## Documentation
|
||||||
|
|
||||||
@@ -70,6 +72,7 @@ Spec: Free-tier Oracle VM - Amsterdam - 2.4Ghz quad-core ARM64 CPU, 24GB RAM
|
|||||||
```
|
```
|
||||||
|
|
||||||
## Activities
|
## Activities
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
## Features
|
## Features
|
||||||
@@ -106,23 +109,6 @@ Spec: Free-tier Oracle VM - Amsterdam - 2.4Ghz quad-core ARM64 CPU, 24GB RAM
|
|||||||
| Read-only gallery | Yes | Yes |
|
| Read-only gallery | Yes | Yes |
|
||||||
| Stacked Photos | Yes | Yes |
|
| Stacked Photos | Yes | Yes |
|
||||||
|
|
||||||
## Support the project
|
|
||||||
|
|
||||||
I've committed to this project, and I will not stop. I will keep updating the docs, adding new features, and fixing bugs. But I can't do it alone. So I need your help to give me additional motivation to keep going.
|
|
||||||
|
|
||||||
As our hosts in the [selfhosted.show - In the episode 'The-organization-must-not-be-name is a Hostile Actor'](https://selfhosted.show/79?t=1418) said, this is a massive undertaking of what the team and I are doing. And I would love to someday be able to do this full-time, and I am asking for your help to make that happen.
|
|
||||||
|
|
||||||
If you feel like this is the right cause and the app is something you are seeing yourself using for a long time, please consider supporting the project with the option below.
|
|
||||||
|
|
||||||
### Donation
|
|
||||||
|
|
||||||
- [Monthly donation](https://github.com/sponsors/immich-app) via GitHub Sponsors
|
|
||||||
- [One-time donation](https://github.com/sponsors/immich-app?frequency=one-time&sponsor=alextran1502) via GitHub Sponsors
|
|
||||||
- [Liberapay](https://liberapay.com/alex.tran1502/)
|
|
||||||
- [buymeacoffee](https://www.buymeacoffee.com/altran1502)
|
|
||||||
- Bitcoin: 3QVAb9dCHutquVejeNXitPqZX26Yg5kxb7
|
|
||||||
- ZCash: u1smm4wvqegcp46zss2jf5xptchgeczp4rx7a0wu3mermf2wxahm26yyz5w9mw3f2p4emwlljxjumg774kgs8rntt9yags0whnzane4n67z4c7gppq4yyvcj404ne3r769prwzd9j8ntvqp44fa6d67sf7rmcfjmds3gmeceff4u8e92rh38nd30cr96xw6vfhk6scu4ws90ldzupr3sz
|
|
||||||
|
|
||||||
## Contributors
|
## Contributors
|
||||||
|
|
||||||
<a href="https://github.com/alextran1502/immich/graphs/contributors">
|
<a href="https://github.com/alextran1502/immich/graphs/contributors">
|
||||||
@@ -131,6 +117,10 @@ If you feel like this is the right cause and the app is something you are seeing
|
|||||||
|
|
||||||
## Star History
|
## Star History
|
||||||
|
|
||||||
<a href="https://star-history.com/#immich-app/immich">
|
<a href="https://star-history.com/#immich-app/immich&Date">
|
||||||
<img src="https://api.star-history.com/svg?repos=immich-app/immich&type=Date" alt="Star History Chart" width="100%" />
|
<picture>
|
||||||
|
<source media="(prefers-color-scheme: dark)" srcset="https://api.star-history.com/svg?repos=immich-app/immich&type=Date&theme=dark" />
|
||||||
|
<source media="(prefers-color-scheme: light)" srcset="https://api.star-history.com/svg?repos=immich-app/immich&type=Date" />
|
||||||
|
<img alt="Star History Chart" src="https://api.star-history.com/svg?repos=immich-app/immich&type=Date" width="100%" />
|
||||||
|
</picture>
|
||||||
</a>
|
</a>
|
||||||
|
|||||||
124
README_ru_RU.md
@@ -1,124 +0,0 @@
|
|||||||
<p align="center">
|
|
||||||
<br/>
|
|
||||||
<a href="https://opensource.org/license/agpl-v3"><img src="https://img.shields.io/badge/License-AGPL_v3-blue.svg?color=3F51B5&style=for-the-badge&label=License&logoColor=000000&labelColor=ececec" alt="License: AGPLv3"></a>
|
|
||||||
<a href="https://discord.gg/D8JsnBEuKb">
|
|
||||||
<img src="https://img.shields.io/discord/979116623879368755.svg?label=Discord&logo=Discord&style=for-the-badge&logoColor=000000&labelColor=ececec" alt="Discord"/>
|
|
||||||
</a>
|
|
||||||
<br/>
|
|
||||||
<br/>
|
|
||||||
</p>
|
|
||||||
|
|
||||||
<p align="center">
|
|
||||||
<img src="design/immich-logo-stacked-light.svg" width="300" title="Login With Custom URL">
|
|
||||||
</p>
|
|
||||||
<h3 align="center">Immich - Высокопроизводительное решение для автономоного создания фото и видео архивов</h3>
|
|
||||||
<br/>
|
|
||||||
<a href="https://immich.app">
|
|
||||||
<img src="design/immich-screenshots.png" title="Main Screenshot">
|
|
||||||
</a>
|
|
||||||
<br/>
|
|
||||||
<p align="center">
|
|
||||||
<a href="README_ca_ES.md">Català</a>
|
|
||||||
<a href="README_es_ES.md">Español</a>
|
|
||||||
<a href="README_fr_FR.md">Français</a>
|
|
||||||
<a href="README_it_IT.md">Italiano</a>
|
|
||||||
<a href="README_ja_JP.md">日本語</a>
|
|
||||||
<a href="README_ko_KR.md">한국어</a>
|
|
||||||
<a href="README_de_DE.md">Deutsch</a>
|
|
||||||
<a href="README_nl_NL.md">Nederlands</a>
|
|
||||||
<a href="README_tr_TR.md">Türkçe</a>
|
|
||||||
<a href="README_zh_CN.md">中文</a>
|
|
||||||
<a href="README_ru_RU.md">Русский</a>
|
|
||||||
</p>
|
|
||||||
|
|
||||||
## Предупреждение
|
|
||||||
|
|
||||||
- ⚠️ Этот проект находится **в очень активной** разработке.
|
|
||||||
- ⚠️ Ожидайте ошибок и критических изменение.
|
|
||||||
- ⚠️ **Не используйте это приложение для бекапа ваших фото и видео.**
|
|
||||||
- ⚠️ Всегда следуйте [3-2-1](https://www.backblaze.com/blog/the-3-2-1-backup-strategy/) плану резервного копирования ваших драгоценных фото и видео!
|
|
||||||
|
|
||||||
## Содержание
|
|
||||||
|
|
||||||
- [Официальная документация](https://immich.app/docs)
|
|
||||||
- [План разработки](https://github.com/orgs/immich-app/projects/1)
|
|
||||||
- [Демо](#demo)
|
|
||||||
- [Возможности](#features)
|
|
||||||
- [Введение](https://immich.app/docs/overview/introduction)
|
|
||||||
- [Инсталяция](https://immich.app/docs/install/requirements)
|
|
||||||
- [Гайд по доработке проекта](https://immich.app/docs/overview/support-the-project)
|
|
||||||
- [Поддержки проект](#support-the-project)
|
|
||||||
|
|
||||||
## Документация
|
|
||||||
|
|
||||||
Вы можете найти основную документация, включая инструкции по установке по ссылке https://immich.app/.
|
|
||||||
|
|
||||||
## Демо
|
|
||||||
|
|
||||||
Вы можете посмотреть веб демо по ссылке https://demo.immich.app
|
|
||||||
|
|
||||||
Для мобильного приложения вы можете использовать адрес `https://demo.immich.app/api` в поле `Server Endpoint URL`
|
|
||||||
|
|
||||||
```bash title="Демо доступ"
|
|
||||||
Реквизиты доступа
|
|
||||||
логин/почта: demo@immich.app
|
|
||||||
пароль: demo
|
|
||||||
```
|
|
||||||
|
|
||||||
```
|
|
||||||
Spec: Free-tier Oracle VM - Amsterdam - 2.4Ghz quad-core ARM64 CPU, 24GB RAM
|
|
||||||
```
|
|
||||||
|
|
||||||
## Возможности
|
|
||||||
|
|
||||||
| Возможности | Приложение | Веб |
|
|
||||||
| --------------------------------------------------- | ---------- | --- |
|
|
||||||
| Выгрузка на сервер и просмотр видео и фото | Да | Да |
|
|
||||||
| Авто бекап когда приложение открыто | Да | Н/Д |
|
|
||||||
| Выбор альбома(ов) для бекапа | Да | Н/Д |
|
|
||||||
| загрузка с сервера фото и видео на устройство | Да | Да |
|
|
||||||
| Поддержка нескольких пользователей | Да | Да |
|
|
||||||
| Альбомы и общие альбомы | Да | Да |
|
|
||||||
| Прокручиваемая/перетаскиваемая полоса прокрутки | Да | Да |
|
|
||||||
| Поддержка формата RAW | Да | Да |
|
|
||||||
| Просмотр метаданных (EXIF, map) | Да | Да |
|
|
||||||
| Поиск до метаданным, объектам, лицам и CLIP | Да | Да |
|
|
||||||
| Административные функци (управление пользователями) | Нет | Да |
|
|
||||||
| Фоновый бекпа | Да | Н/Д |
|
|
||||||
| Виртуальная прокрутка | Да | Да |
|
|
||||||
| Поддержка OAuth | Да | Да |
|
|
||||||
| Ключи API | Н/Д | Да |
|
|
||||||
| LivePhoto/MotionPhoto бекап и воспроизведение | Да | Да |
|
|
||||||
| Настраиваемая структура хранилища | Да | Да |
|
|
||||||
| Публичные альбомы | Нет | Да |
|
|
||||||
| Архив и Избранное | Да | Да |
|
|
||||||
| Мировая карта | Да | Да |
|
|
||||||
| Совместное использование | Да | Да |
|
|
||||||
| Распознавание лиц и группировка по лицам | Да | Да |
|
|
||||||
| В этот день (x лет назад) | Да | Да |
|
|
||||||
| Работа без интернета | Да | Нет |
|
|
||||||
| Галлереи только для просмотра | Да | Да |
|
|
||||||
| Колллажи | Да | Да |
|
|
||||||
|
|
||||||
## Поддержка проекта
|
|
||||||
|
|
||||||
Я посвятил себя этому проекту и не остановлюсь. Я буду продолжать обновлять документацию, добавлять новые функции и исправлять ошибки. Но я не могу сделать это один. Поэтому мне нужна ваша помощь, чтобы дать мне дополнительную мотивацию продолжать идти дальше.
|
|
||||||
|
|
||||||
Как сказали наши покровители [selfhosted.show - In the episode 'The-organization-must-not-be-name is a Hostile Actor'](https://selfhosted.show/79?t=1418), это масштабная работа, которую мы с командой делаем. И мне бы очень хотелось когда-нибудь иметь возможность заниматься этим на постоянной основе, и я прошу вашей помощи, чтобы это произошло.
|
|
||||||
|
|
||||||
|
|
||||||
Если вы считаете, что это правильная причина и вы уже давно используете это приложение, рассмотрите возможность финансовой поддержки проекта, выбрав вариант ниже.
|
|
||||||
|
|
||||||
### Пожертвование
|
|
||||||
|
|
||||||
- [Ежемесячное пожертвование](https://github.com/sponsors/immich-app) via GitHub Sponsors
|
|
||||||
- [Одноразовое пожертвование](https://github.com/sponsors/immich-app?frequency=one-time&sponsor=alextran1502) via GitHub Sponsors
|
|
||||||
- [Librepay](https://liberapay.com/alex.tran1502/)
|
|
||||||
- [buymeacoffee](https://www.buymeacoffee.com/altran1502)
|
|
||||||
- Bitcoin: 3QVAb9dCHutquVejeNXitPqZX26Yg5kxb7
|
|
||||||
- ZCash: u1smm4wvqegcp46zss2jf5xptchgeczp4rx7a0wu3mermf2wxahm26yyz5w9mw3f2p4emwlljxjumg774kgs8rntt9yags0whnzane4n67z4c7gppq4yyvcj404ne3r769prwzd9j8ntvqp44fa6d67sf7rmcfjmds3gmeceff4u8e92rh38nd30cr96xw6vfhk6scu4ws90ldzupr3sz
|
|
||||||
|
|
||||||
## Авторы
|
|
||||||
<a href="https://github.com/alextran1502/immich/graphs/contributors">
|
|
||||||
<img src="https://contrib.rocks/image?repo=immich-app/immich" width="100%"/>
|
|
||||||
</a>
|
|
||||||
@@ -21,6 +21,7 @@ module.exports = {
|
|||||||
'unicorn/prefer-module': 'off',
|
'unicorn/prefer-module': 'off',
|
||||||
'unicorn/prevent-abbreviations': 'off',
|
'unicorn/prevent-abbreviations': 'off',
|
||||||
'unicorn/no-process-exit': 'off',
|
'unicorn/no-process-exit': 'off',
|
||||||
|
'unicorn/import-style': 'off',
|
||||||
curly: 2,
|
curly: 2,
|
||||||
'prettier/prettier': 0,
|
'prettier/prettier': 0,
|
||||||
},
|
},
|
||||||
|
|||||||
1
cli/.nvmrc
Normal file
@@ -0,0 +1 @@
|
|||||||
|
20.14
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
FROM node:20-alpine3.19@sha256:c0a3badbd8a0a760de903e00cedbca94588e609299820557e72cba2a53dbaa2c as core
|
FROM node:20-alpine3.19@sha256:696ae41fb5880949a15ade7879a2deae93b3f0723f757bdb5b8a9e4a744ce27f as core
|
||||||
|
|
||||||
WORKDIR /usr/src/open-api/typescript-sdk
|
WORKDIR /usr/src/open-api/typescript-sdk
|
||||||
COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./
|
COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./
|
||||||
|
|||||||
1005
cli/package-lock.json
generated
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@immich/cli",
|
"name": "@immich/cli",
|
||||||
"version": "2.1.0",
|
"version": "2.2.0",
|
||||||
"description": "Command Line Interface (CLI) for Immich",
|
"description": "Command Line Interface (CLI) for Immich",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"exports": "./dist/index.js",
|
"exports": "./dist/index.js",
|
||||||
@@ -28,8 +28,7 @@
|
|||||||
"eslint": "^8.56.0",
|
"eslint": "^8.56.0",
|
||||||
"eslint-config-prettier": "^9.1.0",
|
"eslint-config-prettier": "^9.1.0",
|
||||||
"eslint-plugin-prettier": "^5.1.3",
|
"eslint-plugin-prettier": "^5.1.3",
|
||||||
"eslint-plugin-unicorn": "^51.0.0",
|
"eslint-plugin-unicorn": "^53.0.0",
|
||||||
"glob": "^10.3.1",
|
|
||||||
"mock-fs": "^5.2.0",
|
"mock-fs": "^5.2.0",
|
||||||
"prettier": "^3.2.5",
|
"prettier": "^3.2.5",
|
||||||
"prettier-plugin-organize-imports": "^3.2.4",
|
"prettier-plugin-organize-imports": "^3.2.4",
|
||||||
@@ -59,6 +58,10 @@
|
|||||||
"node": ">=20.0.0"
|
"node": ">=20.0.0"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
|
"fast-glob": "^3.3.2",
|
||||||
"lodash-es": "^4.17.21"
|
"lodash-es": "^4.17.21"
|
||||||
|
},
|
||||||
|
"volta": {
|
||||||
|
"node": "20.14.0"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,5 +1,8 @@
|
|||||||
import {
|
import {
|
||||||
|
Action,
|
||||||
AssetBulkUploadCheckResult,
|
AssetBulkUploadCheckResult,
|
||||||
|
AssetMediaResponseDto,
|
||||||
|
AssetMediaStatus,
|
||||||
addAssetsToAlbum,
|
addAssetsToAlbum,
|
||||||
checkBulkUpload,
|
checkBulkUpload,
|
||||||
createAlbum,
|
createAlbum,
|
||||||
@@ -8,445 +11,342 @@ import {
|
|||||||
getSupportedMediaTypes,
|
getSupportedMediaTypes,
|
||||||
} from '@immich/sdk';
|
} from '@immich/sdk';
|
||||||
import byteSize from 'byte-size';
|
import byteSize from 'byte-size';
|
||||||
import cliProgress from 'cli-progress';
|
import { Presets, SingleBar } from 'cli-progress';
|
||||||
import { chunk, zip } from 'lodash-es';
|
import { chunk } from 'lodash-es';
|
||||||
import { createHash } from 'node:crypto';
|
import { Stats, createReadStream } from 'node:fs';
|
||||||
import fs, { createReadStream } from 'node:fs';
|
import { stat, unlink } from 'node:fs/promises';
|
||||||
import { access, constants, stat, unlink } from 'node:fs/promises';
|
|
||||||
import os from 'node:os';
|
import os from 'node:os';
|
||||||
import { basename } from 'node:path';
|
import path, { basename } from 'node:path';
|
||||||
import { CrawlService } from 'src/services/crawl.service';
|
import { BaseOptions, authenticate, crawl, sha1 } from 'src/utils';
|
||||||
import { BaseOptions, authenticate } from 'src/utils';
|
|
||||||
|
|
||||||
const zipDefined = zip as <T, U>(a: T[], b: U[]) => [T, U][];
|
const s = (count: number) => (count === 1 ? '' : 's');
|
||||||
|
|
||||||
enum CheckResponseStatus {
|
// TODO figure out why `id` is missing
|
||||||
ACCEPT = 'accept',
|
type AssetBulkUploadCheckResults = Array<AssetBulkUploadCheckResult & { id: string }>;
|
||||||
REJECT = 'reject',
|
type Asset = { id: string; filepath: string };
|
||||||
DUPLICATE = 'duplicate',
|
|
||||||
}
|
|
||||||
|
|
||||||
class Asset {
|
interface UploadOptionsDto {
|
||||||
readonly path: string;
|
recursive?: boolean;
|
||||||
|
ignore?: string;
|
||||||
id?: string;
|
dryRun?: boolean;
|
||||||
deviceAssetId?: string;
|
skipHash?: boolean;
|
||||||
fileCreatedAt?: Date;
|
delete?: boolean;
|
||||||
fileModifiedAt?: Date;
|
album?: boolean;
|
||||||
sidecarPath?: string;
|
|
||||||
fileSize?: number;
|
|
||||||
albumName?: string;
|
albumName?: string;
|
||||||
|
includeHidden?: boolean;
|
||||||
|
concurrency: number;
|
||||||
|
}
|
||||||
|
|
||||||
constructor(path: string) {
|
class UploadFile extends File {
|
||||||
this.path = path;
|
constructor(
|
||||||
|
private filepath: string,
|
||||||
|
private _size: number,
|
||||||
|
) {
|
||||||
|
super([], basename(filepath));
|
||||||
}
|
}
|
||||||
|
|
||||||
async prepare() {
|
get size() {
|
||||||
const stats = await stat(this.path);
|
return this._size;
|
||||||
this.deviceAssetId = `${basename(this.path)}-${stats.size}`.replaceAll(/\s+/g, '');
|
|
||||||
this.fileCreatedAt = stats.mtime;
|
|
||||||
this.fileModifiedAt = stats.mtime;
|
|
||||||
this.fileSize = stats.size;
|
|
||||||
this.albumName = this.extractAlbumName();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async getUploadFormData(): Promise<FormData> {
|
stream() {
|
||||||
if (!this.deviceAssetId) {
|
return createReadStream(this.filepath) as any;
|
||||||
throw new Error('Device asset id not set');
|
|
||||||
}
|
|
||||||
if (!this.fileCreatedAt) {
|
|
||||||
throw new Error('File created at not set');
|
|
||||||
}
|
|
||||||
if (!this.fileModifiedAt) {
|
|
||||||
throw new Error('File modified at not set');
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO: doesn't xmp replace the file extension? Will need investigation
|
|
||||||
const sideCarPath = `${this.path}.xmp`;
|
|
||||||
let sidecarData: Blob | undefined = undefined;
|
|
||||||
try {
|
|
||||||
await access(sideCarPath, constants.R_OK);
|
|
||||||
sidecarData = new File([await fs.openAsBlob(sideCarPath)], basename(sideCarPath));
|
|
||||||
} catch {}
|
|
||||||
|
|
||||||
const data: any = {
|
|
||||||
assetData: new File([await fs.openAsBlob(this.path)], basename(this.path)),
|
|
||||||
deviceAssetId: this.deviceAssetId,
|
|
||||||
deviceId: 'CLI',
|
|
||||||
fileCreatedAt: this.fileCreatedAt.toISOString(),
|
|
||||||
fileModifiedAt: this.fileModifiedAt.toISOString(),
|
|
||||||
isFavorite: String(false),
|
|
||||||
};
|
|
||||||
const formData = new FormData();
|
|
||||||
|
|
||||||
for (const property in data) {
|
|
||||||
formData.append(property, data[property]);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (sidecarData) {
|
|
||||||
formData.append('sidecarData', sidecarData);
|
|
||||||
}
|
|
||||||
|
|
||||||
return formData;
|
|
||||||
}
|
|
||||||
|
|
||||||
async delete(): Promise<void> {
|
|
||||||
return unlink(this.path);
|
|
||||||
}
|
|
||||||
|
|
||||||
public async hash(): Promise<string> {
|
|
||||||
const sha1 = (filePath: string) => {
|
|
||||||
const hash = createHash('sha1');
|
|
||||||
return new Promise<string>((resolve, reject) => {
|
|
||||||
const rs = createReadStream(filePath);
|
|
||||||
rs.on('error', reject);
|
|
||||||
rs.on('data', (chunk) => hash.update(chunk));
|
|
||||||
rs.on('end', () => resolve(hash.digest('hex')));
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
return await sha1(this.path);
|
|
||||||
}
|
|
||||||
|
|
||||||
private extractAlbumName(): string | undefined {
|
|
||||||
return os.platform() === 'win32' ? this.path.split('\\').at(-2) : this.path.split('/').at(-2);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
class UploadOptionsDto {
|
export const upload = async (paths: string[], baseOptions: BaseOptions, options: UploadOptionsDto) => {
|
||||||
recursive? = false;
|
await authenticate(baseOptions);
|
||||||
exclusionPatterns?: string[] = [];
|
|
||||||
dryRun? = false;
|
|
||||||
skipHash? = false;
|
|
||||||
delete? = false;
|
|
||||||
album? = false;
|
|
||||||
albumName? = '';
|
|
||||||
includeHidden? = false;
|
|
||||||
concurrency? = 4;
|
|
||||||
}
|
|
||||||
|
|
||||||
export const upload = (paths: string[], baseOptions: BaseOptions, uploadOptions: UploadOptionsDto) =>
|
const scanFiles = await scan(paths, options);
|
||||||
new UploadCommand().run(paths, baseOptions, uploadOptions);
|
if (scanFiles.length === 0) {
|
||||||
|
console.log('No files found, exiting');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
// TODO refactor this
|
const { newFiles, duplicates } = await checkForDuplicates(scanFiles, options);
|
||||||
class UploadCommand {
|
const newAssets = await uploadFiles(newFiles, options);
|
||||||
public async run(paths: string[], baseOptions: BaseOptions, options: UploadOptionsDto): Promise<void> {
|
await updateAlbums([...newAssets, ...duplicates], options);
|
||||||
await authenticate(baseOptions);
|
await deleteFiles(newFiles, options);
|
||||||
|
};
|
||||||
|
|
||||||
console.log('Crawling for assets...');
|
const scan = async (pathsToCrawl: string[], options: UploadOptionsDto) => {
|
||||||
const files = await this.getFiles(paths, options);
|
const { image, video } = await getSupportedMediaTypes();
|
||||||
|
|
||||||
if (files.length === 0) {
|
console.log('Crawling for assets...');
|
||||||
console.log('No assets found, exiting');
|
const files = await crawl({
|
||||||
return;
|
pathsToCrawl,
|
||||||
|
recursive: options.recursive,
|
||||||
|
exclusionPattern: options.ignore,
|
||||||
|
includeHidden: options.includeHidden,
|
||||||
|
extensions: [...image, ...video],
|
||||||
|
});
|
||||||
|
|
||||||
|
return files;
|
||||||
|
};
|
||||||
|
|
||||||
|
const checkForDuplicates = async (files: string[], { concurrency, skipHash }: UploadOptionsDto) => {
|
||||||
|
if (skipHash) {
|
||||||
|
console.log('Skipping hash check, assuming all files are new');
|
||||||
|
return { newFiles: files, duplicates: [] };
|
||||||
|
}
|
||||||
|
|
||||||
|
const progressBar = new SingleBar(
|
||||||
|
{ format: 'Checking files | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
|
||||||
|
Presets.shades_classic,
|
||||||
|
);
|
||||||
|
|
||||||
|
progressBar.start(files.length, 0);
|
||||||
|
|
||||||
|
const newFiles: string[] = [];
|
||||||
|
const duplicates: Asset[] = [];
|
||||||
|
|
||||||
|
try {
|
||||||
|
// TODO refactor into a queue
|
||||||
|
for (const items of chunk(files, concurrency)) {
|
||||||
|
const dto = await Promise.all(items.map(async (filepath) => ({ id: filepath, checksum: await sha1(filepath) })));
|
||||||
|
const { results } = await checkBulkUpload({ assetBulkUploadCheckDto: { assets: dto } });
|
||||||
|
|
||||||
|
for (const { id: filepath, assetId, action } of results as AssetBulkUploadCheckResults) {
|
||||||
|
if (action === Action.Accept) {
|
||||||
|
newFiles.push(filepath);
|
||||||
|
} else {
|
||||||
|
// rejects are always duplicates
|
||||||
|
duplicates.push({ id: assetId as string, filepath });
|
||||||
|
}
|
||||||
|
progressBar.increment();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
} finally {
|
||||||
|
progressBar.stop();
|
||||||
|
}
|
||||||
|
|
||||||
const assetsToCheck = files.map((path) => new Asset(path));
|
console.log(`Found ${newFiles.length} new files and ${duplicates.length} duplicate${s(duplicates.length)}`);
|
||||||
|
|
||||||
const { newAssets, duplicateAssets } = await this.checkAssets(assetsToCheck, options.concurrency ?? 4);
|
return { newFiles, duplicates };
|
||||||
|
};
|
||||||
|
|
||||||
const totalSizeUploaded = await this.upload(newAssets, options);
|
const uploadFiles = async (files: string[], { dryRun, concurrency }: UploadOptionsDto): Promise<Asset[]> => {
|
||||||
const messageStart = options.dryRun ? 'Would have' : 'Successfully';
|
if (files.length === 0) {
|
||||||
if (newAssets.length === 0) {
|
console.log('All assets were already uploaded, nothing to do.');
|
||||||
console.log('All assets were already uploaded, nothing to do.');
|
return [];
|
||||||
} else {
|
}
|
||||||
console.log(
|
|
||||||
`${messageStart} uploaded ${newAssets.length} asset${newAssets.length === 1 ? '' : 's'} (${byteSize(totalSizeUploaded)})`,
|
// Compute total size first
|
||||||
|
let totalSize = 0;
|
||||||
|
const statsMap = new Map<string, Stats>();
|
||||||
|
for (const filepath of files) {
|
||||||
|
const stats = await stat(filepath);
|
||||||
|
statsMap.set(filepath, stats);
|
||||||
|
totalSize += stats.size;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (dryRun) {
|
||||||
|
console.log(`Would have uploaded ${files.length} asset${s(files.length)} (${byteSize(totalSize)})`);
|
||||||
|
return files.map((filepath) => ({ id: '', filepath }));
|
||||||
|
}
|
||||||
|
|
||||||
|
const uploadProgress = new SingleBar(
|
||||||
|
{ format: 'Uploading assets | {bar} | {percentage}% | ETA: {eta_formatted} | {value_formatted}/{total_formatted}' },
|
||||||
|
Presets.shades_classic,
|
||||||
|
);
|
||||||
|
uploadProgress.start(totalSize, 0);
|
||||||
|
uploadProgress.update({ value_formatted: 0, total_formatted: byteSize(totalSize) });
|
||||||
|
|
||||||
|
let duplicateCount = 0;
|
||||||
|
let duplicateSize = 0;
|
||||||
|
let successCount = 0;
|
||||||
|
let successSize = 0;
|
||||||
|
|
||||||
|
const newAssets: Asset[] = [];
|
||||||
|
|
||||||
|
try {
|
||||||
|
for (const items of chunk(files, concurrency)) {
|
||||||
|
await Promise.all(
|
||||||
|
items.map(async (filepath) => {
|
||||||
|
const stats = statsMap.get(filepath) as Stats;
|
||||||
|
const response = await uploadFile(filepath, stats);
|
||||||
|
|
||||||
|
newAssets.push({ id: response.id, filepath });
|
||||||
|
|
||||||
|
if (response.status === AssetMediaStatus.Duplicate) {
|
||||||
|
duplicateCount++;
|
||||||
|
duplicateSize += stats.size ?? 0;
|
||||||
|
} else {
|
||||||
|
successCount++;
|
||||||
|
successSize += stats.size ?? 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
uploadProgress.update(successSize, { value_formatted: byteSize(successSize + duplicateSize) });
|
||||||
|
|
||||||
|
return response;
|
||||||
|
}),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
} finally {
|
||||||
|
uploadProgress.stop();
|
||||||
|
}
|
||||||
|
|
||||||
if (options.album || options.albumName) {
|
console.log(`Successfully uploaded ${successCount} new asset${s(successCount)} (${byteSize(successSize)})`);
|
||||||
const { createdAlbumCount, updatedAssetCount } = await this.updateAlbums(
|
if (duplicateCount > 0) {
|
||||||
[...newAssets, ...duplicateAssets],
|
console.log(`Skipped ${duplicateCount} duplicate asset${s(duplicateCount)} (${byteSize(duplicateSize)})`);
|
||||||
options,
|
}
|
||||||
|
return newAssets;
|
||||||
|
};
|
||||||
|
|
||||||
|
const uploadFile = async (input: string, stats: Stats): Promise<AssetMediaResponseDto> => {
|
||||||
|
const { baseUrl, headers } = defaults;
|
||||||
|
|
||||||
|
const assetPath = path.parse(input);
|
||||||
|
const noExtension = path.join(assetPath.dir, assetPath.name);
|
||||||
|
|
||||||
|
const sidecarsFiles = await Promise.all(
|
||||||
|
// XMP sidecars can come in two filename formats. For a photo named photo.ext, the filenames are photo.ext.xmp and photo.xmp
|
||||||
|
[`${noExtension}.xmp`, `${input}.xmp`].map(async (sidecarPath) => {
|
||||||
|
try {
|
||||||
|
const stats = await stat(sidecarPath);
|
||||||
|
return new UploadFile(sidecarPath, stats.size);
|
||||||
|
} catch {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
const sidecarData = sidecarsFiles.find((file): file is UploadFile => file !== false);
|
||||||
|
|
||||||
|
const formData = new FormData();
|
||||||
|
formData.append('deviceAssetId', `${basename(input)}-${stats.size}`.replaceAll(/\s+/g, ''));
|
||||||
|
formData.append('deviceId', 'CLI');
|
||||||
|
formData.append('fileCreatedAt', stats.mtime.toISOString());
|
||||||
|
formData.append('fileModifiedAt', stats.mtime.toISOString());
|
||||||
|
formData.append('fileSize', String(stats.size));
|
||||||
|
formData.append('isFavorite', 'false');
|
||||||
|
formData.append('assetData', new UploadFile(input, stats.size));
|
||||||
|
|
||||||
|
if (sidecarData) {
|
||||||
|
formData.append('sidecarData', sidecarData);
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await fetch(`${baseUrl}/assets`, {
|
||||||
|
method: 'post',
|
||||||
|
redirect: 'error',
|
||||||
|
headers: headers as Record<string, string>,
|
||||||
|
body: formData,
|
||||||
|
});
|
||||||
|
if (response.status !== 200 && response.status !== 201) {
|
||||||
|
throw new Error(await response.text());
|
||||||
|
}
|
||||||
|
|
||||||
|
return response.json();
|
||||||
|
};
|
||||||
|
|
||||||
|
const deleteFiles = async (files: string[], options: UploadOptionsDto): Promise<void> => {
|
||||||
|
if (!options.delete) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (options.dryRun) {
|
||||||
|
console.log(`Would have deleted ${files.length} local asset${s(files.length)}`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('Deleting assets that have been uploaded...');
|
||||||
|
|
||||||
|
const deletionProgress = new SingleBar(
|
||||||
|
{ format: 'Deleting local assets | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
|
||||||
|
Presets.shades_classic,
|
||||||
|
);
|
||||||
|
deletionProgress.start(files.length, 0);
|
||||||
|
|
||||||
|
try {
|
||||||
|
for (const assetBatch of chunk(files, options.concurrency)) {
|
||||||
|
await Promise.all(assetBatch.map((input: string) => unlink(input)));
|
||||||
|
deletionProgress.update(assetBatch.length);
|
||||||
|
}
|
||||||
|
} finally {
|
||||||
|
deletionProgress.stop();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const updateAlbums = async (assets: Asset[], options: UploadOptionsDto) => {
|
||||||
|
if (!options.album && !options.albumName) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const { dryRun, concurrency } = options;
|
||||||
|
|
||||||
|
const albums = await getAllAlbums({});
|
||||||
|
const existingAlbums = new Map(albums.map((album) => [album.albumName, album.id]));
|
||||||
|
const newAlbums: Set<string> = new Set();
|
||||||
|
for (const { filepath } of assets) {
|
||||||
|
const albumName = getAlbumName(filepath, options);
|
||||||
|
if (albumName && !existingAlbums.has(albumName)) {
|
||||||
|
newAlbums.add(albumName);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (dryRun) {
|
||||||
|
// TODO print asset counts for new albums
|
||||||
|
console.log(`Would have created ${newAlbums.size} new album${s(newAlbums.size)}`);
|
||||||
|
console.log(`Would have updated albums of ${assets.length} asset${s(assets.length)}`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const progressBar = new SingleBar(
|
||||||
|
{ format: 'Creating albums | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} albums' },
|
||||||
|
Presets.shades_classic,
|
||||||
|
);
|
||||||
|
progressBar.start(newAlbums.size, 0);
|
||||||
|
|
||||||
|
try {
|
||||||
|
for (const albumNames of chunk([...newAlbums], concurrency)) {
|
||||||
|
const items = await Promise.all(
|
||||||
|
albumNames.map((albumName: string) => createAlbum({ createAlbumDto: { albumName } })),
|
||||||
);
|
);
|
||||||
console.log(`${messageStart} created ${createdAlbumCount} new album${createdAlbumCount === 1 ? '' : 's'}`);
|
for (const { id, albumName } of items) {
|
||||||
console.log(`${messageStart} updated ${updatedAssetCount} asset${updatedAssetCount === 1 ? '' : 's'}`);
|
existingAlbums.set(albumName, id);
|
||||||
}
|
|
||||||
|
|
||||||
if (!options.delete) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (options.dryRun) {
|
|
||||||
console.log(`Would now have deleted assets, but skipped due to dry run`);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
console.log('Deleting assets that have been uploaded...');
|
|
||||||
|
|
||||||
await this.deleteAssets(newAssets, options);
|
|
||||||
}
|
|
||||||
|
|
||||||
public async checkAssets(
|
|
||||||
assetsToCheck: Asset[],
|
|
||||||
concurrency: number,
|
|
||||||
): Promise<{ newAssets: Asset[]; duplicateAssets: Asset[]; rejectedAssets: Asset[] }> {
|
|
||||||
for (const assets of chunk(assetsToCheck, concurrency)) {
|
|
||||||
await Promise.all(assets.map((asset: Asset) => asset.prepare()));
|
|
||||||
}
|
|
||||||
|
|
||||||
const checkProgress = new cliProgress.SingleBar(
|
|
||||||
{ format: 'Checking assets | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
|
|
||||||
cliProgress.Presets.shades_classic,
|
|
||||||
);
|
|
||||||
checkProgress.start(assetsToCheck.length, 0);
|
|
||||||
|
|
||||||
const newAssets = [];
|
|
||||||
const duplicateAssets = [];
|
|
||||||
const rejectedAssets = [];
|
|
||||||
try {
|
|
||||||
for (const assets of chunk(assetsToCheck, concurrency)) {
|
|
||||||
const checkedAssets = await this.getStatus(assets);
|
|
||||||
for (const checked of checkedAssets) {
|
|
||||||
if (checked.status === CheckResponseStatus.ACCEPT) {
|
|
||||||
newAssets.push(checked.asset);
|
|
||||||
} else if (checked.status === CheckResponseStatus.DUPLICATE) {
|
|
||||||
duplicateAssets.push(checked.asset);
|
|
||||||
} else {
|
|
||||||
rejectedAssets.push(checked.asset);
|
|
||||||
}
|
|
||||||
checkProgress.increment();
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
} finally {
|
progressBar.increment(albumNames.length);
|
||||||
checkProgress.stop();
|
|
||||||
}
|
}
|
||||||
|
} finally {
|
||||||
return { newAssets, duplicateAssets, rejectedAssets };
|
progressBar.stop();
|
||||||
}
|
}
|
||||||
|
|
||||||
public async upload(assetsToUpload: Asset[], options: UploadOptionsDto): Promise<number> {
|
console.log(`Successfully created ${newAlbums.size} new album${s(newAlbums.size)}`);
|
||||||
let totalSize = 0;
|
console.log(`Successfully updated ${assets.length} asset${s(assets.length)}`);
|
||||||
|
|
||||||
// Compute total size first
|
const albumToAssets = new Map<string, string[]>();
|
||||||
for (const asset of assetsToUpload) {
|
for (const asset of assets) {
|
||||||
totalSize += asset.fileSize ?? 0;
|
const albumName = getAlbumName(asset.filepath, options);
|
||||||
|
if (!albumName) {
|
||||||
|
continue;
|
||||||
}
|
}
|
||||||
|
const albumId = existingAlbums.get(albumName);
|
||||||
if (options.dryRun) {
|
if (albumId) {
|
||||||
return totalSize;
|
if (!albumToAssets.has(albumId)) {
|
||||||
}
|
albumToAssets.set(albumId, []);
|
||||||
|
|
||||||
const uploadProgress = new cliProgress.SingleBar(
|
|
||||||
{
|
|
||||||
format: 'Uploading assets | {bar} | {percentage}% | ETA: {eta_formatted} | {value_formatted}/{total_formatted}',
|
|
||||||
},
|
|
||||||
cliProgress.Presets.shades_classic,
|
|
||||||
);
|
|
||||||
uploadProgress.start(totalSize, 0);
|
|
||||||
uploadProgress.update({ value_formatted: 0, total_formatted: byteSize(totalSize) });
|
|
||||||
|
|
||||||
let totalSizeUploaded = 0;
|
|
||||||
try {
|
|
||||||
for (const assets of chunk(assetsToUpload, options.concurrency)) {
|
|
||||||
const ids = await this.uploadAssets(assets);
|
|
||||||
for (const [asset, id] of zipDefined(assets, ids)) {
|
|
||||||
asset.id = id;
|
|
||||||
if (asset.fileSize) {
|
|
||||||
totalSizeUploaded += asset.fileSize ?? 0;
|
|
||||||
} else {
|
|
||||||
console.log(`Could not determine file size for ${asset.path}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
uploadProgress.update(totalSizeUploaded, { value_formatted: byteSize(totalSizeUploaded) });
|
|
||||||
}
|
}
|
||||||
} finally {
|
albumToAssets.get(albumId)?.push(asset.id);
|
||||||
uploadProgress.stop();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return totalSizeUploaded;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public async getFiles(paths: string[], options: UploadOptionsDto): Promise<string[]> {
|
const albumUpdateProgress = new SingleBar(
|
||||||
const inputFiles: string[] = [];
|
{ format: 'Adding assets to albums | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
|
||||||
for (const pathArgument of paths) {
|
Presets.shades_classic,
|
||||||
const fileStat = await fs.promises.lstat(pathArgument);
|
);
|
||||||
if (fileStat.isFile()) {
|
albumUpdateProgress.start(assets.length, 0);
|
||||||
inputFiles.push(pathArgument);
|
|
||||||
|
try {
|
||||||
|
for (const [albumId, assets] of albumToAssets.entries()) {
|
||||||
|
for (const assetBatch of chunk(assets, Math.min(1000 * concurrency, 65_000))) {
|
||||||
|
await addAssetsToAlbum({ id: albumId, bulkIdsDto: { ids: assetBatch } });
|
||||||
|
albumUpdateProgress.increment(assetBatch.length);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
} finally {
|
||||||
const files: string[] = await this.crawl(paths, options);
|
albumUpdateProgress.stop();
|
||||||
files.push(...inputFiles);
|
|
||||||
return files;
|
|
||||||
}
|
}
|
||||||
|
};
|
||||||
|
|
||||||
public async getAlbums(): Promise<Map<string, string>> {
|
const getAlbumName = (filepath: string, options: UploadOptionsDto) => {
|
||||||
const existingAlbums = await getAllAlbums({});
|
const folderName = os.platform() === 'win32' ? filepath.split('\\').at(-2) : filepath.split('/').at(-2);
|
||||||
|
return options.albumName ?? folderName;
|
||||||
const albumMapping = new Map<string, string>();
|
};
|
||||||
for (const album of existingAlbums) {
|
|
||||||
albumMapping.set(album.albumName, album.id);
|
|
||||||
}
|
|
||||||
|
|
||||||
return albumMapping;
|
|
||||||
}
|
|
||||||
|
|
||||||
public async updateAlbums(
|
|
||||||
assets: Asset[],
|
|
||||||
options: UploadOptionsDto,
|
|
||||||
): Promise<{ createdAlbumCount: number; updatedAssetCount: number }> {
|
|
||||||
if (options.albumName) {
|
|
||||||
for (const asset of assets) {
|
|
||||||
asset.albumName = options.albumName;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const existingAlbums = await this.getAlbums();
|
|
||||||
const assetsToUpdate = assets.filter(
|
|
||||||
(asset): asset is Asset & { albumName: string; id: string } => !!(asset.albumName && asset.id),
|
|
||||||
);
|
|
||||||
|
|
||||||
const newAlbumsSet: Set<string> = new Set();
|
|
||||||
for (const asset of assetsToUpdate) {
|
|
||||||
if (!existingAlbums.has(asset.albumName)) {
|
|
||||||
newAlbumsSet.add(asset.albumName);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const newAlbums = [...newAlbumsSet];
|
|
||||||
|
|
||||||
if (options.dryRun) {
|
|
||||||
return { createdAlbumCount: newAlbums.length, updatedAssetCount: assetsToUpdate.length };
|
|
||||||
}
|
|
||||||
|
|
||||||
const albumCreationProgress = new cliProgress.SingleBar(
|
|
||||||
{
|
|
||||||
format: 'Creating albums | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} albums',
|
|
||||||
},
|
|
||||||
cliProgress.Presets.shades_classic,
|
|
||||||
);
|
|
||||||
albumCreationProgress.start(newAlbums.length, 0);
|
|
||||||
|
|
||||||
try {
|
|
||||||
for (const albumNames of chunk(newAlbums, options.concurrency)) {
|
|
||||||
const newAlbumIds = await Promise.all(
|
|
||||||
albumNames.map((albumName: string) => createAlbum({ createAlbumDto: { albumName } }).then((r) => r.id)),
|
|
||||||
);
|
|
||||||
|
|
||||||
for (const [albumName, albumId] of zipDefined(albumNames, newAlbumIds)) {
|
|
||||||
existingAlbums.set(albumName, albumId);
|
|
||||||
}
|
|
||||||
|
|
||||||
albumCreationProgress.increment(albumNames.length);
|
|
||||||
}
|
|
||||||
} finally {
|
|
||||||
albumCreationProgress.stop();
|
|
||||||
}
|
|
||||||
|
|
||||||
const albumToAssets = new Map<string, string[]>();
|
|
||||||
for (const asset of assetsToUpdate) {
|
|
||||||
const albumId = existingAlbums.get(asset.albumName);
|
|
||||||
if (albumId) {
|
|
||||||
if (!albumToAssets.has(albumId)) {
|
|
||||||
albumToAssets.set(albumId, []);
|
|
||||||
}
|
|
||||||
albumToAssets.get(albumId)?.push(asset.id);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const albumUpdateProgress = new cliProgress.SingleBar(
|
|
||||||
{
|
|
||||||
format: 'Adding assets to albums | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets',
|
|
||||||
},
|
|
||||||
cliProgress.Presets.shades_classic,
|
|
||||||
);
|
|
||||||
albumUpdateProgress.start(assetsToUpdate.length, 0);
|
|
||||||
|
|
||||||
try {
|
|
||||||
for (const [albumId, assets] of albumToAssets.entries()) {
|
|
||||||
for (const assetBatch of chunk(assets, Math.min(1000 * (options.concurrency ?? 4), 65_000))) {
|
|
||||||
await addAssetsToAlbum({ id: albumId, bulkIdsDto: { ids: assetBatch } });
|
|
||||||
albumUpdateProgress.increment(assetBatch.length);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} finally {
|
|
||||||
albumUpdateProgress.stop();
|
|
||||||
}
|
|
||||||
|
|
||||||
return { createdAlbumCount: newAlbums.length, updatedAssetCount: assetsToUpdate.length };
|
|
||||||
}
|
|
||||||
|
|
||||||
public async deleteAssets(assets: Asset[], options: UploadOptionsDto): Promise<void> {
|
|
||||||
const deletionProgress = new cliProgress.SingleBar(
|
|
||||||
{
|
|
||||||
format: 'Deleting local assets | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets',
|
|
||||||
},
|
|
||||||
cliProgress.Presets.shades_classic,
|
|
||||||
);
|
|
||||||
deletionProgress.start(assets.length, 0);
|
|
||||||
|
|
||||||
try {
|
|
||||||
for (const assetBatch of chunk(assets, options.concurrency)) {
|
|
||||||
await Promise.all(assetBatch.map((asset: Asset) => asset.delete()));
|
|
||||||
deletionProgress.update(assetBatch.length);
|
|
||||||
}
|
|
||||||
} finally {
|
|
||||||
deletionProgress.stop();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private async getStatus(assets: Asset[]): Promise<{ asset: Asset; status: CheckResponseStatus }[]> {
|
|
||||||
const checkResponse = await this.checkHashes(assets);
|
|
||||||
|
|
||||||
const responses = [];
|
|
||||||
for (const [check, asset] of zipDefined(checkResponse, assets)) {
|
|
||||||
if (check.assetId) {
|
|
||||||
asset.id = check.assetId;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (check.action === 'accept') {
|
|
||||||
responses.push({ asset, status: CheckResponseStatus.ACCEPT });
|
|
||||||
} else if (check.reason === 'duplicate') {
|
|
||||||
responses.push({ asset, status: CheckResponseStatus.DUPLICATE });
|
|
||||||
} else {
|
|
||||||
responses.push({ asset, status: CheckResponseStatus.REJECT });
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return responses;
|
|
||||||
}
|
|
||||||
|
|
||||||
private async checkHashes(assetsToCheck: Asset[]): Promise<AssetBulkUploadCheckResult[]> {
|
|
||||||
const checksums = await Promise.all(assetsToCheck.map((asset) => asset.hash()));
|
|
||||||
const assetBulkUploadCheckDto = {
|
|
||||||
assets: zipDefined(assetsToCheck, checksums).map(([asset, checksum]) => ({ id: asset.path, checksum })),
|
|
||||||
};
|
|
||||||
const checkResponse = await checkBulkUpload({ assetBulkUploadCheckDto });
|
|
||||||
return checkResponse.results;
|
|
||||||
}
|
|
||||||
|
|
||||||
private async uploadAssets(assets: Asset[]): Promise<string[]> {
|
|
||||||
const fileRequests = await Promise.all(assets.map((asset) => asset.getUploadFormData()));
|
|
||||||
const results = await Promise.all(fileRequests.map((request) => this.uploadAsset(request)));
|
|
||||||
return results.map((response) => response.id);
|
|
||||||
}
|
|
||||||
|
|
||||||
private async crawl(paths: string[], options: UploadOptionsDto): Promise<string[]> {
|
|
||||||
const formatResponse = await getSupportedMediaTypes();
|
|
||||||
const crawlService = new CrawlService(formatResponse.image, formatResponse.video);
|
|
||||||
|
|
||||||
return crawlService.crawl({
|
|
||||||
pathsToCrawl: paths,
|
|
||||||
recursive: options.recursive,
|
|
||||||
exclusionPatterns: options.exclusionPatterns,
|
|
||||||
includeHidden: options.includeHidden,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
private async uploadAsset(data: FormData): Promise<{ id: string }> {
|
|
||||||
const { baseUrl, headers } = defaults;
|
|
||||||
|
|
||||||
const response = await fetch(`${baseUrl}/asset/upload`, {
|
|
||||||
method: 'post',
|
|
||||||
redirect: 'error',
|
|
||||||
headers: headers as Record<string, string>,
|
|
||||||
body: data,
|
|
||||||
});
|
|
||||||
if (response.status !== 200 && response.status !== 201) {
|
|
||||||
throw new Error(await response.text());
|
|
||||||
}
|
|
||||||
return response.json();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -1,22 +1,22 @@
|
|||||||
import { getMyUserInfo } from '@immich/sdk';
|
import { getMyUser } from '@immich/sdk';
|
||||||
import { existsSync } from 'node:fs';
|
import { existsSync } from 'node:fs';
|
||||||
import { mkdir, unlink } from 'node:fs/promises';
|
import { mkdir, unlink } from 'node:fs/promises';
|
||||||
import { BaseOptions, connect, getAuthFilePath, logError, withError, writeAuthFile } from 'src/utils';
|
import { BaseOptions, connect, getAuthFilePath, logError, withError, writeAuthFile } from 'src/utils';
|
||||||
|
|
||||||
export const login = async (instanceUrl: string, apiKey: string, options: BaseOptions) => {
|
export const login = async (url: string, key: string, options: BaseOptions) => {
|
||||||
console.log(`Logging in to ${instanceUrl}`);
|
console.log(`Logging in to ${url}`);
|
||||||
|
|
||||||
const { configDirectory: configDir } = options;
|
const { configDirectory: configDir } = options;
|
||||||
|
|
||||||
await connect(instanceUrl, apiKey);
|
await connect(url, key);
|
||||||
|
|
||||||
const [error, userInfo] = await withError(getMyUserInfo());
|
const [error, user] = await withError(getMyUser());
|
||||||
if (error) {
|
if (error) {
|
||||||
logError(error, 'Failed to load user info');
|
logError(error, 'Failed to load user info');
|
||||||
process.exit(1);
|
process.exit(1);
|
||||||
}
|
}
|
||||||
|
|
||||||
console.log(`Logged in as ${userInfo.email}`);
|
console.log(`Logged in as ${user.email}`);
|
||||||
|
|
||||||
if (!existsSync(configDir)) {
|
if (!existsSync(configDir)) {
|
||||||
// Create config folder if it doesn't exist
|
// Create config folder if it doesn't exist
|
||||||
@@ -27,7 +27,7 @@ export const login = async (instanceUrl: string, apiKey: string, options: BaseOp
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
await writeAuthFile(configDir, { instanceUrl, apiKey });
|
await writeAuthFile(configDir, { url, key });
|
||||||
|
|
||||||
console.log(`Wrote auth info to ${getAuthFilePath(configDir)}`);
|
console.log(`Wrote auth info to ${getAuthFilePath(configDir)}`);
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -1,15 +1,24 @@
|
|||||||
import { getAssetStatistics, getServerVersion, getSupportedMediaTypes } from '@immich/sdk';
|
import { getAssetStatistics, getMyUser, getServerVersion, getSupportedMediaTypes } from '@immich/sdk';
|
||||||
import { BaseOptions, authenticate } from 'src/utils';
|
import { BaseOptions, authenticate } from 'src/utils';
|
||||||
|
|
||||||
export const serverInfo = async (options: BaseOptions) => {
|
export const serverInfo = async (options: BaseOptions) => {
|
||||||
await authenticate(options);
|
const { url } = await authenticate(options);
|
||||||
|
|
||||||
const versionInfo = await getServerVersion();
|
const [versionInfo, mediaTypes, stats, userInfo] = await Promise.all([
|
||||||
const mediaTypes = await getSupportedMediaTypes();
|
getServerVersion(),
|
||||||
const stats = await getAssetStatistics({});
|
getSupportedMediaTypes(),
|
||||||
|
getAssetStatistics({}),
|
||||||
|
getMyUser(),
|
||||||
|
]);
|
||||||
|
|
||||||
console.log(`Server Version: ${versionInfo.major}.${versionInfo.minor}.${versionInfo.patch}`);
|
console.log(`Server Info (via ${userInfo.email})`);
|
||||||
console.log(`Image Types: ${mediaTypes.image.map((extension) => extension.replace('.', ''))}`);
|
console.log(` Url: ${url}`);
|
||||||
console.log(`Video Types: ${mediaTypes.video.map((extension) => extension.replace('.', ''))}`);
|
console.log(` Version: ${versionInfo.major}.${versionInfo.minor}.${versionInfo.patch}`);
|
||||||
console.log(`Statistics:\n Images: ${stats.images}\n Videos: ${stats.videos}\n Total: ${stats.total}`);
|
console.log(` Formats:`);
|
||||||
|
console.log(` Images: ${mediaTypes.image.map((extension) => extension.replace('.', ''))}`);
|
||||||
|
console.log(` Videos: ${mediaTypes.video.map((extension) => extension.replace('.', ''))}`);
|
||||||
|
console.log(` Statistics:`);
|
||||||
|
console.log(` Images: ${stats.images}`);
|
||||||
|
console.log(` Videos: ${stats.videos}`);
|
||||||
|
console.log(` Total: ${stats.total}`);
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -19,7 +19,7 @@ const program = new Command()
|
|||||||
.default(defaultConfigDirectory),
|
.default(defaultConfigDirectory),
|
||||||
)
|
)
|
||||||
.addOption(new Option('-u, --url [url]', 'Immich server URL').env('IMMICH_INSTANCE_URL'))
|
.addOption(new Option('-u, --url [url]', 'Immich server URL').env('IMMICH_INSTANCE_URL'))
|
||||||
.addOption(new Option('-k, --key [apiKey]', 'Immich API key').env('IMMICH_API_KEY'));
|
.addOption(new Option('-k, --key [key]', 'Immich API key').env('IMMICH_API_KEY'));
|
||||||
|
|
||||||
program
|
program
|
||||||
.command('login')
|
.command('login')
|
||||||
@@ -44,7 +44,7 @@ program
|
|||||||
.description('Upload assets')
|
.description('Upload assets')
|
||||||
.usage('[paths...] [options]')
|
.usage('[paths...] [options]')
|
||||||
.addOption(new Option('-r, --recursive', 'Recursive').env('IMMICH_RECURSIVE').default(false))
|
.addOption(new Option('-r, --recursive', 'Recursive').env('IMMICH_RECURSIVE').default(false))
|
||||||
.addOption(new Option('-i, --ignore [paths...]', 'Paths to ignore').env('IMMICH_IGNORE_PATHS').default([]))
|
.addOption(new Option('-i, --ignore <pattern>', 'Pattern to ignore').env('IMMICH_IGNORE_PATHS'))
|
||||||
.addOption(new Option('-h, --skip-hash', "Don't hash files before upload").env('IMMICH_SKIP_HASH').default(false))
|
.addOption(new Option('-h, --skip-hash', "Don't hash files before upload").env('IMMICH_SKIP_HASH').default(false))
|
||||||
.addOption(new Option('-H, --include-hidden', 'Include hidden folders').env('IMMICH_INCLUDE_HIDDEN').default(false))
|
.addOption(new Option('-H, --include-hidden', 'Include hidden folders').env('IMMICH_INCLUDE_HIDDEN').default(false))
|
||||||
.addOption(
|
.addOption(
|
||||||
@@ -60,7 +60,8 @@ program
|
|||||||
.addOption(
|
.addOption(
|
||||||
new Option('-n, --dry-run', "Don't perform any actions, just show what will be done")
|
new Option('-n, --dry-run', "Don't perform any actions, just show what will be done")
|
||||||
.env('IMMICH_DRY_RUN')
|
.env('IMMICH_DRY_RUN')
|
||||||
.default(false),
|
.default(false)
|
||||||
|
.conflicts('skipHash'),
|
||||||
)
|
)
|
||||||
.addOption(
|
.addOption(
|
||||||
new Option('-c, --concurrency <number>', 'Number of assets to upload at the same time')
|
new Option('-c, --concurrency <number>', 'Number of assets to upload at the same time')
|
||||||
|
|||||||
@@ -1,70 +0,0 @@
|
|||||||
import { glob } from 'glob';
|
|
||||||
import * as fs from 'node:fs';
|
|
||||||
|
|
||||||
export class CrawlOptions {
|
|
||||||
pathsToCrawl!: string[];
|
|
||||||
recursive? = false;
|
|
||||||
includeHidden? = false;
|
|
||||||
exclusionPatterns?: string[];
|
|
||||||
}
|
|
||||||
|
|
||||||
export class CrawlService {
|
|
||||||
private readonly extensions!: string[];
|
|
||||||
|
|
||||||
constructor(image: string[], video: string[]) {
|
|
||||||
this.extensions = [...image, ...video].map((extension) => extension.replace('.', ''));
|
|
||||||
}
|
|
||||||
|
|
||||||
async crawl(options: CrawlOptions): Promise<string[]> {
|
|
||||||
const { recursive, pathsToCrawl, exclusionPatterns, includeHidden } = options;
|
|
||||||
|
|
||||||
if (!pathsToCrawl) {
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
|
|
||||||
const patterns: string[] = [];
|
|
||||||
const crawledFiles: string[] = [];
|
|
||||||
|
|
||||||
for await (const currentPath of pathsToCrawl) {
|
|
||||||
try {
|
|
||||||
const stats = await fs.promises.stat(currentPath);
|
|
||||||
if (stats.isFile() || stats.isSymbolicLink()) {
|
|
||||||
crawledFiles.push(currentPath);
|
|
||||||
} else {
|
|
||||||
patterns.push(currentPath);
|
|
||||||
}
|
|
||||||
} catch (error: any) {
|
|
||||||
if (error.code === 'ENOENT') {
|
|
||||||
patterns.push(currentPath);
|
|
||||||
} else {
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let searchPattern: string;
|
|
||||||
if (patterns.length === 1) {
|
|
||||||
searchPattern = patterns[0];
|
|
||||||
} else if (patterns.length === 0) {
|
|
||||||
return crawledFiles;
|
|
||||||
} else {
|
|
||||||
searchPattern = '{' + patterns.join(',') + '}';
|
|
||||||
}
|
|
||||||
|
|
||||||
if (recursive) {
|
|
||||||
searchPattern = searchPattern + '/**/';
|
|
||||||
}
|
|
||||||
|
|
||||||
searchPattern = `${searchPattern}/*.{${this.extensions.join(',')}}`;
|
|
||||||
|
|
||||||
const globbedFiles = await glob(searchPattern, {
|
|
||||||
absolute: true,
|
|
||||||
nocase: true,
|
|
||||||
nodir: true,
|
|
||||||
dot: includeHidden,
|
|
||||||
ignore: exclusionPatterns,
|
|
||||||
});
|
|
||||||
|
|
||||||
return [...crawledFiles, ...globbedFiles].sort();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,14 +1,31 @@
|
|||||||
import mockfs from 'mock-fs';
|
import mockfs from 'mock-fs';
|
||||||
import { CrawlOptions, CrawlService } from './crawl.service';
|
import { CrawlOptions, crawl } from 'src/utils';
|
||||||
|
|
||||||
interface Test {
|
interface Test {
|
||||||
test: string;
|
test: string;
|
||||||
options: CrawlOptions;
|
options: Omit<CrawlOptions, 'extensions'>;
|
||||||
files: Record<string, boolean>;
|
files: Record<string, boolean>;
|
||||||
}
|
}
|
||||||
|
|
||||||
const cwd = process.cwd();
|
const cwd = process.cwd();
|
||||||
|
|
||||||
|
const extensions = [
|
||||||
|
'.jpg',
|
||||||
|
'.jpeg',
|
||||||
|
'.png',
|
||||||
|
'.heif',
|
||||||
|
'.heic',
|
||||||
|
'.tif',
|
||||||
|
'.nef',
|
||||||
|
'.webp',
|
||||||
|
'.tiff',
|
||||||
|
'.dng',
|
||||||
|
'.gif',
|
||||||
|
'.mov',
|
||||||
|
'.mp4',
|
||||||
|
'.webm',
|
||||||
|
];
|
||||||
|
|
||||||
const tests: Test[] = [
|
const tests: Test[] = [
|
||||||
{
|
{
|
||||||
test: 'should return empty when crawling an empty path list',
|
test: 'should return empty when crawling an empty path list',
|
||||||
@@ -49,7 +66,7 @@ const tests: Test[] = [
|
|||||||
test: 'should exclude by file extension',
|
test: 'should exclude by file extension',
|
||||||
options: {
|
options: {
|
||||||
pathsToCrawl: ['/photos/'],
|
pathsToCrawl: ['/photos/'],
|
||||||
exclusionPatterns: ['**/*.tif'],
|
exclusionPattern: '**/*.tif',
|
||||||
},
|
},
|
||||||
files: {
|
files: {
|
||||||
'/photos/image.jpg': true,
|
'/photos/image.jpg': true,
|
||||||
@@ -60,7 +77,7 @@ const tests: Test[] = [
|
|||||||
test: 'should exclude by file extension without case sensitivity',
|
test: 'should exclude by file extension without case sensitivity',
|
||||||
options: {
|
options: {
|
||||||
pathsToCrawl: ['/photos/'],
|
pathsToCrawl: ['/photos/'],
|
||||||
exclusionPatterns: ['**/*.TIF'],
|
exclusionPattern: '**/*.TIF',
|
||||||
},
|
},
|
||||||
files: {
|
files: {
|
||||||
'/photos/image.jpg': true,
|
'/photos/image.jpg': true,
|
||||||
@@ -71,7 +88,7 @@ const tests: Test[] = [
|
|||||||
test: 'should exclude by folder',
|
test: 'should exclude by folder',
|
||||||
options: {
|
options: {
|
||||||
pathsToCrawl: ['/photos/'],
|
pathsToCrawl: ['/photos/'],
|
||||||
exclusionPatterns: ['**/raw/**'],
|
exclusionPattern: '**/raw/**',
|
||||||
recursive: true,
|
recursive: true,
|
||||||
},
|
},
|
||||||
files: {
|
files: {
|
||||||
@@ -201,7 +218,7 @@ const tests: Test[] = [
|
|||||||
test: 'should support ignoring full filename',
|
test: 'should support ignoring full filename',
|
||||||
options: {
|
options: {
|
||||||
pathsToCrawl: ['/photos'],
|
pathsToCrawl: ['/photos'],
|
||||||
exclusionPatterns: ['**/image2.jpg'],
|
exclusionPattern: '**/image2.jpg',
|
||||||
},
|
},
|
||||||
files: {
|
files: {
|
||||||
'/photos/image1.jpg': true,
|
'/photos/image1.jpg': true,
|
||||||
@@ -213,7 +230,7 @@ const tests: Test[] = [
|
|||||||
test: 'should support ignoring file extensions',
|
test: 'should support ignoring file extensions',
|
||||||
options: {
|
options: {
|
||||||
pathsToCrawl: ['/photos'],
|
pathsToCrawl: ['/photos'],
|
||||||
exclusionPatterns: ['**/*.png'],
|
exclusionPattern: '**/*.png',
|
||||||
},
|
},
|
||||||
files: {
|
files: {
|
||||||
'/photos/image1.jpg': true,
|
'/photos/image1.jpg': true,
|
||||||
@@ -226,7 +243,7 @@ const tests: Test[] = [
|
|||||||
options: {
|
options: {
|
||||||
pathsToCrawl: ['/photos'],
|
pathsToCrawl: ['/photos'],
|
||||||
recursive: true,
|
recursive: true,
|
||||||
exclusionPatterns: ['**/raw/**'],
|
exclusionPattern: '**/raw/**',
|
||||||
},
|
},
|
||||||
files: {
|
files: {
|
||||||
'/photos/image1.jpg': true,
|
'/photos/image1.jpg': true,
|
||||||
@@ -241,7 +258,7 @@ const tests: Test[] = [
|
|||||||
options: {
|
options: {
|
||||||
pathsToCrawl: ['/'],
|
pathsToCrawl: ['/'],
|
||||||
recursive: true,
|
recursive: true,
|
||||||
exclusionPatterns: ['/images/**'],
|
exclusionPattern: '/images/**',
|
||||||
},
|
},
|
||||||
files: {
|
files: {
|
||||||
'/photos/image1.jpg': true,
|
'/photos/image1.jpg': true,
|
||||||
@@ -251,12 +268,7 @@ const tests: Test[] = [
|
|||||||
},
|
},
|
||||||
];
|
];
|
||||||
|
|
||||||
describe(CrawlService.name, () => {
|
describe('crawl', () => {
|
||||||
const sut = new CrawlService(
|
|
||||||
['.jpg', '.jpeg', '.png', '.heif', '.heic', '.tif', '.nef', '.webp', '.tiff', '.dng', '.gif'],
|
|
||||||
['.mov', '.mp4', '.webm'],
|
|
||||||
);
|
|
||||||
|
|
||||||
afterEach(() => {
|
afterEach(() => {
|
||||||
mockfs.restore();
|
mockfs.restore();
|
||||||
});
|
});
|
||||||
@@ -266,7 +278,7 @@ describe(CrawlService.name, () => {
|
|||||||
it(test, async () => {
|
it(test, async () => {
|
||||||
mockfs(Object.fromEntries(Object.keys(files).map((file) => [file, ''])));
|
mockfs(Object.fromEntries(Object.keys(files).map((file) => [file, ''])));
|
||||||
|
|
||||||
const actual = await sut.crawl(options);
|
const actual = await crawl({ ...options, extensions });
|
||||||
const expected = Object.entries(files)
|
const expected = Object.entries(files)
|
||||||
.filter((entry) => entry[1])
|
.filter((entry) => entry[1])
|
||||||
.map(([file]) => file);
|
.map(([file]) => file);
|
||||||
133
cli/src/utils.ts
@@ -1,54 +1,60 @@
|
|||||||
import { defaults, getMyUserInfo, isHttpError } from '@immich/sdk';
|
import { getMyUser, init, isHttpError } from '@immich/sdk';
|
||||||
import { readFile, writeFile } from 'node:fs/promises';
|
import { glob } from 'fast-glob';
|
||||||
import { join } from 'node:path';
|
import { createHash } from 'node:crypto';
|
||||||
|
import { createReadStream } from 'node:fs';
|
||||||
|
import { readFile, stat, writeFile } from 'node:fs/promises';
|
||||||
|
import { join, resolve } from 'node:path';
|
||||||
import yaml from 'yaml';
|
import yaml from 'yaml';
|
||||||
|
|
||||||
export interface BaseOptions {
|
export interface BaseOptions {
|
||||||
configDirectory: string;
|
configDirectory: string;
|
||||||
apiKey?: string;
|
key?: string;
|
||||||
instanceUrl?: string;
|
url?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface AuthDto {
|
export type AuthDto = { url: string; key: string };
|
||||||
instanceUrl: string;
|
type OldAuthDto = { instanceUrl: string; apiKey: string };
|
||||||
apiKey: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export const authenticate = async (options: BaseOptions): Promise<void> => {
|
export const authenticate = async (options: BaseOptions): Promise<AuthDto> => {
|
||||||
const { configDirectory: configDir, instanceUrl, apiKey } = options;
|
const { configDirectory: configDir, url, key } = options;
|
||||||
|
|
||||||
// provided in command
|
// provided in command
|
||||||
if (instanceUrl && apiKey) {
|
if (url && key) {
|
||||||
await connect(instanceUrl, apiKey);
|
return connect(url, key);
|
||||||
return;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// fallback to file
|
// fallback to auth file
|
||||||
const config = await readAuthFile(configDir);
|
const config = await readAuthFile(configDir);
|
||||||
await connect(config.instanceUrl, config.apiKey);
|
const auth = await connect(config.url, config.key);
|
||||||
|
if (auth.url !== config.url) {
|
||||||
|
await writeAuthFile(configDir, auth);
|
||||||
|
}
|
||||||
|
|
||||||
|
return auth;
|
||||||
};
|
};
|
||||||
|
|
||||||
export const connect = async (instanceUrl: string, apiKey: string): Promise<void> => {
|
export const connect = async (url: string, key: string) => {
|
||||||
const wellKnownUrl = new URL('.well-known/immich', instanceUrl);
|
const wellKnownUrl = new URL('.well-known/immich', url);
|
||||||
try {
|
try {
|
||||||
const wellKnown = await fetch(wellKnownUrl).then((response) => response.json());
|
const wellKnown = await fetch(wellKnownUrl).then((response) => response.json());
|
||||||
const endpoint = new URL(wellKnown.api.endpoint, instanceUrl).toString();
|
const endpoint = new URL(wellKnown.api.endpoint, url).toString();
|
||||||
if (endpoint !== instanceUrl) {
|
if (endpoint !== url) {
|
||||||
console.debug(`Discovered API at ${endpoint}`);
|
console.debug(`Discovered API at ${endpoint}`);
|
||||||
}
|
}
|
||||||
instanceUrl = endpoint;
|
url = endpoint;
|
||||||
} catch {
|
} catch {
|
||||||
// noop
|
// noop
|
||||||
}
|
}
|
||||||
|
|
||||||
defaults.baseUrl = instanceUrl;
|
init({ baseUrl: url, apiKey: key });
|
||||||
defaults.headers = { 'x-api-key': apiKey };
|
|
||||||
|
|
||||||
const [error] = await withError(getMyUserInfo());
|
const [error] = await withError(getMyUser());
|
||||||
if (isHttpError(error)) {
|
if (isHttpError(error)) {
|
||||||
logError(error, 'Failed to connect to server');
|
logError(error, 'Failed to connect to server');
|
||||||
process.exit(1);
|
process.exit(1);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
return { url, key };
|
||||||
};
|
};
|
||||||
|
|
||||||
export const logError = (error: unknown, message: string) => {
|
export const logError = (error: unknown, message: string) => {
|
||||||
@@ -66,7 +72,12 @@ export const readAuthFile = async (dir: string) => {
|
|||||||
try {
|
try {
|
||||||
const data = await readFile(getAuthFilePath(dir));
|
const data = await readFile(getAuthFilePath(dir));
|
||||||
// TODO add class-transform/validation
|
// TODO add class-transform/validation
|
||||||
return yaml.parse(data.toString()) as AuthDto;
|
const auth = yaml.parse(data.toString()) as AuthDto | OldAuthDto;
|
||||||
|
const { instanceUrl, apiKey } = auth as OldAuthDto;
|
||||||
|
if (instanceUrl && apiKey) {
|
||||||
|
return { url: instanceUrl, key: apiKey };
|
||||||
|
}
|
||||||
|
return auth as AuthDto;
|
||||||
} catch (error: Error | any) {
|
} catch (error: Error | any) {
|
||||||
if (error.code === 'ENOENT' || error.code === 'ENOTDIR') {
|
if (error.code === 'ENOENT' || error.code === 'ENOTDIR') {
|
||||||
console.log('No auth file exists. Please login first.');
|
console.log('No auth file exists. Please login first.');
|
||||||
@@ -87,3 +98,75 @@ export const withError = async <T>(promise: Promise<T>): Promise<[Error, undefin
|
|||||||
return [error, undefined];
|
return [error, undefined];
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
export interface CrawlOptions {
|
||||||
|
pathsToCrawl: string[];
|
||||||
|
recursive?: boolean;
|
||||||
|
includeHidden?: boolean;
|
||||||
|
exclusionPattern?: string;
|
||||||
|
extensions: string[];
|
||||||
|
}
|
||||||
|
export const crawl = async (options: CrawlOptions): Promise<string[]> => {
|
||||||
|
const { extensions: extensionsWithPeriod, recursive, pathsToCrawl, exclusionPattern, includeHidden } = options;
|
||||||
|
const extensions = extensionsWithPeriod.map((extension) => extension.replace('.', ''));
|
||||||
|
|
||||||
|
if (pathsToCrawl.length === 0) {
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
|
||||||
|
const patterns: string[] = [];
|
||||||
|
const crawledFiles: string[] = [];
|
||||||
|
|
||||||
|
for await (const currentPath of pathsToCrawl) {
|
||||||
|
try {
|
||||||
|
const absolutePath = resolve(currentPath);
|
||||||
|
const stats = await stat(absolutePath);
|
||||||
|
if (stats.isFile() || stats.isSymbolicLink()) {
|
||||||
|
crawledFiles.push(absolutePath);
|
||||||
|
} else {
|
||||||
|
patterns.push(absolutePath);
|
||||||
|
}
|
||||||
|
} catch (error: any) {
|
||||||
|
if (error.code === 'ENOENT') {
|
||||||
|
patterns.push(currentPath);
|
||||||
|
} else {
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let searchPattern: string;
|
||||||
|
if (patterns.length === 1) {
|
||||||
|
searchPattern = patterns[0];
|
||||||
|
} else if (patterns.length === 0) {
|
||||||
|
return crawledFiles;
|
||||||
|
} else {
|
||||||
|
searchPattern = '{' + patterns.join(',') + '}';
|
||||||
|
}
|
||||||
|
|
||||||
|
if (recursive) {
|
||||||
|
searchPattern = searchPattern + '/**/';
|
||||||
|
}
|
||||||
|
|
||||||
|
searchPattern = `${searchPattern}/*.{${extensions.join(',')}}`;
|
||||||
|
|
||||||
|
const globbedFiles = await glob(searchPattern, {
|
||||||
|
absolute: true,
|
||||||
|
caseSensitiveMatch: false,
|
||||||
|
onlyFiles: true,
|
||||||
|
dot: includeHidden,
|
||||||
|
ignore: [`**/${exclusionPattern}`],
|
||||||
|
});
|
||||||
|
globbedFiles.push(...crawledFiles);
|
||||||
|
return globbedFiles.sort();
|
||||||
|
};
|
||||||
|
|
||||||
|
export const sha1 = (filepath: string) => {
|
||||||
|
const hash = createHash('sha1');
|
||||||
|
return new Promise<string>((resolve, reject) => {
|
||||||
|
const rs = createReadStream(filepath);
|
||||||
|
rs.on('error', reject);
|
||||||
|
rs.on('data', (chunk) => hash.update(chunk));
|
||||||
|
rs.on('end', () => resolve(hash.digest('hex')));
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|||||||
@@ -1,37 +0,0 @@
|
|||||||
import { version } from '../package.json';
|
|
||||||
|
|
||||||
export interface ICliVersion {
|
|
||||||
major: number;
|
|
||||||
minor: number;
|
|
||||||
patch: number;
|
|
||||||
}
|
|
||||||
|
|
||||||
export class CliVersion implements ICliVersion {
|
|
||||||
constructor(
|
|
||||||
public readonly major: number,
|
|
||||||
public readonly minor: number,
|
|
||||||
public readonly patch: number,
|
|
||||||
) {}
|
|
||||||
|
|
||||||
toString() {
|
|
||||||
return `${this.major}.${this.minor}.${this.patch}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
toJSON() {
|
|
||||||
const { major, minor, patch } = this;
|
|
||||||
return { major, minor, patch };
|
|
||||||
}
|
|
||||||
|
|
||||||
static fromString(version: string): CliVersion {
|
|
||||||
const regex = /v?(?<major>\d+)\.(?<minor>\d+)\.(?<patch>\d+)/i;
|
|
||||||
const matchResult = version.match(regex);
|
|
||||||
if (matchResult) {
|
|
||||||
const [, major, minor, patch] = matchResult.map(Number);
|
|
||||||
return new CliVersion(major, minor, patch);
|
|
||||||
} else {
|
|
||||||
throw new Error(`Invalid version format: ${version}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export const cliVersion = CliVersion.fromString(version);
|
|
||||||
38
deployment/.gitignore
vendored
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
# OpenTofu
|
||||||
|
|
||||||
|
# Local .terraform directories
|
||||||
|
**/.terraform/*
|
||||||
|
|
||||||
|
# .tfstate files
|
||||||
|
*.tfstate
|
||||||
|
*.tfstate.*
|
||||||
|
|
||||||
|
# Crash log files
|
||||||
|
crash.log
|
||||||
|
crash.*.log
|
||||||
|
|
||||||
|
# Ignore override files as they are usually used to override resources locally and so
|
||||||
|
# are not checked in
|
||||||
|
override.tf
|
||||||
|
override.tf.json
|
||||||
|
*_override.tf
|
||||||
|
*_override.tf.json
|
||||||
|
|
||||||
|
# Include override files you do wish to add to version control using negated pattern
|
||||||
|
# !example_override.tf
|
||||||
|
|
||||||
|
# Include tfplan files to ignore the plan output of command: terraform plan -out=tfplan
|
||||||
|
# example: *tfplan*
|
||||||
|
|
||||||
|
# Ignore CLI configuration files
|
||||||
|
.terraformrc
|
||||||
|
terraform.rc
|
||||||
|
|
||||||
|
# Terragrunt
|
||||||
|
|
||||||
|
# terragrunt cache directories
|
||||||
|
**/.terragrunt-cache/*
|
||||||
|
|
||||||
|
# Terragrunt debug output file (when using `--terragrunt-debug` option)
|
||||||
|
# See: https://terragrunt.gruntwork.io/docs/reference/cli-options/#terragrunt-debug
|
||||||
|
terragrunt-debug.tfvars.json
|
||||||
38
deployment/modules/cloudflare/docs-release/.terraform.lock.hcl
generated
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
# This file is maintained automatically by "tofu init".
|
||||||
|
# Manual edits may be lost in future updates.
|
||||||
|
|
||||||
|
provider "registry.opentofu.org/cloudflare/cloudflare" {
|
||||||
|
version = "4.34.0"
|
||||||
|
constraints = "4.34.0"
|
||||||
|
hashes = [
|
||||||
|
"h1:+W0+Xe1AUh7yvHjDbgR9T7CY1UbBC3Y6U7Eo+ucLnJM=",
|
||||||
|
"h1:2+1lKObDDdFZRluvROF3RKtXD66CFT3PfnHOvR6CmfA=",
|
||||||
|
"h1:7vluN2wmw8D9nI11YwTgoGv3hGDXlkt8xqQ4L/JABeQ=",
|
||||||
|
"h1:B0Urm8ZKTJ8cXzSCtEpJ+o+LsD8MXaD6LU59qVbh50Q=",
|
||||||
|
"h1:FpGLCm5oF12FaRti3E4iQJlkVbdCC7toyGVuH8og7KY=",
|
||||||
|
"h1:FunTmrCMDy+rom7YskY0WiL5/Y164zFrrD9xnBxU5NY=",
|
||||||
|
"h1:GrxZhEb+5HzmHF/BvZBdGKBJy6Wyjme0+ABVDz/63to=",
|
||||||
|
"h1:J36dda2K42/oTfHuZ4jKkW5+nI6BTWFRUvo60P17NJg=",
|
||||||
|
"h1:Kq0Wyn+j6zoQeghMYixbnfnyP9ZSIEJbOCzMbaCiAQQ=",
|
||||||
|
"h1:TKxunXCiS/z105sN/kBNFwU6tIKD67JKJ3ZKjwzoCuI=",
|
||||||
|
"h1:TR0URKFQxsRO5/v7bKm5hkD/CTTjsG7aVGllL/Mf25c=",
|
||||||
|
"h1:V+3Qs0Reb6r+8p4XjE5ZFDWYrOIN0x5SwORz4wvHOJ4=",
|
||||||
|
"h1:mZB3Ui7V/lPQMQK53eBOjIHcrul74252dT06Kgn3J+s=",
|
||||||
|
"h1:wJwZrIXxoki8omXLJ7XA7B1KaSrtcLMJp090fRtFRAc=",
|
||||||
|
"zh:02aa46743c1585ada8faa7db23af68ea614053a506f88f05d1090ff5e0e68076",
|
||||||
|
"zh:1e1a545e83e6457a0e15357b23139bc288fb4fbd5e9a5ddfedc95a6a0216b08c",
|
||||||
|
"zh:29eef2621e0b1501f620e615bf73b1b90d5417d745e38af63634bc03250faf87",
|
||||||
|
"zh:3c20989d7e1e141882e6091384bf85fdc83f70f3d29e3e047c493a07de992095",
|
||||||
|
"zh:3d39619379ba29c7ffb15196f0ea72a04c84cfcdf4b39ac42ac4cf4c19f3eae2",
|
||||||
|
"zh:805f4a2774e9279c590b8214aabe6df9dcc22bb995df2530513f2f78c647ce75",
|
||||||
|
"zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f",
|
||||||
|
"zh:8af716f8655a57aa986861a8a7fa1d724594a284bd77c870eaea4db5f8b9732d",
|
||||||
|
"zh:a3d13c93b4e6ee6004782debaa9a17f990f2fe8ec8ba545c232818bb6064aba9",
|
||||||
|
"zh:bfa136acf82d3719473c0064446cc16d1b0303d98b06f55f503b7abeebceadb1",
|
||||||
|
"zh:ca6cf9254ae5436f2efbc01a0e3f7e4aa3c08b45182037b3eb3eb9539b2f7aec",
|
||||||
|
"zh:cba32d5de02674004e0a5955bd5222016d9991ca0553d4bd3bea517cd9def6ab",
|
||||||
|
"zh:d22c8cd527c6d0e84567f57be5911792e2fcd5969e3bba3747489f18bb16705b",
|
||||||
|
"zh:e4eeede9b3e72cdadd6cc252d4cbcf41baee6ecfd12bacd927e2dcbe733ab210",
|
||||||
|
"zh:facdaa787a69f86203cd3cc6922baea0b4a18bd9c36b0a8162e2e88ef6c90655",
|
||||||
|
]
|
||||||
|
}
|
||||||
11
deployment/modules/cloudflare/docs-release/config.tf
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
terraform {
|
||||||
|
backend "pg" {}
|
||||||
|
required_version = "~> 1.7"
|
||||||
|
|
||||||
|
required_providers {
|
||||||
|
cloudflare = {
|
||||||
|
source = "cloudflare/cloudflare"
|
||||||
|
version = "4.34.0"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
14
deployment/modules/cloudflare/docs-release/domain.tf
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
resource "cloudflare_pages_domain" "immich_app_release_domain" {
|
||||||
|
account_id = var.cloudflare_account_id
|
||||||
|
project_name = data.terraform_remote_state.cloudflare_account.outputs.immich_app_archive_pages_project_name
|
||||||
|
domain = "immich.app"
|
||||||
|
}
|
||||||
|
|
||||||
|
resource "cloudflare_record" "immich_app_release_domain" {
|
||||||
|
name = "immich.app"
|
||||||
|
proxied = true
|
||||||
|
ttl = 1
|
||||||
|
type = "CNAME"
|
||||||
|
value = data.terraform_remote_state.cloudflare_immich_app_docs.outputs.immich_app_branch_pages_hostname
|
||||||
|
zone_id = data.terraform_remote_state.cloudflare_account.outputs.immich_app_zone_id
|
||||||
|
}
|
||||||
3
deployment/modules/cloudflare/docs-release/providers.tf
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
provider "cloudflare" {
|
||||||
|
api_token = data.terraform_remote_state.api_keys_state.outputs.terraform_key_cloudflare_docs
|
||||||
|
}
|
||||||
27
deployment/modules/cloudflare/docs-release/remote-state.tf
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
data "terraform_remote_state" "api_keys_state" {
|
||||||
|
backend = "pg"
|
||||||
|
|
||||||
|
config = {
|
||||||
|
conn_str = var.tf_state_postgres_conn_str
|
||||||
|
schema_name = "prod_cloudflare_api_keys"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
data "terraform_remote_state" "cloudflare_account" {
|
||||||
|
backend = "pg"
|
||||||
|
|
||||||
|
config = {
|
||||||
|
conn_str = var.tf_state_postgres_conn_str
|
||||||
|
schema_name = "prod_cloudflare_account"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
data "terraform_remote_state" "cloudflare_immich_app_docs" {
|
||||||
|
backend = "pg"
|
||||||
|
|
||||||
|
config = {
|
||||||
|
conn_str = var.tf_state_postgres_conn_str
|
||||||
|
schema_name = "prod_cloudflare_immich_app_docs_${var.prefix_name}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
20
deployment/modules/cloudflare/docs-release/terragrunt.hcl
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
terraform {
|
||||||
|
source = "."
|
||||||
|
|
||||||
|
extra_arguments custom_vars {
|
||||||
|
commands = get_terraform_commands_that_need_vars()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
include {
|
||||||
|
path = find_in_parent_folders("state.hcl")
|
||||||
|
}
|
||||||
|
|
||||||
|
remote_state {
|
||||||
|
backend = "pg"
|
||||||
|
|
||||||
|
config = {
|
||||||
|
conn_str = get_env("TF_STATE_POSTGRES_CONN_STR")
|
||||||
|
schema_name = "prod_cloudflare_immich_app_docs_release"
|
||||||
|
}
|
||||||
|
}
|
||||||
4
deployment/modules/cloudflare/docs-release/variables.tf
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
variable "cloudflare_account_id" {}
|
||||||
|
variable "tf_state_postgres_conn_str" {}
|
||||||
|
|
||||||
|
variable "prefix_name" {}
|
||||||
38
deployment/modules/cloudflare/docs/.terraform.lock.hcl
generated
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
# This file is maintained automatically by "tofu init".
|
||||||
|
# Manual edits may be lost in future updates.
|
||||||
|
|
||||||
|
provider "registry.opentofu.org/cloudflare/cloudflare" {
|
||||||
|
version = "4.34.0"
|
||||||
|
constraints = "4.34.0"
|
||||||
|
hashes = [
|
||||||
|
"h1:+W0+Xe1AUh7yvHjDbgR9T7CY1UbBC3Y6U7Eo+ucLnJM=",
|
||||||
|
"h1:2+1lKObDDdFZRluvROF3RKtXD66CFT3PfnHOvR6CmfA=",
|
||||||
|
"h1:7vluN2wmw8D9nI11YwTgoGv3hGDXlkt8xqQ4L/JABeQ=",
|
||||||
|
"h1:B0Urm8ZKTJ8cXzSCtEpJ+o+LsD8MXaD6LU59qVbh50Q=",
|
||||||
|
"h1:FpGLCm5oF12FaRti3E4iQJlkVbdCC7toyGVuH8og7KY=",
|
||||||
|
"h1:FunTmrCMDy+rom7YskY0WiL5/Y164zFrrD9xnBxU5NY=",
|
||||||
|
"h1:GrxZhEb+5HzmHF/BvZBdGKBJy6Wyjme0+ABVDz/63to=",
|
||||||
|
"h1:J36dda2K42/oTfHuZ4jKkW5+nI6BTWFRUvo60P17NJg=",
|
||||||
|
"h1:Kq0Wyn+j6zoQeghMYixbnfnyP9ZSIEJbOCzMbaCiAQQ=",
|
||||||
|
"h1:TKxunXCiS/z105sN/kBNFwU6tIKD67JKJ3ZKjwzoCuI=",
|
||||||
|
"h1:TR0URKFQxsRO5/v7bKm5hkD/CTTjsG7aVGllL/Mf25c=",
|
||||||
|
"h1:V+3Qs0Reb6r+8p4XjE5ZFDWYrOIN0x5SwORz4wvHOJ4=",
|
||||||
|
"h1:mZB3Ui7V/lPQMQK53eBOjIHcrul74252dT06Kgn3J+s=",
|
||||||
|
"h1:wJwZrIXxoki8omXLJ7XA7B1KaSrtcLMJp090fRtFRAc=",
|
||||||
|
"zh:02aa46743c1585ada8faa7db23af68ea614053a506f88f05d1090ff5e0e68076",
|
||||||
|
"zh:1e1a545e83e6457a0e15357b23139bc288fb4fbd5e9a5ddfedc95a6a0216b08c",
|
||||||
|
"zh:29eef2621e0b1501f620e615bf73b1b90d5417d745e38af63634bc03250faf87",
|
||||||
|
"zh:3c20989d7e1e141882e6091384bf85fdc83f70f3d29e3e047c493a07de992095",
|
||||||
|
"zh:3d39619379ba29c7ffb15196f0ea72a04c84cfcdf4b39ac42ac4cf4c19f3eae2",
|
||||||
|
"zh:805f4a2774e9279c590b8214aabe6df9dcc22bb995df2530513f2f78c647ce75",
|
||||||
|
"zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f",
|
||||||
|
"zh:8af716f8655a57aa986861a8a7fa1d724594a284bd77c870eaea4db5f8b9732d",
|
||||||
|
"zh:a3d13c93b4e6ee6004782debaa9a17f990f2fe8ec8ba545c232818bb6064aba9",
|
||||||
|
"zh:bfa136acf82d3719473c0064446cc16d1b0303d98b06f55f503b7abeebceadb1",
|
||||||
|
"zh:ca6cf9254ae5436f2efbc01a0e3f7e4aa3c08b45182037b3eb3eb9539b2f7aec",
|
||||||
|
"zh:cba32d5de02674004e0a5955bd5222016d9991ca0553d4bd3bea517cd9def6ab",
|
||||||
|
"zh:d22c8cd527c6d0e84567f57be5911792e2fcd5969e3bba3747489f18bb16705b",
|
||||||
|
"zh:e4eeede9b3e72cdadd6cc252d4cbcf41baee6ecfd12bacd927e2dcbe733ab210",
|
||||||
|
"zh:facdaa787a69f86203cd3cc6922baea0b4a18bd9c36b0a8162e2e88ef6c90655",
|
||||||
|
]
|
||||||
|
}
|
||||||
11
deployment/modules/cloudflare/docs/config.tf
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
terraform {
|
||||||
|
backend "pg" {}
|
||||||
|
required_version = "~> 1.7"
|
||||||
|
|
||||||
|
required_providers {
|
||||||
|
cloudflare = {
|
||||||
|
source = "cloudflare/cloudflare"
|
||||||
|
version = "4.34.0"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
26
deployment/modules/cloudflare/docs/domain.tf
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
resource "cloudflare_pages_domain" "immich_app_branch_domain" {
|
||||||
|
account_id = var.cloudflare_account_id
|
||||||
|
project_name = local.is_release ? data.terraform_remote_state.cloudflare_account.outputs.immich_app_archive_pages_project_name : data.terraform_remote_state.cloudflare_account.outputs.immich_app_preview_pages_project_name
|
||||||
|
domain = "${var.prefix_name}.${local.deploy_domain_prefix}.immich.app"
|
||||||
|
}
|
||||||
|
|
||||||
|
resource "cloudflare_record" "immich_app_branch_subdomain" {
|
||||||
|
name = "${var.prefix_name}.${local.deploy_domain_prefix}.immich.app"
|
||||||
|
proxied = true
|
||||||
|
ttl = 1
|
||||||
|
type = "CNAME"
|
||||||
|
value = "${replace(var.prefix_name, "/\\/|\\./", "-")}.${local.is_release ? data.terraform_remote_state.cloudflare_account.outputs.immich_app_archive_pages_project_subdomain : data.terraform_remote_state.cloudflare_account.outputs.immich_app_preview_pages_project_subdomain}"
|
||||||
|
zone_id = data.terraform_remote_state.cloudflare_account.outputs.immich_app_zone_id
|
||||||
|
}
|
||||||
|
|
||||||
|
output "immich_app_branch_subdomain" {
|
||||||
|
value = cloudflare_record.immich_app_branch_subdomain.hostname
|
||||||
|
}
|
||||||
|
|
||||||
|
output "immich_app_branch_pages_hostname" {
|
||||||
|
value = cloudflare_record.immich_app_branch_subdomain.value
|
||||||
|
}
|
||||||
|
|
||||||
|
output "pages_project_name" {
|
||||||
|
value = cloudflare_pages_domain.immich_app_branch_domain.project_name
|
||||||
|
}
|
||||||
7
deployment/modules/cloudflare/docs/locals.tf
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
locals {
|
||||||
|
domain_name = "immich.app"
|
||||||
|
preview_prefix = contains(["branch", "pr"], var.prefix_event_type) ? "preview" : ""
|
||||||
|
archive_prefix = contains(["release"], var.prefix_event_type) ? "archive" : ""
|
||||||
|
deploy_domain_prefix = coalesce(local.preview_prefix, local.archive_prefix)
|
||||||
|
is_release = contains(["release"], var.prefix_event_type)
|
||||||
|
}
|
||||||
3
deployment/modules/cloudflare/docs/providers.tf
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
provider "cloudflare" {
|
||||||
|
api_token = data.terraform_remote_state.api_keys_state.outputs.terraform_key_cloudflare_docs
|
||||||
|
}
|
||||||
17
deployment/modules/cloudflare/docs/remote-state.tf
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
data "terraform_remote_state" "api_keys_state" {
|
||||||
|
backend = "pg"
|
||||||
|
|
||||||
|
config = {
|
||||||
|
conn_str = var.tf_state_postgres_conn_str
|
||||||
|
schema_name = "prod_cloudflare_api_keys"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
data "terraform_remote_state" "cloudflare_account" {
|
||||||
|
backend = "pg"
|
||||||
|
|
||||||
|
config = {
|
||||||
|
conn_str = var.tf_state_postgres_conn_str
|
||||||
|
schema_name = "prod_cloudflare_account"
|
||||||
|
}
|
||||||
|
}
|
||||||
24
deployment/modules/cloudflare/docs/terragrunt.hcl
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
terraform {
|
||||||
|
source = "."
|
||||||
|
|
||||||
|
extra_arguments custom_vars {
|
||||||
|
commands = get_terraform_commands_that_need_vars()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
include {
|
||||||
|
path = find_in_parent_folders("state.hcl")
|
||||||
|
}
|
||||||
|
|
||||||
|
locals {
|
||||||
|
prefix_name = get_env("TF_VAR_prefix_name")
|
||||||
|
}
|
||||||
|
|
||||||
|
remote_state {
|
||||||
|
backend = "pg"
|
||||||
|
|
||||||
|
config = {
|
||||||
|
conn_str = get_env("TF_STATE_POSTGRES_CONN_STR")
|
||||||
|
schema_name = "prod_cloudflare_immich_app_docs_${local.prefix_name}"
|
||||||
|
}
|
||||||
|
}
|
||||||
5
deployment/modules/cloudflare/docs/variables.tf
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
variable "cloudflare_account_id" {}
|
||||||
|
variable "tf_state_postgres_conn_str" {}
|
||||||
|
|
||||||
|
variable "prefix_name" {}
|
||||||
|
variable "prefix_event_type" {}
|
||||||
20
deployment/state.hcl
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
locals {
|
||||||
|
cloudflare_account_id = get_env("CLOUDFLARE_ACCOUNT_ID")
|
||||||
|
cloudflare_api_token = get_env("CLOUDFLARE_API_TOKEN")
|
||||||
|
|
||||||
|
tf_state_postgres_conn_str = get_env("TF_STATE_POSTGRES_CONN_STR")
|
||||||
|
}
|
||||||
|
|
||||||
|
remote_state {
|
||||||
|
backend = "pg"
|
||||||
|
|
||||||
|
config = {
|
||||||
|
conn_str = local.tf_state_postgres_conn_str
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
inputs = {
|
||||||
|
cloudflare_account_id = local.cloudflare_account_id
|
||||||
|
cloudflare_api_token = local.cloudflare_api_token
|
||||||
|
tf_state_postgres_conn_str = local.tf_state_postgres_conn_str
|
||||||
|
}
|
||||||
|
Before Width: | Height: | Size: 1.7 MiB After Width: | Height: | Size: 1.8 MiB |
@@ -2,36 +2,34 @@
|
|||||||
# - https://immich.app/docs/developer/setup
|
# - https://immich.app/docs/developer/setup
|
||||||
# - https://immich.app/docs/developer/troubleshooting
|
# - https://immich.app/docs/developer/troubleshooting
|
||||||
|
|
||||||
version: '3.8'
|
|
||||||
|
|
||||||
name: immich-dev
|
name: immich-dev
|
||||||
|
|
||||||
x-server-build: &server-common
|
|
||||||
image: immich-server-dev:latest
|
|
||||||
build:
|
|
||||||
context: ../
|
|
||||||
dockerfile: server/Dockerfile
|
|
||||||
target: dev
|
|
||||||
restart: always
|
|
||||||
volumes:
|
|
||||||
- ../server:/usr/src/app
|
|
||||||
- ../open-api:/usr/src/open-api
|
|
||||||
- ${UPLOAD_LOCATION}/photos:/usr/src/app/upload
|
|
||||||
- ${UPLOAD_LOCATION}/photos/upload:/usr/src/app/upload/upload
|
|
||||||
- /usr/src/app/node_modules
|
|
||||||
- /etc/localtime:/etc/localtime:ro
|
|
||||||
env_file:
|
|
||||||
- .env
|
|
||||||
ulimits:
|
|
||||||
nofile:
|
|
||||||
soft: 1048576
|
|
||||||
hard: 1048576
|
|
||||||
|
|
||||||
services:
|
services:
|
||||||
immich-server:
|
immich-server:
|
||||||
container_name: immich_server
|
container_name: immich_server
|
||||||
command: ['/usr/src/app/bin/immich-dev', 'immich']
|
command: ['/usr/src/app/bin/immich-dev']
|
||||||
<<: *server-common
|
image: immich-server-dev:latest
|
||||||
|
# extends:
|
||||||
|
# file: hwaccel.transcoding.yml
|
||||||
|
# service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
|
||||||
|
build:
|
||||||
|
context: ../
|
||||||
|
dockerfile: server/Dockerfile
|
||||||
|
target: dev
|
||||||
|
restart: always
|
||||||
|
volumes:
|
||||||
|
- ../server:/usr/src/app
|
||||||
|
- ../open-api:/usr/src/open-api
|
||||||
|
- ${UPLOAD_LOCATION}/photos:/usr/src/app/upload
|
||||||
|
- ${UPLOAD_LOCATION}/photos/upload:/usr/src/app/upload/upload
|
||||||
|
- /usr/src/app/node_modules
|
||||||
|
- /etc/localtime:/etc/localtime:ro
|
||||||
|
env_file:
|
||||||
|
- .env
|
||||||
|
ulimits:
|
||||||
|
nofile:
|
||||||
|
soft: 1048576
|
||||||
|
hard: 1048576
|
||||||
ports:
|
ports:
|
||||||
- 3001:3001
|
- 3001:3001
|
||||||
- 9230:9230
|
- 9230:9230
|
||||||
@@ -39,19 +37,6 @@ services:
|
|||||||
- redis
|
- redis
|
||||||
- database
|
- database
|
||||||
|
|
||||||
immich-microservices:
|
|
||||||
container_name: immich_microservices
|
|
||||||
command: ['/usr/src/app/bin/immich-dev', 'microservices']
|
|
||||||
<<: *server-common
|
|
||||||
# extends:
|
|
||||||
# file: hwaccel.transcoding.yml
|
|
||||||
# service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
|
|
||||||
ports:
|
|
||||||
- 9231:9230
|
|
||||||
depends_on:
|
|
||||||
- database
|
|
||||||
- immich-server
|
|
||||||
|
|
||||||
immich-web:
|
immich-web:
|
||||||
container_name: immich_web
|
container_name: immich_web
|
||||||
image: immich-web-dev:latest
|
image: immich-web-dev:latest
|
||||||
@@ -99,7 +84,9 @@ services:
|
|||||||
|
|
||||||
redis:
|
redis:
|
||||||
container_name: immich_redis
|
container_name: immich_redis
|
||||||
image: redis:6.2-alpine@sha256:51d6c56749a4243096327e3fb964a48ed92254357108449cb6e23999c37773c5
|
image: redis:6.2-alpine@sha256:e31ca60b18f7e9b78b573d156702471d4eda038803c0b8e6f01559f350031e93
|
||||||
|
healthcheck:
|
||||||
|
test: redis-cli ping || exit 1
|
||||||
|
|
||||||
database:
|
database:
|
||||||
container_name: immich_postgres
|
container_name: immich_postgres
|
||||||
@@ -110,11 +97,18 @@ services:
|
|||||||
POSTGRES_PASSWORD: ${DB_PASSWORD}
|
POSTGRES_PASSWORD: ${DB_PASSWORD}
|
||||||
POSTGRES_USER: ${DB_USERNAME}
|
POSTGRES_USER: ${DB_USERNAME}
|
||||||
POSTGRES_DB: ${DB_DATABASE_NAME}
|
POSTGRES_DB: ${DB_DATABASE_NAME}
|
||||||
|
POSTGRES_INITDB_ARGS: '--data-checksums'
|
||||||
volumes:
|
volumes:
|
||||||
- ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
|
- ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
|
||||||
ports:
|
ports:
|
||||||
- 5432:5432
|
- 5432:5432
|
||||||
|
healthcheck:
|
||||||
|
test: pg_isready --dbname='${DB_DATABASE_NAME}' || exit 1; Chksum="$$(psql --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' --tuples-only --no-align --command='SELECT SUM(checksum_failures) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
|
||||||
|
interval: 5m
|
||||||
|
start_interval: 30s
|
||||||
|
start_period: 5m
|
||||||
|
command: ["postgres", "-c" ,"shared_preload_libraries=vectors.so", "-c", 'search_path="$$user", public, vectors', "-c", "logging_collector=on", "-c", "max_wal_size=2GB", "-c", "shared_buffers=512MB", "-c", "wal_compression=on"]
|
||||||
|
|
||||||
# set IMMICH_METRICS=true in .env to enable metrics
|
# set IMMICH_METRICS=true in .env to enable metrics
|
||||||
# immich-prometheus:
|
# immich-prometheus:
|
||||||
# container_name: immich_prometheus
|
# container_name: immich_prometheus
|
||||||
|
|||||||
@@ -1,41 +1,26 @@
|
|||||||
version: '3.8'
|
|
||||||
|
|
||||||
name: immich-prod
|
name: immich-prod
|
||||||
|
|
||||||
x-server-build: &server-common
|
|
||||||
image: immich-server:latest
|
|
||||||
build:
|
|
||||||
context: ../
|
|
||||||
dockerfile: server/Dockerfile
|
|
||||||
volumes:
|
|
||||||
- ${UPLOAD_LOCATION}/photos:/usr/src/app/upload
|
|
||||||
- /etc/localtime:/etc/localtime:ro
|
|
||||||
env_file:
|
|
||||||
- .env
|
|
||||||
restart: always
|
|
||||||
|
|
||||||
services:
|
services:
|
||||||
immich-server:
|
immich-server:
|
||||||
container_name: immich_server
|
container_name: immich_server
|
||||||
command: ['start.sh', 'immich']
|
image: immich-server:latest
|
||||||
<<: *server-common
|
# extends:
|
||||||
|
# file: hwaccel.transcoding.yml
|
||||||
|
# service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
|
||||||
|
build:
|
||||||
|
context: ../
|
||||||
|
dockerfile: server/Dockerfile
|
||||||
|
volumes:
|
||||||
|
- ${UPLOAD_LOCATION}/photos:/usr/src/app/upload
|
||||||
|
- /etc/localtime:/etc/localtime:ro
|
||||||
|
env_file:
|
||||||
|
- .env
|
||||||
ports:
|
ports:
|
||||||
- 2283:3001
|
- 2283:3001
|
||||||
depends_on:
|
depends_on:
|
||||||
- redis
|
- redis
|
||||||
- database
|
- database
|
||||||
|
restart: always
|
||||||
immich-microservices:
|
|
||||||
container_name: immich_microservices
|
|
||||||
command: ['start.sh', 'microservices']
|
|
||||||
<<: *server-common
|
|
||||||
# extends:
|
|
||||||
# file: hwaccel.transcoding.yml
|
|
||||||
# service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
|
|
||||||
depends_on:
|
|
||||||
- redis
|
|
||||||
- database
|
|
||||||
- immich-server
|
|
||||||
|
|
||||||
immich-machine-learning:
|
immich-machine-learning:
|
||||||
container_name: immich_machine_learning
|
container_name: immich_machine_learning
|
||||||
@@ -56,7 +41,9 @@ services:
|
|||||||
|
|
||||||
redis:
|
redis:
|
||||||
container_name: immich_redis
|
container_name: immich_redis
|
||||||
image: redis:6.2-alpine@sha256:51d6c56749a4243096327e3fb964a48ed92254357108449cb6e23999c37773c5
|
image: redis:6.2-alpine@sha256:e31ca60b18f7e9b78b573d156702471d4eda038803c0b8e6f01559f350031e93
|
||||||
|
healthcheck:
|
||||||
|
test: redis-cli ping || exit 1
|
||||||
restart: always
|
restart: always
|
||||||
|
|
||||||
database:
|
database:
|
||||||
@@ -68,17 +55,25 @@ services:
|
|||||||
POSTGRES_PASSWORD: ${DB_PASSWORD}
|
POSTGRES_PASSWORD: ${DB_PASSWORD}
|
||||||
POSTGRES_USER: ${DB_USERNAME}
|
POSTGRES_USER: ${DB_USERNAME}
|
||||||
POSTGRES_DB: ${DB_DATABASE_NAME}
|
POSTGRES_DB: ${DB_DATABASE_NAME}
|
||||||
|
POSTGRES_INITDB_ARGS: '--data-checksums'
|
||||||
volumes:
|
volumes:
|
||||||
- ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
|
- ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
|
||||||
ports:
|
ports:
|
||||||
- 5432:5432
|
- 5432:5432
|
||||||
|
healthcheck:
|
||||||
|
test: pg_isready --dbname='${DB_DATABASE_NAME}' || exit 1; Chksum="$$(psql --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' --tuples-only --no-align --command='SELECT SUM(checksum_failures) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
|
||||||
|
interval: 5m
|
||||||
|
start_interval: 30s
|
||||||
|
start_period: 5m
|
||||||
|
command: ["postgres", "-c" ,"shared_preload_libraries=vectors.so", "-c", 'search_path="$$user", public, vectors', "-c", "logging_collector=on", "-c", "max_wal_size=2GB", "-c", "shared_buffers=512MB", "-c", "wal_compression=on"]
|
||||||
|
restart: always
|
||||||
|
|
||||||
# set IMMICH_METRICS=true in .env to enable metrics
|
# set IMMICH_METRICS=true in .env to enable metrics
|
||||||
immich-prometheus:
|
immich-prometheus:
|
||||||
container_name: immich_prometheus
|
container_name: immich_prometheus
|
||||||
ports:
|
ports:
|
||||||
- 9090:9090
|
- 9090:9090
|
||||||
image: prom/prometheus@sha256:bc1794e85c9e00293351b967efa267ce6af1c824ac875a9d0c7ac84700a8b53e
|
image: prom/prometheus@sha256:5c435642ca4d8427ca26f4901c11114023004709037880cd7860d5b7176aa731
|
||||||
volumes:
|
volumes:
|
||||||
- ./prometheus.yml:/etc/prometheus/prometheus.yml
|
- ./prometheus.yml:/etc/prometheus/prometheus.yml
|
||||||
- prometheus-data:/prometheus
|
- prometheus-data:/prometheus
|
||||||
@@ -90,7 +85,7 @@ services:
|
|||||||
command: ['./run.sh', '-disable-reporting']
|
command: ['./run.sh', '-disable-reporting']
|
||||||
ports:
|
ports:
|
||||||
- 3000:3000
|
- 3000:3000
|
||||||
image: grafana/grafana:10.4.0-ubuntu@sha256:c1f582b7cc4c1b9805d187b5600ce7879550a12ef6d29571da133c3d3fc67a9c
|
image: grafana/grafana:11.0.0-ubuntu@sha256:02e99d1ee0b52dc9d3000c7b5314e7a07e0dfd69cc49bb3f8ce323491ed3406b
|
||||||
volumes:
|
volumes:
|
||||||
- grafana-data:/var/lib/grafana
|
- grafana-data:/var/lib/grafana
|
||||||
|
|
||||||
|
|||||||
@@ -1,5 +1,3 @@
|
|||||||
version: '3.8'
|
|
||||||
|
|
||||||
#
|
#
|
||||||
# WARNING: Make sure to use the docker-compose.yml of the current release:
|
# WARNING: Make sure to use the docker-compose.yml of the current release:
|
||||||
#
|
#
|
||||||
@@ -14,7 +12,9 @@ services:
|
|||||||
immich-server:
|
immich-server:
|
||||||
container_name: immich_server
|
container_name: immich_server
|
||||||
image: ghcr.io/immich-app/immich-server:${IMMICH_VERSION:-release}
|
image: ghcr.io/immich-app/immich-server:${IMMICH_VERSION:-release}
|
||||||
command: ['start.sh', 'immich']
|
# extends:
|
||||||
|
# file: hwaccel.transcoding.yml
|
||||||
|
# service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
|
||||||
volumes:
|
volumes:
|
||||||
- ${UPLOAD_LOCATION}:/usr/src/app/upload
|
- ${UPLOAD_LOCATION}:/usr/src/app/upload
|
||||||
- /etc/localtime:/etc/localtime:ro
|
- /etc/localtime:/etc/localtime:ro
|
||||||
@@ -27,23 +27,6 @@ services:
|
|||||||
- database
|
- database
|
||||||
restart: always
|
restart: always
|
||||||
|
|
||||||
immich-microservices:
|
|
||||||
container_name: immich_microservices
|
|
||||||
image: ghcr.io/immich-app/immich-server:${IMMICH_VERSION:-release}
|
|
||||||
# extends: # uncomment this section for hardware acceleration - see https://immich.app/docs/features/hardware-transcoding
|
|
||||||
# file: hwaccel.transcoding.yml
|
|
||||||
# service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
|
|
||||||
command: ['start.sh', 'microservices']
|
|
||||||
volumes:
|
|
||||||
- ${UPLOAD_LOCATION}:/usr/src/app/upload
|
|
||||||
- /etc/localtime:/etc/localtime:ro
|
|
||||||
env_file:
|
|
||||||
- .env
|
|
||||||
depends_on:
|
|
||||||
- redis
|
|
||||||
- database
|
|
||||||
restart: always
|
|
||||||
|
|
||||||
immich-machine-learning:
|
immich-machine-learning:
|
||||||
container_name: immich_machine_learning
|
container_name: immich_machine_learning
|
||||||
# For hardware acceleration, add one of -[armnn, cuda, openvino] to the image tag.
|
# For hardware acceleration, add one of -[armnn, cuda, openvino] to the image tag.
|
||||||
@@ -60,20 +43,28 @@ services:
|
|||||||
|
|
||||||
redis:
|
redis:
|
||||||
container_name: immich_redis
|
container_name: immich_redis
|
||||||
image: registry.hub.docker.com/library/redis:6.2-alpine@sha256:51d6c56749a4243096327e3fb964a48ed92254357108449cb6e23999c37773c5
|
image: docker.io/redis:6.2-alpine@sha256:e31ca60b18f7e9b78b573d156702471d4eda038803c0b8e6f01559f350031e93
|
||||||
|
healthcheck:
|
||||||
|
test: redis-cli ping || exit 1
|
||||||
restart: always
|
restart: always
|
||||||
|
|
||||||
database:
|
database:
|
||||||
container_name: immich_postgres
|
container_name: immich_postgres
|
||||||
image: registry.hub.docker.com/tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
|
image: docker.io/tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
|
||||||
environment:
|
environment:
|
||||||
POSTGRES_PASSWORD: ${DB_PASSWORD}
|
POSTGRES_PASSWORD: ${DB_PASSWORD}
|
||||||
POSTGRES_USER: ${DB_USERNAME}
|
POSTGRES_USER: ${DB_USERNAME}
|
||||||
POSTGRES_DB: ${DB_DATABASE_NAME}
|
POSTGRES_DB: ${DB_DATABASE_NAME}
|
||||||
|
POSTGRES_INITDB_ARGS: '--data-checksums'
|
||||||
volumes:
|
volumes:
|
||||||
- pgdata:/var/lib/postgresql/data
|
- ${DB_DATA_LOCATION}:/var/lib/postgresql/data
|
||||||
|
healthcheck:
|
||||||
|
test: pg_isready --dbname='${DB_DATABASE_NAME}' || exit 1; Chksum="$$(psql --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' --tuples-only --no-align --command='SELECT SUM(checksum_failures) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
|
||||||
|
interval: 5m
|
||||||
|
start_interval: 30s
|
||||||
|
start_period: 5m
|
||||||
|
command: ["postgres", "-c" ,"shared_preload_libraries=vectors.so", "-c", 'search_path="$$user", public, vectors', "-c", "logging_collector=on", "-c", "max_wal_size=2GB", "-c", "shared_buffers=512MB", "-c", "wal_compression=on"]
|
||||||
restart: always
|
restart: always
|
||||||
|
|
||||||
volumes:
|
volumes:
|
||||||
pgdata:
|
|
||||||
model-cache:
|
model-cache:
|
||||||
|
|||||||
@@ -2,6 +2,11 @@
|
|||||||
|
|
||||||
# The location where your uploaded files are stored
|
# The location where your uploaded files are stored
|
||||||
UPLOAD_LOCATION=./library
|
UPLOAD_LOCATION=./library
|
||||||
|
# The location where your database files are stored
|
||||||
|
DB_DATA_LOCATION=./postgres
|
||||||
|
|
||||||
|
# To set a timezone, uncomment the next line and change Etc/UTC to a TZ identifier from this list: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones#List
|
||||||
|
# TZ=Etc/UTC
|
||||||
|
|
||||||
# The Immich version to use. You can pin this to a specific version like "v1.71.0"
|
# The Immich version to use. You can pin this to a specific version like "v1.71.0"
|
||||||
IMMICH_VERSION=release
|
IMMICH_VERSION=release
|
||||||
@@ -11,8 +16,5 @@ DB_PASSWORD=postgres
|
|||||||
|
|
||||||
# The values below this line do not need to be changed
|
# The values below this line do not need to be changed
|
||||||
###################################################################################
|
###################################################################################
|
||||||
DB_HOSTNAME=immich_postgres
|
|
||||||
DB_USERNAME=postgres
|
DB_USERNAME=postgres
|
||||||
DB_DATABASE_NAME=immich
|
DB_DATABASE_NAME=immich
|
||||||
|
|
||||||
REDIS_HOSTNAME=immich_redis
|
|
||||||
|
|||||||
@@ -1,9 +1,7 @@
|
|||||||
version: "3.8"
|
|
||||||
|
|
||||||
# Configurations for hardware-accelerated machine learning
|
# Configurations for hardware-accelerated machine learning
|
||||||
|
|
||||||
# If using Unraid or another platform that doesn't allow multiple Compose files,
|
# If using Unraid or another platform that doesn't allow multiple Compose files,
|
||||||
# you can inline the config for a backend by copying its contents
|
# you can inline the config for a backend by copying its contents
|
||||||
# into the immich-machine-learning service in the docker-compose.yml file.
|
# into the immich-machine-learning service in the docker-compose.yml file.
|
||||||
|
|
||||||
# See https://immich.app/docs/features/ml-hardware-acceleration for info on usage.
|
# See https://immich.app/docs/features/ml-hardware-acceleration for info on usage.
|
||||||
@@ -27,12 +25,10 @@ services:
|
|||||||
count: 1
|
count: 1
|
||||||
capabilities:
|
capabilities:
|
||||||
- gpu
|
- gpu
|
||||||
- compute
|
|
||||||
- video
|
|
||||||
|
|
||||||
openvino:
|
openvino:
|
||||||
device_cgroup_rules:
|
device_cgroup_rules:
|
||||||
- "c 189:* rmw"
|
- 'c 189:* rmw'
|
||||||
devices:
|
devices:
|
||||||
- /dev/dri:/dev/dri
|
- /dev/dri:/dev/dri
|
||||||
volumes:
|
volumes:
|
||||||
|
|||||||
@@ -1,5 +1,3 @@
|
|||||||
version: "3.8"
|
|
||||||
|
|
||||||
# Configurations for hardware-accelerated transcoding
|
# Configurations for hardware-accelerated transcoding
|
||||||
|
|
||||||
# If using Unraid or another platform that doesn't allow multiple Compose files,
|
# If using Unraid or another platform that doesn't allow multiple Compose files,
|
||||||
|
|||||||
1
docs/.nvmrc
Normal file
@@ -0,0 +1 @@
|
|||||||
|
20.14
|
||||||
@@ -1,17 +1,17 @@
|
|||||||
# Website
|
# Website
|
||||||
|
|
||||||
This website is built using [Docusaurus 2](https://docusaurus.io/), a modern static website generator.
|
This website is built using [Docusaurus](https://docusaurus.io/), a modern static website generator.
|
||||||
|
|
||||||
### Installation
|
### Installation
|
||||||
|
|
||||||
```
|
```
|
||||||
$ yarn
|
$ npm install
|
||||||
```
|
```
|
||||||
|
|
||||||
### Local Development
|
### Local Development
|
||||||
|
|
||||||
```
|
```
|
||||||
$ yarn start
|
$ npm run start
|
||||||
```
|
```
|
||||||
|
|
||||||
This command starts a local development server and opens up a browser window. Most changes are reflected live without having to restart the server.
|
This command starts a local development server and opens up a browser window. Most changes are reflected live without having to restart the server.
|
||||||
@@ -19,7 +19,7 @@ This command starts a local development server and opens up a browser window. Mo
|
|||||||
### Build
|
### Build
|
||||||
|
|
||||||
```
|
```
|
||||||
$ yarn build
|
$ npm run build
|
||||||
```
|
```
|
||||||
|
|
||||||
This command generates static content into the `build` directory and can be served using any static contents hosting service.
|
This command generates static content into the `build` directory and can be served using any static contents hosting service.
|
||||||
@@ -29,13 +29,13 @@ This command generates static content into the `build` directory and can be serv
|
|||||||
Using SSH:
|
Using SSH:
|
||||||
|
|
||||||
```
|
```
|
||||||
$ USE_SSH=true yarn deploy
|
$ USE_SSH=true npm run deploy
|
||||||
```
|
```
|
||||||
|
|
||||||
Not using SSH:
|
Not using SSH:
|
||||||
|
|
||||||
```
|
```
|
||||||
$ GIT_USER=<Your GitHub username> yarn deploy
|
$ GIT_USER=<Your GitHub username> npm run deploy
|
||||||
```
|
```
|
||||||
|
|
||||||
If you are using GitHub pages for hosting, this command is a convenient way to build the website and push to the `gh-pages` branch.
|
If you are using GitHub pages for hosting, this command is a convenient way to build the website and push to the `gh-pages` branch.
|
||||||
|
|||||||
@@ -10,8 +10,8 @@ Hello everyone, it is my pleasure to deliver the new release of Immich to you. T
|
|||||||
|
|
||||||
Some notable features are:
|
Some notable features are:
|
||||||
|
|
||||||
- [OAuth integration](#livephoto-ios-support-)
|
- OAuth integration
|
||||||
- [LivePhoto support on iOS](#oauth-integration-)
|
- LivePhoto support on iOS
|
||||||
- User config system
|
- User config system
|
||||||
|
|
||||||
<!--truncate-->
|
<!--truncate-->
|
||||||
|
|||||||
75
docs/blog/2024/immich-core-team-goes-fulltime.mdx
Normal file
@@ -0,0 +1,75 @@
|
|||||||
|
---
|
||||||
|
title: The Immich core team goes full-time
|
||||||
|
authors: [alextran]
|
||||||
|
tags: [update, announcement, futo]
|
||||||
|
date: 2024-05-01T00:00
|
||||||
|
---
|
||||||
|
|
||||||
|
**Immich is joining [FUTO](https://futo.org/)!**
|
||||||
|
|
||||||
|
Since the beginning of this adventure, my goal has always been to create a better world for my children. Memories are priceless, and privacy should not be a luxury. However, building quality open source has its challenges. Over the past two years, it has taken significant dedication, time, and effort.
|
||||||
|
|
||||||
|
Recently, a company in Austin, Texas, called FUTO contacted the team. FUTO strives to develop quality and sustainable open software. They build software alternatives that focus on giving control to users. From their mission statement:
|
||||||
|
|
||||||
|
“Computers should belong to you, the people. We develop and fund technology to give them back.”
|
||||||
|
|
||||||
|
FUTO loved Immich and wanted to see if we’d consider working with them to take the project to the next level. In short, FUTO offered to:
|
||||||
|
|
||||||
|
- Pay the core team to work on Immich full-time
|
||||||
|
- Let us keep full autonomy about the project’s direction and leadership
|
||||||
|
- Continue to license Immich under AGPL
|
||||||
|
- Keep Immich’s development direction with no paywalled features
|
||||||
|
- Keep Immich “built for the people” (no ads, data mining/selling, or alternative motives)
|
||||||
|
- Provide us with financial, technical, legal, and administrative support
|
||||||
|
|
||||||
|
After careful deliberation, the team decided that FUTO’s vision closely aligns with our own: to build a better future by providing a polished, performant, and privacy-preserving open-source software solution for photo and video management delivered in a sustainable way.
|
||||||
|
|
||||||
|
Immich’s future has never looked brighter, and we look forward to realizing our vision for Immich as part of FUTO.
|
||||||
|
|
||||||
|
If you have more questions, we’ll host a Q&A live stream on May 9th at 3PM UTC (10AM CST). [You can ask questions here](https://www.live-ask.com/event/01HWP2SB99A1K8EXFBDKZ5Z9CF), and the stream will be live [here on our YouTube channel](https://youtube.com/live/cwz2iZwYpgg).
|
||||||
|
|
||||||
|
Cheers,
|
||||||
|
|
||||||
|
The Immich Team
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## FAQs
|
||||||
|
|
||||||
|
### What is FUTO?
|
||||||
|
|
||||||
|
[https://futo.org/what-is-futo/](https://futo.org/what-is-futo/)
|
||||||
|
|
||||||
|
### Will the license change?
|
||||||
|
|
||||||
|
No. Immich will continue to be licensed under AGPL without a CLA.
|
||||||
|
|
||||||
|
### Will Immich continue to be free?
|
||||||
|
|
||||||
|
Yes. The Immich source code will remain freely available under the AGPL license.
|
||||||
|
|
||||||
|
### Is Immich getting VC funding?
|
||||||
|
|
||||||
|
No. Venture capital implies investment in a business, often with the expectation of a future payout (exit plan). Immich is neither a business that can be acquired nor comes with a money-making exit plan.
|
||||||
|
|
||||||
|
### I am currently supporting Immich through GitHub sponsors. What will happen to my donation?
|
||||||
|
|
||||||
|
Effective immediately, all donations to the Immich organization will be canceled. In the future, we will offer an optional, modest payment option instead. Thank you to everyone who donated to help us get this far!
|
||||||
|
|
||||||
|
### How is funding sustainable?
|
||||||
|
|
||||||
|
Immich and FUTO believe a sustainable future requires a model that does not rely on users-as-a-product. To this end, FUTO advocates that users pay for good, open software. In keeping with this model, we will adopt a purchase price. This means we no longer accept donations, but — _without limiting features for those who do not pay_ — we will soon allow you to purchase Immich through a modest payment. We encourage you to pay for the high-quality software you use to foster a healthy software culture where developers build great applications without hidden motives for their users.
|
||||||
|
|
||||||
|
### When does this change take effect?
|
||||||
|
|
||||||
|
This change takes effect immediately.
|
||||||
|
|
||||||
|
### What will change?
|
||||||
|
|
||||||
|
The following things will change as Immich joins FUTO:
|
||||||
|
|
||||||
|
- The brand, logo, and other Immich trademarks will be transferred to FUTO.
|
||||||
|
- We will stop all donations to the project.
|
||||||
|
- The core team can now dedicate our full attention to Immich
|
||||||
|
- Before the end of the year, we plan to have a roadmap for what it will take to get Immich to a stable release.
|
||||||
|
- Bugs will be squashed, and features will be delivered faster.
|
||||||
@@ -6,7 +6,7 @@
|
|||||||
|
|
||||||
The admin password can be reset by running the [reset-admin-password](/docs/administration/server-commands.md) command on the immich-server.
|
The admin password can be reset by running the [reset-admin-password](/docs/administration/server-commands.md) command on the immich-server.
|
||||||
|
|
||||||
### How can I see list of all users in Immich?
|
### How can I see a list of all users in Immich?
|
||||||
|
|
||||||
You can see the list of all users by running [list-users](/docs/administration/server-commands.md) Command on the Immich-server.
|
You can see the list of all users by running [list-users](/docs/administration/server-commands.md) Command on the Immich-server.
|
||||||
|
|
||||||
@@ -24,37 +24,58 @@ You can see the list of all users by running [list-users](/docs/administration/s
|
|||||||
|
|
||||||
### I cannot log into the application after an update. What can I do?
|
### I cannot log into the application after an update. What can I do?
|
||||||
|
|
||||||
First, verify that the mobile app and server are both running the same version (major and minor).
|
Verify that the mobile app and server are both running the same version (major and minor).
|
||||||
|
|
||||||
:::note
|
:::note
|
||||||
App store updates sometimes take longer because the stores (Google play store and Apple app store)
|
App store updates sometimes take longer because the stores (Google Play Store and Apple App Store)
|
||||||
need to approve the update first which may take some time.
|
need to approve the update first, and it can take some time.
|
||||||
:::
|
:::
|
||||||
|
|
||||||
If you still cannot login to the app, try the following:
|
If you still cannot log in to the app, try the following:
|
||||||
|
|
||||||
- Check the mobile logs
|
- Check the mobile logs
|
||||||
- Make sure login credentials are correct by logging in on the web app
|
- Make sure login credentials are correct by logging in on the web app
|
||||||
|
|
||||||
|
### Why does foreground backup stop when I navigate away from the app? Shouldn't it transfer the job to background backup?
|
||||||
|
|
||||||
|
Foreground backup and background backup are two separate mechanisms. They don't communicate or interact with each other.
|
||||||
|
|
||||||
|
Foreground backup is controlled by the user's action, while background backup is controlled by your device's operating system. When the app is put in the background, the invocation of background tasks is delegated to the device's operating system scheduler. It decides when the background task can run and how long it can run.
|
||||||
|
|
||||||
|
The behaviors differ based on your device manufacturer and operating system, but most are related to battery-saving policies.
|
||||||
|
|
||||||
|
### Why is background backup on iOS not working?
|
||||||
|
|
||||||
|
On iOS (iPhone and iPad), the operating system determines if a particular app can invoke background tasks based on multiple factors, most of which the Immich app has no control over. To increase the likelihood that the background backup task is run, follow the steps below:
|
||||||
|
|
||||||
|
- Enable Background App Refresh for Immich in the iOS settings at `Settings > General > Background App Refresh`.
|
||||||
|
- Disable Background App Refresh for apps that don't need background tasks to run. This will reduce the competition for background task invocation for Immich.
|
||||||
|
- Use the Immich app more often.
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## Assets
|
## Assets
|
||||||
|
|
||||||
|
### Does Immich change the file?
|
||||||
|
|
||||||
|
No, Immich does not touch the original file under any circumstances,
|
||||||
|
all edited metadata are saved in the companion sidecar file and the database.
|
||||||
|
|
||||||
### Can I add my existing photo library?
|
### Can I add my existing photo library?
|
||||||
|
|
||||||
Yes, with an [External Library](/docs/features/libraries.md).
|
Yes, with an [External Library](/docs/features/libraries.md).
|
||||||
|
|
||||||
### What happens to existing files after I choose a new [Storage Template](/docs/administration/storage-template.mdx)?
|
### What happens to existing files after I choose a new [Storage Template](/docs/administration/storage-template.mdx)?
|
||||||
|
|
||||||
Template changes will only apply to _new_ assets. To retroactively apply the template to previously uploaded assets, run the Storage Migration Job, available on the [Jobs](/docs/administration/jobs.md) page.
|
Template changes will only apply to _new_ assets. To retroactively apply the template to previously uploaded assets, run the Storage Migration Job, available on the [Jobs](/docs/administration/jobs-workers/#jobs) page.
|
||||||
|
|
||||||
### Why are only photos and not videos being uploaded to Immich?
|
### Why are only photos and not videos being uploaded to Immich?
|
||||||
|
|
||||||
This often happens when using a reverse proxy (such as nginx or Cloudflare tunnel) in front of Immich. Make sure to set your reverse proxy to allow large `POST` requests. In `nginx`, set `client_max_body_size 50000M;` or similar. Also check the disk space of your reverse proxy, in some cases proxies cache requests to disk before passing them on, and if disk space runs out the request fails.
|
This often happens when using a reverse proxy (such as Nginx or Cloudflare tunnel) in front of Immich. Make sure to set your reverse proxy to allow large `POST` requests. In `nginx`, set `client_max_body_size 50000M;` or similar. Also, check the disk space of your reverse proxy. In some cases, proxies cache requests to disk before passing them on, and if disk space runs out, the request fails.
|
||||||
|
|
||||||
### Why are some photos stored in the file system with the wrong date?
|
### Why are some photos stored in the file system with the wrong date?
|
||||||
|
|
||||||
There are a few different scenarios that can lead to this situation. The solution is to run the storage migration job again. The job is only _automatically_ run once per asset, after upload. If metadata extraction originally failed, the jobs were cleared/cancelled, etc. the job may not have run automatically the first time.
|
There are a few different scenarios that can lead to this situation. The solution is to rerun the storage migration job. The job is only automatically run once per asset after upload. If metadata extraction originally failed, the jobs were cleared/canceled, etc., the job may not have run automatically the first time.
|
||||||
|
|
||||||
### How can I hide photos from the timeline?
|
### How can I hide photos from the timeline?
|
||||||
|
|
||||||
@@ -68,23 +89,27 @@ See [Backup and Restore](/docs/administration/backup-and-restore.md).
|
|||||||
|
|
||||||
No, it currently does not. There is an [open feature request on GitHub](https://github.com/immich-app/immich/discussions/4348).
|
No, it currently does not. There is an [open feature request on GitHub](https://github.com/immich-app/immich/discussions/4348).
|
||||||
|
|
||||||
### Does Immich support filtering of NSFW images?
|
### Does Immich support the filtering of NSFW images?
|
||||||
|
|
||||||
No, it currently does not. There is an [open feature request on Github](https://github.com/immich-app/immich/discussions/2451).
|
No, it currently does not. There is an [open feature request on Github](https://github.com/immich-app/immich/discussions/2451).
|
||||||
|
|
||||||
### Why are there so many thumbnail generation jobs?
|
### Why are there so many thumbnail generation jobs?
|
||||||
|
|
||||||
There are three thubmanil jobs for each asset:
|
There are three thumbnail jobs for each asset:
|
||||||
|
|
||||||
- Blurred (thumbhash)
|
- Blurred (thumbhash)
|
||||||
- Small (webp)
|
- Preview (Webp)
|
||||||
- Large (jpeg)
|
- Thumbnail (Jpeg)
|
||||||
|
|
||||||
Also, there are additional jobs for person (face) thumbnails.
|
Also, there are additional jobs for person (face) thumbnails.
|
||||||
|
|
||||||
|
### Why do files from WhatsApp not appear with the correct date?
|
||||||
|
|
||||||
|
Files sent on WhatsApp are saved without metadata on the file. Therefore, Immich has no way of knowing the original date of the file when files are uploaded from WhatsApp, not the order of arrival on the device. [See #3527](https://github.com/immich-app/immich/issues/3527).
|
||||||
|
|
||||||
### What happens if an asset exists in more than one account?
|
### What happens if an asset exists in more than one account?
|
||||||
|
|
||||||
There are no requirements for assets to be unique across users. If multiple users upload the same image they are processed as if they were distinct assets and jobs run and thumbnails are generated accordingly.
|
There are no requirements for assets to be unique across users. If multiple users upload the same image, it is processed as if it were a distinct asset, and jobs run and thumbnails are generated accordingly.
|
||||||
|
|
||||||
### Why do HDR videos appear pale in Immich player but look normal after download?
|
### Why do HDR videos appear pale in Immich player but look normal after download?
|
||||||
|
|
||||||
@@ -96,45 +121,46 @@ Immich always keeps your original files. Alongside that, it generates a transcod
|
|||||||
|
|
||||||
### How can I delete transcoded videos without deleting the original?
|
### How can I delete transcoded videos without deleting the original?
|
||||||
|
|
||||||
The transcoded version of an asset can be deleted by setting a transcode policy that makes it unnecessary, then running a transcoding job for that asset. This can be done on a per-asset basis by starting a transcoding job for a single asset with the _Refresh encoded videos_ button in the asset viewer options, or for all assets by running transcoding jobs for all assets from the administration page.
|
The transcoded version of an asset can be deleted by setting a transcode policy that makes it unnecessary and then running a transcoding job for that asset. This can be done on a per-asset basis by starting a transcoding job for a single asset with the _Refresh encoded videos_ button in the asset viewer options or for all assets by running transcoding jobs for all assets from the administration page.
|
||||||
|
|
||||||
To update the transcode policy, navigate to Administration > Video Transcoding Settings > Transcoding Policy and select a policy from the drop-down. This policy will determine whether an existing transcode will be deleted or overwritten in the transcoding job. If a video should be transcoded according to this policy, an existing transcode is overwritten. If not, then it is deleted.
|
To update the transcode policy, navigate to Administration > Video Transcoding Settings > Transcoding Policy and select a policy from the drop-down. This policy will determine whether an existing transcode will be deleted or overwritten in the transcoding job. If a video should be transcoded according to this policy, an existing transcode is overwritten. If not, then it is deleted.
|
||||||
|
|
||||||
:::note
|
:::note
|
||||||
For example, say you have existing transcodes with the policy "Videos higher than normal resolution or not in the desired format" and switch to a narrower policy: "Videos not in the desired format". If an asset was only transcoded due to its resolution, then running a transcoding job for it will now delete the existing transcode. This is because resolution is no longer part of the transcode policy and the transcode is unnecessary as a result. Likewise, if you set the policy to "Don't transcode any videos" and run transcoding jobs for all assets, this will delete all existing transcodes as they are all unnecessary.
|
For example, say you have existing transcodes with the policy "Videos higher than normal resolution or not in the desired format" and switch to a narrower policy: "Videos not in the desired format." If an asset was only transcoded due to its resolution, running a transcoding job for it will delete the existing transcode. This is because resolution is no longer part of the transcode policy and the transcode is unnecessary. Likewise, if you set the policy to "Don't transcode any videos" and run transcoding jobs for all assets, this will delete all existing transcodes as they are unnecessary.
|
||||||
:::
|
:::
|
||||||
|
|
||||||
### Is it possible to compress images during backup?
|
### Is it possible to compress images during backup?
|
||||||
|
|
||||||
No. Our golden rule is that the original assets should always be untouched, so we don't think this feature is a good fit for Immich.
|
No. Our design principle is that the original assets should always be untouched.
|
||||||
|
|
||||||
### How can I move all data (photos, persons, albums) from one user to another?
|
### How can I move all data (photos, persons, albums, libraries) from one user to another?
|
||||||
|
|
||||||
This is not officially supported, but can be accomplished with some database updates. You can do this on the command line (in the PostgreSQL container using the psql command), or you can add for example an [Adminer](https://www.adminer.org/) container to the `docker-compose.yml` file, so that you can use a web-interface.
|
This is not officially supported but can be accomplished with some database updates. You can do this on the command line (in the PostgreSQL container using the `psql` command), or you can add, for example, an [Adminer](https://www.adminer.org/) container to the `docker-compose.yml` file so that you can use a web interface.
|
||||||
|
|
||||||
:::warning
|
|
||||||
This is an advanced operation. If you can't do it with the steps described here, this is not for you.
|
|
||||||
:::
|
|
||||||
|
|
||||||
<details>
|
<details>
|
||||||
<summary>Steps</summary>
|
<summary>Steps</summary>
|
||||||
|
|
||||||
1. **MAKE A BACKUP** - See [backup and restore](/docs/administration/backup-and-restore.md).
|
1. **MAKE A BACKUP** - See [backup and restore](/docs/administration/backup-and-restore.md).
|
||||||
|
|
||||||
2. Find the id of both the 'source' and the 'destination' user (it's the id column in the users table)
|
2. Find the ID of both the 'source' and the 'destination' user (it's the id column in the `users` table)
|
||||||
|
|
||||||
3. Three tables need to be updated:
|
3. Four tables need to be updated:
|
||||||
|
|
||||||
```sql
|
```sql
|
||||||
// reassign albums
|
BEGIN;
|
||||||
|
-- reassign albums
|
||||||
UPDATE albums SET "ownerId" = '<destinationId>' WHERE "ownerId" = '<sourceId>';
|
UPDATE albums SET "ownerId" = '<destinationId>' WHERE "ownerId" = '<sourceId>';
|
||||||
|
|
||||||
// reassign people
|
-- reassign people
|
||||||
UPDATE person SET "ownerId" = '<destinationId>' WHERE "ownerId" = '<sourceId>';
|
UPDATE person SET "ownerId" = '<destinationId>' WHERE "ownerId" = '<sourceId>';
|
||||||
|
|
||||||
// reassign assets
|
-- reassign assets
|
||||||
UPDATE assets SET "ownerId" = '<destinationId>' WHERE "ownerId" = '<sourceId>'
|
UPDATE assets SET "ownerId" = '<destinationId>' WHERE "ownerId" = '<sourceId>'
|
||||||
AND CHECKSUM NOT IN (SELECT CHECKSUM FROM assets WHERE "ownerId" = '<destinationId>');
|
AND CHECKSUM NOT IN (SELECT CHECKSUM FROM assets WHERE "ownerId" = '<destinationId>');
|
||||||
|
|
||||||
|
-- reassign external libraries
|
||||||
|
UPDATE libraries SET "ownerId" = '<destinationId>' WHERE "ownerId" = '<sourceId>';
|
||||||
|
COMMIT;
|
||||||
```
|
```
|
||||||
|
|
||||||
4. There might be left-over assets in the 'source' user's library if they are skipped by the last query because of duplicate checksums. These are probably duplicates anyway, and can probably be removed.
|
4. There might be left-over assets in the 'source' user's library if they are skipped by the last query because of duplicate checksums. These are probably duplicates anyway, and can probably be removed.
|
||||||
@@ -159,7 +185,7 @@ No, not yet. For updates on this planned feature, follow the [GitHub discussion]
|
|||||||
|
|
||||||
### Can I add an external library while keeping the existing album structure?
|
### Can I add an external library while keeping the existing album structure?
|
||||||
|
|
||||||
We haven't put in an official mechanism to create albums from external libraries at the moment, but there are some [workarounds from the community](https://github.com/immich-app/immich/discussions/4279) to help you achieve that.
|
We haven't implemented an official mechanism for creating albums from external libraries, but there are some [workarounds from the community](https://github.com/immich-app/immich/discussions/4279) to help you achieve that.
|
||||||
|
|
||||||
### What happens to duplicates in external libraries?
|
### What happens to duplicates in external libraries?
|
||||||
|
|
||||||
@@ -171,7 +197,7 @@ Duplicate checking only exists for upload libraries, using the file hash. Furthe
|
|||||||
|
|
||||||
### How does smart search work?
|
### How does smart search work?
|
||||||
|
|
||||||
Immich uses CLIP models, for more information about CLIP and its capabilities read about it [here](https://openai.com/research/clip).
|
Immich uses CLIP models. For more information about CLIP and its capabilities, read about it [here](https://openai.com/research/clip).
|
||||||
|
|
||||||
### How does facial recognition work?
|
### How does facial recognition work?
|
||||||
|
|
||||||
@@ -189,33 +215,31 @@ However, disabling all jobs will not disable the machine learning service itself
|
|||||||
|
|
||||||
### I'm getting errors about models being corrupt or failing to download. What do I do?
|
### I'm getting errors about models being corrupt or failing to download. What do I do?
|
||||||
|
|
||||||
You can delete the model cache volume, which is where models are downloaded to. This will give the service a clean environment to download the model again. If models are failing to download entirely, you can manually download them from [Huggingface](https://huggingface.co/immich-app) and place them in the cache folder.
|
You can delete the model cache volume, where models are downloaded. This will give the service a clean environment to download the model again. If models are failing to download entirely, you can manually download them from [Huggingface][huggingface] and place them in the cache folder.
|
||||||
|
|
||||||
### Why did Immich decide to remove object detection?
|
|
||||||
|
|
||||||
The feature added keywords to images for metadata search, but wasn't used for smart search. Smart search made it unnecessary as it isn't limited to exact keywords. Combined with it causing crashes on some devices, using many dependencies and causing user confusion as to how search worked, it was better to remove the job altogether.
|
|
||||||
For more info see [here](https://github.com/immich-app/immich/pull/5903)
|
|
||||||
|
|
||||||
### Can I use a custom CLIP model?
|
### Can I use a custom CLIP model?
|
||||||
|
|
||||||
No, this is not supported. Only models listed in the [Huggingface](https://huggingface.co/immich-app) page are compatible. Feel free to make a feature request if there's a model not listed here that you think should be added.
|
No, this is not supported. Only models listed in the [Huggingface][huggingface] page are compatible. Feel free to make a feature request if there's a model not listed here that you think should be added.
|
||||||
|
|
||||||
### I want to be able to search in other languages besides English. How can I do that?
|
### I want to be able to search in other languages besides English. How can I do that?
|
||||||
|
|
||||||
You can change to a multilingual model listed [here](https://huggingface.co/collections/immich-app/multilingual-clip-654eb08c2382f591eeb8c2a7) by going to Administration > Machine Learning Settings > Smart Search and replacing the name of the model. Be sure to re-run Smart Search on all assets after this change. You can then search in over 100 languages.
|
You can change to a multilingual model listed [here](https://huggingface.co/collections/immich-app/multilingual-clip-654eb08c2382f591eeb8c2a7) by going to Administration > Machine Learning Settings > Smart Search and replacing the name of the model. Be sure to re-run Smart Search on all assets after this change. You can then search in over 100 languages.
|
||||||
|
|
||||||
:::note
|
:::note
|
||||||
Feel free to make a feature request if there's a model you want to use that isn't in [Immich Huggingface list](https://huggingface.co/immich-app).
|
Feel free to make a feature request if there's a model you want to use that isn't in [Immich Huggingface list][huggingface].
|
||||||
:::
|
:::
|
||||||
|
|
||||||
### Does Immich support Facial Recognition for videos ?
|
### Does Immich support Facial Recognition for videos?
|
||||||
|
|
||||||
Immich's machine learning feature operate on the generated thumbnail. If a face is visible in the video's thumbnail it will be picked up by facial recognition.
|
Immich's machine learning feature operates on the generated thumbnail. If a face is visible in the video's thumbnail it will be picked up by facial recognition.
|
||||||
Scanning the entire video for faces may be implemented in the future.
|
Scanning the entire video for faces may be implemented in the future.
|
||||||
|
|
||||||
### Does Immich have animal recognition?
|
### Does Immich have animal recognition?
|
||||||
|
|
||||||
No.
|
No.
|
||||||
|
:::tip
|
||||||
|
You can use [Smart Search](/docs/features/smart-search.md) for this to some extent. For example, if you have a Golden Retriever and a Chihuahua, type these words in the smart search and watch the results.
|
||||||
|
:::
|
||||||
|
|
||||||
### I'm getting a lot of "faces" that aren't faces, what can I do?
|
### I'm getting a lot of "faces" that aren't faces, what can I do?
|
||||||
|
|
||||||
@@ -224,14 +248,19 @@ to increase the bar for what the algorithm considers a "core face" for that pers
|
|||||||
|
|
||||||
### The immich_model-cache volume takes up a lot of space, what could be the problem?
|
### The immich_model-cache volume takes up a lot of space, what could be the problem?
|
||||||
|
|
||||||
If you installed several models and chose not to use some of them, it might be worth deleting the old models that are in immich_model-cache.
|
If you installed several models and chose not to use some of them, it might be worth deleting the old models that are in immich_model-cache. To do this you can mount the model cache and remove the undesired models.
|
||||||
|
|
||||||
To do this you can run:
|
<details>
|
||||||
|
<summary>Steps</summary>
|
||||||
|
|
||||||
- `docker run -it --rm -v immich_model-cache:/mnt ubuntu bash`
|
```bash
|
||||||
- `cd mnt`
|
docker run -it --rm -v immich_model-cache:/mnt-models alpine sh
|
||||||
- `ls`
|
cd /mnt-models
|
||||||
- and delete unused models with `rm -r <model_name>`.
|
ls clip/ facial-recognition/
|
||||||
|
# rm -r clip/ABC facial-recognition/DEF # delete unused models
|
||||||
|
```
|
||||||
|
|
||||||
|
</details>
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
@@ -248,13 +277,28 @@ The initial backup is the most intensive due to the number of jobs running. The
|
|||||||
- Lower the job concurrency for these jobs to 1.
|
- Lower the job concurrency for these jobs to 1.
|
||||||
- Under Settings > Transcoding Settings > Threads, set the number of threads to a low number like 1 or 2.
|
- Under Settings > Transcoding Settings > Threads, set the number of threads to a low number like 1 or 2.
|
||||||
- Under Settings > Machine Learning Settings > Facial Recognition > Model Name, you can change the facial recognition model to `buffalo_s` instead of `buffalo_l`. The former is a smaller and faster model, albeit not as good.
|
- Under Settings > Machine Learning Settings > Facial Recognition > Model Name, you can change the facial recognition model to `buffalo_s` instead of `buffalo_l`. The former is a smaller and faster model, albeit not as good.
|
||||||
- You _must_ re-run the Face Detection job for all images after this for facial recognition on new images to work properly.
|
- For facial recognition on new images to work properly, you must re-run the Face Detection job for all images after this.
|
||||||
- If these changes are not enough, see [below](/docs/FAQ#how-can-i-disable-machine-learning) for how you can disable machine learning.
|
- If these changes are not enough, see [below](/docs/FAQ#how-can-i-disable-machine-learning) for instructions on how to disable machine learning.
|
||||||
|
|
||||||
### Can I limit the amount of CPU and RAM usage?
|
### Can I limit the amount of CPU and RAM usage?
|
||||||
|
|
||||||
By default, a container has no resource constraints and can use as much of a given resource as the host's kernel scheduler allows.
|
By default, a container has no resource constraints and can use as much of a given resource as the host's kernel scheduler allows. To limit this, you can add the following to the `docker-compose.yml` block of any containers that you want to have limited resources.
|
||||||
You can look at the [original docker docs](https://docs.docker.com/config/containers/resource_constraints/) or use this [guide](https://www.baeldung.com/ops/docker-memory-limit) to learn how to limit this.
|
|
||||||
|
<details>
|
||||||
|
<summary>docker-compose.yml</summary>
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
deploy:
|
||||||
|
resources:
|
||||||
|
limits:
|
||||||
|
# Number of CPU threads
|
||||||
|
cpus: '1.00'
|
||||||
|
# Gigabytes of memory
|
||||||
|
memory: '1G'
|
||||||
|
```
|
||||||
|
|
||||||
|
</details>
|
||||||
|
For more details, you can look at the [original docker docs](https://docs.docker.com/config/containers/resource_constraints/) or use this [guide](https://www.baeldung.com/ops/docker-memory-limit).
|
||||||
|
|
||||||
### How can I boost machine learning speed?
|
### How can I boost machine learning speed?
|
||||||
|
|
||||||
@@ -269,13 +313,17 @@ On a normal machine, 2 or 3 concurrent jobs can probably max the CPU. Beyond thi
|
|||||||
|
|
||||||
Do not exaggerate with the amount of jobs because you're probably thoroughly overloading the server.
|
Do not set the number of concurrent jobs too high, as this will likely overload the server.
|
||||||
|
|
||||||
More detail can be found [here](https://discord.com/channels/979116623879368755/994044917355663450/1174711719994605708)
|
More details can be found [here](https://discord.com/channels/979116623879368755/994044917355663450/1174711719994605708).
|
||||||
:::
|
:::
|
||||||
|
|
||||||
### Why is Immich using so much of my CPU?
|
### Why is Immich using so much of my CPU?
|
||||||
|
|
||||||
When a large amount of assets are uploaded to Immich it makes sense that the CPU and RAM will be heavily used due to machine learning work and creating image thumbnails.
|
When a large number of assets are uploaded to Immich, it makes sense that the CPU and RAM will be heavily used for machine learning work and creating image thumbnails.
|
||||||
Once this process completes, the percentage of CPU usage will drop to around 3-5% usage
|
Once this process is completed, CPU usage will drop to around 3-5%.
|
||||||
|
|
||||||
|
### My server shows Server Status Offline | Version Unknown. What can I do?
|
||||||
|
|
||||||
|
You need to enable WebSocket support on your reverse proxy.
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
@@ -288,19 +336,37 @@ Immich components are typically deployed using docker. To see logs for deployed
|
|||||||
### How can I run Immich as a non-root user?
|
### How can I run Immich as a non-root user?
|
||||||
|
|
||||||
You can change the user in the container by setting the `user` argument in `docker-compose.yml` for each service.
|
You can change the user in the container by setting the `user` argument in `docker-compose.yml` for each service.
|
||||||
You may need to add an additional volume to `immich-microservices` that mounts internally to `/usr/src/app/.reverse-geocoding-dump`.
|
You may need to add mount points or docker volumes for the following internal container paths:
|
||||||
|
|
||||||
|
- `immich-machine-learning:/.config`
|
||||||
|
- `immich-machine-learning:/.cache`
|
||||||
|
- `redis:/data`
|
||||||
|
|
||||||
The non-root user/group needs read/write access to the volume mounts, including `UPLOAD_LOCATION`.
|
The non-root user/group needs read/write access to the volume mounts, including `UPLOAD_LOCATION`.
|
||||||
|
|
||||||
### How can I **purge** data from Immich?
|
For a further hardened system, you can add the following block to every container except for `immich_postgres`.
|
||||||
|
|
||||||
|
<details>
|
||||||
|
<summary>docker-compose.yml</summary>
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
security_opt:
|
||||||
|
# Prevent escalation of privileges after the container is started
|
||||||
|
- no-new-privileges:true
|
||||||
|
cap_drop:
|
||||||
|
# Prevent access to raw network traffic
|
||||||
|
- NET_RAW
|
||||||
|
```
|
||||||
|
|
||||||
|
</details>
|
||||||
|
|
||||||
|
### How can I purge data from Immich?
|
||||||
|
|
||||||
Data for Immich comes in two forms:
|
Data for Immich comes in two forms:
|
||||||
|
|
||||||
1. **Metadata** stored in a postgres database, persisted via the `pg_data` volume
|
1. **Metadata** stored in a Postgres database, stored in the `DB_DATA_LOCATION` folder (previously `pg_data` Docker volume).
|
||||||
2. **Files** (originals, thumbs, profile, etc.), stored in the `UPLOAD_LOCATION` folder, more [info](/docs/administration/backup-and-restore#asset-types-and-storage-locations).
|
2. **Files** (originals, thumbs, profile, etc.), stored in the `UPLOAD_LOCATION` folder, more [info](/docs/administration/backup-and-restore#asset-types-and-storage-locations).
|
||||||
|
|
||||||
To remove the **Metadata** you can stop Immich and delete the volume.
|
|
||||||
|
|
||||||
:::warning
|
:::warning
|
||||||
This will destroy your database and reset your instance, meaning that you start from scratch.
|
This will destroy your database and reset your instance, meaning that you start from scratch.
|
||||||
:::
|
:::
|
||||||
@@ -309,13 +375,16 @@ This will destroy your database and reset your instance, meaning that you start
|
|||||||
docker compose down -v
|
docker compose down -v
|
||||||
```
|
```
|
||||||
|
|
||||||
|
After removing the containers and volumes, there are a few directories that need to be deleted to reset Immich to a new installation. Once they are deleted, Immich can be started back up and will be a fresh installation.
|
||||||
|
|
||||||
|
- `DB_DATA_LOCATION` contains the database, media info, and settings.
|
||||||
|
- `UPLOAD_LOCATION` contains all the media uploaded to Immich.
|
||||||
|
|
||||||
:::note Portainer
|
:::note Portainer
|
||||||
If you use portainer, bring down the stack in portainer. Go into the volumes section
|
If you use Portainer, bring down the stack in Portainer. Go into the volumes section
|
||||||
and remove all the volumes related to immcih then restart the stack.
|
and remove all the volumes related to Immich, then restart the stack.
|
||||||
:::
|
:::
|
||||||
|
|
||||||
After removing the containers and volumes, the **Files** should be removed from the `UPLOAD_LOCATION` to provide a clean start.
|
|
||||||
|
|
||||||
### Why does the machine learning service report workers crashing?
|
### Why does the machine learning service report workers crashing?
|
||||||
|
|
||||||
:::note
|
:::note
|
||||||
@@ -330,6 +399,47 @@ If it mentions SIGILL (note the lack of a K) or error code 132, it most likely m
|
|||||||
|
|
||||||
If your version of Immich is below 1.92.0 and the crash occurs after logs about tracing or exporting a model, consider either upgrading or disabling the Tag Objects job.
|
If your version of Immich is below 1.92.0 and the crash occurs after logs about tracing or exporting a model, consider either upgrading or disabling the Tag Objects job.
|
||||||
|
|
||||||
### Why does Immich log migration errors on startup?
|
## Database
|
||||||
|
|
||||||
Sometimes Immich logs errors such as "duplicate key value violates unique constraint" or "column (...) of relation (...) already exists". Because of Immich's container structure, this error can be seen when both immich and immich-microservices start at the same time and attempt to migrate or create the database structure. Since the database migration is run sequentially and inside of transactions, this error message does not cause harm to your installation of Immich and can safely be ignored. If needed, you can manually restart Immich by running `docker restart immich immich-microservices`.
|
### Why am I getting database ownership errors?
|
||||||
|
|
||||||
|
If you get database errors such as `FATAL: data directory "/var/lib/postgresql/data" has wrong ownership` upon database startup, this is likely due to an issue with your filesystem.
|
||||||
|
NTFS, exFAT, and FAT32 filesystems are not supported. See [here](/docs/install/environment-variables#supported-filesystems) for more details.
|
||||||
|
|
||||||
|
### How can I verify the integrity of my database?
|
||||||
|
|
||||||
|
If you installed Immich using v1.104.0 or later, you likely have database checksums enabled by default. You can check this by running the following command.
|
||||||
|
A result of `on` means that checksums are enabled.
|
||||||
|
|
||||||
|
<details>
|
||||||
|
<summary>Check if checksums are enabled</summary>
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker exec -it immich_postgres psql --dbname=immich --username=<DB_USERNAME> --command="show data_checksums"
|
||||||
|
data_checksums
|
||||||
|
----------------
|
||||||
|
on
|
||||||
|
(1 row)
|
||||||
|
```
|
||||||
|
|
||||||
|
</details>
|
||||||
|
|
||||||
|
If checksums are enabled, you can check the status of the database with the following command. A normal result is all zeroes.
|
||||||
|
|
||||||
|
<details>
|
||||||
|
<summary>Check for database corruption</summary>
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker exec -it immich_postgres psql --dbname=immich --username=<DB_USERNAME> --command="SELECT datname, checksum_failures, checksum_last_failure FROM pg_stat_database WHERE datname IS NOT NULL"
|
||||||
|
datname | checksum_failures | checksum_last_failure
|
||||||
|
-----------+-------------------+-----------------------
|
||||||
|
postgres | 0 |
|
||||||
|
immich | 0 |
|
||||||
|
template1 | 0 |
|
||||||
|
template0 | 0 |
|
||||||
|
(4 rows)
|
||||||
|
```
|
||||||
|
|
||||||
|
</details>
|
||||||
|
|
||||||
|
[huggingface]: https://huggingface.co/immich-app
|
||||||
|
|||||||
@@ -15,39 +15,47 @@ Immich saves [file paths in the database](https://github.com/immich-app/immich/d
|
|||||||
Refer to the official [postgres documentation](https://www.postgresql.org/docs/current/backup.html) for details about backing up and restoring a postgres database.
|
Refer to the official [postgres documentation](https://www.postgresql.org/docs/current/backup.html) for details about backing up and restoring a postgres database.
|
||||||
:::
|
:::
|
||||||
|
|
||||||
The recommended way to backup and restore the Immich database is to use the `pg_dumpall` command.
|
The recommended way to back up and restore the Immich database is to use the `pg_dumpall` command. When restoring, you need to delete the `DB_DATA_LOCATION` folder (if it exists) to reset the database.
|
||||||
|
|
||||||
|
:::caution
|
||||||
|
It is not recommended to directly back up the `DB_DATA_LOCATION` folder. Doing so while the database is running can lead to a corrupted backup that cannot be restored.
|
||||||
|
:::
|
||||||
|
|
||||||
<Tabs>
|
<Tabs>
|
||||||
<TabItem value="Linux system based Backup" label="Linux system based Backup" default>
|
<TabItem value="Linux system" label="Linux system" default>
|
||||||
|
|
||||||
```bash title='Bash'
|
```bash title='Backup'
|
||||||
docker exec -t immich_postgres pg_dumpall -c -U postgres | gzip > "/path/to/backup/dump.sql.gz"
|
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres | gzip > "/path/to/backup/dump.sql.gz"
|
||||||
```
|
```
|
||||||
|
|
||||||
```bash title='Restore'
|
```bash title='Restore'
|
||||||
docker compose down -v # CAUTION! Deletes all Immich data to start from scratch.
|
docker compose down -v # CAUTION! Deletes all Immich data to start from scratch.
|
||||||
|
# rm -rf DB_DATA_LOCATION # CAUTION! Deletes all Immich data to start from scratch.
|
||||||
docker compose pull # Update to latest version of Immich (if desired)
|
docker compose pull # Update to latest version of Immich (if desired)
|
||||||
docker compose create # Create Docker containers for Immich apps without running them.
|
docker compose create # Create Docker containers for Immich apps without running them.
|
||||||
docker start immich_postgres # Start Postgres server
|
docker start immich_postgres # Start Postgres server
|
||||||
sleep 10 # Wait for Postgres server to start up
|
sleep 10 # Wait for Postgres server to start up
|
||||||
gunzip < "/path/to/backup/dump.sql.gz" | docker exec -i immich_postgres psql -U postgres -d immich # Restore Backup
|
gunzip < "/path/to/backup/dump.sql.gz" \
|
||||||
|
| sed "s/SELECT pg_catalog.set_config('search_path', '', false);/SELECT pg_catalog.set_config('search_path', 'public, pg_catalog', true);/g" \
|
||||||
|
| docker exec -i immich_postgres psql --username=postgres # Restore Backup
|
||||||
docker compose up -d # Start remainder of Immich apps
|
docker compose up -d # Start remainder of Immich apps
|
||||||
```
|
```
|
||||||
|
|
||||||
</TabItem>
|
</TabItem>
|
||||||
<TabItem value="Windows system based Backup" label="Windows system based Backup">
|
<TabItem value="Windows system (PowerShell)" label="Windows system (PowerShell)">
|
||||||
|
|
||||||
```powershell title='Backup'
|
```powershell title='Backup'
|
||||||
docker exec -t immich_postgres pg_dumpall -c -U postgres > "\path\to\backup\dump.sql"
|
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres > "\path\to\backup\dump.sql"
|
||||||
```
|
```
|
||||||
|
|
||||||
```powershell title='Restore'
|
```powershell title='Restore'
|
||||||
docker compose down -v # CAUTION! Deletes all Immich data to start from scratch.
|
docker compose down -v # CAUTION! Deletes all Immich data to start from scratch.
|
||||||
|
# Remove-Item -Recurse -Force DB_DATA_LOCATION # CAUTION! Deletes all Immich data to start from scratch.
|
||||||
docker compose pull # Update to latest version of Immich (if desired)
|
docker compose pull # Update to latest version of Immich (if desired)
|
||||||
docker compose create # Create Docker containers for Immich apps without running them.
|
docker compose create # Create Docker containers for Immich apps without running them.
|
||||||
docker start immich_postgres # Start Postgres server
|
docker start immich_postgres # Start Postgres server
|
||||||
sleep 10 # Wait for Postgres server to start up
|
sleep 10 # Wait for Postgres server to start up
|
||||||
gc "C:\path\to\backup\dump.sql" | docker exec -i immich_postgres psql -U postgres -d immich # Restore Backup
|
gc "C:\path\to\backup\dump.sql" | docker exec -i immich_postgres psql --username=postgres # Restore Backup
|
||||||
docker compose up -d # Start remainder of Immich apps
|
docker compose up -d # Start remainder of Immich apps
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -56,6 +64,10 @@ docker compose up -d # Start remainder of Immich apps
|
|||||||
|
|
||||||
Note that for the database restore to proceed properly, it requires a completely fresh install (i.e. the Immich server has never run since creating the Docker containers). If the Immich app has run, Postgres conflicts may be encountered upon database restoration (relation already exists, violated foreign key constraints, multiple primary keys, etc.).
|
Note that for the database restore to proceed properly, it requires a completely fresh install (i.e. the Immich server has never run since creating the Docker containers). If the Immich app has run, Postgres conflicts may be encountered upon database restoration (relation already exists, violated foreign key constraints, multiple primary keys, etc.).
|
||||||
|
|
||||||
|
:::tip
|
||||||
|
Some deployment methods make it difficult to start the database without also starting the server or microservices. In these cases, you may set the environment variable `DB_SKIP_MIGRATIONS=true` before starting the services. This will prevent the server from running migrations that interfere with the restore process. Note that both the server and microservices must have this variable set to prevent the migrations from running. Be sure to remove this variable and restart the services after the database is restored.
|
||||||
|
:::
|
||||||
|
|
||||||
The database dumps can also be automated (using [this image](https://github.com/prodrigestivill/docker-postgres-backup-local)) by editing the docker compose file to match the following:
|
The database dumps can also be automated (using [this image](https://github.com/prodrigestivill/docker-postgres-backup-local)) by editing the docker compose file to match the following:
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
@@ -63,15 +75,17 @@ services:
|
|||||||
...
|
...
|
||||||
backup:
|
backup:
|
||||||
container_name: immich_db_dumper
|
container_name: immich_db_dumper
|
||||||
image: prodrigestivill/postgres-backup-local
|
image: prodrigestivill/postgres-backup-local:14
|
||||||
env_file:
|
env_file:
|
||||||
- .env
|
- .env
|
||||||
environment:
|
environment:
|
||||||
POSTGRES_HOST: database
|
POSTGRES_HOST: database
|
||||||
POSTGRES_DB: ${DB_DATABASE_NAME}
|
POSTGRES_CLUSTER: 'TRUE'
|
||||||
POSTGRES_USER: ${DB_USERNAME}
|
POSTGRES_USER: ${DB_USERNAME}
|
||||||
POSTGRES_PASSWORD: ${DB_PASSWORD}
|
POSTGRES_PASSWORD: ${DB_PASSWORD}
|
||||||
|
POSTGRES_DB: ${DB_DATABASE_NAME}
|
||||||
SCHEDULE: "@daily"
|
SCHEDULE: "@daily"
|
||||||
|
POSTGRES_EXTRA_OPTS: '--clean --if-exists'
|
||||||
BACKUP_DIR: /db_dumps
|
BACKUP_DIR: /db_dumps
|
||||||
volumes:
|
volumes:
|
||||||
- ./db_dumps:/db_dumps
|
- ./db_dumps:/db_dumps
|
||||||
@@ -82,17 +96,29 @@ services:
|
|||||||
Then you can restore with the same command but pointed at the latest dump.
|
Then you can restore with the same command but pointed at the latest dump.
|
||||||
|
|
||||||
```bash title='Automated Restore'
|
```bash title='Automated Restore'
|
||||||
gunzip < db_dumps/last/immich-latest.sql.gz | docker exec -i immich_postgres psql -U postgres -d immich
|
gunzip < db_dumps/last/immich-latest.sql.gz \
|
||||||
|
| sed "s/SELECT pg_catalog.set_config('search_path', '', false);/SELECT pg_catalog.set_config('search_path', 'public, pg_catalog', true);/g" \
|
||||||
|
| docker exec -i immich_postgres psql --username=postgres
|
||||||
```
|
```
|
||||||
|
|
||||||
|
:::note
|
||||||
|
If you see the error `ERROR: type "earth" does not exist`, or you have problems with Reverse Geocoding after a restore, add the following `sed` fragment to your restore command.
|
||||||
|
|
||||||
|
Example: `gunzip < "/path/to/backup/dump.sql.gz" | sed "s/SELECT pg_catalog.set_config('search_path', '', false);/SELECT pg_catalog.set_config('search_path', 'public, pg_catalog', true);/g" | docker exec -i immich_postgres psql --username=postgres`
|
||||||
|
:::
|
||||||
|
|
||||||
## Filesystem
|
## Filesystem
|
||||||
|
|
||||||
Immich stores two types of content in the filesystem: (1) original, unmodified content, and (2) generated content. Only the original content needs to be backed-up, which includes the following folders:
|
Immich stores two types of content in the filesystem: (1) original, unmodified assets (photos and videos), and (2) generated content. Only the original content needs to be backed up, which is stored in the following folders:
|
||||||
|
|
||||||
1. `UPLOAD_LOCATION/library`
|
1. `UPLOAD_LOCATION/library`
|
||||||
2. `UPLOAD_LOCATION/upload`
|
2. `UPLOAD_LOCATION/upload`
|
||||||
3. `UPLOAD_LOCATION/profile`
|
3. `UPLOAD_LOCATION/profile`
|
||||||
|
|
||||||
|
:::caution
|
||||||
|
If you moved some of these folders onto a different storage device, such as `profile/`, make sure to adjust the backup path to match your setup.
|
||||||
|
:::
|
||||||
|
|
||||||
### Asset Types and Storage Locations
|
### Asset Types and Storage Locations
|
||||||
|
|
||||||
Some storage locations are impacted by the Storage Template. See below for more details.
|
Some storage locations are impacted by the Storage Template. See below for more details.
|
||||||
@@ -101,7 +127,8 @@ Some storage locations are impacted by the Storage Template. See below for more
|
|||||||
<TabItem value="Storage Template Off (Default)." label="Storage Template Off (Default)." default>
|
<TabItem value="Storage Template Off (Default)." label="Storage Template Off (Default)." default>
|
||||||
|
|
||||||
:::note
|
:::note
|
||||||
`UPLOAD_LOCATION/library` folder is not used by default on new machines running version 1.92.0. These are if the system administrator activated the storage template engine, for [more info](https://github.com/immich-app/immich/releases/tag/v1.92.0#:~:text=the%20partner%E2%80%99s%20assets.-,Hardening%20storage%20template).
|
The `UPLOAD_LOCATION/library` folder is not used by default on new machines running version 1.92.0. It is used only if the system administrator activated the storage template engine,
|
||||||
|
for more info read the [release notes](https://github.com/immich-app/immich/releases/tag/v1.92.0#:~:text=the%20partner%E2%80%99s%20assets.-,Hardening%20storage%20template).
|
||||||
:::
|
:::
|
||||||
|
|
||||||
**1. User-Specific Folders:**
|
**1. User-Specific Folders:**
|
||||||
@@ -113,16 +140,16 @@ Some storage locations are impacted by the Storage Template. See below for more
|
|||||||
|
|
||||||
- **Source Assets:**
|
- **Source Assets:**
|
||||||
- Original assets uploaded through the browser interface & mobile & CLI.
|
- Original assets uploaded through the browser interface & mobile & CLI.
|
||||||
- Stored in `/library/upload/<userID>`.
|
- Stored in `UPLOAD_LOCATION/upload/<userID>`.
|
||||||
- **Avatar Images:**
|
- **Avatar Images:**
|
||||||
- User profile images.
|
- User profile images.
|
||||||
- Stored in `/library/profile/<userID>`.
|
- Stored in `UPLOAD_LOCATION/profile/<userID>`.
|
||||||
- **Thumbs Images:**
|
- **Thumbs Images:**
|
||||||
- Preview images (blurred, small, large) for each asset and thumbnails for recognized faces.
|
- Preview images (small thumbnails and large previews) for each asset and thumbnails for recognized faces.
|
||||||
- Stored in `/library/thumbs/<userID>`.
|
- Stored in `UPLOAD_LOCATION/thumbs/<userID>`.
|
||||||
- **Encoded Assets:**
|
- **Encoded Assets:**
|
||||||
- By default, unless otherwise specified re-encoded video assets for wider compatibility.
|
- Videos that have been re-encoded from the original for wider compatibility. The original is not removed.
|
||||||
- Stored in `/library/encoded-video/<userID>`.
|
- Stored in `UPLOAD_LOCATION/encoded-video/<userID>`.
|
||||||
|
|
||||||
</TabItem>
|
</TabItem>
|
||||||
<TabItem value="Storage Template On" label="Storage Template On">
|
<TabItem value="Storage Template On" label="Storage Template On">
|
||||||
@@ -130,34 +157,34 @@ Some storage locations are impacted by the Storage Template. See below for more
|
|||||||
:::note
|
:::note
|
||||||
If you choose to activate the storage template engine, it will move all assets to `UPLOAD_LOCATION/library/<userID>`.
|
If you choose to activate the storage template engine, it will move all assets to `UPLOAD_LOCATION/library/<userID>`.
|
||||||
|
|
||||||
When you turn off the storage template engine, it will leave the assets in `UPLOAD_LOCATION/library/<userID>` and will not return them to `/library/upload`.
|
When you turn off the storage template engine, it will leave the assets in `UPLOAD_LOCATION/library/<userID>` and will not return them to `UPLOAD_LOCATION/upload`.
|
||||||
**New assets** will be saved to `/library/upload`.
|
**New assets** will be saved to `UPLOAD_LOCATION/upload`.
|
||||||
:::
|
:::
|
||||||
|
|
||||||
**1. User-Specific Folders:**
|
**1. User-Specific Folders:**
|
||||||
|
|
||||||
- Each user has a unique string representing them.
|
- Each user has a unique string representing them.
|
||||||
- The main user is "Admin" (but only for `UPLOAD_LOCATION/library`)
|
- The administrator can set a Storage Label for a user, which will be used instead of `<userID>` for the `library/` folder.
|
||||||
- Other users have different string identifiers.
|
- The Admin has a default storage label of `admin`.
|
||||||
- You can find your user ID in Account Account Settings -> Account -> User ID.
|
- You can find your user ID and Storage Label in Account Settings -> Account -> User ID.
|
||||||
|
|
||||||
**2. Asset Types and Storage Locations:**
|
**2. Asset Types and Storage Locations:**
|
||||||
|
|
||||||
- **Source Assets:**
|
- **Source Assets:**
|
||||||
- Original assets uploaded through the browser interface & mobile & CLI.
|
- Original assets uploaded through the browser interface, mobile, and CLI.
|
||||||
- Stored in `UPLOAD_LOCATION/library/<userID>`.
|
- Stored in `UPLOAD_LOCATION/library/<userID>`.
|
||||||
- **Avatar Images:**
|
- **Avatar Images:**
|
||||||
- User profile images.
|
- User profile images.
|
||||||
- Stored in `/library/profile/<userID>`.
|
- Stored in `UPLOAD_LOCATION/profile/<userID>`.
|
||||||
- **Thumbs Images:**
|
- **Thumbs Images:**
|
||||||
- Preview images (blurred, small, large) for each asset and thumbnails for recognized faces.
|
- Preview images (blurred, small, large) for each asset and thumbnails for recognized faces.
|
||||||
- Stored in `/library/thumbs/<userID>`.
|
- Stored in `UPLOAD_LOCATION/thumbs/<userID>`.
|
||||||
- **Encoded Assets:**
|
- **Encoded Assets:**
|
||||||
- By default, unless otherwise specified re-encoded video assets for wider compatibility .
|
- Videos that have been re-encoded from the original for wider compatibility. The original is not removed.
|
||||||
- Stored in `/library/encoded-video/<userID>`.
|
- Stored in `UPLOAD_LOCATION/encoded-video/<userID>`.
|
||||||
- **Files in Upload Queue (Mobile):**
|
- **Files in Upload Queue (Mobile):**
|
||||||
- Files uploaded through mobile apps.
|
- Files uploaded through mobile apps.
|
||||||
- Temporarily located in `/library/upload/<userID>`.
|
- Temporarily located in `UPLOAD_LOCATION/upload/<userID>`.
|
||||||
- Transferred to `UPLOAD_LOCATION/library/<userID>` upon successful upload.
|
- Transferred to `UPLOAD_LOCATION/library/<userID>` upon successful upload.
|
||||||
|
|
||||||
</TabItem>
|
</TabItem>
|
||||||
|
|||||||
|
After Width: | Height: | Size: 55 KiB |
BIN
docs/docs/administration/img/google-redirect-uris-example.webp
Normal file
|
After Width: | Height: | Size: 11 KiB |
|
Before Width: | Height: | Size: 57 KiB |
BIN
docs/docs/administration/img/repair-page-1.png
Normal file
|
After Width: | Height: | Size: 68 KiB |
BIN
docs/docs/administration/img/repair-page.png
Normal file
|
After Width: | Height: | Size: 54 KiB |
BIN
docs/docs/administration/img/server-stats.png
Normal file
|
After Width: | Height: | Size: 60 KiB |
55
docs/docs/administration/jobs-workers.md
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
# Jobs and Workers
|
||||||
|
|
||||||
|
## Workers
|
||||||
|
|
||||||
|
### Architecture
|
||||||
|
|
||||||
|
The `immich-server` container contains multiple workers:
|
||||||
|
|
||||||
|
- `api`: responds to API requests for data and files for the web and mobile app.
|
||||||
|
- `microservices`: handles most other work, such as thumbnail generation and video encoding, in the form of _jobs_. Simply put, a job is a request to process data in the background.
|
||||||
|
|
||||||
|
## Split workers
|
||||||
|
|
||||||
|
If you prefer to throttle or distribute the workers, you can do this using the [environment variables](/docs/install/environment-variables) to specify which container should pick up which tasks.
|
||||||
|
|
||||||
|
For example, for a simple setup with one container for the Web/API and one for all other microservices, you can do the following:
|
||||||
|
|
||||||
|
Copy the entire `immich-server` block as a new service and make the following changes to the **copy**:
|
||||||
|
|
||||||
|
```diff
|
||||||
|
- immich-server:
|
||||||
|
- container_name: immich_server
|
||||||
|
...
|
||||||
|
- ports:
|
||||||
|
- - 2283:3001
|
||||||
|
+ immich-microservices:
|
||||||
|
+ container_name: immich_microservices
|
||||||
|
```
|
||||||
|
|
||||||
|
Once you have two copies of the immich-server service, make the following changes to each one. This will allow one container to only serve the web UI and API, and the other one to handle all other tasks.
|
||||||
|
|
||||||
|
```diff
|
||||||
|
services:
|
||||||
|
immich-server:
|
||||||
|
...
|
||||||
|
+ environment:
|
||||||
|
+ IMMICH_WORKERS_INCLUDE: 'api'
|
||||||
|
|
||||||
|
immich-microservices:
|
||||||
|
...
|
||||||
|
+ environment:
|
||||||
|
+ IMMICH_WORKERS_EXCLUDE: 'api'
|
||||||
|
```
|
||||||
|
|
||||||
|
## Jobs
|
||||||
|
|
||||||
|
When a new asset is uploaded it kicks off a series of jobs, which include metadata extraction, thumbnail generation, machine learning tasks, and storage template migration, if enabled. To view the status of a job navigate to the Administration -> Jobs page.
|
||||||
|
|
||||||
|
Additionally, some jobs run on a schedule, which is every night at midnight. This schedule, with the exception of [External Libraries](/docs/features/libraries) scanning, cannot be changed.
|
||||||
|
|
||||||
|
:::info
|
||||||
|
Storage Migration job can be run after changing the [Storage Template](/docs/administration/storage-template.mdx), in order to apply the change to the existing library.
|
||||||
|
:::
|
||||||
|
|
||||||
|
<img src={require('./img/admin-jobs.png').default} width="80%" title="Admin jobs" />
|
||||||
@@ -1,9 +0,0 @@
|
|||||||
# Jobs
|
|
||||||
|
|
||||||
Several Immich functionalities are implemented as jobs, which run in the background. To view the status of a job navigate to the Administration Screen, and then the `Jobs` page.
|
|
||||||
|
|
||||||

|
|
||||||
|
|
||||||
:::info
|
|
||||||
Storage Migration job can be run after changing the [Storage Template](/docs/administration/storage-template.mdx), in order to apply the change to the existing library.
|
|
||||||
:::
|
|
||||||
@@ -52,8 +52,8 @@ Before enabling OAuth in Immich, a new client application needs to be configured
|
|||||||
|
|
||||||
Hostname
|
Hostname
|
||||||
|
|
||||||
- `https://immich.example.com/auth/login`)
|
- `https://immich.example.com/auth/login`
|
||||||
- `https://immich.example.com/user-settings`)
|
- `https://immich.example.com/user-settings`
|
||||||
|
|
||||||
## Enable OAuth
|
## Enable OAuth
|
||||||
|
|
||||||
@@ -67,14 +67,20 @@ Once you have a new OAuth client application configured, Immich can be configure
|
|||||||
| Client Secret | string | (required) | Required. Client Secret (previous step) |
|
| Client Secret | string | (required) | Required. Client Secret (previous step) |
|
||||||
| Scope | string | openid email profile | Full list of scopes to send with the request (space delimited) |
|
| Scope | string | openid email profile | Full list of scopes to send with the request (space delimited) |
|
||||||
| Signing Algorithm | string | RS256 | The algorithm used to sign the id token (examples: RS256, HS256) |
|
| Signing Algorithm | string | RS256 | The algorithm used to sign the id token (examples: RS256, HS256) |
|
||||||
| Storage Label Claim | string | preferred_username | Claim mapping for the user's storage label |
|
| Storage Label Claim | string | preferred_username | Claim mapping for the user's storage label**¹** |
|
||||||
| Storage Quota Claim | string | immich_quota | Claim mapping for the user's storage |
|
| Storage Quota Claim | string | immich_quota | Claim mapping for the user's storage**¹** |
|
||||||
| Default Storage Quota (GiB) | number | 0 | Default quota for user without storage quota claim (Enter 0 for unlimited quota) |
|
| Default Storage Quota (GiB) | number | 0 | Default quota for user without storage quota claim (Enter 0 for unlimited quota) |
|
||||||
| Button Text | string | Login with OAuth | Text for the OAuth button on the web |
|
| Button Text | string | Login with OAuth | Text for the OAuth button on the web |
|
||||||
| Auto Register | boolean | true | When true, will automatically register a user the first time they sign in |
|
| Auto Register | boolean | true | When true, will automatically register a user the first time they sign in |
|
||||||
| [Auto Launch](#auto-launch) | boolean | false | When true, will skip the login page and automatically start the OAuth login process |
|
| [Auto Launch](#auto-launch) | boolean | false | When true, will skip the login page and automatically start the OAuth login process |
|
||||||
| [Mobile Redirect URI Override](#mobile-redirect-uri) | URL | (empty) | Http(s) alternative mobile redirect URI |
|
| [Mobile Redirect URI Override](#mobile-redirect-uri) | URL | (empty) | Http(s) alternative mobile redirect URI |
|
||||||
|
|
||||||
|
:::note Claim Options [1]
|
||||||
|
|
||||||
|
Claim is only used on user creation and not synchronized after that.
|
||||||
|
|
||||||
|
:::
|
||||||
|
|
||||||
:::info
|
:::info
|
||||||
The Issuer URL should look something like the following, and return a valid json document.
|
The Issuer URL should look something like the following, and return a valid json document.
|
||||||
|
|
||||||
@@ -104,8 +110,66 @@ Immich has a route (`/api/oauth/mobile-redirect`) that is already configured to
|
|||||||
|
|
||||||
## Example Configuration
|
## Example Configuration
|
||||||
|
|
||||||
|
<details>
|
||||||
|
<summary>Authentik Example</summary>
|
||||||
|
|
||||||
|
### Authentik Example
|
||||||
|
|
||||||
Here's an example of OAuth configured for Authentik:
|
Here's an example of OAuth configured for Authentik:
|
||||||
|
|
||||||

|
Configuration of Authorised redirect URIs (Authentik OAuth2/OpenID Provider)
|
||||||
|
|
||||||
|
<img src={require('./img/authentik-redirect-uris-example.webp').default} width='70%' title="Authentik authorised redirect URIs" />
|
||||||
|
|
||||||
|
Configuration of OAuth in Immich System Settings
|
||||||
|
|
||||||
|
| Setting | Value |
|
||||||
|
| ---------------------------- | ---------------------------------------------------------------------------------- |
|
||||||
|
| Issuer URL | `https://example.immich.app/application/o/immich/.well-known/openid-configuration` |
|
||||||
|
| Client ID | AFCj2rM1f4rps**\*\*\*\***\***\*\*\*\***lCLEum6hH9... |
|
||||||
|
| Client Secret | 0v89FXkQOWO\***\*\*\*\*\***\*\*\***\*\*\*\*\***mprbvXD549HH6s1iw... |
|
||||||
|
| Scope | openid email profile |
|
||||||
|
| Signing Algorithm | RS256 |
|
||||||
|
| Storage Label Claim | preferred_username |
|
||||||
|
| Storage Quota Claim | immich_quota |
|
||||||
|
| Default Storage Quota (GiB) | 0 (0 for unlimited quota) |
|
||||||
|
| Button Text | Sign in with Authentik (optional) |
|
||||||
|
| Auto Register | Enabled (optional) |
|
||||||
|
| Auto Launch | Enabled (optional) |
|
||||||
|
| Mobile Redirect URI Override | Disable |
|
||||||
|
| Mobile Redirect URI | |
|
||||||
|
|
||||||
|
</details>
|
||||||
|
|
||||||
|
<details>
|
||||||
|
<summary>Google Example</summary>
|
||||||
|
|
||||||
|
### Google Example
|
||||||
|
|
||||||
|
Here's an example of OAuth configured for Google:
|
||||||
|
|
||||||
|
Configuration of Authorised redirect URIs (Google Console)
|
||||||
|
|
||||||
|
<img src={require('./img/google-redirect-uris-example.webp').default} width='50%' title="Google authorised redirect URIs" />
|
||||||
|
|
||||||
|
Configuration of OAuth in Immich System Settings
|
||||||
|
|
||||||
|
| Setting | Value |
|
||||||
|
| ---------------------------- | ------------------------------------------------------------------------------------------------------ |
|
||||||
|
| Issuer URL | [https://accounts.google.com](https://accounts.google.com) |
|
||||||
|
| Client ID | 7\***\*\*\*\*\*\*\***\*\*\***\*\*\*\*\*\*\***vuls.apps.googleusercontent.com |
|
||||||
|
| Client Secret | G\***\*\*\*\*\*\*\***\*\*\***\*\*\*\*\*\*\***OO |
|
||||||
|
| Scope | openid email profile |
|
||||||
|
| Signing Algorithm | RS256 |
|
||||||
|
| Storage Label Claim | preferred_username |
|
||||||
|
| Storage Quota Claim | immich_quota |
|
||||||
|
| Default Storage Quota (GiB) | 0 (0 for unlimited quota) |
|
||||||
|
| Button Text | Sign in with Google (optional) |
|
||||||
|
| Auto Register | Enabled (optional) |
|
||||||
|
| Auto Launch | Enabled |
|
||||||
|
| Mobile Redirect URI Override | Enabled (required) |
|
||||||
|
| Mobile Redirect URI | [https://demo.immich.app/api/oauth/mobile-redirect](https://demo.immich.app/api/oauth/mobile-redirect) |
|
||||||
|
|
||||||
|
</details>
|
||||||
|
|
||||||
[oidc]: https://openid.net/connect/
|
[oidc]: https://openid.net/connect/
|
||||||
|
|||||||
@@ -1,32 +0,0 @@
|
|||||||
# Password Login
|
|
||||||
|
|
||||||
An overview of password login and related settings for Immich.
|
|
||||||
|
|
||||||
## Enable/Disable
|
|
||||||
|
|
||||||
Immich supports password login, which is enabled by default. The preferred way to disable it is via the [Administration Page](#administration-page), although it can also be changed via a [Server Command](#server-command) as well.
|
|
||||||
|
|
||||||
### Administration Page
|
|
||||||
|
|
||||||
To toggle the password login setting via the web, navigate to the "Administration", expand "Password Authentication", toggle the "Enabled" switch, and press "Save".
|
|
||||||
|
|
||||||

|
|
||||||
|
|
||||||
### Server Command
|
|
||||||
|
|
||||||
There are two [Server Commands](/docs/administration/server-commands.md) for password login:
|
|
||||||
|
|
||||||
1. `enable-password-login`
|
|
||||||
2. `disable-password-login`
|
|
||||||
|
|
||||||
See [Server Commands](/docs/administration/server-commands.md) for more details about how to run them.
|
|
||||||
|
|
||||||
## Password Reset
|
|
||||||
|
|
||||||
### Admin
|
|
||||||
|
|
||||||
To reset the administrator password, use the `reset-admin-password` [Server Command](/docs/administration/server-commands.md).
|
|
||||||
|
|
||||||
### User
|
|
||||||
|
|
||||||
Immich does not currently support self-service password reset. However, the administration can reset passwords for other users. See [User Management: Password Reset](/docs/administration/user-management.mdx#password-reset) for more information about how to do this.
|
|
||||||
69
docs/docs/administration/postgres-standalone.md
Normal file
@@ -0,0 +1,69 @@
|
|||||||
|
# Pre-existing Postgres
|
||||||
|
|
||||||
|
While not officially recommended, it is possible to run Immich using a pre-existing Postgres server. To use this setup, you should have a baseline level of familiarity with Postgres and the Linux command line. If you do not have these, we recommend using the default setup with a dedicated Postgres container.
|
||||||
|
|
||||||
|
By default, Immich expects superuser permission on the Postgres database and requires certain extensions to be installed. This guide outlines the steps required to prepare a pre-existing Postgres server to be used by Immich.
|
||||||
|
|
||||||
|
:::tip
|
||||||
|
Running with a pre-existing Postgres server can unlock powerful administrative features, including logical replication and streaming write-ahead log backups using programs like pgBackRest or Barman.
|
||||||
|
:::
|
||||||
|
|
||||||
|
## Prerequisites
|
||||||
|
|
||||||
|
You must install pgvecto.rs into your instance of Postgres using their [instructions][vectors-install]. After installation, add `shared_preload_libraries = 'vectors.so'` to your `postgresql.conf`. If you already have some `shared_preload_libraries` set, you can separate each extension with a comma. For example, `shared_preload_libraries = 'pg_stat_statements, vectors.so'`.
|
||||||
|
|
||||||
|
:::note
|
||||||
|
Immich is known to work with Postgres versions 14, 15, and 16. Earlier versions are unsupported.
|
||||||
|
|
||||||
|
Make sure the installed version of pgvecto.rs is compatible with your version of Immich. For example, if your Immich version uses the dedicated database image `tensorchord/pgvecto-rs:pg14-v0.2.1`, you must install pgvecto.rs `>= 0.2.1, < 0.3.0`.
|
||||||
|
:::
|
||||||
|
|
||||||
|
## Specifying the connection URL
|
||||||
|
|
||||||
|
You can connect to your pre-existing Postgres server by setting the `DB_URL` environment variable in the `.env` file.
|
||||||
|
|
||||||
|
```
|
||||||
|
DB_URL='postgresql://immichdbusername:immichdbpassword@postgreshost:postgresport/immichdatabasename'
|
||||||
|
|
||||||
|
# require a SSL connection to Postgres
|
||||||
|
# DB_URL='postgresql://immichdbusername:immichdbpassword@postgreshost:postgresport/immichdatabasename?sslmode=require'
|
||||||
|
|
||||||
|
# require a SSL connection, but don't enforce checking the certificate name
|
||||||
|
# DB_URL='postgresql://immichdbusername:immichdbpassword@postgreshost:postgresport/immichdatabasename?sslmode=require&sslmode=no-verify'
|
||||||
|
```
|
||||||
|
|
||||||
|
## With superuser permission
|
||||||
|
|
||||||
|
Typically Immich expects superuser permission in the database, which you can grant by running `ALTER USER <immichdbusername> WITH SUPERUSER;` at the `psql` console. If you prefer not to grant superuser permissions, follow the instructions in the next section.
|
||||||
|
|
||||||
|
## Without superuser permission
|
||||||
|
|
||||||
|
:::caution
|
||||||
|
This method is recommended for **advanced users only** and often requires manual intervention when updating Immich.
|
||||||
|
:::
|
||||||
|
|
||||||
|
Immich can run without superuser permissions by following the below instructions at the `psql` prompt to prepare the database.
|
||||||
|
|
||||||
|
```sql title="Set up Postgres for Immich"
|
||||||
|
CREATE DATABASE <immichdatabasename>;
|
||||||
|
\c <immichdatabasename>
|
||||||
|
BEGIN;
|
||||||
|
ALTER DATABASE <immichdatabasename> OWNER TO <immichdbusername>;
|
||||||
|
CREATE EXTENSION vectors;
|
||||||
|
CREATE EXTENSION earthdistance CASCADE;
|
||||||
|
ALTER DATABASE <immichdatabasename> SET search_path TO "$user", public, vectors;
|
||||||
|
ALTER SCHEMA vectors OWNER TO <immichdbusername>;
|
||||||
|
COMMIT;
|
||||||
|
```
|
||||||
|
|
||||||
|
### Updating pgvecto.rs
|
||||||
|
|
||||||
|
When installing a new version of pgvecto.rs, you will need to manually update the extension by connecting to the Immich database and running `ALTER EXTENSION vectors UPDATE;`.
|
||||||
|
|
||||||
|
### Common errors
|
||||||
|
|
||||||
|
#### Permission denied for view
|
||||||
|
|
||||||
|
If you get the error `driverError: error: permission denied for view pg_vector_index_stat`, you can fix this by connecting to the Immich database and running `GRANT SELECT ON TABLE pg_vector_index_stat TO <immichdbusername>;`.
|
||||||
|
|
||||||
|
[vectors-install]: https://docs.pgvecto.rs/getting-started/installation.html
|
||||||
27
docs/docs/administration/repair-page.md
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
# Repair Page
|
||||||
|
|
||||||
|
The repair page is designed to give information to the system administrator about files that are not tracked, or offline paths.
|
||||||
|
|
||||||
|
## Natural State
|
||||||
|
|
||||||
|
In this situation, everything is in its place and there is no problem that the system administrator should be aware of.
|
||||||
|
|
||||||
|
<img src={require('./img/repair-page.png').default} title="server statistic" />
|
||||||
|
|
||||||
|
## Any Other Situation
|
||||||
|
|
||||||
|
:::note RAM Usage
|
||||||
|
Several users report a situation where the page fails to load. In order to solve this problem you should try to allocate more RAM to Immich, if the problem continues, you should stop using the reverse proxy while loading the page.
|
||||||
|
:::
|
||||||
|
|
||||||
|
In any other situation, there are 3 different options that can appear:
|
||||||
|
|
||||||
|
- MATCHES - These files are matched by their checksums.
|
||||||
|
|
||||||
|
- OFFLINE PATHS - These files are the result of manually deleting files from immich or a failed file move in the past (losing track of a file).
|
||||||
|
|
||||||
|
- UNTRACKED FILES - These files are not tracked by the application. They can be the result of failed moves, interrupted uploads, or left behind due to a bug.
|
||||||
|
|
||||||
|
In addition, you can download the information from a page, mark everything (in order to check hashing) and correct the problem if a match is found in the hashing.
|
||||||
|
|
||||||
|
<img src={require('./img/repair-page-1.png').default} title="server statistic" />
|
||||||
@@ -1,29 +1,41 @@
|
|||||||
# Reverse Proxy
|
# Reverse Proxy
|
||||||
|
|
||||||
Users can deploy a custom reverse proxy that forwards requests to Immich. This way, the reverse proxy can handle TLS termination, load balancing, or other advanced features. All reverse proxies between Immich and the user must forward all headers and set the `Host`, `X-Forwarded-Host`, `X-Forwarded-Proto` and `X-Forwarded-For` headers to their appropriate values. Additionally, your reverse proxy should allow for big enough uploads. By following these practices, you ensure that all custom reverse proxies are fully compatible with Immich.
|
Users can deploy a custom reverse proxy that forwards requests to Immich. This way, the reverse proxy can handle TLS termination, load balancing, or other advanced features. All reverse proxies between Immich and the user must forward all headers and set the `Host`, `X-Real-IP`, `X-Forwarded-Proto` and `X-Forwarded-For` headers to their appropriate values. Additionally, your reverse proxy should allow for big enough uploads. By following these practices, you ensure that all custom reverse proxies are fully compatible with Immich.
|
||||||
|
|
||||||
|
:::note
|
||||||
|
The Repair page can take a long time to load. To avoid server timeouts or errors, we recommend specifying a timeout of at least 10 minutes on your proxy server.
|
||||||
|
:::
|
||||||
|
|
||||||
### Nginx example config
|
### Nginx example config
|
||||||
|
|
||||||
Below is an example config for nginx. Make sure to include `client_max_body_size 50000M;` also in a `http` block in `/etc/nginx/nginx.conf`.
|
Below is an example config for nginx. Make sure to set `public_url` to the front-facing URL of your instance, and `backend_url` to the path of the Immich server.
|
||||||
|
|
||||||
```nginx
|
```nginx
|
||||||
server {
|
server {
|
||||||
server_name <snip>
|
server_name <public_url>;
|
||||||
|
|
||||||
|
# allow large file uploads
|
||||||
client_max_body_size 50000M;
|
client_max_body_size 50000M;
|
||||||
|
|
||||||
location / {
|
# Set headers
|
||||||
proxy_pass http://<snip>:2283;
|
proxy_set_header Host $http_host;
|
||||||
proxy_set_header Host $http_host;
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
proxy_set_header X-Real-IP $remote_addr;
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
proxy_set_header X-Forwarded-Proto $scheme;
|
|
||||||
|
|
||||||
# http://nginx.org/en/docs/http/websocket.html
|
# enable websockets: http://nginx.org/en/docs/http/websocket.html
|
||||||
proxy_http_version 1.1;
|
proxy_http_version 1.1;
|
||||||
proxy_set_header Upgrade $http_upgrade;
|
proxy_set_header Upgrade $http_upgrade;
|
||||||
proxy_set_header Connection "upgrade";
|
proxy_set_header Connection "upgrade";
|
||||||
proxy_redirect off;
|
proxy_redirect off;
|
||||||
|
|
||||||
|
# set timeout
|
||||||
|
proxy_read_timeout 600s;
|
||||||
|
proxy_send_timeout 600s;
|
||||||
|
send_timeout 600s;
|
||||||
|
|
||||||
|
location / {
|
||||||
|
proxy_pass http://<backend_url>:2283;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
@@ -42,15 +54,13 @@ immich.example.org {
|
|||||||
|
|
||||||
Below is an example config for Apache2 site configuration.
|
Below is an example config for Apache2 site configuration.
|
||||||
|
|
||||||
```
|
```ApacheConf
|
||||||
<VirtualHost *:80>
|
<VirtualHost *:80>
|
||||||
ServerName <snip>
|
ServerName <snip>
|
||||||
ProxyRequests Off
|
ProxyRequests Off
|
||||||
|
# set timeout in seconds
|
||||||
ProxyPass / http://127.0.0.1:2283/ timeout=600 upgrade=websocket
|
ProxyPass / http://127.0.0.1:2283/ timeout=600 upgrade=websocket
|
||||||
ProxyPassReverse / http://127.0.0.1:2283/
|
ProxyPassReverse / http://127.0.0.1:2283/
|
||||||
ProxyPreserveHost On
|
ProxyPreserveHost On
|
||||||
|
|
||||||
</VirtualHost>
|
</VirtualHost>
|
||||||
```
|
```
|
||||||
|
|
||||||
**timeout:** is measured in seconds, and it is particularly useful when long operations are triggered (i.e. Repair), so the server doesn't return an error.
|
|
||||||
|
|||||||
@@ -77,7 +77,6 @@ immich-admin list-users
|
|||||||
deletedAt: null,
|
deletedAt: null,
|
||||||
updatedAt: 2023-09-21T15:42:28.129Z,
|
updatedAt: 2023-09-21T15:42:28.129Z,
|
||||||
oauthId: '',
|
oauthId: '',
|
||||||
memoriesEnabled: true
|
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
```
|
```
|
||||||
|
|||||||
13
docs/docs/administration/server-stats.md
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
# Server Stats
|
||||||
|
|
||||||
|
Server statistics to show the total number of videos, photos, and usage per user.
|
||||||
|
|
||||||
|
:::info
|
||||||
|
If a storage quota has been defined for the user, the usage number will be displayed as a percentage of the total storage quota allocated to them.
|
||||||
|
:::
|
||||||
|
|
||||||
|
:::info External library
|
||||||
|
External library is not included in the storage quota.
|
||||||
|
:::
|
||||||
|
|
||||||
|
<img src={require('./img/server-stats.png').default} title="server statistic" />
|
||||||
171
docs/docs/administration/system-settings.md
Normal file
@@ -0,0 +1,171 @@
|
|||||||
|
# System Settings
|
||||||
|
|
||||||
|
On the system settings page, the administrator can manage global settings for the Immich instance.
|
||||||
|
|
||||||
|
:::note
|
||||||
|
Viewing and modifying the system settings is restricted to the Administrator.
|
||||||
|
:::
|
||||||
|
|
||||||
|
:::tip
|
||||||
|
You can always return to the default settings by clicking the `Reset to default` button.
|
||||||
|
:::
|
||||||
|
|
||||||
|
## Job Settings
|
||||||
|
|
||||||
|
Using these settings, you can determine the amount of work that will run concurrently for each task in microservices. Some tasks can be set to higher values on computers with powerful hardware and storage with good I/O capabilities.
|
||||||
|
|
||||||
|
With higher concurrency, the host will work on more assets in parallel,
|
||||||
|
this advice improves throughput, not latency, for example, it will make Smart Search jobs process more quickly, but it won't make searching faster.
|
||||||
|
|
||||||
|
It is important to remember that jobs like Smart Search, Face Detection, Facial Recognition, and Transcode Videos require a **lot** of processing power and therefore do not exaggerate the amount of jobs because you're probably thoroughly overloading the server.
|
||||||
|
|
||||||
|
:::info Facial Recognition Concurrency
|
||||||
|
The Facial Recognition Concurrency value cannot be changed because
|
||||||
|
[DBSCAN](https://www.youtube.com/watch?v=RDZUdRSDOok) is traditionally sequential, but there are parallel implementations of it out there. Our implementation isn't parallel.
|
||||||
|
:::
|
||||||
|
|
||||||
|
## External Library
|
||||||
|
|
||||||
|
### Library watching (EXPERIMENTAL)
|
||||||
|
|
||||||
|
External libraries can automatically import changed files without a full rescan. It will import the file whenever the operating system reports a file change. If your photos are mounted over the network, this does not work.
|
||||||
|
|
||||||
|
### Periodic Scanning
|
||||||
|
|
||||||
|
You can define a custom interval for the trigger external library rescan under Administration -> Settings -> Library.
|
||||||
|
You can set the scanning interval using the preset or cron format. For more information please refer to e.g. [Crontab Guru](https://crontab.guru/).
|
||||||
|
|
||||||
|
## Logging
|
||||||
|
|
||||||
|
The default Immich log level is `Log` (commonly known as `Info`). The Immich administrator can choose a higher or lower log level according to personal preference or as requested by the Immich support team.
|
||||||
|
|
||||||
|
## Machine Learning Settings
|
||||||
|
|
||||||
|
Through this setting, you can manage all the settings related to machine learning in Immich, from the setting of remote machine learning to the model and its parameters
|
||||||
|
You can choose to disable a certain type of machine learning, for example smart search or facial recognition.
|
||||||
|
|
||||||
|
### Smart Search
|
||||||
|
|
||||||
|
The smart search settings are designed to allow the search tool to be used using [CLIP](https://openai.com/research/clip) models that [can be changed](/docs/FAQ#can-i-use-a-custom-clip-model), different models will necessarily give better results but may consume more processing power, when changing a model it is mandatory to re-run the
|
||||||
|
Smart Search job on all images to fully apply the change.
|
||||||
|
|
||||||
|
:::info Internet connection
|
||||||
|
Changing models requires a connection to the Internet to download the model.
|
||||||
|
After downloading, there is no need for Immich to connect to the network
|
||||||
|
Unless version checking has been enabled in the settings.
|
||||||
|
:::
|
||||||
|
|
||||||
|
### Facial Recognition
|
||||||
|
|
||||||
|
Under these settings, you can change the facial recognition settings
|
||||||
|
Editable settings:
|
||||||
|
|
||||||
|
- **Facial Recognition Model -** Models are listed in descending order of size. Larger models are slower and use more memory, but produce better results. Note that you must re-run the Face Detection job for all images upon changing a model.
|
||||||
|
- **Min Detection Score -** Minimum confidence score for a face to be detected from 0-1. Lower values will detect more faces but may result in false positives.
|
||||||
|
- **Max Recognition Distance -** Maximum distance between two faces to be considered the same person, ranging from 0-2. Lowering this can prevent labeling two people as the same person, while raising it can prevent labeling the same person as two different people. Note that it is easier to merge two people than to split one person in two, so err on the side of a lower threshold when possible.
|
||||||
|
- **Min Recognized Faces -** The minimum number of recognized faces for a person to be created (AKA: Core face). Increasing this makes Facial Recognition more precise at the cost of increasing the chance that a face is not assigned to a person.
|
||||||
|
|
||||||
|
:::info
|
||||||
|
When changing the values in Min Detection Score, Max Recognition Distance, and Min Recognized Faces.
|
||||||
|
You will have to restart **only** the job FACIAL RECOGNITION - ALL.
|
||||||
|
|
||||||
|
If you replace the Facial Recognition Model, you will have to run the job FACE DETECTION - ALL.
|
||||||
|
:::
|
||||||
|
|
||||||
|
:::tip identical twins
|
||||||
|
If you have twins, you might want to lower the Max Recognition Distance value, decreasing this a **bit** can make it distinguish between them.
|
||||||
|
:::
|
||||||
|
|
||||||
|
## Map & GPS Settings
|
||||||
|
|
||||||
|
### Map Settings
|
||||||
|
|
||||||
|
In these settings, you can change the appearance of the map in night and day modes according to your personal preference and according to the supported options.
|
||||||
|
The map can be adjusted via [OpenMapTiles](https://openmaptiles.org/styles/) for example.
|
||||||
|
|
||||||
|
### Reverse Geocoding Settings
|
||||||
|
|
||||||
|
Immich supports [Reverse Geocoding](/docs/features/reverse-geocoding) using data from the [GeoNames](https://www.geonames.org/) geographical database.
|
||||||
|
|
||||||
|
## OAuth Authentication
|
||||||
|
|
||||||
|
Immich supports OAuth Authentication. Read more about this feature and its configuration [here](/docs/administration/oauth).
|
||||||
|
|
||||||
|
## Password Authentication
|
||||||
|
|
||||||
|
The administrator can choose to disable login with username and password for the entire instance. This means that **no one**, including the system administrator, will be able to log using this method. If [OAuth Authentication](/docs/administration/oauth) is also disabled, no users will be able to login using **any** method. Changing this setting does not affect existing sessions, just new login attempts.
|
||||||
|
|
||||||
|
:::tip
|
||||||
|
You can always use the [Server CLI](/docs/administration/server-commands) to re-enable password login.
|
||||||
|
:::
|
||||||
|
|
||||||
|
## Server Settings
|
||||||
|
|
||||||
|
### External Domain
|
||||||
|
|
||||||
|
When set, will override the domain name used when viewing and copying a shared link.
|
||||||
|
|
||||||
|
### Welcome Message
|
||||||
|
|
||||||
|
The administrator can set a custom message on the login screen (the message will be displayed to all users).
|
||||||
|
|
||||||
|
## Storage Template
|
||||||
|
|
||||||
|
Immich supports a custom [Storage Template](/docs/administration/storage-template). Learn more about this feature and its configuration [here](/docs/administration/storage-template).
|
||||||
|
|
||||||
|
## Theme Settings
|
||||||
|
|
||||||
|
You can write custom CSS that will get loaded in the web application for all users. This enables administrators to change fonts, colors, and other styles.
|
||||||
|
|
||||||
|
For example:
|
||||||
|
|
||||||
|
```css title='Custom CSS'
|
||||||
|
p {
|
||||||
|
color: green;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Thumbnail Settings
|
||||||
|
|
||||||
|
By default Immich creates 3 thumbnails for each asset,
|
||||||
|
Blurred (thumbhash) , Small (webp) , and Large (jpeg), using these settings you can change the quality for the thumbnail files that are created.
|
||||||
|
|
||||||
|
**Small thumbnail resolution**
|
||||||
|
Used when viewing groups of photos (main timeline, album view, etc.). Higher resolutions can preserve more detail but take longer to encode, have larger file sizes, and can reduce app responsiveness.
|
||||||
|
|
||||||
|
**Large thumbnail resolution**
|
||||||
|
Used when viewing a single photo and for machine learning. Higher resolutions can preserve more detail but take longer to encode, have larger file sizes, and can reduce app responsiveness.
|
||||||
|
|
||||||
|
**Quality**
|
||||||
|
Thumbnail quality from 1-100. Higher is better for quality but produces larger files.
|
||||||
|
|
||||||
|
**Prefer wide gamut**
|
||||||
|
Use display p3 for thumbnails. This better preserves the vibrance of images with wide color spaces, but images may appear differently on old devices with an old browser version. Srgb images are kept as srgb to avoid color shifts.
|
||||||
|
|
||||||
|
:::tip
|
||||||
|
The default resolution for Large thumbnails can be lowered from 1440p (default) to 1080p or 720p to save storage space.
|
||||||
|
:::
|
||||||
|
|
||||||
|
## Trash Settings
|
||||||
|
|
||||||
|
In the system administrator's option to set a trash for deleted files, these files will remain in the trash until the deletion date 30 days (default) or as defined by the system administrator.
|
||||||
|
|
||||||
|
The trash can be disabled, however this is not recommended as future files that are deleted will be permanently deleted.
|
||||||
|
|
||||||
|
:::tip Keyboard shortcut for permanently deletion
|
||||||
|
You can select assets and press Ctrl + Del from the timeline for quick permanent deletion without the trash option.
|
||||||
|
:::
|
||||||
|
|
||||||
|
## User Settings
|
||||||
|
|
||||||
|
### Delete delay
|
||||||
|
|
||||||
|
The system administrator can choose to delete users through the administration panel, the system administrator can delete users immediately or alternatively delay the deletion for users (7 days by default) this action permanently delete a user's account and assets. The user deletion job runs at midnight to check for users that are ready for deletion. Changes to this setting will be evaluated at the next execution.
|
||||||
|
|
||||||
|
## Version Check
|
||||||
|
|
||||||
|
When this option is enabled the `immich-server` will periodically make requests to GitHub to check for new releases.
|
||||||
|
|
||||||
|
## Video Transcoding Settings
|
||||||
|
|
||||||
|
The system administrator can define parameters according to which video files will be converted to different formats (depending on the settings). The settings can be changed in depth, to learn more about the terminology used here, refer to FFmpeg documentation for [H.264](https://trac.ffmpeg.org/wiki/Encode/H.264) codec, [HEVC](https://trac.ffmpeg.org/wiki/Encode/H.265) codec and [VP9](https://trac.ffmpeg.org/wiki/Encode/VP9) codec.
|
||||||
12
docs/docs/community-guides.mdx
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
# Community Guides
|
||||||
|
|
||||||
|
This page lists community guides that are written around Immich, but not officially supported by the development team.
|
||||||
|
|
||||||
|
:::warning
|
||||||
|
This list comes with no guarantees about security, performance, reliability, or accuracy. Use at your own risk.
|
||||||
|
:::
|
||||||
|
|
||||||
|
import CommunityGuides from '../src/components/community-guides.tsx';
|
||||||
|
import React from 'react';
|
||||||
|
|
||||||
|
<CommunityGuides />
|
||||||
12
docs/docs/community-projects.mdx
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
# Community Projects
|
||||||
|
|
||||||
|
This page lists community projects that are built around Immich, but not officially supported by the development team.
|
||||||
|
|
||||||
|
:::warning
|
||||||
|
This list comes with no guarantees about security, performance, reliability, or accuracy. Use at your own risk.
|
||||||
|
:::
|
||||||
|
|
||||||
|
import CommunityProjects from '../src/components/community-projects.tsx';
|
||||||
|
import React from 'react';
|
||||||
|
|
||||||
|
<CommunityProjects />
|
||||||
@@ -80,7 +80,7 @@ The Immich Microservices image uses the same `Dockerfile` as the Immich Server,
|
|||||||
- Background jobs (file deletion, user deletion)
|
- Background jobs (file deletion, user deletion)
|
||||||
|
|
||||||
:::info
|
:::info
|
||||||
This list closely matches what is available on the [Administration > Jobs](/docs/administration/jobs.md) page, which provides some remote queue management capabilities.
|
This list closely matches what is available on the [Administration > Jobs](/docs/administration/jobs-workers/#jobs) page, which provides some remote queue management capabilities.
|
||||||
:::
|
:::
|
||||||
|
|
||||||
### Machine Learning
|
### Machine Learning
|
||||||
|
|||||||
@@ -1,14 +1,14 @@
|
|||||||
# Database Migrations
|
# Database Migrations
|
||||||
|
|
||||||
After making any changes in the `server/src/infra/entities`, a database migration need to run in order to register the changes in the database. Follow the steps below to create a new migration.
|
After making any changes in the `server/src/entities`, a database migration need to run in order to register the changes in the database. Follow the steps below to create a new migration.
|
||||||
|
|
||||||
1. Run the command
|
1. Run the command
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
npm run typeorm:migrations:generate ./src/infra/<migration-name>
|
npm run typeorm:migrations:generate <migration-name>
|
||||||
```
|
```
|
||||||
|
|
||||||
2. Check if the migration file makes sense.
|
2. Check if the migration file makes sense.
|
||||||
3. Move the migration file to folder `./server/src/infra/migrations` in your code editor.
|
3. Move the migration file to folder `./server/src/migrations` in your code editor.
|
||||||
|
|
||||||
The server will automatically detect `*.ts` file changes and restart. Part of the server start-up process includes running any new migrations, so it will be applied immediately.
|
The server will automatically detect `*.ts` file changes and restart. Part of the server start-up process includes running any new migrations, so it will be applied immediately.
|
||||||
|
|||||||
@@ -9,7 +9,12 @@ When contributing code through a pull request, please check the following:
|
|||||||
- [ ] `npm run check:svelte` (Type checking via SvelteKit)
|
- [ ] `npm run check:svelte` (Type checking via SvelteKit)
|
||||||
- [ ] `npm test` (unit tests)
|
- [ ] `npm test` (unit tests)
|
||||||
|
|
||||||
:::tip
|
## Documentation
|
||||||
|
|
||||||
|
- [ ] `npm run format` (formatting via Prettier)
|
||||||
|
- [ ] Update the `_redirects` file if you have renamed a page or removed it from the documentation.
|
||||||
|
|
||||||
|
:::tip AIO
|
||||||
Run all web checks with `npm run check:all`
|
Run all web checks with `npm run check:all`
|
||||||
:::
|
:::
|
||||||
|
|
||||||
@@ -20,10 +25,14 @@ Run all web checks with `npm run check:all`
|
|||||||
- [ ] `npm run check` (Type checking via `tsc`)
|
- [ ] `npm run check` (Type checking via `tsc`)
|
||||||
- [ ] `npm test` (unit tests)
|
- [ ] `npm test` (unit tests)
|
||||||
|
|
||||||
:::tip
|
:::tip AIO
|
||||||
Run all server checks with `npm run check:all`
|
Run all server checks with `npm run check:all`
|
||||||
:::
|
:::
|
||||||
|
|
||||||
|
:::info Auto Fix
|
||||||
|
You can use `npm run __:fix` to potentially correct some issues automatically for `npm run format` and `lint`.
|
||||||
|
:::
|
||||||
|
|
||||||
## OpenAPI
|
## OpenAPI
|
||||||
|
|
||||||
The OpenAPI client libraries need to be regenerated whenever there are changes to the `immich-openapi-specs.json` file. Note that you should not modify this file directly as it is auto-generated. See [OpenAPI](/docs/developer/open-api.md) for more details.
|
The OpenAPI client libraries need to be regenerated whenever there are changes to the `immich-openapi-specs.json` file. Note that you should not modify this file directly as it is auto-generated. See [OpenAPI](/docs/developer/open-api.md) for more details.
|
||||||
|
|||||||
@@ -16,20 +16,19 @@ Thanks for being interested in contributing 😊
|
|||||||
|
|
||||||
## Environment
|
## Environment
|
||||||
|
|
||||||
### Server and web app
|
### Services
|
||||||
|
|
||||||
This environment includes the following services:
|
This environment includes the services below. Additional details are available in each service's README.
|
||||||
|
|
||||||
- Core server - `/server/src/immich`
|
- Server - [`/server`](https://github.com/immich-app/immich/tree/main/server)
|
||||||
- Machine learning - `/machine-learning`
|
- Web app - [`/web`](https://github.com/immich-app/immich/tree/main/web)
|
||||||
- Microservices - `/server/src/microservicess`
|
- Machine learning - [`/machine-learning`](https://github.com/immich-app/immich/tree/main/machine-learning)
|
||||||
- Web app - `/web`
|
|
||||||
- Redis
|
- Redis
|
||||||
- PostgreSQL development database with exposed port `5432` so you can use any database client to acess it
|
- PostgreSQL development database with exposed port `5432` so you can use any database client to acess it
|
||||||
|
|
||||||
All the services are packaged to run as with single Docker Compose command.
|
All the services are packaged to run as with single Docker Compose command.
|
||||||
|
|
||||||
### Instructions
|
### Server and web apps
|
||||||
|
|
||||||
1. Clone the project repo.
|
1. Clone the project repo.
|
||||||
2. Run `cp docker/example.env docker/.env`.
|
2. Run `cp docker/example.env docker/.env`.
|
||||||
@@ -48,13 +47,7 @@ You can access the web from `http://your-machine-ip:2283` or `http://localhost:2
|
|||||||
|
|
||||||
**Note:** the "web" development container runs with uid 1000. If that uid does not have read/write permissions on the mounted volumes, you may encounter errors
|
**Note:** the "web" development container runs with uid 1000. If that uid does not have read/write permissions on the mounted volumes, you may encounter errors
|
||||||
|
|
||||||
### Mobile app
|
#### Connect web to a remote backend
|
||||||
|
|
||||||
The mobile app `(/mobile)` will required Flutter toolchain 3.13.x to be installed on your system.
|
|
||||||
|
|
||||||
Please refer to the [Flutter's official documentation](https://flutter.dev/docs/get-started/install) for more information on setting up the toolchain on your machine.
|
|
||||||
|
|
||||||
### Connect to a remote backend
|
|
||||||
|
|
||||||
If you only want to do web development connected to an existing, remote backend, follow these steps:
|
If you only want to do web development connected to an existing, remote backend, follow these steps:
|
||||||
|
|
||||||
@@ -67,13 +60,21 @@ If you only want to do web development connected to an existing, remote backend,
|
|||||||
IMMICH_SERVER_URL=https://demo.immich.app/ npm run dev
|
IMMICH_SERVER_URL=https://demo.immich.app/ npm run dev
|
||||||
```
|
```
|
||||||
|
|
||||||
|
### Mobile app
|
||||||
|
|
||||||
|
The mobile app `(/mobile)` will required Flutter toolchain 3.13.x to be installed on your system.
|
||||||
|
|
||||||
|
Please refer to the [Flutter's official documentation](https://flutter.dev/docs/get-started/install) for more information on setting up the toolchain on your machine.
|
||||||
|
|
||||||
|
The mobile app asks you what backend to connect to. You can utilize the demo backend (https://demo.immich.app/) if you don't need to change server code or upload photos. Alternatively, you can run the server yourself per the instructions above.
|
||||||
|
|
||||||
## IDE setup
|
## IDE setup
|
||||||
|
|
||||||
### Lint / format extensions
|
### Lint / format extensions
|
||||||
|
|
||||||
Setting these in the IDE give a better developer experience, auto-formatting code on save, and providing instant feedback on lint issues.
|
Setting these in the IDE give a better developer experience, auto-formatting code on save, and providing instant feedback on lint issues.
|
||||||
|
|
||||||
### Dart Code Metris
|
### Dart Code Metrics
|
||||||
|
|
||||||
The mobile app uses DCM (Dart Code Metrics) for linting and metrics calculation. Please refer to the [Getting Started](https://dcm.dev/docs/getting-started/#installation) page for more information on setting up DCM
|
The mobile app uses DCM (Dart Code Metrics) for linting and metrics calculation. Please refer to the [Getting Started](https://dcm.dev/docs/getting-started/#installation) page for more information on setting up DCM
|
||||||
|
|
||||||
|
|||||||
@@ -8,15 +8,24 @@ Unit are run by calling `npm run test` from the `server` directory.
|
|||||||
|
|
||||||
### End to end tests
|
### End to end tests
|
||||||
|
|
||||||
The backend has two end-to-end test suites that can be called with the following two commands from the project root directory:
|
The e2e tests can be run by first starting up a test production environment via:
|
||||||
|
|
||||||
- `make server-e2e-api`
|
```bash
|
||||||
- `make server-e2e-jobs`
|
make e2e
|
||||||
|
```
|
||||||
|
|
||||||
#### API (e2e)
|
Once the test environment is running, the e2e tests can be run via:
|
||||||
|
|
||||||
The API e2e tests spin up a test database and execute http requests against the server, validating the expected response codes and functionality for API endpoints.
|
```bash
|
||||||
|
cd e2e/
|
||||||
|
npm test
|
||||||
|
```
|
||||||
|
|
||||||
#### Jobs (e2e)
|
The tests check various things including:
|
||||||
|
|
||||||
The Jobs e2e tests spin up a docker test environment where thumbnail generation, library scanning, and other _job_ workflows are validated.
|
- Authentication and authorization
|
||||||
|
- Query param, body, and url validation
|
||||||
|
- Response codes
|
||||||
|
- Thumbnail generation
|
||||||
|
- Metadata extraction
|
||||||
|
- Library scanning
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
# The Immich CLI
|
# The Immich CLI
|
||||||
|
|
||||||
Immich has a CLI that allows you to perform certain actions from the command line. This CLI replaces the [legacy CLI](https://github.com/immich-app/CLI) that was previously available. The CLI is hosted in the [cli folder of the the main Immich github repository](https://github.com/immich-app/immich/tree/main/cli).
|
Immich has a command line interface (CLI) that allows you to perform certain actions from the command line.
|
||||||
|
|
||||||
## Features
|
## Features
|
||||||
|
|
||||||
@@ -44,66 +44,72 @@ Please modify the `IMMICH_INSTANCE_URL` and `IMMICH_API_KEY` environment variabl
|
|||||||
|
|
||||||
## Usage
|
## Usage
|
||||||
|
|
||||||
```
|
<details>
|
||||||
immich
|
<summary>Usage</summary>
|
||||||
```
|
|
||||||
|
|
||||||
```
|
```
|
||||||
|
$ immich
|
||||||
Usage: immich [options] [command]
|
Usage: immich [options] [command]
|
||||||
|
|
||||||
Command line interface for Immich
|
Command line interface for Immich
|
||||||
|
|
||||||
Options:
|
Options:
|
||||||
-V, --version output the version number
|
-V, --version output the version number
|
||||||
-d, --config Configuration directory (env: IMMICH_CONFIG_DIR)
|
-d, --config-directory <directory> Configuration directory where auth.yml will be stored (default: "~/.config/immich/", env:
|
||||||
-h, --help display help for command
|
IMMICH_CONFIG_DIR)
|
||||||
|
-u, --url [url] Immich server URL (env: IMMICH_INSTANCE_URL)
|
||||||
|
-k, --key [key] Immich API key (env: IMMICH_API_KEY)
|
||||||
|
-h, --help display help for command
|
||||||
|
|
||||||
Commands:
|
Commands:
|
||||||
upload [options] [paths...] Upload assets
|
login|login-key <url> <key> Login using an API key
|
||||||
server-info Display server information
|
logout Remove stored credentials
|
||||||
login-key [instanceUrl] [apiKey] Login using an API key
|
server-info Display server information
|
||||||
logout Remove stored credentials
|
upload [options] [paths...] Upload assets
|
||||||
help [command] display help for command
|
help [command] display help for command
|
||||||
```
|
```
|
||||||
|
|
||||||
|
</details>
|
||||||
|
|
||||||
## Commands
|
## Commands
|
||||||
|
|
||||||
The upload command supports the following options:
|
The upload command supports the following options:
|
||||||
|
|
||||||
|
<details>
|
||||||
|
<summary>Options</summary>
|
||||||
|
|
||||||
```
|
```
|
||||||
Usage: immich upload [options] [paths...]
|
Usage: immich upload [paths...] [options]
|
||||||
|
|
||||||
Upload assets
|
Upload assets
|
||||||
|
|
||||||
Arguments:
|
Arguments:
|
||||||
paths One or more paths to assets to be uploaded
|
paths One or more paths to assets to be uploaded
|
||||||
|
|
||||||
Options:
|
Options:
|
||||||
-r, --recursive Recursive (default: false, env: IMMICH_RECURSIVE)
|
-r, --recursive Recursive (default: false, env: IMMICH_RECURSIVE)
|
||||||
-i, --ignore [paths...] Paths to ignore (env: IMMICH_IGNORE_PATHS)
|
-i, --ignore [paths...] Paths to ignore (default: [], env: IMMICH_IGNORE_PATHS)
|
||||||
-h, --skip-hash Don't hash files before upload (default: false, env: IMMICH_SKIP_HASH)
|
-h, --skip-hash Don't hash files before upload (default: false, env: IMMICH_SKIP_HASH)
|
||||||
-H, --include-hidden Include hidden folders (default: false, env: IMMICH_INCLUDE_HIDDEN)
|
-H, --include-hidden Include hidden folders (default: false, env: IMMICH_INCLUDE_HIDDEN)
|
||||||
-a, --album Automatically create albums based on folder name (default: false, env: IMMICH_AUTO_CREATE_ALBUM)
|
-a, --album Automatically create albums based on folder name (default: false, env: IMMICH_AUTO_CREATE_ALBUM)
|
||||||
-A, --album-name <name> Add all assets to specified album (env: IMMICH_ALBUM_NAME)
|
-A, --album-name <name> Add all assets to specified album (env: IMMICH_ALBUM_NAME)
|
||||||
-n, --dry-run Don't perform any actions, just show what will be done (default: false, env: IMMICH_DRY_RUN)
|
-n, --dry-run Don't perform any actions, just show what will be done (default: false, env: IMMICH_DRY_RUN)
|
||||||
--delete Delete local assets after upload (env: IMMICH_DELETE_ASSETS)
|
-c, --concurrency <number> Number of assets to upload at the same time (default: 4, env: IMMICH_UPLOAD_CONCURRENCY)
|
||||||
--help display help for command
|
--delete Delete local assets after upload (env: IMMICH_DELETE_ASSETS)
|
||||||
|
--help display help for command
|
||||||
```
|
```
|
||||||
|
|
||||||
|
</details>
|
||||||
|
|
||||||
Note that the above options can read from environment variables as well.
|
Note that the above options can read from environment variables as well.
|
||||||
|
|
||||||
## Quick Start
|
## Quick Start
|
||||||
|
|
||||||
You begin by authenticating to your Immich server.
|
You begin by authenticating to your Immich server. For instance:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
immich login-key [instanceUrl] [apiKey]
|
# immich login [url] [key]
|
||||||
```
|
immich login http://192.168.1.216:2283/api HFEJ38DNSDUEG
|
||||||
|
|
||||||
For instance,
|
|
||||||
|
|
||||||
```bash
|
|
||||||
immich login-key http://192.168.1.216:2283/api HFEJ38DNSDUEG
|
|
||||||
```
|
```
|
||||||
|
|
||||||
This will store your credentials in a `auth.yml` file in the configuration directory which defaults to `~/.config/`. The directory can be set with the `-d` option or the environment variable `IMMICH_CONFIG_DIR`. Please keep the file secure, either by performing the logout command after you are done, or deleting it manually.
|
This will store your credentials in a `auth.yml` file in the configuration directory which defaults to `~/.config/`. The directory can be set with the `-d` option or the environment variable `IMMICH_CONFIG_DIR`. Please keep the file secure, either by performing the logout command after you are done, or deleting it manually.
|
||||||
|
|||||||
@@ -22,7 +22,8 @@ You do not need to redo any transcoding jobs after enabling hardware acceleratio
|
|||||||
- WSL2 does not support Quick Sync.
|
- WSL2 does not support Quick Sync.
|
||||||
- Raspberry Pi is currently not supported.
|
- Raspberry Pi is currently not supported.
|
||||||
- Two-pass mode is only supported for NVENC. Other APIs will ignore this setting.
|
- Two-pass mode is only supported for NVENC. Other APIs will ignore this setting.
|
||||||
- Only encoding is currently hardware accelerated, so the CPU is still used for software decoding and tone-mapping.
|
- By default, only encoding is currently hardware accelerated. This means the CPU is still used for software decoding and tone-mapping.
|
||||||
|
- NVENC and RKMPP can be fully accelerated by enabling hardware decoding in the video transcoding settings.
|
||||||
- Hardware dependent
|
- Hardware dependent
|
||||||
- Codec support varies, but H.264 and HEVC are usually supported.
|
- Codec support varies, but H.264 and HEVC are usually supported.
|
||||||
- Notably, NVIDIA and AMD GPUs do not support VP9 encoding.
|
- Notably, NVIDIA and AMD GPUs do not support VP9 encoding.
|
||||||
@@ -33,7 +34,7 @@ You do not need to redo any transcoding jobs after enabling hardware acceleratio
|
|||||||
#### NVENC
|
#### NVENC
|
||||||
|
|
||||||
- You must have the official NVIDIA driver installed on the server.
|
- You must have the official NVIDIA driver installed on the server.
|
||||||
- On Linux (except for WSL2), you also need to have [NVIDIA Container Runtime][nvcr] installed.
|
- On Linux (except for WSL2), you also need to have [NVIDIA Container Toolkit][nvct] installed.
|
||||||
|
|
||||||
#### QSV
|
#### QSV
|
||||||
|
|
||||||
@@ -65,6 +66,7 @@ For RKMPP to work:
|
|||||||
|
|
||||||
3. Redeploy the `immich-microservices` container with these updated settings.
|
3. Redeploy the `immich-microservices` container with these updated settings.
|
||||||
4. In the Admin page under `Video transcoding settings`, change the hardware acceleration setting to the appropriate option and save.
|
4. In the Admin page under `Video transcoding settings`, change the hardware acceleration setting to the appropriate option and save.
|
||||||
|
5. (Optional) If using a compatible backend, you may enable hardware decoding for optimal performance.
|
||||||
|
|
||||||
#### Single Compose File
|
#### Single Compose File
|
||||||
|
|
||||||
@@ -102,7 +104,14 @@ Once this is done, you can continue to step 3 of "Basic Setup".
|
|||||||
|
|
||||||
#### All-In-One - Unraid Setup
|
#### All-In-One - Unraid Setup
|
||||||
|
|
||||||
##### NVENC - NVIDIA GPUs
|
##### QSV
|
||||||
|
|
||||||
|
1. Unraid > Docker > (Stop) Immich container > Edit
|
||||||
|
2. Scroll down and select `Add another Path, Port, Variable, Label or Device`
|
||||||
|
3. In the drop-down menu, select `Device` and an entry with any name and the value `/dev/dri`.
|
||||||
|
4. Continue to step 4 of "Basic Setup".
|
||||||
|
|
||||||
|
##### NVENC
|
||||||
|
|
||||||
1. In the container app, add this environmental variable: Key=`NVIDIA_VISIBLE_DEVICES` Value=`all`
|
1. In the container app, add this environmental variable: Key=`NVIDIA_VISIBLE_DEVICES` Value=`all`
|
||||||
2. While still in the container app, change the container from Basic Mode to Advanced Mode and add the following parameter to the Extra Parameters field: `--runtime=nvidia`
|
2. While still in the container app, change the container from Basic Mode to Advanced Mode and add the following parameter to the Extra Parameters field: `--runtime=nvidia`
|
||||||
@@ -115,7 +124,7 @@ Once this is done, you can continue to step 3 of "Basic Setup".
|
|||||||
- While you can use VAAPI with NVIDIA and Intel devices, prefer the more specific APIs since they're more optimized for their respective devices
|
- While you can use VAAPI with NVIDIA and Intel devices, prefer the more specific APIs since they're more optimized for their respective devices
|
||||||
|
|
||||||
[hw-file]: https://github.com/immich-app/immich/releases/latest/download/hwaccel.transcoding.yml
|
[hw-file]: https://github.com/immich-app/immich/releases/latest/download/hwaccel.transcoding.yml
|
||||||
[nvcr]: https://github.com/NVIDIA/nvidia-container-runtime/
|
[nvct]: https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html
|
||||||
[jellyfin-lp]: https://jellyfin.org/docs/general/administration/hardware-acceleration/intel/#configure-and-verify-lp-mode-on-linux
|
[jellyfin-lp]: https://jellyfin.org/docs/general/administration/hardware-acceleration/intel/#configure-and-verify-lp-mode-on-linux
|
||||||
[jellyfin-kernel-bug]: https://jellyfin.org/docs/general/administration/hardware-acceleration/intel/#known-issues-and-limitations
|
[jellyfin-kernel-bug]: https://jellyfin.org/docs/general/administration/hardware-acceleration/intel/#known-issues-and-limitations
|
||||||
[libmali-rockchip]: https://github.com/tsukumijima/libmali-rockchip/releases
|
[libmali-rockchip]: https://github.com/tsukumijima/libmali-rockchip/releases
|
||||||
|
|||||||
BIN
docs/docs/features/img/advanced-search-filters.webp
Normal file
|
After Width: | Height: | Size: 1.9 MiB |
BIN
docs/docs/features/img/library-custom-scan-interval.png
Normal file
|
After Width: | Height: | Size: 31 KiB |
BIN
docs/docs/features/img/moblie-smart-serach.webp
Normal file
|
After Width: | Height: | Size: 4.9 MiB |
|
Before Width: | Height: | Size: 84 KiB After Width: | Height: | Size: 43 KiB |
|
Before Width: | Height: | Size: 56 KiB After Width: | Height: | Size: 32 KiB |
|
Before Width: | Height: | Size: 183 KiB After Width: | Height: | Size: 236 KiB |
BIN
docs/docs/features/img/partner-sharing-4.png
Normal file
|
After Width: | Height: | Size: 30 KiB |
BIN
docs/docs/features/img/partner-sharing-5.png
Normal file
|
After Width: | Height: | Size: 28 KiB |
BIN
docs/docs/features/img/partner-sharing-6.png
Normal file
|
After Width: | Height: | Size: 1.6 MiB |
BIN
docs/docs/features/img/partner-sharing-7.png
Normal file
|
After Width: | Height: | Size: 37 KiB |