refactor: migrate some e2e to medium (#17640)

This commit is contained in:
Jason Rasmussen
2025-04-16 14:59:08 -04:00
committed by GitHub
parent f50e5d006c
commit 8cefa0b84b
20 changed files with 547 additions and 115 deletions
@@ -0,0 +1,46 @@
import { Kysely } from 'kysely';
import { DB } from 'src/db';
import { AssetRepository } from 'src/repositories/asset.repository';
import { UserRepository } from 'src/repositories/user.repository';
import { AssetService } from 'src/services/asset.service';
import { mediumFactory, newMediumService } from 'test/medium.factory';
import { factory } from 'test/small.factory';
import { getKyselyDB } from 'test/utils';
// Medium tests for AssetService backed by a real database and asset repository.
describe(AssetService.name, () => {
  let database: Kysely<DB>;
  let assets: AssetRepository;
  let users: UserRepository;

  // Builds the service under test; an explicit db overrides the shared one.
  const createSut = (db?: Kysely<DB>) =>
    newMediumService(AssetService, {
      database: db ?? database,
      repos: {
        asset: 'real',
      },
    });

  beforeAll(async () => {
    database = await getKyselyDB();
    assets = new AssetRepository(database);
    users = new UserRepository(database);
  });

  describe('getStatistics', () => {
    it('should return stats as numbers, not strings', async () => {
      const { sut } = createSut();
      const user = mediumFactory.userInsert();
      const asset = mediumFactory.assetInsert({ ownerId: user.id });
      await users.create(user);
      await assets.create(asset);
      await assets.upsertExif({ assetId: asset.id, fileSizeInByte: 12_345 });
      const auth = factory.auth({ user: { id: user.id } });
      await expect(sut.getStatistics(auth, {})).resolves.toEqual({ images: 1, total: 1, videos: 0 });
    });
  });
});
@@ -0,0 +1,81 @@
import { Kysely } from 'kysely';
import { DB } from 'src/db';
import { AssetRepository } from 'src/repositories/asset.repository';
import { PartnerRepository } from 'src/repositories/partner.repository';
import { UserRepository } from 'src/repositories/user.repository';
import { partners_delete_audit } from 'src/schema/functions';
import { mediumFactory } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
// Medium tests that exercise database-side behavior directly (audit tables and
// exif bookkeeping columns). Assertions run raw Kysely queries against the same
// connection the repositories use, since the behavior under test lives in the
// database schema (see src/schema/functions), not in application code.
describe('audit', () => {
  let defaultDatabase: Kysely<DB>;
  let assetRepo: AssetRepository;
  let userRepo: UserRepository;
  let partnerRepo: PartnerRepository;

  beforeAll(async () => {
    defaultDatabase = await getKyselyDB();
    assetRepo = new AssetRepository(defaultDatabase);
    userRepo = new UserRepository(defaultDatabase);
    partnerRepo = new PartnerRepository(defaultDatabase);
  });

  describe(partners_delete_audit.name, () => {
    it('should not cascade user deletes to partners_audit', async () => {
      const user1 = mediumFactory.userInsert();
      const user2 = mediumFactory.userInsert();
      await Promise.all([userRepo.create(user1), userRepo.create(user2)]);
      await partnerRepo.create({ sharedById: user1.id, sharedWithId: user2.id });
      // Hard-delete the sharing user; the partner row disappearing as a side
      // effect of the user delete must NOT leave an audit row behind.
      await userRepo.delete(user1, true);
      await expect(
        defaultDatabase.selectFrom('partners_audit').select(['id']).where('sharedById', '=', user1.id).execute(),
      ).resolves.toHaveLength(0);
    });
  });

  describe('assets_audit', () => {
    it('should not cascade user deletes to assets_audit', async () => {
      const user = mediumFactory.userInsert();
      const asset = mediumFactory.assetInsert({ ownerId: user.id });
      await userRepo.create(user);
      await assetRepo.create(asset);
      // Same idea as above: asset removal caused by a user hard-delete should
      // not be recorded in assets_audit.
      await userRepo.delete(user, true);
      await expect(
        defaultDatabase.selectFrom('assets_audit').select(['id']).where('assetId', '=', asset.id).execute(),
      ).resolves.toHaveLength(0);
    });
  });

  describe('exif', () => {
    it('should automatically set updatedAt and updateId when the row is updated', async () => {
      const user = mediumFactory.userInsert();
      const asset = mediumFactory.assetInsert({ ownerId: user.id });
      await userRepo.create(user);
      await assetRepo.create(asset);
      await assetRepo.upsertExif({ assetId: asset.id, make: 'Canon' });
      // Snapshot the bookkeeping columns after the initial insert...
      const before = await defaultDatabase
        .selectFrom('exif')
        .select(['updatedAt', 'updateId'])
        .where('assetId', '=', asset.id)
        .executeTakeFirstOrThrow();
      // ...then update the row and verify both columns changed without the
      // application ever setting them (presumably a DB trigger — the trigger
      // itself is outside this file).
      await assetRepo.upsertExif({ assetId: asset.id, make: 'Canon 2' });
      const after = await defaultDatabase
        .selectFrom('exif')
        .select(['updatedAt', 'updateId'])
        .where('assetId', '=', asset.id)
        .executeTakeFirstOrThrow();
      expect(before.updateId).not.toEqual(after.updateId);
      expect(before.updatedAt).not.toEqual(after.updatedAt);
    });
  });
});
@@ -0,0 +1,142 @@
import { Kysely } from 'kysely';
import { DateTime } from 'luxon';
import { DB } from 'src/db';
import { AssetFileType } from 'src/enum';
import { UserRepository } from 'src/repositories/user.repository';
import { MemoryService } from 'src/services/memory.service';
import { mediumFactory, newMediumService } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
describe(MemoryService.name, () => {
let defaultDatabase: Kysely<DB>;
const createSut = (db?: Kysely<DB>) => {
return newMediumService(MemoryService, {
database: db || defaultDatabase,
repos: {
asset: 'real',
memory: 'real',
user: 'real',
systemMetadata: 'real',
partner: 'real',
},
});
};
beforeEach(async () => {
defaultDatabase = await getKyselyDB();
const userRepo = new UserRepository(defaultDatabase);
const admin = mediumFactory.userInsert({ isAdmin: true });
await userRepo.create(admin);
});
describe('onMemoryCreate', () => {
it('should work on an empty database', async () => {
const { sut } = createSut();
await expect(sut.onMemoriesCreate()).resolves.not.toThrow();
});
it('should create a memory from an asset', async () => {
const { sut, repos, getRepository } = createSut();
const now = DateTime.fromObject({ year: 2025, month: 2, day: 25 }, { zone: 'utc' });
const user = mediumFactory.userInsert();
const asset = mediumFactory.assetInsert({ ownerId: user.id, localDateTime: now.minus({ years: 1 }).toISO() });
const jobStatus = mediumFactory.assetJobStatusInsert({ assetId: asset.id });
const userRepo = getRepository('user');
const assetRepo = getRepository('asset');
await userRepo.create(user);
await assetRepo.create(asset);
await Promise.all([
assetRepo.upsertExif({ assetId: asset.id, make: 'Canon' }),
assetRepo.upsertFiles([
{ assetId: asset.id, type: AssetFileType.PREVIEW, path: '/path/to/preview.jpg' },
{ assetId: asset.id, type: AssetFileType.THUMBNAIL, path: '/path/to/thumbnail.jpg' },
]),
assetRepo.upsertJobStatus(jobStatus),
]);
vi.setSystemTime(now.toJSDate());
await sut.onMemoriesCreate();
const memories = await repos.memory.search(user.id, {});
expect(memories.length).toBe(1);
expect(memories[0]).toEqual(
expect.objectContaining({
id: expect.any(String),
createdAt: expect.any(Date),
memoryAt: expect.any(Date),
updatedAt: expect.any(Date),
deletedAt: null,
ownerId: user.id,
assets: expect.arrayContaining([expect.objectContaining({ id: asset.id })]),
isSaved: false,
showAt: now.startOf('day').toJSDate(),
hideAt: now.endOf('day').toJSDate(),
seenAt: null,
type: 'on_this_day',
data: { year: 2024 },
}),
);
});
it('should not generate a memory twice for the same day', async () => {
const { sut, repos, getRepository } = createSut();
const now = DateTime.fromObject({ year: 2025, month: 2, day: 20 }, { zone: 'utc' });
const assetRepo = getRepository('asset');
const memoryRepo = getRepository('memory');
const user = mediumFactory.userInsert();
await repos.user.create(user);
for (const dto of [
{
ownerId: user.id,
localDateTime: now.minus({ year: 1 }).plus({ days: 3 }).toISO(),
},
{
ownerId: user.id,
localDateTime: now.minus({ year: 1 }).plus({ days: 4 }).toISO(),
},
{
ownerId: user.id,
localDateTime: now.minus({ year: 1 }).plus({ days: 5 }).toISO(),
},
]) {
const asset = mediumFactory.assetInsert(dto);
await assetRepo.create(asset);
await Promise.all([
assetRepo.upsertExif({ assetId: asset.id, make: 'Canon' }),
assetRepo.upsertJobStatus(mediumFactory.assetJobStatusInsert({ assetId: asset.id })),
assetRepo.upsertFiles([
{ assetId: asset.id, type: AssetFileType.PREVIEW, path: '/path/to/preview.jpg' },
{ assetId: asset.id, type: AssetFileType.THUMBNAIL, path: '/path/to/thumbnail.jpg' },
]),
]);
}
vi.setSystemTime(now.toJSDate());
await sut.onMemoriesCreate();
const memories = await memoryRepo.search(user.id, {});
expect(memories.length).toBe(1);
await sut.onMemoriesCreate();
const memoriesAfter = await memoryRepo.search(user.id, {});
expect(memoriesAfter.length).toBe(1);
});
});
describe('onMemoriesCleanup', () => {
it('should run without error', async () => {
const { sut } = createSut();
await expect(sut.onMemoriesCleanup()).resolves.not.toThrow();
});
});
});
@@ -0,0 +1,139 @@
import { Stats } from 'node:fs';
import { writeFile } from 'node:fs/promises';
import { tmpdir } from 'node:os';
import { join } from 'node:path';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { MetadataRepository } from 'src/repositories/metadata.repository';
import { MetadataService } from 'src/services/metadata.service';
import { automock, newRandomImage, newTestService, ServiceMocks } from 'test/utils';
// A real MetadataRepository (exiftool-backed) so tags written by createTestFile
// below are actually persisted to disk and read back by the service. Only the
// logging dependency is automocked; the sparse array deliberately leaves the
// first constructor argument undefined while supplying the second.
const metadataRepository = new MetadataRepository(
  // eslint-disable-next-line no-sparse-arrays
  automock(LoggingRepository, { args: [, { getEnv: () => ({}) }], strict: false }),
);
/**
 * Writes a freshly generated random image to the OS temp directory and stamps
 * it with the given exif tags via the real metadata repository.
 */
const createTestFile = async (exifData: Record<string, any>) => {
  const filePath = join(tmpdir(), 'test.png');
  const imageBytes = newRandomImage();
  await writeFile(filePath, imageBytes);
  await metadataRepository.writeTags(filePath, exifData);
  return { filePath };
};
// One row of the table-driven time zone scenarios for handleMetadataExtraction.
type TimeZoneTest = {
  description: string;
  // Zone assigned to process.env.TZ for the test; omitted means TZ stays unset.
  serverTimeZone?: string;
  // Raw exif tags written to the test file (e.g. DateTimeOriginal).
  exifData: Record<string, any>;
  // Expected persisted values: ISO timestamps plus the detected zone (null when
  // the exif data carries no zone information).
  expected: {
    localDateTime: string;
    dateTimeOriginal: string;
    timeZone: string | null;
  };
};
// Tests for MetadataService date/time-zone extraction against real exif data
// written to a temp file, with the server's TZ environment variable varied.
describe(MetadataService.name, () => {
  let sut: MetadataService;
  let mocks: ServiceMocks;

  beforeEach(() => {
    ({ sut, mocks } = newTestService(MetadataService, { metadata: metadataRepository }));

    // Fixed file stats so extraction does not depend on the real temp file.
    mocks.storage.stat.mockResolvedValue({
      size: 123_456,
      mtime: new Date(654_321),
      mtimeMs: 654_321,
      birthtimeMs: 654_322,
    } as Stats);

    // Start every test with TZ unset; individual cases opt in below.
    delete process.env.TZ;
  });

  it('should be defined', () => {
    expect(sut).toBeDefined();
  });

  describe('handleMetadataExtraction', () => {
    const timeZoneTests: TimeZoneTest[] = [
      {
        description: 'should handle no time zone information',
        exifData: {
          DateTimeOriginal: '2022:01:01 00:00:00',
        },
        expected: {
          localDateTime: '2022-01-01T00:00:00.000Z',
          dateTimeOriginal: '2022-01-01T00:00:00.000Z',
          timeZone: null,
        },
      },
      {
        description: 'should handle no time zone information and server behind UTC',
        serverTimeZone: 'America/Los_Angeles',
        exifData: {
          DateTimeOriginal: '2022:01:01 00:00:00',
        },
        expected: {
          localDateTime: '2022-01-01T00:00:00.000Z',
          dateTimeOriginal: '2022-01-01T08:00:00.000Z',
          timeZone: null,
        },
      },
      {
        description: 'should handle no time zone information and server ahead of UTC',
        serverTimeZone: 'Europe/Brussels',
        exifData: {
          DateTimeOriginal: '2022:01:01 00:00:00',
        },
        expected: {
          localDateTime: '2022-01-01T00:00:00.000Z',
          dateTimeOriginal: '2021-12-31T23:00:00.000Z',
          timeZone: null,
        },
      },
      {
        description: 'should handle no time zone information and server ahead of UTC in the summer',
        serverTimeZone: 'Europe/Brussels',
        exifData: {
          DateTimeOriginal: '2022:06:01 00:00:00',
        },
        expected: {
          localDateTime: '2022-06-01T00:00:00.000Z',
          dateTimeOriginal: '2022-05-31T22:00:00.000Z',
          timeZone: null,
        },
      },
      {
        description: 'should handle a +13:00 time zone',
        exifData: {
          DateTimeOriginal: '2022:01:01 00:00:00+13:00',
        },
        expected: {
          localDateTime: '2022-01-01T00:00:00.000Z',
          dateTimeOriginal: '2021-12-31T11:00:00.000Z',
          timeZone: 'UTC+13',
        },
      },
    ];

    it.each(timeZoneTests)('$description', async ({ exifData, serverTimeZone, expected }) => {
      // Only assign TZ when a zone is provided: `process.env.TZ = undefined`
      // would coerce to the literal string "undefined" (env values are always
      // strings), which is not the same as leaving the variable unset. The
      // beforeEach above already guarantees TZ is unset otherwise.
      if (serverTimeZone) {
        process.env.TZ = serverTimeZone;
      }
      const { filePath } = await createTestFile(exifData);
      mocks.assetJob.getForMetadataExtraction.mockResolvedValue({ id: 'asset-1', originalPath: filePath } as never);
      await sut.handleMetadataExtraction({ id: 'asset-1' });
      expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
        expect.objectContaining({
          dateTimeOriginal: new Date(expected.dateTimeOriginal),
          timeZone: expected.timeZone,
        }),
      );
      expect(mocks.asset.update).toHaveBeenCalledWith(
        expect.objectContaining({
          localDateTime: new Date(expected.localDateTime),
        }),
      );
    });
  });
});
@@ -0,0 +1,910 @@
import { AuthDto } from 'src/dtos/auth.dto';
import { SyncEntityType, SyncRequestType } from 'src/enum';
import { SYNC_TYPES_ORDER, SyncService } from 'src/services/sync.service';
import { mediumFactory, newMediumService } from 'test/medium.factory';
import { factory } from 'test/small.factory';
import { getKyselyDB } from 'test/utils';
/**
 * Provisions a SyncService over a fresh database with real sync/session
 * repositories, persists one user + session, and returns a `testSync` helper
 * that streams the requested sync types and returns the collected responses.
 */
const setup = async () => {
  const database = await getKyselyDB();
  const { sut, mocks, repos, getRepository } = newMediumService(SyncService, {
    database,
    repos: {
      sync: 'real',
      session: 'real',
    },
  });

  const user = mediumFactory.userInsert();
  const session = mediumFactory.sessionInsert({ userId: user.id });
  const { id, name, email } = user;
  const auth = factory.auth({
    session,
    user: { id, name, email },
  });

  await getRepository('user').create(user);
  await getRepository('session').create(session);

  const testSync = async (authDto: AuthDto, types: SyncRequestType[]) => {
    const stream = mediumFactory.syncStream();
    // Give pending row updates a tick (1ms) to become visible before streaming.
    await new Promise((resolve) => setTimeout(resolve, 1));
    await sut.stream(authDto, stream, { types });
    return stream.getResponse();
  };

  return {
    sut,
    auth,
    mocks,
    repos,
    getRepository,
    testSync,
  };
};
describe(SyncService.name, () => {
it('should have all the types in the ordering variable', () => {
for (const key in SyncRequestType) {
expect(SYNC_TYPES_ORDER).includes(key);
}
expect(SYNC_TYPES_ORDER.length).toBe(Object.keys(SyncRequestType).length);
});
describe.concurrent(SyncEntityType.UserV1, () => {
it('should detect and sync the first user', async () => {
const { auth, sut, getRepository, testSync } = await setup();
const userRepo = getRepository('user');
const user = await userRepo.get(auth.user.id, { withDeleted: false });
if (!user) {
expect.fail('First user should exist');
}
const initialSyncResponse = await testSync(auth, [SyncRequestType.UsersV1]);
expect(initialSyncResponse).toHaveLength(1);
expect(initialSyncResponse).toEqual([
{
ack: expect.any(String),
data: {
deletedAt: user.deletedAt,
email: user.email,
id: user.id,
name: user.name,
},
type: 'UserV1',
},
]);
const acks = [initialSyncResponse[0].ack];
await sut.setAcks(auth, { acks });
const ackSyncResponse = await testSync(auth, [SyncRequestType.UsersV1]);
expect(ackSyncResponse).toHaveLength(0);
});
it('should detect and sync a soft deleted user', async () => {
const { auth, sut, getRepository, testSync } = await setup();
const deletedAt = new Date().toISOString();
const deletedUser = mediumFactory.userInsert({ deletedAt });
const deleted = await getRepository('user').create(deletedUser);
const response = await testSync(auth, [SyncRequestType.UsersV1]);
expect(response).toHaveLength(2);
expect(response).toEqual(
expect.arrayContaining([
{
ack: expect.any(String),
data: {
deletedAt: null,
email: auth.user.email,
id: auth.user.id,
name: auth.user.name,
},
type: 'UserV1',
},
{
ack: expect.any(String),
data: {
deletedAt,
email: deleted.email,
id: deleted.id,
name: deleted.name,
},
type: 'UserV1',
},
]),
);
const acks = [response[1].ack];
await sut.setAcks(auth, { acks });
const ackSyncResponse = await testSync(auth, [SyncRequestType.UsersV1]);
expect(ackSyncResponse).toHaveLength(0);
});
it('should detect and sync a deleted user', async () => {
const { auth, sut, getRepository, testSync } = await setup();
const userRepo = getRepository('user');
const user = mediumFactory.userInsert();
await userRepo.create(user);
await userRepo.delete({ id: user.id }, true);
const response = await testSync(auth, [SyncRequestType.UsersV1]);
expect(response).toHaveLength(2);
expect(response).toEqual(
expect.arrayContaining([
{
ack: expect.any(String),
data: {
userId: user.id,
},
type: 'UserDeleteV1',
},
{
ack: expect.any(String),
data: {
deletedAt: null,
email: auth.user.email,
id: auth.user.id,
name: auth.user.name,
},
type: 'UserV1',
},
]),
);
const acks = response.map(({ ack }) => ack);
await sut.setAcks(auth, { acks });
const ackSyncResponse = await testSync(auth, [SyncRequestType.UsersV1]);
expect(ackSyncResponse).toHaveLength(0);
});
it('should sync a user and then an update to that same user', async () => {
const { auth, sut, getRepository, testSync } = await setup();
const initialSyncResponse = await testSync(auth, [SyncRequestType.UsersV1]);
expect(initialSyncResponse).toHaveLength(1);
expect(initialSyncResponse).toEqual(
expect.arrayContaining([
{
ack: expect.any(String),
data: {
deletedAt: null,
email: auth.user.email,
id: auth.user.id,
name: auth.user.name,
},
type: 'UserV1',
},
]),
);
const acks = [initialSyncResponse[0].ack];
await sut.setAcks(auth, { acks });
const userRepo = getRepository('user');
const updated = await userRepo.update(auth.user.id, { name: 'new name' });
const updatedSyncResponse = await testSync(auth, [SyncRequestType.UsersV1]);
expect(updatedSyncResponse).toHaveLength(1);
expect(updatedSyncResponse).toEqual(
expect.arrayContaining([
{
ack: expect.any(String),
data: {
deletedAt: null,
email: auth.user.email,
id: auth.user.id,
name: updated.name,
},
type: 'UserV1',
},
]),
);
});
});
describe.concurrent(SyncEntityType.PartnerV1, () => {
it('should detect and sync the first partner', async () => {
const { auth, sut, getRepository, testSync } = await setup();
const user1 = auth.user;
const userRepo = getRepository('user');
const partnerRepo = getRepository('partner');
const user2 = mediumFactory.userInsert();
await userRepo.create(user2);
const partner = await partnerRepo.create({ sharedById: user2.id, sharedWithId: user1.id });
const initialSyncResponse = await testSync(auth, [SyncRequestType.PartnersV1]);
expect(initialSyncResponse).toHaveLength(1);
expect(initialSyncResponse).toEqual(
expect.arrayContaining([
{
ack: expect.any(String),
data: {
inTimeline: partner.inTimeline,
sharedById: partner.sharedById,
sharedWithId: partner.sharedWithId,
},
type: 'PartnerV1',
},
]),
);
const acks = [initialSyncResponse[0].ack];
await sut.setAcks(auth, { acks });
const ackSyncResponse = await testSync(auth, [SyncRequestType.PartnersV1]);
expect(ackSyncResponse).toHaveLength(0);
});
it('should detect and sync a deleted partner', async () => {
const { auth, sut, getRepository, testSync } = await setup();
const userRepo = getRepository('user');
const user1 = auth.user;
const user2 = mediumFactory.userInsert();
await userRepo.create(user2);
const partnerRepo = getRepository('partner');
const partner = await partnerRepo.create({ sharedById: user2.id, sharedWithId: user1.id });
await partnerRepo.remove(partner);
const response = await testSync(auth, [SyncRequestType.PartnersV1]);
expect(response).toHaveLength(1);
expect(response).toEqual(
expect.arrayContaining([
{
ack: expect.any(String),
data: {
sharedById: partner.sharedById,
sharedWithId: partner.sharedWithId,
},
type: 'PartnerDeleteV1',
},
]),
);
const acks = response.map(({ ack }) => ack);
await sut.setAcks(auth, { acks });
const ackSyncResponse = await testSync(auth, [SyncRequestType.PartnersV1]);
expect(ackSyncResponse).toHaveLength(0);
});
it('should detect and sync a partner share both to and from another user', async () => {
const { auth, sut, getRepository, testSync } = await setup();
const userRepo = getRepository('user');
const user1 = auth.user;
const user2 = await userRepo.create(mediumFactory.userInsert());
const partnerRepo = getRepository('partner');
const partner1 = await partnerRepo.create({ sharedById: user2.id, sharedWithId: user1.id });
const partner2 = await partnerRepo.create({ sharedById: user1.id, sharedWithId: user2.id });
const response = await testSync(auth, [SyncRequestType.PartnersV1]);
expect(response).toHaveLength(2);
expect(response).toEqual(
expect.arrayContaining([
{
ack: expect.any(String),
data: {
inTimeline: partner1.inTimeline,
sharedById: partner1.sharedById,
sharedWithId: partner1.sharedWithId,
},
type: 'PartnerV1',
},
{
ack: expect.any(String),
data: {
inTimeline: partner2.inTimeline,
sharedById: partner2.sharedById,
sharedWithId: partner2.sharedWithId,
},
type: 'PartnerV1',
},
]),
);
await sut.setAcks(auth, { acks: [response[1].ack] });
const ackSyncResponse = await testSync(auth, [SyncRequestType.PartnersV1]);
expect(ackSyncResponse).toHaveLength(0);
});
it('should sync a partner and then an update to that same partner', async () => {
const { auth, sut, getRepository, testSync } = await setup();
const userRepo = getRepository('user');
const user1 = auth.user;
const user2 = await userRepo.create(mediumFactory.userInsert());
const partnerRepo = getRepository('partner');
const partner = await partnerRepo.create({ sharedById: user2.id, sharedWithId: user1.id });
const initialSyncResponse = await testSync(auth, [SyncRequestType.PartnersV1]);
expect(initialSyncResponse).toHaveLength(1);
expect(initialSyncResponse).toEqual(
expect.arrayContaining([
{
ack: expect.any(String),
data: {
inTimeline: partner.inTimeline,
sharedById: partner.sharedById,
sharedWithId: partner.sharedWithId,
},
type: 'PartnerV1',
},
]),
);
const acks = [initialSyncResponse[0].ack];
await sut.setAcks(auth, { acks });
const updated = await partnerRepo.update(
{ sharedById: partner.sharedById, sharedWithId: partner.sharedWithId },
{ inTimeline: true },
);
const updatedSyncResponse = await testSync(auth, [SyncRequestType.PartnersV1]);
expect(updatedSyncResponse).toHaveLength(1);
expect(updatedSyncResponse).toEqual(
expect.arrayContaining([
{
ack: expect.any(String),
data: {
inTimeline: updated.inTimeline,
sharedById: updated.sharedById,
sharedWithId: updated.sharedWithId,
},
type: 'PartnerV1',
},
]),
);
});
it('should not sync a partner or partner delete for an unrelated user', async () => {
const { auth, getRepository, testSync } = await setup();
const userRepo = getRepository('user');
const user2 = await userRepo.create(mediumFactory.userInsert());
const user3 = await userRepo.create(mediumFactory.userInsert());
const partnerRepo = getRepository('partner');
const partner = await partnerRepo.create({ sharedById: user2.id, sharedWithId: user3.id });
expect(await testSync(auth, [SyncRequestType.PartnersV1])).toHaveLength(0);
await partnerRepo.remove(partner);
expect(await testSync(auth, [SyncRequestType.PartnersV1])).toHaveLength(0);
});
it('should not sync a partner delete after a user is deleted', async () => {
const { auth, getRepository, testSync } = await setup();
const userRepo = getRepository('user');
const user2 = await userRepo.create(mediumFactory.userInsert());
const partnerRepo = getRepository('partner');
await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id });
await userRepo.delete({ id: user2.id }, true);
expect(await testSync(auth, [SyncRequestType.PartnersV1])).toHaveLength(0);
});
});
describe.concurrent(SyncEntityType.AssetV1, () => {
it('should detect and sync the first asset', async () => {
const { auth, sut, getRepository, testSync } = await setup();
const checksum = '1115vHcVkZzNp3Q9G+FEA0nu6zUbGb4Tj4UOXkN0wRA=';
const thumbhash = '2225vHcVkZzNp3Q9G+FEA0nu6zUbGb4Tj4UOXkN0wRA=';
const date = new Date().toISOString();
const assetRepo = getRepository('asset');
const asset = mediumFactory.assetInsert({
ownerId: auth.user.id,
checksum: Buffer.from(checksum, 'base64'),
thumbhash: Buffer.from(thumbhash, 'base64'),
fileCreatedAt: date,
fileModifiedAt: date,
localDateTime: date,
deletedAt: null,
});
await assetRepo.create(asset);
const initialSyncResponse = await testSync(auth, [SyncRequestType.AssetsV1]);
expect(initialSyncResponse).toHaveLength(1);
expect(initialSyncResponse).toEqual(
expect.arrayContaining([
{
ack: expect.any(String),
data: {
id: asset.id,
ownerId: asset.ownerId,
thumbhash,
checksum,
deletedAt: asset.deletedAt,
fileCreatedAt: asset.fileCreatedAt,
fileModifiedAt: asset.fileModifiedAt,
isFavorite: asset.isFavorite,
isVisible: asset.isVisible,
localDateTime: asset.localDateTime,
type: asset.type,
},
type: 'AssetV1',
},
]),
);
const acks = [initialSyncResponse[0].ack];
await sut.setAcks(auth, { acks });
const ackSyncResponse = await testSync(auth, [SyncRequestType.AssetsV1]);
expect(ackSyncResponse).toHaveLength(0);
});
it('should detect and sync a deleted asset', async () => {
const { auth, sut, getRepository, testSync } = await setup();
const assetRepo = getRepository('asset');
const asset = mediumFactory.assetInsert({ ownerId: auth.user.id });
await assetRepo.create(asset);
await assetRepo.remove(asset);
const response = await testSync(auth, [SyncRequestType.AssetsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual(
expect.arrayContaining([
{
ack: expect.any(String),
data: {
assetId: asset.id,
},
type: 'AssetDeleteV1',
},
]),
);
const acks = response.map(({ ack }) => ack);
await sut.setAcks(auth, { acks });
const ackSyncResponse = await testSync(auth, [SyncRequestType.AssetsV1]);
expect(ackSyncResponse).toHaveLength(0);
});
it('should not sync an asset or asset delete for an unrelated user', async () => {
const { auth, getRepository, testSync } = await setup();
const userRepo = getRepository('user');
const user2 = mediumFactory.userInsert();
await userRepo.create(user2);
const sessionRepo = getRepository('session');
const session = mediumFactory.sessionInsert({ userId: user2.id });
await sessionRepo.create(session);
const assetRepo = getRepository('asset');
const asset = mediumFactory.assetInsert({ ownerId: user2.id });
await assetRepo.create(asset);
const auth2 = factory.auth({ session, user: user2 });
expect(await testSync(auth2, [SyncRequestType.AssetsV1])).toHaveLength(1);
expect(await testSync(auth, [SyncRequestType.AssetsV1])).toHaveLength(0);
await assetRepo.remove(asset);
expect(await testSync(auth2, [SyncRequestType.AssetsV1])).toHaveLength(1);
expect(await testSync(auth, [SyncRequestType.AssetsV1])).toHaveLength(0);
});
});
describe.concurrent(SyncRequestType.PartnerAssetsV1, () => {
it('should detect and sync the first partner asset', async () => {
const { auth, sut, getRepository, testSync } = await setup();
const checksum = '1115vHcVkZzNp3Q9G+FEA0nu6zUbGb4Tj4UOXkN0wRA=';
const thumbhash = '2225vHcVkZzNp3Q9G+FEA0nu6zUbGb4Tj4UOXkN0wRA=';
const date = new Date().toISOString();
const userRepo = getRepository('user');
const user2 = mediumFactory.userInsert();
await userRepo.create(user2);
const assetRepo = getRepository('asset');
const asset = mediumFactory.assetInsert({
ownerId: user2.id,
checksum: Buffer.from(checksum, 'base64'),
thumbhash: Buffer.from(thumbhash, 'base64'),
fileCreatedAt: date,
fileModifiedAt: date,
localDateTime: date,
deletedAt: null,
});
await assetRepo.create(asset);
const partnerRepo = getRepository('partner');
await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id });
const initialSyncResponse = await testSync(auth, [SyncRequestType.PartnerAssetsV1]);
expect(initialSyncResponse).toHaveLength(1);
expect(initialSyncResponse).toEqual(
expect.arrayContaining([
{
ack: expect.any(String),
data: {
id: asset.id,
ownerId: asset.ownerId,
thumbhash,
checksum,
deletedAt: null,
fileCreatedAt: date,
fileModifiedAt: date,
isFavorite: false,
isVisible: true,
localDateTime: date,
type: asset.type,
},
type: SyncEntityType.PartnerAssetV1,
},
]),
);
const acks = [initialSyncResponse[0].ack];
await sut.setAcks(auth, { acks });
const ackSyncResponse = await testSync(auth, [SyncRequestType.PartnerAssetsV1]);
expect(ackSyncResponse).toHaveLength(0);
});
it('should detect and sync a deleted partner asset', async () => {
const { auth, sut, getRepository, testSync } = await setup();
const userRepo = getRepository('user');
const user2 = mediumFactory.userInsert();
await userRepo.create(user2);
const asset = mediumFactory.assetInsert({ ownerId: user2.id });
const assetRepo = getRepository('asset');
await assetRepo.create(asset);
const partnerRepo = getRepository('partner');
await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id });
await assetRepo.remove(asset);
const response = await testSync(auth, [SyncRequestType.PartnerAssetsV1]);
expect(response).toHaveLength(1);
expect(response).toEqual(
expect.arrayContaining([
{
ack: expect.any(String),
data: {
assetId: asset.id,
},
type: SyncEntityType.PartnerAssetDeleteV1,
},
]),
);
const acks = response.map(({ ack }) => ack);
await sut.setAcks(auth, { acks });
const ackSyncResponse = await testSync(auth, [SyncRequestType.PartnerAssetsV1]);
expect(ackSyncResponse).toHaveLength(0);
});
it('should not sync a deleted partner asset due to a user delete', async () => {
const { auth, getRepository, testSync } = await setup();
const userRepo = getRepository('user');
const user2 = mediumFactory.userInsert();
await userRepo.create(user2);
const partnerRepo = getRepository('partner');
await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id });
const assetRepo = getRepository('asset');
await assetRepo.create(mediumFactory.assetInsert({ ownerId: user2.id }));
await userRepo.delete({ id: user2.id }, true);
const response = await testSync(auth, [SyncRequestType.PartnerAssetsV1]);
expect(response).toHaveLength(0);
});
it('should not sync a deleted partner asset due to a partner delete (unshare)', async () => {
const { auth, getRepository, testSync } = await setup();
const userRepo = getRepository('user');
const user2 = mediumFactory.userInsert();
await userRepo.create(user2);
const assetRepo = getRepository('asset');
await assetRepo.create(mediumFactory.assetInsert({ ownerId: user2.id }));
const partnerRepo = getRepository('partner');
const partner = { sharedById: user2.id, sharedWithId: auth.user.id };
await partnerRepo.create(partner);
await expect(testSync(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(1);
await partnerRepo.remove(partner);
await expect(testSync(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(0);
});
it('should not sync an asset or asset delete for own user', async () => {
const { auth, getRepository, testSync } = await setup();
const userRepo = getRepository('user');
const user2 = mediumFactory.userInsert();
await userRepo.create(user2);
const assetRepo = getRepository('asset');
const asset = mediumFactory.assetInsert({ ownerId: auth.user.id });
await assetRepo.create(asset);
const partnerRepo = getRepository('partner');
await partnerRepo.create({ sharedById: user2.id, sharedWithId: auth.user.id });
await expect(testSync(auth, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1);
await expect(testSync(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(0);
await assetRepo.remove(asset);
await expect(testSync(auth, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1);
await expect(testSync(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(0);
});
it('should not sync an asset or asset delete for unrelated user', async () => {
const { auth, getRepository, testSync } = await setup();
const userRepo = getRepository('user');
const user2 = mediumFactory.userInsert();
await userRepo.create(user2);
const sessionRepo = getRepository('session');
const session = mediumFactory.sessionInsert({ userId: user2.id });
await sessionRepo.create(session);
const auth2 = factory.auth({ session, user: user2 });
const assetRepo = getRepository('asset');
const asset = mediumFactory.assetInsert({ ownerId: user2.id });
await assetRepo.create(asset);
await expect(testSync(auth2, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1);
await expect(testSync(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(0);
await assetRepo.remove(asset);
await expect(testSync(auth2, [SyncRequestType.AssetsV1])).resolves.toHaveLength(1);
await expect(testSync(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toHaveLength(0);
});
});
describe.concurrent(SyncRequestType.AssetExifsV1, () => {
  it('should detect and sync the first asset exif', async () => {
    const { auth, sut, getRepository, testSync } = await setup();

    // One asset owned by the authed user, with a single exif column populated.
    const assetRepo = getRepository('asset');
    const ownAsset = mediumFactory.assetInsert({ ownerId: auth.user.id });
    await assetRepo.create(ownAsset);
    await assetRepo.upsertExif({ assetId: ownAsset.id, make: 'Canon' });

    // Only assetId, description, and make carry values; every other column is unset.
    const expectedExif = {
      assetId: ownAsset.id,
      city: null,
      country: null,
      dateTimeOriginal: null,
      description: '',
      exifImageHeight: null,
      exifImageWidth: null,
      exposureTime: null,
      fNumber: null,
      fileSizeInByte: null,
      focalLength: null,
      fps: null,
      iso: null,
      latitude: null,
      lensModel: null,
      longitude: null,
      make: 'Canon',
      model: null,
      modifyDate: null,
      orientation: null,
      profileDescription: null,
      projectionType: null,
      rating: null,
      state: null,
      timeZone: null,
    };

    const firstSync = await testSync(auth, [SyncRequestType.AssetExifsV1]);
    expect(firstSync).toHaveLength(1);
    expect(firstSync).toEqual(
      expect.arrayContaining([
        { ack: expect.any(String), data: expectedExif, type: SyncEntityType.AssetExifV1 },
      ]),
    );

    // Acknowledging the checkpoint should drain the stream.
    await sut.setAcks(auth, { acks: [firstSync[0].ack] });
    await expect(testSync(auth, [SyncRequestType.AssetExifsV1])).resolves.toHaveLength(0);
  });

  it('should only sync asset exif for own user', async () => {
    const { auth, getRepository, testSync } = await setup();

    // Partner shares their library with the authed user.
    const partnerUser = mediumFactory.userInsert();
    await getRepository('user').create(partnerUser);
    await getRepository('partner').create({ sharedById: partnerUser.id, sharedWithId: auth.user.id });

    // The asset (and its exif) belongs to the partner, not the authed user.
    const assetRepo = getRepository('asset');
    const partnerAsset = mediumFactory.assetInsert({ ownerId: partnerUser.id });
    await assetRepo.create(partnerAsset);
    await assetRepo.upsertExif({ assetId: partnerAsset.id, make: 'Canon' });

    const partnerSession = mediumFactory.sessionInsert({ userId: partnerUser.id });
    await getRepository('session').create(partnerSession);
    const partnerAuth = factory.auth({ session: partnerSession, user: partnerUser });

    // The owner receives the exif via AssetExifsV1; the authed user does not.
    await expect(testSync(partnerAuth, [SyncRequestType.AssetExifsV1])).resolves.toHaveLength(1);
    await expect(testSync(auth, [SyncRequestType.AssetExifsV1])).resolves.toHaveLength(0);
  });
});
describe.concurrent(SyncRequestType.PartnerAssetExifsV1, () => {
  it('should detect and sync the first partner asset exif', async () => {
    const { auth, sut, getRepository, testSync } = await setup();

    // Partner shares their library with the authed user.
    const partnerUser = mediumFactory.userInsert();
    await getRepository('user').create(partnerUser);
    await getRepository('partner').create({ sharedById: partnerUser.id, sharedWithId: auth.user.id });

    // One partner-owned asset with a single exif column populated.
    const assetRepo = getRepository('asset');
    const partnerAsset = mediumFactory.assetInsert({ ownerId: partnerUser.id });
    await assetRepo.create(partnerAsset);
    await assetRepo.upsertExif({ assetId: partnerAsset.id, make: 'Canon' });

    // Only assetId, description, and make carry values; every other column is unset.
    const expectedExif = {
      assetId: partnerAsset.id,
      city: null,
      country: null,
      dateTimeOriginal: null,
      description: '',
      exifImageHeight: null,
      exifImageWidth: null,
      exposureTime: null,
      fNumber: null,
      fileSizeInByte: null,
      focalLength: null,
      fps: null,
      iso: null,
      latitude: null,
      lensModel: null,
      longitude: null,
      make: 'Canon',
      model: null,
      modifyDate: null,
      orientation: null,
      profileDescription: null,
      projectionType: null,
      rating: null,
      state: null,
      timeZone: null,
    };

    const firstSync = await testSync(auth, [SyncRequestType.PartnerAssetExifsV1]);
    expect(firstSync).toHaveLength(1);
    expect(firstSync).toEqual(
      expect.arrayContaining([
        { ack: expect.any(String), data: expectedExif, type: SyncEntityType.PartnerAssetExifV1 },
      ]),
    );

    // Acknowledging the checkpoint should drain the stream.
    await sut.setAcks(auth, { acks: [firstSync[0].ack] });
    await expect(testSync(auth, [SyncRequestType.PartnerAssetExifsV1])).resolves.toHaveLength(0);
  });

  it('should not sync partner asset exif for own user', async () => {
    const { auth, getRepository, testSync } = await setup();

    const partnerUser = mediumFactory.userInsert();
    await getRepository('user').create(partnerUser);
    await getRepository('partner').create({ sharedById: partnerUser.id, sharedWithId: auth.user.id });

    // The asset belongs to the authed user themselves, not the partner.
    const assetRepo = getRepository('asset');
    const ownAsset = mediumFactory.assetInsert({ ownerId: auth.user.id });
    await assetRepo.create(ownAsset);
    await assetRepo.upsertExif({ assetId: ownAsset.id, make: 'Canon' });

    // Own exif arrives via AssetExifsV1, never via the partner stream.
    await expect(testSync(auth, [SyncRequestType.AssetExifsV1])).resolves.toHaveLength(1);
    await expect(testSync(auth, [SyncRequestType.PartnerAssetExifsV1])).resolves.toHaveLength(0);
  });

  it('should not sync partner asset exif for unrelated user', async () => {
    const { auth, getRepository, testSync } = await setup();

    // One partner and one stranger who shares nothing with the authed user.
    const partnerUser = mediumFactory.userInsert();
    const strangerUser = mediumFactory.userInsert();
    const userRepo = getRepository('user');
    await Promise.all([userRepo.create(partnerUser), userRepo.create(strangerUser)]);
    await getRepository('partner').create({ sharedById: partnerUser.id, sharedWithId: auth.user.id });

    // The asset (and its exif) belongs to the stranger.
    const assetRepo = getRepository('asset');
    const strangerAsset = mediumFactory.assetInsert({ ownerId: strangerUser.id });
    await assetRepo.create(strangerAsset);
    await assetRepo.upsertExif({ assetId: strangerAsset.id, make: 'Canon' });

    const strangerSession = mediumFactory.sessionInsert({ userId: strangerUser.id });
    await getRepository('session').create(strangerSession);
    const strangerAuth = factory.auth({ session: strangerSession, user: strangerUser });

    // The stranger sees their own exif; the authed user's partner stream stays empty.
    await expect(testSync(strangerAuth, [SyncRequestType.AssetExifsV1])).resolves.toHaveLength(1);
    await expect(testSync(auth, [SyncRequestType.PartnerAssetExifsV1])).resolves.toHaveLength(0);
  });
});
});
@@ -0,0 +1,205 @@
import { Kysely } from 'kysely';
import { DateTime } from 'luxon';
import { DB } from 'src/db';
import { ImmichEnvironment, JobName, JobStatus } from 'src/enum';
import { UserService } from 'src/services/user.service';
import { mediumFactory, newMediumService } from 'test/medium.factory';
import { factory } from 'test/small.factory';
import { getKyselyDB } from 'test/utils';
describe(UserService.name, () => {
  // Single database shared by every test in this suite except the delete-check
  // tests, which create their own isolated instance (see createSut(db) below).
  let defaultDatabase: Kysely<DB>;

  // Builds a UserService wired to real user/crypto/config/systemMetadata
  // repositories and a mocked job queue.
  // NOTE(review): this also mutates process.env on every call — presumably to
  // force the testing config path; confirm it is meant to run per-test rather
  // than once in beforeAll.
  const createSut = (db?: Kysely<DB>) => {
    process.env.IMMICH_ENV = ImmichEnvironment.TESTING;
    return newMediumService(UserService, {
      database: db || defaultDatabase,
      repos: {
        user: 'real',
        crypto: 'real',
        config: 'real',
        job: 'mock',
        systemMetadata: 'real',
      },
    });
  };

  // Provision the shared database and seed a single admin user.
  beforeAll(async () => {
    defaultDatabase = await getKyselyDB();
    const { repos } = createSut();
    await repos.user.create({ isAdmin: true, email: 'admin@immich.cloud' });
  });

  describe('create', () => {
    it('should create a user', async () => {
      const { sut } = createSut();
      const user = mediumFactory.userInsert();
      await expect(sut.createUser({ name: user.name, email: user.email })).resolves.toEqual(
        expect.objectContaining({ name: user.name, email: user.email }),
      );
    });

    it('should reject user with duplicate email', async () => {
      const { sut } = createSut();
      const user = mediumFactory.userInsert();
      // First creation succeeds; a second user with the same email must be rejected.
      await expect(sut.createUser({ email: user.email })).resolves.toMatchObject({ email: user.email });
      await expect(sut.createUser({ email: user.email })).rejects.toThrow('User exists');
    });

    it('should not return password', async () => {
      const { sut } = createSut();
      const dto = mediumFactory.userInsert({ password: 'password' });
      const user = await sut.createUser({ email: dto.email, password: 'password' });
      // The response DTO must not leak the password field.
      expect((user as any).password).toBeUndefined();
    });
  });

  describe('search', () => {
    it('should get users', async () => {
      const { sut, repos } = createSut();
      const user1 = mediumFactory.userInsert();
      const user2 = mediumFactory.userInsert();
      await Promise.all([repos.user.create(user1), repos.user.create(user2)]);
      const auth = factory.auth({ user: user1 });
      // Other tests insert users into the same shared database, so only
      // assert containment rather than an exact result set.
      await expect(sut.search(auth)).resolves.toEqual(
        expect.arrayContaining([
          expect.objectContaining({ email: user1.email }),
          expect.objectContaining({ email: user2.email }),
        ]),
      );
    });
  });

  describe('get', () => {
    it('should get a user', async () => {
      const { sut, repos } = createSut();
      const user = mediumFactory.userInsert();
      await repos.user.create(user);
      await expect(sut.get(user.id)).resolves.toEqual(
        expect.objectContaining({
          id: user.id,
          name: user.name,
          email: user.email,
        }),
      );
    });

    it('should not return password', async () => {
      const { sut, repos } = createSut();
      const user = mediumFactory.userInsert();
      await repos.user.create(user);
      const result = await sut.get(user.id);
      // The lookup DTO must not leak the password field either.
      expect((result as any).password).toBeUndefined();
    });
  });

  describe('updateMe', () => {
    it('should update a user', async () => {
      const { sut, repos: repositories } = createSut();
      const before = await repositories.user.create(mediumFactory.userInsert());
      const auth = factory.auth({ user: { id: before.id } });
      const after = await sut.updateMe(auth, { name: `${before.name} Updated` });
      // Updating should bump the updatedAt timestamp.
      expect(before.updatedAt).toBeDefined();
      expect(after.updatedAt).toBeDefined();
      expect(before.updatedAt).not.toEqual(after.updatedAt);
    });
  });

  describe('setLicense', () => {
    it('should set a license', async () => {
      // Test-fixture license/activation key pair.
      const license = {
        licenseKey: 'IMCL-FF69-TUK1-RWZU-V9Q8-QGQS-S5GC-X4R2-UFK4',
        activationKey:
          'KuX8KsktrBSiXpQMAH0zLgA5SpijXVr_PDkzLdWUlAogCTMBZ0I3KCHXK0eE9EEd7harxup8_EHMeqAWeHo5VQzol6LGECpFv585U9asXD4Zc-UXt3mhJr2uhazqipBIBwJA2YhmUCDy8hiyiGsukDQNu9Rg9C77UeoKuZBWVjWUBWG0mc1iRqfvF0faVM20w53czAzlhaMxzVGc3Oimbd7xi_CAMSujF_2y8QpA3X2fOVkQkzdcH9lV0COejl7IyH27zQQ9HrlrXv3Lai5Hw67kNkaSjmunVBxC5PS0TpKoc9SfBJMaAGWnaDbjhjYUrm-8nIDQnoeEAidDXVAdPw',
      };
      const { sut, repos } = createSut();
      const user = mediumFactory.userInsert();
      await repos.user.create(user);
      const auth = factory.auth({ user: { id: user.id } });
      // No license yet — the getter should reject before one is set.
      await expect(sut.getLicense(auth)).rejects.toThrowError();
      const after = await sut.setLicense(auth, license);
      expect(after.licenseKey).toEqual(license.licenseKey);
      expect(after.activationKey).toEqual(license.activationKey);
      const getResponse = await sut.getLicense(auth);
      expect(getResponse).toEqual(after);
    });
  });

  describe.sequential('handleUserDeleteCheck', () => {
    beforeEach(async () => {
      const { sut } = createSut();
      // These tests specifically have to be sequential otherwise we hit race conditions with config changes applying in incorrect tests
      const config = await sut.getConfig({ withCache: false });
      config.user.deleteDelay = 7;
      await sut.updateConfig(config);
    });

    it('should work when there are no deleted users', async () => {
      const { sut, mocks } = createSut();
      mocks.job.queueAll.mockResolvedValue(void 0);
      await expect(sut.handleUserDeleteCheck()).resolves.toEqual(JobStatus.SUCCESS);
      expect(mocks.job.queueAll).toHaveBeenCalledExactlyOnceWith([]);
    });

    it('should work when there is a user to delete', async () => {
      // Fresh database so the exact queueAll payload can be asserted.
      const { sut, repos, mocks } = createSut(await getKyselyDB());
      mocks.job.queueAll.mockResolvedValue(void 0);
      // Deleted 60 days ago — well past the 7-day delay set in beforeEach.
      const user = mediumFactory.userInsert({ deletedAt: DateTime.now().minus({ days: 60 }).toJSDate() });
      await repos.user.create(user);
      await expect(sut.handleUserDeleteCheck()).resolves.toEqual(JobStatus.SUCCESS);
      expect(mocks.job.queueAll).toHaveBeenCalledExactlyOnceWith([
        { name: JobName.USER_DELETION, data: { id: user.id } },
      ]);
    });

    it('should skip a recently deleted user', async () => {
      const { sut, repos, mocks } = createSut(await getKyselyDB());
      mocks.job.queueAll.mockResolvedValue(void 0);
      // Deleted 5 days ago — still inside the 7-day delay window.
      const user = mediumFactory.userInsert({ deletedAt: DateTime.now().minus({ days: 5 }).toJSDate() });
      await repos.user.create(user);
      await expect(sut.handleUserDeleteCheck()).resolves.toEqual(JobStatus.SUCCESS);
      expect(mocks.job.queueAll).toHaveBeenCalledExactlyOnceWith([]);
    });

    it('should respect a custom user delete delay', async () => {
      const { sut, repos, mocks } = createSut(await getKyselyDB());
      mocks.job.queueAll.mockResolvedValue(void 0);
      // 25 days ago would qualify under the default delay, but not under 30 days.
      const user = mediumFactory.userInsert({ deletedAt: DateTime.now().minus({ days: 25 }).toJSDate() });
      await repos.user.create(user);
      const config = await sut.getConfig({ withCache: false });
      config.user.deleteDelay = 30;
      await sut.updateConfig(config);
      await expect(sut.handleUserDeleteCheck()).resolves.toEqual(JobStatus.SUCCESS);
      expect(mocks.job.queueAll).toHaveBeenCalledExactlyOnceWith([]);
    });
  });
});
@@ -0,0 +1,64 @@
import { Kysely } from 'kysely';
import { serverVersion } from 'src/constants';
import { DB } from 'src/db';
import { JobName } from 'src/enum';
import { VersionService } from 'src/services/version.service';
import { newMediumService } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
describe(VersionService.name, () => {
  let defaultDatabase: Kysely<DB>;

  // Builds a VersionService with real database/versionHistory repositories
  // and a mocked job queue, defaulting to the shared suite database.
  const setup = (db?: Kysely<DB>) => {
    return newMediumService(VersionService, {
      database: db ?? defaultDatabase,
      repos: {
        job: 'mock',
        database: 'real',
        versionHistory: 'real',
      },
    });
  };

  beforeAll(async () => {
    defaultDatabase = await getKyselyDB();
  });

  describe('onBootstrap', () => {
    it('record the current version on startup', async () => {
      const { sut, repos } = setup();

      // The history table starts empty on a fresh database.
      await expect(repos.versionHistory.getAll()).resolves.toHaveLength(0);

      await sut.onBootstrap();

      // Bootstrap records exactly one entry for the running server version.
      const history = await repos.versionHistory.getAll();
      expect(history).toHaveLength(1);
      expect(history[0]).toEqual({
        createdAt: expect.any(Date),
        id: expect.any(String),
        version: serverVersion.toString(),
      });
    });

    it('should queue memory creation when upgrading from 1.128.0', async () => {
      const { sut, repos, mocks } = setup();
      mocks.job.queue.mockResolvedValue(void 0);

      await repos.versionHistory.create({ version: 'v1.128.0' });
      await sut.onBootstrap();

      expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.MEMORIES_CREATE });
    });

    it('should not queue memory creation when upgrading from 1.129.0', async () => {
      const { sut, repos, mocks } = setup();

      await repos.versionHistory.create({ version: 'v1.129.0' });
      await sut.onBootstrap();

      expect(mocks.job.queue).not.toHaveBeenCalled();
    });
  });
});