Compare commits

...

8 Commits

Author SHA1 Message Date
shenlong-tanwen
023d2195f9 feat: inline storage columns in remote and local tables 2025-07-11 01:37:02 +05:30
mertalev
c482bdfae7 thumbhash render box 2025-07-10 16:41:53 +03:00
mertalev
0dadfc52dd thumbhash improvements 2025-07-08 16:20:11 +03:00
Jason Rasmussen
df4a27e8a7 feat: sql-tools overrides (#19796) 2025-07-08 08:17:40 -04:00
renovate[bot]
1f9813a28e chore(deps): update github/codeql-action action to v3.29.2 (#19806)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-08 12:26:07 +01:00
renovate[bot]
bbfff45058 chore(deps): update redis:6.2-alpine docker digest to 03fd052 (#19804)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-08 12:18:58 +02:00
Alex
87dd09d103 feat: selection mode timeline (#19734)
* feat: new page

* force multi-selection state

* fix: provider scoping

* Return selected assets

* lint

* lint

* simplify provider scope and drop prop drilling

* selection styling
2025-07-08 10:11:37 +05:30
Alex
dd94ad17aa fix: scrubber scroll error when page is not long enough (#19809) 2025-07-07 23:30:47 -05:00
153 changed files with 1960 additions and 724 deletions

View File

@@ -50,7 +50,7 @@ jobs:
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
-uses: github/codeql-action/init@ce28f5bb42b7a9f2c824e633a3f6ee835bab6858 # v3.29.0
+uses: github/codeql-action/init@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@@ -63,7 +63,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
-uses: github/codeql-action/autobuild@ce28f5bb42b7a9f2c824e633a3f6ee835bab6858 # v3.29.0
+uses: github/codeql-action/autobuild@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@@ -76,6 +76,6 @@ jobs:
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
-uses: github/codeql-action/analyze@ce28f5bb42b7a9f2c824e633a3f6ee835bab6858 # v3.29.0
+uses: github/codeql-action/analyze@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
with:
category: '/language:${{matrix.language}}'

View File

@@ -130,7 +130,7 @@ jobs:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Upload SARIF file
-uses: github/codeql-action/upload-sarif@ce28f5bb42b7a9f2c824e633a3f6ee835bab6858 # v3.29.0
+uses: github/codeql-action/upload-sarif@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
with:
sarif_file: results.sarif
category: zizmor

View File

@@ -36,7 +36,7 @@ services:
- 2285:2285
redis:
-image: redis:6.2-alpine@sha256:3211c33a618c457e5d241922c975dbc4f446d0bdb2dc75694f5573ef8e2d01fa
+image: redis:6.2-alpine@sha256:03fd052257735b41cd19f3d8ae9782926bf9b704fb6a9dc5e29f9ccfbe8827f0
database:
image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0@sha256:3aef84a0a4fabbda17ef115c3019ba0c914ec73e9f6e59203674322d858b8eea

File diff suppressed because one or more lines are too long

View File

@@ -18,7 +18,7 @@ const String kSecuredPinCode = "secured_pin_code";
// Timeline constants
const int kTimelineNoneSegmentSize = 120;
-const int kTimelineAssetLoadBatchSize = 256;
+const int kTimelineAssetLoadBatchSize = 1024;
const int kTimelineAssetLoadOppositeSize = 64;
// Widget keys

View File

@@ -42,16 +42,29 @@ class TimelineFactory {
TimelineService localAlbum({required String albumId}) => TimelineService(
assetSource: (offset, count) => _timelineRepository
-.getLocalBucketAssets(albumId, offset: offset, count: count),
-bucketSource: () =>
-_timelineRepository.watchLocalBucket(albumId, groupBy: groupBy),
+.getLocalAlbumBucketAssets(albumId, offset: offset, count: count),
+bucketSource: () => _timelineRepository.watchLocalAlbumBucket(
+albumId,
+groupBy: groupBy,
+),
);
TimelineService remoteAlbum({required String albumId}) => TimelineService(
assetSource: (offset, count) => _timelineRepository
-.getRemoteBucketAssets(albumId, offset: offset, count: count),
-bucketSource: () =>
-_timelineRepository.watchRemoteBucket(albumId, groupBy: groupBy),
+.getRemoteAlbumBucketAssets(albumId, offset: offset, count: count),
+bucketSource: () => _timelineRepository.watchRemoteAlbumBucket(
+albumId,
+groupBy: groupBy,
+),
);
TimelineService remoteAssets(List<String> timelineUsers) => TimelineService(
assetSource: (offset, count) => _timelineRepository
.getRemoteBucketAssets(timelineUsers, offset: offset, count: count),
bucketSource: () => _timelineRepository.watchRemoteBucket(
timelineUsers,
groupBy: GroupAssetsBy.month,
),
);
TimelineService favorite(String userId) => TimelineService(
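
Note: every timeline flavor above wires TimelineService with the same two
callbacks, a paged asset loader (assetSource) and a reactive bucket stream
(bucketSource). Below is a standalone sketch of that contract; the names and
generics are illustrative assumptions, not the real TimelineService API:

typedef AssetSource<A> = Future<List<A>> Function(int offset, int count);
typedef BucketSource<B> = Stream<List<B>> Function();

class TimelineSourceSketch<A, B> {
  TimelineSourceSketch({required this.assets, required this.buckets});
  final AssetSource<A> assets; // paged loads for the visible range
  final BucketSource<B> buckets; // reactive day/month bucket counts
}

Future<void> main() async {
  final source = TimelineSourceSketch<String, int>(
    assets: (offset, count) async =>
        List.generate(count, (i) => 'asset-${offset + i}'),
    buckets: () => Stream.value(const [3, 5, 2]),
  );
  print(await source.assets(0, 2)); // [asset-0, asset-1]
  print(await source.buckets().first); // [3, 5, 2]
}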

View File

@@ -0,0 +1,58 @@
import 'remote_asset.entity.dart';
import 'local_asset.entity.dart';
-- TRIGGERS ON local_asset_entity
-- Find and update the remote_id in local_asset_entity and local_id in remote_asset_entity when checksum is set
CREATE TRIGGER IF NOT EXISTS tr_local_asset_update_checksum_set_ids
AFTER UPDATE OF checksum ON local_asset_entity
FOR EACH ROW
WHEN NEW.checksum IS NOT NULL
BEGIN
UPDATE local_asset_entity
SET remote_id = (SELECT id FROM remote_asset_entity WHERE checksum = NEW.checksum LIMIT 1)
WHERE id = NEW.id;
UPDATE remote_asset_entity
SET local_id = (SELECT id FROM local_asset_entity WHERE checksum = NEW.checksum ORDER BY id ASC LIMIT 1)
WHERE checksum = NEW.checksum;
END;
-- When a local asset is updated, relink remote assets that had a checksum match
CREATE TRIGGER IF NOT EXISTS tr_local_asset_update_old_checksum_set_remote_asset_local_id
AFTER UPDATE OF checksum ON local_asset_entity
FOR EACH ROW
WHEN OLD.checksum IS NOT NULL
BEGIN
UPDATE remote_asset_entity
SET local_id = (SELECT id FROM local_asset_entity WHERE checksum = OLD.checksum ORDER BY id ASC LIMIT 1)
WHERE checksum = OLD.checksum;
END;
-- remote_asset_entity.checksum is a 1..* relationship with local_asset_entity.checksum.
-- When a local asset is deleted, update remote assets that had a checksum match
-- to ensure their local_id is set to the first matching local asset or NULL
CREATE TRIGGER IF NOT EXISTS tr_local_asset_delete_update_remote_asset_local_id
AFTER DELETE ON local_asset_entity
FOR EACH ROW
WHEN OLD.checksum IS NOT NULL
BEGIN
UPDATE remote_asset_entity
SET local_id = (SELECT id FROM local_asset_entity WHERE checksum = OLD.checksum ORDER BY id ASC LIMIT 1)
WHERE checksum = OLD.checksum;
END;
-- TRIGGERS ON remote_asset_entity
-- Find and update local_id in remote_asset_entity when a new remote asset is inserted
CREATE TRIGGER IF NOT EXISTS tr_remote_asset_insert_set_local_id
AFTER INSERT ON remote_asset_entity
FOR EACH ROW
BEGIN
UPDATE remote_asset_entity
SET local_id = (SELECT id FROM local_asset_entity WHERE checksum = NEW.checksum ORDER BY id ASC LIMIT 1)
WHERE id = NEW.id;
UPDATE local_asset_entity SET remote_id = NEW.id WHERE checksum = NEW.checksum;
END;
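
To see the linking behavior end to end, here is a minimal sketch that replays
the insert trigger against a stripped-down two-column schema using
package:sqlite3. The tables are simplified stand-ins for the real entities,
keeping only the columns the trigger touches:

import 'package:sqlite3/sqlite3.dart';

void main() {
  final db = sqlite3.openInMemory();
  // Toy versions of the two tables above.
  db.execute('''
    CREATE TABLE local_asset_entity (id TEXT PRIMARY KEY, checksum TEXT, remote_id TEXT);
    CREATE TABLE remote_asset_entity (id TEXT PRIMARY KEY, checksum TEXT, local_id TEXT);
  ''');
  // Same shape as tr_remote_asset_insert_set_local_id from the diff.
  db.execute('''
    CREATE TRIGGER tr_remote_asset_insert_set_local_id
    AFTER INSERT ON remote_asset_entity
    FOR EACH ROW
    BEGIN
      UPDATE remote_asset_entity
      SET local_id = (SELECT id FROM local_asset_entity
                      WHERE checksum = NEW.checksum ORDER BY id ASC LIMIT 1)
      WHERE id = NEW.id;
      UPDATE local_asset_entity SET remote_id = NEW.id WHERE checksum = NEW.checksum;
    END;
  ''');
  db.execute("INSERT INTO local_asset_entity (id, checksum) VALUES ('l1', 'abc')");
  db.execute("INSERT INTO remote_asset_entity (id, checksum) VALUES ('r1', 'abc')");
  // Both directions are now linked by the trigger; no application-level join.
  print(db.select('SELECT id, remote_id FROM local_asset_entity').first); // l1 -> r1
  print(db.select('SELECT id, local_id FROM remote_asset_entity').first); // r1 -> l1
  db.dispose();
}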

View File

@@ -0,0 +1,16 @@
// dart format width=80
// ignore_for_file: type=lint
import 'package:drift/drift.dart' as i0;
i0.Trigger get trLocalAssetUpdateChecksumSetIds => i0.Trigger(
'CREATE TRIGGER IF NOT EXISTS tr_local_asset_update_checksum_set_ids AFTER UPDATE OF checksum ON local_asset_entity WHEN NEW.checksum IS NOT NULL BEGIN UPDATE local_asset_entity SET remote_id = (SELECT id FROM remote_asset_entity WHERE checksum = NEW.checksum LIMIT 1) WHERE id = NEW.id;UPDATE remote_asset_entity SET local_id = (SELECT id FROM local_asset_entity WHERE checksum = NEW.checksum ORDER BY id ASC LIMIT 1) WHERE checksum = NEW.checksum;END',
'tr_local_asset_update_checksum_set_ids');
i0.Trigger get trLocalAssetUpdateOldChecksumSetRemoteAssetLocalId => i0.Trigger(
'CREATE TRIGGER IF NOT EXISTS tr_local_asset_update_old_checksum_set_remote_asset_local_id AFTER UPDATE OF checksum ON local_asset_entity WHEN OLD.checksum IS NOT NULL BEGIN UPDATE remote_asset_entity SET local_id = (SELECT id FROM local_asset_entity WHERE checksum = OLD.checksum ORDER BY id ASC LIMIT 1) WHERE checksum = OLD.checksum;END',
'tr_local_asset_update_old_checksum_set_remote_asset_local_id');
i0.Trigger get trLocalAssetDeleteUpdateRemoteAssetLocalId => i0.Trigger(
'CREATE TRIGGER IF NOT EXISTS tr_local_asset_delete_update_remote_asset_local_id AFTER DELETE ON local_asset_entity WHEN OLD.checksum IS NOT NULL BEGIN UPDATE remote_asset_entity SET local_id = (SELECT id FROM local_asset_entity WHERE checksum = OLD.checksum ORDER BY id ASC LIMIT 1) WHERE checksum = OLD.checksum;END',
'tr_local_asset_delete_update_remote_asset_local_id');
i0.Trigger get trRemoteAssetInsertSetLocalId => i0.Trigger(
'CREATE TRIGGER IF NOT EXISTS tr_remote_asset_insert_set_local_id AFTER INSERT ON remote_asset_entity BEGIN UPDATE remote_asset_entity SET local_id = (SELECT id FROM local_asset_entity WHERE checksum = NEW.checksum ORDER BY id ASC LIMIT 1) WHERE id = NEW.id;UPDATE local_asset_entity SET remote_id = NEW.id WHERE checksum = NEW.checksum;END',
'tr_remote_asset_insert_set_local_id');

View File

@@ -1,6 +1,7 @@
import 'package:drift/drift.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/infrastructure/entities/local_asset.entity.drift.dart';
import 'package:immich_mobile/infrastructure/entities/remote_asset.entity.dart';
import 'package:immich_mobile/infrastructure/utils/asset.mixin.dart';
import 'package:immich_mobile/infrastructure/utils/drift_default.mixin.dart';
@@ -9,6 +10,11 @@ class LocalAssetEntity extends Table with DriftDefaultsMixin, AssetEntityMixin {
const LocalAssetEntity();
TextColumn get id => text()();
TextColumn get remoteId => text()
.nullable()
.references(RemoteAssetEntity, #id, onDelete: KeyAction.setNull)();
TextColumn get checksum => text().nullable()();
// Only used during backup to mirror the favorite status of the asset in the server
@@ -30,6 +36,6 @@ extension LocalAssetEntityDataDomainEx on LocalAssetEntityData {
isFavorite: isFavorite,
height: height,
width: width,
-remoteId: null,
+remoteId: remoteId,
);
}
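
Worth noting for the new remoteId reference: SQLite only enforces
REFERENCES ... ON DELETE SET NULL when the foreign_keys pragma is on, and
drift does not enable it by default. A minimal sketch of the usual place to
turn it on (ExampleDb is a hypothetical class, not the app's real database):

import 'package:drift/drift.dart';

abstract class ExampleDb extends GeneratedDatabase {
  ExampleDb(super.e);

  @override
  MigrationStrategy get migration => MigrationStrategy(
        beforeOpen: (details) async {
          // Without this, ON DELETE SET NULL actions are not enforced.
          await customStatement('PRAGMA foreign_keys = ON');
        },
      );
}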

View File

@@ -7,6 +7,9 @@ import 'package:immich_mobile/domain/models/asset/base_asset.model.dart' as i2;
import 'package:immich_mobile/infrastructure/entities/local_asset.entity.dart'
as i3;
import 'package:drift/src/runtime/query_builder/query_builder.dart' as i4;
import 'package:immich_mobile/infrastructure/entities/remote_asset.entity.drift.dart'
as i5;
import 'package:drift/internal/modular.dart' as i6;
typedef $$LocalAssetEntityTableCreateCompanionBuilder
= i1.LocalAssetEntityCompanion Function({
@@ -18,6 +21,7 @@ typedef $$LocalAssetEntityTableCreateCompanionBuilder
i0.Value<int?> height,
i0.Value<int?> durationInSeconds,
required String id,
i0.Value<String?> remoteId,
i0.Value<String?> checksum,
i0.Value<bool> isFavorite,
});
@@ -31,10 +35,43 @@ typedef $$LocalAssetEntityTableUpdateCompanionBuilder
i0.Value<int?> height,
i0.Value<int?> durationInSeconds,
i0.Value<String> id,
i0.Value<String?> remoteId,
i0.Value<String?> checksum,
i0.Value<bool> isFavorite,
});
final class $$LocalAssetEntityTableReferences extends i0.BaseReferences<
i0.GeneratedDatabase, i1.$LocalAssetEntityTable, i1.LocalAssetEntityData> {
$$LocalAssetEntityTableReferences(
super.$_db, super.$_table, super.$_typedResult);
static i5.$RemoteAssetEntityTable _remoteIdTable(i0.GeneratedDatabase db) =>
i6.ReadDatabaseContainer(db)
.resultSet<i5.$RemoteAssetEntityTable>('remote_asset_entity')
.createAlias(i0.$_aliasNameGenerator(
i6.ReadDatabaseContainer(db)
.resultSet<i1.$LocalAssetEntityTable>('local_asset_entity')
.remoteId,
i6.ReadDatabaseContainer(db)
.resultSet<i5.$RemoteAssetEntityTable>('remote_asset_entity')
.id));
i5.$$RemoteAssetEntityTableProcessedTableManager? get remoteId {
final $_column = $_itemColumn<String>('remote_id');
if ($_column == null) return null;
final manager = i5
.$$RemoteAssetEntityTableTableManager(
$_db,
i6.ReadDatabaseContainer($_db)
.resultSet<i5.$RemoteAssetEntityTable>('remote_asset_entity'))
.filter((f) => f.id.sqlEquals($_column));
final item = $_typedResult.readTableOrNull(_remoteIdTable($_db));
if (item == null) return manager;
return i0.ProcessedTableManager(
manager.$state.copyWith(prefetchedData: [item]));
}
}
class $$LocalAssetEntityTableFilterComposer
extends i0.Composer<i0.GeneratedDatabase, i1.$LocalAssetEntityTable> {
$$LocalAssetEntityTableFilterComposer({
@@ -76,6 +113,28 @@ class $$LocalAssetEntityTableFilterComposer
i0.ColumnFilters<bool> get isFavorite => $composableBuilder(
column: $table.isFavorite, builder: (column) => i0.ColumnFilters(column));
i5.$$RemoteAssetEntityTableFilterComposer get remoteId {
final i5.$$RemoteAssetEntityTableFilterComposer composer = $composerBuilder(
composer: this,
getCurrentColumn: (t) => t.remoteId,
referencedTable: i6.ReadDatabaseContainer($db)
.resultSet<i5.$RemoteAssetEntityTable>('remote_asset_entity'),
getReferencedColumn: (t) => t.id,
builder: (joinBuilder,
{$addJoinBuilderToRootComposer,
$removeJoinBuilderFromRootComposer}) =>
i5.$$RemoteAssetEntityTableFilterComposer(
$db: $db,
$table: i6.ReadDatabaseContainer($db)
.resultSet<i5.$RemoteAssetEntityTable>('remote_asset_entity'),
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
joinBuilder: joinBuilder,
$removeJoinBuilderFromRootComposer:
$removeJoinBuilderFromRootComposer,
));
return composer;
}
}
class $$LocalAssetEntityTableOrderingComposer
@@ -120,6 +179,30 @@ class $$LocalAssetEntityTableOrderingComposer
i0.ColumnOrderings<bool> get isFavorite => $composableBuilder(
column: $table.isFavorite,
builder: (column) => i0.ColumnOrderings(column));
i5.$$RemoteAssetEntityTableOrderingComposer get remoteId {
final i5.$$RemoteAssetEntityTableOrderingComposer composer =
$composerBuilder(
composer: this,
getCurrentColumn: (t) => t.remoteId,
referencedTable: i6.ReadDatabaseContainer($db)
.resultSet<i5.$RemoteAssetEntityTable>('remote_asset_entity'),
getReferencedColumn: (t) => t.id,
builder: (joinBuilder,
{$addJoinBuilderToRootComposer,
$removeJoinBuilderFromRootComposer}) =>
i5.$$RemoteAssetEntityTableOrderingComposer(
$db: $db,
$table: i6.ReadDatabaseContainer($db)
.resultSet<i5.$RemoteAssetEntityTable>(
'remote_asset_entity'),
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
joinBuilder: joinBuilder,
$removeJoinBuilderFromRootComposer:
$removeJoinBuilderFromRootComposer,
));
return composer;
}
}
class $$LocalAssetEntityTableAnnotationComposer
@@ -160,6 +243,30 @@ class $$LocalAssetEntityTableAnnotationComposer
i0.GeneratedColumn<bool> get isFavorite => $composableBuilder(
column: $table.isFavorite, builder: (column) => column);
i5.$$RemoteAssetEntityTableAnnotationComposer get remoteId {
final i5.$$RemoteAssetEntityTableAnnotationComposer composer =
$composerBuilder(
composer: this,
getCurrentColumn: (t) => t.remoteId,
referencedTable: i6.ReadDatabaseContainer($db)
.resultSet<i5.$RemoteAssetEntityTable>('remote_asset_entity'),
getReferencedColumn: (t) => t.id,
builder: (joinBuilder,
{$addJoinBuilderToRootComposer,
$removeJoinBuilderFromRootComposer}) =>
i5.$$RemoteAssetEntityTableAnnotationComposer(
$db: $db,
$table: i6.ReadDatabaseContainer($db)
.resultSet<i5.$RemoteAssetEntityTable>(
'remote_asset_entity'),
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
joinBuilder: joinBuilder,
$removeJoinBuilderFromRootComposer:
$removeJoinBuilderFromRootComposer,
));
return composer;
}
}
class $$LocalAssetEntityTableTableManager extends i0.RootTableManager<
@@ -171,13 +278,9 @@ class $$LocalAssetEntityTableTableManager extends i0.RootTableManager<
i1.$$LocalAssetEntityTableAnnotationComposer,
$$LocalAssetEntityTableCreateCompanionBuilder,
$$LocalAssetEntityTableUpdateCompanionBuilder,
-(
-i1.LocalAssetEntityData,
-i0.BaseReferences<i0.GeneratedDatabase, i1.$LocalAssetEntityTable,
-i1.LocalAssetEntityData>
-),
+(i1.LocalAssetEntityData, i1.$$LocalAssetEntityTableReferences),
i1.LocalAssetEntityData,
-i0.PrefetchHooks Function()> {
+i0.PrefetchHooks Function({bool remoteId})> {
$$LocalAssetEntityTableTableManager(
i0.GeneratedDatabase db, i1.$LocalAssetEntityTable table)
: super(i0.TableManagerState(
@@ -199,6 +302,7 @@ class $$LocalAssetEntityTableTableManager extends i0.RootTableManager<
i0.Value<int?> height = const i0.Value.absent(),
i0.Value<int?> durationInSeconds = const i0.Value.absent(),
i0.Value<String> id = const i0.Value.absent(),
i0.Value<String?> remoteId = const i0.Value.absent(),
i0.Value<String?> checksum = const i0.Value.absent(),
i0.Value<bool> isFavorite = const i0.Value.absent(),
}) =>
@@ -211,6 +315,7 @@ class $$LocalAssetEntityTableTableManager extends i0.RootTableManager<
height: height,
durationInSeconds: durationInSeconds,
id: id,
remoteId: remoteId,
checksum: checksum,
isFavorite: isFavorite,
),
@@ -223,6 +328,7 @@ class $$LocalAssetEntityTableTableManager extends i0.RootTableManager<
i0.Value<int?> height = const i0.Value.absent(),
i0.Value<int?> durationInSeconds = const i0.Value.absent(),
required String id,
i0.Value<String?> remoteId = const i0.Value.absent(),
i0.Value<String?> checksum = const i0.Value.absent(),
i0.Value<bool> isFavorite = const i0.Value.absent(),
}) =>
@@ -235,13 +341,52 @@ class $$LocalAssetEntityTableTableManager extends i0.RootTableManager<
height: height,
durationInSeconds: durationInSeconds,
id: id,
remoteId: remoteId,
checksum: checksum,
isFavorite: isFavorite,
),
withReferenceMapper: (p0) => p0
-.map((e) => (e.readTable(table), i0.BaseReferences(db, table, e)))
+.map((e) => (
+e.readTable(table),
+i1.$$LocalAssetEntityTableReferences(db, table, e)
+))
.toList(),
-prefetchHooksCallback: null,
+prefetchHooksCallback: ({remoteId = false}) {
+return i0.PrefetchHooks(
+db: db,
+explicitlyWatchedTables: [],
+addJoins: <
+T extends i0.TableManagerState<
+dynamic,
+dynamic,
+dynamic,
+dynamic,
+dynamic,
+dynamic,
+dynamic,
+dynamic,
+dynamic,
+dynamic,
+dynamic>>(state) {
+if (remoteId) {
+state = state.withJoin(
+currentTable: table,
+currentColumn: table.remoteId,
+referencedTable:
+i1.$$LocalAssetEntityTableReferences._remoteIdTable(db),
+referencedColumn: i1.$$LocalAssetEntityTableReferences
+._remoteIdTable(db)
+.id,
+) as T;
+}
+return state;
+},
+getPrefetchedDataCallback: (items) async {
+return [];
+},
+);
+},
));
}
@@ -254,13 +399,9 @@ typedef $$LocalAssetEntityTableProcessedTableManager = i0.ProcessedTableManager<
i1.$$LocalAssetEntityTableAnnotationComposer,
$$LocalAssetEntityTableCreateCompanionBuilder,
$$LocalAssetEntityTableUpdateCompanionBuilder,
-(
-i1.LocalAssetEntityData,
-i0.BaseReferences<i0.GeneratedDatabase, i1.$LocalAssetEntityTable,
-i1.LocalAssetEntityData>
-),
+(i1.LocalAssetEntityData, i1.$$LocalAssetEntityTableReferences),
i1.LocalAssetEntityData,
-i0.PrefetchHooks Function()>;
+i0.PrefetchHooks Function({bool remoteId})>;
i0.Index get idxLocalAssetChecksum => i0.Index('idx_local_asset_checksum',
'CREATE INDEX idx_local_asset_checksum ON local_asset_entity (checksum)');
@@ -321,6 +462,15 @@ class $LocalAssetEntityTable extends i3.LocalAssetEntity
late final i0.GeneratedColumn<String> id = i0.GeneratedColumn<String>(
'id', aliasedName, false,
type: i0.DriftSqlType.string, requiredDuringInsert: true);
static const i0.VerificationMeta _remoteIdMeta =
const i0.VerificationMeta('remoteId');
@override
late final i0.GeneratedColumn<String> remoteId = i0.GeneratedColumn<String>(
'remote_id', aliasedName, true,
type: i0.DriftSqlType.string,
requiredDuringInsert: false,
defaultConstraints: i0.GeneratedColumn.constraintIsAlways(
'REFERENCES remote_asset_entity (id) ON DELETE SET NULL'));
static const i0.VerificationMeta _checksumMeta =
const i0.VerificationMeta('checksum');
@override
@@ -347,6 +497,7 @@ class $LocalAssetEntityTable extends i3.LocalAssetEntity
height,
durationInSeconds,
id,
remoteId,
checksum,
isFavorite
];
@@ -394,6 +545,10 @@ class $LocalAssetEntityTable extends i3.LocalAssetEntity
} else if (isInserting) {
context.missing(_idMeta);
}
if (data.containsKey('remote_id')) {
context.handle(_remoteIdMeta,
remoteId.isAcceptableOrUnknown(data['remote_id']!, _remoteIdMeta));
}
if (data.containsKey('checksum')) {
context.handle(_checksumMeta,
checksum.isAcceptableOrUnknown(data['checksum']!, _checksumMeta));
@@ -431,6 +586,8 @@ class $LocalAssetEntityTable extends i3.LocalAssetEntity
i0.DriftSqlType.int, data['${effectivePrefix}duration_in_seconds']),
id: attachedDatabase.typeMapping
.read(i0.DriftSqlType.string, data['${effectivePrefix}id'])!,
remoteId: attachedDatabase.typeMapping
.read(i0.DriftSqlType.string, data['${effectivePrefix}remote_id']),
checksum: attachedDatabase.typeMapping
.read(i0.DriftSqlType.string, data['${effectivePrefix}checksum']),
isFavorite: attachedDatabase.typeMapping
@@ -461,6 +618,7 @@ class LocalAssetEntityData extends i0.DataClass
final int? height;
final int? durationInSeconds;
final String id;
final String? remoteId;
final String? checksum;
final bool isFavorite;
const LocalAssetEntityData(
@@ -472,6 +630,7 @@ class LocalAssetEntityData extends i0.DataClass
this.height,
this.durationInSeconds,
required this.id,
this.remoteId,
this.checksum,
required this.isFavorite});
@override
@@ -494,6 +653,9 @@ class LocalAssetEntityData extends i0.DataClass
map['duration_in_seconds'] = i0.Variable<int>(durationInSeconds);
}
map['id'] = i0.Variable<String>(id);
if (!nullToAbsent || remoteId != null) {
map['remote_id'] = i0.Variable<String>(remoteId);
}
if (!nullToAbsent || checksum != null) {
map['checksum'] = i0.Variable<String>(checksum);
}
@@ -514,6 +676,7 @@ class LocalAssetEntityData extends i0.DataClass
height: serializer.fromJson<int?>(json['height']),
durationInSeconds: serializer.fromJson<int?>(json['durationInSeconds']),
id: serializer.fromJson<String>(json['id']),
remoteId: serializer.fromJson<String?>(json['remoteId']),
checksum: serializer.fromJson<String?>(json['checksum']),
isFavorite: serializer.fromJson<bool>(json['isFavorite']),
);
@@ -531,6 +694,7 @@ class LocalAssetEntityData extends i0.DataClass
'height': serializer.toJson<int?>(height),
'durationInSeconds': serializer.toJson<int?>(durationInSeconds),
'id': serializer.toJson<String>(id),
'remoteId': serializer.toJson<String?>(remoteId),
'checksum': serializer.toJson<String?>(checksum),
'isFavorite': serializer.toJson<bool>(isFavorite),
};
@@ -545,6 +709,7 @@ class LocalAssetEntityData extends i0.DataClass
i0.Value<int?> height = const i0.Value.absent(),
i0.Value<int?> durationInSeconds = const i0.Value.absent(),
String? id,
i0.Value<String?> remoteId = const i0.Value.absent(),
i0.Value<String?> checksum = const i0.Value.absent(),
bool? isFavorite}) =>
i1.LocalAssetEntityData(
@@ -558,6 +723,7 @@ class LocalAssetEntityData extends i0.DataClass
? durationInSeconds.value
: this.durationInSeconds,
id: id ?? this.id,
remoteId: remoteId.present ? remoteId.value : this.remoteId,
checksum: checksum.present ? checksum.value : this.checksum,
isFavorite: isFavorite ?? this.isFavorite,
);
@@ -573,6 +739,7 @@ class LocalAssetEntityData extends i0.DataClass
? data.durationInSeconds.value
: this.durationInSeconds,
id: data.id.present ? data.id.value : this.id,
remoteId: data.remoteId.present ? data.remoteId.value : this.remoteId,
checksum: data.checksum.present ? data.checksum.value : this.checksum,
isFavorite:
data.isFavorite.present ? data.isFavorite.value : this.isFavorite,
@@ -590,6 +757,7 @@ class LocalAssetEntityData extends i0.DataClass
..write('height: $height, ')
..write('durationInSeconds: $durationInSeconds, ')
..write('id: $id, ')
..write('remoteId: $remoteId, ')
..write('checksum: $checksum, ')
..write('isFavorite: $isFavorite')
..write(')'))
@@ -598,7 +766,7 @@ class LocalAssetEntityData extends i0.DataClass
@override
int get hashCode => Object.hash(name, type, createdAt, updatedAt, width,
-height, durationInSeconds, id, checksum, isFavorite);
+height, durationInSeconds, id, remoteId, checksum, isFavorite);
@override
bool operator ==(Object other) =>
identical(this, other) ||
@@ -611,6 +779,7 @@ class LocalAssetEntityData extends i0.DataClass
other.height == this.height &&
other.durationInSeconds == this.durationInSeconds &&
other.id == this.id &&
other.remoteId == this.remoteId &&
other.checksum == this.checksum &&
other.isFavorite == this.isFavorite);
}
@@ -625,6 +794,7 @@ class LocalAssetEntityCompanion
final i0.Value<int?> height;
final i0.Value<int?> durationInSeconds;
final i0.Value<String> id;
final i0.Value<String?> remoteId;
final i0.Value<String?> checksum;
final i0.Value<bool> isFavorite;
const LocalAssetEntityCompanion({
@@ -636,6 +806,7 @@ class LocalAssetEntityCompanion
this.height = const i0.Value.absent(),
this.durationInSeconds = const i0.Value.absent(),
this.id = const i0.Value.absent(),
this.remoteId = const i0.Value.absent(),
this.checksum = const i0.Value.absent(),
this.isFavorite = const i0.Value.absent(),
});
@@ -648,6 +819,7 @@ class LocalAssetEntityCompanion
this.height = const i0.Value.absent(),
this.durationInSeconds = const i0.Value.absent(),
required String id,
this.remoteId = const i0.Value.absent(),
this.checksum = const i0.Value.absent(),
this.isFavorite = const i0.Value.absent(),
}) : name = i0.Value(name),
@@ -662,6 +834,7 @@ class LocalAssetEntityCompanion
i0.Expression<int>? height,
i0.Expression<int>? durationInSeconds,
i0.Expression<String>? id,
i0.Expression<String>? remoteId,
i0.Expression<String>? checksum,
i0.Expression<bool>? isFavorite,
}) {
@@ -674,6 +847,7 @@ class LocalAssetEntityCompanion
if (height != null) 'height': height,
if (durationInSeconds != null) 'duration_in_seconds': durationInSeconds,
if (id != null) 'id': id,
if (remoteId != null) 'remote_id': remoteId,
if (checksum != null) 'checksum': checksum,
if (isFavorite != null) 'is_favorite': isFavorite,
});
@@ -688,6 +862,7 @@ class LocalAssetEntityCompanion
i0.Value<int?>? height,
i0.Value<int?>? durationInSeconds,
i0.Value<String>? id,
i0.Value<String?>? remoteId,
i0.Value<String?>? checksum,
i0.Value<bool>? isFavorite}) {
return i1.LocalAssetEntityCompanion(
@@ -699,6 +874,7 @@ class LocalAssetEntityCompanion
height: height ?? this.height,
durationInSeconds: durationInSeconds ?? this.durationInSeconds,
id: id ?? this.id,
remoteId: remoteId ?? this.remoteId,
checksum: checksum ?? this.checksum,
isFavorite: isFavorite ?? this.isFavorite,
);
@@ -732,6 +908,9 @@ class LocalAssetEntityCompanion
if (id.present) {
map['id'] = i0.Variable<String>(id.value);
}
if (remoteId.present) {
map['remote_id'] = i0.Variable<String>(remoteId.value);
}
if (checksum.present) {
map['checksum'] = i0.Variable<String>(checksum.value);
}
@@ -752,6 +931,7 @@ class LocalAssetEntityCompanion
..write('height: $height, ')
..write('durationInSeconds: $durationInSeconds, ')
..write('id: $id, ')
..write('remoteId: $remoteId, ')
..write('checksum: $checksum, ')
..write('isFavorite: $isFavorite')
..write(')'))

View File

@@ -5,7 +5,7 @@ mergedAsset: SELECT * FROM
(
SELECT
rae.id as remote_id,
-lae.id as local_id,
+rae.local_id as local_id,
rae.name,
rae."type",
rae.created_at,
@@ -19,13 +19,11 @@ mergedAsset: SELECT * FROM
rae.owner_id
FROM
remote_asset_entity rae
-LEFT JOIN
-local_asset_entity lae ON rae.checksum = lae.checksum
WHERE
rae.deleted_at IS NULL AND rae.visibility = 0 AND rae.owner_id in ?
UNION ALL
SELECT
-NULL as remote_id,
+lae.remote_id as remote_id,
lae.id as local_id,
lae.name,
lae."type",
@@ -40,10 +38,8 @@ mergedAsset: SELECT * FROM
NULL as owner_id
FROM
local_asset_entity lae
-LEFT JOIN
-remote_asset_entity rae ON rae.checksum = lae.checksum
WHERE
-rae.id IS NULL
+lae.remote_id IS NULL
)
ORDER BY created_at DESC
LIMIT $limit;
@@ -62,8 +58,6 @@ FROM
rae.created_at
FROM
remote_asset_entity rae
-LEFT JOIN
-local_asset_entity lae ON rae.checksum = lae.checksum
WHERE
rae.deleted_at IS NULL AND rae.visibility = 0 AND rae.owner_id in ?
UNION ALL
@@ -72,10 +66,8 @@ FROM
lae.created_at
FROM
local_asset_entity lae
-LEFT JOIN
-remote_asset_entity rae ON rae.checksum = lae.checksum
WHERE
-rae.id IS NULL
+lae.remote_id IS NULL
)
GROUP BY bucket_date
ORDER BY bucket_date DESC;
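
The rewritten query leans on the trigger-maintained columns: remote rows
already carry local_id, and a local row belongs in the timeline only while
its remote_id is still NULL, so both checksum joins disappear. A runnable
sketch of that shape on the toy schema from the trigger example above:

import 'package:sqlite3/sqlite3.dart';

void main() {
  final db = sqlite3.openInMemory();
  db.execute('''
    CREATE TABLE local_asset_entity (id TEXT, checksum TEXT, remote_id TEXT);
    CREATE TABLE remote_asset_entity (id TEXT, checksum TEXT, local_id TEXT);
    INSERT INTO local_asset_entity VALUES ('l1', 'abc', 'r1'), ('l2', 'def', NULL);
    INSERT INTO remote_asset_entity VALUES ('r1', 'abc', 'l1');
  ''');
  final rows = db.select('''
    SELECT id AS remote_id, local_id FROM remote_asset_entity
    UNION ALL
    SELECT remote_id, id AS local_id FROM local_asset_entity
    WHERE remote_id IS NULL
  ''');
  for (final row in rows) {
    // r1 / l1 (merged), then null / l2 (local-only)
    print('${row['remote_id']} / ${row['local_id']}');
  }
  db.dispose();
}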

View File

@@ -18,7 +18,7 @@ class MergedAssetDrift extends i1.ModularAccessor {
final generatedlimit = $write(limit, startIndex: $arrayStartIndex);
$arrayStartIndex += generatedlimit.amountOfVariables;
return customSelect(
-'SELECT * FROM (SELECT rae.id AS remote_id, lae.id AS local_id, rae.name, rae.type, rae.created_at, rae.updated_at, rae.width, rae.height, rae.duration_in_seconds, rae.is_favorite, rae.thumb_hash, rae.checksum, rae.owner_id FROM remote_asset_entity AS rae LEFT JOIN local_asset_entity AS lae ON rae.checksum = lae.checksum WHERE rae.deleted_at IS NULL AND rae.visibility = 0 AND rae.owner_id IN ($expandedvar1) UNION ALL SELECT NULL AS remote_id, lae.id AS local_id, lae.name, lae.type, lae.created_at, lae.updated_at, lae.width, lae.height, lae.duration_in_seconds, lae.is_favorite, NULL AS thumb_hash, lae.checksum, NULL AS owner_id FROM local_asset_entity AS lae LEFT JOIN remote_asset_entity AS rae ON rae.checksum = lae.checksum WHERE rae.id IS NULL) ORDER BY created_at DESC ${generatedlimit.sql}',
+'SELECT * FROM (SELECT rae.id AS remote_id, rae.local_id AS local_id, rae.name, rae.type, rae.created_at, rae.updated_at, rae.width, rae.height, rae.duration_in_seconds, rae.is_favorite, rae.thumb_hash, rae.checksum, rae.owner_id FROM remote_asset_entity AS rae WHERE rae.deleted_at IS NULL AND rae.visibility = 0 AND rae.owner_id IN ($expandedvar1) UNION ALL SELECT lae.remote_id AS remote_id, lae.id AS local_id, lae.name, lae.type, lae.created_at, lae.updated_at, lae.width, lae.height, lae.duration_in_seconds, lae.is_favorite, NULL AS thumb_hash, lae.checksum, NULL AS owner_id FROM local_asset_entity AS lae WHERE lae.remote_id IS NULL) ORDER BY created_at DESC ${generatedlimit.sql}',
variables: [
for (var $ in var1) i0.Variable<String>($),
...generatedlimit.introducedVariables
@@ -51,7 +51,7 @@ class MergedAssetDrift extends i1.ModularAccessor {
final expandedvar2 = $expandVar($arrayStartIndex, var2.length);
$arrayStartIndex += var2.length;
return customSelect(
-'SELECT COUNT(*) AS asset_count, CASE WHEN ?1 = 0 THEN STRFTIME(\'%Y-%m-%d\', created_at, \'localtime\') WHEN ?1 = 1 THEN STRFTIME(\'%Y-%m\', created_at, \'localtime\') END AS bucket_date FROM (SELECT rae.name, rae.created_at FROM remote_asset_entity AS rae LEFT JOIN local_asset_entity AS lae ON rae.checksum = lae.checksum WHERE rae.deleted_at IS NULL AND rae.visibility = 0 AND rae.owner_id IN ($expandedvar2) UNION ALL SELECT lae.name, lae.created_at FROM local_asset_entity AS lae LEFT JOIN remote_asset_entity AS rae ON rae.checksum = lae.checksum WHERE rae.id IS NULL) GROUP BY bucket_date ORDER BY bucket_date DESC',
+'SELECT COUNT(*) AS asset_count, CASE WHEN ?1 = 0 THEN STRFTIME(\'%Y-%m-%d\', created_at, \'localtime\') WHEN ?1 = 1 THEN STRFTIME(\'%Y-%m\', created_at, \'localtime\') END AS bucket_date FROM (SELECT rae.name, rae.created_at FROM remote_asset_entity AS rae WHERE rae.deleted_at IS NULL AND rae.visibility = 0 AND rae.owner_id IN ($expandedvar2) UNION ALL SELECT lae.name, lae.created_at FROM local_asset_entity AS lae WHERE lae.remote_id IS NULL) GROUP BY bucket_date ORDER BY bucket_date DESC',
variables: [
i0.Variable<int>(groupBy),
for (var $ in var2) i0.Variable<String>($)

View File

@@ -17,6 +17,8 @@ class RemoteAssetEntity extends Table
TextColumn get id => text()();
TextColumn get localId => text().nullable()();
TextColumn get checksum => text()();
BoolColumn get isFavorite => boolean().withDefault(const Constant(false))();
@@ -51,6 +53,6 @@ extension RemoteAssetEntityDataDomainEx on RemoteAssetEntityData {
width: width,
thumbHash: thumbHash,
visibility: visibility,
-localId: null,
+localId: localId,
);
}

View File

@@ -21,6 +21,7 @@ typedef $$RemoteAssetEntityTableCreateCompanionBuilder
i0.Value<int?> height,
i0.Value<int?> durationInSeconds,
required String id,
i0.Value<String?> localId,
required String checksum,
i0.Value<bool> isFavorite,
required String ownerId,
@@ -39,6 +40,7 @@ typedef $$RemoteAssetEntityTableUpdateCompanionBuilder
i0.Value<int?> height,
i0.Value<int?> durationInSeconds,
i0.Value<String> id,
i0.Value<String?> localId,
i0.Value<String> checksum,
i0.Value<bool> isFavorite,
i0.Value<String> ownerId,
@@ -118,6 +120,9 @@ class $$RemoteAssetEntityTableFilterComposer
i0.ColumnFilters<String> get id => $composableBuilder(
column: $table.id, builder: (column) => i0.ColumnFilters(column));
i0.ColumnFilters<String> get localId => $composableBuilder(
column: $table.localId, builder: (column) => i0.ColumnFilters(column));
i0.ColumnFilters<String> get checksum => $composableBuilder(
column: $table.checksum, builder: (column) => i0.ColumnFilters(column));
@@ -198,6 +203,9 @@ class $$RemoteAssetEntityTableOrderingComposer
i0.ColumnOrderings<String> get id => $composableBuilder(
column: $table.id, builder: (column) => i0.ColumnOrderings(column));
i0.ColumnOrderings<String> get localId => $composableBuilder(
column: $table.localId, builder: (column) => i0.ColumnOrderings(column));
i0.ColumnOrderings<String> get checksum => $composableBuilder(
column: $table.checksum, builder: (column) => i0.ColumnOrderings(column));
@@ -277,6 +285,9 @@ class $$RemoteAssetEntityTableAnnotationComposer
i0.GeneratedColumn<String> get id =>
$composableBuilder(column: $table.id, builder: (column) => column);
i0.GeneratedColumn<String> get localId =>
$composableBuilder(column: $table.localId, builder: (column) => column);
i0.GeneratedColumn<String> get checksum =>
$composableBuilder(column: $table.checksum, builder: (column) => column);
@@ -352,6 +363,7 @@ class $$RemoteAssetEntityTableTableManager extends i0.RootTableManager<
i0.Value<int?> height = const i0.Value.absent(),
i0.Value<int?> durationInSeconds = const i0.Value.absent(),
i0.Value<String> id = const i0.Value.absent(),
i0.Value<String?> localId = const i0.Value.absent(),
i0.Value<String> checksum = const i0.Value.absent(),
i0.Value<bool> isFavorite = const i0.Value.absent(),
i0.Value<String> ownerId = const i0.Value.absent(),
@@ -369,6 +381,7 @@ class $$RemoteAssetEntityTableTableManager extends i0.RootTableManager<
height: height,
durationInSeconds: durationInSeconds,
id: id,
localId: localId,
checksum: checksum,
isFavorite: isFavorite,
ownerId: ownerId,
@@ -386,6 +399,7 @@ class $$RemoteAssetEntityTableTableManager extends i0.RootTableManager<
i0.Value<int?> height = const i0.Value.absent(),
i0.Value<int?> durationInSeconds = const i0.Value.absent(),
required String id,
i0.Value<String?> localId = const i0.Value.absent(),
required String checksum,
i0.Value<bool> isFavorite = const i0.Value.absent(),
required String ownerId,
@@ -403,6 +417,7 @@ class $$RemoteAssetEntityTableTableManager extends i0.RootTableManager<
height: height,
durationInSeconds: durationInSeconds,
id: id,
localId: localId,
checksum: checksum,
isFavorite: isFavorite,
ownerId: ownerId,
@@ -530,6 +545,12 @@ class $RemoteAssetEntityTable extends i3.RemoteAssetEntity
late final i0.GeneratedColumn<String> id = i0.GeneratedColumn<String>(
'id', aliasedName, false,
type: i0.DriftSqlType.string, requiredDuringInsert: true);
static const i0.VerificationMeta _localIdMeta =
const i0.VerificationMeta('localId');
@override
late final i0.GeneratedColumn<String> localId = i0.GeneratedColumn<String>(
'local_id', aliasedName, true,
type: i0.DriftSqlType.string, requiredDuringInsert: false);
static const i0.VerificationMeta _checksumMeta =
const i0.VerificationMeta('checksum');
@override
@@ -589,6 +610,7 @@ class $RemoteAssetEntityTable extends i3.RemoteAssetEntity
height,
durationInSeconds,
id,
localId,
checksum,
isFavorite,
ownerId,
@@ -641,6 +663,10 @@ class $RemoteAssetEntityTable extends i3.RemoteAssetEntity
} else if (isInserting) {
context.missing(_idMeta);
}
if (data.containsKey('local_id')) {
context.handle(_localIdMeta,
localId.isAcceptableOrUnknown(data['local_id']!, _localIdMeta));
}
if (data.containsKey('checksum')) {
context.handle(_checksumMeta,
checksum.isAcceptableOrUnknown(data['checksum']!, _checksumMeta));
@@ -700,6 +726,8 @@ class $RemoteAssetEntityTable extends i3.RemoteAssetEntity
i0.DriftSqlType.int, data['${effectivePrefix}duration_in_seconds']),
id: attachedDatabase.typeMapping
.read(i0.DriftSqlType.string, data['${effectivePrefix}id'])!,
localId: attachedDatabase.typeMapping
.read(i0.DriftSqlType.string, data['${effectivePrefix}local_id']),
checksum: attachedDatabase.typeMapping
.read(i0.DriftSqlType.string, data['${effectivePrefix}checksum'])!,
isFavorite: attachedDatabase.typeMapping
@@ -744,6 +772,7 @@ class RemoteAssetEntityData extends i0.DataClass
final int? height;
final int? durationInSeconds;
final String id;
final String? localId;
final String checksum;
final bool isFavorite;
final String ownerId;
@@ -760,6 +789,7 @@ class RemoteAssetEntityData extends i0.DataClass
this.height,
this.durationInSeconds,
required this.id,
this.localId,
required this.checksum,
required this.isFavorite,
required this.ownerId,
@@ -787,6 +817,9 @@ class RemoteAssetEntityData extends i0.DataClass
map['duration_in_seconds'] = i0.Variable<int>(durationInSeconds);
}
map['id'] = i0.Variable<String>(id);
if (!nullToAbsent || localId != null) {
map['local_id'] = i0.Variable<String>(localId);
}
map['checksum'] = i0.Variable<String>(checksum);
map['is_favorite'] = i0.Variable<bool>(isFavorite);
map['owner_id'] = i0.Variable<String>(ownerId);
@@ -819,6 +852,7 @@ class RemoteAssetEntityData extends i0.DataClass
height: serializer.fromJson<int?>(json['height']),
durationInSeconds: serializer.fromJson<int?>(json['durationInSeconds']),
id: serializer.fromJson<String>(json['id']),
localId: serializer.fromJson<String?>(json['localId']),
checksum: serializer.fromJson<String>(json['checksum']),
isFavorite: serializer.fromJson<bool>(json['isFavorite']),
ownerId: serializer.fromJson<String>(json['ownerId']),
@@ -842,6 +876,7 @@ class RemoteAssetEntityData extends i0.DataClass
'height': serializer.toJson<int?>(height),
'durationInSeconds': serializer.toJson<int?>(durationInSeconds),
'id': serializer.toJson<String>(id),
'localId': serializer.toJson<String?>(localId),
'checksum': serializer.toJson<String>(checksum),
'isFavorite': serializer.toJson<bool>(isFavorite),
'ownerId': serializer.toJson<String>(ownerId),
@@ -862,6 +897,7 @@ class RemoteAssetEntityData extends i0.DataClass
i0.Value<int?> height = const i0.Value.absent(),
i0.Value<int?> durationInSeconds = const i0.Value.absent(),
String? id,
i0.Value<String?> localId = const i0.Value.absent(),
String? checksum,
bool? isFavorite,
String? ownerId,
@@ -880,6 +916,7 @@ class RemoteAssetEntityData extends i0.DataClass
? durationInSeconds.value
: this.durationInSeconds,
id: id ?? this.id,
localId: localId.present ? localId.value : this.localId,
checksum: checksum ?? this.checksum,
isFavorite: isFavorite ?? this.isFavorite,
ownerId: ownerId ?? this.ownerId,
@@ -901,6 +938,7 @@ class RemoteAssetEntityData extends i0.DataClass
? data.durationInSeconds.value
: this.durationInSeconds,
id: data.id.present ? data.id.value : this.id,
localId: data.localId.present ? data.localId.value : this.localId,
checksum: data.checksum.present ? data.checksum.value : this.checksum,
isFavorite:
data.isFavorite.present ? data.isFavorite.value : this.isFavorite,
@@ -926,6 +964,7 @@ class RemoteAssetEntityData extends i0.DataClass
..write('height: $height, ')
..write('durationInSeconds: $durationInSeconds, ')
..write('id: $id, ')
..write('localId: $localId, ')
..write('checksum: $checksum, ')
..write('isFavorite: $isFavorite, ')
..write('ownerId: $ownerId, ')
@@ -947,6 +986,7 @@ class RemoteAssetEntityData extends i0.DataClass
height,
durationInSeconds,
id,
localId,
checksum,
isFavorite,
ownerId,
@@ -966,6 +1006,7 @@ class RemoteAssetEntityData extends i0.DataClass
other.height == this.height &&
other.durationInSeconds == this.durationInSeconds &&
other.id == this.id &&
other.localId == this.localId &&
other.checksum == this.checksum &&
other.isFavorite == this.isFavorite &&
other.ownerId == this.ownerId &&
@@ -985,6 +1026,7 @@ class RemoteAssetEntityCompanion
final i0.Value<int?> height;
final i0.Value<int?> durationInSeconds;
final i0.Value<String> id;
final i0.Value<String?> localId;
final i0.Value<String> checksum;
final i0.Value<bool> isFavorite;
final i0.Value<String> ownerId;
@@ -1001,6 +1043,7 @@ class RemoteAssetEntityCompanion
this.height = const i0.Value.absent(),
this.durationInSeconds = const i0.Value.absent(),
this.id = const i0.Value.absent(),
this.localId = const i0.Value.absent(),
this.checksum = const i0.Value.absent(),
this.isFavorite = const i0.Value.absent(),
this.ownerId = const i0.Value.absent(),
@@ -1018,6 +1061,7 @@ class RemoteAssetEntityCompanion
this.height = const i0.Value.absent(),
this.durationInSeconds = const i0.Value.absent(),
required String id,
this.localId = const i0.Value.absent(),
required String checksum,
this.isFavorite = const i0.Value.absent(),
required String ownerId,
@@ -1040,6 +1084,7 @@ class RemoteAssetEntityCompanion
i0.Expression<int>? height,
i0.Expression<int>? durationInSeconds,
i0.Expression<String>? id,
i0.Expression<String>? localId,
i0.Expression<String>? checksum,
i0.Expression<bool>? isFavorite,
i0.Expression<String>? ownerId,
@@ -1057,6 +1102,7 @@ class RemoteAssetEntityCompanion
if (height != null) 'height': height,
if (durationInSeconds != null) 'duration_in_seconds': durationInSeconds,
if (id != null) 'id': id,
if (localId != null) 'local_id': localId,
if (checksum != null) 'checksum': checksum,
if (isFavorite != null) 'is_favorite': isFavorite,
if (ownerId != null) 'owner_id': ownerId,
@@ -1076,6 +1122,7 @@ class RemoteAssetEntityCompanion
i0.Value<int?>? height,
i0.Value<int?>? durationInSeconds,
i0.Value<String>? id,
i0.Value<String?>? localId,
i0.Value<String>? checksum,
i0.Value<bool>? isFavorite,
i0.Value<String>? ownerId,
@@ -1092,6 +1139,7 @@ class RemoteAssetEntityCompanion
height: height ?? this.height,
durationInSeconds: durationInSeconds ?? this.durationInSeconds,
id: id ?? this.id,
localId: localId ?? this.localId,
checksum: checksum ?? this.checksum,
isFavorite: isFavorite ?? this.isFavorite,
ownerId: ownerId ?? this.ownerId,
@@ -1130,6 +1178,9 @@ class RemoteAssetEntityCompanion
if (id.present) {
map['id'] = i0.Variable<String>(id.value);
}
if (localId.present) {
map['local_id'] = i0.Variable<String>(localId.value);
}
if (checksum.present) {
map['checksum'] = i0.Variable<String>(checksum.value);
}
@@ -1167,6 +1218,7 @@ class RemoteAssetEntityCompanion
..write('height: $height, ')
..write('durationInSeconds: $durationInSeconds, ')
..write('id: $id, ')
..write('localId: $localId, ')
..write('checksum: $checksum, ')
..write('isFavorite: $isFavorite, ')
..write('ownerId: $ownerId, ')

View File

@@ -55,6 +55,7 @@ class IsarDatabaseRepository implements IDatabaseRepository {
],
include: {
'package:immich_mobile/infrastructure/entities/merged_asset.drift',
'package:immich_mobile/infrastructure/entities/asset_triggers.drift',
},
)
class Drift extends $Drift implements IDatabaseRepository {
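
For context on this hunk: .drift files listed in `include` are compiled by
drift_dev into the database, which is how the new asset_triggers.drift
statements end up registered in the generated schema below. A minimal sketch
of the mechanism, with hypothetical file and class names; _$ExampleDatabase
only exists after code generation runs:

import 'package:drift/drift.dart';

part 'example.g.dart';

@DriftDatabase(
  include: {'triggers.drift'}, // statements in this file join the schema
)
class ExampleDatabase extends _$ExampleDatabase {
  ExampleDatabase(super.e);

  @override
  int get schemaVersion => 1;
}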

View File

@@ -7,31 +7,33 @@ import 'package:immich_mobile/infrastructure/entities/remote_asset.entity.drift.
as i2;
import 'package:immich_mobile/infrastructure/entities/local_asset.entity.drift.dart'
as i3;
-import 'package:immich_mobile/infrastructure/entities/user_metadata.entity.drift.dart'
+import 'package:immich_mobile/infrastructure/entities/asset_triggers.drift.dart'
as i4;
-import 'package:immich_mobile/infrastructure/entities/partner.entity.drift.dart'
+import 'package:immich_mobile/infrastructure/entities/user_metadata.entity.drift.dart'
as i5;
-import 'package:immich_mobile/infrastructure/entities/local_album.entity.drift.dart'
+import 'package:immich_mobile/infrastructure/entities/partner.entity.drift.dart'
as i6;
-import 'package:immich_mobile/infrastructure/entities/local_album_asset.entity.drift.dart'
+import 'package:immich_mobile/infrastructure/entities/local_album.entity.drift.dart'
as i7;
-import 'package:immich_mobile/infrastructure/entities/exif.entity.drift.dart'
+import 'package:immich_mobile/infrastructure/entities/local_album_asset.entity.drift.dart'
as i8;
-import 'package:immich_mobile/infrastructure/entities/remote_album.entity.drift.dart'
+import 'package:immich_mobile/infrastructure/entities/exif.entity.drift.dart'
as i9;
-import 'package:immich_mobile/infrastructure/entities/remote_album_asset.entity.drift.dart'
+import 'package:immich_mobile/infrastructure/entities/remote_album.entity.drift.dart'
as i10;
-import 'package:immich_mobile/infrastructure/entities/remote_album_user.entity.drift.dart'
+import 'package:immich_mobile/infrastructure/entities/remote_album_asset.entity.drift.dart'
as i11;
-import 'package:immich_mobile/infrastructure/entities/memory.entity.drift.dart'
+import 'package:immich_mobile/infrastructure/entities/remote_album_user.entity.drift.dart'
as i12;
-import 'package:immich_mobile/infrastructure/entities/memory_asset.entity.drift.dart'
+import 'package:immich_mobile/infrastructure/entities/memory.entity.drift.dart'
as i13;
-import 'package:immich_mobile/infrastructure/entities/stack.entity.drift.dart'
+import 'package:immich_mobile/infrastructure/entities/memory_asset.entity.drift.dart'
as i14;
-import 'package:immich_mobile/infrastructure/entities/merged_asset.drift.dart'
+import 'package:immich_mobile/infrastructure/entities/stack.entity.drift.dart'
as i15;
-import 'package:drift/internal/modular.dart' as i16;
+import 'package:immich_mobile/infrastructure/entities/merged_asset.drift.dart'
+as i16;
+import 'package:drift/internal/modular.dart' as i17;
abstract class $Drift extends i0.GeneratedDatabase {
$Drift(i0.QueryExecutor e) : super(e);
@@ -41,28 +43,28 @@ abstract class $Drift extends i0.GeneratedDatabase {
i2.$RemoteAssetEntityTable(this);
late final i3.$LocalAssetEntityTable localAssetEntity =
i3.$LocalAssetEntityTable(this);
-late final i4.$UserMetadataEntityTable userMetadataEntity =
-i4.$UserMetadataEntityTable(this);
-late final i5.$PartnerEntityTable partnerEntity =
-i5.$PartnerEntityTable(this);
-late final i6.$LocalAlbumEntityTable localAlbumEntity =
-i6.$LocalAlbumEntityTable(this);
-late final i7.$LocalAlbumAssetEntityTable localAlbumAssetEntity =
-i7.$LocalAlbumAssetEntityTable(this);
-late final i8.$RemoteExifEntityTable remoteExifEntity =
-i8.$RemoteExifEntityTable(this);
-late final i9.$RemoteAlbumEntityTable remoteAlbumEntity =
-i9.$RemoteAlbumEntityTable(this);
-late final i10.$RemoteAlbumAssetEntityTable remoteAlbumAssetEntity =
-i10.$RemoteAlbumAssetEntityTable(this);
-late final i11.$RemoteAlbumUserEntityTable remoteAlbumUserEntity =
-i11.$RemoteAlbumUserEntityTable(this);
-late final i12.$MemoryEntityTable memoryEntity = i12.$MemoryEntityTable(this);
-late final i13.$MemoryAssetEntityTable memoryAssetEntity =
-i13.$MemoryAssetEntityTable(this);
-late final i14.$StackEntityTable stackEntity = i14.$StackEntityTable(this);
-i15.MergedAssetDrift get mergedAssetDrift => i16.ReadDatabaseContainer(this)
-.accessor<i15.MergedAssetDrift>(i15.MergedAssetDrift.new);
+late final i5.$UserMetadataEntityTable userMetadataEntity =
+i5.$UserMetadataEntityTable(this);
+late final i6.$PartnerEntityTable partnerEntity =
+i6.$PartnerEntityTable(this);
+late final i7.$LocalAlbumEntityTable localAlbumEntity =
+i7.$LocalAlbumEntityTable(this);
+late final i8.$LocalAlbumAssetEntityTable localAlbumAssetEntity =
+i8.$LocalAlbumAssetEntityTable(this);
+late final i9.$RemoteExifEntityTable remoteExifEntity =
+i9.$RemoteExifEntityTable(this);
+late final i10.$RemoteAlbumEntityTable remoteAlbumEntity =
+i10.$RemoteAlbumEntityTable(this);
+late final i11.$RemoteAlbumAssetEntityTable remoteAlbumAssetEntity =
+i11.$RemoteAlbumAssetEntityTable(this);
+late final i12.$RemoteAlbumUserEntityTable remoteAlbumUserEntity =
+i12.$RemoteAlbumUserEntityTable(this);
+late final i13.$MemoryEntityTable memoryEntity = i13.$MemoryEntityTable(this);
+late final i14.$MemoryAssetEntityTable memoryAssetEntity =
+i14.$MemoryAssetEntityTable(this);
+late final i15.$StackEntityTable stackEntity = i15.$StackEntityTable(this);
+i16.MergedAssetDrift get mergedAssetDrift => i17.ReadDatabaseContainer(this)
+.accessor<i16.MergedAssetDrift>(i16.MergedAssetDrift.new);
@override
Iterable<i0.TableInfo<i0.Table, Object?>> get allTables =>
allSchemaEntities.whereType<i0.TableInfo<i0.Table, Object?>>();
@@ -71,6 +73,10 @@ abstract class $Drift extends i0.GeneratedDatabase {
userEntity,
remoteAssetEntity,
localAssetEntity,
i4.trLocalAssetUpdateChecksumSetIds,
i4.trLocalAssetUpdateOldChecksumSetRemoteAssetLocalId,
i4.trLocalAssetDeleteUpdateRemoteAssetLocalId,
i4.trRemoteAssetInsertSetLocalId,
i3.idxLocalAssetChecksum,
i2.uQRemoteAssetOwnerChecksum,
i2.idxRemoteAssetChecksum,
@@ -97,6 +103,43 @@ abstract class $Drift extends i0.GeneratedDatabase {
i0.TableUpdate('remote_asset_entity', kind: i0.UpdateKind.delete),
],
),
i0.WritePropagation(
on: i0.TableUpdateQuery.onTableName('remote_asset_entity',
limitUpdateKind: i0.UpdateKind.delete),
result: [
i0.TableUpdate('local_asset_entity', kind: i0.UpdateKind.update),
],
),
i0.WritePropagation(
on: i0.TableUpdateQuery.onTableName('local_asset_entity',
limitUpdateKind: i0.UpdateKind.update),
result: [
i0.TableUpdate('local_asset_entity', kind: i0.UpdateKind.update),
i0.TableUpdate('remote_asset_entity', kind: i0.UpdateKind.update),
],
),
i0.WritePropagation(
on: i0.TableUpdateQuery.onTableName('local_asset_entity',
limitUpdateKind: i0.UpdateKind.update),
result: [
i0.TableUpdate('remote_asset_entity', kind: i0.UpdateKind.update),
],
),
i0.WritePropagation(
on: i0.TableUpdateQuery.onTableName('local_asset_entity',
limitUpdateKind: i0.UpdateKind.delete),
result: [
i0.TableUpdate('remote_asset_entity', kind: i0.UpdateKind.update),
],
),
i0.WritePropagation(
on: i0.TableUpdateQuery.onTableName('remote_asset_entity',
limitUpdateKind: i0.UpdateKind.insert),
result: [
i0.TableUpdate('remote_asset_entity', kind: i0.UpdateKind.update),
i0.TableUpdate('local_asset_entity', kind: i0.UpdateKind.update),
],
),
i0.WritePropagation(
on: i0.TableUpdateQuery.onTableName('user_entity',
limitUpdateKind: i0.UpdateKind.delete),
@@ -232,27 +275,27 @@ class $DriftManager {
i2.$$RemoteAssetEntityTableTableManager(_db, _db.remoteAssetEntity);
i3.$$LocalAssetEntityTableTableManager get localAssetEntity =>
i3.$$LocalAssetEntityTableTableManager(_db, _db.localAssetEntity);
-i4.$$UserMetadataEntityTableTableManager get userMetadataEntity =>
-i4.$$UserMetadataEntityTableTableManager(_db, _db.userMetadataEntity);
-i5.$$PartnerEntityTableTableManager get partnerEntity =>
-i5.$$PartnerEntityTableTableManager(_db, _db.partnerEntity);
-i6.$$LocalAlbumEntityTableTableManager get localAlbumEntity =>
-i6.$$LocalAlbumEntityTableTableManager(_db, _db.localAlbumEntity);
-i7.$$LocalAlbumAssetEntityTableTableManager get localAlbumAssetEntity => i7
+i5.$$UserMetadataEntityTableTableManager get userMetadataEntity =>
+i5.$$UserMetadataEntityTableTableManager(_db, _db.userMetadataEntity);
+i6.$$PartnerEntityTableTableManager get partnerEntity =>
+i6.$$PartnerEntityTableTableManager(_db, _db.partnerEntity);
+i7.$$LocalAlbumEntityTableTableManager get localAlbumEntity =>
+i7.$$LocalAlbumEntityTableTableManager(_db, _db.localAlbumEntity);
+i8.$$LocalAlbumAssetEntityTableTableManager get localAlbumAssetEntity => i8
.$$LocalAlbumAssetEntityTableTableManager(_db, _db.localAlbumAssetEntity);
-i8.$$RemoteExifEntityTableTableManager get remoteExifEntity =>
-i8.$$RemoteExifEntityTableTableManager(_db, _db.remoteExifEntity);
-i9.$$RemoteAlbumEntityTableTableManager get remoteAlbumEntity =>
-i9.$$RemoteAlbumEntityTableTableManager(_db, _db.remoteAlbumEntity);
-i10.$$RemoteAlbumAssetEntityTableTableManager get remoteAlbumAssetEntity =>
-i10.$$RemoteAlbumAssetEntityTableTableManager(
+i9.$$RemoteExifEntityTableTableManager get remoteExifEntity =>
+i9.$$RemoteExifEntityTableTableManager(_db, _db.remoteExifEntity);
+i10.$$RemoteAlbumEntityTableTableManager get remoteAlbumEntity =>
+i10.$$RemoteAlbumEntityTableTableManager(_db, _db.remoteAlbumEntity);
+i11.$$RemoteAlbumAssetEntityTableTableManager get remoteAlbumAssetEntity =>
+i11.$$RemoteAlbumAssetEntityTableTableManager(
_db, _db.remoteAlbumAssetEntity);
-i11.$$RemoteAlbumUserEntityTableTableManager get remoteAlbumUserEntity => i11
+i12.$$RemoteAlbumUserEntityTableTableManager get remoteAlbumUserEntity => i12
.$$RemoteAlbumUserEntityTableTableManager(_db, _db.remoteAlbumUserEntity);
-i12.$$MemoryEntityTableTableManager get memoryEntity =>
-i12.$$MemoryEntityTableTableManager(_db, _db.memoryEntity);
-i13.$$MemoryAssetEntityTableTableManager get memoryAssetEntity =>
-i13.$$MemoryAssetEntityTableTableManager(_db, _db.memoryAssetEntity);
-i14.$$StackEntityTableTableManager get stackEntity =>
-i14.$$StackEntityTableTableManager(_db, _db.stackEntity);
+i13.$$MemoryEntityTableTableManager get memoryEntity =>
+i13.$$MemoryEntityTableTableManager(_db, _db.memoryEntity);
+i14.$$MemoryAssetEntityTableTableManager get memoryAssetEntity =>
+i14.$$MemoryAssetEntityTableTableManager(_db, _db.memoryAssetEntity);
+i15.$$StackEntityTableTableManager get stackEntity =>
+i15.$$StackEntityTableTableManager(_db, _db.stackEntity);
}

View File

@@ -282,6 +282,7 @@ class DriftLocalAlbumRepository extends DriftDatabaseRepository {
durationInSeconds: Value(asset.durationInSeconds),
id: asset.id,
checksum: const Value(null),
remoteId: const Value(null),
);
batch.insert<$LocalAssetEntityTable, LocalAssetEntityData>(
_db.localAssetEntity,

View File

@@ -9,23 +9,10 @@ class DriftLocalAssetRepository extends DriftDatabaseRepository {
const DriftLocalAssetRepository(this._db) : super(_db);
Stream<LocalAsset?> watchAsset(String id) {
-final query = _db.localAssetEntity
-.select()
-.addColumns([_db.localAssetEntity.id]).join([
-leftOuterJoin(
-_db.remoteAssetEntity,
-_db.localAssetEntity.checksum.equalsExp(_db.remoteAssetEntity.checksum),
-useColumns: false,
-),
-])
-..where(_db.localAssetEntity.id.equals(id));
+final query = _db.localAssetEntity.select()
+..where((row) => row.id.equals(id));
-return query.map((row) {
-final asset = row.readTable(_db.localAssetEntity).toDto();
-return asset.copyWith(
-remoteId: row.read(_db.remoteAssetEntity.id),
-);
-}).watchSingleOrNull();
+return query.map((row) => row.toDto()).watchSingleOrNull();
}
Future<void> updateHashes(Iterable<LocalAsset> hashes) {

View File

@@ -13,24 +13,27 @@ class RemoteAssetRepository extends DriftDatabaseRepository {
final Drift _db;
const RemoteAssetRepository(this._db) : super(_db);
-Stream<RemoteAsset?> watchAsset(String id) {
-final query = _db.remoteAssetEntity
-.select()
-.addColumns([_db.localAssetEntity.id]).join([
-leftOuterJoin(
-_db.localAssetEntity,
-_db.remoteAssetEntity.checksum.equalsExp(_db.localAssetEntity.checksum),
-useColumns: false,
-),
-])
-..where(_db.remoteAssetEntity.id.equals(id));
+/// For testing purposes
+Future<List<RemoteAsset>> getSome(String userId) {
+final query = _db.remoteAssetEntity.select()
+..where(
+(row) =>
+_db.remoteAssetEntity.ownerId.equals(userId) &
+_db.remoteAssetEntity.deletedAt.isNull() &
+_db.remoteAssetEntity.visibility
+.equalsValue(AssetVisibility.timeline),
+)
+..orderBy([(row) => OrderingTerm.desc(row.createdAt)])
+..limit(10);
-return query.map((row) {
-final asset = row.readTable(_db.remoteAssetEntity).toDto();
-return asset.copyWith(
-localId: row.read(_db.localAssetEntity.id),
-);
-}).watchSingleOrNull();
+return query.map((row) => row.toDto()).get();
+}
+Stream<RemoteAsset?> watchAsset(String id) {
+final query = _db.remoteAssetEntity.select()
+..where((row) => row.id.equals(id));
+return query.map((row) => row.toDto()).watchSingleOrNull();
}
Future<ExifInfo?> getExif(String id) {

View File

@@ -104,7 +104,7 @@ class DriftTimelineRepository extends DriftDatabaseRepository {
).get();
}
-Stream<List<Bucket>> watchLocalBucket(
+Stream<List<Bucket>> watchLocalAlbumBucket(
String albumId, {
GroupAssetsBy groupBy = GroupAssetsBy.day,
}) {
@@ -137,7 +137,7 @@ class DriftTimelineRepository extends DriftDatabaseRepository {
}).watch();
}
-Future<List<BaseAsset>> getLocalBucketAssets(
+Future<List<BaseAsset>> getLocalAlbumBucketAssets(
String albumId, {
required int offset,
required int count,
@@ -158,7 +158,7 @@ class DriftTimelineRepository extends DriftDatabaseRepository {
.get();
}
-Stream<List<Bucket>> watchRemoteBucket(
+Stream<List<Bucket>> watchRemoteAlbumBucket(
String albumId, {
GroupAssetsBy groupBy = GroupAssetsBy.day,
}) {
@@ -192,7 +192,7 @@ class DriftTimelineRepository extends DriftDatabaseRepository {
}).watch();
}
-Future<List<BaseAsset>> getRemoteBucketAssets(
+Future<List<BaseAsset>> getRemoteAlbumBucketAssets(
String albumId, {
required int offset,
required int count,
@@ -469,6 +469,61 @@ class DriftTimelineRepository extends DriftDatabaseRepository {
return query.map((row) => row.toDto()).get();
}
Stream<List<Bucket>> watchRemoteBucket(
List<String> userIds, {
GroupAssetsBy groupBy = GroupAssetsBy.day,
}) {
if (groupBy == GroupAssetsBy.none) {
return _db.remoteAssetEntity
.count(
where: (row) =>
row.deletedAt.isNull() &
row.visibility.equalsValue(AssetVisibility.timeline) &
row.ownerId.isIn(userIds),
)
.map(_generateBuckets)
.watchSingle();
}
final assetCountExp = _db.remoteAssetEntity.id.count();
final dateExp = _db.remoteAssetEntity.createdAt.dateFmt(groupBy);
final query = _db.remoteAssetEntity.selectOnly()
..addColumns([assetCountExp, dateExp])
..where(
_db.remoteAssetEntity.deletedAt.isNull() &
_db.remoteAssetEntity.visibility
.equalsValue(AssetVisibility.timeline) &
_db.remoteAssetEntity.ownerId.isIn(userIds),
)
..groupBy([dateExp])
..orderBy([OrderingTerm.desc(dateExp)]);
return query.map((row) {
final timeline = row.read(dateExp)!.dateFmt(groupBy);
final assetCount = row.read(assetCountExp)!;
return TimeBucket(date: timeline, assetCount: assetCount);
}).watch();
}
Future<List<BaseAsset>> getRemoteBucketAssets(
List<String> userIds, {
required int offset,
required int count,
}) {
final query = _db.remoteAssetEntity.select()
..where(
(row) =>
row.deletedAt.isNull() &
row.visibility.equalsValue(AssetVisibility.timeline) &
row.ownerId.isIn(userIds),
)
..orderBy([(t) => OrderingTerm.desc(t.createdAt)])
..limit(count, offset: offset);
return query.map((row) => row.toDto()).get();
}
}
extension on Expression<DateTime> {

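Not part of the diff: how a caller pairs the two new multi-user APIs. watchRemoteBucket yields one count per day, and the offset handed to getRemoteBucketAssets is the running sum of the preceding buckets:

// Sketch: translate a bucket index into the (offset, count) range that
// getRemoteBucketAssets expects, given each bucket's asset count.
(int offset, int count) bucketRange(List<int> bucketCounts, int index) {
  var offset = 0;
  for (var i = 0; i < index; i++) {
    offset += bucketCounts[i];
  }
  return (offset, bucketCounts[index]);
}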
View File

@@ -5,15 +5,43 @@ import 'package:drift/drift.dart' hide Column;
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/extensions/theme_extensions.dart';
import 'package:immich_mobile/presentation/pages/dev/dev_logger.dart';
import 'package:immich_mobile/providers/background_sync.provider.dart';
import 'package:immich_mobile/providers/infrastructure/asset.provider.dart';
import 'package:immich_mobile/providers/infrastructure/db.provider.dart';
import 'package:immich_mobile/providers/infrastructure/platform.provider.dart';
import 'package:immich_mobile/providers/user.provider.dart';
import 'package:immich_mobile/routing/router.dart';
final _features = [
_Feature(
name: 'Selection Mode Timeline',
icon: Icons.developer_mode_rounded,
onTap: (ctx, ref) async {
final user = ref.watch(currentUserProvider);
if (user == null) {
return Future.value();
}
final assets =
await ref.read(remoteAssetRepositoryProvider).getSome(user.id);
final selectedAssets = await ctx.pushRoute<Set<BaseAsset>>(
DriftAssetSelectionTimelineRoute(
lockedSelectionAssets: assets.toSet(),
),
);
DLog.log(
"Selected ${selectedAssets?.length ?? 0} assets",
);
return Future.value();
},
),
_Feature(
name: 'Sync Local',
icon: Icons.photo_album_rounded,

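The dev entry above exercises the full round trip; a compact sketch of the same pattern (function name hypothetical):

import 'package:auto_route/auto_route.dart';
import 'package:flutter/widgets.dart';

// Sketch: push the selection timeline and await the typed result. A null
// result means the page was closed without confirming a selection.
Future<Set<BaseAsset>?> pickAssets(BuildContext context, Set<BaseAsset> locked) {
  return context.pushRoute<Set<BaseAsset>>(
    DriftAssetSelectionTimelineRoute(lockedSelectionAssets: locked),
  );
}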
View File

@@ -0,0 +1,44 @@
import 'package:auto_route/auto_route.dart';
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/presentation/widgets/timeline/timeline.widget.dart';
import 'package:immich_mobile/providers/infrastructure/timeline.provider.dart';
import 'package:immich_mobile/providers/timeline/multiselect.provider.dart';
@RoutePage()
class DriftAssetSelectionTimelinePage extends ConsumerWidget {
final Set<BaseAsset> lockedSelectionAssets;
const DriftAssetSelectionTimelinePage({
super.key,
this.lockedSelectionAssets = const {},
});
@override
Widget build(BuildContext context, WidgetRef ref) {
return ProviderScope(
overrides: [
multiSelectProvider.overrideWith(
() => MultiSelectNotifier(
MultiSelectState(
selectedAssets: {},
lockedSelectionAssets: lockedSelectionAssets,
forceEnable: true,
),
),
),
timelineServiceProvider.overrideWith(
(ref) {
final timelineUsers =
ref.watch(timelineUsersProvider).valueOrNull ?? [];
final timelineService =
ref.watch(timelineFactoryProvider).remoteAssets(timelineUsers);
ref.onDispose(timelineService.dispose);
return timelineService;
},
),
],
child: const Timeline(),
);
}
}

View File

@@ -1,50 +0,0 @@
import 'dart:convert' hide Codec;
import 'dart:ui';
import 'package:flutter/foundation.dart';
import 'package:flutter/rendering.dart';
import 'package:thumbhash/thumbhash.dart';
class ThumbHashProvider extends ImageProvider<ThumbHashProvider> {
final String thumbHash;
const ThumbHashProvider({
required this.thumbHash,
});
@override
Future<ThumbHashProvider> obtainKey(ImageConfiguration configuration) {
return SynchronousFuture(this);
}
@override
ImageStreamCompleter loadImage(
ThumbHashProvider key,
ImageDecoderCallback decode,
) {
return MultiFrameImageStreamCompleter(
codec: _loadCodec(key, decode),
scale: 1.0,
);
}
Future<Codec> _loadCodec(
ThumbHashProvider key,
ImageDecoderCallback decode,
) async {
final image = thumbHashToRGBA(base64Decode(key.thumbHash));
return decode(await ImmutableBuffer.fromUint8List(rgbaToBmp(image)));
}
@override
bool operator ==(Object other) {
if (identical(this, other)) return true;
if (other is ThumbHashProvider) {
return thumbHash == other.thumbHash;
}
return false;
}
@override
int get hashCode => thumbHash.hashCode;
}

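The deleted provider had to round-trip through rgbaToBmp so the engine could parse a BMP; its replacement (thumbhash.dart, later in this diff) hands the raw RGBA to ImageDescriptor.raw instead. A sketch of that decode path in isolation, assuming package:thumbhash as above:

import 'dart:convert';
import 'dart:ui';

import 'package:thumbhash/thumbhash.dart' as thumbhash;

// Sketch: decode a base64 thumbhash straight to a ui.Image without the
// intermediate BMP encode the old provider relied on.
Future<Image> decodeThumbhash(String hash) async {
  final decoded = thumbhash.thumbHashToRGBA(base64.decode(hash));
  final buffer = await ImmutableBuffer.fromUint8List(decoded.rgba);
  final descriptor = ImageDescriptor.raw(
    buffer,
    width: decoded.width,
    height: decoded.height,
    pixelFormat: PixelFormat.rgba8888,
  );
  final codec = await descriptor.instantiateCodec();
  return (await codec.getNextFrame()).image;
}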
View File

@@ -1,9 +1,7 @@
import 'package:flutter/material.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/presentation/widgets/images/image_provider.dart';
import 'package:immich_mobile/presentation/widgets/images/thumb_hash_provider.dart';
import 'package:immich_mobile/widgets/asset_grid/thumbnail_placeholder.dart';
import 'package:immich_mobile/widgets/common/fade_in_placeholder_image.dart';
import 'package:immich_mobile/widgets/common/thumbhash.dart';
import 'package:logging/logging.dart';
import 'package:octo_image/octo_image.dart';
@@ -56,13 +54,7 @@ OctoPlaceholderBuilder _blurHashPlaceholderBuilder(
String? thumbHash, {
BoxFit? fit,
}) {
return (context) => thumbHash == null
? const ThumbnailPlaceholder()
: FadeInPlaceholderImage(
placeholder: const ThumbnailPlaceholder(),
image: ThumbHashProvider(thumbHash: thumbHash),
fit: fit ?? BoxFit.cover,
);
return (context) => Thumbhash(blurhash: thumbHash, fit: fit ?? BoxFit.cover);
}
OctoErrorBuilder _blurHashErrorBuilder(

View File

@@ -12,7 +12,7 @@ class ThumbnailTile extends ConsumerWidget {
this.size = const Size.square(256),
this.fit = BoxFit.cover,
this.showStorageIndicator = true,
this.canDeselect = true,
this.lockSelection = false,
super.key,
});
@@ -20,15 +20,13 @@ class ThumbnailTile extends ConsumerWidget {
final Size size;
final BoxFit fit;
final bool showStorageIndicator;
/// If we are allowed to deselect this image
final bool canDeselect;
final bool lockSelection;
@override
Widget build(BuildContext context, WidgetRef ref) {
final assetContainerColor = context.isDarkTheme
? context.primaryColor.darken(amount: 0.6)
: context.primaryColor.lighten(amount: 0.8);
? context.primaryColor.darken(amount: 0.4)
: context.primaryColor.lighten(amount: 0.75);
final isSelected = ref.watch(
multiSelectProvider.select(
@@ -36,24 +34,29 @@ class ThumbnailTile extends ConsumerWidget {
),
);
final borderStyle = lockSelection
? BoxDecoration(
color: context.colorScheme.surfaceContainerHighest,
border: Border.all(
color: context.colorScheme.surfaceContainerHighest,
width: 6,
),
)
: isSelected
? BoxDecoration(
color: assetContainerColor,
border: Border.all(color: assetContainerColor, width: 6),
)
: const BoxDecoration();
return Stack(
children: [
AnimatedContainer(
duration: Durations.short4,
curve: Curves.decelerate,
decoration: BoxDecoration(
color: isSelected
? (canDeselect ? assetContainerColor : Colors.grey)
: null,
border: isSelected
? Border.all(
color: canDeselect ? assetContainerColor : Colors.grey,
width: 8,
)
: const Border(),
),
decoration: borderStyle,
child: ClipRRect(
borderRadius: isSelected
borderRadius: isSelected || lockSelection
? const BorderRadius.all(Radius.circular(15.0))
: BorderRadius.zero,
child: Stack(
@@ -102,14 +105,17 @@ class ThumbnailTile extends ConsumerWidget {
),
),
),
if (isSelected)
if (isSelected || lockSelection)
Padding(
padding: const EdgeInsets.all(3.0),
child: Align(
alignment: Alignment.topLeft,
child: _SelectionIndicator(
isSelected: isSelected,
color: assetContainerColor,
isLocked: lockSelection,
color: lockSelection
? context.colorScheme.surfaceContainerHighest
: assetContainerColor,
),
),
),
@@ -120,15 +126,29 @@ class ThumbnailTile extends ConsumerWidget {
class _SelectionIndicator extends StatelessWidget {
final bool isSelected;
final bool isLocked;
final Color? color;
const _SelectionIndicator({
required this.isSelected,
required this.isLocked,
this.color,
});
@override
Widget build(BuildContext context) {
if (isSelected) {
if (isLocked) {
return Container(
decoration: BoxDecoration(
shape: BoxShape.circle,
color: color,
),
child: const Icon(
Icons.check_circle_rounded,
color: Colors.grey,
),
);
} else if (isSelected) {
return Container(
decoration: BoxDecoration(
shape: BoxShape.circle,

View File

@@ -6,7 +6,7 @@ import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/presentation/widgets/images/full_image.widget.dart';
import 'package:immich_mobile/presentation/widgets/images/image_provider.dart';
import 'package:immich_mobile/utils/hooks/blurhash_hook.dart';
import 'package:immich_mobile/widgets/common/thumbhash.dart';
class DriftMemoryCard extends StatelessWidget {
final RemoteAsset asset;
@@ -117,43 +117,29 @@ class _BlurredBackdrop extends HookWidget {
@override
Widget build(BuildContext context) {
final blurhash = useDriftBlurHashRef(asset).value;
final blurhash = asset.thumbHash;
if (blurhash != null) {
// Use a nice cheap blur hash image decoration
return Container(
return Thumbhash(blurhash: blurhash, fit: BoxFit.cover);
}
// Fall back to the more expensive ImageFiltered blur.
// Since the full image is already precached, we can
// safely reuse its provider here.
return ImageFiltered(
imageFilter: ImageFilter.blur(sigmaX: 30, sigmaY: 30),
child: DecoratedBox(
decoration: BoxDecoration(
image: DecorationImage(
image: MemoryImage(
blurhash,
image: getFullImageProvider(
asset,
size: Size(context.width, context.height),
),
fit: BoxFit.cover,
),
),
child: Container(
color: Colors.black.withValues(alpha: 0.2),
),
);
} else {
// Fall back to the more expensive ImageFiltered blur.
// Since the full image is already precached, we can
// safely reuse its provider here.
return ImageFiltered(
imageFilter: ImageFilter.blur(sigmaX: 30, sigmaY: 30),
child: Container(
decoration: BoxDecoration(
image: DecorationImage(
image: getFullImageProvider(
asset,
size: Size(context.width, context.height),
),
fit: BoxFit.cover,
),
),
child: Container(
color: Colors.black.withValues(alpha: 0.2),
),
),
);
}
child: const ColoredBox(color: Color.fromRGBO(0, 0, 0, 0.2)),
),
);
}
}

View File

@@ -166,22 +166,22 @@ class _AssetTileWidget extends ConsumerWidget {
BaseAsset asset,
) {
final multiSelectState = ref.read(multiSelectProvider);
if (!multiSelectState.isEnabled) {
if (multiSelectState.forceEnable || multiSelectState.isEnabled) {
ref.read(multiSelectProvider.notifier).toggleAssetSelection(asset);
} else {
ctx.pushRoute(
AssetViewerRoute(
initialIndex: assetIndex,
timelineService: ref.read(timelineServiceProvider),
),
);
return;
}
ref.read(multiSelectProvider.notifier).toggleAssetSelection(asset);
}
void _handleOnLongPress(WidgetRef ref, BaseAsset asset) {
final multiSelectState = ref.read(multiSelectProvider);
if (multiSelectState.isEnabled) {
if (multiSelectState.isEnabled || multiSelectState.forceEnable) {
return;
}
@@ -189,13 +189,35 @@ class _AssetTileWidget extends ConsumerWidget {
ref.read(multiSelectProvider.notifier).toggleAssetSelection(asset);
}
bool _getLockSelectionStatus(WidgetRef ref) {
final lockSelectionAssets = ref.read(
multiSelectProvider.select(
(state) => state.lockedSelectionAssets,
),
);
if (lockSelectionAssets.isEmpty) {
return false;
}
return lockSelectionAssets.contains(asset);
}
@override
Widget build(BuildContext context, WidgetRef ref) {
final lockSelection = _getLockSelectionStatus(ref);
return RepaintBoundary(
child: GestureDetector(
onTap: () => _handleOnTap(context, ref, assetIndex, asset),
onLongPress: () => _handleOnLongPress(ref, asset),
child: ThumbnailTile(asset),
onTap: () => lockSelection
? null
: _handleOnTap(context, ref, assetIndex, asset),
onLongPress: () =>
lockSelection ? null : _handleOnLongPress(ref, asset),
child: ThumbnailTile(
asset,
lockSelection: lockSelection,
),
),
);
}

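A one-line sketch of the gate the tile widget applies: locked assets render as selected but swallow taps and long-presses (helper name hypothetical):

// Sketch: an asset is lock-selected when it belongs to the locked set.
bool isLockSelected(BaseAsset asset, Set<BaseAsset> locked) =>
    locked.isNotEmpty && locked.contains(asset);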
View File

@@ -354,22 +354,24 @@ class ScrubberState extends ConsumerState<Scrubber>
isDragging: _isDragging,
),
),
PositionedDirectional(
top: _thumbTopOffset + widget.topPadding,
end: 0,
child: RepaintBoundary(
child: GestureDetector(
onVerticalDragStart: _onDragStart,
onVerticalDragUpdate: _onDragUpdate,
onVerticalDragEnd: _onDragEnd,
child: _Scrubber(
thumbAnimation: _thumbAnimation,
labelAnimation: _labelAnimation,
label: label,
if (_scrollController.hasClients &&
_scrollController.position.maxScrollExtent > 0)
PositionedDirectional(
top: _thumbTopOffset + widget.topPadding,
end: 0,
child: RepaintBoundary(
child: GestureDetector(
onVerticalDragStart: _onDragStart,
onVerticalDragUpdate: _onDragUpdate,
onVerticalDragEnd: _onDragEnd,
child: _Scrubber(
thumbAnimation: _thumbAnimation,
labelAnimation: _labelAnimation,
label: label,
),
),
),
),
),
],
),
);

View File
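The fix gates the scrubber on usable scroll metrics; the same guard in isolation (helper name assumed):

import 'package:flutter/widgets.dart';

// Sketch: show the scrubber only when the controller is attached to a
// scrollable and there is actually overflow to scrub through.
bool shouldShowScrubber(ScrollController controller) =>
    controller.hasClients && controller.position.maxScrollExtent > 0;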

@@ -18,9 +18,14 @@ import 'package:immich_mobile/providers/infrastructure/setting.provider.dart';
import 'package:immich_mobile/providers/infrastructure/timeline.provider.dart';
import 'package:immich_mobile/providers/timeline/multiselect.provider.dart';
import 'package:immich_mobile/widgets/common/immich_sliver_app_bar.dart';
import 'package:immich_mobile/widgets/common/selection_sliver_app_bar.dart';
class Timeline extends StatelessWidget {
const Timeline({super.key, this.topSliverWidget, this.topSliverWidgetHeight});
const Timeline({
super.key,
this.topSliverWidget,
this.topSliverWidgetHeight,
});
final Widget? topSliverWidget;
final double? topSliverWidgetHeight;
@@ -52,7 +57,10 @@ class Timeline extends StatelessWidget {
}
class _SliverTimeline extends ConsumerStatefulWidget {
const _SliverTimeline({this.topSliverWidget, this.topSliverWidgetHeight});
const _SliverTimeline({
this.topSliverWidget,
this.topSliverWidgetHeight,
});
final Widget? topSliverWidget;
final double? topSliverWidgetHeight;
@@ -84,6 +92,10 @@ class _SliverTimelineState extends ConsumerState<_SliverTimeline> {
final asyncSegments = ref.watch(timelineSegmentProvider);
final maxHeight =
ref.watch(timelineArgsProvider.select((args) => args.maxHeight));
final isSelectionMode = ref.watch(
multiSelectProvider.select((s) => s.forceEnable),
);
return asyncSegments.widgetWhen(
onData: (segments) {
final childCount = (segments.lastOrNull?.lastIndex ?? -1) + 1;
@@ -105,11 +117,14 @@ class _SliverTimelineState extends ConsumerState<_SliverTimeline> {
primary: true,
cacheExtent: maxHeight * 2,
slivers: [
const ImmichSliverAppBar(
floating: true,
pinned: false,
snap: false,
),
if (isSelectionMode)
const SelectionSliverAppBar()
else
const ImmichSliverAppBar(
floating: true,
pinned: false,
snap: false,
),
if (widget.topSliverWidget != null) widget.topSliverWidget!,
_SliverSegmentedList(
segments: segments,
@@ -134,40 +149,42 @@ class _SliverTimelineState extends ConsumerState<_SliverTimeline> {
],
),
),
Consumer(
builder: (_, consumerRef, child) {
final isMultiSelectEnabled = consumerRef.watch(
multiSelectProvider.select(
(s) => s.isEnabled,
),
);
if (!isSelectionMode) ...[
Consumer(
builder: (_, consumerRef, child) {
final isMultiSelectEnabled = consumerRef.watch(
multiSelectProvider.select(
(s) => s.isEnabled,
),
);
if (isMultiSelectEnabled) {
return child!;
}
return const SizedBox.shrink();
},
child: const Positioned(
top: 60,
left: 25,
child: _MultiSelectStatusButton(),
if (isMultiSelectEnabled) {
return child!;
}
return const SizedBox.shrink();
},
child: const Positioned(
top: 60,
left: 25,
child: _MultiSelectStatusButton(),
),
),
),
Consumer(
builder: (_, consumerRef, child) {
final isMultiSelectEnabled = consumerRef.watch(
multiSelectProvider.select(
(s) => s.isEnabled,
),
);
Consumer(
builder: (_, consumerRef, child) {
final isMultiSelectEnabled = consumerRef.watch(
multiSelectProvider.select(
(s) => s.isEnabled,
),
);
if (isMultiSelectEnabled) {
return child!;
}
return const SizedBox.shrink();
},
child: const HomeBottomAppBar(),
),
if (isMultiSelectEnabled) {
return child!;
}
return const SizedBox.shrink();
},
child: const HomeBottomAppBar(),
),
],
],
),
);

View File

@@ -1,5 +1,6 @@
import 'package:collection/collection.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/domain/services/timeline.service.dart';
import 'package:immich_mobile/providers/infrastructure/timeline.provider.dart';
@@ -12,8 +13,14 @@ final multiSelectProvider =
class MultiSelectState {
final Set<BaseAsset> selectedAssets;
final Set<BaseAsset> lockedSelectionAssets;
final bool forceEnable;
const MultiSelectState({required this.selectedAssets});
const MultiSelectState({
required this.selectedAssets,
required this.lockedSelectionAssets,
this.forceEnable = false,
});
bool get isEnabled => selectedAssets.isNotEmpty;
bool get hasRemote => selectedAssets.any(
@@ -25,33 +32,54 @@ class MultiSelectState {
(asset) => asset.storage == AssetState.local,
);
MultiSelectState copyWith({Set<BaseAsset>? selectedAssets}) {
MultiSelectState copyWith({
Set<BaseAsset>? selectedAssets,
Set<BaseAsset>? lockedSelectionAssets,
bool? forceEnable,
}) {
return MultiSelectState(
selectedAssets: selectedAssets ?? this.selectedAssets,
lockedSelectionAssets:
lockedSelectionAssets ?? this.lockedSelectionAssets,
forceEnable: forceEnable ?? this.forceEnable,
);
}
@override
String toString() => 'MultiSelectState(selectedAssets: $selectedAssets)';
String toString() =>
'MultiSelectState(selectedAssets: $selectedAssets, lockedSelectionAssets: $lockedSelectionAssets, forceEnable: $forceEnable)';
@override
bool operator ==(covariant MultiSelectState other) {
if (identical(this, other)) return true;
final listEquals = const DeepCollectionEquality().equals;
final setEquals = const DeepCollectionEquality().equals;
return listEquals(other.selectedAssets, selectedAssets);
return setEquals(other.selectedAssets, selectedAssets) &&
setEquals(other.lockedSelectionAssets, lockedSelectionAssets) &&
other.forceEnable == forceEnable;
}
@override
int get hashCode => selectedAssets.hashCode;
int get hashCode =>
selectedAssets.hashCode ^
lockedSelectionAssets.hashCode ^
forceEnable.hashCode;
}
class MultiSelectNotifier extends Notifier<MultiSelectState> {
MultiSelectNotifier([this._defaultState]);
final MultiSelectState? _defaultState;
TimelineService get _timelineService => ref.read(timelineServiceProvider);
@override
MultiSelectState build() {
return const MultiSelectState(selectedAssets: {});
return _defaultState ??
const MultiSelectState(
selectedAssets: {},
lockedSelectionAssets: {},
forceEnable: false,
);
}
void selectAsset(BaseAsset asset) {
@@ -83,7 +111,11 @@ class MultiSelectNotifier extends Notifier<MultiSelectState> {
}
void reset() {
state = const MultiSelectState(selectedAssets: {});
state = const MultiSelectState(
selectedAssets: {},
lockedSelectionAssets: {},
forceEnable: false,
);
}
/// Bucket bulk operations
@@ -131,6 +163,12 @@ class MultiSelectNotifier extends Notifier<MultiSelectState> {
state = state.copyWith(selectedAssets: selectedAssets);
}
void setLockedSelectionAssets(Set<BaseAsset> assets) {
state = state.copyWith(
lockedSelectionAssets: assets,
);
}
}
final bucketSelectionProvider = Provider.family<bool, List<BaseAsset>>(

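Not part of the diff: why the rename from listEquals to setEquals matters. DeepCollectionEquality compares sets without regard to iteration order, while lists stay order-sensitive:

import 'package:collection/collection.dart';

// Sketch: the equality semantics the reworked operator== relies on.
void main() {
  const eq = DeepCollectionEquality();
  assert(eq.equals(<int>{1, 2, 3}, <int>{3, 2, 1})); // sets: equal
  assert(!eq.equals(<int>[1, 2, 3], <int>[3, 2, 1])); // lists: ordered
}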
View File

@@ -1,6 +1,7 @@
import 'package:auto_route/auto_route.dart';
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/domain/models/log.model.dart';
import 'package:immich_mobile/domain/models/memory.model.dart';
import 'package:immich_mobile/domain/models/user.model.dart';
@@ -78,6 +79,7 @@ import 'package:immich_mobile/presentation/pages/dev/media_stat.page.dart';
import 'package:immich_mobile/presentation/pages/dev/remote_timeline.page.dart';
import 'package:immich_mobile/presentation/pages/drift_album.page.dart';
import 'package:immich_mobile/presentation/pages/drift_library.page.dart';
import 'package:immich_mobile/presentation/pages/drift_asset_selection_timeline.page.dart';
import 'package:immich_mobile/presentation/pages/drift_memory.page.dart';
import 'package:immich_mobile/presentation/widgets/asset_viewer/asset_viewer.page.dart';
import 'package:immich_mobile/providers/api.provider.dart';
@@ -422,6 +424,11 @@ class AppRouter extends RootStackRouter {
page: DriftLibraryRoute.page,
guards: [_authGuard, _duplicateGuard],
),
AutoRoute(
page: DriftAssetSelectionTimelineRoute.page,
guards: [_authGuard, _duplicateGuard],
),
// required to handle all deeplinks in deep_link.service.dart
// auto_route_library#1722
RedirectRoute(path: '*', redirectTo: '/'),

View File

@@ -634,6 +634,55 @@ class DriftArchiveRoute extends PageRouteInfo<void> {
);
}
/// generated route for
/// [DriftAssetSelectionTimelinePage]
class DriftAssetSelectionTimelineRoute
extends PageRouteInfo<DriftAssetSelectionTimelineRouteArgs> {
DriftAssetSelectionTimelineRoute({
Key? key,
Set<BaseAsset> lockedSelectionAssets = const {},
List<PageRouteInfo>? children,
}) : super(
DriftAssetSelectionTimelineRoute.name,
args: DriftAssetSelectionTimelineRouteArgs(
key: key,
lockedSelectionAssets: lockedSelectionAssets,
),
initialChildren: children,
);
static const String name = 'DriftAssetSelectionTimelineRoute';
static PageInfo page = PageInfo(
name,
builder: (data) {
final args = data.argsAs<DriftAssetSelectionTimelineRouteArgs>(
orElse: () => const DriftAssetSelectionTimelineRouteArgs(),
);
return DriftAssetSelectionTimelinePage(
key: args.key,
lockedSelectionAssets: args.lockedSelectionAssets,
);
},
);
}
class DriftAssetSelectionTimelineRouteArgs {
const DriftAssetSelectionTimelineRouteArgs({
this.key,
this.lockedSelectionAssets = const {},
});
final Key? key;
final Set<BaseAsset> lockedSelectionAssets;
@override
String toString() {
return 'DriftAssetSelectionTimelineRouteArgs{key: $key, lockedSelectionAssets: $lockedSelectionAssets}';
}
}
/// generated route for
/// [DriftFavoritePage]
class DriftFavoriteRoute extends PageRouteInfo<void> {

View File

@@ -1,30 +0,0 @@
import 'dart:convert';
import 'dart:typed_data';
import 'package:flutter_hooks/flutter_hooks.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/entities/asset.entity.dart';
import 'package:thumbhash/thumbhash.dart' as thumbhash;
ObjectRef<Uint8List?> useBlurHashRef(Asset? asset) {
if (asset?.thumbhash == null) {
return useRef(null);
}
final rbga = thumbhash.thumbHashToRGBA(
base64Decode(asset!.thumbhash!),
);
return useRef(thumbhash.rgbaToBmp(rbga));
}
ObjectRef<Uint8List?> useDriftBlurHashRef(RemoteAsset? asset) {
if (asset?.thumbHash == null) {
return useRef(null);
}
final rbga = thumbhash.thumbHashToRGBA(
base64Decode(asset!.thumbHash!),
);
return useRef(thumbhash.rgbaToBmp(rbga));
}

View File

@@ -1,35 +0,0 @@
import 'package:flutter/material.dart';
import 'package:immich_mobile/widgets/common/transparent_image.dart';
class FadeInPlaceholderImage extends StatelessWidget {
final Widget placeholder;
final ImageProvider image;
final Duration duration;
final BoxFit fit;
const FadeInPlaceholderImage({
super.key,
required this.placeholder,
required this.image,
this.duration = const Duration(milliseconds: 100),
this.fit = BoxFit.cover,
});
@override
Widget build(BuildContext context) {
return SizedBox.expand(
child: Stack(
fit: StackFit.expand,
children: [
placeholder,
FadeInImage(
fadeInDuration: duration,
image: image,
fit: fit,
placeholder: MemoryImage(kTransparentImage),
),
],
),
);
}
}

View File

@@ -1,11 +1,8 @@
import 'dart:typed_data';
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/providers/image/immich_local_thumbnail_provider.dart';
import 'package:immich_mobile/providers/image/immich_remote_thumbnail_provider.dart';
import 'package:immich_mobile/entities/asset.entity.dart';
import 'package:immich_mobile/utils/hooks/blurhash_hook.dart';
import 'package:immich_mobile/utils/thumbnail_utils.dart';
import 'package:immich_mobile/widgets/common/immich_image.dart';
import 'package:immich_mobile/widgets/common/thumbhash_placeholder.dart';
@@ -64,7 +61,6 @@ class ImmichThumbnail extends HookConsumerWidget {
@override
Widget build(BuildContext context, WidgetRef ref) {
Uint8List? blurhash = useBlurHashRef(asset).value;
final userId = ref.watch(currentUserProvider)?.id;
if (asset == null) {
@@ -82,7 +78,7 @@ class ImmichThumbnail extends HookConsumerWidget {
asset!.exifInfo,
asset!.fileCreatedAt,
asset!.type,
[],
const [],
);
final thumbnailProviderInstance = ImmichThumbnail.imageProvider(
@@ -94,7 +90,7 @@ class ImmichThumbnail extends HookConsumerWidget {
thumbnailProviderInstance.evict();
final originalErrorWidgetBuilder =
blurHashErrorBuilder(blurhash, fit: fit);
blurHashErrorBuilder(asset?.thumbhash, fit: fit);
return originalErrorWidgetBuilder(ctx, error, stackTrace);
}
@@ -105,7 +101,8 @@ class ImmichThumbnail extends HookConsumerWidget {
fadeInDuration: Duration.zero,
fadeOutDuration: const Duration(milliseconds: 100),
octoSet: OctoSet(
placeholderBuilder: blurHashPlaceholderBuilder(blurhash, fit: fit),
placeholderBuilder:
blurHashPlaceholderBuilder(asset?.thumbhash, fit: fit),
errorBuilder: customErrorBuilder,
),
image: thumbnailProviderInstance,

View File

@@ -0,0 +1,77 @@
import 'package:auto_route/auto_route.dart';
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/extensions/translate_extensions.dart';
import 'package:immich_mobile/providers/timeline/multiselect.provider.dart';
class SelectionSliverAppBar extends ConsumerStatefulWidget {
const SelectionSliverAppBar({
super.key,
});
@override
ConsumerState<SelectionSliverAppBar> createState() =>
_SelectionSliverAppBarState();
}
class _SelectionSliverAppBarState extends ConsumerState<SelectionSliverAppBar> {
@override
Widget build(BuildContext context) {
final selection = ref.watch(
multiSelectProvider.select((s) => s.selectedAssets),
);
final toExclude = ref.watch(
multiSelectProvider.select((s) => s.lockedSelectionAssets),
);
final filteredAssets = selection.where((asset) {
return !toExclude.contains(asset);
}).toSet();
onDone(Set<BaseAsset> selected) {
ref.read(multiSelectProvider.notifier).reset();
context.maybePop<Set<BaseAsset>>(selected);
}
return SliverAppBar(
floating: true,
pinned: true,
snap: false,
backgroundColor: context.colorScheme.surfaceContainer,
shape: const RoundedRectangleBorder(
borderRadius: BorderRadius.all(Radius.circular(5)),
),
automaticallyImplyLeading: false,
leading: IconButton(
icon: const Icon(Icons.close_rounded),
onPressed: () {
ref.read(multiSelectProvider.notifier).reset();
context.pop<Set<BaseAsset>>(null);
},
),
centerTitle: true,
title: Text(
"Select {count}".t(
context: context,
args: {
'count': filteredAssets.length.toString(),
},
),
),
actions: [
TextButton(
onPressed: () => onDone(filteredAssets),
child: Text(
'done'.t(context: context),
style: context.textTheme.titleSmall?.copyWith(
color: context.colorScheme.primary,
),
),
),
],
);
}
}

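The filtering above is the live selection minus the locked set, which Set.difference expresses directly; a sketch (helper name hypothetical):

// Sketch: the result reported on "done" is only the newly selected
// assets, excluding anything that was locked in from the start.
Set<BaseAsset> newlySelected(Set<BaseAsset> selected, Set<BaseAsset> locked) =>
    selected.difference(locked);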
View File

@@ -0,0 +1,194 @@
import 'dart:async';
import 'dart:convert';
import 'dart:ui' as ui;
import 'dart:ui';
import 'package:flutter/material.dart';
import 'package:flutter/widgets.dart';
import 'package:thumbhash/thumbhash.dart' as thumbhash;
class ThumbhashImage extends RenderBox {
Color _placeholderColor;
ui.Image? _image;
BoxFit _fit;
ThumbhashImage({
required ui.Image? image,
required BoxFit fit,
required Color placeholderColor,
}) : _image = image,
_fit = fit,
_placeholderColor = placeholderColor;
@override
void paint(PaintingContext context, Offset offset) {
final image = _image;
final rect = offset & size;
if (image == null) {
final paint = Paint();
paint.color = _placeholderColor;
context.canvas.drawRect(rect, paint);
return;
}
paintImage(
canvas: context.canvas,
rect: rect,
image: image,
fit: _fit,
filterQuality: FilterQuality.low,
);
}
@override
void performLayout() {
size = constraints.biggest;
}
set image(ui.Image? value) {
if (_image != value) {
_image = value;
markNeedsPaint();
}
}
set fit(BoxFit value) {
if (_fit != value) {
_fit = value;
markNeedsPaint();
}
}
set placeholderColor(Color value) {
if (_placeholderColor != value) {
_placeholderColor = value;
markNeedsPaint();
}
}
}
class ThumbhashLeaf extends LeafRenderObjectWidget {
final ui.Image? image;
final BoxFit fit;
final Color placeholderColor;
const ThumbhashLeaf({
super.key,
required this.image,
required this.fit,
required this.placeholderColor,
});
@override
RenderObject createRenderObject(BuildContext context) {
return ThumbhashImage(
image: image,
fit: fit,
placeholderColor: placeholderColor,
);
}
@override
void updateRenderObject(BuildContext context, ThumbhashImage renderObject) {
renderObject.fit = fit;
renderObject.image = image;
renderObject.placeholderColor = placeholderColor;
}
}
class Thumbhash extends StatefulWidget {
final String? blurhash;
final BoxFit fit;
final Color placeholderColor;
const Thumbhash({
required this.blurhash,
this.fit = BoxFit.cover,
this.placeholderColor = const Color.fromRGBO(0, 0, 0, 0.2),
super.key,
});
@override
State<Thumbhash> createState() => _ThumbhashState();
}
class _ThumbhashState extends State<Thumbhash> {
String? blurhash;
BoxFit? fit;
ui.Image? _image;
Color? placeholderColor;
@override
void initState() {
super.initState();
final blurhash_ = blurhash = widget.blurhash;
fit = widget.fit;
placeholderColor = widget.placeholderColor;
if (blurhash_ == null) {
return;
}
final image = thumbhash.thumbHashToRGBA(base64.decode(blurhash_));
_decode(image);
}
Future<void> _decode(thumbhash.Image image) async {
if (!mounted) {
return;
}
final buffer = await ImmutableBuffer.fromUint8List(image.rgba);
if (!mounted) {
buffer.dispose();
return;
}
final descriptor = ImageDescriptor.raw(
buffer,
width: image.width,
height: image.height,
pixelFormat: PixelFormat.rgba8888,
);
if (!mounted) {
buffer.dispose();
descriptor.dispose();
return;
}
final codec = await descriptor.instantiateCodec(
targetWidth: image.width,
targetHeight: image.height,
);
if (!mounted) {
buffer.dispose();
descriptor.dispose();
codec.dispose();
return;
}
final frame = (await codec.getNextFrame()).image;
buffer.dispose();
descriptor.dispose();
codec.dispose();
if (!mounted) {
frame.dispose();
return;
}
setState(() {
_image = frame;
});
}
@override
Widget build(BuildContext context) {
return ThumbhashLeaf(
image: _image,
fit: fit!,
placeholderColor: placeholderColor!,
);
}
@override
void dispose() {
_image?.dispose();
super.dispose();
}
}

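Not part of the diff: a hypothetical usage of the new widget, painting the thumbhash immediately and letting the real image cover it once it resolves:

import 'package:flutter/widgets.dart';

// Sketch: thumbhash placeholder underneath the real image.
Widget withThumbhashPlaceholder(String? hash, ImageProvider image) {
  return Stack(
    fit: StackFit.expand,
    children: [
      Thumbhash(blurhash: hash, fit: BoxFit.cover),
      Image(image: image, fit: BoxFit.cover, gaplessPlayback: true),
    ],
  );
}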
View File

@@ -1,14 +1,12 @@
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:immich_mobile/widgets/asset_grid/thumbnail_placeholder.dart';
import 'package:immich_mobile/widgets/common/fade_in_placeholder_image.dart';
import 'package:immich_mobile/widgets/common/thumbhash.dart';
import 'package:octo_image/octo_image.dart';
/// Simple set to show [OctoPlaceholder.circularProgressIndicator] as
/// placeholder and [OctoError.icon] as error.
OctoSet blurHashOrPlaceholder(
Uint8List? blurhash, {
BoxFit? fit,
String? blurhash, {
BoxFit fit = BoxFit.cover,
Text? errorMessage,
}) {
return OctoSet(
@@ -19,21 +17,15 @@ OctoSet blurHashOrPlaceholder(
}
OctoPlaceholderBuilder blurHashPlaceholderBuilder(
Uint8List? blurhash, {
BoxFit? fit,
String? blurhash, {
required BoxFit fit,
}) {
return (context) => blurhash == null
? const ThumbnailPlaceholder()
: FadeInPlaceholderImage(
placeholder: const ThumbnailPlaceholder(),
image: MemoryImage(blurhash),
fit: fit ?? BoxFit.cover,
);
return (context) => Thumbhash(blurhash: blurhash, fit: fit);
}
OctoErrorBuilder blurHashErrorBuilder(
Uint8List? blurhash, {
BoxFit? fit,
String? blurhash, {
BoxFit fit = BoxFit.cover,
Text? message,
IconData? icon,
Color? iconColor,

View File

@@ -5,8 +5,8 @@ import 'package:flutter_hooks/flutter_hooks.dart';
import 'package:immich_mobile/entities/asset.entity.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/pages/common/native_video_viewer.page.dart';
import 'package:immich_mobile/utils/hooks/blurhash_hook.dart';
import 'package:immich_mobile/widgets/common/immich_image.dart';
import 'package:immich_mobile/widgets/common/thumbhash.dart';
class MemoryCard extends StatelessWidget {
final Asset asset;
@@ -113,44 +113,35 @@ class _BlurredBackdrop extends HookWidget {
@override
Widget build(BuildContext context) {
final blurhash = useBlurHashRef(asset).value;
final blurhash = asset.thumbhash;
if (blurhash != null) {
// Use a nice cheap blur hash image decoration
return Container(
return Stack(
children: [
const ColoredBox(color: Color.fromRGBO(0, 0, 0, 0.2)),
Thumbhash(blurhash: blurhash, fit: BoxFit.cover),
],
);
}
// Fall back to the more expensive ImageFiltered blur.
// Since the ImmichImage is already precached, we can
// safely reuse its provider here.
return ImageFiltered(
imageFilter: ImageFilter.blur(sigmaX: 30, sigmaY: 30),
child: DecoratedBox(
decoration: BoxDecoration(
image: DecorationImage(
image: MemoryImage(
blurhash,
image: ImmichImage.imageProvider(
asset: asset,
height: context.height,
width: context.width,
),
fit: BoxFit.cover,
),
),
child: Container(
color: Colors.black.withValues(alpha: 0.2),
),
);
} else {
// Fall back to the more expensive ImageFiltered blur.
// Since the ImmichImage is already precached, we can
// safely reuse its provider here.
return ImageFiltered(
imageFilter: ImageFilter.blur(sigmaX: 30, sigmaY: 30),
child: Container(
decoration: BoxDecoration(
image: DecorationImage(
image: ImmichImage.imageProvider(
asset: asset,
height: context.height,
width: context.width,
),
fit: BoxFit.cover,
),
),
child: Container(
color: Colors.black.withValues(alpha: 0.2),
),
),
);
}
child: const ColoredBox(color: Color.fromRGBO(0, 0, 0, 0.2)),
),
);
}
}

View File

@@ -107,25 +107,21 @@ const compare = async () => {
const { database } = configRepository.getEnv();
const db = postgres(asPostgresConnectionConfig(database.config));
const source = schemaFromCode();
const source = schemaFromCode({ overrides: true });
const target = await schemaFromDatabase(db, {});
const sourceParams = new Set(source.parameters.map(({ name }) => name));
target.parameters = target.parameters.filter(({ name }) => sourceParams.has(name));
const sourceTables = new Set(source.tables.map(({ name }) => name));
target.tables = target.tables.filter(({ name }) => sourceTables.has(name));
console.log(source.warnings.join('\n'));
const up = schemaDiff(source, target, {
tables: { ignoreExtra: true },
functions: { ignoreExtra: false },
parameters: { ignoreExtra: true },
});
const down = schemaDiff(target, source, {
tables: { ignoreExtra: false },
tables: { ignoreExtra: false, ignoreMissing: true },
functions: { ignoreExtra: false },
extension: { ignoreMissing: true },
extensions: { ignoreMissing: true },
parameters: { ignoreMissing: true },
});
return { up, down };

View File

@@ -20,7 +20,6 @@ export const immich_uuid_v7 = registerFunction({
),
'hex')::uuid;
`,
synchronize: false,
});
export const album_user_after_insert = registerFunction({
@@ -33,7 +32,6 @@ export const album_user_after_insert = registerFunction({
WHERE "id" IN (SELECT DISTINCT "albumsId" FROM inserted_rows);
RETURN NULL;
END`,
synchronize: false,
});
export const updated_at = registerFunction({
@@ -48,7 +46,6 @@ export const updated_at = registerFunction({
new."updateId" = immich_uuid_v7(clock_timestamp);
return new;
END;`,
synchronize: false,
});
export const f_concat_ws = registerFunction({
@@ -59,7 +56,6 @@ export const f_concat_ws = registerFunction({
parallel: 'safe',
behavior: 'immutable',
body: `SELECT array_to_string($2, $1)`,
synchronize: false,
});
export const f_unaccent = registerFunction({
@@ -71,7 +67,6 @@ export const f_unaccent = registerFunction({
strict: true,
behavior: 'immutable',
return: `unaccent('unaccent', $1)`,
synchronize: false,
});
export const ll_to_earth_public = registerFunction({
@@ -83,7 +78,6 @@ export const ll_to_earth_public = registerFunction({
strict: true,
behavior: 'immutable',
body: `SELECT public.cube(public.cube(public.cube(public.earth()*cos(radians(latitude))*cos(radians(longitude))),public.earth()*cos(radians(latitude))*sin(radians(longitude))),public.earth()*sin(radians(latitude)))::public.earth`,
synchronize: false,
});
export const users_delete_audit = registerFunction({
@@ -97,7 +91,6 @@ export const users_delete_audit = registerFunction({
FROM OLD;
RETURN NULL;
END`,
synchronize: false,
});
export const partners_delete_audit = registerFunction({
@@ -111,7 +104,6 @@ export const partners_delete_audit = registerFunction({
FROM OLD;
RETURN NULL;
END`,
synchronize: false,
});
export const assets_delete_audit = registerFunction({
@@ -125,7 +117,6 @@ export const assets_delete_audit = registerFunction({
FROM OLD;
RETURN NULL;
END`,
synchronize: false,
});
export const albums_delete_audit = registerFunction({
@@ -139,7 +130,6 @@ export const albums_delete_audit = registerFunction({
FROM OLD;
RETURN NULL;
END`,
synchronize: false,
});
export const album_assets_delete_audit = registerFunction({
@@ -153,7 +143,6 @@ export const album_assets_delete_audit = registerFunction({
WHERE "albumsId" IN (SELECT "id" FROM albums WHERE "id" IN (SELECT "albumsId" FROM OLD));
RETURN NULL;
END`,
synchronize: false,
});
export const album_users_delete_audit = registerFunction({
@@ -174,7 +163,6 @@ export const album_users_delete_audit = registerFunction({
RETURN NULL;
END`,
synchronize: false,
});
export const memories_delete_audit = registerFunction({
@@ -188,7 +176,6 @@ export const memories_delete_audit = registerFunction({
FROM OLD;
RETURN NULL;
END`,
synchronize: false,
});
export const memory_assets_delete_audit = registerFunction({
@@ -202,7 +189,6 @@ export const memory_assets_delete_audit = registerFunction({
WHERE "memoriesId" IN (SELECT "id" FROM memories WHERE "id" IN (SELECT "memoriesId" FROM OLD));
RETURN NULL;
END`,
synchronize: false,
});
export const stacks_delete_audit = registerFunction({
@@ -216,5 +202,4 @@ export const stacks_delete_audit = registerFunction({
FROM OLD;
RETURN NULL;
END`,
synchronize: false,
});

View File

@@ -0,0 +1,66 @@
import { Kysely, sql } from 'kysely';
export async function up(db: Kysely<any>): Promise<void> {
await sql`CREATE TABLE "migration_overrides" ("name" character varying NOT NULL, "value" jsonb NOT NULL);`.execute(db);
await sql`ALTER TABLE "migration_overrides" ADD CONSTRAINT "migration_overrides_pkey" PRIMARY KEY ("name");`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_immich_uuid_v7', '{"type":"function","name":"immich_uuid_v7","sql":"CREATE OR REPLACE FUNCTION immich_uuid_v7(p_timestamp timestamp with time zone default clock_timestamp())\\n RETURNS uuid\\n VOLATILE LANGUAGE SQL\\n AS $$\\n SELECT encode(\\n set_bit(\\n set_bit(\\n overlay(uuid_send(gen_random_uuid())\\n placing substring(int8send(floor(extract(epoch from p_timestamp) * 1000)::bigint) from 3)\\n from 1 for 6\\n ),\\n 52, 1\\n ),\\n 53, 1\\n ),\\n ''hex'')::uuid;\\n $$;"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_album_user_after_insert', '{"type":"function","name":"album_user_after_insert","sql":"CREATE OR REPLACE FUNCTION album_user_after_insert()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n UPDATE albums SET \\"updatedAt\\" = clock_timestamp(), \\"updateId\\" = immich_uuid_v7(clock_timestamp())\\n WHERE \\"id\\" IN (SELECT DISTINCT \\"albumsId\\" FROM inserted_rows);\\n RETURN NULL;\\n END\\n $$;"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_updated_at', '{"type":"function","name":"updated_at","sql":"CREATE OR REPLACE FUNCTION updated_at()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n DECLARE\\n clock_timestamp TIMESTAMP := clock_timestamp();\\n BEGIN\\n new.\\"updatedAt\\" = clock_timestamp;\\n new.\\"updateId\\" = immich_uuid_v7(clock_timestamp);\\n return new;\\n END;\\n $$;"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_f_concat_ws', '{"type":"function","name":"f_concat_ws","sql":"CREATE OR REPLACE FUNCTION f_concat_ws(text, text[])\\n RETURNS text\\n PARALLEL SAFE IMMUTABLE LANGUAGE SQL\\n AS $$SELECT array_to_string($2, $1)$$;"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_f_unaccent', '{"type":"function","name":"f_unaccent","sql":"CREATE OR REPLACE FUNCTION f_unaccent(text)\\n RETURNS text\\n PARALLEL SAFE STRICT IMMUTABLE LANGUAGE SQL\\n RETURN unaccent(''unaccent'', $1)"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_ll_to_earth_public', '{"type":"function","name":"ll_to_earth_public","sql":"CREATE OR REPLACE FUNCTION ll_to_earth_public(latitude double precision, longitude double precision)\\n RETURNS public.earth\\n PARALLEL SAFE STRICT IMMUTABLE LANGUAGE SQL\\n AS $$SELECT public.cube(public.cube(public.cube(public.earth()*cos(radians(latitude))*cos(radians(longitude))),public.earth()*cos(radians(latitude))*sin(radians(longitude))),public.earth()*sin(radians(latitude)))::public.earth$$;"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_users_delete_audit', '{"type":"function","name":"users_delete_audit","sql":"CREATE OR REPLACE FUNCTION users_delete_audit()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n INSERT INTO users_audit (\\"userId\\")\\n SELECT \\"id\\"\\n FROM OLD;\\n RETURN NULL;\\n END\\n $$;"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_partners_delete_audit', '{"type":"function","name":"partners_delete_audit","sql":"CREATE OR REPLACE FUNCTION partners_delete_audit()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n INSERT INTO partners_audit (\\"sharedById\\", \\"sharedWithId\\")\\n SELECT \\"sharedById\\", \\"sharedWithId\\"\\n FROM OLD;\\n RETURN NULL;\\n END\\n $$;"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_assets_delete_audit', '{"type":"function","name":"assets_delete_audit","sql":"CREATE OR REPLACE FUNCTION assets_delete_audit()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n INSERT INTO assets_audit (\\"assetId\\", \\"ownerId\\")\\n SELECT \\"id\\", \\"ownerId\\"\\n FROM OLD;\\n RETURN NULL;\\n END\\n $$;"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_albums_delete_audit', '{"type":"function","name":"albums_delete_audit","sql":"CREATE OR REPLACE FUNCTION albums_delete_audit()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n INSERT INTO albums_audit (\\"albumId\\", \\"userId\\")\\n SELECT \\"id\\", \\"ownerId\\"\\n FROM OLD;\\n RETURN NULL;\\n END\\n $$;"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_album_assets_delete_audit', '{"type":"function","name":"album_assets_delete_audit","sql":"CREATE OR REPLACE FUNCTION album_assets_delete_audit()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n INSERT INTO album_assets_audit (\\"albumId\\", \\"assetId\\")\\n SELECT \\"albumsId\\", \\"assetsId\\" FROM OLD\\n WHERE \\"albumsId\\" IN (SELECT \\"id\\" FROM albums WHERE \\"id\\" IN (SELECT \\"albumsId\\" FROM OLD));\\n RETURN NULL;\\n END\\n $$;"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_album_users_delete_audit', '{"type":"function","name":"album_users_delete_audit","sql":"CREATE OR REPLACE FUNCTION album_users_delete_audit()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n INSERT INTO albums_audit (\\"albumId\\", \\"userId\\")\\n SELECT \\"albumsId\\", \\"usersId\\"\\n FROM OLD;\\n\\n IF pg_trigger_depth() = 1 THEN\\n INSERT INTO album_users_audit (\\"albumId\\", \\"userId\\")\\n SELECT \\"albumsId\\", \\"usersId\\"\\n FROM OLD;\\n END IF;\\n\\n RETURN NULL;\\n END\\n $$;"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_memories_delete_audit', '{"type":"function","name":"memories_delete_audit","sql":"CREATE OR REPLACE FUNCTION memories_delete_audit()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n INSERT INTO memories_audit (\\"memoryId\\", \\"userId\\")\\n SELECT \\"id\\", \\"ownerId\\"\\n FROM OLD;\\n RETURN NULL;\\n END\\n $$;"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_memory_assets_delete_audit', '{"type":"function","name":"memory_assets_delete_audit","sql":"CREATE OR REPLACE FUNCTION memory_assets_delete_audit()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n INSERT INTO memory_assets_audit (\\"memoryId\\", \\"assetId\\")\\n SELECT \\"memoriesId\\", \\"assetsId\\" FROM OLD\\n WHERE \\"memoriesId\\" IN (SELECT \\"id\\" FROM memories WHERE \\"id\\" IN (SELECT \\"memoriesId\\" FROM OLD));\\n RETURN NULL;\\n END\\n $$;"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_stacks_delete_audit', '{"type":"function","name":"stacks_delete_audit","sql":"CREATE OR REPLACE FUNCTION stacks_delete_audit()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n INSERT INTO stacks_audit (\\"stackId\\", \\"userId\\")\\n SELECT \\"id\\", \\"ownerId\\"\\n FROM OLD;\\n RETURN NULL;\\n END\\n $$;"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_users_delete_audit', '{"type":"trigger","name":"users_delete_audit","sql":"CREATE OR REPLACE TRIGGER \\"users_delete_audit\\"\\n AFTER DELETE ON \\"users\\"\\n REFERENCING OLD TABLE AS \\"old\\"\\n FOR EACH STATEMENT\\n WHEN (pg_trigger_depth() = 0)\\n EXECUTE FUNCTION users_delete_audit();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_users_updated_at', '{"type":"trigger","name":"users_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"users_updated_at\\"\\n BEFORE UPDATE ON \\"users\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_libraries_updated_at', '{"type":"trigger","name":"libraries_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"libraries_updated_at\\"\\n BEFORE UPDATE ON \\"libraries\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_stacks_delete_audit', '{"type":"trigger","name":"stacks_delete_audit","sql":"CREATE OR REPLACE TRIGGER \\"stacks_delete_audit\\"\\n AFTER DELETE ON \\"asset_stack\\"\\n REFERENCING OLD TABLE AS \\"old\\"\\n FOR EACH STATEMENT\\n WHEN (pg_trigger_depth() = 0)\\n EXECUTE FUNCTION stacks_delete_audit();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_stacks_updated_at', '{"type":"trigger","name":"stacks_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"stacks_updated_at\\"\\n BEFORE UPDATE ON \\"asset_stack\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_assets_delete_audit', '{"type":"trigger","name":"assets_delete_audit","sql":"CREATE OR REPLACE TRIGGER \\"assets_delete_audit\\"\\n AFTER DELETE ON \\"assets\\"\\n REFERENCING OLD TABLE AS \\"old\\"\\n FOR EACH STATEMENT\\n WHEN (pg_trigger_depth() = 0)\\n EXECUTE FUNCTION assets_delete_audit();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_assets_updated_at', '{"type":"trigger","name":"assets_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"assets_updated_at\\"\\n BEFORE UPDATE ON \\"assets\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_idx_originalfilename_trigram', '{"type":"index","name":"idx_originalfilename_trigram","sql":"CREATE INDEX \\"idx_originalfilename_trigram\\" ON \\"assets\\" USING gin (f_unaccent(\\"originalFileName\\") gin_trgm_ops)"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_idx_local_date_time_month', '{"type":"index","name":"idx_local_date_time_month","sql":"CREATE INDEX \\"idx_local_date_time_month\\" ON \\"assets\\" ((date_trunc(''MONTH''::text, (\\"localDateTime\\" AT TIME ZONE ''UTC''::text)) AT TIME ZONE ''UTC''::text))"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_idx_local_date_time', '{"type":"index","name":"idx_local_date_time","sql":"CREATE INDEX \\"idx_local_date_time\\" ON \\"assets\\" (((\\"localDateTime\\" at time zone ''UTC'')::date))"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_UQ_assets_owner_library_checksum', '{"type":"index","name":"UQ_assets_owner_library_checksum","sql":"CREATE UNIQUE INDEX \\"UQ_assets_owner_library_checksum\\" ON \\"assets\\" (\\"ownerId\\", \\"libraryId\\", \\"checksum\\") WHERE (\\"libraryId\\" IS NOT NULL)"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_UQ_assets_owner_checksum', '{"type":"index","name":"UQ_assets_owner_checksum","sql":"CREATE UNIQUE INDEX \\"UQ_assets_owner_checksum\\" ON \\"assets\\" (\\"ownerId\\", \\"checksum\\") WHERE (\\"libraryId\\" IS NULL)"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_albums_delete_audit', '{"type":"trigger","name":"albums_delete_audit","sql":"CREATE OR REPLACE TRIGGER \\"albums_delete_audit\\"\\n AFTER DELETE ON \\"albums\\"\\n REFERENCING OLD TABLE AS \\"old\\"\\n FOR EACH STATEMENT\\n WHEN (pg_trigger_depth() = 0)\\n EXECUTE FUNCTION albums_delete_audit();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_albums_updated_at', '{"type":"trigger","name":"albums_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"albums_updated_at\\"\\n BEFORE UPDATE ON \\"albums\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_activity_updated_at', '{"type":"trigger","name":"activity_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"activity_updated_at\\"\\n BEFORE UPDATE ON \\"activity\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_IDX_activity_like', '{"type":"index","name":"IDX_activity_like","sql":"CREATE UNIQUE INDEX \\"IDX_activity_like\\" ON \\"activity\\" (\\"assetId\\", \\"userId\\", \\"albumId\\") WHERE (\\"isLiked\\" = true)"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_album_assets_delete_audit', '{"type":"trigger","name":"album_assets_delete_audit","sql":"CREATE OR REPLACE TRIGGER \\"album_assets_delete_audit\\"\\n AFTER DELETE ON \\"albums_assets_assets\\"\\n REFERENCING OLD TABLE AS \\"old\\"\\n FOR EACH STATEMENT\\n WHEN (pg_trigger_depth() <= 1)\\n EXECUTE FUNCTION album_assets_delete_audit();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_album_assets_updated_at', '{"type":"trigger","name":"album_assets_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"album_assets_updated_at\\"\\n BEFORE UPDATE ON \\"albums_assets_assets\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_album_users_delete_audit', '{"type":"trigger","name":"album_users_delete_audit","sql":"CREATE OR REPLACE TRIGGER \\"album_users_delete_audit\\"\\n AFTER DELETE ON \\"albums_shared_users_users\\"\\n REFERENCING OLD TABLE AS \\"old\\"\\n FOR EACH STATEMENT\\n WHEN (pg_trigger_depth() <= 1)\\n EXECUTE FUNCTION album_users_delete_audit();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_album_user_after_insert', '{"type":"trigger","name":"album_user_after_insert","sql":"CREATE OR REPLACE TRIGGER \\"album_user_after_insert\\"\\n AFTER INSERT ON \\"albums_shared_users_users\\"\\n REFERENCING NEW TABLE AS \\"inserted_rows\\"\\n FOR EACH STATEMENT\\n EXECUTE FUNCTION album_user_after_insert();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_album_users_updated_at', '{"type":"trigger","name":"album_users_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"album_users_updated_at\\"\\n BEFORE UPDATE ON \\"albums_shared_users_users\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_api_keys_updated_at', '{"type":"trigger","name":"api_keys_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"api_keys_updated_at\\"\\n BEFORE UPDATE ON \\"api_keys\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_person_updated_at', '{"type":"trigger","name":"person_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"person_updated_at\\"\\n BEFORE UPDATE ON \\"person\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_asset_files_updated_at', '{"type":"trigger","name":"asset_files_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"asset_files_updated_at\\"\\n BEFORE UPDATE ON \\"asset_files\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_asset_exif_updated_at', '{"type":"trigger","name":"asset_exif_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"asset_exif_updated_at\\"\\n BEFORE UPDATE ON \\"exif\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_face_index', '{"type":"index","name":"face_index","sql":"CREATE INDEX \\"face_index\\" ON \\"face_search\\" USING hnsw (embedding vector_cosine_ops) WITH (ef_construction = 300, m = 16)"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_IDX_geodata_gist_earthcoord', '{"type":"index","name":"IDX_geodata_gist_earthcoord","sql":"CREATE INDEX \\"IDX_geodata_gist_earthcoord\\" ON \\"geodata_places\\" (ll_to_earth_public(latitude, longitude))"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_idx_geodata_places_name', '{"type":"index","name":"idx_geodata_places_name","sql":"CREATE INDEX \\"idx_geodata_places_name\\" ON \\"geodata_places\\" USING gin (f_unaccent(\\"name\\") gin_trgm_ops)"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_idx_geodata_places_admin2_name', '{"type":"index","name":"idx_geodata_places_admin2_name","sql":"CREATE INDEX \\"idx_geodata_places_admin2_name\\" ON \\"geodata_places\\" USING gin (f_unaccent(\\"admin2Name\\") gin_trgm_ops)"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_idx_geodata_places_admin1_name', '{"type":"index","name":"idx_geodata_places_admin1_name","sql":"CREATE INDEX \\"idx_geodata_places_admin1_name\\" ON \\"geodata_places\\" USING gin (f_unaccent(\\"admin1Name\\") gin_trgm_ops)"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_idx_geodata_places_alternate_names', '{"type":"index","name":"idx_geodata_places_alternate_names","sql":"CREATE INDEX \\"idx_geodata_places_alternate_names\\" ON \\"geodata_places\\" USING gin (f_unaccent(\\"alternateNames\\") gin_trgm_ops)"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_memories_delete_audit', '{"type":"trigger","name":"memories_delete_audit","sql":"CREATE OR REPLACE TRIGGER \\"memories_delete_audit\\"\\n AFTER DELETE ON \\"memories\\"\\n REFERENCING OLD TABLE AS \\"old\\"\\n FOR EACH STATEMENT\\n WHEN (pg_trigger_depth() = 0)\\n EXECUTE FUNCTION memories_delete_audit();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_memories_updated_at', '{"type":"trigger","name":"memories_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"memories_updated_at\\"\\n BEFORE UPDATE ON \\"memories\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_memory_assets_delete_audit', '{"type":"trigger","name":"memory_assets_delete_audit","sql":"CREATE OR REPLACE TRIGGER \\"memory_assets_delete_audit\\"\\n AFTER DELETE ON \\"memories_assets_assets\\"\\n REFERENCING OLD TABLE AS \\"old\\"\\n FOR EACH STATEMENT\\n WHEN (pg_trigger_depth() <= 1)\\n EXECUTE FUNCTION memory_assets_delete_audit();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_memory_assets_updated_at', '{"type":"trigger","name":"memory_assets_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"memory_assets_updated_at\\"\\n BEFORE UPDATE ON \\"memories_assets_assets\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_notifications_updated_at', '{"type":"trigger","name":"notifications_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"notifications_updated_at\\"\\n BEFORE UPDATE ON \\"notifications\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_partners_delete_audit', '{"type":"trigger","name":"partners_delete_audit","sql":"CREATE OR REPLACE TRIGGER \\"partners_delete_audit\\"\\n AFTER DELETE ON \\"partners\\"\\n REFERENCING OLD TABLE AS \\"old\\"\\n FOR EACH STATEMENT\\n WHEN (pg_trigger_depth() = 0)\\n EXECUTE FUNCTION partners_delete_audit();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_partners_updated_at', '{"type":"trigger","name":"partners_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"partners_updated_at\\"\\n BEFORE UPDATE ON \\"partners\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_sessions_updated_at', '{"type":"trigger","name":"sessions_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"sessions_updated_at\\"\\n BEFORE UPDATE ON \\"sessions\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_session_sync_checkpoints_updated_at', '{"type":"trigger","name":"session_sync_checkpoints_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"session_sync_checkpoints_updated_at\\"\\n BEFORE UPDATE ON \\"session_sync_checkpoints\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_tags_updated_at', '{"type":"trigger","name":"tags_updated_at","sql":"CREATE OR REPLACE TRIGGER \\"tags_updated_at\\"\\n BEFORE UPDATE ON \\"tags\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION updated_at();"}'::jsonb);`.execute(db);
}
export async function down(db: Kysely<any>): Promise<void> {
await sql`DROP TABLE "migration_overrides";`.execute(db);
}
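
Each seeded row pairs a stable key with a jsonb payload describing one function, trigger, or index. A minimal sketch (not part of this diff) of reading a row back with Kysely; the `OverrideValue` type here is an assumption that mirrors the payload shape used above.

import { Kysely, sql } from 'kysely';

type OverrideValue = { type: 'function' | 'trigger' | 'index'; name: string; sql: string };

// Fetch one override payload; jsonb columns come back as parsed objects.
async function readOverride(db: Kysely<any>, name: string): Promise<OverrideValue | undefined> {
  const result = await sql<{ value: OverrideValue }>`SELECT "value" FROM "migration_overrides" WHERE "name" = ${name}`.execute(db);
  return result.rows[0]?.value;
}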

View File

@@ -44,7 +44,6 @@ import { ASSET_CHECKSUM_CONSTRAINT } from 'src/utils/database';
@Index({
name: 'idx_local_date_time',
expression: `(("localDateTime" at time zone 'UTC')::date)`,
synchronize: false,
})
@Index({
name: 'idx_local_date_time_month',
@@ -56,7 +55,6 @@ import { ASSET_CHECKSUM_CONSTRAINT } from 'src/utils/database';
name: 'idx_originalfilename_trigram',
using: 'gin',
expression: 'f_unaccent("originalFileName") gin_trgm_ops',
synchronize: false,
})
// For all assets, each originalPath must be unique per user and library
export class AssetTable {

View File

@@ -7,7 +7,6 @@ import { Column, ForeignKeyColumn, Index, Table } from 'src/sql-tools';
using: 'hnsw',
expression: `embedding vector_cosine_ops`,
with: 'ef_construction = 300, m = 16',
synchronize: false,
})
export class FaceSearchTable {
@ForeignKeyColumn(() => AssetFaceTable, {

View File

@@ -1,34 +1,29 @@
import { Column, Index, PrimaryColumn, Table, Timestamp } from 'src/sql-tools';
@Table({ name: 'geodata_places', synchronize: false })
@Table({ name: 'geodata_places' })
@Index({
name: 'idx_geodata_places_alternate_names',
using: 'gin',
expression: 'f_unaccent("alternateNames") gin_trgm_ops',
synchronize: false,
})
@Index({
name: 'idx_geodata_places_admin1_name',
using: 'gin',
expression: 'f_unaccent("admin1Name") gin_trgm_ops',
synchronize: false,
})
@Index({
name: 'idx_geodata_places_admin2_name',
using: 'gin',
expression: 'f_unaccent("admin2Name") gin_trgm_ops',
synchronize: false,
})
@Index({
name: 'idx_geodata_places_name',
using: 'gin',
expression: 'f_unaccent("name") gin_trgm_ops',
synchronize: false,
})
@Index({
name: 'IDX_geodata_gist_earthcoord',
expression: 'll_to_earth_public(latitude, longitude)',
synchronize: false,
})
export class GeodataPlacesTable {
@PrimaryColumn({ type: 'integer' })

View File

@@ -0,0 +1,69 @@
import { compareOverrides } from 'src/sql-tools/comparers/override.comparer';
import { DatabaseOverride, Reason } from 'src/sql-tools/types';
import { describe, expect, it } from 'vitest';
const testOverride: DatabaseOverride = {
name: 'test',
value: { type: 'function', name: 'test_func', sql: 'func implementation' },
synchronize: true,
};
describe('compareOverrides', () => {
describe('onExtra', () => {
it('should work', () => {
expect(compareOverrides.onExtra(testOverride)).toEqual([
{
type: 'OverrideDrop',
overrideName: 'test',
reason: Reason.MissingInSource,
},
]);
});
});
describe('onMissing', () => {
it('should work', () => {
expect(compareOverrides.onMissing(testOverride)).toEqual([
{
type: 'OverrideCreate',
override: testOverride,
reason: Reason.MissingInTarget,
},
]);
});
});
describe('onCompare', () => {
it('should work', () => {
expect(compareOverrides.onCompare(testOverride, testOverride)).toEqual([]);
});
it('should drop and recreate when the value changes', () => {
const source: DatabaseOverride = {
name: 'test',
value: {
type: 'function',
name: 'test_func',
sql: 'func implementation',
},
synchronize: true,
};
const target: DatabaseOverride = {
name: 'test',
value: {
type: 'function',
name: 'test_func',
sql: 'func implementation2',
},
synchronize: true,
};
expect(compareOverrides.onCompare(source, target)).toEqual([
{
override: source,
type: 'OverrideUpdate',
reason: expect.stringContaining('value is different'),
},
]);
});
});
});

View File

@@ -0,0 +1,29 @@
import { Comparer, DatabaseOverride, Reason } from 'src/sql-tools/types';
export const compareOverrides: Comparer<DatabaseOverride> = {
onMissing: (source) => [
{
type: 'OverrideCreate',
override: source,
reason: Reason.MissingInTarget,
},
],
onExtra: (target) => [
{
type: 'OverrideDrop',
overrideName: target.name,
reason: Reason.MissingInSource,
},
],
onCompare: (source, target) => {
if (source.value.name !== target.value.name || source.value.sql !== target.value.sql) {
const sourceValue = JSON.stringify(source.value);
const targetValue = JSON.stringify(target.value);
return [
{ type: 'OverrideUpdate', override: source, reason: `value is different (${sourceValue} vs ${targetValue})` },
];
}
return [];
},
};
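
A usage sketch of the comparer contract (values below are hypothetical): identical overrides produce no diff, while a changed sql payload yields an OverrideUpdate that rewrites the stored row.

import { compareOverrides } from 'src/sql-tools/comparers/override.comparer';
import { DatabaseOverride } from 'src/sql-tools/types';

const stored: DatabaseOverride = {
  name: 'index_face_index',
  value: { type: 'index', name: 'face_index', sql: 'CREATE INDEX ... (m = 16)' },
  synchronize: true,
};
const declared: DatabaseOverride = { ...stored, value: { ...stored.value, sql: 'CREATE INDEX ... (m = 24)' } };

compareOverrides.onCompare(stored, stored); // => []
compareOverrides.onCompare(declared, stored); // => [{ type: 'OverrideUpdate', override: declared, reason: 'value is different (...)' }]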

View File

@@ -0,0 +1,74 @@
import {
BaseContextOptions,
DatabaseEnum,
DatabaseExtension,
DatabaseFunction,
DatabaseOverride,
DatabaseParameter,
DatabaseSchema,
DatabaseTable,
} from 'src/sql-tools/types';
const asOverrideKey = (type: string, name: string) => `${type}:${name}`;
export class BaseContext {
databaseName: string;
schemaName: string;
overrideTableName: string;
tables: DatabaseTable[] = [];
functions: DatabaseFunction[] = [];
enums: DatabaseEnum[] = [];
extensions: DatabaseExtension[] = [];
parameters: DatabaseParameter[] = [];
overrides: DatabaseOverride[] = [];
warnings: string[] = [];
constructor(options: BaseContextOptions) {
this.databaseName = options.databaseName ?? 'postgres';
this.schemaName = options.schemaName ?? 'public';
this.overrideTableName = options.overrideTableName ?? 'migration_overrides';
}
getTableByName(name: string) {
return this.tables.find((table) => table.name === name);
}
warn(context: string, message: string) {
this.warnings.push(`[${context}] ${message}`);
}
build(): DatabaseSchema {
const overrideMap = new Map<string, DatabaseOverride>();
for (const override of this.overrides) {
const { type, name } = override.value;
overrideMap.set(asOverrideKey(type, name), override);
}
for (const func of this.functions) {
func.override = overrideMap.get(asOverrideKey('function', func.name));
}
for (const { indexes, triggers } of this.tables) {
for (const index of indexes) {
index.override = overrideMap.get(asOverrideKey('index', index.name));
}
for (const trigger of triggers) {
trigger.override = overrideMap.get(asOverrideKey('trigger', trigger.name));
}
}
return {
databaseName: this.databaseName,
schemaName: this.schemaName,
tables: this.tables,
functions: this.functions,
enums: this.enums,
extensions: this.extensions,
parameters: this.parameters,
overrides: this.overrides,
warnings: this.warnings,
};
}
}
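
In other words, build() joins overrides to their entities by a `type:name` key. A sketch under the assumption that a function and its seeded override share the name `updated_at` (object shapes abbreviated to the fields this diff shows):

import { BaseContext } from 'src/sql-tools/contexts/base-context';
import { DatabaseFunction } from 'src/sql-tools/types';

const ctx = new BaseContext({});
ctx.functions.push({ name: 'updated_at', expression: 'CREATE OR REPLACE FUNCTION ...', synchronize: true } as DatabaseFunction);
ctx.overrides.push({
  name: 'function_updated_at',
  value: { type: 'function', name: 'updated_at', sql: 'CREATE OR REPLACE FUNCTION ...' },
  synchronize: true,
});

const schema = ctx.build();
// the override was attached via its 'function:updated_at' key
schema.functions[0].override?.name; // => 'function_updated_at'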

View File

@@ -1,45 +1,25 @@
/* eslint-disable @typescript-eslint/no-unsafe-function-type */
import { BaseContext } from 'src/sql-tools/contexts/base-context';
import { ColumnOptions, TableOptions } from 'src/sql-tools/decorators';
import { asKey } from 'src/sql-tools/helpers';
import {
DatabaseColumn,
DatabaseEnum,
DatabaseExtension,
DatabaseFunction,
DatabaseParameter,
DatabaseSchema,
DatabaseTable,
SchemaFromCodeOptions,
} from 'src/sql-tools/types';
import { DatabaseColumn, DatabaseTable, SchemaFromCodeOptions } from 'src/sql-tools/types';
type TableMetadata = { options: TableOptions; object: Function; methodToColumn: Map<string | symbol, DatabaseColumn> };
export class SchemaBuilder {
databaseName: string;
schemaName: string;
tables: DatabaseTable[] = [];
functions: DatabaseFunction[] = [];
enums: DatabaseEnum[] = [];
extensions: DatabaseExtension[] = [];
parameters: DatabaseParameter[] = [];
warnings: string[] = [];
export class ProcessorContext extends BaseContext {
constructor(public options: SchemaFromCodeOptions) {
options.createForeignKeyIndexes = options.createForeignKeyIndexes ?? true;
options.overrides = options.overrides ?? false;
super(options);
}
classToTable: WeakMap<Function, DatabaseTable> = new WeakMap();
tableToMetadata: WeakMap<DatabaseTable, TableMetadata> = new WeakMap();
constructor(options: SchemaFromCodeOptions) {
this.databaseName = options.databaseName ?? 'postgres';
this.schemaName = options.schemaName ?? 'public';
}
getTableByObject(object: Function) {
return this.classToTable.get(object);
}
getTableByName(name: string) {
return this.tables.find((table) => table.name === name);
}
getTableMetadata(table: DatabaseTable) {
const metadata = this.tableToMetadata.get(table);
if (!metadata) {
@@ -92,10 +72,6 @@ export class SchemaBuilder {
return asKey('IDX_', table, items);
}
warn(context: string, message: string) {
this.warnings.push(`[${context}] ${message}`);
}
warnMissingTable(context: string, object: object, propertyName?: symbol | string) {
const label = object.constructor.name + (propertyName ? '.' + String(propertyName) : '');
this.warn(context, `Unable to find table (${label})`);
@@ -105,17 +81,4 @@ export class SchemaBuilder {
const label = object.constructor.name + (propertyName ? '.' + String(propertyName) : '');
this.warn(context, `Unable to find column (${label})`);
}
build(): DatabaseSchema {
return {
databaseName: this.databaseName,
schemaName: this.schemaName,
tables: this.tables,
functions: this.functions,
enums: this.enums,
extensions: this.extensions,
parameters: this.parameters,
warnings: this.warnings,
};
}
}

View File

@@ -0,0 +1,8 @@
import { BaseContext } from 'src/sql-tools/contexts/base-context';
import { SchemaFromDatabaseOptions } from 'src/sql-tools/types';
export class ReaderContext extends BaseContext {
constructor(public options: SchemaFromDatabaseOptions) {
super(options);
}
}

View File

@@ -1,6 +1,6 @@
import { createHash } from 'node:crypto';
import { ColumnValue } from 'src/sql-tools/decorators/column.decorator';
import { Comparer, DatabaseColumn, IgnoreOptions, SchemaDiff } from 'src/sql-tools/types';
import { Comparer, DatabaseColumn, DatabaseOverride, IgnoreOptions, SchemaDiff } from 'src/sql-tools/types';
export const asMetadataKey = (name: string) => `sql-tools:${name}`;
@@ -56,6 +56,17 @@ export const haveEqualColumns = (sourceColumns?: string[], targetColumns?: strin
return setIsEqual(new Set(sourceColumns ?? []), new Set(targetColumns ?? []));
};
export const haveEqualOverrides = <T extends { override?: DatabaseOverride }>(source: T, target: T) => {
if (!source.override || !target.override) {
return false;
}
const sourceValue = source.override.value;
const targetValue = target.override.value;
return sourceValue.name === targetValue.name && sourceValue.sql === targetValue.sql;
};
export const compare = <T extends { name: string; synchronize: boolean }>(
sources: T[],
targets: T[],
@@ -72,7 +83,7 @@ export const compare = <T extends { name: string; synchronize: boolean }>(
const source = sourceMap[key];
const target = targetMap[key];
if (isIgnored(source, target, options)) {
if (isIgnored(source, target, options ?? true)) {
continue;
}
@@ -85,6 +96,14 @@ export const compare = <T extends { name: string; synchronize: boolean }>(
} else if (!source && target) {
items.push(...comparer.onExtra(target));
} else {
if (
haveEqualOverrides(
source as unknown as { override?: DatabaseOverride },
target as unknown as { override?: DatabaseOverride },
)
) {
continue;
}
items.push(...comparer.onCompare(source, target));
}
}
@@ -97,6 +116,9 @@ const isIgnored = (
target: { synchronize?: boolean } | undefined,
options: IgnoreOptions,
) => {
if (typeof options === 'boolean') {
return !options;
}
return (options.ignoreExtra && !source) || (options.ignoreMissing && !target);
};
@@ -165,3 +187,18 @@ export const asColumnComment = (tableName: string, columnName: string, comment:
export const asColumnList = (columns: string[]) => columns.map((column) => `"${column}"`).join(', ');
export const asForeignKeyConstraintName = (table: string, columns: string[]) => asKey('FK_', table, [...columns]);
export const asJsonString = (value: unknown): string => {
return `'${escape(JSON.stringify(value))}'::jsonb`;
};
const escape = (value: string) => {
return value
.replaceAll("'", "''")
.replaceAll(/[\\]/g, '\\\\')
.replaceAll(/[\b]/g, String.raw`\b`)
.replaceAll(/[\f]/g, String.raw`\f`)
.replaceAll(/[\n]/g, String.raw`\n`)
.replaceAll(/[\r]/g, String.raw`\r`)
.replaceAll(/[\t]/g, String.raw`\t`);
};
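
A worked example of the escaping above (illustrative values): JSON.stringify renders a literal newline as the two characters `\n`, the backslash pass then doubles that backslash, and the single-quote pass protects the surrounding SQL string literal, so the value can be inlined as a jsonb constant.

import { asJsonString } from 'src/sql-tools/helpers';

asJsonString({ type: 'trigger', name: 't1', sql: 'BEFORE UPDATE\nEXECUTE FUNCTION updated_at();' });
// => '{"type":"trigger","name":"t1","sql":"BEFORE UPDATE\\nEXECUTE FUNCTION updated_at();"}'::jsonb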

View File

@@ -1,13 +1,13 @@
import { asKey } from 'src/sql-tools/helpers';
import { ConstraintType, Processor } from 'src/sql-tools/types';
export const processCheckConstraints: Processor = (builder, items) => {
export const processCheckConstraints: Processor = (ctx, items) => {
for (const {
item: { object, options },
} of items.filter((item) => item.type === 'checkConstraint')) {
const table = builder.getTableByObject(object);
const table = ctx.getTableByObject(object);
if (!table) {
builder.warnMissingTable('@Check', object);
ctx.warnMissingTable('@Check', object);
continue;
}

View File

@@ -2,14 +2,14 @@ import { ColumnOptions } from 'src/sql-tools/decorators/column.decorator';
import { fromColumnValue } from 'src/sql-tools/helpers';
import { Processor } from 'src/sql-tools/types';
export const processColumns: Processor = (builder, items) => {
export const processColumns: Processor = (ctx, items) => {
for (const {
type,
item: { object, propertyName, options },
} of items.filter((item) => item.type === 'column' || item.type === 'foreignKeyColumn')) {
const table = builder.getTableByObject(object.constructor);
const table = ctx.getTableByObject(object.constructor);
if (!table) {
builder.warnMissingTable(type === 'column' ? '@Column' : '@ForeignKeyColumn', object, propertyName);
ctx.warnMissingTable(type === 'column' ? '@Column' : '@ForeignKeyColumn', object, propertyName);
continue;
}
@@ -31,7 +31,7 @@ export const processColumns: Processor = (builder, items) => {
const isEnum = !!(options as ColumnOptions).enum;
builder.addColumn(
ctx.addColumn(
table,
{
name: columnName,

View File

@@ -1,12 +1,12 @@
import { fromColumnValue } from 'src/sql-tools/helpers';
import { Processor } from 'src/sql-tools/types';
export const processConfigurationParameters: Processor = (builder, items) => {
export const processConfigurationParameters: Processor = (ctx, items) => {
for (const {
item: { options },
} of items.filter((item) => item.type === 'configurationParameter')) {
builder.parameters.push({
databaseName: builder.databaseName,
ctx.parameters.push({
databaseName: ctx.databaseName,
name: options.name,
value: fromColumnValue(options.value),
scope: options.scope,

View File

@@ -1,10 +1,10 @@
import { asSnakeCase } from 'src/sql-tools/helpers';
import { Processor } from 'src/sql-tools/types';
export const processDatabases: Processor = (builder, items) => {
export const processDatabases: Processor = (ctx, items) => {
for (const {
item: { object, options },
} of items.filter((item) => item.type === 'database')) {
builder.databaseName = options.name || asSnakeCase(object.name);
ctx.databaseName = options.name || asSnakeCase(object.name);
}
};

View File

@@ -1,8 +1,8 @@
import { Processor } from 'src/sql-tools/types';
export const processEnums: Processor = (builder, items) => {
export const processEnums: Processor = (ctx, items) => {
for (const { item } of items.filter((item) => item.type === 'enum')) {
// TODO log warnings if enum name is not unique
builder.enums.push(item);
ctx.enums.push(item);
}
};

View File

@@ -1,10 +1,14 @@
import { Processor } from 'src/sql-tools/types';
export const processExtensions: Processor = (builder, items) => {
export const processExtensions: Processor = (ctx, items) => {
if (ctx.options.extensions === false) {
return;
}
for (const {
item: { options },
} of items.filter((item) => item.type === 'extension')) {
builder.extensions.push({
ctx.extensions.push({
name: options.name,
synchronize: options.synchronize ?? true,
});

View File

@@ -1,25 +1,25 @@
import { asForeignKeyConstraintName, asKey } from 'src/sql-tools/helpers';
import { ActionType, ConstraintType, Processor } from 'src/sql-tools/types';
export const processForeignKeyColumns: Processor = (builder, items) => {
export const processForeignKeyColumns: Processor = (ctx, items) => {
for (const {
item: { object, propertyName, options, target },
} of items.filter((item) => item.type === 'foreignKeyColumn')) {
const { table, column } = builder.getColumnByObjectAndPropertyName(object, propertyName);
const { table, column } = ctx.getColumnByObjectAndPropertyName(object, propertyName);
if (!table) {
builder.warnMissingTable('@ForeignKeyColumn', object);
ctx.warnMissingTable('@ForeignKeyColumn', object);
continue;
}
if (!column) {
// should be impossible since they are pre-created in `column.processor.ts`
builder.warnMissingColumn('@ForeignKeyColumn', object, propertyName);
ctx.warnMissingColumn('@ForeignKeyColumn', object, propertyName);
continue;
}
const referenceTable = builder.getTableByObject(target());
const referenceTable = ctx.getTableByObject(target());
if (!referenceTable) {
builder.warnMissingTable('@ForeignKeyColumn', object, propertyName);
ctx.warnMissingTable('@ForeignKeyColumn', object, propertyName);
continue;
}

View File

@@ -1,20 +1,20 @@
import { asForeignKeyConstraintName } from 'src/sql-tools/helpers';
import { ActionType, ConstraintType, Processor } from 'src/sql-tools/types';
export const processForeignKeyConstraints: Processor = (builder, items, config) => {
export const processForeignKeyConstraints: Processor = (ctx, items) => {
for (const {
item: { object, options },
} of items.filter((item) => item.type === 'foreignKeyConstraint')) {
const table = builder.getTableByObject(object);
const table = ctx.getTableByObject(object);
if (!table) {
builder.warnMissingTable('@ForeignKeyConstraint', { name: 'referenceTable' });
ctx.warnMissingTable('@ForeignKeyConstraint', { name: 'referenceTable' });
continue;
}
const referenceTable = builder.getTableByObject(options.referenceTable());
const referenceTable = ctx.getTableByObject(options.referenceTable());
if (!referenceTable) {
const referenceTableName = options.referenceTable()?.name;
builder.warn(
ctx.warn(
'@ForeignKeyConstraint.referenceTable',
`Unable to find table` + (referenceTableName ? ` (${referenceTableName})` : ''),
);
@@ -25,16 +25,16 @@ export const processForeignKeyConstraints: Processor = (builder, items, config)
for (const columnName of options.columns) {
if (!table.columns.some(({ name }) => name === columnName)) {
const metadata = builder.getTableMetadata(table);
builder.warn('@ForeignKeyConstraint.columns', `Unable to find column (${metadata.object.name}.${columnName})`);
const metadata = ctx.getTableMetadata(table);
ctx.warn('@ForeignKeyConstraint.columns', `Unable to find column (${metadata.object.name}.${columnName})`);
missingColumn = true;
}
}
for (const columnName of options.referenceColumns || []) {
if (!referenceTable.columns.some(({ name }) => name === columnName)) {
const metadata = builder.getTableMetadata(referenceTable);
builder.warn(
const metadata = ctx.getTableMetadata(referenceTable);
ctx.warn(
'@ForeignKeyConstraint.referenceColumns',
`Unable to find column (${metadata.object.name}.${columnName})`,
);
@@ -67,9 +67,9 @@ export const processForeignKeyConstraints: Processor = (builder, items, config)
continue;
}
if (options.index || options.indexName || config.createForeignKeyIndexes) {
if (options.index || options.indexName || ctx.options.createForeignKeyIndexes) {
table.indexes.push({
name: options.indexName || builder.asIndexName(table.name, options.columns),
name: options.indexName || ctx.asIndexName(table.name, options.columns),
tableName: table.name,
columnNames: options.columns,
unique: false,

View File

@@ -1,8 +1,12 @@
import { Processor } from 'src/sql-tools/types';
export const processFunctions: Processor = (builder, items) => {
export const processFunctions: Processor = (ctx, items) => {
if (ctx.options.functions === false) {
return;
}
for (const { item } of items.filter((item) => item.type === 'function')) {
// TODO log warnings if function name is not unique
builder.functions.push(item);
ctx.functions.push(item);
}
};
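
Both this processor and the extension processor above short-circuit on an explicit opt-out. A hedged sketch, assuming SchemaFromCodeOptions exposes these flags the way the processors read them:

import { schemaFromCode } from 'src/sql-tools/schema-from-code';

// Opting out skips registration entirely, so the resulting schema carries no
// functions; the same pattern applies to `extensions: false`.
const schema = schemaFromCode({ functions: false, reset: true });
schema.functions; // => []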

View File

@@ -1,17 +1,17 @@
import { Processor } from 'src/sql-tools/types';
export const processIndexes: Processor = (builder, items, config) => {
export const processIndexes: Processor = (ctx, items) => {
for (const {
item: { object, options },
} of items.filter((item) => item.type === 'index')) {
const table = builder.getTableByObject(object);
const table = ctx.getTableByObject(object);
if (!table) {
builder.warnMissingTable('@Check', object);
ctx.warnMissingTable('@Check', object);
continue;
}
table.indexes.push({
name: options.name || builder.asIndexName(table.name, options.columns, options.where),
name: options.name || ctx.asIndexName(table.name, options.columns, options.where),
tableName: table.name,
unique: options.unique ?? false,
expression: options.expression,
@@ -28,15 +28,15 @@ export const processIndexes: Processor = (builder, items, config) => {
type,
item: { object, propertyName, options },
} of items.filter((item) => item.type === 'column' || item.type === 'foreignKeyColumn')) {
const { table, column } = builder.getColumnByObjectAndPropertyName(object, propertyName);
const { table, column } = ctx.getColumnByObjectAndPropertyName(object, propertyName);
if (!table) {
builder.warnMissingTable('@Column', object);
ctx.warnMissingTable('@Column', object);
continue;
}
if (!column) {
// should be impossible since they are created in `column.processor.ts`
builder.warnMissingColumn('@Column', object, propertyName);
ctx.warnMissingColumn('@Column', object, propertyName);
continue;
}
@@ -45,12 +45,12 @@ export const processIndexes: Processor = (builder, items, config) => {
}
const isIndexRequested =
options.indexName || options.index || (type === 'foreignKeyColumn' && config.createForeignKeyIndexes);
options.indexName || options.index || (type === 'foreignKeyColumn' && ctx.options.createForeignKeyIndexes);
if (!isIndexRequested) {
continue;
}
const indexName = options.indexName || builder.asIndexName(table.name, [column.name]);
const indexName = options.indexName || ctx.asIndexName(table.name, [column.name]);
const isIndexPresent = table.indexes.some((index) => index.name === indexName);
if (isIndexPresent) {

View File

@@ -8,6 +8,7 @@ import { processForeignKeyColumns } from 'src/sql-tools/processors/foreign-key-c
import { processForeignKeyConstraints } from 'src/sql-tools/processors/foreign-key-constraint.processor';
import { processFunctions } from 'src/sql-tools/processors/function.processor';
import { processIndexes } from 'src/sql-tools/processors/index.processor';
import { processOverrides } from 'src/sql-tools/processors/override.processor';
import { processPrimaryKeyConstraints } from 'src/sql-tools/processors/primary-key-contraint.processor';
import { processTables } from 'src/sql-tools/processors/table.processor';
import { processTriggers } from 'src/sql-tools/processors/trigger.processor';
@@ -29,4 +30,5 @@ export const processors: Processor[] = [
processPrimaryKeyConstraints,
processIndexes,
processTriggers,
processOverrides,
];

View File

@@ -0,0 +1,50 @@
import { asFunctionCreate } from 'src/sql-tools/transformers/function.transformer';
import { asIndexCreate } from 'src/sql-tools/transformers/index.transformer';
import { asTriggerCreate } from 'src/sql-tools/transformers/trigger.transformer';
import { Processor } from 'src/sql-tools/types';
export const processOverrides: Processor = (ctx) => {
if (ctx.options.overrides === false) {
return;
}
for (const func of ctx.functions) {
if (!func.synchronize) {
continue;
}
ctx.overrides.push({
name: `function_${func.name}`,
value: { type: 'function', name: func.name, sql: asFunctionCreate(func) },
synchronize: true,
});
}
for (const { triggers, indexes } of ctx.tables) {
for (const trigger of triggers) {
if (!trigger.synchronize) {
continue;
}
ctx.overrides.push({
name: `trigger_${trigger.name}`,
value: { type: 'trigger', name: trigger.name, sql: asTriggerCreate(trigger) },
synchronize: true,
});
}
for (const index of indexes) {
if (!index.synchronize) {
continue;
}
if (index.expression || index.using || index.with || index.where) {
ctx.overrides.push({
name: `index_${index.name}`,
value: { type: 'index', name: index.name, sql: asIndexCreate(index) },
synchronize: true,
});
}
}
}
};
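
So only entities whose DDL cannot be fully diffed structurally (functions, triggers, and indexes with an expression, using, with, or where clause) are mirrored into overrides; a plain column index is left to ordinary index comparison. A sketch with hypothetical index objects, trimmed to the fields this diff shows:

import { ProcessorContext } from 'src/sql-tools/contexts/processor-context';
import { processOverrides } from 'src/sql-tools/processors/override.processor';
import { DatabaseIndex, DatabaseTable } from 'src/sql-tools/types';

const plain = { name: 'IDX_asset_owner', tableName: 'assets', columnNames: ['ownerId'], unique: false, synchronize: true } as DatabaseIndex;
const trgm = { ...plain, name: 'IDX_name_trgm', using: 'gin', expression: 'f_unaccent("name") gin_trgm_ops' } as DatabaseIndex;

const ctx = new ProcessorContext({ overrides: true }); // overrides default to false
ctx.tables.push({ name: 'assets', columns: [], constraints: [], indexes: [plain, trgm], triggers: [], synchronize: true } as DatabaseTable);

processOverrides(ctx, []);
ctx.overrides.map((o) => o.name); // => ['index_IDX_name_trgm'], the plain index is diffed structurally instead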

View File

@@ -1,8 +1,8 @@
import { asKey } from 'src/sql-tools/helpers';
import { ConstraintType, Processor } from 'src/sql-tools/types';
export const processPrimaryKeyConstraints: Processor = (builder) => {
for (const table of builder.tables) {
export const processPrimaryKeyConstraints: Processor = (ctx) => {
for (const table of ctx.tables) {
const columnNames: string[] = [];
for (const column of table.columns) {
@@ -12,7 +12,7 @@ export const processPrimaryKeyConstraints: Processor = (builder) => {
}
if (columnNames.length > 0) {
const tableMetadata = builder.getTableMetadata(table);
const tableMetadata = ctx.getTableMetadata(table);
table.constraints.push({
type: ConstraintType.PRIMARY_KEY,
name: tableMetadata.options.primaryConstraintName || asPrimaryKeyConstraintName(table.name, columnNames),

View File

@@ -1,18 +1,18 @@
import { asSnakeCase } from 'src/sql-tools/helpers';
import { Processor } from 'src/sql-tools/types';
export const processTables: Processor = (builder, items) => {
export const processTables: Processor = (ctx, items) => {
for (const {
item: { options, object },
} of items.filter((item) => item.type === 'table')) {
const test = builder.getTableByObject(object);
const test = ctx.getTableByObject(object);
if (test) {
throw new Error(
`Table ${test.name} has already been registered. Does ${object.name} have two @Table() decorators?`,
);
}
builder.addTable(
ctx.addTable(
{
name: options.name || asSnakeCase(object.name),
columns: [],

View File

@@ -2,13 +2,13 @@ import { TriggerOptions } from 'src/sql-tools/decorators/trigger.decorator';
import { asKey } from 'src/sql-tools/helpers';
import { Processor } from 'src/sql-tools/types';
export const processTriggers: Processor = (builder, items) => {
export const processTriggers: Processor = (ctx, items) => {
for (const {
item: { object, options },
} of items.filter((item) => item.type === 'trigger')) {
const table = builder.getTableByObject(object);
const table = ctx.getTableByObject(object);
if (!table) {
builder.warnMissingTable('@Trigger', object);
ctx.warnMissingTable('@Trigger', object);
continue;
}

View File

@@ -1,13 +1,13 @@
import { asKey } from 'src/sql-tools/helpers';
import { ConstraintType, Processor } from 'src/sql-tools/types';
export const processUniqueConstraints: Processor = (builder, items) => {
export const processUniqueConstraints: Processor = (ctx, items) => {
for (const {
item: { object, options },
} of items.filter((item) => item.type === 'uniqueConstraint')) {
const table = builder.getTableByObject(object);
const table = ctx.getTableByObject(object);
if (!table) {
builder.warnMissingTable('@Unique', object);
ctx.warnMissingTable('@Unique', object);
continue;
}
@@ -28,15 +28,15 @@ export const processUniqueConstraints: Processor = (builder, items) => {
type,
item: { object, propertyName, options },
} of items.filter((item) => item.type === 'column' || item.type === 'foreignKeyColumn')) {
const { table, column } = builder.getColumnByObjectAndPropertyName(object, propertyName);
const { table, column } = ctx.getColumnByObjectAndPropertyName(object, propertyName);
if (!table) {
builder.warnMissingTable('@Column', object);
ctx.warnMissingTable('@Column', object);
continue;
}
if (!column) {
// should be impossible since they are created in `column.processor.ts`
builder.warnMissingColumn('@Column', object, propertyName);
ctx.warnMissingColumn('@Column', object, propertyName);
continue;
}

View File

@@ -1,8 +1,8 @@
import { sql } from 'kysely';
import { jsonArrayFrom } from 'kysely/helpers/postgres';
import { ColumnType, DatabaseColumn, DatabaseReader } from 'src/sql-tools/types';
import { ColumnType, DatabaseColumn, Reader } from 'src/sql-tools/types';
export const readColumns: DatabaseReader = async (schema, db) => {
export const readColumns: Reader = async (ctx, db) => {
const columns = await db
.selectFrom('information_schema.columns as c')
.leftJoin('information_schema.element_types as o', (join) =>
@@ -42,13 +42,13 @@ export const readColumns: DatabaseReader = async (schema, db) => {
// data type for ARRAYs
'o.data_type as array_type',
])
.where('table_schema', '=', schema.schemaName)
.where('table_schema', '=', ctx.schemaName)
.execute();
const enumRaw = await db
.selectFrom('pg_type')
.innerJoin('pg_namespace', (join) =>
join.onRef('pg_namespace.oid', '=', 'pg_type.typnamespace').on('pg_namespace.nspname', '=', schema.schemaName),
join.onRef('pg_namespace.oid', '=', 'pg_type.typnamespace').on('pg_namespace.nspname', '=', ctx.schemaName),
)
.where('typtype', '=', sql.lit('e'))
.select((eb) => [
@@ -61,13 +61,13 @@ export const readColumns: DatabaseReader = async (schema, db) => {
const enums = enumRaw.map((item) => ({ name: item.name, values: item.values.map(({ value }) => value) }));
for (const { name, values } of enums) {
schema.enums.push({ name, values, synchronize: true });
ctx.enums.push({ name, values, synchronize: true });
}
const enumMap = Object.fromEntries(enums.map((e) => [e.name, e.values]));
// add columns to tables
for (const column of columns) {
const table = schema.tables.find((table) => table.name === column.table_name);
const table = ctx.getTableByName(column.table_name);
if (!table) {
continue;
}
@@ -93,7 +93,7 @@ export const readColumns: DatabaseReader = async (schema, db) => {
// array types
case 'ARRAY': {
if (!column.array_type) {
schema.warnings.push(`Unable to find type for ${columnLabel} (ARRAY)`);
ctx.warnings.push(`Unable to find type for ${columnLabel} (ARRAY)`);
continue;
}
item.type = column.array_type as ColumnType;
@@ -103,7 +103,7 @@ export const readColumns: DatabaseReader = async (schema, db) => {
// enum types
case 'USER-DEFINED': {
if (!enumMap[column.udt_name]) {
schema.warnings.push(`Unable to find type for ${columnLabel} (ENUM)`);
ctx.warnings.push(`Unable to find type for ${columnLabel} (ENUM)`);
continue;
}

View File

@@ -1,6 +1,6 @@
import { DatabaseReader } from 'src/sql-tools/types';
import { Reader } from 'src/sql-tools/types';
export const readComments: DatabaseReader = async (schema, db) => {
export const readComments: Reader = async (ctx, db) => {
const comments = await db
.selectFrom('pg_description as d')
.innerJoin('pg_class as c', 'd.objoid', 'c.oid')
@@ -20,7 +20,7 @@ export const readComments: DatabaseReader = async (schema, db) => {
for (const comment of comments) {
if (comment.object_type === 'r') {
const table = schema.tables.find((table) => table.name === comment.object_name);
const table = ctx.getTableByName(comment.object_name);
if (!table) {
continue;
}

View File

@@ -1,7 +1,7 @@
import { sql } from 'kysely';
import { ActionType, ConstraintType, DatabaseReader } from 'src/sql-tools/types';
import { ActionType, ConstraintType, Reader } from 'src/sql-tools/types';
export const readConstraints: DatabaseReader = async (schema, db) => {
export const readConstraints: Reader = async (ctx, db) => {
const constraints = await db
.selectFrom('pg_constraint')
.innerJoin('pg_namespace', 'pg_namespace.oid', 'pg_constraint.connamespace') // namespace
@@ -40,11 +40,11 @@ export const readConstraints: DatabaseReader = async (schema, db) => {
.as('reference_column_names'),
eb.fn<string>('pg_get_constraintdef', ['pg_constraint.oid']).as('expression'),
])
.where('pg_namespace.nspname', '=', schema.schemaName)
.where('pg_namespace.nspname', '=', ctx.schemaName)
.execute();
for (const constraint of constraints) {
const table = schema.tables.find((table) => table.name === constraint.table_name);
const table = ctx.getTableByName(constraint.table_name);
if (!table) {
continue;
}
@@ -55,7 +55,7 @@ export const readConstraints: DatabaseReader = async (schema, db) => {
// primary key constraint
case 'p': {
if (!constraint.column_names) {
schema.warnings.push(`Skipping CONSTRAINT "${constraintName}", no columns found`);
ctx.warnings.push(`Skipping CONSTRAINT "${constraintName}", no columns found`);
continue;
}
table.constraints.push({
@@ -71,7 +71,7 @@ export const readConstraints: DatabaseReader = async (schema, db) => {
// foreign key constraint
case 'f': {
if (!constraint.column_names || !constraint.reference_table_name || !constraint.reference_column_names) {
schema.warnings.push(
ctx.warnings.push(
`Skipping CONSTRAINT "${constraintName}", missing either columns, referenced table, or referenced columns,`,
);
continue;

View File

@@ -1,6 +1,6 @@
import { DatabaseReader } from 'src/sql-tools/types';
import { Reader } from 'src/sql-tools/types';
export const readExtensions: DatabaseReader = async (schema, db) => {
export const readExtensions: Reader = async (ctx, db) => {
const extensions = await db
.selectFrom('pg_catalog.pg_extension')
// .innerJoin('pg_namespace', 'pg_namespace.oid', 'pg_catalog.pg_extension.extnamespace')
@@ -9,6 +9,6 @@ export const readExtensions: DatabaseReader = async (schema, db) => {
.execute();
for (const { name } of extensions) {
schema.extensions.push({ name, synchronize: true });
ctx.extensions.push({ name, synchronize: true });
}
};

View File

@@ -1,14 +1,14 @@
import { sql } from 'kysely';
import { DatabaseReader } from 'src/sql-tools/types';
import { Reader } from 'src/sql-tools/types';
export const readFunctions: DatabaseReader = async (schema, db) => {
export const readFunctions: Reader = async (ctx, db) => {
const routines = await db
.selectFrom('pg_proc as p')
.innerJoin('pg_namespace', 'pg_namespace.oid', 'p.pronamespace')
.leftJoin('pg_depend as d', (join) => join.onRef('d.objid', '=', 'p.oid').on('d.deptype', '=', sql.lit('e')))
.where('d.objid', 'is', sql.lit(null))
.where('p.prokind', '=', sql.lit('f'))
.where('pg_namespace.nspname', '=', schema.schemaName)
.where('pg_namespace.nspname', '=', ctx.schemaName)
.select((eb) => [
'p.proname as name',
eb.fn<string>('pg_get_function_identity_arguments', ['p.oid']).as('arguments'),
@@ -17,7 +17,7 @@ export const readFunctions: DatabaseReader = async (schema, db) => {
.execute();
for (const { name, expression } of routines) {
schema.functions.push({
ctx.functions.push({
name,
// TODO read expression from the overrides table
expression,

View File

@@ -1,7 +1,7 @@
import { sql } from 'kysely';
import { DatabaseReader } from 'src/sql-tools/types';
import { Reader } from 'src/sql-tools/types';
export const readIndexes: DatabaseReader = async (schema, db) => {
export const readIndexes: Reader = async (ctx, db) => {
const indexes = await db
.selectFrom('pg_index as ix')
// matching index, which has column information
@@ -34,12 +34,12 @@ export const readIndexes: DatabaseReader = async (schema, db) => {
.select((eb) => eb.fn<string[]>('json_agg', ['a.attname']).as('column_name'))
.as('column_names'),
])
.where('pg_namespace.nspname', '=', schema.schemaName)
.where('pg_namespace.nspname', '=', ctx.schemaName)
.where('ix.indisprimary', '=', sql.lit(false))
.execute();
for (const index of indexes) {
const table = schema.tables.find((table) => table.name === index.table_name);
const table = ctx.getTableByName(index.table_name);
if (!table) {
continue;
}

View File

@@ -5,13 +5,13 @@ import { readExtensions } from 'src/sql-tools/readers/extension.reader';
import { readFunctions } from 'src/sql-tools/readers/function.reader';
import { readIndexes } from 'src/sql-tools/readers/index.reader';
import { readName } from 'src/sql-tools/readers/name.reader';
import { readOverrides } from 'src/sql-tools/readers/override.reader';
import { readParameters } from 'src/sql-tools/readers/parameter.reader';
import { readTables } from 'src/sql-tools/readers/table.reader';
import { readTriggers } from 'src/sql-tools/readers/trigger.reader';
import { DatabaseReader } from 'src/sql-tools/types';
import { Reader } from 'src/sql-tools/types';
export const readers: DatabaseReader[] = [
//
export const readers: Reader[] = [
readName,
readParameters,
readExtensions,
@@ -22,4 +22,5 @@ export const readers: DatabaseReader[] = [
readConstraints,
readTriggers,
readComments,
readOverrides,
];

View File

@@ -1,8 +1,8 @@
import { QueryResult, sql } from 'kysely';
import { DatabaseReader } from 'src/sql-tools/types';
import { Reader } from 'src/sql-tools/types';
export const readName: DatabaseReader = async (schema, db) => {
export const readName: Reader = async (ctx, db) => {
const result = (await sql`SELECT current_database() as name`.execute(db)) as QueryResult<{ name: string }>;
schema.databaseName = result.rows[0].name;
ctx.databaseName = result.rows[0].name;
};

View File

@@ -0,0 +1,19 @@
import { sql } from 'kysely';
import { OverrideType, Reader } from 'src/sql-tools/types';
export const readOverrides: Reader = async (ctx, db) => {
try {
const result = await sql
.raw<{
name: string;
value: { type: OverrideType; name: string; sql: string };
}>(`SELECT name, value FROM "${ctx.overrideTableName}"`)
.execute(db);
for (const { name, value } of result.rows) {
ctx.overrides.push({ name, value, synchronize: true });
}
} catch (error) {
ctx.warn('Overrides', `Error reading override table: ${error}`);
}
};
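
Because readOverrides swallows its own errors, a fresh or pre-override database degrades to a warning instead of failing the whole schema read. A small sketch (the wrapper function is illustrative):

import { Kysely } from 'kysely';
import { ReaderContext } from 'src/sql-tools/contexts/reader-context';
import { readOverrides } from 'src/sql-tools/readers/override.reader';
import { PostgresDB } from 'src/sql-tools/types';

async function tryReadOverrides(ctx: ReaderContext, db: Kysely<PostgresDB>) {
  await readOverrides(ctx, db); // a missing table only records a warning
  return ctx.warnings; // e.g. ['[Overrides] Error reading override table: ...']
}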

View File

@@ -1,7 +1,7 @@
import { sql } from 'kysely';
import { DatabaseReader, ParameterScope } from 'src/sql-tools/types';
import { ParameterScope, Reader } from 'src/sql-tools/types';
export const readParameters: DatabaseReader = async (schema, db) => {
export const readParameters: Reader = async (ctx, db) => {
const parameters = await db
.selectFrom('pg_settings')
.where('source', 'in', [sql.lit('database'), sql.lit('user')])
@@ -9,10 +9,10 @@ export const readParameters: DatabaseReader = async (schema, db) => {
.execute();
for (const parameter of parameters) {
schema.parameters.push({
ctx.parameters.push({
name: parameter.name,
value: parameter.value,
databaseName: schema.databaseName,
databaseName: ctx.databaseName,
scope: parameter.scope as ParameterScope,
synchronize: true,
});

View File

@@ -1,16 +1,16 @@
import { sql } from 'kysely';
import { DatabaseReader } from 'src/sql-tools/types';
import { Reader } from 'src/sql-tools/types';
export const readTables: DatabaseReader = async (schema, db) => {
export const readTables: Reader = async (ctx, db) => {
const tables = await db
.selectFrom('information_schema.tables')
.where('table_schema', '=', schema.schemaName)
.where('table_schema', '=', ctx.schemaName)
.where('table_type', '=', sql.lit('BASE TABLE'))
.selectAll()
.execute();
for (const table of tables) {
schema.tables.push({
ctx.tables.push({
name: table.table_name,
columns: [],
indexes: [],

View File

@@ -1,6 +1,6 @@
import { DatabaseReader, TriggerAction, TriggerScope, TriggerTiming } from 'src/sql-tools/types';
import { Reader, TriggerAction, TriggerScope, TriggerTiming } from 'src/sql-tools/types';
export const readTriggers: DatabaseReader = async (schema, db) => {
export const readTriggers: Reader = async (ctx, db) => {
const triggers = await db
.selectFrom('pg_trigger as t')
.innerJoin('pg_proc as p', 't.tgfoid', 'p.oid')
@@ -21,12 +21,12 @@ export const readTriggers: DatabaseReader = async (schema, db) => {
'c.relname as table_name',
])
.where('t.tgisinternal', '=', false) // Exclude internal system triggers
.where('n.nspname', '=', schema.schemaName)
.where('n.nspname', '=', ctx.schemaName)
.execute();
// add triggers to tables
for (const trigger of triggers) {
const table = schema.tables.find((table) => table.name === trigger.table_name);
const table = ctx.getTableByName(trigger.table_name);
if (!table) {
continue;
}

View File

@@ -21,6 +21,7 @@ const fromColumn = (column: Partial<Omit<DatabaseColumn, 'tableName'>>): Databas
enums: [],
extensions: [],
parameters: [],
overrides: [],
tables: [
{
name: tableName,
@@ -55,6 +56,7 @@ const fromConstraint = (constraint?: DatabaseConstraint): DatabaseSchema => {
enums: [],
extensions: [],
parameters: [],
overrides: [],
tables: [
{
name: tableName,
@@ -88,6 +90,7 @@ const fromIndex = (index?: DatabaseIndex): DatabaseSchema => {
enums: [],
extensions: [],
parameters: [],
overrides: [],
tables: [
{
name: tableName,
@@ -161,6 +164,7 @@ const newSchema = (schema: {
enums: [],
extensions: [],
parameters: [],
overrides: [],
tables,
warnings: [],
};

View File

@@ -1,8 +1,10 @@
import { compareEnums } from 'src/sql-tools/comparers/enum.comparer';
import { compareExtensions } from 'src/sql-tools/comparers/extension.comparer';
import { compareFunctions } from 'src/sql-tools/comparers/function.comparer';
import { compareOverrides } from 'src/sql-tools/comparers/override.comparer';
import { compareParameters } from 'src/sql-tools/comparers/parameter.comparer';
import { compareTables } from 'src/sql-tools/comparers/table.comparer';
import { BaseContext } from 'src/sql-tools/contexts/base-context';
import { compare } from 'src/sql-tools/helpers';
import { transformers } from 'src/sql-tools/transformers';
import {
@@ -19,10 +21,11 @@ import {
export const schemaDiff = (source: DatabaseSchema, target: DatabaseSchema, options: SchemaDiffOptions = {}) => {
const items = [
...compare(source.parameters, target.parameters, options.parameters, compareParameters),
...compare(source.extensions, target.extensions, options.extension, compareExtensions),
...compare(source.extensions, target.extensions, options.extensions, compareExtensions),
...compare(source.functions, target.functions, options.functions, compareFunctions),
...compare(source.enums, target.enums, options.enums, compareEnums),
...compare(source.tables, target.tables, options.tables, compareTables),
...compare(source.overrides, target.overrides, options.overrides, compareOverrides),
];
type SchemaName = SchemaDiff['type'];
@@ -46,6 +49,9 @@ export const schemaDiff = (source: DatabaseSchema, target: DatabaseSchema, optio
TriggerDrop: [],
ParameterSet: [],
ParameterReset: [],
OverrideCreate: [],
OverrideUpdate: [],
OverrideDrop: [],
};
for (const item of items) {
@@ -76,6 +82,9 @@ export const schemaDiff = (source: DatabaseSchema, target: DatabaseSchema, optio
...itemMap.TableDrop,
...itemMap.EnumDrop,
...itemMap.FunctionDrop,
...itemMap.OverrideCreate,
...itemMap.OverrideUpdate,
...itemMap.OverrideDrop,
];
return {
@@ -88,17 +97,18 @@ export const schemaDiff = (source: DatabaseSchema, target: DatabaseSchema, optio
* Convert schema diffs into SQL statements
*/
export const schemaDiffToSql = (items: SchemaDiff[], options: SchemaDiffToSqlOptions = {}): string[] => {
return items.flatMap((item) => asSql(item).map((result) => result + withComments(options.comments, item)));
return items.flatMap((item) => asSql(item, options));
};
const asSql = (item: SchemaDiff): string[] => {
const asSql = (item: SchemaDiff, options: SchemaDiffToSqlOptions): string[] => {
const ctx = new BaseContext(options);
for (const transform of transformers) {
const result = transform(item);
const result = transform(ctx, item);
if (!result) {
continue;
}
return asArray(result);
return asArray(result).map((result) => result + withComments(options.comments, item));
}
throw new Error(`Unhandled schema diff type: ${item.type}`);

View File

@@ -1,8 +1,16 @@
import { readdirSync } from 'node:fs';
import { join } from 'node:path';
import { schemaFromCode } from 'src/sql-tools/schema-from-code';
import { SchemaFromCodeOptions } from 'src/sql-tools/types';
import { describe, expect, it } from 'vitest';
const importModule = async (filePath: string) => {
const module = await import(filePath);
const options: SchemaFromCodeOptions = module.options;
return { module, options };
};
describe(schemaFromCode.name, () => {
it('should work', () => {
expect(schemaFromCode({ reset: true })).toEqual({
@@ -12,6 +20,7 @@ describe(schemaFromCode.name, () => {
enums: [],
extensions: [],
parameters: [],
overrides: [],
tables: [],
warnings: [],
});
@@ -22,9 +31,10 @@ describe(schemaFromCode.name, () => {
for (const file of errorStubs) {
const filePath = join(file.parentPath, file.name);
it(filePath, async () => {
const module = await import(filePath);
const { module, options } = await importModule(filePath);
expect(module.message).toBeDefined();
expect(() => schemaFromCode({ reset: true })).toThrowError(module.message);
expect(() => schemaFromCode({ ...options, reset: true })).toThrowError(module.message);
});
}
@@ -36,10 +46,11 @@ describe(schemaFromCode.name, () => {
const filePath = join(file.parentPath, file.name);
it(filePath, async () => {
const module = await import(filePath);
const { module, options } = await importModule(filePath);
expect(module.description).toBeDefined();
expect(module.schema).toBeDefined();
expect(schemaFromCode({ reset: true }), module.description).toEqual(module.schema);
expect(schemaFromCode({ ...options, reset: true }), module.description).toEqual(module.schema);
});
}
});

View File

@@ -1,26 +1,58 @@
import { ProcessorContext } from 'src/sql-tools/contexts/processor-context';
import { processors } from 'src/sql-tools/processors';
import { getRegisteredItems, resetRegisteredItems } from 'src/sql-tools/register';
import { SchemaBuilder } from 'src/sql-tools/schema-builder';
import { SchemaFromCodeOptions } from 'src/sql-tools/types';
import { ConstraintType, SchemaFromCodeOptions } from 'src/sql-tools/types';
/**
* Load schema from code (decorators, etc)
*/
export const schemaFromCode = (options: SchemaFromCodeOptions = {}) => {
try {
const globalOptions = {
createForeignKeyIndexes: options.createForeignKeyIndexes ?? true,
};
const builder = new SchemaBuilder(options);
const ctx = new ProcessorContext(options);
const items = getRegisteredItems();
for (const processor of processors) {
processor(builder, items, globalOptions);
processor(ctx, items);
}
const newSchema = builder.build();
if (ctx.options.overrides) {
ctx.tables.push({
name: ctx.overrideTableName,
columns: [
{
primary: true,
name: 'name',
tableName: ctx.overrideTableName,
type: 'character varying',
nullable: false,
isArray: false,
synchronize: true,
},
{
name: 'value',
tableName: ctx.overrideTableName,
type: 'jsonb',
nullable: false,
isArray: false,
synchronize: true,
},
],
indexes: [],
triggers: [],
constraints: [
{
type: ConstraintType.PRIMARY_KEY,
name: `${ctx.overrideTableName}_pkey`,
tableName: ctx.overrideTableName,
columnNames: ['name'],
synchronize: true,
},
],
synchronize: true,
});
}
return newSchema;
return ctx.build();
} finally {
if (options.reset) {
resetRegisteredItems();
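
With overrides enabled, schemaFromCode now appends the bookkeeping table itself, so a code-defined schema stays comparable to a migrated database. A brief usage sketch:

import { schemaFromCode } from 'src/sql-tools/schema-from-code';

const schema = schemaFromCode({ overrides: true, reset: true });
const table = schema.tables.find((t) => t.name === 'migration_overrides');
table?.columns.map((c) => c.name); // => ['name', 'value']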

View File

@@ -1,6 +1,7 @@
import { Kysely } from 'kysely';
import { PostgresJSDialect } from 'kysely-postgres-js';
import { Sql } from 'postgres';
import { ReaderContext } from 'src/sql-tools/contexts/reader-context';
import { readers } from 'src/sql-tools/readers';
import { DatabaseSchema, PostgresDB, SchemaFromDatabaseOptions } from 'src/sql-tools/types';
@@ -11,23 +12,16 @@ export const schemaFromDatabase = async (
postgres: Sql,
options: SchemaFromDatabaseOptions = {},
): Promise<DatabaseSchema> => {
const schema: DatabaseSchema = {
databaseName: 'immich',
schemaName: options.schemaName || 'public',
parameters: [],
functions: [],
enums: [],
extensions: [],
tables: [],
warnings: [],
};
const db = new Kysely<PostgresDB>({ dialect: new PostgresJSDialect({ postgres }) });
for (const reader of readers) {
await reader(schema, db);
const ctx = new ReaderContext(options);
try {
for (const reader of readers) {
await reader(ctx, db);
}
return ctx.build();
} finally {
await db.destroy();
}
await db.destroy();
return schema;
};
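
A hypothetical caller, assuming a connection from the postgres package; the try/finally above already destroys the Kysely wrapper, so only the raw connection needs closing here.

import postgres from 'postgres';
import { schemaFromDatabase } from 'src/sql-tools/schema-from-database';

const sql = postgres('postgres://immich:immich@localhost:5432/immich'); // connection string is illustrative
const schema = await schemaFromDatabase(sql, { schemaName: 'public' });
console.log(schema.overrides.map((o) => o.name));
await sql.end();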

View File

@@ -1,11 +1,14 @@
import { BaseContext } from 'src/sql-tools/contexts/base-context';
import { transformColumns } from 'src/sql-tools/transformers/column.transformer';
import { describe, expect, it } from 'vitest';
const ctx = new BaseContext({});
describe(transformColumns.name, () => {
describe('ColumnAdd', () => {
it('should work', () => {
expect(
transformColumns({
transformColumns(ctx, {
type: 'ColumnAdd',
column: {
name: 'column1',
@@ -22,7 +25,7 @@ describe(transformColumns.name, () => {
it('should add a nullable column', () => {
expect(
transformColumns({
transformColumns(ctx, {
type: 'ColumnAdd',
column: {
name: 'column1',
@@ -39,7 +42,7 @@ describe(transformColumns.name, () => {
it('should add a column with an enum type', () => {
expect(
transformColumns({
transformColumns(ctx, {
type: 'ColumnAdd',
column: {
name: 'column1',
@@ -57,7 +60,7 @@ describe(transformColumns.name, () => {
it('should add a column that is an array type', () => {
expect(
transformColumns({
transformColumns(ctx, {
type: 'ColumnAdd',
column: {
name: 'column1',
@@ -76,7 +79,7 @@ describe(transformColumns.name, () => {
describe('ColumnAlter', () => {
it('should make a column nullable', () => {
expect(
transformColumns({
transformColumns(ctx, {
type: 'ColumnAlter',
tableName: 'table1',
columnName: 'column1',
@@ -88,7 +91,7 @@ describe(transformColumns.name, () => {
it('should make a column non-nullable', () => {
expect(
transformColumns({
transformColumns(ctx, {
type: 'ColumnAlter',
tableName: 'table1',
columnName: 'column1',
@@ -100,7 +103,7 @@ describe(transformColumns.name, () => {
it('should update the default value', () => {
expect(
transformColumns({
transformColumns(ctx, {
type: 'ColumnAlter',
tableName: 'table1',
columnName: 'column1',
@@ -114,7 +117,7 @@ describe(transformColumns.name, () => {
describe('ColumnDrop', () => {
it('should work', () => {
expect(
transformColumns({
transformColumns(ctx, {
type: 'ColumnDrop',
tableName: 'table1',
columnName: 'column1',

View File

@@ -1,8 +1,8 @@
import { asColumnComment, getColumnModifiers, getColumnType } from 'src/sql-tools/helpers';
import { SqlTransformer } from 'src/sql-tools/transformers/types';
import { ColumnChanges, DatabaseColumn, SchemaDiff } from 'src/sql-tools/types';
import { ColumnChanges, DatabaseColumn } from 'src/sql-tools/types';
export const transformColumns: SqlTransformer = (item: SchemaDiff) => {
export const transformColumns: SqlTransformer = (ctx, item) => {
switch (item.type) {
case 'ColumnAdd': {
return asColumnAdd(item.column);

View File

@@ -1,13 +1,16 @@
import { BaseContext } from 'src/sql-tools/contexts/base-context';
import { transformConstraints } from 'src/sql-tools/transformers/constraint.transformer';
import { ConstraintType } from 'src/sql-tools/types';
import { describe, expect, it } from 'vitest';
const ctx = new BaseContext({});
describe(transformConstraints.name, () => {
describe('ConstraintAdd', () => {
describe('primary keys', () => {
it('should work', () => {
expect(
transformConstraints({
transformConstraints(ctx, {
type: 'ConstraintAdd',
constraint: {
type: ConstraintType.PRIMARY_KEY,
@@ -25,7 +28,7 @@ describe(transformConstraints.name, () => {
describe('foreign keys', () => {
it('should work', () => {
expect(
transformConstraints({
transformConstraints(ctx, {
type: 'ConstraintAdd',
constraint: {
type: ConstraintType.FOREIGN_KEY,
@@ -47,7 +50,7 @@ describe(transformConstraints.name, () => {
describe('unique', () => {
it('should work', () => {
expect(
transformConstraints({
transformConstraints(ctx, {
type: 'ConstraintAdd',
constraint: {
type: ConstraintType.UNIQUE,
@@ -65,7 +68,7 @@ describe(transformConstraints.name, () => {
describe('check', () => {
it('should work', () => {
expect(
transformConstraints({
transformConstraints(ctx, {
type: 'ConstraintAdd',
constraint: {
type: ConstraintType.CHECK,
@@ -84,7 +87,7 @@ describe(transformConstraints.name, () => {
describe('ConstraintDrop', () => {
it('should work', () => {
expect(
transformConstraints({
transformConstraints(ctx, {
type: 'ConstraintDrop',
tableName: 'table1',
constraintName: 'PK_test',

View File

@@ -1,8 +1,8 @@
import { asColumnList } from 'src/sql-tools/helpers';
import { SqlTransformer } from 'src/sql-tools/transformers/types';
import { ActionType, ConstraintType, DatabaseConstraint, SchemaDiff } from 'src/sql-tools/types';
import { ActionType, ConstraintType, DatabaseConstraint } from 'src/sql-tools/types';
export const transformConstraints: SqlTransformer = (item: SchemaDiff) => {
export const transformConstraints: SqlTransformer = (ctx, item) => {
switch (item.type) {
case 'ConstraintAdd': {
return asConstraintAdd(item.constraint);

View File

@@ -1,7 +1,7 @@
import { SqlTransformer } from 'src/sql-tools/transformers/types';
import { DatabaseEnum, SchemaDiff } from 'src/sql-tools/types';
import { DatabaseEnum } from 'src/sql-tools/types';
export const transformEnums: SqlTransformer = (item: SchemaDiff) => {
export const transformEnums: SqlTransformer = (ctx, item) => {
switch (item.type) {
case 'EnumCreate': {
return asEnumCreate(item.enum);

View File

@@ -1,11 +1,14 @@
import { BaseContext } from 'src/sql-tools/contexts/base-context';
import { transformExtensions } from 'src/sql-tools/transformers/extension.transformer';
import { describe, expect, it } from 'vitest';
const ctx = new BaseContext({});
describe(transformExtensions.name, () => {
describe('ExtensionDrop', () => {
it('should work', () => {
expect(
transformExtensions({
transformExtensions(ctx, {
type: 'ExtensionDrop',
extensionName: 'cube',
reason: 'unknown',
@@ -17,7 +20,7 @@ describe(transformExtensions.name, () => {
describe('ExtensionCreate', () => {
it('should work', () => {
expect(
transformExtensions({
transformExtensions(ctx, {
type: 'ExtensionCreate',
extension: {
name: 'cube',

View File

@@ -1,7 +1,7 @@
import { SqlTransformer } from 'src/sql-tools/transformers/types';
import { DatabaseExtension, SchemaDiff } from 'src/sql-tools/types';
import { DatabaseExtension } from 'src/sql-tools/types';
export const transformExtensions: SqlTransformer = (item: SchemaDiff) => {
export const transformExtensions: SqlTransformer = (ctx, item) => {
switch (item.type) {
case 'ExtensionCreate': {
return asExtensionCreate(item.extension);

View File

@@ -1,11 +1,14 @@
import { BaseContext } from 'src/sql-tools/contexts/base-context';
import { transformFunctions } from 'src/sql-tools/transformers/function.transformer';
import { describe, expect, it } from 'vitest';
const ctx = new BaseContext({});
describe(transformFunctions.name, () => {
describe('FunctionDrop', () => {
it('should work', () => {
expect(
transformFunctions({
transformFunctions(ctx, {
type: 'FunctionDrop',
functionName: 'test_func',
reason: 'unknown',

View File

@@ -1,7 +1,7 @@
import { SqlTransformer } from 'src/sql-tools/transformers/types';
import { DatabaseFunction, SchemaDiff } from 'src/sql-tools/types';
import { DatabaseFunction } from 'src/sql-tools/types';
export const transformFunctions: SqlTransformer = (item: SchemaDiff) => {
export const transformFunctions: SqlTransformer = (ctx, item) => {
switch (item.type) {
case 'FunctionCreate': {
return asFunctionCreate(item.function);
@@ -17,7 +17,7 @@ export const transformFunctions: SqlTransformer = (item: SchemaDiff) => {
}
};
const asFunctionCreate = (func: DatabaseFunction): string => {
export const asFunctionCreate = (func: DatabaseFunction): string => {
return func.expression;
};
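
Exporting asFunctionCreate makes the CREATE FUNCTION helper reachable from outside this module (tests, for instance). A hypothetical direct use, assuming DatabaseFunction carries at least the expression field the body above reads:

import { asFunctionCreate } from 'src/sql-tools/transformers/function.transformer';
import { DatabaseFunction } from 'src/sql-tools/types';

// The helper just echoes the stored expression, so the caller owns the full statement text.
const func = { name: 'test_func', expression: 'CREATE FUNCTION test_func() ...' } as DatabaseFunction;
const sql = asFunctionCreate(func); // => 'CREATE FUNCTION test_func() ...'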

View File

@@ -1,11 +1,14 @@
import { BaseContext } from 'src/sql-tools/contexts/base-context';
import { transformIndexes } from 'src/sql-tools/transformers/index.transformer';
import { describe, expect, it } from 'vitest';
const ctx = new BaseContext({});
describe(transformIndexes.name, () => {
describe('IndexCreate', () => {
it('should work', () => {
expect(
transformIndexes({
transformIndexes(ctx, {
type: 'IndexCreate',
index: {
name: 'IDX_test',
@@ -21,7 +24,7 @@ describe(transformIndexes.name, () => {
it('should create a unique index', () => {
expect(
transformIndexes({
transformIndexes(ctx, {
type: 'IndexCreate',
index: {
name: 'IDX_test',
@@ -37,7 +40,7 @@ describe(transformIndexes.name, () => {
it('should create an index with a custom expression', () => {
expect(
transformIndexes({
transformIndexes(ctx, {
type: 'IndexCreate',
index: {
name: 'IDX_test',
@@ -53,7 +56,7 @@ describe(transformIndexes.name, () => {
it('should create an index with a where clause', () => {
expect(
transformIndexes({
transformIndexes(ctx, {
type: 'IndexCreate',
index: {
name: 'IDX_test',
@@ -70,7 +73,7 @@ describe(transformIndexes.name, () => {
it('should create an index with a custom expression', () => {
expect(
transformIndexes({
transformIndexes(ctx, {
type: 'IndexCreate',
index: {
name: 'IDX_test',
@@ -89,7 +92,7 @@ describe(transformIndexes.name, () => {
describe('IndexDrop', () => {
it('should work', () => {
expect(
transformIndexes({
transformIndexes(ctx, {
type: 'IndexDrop',
indexName: 'IDX_test',
reason: 'unknown',

View File

@@ -1,8 +1,8 @@
import { asColumnList } from 'src/sql-tools/helpers';
import { SqlTransformer } from 'src/sql-tools/transformers/types';
import { DatabaseIndex, SchemaDiff } from 'src/sql-tools/types';
import { DatabaseIndex } from 'src/sql-tools/types';
export const transformIndexes: SqlTransformer = (item: SchemaDiff) => {
export const transformIndexes: SqlTransformer = (ctx, item) => {
switch (item.type) {
case 'IndexCreate': {
return asIndexCreate(item.index);

View File

@@ -4,6 +4,7 @@ import { transformEnums } from 'src/sql-tools/transformers/enum.transformer';
import { transformExtensions } from 'src/sql-tools/transformers/extension.transformer';
import { transformFunctions } from 'src/sql-tools/transformers/function.transformer';
import { transformIndexes } from 'src/sql-tools/transformers/index.transformer';
import { transformOverrides } from 'src/sql-tools/transformers/override.transformer';
import { transformParameters } from 'src/sql-tools/transformers/parameter.transformer';
import { transformTables } from 'src/sql-tools/transformers/table.transformer';
import { transformTriggers } from 'src/sql-tools/transformers/trigger.transformer';
@@ -19,4 +20,5 @@ export const transformers: SqlTransformer[] = [
transformParameters,
transformTables,
transformTriggers,
transformOverrides,
];
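
With transformOverrides appended, the registry now covers override diffs as well. The array is presumably walked until one transformer claims the item; a hypothetical driver loop under that assumption (the real dispatch code is not part of this diff):

import { BaseContext } from 'src/sql-tools/contexts/base-context';
import { SchemaDiff } from 'src/sql-tools/types';
// Assumed index path for the transformers array defined above.
import { transformers } from 'src/sql-tools/transformers';

// First transformer that does not return false wins.
const toSql = (ctx: BaseContext, item: SchemaDiff): string | string[] | undefined => {
  for (const transform of transformers) {
    const result = transform(ctx, item);
    if (result !== false) {
      return result;
    }
  }
};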

View File

@@ -0,0 +1,37 @@
import { asJsonString } from 'src/sql-tools/helpers';
import { SqlTransformer } from 'src/sql-tools/transformers/types';
import { DatabaseOverride } from 'src/sql-tools/types';
export const transformOverrides: SqlTransformer = (ctx, item) => {
const tableName = ctx.overrideTableName;
switch (item.type) {
case 'OverrideCreate': {
return asOverrideCreate(tableName, item.override);
}
case 'OverrideUpdate': {
return asOverrideUpdate(tableName, item.override);
}
case 'OverrideDrop': {
return asOverrideDrop(tableName, item.overrideName);
}
default: {
return false;
}
}
};
export const asOverrideCreate = (tableName: string, override: DatabaseOverride): string => {
return `INSERT INTO "${tableName}" ("name", "value") VALUES ('${override.name}', ${asJsonString(override.value)});`;
};
export const asOverrideUpdate = (tableName: string, override: DatabaseOverride): string => {
return `UPDATE "${tableName}" SET "value" = ${asJsonString(override.value)} WHERE "name" = '${override.name}';`;
};
export const asOverrideDrop = (tableName: string, overrideName: string): string => {
return `DELETE FROM "${tableName}" WHERE "name" = '${overrideName}';`;
};
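
The override table holds one row per named override, so create, update, and drop map directly onto INSERT, UPDATE, and DELETE. Both the name and the JSON value are spliced into the SQL as literals, which presumably relies on them being tool-generated rather than user input. An illustrative call, with a made-up table name standing in for ctx.overrideTableName and a made-up payload:

import { asOverrideCreate } from 'src/sql-tools/transformers/override.transformer';
import { DatabaseOverride } from 'src/sql-tools/types';

// Hypothetical values for illustration only.
const override = { name: 'function_test_func', value: { sql: 'CREATE FUNCTION ...' } } as DatabaseOverride;
const sql = asOverrideCreate('override_table', override);
// => INSERT INTO "override_table" ("name", "value") VALUES ('function_test_func', <asJsonString(value) literal>);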

View File

@@ -1,7 +1,7 @@
import { SqlTransformer } from 'src/sql-tools/transformers/types';
import { DatabaseParameter, SchemaDiff } from 'src/sql-tools/types';
import { DatabaseParameter } from 'src/sql-tools/types';
export const transformParameters: SqlTransformer = (item: SchemaDiff) => {
export const transformParameters: SqlTransformer = (ctx, item) => {
switch (item.type) {
case 'ParameterSet': {
return asParameterSet(item.parameter);

View File

@@ -1,11 +1,14 @@
import { BaseContext } from 'src/sql-tools/contexts/base-context';
import { transformTables } from 'src/sql-tools/transformers/table.transformer';
import { describe, expect, it } from 'vitest';
const ctx = new BaseContext({});
describe(transformTables.name, () => {
describe('TableDrop', () => {
it('should work', () => {
expect(
transformTables({
transformTables(ctx, {
type: 'TableDrop',
tableName: 'table1',
reason: 'unknown',
@@ -17,7 +20,7 @@ describe(transformTables.name, () => {
describe('TableCreate', () => {
it('should work', () => {
expect(
transformTables({
transformTables(ctx, {
type: 'TableCreate',
table: {
name: 'table1',
@@ -43,7 +46,7 @@ describe(transformTables.name, () => {
it('should handle a non-nullable column', () => {
expect(
transformTables({
transformTables(ctx, {
type: 'TableCreate',
table: {
name: 'table1',
@@ -69,7 +72,7 @@ describe(transformTables.name, () => {
it('should handle a default value', () => {
expect(
transformTables({
transformTables(ctx, {
type: 'TableCreate',
table: {
name: 'table1',
@@ -96,7 +99,7 @@ describe(transformTables.name, () => {
it('should handle a string with a fixed length', () => {
expect(
transformTables({
transformTables(ctx, {
type: 'TableCreate',
table: {
name: 'table1',
@@ -123,7 +126,7 @@ describe(transformTables.name, () => {
it('should handle an array type', () => {
expect(
transformTables({
transformTables(ctx, {
type: 'TableCreate',
table: {
name: 'table1',

View File

@@ -1,9 +1,9 @@
import { asColumnComment, getColumnModifiers, getColumnType } from 'src/sql-tools/helpers';
import { asColumnAlter } from 'src/sql-tools/transformers/column.transformer';
import { SqlTransformer } from 'src/sql-tools/transformers/types';
import { DatabaseTable, SchemaDiff } from 'src/sql-tools/types';
import { DatabaseTable } from 'src/sql-tools/types';
export const transformTables: SqlTransformer = (item: SchemaDiff) => {
export const transformTables: SqlTransformer = (ctx, item) => {
switch (item.type) {
case 'TableCreate': {
return asTableCreate(item.table);
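
Taken together, callers now build one context and thread it through every transformer. A hypothetical end-to-end call — the overrideTableName option is inferred from ctx.overrideTableName in override.transformer.ts above, since the BaseContext constructor is not shown in this diff:

import { BaseContext } from 'src/sql-tools/contexts/base-context';
import { transformTables } from 'src/sql-tools/transformers/table.transformer';

// The specs above construct the context as `new BaseContext({})`; the option key here is an assumption.
const ctx = new BaseContext({ overrideTableName: 'override_table' });
transformTables(ctx, { type: 'TableDrop', tableName: 'table1', reason: 'unknown' });
// => presumably a `DROP TABLE "table1";` statement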

Some files were not shown because too many files have changed in this diff.