WIP: services/archive/v2: init
This commit is contained in:
@@ -245,42 +245,69 @@ extension HistoryEntryExt on DatabaseExecutor {
|
||||
);
|
||||
}
|
||||
|
||||
Future<void> historyEntryInsertEntry(final HistoryEntry entry) =>
|
||||
historyEntryInsertEntries([entry]);
|
||||
// TODO: add a parameter flag to ignore existing id and assign a new one.
|
||||
Future<void> historyEntryInsertEntry(
|
||||
final HistoryEntry entry, {
|
||||
final bool assignNewId = false,
|
||||
}) => historyEntryInsertEntries([entry], assignNewIds: assignNewId);
|
||||
|
||||
Future<void> historyEntryInsertEntries(
|
||||
final Iterable<HistoryEntry> entries,
|
||||
) async {
|
||||
final Iterable<HistoryEntry> entries, {
|
||||
final bool assignNewIds = false,
|
||||
}) async {
|
||||
late final List<int> newIds;
|
||||
if (assignNewIds) {
|
||||
final b = batch();
|
||||
for (final _ in entries) {
|
||||
b.insert(HistoryTableNames.historyEntry, {}, nullColumnHack: 'id');
|
||||
}
|
||||
newIds = (await b.commit()).map((final result) => result as int).toList();
|
||||
}
|
||||
|
||||
assert(
|
||||
!assignNewIds || newIds.length == entries.length,
|
||||
'Number of new IDs must match number of entries when assignNewIds is true',
|
||||
);
|
||||
|
||||
final b = batch();
|
||||
for (final entry in entries) {
|
||||
b.insert(
|
||||
HistoryTableNames.historyEntry,
|
||||
{'id': entry.id},
|
||||
nullColumnHack: 'id',
|
||||
);
|
||||
for (final (i, entry) in entries.indexed) {
|
||||
final int id = assignNewIds ? newIds[i] : entry.id;
|
||||
|
||||
if (!assignNewIds) {
|
||||
b.insert(
|
||||
HistoryTableNames.historyEntry,
|
||||
{'id': id},
|
||||
conflictAlgorithm: ConflictAlgorithm.ignore,
|
||||
);
|
||||
}
|
||||
|
||||
if (entry.isKanji) {
|
||||
b.insert(HistoryTableNames.historyEntryKanji, {
|
||||
'entryId': entry.id,
|
||||
'kanji': entry.kanji,
|
||||
});
|
||||
b.insert(
|
||||
HistoryTableNames.historyEntryKanji,
|
||||
{'entryId': id, 'kanji': entry.kanji},
|
||||
conflictAlgorithm: assignNewIds ? null : ConflictAlgorithm.ignore,
|
||||
);
|
||||
} else {
|
||||
b.insert(HistoryTableNames.historyEntryWord, {
|
||||
'entryId': entry.id,
|
||||
'word': entry.word,
|
||||
'language': {
|
||||
null: null,
|
||||
'japanese': 'j',
|
||||
'english': 'e',
|
||||
}[entry.language],
|
||||
});
|
||||
b.insert(
|
||||
HistoryTableNames.historyEntryWord,
|
||||
{
|
||||
'entryId': id,
|
||||
'word': entry.word,
|
||||
'language': {
|
||||
null: null,
|
||||
'japanese': 'j',
|
||||
'english': 'e',
|
||||
}[entry.language],
|
||||
},
|
||||
conflictAlgorithm: assignNewIds ? null : ConflictAlgorithm.ignore,
|
||||
);
|
||||
}
|
||||
|
||||
for (final timestamp in entry.timestamps) {
|
||||
b.insert(
|
||||
HistoryTableNames.historyEntryTimestamp,
|
||||
{'entryId': entry.id, 'timestamp': timestamp.millisecondsSinceEpoch},
|
||||
conflictAlgorithm: ConflictAlgorithm.ignore,
|
||||
{'entryId': id, 'timestamp': timestamp.millisecondsSinceEpoch},
|
||||
conflictAlgorithm: assignNewIds ? null : ConflictAlgorithm.ignore,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -899,11 +899,11 @@ class LibraryListEntry {
|
||||
"Library entry can't have both kanji and jmdictEntryId",
|
||||
),
|
||||
assert(
|
||||
kanjiSearchResult?.kanji == kanji,
|
||||
kanjiSearchResult == null || kanjiSearchResult.kanji == kanji,
|
||||
"KanjiSearchResult's kanji must match the kanji in LibraryListEntry",
|
||||
),
|
||||
assert(
|
||||
wordSearchResult?.entryId == jmdictEntryId,
|
||||
wordSearchResult == null || wordSearchResult.entryId == jmdictEntryId,
|
||||
"WordSearchResult's jmdictEntryId must match the jmdictEntryId in LibraryListEntry",
|
||||
);
|
||||
|
||||
|
||||
196
lib/services/archive/v2/format.dart
Normal file
196
lib/services/archive/v2/format.dart
Normal file
@@ -0,0 +1,196 @@
|
||||
import 'dart:convert';
|
||||
import 'dart:core';
|
||||
import 'dart:io';
|
||||
|
||||
import 'package:collection/collection.dart';
|
||||
import 'package:mugiten/models/history_entry.dart';
|
||||
import 'package:mugiten/models/library_list.dart';
|
||||
import 'package:mugiten/services/archive/v1/format.dart'
|
||||
show tmpdir, packZip, unpackZipToTempDir;
|
||||
import 'package:sqflite/sqlite_api.dart';
|
||||
|
||||
export 'package:mugiten/services/archive/v1/format.dart'
|
||||
show getExportFileNameNoSuffix;
|
||||
|
||||
part './history.dart';
|
||||
part './library_lists.dart';
|
||||
|
||||
const int expectedDataFormatVersion = 2;
|
||||
const int historyChunkSize = 100;
|
||||
const int libraryListChunkSize = 100;
|
||||
|
||||
/// Functions and properties that makes up the format of version 2 of the data archive.
/// This archive is used to back up user data and optionally to transfer data between devices.
/// The main difference to version 1 is that the data is split into chunks, so that it can be
/// streamed and processed in parts, instead of having to load the entire data into memory at once.
/// This not only reduces the memory usage, but also allows for reporting progress and resuming interrupted imports/exports.
///
/// Example file Structure:
///
/// ```
/// - jisho_data_2022.01.01_1
///   - history/
///     - 1.json
///     - 2.json
///     - ...
///     - 99.json
///     - ...
///   - library/
///     - metadata.json
///     - lista/
///       - 1.json
///       - 2.json
///       - ...
///     - listb/
///       - 1.json
///       - 2.json
///       - ...
/// ```
extension ArchiveFormatV2 on Directory {
  /// The file recording the archive's data format version.
  File get versionFile => File(uri.resolve('version.txt').toFilePath());

  /// The data format version read from [versionFile].
  int get version => int.parse(versionFile.readAsStringSync());

  // History //

  Directory get historyDir => Directory(uri.resolve('history').toFilePath());

  /// All history chunk files, sorted numerically by chunk index
  /// (plain lexicographic order would put 10.json before 2.json).
  List<File> get historyChunkFiles =>
      historyDir.listSync().whereType<File>().sortedBy(
        (final f) =>
            int.tryParse(
              f.uri.pathSegments.last.replaceFirst(RegExp(r'\.json$'), ''),
            ) ??
            0,
      );

  File historyChunkFile(final int chunkIndex) =>
      File(historyDir.uri.resolve('$chunkIndex.json').toFilePath());

  int get historyChunkCount => historyDir.listSync().whereType<File>().length;

  // Library Lists //

  Directory get libraryDir => Directory(uri.resolve('library').toFilePath());

  /// See [libraryMetadata] for the expected content of this file.
  File get libraryMetadataFile =>
      File(libraryDir.uri.resolve('metadata.json').toFilePath());

  /// The metadata of all library lists
  ///
  /// This is expected to be a list of objects, containing:
  /// - *order*: implicitly from the order of the json list, the index of the library list
  /// - name: the original name of the library list
  /// - slug: the slugified name of the library list, used for the directory name
  ///
  /// Fix: the file holds a JSON *list* (written by [exportLibraryMetadata],
  /// read as a list by [importLibraryMetadata]); the previous
  /// `as Map<String, Object?>` cast could never succeed on a well-formed
  /// archive.
  List<Object?> get libraryMetadata =>
      jsonDecode(libraryMetadataFile.readAsStringSync()) as List<Object?>;

  List<Directory> get libraryListDirs =>
      libraryDir.listSync().whereType<Directory>().toList();

  Directory libraryListDir(final String listName) => Directory(
    libraryDir.uri
        .resolve('${slugifyLibraryListFileName(listName)}/')
        .toFilePath(),
  );

  File libraryListChunkFile(final String listName, final int chunkIndex) =>
      File(
        libraryListDir(listName).uri.resolve('$chunkIndex.json').toFilePath(),
      );

  /// NOTE(review): this subtracts 1 per list directory "for metadata.json",
  /// but per the layout documented above metadata.json lives in `library/`,
  /// not inside each list directory — and the files counted are *chunks*,
  /// not entries. Confirm the intent before relying on these values.
  List<int> get libraryListEntryCounts => libraryListDirs
      .map(
        (final d) =>
            d.listSync().whereType<File>().length -
            1, // Subtract 1 for metadata.json
      )
      .toList();
}
|
||||
|
||||
/// Converts a library list [name] into a filesystem-friendly directory name.
///
/// Lowercases the name and collapses every run of whitespace into a single
/// underscore. NOTE(review): other filesystem-unsafe characters (e.g. `/`)
/// are not sanitized — confirm list names cannot contain them.
String slugifyLibraryListFileName(final String name) {
  final whitespace = RegExp(r'\s+');
  return name.toLowerCase().replaceAll(whitespace, '_');
}
|
||||
|
||||
/// A progress event emitted while streaming an archive export or import.
///
/// [type] is either `'history'` or `'library'`. [progress]/[total] track the
/// coarse unit (history chunks, or library lists). For `'library'` events,
/// [name] identifies the list and [subProgress]/[subTotal] track its chunks;
/// `'history'` events carry none of the three.
class ArchiveV2StreamEvent {
  final String type;
  final int progress;
  final int total;

  final String? name;
  final int? subProgress;
  final int? subTotal;

  const ArchiveV2StreamEvent({
    required this.type,
    required this.progress,
    required this.total,
    this.name,
    this.subProgress,
    this.subTotal,
  }) : assert(
         0 < progress && progress <= total && 0 < total,
         '0 < progress <= total must hold',
       ),
       assert(
         type == 'history' || type == 'library',
         'Type must be either "history" or "library"',
       ),
       assert(
         type != 'history' ||
             (name == null && subProgress == null && subTotal == null),
         'history events must not have a name, subProgress or subTotal',
       ),
       assert(
         type != 'library' ||
             (name != null && subProgress != null && subTotal != null),
         'library events must have a name, subProgress and subTotal',
       ),
       assert(
         (subProgress == null && subTotal == null) ||
             (subProgress != null &&
                 subTotal != null &&
                 subProgress > 0 &&
                 subTotal > 0 &&
                 subProgress <= subTotal),
         'subProgress and subTotal must both be null or both be positive integers with subProgress <= subTotal',
       );
}
|
||||
|
||||
/// Exports all user data from [db] into a v2 zip archive at [archiveFile].
///
/// Creates [archiveFile] if it does not exist yet. Data is first written into
/// a temporary directory (version file, history chunks, library list chunks),
/// which is then packed into the zip and deleted.
///
/// Streams [ArchiveV2StreamEvent]s to report progress; see [exportHistory]
/// and [exportLibraryLists].
Stream<ArchiveV2StreamEvent> exportData(
  final DatabaseExecutor db,
  final File archiveFile,
) async* {
  if (!archiveFile.existsSync()) {
    archiveFile.createSync();
  }

  final archiveRoot = await tmpdir();

  // Fix: clean up the temporary directory even when the export or the zip
  // packing fails partway through; previously it was leaked on error.
  try {
    await ArchiveFormatV2(
      archiveRoot,
    ).versionFile.writeAsString(expectedDataFormatVersion.toString());

    yield* exportHistory(db, archiveRoot);
    yield* exportLibraryLists(db, archiveRoot);

    await packZip(archiveRoot, outputFile: archiveFile);
  } finally {
    if (archiveRoot.existsSync()) {
      archiveRoot.deleteSync(recursive: true);
    }
  }
}
|
||||
|
||||
/// Imports all user data from the v2 zip archive at [archiveFile] into [db].
///
/// Throws if [archiveFile] does not exist. The archive is unpacked into a
/// temporary directory which is removed afterwards.
///
/// TODO(review): the archive's `version.txt` is never checked against
/// [expectedDataFormatVersion] — confirm whether a version guard is intended.
Stream<ArchiveV2StreamEvent> importData(
  final DatabaseExecutor db,
  final File archiveFile,
) async* {
  if (!archiveFile.existsSync()) {
    throw Exception('Archive file does not exist: ${archiveFile.path}');
  }

  final archiveRoot = await unpackZipToTempDir(archiveFile.path);

  // Fix: clean up the unpacked temporary directory even when the import
  // fails partway through; previously it was leaked on error.
  try {
    yield* importHistory(db, archiveRoot);
    yield* importLibraryLists(db, archiveRoot);
  } finally {
    if (archiveRoot.existsSync()) {
      archiveRoot.deleteSync(recursive: true);
    }
  }
}
|
||||
151
lib/services/archive/v2/history.dart
Normal file
151
lib/services/archive/v2/history.dart
Normal file
@@ -0,0 +1,151 @@
|
||||
part of './format.dart';
|
||||
|
||||
/// Archive-format (v2) representation of a single history entry.
///
/// At least one of [word] / [kanji] is always set, mirroring the database
/// model. Each past search is kept as an [ArchiveV2HistorySearchInstance].
class ArchiveV2HistoryEntry {
  final int id;
  final List<ArchiveV2HistorySearchInstance> searchInstances;

  // TODO: add information about whether the search had zero, one or more results.
  // TODO: add information about search mode.

  final String? word;
  final String? kanji;

  const ArchiveV2HistoryEntry({
    required this.id,
    required this.searchInstances,
    this.word,
    this.kanji,
  }) : assert(
         word != null || kanji != null,
         'At least one of word or kanji must be non-null',
       );

  /// Builds the archive representation from a database-level [HistoryEntry].
  /// mediaName is always null since the database model stores raw timestamps.
  factory ArchiveV2HistoryEntry.fromHistoryEntry(final HistoryEntry entry) =>
      ArchiveV2HistoryEntry(
        id: entry.id,
        searchInstances: [
          for (final timestamp in entry.timestamps)
            ArchiveV2HistorySearchInstance(
              timestamp: timestamp,
              mediaName: null,
            ),
        ],
        word: entry.word,
        kanji: entry.kanji,
      );

  /// Converts back to the database-level model; per-search media names are
  /// dropped because [HistoryEntry] only keeps timestamps.
  HistoryEntry toHistoryEntry() => HistoryEntry(
    id: id,
    timestamps: [for (final instance in searchInstances) instance.timestamp],
    word: word,
    kanji: kanji,
  );

  factory ArchiveV2HistoryEntry.fromJson(final Map<String, Object?> json) {
    final rawInstances = json['searchInstances'] as List<dynamic>;
    final instances = [
      for (final raw in rawInstances.cast<Map<String, Object?>>())
        ArchiveV2HistorySearchInstance(
          timestamp: DateTime.parse(raw['timestamp'] as String),
          mediaName: raw['mediaName'] as String?,
        ),
    ];
    return ArchiveV2HistoryEntry(
      id: json['id'] as int,
      searchInstances: instances,
      word: json['word'] as String?,
      kanji: json['kanji'] as String?,
    );
  }

  Map<String, Object?> toJson() => {
    'id': id,
    'searchInstances': [
      for (final instance in searchInstances)
        {
          'timestamp': instance.timestamp.toIso8601String(),
          'mediaName': instance.mediaName,
        },
    ],
    'word': word,
    'kanji': kanji,
  };
}
|
||||
|
||||
/// A single occurrence of a history search at [timestamp].
///
/// [mediaName] optionally records the media the search originated from; it is
/// null for entries converted from the current database model.
class ArchiveV2HistorySearchInstance {
  final DateTime timestamp;
  final String? mediaName;

  const ArchiveV2HistorySearchInstance({
    required this.timestamp,
    this.mediaName,
  });
}
|
||||
|
||||
/// Calculate the total number of chunks needed to export the history,
/// needed for progress tracking during export.
Future<int> exportHistoryChunkCount(final DatabaseExecutor db) async {
  final int entryCount = await db.historyEntryAmount();
  return (entryCount / historyChunkSize).ceil();
}
|
||||
|
||||
/// Exports the history into json files in the given directory.
///
/// Streams back the number of chunks that have been exported so far.
Stream<ArchiveV2StreamEvent> exportHistory(
  final DatabaseExecutor db,
  final Directory archiveRoot,
) async* {
  final int chunkCount = await exportHistoryChunkCount(db);

  archiveRoot.historyDir.createSync();

  for (int chunk = 0; chunk < chunkCount; chunk++) {
    // One page of history entries becomes one json chunk file.
    final page = await db.historyEntryGetAll(
      page: chunk,
      pageSize: historyChunkSize,
    );
    final jsonEntries = [
      for (final entry in page)
        ArchiveV2HistoryEntry.fromHistoryEntry(entry).toJson(),
    ];

    archiveRoot.historyChunkFile(chunk)
      ..createSync()
      ..writeAsStringSync(jsonEncode(jsonEntries));

    yield ArchiveV2StreamEvent(
      type: 'history',
      progress: chunk + 1,
      total: chunkCount,
    );
  }
}
|
||||
|
||||
/// Imports all history chunk files from [archiveRoot] into [db].
///
/// Streams back one event per imported chunk.
Stream<ArchiveV2StreamEvent> importHistory(
  final DatabaseExecutor db,
  final Directory archiveRoot,
) async* {
  final int chunkCount = archiveRoot.historyChunkCount;

  for (int chunk = 0; chunk < chunkCount; chunk++) {
    final raw = archiveRoot.historyChunkFile(chunk).readAsStringSync();
    final decoded = jsonDecode(raw) as List<dynamic>;

    final historyEntries = decoded
        .cast<Map<String, Object?>>()
        .map(ArchiveV2HistoryEntry.fromJson)
        .map((final archiveEntry) => archiveEntry.toHistoryEntry());

    await db.historyEntryInsertEntries(historyEntries);

    yield ArchiveV2StreamEvent(
      type: 'history',
      progress: chunk + 1,
      total: chunkCount,
    );
  }
}
|
||||
245
lib/services/archive/v2/library_lists.dart
Normal file
245
lib/services/archive/v2/library_lists.dart
Normal file
@@ -0,0 +1,245 @@
|
||||
part of './format.dart';
|
||||
|
||||
/// Name and directory slug of a single library list inside the archive.
class ArchiveV2LibraryListMetadata {
  /// The original, user-visible name of the list.
  final String name;

  /// The slugified name, used as the list's on-disk directory name.
  final String slug;

  const ArchiveV2LibraryListMetadata({required this.name, required this.slug});

  Map<String, Object?> toJson() => {'name': name, 'slug': slug};
}
|
||||
|
||||
/// Archive-format (v2) representation of one library list entry.
///
/// Exactly like the database model, an entry refers either to a kanji or to
/// a JMdict entry: at least one of [kanji] / [jmdictEntryId] is set.
class ArchiveV2LibraryListEntry {
  final DateTime lastModified;
  final int? jmdictEntryId;
  final String? kanji;

  const ArchiveV2LibraryListEntry({
    required this.lastModified,
    this.jmdictEntryId,
    this.kanji,
  }) : assert(
         jmdictEntryId != null || kanji != null,
         'At least one of jmdictEntryId or kanji must be non-null',
       );

  /// Builds the archive representation from the database-level model.
  factory ArchiveV2LibraryListEntry.fromLibraryListEntry(
    final LibraryListEntry entry,
  ) => ArchiveV2LibraryListEntry(
    lastModified: entry.lastModified,
    jmdictEntryId: entry.jmdictEntryId,
    kanji: entry.kanji,
  );

  factory ArchiveV2LibraryListEntry.fromJson(final Map<String, Object?> json) =>
      ArchiveV2LibraryListEntry(
        lastModified: DateTime.parse(json['lastModified'] as String),
        jmdictEntryId: json['jmdictEntryId'] as int?,
        kanji: json['kanji'] as String?,
      );

  Map<String, Object?> toJson() => {
    'lastModified': lastModified.toIso8601String(),
    'jmdictEntryId': jmdictEntryId,
    'kanji': kanji,
  };
}
|
||||
|
||||
/// Exports metadata about library lists, such as their names and order, into the archive.
Future<void> exportLibraryMetadata(
  final DatabaseExecutor db,
  final Directory archiveRoot,
) async {
  final libraryLists = await db.libraryListGetLists();

  // The position in this list encodes the display order of the lists.
  final metadataList = [
    for (final libraryList in libraryLists)
      ArchiveV2LibraryListMetadata(
        name: libraryList.name,
        slug: slugifyLibraryListFileName(libraryList.name),
      ),
  ];

  // jsonEncode serializes each element via its toJson().
  final metadataFile = archiveRoot.libraryMetadataFile..createSync();
  await metadataFile.writeAsString(jsonEncode(metadataList));
}
|
||||
|
||||
/// Reads the library list metadata from the archive, in archived (display)
/// order.
///
/// Throws if the metadata file is missing. Fix: the existence check was an
/// `assert`, which is compiled out in release builds — real input must be
/// validated with an explicit throw.
List<ArchiveV2LibraryListMetadata> importLibraryMetadata(
  final Directory archiveRoot,
) {
  final metadataFile = archiveRoot.libraryMetadataFile;
  if (!metadataFile.existsSync()) {
    throw Exception('Library metadata file does not exist');
  }

  final String content = metadataFile.readAsStringSync();
  final List<dynamic> jsonList = jsonDecode(content) as List<dynamic>;

  return jsonList
      .map((final e) => e as Map<String, Object?>)
      .map(
        (final e) => ArchiveV2LibraryListMetadata(
          name: e['name']! as String,
          slug: e['slug']! as String,
        ),
      )
      .toList();
}
|
||||
|
||||
/// Calculate the total number of chunks needed to export all library lists,
/// needed for progress tracking during export.
///
/// Returns 0 when there are no library lists. Fix: `.reduce` throws a
/// `StateError` on an empty iterable, so a database without lists crashed
/// the export; `.fold` handles the empty case.
Future<int> exportLibraryListChunkCount(final DatabaseExecutor db) async =>
    (await db.libraryListGetLists())
        .map(
          (final libraryList) =>
              (libraryList.totalCount / libraryListChunkSize).ceil(),
        )
        .fold(0, (final a, final b) => a + b);
|
||||
|
||||
/// Exports all library lists into json files in the given directory.
///
/// Streams back the number of chunks that have been exported so far.
/// See also [exportLibraryListChunkCount].
Stream<ArchiveV2StreamEvent> exportLibraryLists(
  final DatabaseExecutor db,
  final Directory archiveRoot,
) async* {
  archiveRoot.libraryDir.createSync();

  await exportLibraryMetadata(db, archiveRoot);

  final libraryLists = await db.libraryListGetLists();
  final int listCount = libraryLists.length;

  for (final (index, libraryList) in libraryLists.indexed) {
    yield* exportLibraryList(
      db,
      archiveRoot,
      libraryList,
      index + 1,
      listCount,
    );
  }
}
|
||||
|
||||
/// Exports a single library list into json files in the given directory.
///
/// Streams back the number of chunks that have been exported so far.
/// [index] / [total] identify this list's position for progress reporting.
Stream<ArchiveV2StreamEvent> exportLibraryList(
  final DatabaseExecutor db,
  final Directory archiveRoot,
  final LibraryList libraryList,
  final int index,
  final int total,
) async* {
  final int chunkCount = (libraryList.totalCount / libraryListChunkSize).ceil();

  archiveRoot.libraryListDir(libraryList.name).createSync();

  for (int chunk = 0; chunk < chunkCount; chunk++) {
    // Non-null asserted: the list was just read from the database.
    final entryPage = (await db.libraryListGetListEntries(
      libraryList.name,
      page: chunk,
      pageSize: libraryListChunkSize,
    ))!;

    final archiveEntries = [
      for (final entry in entryPage.entries)
        ArchiveV2LibraryListEntry.fromLibraryListEntry(entry),
    ];

    archiveRoot.libraryListChunkFile(libraryList.name, chunk)
      ..createSync()
      ..writeAsStringSync(jsonEncode(archiveEntries));

    yield ArchiveV2StreamEvent(
      type: 'library',
      progress: index,
      total: total,
      name: libraryList.name,
      subProgress: chunk + 1,
      subTotal: chunkCount,
    );
  }
}
|
||||
|
||||
/// Imports every library list described by the archive's metadata into [db].
///
/// Lists whose directory is missing, or that already exist non-empty in the
/// database, are skipped with a log message. Streams one event per imported
/// chunk; see [importLibraryList].
Stream<ArchiveV2StreamEvent> importLibraryLists(
  final DatabaseExecutor db,
  final Directory archiveRoot,
) async* {
  final metadata = importLibraryMetadata(archiveRoot);
  for (final (i, meta) in metadata.indexed) {
    final libraryListDir = archiveRoot.libraryListDir(meta.name);
    if (!libraryListDir.existsSync()) {
      print(
        'Library list directory for "${meta.name}" does not exist. Skipping import.',
      );
      continue;
    }

    if (!await db.libraryListExists(meta.name)) {
      // Unknown list: create it, then import its entries below.
      await db.libraryListInsertList(meta.name);
    } else if ((await db.libraryListGetList(meta.name))!.totalCount > 0) {
      print(
        'Library list "${meta.name}" already exists and is not empty. Skipping import.',
      );
      continue;
    } else {
      print(
        'Library list "${meta.name}" already exists but is empty. '
        'Importing entries from file $libraryListDir.',
      );
    }

    yield* importLibraryList(
      db,
      meta.name,
      libraryListDir,
      i + 1,
      metadata.length,
    );
  }
  // TODO: assert that we have not missed any library lists not present in the metadata.
}
|
||||
|
||||
/// Imports the chunk files of a single library list from [libraryListDir]
/// into the (already existing) list [libraryListName].
///
/// Streams back one event per imported chunk; throws if the database rejects
/// a chunk's entries.
Stream<ArchiveV2StreamEvent> importLibraryList(
  final DatabaseExecutor db,
  final String libraryListName,
  final Directory libraryListDir,
  final int index,
  final int total,
) async* {
  // Fix: sort numerically by chunk index (mirroring `historyChunkFiles`).
  // `listSync()` returns files in platform-dependent order, which made the
  // import order and the streamed subProgress values non-deterministic.
  final List<File> chunkFiles = libraryListDir
      .listSync()
      .whereType<File>()
      .sortedBy(
        (final f) =>
            int.tryParse(
              f.uri.pathSegments.last.replaceFirst(RegExp(r'\.json$'), ''),
            ) ??
            0,
      );

  for (final (i, chunkFile) in chunkFiles.indexed) {
    final chunkContent = chunkFile.readAsStringSync();
    final List<dynamic> jsonEntries = jsonDecode(chunkContent) as List<dynamic>;

    final entries = jsonEntries
        .map((final e) => e as Map<String, Object?>)
        .map(ArchiveV2LibraryListEntry.fromJson)
        .map(
          (final e) => LibraryListEntry(
            lastModified: e.lastModified,
            jmdictEntryId: e.jmdictEntryId,
            kanji: e.kanji,
          ),
        )
        .toList();

    final result = await db.libraryListInsertEntries(libraryListName, entries);
    if (!result) {
      throw Exception(
        'Failed to insert entries for library list "$libraryListName" from chunk file "${chunkFile.path}".',
      );
    }

    yield ArchiveV2StreamEvent(
      type: 'library',
      progress: index,
      total: total,
      name: libraryListName,
      subProgress: i + 1,
      subTotal: chunkFiles.length,
    );
  }
}
|
||||
@@ -11,7 +11,8 @@ import 'package:mugiten/database/database.dart'
|
||||
openAndMigrateDatabase,
|
||||
openDatabaseWithoutMigrations,
|
||||
readMigrationsFromAssets;
|
||||
import 'package:mugiten/services/archive/v1/format.dart';
|
||||
import 'package:mugiten/services/archive/v1/format.dart'
|
||||
show exportData, importData;
|
||||
import 'package:mugiten/services/initialization/initialization_status.dart';
|
||||
import 'package:path_provider/path_provider.dart';
|
||||
|
||||
|
||||
@@ -89,7 +89,7 @@ void main() {
|
||||
final listCount1 = await database.libraryListAmount();
|
||||
assert(
|
||||
listCount1 == 4,
|
||||
'Library list amount should be 3 after insertion, but got $listCount1',
|
||||
'Library list amount should be 4 after insertion, but got $listCount1',
|
||||
);
|
||||
|
||||
tmpdir.libraryDir.createSync();
|
||||
@@ -102,7 +102,7 @@ void main() {
|
||||
final listCount2 = await database.libraryListAmount();
|
||||
assert(
|
||||
listCount2 == 1,
|
||||
'Library list amount should be 0 after deletion, but got $listCount2',
|
||||
'Library list amount should be 1 after deletion, but got $listCount2',
|
||||
);
|
||||
|
||||
await importLibraryListsFrom(database, tmpdir);
|
||||
@@ -110,7 +110,7 @@ void main() {
|
||||
final listCount3 = await database.libraryListAmount();
|
||||
assert(
|
||||
listCount3 == 4,
|
||||
'Library list amount should be 3 after import, but got $listCount3',
|
||||
'Library list amount should be 4 after import, but got $listCount3',
|
||||
);
|
||||
});
|
||||
|
||||
|
||||
161
test/services/archive/v2/archive_history_test.dart
Normal file
161
test/services/archive/v2/archive_history_test.dart
Normal file
@@ -0,0 +1,161 @@
|
||||
import 'dart:io';
|
||||
|
||||
import 'package:flutter_test/flutter_test.dart';
|
||||
import 'package:get_it/get_it.dart';
|
||||
import 'package:mugiten/database/history/table_names.dart';
|
||||
import 'package:mugiten/models/history_entry.dart';
|
||||
import 'package:mugiten/models/library_list.dart';
|
||||
import 'package:mugiten/services/archive/v2/format.dart';
|
||||
import 'package:sqflite/sqlite_api.dart';
|
||||
|
||||
import '../../../testutils.dart';
|
||||
|
||||
/// Seeds the test database with two empty library lists.
///
/// Fix: the original cascaded the calls (`db..libraryListInsertList(...)`),
/// which silently dropped the returned futures — the inserts could still be
/// pending (and their errors lost) when the test body continued.
Future<void> insertTestData(final DatabaseExecutor db) async {
  await db.libraryListInsertList('Test List 1');
  await db.libraryListInsertList('Test List 2');
}
|
||||
|
||||
void main() {
  // Resolved paths to the sqlite dynamic library and the dictionary
  // database, filled in once by setUpAll from the environment.
  late final String libsqlitePath;
  late final String jadbPath;
  // Per-test scratch directory acting as the archive root.
  late Directory tmpdir;
  // Per-test working copy of the dictionary database.
  late Database database;

  setUpAll(() {
    // Fail fast with a clear message when the required environment is missing.
    if (!Platform.environment.containsKey('LIBSQLITE_PATH')) {
      throw Exception('LIBSQLITE_PATH environment variable is not set.');
    }

    if (!Platform.environment.containsKey('JADB_PATH')) {
      throw Exception('JADB_PATH environment variable is not set.');
    }

    // Resolve symlinks once so every test opens the same concrete files.
    libsqlitePath = File(
      Platform.environment['LIBSQLITE_PATH']!,
    ).resolveSymbolicLinksSync();
    jadbPath = File(
      Platform.environment['JADB_PATH']!,
    ).resolveSymbolicLinksSync();
  });

  // Setup sqflite_common_ffi for flutter test
  setUp(() async {
    // Work on a throwaway copy so tests can freely mutate the database.
    database = await createDatabaseCopy(
      libsqlitePath: libsqlitePath,
      jadbPath: jadbPath,
    );

    GetIt.instance.registerSingleton<Database>(database);

    // Fresh archive root with an empty history/ directory for each test.
    tmpdir = await test_tmpdir();
    tmpdir.historyDir.createSync();
  });

  tearDown(() async {
    await database.close();

    GetIt.instance.unregister<Database>();

    // Remove the database copy created in setUp.
    final jadbCopyPath = database.path;

    if (File(jadbCopyPath).existsSync()) {
      await File(jadbCopyPath).delete();
    }

    if (tmpdir.existsSync()) {
      await tmpdir.delete(recursive: true);
    }
  });

  group('Export-import history', () {
    // Export, wipe the history table completely, reimport: the entry count
    // must round-trip exactly.
    test('Full reimport', () async {
      final historyEntries = await createRandomHistoryEntries(
        db: database,
        count: 300,
      );
      await database.historyEntryInsertEntries(historyEntries);
      final historyEntryAmount = await database.historyEntryAmount();
      assert(
        historyEntryAmount == historyEntries.length,
        'History entry amount should be ${historyEntries.length}, but got $historyEntryAmount',
      );

      await exportHistory(database, tmpdir).drain();

      await database.delete(HistoryTableNames.historyEntry);
      final int emptyHistoryEntryAmount = await database.historyEntryAmount();
      assert(
        emptyHistoryEntryAmount == 0,
        'History entry amount should be 0 after deletion, but got $emptyHistoryEntryAmount',
      );

      await importHistory(database, tmpdir).drain();
      final int importedHistoryEntryAmount = await database
          .historyEntryAmount();
      assert(
        importedHistoryEntryAmount == historyEntries.length,
        'History entry amount should be ${historyEntries.length} after import, but got $importedHistoryEntryAmount',
      );
    });

    // Delete only half of the entries; reimporting must restore the deleted
    // half without duplicating the surviving half.
    test('Partially delete, idempotent reimport', () async {
      final historyEntries = await createRandomHistoryEntries(
        db: database,
        count: 300,
      );
      await database.historyEntryInsertEntries(historyEntries);
      final historyEntryAmount = await database.historyEntryAmount();
      assert(
        historyEntryAmount == historyEntries.length,
        'History entry amount should be ${historyEntries.length}, but got $historyEntryAmount',
      );

      await exportHistory(database, tmpdir).drain();

      // Delete the first half of the entries by id.
      final List<HistoryEntry> entriesToDelete = historyEntries.sublist(
        0,
        historyEntries.length ~/ 2,
      );
      final b = database.batch()
        ..delete(
          HistoryTableNames.historyEntry,
          where:
              'id IN (${List.filled(entriesToDelete.length, '?').join(',')})',
          whereArgs: entriesToDelete.map((final e) => e.id).toList(),
        );
      await b.commit(noResult: true);

      await importHistory(database, tmpdir).drain();
      final int importedHistoryEntryAmount = await database
          .historyEntryAmount();
      assert(
        importedHistoryEntryAmount == historyEntries.length,
        'History entry amount should be ${historyEntries.length} after import, but got $importedHistoryEntryAmount',
      );
    });

    // Delete nothing; reimporting on top of the existing data must leave the
    // total unchanged (the import is idempotent).
    test('Do not delete, idempotent reimport', () async {
      final historyEntries = await createRandomHistoryEntries(
        db: database,
        count: 300,
      );
      await database.historyEntryInsertEntries(historyEntries);
      final historyEntryAmount = await database.historyEntryAmount();
      assert(
        historyEntryAmount == historyEntries.length,
        'History entry amount should be ${historyEntries.length}, but got $historyEntryAmount',
      );

      await exportHistory(database, tmpdir).drain();

      await importHistory(database, tmpdir).drain();
      final int importedHistoryEntryAmount = await database
          .historyEntryAmount();
      assert(
        importedHistoryEntryAmount == historyEntries.length,
        'History entry amount should be ${historyEntries.length} after import, but got $importedHistoryEntryAmount',
      );
    });
  });
}
|
||||
148
test/services/archive/v2/archive_librarylists_test.dart
Normal file
148
test/services/archive/v2/archive_librarylists_test.dart
Normal file
@@ -0,0 +1,148 @@
|
||||
import 'dart:io';
|
||||
|
||||
import 'package:flutter_test/flutter_test.dart';
|
||||
import 'package:get_it/get_it.dart';
|
||||
import 'package:mugiten/models/library_list.dart';
|
||||
import 'package:mugiten/services/archive/v2/format.dart';
|
||||
import 'package:sqflite/sqlite_api.dart';
|
||||
|
||||
import '../../../testutils.dart';
|
||||
|
||||
void main() {
  late final String libsqlitePath;
  late final String jadbPath;
  late Directory tmpdir;
  late Database database;

  setUpAll(() {
    // Fail fast with a clear message when the required database paths are
    // not provided via the environment.
    for (final envVar in ['LIBSQLITE_PATH', 'JADB_PATH']) {
      if (!Platform.environment.containsKey(envVar)) {
        throw Exception('$envVar environment variable is not set.');
      }
    }

    // Resolve symlinks up front so the per-test copies operate on the real
    // files rather than on a link.
    libsqlitePath = File(
      Platform.environment['LIBSQLITE_PATH']!,
    ).resolveSymbolicLinksSync();
    jadbPath = File(
      Platform.environment['JADB_PATH']!,
    ).resolveSymbolicLinksSync();
  });

  // Setup sqflite_common_ffi for flutter test
  setUp(() async {
    // Each test runs against a fresh database copy so tests stay isolated.
    database = await createDatabaseCopy(
      libsqlitePath: libsqlitePath,
      jadbPath: jadbPath,
    );

    GetIt.instance.registerSingleton<Database>(database);

    tmpdir = await test_tmpdir();
    tmpdir.libraryDir.createSync();
  });

  tearDown(() async {
    // Capture the path before closing so we do not rely on the Database
    // object after close().
    final jadbCopyPath = database.path;

    await database.close();

    GetIt.instance.unregister<Database>();

    if (File(jadbCopyPath).existsSync()) {
      await File(jadbCopyPath).delete();
    }

    if (tmpdir.existsSync()) {
      await tmpdir.delete(recursive: true);
    }
  });

  test('Full reimport', () async {
    final libraryEntries1 = await createRandomLibraryListEntries(
      db: database,
      kanjiCount: 150,
      jmdictEntryCount: 150,
    );

    final libraryEntries2 = await createRandomLibraryListEntries(
      db: database,
      kanjiCount: 150,
      jmdictEntryCount: 300,
    );

    final libraryEntries3 = await createRandomLibraryListEntries(
      db: database,
      kanjiCount: 150,
      jmdictEntryCount: 150,
    );

    await database.libraryListInsertList('Test List 1');
    await database.libraryListInsertList('Test List 2');
    await database.libraryListInsertList('Test List 3');

    await database.libraryListInsertEntries('Test List 1', libraryEntries1);
    await database.libraryListInsertEntries('Test List 2', libraryEntries2);
    await database.libraryListInsertEntries('Test List 3', libraryEntries3);

    // 3 created lists + the built-in 'favourites' list.
    // NOTE: expect() is used instead of assert(); assert() only runs in
    // debug mode and would raise AssertionError instead of a TestFailure.
    final listCount1 = await database.libraryListAmount();
    expect(
      listCount1,
      4,
      reason:
          'Library list amount should be 4 after insertion, but got $listCount1',
    );

    await exportLibraryLists(database, tmpdir).drain();

    await database.libraryListDeleteList('Test List 1');
    await database.libraryListDeleteList('Test List 2');
    await database.libraryListDeleteList('Test List 3');

    // Only the built-in 'favourites' list should remain.
    final listCount2 = await database.libraryListAmount();
    expect(
      listCount2,
      1,
      reason:
          'Library list amount should be 1 after deletion, but got $listCount2',
    );

    await importLibraryLists(database, tmpdir).drain();

    // The import must restore all three deleted lists.
    final listCount3 = await database.libraryListAmount();
    expect(
      listCount3,
      4,
      reason:
          'Library list amount should be 4 after import, but got $listCount3',
    );
  });

  test('Full reimport favourites', () async {
    final libraryEntries = await createRandomLibraryListEntries(
      db: database,
      kanjiCount: 150,
      jmdictEntryCount: 150,
    );

    await database.libraryListInsertEntries('favourites', libraryEntries);
    final favourites = (await database.libraryListGetLists()).first;
    expect(
      favourites.totalCount,
      libraryEntries.length,
      reason:
          'Favourites entry count should be ${libraryEntries.length} after insertion, but got ${favourites.totalCount}',
    );

    await exportLibraryLists(database, tmpdir).drain();

    await database.libraryListDeleteAllEntries('favourites');
    final emptyFavourites = (await database.libraryListGetLists()).first;
    expect(
      emptyFavourites.totalCount,
      0,
      reason:
          'Favourites entry count should be 0 after deletion, but got ${emptyFavourites.totalCount}',
    );

    await importLibraryLists(database, tmpdir).drain();

    // The import must repopulate the (never deleted) favourites list.
    final importedFavourites = (await database.libraryListGetLists()).first;
    expect(
      importedFavourites.totalCount,
      libraryEntries.length,
      reason:
          'Favourites entry count should be ${libraryEntries.length} after import, but got ${importedFavourites.totalCount}',
    );
  });
}
|
||||
138
test/services/archive/v2/archive_zip_test.dart
Normal file
138
test/services/archive/v2/archive_zip_test.dart
Normal file
@@ -0,0 +1,138 @@
|
||||
import 'dart:io';
|
||||
|
||||
import 'package:flutter_test/flutter_test.dart';
|
||||
import 'package:get_it/get_it.dart';
|
||||
import 'package:mugiten/database/history/table_names.dart';
|
||||
import 'package:mugiten/models/history_entry.dart';
|
||||
import 'package:mugiten/models/library_list.dart';
|
||||
import 'package:mugiten/services/archive/v2/format.dart';
|
||||
import 'package:sqflite/sqlite_api.dart';
|
||||
|
||||
import '../../../testutils.dart';
|
||||
|
||||
void main() {
  late final String libsqlitePath;
  late final String jadbPath;
  late Directory tmpdir;
  late Database database;

  setUpAll(() {
    // Fail fast with a clear message when the required database paths are
    // not provided via the environment.
    for (final envVar in ['LIBSQLITE_PATH', 'JADB_PATH']) {
      if (!Platform.environment.containsKey(envVar)) {
        throw Exception('$envVar environment variable is not set.');
      }
    }

    // Resolve symlinks up front so the per-test copies operate on the real
    // files rather than on a link.
    libsqlitePath = File(
      Platform.environment['LIBSQLITE_PATH']!,
    ).resolveSymbolicLinksSync();
    jadbPath = File(
      Platform.environment['JADB_PATH']!,
    ).resolveSymbolicLinksSync();
  });

  // Setup sqflite_common_ffi for flutter test
  setUp(() async {
    // Each test runs against a fresh database copy so tests stay isolated.
    database = await createDatabaseCopy(
      libsqlitePath: libsqlitePath,
      jadbPath: jadbPath,
    );

    GetIt.instance.registerSingleton<Database>(database);

    tmpdir = await test_tmpdir();
  });

  tearDown(() async {
    // Capture the path before closing so we do not rely on the Database
    // object after close().
    final jadbCopyPath = database.path;

    await database.close();

    GetIt.instance.unregister<Database>();

    if (File(jadbCopyPath).existsSync()) {
      await File(jadbCopyPath).delete();
    }

    if (tmpdir.existsSync()) {
      await tmpdir.delete(recursive: true);
    }
  });

  test('Archive V2 export to and import from zip archive', () async {
    // Insert data
    final historyEntries = await createRandomHistoryEntries(
      db: database,
      count: 300,
    );
    await database.historyEntryInsertEntries(historyEntries);

    final libraryEntriesF = await createRandomLibraryListEntries(
      db: database,
      kanjiCount: 400,
      jmdictEntryCount: 440,
    );
    final libraryEntries1 = await createRandomLibraryListEntries(
      db: database,
      kanjiCount: 150,
      jmdictEntryCount: 150,
    );
    final libraryEntries2 = await createRandomLibraryListEntries(
      db: database,
      kanjiCount: 150,
      jmdictEntryCount: 300,
    );
    final libraryEntries3 = await createRandomLibraryListEntries(
      db: database,
      kanjiCount: 150,
      jmdictEntryCount: 150,
    );

    await database.libraryListInsertList('Test List 1');
    await database.libraryListInsertList('Test List 2');
    await database.libraryListInsertList('Test List 3');

    await database.libraryListInsertEntries('favourites', libraryEntriesF);
    await database.libraryListInsertEntries('Test List 1', libraryEntries1);
    await database.libraryListInsertEntries('Test List 2', libraryEntries2);
    await database.libraryListInsertEntries('Test List 3', libraryEntries3);

    // Export to zip
    final zipFile = File(tmpdir.uri.resolve('export.zip').toFilePath())
      ..createSync();
    await exportData(database, zipFile).drain();

    // Delete all data
    await database.delete(HistoryTableNames.historyEntry);
    await database.libraryListDeleteAllEntries('favourites');
    await database.libraryListDeleteList('Test List 1');
    await database.libraryListDeleteList('Test List 2');
    await database.libraryListDeleteList('Test List 3');

    // Import from zip
    await importData(database, zipFile).drain();

    // Verify data.
    // NOTE: expect() is used instead of assert(); assert() only runs in
    // debug mode and would raise AssertionError instead of a TestFailure.
    final int historyEntryAmount = await database.historyEntryAmount();
    expect(
      historyEntryAmount,
      historyEntries.length,
      reason:
          'History entry amount should be ${historyEntries.length} after import, but got $historyEntryAmount',
    );

    final favourites = (await database.libraryListGetLists()).firstWhere(
      (final list) => list.name == 'favourites',
    );
    expect(
      favourites.totalCount,
      libraryEntriesF.length,
      reason:
          'Favourites entry count should be ${libraryEntriesF.length} after import, but got ${favourites.totalCount}',
    );

    // 3 restored lists + the built-in 'favourites' list.
    final listCount = await database.libraryListAmount();
    expect(
      listCount,
      4,
      reason:
          'Library list amount should be 4 after import, but got $listCount',
    );
  });
}
|
||||
Reference in New Issue
Block a user