WIP: services/archive/v2: init
This commit is contained in:
@@ -11,7 +11,7 @@ import 'package:mugiten/main.dart';
|
||||
import 'package:mugiten/models/history_entry.dart';
|
||||
import 'package:mugiten/models/library_list.dart';
|
||||
import 'package:mugiten/routing/routes.dart';
|
||||
import 'package:mugiten/services/data_export_import.dart';
|
||||
import 'package:mugiten/services/archive/v1/format.dart';
|
||||
import 'package:mugiten/services/snackbar.dart';
|
||||
import 'package:mugiten/settings.dart';
|
||||
import 'package:mugiten/theme.dart';
|
||||
|
||||
@@ -0,0 +1,123 @@
|
||||
import 'dart:convert';
|
||||
import 'dart:core';
|
||||
import 'dart:io';
|
||||
|
||||
import 'package:archive/archive_io.dart';
|
||||
import 'package:mugiten/database/library_list/table_names.dart';
|
||||
import 'package:mugiten/models/history_entry.dart';
|
||||
import 'package:mugiten/models/library_list.dart';
|
||||
import 'package:sqflite_common_ffi/sqflite_ffi.dart';
|
||||
|
||||
part './history.dart';
|
||||
part './library_lists.dart';
|
||||
|
||||
|
||||
// Example file Structure:
|
||||
// jisho_data_2022.01.01_1
|
||||
// - history.json
|
||||
// - library/
|
||||
// - lista.json
|
||||
// - listb.json
|
||||
|
||||
/// Accessors for the well-known files inside an unpacked v1 archive
/// directory (see the example structure above).
extension ArchiveFormat on Directory {
  /// The `version.txt` file holding the data format version.
  File get versionFile => File(uri.resolve('version.txt').toFilePath());

  /// The `history.json` file holding the exported history entries.
  File get historyFile => File(uri.resolve('history.json').toFilePath());

  /// The `library/` subdirectory holding one json file per library list.
  Directory get libraryDir => Directory(uri.resolve('library').toFilePath());

  /// All `*.json` files directly inside [libraryDir].
  List<File> get libraryListFiles {
    final entries = libraryDir.listSync();
    return [
      for (final entry in entries)
        if (entry is File && entry.path.endsWith('.json')) entry,
    ];
  }

  /// The library list names: the file names in [libraryDir] stripped of
  /// their `.json` suffix.
  List<String> get libraryListNames {
    final jsonSuffix = RegExp(r'\.json$');
    return [
      for (final file in libraryListFiles)
        file.uri.pathSegments.last.replaceFirst(jsonSuffix, ''),
    ];
  }
}
|
||||
|
||||
/// Creates a temporary directory for storing exported data files before
/// zipping them.
Future<Directory> tmpdir() {
  return Directory.systemTemp.createTemp('mugiten_data_');
}
|
||||
|
||||
/// Unpacks the given zip file to a temporary directory and returns the
/// directory.
Future<Directory> unpackZipToTempDir(final String zipFilePath) async {
  final destination = await tmpdir();
  await extractFileToDisk(zipFilePath, destination.path);
  return destination;
}
|
||||
|
||||
/// Packs the given directory into a zip file.
///
/// If [outputFile] is provided, it will be used as the output file and
/// created if it does not exist yet. Otherwise, a new temporary file will
/// be created.
Future<File> packZip(final Directory dir, {File? outputFile}) async {
  if (outputFile == null) {
    // No target given: place the archive in a fresh temporary directory.
    final outputDir = await tmpdir();
    outputFile = File(outputDir.uri.resolve('mugiten_data.zip').toFilePath());
  }
  // Bug fix: previously a caller-supplied [outputFile] that did not exist
  // yet was silently discarded and replaced by a temporary file. Now the
  // caller's path is honored and simply created on demand.
  if (!outputFile.existsSync()) {
    outputFile.createSync(recursive: true);
  }

  // Archive the directory contents without the top-level directory name so
  // the zip root contains version.txt / history.json / library/ directly.
  final archive = createArchiveFromDirectory(dir, includeDirName: false);

  // autoClose flushes and closes the output stream once encoding is done.
  final outputStream = OutputFileStream(outputFile.path);
  ZipEncoder().encodeStream(archive, outputStream, autoClose: true);

  return outputFile;
}
|
||||
|
||||
/// Generates a file name for the exported data file based on the current
/// date, without the file extension.
String getExportFileNameNoSuffix() {
  // Zero-pads month/day to two digits, e.g. 2022.01.01.
  String twoDigits(final int value) => value.toString().padLeft(2, '0');

  final now = DateTime.now();
  final date = '${now.year}.${twoDigits(now.month)}.${twoDigits(now.day)}';
  return 'mugiten_data_$date';
}
|
||||
|
||||
/// Exports all user data (format version, history, library lists) from [db]
/// into a freshly created zip file and returns that file.
Future<File> exportData(final DatabaseExecutor db) async {
  final stagingDir = await tmpdir();

  final libraryDir = Directory(
    stagingDir.uri.resolve('library').toFilePath(),
  );
  libraryDir.createSync();

  // The three exports write to disjoint files, so run them concurrently.
  await Future.wait([
    exportDataFormatVersionTo(stagingDir),
    exportHistoryTo(db, stagingDir),
    exportLibraryListsTo(db, libraryDir),
  ]);

  return packZip(stagingDir);
}
|
||||
|
||||
/// Imports all user data from [zipFile] into [db].
///
/// Unpacks the archive into a temporary directory, imports history and
/// library lists from it, then removes the temporary directory.
///
/// NOTE(review): the two imports run concurrently against the same database
/// and importHistoryFrom opens a transaction — confirm the driver tolerates
/// overlapping operations. Also, if either import throws, the temporary
/// directory is leaked (deleteSync is not in a finally) — consider try/finally.
Future<void> importData(final Database db, final File zipFile) async {
  final dir = await unpackZipToTempDir(zipFile.path);

  await Future.wait([
    importHistoryFrom(db, dir.historyFile),
    importLibraryListsFrom(db, dir.libraryDir),
  ]);

  // Clean up the unpacked archive once the imports have completed.
  dir.deleteSync(recursive: true);
}
|
||||
|
||||
/////////////////////////
// DATA FORMAT VERSION //
/////////////////////////

/// The archive data format version this code writes (and expects on import).
const int expectedDataFormatVersion = 1;

/// Writes [expectedDataFormatVersion] into the archive's `version.txt`
/// inside [dir].
Future<void> exportDataFormatVersionTo(final Directory dir) async {
  final file = dir.versionFile;
  file.createSync();
  file.writeAsStringSync('$expectedDataFormatVersion');
}
|
||||
|
||||
/// Reads the data format version from [file] (the archive's `version.txt`).
///
/// Throws a [FormatException] if the file content is not an integer.
Future<int> importDataFormatVersionFrom(final File file) async {
  final String content = file.readAsStringSync();
  // Robustness fix: tolerate surrounding whitespace (e.g. a trailing
  // newline added by editors or other tools); int.parse rejects it raw.
  return int.parse(content.trim());
}
|
||||
@@ -0,0 +1,23 @@
|
||||
part of './format.dart';
|
||||
|
||||
/// Exports all history entries from [db] into `history.json` inside [dir].
Future<void> exportHistoryTo(
  final DatabaseExecutor db,
  final Directory dir,
) async {
  final file = dir.historyFile;
  file.createSync();

  // Serialize every history entry to its json representation.
  final entries = await db.historyEntryGetAll();
  final List<Map<String, Object?>> jsonEntries = [
    for (final entry in entries) entry.toJson(),
  ];

  file.writeAsStringSync(jsonEncode(jsonEntries));
}
|
||||
|
||||
/// Imports history entries from [file] (a json array of entry objects)
/// into [db] within a single transaction.
Future<void> importHistoryFrom(final Database db, final File file) async {
  final decoded = jsonDecode(file.readAsStringSync()) as List;
  final List<Map<String, Object?>> json = [
    for (final entry in decoded) entry as Map<String, Object?>,
  ];
  // One transaction so a failed import leaves no partial history behind.
  await db.transaction(
    (final txn) => txn.historyEntryInsertManyFromJson(json),
  );
}
|
||||
@@ -0,0 +1,73 @@
|
||||
part of './format.dart';
|
||||
|
||||
/// Exports every library list in [db] as its own json file inside [dir].
Future<void> exportLibraryListsTo(
  final DatabaseExecutor db,
  final Directory dir,
) async {
  // Fetch just the list names; each list's entries are loaded per export.
  final rows = await db.query(
    LibraryListTableNames.libraryList,
    columns: ['name'],
  );
  final libraryNames = [for (final row in rows) row['name'] as String];

  // Each list is written to its own file, so the exports can run concurrently.
  await Future.wait(
    libraryNames.map((final name) => exportLibraryListTo(db, name, dir)),
  );
}
|
||||
|
||||
/// Exports a single library list [libraryName] from [db] to
/// `<libraryName>.json` inside [dir].
///
/// Throws a [StateError] if the list cannot be found in the database.
Future<void> exportLibraryListTo(
  final DatabaseExecutor db,
  final String libraryName,
  final Directory dir,
) async {
  final file = File(dir.uri.resolve('$libraryName.json').toFilePath());
  await file.create();

  // Resolves the old "TODO: properly null check": fail with a descriptive
  // error instead of a bare `!` null assertion.
  final list = await db.libraryListGetListEntries(libraryName);
  if (list == null) {
    throw StateError('Library list "$libraryName" not found during export.');
  }

  final entries = [for (final e in list.entries) e.toJson()];
  await file.writeAsString(jsonEncode(entries));
}
|
||||
|
||||
// TODO: how do we handle lists that already exist? There seems to be no good way to merge them?
/// Imports every library list json file from [libraryListsDir] into [db].
///
/// For each `<name>.json` file:
/// - if a list called `<name>` already exists and is non-empty, the file is
///   skipped entirely (no merge is attempted — see the TODO above);
/// - if it exists but is empty, the file's entries are imported into it;
/// - otherwise a new empty list is created first and then filled.
Future<void> importLibraryListsFrom(
  final DatabaseExecutor db,
  final Directory libraryListsDir,
) async {
  for (final file in libraryListsDir.libraryListFiles) {
    // The list name is the file name without its `.json` suffix.
    final libraryName = file.uri.pathSegments.last.replaceFirst(
      RegExp(r'\.json$'),
      '',
    );

    if (await db.libraryListExists(libraryName)) {
      // NOTE(review): the `!` assumes the list is still present between the
      // existence check and this fetch — confirm no concurrent deletion.
      if ((await db.libraryListGetList(libraryName))!.totalCount > 0) {
        print(
          'Library list "$libraryName" already exists and is not empty. Skipping import.',
        );
        continue;
      } else {
        print(
          'Library list "$libraryName" already exists but is empty. '
          'Importing entries from file ${file.path}.',
        );
      }
    } else {
      await db.libraryListInsertList(libraryName);
    }

    // Decode the file as a json array of entry objects.
    final content = await file.readAsString();
    final List<Map<String, Object?>> jsonEntries = (jsonDecode(content) as List)
        .map((final e) => e as Map<String, Object?>)
        .toList();

    await db.libraryListInsertJsonEntriesForSingleList(
      libraryName,
      jsonEntries,
    );
  }
}
|
||||
@@ -0,0 +1,76 @@
|
||||
import 'dart:convert';
|
||||
import 'dart:core';
|
||||
import 'dart:io';
|
||||
|
||||
import 'package:collection/collection.dart';
|
||||
|
||||
part './history.dart';
|
||||
part './library_lists.dart';
|
||||
|
||||
/// The archive data format version this (v2) code reads and writes.
const int expectedDataFormatVersion = 2;
/// Chunk size for `history/<n>.json` files — presumably entries per chunk;
/// TODO(review): confirm once chunked export is implemented (WIP).
const int historyChunkSize = 100;
/// Chunk size for `library/<list>/<n>.json` files — presumably entries per
/// chunk; TODO(review): confirm once chunked export is implemented (WIP).
const int libraryListChunkSize = 100;
|
||||
|
||||
// Example file Structure:
|
||||
// jisho_data_2022.01.01_1
|
||||
// - history/
|
||||
// - 1.json
|
||||
// - 2.json
|
||||
// - ...
|
||||
// - library/
|
||||
// - lista/
|
||||
// - metadata.json
|
||||
// - 1.json
|
||||
// - 2.json
|
||||
// - ...
|
||||
// - listb/
|
||||
// - metadata.json
|
||||
// - 1.json
|
||||
// - 2.json
|
||||
// - ...
|
||||
|
||||
/// Accessors for the well-known files inside an unpacked v2 archive
/// directory (see the example structure above).
extension ArchiveFormat on Directory {
  /// The `version.txt` file holding the data format version.
  File get versionFile => File(uri.resolve('version.txt').toFilePath());

  /// The data format version parsed from [versionFile].
  ///
  /// Trims the content so a trailing newline does not break parsing.
  int get version => int.parse(versionFile.readAsStringSync().trim());

  // History //

  /// The `history/` subdirectory holding the chunked history files.
  Directory get historyDir => Directory(uri.resolve('history').toFilePath());

  /// The history chunk files (`history/<n>.json`), sorted numerically by
  /// chunk index.
  ///
  /// Consistency/robustness fix: only `*.json` files are considered (the v1
  /// format code already filtered this way); stray files would otherwise be
  /// treated as chunks. Unparsable names sort as index 0.
  List<File> get historyChunks {
    int chunkIndex(final File f) =>
        int.tryParse(
          f.uri.pathSegments.last.replaceFirst(RegExp(r'\.json$'), ''),
        ) ??
        0;

    final chunks = historyDir
        .listSync()
        .whereType<File>()
        .where((final f) => f.path.endsWith('.json'))
        .toList();
    chunks.sort((final a, final b) => chunkIndex(a).compareTo(chunkIndex(b)));
    return chunks;
  }

  /// The number of history chunk files (counts the same files as
  /// [historyChunks], so the two can never disagree).
  int get historyChunkCount => historyChunks.length;

  // Library Lists //

  /// The `library/` subdirectory with one subdirectory per library list.
  Directory get libraryDir => Directory(uri.resolve('library').toFilePath());

  /// One directory per library list inside [libraryDir].
  List<Directory> get libraryListDirs =>
      libraryDir.listSync().whereType<Directory>().toList();

  /// The decoded `metadata.json` of every library list directory, in the
  /// same order as [libraryListDirs].
  List<Map<String, Object?>> get libraryListMetadata => [
        for (final d in libraryListDirs)
          jsonDecode(
            File(
              d.uri.resolve('metadata.json').toFilePath(),
            ).readAsStringSync(),
          ) as Map<String, Object?>,
      ];

  /// The number of entry chunk files per library list directory, in the
  /// same order as [libraryListDirs].
  ///
  /// Robustness fix: counts only `*.json` chunk files and excludes
  /// `metadata.json` explicitly — the previous `length - 1` went wrong when
  /// `metadata.json` was missing or stray files were present.
  List<int> get libraryListEntryCounts => [
        for (final d in libraryListDirs)
          d
              .listSync()
              .whereType<File>()
              .where(
                (final f) =>
                    f.path.endsWith('.json') &&
                    f.uri.pathSegments.last != 'metadata.json',
              )
              .length,
      ];
}
|
||||
@@ -0,0 +1 @@
|
||||
part of './format.dart';
|
||||
@@ -0,0 +1 @@
|
||||
part of './format.dart';
|
||||
@@ -1,209 +0,0 @@
|
||||
import 'dart:convert';
|
||||
import 'dart:core';
|
||||
import 'dart:io';
|
||||
|
||||
import 'package:archive/archive_io.dart';
|
||||
import 'package:mugiten/database/library_list/table_names.dart';
|
||||
import 'package:mugiten/models/history_entry.dart';
|
||||
import 'package:mugiten/models/library_list.dart';
|
||||
import 'package:sqflite_common_ffi/sqflite_ffi.dart';
|
||||
|
||||
// Example file Structure:
|
||||
// jisho_data_2022.01.01_1
|
||||
// - history.json
|
||||
// - library/
|
||||
// - lista.json
|
||||
// - listb.json
|
||||
|
||||
extension ArchiveFormat on Directory {
|
||||
File get versionFile => File(uri.resolve('version.txt').toFilePath());
|
||||
File get historyFile => File(uri.resolve('history.json').toFilePath());
|
||||
Directory get libraryDir => Directory(uri.resolve('library').toFilePath());
|
||||
}
|
||||
|
||||
Future<Directory> tmpdir() => Directory.systemTemp.createTemp('mugiten_data_');
|
||||
|
||||
Future<Directory> unpackZipToTempDir(final String zipFilePath) async {
|
||||
final outputDir = await tmpdir();
|
||||
await extractFileToDisk(zipFilePath, outputDir.path);
|
||||
return outputDir;
|
||||
}
|
||||
|
||||
Future<File> packZip(final Directory dir, {File? outputFile}) async {
|
||||
if (outputFile == null || !outputFile.existsSync()) {
|
||||
final outputDir = await tmpdir();
|
||||
outputFile = File(outputDir.uri.resolve('mugiten_data.zip').toFilePath())
|
||||
..createSync();
|
||||
}
|
||||
|
||||
final archive = createArchiveFromDirectory(dir, includeDirName: false);
|
||||
|
||||
final outputStream = OutputFileStream(outputFile.path);
|
||||
ZipEncoder().encodeStream(archive, outputStream, autoClose: true);
|
||||
|
||||
return outputFile;
|
||||
}
|
||||
|
||||
String getExportFileNameNoSuffix() {
|
||||
final DateTime today = DateTime.now();
|
||||
final String formattedDate =
|
||||
'${today.year}'
|
||||
'.${today.month.toString().padLeft(2, '0')}'
|
||||
'.${today.day.toString().padLeft(2, '0')}';
|
||||
|
||||
return 'mugiten_data_$formattedDate';
|
||||
}
|
||||
|
||||
Future<File> exportData(final DatabaseExecutor db) async {
|
||||
final dir = await tmpdir();
|
||||
|
||||
final libraryDir = Directory(dir.uri.resolve('library').toFilePath())
|
||||
..createSync();
|
||||
|
||||
await Future.wait([
|
||||
exportDataFormatVersionTo(dir),
|
||||
exportHistoryTo(db, dir),
|
||||
exportLibraryListsTo(db, libraryDir),
|
||||
]);
|
||||
|
||||
final zipFile = await packZip(dir);
|
||||
|
||||
return zipFile;
|
||||
}
|
||||
|
||||
Future<void> importData(final Database db, final File zipFile) async {
|
||||
final dir = await unpackZipToTempDir(zipFile.path);
|
||||
|
||||
await Future.wait([
|
||||
importHistoryFrom(db, dir.historyFile),
|
||||
importLibraryListsFrom(db, dir.libraryDir),
|
||||
]);
|
||||
|
||||
dir.deleteSync(recursive: true);
|
||||
}
|
||||
|
||||
/////////////////////////
|
||||
// DATA FORMAT VERSION //
|
||||
/////////////////////////
|
||||
|
||||
const int expectedDataFormatVersion = 1;
|
||||
|
||||
Future<void> exportDataFormatVersionTo(final Directory dir) async {
|
||||
dir.versionFile
|
||||
..createSync()
|
||||
..writeAsStringSync(expectedDataFormatVersion.toString());
|
||||
}
|
||||
|
||||
Future<int> importDataFormatVersionFrom(final File file) async {
|
||||
final String content = file.readAsStringSync();
|
||||
return int.parse(content);
|
||||
}
|
||||
|
||||
/////////////
|
||||
// HISTORY //
|
||||
/////////////
|
||||
|
||||
Future<void> exportHistoryTo(
|
||||
final DatabaseExecutor db,
|
||||
final Directory dir,
|
||||
) async {
|
||||
final file = dir.historyFile..createSync();
|
||||
|
||||
final List<Map<String, Object?>> jsonEntries = (await db.historyEntryGetAll())
|
||||
.map((final e) => e.toJson())
|
||||
.toList();
|
||||
|
||||
file.writeAsStringSync(jsonEncode(jsonEntries));
|
||||
}
|
||||
|
||||
Future<void> importHistoryFrom(final Database db, final File file) async {
|
||||
final String content = file.readAsStringSync();
|
||||
final List<Map<String, Object?>> json = (jsonDecode(content) as List)
|
||||
.map((final h) => h as Map<String, Object?>)
|
||||
.toList();
|
||||
// log('Importing ${json.length} entries from ${file.path}');
|
||||
await db.transaction((final txn) => txn.historyEntryInsertManyFromJson(json));
|
||||
}
|
||||
|
||||
///////////////////
|
||||
// LIBRARY LISTS //
|
||||
///////////////////
|
||||
|
||||
Future<void> exportLibraryListsTo(
|
||||
final DatabaseExecutor db,
|
||||
final Directory dir,
|
||||
) async {
|
||||
final libraryNames = await db
|
||||
.query(LibraryListTableNames.libraryList, columns: ['name'])
|
||||
.then(
|
||||
(final result) =>
|
||||
result.map((final row) => row['name'] as String).toList(),
|
||||
);
|
||||
|
||||
await Future.wait([
|
||||
for (final libraryName in libraryNames)
|
||||
exportLibraryListTo(db, libraryName, dir),
|
||||
]);
|
||||
}
|
||||
|
||||
Future<void> exportLibraryListTo(
|
||||
final DatabaseExecutor db,
|
||||
final String libraryName,
|
||||
final Directory dir,
|
||||
) async {
|
||||
final file = File(dir.uri.resolve('$libraryName.json').toFilePath());
|
||||
await file.create();
|
||||
|
||||
// TODO: properly null check
|
||||
final entries = (await db.libraryListGetListEntries(
|
||||
libraryName,
|
||||
))!.entries.map((final e) => e.toJson()).toList();
|
||||
|
||||
await file.writeAsString(jsonEncode(entries));
|
||||
}
|
||||
|
||||
// TODO: how do we handle lists that already exist? There seems to be no good way to merge them?
|
||||
Future<void> importLibraryListsFrom(
|
||||
final DatabaseExecutor db,
|
||||
final Directory libraryListsDir,
|
||||
) async {
|
||||
for (final file in libraryListsDir.listSync()) {
|
||||
if (file is! File) continue;
|
||||
|
||||
assert(
|
||||
file.path.endsWith('.json'),
|
||||
'Expected all files in library directory to be json files, but found ${file.path}',
|
||||
);
|
||||
|
||||
final libraryName = file.uri.pathSegments.last.replaceFirst(
|
||||
RegExp(r'\.json$'),
|
||||
'',
|
||||
);
|
||||
|
||||
if (await db.libraryListExists(libraryName)) {
|
||||
if ((await db.libraryListGetList(libraryName))!.totalCount > 0) {
|
||||
print(
|
||||
'Library list "$libraryName" already exists and is not empty. Skipping import.',
|
||||
);
|
||||
continue;
|
||||
} else {
|
||||
print(
|
||||
'Library list "$libraryName" already exists but is empty. '
|
||||
'Importing entries from file ${file.path}.',
|
||||
);
|
||||
}
|
||||
} else {
|
||||
await db.libraryListInsertList(libraryName);
|
||||
}
|
||||
|
||||
final content = await file.readAsString();
|
||||
final List<Map<String, Object?>> jsonEntries = (jsonDecode(content) as List)
|
||||
.map((final e) => e as Map<String, Object?>)
|
||||
.toList();
|
||||
|
||||
await db.libraryListInsertJsonEntriesForSingleList(
|
||||
libraryName,
|
||||
jsonEntries,
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -11,7 +11,8 @@ import 'package:mugiten/database/database.dart'
|
||||
openAndMigrateDatabase,
|
||||
openDatabaseWithoutMigrations,
|
||||
readMigrationsFromAssets;
|
||||
import 'package:mugiten/services/data_export_import.dart';
|
||||
import 'package:mugiten/services/archive/v1/format.dart'
|
||||
show exportData, importData;
|
||||
import 'package:mugiten/services/initialization/initialization_status.dart';
|
||||
import 'package:path_provider/path_provider.dart';
|
||||
|
||||
|
||||
Reference in New Issue
Block a user